repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/tree.rs | crates/forge/src/cmd/tree.rs | use clap::Parser;
use eyre::Result;
use foundry_cli::{opts::ProjectPathOpts, utils::LoadConfig};
use foundry_compilers::{
Graph,
resolver::{Charset, TreeOptions},
};
/// CLI arguments for `forge tree`.
// NOTE: the `///` field docs below double as clap help text and are left untouched.
#[derive(Clone, Debug, Parser)]
pub struct TreeArgs {
    /// Do not de-duplicate (repeats all shared dependencies)
    #[arg(long)]
    no_dedupe: bool,

    /// Character set to use in output.
    ///
    /// [possible values: utf8, ascii]
    // clap parses the flag value into `Charset`; defaults to UTF-8 box-drawing output.
    #[arg(long, default_value = "utf8")]
    charset: Charset,

    // Standard project path options (root, source/lib dirs, ...), flattened into this command.
    #[command(flatten)]
    project_paths: ProjectPathOpts,
}
// Figment conversion boilerplate so `TreeArgs` can load the foundry `Config`
// (enables `self.load_config()` in `TreeArgs::run`).
foundry_config::impl_figment_convert!(TreeArgs, project_paths);
impl TreeArgs {
    /// Resolves the project's import graph and prints it as a dependency tree,
    /// honoring the configured charset and de-duplication options.
    pub fn run(self) -> Result<()> {
        let config = self.load_config()?;
        let tree_opts = TreeOptions { charset: self.charset, no_dedupe: self.no_dedupe };
        // Resolve the full source graph for the configured project paths, then render it.
        let graph = <Graph>::resolve(&config.project_paths())?;
        graph.print_with_options(tree_opts);
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/selectors.rs | crates/forge/src/cmd/selectors.rs | use alloy_primitives::hex;
use clap::Parser;
use comfy_table::{Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN};
use eyre::Result;
use foundry_cli::{
opts::{BuildOpts, CompilerOpts, ProjectPathOpts},
utils::{FoundryPathExt, cache_local_signatures, cache_signatures_from_abis},
};
use foundry_common::{
compile::{PathOrContractInfo, ProjectCompiler, compile_target},
selectors::{SelectorImportData, import_selectors},
shell,
};
use foundry_compilers::{artifacts::output_selection::ContractOutputSelection, info::ContractInfo};
use std::{collections::BTreeMap, fs::canonicalize};
/// CLI arguments for `forge selectors`.
// NOTE: `///` docs and `#[arg(help = ...)]` strings are clap help text and are left untouched.
#[derive(Clone, Debug, Parser)]
pub enum SelectorsSubcommands {
    /// Check for selector collisions between contracts
    #[command(visible_alias = "co")]
    Collision {
        /// The first of the two contracts for which to look selector collisions for, in the form
        /// `(<path>:)?<contractname>`.
        first_contract: ContractInfo,

        /// The second of the two contracts for which to look selector collisions for, in the form
        /// `(<path>:)?<contractname>`.
        second_contract: ContractInfo,

        // Boxed so this large flattened options struct doesn't inflate every enum variant.
        #[command(flatten)]
        build: Box<BuildOpts>,
    },

    /// Upload selectors to registry
    #[command(visible_alias = "up")]
    Upload {
        /// The name of the contract to upload selectors for.
        /// Can also be in form of `path:contract name`.
        // Exactly one of `contract` / `--all` must be given (mutually required by clap).
        #[arg(required_unless_present = "all")]
        contract: Option<PathOrContractInfo>,

        /// Upload selectors for all contracts in the project.
        #[arg(long, required_unless_present = "contract")]
        all: bool,

        #[command(flatten)]
        project_paths: ProjectPathOpts,
    },

    /// List selectors from current workspace
    #[command(visible_alias = "ls")]
    List {
        /// The name of the contract to list selectors for.
        // When omitted, all non-test source contracts are listed.
        #[arg(help = "The name of the contract to list selectors for.")]
        contract: Option<String>,

        #[command(flatten)]
        project_paths: ProjectPathOpts,

        #[arg(long, help = "Do not group the selectors by contract in separate tables.")]
        no_group: bool,
    },

    /// Find if a selector is present in the project
    #[command(visible_alias = "f")]
    Find {
        /// The selector to search for
        // Matched as a byte prefix, so partial selectors are accepted.
        #[arg(help = "The selector to search for (with or without 0x prefix)")]
        selector: String,

        #[command(flatten)]
        project_paths: ProjectPathOpts,
    },

    /// Cache project selectors (enables trace with local contracts functions and events).
    #[command(visible_alias = "c")]
    Cache {
        #[arg(long, help = "Path to a folder containing additional abis to include in the cache")]
        extra_abis_path: Option<String>,

        #[command(flatten)]
        project_paths: ProjectPathOpts,
    },
}
impl SelectorsSubcommands {
    /// Executes the selected `forge selectors` subcommand.
    ///
    /// Every variant compiles the project (with ABI output enabled where needed)
    /// and then operates on the resulting artifacts.
    pub async fn run(self) -> Result<()> {
        match self {
            Self::Cache { project_paths, extra_abis_path } => {
                // Optionally seed the signature cache from user-supplied ABI files first.
                if let Some(extra_abis_path) = extra_abis_path {
                    sh_println!("Caching selectors for ABIs at {extra_abis_path}")?;
                    cache_signatures_from_abis(extra_abis_path)?;
                }
                sh_println!("Caching selectors for contracts in the project...")?;

                // Request ABI output so signatures can be extracted from the artifacts.
                let build_args = BuildOpts {
                    project_paths,
                    compiler: CompilerOpts {
                        extra_output: vec![ContractOutputSelection::Abi],
                        ..Default::default()
                    },
                    ..Default::default()
                };

                // compile the project to get the artifacts/abis
                let project = build_args.project()?;
                let outcome = ProjectCompiler::new().quiet(true).compile(&project)?;
                cache_local_signatures(&outcome)?;
            }
            Self::Upload { contract, all, project_paths } => {
                let build_args = BuildOpts {
                    project_paths: project_paths.clone(),
                    compiler: CompilerOpts {
                        extra_output: vec![ContractOutputSelection::Abi],
                        ..Default::default()
                    },
                    ..Default::default()
                };

                let project = build_args.project()?;
                let output = if let Some(contract_info) = &contract {
                    let Some(contract_name) = contract_info.name() else {
                        eyre::bail!("No contract name provided.")
                    };
                    // Compile only the file containing the requested contract.
                    let target_path = contract_info
                        .path()
                        .map(Ok)
                        .unwrap_or_else(|| project.find_contract_path(contract_name))?;
                    compile_target(&target_path, &project, false)?
                } else {
                    ProjectCompiler::new().compile(&project)?
                };

                let artifacts = if all {
                    // Every contract under the sources dir, excluding Solidity tests.
                    output
                        .into_artifacts_with_files()
                        .filter(|(file, _, _)| {
                            let is_sources_path = file.starts_with(&project.paths.sources);
                            let is_test = file.is_sol_test();
                            is_sources_path && !is_test
                        })
                        .map(|(_, contract, artifact)| (contract, artifact))
                        .collect()
                } else {
                    // `contract` is guaranteed `Some` here (clap: required_unless_present = "all"),
                    // and its name was validated above before compiling.
                    let contract_info = contract.unwrap();
                    let contract = contract_info.name().unwrap().to_string();
                    let found_artifact = if let Some(path) = contract_info.path() {
                        output.find(project.root().join(path).as_path(), &contract)
                    } else {
                        output.find_first(&contract)
                    };
                    let artifact = found_artifact
                        .ok_or_else(|| {
                            eyre::eyre!(
                                "Could not find artifact `{contract}` in the compiled artifacts"
                            )
                        })?
                        .clone();
                    vec![(contract, artifact)]
                };

                let mut artifacts = artifacts.into_iter().peekable();
                while let Some((contract, artifact)) = artifacts.next() {
                    let abi = artifact.abi.ok_or_else(|| eyre::eyre!("Unable to fetch abi"))?;
                    // Nothing to upload for an interface-less contract.
                    if abi.functions.is_empty() && abi.events.is_empty() && abi.errors.is_empty() {
                        continue;
                    }

                    sh_println!("Uploading selectors for {contract}...")?;

                    // upload abi to selector database
                    import_selectors(SelectorImportData::Abi(vec![abi])).await?.describe();

                    // Blank line between contracts, but not after the last one.
                    if artifacts.peek().is_some() {
                        sh_println!()?
                    }
                }
            }
            Self::Collision { mut first_contract, mut second_contract, build } => {
                // Compile the project with the two contracts included
                let project = build.project()?;
                let mut compiler = ProjectCompiler::new().quiet(true);
                if let Some(contract_path) = &mut first_contract.path {
                    let target_path = canonicalize(&*contract_path)?;
                    *contract_path = target_path.to_string_lossy().to_string();
                    compiler = compiler.files([target_path]);
                }
                if let Some(contract_path) = &mut second_contract.path {
                    let target_path = canonicalize(&*contract_path)?;
                    *contract_path = target_path.to_string_lossy().to_string();
                    compiler = compiler.files([target_path]);
                }
                let output = compiler.compile(&project)?;

                // Check method selectors for collisions
                let methods = |contract: &ContractInfo| -> eyre::Result<_> {
                    let artifact = output
                        .find_contract(contract)
                        .ok_or_else(|| eyre::eyre!("Could not find artifact for {contract}"))?;
                    artifact.method_identifiers.as_ref().ok_or_else(|| {
                        eyre::eyre!("Could not find method identifiers for {contract}")
                    })
                };
                let first_method_map = methods(&first_contract)?;
                let second_method_map = methods(&second_contract)?;

                // Pair signatures from both contracts that share a selector:
                // (selector, first-contract signature, second-contract signature).
                let colliding_methods: Vec<(&String, &String, &String)> = first_method_map
                    .iter()
                    .filter_map(|(k1, v1)| {
                        second_method_map
                            .iter()
                            .find_map(|(k2, v2)| if **v2 == *v1 { Some((k2, v2)) } else { None })
                            .map(|(k2, v2)| (v2, k1, k2))
                    })
                    .collect();

                if colliding_methods.is_empty() {
                    sh_println!("No colliding method selectors between the two contracts.")?;
                } else {
                    let mut table = Table::new();
                    if shell::is_markdown() {
                        table.load_preset(ASCII_MARKDOWN);
                    } else {
                        table.apply_modifier(UTF8_ROUND_CORNERS);
                    }
                    table.set_header([
                        String::from("Selector"),
                        first_contract.name,
                        second_contract.name,
                    ]);
                    for method in &colliding_methods {
                        table.add_row([method.0, method.1, method.2]);
                    }
                    sh_println!("{} collisions found:", colliding_methods.len())?;
                    sh_println!("\n{table}\n")?;
                }
            }
            Self::List { contract, project_paths, no_group } => {
                sh_println!("Listing selectors for contracts in the project...")?;
                let build_args = BuildOpts {
                    project_paths,
                    compiler: CompilerOpts {
                        extra_output: vec![ContractOutputSelection::Abi],
                        ..Default::default()
                    },
                    ..Default::default()
                };

                // compile the project to get the artifacts/abis
                let project = build_args.project()?;
                let outcome = ProjectCompiler::new().quiet(true).compile(&project)?;
                let artifacts = if let Some(contract) = contract {
                    let found_artifact = outcome.find_first(&contract);
                    let artifact = found_artifact
                        .ok_or_else(|| {
                            // Offer a "did you mean" hint based on compiled artifact names.
                            let candidates = outcome
                                .artifacts()
                                .map(|(name, _)| name)
                                .collect::<Vec<_>>();
                            let suggestion = if let Some(suggestion) = foundry_cli::utils::did_you_mean(&contract, candidates).pop() {
                                format!("\nDid you mean `{suggestion}`?")
                            } else {
                                String::new()
                            };
                            eyre::eyre!(
                                "Could not find artifact `{contract}` in the compiled artifacts{suggestion}",
                            )
                        })?
                        .clone();
                    vec![(contract, artifact)]
                } else {
                    outcome
                        .into_artifacts_with_files()
                        .filter(|(file, _, _)| {
                            let is_sources_path = file.starts_with(&project.paths.sources);
                            let is_test = file.is_sol_test();
                            is_sources_path && !is_test
                        })
                        .map(|(_, contract, artifact)| (contract, artifact))
                        .collect()
                };
                let mut artifacts = artifacts.into_iter().peekable();

                /// Kind of ABI item a selector belongs to; `Ord` drives row grouping order.
                #[derive(PartialEq, PartialOrd, Eq, Ord)]
                enum SelectorType {
                    Function,
                    Event,
                    Error,
                }
                impl std::fmt::Display for SelectorType {
                    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        match self {
                            Self::Function => write!(f, "Function"),
                            Self::Event => write!(f, "Event"),
                            Self::Error => write!(f, "Error"),
                        }
                    }
                }

                // contract name -> selector type -> (selector hex, signature)
                let mut selectors =
                    BTreeMap::<String, BTreeMap<SelectorType, Vec<(String, String)>>>::new();
                for (contract, artifact) in artifacts.by_ref() {
                    let abi = artifact.abi.ok_or_else(|| eyre::eyre!("Unable to fetch abi"))?;
                    let contract_selectors = selectors.entry(contract.clone()).or_default();
                    for func in abi.functions() {
                        let sig = func.signature();
                        let selector = func.selector();
                        contract_selectors
                            .entry(SelectorType::Function)
                            .or_default()
                            .push((hex::encode_prefixed(selector), sig));
                    }
                    for event in abi.events() {
                        let sig = event.signature();
                        let selector = event.selector();
                        contract_selectors
                            .entry(SelectorType::Event)
                            .or_default()
                            .push((hex::encode_prefixed(selector), sig));
                    }
                    for error in abi.errors() {
                        let sig = error.signature();
                        let selector = error.selector();
                        contract_selectors
                            .entry(SelectorType::Error)
                            .or_default()
                            .push((hex::encode_prefixed(selector), sig));
                    }
                }

                if no_group {
                    // One flat table with an extra "Contract" column.
                    let mut table = Table::new();
                    if shell::is_markdown() {
                        table.load_preset(ASCII_MARKDOWN);
                    } else {
                        table.apply_modifier(UTF8_ROUND_CORNERS);
                    }
                    table.set_header(["Type", "Signature", "Selector", "Contract"]);
                    for (contract, contract_selectors) in selectors {
                        for (selector_type, selectors) in contract_selectors {
                            for (selector, sig) in selectors {
                                table.add_row([
                                    selector_type.to_string(),
                                    sig,
                                    selector,
                                    contract.to_string(),
                                ]);
                            }
                        }
                    }
                    sh_println!("\n{table}")?;
                } else {
                    // One table per contract, separated by a blank line.
                    for (idx, (contract, contract_selectors)) in selectors.into_iter().enumerate() {
                        sh_println!("{}{contract}", if idx == 0 { "" } else { "\n" })?;
                        let mut table = Table::new();
                        if shell::is_markdown() {
                            table.load_preset(ASCII_MARKDOWN);
                        } else {
                            table.apply_modifier(UTF8_ROUND_CORNERS);
                        }
                        table.set_header(["Type", "Signature", "Selector"]);
                        for (selector_type, selectors) in contract_selectors {
                            for (selector, sig) in selectors {
                                table.add_row([selector_type.to_string(), sig, selector]);
                            }
                        }
                        sh_println!("\n{table}")?;
                    }
                }
            }
            Self::Find { selector, project_paths } => {
                sh_println!("Searching for selector {selector:?} in the project...")?;

                // Decode (and thereby validate) the queried selector once, up front:
                // previously this was re-decoded for every artifact inside the loop, and
                // an invalid selector would only error after a full project compilation.
                let selector_bytes =
                    hex::decode(selector.strip_prefix("0x").unwrap_or(&selector))?;

                let build_args = BuildOpts {
                    project_paths,
                    compiler: CompilerOpts {
                        extra_output: vec![ContractOutputSelection::Abi],
                        ..Default::default()
                    },
                    ..Default::default()
                };

                let project = build_args.project()?;
                let outcome = ProjectCompiler::new().quiet(true).compile(&project)?;
                let artifacts = outcome
                    .into_artifacts_with_files()
                    .filter(|(file, _, _)| {
                        let is_sources_path = file.starts_with(&project.paths.sources);
                        let is_test = file.is_sol_test();
                        is_sources_path && !is_test
                    })
                    .collect::<Vec<_>>();

                let mut table = Table::new();
                if shell::is_markdown() {
                    table.load_preset(ASCII_MARKDOWN);
                } else {
                    table.apply_modifier(UTF8_ROUND_CORNERS);
                }
                table.set_header(["Type", "Signature", "Selector", "Contract"]);

                // Prefix-match the queried bytes against every function, event and
                // error selector in the project.
                for (_file, contract, artifact) in artifacts {
                    let abi = artifact.abi.ok_or_else(|| eyre::eyre!("Unable to fetch abi"))?;
                    for func in abi.functions() {
                        if func.selector().as_slice().starts_with(selector_bytes.as_slice()) {
                            table.add_row([
                                "Function",
                                &func.signature(),
                                &hex::encode_prefixed(func.selector()),
                                contract.as_str(),
                            ]);
                        }
                    }
                    for event in abi.events() {
                        if event.selector().as_slice().starts_with(selector_bytes.as_slice()) {
                            table.add_row([
                                "Event",
                                &event.signature(),
                                &hex::encode_prefixed(event.selector()),
                                contract.as_str(),
                            ]);
                        }
                    }
                    for error in abi.errors() {
                        if error.selector().as_slice().starts_with(selector_bytes.as_slice()) {
                            table.add_row([
                                "Error",
                                &error.signature(),
                                &hex::encode_prefixed(error.selector()),
                                contract.as_str(),
                            ]);
                        }
                    }
                }

                if table.row_count() > 0 {
                    sh_println!("\nFound {} instance(s)...", table.row_count())?;
                    sh_println!("\n{table}\n")?;
                } else {
                    return Err(eyre::eyre!("\nSelector not found in the project."));
                }
            }
        }
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/install.rs | crates/forge/src/cmd/install.rs | use crate::{DepIdentifier, FOUNDRY_LOCK, Lockfile};
use clap::{Parser, ValueHint};
use eyre::{Context, Result};
use foundry_cli::{
opts::Dependency,
utils::{CommandUtils, Git, LoadConfig},
};
use foundry_common::fs;
use foundry_config::{Config, impl_figment_convert_basic};
use regex::Regex;
use semver::Version;
use soldeer_commands::{Command, Verbosity, commands::install::Install};
use std::{
io::IsTerminal,
path::{Path, PathBuf},
str,
sync::LazyLock,
};
use yansi::Paint;
// Matches bare version tags such as `1.5`, `v1.8.0`, `v2`: an optional leading `v`
// followed by dot-separated numbers. Used by `match_tag` to decide whether a
// user-supplied ref should be fuzzy-matched against the repository's tags.
static DEPENDENCY_VERSION_TAG_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^v?\d+(\.\d+)*$").unwrap());
/// CLI arguments for `forge install`.
// NOTE: fixed a stray `)]` that had leaked into the usage string, and a typo in the
// `dependencies` help text ("will installed" -> "will be installed"). Both strings
// are user-facing CLI help.
#[derive(Clone, Debug, Parser)]
#[command(override_usage = "forge install [OPTIONS] [DEPENDENCIES]...
    forge install [OPTIONS] <github username>/<github project>@<tag>...
    forge install [OPTIONS] <alias>=<github username>/<github project>@<tag>...
    forge install [OPTIONS] <https://<github token>@git url>...
    forge install [OPTIONS] <https:// git url>...")]
pub struct InstallArgs {
    /// The dependencies to install.
    ///
    /// A dependency can be a raw URL, or the path to a GitHub repository.
    ///
    /// Additionally, a ref can be provided by adding @ to the dependency path.
    ///
    /// A ref can be:
    /// - A branch: master
    /// - A tag: v1.2.3
    /// - A commit: 8e8128
    ///
    /// For exact match, a ref can be provided with `@tag=`, `@branch=` or `@rev=` prefix.
    ///
    /// Target installation directory can be added via `<alias>=` suffix.
    /// The dependency will be installed to `lib/<alias>`.
    dependencies: Vec<Dependency>,

    /// The project's root path.
    ///
    /// By default root of the Git repository, if in one,
    /// or the current working directory.
    #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
    pub root: Option<PathBuf>,

    // Shared installation options (shallow/no-git/commit), flattened into this command.
    #[command(flatten)]
    opts: DependencyInstallOpts,
}
// Figment conversion boilerplate so `InstallArgs` can load the foundry `Config`
// (enables `self.load_config()` in `InstallArgs::run`).
impl_figment_convert_basic!(InstallArgs);
impl InstallArgs {
    /// Resolves the figment configuration for this invocation, then delegates the
    /// actual installation to [`DependencyInstallOpts::install`].
    pub async fn run(self) -> Result<()> {
        let mut config = self.load_config()?;
        let Self { dependencies, opts, .. } = self;
        opts.install(&mut config, dependencies).await
    }
}
// Installation options shared by `forge install` and by auto-installation of
// missing dependencies. `Copy` keeps them cheap to pass by value.
#[derive(Clone, Copy, Debug, Default, Parser)]
pub struct DependencyInstallOpts {
    /// Perform shallow clones instead of deep ones.
    ///
    /// Improves performance and reduces disk usage, but prevents switching branches or tags.
    #[arg(long)]
    pub shallow: bool,

    /// Install without adding the dependency as a submodule.
    #[arg(long)]
    pub no_git: bool,

    /// Create a commit after installing the dependencies.
    #[arg(long)]
    pub commit: bool,
}
impl DependencyInstallOpts {
    /// Builds a [`Git`] helper from the config, honoring the `--shallow` flag.
    pub fn git(self, config: &Config) -> Git<'_> {
        Git::from_config(config).shallow(self.shallow)
    }

    /// Installs all missing dependencies.
    ///
    /// See also [`Self::install`].
    ///
    /// Returns true if any dependency was installed.
    pub async fn install_missing_dependencies(self, config: &mut Config) -> bool {
        let lib = config.install_lib_dir();
        // Best-effort: a failed git check is treated as "nothing missing".
        if self.git(config).has_missing_dependencies(Some(lib)).unwrap_or(false) {
            // The extra newline is needed, otherwise the compiler output will overwrite the message
            let _ = sh_println!("Missing dependencies found. Installing now...\n");
            // Failures are reported but deliberately not propagated to the caller.
            if self.install(config, Vec::new()).await.is_err() {
                let _ =
                    sh_warn!("Your project has missing dependencies that could not be installed.");
            }
            true
        } else {
            false
        }
    }

    /// Installs all dependencies
    ///
    /// With an empty `dependencies` list this syncs/updates already-declared
    /// submodules; otherwise it installs each dependency (as a submodule, or as a
    /// plain folder with `--no-git`), records it in the lockfile, and optionally
    /// creates a commit.
    pub async fn install(self, config: &mut Config, dependencies: Vec<Dependency>) -> Result<()> {
        let Self { no_git, commit, .. } = self;
        let git = self.git(config);
        let install_lib_dir = config.install_lib_dir();
        // Dependencies live under `<git root>/<lib dir>`.
        let libs = git.root.join(install_lib_dir);

        let mut lockfile = Lockfile::new(&config.root);
        if !no_git {
            lockfile = lockfile.with_git(&git);
            // Check if submodules are uninitialized, if so, we need to fetch all submodules
            // This is to ensure that foundry.lock syncs successfully and doesn't error out, when
            // looking for commits/tags in submodules
            if git.submodules_uninitialized()? {
                trace!(lib = %libs.display(), "submodules uninitialized");
                git.submodule_update(false, false, false, true, Some(&libs))?;
            }
        }

        // Reconcile the lockfile with what's actually present in the lib dir.
        let out_of_sync_deps = lockfile.sync(config.install_lib_dir())?;

        // No explicit dependencies: just update the existing submodules (git mode only).
        if dependencies.is_empty() && !no_git {
            // Use the root of the git repository to look for submodules.
            let root = Git::root_of(git.root)?;
            match git.has_submodules(Some(&root)) {
                Ok(true) => {
                    sh_println!("Updating dependencies in {}", libs.display())?;
                    // recursively fetch all submodules (without fetching latest)
                    git.submodule_update(false, false, false, true, Some(&libs))?;
                    lockfile.write()?;
                }
                Err(err) => {
                    sh_err!("Failed to check for submodules: {err}")?;
                }
                _ => {
                    // no submodules, nothing to do
                }
            }
        }

        fs::create_dir_all(&libs)?;

        let installer = Installer { git, commit };
        for dep in dependencies {
            let path = libs.join(dep.name());
            let rel_path = path
                .strip_prefix(git.root)
                .wrap_err("Library directory is not relative to the repository root")?;
            sh_println!(
                "Installing {} in {} (url: {}, tag: {})",
                dep.name,
                path.display(),
                dep.url.as_deref().unwrap_or("None"),
                dep.tag.as_deref().unwrap_or("None")
            )?;

            // this tracks the actual installed tag
            let installed_tag;
            let mut dep_id = None;
            if no_git {
                installed_tag = installer.install_as_folder(&dep, &path)?;
            } else {
                if commit {
                    // Refuse to install into a dirty working tree when we will commit after.
                    git.ensure_clean()?;
                }
                installed_tag = installer.install_as_submodule(&dep, &path)?;

                let mut new_insertion = false;
                // Pin branch to submodule if branch is used
                if let Some(tag_or_branch) = &installed_tag {
                    // First, check if this tag has a branch
                    dep_id = Some(DepIdentifier::resolve_type(&git, &path, tag_or_branch)?);
                    if git.has_branch(tag_or_branch, &path)?
                        && dep_id.as_ref().is_some_and(|id| id.is_branch())
                    {
                        // always work with relative paths when directly modifying submodules
                        git.cmd()
                            .args(["submodule", "set-branch", "-b", tag_or_branch])
                            .arg(rel_path)
                            .exec()?;
                        let rev = git.get_rev(tag_or_branch, &path)?;
                        dep_id = Some(DepIdentifier::Branch {
                            name: tag_or_branch.to_string(),
                            rev,
                            r#override: false,
                        });
                    }
                    trace!(?dep_id, ?tag_or_branch, "resolved dep id");
                    if let Some(dep_id) = &dep_id {
                        new_insertion = true;
                        lockfile.insert(rel_path.to_path_buf(), dep_id.clone());
                    }
                    if commit {
                        // update .gitmodules which is at the root of the repo,
                        // not necessarily at the root of the current Foundry project
                        let root = Git::root_of(git.root)?;
                        git.root(&root).add(Some(".gitmodules"))?;
                    }
                }

                // Persist the lockfile if anything changed, or if it doesn't exist yet.
                if new_insertion
                    || out_of_sync_deps.as_ref().is_some_and(|o| !o.is_empty())
                    || !lockfile.exists()
                {
                    lockfile.write()?;
                }

                // commit the installation
                if commit {
                    let mut msg = String::with_capacity(128);
                    msg.push_str("forge install: ");
                    msg.push_str(dep.name());
                    if let Some(tag) = &installed_tag {
                        msg.push_str("\n\n");
                        if let Some(dep_id) = &dep_id {
                            msg.push_str(&dep_id.to_string());
                        } else {
                            msg.push_str(tag);
                        }
                    }
                    if !lockfile.is_empty() {
                        git.root(&config.root).add(Some(FOUNDRY_LOCK))?;
                    }
                    git.commit(&msg)?;
                }
            }

            // Final per-dependency status line, preferring the resolved dep id.
            let mut msg = format!(" {} {}", "Installed".green(), dep.name);
            if let Some(tag) = dep.tag.or(installed_tag) {
                msg.push(' ');
                if let Some(dep_id) = dep_id {
                    msg.push_str(&dep_id.to_string());
                } else {
                    msg.push_str(tag.as_str());
                }
            }
            sh_println!("{msg}")?;

            // Check if the dependency has soldeer.lock and install soldeer dependencies
            if let Err(e) = install_soldeer_deps_if_needed(&path).await {
                sh_warn!("Failed to install soldeer dependencies for {}: {e}", dep.name)?;
            }
        }

        // update `libs` in config if not included yet
        if !config.libs.iter().any(|p| p == install_lib_dir) {
            config.libs.push(install_lib_dir.to_path_buf());
            config.update_libs()?;
        }
        Ok(())
    }
}
/// Convenience wrapper: installs any missing dependencies using default options.
///
/// Returns true if any dependency was installed.
pub async fn install_missing_dependencies(config: &mut Config) -> bool {
    let opts = DependencyInstallOpts::default();
    opts.install_missing_dependencies(config).await
}
/// Checks if a dependency has soldeer.lock and installs soldeer dependencies if needed.
///
/// Any error from the soldeer run is returned only after the original working
/// directory has been restored.
async fn install_soldeer_deps_if_needed(dep_path: &Path) -> Result<()> {
    let soldeer_lock = dep_path.join("soldeer.lock");
    if soldeer_lock.exists() {
        sh_println!(" Found soldeer.lock, installing soldeer dependencies...")?;

        // Change to the dependency directory and run soldeer install
        // NOTE(review): `set_current_dir` mutates process-global state; if other tasks on
        // this async runtime read the cwd concurrently this could race — confirm installs
        // are effectively serialized at the call site.
        let original_dir = std::env::current_dir()?;
        std::env::set_current_dir(dep_path)?;

        let result = soldeer_commands::run(
            Command::Install(Install::default()),
            Verbosity::new(
                foundry_common::shell::verbosity(),
                if foundry_common::shell::is_quiet() { 1 } else { 0 },
            ),
        )
        .await;

        // Change back to original directory (even when the install itself failed)
        std::env::set_current_dir(original_dir)?;

        result.map_err(|e| eyre::eyre!("Failed to run soldeer install: {e}"))?;
        sh_println!(" Soldeer dependencies installed successfully")?;
    }
    Ok(())
}
// Performs the actual installation steps for a single dependency.
// `Copy` is cheap: the struct only holds a borrowed git handle and a flag.
#[derive(Clone, Copy, Debug)]
struct Installer<'a> {
    // Git helper rooted at the installing project's repository.
    git: Git<'a>,
    // Whether installed submodules should be staged (`git add`) for a later commit.
    commit: bool,
}
impl Installer<'_> {
    /// Installs the dependency as an ordinary folder instead of a submodule
    fn install_as_folder(self, dep: &Dependency, path: &Path) -> Result<Option<String>> {
        let url = dep.require_url()?;
        // Shallow-clone only when no tag is requested (a tag may need history).
        Git::clone(dep.tag.is_none(), url, Some(&path))?;
        let mut dep = dep.clone();

        if dep.tag.is_none() {
            // try to find latest semver release tag
            dep.tag = self.last_tag(path);
        }

        // checkout the tag if necessary
        self.git_checkout(&dep, path, false)?;

        trace!("updating dependency submodules recursively");
        self.git.root(path).submodule_update(
            false,
            false,
            false,
            true,
            std::iter::empty::<PathBuf>(),
        )?;

        // remove git artifacts
        fs::remove_dir_all(path.join(".git"))?;

        Ok(dep.tag)
    }

    /// Installs the dependency as new submodule.
    ///
    /// This will add the git submodule to the given dir, initialize it and checkout the tag if
    /// provided or try to find the latest semver, release tag.
    fn install_as_submodule(self, dep: &Dependency, path: &Path) -> Result<Option<String>> {
        // install the dep
        self.git_submodule(dep, path)?;
        let mut dep = dep.clone();

        if dep.tag.is_none() {
            // try to find latest semver release tag
            dep.tag = self.last_tag(path);
        }

        // checkout the tag if necessary
        self.git_checkout(&dep, path, true)?;

        trace!("updating dependency submodules recursively");
        self.git.root(path).submodule_update(
            false,
            false,
            false,
            true,
            std::iter::empty::<PathBuf>(),
        )?;

        // sync submodules config with changes in .gitmodules, see <https://github.com/foundry-rs/foundry/issues/9611>
        self.git.root(path).submodule_sync()?;

        if self.commit {
            self.git.add(Some(path))?;
        }

        Ok(dep.tag)
    }

    /// Returns the most recent semver tag of the repo at `path`, if any.
    ///
    /// Shallow clones carry no tag history, so this returns `None` for them.
    fn last_tag(self, path: &Path) -> Option<String> {
        if self.git.shallow {
            None
        } else {
            self.git_semver_tags(path).ok().and_then(|mut tags| tags.pop()).map(|(tag, _)| tag)
        }
    }

    /// Returns all semver git tags sorted in ascending order
    fn git_semver_tags(self, path: &Path) -> Result<Vec<(String, Version)>> {
        let out = self.git.root(path).tag()?;
        let mut tags = Vec::new();

        // tags are commonly prefixed which would make them not semver: v1.2.3 is not a semantic
        // version
        let common_prefixes = &["v-", "v", "release-", "release"];
        for tag in out.lines() {
            let mut maybe_semver = tag;
            for &prefix in common_prefixes {
                if let Some(rem) = tag.strip_prefix(prefix) {
                    maybe_semver = rem;
                    break;
                }
            }
            match Version::parse(maybe_semver) {
                Ok(v) => {
                    // ignore if additional metadata, like rc, beta, etc...
                    if v.build.is_empty() && v.pre.is_empty() {
                        tags.push((tag.to_string(), v));
                    }
                }
                Err(err) => {
                    warn!(?err, ?maybe_semver, "No semver tag");
                }
            }
        }

        tags.sort_by(|(_, a), (_, b)| a.cmp(b));

        Ok(tags)
    }

    /// Install the given dependency as git submodule in `target_dir`.
    fn git_submodule(self, dep: &Dependency, path: &Path) -> Result<()> {
        let url = dep.require_url()?;

        // make path relative to the git root, already checked above
        let path = path.strip_prefix(self.git.root).unwrap();
        trace!(?dep, url, ?path, "installing git submodule");
        self.git.submodule_add(true, url, path)
    }

    /// Checks out the dependency's tag/branch (if any) in the freshly installed repo.
    ///
    /// Returns the branch name if a branch was selected, otherwise an empty string.
    /// On checkout failure the partially-installed dependency directory is removed.
    fn git_checkout(self, dep: &Dependency, path: &Path, recurse: bool) -> Result<String> {
        // no need to checkout if there is no tag
        let Some(mut tag) = dep.tag.clone() else { return Ok(String::new()) };

        let mut is_branch = false;
        // only try to match tag if current terminal is a tty
        if std::io::stdout().is_terminal() {
            // NOTE(review): `match_tag` is only invoked for an *empty* tag here, where
            // it is a no-op (the version regex never matches ""); a non-empty tag is only
            // matched against branches. Confirm whether `!tag.is_empty()` was intended.
            if tag.is_empty() {
                tag = self.match_tag(&tag, path)?;
            } else if let Some(branch) = self.match_branch(&tag, path)? {
                trace!(?tag, ?branch, "selecting branch for given tag");
                tag = branch;
                is_branch = true;
            }
        }

        let url = dep.url.as_ref().unwrap();

        let res = self.git.root(path).checkout(recurse, &tag);
        if let Err(mut e) = res {
            // remove dependency on failed checkout
            fs::remove_dir_all(path)?;
            if e.to_string().contains("did not match any file(s) known to git") {
                e = eyre::eyre!("Tag: \"{tag}\" not found for repo \"{url}\"!")
            }
            return Err(e);
        }

        if is_branch { Ok(tag) } else { Ok(String::new()) }
    }

    /// disambiguate tag if it is a version tag
    fn match_tag(self, tag: &str, path: &Path) -> Result<String> {
        // only try to match if it looks like a version tag
        if !DEPENDENCY_VERSION_TAG_REGEX.is_match(tag) {
            return Ok(tag.into());
        }

        // generate candidate list by filtering `git tag` output, valid ones are those "starting
        // with" the user-provided tag (ignoring the starting 'v'), for example, if the user
        // specifies 1.5, then v1.5.2 is a valid candidate, but v3.1.5 is not
        let trimmed_tag = tag.trim_start_matches('v').to_string();
        let output = self.git.root(path).tag()?;
        let mut candidates: Vec<String> = output
            .trim()
            .lines()
            .filter(|x| x.trim_start_matches('v').starts_with(&trimmed_tag))
            .map(|x| x.to_string())
            .rev()
            .collect();

        // no match found, fall back to the user-provided tag
        if candidates.is_empty() {
            return Ok(tag.into());
        }

        // have exact match
        for candidate in &candidates {
            if candidate == tag {
                return Ok(tag.into());
            }
        }

        // only one candidate, ask whether the user wants to accept or not
        if candidates.len() == 1 {
            let matched_tag = &candidates[0];
            let input = prompt!(
                "Found a similar version tag: {matched_tag}, do you want to use this instead? [Y/n] "
            )?;
            return if match_yn(input) { Ok(matched_tag.clone()) } else { Ok(tag.into()) };
        }

        // multiple candidates, ask the user to choose one or skip
        candidates.insert(0, String::from("SKIP AND USE ORIGINAL TAG"));
        sh_println!("There are multiple matching tags:")?;
        for (i, candidate) in candidates.iter().enumerate() {
            sh_println!("[{i}] {candidate}")?;
        }

        let n_candidates = candidates.len();
        loop {
            let input: String =
                prompt!("Please select a tag (0-{}, default: 1): ", n_candidates - 1)?;
            let s = input.trim();
            // default selection, return first candidate
            let n = if s.is_empty() { Ok(1) } else { s.parse() };
            // match user input, 0 indicates skipping and use original tag
            match n {
                Ok(0) => return Ok(tag.into()),
                // valid indices are 1..=n_candidates-1; an inclusive upper bound of
                // `n_candidates` would panic below on `candidates[n_candidates]`
                Ok(i) if (1..n_candidates).contains(&i) => {
                    let c = &candidates[i];
                    sh_println!("[{i}] {c} selected")?;
                    return Ok(c.clone());
                }
                _ => continue,
            }
        }
    }

    /// If `tag` matches a remote branch name, lets the user pick that branch instead.
    fn match_branch(self, tag: &str, path: &Path) -> Result<Option<String>> {
        // fetch remote branches and check for tag
        let output = self.git.root(path).cmd().args(["branch", "-r"]).get_stdout_lossy()?;
        let mut candidates = output
            .lines()
            .map(|x| x.trim().trim_start_matches("origin/"))
            .filter(|x| x.starts_with(tag))
            .map(ToString::to_string)
            .rev()
            .collect::<Vec<_>>();

        trace!(?candidates, ?tag, "found branch candidates");

        // no match found, fall back to the user-provided tag
        if candidates.is_empty() {
            return Ok(None);
        }

        // have exact match
        for candidate in &candidates {
            if candidate == tag {
                return Ok(Some(tag.to_string()));
            }
        }

        // only one candidate, ask whether the user wants to accept or not
        if candidates.len() == 1 {
            let matched_tag = &candidates[0];
            let input = prompt!(
                "Found a similar branch: {matched_tag}, do you want to use this instead? [Y/n] "
            )?;
            return if match_yn(input) { Ok(Some(matched_tag.clone())) } else { Ok(None) };
        }

        // multiple candidates, ask the user to choose one or skip
        candidates.insert(0, format!("{tag} (original branch)"));
        sh_println!("There are multiple matching branches:")?;
        for (i, candidate) in candidates.iter().enumerate() {
            sh_println!("[{i}] {candidate}")?;
        }

        let n_candidates = candidates.len();
        let input: String = prompt!(
            "Please select a tag (0-{}, default: 1, Press <enter> to cancel): ",
            n_candidates - 1
        )?;
        let input = input.trim();

        // default selection, return None
        if input.is_empty() {
            sh_println!("Canceled branch matching")?;
            return Ok(None);
        }

        // match user input, 0 indicates skipping and use original tag
        match input.parse::<usize>() {
            Ok(0) => Ok(Some(tag.into())),
            // valid indices are 1..=n_candidates-1; an inclusive upper bound of
            // `n_candidates` would index out of bounds on `candidates[i]`
            Ok(i) if (1..n_candidates).contains(&i) => {
                let c = &candidates[i];
                sh_println!("[{i}] {c} selected")?;
                Ok(Some(c.clone()))
            }
            _ => Ok(None),
        }
    }
}
/// Interprets a yes/no prompt answer.
///
/// Empty input (just pressing enter) defaults to "yes"; matching is
/// case-insensitive and ignores surrounding whitespace.
fn match_yn(input: String) -> bool {
    let normalized = input.trim().to_lowercase();
    ["", "y", "yes"].contains(&normalized.as_str())
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    // Adds openzeppelin-contracts as a real git submodule and checks that its
    // semver tags are discovered; hits the network, hence ignored by default.
    #[test]
    #[ignore = "slow"]
    fn get_oz_tags() {
        let tmp = tempdir().unwrap();
        let git = Git::new(tmp.path());
        let installer = Installer { git, commit: false };
        git.init().unwrap();

        let dep: Dependency = "openzeppelin/openzeppelin-contracts".parse().unwrap();
        let libs = tmp.path().join("libs");
        fs::create_dir(&libs).unwrap();
        let submodule = libs.join("openzeppelin-contracts");
        installer.git_submodule(&dep, &submodule).unwrap();
        assert!(submodule.exists());

        let tags = installer.git_semver_tags(&submodule).unwrap();
        assert!(!tags.is_empty());
        // 4.8.0 is a known published release, so it must appear among the parsed tags.
        let v480: Version = "4.8.0".parse().unwrap();
        assert!(tags.iter().any(|(_, v)| v == &v480));
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/build.rs | crates/forge/src/cmd/build.rs | use super::{install, watch::WatchArgs};
use clap::Parser;
use eyre::{Context, Result};
use forge_lint::{linter::Linter, sol::SolidityLinter};
use foundry_cli::{
opts::{BuildOpts, configure_pcx_from_solc, get_solar_sources_from_compile_output},
utils::{LoadConfig, cache_local_signatures},
};
use foundry_common::{compile::ProjectCompiler, shell};
use foundry_compilers::{
CompilationError, FileFilter, Project, ProjectCompileOutput,
compilers::{Language, multi::MultiCompilerLanguage},
solc::SolcLanguage,
utils::source_files_iter,
};
use foundry_config::{
Config, SkipBuildFilters,
figment::{
self, Metadata, Profile, Provider,
error::Kind::InvalidType,
value::{Dict, Map, Value},
},
filter::expand_globs,
};
use serde::Serialize;
use std::path::PathBuf;
foundry_config::merge_impl_figment_convert!(BuildArgs, build);
/// CLI arguments for `forge build`.
///
/// CLI arguments take the highest precedence in the Config/Figment hierarchy.
/// In order to override them in the foundry `Config` they need to be merged into an existing
/// `figment::Provider`, like `foundry_config::Config` is.
///
/// `BuildArgs` implements `figment::Provider` in which all config related fields are serialized and
/// then merged into an existing `Config`, effectively overwriting them.
///
/// Some arguments are marked as `#[serde(skip)]` and require manual processing in
/// `figment::Provider` implementation
#[derive(Clone, Debug, Default, Serialize, Parser)]
#[command(next_help_heading = "Build options", about = None, long_about = None)] // override doc
pub struct BuildArgs {
    /// Build source files from specified paths.
    // Positional arguments (no flag); excluded from the serialized figment data.
    #[serde(skip)]
    pub paths: Option<Vec<PathBuf>>,

    /// Print compiled contract names.
    #[arg(long)]
    #[serde(skip)]
    pub names: bool,

    /// Print compiled contract sizes.
    /// Constructor argument length is not included in the calculation of initcode size.
    #[arg(long)]
    #[serde(skip)]
    pub sizes: bool,

    /// Ignore initcode contract bytecode size limit introduced by EIP-3860.
    #[arg(long, alias = "ignore-initcode-size")]
    #[serde(skip)]
    pub ignore_eip_3860: bool,

    // Shared build options, flattened into both the CLI and the serde output
    // so they participate in the figment merge.
    #[command(flatten)]
    #[serde(flatten)]
    pub build: BuildOpts,

    // Watch-mode options; CLI-only, never part of the serialized config.
    #[command(flatten)]
    #[serde(skip)]
    pub watch: WatchArgs,
}
impl BuildArgs {
    /// Entry point for `forge build`.
    ///
    /// Installs missing dependencies, compiles the project (optionally restricted to
    /// `self.paths`), caches local selector signatures, and runs the linter when
    /// `lint_on_build` is enabled and compilation produced no errors.
    pub async fn run(self) -> Result<ProjectCompileOutput> {
        let mut config = self.load_config()?;
        if install::install_missing_dependencies(&mut config).await && config.auto_detect_remappings
        {
            // need to re-configure here to also catch additional remappings
            config = self.load_config()?;
        }
        let project = config.project()?;
        // Collect sources to compile if build subdirectories specified.
        let mut files = vec![];
        if let Some(paths) = &self.paths {
            for path in paths {
                // Prefer the path resolved against the project root if it exists.
                let joined = project.root().join(path);
                let path = if joined.exists() { &joined } else { path };
                files.extend(source_files_iter(path, MultiCompilerLanguage::FILE_EXTENSIONS));
            }
            if files.is_empty() {
                eyre::bail!("No source files found in specified build paths.")
            }
        }
        let format_json = shell::is_json();
        let compiler = ProjectCompiler::new()
            .files(files)
            .dynamic_test_linking(config.dynamic_test_linking)
            .print_names(self.names)
            .print_sizes(self.sizes)
            .ignore_eip_3860(self.ignore_eip_3860)
            // In JSON mode don't bail on error so the full output can be emitted.
            .bail(!format_json);
        let mut output = compiler.compile(&project)?;
        // Cache project selectors.
        cache_local_signatures(&output)?;
        if format_json && !self.names && !self.sizes {
            sh_println!("{}", serde_json::to_string_pretty(&output.output())?)?;
        }
        // Only run the `SolidityLinter` if lint on build and no compilation errors.
        if config.lint.lint_on_build && !output.output().errors.iter().any(|e| e.is_error()) {
            self.lint(&project, &config, self.paths.as_deref(), &mut output)
                .wrap_err("Lint failed")?;
        }
        Ok(output)
    }

    /// Runs the [`SolidityLinter`] over the project's input files.
    ///
    /// * `files` - optional subset of build paths; when set, only those files are linted,
    ///   otherwise skip/ignore filters from the config apply.
    ///
    /// No-op for non-solc projects and in quiet mode.
    fn lint(
        &self,
        project: &Project,
        config: &Config,
        files: Option<&[PathBuf]>,
        output: &mut ProjectCompileOutput,
    ) -> Result<()> {
        let format_json = shell::is_json();
        if project.compiler.solc.is_some() && !shell::is_quiet() {
            let linter = SolidityLinter::new(config.project_paths())
                .with_json_emitter(format_json)
                .with_description(!format_json)
                .with_severity(if config.lint.severity.is_empty() {
                    None
                } else {
                    Some(config.lint.severity.clone())
                })
                .without_lints(if config.lint.exclude_lints.is_empty() {
                    None
                } else {
                    Some(
                        config
                            .lint
                            .exclude_lints
                            .iter()
                            // Silently drop strings that don't name a known lint.
                            .filter_map(|s| forge_lint::sol::SolLint::try_from(s.as_str()).ok())
                            .collect(),
                    )
                })
                .with_mixed_case_exceptions(&config.lint.mixed_case_exceptions);
            // Expand ignore globs and canonicalize from the get go
            let ignored = expand_globs(&config.root, config.lint.ignore.iter())?
                .iter()
                .flat_map(foundry_common::fs::canonicalize_path)
                .collect::<Vec<_>>();
            let skip = SkipBuildFilters::new(config.skip.clone(), config.root.clone());
            let curr_dir = std::env::current_dir()?;
            let input_files = config
                .project_paths::<SolcLanguage>()
                .input_files_iter()
                .filter(|p| {
                    // Lint only specified build files, if any.
                    if let Some(files) = files {
                        return files.iter().any(|file| &curr_dir.join(file) == p);
                    }
                    skip.is_match(p)
                        && !(ignored.contains(p) || ignored.contains(&curr_dir.join(p)))
                })
                .collect::<Vec<_>>();
            let solar_sources =
                get_solar_sources_from_compile_output(config, output, Some(&input_files))?;
            if solar_sources.input.sources.is_empty() {
                if !input_files.is_empty() {
                    sh_warn!(
                        "unable to lint. Solar only supports Solidity versions prior to 0.8.0"
                    )?;
                }
                return Ok(());
            }
            // NOTE(rusowsky): Once solar can drop unsupported versions, rather than creating a new
            // compiler, we should reuse the parser from the project output.
            let mut compiler = solar::sema::Compiler::new(
                solar::interface::Session::builder().with_stderr_emitter().build(),
            );
            // Load the solar-compatible sources to the pcx before linting
            compiler.enter_mut(|compiler| {
                let mut pcx = compiler.parse();
                configure_pcx_from_solc(&mut pcx, &config.project_paths(), &solar_sources, true);
                pcx.set_resolve_imports(true);
                pcx.parse();
            });
            linter.lint(&input_files, config.deny, &mut compiler)?;
        }
        Ok(())
    }

    /// Returns the `Project` for the current workspace
    ///
    /// This loads the `foundry_config::Config` for the current workspace (see
    /// [`foundry_config::utils::find_project_root`] and merges the cli `BuildArgs` into it before
    /// returning [`foundry_config::Config::project()`]
    pub fn project(&self) -> Result<Project> {
        self.build.project()
    }

    /// Returns whether `BuildArgs` was configured with `--watch`
    pub fn is_watch(&self) -> bool {
        self.watch.watch.is_some()
    }

    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
        // Use the path arguments or if none where provided the `src`, `test` and `script`
        // directories as well as the `foundry.toml` configuration file.
        self.watch.watchexec_config(|| {
            let config = self.load_config()?;
            let foundry_toml: PathBuf = config.root.join(Config::FILE_NAME);
            Ok([config.src, config.test, config.script, foundry_toml])
        })
    }
}
// Make this args a `figment::Provider` so that it can be merged into the `Config`
impl Provider for BuildArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("Build Args Provider")
    }

    /// Serializes the args into a figment `Dict` under the selected profile.
    ///
    /// `#[serde(skip)]` flags are inserted manually, and only when set, so that
    /// unset CLI flags do not overwrite values coming from the config file.
    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let value = Value::serialize(self)?;
        let error = InvalidType(value.to_actual(), "map".into());
        let mut dict = value.into_dict().ok_or(error)?;
        if self.names {
            dict.insert("names".to_string(), true.into());
        }
        if self.sizes {
            dict.insert("sizes".to_string(), true.into());
        }
        if self.ignore_eip_3860 {
            dict.insert("ignore_eip_3860".to_string(), true.into());
        }
        Ok(Map::from([(Config::selected_profile(), dict)]))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/watch.rs | crates/forge/src/cmd/watch.rs | use super::{
build::BuildArgs, coverage::CoverageArgs, doc::DocArgs, fmt::FmtArgs,
snapshot::GasSnapshotArgs, test::TestArgs,
};
use alloy_primitives::map::HashSet;
use clap::Parser;
use eyre::Result;
use foundry_cli::utils::{self, FoundryPathExt, LoadConfig};
use foundry_config::Config;
use parking_lot::Mutex;
use std::{
path::PathBuf,
sync::{
Arc,
atomic::{AtomicU8, Ordering},
},
time::Duration,
};
use tokio::process::Command as TokioCommand;
use watchexec::{
Watchexec,
action::ActionHandler,
command::{Command, Program},
job::{CommandState, Job},
paths::summarise_events_to_env,
};
use watchexec_events::{
Event, Priority, ProcessEnd, Tag,
filekind::{AccessKind, FileEventKind},
};
use watchexec_signals::Signal;
use yansi::{Color, Paint};
type SpawnHook = Arc<dyn Fn(&[Event], &mut TokioCommand) + Send + Sync + 'static>;
// Watch-mode flags shared by several `forge` subcommands (build, test, fmt, doc, ...).
#[derive(Clone, Debug, Default, Parser)]
#[command(next_help_heading = "Watch options")]
pub struct WatchArgs {
    /// Watch the given files or directories for changes.
    ///
    /// If no paths are provided, the source and test directories of the project are watched.
    #[arg(long, short, num_args(0..), value_name = "PATH")]
    pub watch: Option<Vec<PathBuf>>,

    /// Do not restart the command while it's still running.
    #[arg(long)]
    pub no_restart: bool,

    /// Explicitly re-run all tests when a change is made.
    ///
    /// By default, only the tests of the last modified test file are executed.
    #[arg(long)]
    pub run_all: bool,

    /// Re-run only previously failed tests first when a change is made.
    ///
    /// If all previously failed tests pass, the full test suite will be run automatically.
    /// This is particularly useful for TDD workflows where you want fast feedback on failures.
    #[arg(long, alias = "rerun-failures")]
    pub rerun_failed: bool,

    /// File update debounce delay.
    ///
    /// During the delay, incoming change events are accumulated and
    /// only once the delay has passed, is an action taken. Note that
    /// this does not mean a command will be started: if --no-restart is
    /// given and a command is already running, the outcome of the
    /// action will be to do nothing.
    ///
    /// Defaults to 50ms. Parses as decimal seconds by default, but
    /// using an integer with the `ms` suffix may be more convenient.
    ///
    /// When using --poll mode, you'll want a larger duration, or risk
    /// overloading disk I/O.
    #[arg(long, value_name = "DELAY")]
    pub watch_delay: Option<String>,
}
impl WatchArgs {
    /// Creates a new [`watchexec::Config`].
    ///
    /// If paths were provided as arguments these will be used as the watcher's pathset,
    /// otherwise the paths the closure returns will be used.
    pub fn watchexec_config<PS: IntoIterator<Item = P>, P: Into<PathBuf>>(
        &self,
        default_paths: impl FnOnce() -> Result<PS>,
    ) -> Result<watchexec::Config> {
        self.watchexec_config_generic(default_paths, None)
    }

    /// Creates a new [`watchexec::Config`] with a custom command spawn hook.
    ///
    /// If paths were provided as arguments these will be used as the watcher's pathset,
    /// otherwise the paths the closure returns will be used.
    pub fn watchexec_config_with_override<PS: IntoIterator<Item = P>, P: Into<PathBuf>>(
        &self,
        default_paths: impl FnOnce() -> Result<PS>,
        spawn_hook: impl Fn(&[Event], &mut TokioCommand) + Send + Sync + 'static,
    ) -> Result<watchexec::Config> {
        self.watchexec_config_generic(default_paths, Some(Arc::new(spawn_hook)))
    }

    /// Shared implementation: resolves the pathset (CLI paths, or the defaults
    /// filtered down to paths that exist) and builds the final config.
    fn watchexec_config_generic<PS: IntoIterator<Item = P>, P: Into<PathBuf>>(
        &self,
        default_paths: impl FnOnce() -> Result<PS>,
        spawn_hook: Option<SpawnHook>,
    ) -> Result<watchexec::Config> {
        let mut paths = self.watch.as_deref().unwrap_or_default();
        let storage: Vec<_>;
        if paths.is_empty() {
            // Fall back to the defaults, dropping any path that does not exist.
            storage = default_paths()?.into_iter().map(Into::into).filter(|p| p.exists()).collect();
            paths = &storage;
        }
        self.watchexec_config_inner(paths, spawn_hook)
    }

    /// Builds the [`watchexec::Config`] for the given pathset and optional spawn hook.
    fn watchexec_config_inner(
        &self,
        paths: &[PathBuf],
        spawn_hook: Option<SpawnHook>,
    ) -> Result<watchexec::Config> {
        let config = watchexec::Config::default();
        config.on_error(|err| {
            let _ = sh_eprintln!("[[{err:?}]]");
        });
        if let Some(delay) = &self.watch_delay {
            config.throttle(utils::parse_delay(delay)?);
        }
        config.pathset(paths.iter().map(|p| p.as_path()));
        // The re-spawned command is the current invocation minus the watch flag
        // and its `n_path_args` path arguments.
        let n_path_args = self.watch.as_deref().unwrap_or_default().len();
        let base_command = Arc::new(watch_command(cmd_args(n_path_args)));
        let id = watchexec::Id::default();
        // Counts Ctrl-C presses to escalate the shutdown strategy.
        let quit_again = Arc::new(AtomicU8::new(0));
        let stop_timeout = Duration::from_secs(5);
        let no_restart = self.no_restart;
        let stop_signal = Signal::Terminate;
        config.on_action(move |mut action| {
            let base_command = base_command.clone();
            let job = action.get_or_create_job(id, move || base_command.clone());
            let events = action.events.clone();
            let spawn_hook = spawn_hook.clone();
            job.set_spawn_hook(move |command, _| {
                // https://github.com/watchexec/watchexec/blob/72f069a8477c679e45f845219276b0bfe22fed79/crates/cli/src/emits.rs#L9
                let env = summarise_events_to_env(events.iter());
                for (k, v) in env {
                    command.command_mut().env(format!("WATCHEXEC_{k}_PATH"), v);
                }
                if let Some(spawn_hook) = &spawn_hook {
                    spawn_hook(&events, command.command_mut());
                }
            });
            let clear_screen = || {
                let _ = clearscreen::clear();
            };
            // Shutdown escalation: first signal waits gracefully, second
            // force-stops immediately, any further signal quits outright.
            let quit = |mut action: ActionHandler| {
                match quit_again.fetch_add(1, Ordering::Relaxed) {
                    0 => {
                        let _ = sh_eprintln!(
                            "[Waiting {stop_timeout:?} for processes to exit before stopping... \
                             Ctrl-C again to exit faster]"
                        );
                        action.quit_gracefully(stop_signal, stop_timeout);
                    }
                    1 => action.quit_gracefully(Signal::ForceStop, Duration::ZERO),
                    _ => action.quit(),
                }
                action
            };
            let signals = action.signals().collect::<Vec<_>>();
            if signals.contains(&Signal::Terminate) || signals.contains(&Signal::Interrupt) {
                return quit(action);
            }
            // Only filesystem events below here (or empty synthetic events).
            if action.paths().next().is_none() && !action.events.iter().any(|e| e.is_empty()) {
                debug!("no filesystem or synthetic events, skip without doing more");
                return action;
            }
            if cfg!(target_os = "linux") {
                // Reading a file now triggers `Access(Open)` events on Linux due to:
                // https://github.com/notify-rs/notify/pull/612
                // This causes an infinite rebuild loop: the build reads a file,
                // which triggers a notification, which restarts the build, and so on.
                // To prevent this, we ignore `Access(Open)` events during event processing.
                let mut has_file_events = false;
                let mut has_synthetic_events = false;
                'outer: for e in action.events.iter() {
                    if e.is_empty() {
                        has_synthetic_events = true;
                        break;
                    } else {
                        for tag in &e.tags {
                            if let Tag::FileEventKind(kind) = tag
                                && !matches!(kind, FileEventKind::Access(AccessKind::Open(_))) {
                                has_file_events = true;
                                break 'outer;
                            }
                        }
                    }
                }
                if !has_file_events && !has_synthetic_events {
                    debug!("no filesystem events (other than Access(Open)) or synthetic events, skip without doing more");
                    return action;
                }
            }
            job.run({
                let job = job.clone();
                move |context| {
                    if context.current.is_running() && no_restart {
                        return;
                    }
                    // Restart the command, then report its exit status once done.
                    job.restart_with_signal(stop_signal, stop_timeout);
                    job.run({
                        let job = job.clone();
                        move |context| {
                            clear_screen();
                            setup_process(job, &context.command)
                        }
                    });
                }
            });
            action
        });
        Ok(config)
    }
}
/// Spawns a task that waits for the job's current command to finish and then
/// reports its outcome via [`end_of_process`].
fn setup_process(job: Job, _command: &Command) {
    tokio::spawn(async move {
        job.to_wait().await;
        job.run(move |context| end_of_process(context.current));
    });
}
/// Prints a colored one-line summary of how the watched command ended.
///
/// Only acts on the `Finished` state; all other command states are ignored.
fn end_of_process(state: &CommandState) {
    let CommandState::Finished { status, started, finished } = state else {
        return;
    };
    let duration = *finished - *started;
    // NOTE: `timings` and `quiet` are hard-coded placeholders (always on / off).
    let timings = true;
    let timing = if timings { format!(", lasted {duration:?}") } else { String::new() };
    // Map each exit kind to a message and a distinct color.
    let (msg, fg) = match status {
        ProcessEnd::ExitError(code) => (format!("Command exited with {code}{timing}"), Color::Red),
        ProcessEnd::ExitSignal(sig) => {
            (format!("Command killed by {sig:?}{timing}"), Color::Magenta)
        }
        ProcessEnd::ExitStop(sig) => (format!("Command stopped by {sig:?}{timing}"), Color::Blue),
        ProcessEnd::Continued => (format!("Command continued{timing}"), Color::Cyan),
        ProcessEnd::Exception(ex) => {
            (format!("Command ended by exception {ex:#x}{timing}"), Color::Yellow)
        }
        ProcessEnd::Success => (format!("Command was successful{timing}"), Color::Green),
    };
    let quiet = false;
    if !quiet {
        let _ = sh_eprintln!("{}", format!("[{msg}]").paint(fg.foreground()));
    }
}
/// Runs the given [`watchexec::Config`].
///
/// Sends one synthetic (empty) event at `Urgent` priority so the command runs
/// once immediately, before any filesystem change occurs.
pub async fn run(config: watchexec::Config) -> Result<()> {
    let wx = Watchexec::with_config(config)?;
    wx.send_event(Event::default(), Priority::Urgent).await?;
    wx.main().await??;
    Ok(())
}
/// Executes a [`Watchexec`] that listens for changes in the project's src dir and reruns `forge
/// build`
pub async fn watch_build(args: BuildArgs) -> Result<()> {
    // Build the watch configuration from the CLI args and hand it to the runner.
    run(args.watchexec_config()?).await
}
/// Executes a [`Watchexec`] that listens for changes in the project's src dir and reruns `forge
/// snapshot`
pub async fn watch_gas_snapshot(args: GasSnapshotArgs) -> Result<()> {
    // Build the watch configuration from the CLI args and hand it to the runner.
    run(args.watchexec_config()?).await
}
/// Executes a [`Watchexec`] that listens for changes in the project's src dir and reruns `forge
/// test`
pub async fn watch_test(args: TestArgs) -> Result<()> {
    let config: Config = args.build.load_config()?;
    let filter = args.filter(&config)?;
    // Marker to check whether to override the command.
    // When the user already filters tests (or asked for --run-all), the spawn
    // hook must not append its own `--match-path`.
    let no_reconfigure = filter.args().test_pattern.is_some()
        || filter.args().path_pattern.is_some()
        || filter.args().contract_pattern.is_some()
        || args.watch.run_all;
    // Remembers the test files from the previous run so that edits to
    // non-test files re-run the same set.
    let last_test_files = Mutex::new(HashSet::<String>::default());
    let project_root = config.root.to_string_lossy().into_owned();
    let test_failures_file = config.test_failures_file.clone();
    let rerun_failed = args.watch.rerun_failed;
    let config = args.watch.watchexec_config_with_override(
        || Ok([&config.test, &config.src]),
        move |events, command| {
            // Check if we should prioritize rerunning failed tests
            let has_failures = rerun_failed && test_failures_file.exists();
            if has_failures {
                // Smart mode: rerun failed tests first
                trace!("Smart watch mode: will rerun failed tests first");
                command.arg("--rerun");
                // Don't add file-specific filters when rerunning failures
                return;
            }
            // Collect the Solidity test files touched by this batch of events.
            let mut changed_sol_test_files: HashSet<_> = events
                .iter()
                .flat_map(|e| e.paths())
                .filter(|(path, _)| path.is_sol_test())
                .filter_map(|(path, _)| path.to_str())
                .map(str::to_string)
                .collect();
            if changed_sol_test_files.len() > 1 {
                // Run all tests if multiple files were changed at once, for example when running
                // `forge fmt`.
                return;
            }
            if changed_sol_test_files.is_empty() {
                // Reuse the old test files if a non-test file was changed.
                let last = last_test_files.lock();
                if last.is_empty() {
                    return;
                }
                changed_sol_test_files = last.clone();
            }
            // append `--match-path` glob
            let mut file = changed_sol_test_files.iter().next().expect("test file present").clone();
            // remove the project root dir from the detected file
            if let Some(f) = file.strip_prefix(&project_root) {
                file = f.trim_start_matches('/').to_string();
            }
            trace!(?file, "reconfigure test command");
            // Before appending `--match-path`, check if it already exists
            if !no_reconfigure {
                command.arg("--match-path").arg(file);
            }
        },
    )?;
    run(config).await
}
/// Executes a [`Watchexec`] that reruns `forge coverage` on changes to the
/// project's test or source directories.
pub async fn watch_coverage(args: CoverageArgs) -> Result<()> {
    let wx_config = args.watch().watchexec_config(|| {
        let cfg = args.load_config()?;
        Ok([cfg.test, cfg.src])
    })?;
    run(wx_config).await
}
/// Executes a [`Watchexec`] that reruns `forge fmt` on changes to the project's
/// source, test or script directories.
pub async fn watch_fmt(args: FmtArgs) -> Result<()> {
    let wx_config = args.watch.watchexec_config(|| {
        let cfg = args.load_config()?;
        Ok([cfg.src, cfg.test, cfg.script])
    })?;
    run(wx_config).await
}
/// Executes a [`Watchexec`] that listens for changes in the project's sources directory
pub async fn watch_doc(args: DocArgs) -> Result<()> {
    // Only the source directory is relevant for doc generation.
    let wx_config = args.watch.watchexec_config(|| {
        let cfg = args.config()?;
        Ok([cfg.src])
    })?;
    run(wx_config).await
}
/// Converts a list of arguments to a `watchexec::Command`.
///
/// The first index in `args` is the path to the executable.
///
/// # Panics
///
/// Panics if `args` is empty.
fn watch_command(mut args: Vec<String>) -> Command {
    debug_assert!(!args.is_empty());
    let prog = args.remove(0);
    // Remaining args are passed verbatim to the spawned program.
    Command { program: Program::Exec { prog: prog.into(), args }, options: Default::default() }
}
/// Returns the env args without the `--watch` flag from the args for the Watchexec command
///
/// `num` is the number of path arguments that followed the watch flag.
fn cmd_args(num: usize) -> Vec<String> {
    clean_cmd_args(num, std::env::args().collect())
}
/// Removes the `--watch`/`-w` flag, together with its `num` path arguments,
/// from the command args so the re-spawned command does not itself enter watch
/// mode. Also strips the `w` from the first combined short-flag cluster
/// (e.g. `-vw` -> `-v`), which clap accepts.
#[instrument(level = "debug", ret)]
fn clean_cmd_args(num: usize, mut cmd_args: Vec<String>) -> Vec<String> {
    if let Some(pos) = cmd_args.iter().position(|arg| arg == "--watch" || arg == "-w") {
        // Clamp the drain range: if fewer than `num` arguments follow the flag
        // (e.g. a stray trailing `-w`), an unclamped `pos..=(pos + num)` would
        // be out of bounds and panic.
        let end = usize::min(pos + num, cmd_args.len() - 1);
        cmd_args.drain(pos..=end);
    }
    // There's another edge case where short flags are combined into one which is supported by clap,
    // like `-vw` for verbosity and watch
    // this removes any `w` from concatenated short flags
    if let Some(pos) = cmd_args.iter().position(|arg| {
        // `Some(true)` iff `arg` is a short-flag cluster (`-abc`, not `--long`)
        // that contains a `w`.
        fn contains_w_in_short(arg: &str) -> Option<bool> {
            let mut iter = arg.chars().peekable();
            if *iter.peek()? != '-' {
                return None;
            }
            iter.next();
            if *iter.peek()? == '-' {
                return None;
            }
            Some(iter.any(|c| c == 'w'))
        }
        contains_w_in_short(arg).unwrap_or(false)
    }) {
        let clean_arg = cmd_args[pos].replace('w', "");
        if clean_arg == "-" {
            // The cluster consisted solely of `w`s; drop the argument entirely.
            cmd_args.remove(pos);
        } else {
            cmd_args[pos] = clean_arg;
        }
    }
    cmd_args
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A combined short flag `-vw` must lose its `w` while keeping `-v`.
    #[test]
    fn parse_cmd_args() {
        let args = vec!["-vw".to_string()];
        let cleaned = clean_cmd_args(0, args);
        assert_eq!(cleaned, vec!["-v".to_string()]);
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/clone.rs | crates/forge/src/cmd/clone.rs | use super::{init::InitArgs, install::DependencyInstallOpts};
use alloy_primitives::{Address, Bytes, ChainId, TxHash};
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_block_explorers::{
Client,
contract::{ContractCreationData, ContractMetadata, Metadata},
errors::EtherscanError,
};
use foundry_cli::{
opts::EtherscanOpts,
utils::{Git, LoadConfig},
};
use foundry_common::{compile::ProjectCompiler, fs};
use foundry_compilers::{
ProjectCompileOutput, ProjectPathsConfig,
artifacts::{
ConfigurableContractArtifact, Settings, StorageLayout,
output_selection::ContractOutputSelection,
remappings::{RelativeRemapping, Remapping},
},
compilers::solc::Solc,
};
use foundry_config::{Chain, Config};
use std::{
fs::read_dir,
path::{Path, PathBuf},
time::Duration,
};
/// CloneMetadata stores the metadata that are not included by `foundry.toml` but necessary for a
/// cloned contract. The metadata can be serialized to a metadata file in the cloned project root.
// Written as JSON to the `.clone.meta` file by `collect_compilation_metadata`.
#[derive(Debug, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CloneMetadata {
    /// The path to the source file that contains the contract declaration.
    /// The path is relative to the root directory of the project.
    pub path: PathBuf,
    /// The name of the contract in the file.
    pub target_contract: String,
    /// The address of the contract on the blockchain.
    pub address: Address,
    /// The chain id.
    pub chain_id: ChainId,
    /// The transaction hash of the creation transaction.
    pub creation_transaction: TxHash,
    /// The address of the deployer, i.e., sender of the creation transaction.
    pub deployer: Address,
    /// The constructor arguments of the contract on chain.
    pub constructor_arguments: Bytes,
    /// The storage layout of the contract on chain.
    // Preserved so later local modifications can be checked for layout compatibility.
    pub storage_layout: StorageLayout,
}
/// CLI arguments for `forge clone`.
///
/// `forge clone` clones an on-chain contract from block explorers (e.g., Etherscan) in the
/// following steps:
/// 1. Fetch the contract source code from the block explorer.
/// 2. Initialize an empty foundry project at the `root` directory specified in `CloneArgs`.
/// 3. Dump the contract sources to the source directory.
/// 4. Update the `foundry.toml` configuration file with the compiler settings from Etherscan.
/// 5. Try compile the cloned contract, so that we can get the original storage layout. This
///    original storage layout is preserved in the `CloneMetadata` so that if the user later
///    modifies the contract, it is possible to quickly check the storage layout compatibility with
///    the original on-chain contract.
/// 6. Dump the `CloneMetadata` to the root directory of the cloned project as `.clone.meta` file.
#[derive(Clone, Debug, Parser)]
pub struct CloneArgs {
    /// The contract address to clone.
    pub address: Address,

    /// The root directory of the cloned project.
    #[arg(value_hint = ValueHint::DirPath, default_value = ".", value_name = "PATH")]
    pub root: PathBuf,

    /// Do not generate the remappings.txt file. Instead, keep the remappings in the configuration.
    #[arg(long)]
    pub no_remappings_txt: bool,

    /// Keep the original directory structure collected from Etherscan.
    ///
    /// If this flag is set, the directory structure of the cloned project will be kept as is.
    /// By default, the directory structure is re-organized to increase the readability, but may
    /// risk some compilation failures.
    #[arg(long)]
    pub keep_directory_structure: bool,

    // Block-explorer related options (chain, API key).
    #[command(flatten)]
    pub etherscan: EtherscanOpts,

    // Dependency installation options; also controls git commit behavior.
    #[command(flatten)]
    pub install: DependencyInstallOpts,
}
impl CloneArgs {
    /// Entry point for `forge clone`: fetches the verified sources from the block
    /// explorer, scaffolds a Foundry project around them, records the clone
    /// metadata, and optionally commits the result.
    pub async fn run(self) -> Result<()> {
        let Self { address, root, install, etherscan, no_remappings_txt, keep_directory_structure } =
            self;
        // step 0. get the chain and api key from the config
        let config = etherscan.load_config()?;
        let chain = config.chain.unwrap_or_default();
        let etherscan_api_key = config.get_etherscan_api_key(Some(chain)).unwrap_or_default();
        let client = Client::new(chain, etherscan_api_key.clone())?;
        // step 1. get the metadata from client
        sh_println!("Downloading the source code of {address} from Etherscan...")?;
        let meta = Self::collect_metadata_from_client(address, &client).await?;
        // step 2. initialize an empty project
        Self::init_an_empty_project(&root, install).await?;
        // canonicalize the root path
        // note that at this point, the root directory must have been created
        let root = dunce::canonicalize(&root)?;
        // step 3. parse the metadata
        Self::parse_metadata(&meta, chain, &root, no_remappings_txt, keep_directory_structure)
            .await?;
        // step 4. collect the compilation metadata
        // if the etherscan api key is not set, we need to wait for 5 seconds between calls to
        // avoid being rate limited
        sh_println!("Collecting the creation information of {address} from Etherscan...")?;
        if etherscan_api_key.is_empty() {
            sh_warn!("Waiting for 5 seconds to avoid rate limit...")?;
            tokio::time::sleep(Duration::from_secs(5)).await;
        }
        Self::collect_compilation_metadata(&meta, chain, address, &root, &client).await?;
        // step 5. git add and commit the changes if needed
        if install.commit {
            let git = Git::new(&root);
            git.add(Some("--all"))?;
            let msg = format!("chore: forge clone {address}");
            git.commit(&msg)?;
        }
        Ok(())
    }

    /// Collect the metadata of the contract from the block explorer.
    ///
    /// * `address` - the address of the contract to be cloned.
    /// * `client` - the client of the block explorer.
    pub(crate) async fn collect_metadata_from_client<C: EtherscanClient>(
        address: Address,
        client: &C,
    ) -> Result<Metadata> {
        let mut meta = client.contract_source_code(address).await?;
        // Exactly one item is expected for a verified contract.
        eyre::ensure!(meta.items.len() == 1, "contract not found or ill-formed");
        let meta = meta.items.remove(0);
        eyre::ensure!(!meta.is_vyper(), "Vyper contracts are not supported");
        Ok(meta)
    }

    /// Initialize an empty project at the root directory.
    ///
    /// * `root` - the root directory of the project.
    /// * `install` - dependency installation options (also controls git usage).
    pub(crate) async fn init_an_empty_project(
        root: &Path,
        install: DependencyInstallOpts,
    ) -> Result<()> {
        // Initialize the project with empty set to true to avoid creating example contracts
        let init_args =
            InitArgs { root: root.to_path_buf(), install, empty: true, ..Default::default() };
        init_args.run().await.map_err(|e| eyre::eyre!("Project init error: {:?}", e))?;
        Ok(())
    }

    /// Collect the compilation metadata of the cloned contract.
    /// This function compiles the cloned contract and collects the compilation metadata.
    ///
    /// * `meta` - the metadata of the contract (from Etherscan).
    /// * `chain` - the chain where the contract to be cloned locates.
    /// * `address` - the address of the contract to be cloned.
    /// * `root` - the root directory of the cloned project.
    /// * `client` - the client of the block explorer.
    pub(crate) async fn collect_compilation_metadata<C: EtherscanClient>(
        meta: &Metadata,
        chain: Chain,
        address: Address,
        root: &PathBuf,
        client: &C,
    ) -> Result<()> {
        // compile the cloned contract
        let compile_output = compile_project(root)?;
        let (main_file, main_artifact) = find_main_contract(&compile_output, &meta.contract_name)?;
        let main_file = main_file.strip_prefix(root)?.to_path_buf();
        let storage_layout =
            main_artifact.storage_layout.to_owned().expect("storage layout not found");
        // dump the metadata to the root directory
        let creation_tx = client.contract_creation_data(address).await?;
        let clone_meta = CloneMetadata {
            path: main_file,
            target_contract: meta.contract_name.clone(),
            address,
            chain_id: chain.id(),
            creation_transaction: creation_tx.transaction_hash,
            deployer: creation_tx.contract_creator,
            constructor_arguments: meta.constructor_arguments.clone(),
            storage_layout,
        };
        let metadata_content = serde_json::to_string(&clone_meta)?;
        let metadata_file = root.join(".clone.meta");
        fs::write(&metadata_file, metadata_content)?;
        // Mark the metadata file read-only so it is not accidentally edited.
        let mut perms = std::fs::metadata(&metadata_file)?.permissions();
        perms.set_readonly(true);
        std::fs::set_permissions(&metadata_file, perms)?;
        Ok(())
    }

    /// Parse the metadata: dump the sources, update the project configuration, and
    /// (optionally) generate `remappings.txt`.
    ///
    /// * `meta` - the contract metadata downloaded from the block explorer.
    /// * `chain` - the chain where the contract to be cloned locates.
    /// * `root` - the root directory to clone the contract into as a foundry project.
    /// * `no_remappings_txt` - whether to keep the remappings in `foundry.toml`
    ///   instead of generating a `remappings.txt` file.
    /// * `keep_directory_structure` - whether to keep the original directory structure.
    pub(crate) async fn parse_metadata(
        meta: &Metadata,
        chain: Chain,
        root: &PathBuf,
        no_remappings_txt: bool,
        keep_directory_structure: bool,
    ) -> Result<()> {
        // dump sources and update the remapping in configuration
        let remappings = dump_sources(meta, root, keep_directory_structure)?;
        Config::update_at(root, |config, doc| {
            let profile = config.profile.as_str().as_str();
            // update the remappings in the configuration
            let mut remapping_array = toml_edit::Array::new();
            for r in remappings {
                remapping_array.push(r.to_string());
            }
            doc[Config::PROFILE_SECTION][profile]["remappings"] = toml_edit::value(remapping_array);
            // make sure auto_detect_remappings is false (it is very important because cloned
            // project may not follow the common remappings)
            doc[Config::PROFILE_SECTION][profile]["auto_detect_remappings"] =
                toml_edit::value(false);
            true
        })?;
        // update configuration
        Config::update_at(root, |config, doc| {
            update_config_by_metadata(config, doc, meta, chain).is_ok()
        })?;
        // write remappings to remappings.txt if necessary
        if !no_remappings_txt {
            let remappings_txt = root.join("remappings.txt");
            eyre::ensure!(
                !remappings_txt.exists(),
                "remappings.txt already exists, please remove it first"
            );
            Config::update_at(root, |config, doc| {
                let remappings_txt_content =
                    config.remappings.iter().map(|r| r.to_string()).collect::<Vec<_>>().join("\n");
                if fs::write(&remappings_txt, remappings_txt_content).is_err() {
                    return false;
                }
                // Remappings now live in remappings.txt; drop them from foundry.toml.
                let profile = config.profile.as_str().as_str();
                if let Some(elem) = doc[Config::PROFILE_SECTION][profile].as_table_mut() {
                    elem.remove_entry("remappings");
                    true
                } else {
                    false
                }
            })?;
        }
        Ok(())
    }
}
/// Update the configuration file with the metadata.
/// This function will update the configuration file with the metadata from the contract.
/// It will update the following fields:
/// - `auto_detect_solc` to `false`
/// - `solc_version` to the value from the metadata
/// - `evm_version` to the value from the metadata, if the metadata's evm_version is "Default", then
///   this is derived from the solc version this contract was compiled with.
/// - `via_ir` to the value from the metadata
/// - `libraries` to the value from the metadata
/// - `metadata` to the value from the metadata
/// - `cbor_metadata`, `use_literal_content`, and `bytecode_hash`
/// - `optimizer` to the value from the metadata
/// - `optimizer_runs` to the value from the metadata
/// - `optimizer_details` to the value from the metadata
/// - `yul_details`, `yul`, etc.
/// - `simpleCounterForLoopUncheckedIncrement` is ignored for now
/// - `remappings` and `stop_after` are pre-validated to be empty and None, respectively
/// - `model_checker`, `debug`, and `output_selection` are ignored for now
///
/// Detailed information can be found from the following link:
/// - <https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options>
/// - <https://docs.soliditylang.org/en/latest/using-the-compiler.html#compiler-input-and-output-json-description>
fn update_config_by_metadata(
    config: &Config,
    doc: &mut toml_edit::DocumentMut,
    meta: &Metadata,
    chain: Chain,
) -> Result<()> {
    let profile = config.profile.as_str().as_str();
    // macro to update the config if the value exists
    // Walks `doc[profile][$key1][$key2]...` one key at a time and overwrites the leaf
    // only when `$value` is `Some`. Every intermediate key must already exist in the
    // document (tables are created by the callers below, e.g. for `optimizer_details`),
    // otherwise an error is returned.
    macro_rules! update_if_needed {
        ([$($key:expr),+], $value:expr) => {
            {
                if let Some(value) = $value {
                    let mut current = &mut doc[Config::PROFILE_SECTION][profile];
                    $(
                        if let Some(nested_doc) = current.get_mut(&$key) {
                            current = nested_doc;
                        } else {
                            return Err(eyre::eyre!("cannot find the key: {}", $key));
                        }
                    )+
                    *current = toml_edit::value(value);
                }
            }
        };
    }
    // update the chain id
    // NOTE(review): toml_edit only stores i64, so a chain id above i64::MAX (u64 range)
    // would silently wrap here — presumably no practical chain id is that large; verify.
    doc[Config::PROFILE_SECTION][profile]["chain_id"] = toml_edit::value(chain.id() as i64);
    // disable auto detect solc and set the solc version
    doc[Config::PROFILE_SECTION][profile]["auto_detect_solc"] = toml_edit::value(false);
    let version = meta.compiler_version()?;
    // Only major.minor.patch is written; any pre-release/build metadata is dropped.
    doc[Config::PROFILE_SECTION][profile]["solc_version"] =
        toml_edit::value(format!("{}.{}.{}", version.major, version.minor, version.patch));
    // get optimizer settings
    // we ignore `model_checker`, `debug`, and `output_selection` for now,
    // it seems they do not have impacts on the actual compilation
    let Settings { optimizer, libraries, evm_version, via_ir, stop_after, metadata, .. } =
        meta.settings()?;
    eyre::ensure!(stop_after.is_none(), "stop_after should be None");
    update_if_needed!(["evm_version"], evm_version.map(|v| v.to_string()));
    update_if_needed!(["via_ir"], via_ir);
    // update metadata if needed
    if let Some(metadata) = metadata {
        update_if_needed!(["cbor_metadata"], metadata.cbor_metadata);
        update_if_needed!(["use_literal_content"], metadata.use_literal_content);
        update_if_needed!(["bytecode_hash"], metadata.bytecode_hash.map(|v| v.to_string()));
    }
    // update optimizer settings if needed
    update_if_needed!(["optimizer"], optimizer.enabled);
    update_if_needed!(["optimizer_runs"], optimizer.runs.map(|v| v as i64));
    // update optimizer details if needed
    if let Some(detail) = optimizer.details {
        // Create the intermediate table first so update_if_needed! can navigate into it.
        doc[Config::PROFILE_SECTION][profile]["optimizer_details"] = toml_edit::table();
        update_if_needed!(["optimizer_details", "peephole"], detail.peephole);
        update_if_needed!(["optimizer_details", "inliner"], detail.inliner);
        update_if_needed!(["optimizer_details", "jumpdestRemover"], detail.jumpdest_remover);
        update_if_needed!(["optimizer_details", "orderLiterals"], detail.order_literals);
        update_if_needed!(["optimizer_details", "deduplicate"], detail.deduplicate);
        update_if_needed!(["optimizer_details", "cse"], detail.cse);
        update_if_needed!(["optimizer_details", "constantOptimizer"], detail.constant_optimizer);
        update_if_needed!(
            ["optimizer_details", "simpleCounterForLoopUncheckedIncrement"],
            detail.simple_counter_for_loop_unchecked_increment
        );
        update_if_needed!(["optimizer_details", "yul"], detail.yul);
        if let Some(yul_detail) = detail.yul_details {
            // Same as above: the nested table must exist before its leaves are set.
            doc[Config::PROFILE_SECTION][profile]["optimizer_details"]["yulDetails"] =
                toml_edit::table();
            update_if_needed!(
                ["optimizer_details", "yulDetails", "stackAllocation"],
                yul_detail.stack_allocation
            );
            update_if_needed!(
                ["optimizer_details", "yulDetails", "optimizerSteps"],
                yul_detail.optimizer_steps
            );
        }
    }
    // apply remapping on libraries
    let path_config: ProjectPathsConfig = config.project_paths();
    let libraries = libraries
        .apply(|libs| path_config.apply_lib_remappings(libs))
        .with_stripped_file_prefixes(&path_config.root);
    // update libraries
    // Serialized in foundry's `<path>:<name>:<address>` library string format.
    let mut lib_array = toml_edit::Array::new();
    for (path_to_lib, info) in libraries.libs {
        for (lib_name, address) in info {
            lib_array.push(format!("{}:{}:{}", path_to_lib.to_str().unwrap(), lib_name, address));
        }
    }
    doc[Config::PROFILE_SECTION][profile]["libraries"] = toml_edit::value(lib_array);
    Ok(())
}
/// Dump the contract sources to the root directory.
/// The sources are dumped to the `src` directory.
/// IO errors may be returned.
/// A list of remappings is returned
///
/// The sources are first written to a temporary `raw_sources` directory, then moved into
/// place (`src`, `lib`, or `node_modules`), and a remapping is recorded for every moved
/// top-level folder so the cloned project still compiles.
fn dump_sources(meta: &Metadata, root: &PathBuf, no_reorg: bool) -> Result<Vec<RelativeRemapping>> {
    // get config
    let path_config = ProjectPathsConfig::builder().build_with_root::<Solc>(root);
    // we will canonicalize the sources directory later
    let src_dir = &path_config.sources;
    // NOTE(review): assumes at least one library dir is configured — the default
    // ProjectPathsConfig presumably always provides `lib`; confirm, otherwise this indexes
    // out of bounds.
    let lib_dir = &path_config.libraries[0];
    // Optional dir, if found in src
    let node_modules_dir = &root.join("node_modules");
    let contract_name = &meta.contract_name;
    let source_tree = meta.source_tree();
    // then we move the sources to the correct directories
    // we will first load existing remappings if necessary
    // make sure this happens before dumping sources
    let mut remappings: Vec<Remapping> = Remapping::find_many(root);
    // first we dump the sources to a temporary directory
    let tmp_dump_dir = root.join("raw_sources");
    source_tree
        .write_to(&tmp_dump_dir)
        .map_err(|e| eyre::eyre!("failed to dump sources: {}", e))?;
    // check whether we need to re-organize directories in the original sources, since we do not
    // want to put all the sources in the `src` directory if the original directory structure is
    // well organized, e.g., a standard foundry project containing `src` and `lib`
    //
    // * if the user wants to keep the original directory structure, we should not re-organize.
    // * if there is any other directory other than `src`, `contracts`, `lib`, `hardhat`,
    //   `forge-std`,
    // or not started with `@`, we should not re-organize.
    let to_reorg = !no_reorg
        && std::fs::read_dir(tmp_dump_dir.join(contract_name))?.all(|e| {
            let Ok(e) = e else { return false };
            let folder_name = e.file_name();
            folder_name == "src"
                || folder_name == "lib"
                || folder_name == "node_modules"
                || folder_name == "contracts"
                || folder_name == "hardhat"
                || folder_name == "forge-std"
                || folder_name.to_string_lossy().starts_with('@')
        });
    // ensure `src` and `lib` directories exist
    eyre::ensure!(Path::exists(&root.join(src_dir)), "`src` directory must exists");
    eyre::ensure!(Path::exists(&root.join(lib_dir)), "`lib` directory must exists");
    // move source files
    for entry in std::fs::read_dir(tmp_dump_dir.join(contract_name))? {
        let entry = entry?;
        let folder_name = entry.file_name();
        // special handling when we need to re-organize the directories: we flatten them.
        if to_reorg {
            if folder_name == "contracts"
                || folder_name == "src"
                || folder_name == "lib"
                || folder_name == "node_modules"
            {
                // move all sub folders in contracts to src or lib
                let new_dir = if folder_name == "lib" {
                    lib_dir
                } else if folder_name == "node_modules" {
                    // Create node_modules dir if it exists in raw sources.
                    // NOTE(review): `create_dir` errors if `node_modules` already exists at
                    // the root — presumably a fresh clone never has one; confirm.
                    std::fs::create_dir(node_modules_dir)?;
                    node_modules_dir
                } else {
                    src_dir
                };
                // Flatten: each child of the folder is moved directly into `new_dir`, and a
                // remapping `<folder>/<child>` -> dest is recorded so imports keep resolving.
                for e in read_dir(entry.path())? {
                    let e = e?;
                    let dest = new_dir.join(e.file_name());
                    eyre::ensure!(!Path::exists(&dest), "destination already exists: {:?}", dest);
                    std::fs::rename(e.path(), &dest)?;
                    remappings.push(Remapping {
                        context: None,
                        name: format!(
                            "{}/{}",
                            folder_name.to_string_lossy(),
                            e.file_name().to_string_lossy()
                        ),
                        path: dest.to_string_lossy().to_string(),
                    });
                }
            } else {
                // `to_reorg` guarantees only the folder names below remain possible here.
                assert!(
                    folder_name == "hardhat"
                        || folder_name == "forge-std"
                        || folder_name.to_string_lossy().starts_with('@')
                );
                // move these other folders to lib
                let dest = lib_dir.join(&folder_name);
                if folder_name == "forge-std" {
                    // let's use the provided forge-std directory
                    // (discard the version shipped with the verified sources; the project's
                    // own forge-std installed earlier takes precedence)
                    std::fs::remove_dir_all(&dest)?;
                }
                eyre::ensure!(!Path::exists(&dest), "destination already exists: {:?}", dest);
                std::fs::rename(entry.path(), &dest)?;
                remappings.push(Remapping {
                    context: None,
                    name: folder_name.to_string_lossy().to_string(),
                    path: dest.to_string_lossy().to_string(),
                });
            }
        } else {
            // directly move the all folders into src
            let dest = src_dir.join(&folder_name);
            eyre::ensure!(!Path::exists(&dest), "destination already exists: {:?}", dest);
            std::fs::rename(entry.path(), &dest)?;
            // `src` itself needs no remapping; everything else moved under src does.
            if folder_name != "src" {
                remappings.push(Remapping {
                    context: None,
                    name: folder_name.to_string_lossy().to_string(),
                    path: dest.to_string_lossy().to_string(),
                });
            }
        }
    }
    // remove the temporary directory
    std::fs::remove_dir_all(tmp_dump_dir)?;
    // add remappings in the metadata
    for mut r in meta.settings()?.remappings {
        if to_reorg {
            // we should update its remapped path in the same way as we dump sources
            // i.e., remove prefix `contracts` (if any) and add prefix `src`
            let new_path = if r.path.starts_with("contracts") {
                PathBuf::from("src").join(PathBuf::from(&r.path).strip_prefix("contracts")?)
            } else if r.path.starts_with('@')
                || r.path.starts_with("hardhat/")
                || r.path.starts_with("forge-std/")
            {
                PathBuf::from("lib").join(PathBuf::from(&r.path))
            } else {
                PathBuf::from(&r.path)
            };
            r.path = new_path.to_string_lossy().to_string();
        }
        remappings.push(r);
    }
    // All collected remappings are returned relative to the project root.
    Ok(remappings.into_iter().map(|r| r.into_relative(root)).collect())
}
/// Compiles the project rooted at `root` and returns the compilation output.
///
/// The storage layout is added to the extra compiler outputs, since the clone flow
/// relies on it being present in the artifacts.
pub fn compile_project(root: &Path) -> Result<ProjectCompileOutput> {
    // Load and sanitize the cloned project's configuration, then request storage layouts
    // alongside the default artifacts.
    let mut config = Config::load_with_root(root)?.sanitized();
    config.extra_output.push(ContractOutputSelection::StorageLayout);
    let project = config.project()?;
    ProjectCompiler::new().compile(&project)
}
/// Find the artifact of the contract with the specified name.
///
/// Returns the path of the source file together with the matching artifact.
///
/// # Errors
/// - if no contract with the given name exists in the compilation output
/// - if more than one contract with the same name exists (in different files); this
///   ambiguity is rejected for now instead of being resolved
pub fn find_main_contract<'a>(
    compile_output: &'a ProjectCompileOutput,
    contract: &str,
) -> Result<(PathBuf, &'a ConfigurableContractArtifact)> {
    let mut found = None;
    for (file, name, artifact) in compile_output.artifacts_with_files() {
        if contract != name {
            continue;
        }
        // A second hit means the name is ambiguous across files; bail out.
        if found.is_some() {
            return Err(eyre::eyre!("multiple contracts with the same name found"));
        }
        found = Some((PathBuf::from(file), artifact));
    }
    found.ok_or_else(|| eyre::eyre!("contract not found"))
}
/// EtherscanClient is a trait that defines the methods to interact with Etherscan.
/// It is defined as a wrapper of the `foundry_block_explorers::Client` to allow mocking.
#[cfg_attr(test, mockall::automock)]
pub(crate) trait EtherscanClient {
    /// Fetches the verified source code and metadata for `address`.
    async fn contract_source_code(
        &self,
        address: Address,
    ) -> std::result::Result<ContractMetadata, EtherscanError>;
    /// Fetches the creation transaction data (creator, tx hash, etc.) for `address`.
    async fn contract_creation_data(
        &self,
        address: Address,
    ) -> std::result::Result<ContractCreationData, EtherscanError>;
}
impl EtherscanClient for Client {
    // Delegates to the inherent `Client::contract_source_code`; inherent methods take
    // precedence over trait methods in method resolution, so this is not recursive.
    async fn contract_source_code(
        &self,
        address: Address,
    ) -> std::result::Result<ContractMetadata, EtherscanError> {
        self.contract_source_code(address).await
    }
    // Delegates to the inherent `Client::contract_creation_data` (see note above on
    // method resolution).
    async fn contract_creation_data(
        &self,
        address: Address,
    ) -> std::result::Result<ContractCreationData, EtherscanError> {
        self.contract_creation_data(address).await
    }
}
#[cfg(test)]
mod tests {
use super::*;
use alloy_primitives::hex;
use foundry_compilers::CompilerContract;
use foundry_test_utils::rpc::next_etherscan_api_key;
use std::collections::BTreeMap;
    // Compiles the project at `root` and panics on any compilation failure.
    // `println!` is allowed here because this is test-only diagnostics output.
    #[expect(clippy::disallowed_macros)]
    fn assert_successful_compilation(root: &PathBuf) -> ProjectCompileOutput {
        println!("project_root: {root:#?}");
        compile_project(root).expect("compilation failure")
    }
fn assert_compilation_result(
compiled: ProjectCompileOutput,
contract_name: &str,
stripped_creation_code: &str,
) {
compiled.compiled_contracts_by_compiler_version().iter().for_each(|(_, contracts)| {
contracts.iter().for_each(|(name, contract)| {
if name == contract_name {
let compiled_creation_code =
contract.bin_ref().expect("creation code not found");
assert!(
hex::encode(compiled_creation_code.as_ref())
.starts_with(stripped_creation_code),
"inconsistent creation code"
);
}
});
});
}
    // Builds a mocked Etherscan client backed by fixture files in `testdata/etherscan`.
    //
    // Each sub-folder of the testdata directory is named after a contract address and
    // contains a `metadata.json` and a `creation_data.json` fixture. The returned mock
    // expects exactly one call to each trait method and replays the fixture data for
    // `address`. Panics if the fixtures for `address` are missing or malformed.
    fn mock_etherscan(address: Address) -> impl super::EtherscanClient {
        // load mock data
        let mut mocked_data = BTreeMap::new();
        let data_folder =
            PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../testdata/etherscan");
        // iterate each sub folder
        for entry in std::fs::read_dir(data_folder).expect("failed to read test data folder") {
            let entry = entry.expect("failed to read test data entry");
            // folder name is the contract address
            let addr: Address = entry.file_name().to_string_lossy().parse().unwrap();
            let contract_data_dir = entry.path();
            // the metadata.json file contains the metadata of the contract
            let metadata_file = contract_data_dir.join("metadata.json");
            let metadata: ContractMetadata =
                serde_json::from_str(&std::fs::read_to_string(metadata_file).unwrap())
                    .expect("failed to parse metadata.json");
            // the creation_data.json file contains the creation data of the contract
            let creation_data_file = contract_data_dir.join("creation_data.json");
            let creation_data: ContractCreationData =
                serde_json::from_str(&std::fs::read_to_string(creation_data_file).unwrap())
                    .expect("failed to parse creation_data.json");
            // insert the data to the map
            mocked_data.insert(addr, (metadata, creation_data));
        }
        let (metadata, creation_data) = mocked_data.get(&address).unwrap();
        let metadata = metadata.clone();
        let creation_data = *creation_data;
        // each method is expected to be hit exactly once per cloned contract
        let mut mocked_client = super::MockEtherscanClient::new();
        mocked_client
            .expect_contract_source_code()
            .times(1)
            .returning(move |_| Ok(metadata.clone()));
        mocked_client
            .expect_contract_creation_data()
            .times(1)
            .returning(move |_| Ok(creation_data));
        mocked_client
    }
    /// Fetch the metadata and creation data from Etherscan and dump them to the testdata folder.
    ///
    /// This is a maintenance helper, not a regression test: run it manually (remove the
    /// `#[ignore]`) to refresh the fixtures consumed by `mock_etherscan`. It needs
    /// network access and a valid Etherscan API key.
    #[tokio::test(flavor = "multi_thread")]
    #[ignore = "this test is used to dump mock data from Etherscan"]
    async fn test_dump_mock_data() {
        let address: Address = "0x9d27527Ada2CF29fBDAB2973cfa243845a08Bd3F".parse().unwrap();
        let data_folder = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("../../testdata/etherscan")
            .join(address.to_string());
        // create folder if not exists
        std::fs::create_dir_all(&data_folder).unwrap();
        // create metadata.json and creation_data.json
        let client = Client::new(Chain::mainnet(), next_etherscan_api_key()).unwrap();
        let meta = client.contract_source_code(address).await.unwrap();
        // dump json
        let json = serde_json::to_string_pretty(&meta).unwrap();
        // write to metadata.json
        std::fs::write(data_folder.join("metadata.json"), json).unwrap();
        let creation_data = client.contract_creation_data(address).await.unwrap();
        // dump json
        let json = serde_json::to_string_pretty(&creation_data).unwrap();
        // write to creation_data.json
        std::fs::write(data_folder.join("creation_data.json"), json).unwrap();
    }
    /// Run the clone command with the specified contract address and assert the compilation.
    ///
    /// Drives the full clone pipeline against a mocked Etherscan client inside a
    /// temporary directory: init an empty project, apply the contract metadata,
    /// collect compilation metadata, then recompile. When `check_compilation_result`
    /// is set, the recompiled creation code is additionally compared against the
    /// expected prefix from `pick_creation_info`.
    async fn one_test_case(address: Address, check_compilation_result: bool) {
        let mut project_root = tempfile::tempdir().unwrap().path().to_path_buf();
        let client = mock_etherscan(address);
        let meta = CloneArgs::collect_metadata_from_client(address, &client).await.unwrap();
        CloneArgs::init_an_empty_project(&project_root, DependencyInstallOpts::default())
            .await
            .unwrap();
        // canonicalize after init so symlinked temp dirs (e.g. on macOS) compare equal
        project_root = dunce::canonicalize(&project_root).unwrap();
        CloneArgs::parse_metadata(&meta, Chain::mainnet(), &project_root, false, false)
            .await
            .unwrap();
        CloneArgs::collect_compilation_metadata(
            &meta,
            Chain::mainnet(),
            address,
            &project_root,
            &client,
        )
        .await
        .unwrap();
        let rv = assert_successful_compilation(&project_root);
        if check_compilation_result {
            let (contract_name, stripped_creation_code) =
                pick_creation_info(&address.to_string()).expect("creation code not found");
            assert_compilation_result(rv, contract_name, stripped_creation_code);
        }
        std::fs::remove_dir_all(project_root).unwrap();
    }
    // Clone a contract whose verified sources consist of a single file.
    #[tokio::test(flavor = "multi_thread")]
    async fn test_clone_single_file_contract() {
        let address = "0x35Fb958109b70799a8f9Bc2a8b1Ee4cC62034193".parse().unwrap();
        one_test_case(address, true).await
    }
    // Clone a contract compiled with fine-grained optimizer details in its settings.
    #[tokio::test(flavor = "multi_thread")]
    async fn test_clone_contract_with_optimization_details() {
        let address = "0x8B3D32cf2bb4d0D16656f4c0b04Fa546274f1545".parse().unwrap();
        one_test_case(address, true).await
    }
#[tokio::test(flavor = "multi_thread")]
async fn test_clone_contract_with_libraries() {
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/mod.rs | crates/forge/src/cmd/mod.rs | //! `forge` subcommands.
//!
//! All subcommands should respect the `foundry_config::Config`.
//! If a subcommand accepts values that are supported by the `Config`, then the subcommand should
//! implement `figment::Provider` which allows the subcommand to override the config's defaults, see
//! [`foundry_config::Config`].
pub mod bind;
pub mod bind_json;
pub mod build;
pub mod cache;
pub mod clone;
pub mod compiler;
pub mod config;
pub mod coverage;
pub mod create;
pub mod doc;
pub mod eip712;
pub mod flatten;
pub mod fmt;
pub mod geiger;
pub mod generate;
pub mod init;
pub mod inspect;
pub mod install;
pub mod lint;
pub mod remappings;
pub mod remove;
pub mod selectors;
pub mod snapshot;
pub mod soldeer;
pub mod test;
pub mod tree;
pub mod update;
pub mod watch;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/init.rs | crates/forge/src/cmd/init.rs | use super::install::DependencyInstallOpts;
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::utils::Git;
use foundry_common::fs;
use foundry_compilers::artifacts::remappings::Remapping;
use foundry_config::Config;
use std::path::{Path, PathBuf};
use yansi::Paint;
/// Supported networks for `forge init --network <NETWORK>`
#[derive(Clone, Debug, clap::ValueEnum)]
pub enum Networks {
    /// Tempo: scaffolds the Tempo project template (Mail contracts, tempo-std dependency).
    Tempo,
}
/// CLI arguments for `forge init`.
#[derive(Clone, Debug, Default, Parser)]
pub struct InitArgs {
    /// The root directory of the new project.
    #[arg(value_hint = ValueHint::DirPath, default_value = ".", value_name = "PATH")]
    pub root: PathBuf,
    /// The template to start from.
    ///
    /// Accepts a full URL, a `github.com/...` path, or an `org/repo` shorthand.
    #[arg(long, short)]
    pub template: Option<String>,
    /// Branch argument that can only be used with template option.
    /// If not specified, the default branch is used.
    #[arg(long, short, requires = "template")]
    pub branch: Option<String>,
    /// Do not install dependencies from the network.
    #[arg(long, conflicts_with = "template", visible_alias = "no-deps")]
    pub offline: bool,
    /// Create the project even if the specified root directory is not empty.
    #[arg(long, conflicts_with = "template")]
    pub force: bool,
    /// Create a .vscode/settings.json file with Solidity settings, and generate a remappings.txt
    /// file.
    #[arg(long, conflicts_with = "template")]
    pub vscode: bool,
    /// Initialize a Vyper project template.
    #[arg(long, conflicts_with = "template")]
    pub vyper: bool,
    /// Initialize a project template for the specified network in Foundry.
    #[arg(long, short, conflicts_with_all = &["vyper", "template"])]
    pub network: Option<Networks>,
    /// Use the parent git repository instead of initializing a new one.
    /// Only valid if the target is in a git repository.
    #[arg(long, conflicts_with = "template")]
    pub use_parent_git: bool,
    /// Do not create example contracts (Counter.sol, Counter.t.sol, Counter.s.sol).
    #[arg(long, conflicts_with = "template")]
    pub empty: bool,
    // Shared dependency-install options (shallow/no-git/commit), flattened into this command.
    #[command(flatten)]
    pub install: DependencyInstallOpts,
}
impl InitArgs {
    /// Scaffolds a new Foundry project at `self.root`.
    ///
    /// Two modes:
    /// - template mode: init a git repo, fetch the template, and collapse its history
    ///   into a single commit;
    /// - default mode: create `src`/`test`/`script`, write example contracts (unless
    ///   `--empty`), README, `foundry.toml`, set up git, and install `forge-std`
    ///   (plus `tempo-std` for `--network tempo`) unless `--offline`.
    pub async fn run(self) -> Result<()> {
        let Self {
            root,
            template,
            branch,
            install,
            offline,
            force,
            vscode,
            use_parent_git,
            vyper,
            network,
            empty,
        } = self;
        let DependencyInstallOpts { shallow, no_git, commit } = install;
        let tempo = matches!(network, Some(Networks::Tempo));
        // create the root dir if it does not exist
        if !root.exists() {
            fs::create_dir_all(&root)?;
        }
        let root = dunce::canonicalize(root)?;
        let git = Git::new(&root).shallow(shallow);
        // if a template is provided, then this command initializes a git repo,
        // fetches the template repo, and resets the git history to the head of the fetched
        // repo with no other history
        if let Some(template) = template {
            // normalize the template reference into a full URL
            let template = if template.contains("://") {
                template
            } else if template.starts_with("github.com/") {
                "https://".to_string() + &template
            } else {
                "https://github.com/".to_string() + &template
            };
            sh_println!("Initializing {} from {}...", root.display(), template)?;
            // initialize the git repository
            git.init()?;
            // fetch the template - always fetch shallow for templates since git history will be
            // collapsed. gitmodules will be initialized after the template is fetched
            git.fetch(true, &template, branch)?;
            // reset git history to the head of the template
            // first get the commit hash that was fetched
            let commit_hash = git.commit_hash(true, "FETCH_HEAD")?;
            // format a commit message for the new repo
            let commit_msg = format!("chore: init from {template} at {commit_hash}");
            // get the hash of the FETCH_HEAD with the new commit message
            // (a fresh commit object over the template's tree, with no parents)
            let new_commit_hash = git.commit_tree("FETCH_HEAD^{tree}", Some(commit_msg))?;
            // reset head of this repo to be the head of the template repo
            git.reset(true, new_commit_hash)?;
            // if shallow, just initialize submodules
            if shallow {
                git.submodule_init()?;
            } else {
                // if not shallow, initialize and clone submodules (without fetching latest)
                git.submodule_update(false, false, true, true, std::iter::empty::<PathBuf>())?;
            }
        } else {
            // if target is not empty
            if root.read_dir().is_ok_and(|mut i| i.next().is_some()) {
                if !force {
                    eyre::bail!(
                        "Cannot run `init` on a non-empty directory.\n\
                         Run with the `--force` flag to initialize regardless."
                    );
                }
                sh_warn!("Target directory is not empty, but `--force` was specified")?;
            }
            // ensure git status is clean before generating anything
            if !no_git && commit && !force && git.is_in_repo()? {
                git.ensure_clean()?;
            }
            sh_println!("Initializing {}...", root.display())?;
            // make the dirs
            let src = root.join("src");
            fs::create_dir_all(&src)?;
            let test = root.join("test");
            fs::create_dir_all(&test)?;
            let script = root.join("script");
            fs::create_dir_all(&script)?;
            // Only create example contracts if not disabled
            // (the template flavor depends on --vyper / --network tempo / default Solidity)
            if !empty {
                if vyper {
                    // write the contract file
                    let contract_path = src.join("Counter.vy");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/vyper/CounterTemplate.vy"),
                    )?;
                    let interface_path = src.join("ICounter.sol");
                    fs::write(
                        interface_path,
                        include_str!("../../assets/vyper/ICounterTemplate.sol"),
                    )?;
                    // write the tests
                    let contract_path = test.join("Counter.t.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/vyper/CounterTemplate.t.sol"),
                    )?;
                    // write the script
                    let contract_path = script.join("Counter.s.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/vyper/CounterTemplate.s.sol"),
                    )?;
                } else if tempo {
                    // write the contract file
                    let contract_path = src.join("Mail.sol");
                    fs::write(contract_path, include_str!("../../assets/tempo/MailTemplate.sol"))?;
                    // write the tests
                    let contract_path = test.join("Mail.t.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/tempo/MailTemplate.t.sol"),
                    )?;
                    // write the script
                    let contract_path = script.join("Mail.s.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/tempo/MailTemplate.s.sol"),
                    )?;
                } else {
                    // write the contract file
                    let contract_path = src.join("Counter.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/solidity/CounterTemplate.sol"),
                    )?;
                    // write the tests
                    let contract_path = test.join("Counter.t.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/solidity/CounterTemplate.t.sol"),
                    )?;
                    // write the script
                    let contract_path = script.join("Counter.s.sol");
                    fs::write(
                        contract_path,
                        include_str!("../../assets/solidity/CounterTemplate.s.sol"),
                    )?;
                }
            }
            // Write the README file
            let readme_path = root.join("README.md");
            if tempo {
                fs::write(readme_path, include_str!("../../assets/tempo/README.md"))?;
            } else {
                fs::write(readme_path, include_str!("../../assets/README.md"))?;
            }
            // write foundry.toml, if it doesn't exist already
            let dest = root.join(Config::FILE_NAME);
            let mut config = Config::load_with_root(&root)?;
            if !dest.exists() {
                fs::write(dest, config.clone().into_basic().to_string_pretty()?)?;
            }
            let git = self.install.git(&config);
            // set up the repo
            if !no_git {
                init_git_repo(git, commit, use_parent_git, vyper, tempo)?;
            }
            // install forge-std
            if !offline {
                if root.join("lib/forge-std").exists() {
                    sh_warn!("\"lib/forge-std\" already exists, skipping install...")?;
                    self.install.install(&mut config, vec![]).await?;
                } else {
                    let dep = "https://github.com/foundry-rs/forge-std".parse()?;
                    self.install.install(&mut config, vec![dep]).await?;
                }
                // install tempo-std
                if tempo {
                    if root.join("lib/tempo-std").exists() {
                        sh_warn!("\"lib/tempo-std\" already exists, skipping install...")?;
                        self.install.install(&mut config, vec![]).await?;
                    } else {
                        let dep = "https://github.com/tempoxyz/tempo-std".parse()?;
                        self.install.install(&mut config, vec![dep]).await?;
                    }
                }
            }
            // init vscode settings
            if vscode {
                init_vscode(&root)?;
            }
        }
        sh_println!("{}", "    Initialized forge project".green())?;
        Ok(())
    }
}
/// Initialises `root` as a git repository, if it isn't one already, unless 'use_parent_git' is
/// true.
///
/// Creates `.gitignore` and `.github/workflows/test.yml`, if they don't exist already.
/// The workflow template depends on the project flavor (`vyper` / `tempo` / default Solidity).
///
/// Commits everything in `root` if `commit` is true.
fn init_git_repo(
    git: Git<'_>,
    commit: bool,
    use_parent_git: bool,
    vyper: bool,
    tempo: bool,
) -> Result<()> {
    // `git init`
    // Init when not inside any repo; also re-init when inside a repo but not at its root,
    // unless the user explicitly opted into reusing the parent repository.
    if !git.is_in_repo()? || (!use_parent_git && !git.is_repo_root()?) {
        git.init()?;
    }
    // .gitignore
    let gitignore = git.root.join(".gitignore");
    if !gitignore.exists() {
        fs::write(gitignore, include_str!("../../assets/.gitignoreTemplate"))?;
    }
    // github workflow
    let workflow = git.root.join(".github/workflows/test.yml");
    if !workflow.exists() {
        fs::create_dir_all(workflow.parent().unwrap())?;
        if vyper {
            fs::write(workflow, include_str!("../../assets/vyper/workflowTemplate.yml"))?;
        } else if tempo {
            fs::write(workflow, include_str!("../../assets/tempo/workflowTemplate.yml"))?;
        } else {
            fs::write(workflow, include_str!("../../assets/solidity/workflowTemplate.yml"))?;
        }
    }
    // commit everything
    if commit {
        git.add(Some("--all"))?;
        git.commit("chore: forge init")?;
    }
    Ok(())
}
/// initializes the `.vscode/settings.json` file
fn init_vscode(root: &Path) -> Result<()> {
let remappings_file = root.join("remappings.txt");
if !remappings_file.exists() {
let mut remappings = Remapping::find_many(&root.join("lib"))
.into_iter()
.map(|r| r.into_relative(root).to_relative_remapping().to_string())
.collect::<Vec<_>>();
if !remappings.is_empty() {
remappings.sort();
let content = remappings.join("\n");
fs::write(remappings_file, content)?;
}
}
let vscode_dir = root.join(".vscode");
let settings_file = vscode_dir.join("settings.json");
let mut settings = if !vscode_dir.is_dir() {
fs::create_dir_all(&vscode_dir)?;
serde_json::json!({})
} else if settings_file.exists() {
foundry_compilers::utils::read_json_file(&settings_file)?
} else {
serde_json::json!({})
};
let obj = settings.as_object_mut().expect("Expected settings object");
// insert [vscode-solidity settings](https://github.com/juanfranblanco/vscode-solidity)
let src_key = "solidity.packageDefaultDependenciesContractsDirectory";
if !obj.contains_key(src_key) {
obj.insert(src_key.to_string(), serde_json::Value::String("src".to_string()));
}
let lib_key = "solidity.packageDefaultDependenciesDirectory";
if !obj.contains_key(lib_key) {
obj.insert(lib_key.to_string(), serde_json::Value::String("lib".to_string()));
}
let content = serde_json::to_string_pretty(&settings)?;
fs::write(settings_file, content)?;
Ok(())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/geiger.rs | crates/forge/src/cmd/geiger.rs | use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::opts::BuildOpts;
use foundry_config::{DenyLevel, impl_figment_convert};
use std::path::PathBuf;
/// CLI arguments for `forge geiger`.
///
/// This command is an alias for `forge lint --only-lint unsafe-cheatcode`
/// and detects usage of unsafe cheat codes in a project and its dependencies.
#[derive(Clone, Debug, Parser)]
pub struct GeigerArgs {
    /// Paths to files or directories to detect.
    #[arg(
        value_hint = ValueHint::FilePath,
        value_name = "PATH",
        num_args(0..)
    )]
    paths: Vec<PathBuf>,
    // Deprecated: checking is now the default behavior; kept hidden for CLI compatibility.
    #[arg(long, hide = true)]
    check: bool,
    // Deprecated: full reports are no longer generated; kept hidden for CLI compatibility.
    #[arg(long, hide = true)]
    full: bool,
    // Standard build options shared with other forge commands.
    #[command(flatten)]
    build: BuildOpts,
}
impl_figment_convert!(GeigerArgs, build);
impl GeigerArgs {
    /// Runs `forge geiger` by translating it into the equivalent `forge lint` invocation
    /// restricted to the `unsafe-cheatcode` lint, after emitting deprecation warnings
    /// for the command itself and its obsolete flags.
    pub fn run(self) -> Result<()> {
        // Deprecated flags warnings
        if self.check {
            sh_warn!("`--check` is deprecated as it's now the default behavior\n")?;
        }
        if self.full {
            sh_warn!("`--full` is deprecated as reports are not generated anymore\n")?;
        }
        sh_warn!(
            "`forge geiger` is deprecated, as it is just an alias for `forge lint --only-lint unsafe-cheatcode`\n"
        )?;
        // Convert geiger command to lint command with specific lint filter
        let mut lint_args = crate::cmd::lint::LintArgs {
            paths: self.paths,
            severity: None,
            lint: Some(vec!["unsafe-cheatcode".to_string()]),
            build: self.build,
        };
        // notes must be denied so findings fail the run, matching geiger's old exit behavior
        lint_args.build.deny = Some(DenyLevel::Notes);
        // Run the lint command with the geiger-specific configuration
        lint_args.run()
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/bind_json.rs | crates/forge/src/cmd/bind_json.rs | use super::eip712::Resolver;
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::{
opts::{BuildOpts, configure_pcx_from_solc},
utils::LoadConfig,
};
use foundry_common::{TYPE_BINDING_PREFIX, fs};
use foundry_compilers::{
CompilerInput, Graph, Project,
artifacts::{Source, Sources},
multi::{MultiCompilerLanguage, MultiCompilerParser},
solc::{SolcLanguage, SolcVersionedInput},
};
use foundry_config::Config;
use itertools::Itertools;
use path_slash::PathExt;
use rayon::prelude::*;
use semver::Version;
use solar::parse::{
Parser as SolarParser,
ast::{self, Arena, FunctionKind, Span, VarMut, interface::source_map::FileName, visit::Visit},
interface::Session,
};
use std::{
collections::{BTreeMap, BTreeSet, HashSet},
fmt::Write,
ops::ControlFlow,
path::{Path, PathBuf},
sync::Arc,
};
foundry_config::impl_figment_convert!(BindJsonArgs, build);
const JSON_BINDINGS_PLACEHOLDER: &str = "library JsonBindings {}";
/// CLI arguments for `forge bind-json`.
#[derive(Clone, Debug, Parser)]
pub struct BindJsonArgs {
    /// The path to write bindings to.
    ///
    /// Falls back to the `bind_json.out` path from the config when omitted.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")]
    pub out: Option<PathBuf>,
    // Standard build options shared with other forge commands.
    #[command(flatten)]
    build: BuildOpts,
}
impl BindJsonArgs {
    /// Generates the JSON serialization bindings library and writes it to the target path.
    ///
    /// Pipeline: read sources -> pick a single Solidity version -> preprocess sources so
    /// stale bindings can't break compilation -> resolve structs -> write bindings.
    pub fn run(self) -> Result<()> {
        let config = self.load_config()?;
        let project = config.ephemeral_project()?;
        // CLI path wins over the configured `bind_json.out`
        let target_path = config.root.join(self.out.as_ref().unwrap_or(&config.bind_json.out));
        // Step 1: Read and preprocess sources
        let sources = project.paths.read_input_files()?;
        let graph = Graph::<MultiCompilerParser>::resolve_sources(&project.paths, sources)?;
        // We only generate bindings for a single Solidity version to avoid conflicts.
        let (version, mut sources, _) = graph
            // resolve graph into mapping language -> version -> sources
            .into_sources_by_version(&project)?
            .sources
            .into_iter()
            // we are only interested in Solidity sources
            .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity))
            .ok_or_else(|| eyre::eyre!("no Solidity sources"))?
            .1
            .into_iter()
            // For now, we are always picking the latest version.
            .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2))
            .unwrap();
        // Step 2: Preprocess sources to handle potentially invalid bindings
        self.preprocess_sources(&mut sources)?;
        // Insert empty bindings file.
        // This replaces any previously generated bindings, removing potentially stale imports.
        sources.insert(target_path.clone(), Source::new(JSON_BINDINGS_PLACEHOLDER));
        // Step 3: Find structs and generate bindings
        let structs_to_write =
            self.find_and_resolve_structs(&config, &project, version, sources, &target_path)?;
        // Step 4: Write bindings
        self.write_bindings(&structs_to_write, &target_path)?;
        Ok(())
    }
/// In cases when user moves/renames/deletes structs, compiler will start failing because
/// generated bindings will be referencing non-existing structs or importing non-existing
/// files.
///
/// Because of that, we need a little bit of preprocessing to make sure that bindings will still
/// be valid.
///
/// The strategy is:
/// 1. Replace bindings file with an empty one to get rid of potentially invalid imports.
/// 2. Remove all function bodies to get rid of `serialize`/`deserialize` invocations.
/// 3. Remove all `immutable` attributes to avoid errors because of erased constructors
/// initializing them.
///
/// After that we'll still have enough information for bindings but compilation should succeed
/// in most of the cases.
fn preprocess_sources(&self, sources: &mut Sources) -> Result<()> {
let sess = Session::builder().with_stderr_emitter().build();
let result = sess.enter(|| -> solar::interface::Result<()> {
sources.0.par_iter_mut().try_for_each(|(path, source)| {
let mut content = Arc::try_unwrap(std::mem::take(&mut source.content)).unwrap();
let arena = Arena::new();
let mut parser = SolarParser::from_source_code(
&sess,
&arena,
FileName::Real(path.clone()),
content.to_string(),
)?;
let ast = parser.parse_file().map_err(|e| e.emit())?;
let mut visitor = PreprocessorVisitor::new();
let _ = visitor.visit_source_unit(&ast);
visitor.update(&sess, &mut content);
source.content = Arc::new(content);
Ok(())
})
});
eyre::ensure!(result.is_ok(), "failed parsing");
Ok(())
}
/// Find structs, resolve conflicts, and prepare them for writing
fn find_and_resolve_structs(
&self,
config: &Config,
project: &Project,
version: Version,
sources: Sources,
_target_path: &Path,
) -> Result<Vec<StructToWrite>> {
let settings = config.solc_settings()?;
let include = &config.bind_json.include;
let exclude = &config.bind_json.exclude;
let root = &config.root;
let input = SolcVersionedInput::build(sources, settings, SolcLanguage::Solidity, version);
let mut sess = Session::builder().with_stderr_emitter().build();
sess.dcx.set_flags_mut(|flags| flags.track_diagnostics = false);
let mut compiler = solar::sema::Compiler::new(sess);
let mut structs_to_write = Vec::new();
compiler.enter_mut(|compiler| -> Result<()> {
// Set up the parsing context with the project paths, without adding the source files
let mut pcx = compiler.parse();
configure_pcx_from_solc(&mut pcx, &project.paths, &input, false);
let mut target_files = HashSet::new();
for (path, source) in &input.input.sources {
if !include.is_empty() {
if !include.iter().any(|matcher| matcher.is_match(path)) {
continue;
}
} else {
// Exclude library files by default
if project.paths.has_library_ancestor(path) {
continue;
}
}
if exclude.iter().any(|matcher| matcher.is_match(path)) {
continue;
}
if let Ok(src_file) = compiler
.sess()
.source_map()
.new_source_file(path.clone(), source.content.as_str())
{
target_files.insert(Arc::clone(&src_file));
pcx.add_file(src_file);
}
}
// Parse and resolve
pcx.parse();
let Ok(ControlFlow::Continue(())) = compiler.lower_asts() else { return Ok(()) };
let gcx = compiler.gcx();
let hir = &gcx.hir;
let resolver = Resolver::new(gcx);
for id in resolver.struct_ids() {
if let Some(schema) = resolver.resolve_struct_eip712(id) {
let def = hir.strukt(id);
let source = hir.source(def.source);
if !target_files.contains(&source.file) {
continue;
}
if let FileName::Real(path) = &source.file.name {
structs_to_write.push(StructToWrite {
name: def.name.as_str().into(),
contract_name: def
.contract
.map(|id| hir.contract(id).name.as_str().into()),
path: path.strip_prefix(root).unwrap_or(path).to_path_buf(),
schema,
// will be filled later
import_alias: None,
name_in_fns: String::new(),
});
}
}
}
Ok(())
})?;
eyre::ensure!(compiler.sess().dcx.has_errors().is_ok(), "errors occurred");
// Resolve import aliases and function names
self.resolve_conflicts(&mut structs_to_write);
Ok(structs_to_write)
}
/// We manage 2 namespaces for JSON bindings:
/// - Namespace of imported items. This includes imports of contracts containing structs and
/// structs defined at the file level.
/// - Namespace of struct names used in function names and schema_* variables.
///
/// Both of those might contain conflicts, so we need to resolve them.
fn resolve_conflicts(&self, structs_to_write: &mut [StructToWrite]) {
// firstly, we resolve imported names conflicts
// construct mapping name -> paths from which items with such name are imported
let mut names_to_paths = BTreeMap::new();
for s in structs_to_write.iter() {
names_to_paths
.entry(s.struct_or_contract_name())
.or_insert_with(BTreeSet::new)
.insert(s.path.as_path());
}
// now resolve aliases for names which need them and construct mapping (name, file) -> alias
let mut aliases = BTreeMap::new();
for (name, paths) in names_to_paths {
if paths.len() <= 1 {
continue; // no alias needed
}
for (i, path) in paths.into_iter().enumerate() {
aliases
.entry(name.to_string())
.or_insert_with(BTreeMap::new)
.insert(path.to_path_buf(), format!("{name}_{i}"));
}
}
for s in structs_to_write.iter_mut() {
let name = s.struct_or_contract_name();
if aliases.contains_key(name) {
s.import_alias = Some(aliases[name][&s.path].clone());
}
}
// Each struct needs a name by which we are referencing it in function names (e.g.
// deserializeFoo) Those might also have conflicts, so we manage a separate
// namespace for them
let mut name_to_structs_indexes = BTreeMap::new();
for (idx, s) in structs_to_write.iter().enumerate() {
name_to_structs_indexes.entry(&s.name).or_insert_with(Vec::new).push(idx);
}
// Keeps `Some` for structs that will be referenced by name other than their definition
// name.
let mut fn_names = vec![None; structs_to_write.len()];
for (name, indexes) in name_to_structs_indexes {
if indexes.len() > 1 {
for (i, idx) in indexes.into_iter().enumerate() {
fn_names[idx] = Some(format!("{name}_{i}"));
}
}
}
for (s, fn_name) in structs_to_write.iter_mut().zip(fn_names.into_iter()) {
s.name_in_fns = fn_name.unwrap_or(s.name.clone());
}
}
/// Write the final bindings file
fn write_bindings(
&self,
structs_to_write: &[StructToWrite],
target_path: &PathBuf,
) -> Result<()> {
let mut result = String::new();
// Write imports
let mut grouped_imports = BTreeMap::new();
for struct_to_write in structs_to_write {
let item = struct_to_write.import_item();
grouped_imports
.entry(struct_to_write.path.as_path())
.or_insert_with(BTreeSet::new)
.insert(item);
}
result.push_str("// Automatically generated by forge bind-json.\n\npragma solidity >=0.6.2 <0.9.0;\npragma experimental ABIEncoderV2;\n\n");
for (path, names) in grouped_imports {
writeln!(
&mut result,
"import {{{}}} from \"{}\";",
names.iter().join(", "),
path.to_slash_lossy()
)?;
}
// Write VM interface
// Writes minimal VM interface to not depend on forge-std version
result.push_str(r#"
interface Vm {
function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory);
function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);
function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);
}
"#);
// Write library
result.push_str(
r#"
library JsonBindings {
Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));
"#,
);
// write schema constants
for struct_to_write in structs_to_write {
writeln!(
&mut result,
" {}{} = \"{}\";",
TYPE_BINDING_PREFIX, struct_to_write.name_in_fns, struct_to_write.schema
)?;
}
// write serialization functions
for struct_to_write in structs_to_write {
write!(
&mut result,
r#"
function serialize({path} memory value) internal pure returns (string memory) {{
return vm.serializeJsonType(schema_{name_in_fns}, abi.encode(value));
}}
function serialize({path} memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {{
return vm.serializeJsonType(objectKey, valueKey, schema_{name_in_fns}, abi.encode(value));
}}
function deserialize{name_in_fns}(string memory json) public pure returns ({path} memory) {{
return abi.decode(vm.parseJsonType(json, schema_{name_in_fns}), ({path}));
}}
function deserialize{name_in_fns}(string memory json, string memory path) public pure returns ({path} memory) {{
return abi.decode(vm.parseJsonType(json, path, schema_{name_in_fns}), ({path}));
}}
function deserialize{name_in_fns}Array(string memory json, string memory path) public pure returns ({path}[] memory) {{
return abi.decode(vm.parseJsonTypeArray(json, path, schema_{name_in_fns}), ({path}[]));
}}
"#,
name_in_fns = struct_to_write.name_in_fns,
path = struct_to_write.full_path()
)?;
}
result.push_str("}\n");
// Write to file
if let Some(parent) = target_path.parent() {
fs::create_dir_all(parent)?;
}
fs::write(target_path, &result)?;
sh_println!("Bindings written to {}", target_path.display())?;
Ok(())
}
}
/// AST visitor that collects text edits (span → replacement) used to erase function bodies and
/// `immutable` attributes; edits are applied by [`PreprocessorVisitor::update`].
struct PreprocessorVisitor {
    // (source span to replace, replacement text) pairs, filled while visiting.
    updates: Vec<(Span, &'static str)>,
}
impl PreprocessorVisitor {
    /// Creates a visitor with no pending edits.
    fn new() -> Self {
        Self { updates: Vec::new() }
    }
    /// Applies all collected edits to `content` in place.
    ///
    /// Spans are absolute within the session's source map, so they are rebased against the
    /// file's start position before indexing into `content`.
    fn update(mut self, sess: &Session, content: &mut String) {
        if self.updates.is_empty() {
            return;
        }
        // All spans collected here belong to the same file; use the first to locate it.
        let sf = sess.source_map().lookup_source_file(self.updates[0].0.lo());
        let base = sf.start_pos.0;
        // Apply edits in ascending offset order so earlier edits can be compensated for.
        self.updates.sort_by_key(|(span, _)| span.lo());
        // Running (bytes removed - bytes inserted) so far; maps original offsets to current ones.
        let mut shift = 0_i64;
        for (span, new) in self.updates {
            let lo = span.lo() - base;
            let hi = span.hi() - base;
            let start = ((lo.0 as i64) - shift) as usize;
            let end = ((hi.0 as i64) - shift) as usize;
            content.replace_range(start..end, new);
            shift += (end - start) as i64;
            shift -= new.len() as i64;
        }
    }
}
impl<'ast> Visit<'ast> for PreprocessorVisitor {
    type BreakValue = solar::interface::data_structures::Never;
    fn visit_item_function(
        &mut self,
        func: &'ast ast::ItemFunction<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // Replace function bodies with a noop statement.
        if let Some(block) = &func.body
            && !block.is_empty()
        {
            // Span covering every statement of the body (first through last).
            let span = block.first().unwrap().span.to(block.last().unwrap().span);
            let new_body = match func.kind {
                // A modifier body must keep the `_;` placeholder to stay valid Solidity.
                FunctionKind::Modifier => "_;",
                _ => "revert();",
            };
            self.updates.push((span, new_body));
        }
        self.walk_item_function(func)
    }
    fn visit_variable_definition(
        &mut self,
        var: &'ast ast::VariableDefinition<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // Remove `immutable` attributes.
        // NOTE(review): `var.span` appears to cover the whole definition, not just the keyword —
        // presumably safe because function bodies referencing the variable are erased too; confirm.
        if let Some(VarMut::Immutable) = var.mutability {
            self.updates.push((var.span, ""));
        }
        self.walk_variable_definition(var)
    }
}
/// A single struct definition for which we need to generate bindings.
#[derive(Debug, Clone)]
struct StructToWrite {
    /// Name of the struct definition.
    name: String,
    /// Name of the contract containing the struct definition. None if the struct is defined at the
    /// file level.
    contract_name: Option<String>,
    /// Import alias for the contract or struct, depending on whether the struct is imported
    /// directly, or via a contract. Assigned during conflict resolution; None if unambiguous.
    import_alias: Option<String>,
    /// Path to the file containing the struct definition, relative to the project root when
    /// possible.
    path: PathBuf,
    /// EIP712 schema for the struct.
    schema: String,
    /// Name of the struct definition used in function names and schema_* variables.
    name_in_fns: String,
}
impl StructToWrite {
    /// Name under which this struct is brought into scope: the struct's own name when it lives
    /// at file level, otherwise the name of the contract declaring it.
    fn struct_or_contract_name(&self) -> &str {
        match &self.contract_name {
            Some(contract) => contract,
            None => &self.name,
        }
    }
    /// Like [StructToWrite::struct_or_contract_name], but preferring the import alias assigned
    /// during conflict resolution, if any.
    fn struct_or_contract_name_with_alias(&self) -> &str {
        match &self.import_alias {
            Some(alias) => alias,
            None => self.struct_or_contract_name(),
        }
    }
    /// How this struct is referenced in generated Solidity: `Parent.Struct` when nested inside a
    /// contract, plain `Struct` otherwise.
    fn full_path(&self) -> String {
        let base = self.struct_or_contract_name_with_alias();
        match self.contract_name {
            Some(_) => format!("{base}.{}", self.name),
            None => base.to_string(),
        }
    }
    /// The item as it appears inside an `import {..}` list, with an `as` alias when one exists.
    fn import_item(&self) -> String {
        match &self.import_alias {
            Some(alias) => format!("{} as {}", self.struct_or_contract_name(), alias),
            None => self.struct_or_contract_name().to_string(),
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/snapshot.rs | crates/forge/src/cmd/snapshot.rs | use super::test;
use crate::result::{SuiteTestResult, TestKindReport, TestOutcome};
use alloy_primitives::{U256, map::HashMap};
use clap::{Parser, ValueHint, builder::RangedU64ValueParser};
use comfy_table::{
Cell, Color, Row, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN,
};
use eyre::{Context, Result};
use foundry_cli::utils::STATIC_FUZZ_SEED;
use foundry_common::shell;
use regex::Regex;
use std::{
cmp::Ordering,
fs,
io::{self, BufRead},
path::{Path, PathBuf},
str::FromStr,
sync::LazyLock,
};
use yansi::Paint;
/// A regex that matches a basic snapshot entry like
/// `Test:testDeposit() (gas: 58804)`
///
/// Named capture groups:
/// - `file` / `sig`: contract name and test signature;
/// - `gas`: unit tests, `(gas: N)`;
/// - `runs` / `avg` / `med`: fuzz tests, `(runs: N, μ: N, ~: N)`;
/// - `invruns` / `calls` / `reverts`: invariant tests, `(runs: N, calls: N, reverts: N)`.
pub static RE_BASIC_SNAPSHOT_ENTRY: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?P<file>(.*?)):(?P<sig>(\w+)\s*\((.*?)\))\s*\(((gas:)?\s*(?P<gas>\d+)|(runs:\s*(?P<runs>\d+),\s*μ:\s*(?P<avg>\d+),\s*~:\s*(?P<med>\d+))|(runs:\s*(?P<invruns>\d+),\s*calls:\s*(?P<calls>\d+),\s*reverts:\s*(?P<reverts>\d+)))\)").unwrap()
});
/// CLI arguments for `forge snapshot`.
#[derive(Clone, Debug, Parser)]
pub struct GasSnapshotArgs {
    // Option<Option<..>>: outer Some = flag given, inner Some = explicit snapshot file.
    /// Output a diff against a pre-existing gas snapshot.
    ///
    /// By default, the comparison is done with .gas-snapshot.
    #[arg(
        conflicts_with = "snap",
        long,
        value_hint = ValueHint::FilePath,
        value_name = "SNAPSHOT_FILE",
    )]
    diff: Option<Option<PathBuf>>,
    // Same Option<Option<..>> convention as `diff`; the two flags are mutually exclusive.
    /// Compare against a pre-existing gas snapshot, exiting with code 1 if they do not match.
    ///
    /// Outputs a diff if the gas snapshots do not match.
    ///
    /// By default, the comparison is done with .gas-snapshot.
    #[arg(
        conflicts_with = "diff",
        long,
        value_hint = ValueHint::FilePath,
        value_name = "SNAPSHOT_FILE",
    )]
    check: Option<Option<PathBuf>>,
    // Hidden because there is only one option
    /// How to format the output.
    #[arg(long, hide(true))]
    format: Option<Format>,
    /// Output file for the gas snapshot.
    #[arg(
        long,
        default_value = ".gas-snapshot",
        value_hint = ValueHint::FilePath,
        value_name = "FILE",
    )]
    snap: PathBuf,
    // Note: the accepted range is 0..100 exclusive of 100.
    /// Tolerates gas deviations up to the specified percentage.
    #[arg(
        long,
        value_parser = RangedU64ValueParser::<u32>::new().range(0..100),
        value_name = "SNAPSHOT_THRESHOLD"
    )]
    tolerance: Option<u32>,
    /// How to sort diff results.
    #[arg(long, value_name = "ORDER")]
    diff_sort: Option<DiffSortOrder>,
    /// All test arguments are supported
    #[command(flatten)]
    pub(crate) test: test::TestArgs,
    /// Additional configs for test results
    #[command(flatten)]
    config: GasSnapshotConfig,
}
impl GasSnapshotArgs {
    /// Returns whether `GasSnapshotArgs` was configured with `--watch`
    pub fn is_watch(&self) -> bool {
        self.test.is_watch()
    }
    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
        self.test.watchexec_config()
    }
    /// Runs the test suite and then diffs (`--diff`), checks (`--check`), or writes the snapshot.
    ///
    /// Note: the `--check` branch terminates the process directly with exit code 0 (match) or
    /// 1 (mismatch) instead of returning.
    pub async fn run(mut self) -> Result<()> {
        // Set fuzz seed so gas snapshots are deterministic
        self.test.fuzz_seed = Some(U256::from_be_bytes(STATIC_FUZZ_SEED));
        let outcome = self.test.compile_and_run().await?;
        outcome.ensure_ok(false)?;
        // Apply --min/--max/--asc/--desc filters and ordering.
        let tests = self.config.apply(outcome);
        if let Some(path) = self.diff {
            let snap = path.as_ref().unwrap_or(&self.snap);
            let snaps = read_gas_snapshot(snap)?;
            diff(tests, snaps, self.diff_sort.unwrap_or_default())?;
        } else if let Some(path) = self.check {
            let snap = path.as_ref().unwrap_or(&self.snap);
            let snaps = read_gas_snapshot(snap)?;
            if check(tests, snaps, self.tolerance) {
                std::process::exit(0)
            } else {
                std::process::exit(1)
            }
        } else {
            if matches!(self.format, Some(Format::Table)) {
                let table = build_gas_snapshot_table(&tests);
                sh_println!("\n{}", table)?;
            }
            write_to_gas_snapshot_file(&tests, self.snap, self.format)?;
        }
        Ok(())
    }
}
// Gas report format on stdout. Parsed via `FromStr` below; currently only one variant exists,
// which is why the `--format` flag is hidden.
#[derive(Clone, Debug)]
pub enum Format {
    // Render the snapshot as a table before writing the file.
    Table,
}
impl FromStr for Format {
    type Err = String;
    /// Accepts `t` or `table` (the only supported format); any other input is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if matches!(s, "t" | "table") {
            Ok(Self::Table)
        } else {
            Err(format!("Unrecognized format `{s}`"))
        }
    }
}
/// Additional filters that can be applied on the test results
#[derive(Clone, Debug, Default, Parser)]
struct GasSnapshotConfig {
    /// Sort results by gas used (ascending).
    #[arg(long)]
    asc: bool,
    /// Sort results by gas used (descending).
    #[arg(conflicts_with = "asc", long)]
    desc: bool,
    // NOTE(review): bound is inclusive (`>= min`, see `is_in_gas_range`); the help-text typo
    // ("that" for "than") is user-visible clap output and intentionally left unchanged.
    /// Only include tests that used more gas that the given amount.
    #[arg(long, value_name = "MIN_GAS")]
    min: Option<u64>,
    /// Only include tests that used less gas that the given amount.
    #[arg(long, value_name = "MAX_GAS")]
    max: Option<u64>,
}
/// Sort order for diff output
// Variant doc comments below double as clap help text (ValueEnum) and are left unchanged.
#[derive(Clone, Debug, Default, clap::ValueEnum)]
enum DiffSortOrder {
    /// Sort by percentage change (smallest to largest) - default behavior
    #[default]
    Percentage,
    /// Sort by percentage change (largest to smallest)
    PercentageDesc,
    /// Sort by absolute gas change (smallest to largest)
    Absolute,
    /// Sort by absolute gas change (largest to smallest)
    AbsoluteDesc,
}
impl GasSnapshotConfig {
fn is_in_gas_range(&self, gas_used: u64) -> bool {
if let Some(min) = self.min
&& gas_used < min
{
return false;
}
if let Some(max) = self.max
&& gas_used > max
{
return false;
}
true
}
fn apply(&self, outcome: TestOutcome) -> Vec<SuiteTestResult> {
let mut tests = outcome
.into_tests()
.filter(|test| self.is_in_gas_range(test.gas_used()))
.collect::<Vec<_>>();
if self.asc {
tests.sort_by_key(|a| a.gas_used());
} else if self.desc {
tests.sort_by_key(|b| std::cmp::Reverse(b.gas_used()))
}
tests
}
}
/// A general entry in a gas snapshot file
///
/// Has the form:
/// `<signature>(gas:? 40181)` for normal tests
/// `<signature>(runs: 256, μ: 40181, ~: 40181)` for fuzz tests
/// `<signature>(runs: 256, calls: 40181, reverts: 40181)` for invariant tests
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct GasSnapshotEntry {
    // Contract (test suite) name — the part before `:` in a snapshot line.
    pub contract_name: String,
    // Test function signature, e.g. `testDeposit()`.
    pub signature: String,
    // Parsed gas report; the variant reflects which entry form matched.
    pub gas_used: TestKindReport,
}
impl FromStr for GasSnapshotEntry {
    type Err = String;
    /// Parses one snapshot line via [`RE_BASIC_SNAPSHOT_ENTRY`].
    ///
    /// The named capture groups decide the variant: `gas` → unit, `runs`+`avg`+`med` → fuzz,
    /// `invruns`+`calls`+`reverts` → invariant. `.parse().unwrap()` on the numeric groups
    /// cannot fail because the regex only matches `\d+`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        RE_BASIC_SNAPSHOT_ENTRY
            .captures(s)
            .and_then(|cap| {
                cap.name("file").and_then(|file| {
                    cap.name("sig").and_then(|sig| {
                        if let Some(gas) = cap.name("gas") {
                            // Unit test entry: `(gas: N)`.
                            Some(Self {
                                contract_name: file.as_str().to_string(),
                                signature: sig.as_str().to_string(),
                                gas_used: TestKindReport::Unit {
                                    gas: gas.as_str().parse().unwrap(),
                                },
                            })
                        } else if let Some(runs) = cap.name("runs") {
                            // Fuzz test entry: `(runs: N, μ: N, ~: N)`.
                            cap.name("avg")
                                .and_then(|avg| cap.name("med").map(|med| (runs, avg, med)))
                                .map(|(runs, avg, med)| Self {
                                    contract_name: file.as_str().to_string(),
                                    signature: sig.as_str().to_string(),
                                    gas_used: TestKindReport::Fuzz {
                                        runs: runs.as_str().parse().unwrap(),
                                        median_gas: med.as_str().parse().unwrap(),
                                        mean_gas: avg.as_str().parse().unwrap(),
                                        failed_corpus_replays: 0,
                                    },
                                })
                        } else {
                            // Invariant test entry: `(runs: N, calls: N, reverts: N)`.
                            cap.name("invruns")
                                .and_then(|runs| {
                                    cap.name("calls").and_then(|avg| {
                                        cap.name("reverts").map(|med| (runs, avg, med))
                                    })
                                })
                                .map(|(runs, calls, reverts)| Self {
                                    contract_name: file.as_str().to_string(),
                                    signature: sig.as_str().to_string(),
                                    gas_used: TestKindReport::Invariant {
                                        runs: runs.as_str().parse().unwrap(),
                                        calls: calls.as_str().parse().unwrap(),
                                        reverts: reverts.as_str().parse().unwrap(),
                                        metrics: HashMap::default(),
                                        failed_corpus_replays: 0,
                                    },
                                })
                        }
                    })
                })
            })
            .ok_or_else(|| format!("Could not extract Snapshot Entry for {s}"))
    }
}
/// Reads a list of gas snapshot entries from a gas snapshot file.
///
/// Fails on the first line that cannot be parsed as a [`GasSnapshotEntry`] or on any I/O error.
fn read_gas_snapshot(path: impl AsRef<Path>) -> Result<Vec<GasSnapshotEntry>> {
    let path = path.as_ref();
    let file = fs::File::open(path)
        .wrap_err(format!("failed to read snapshot file \"{}\"", path.display()))?;
    io::BufReader::new(file)
        .lines()
        .map(|line| GasSnapshotEntry::from_str(line?.as_str()).map_err(|err| eyre::eyre!("{err}")))
        .collect()
}
/// Writes a series of tests to a gas snapshot file after sorting them.
///
/// Each line has the shape `Contract:signature <report>`; lines are sorted so the output is
/// deterministic regardless of test execution order. `_format` is currently unused here.
fn write_to_gas_snapshot_file(
    tests: &[SuiteTestResult],
    path: impl AsRef<Path>,
    _format: Option<Format>,
) -> Result<()> {
    let mut reports: Vec<String> = tests
        .iter()
        .map(|test| {
            format!("{}:{} {}", test.contract_name(), test.signature, test.result.kind.report())
        })
        .collect();
    // sort all reports
    reports.sort();
    fs::write(path, reports.join("\n"))?;
    Ok(())
}
fn build_gas_snapshot_table(tests: &[SuiteTestResult]) -> Table {
let mut table = Table::new();
if shell::is_markdown() {
table.load_preset(ASCII_MARKDOWN);
} else {
table.apply_modifier(UTF8_ROUND_CORNERS);
}
table.set_header(vec![
Cell::new("Contract").fg(Color::Cyan),
Cell::new("Signature").fg(Color::Cyan),
Cell::new("Report").fg(Color::Cyan),
]);
for test in tests {
let mut row = Row::new();
row.add_cell(Cell::new(test.contract_name()));
row.add_cell(Cell::new(&test.signature));
row.add_cell(Cell::new(test.result.kind.report()));
table.add_row(row);
}
table
}
/// A Gas snapshot entry diff.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct GasSnapshotDiff {
    // `Contract::signature` identifier used for display.
    pub signature: String,
    // Gas report from the current run.
    pub source_gas_used: TestKindReport,
    // Gas report recorded in the pre-existing snapshot file.
    pub target_gas_used: TestKindReport,
}
impl GasSnapshotDiff {
    /// Returns the gas diff
    ///
    /// `> 0` if the source used more gas
    /// `< 0` if the target used more gas
    fn gas_change(&self) -> i128 {
        i128::from(self.source_gas_used.gas()) - i128::from(self.target_gas_used.gas())
    }
    /// Relative change of the gas delta with respect to the snapshot (target) value.
    fn gas_diff(&self) -> f64 {
        self.gas_change() as f64 / self.target_gas_used.gas() as f64
    }
}
/// Compares the set of tests with an existing gas snapshot.
///
/// Returns `true` if all tests match their snapshot entries (within `tolerance`, when given).
/// Tests with no snapshot entry count as a mismatch. Differences are printed as they are found.
fn check(
    tests: Vec<SuiteTestResult>,
    snaps: Vec<GasSnapshotEntry>,
    tolerance: Option<u32>,
) -> bool {
    // Index snapshot entries by (contract, signature) for O(1) lookup per test.
    let snaps = snaps
        .into_iter()
        .map(|s| ((s.contract_name, s.signature), s.gas_used))
        .collect::<HashMap<_, _>>();
    let mut has_diff = false;
    for test in tests {
        if let Some(target_gas) =
            snaps.get(&(test.contract_name().to_string(), test.signature.clone())).cloned()
        {
            let source_gas = test.result.kind.report();
            if !within_tolerance(source_gas.gas(), target_gas.gas(), tolerance) {
                let _ = sh_println!(
                    "Diff in \"{}::{}\": consumed \"{}\" gas, expected \"{}\" gas ",
                    test.contract_name(),
                    test.signature,
                    source_gas,
                    target_gas
                );
                has_diff = true;
            }
        } else {
            // A test missing from the snapshot is treated as a failure, too.
            let _ = sh_println!(
                "No matching snapshot entry found for \"{}::{}\" in snapshot file",
                test.contract_name(),
                test.signature
            );
            has_diff = true;
        }
    }
    !has_diff
}
/// Compare the set of tests with an existing gas snapshot.
///
/// Prints one line per matched test (old → new gas plus absolute and relative change), lists
/// tests that have no snapshot entry as "New tests", and ends with a summary. The order of the
/// per-test lines is controlled by `sort_order`.
fn diff(
    tests: Vec<SuiteTestResult>,
    snaps: Vec<GasSnapshotEntry>,
    sort_order: DiffSortOrder,
) -> Result<()> {
    // Index snapshot entries by (contract, signature) for O(1) lookup per test.
    let snaps = snaps
        .into_iter()
        .map(|s| ((s.contract_name, s.signature), s.gas_used))
        .collect::<HashMap<_, _>>();
    let mut diffs = Vec::with_capacity(tests.len());
    let mut new_tests = Vec::new();
    for test in tests.into_iter() {
        if let Some(target_gas_used) =
            snaps.get(&(test.contract_name().to_string(), test.signature.clone())).cloned()
        {
            diffs.push(GasSnapshotDiff {
                source_gas_used: test.result.kind.report(),
                signature: format!("{}::{}", test.contract_name(), test.signature),
                target_gas_used,
            });
        } else {
            // Track new tests
            new_tests.push(format!("{}::{}", test.contract_name(), test.signature));
        }
    }
    let mut increased = 0;
    let mut decreased = 0;
    let mut unchanged = 0;
    let mut overall_gas_change = 0i128;
    let mut overall_gas_used = 0i128;
    // Sort based on user preference
    match sort_order {
        DiffSortOrder::Percentage => {
            // Default: sort by percentage change (smallest to largest)
            diffs.sort_by(|a, b| a.gas_diff().abs().total_cmp(&b.gas_diff().abs()));
        }
        DiffSortOrder::PercentageDesc => {
            // Sort by percentage change (largest to smallest)
            diffs.sort_by(|a, b| b.gas_diff().abs().total_cmp(&a.gas_diff().abs()));
        }
        DiffSortOrder::Absolute => {
            // Sort by absolute gas change (smallest to largest)
            diffs.sort_by_key(|d| d.gas_change().abs());
        }
        DiffSortOrder::AbsoluteDesc => {
            // Sort by absolute gas change (largest to smallest)
            diffs.sort_by_key(|d| std::cmp::Reverse(d.gas_change().abs()));
        }
    }
    for diff in &diffs {
        let gas_change = diff.gas_change();
        overall_gas_change += gas_change;
        // Overall percentage is computed against the snapshot (target) totals.
        overall_gas_used += diff.target_gas_used.gas() as i128;
        let gas_diff = diff.gas_diff();
        // Classify changes
        if gas_change > 0 {
            increased += 1;
        } else if gas_change < 0 {
            decreased += 1;
        } else {
            unchanged += 1;
        }
        // Display with icon and before/after values
        let icon = if gas_change > 0 {
            "↑".red().to_string()
        } else if gas_change < 0 {
            "↓".green().to_string()
        } else {
            "━".to_string()
        };
        sh_println!(
            "{} {} (gas: {} → {} | {} {})",
            icon,
            diff.signature,
            diff.target_gas_used.gas(),
            diff.source_gas_used.gas(),
            fmt_change(gas_change),
            fmt_pct_change(gas_diff)
        )?;
    }
    // Display new tests if any
    if !new_tests.is_empty() {
        sh_println!("\n{}", "New tests:".yellow())?;
        for test in new_tests {
            sh_println!("  {} {}", "+".green(), test)?;
        }
    }
    // Summary separator
    sh_println!("\n{}", "-".repeat(80))?;
    // Guard against division by zero when nothing matched the snapshot.
    let overall_gas_diff = if overall_gas_used > 0 {
        overall_gas_change as f64 / overall_gas_used as f64
    } else {
        0.0
    };
    sh_println!(
        "Total tests: {}, {} {}, {} {}, {} {}",
        diffs.len(),
        "↑".red().to_string(),
        increased,
        "↓".green().to_string(),
        decreased,
        "━",
        unchanged
    )?;
    sh_println!(
        "Overall gas change: {} ({})",
        fmt_change(overall_gas_change),
        fmt_pct_change(overall_gas_diff)
    )?;
    Ok(())
}
/// Formats a fractional change as a percentage with three decimals, colored green for a
/// decrease, red for an increase, and uncolored when exactly zero.
fn fmt_pct_change(change: f64) -> String {
    let formatted = format!("{:.3}%", change * 100.0);
    match change.total_cmp(&0.0) {
        Ordering::Less => formatted.green().to_string(),
        Ordering::Equal => formatted,
        Ordering::Greater => formatted.red().to_string(),
    }
}
fn fmt_change(change: i128) -> String {
match change.cmp(&0) {
Ordering::Less => format!("{change}").green().to_string(),
Ordering::Equal => change.to_string(),
Ordering::Greater => format!("{change}").red().to_string(),
}
}
/// Returns `true` if the two gas values are considered matching under the given tolerance.
///
/// If `tolerance_pct` is `None`, the values must be exactly equal; otherwise the relative
/// difference (in percent, with respect to the larger value) must be strictly below the
/// tolerance.
fn within_tolerance(source_gas: u64, target_gas: u64, tolerance_pct: Option<u32>) -> bool {
    let Some(tolerance) = tolerance_pct else {
        // No tolerance configured: require an exact match.
        return source_gas == target_gas;
    };
    // Equal values always match. This also avoids the 0/0 division below (NaN) when both
    // values are zero, which would otherwise incorrectly report a mismatch.
    if source_gas == target_gas {
        return true;
    }
    let (hi, lo) =
        if source_gas > target_gas { (source_gas, target_gas) } else { (target_gas, source_gas) };
    // Relative difference in percent, measured against the larger value.
    let diff = (1. - (lo as f64 / hi as f64)) * 100.;
    diff < tolerance as f64
}
// Unit tests for the tolerance check and snapshot-entry parsing (one per entry form).
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_tolerance() {
        assert!(within_tolerance(100, 105, Some(5)));
        assert!(within_tolerance(105, 100, Some(5)));
        assert!(!within_tolerance(100, 106, Some(5)));
        assert!(!within_tolerance(106, 100, Some(5)));
        assert!(within_tolerance(100, 100, None));
    }
    #[test]
    fn can_parse_basic_gas_snapshot_entry() {
        let s = "Test:deposit() (gas: 7222)";
        let entry = GasSnapshotEntry::from_str(s).unwrap();
        assert_eq!(
            entry,
            GasSnapshotEntry {
                contract_name: "Test".to_string(),
                signature: "deposit()".to_string(),
                gas_used: TestKindReport::Unit { gas: 7222 }
            }
        );
    }
    #[test]
    fn can_parse_fuzz_gas_snapshot_entry() {
        let s = "Test:deposit() (runs: 256, μ: 100, ~:200)";
        let entry = GasSnapshotEntry::from_str(s).unwrap();
        assert_eq!(
            entry,
            GasSnapshotEntry {
                contract_name: "Test".to_string(),
                signature: "deposit()".to_string(),
                gas_used: TestKindReport::Fuzz {
                    runs: 256,
                    median_gas: 200,
                    mean_gas: 100,
                    failed_corpus_replays: 0
                }
            }
        );
    }
    #[test]
    fn can_parse_invariant_gas_snapshot_entry() {
        let s = "Test:deposit() (runs: 256, calls: 100, reverts: 200)";
        let entry = GasSnapshotEntry::from_str(s).unwrap();
        assert_eq!(
            entry,
            GasSnapshotEntry {
                contract_name: "Test".to_string(),
                signature: "deposit()".to_string(),
                gas_used: TestKindReport::Invariant {
                    runs: 256,
                    calls: 100,
                    reverts: 200,
                    metrics: HashMap::default(),
                    failed_corpus_replays: 0,
                }
            }
        );
    }
    // Regression test with a realistic contract/signature, exercising the same invariant path.
    #[test]
    fn can_parse_invariant_gas_snapshot_entry2() {
        let s = "ERC20Invariants:invariantBalanceSum() (runs: 256, calls: 3840, reverts: 2388)";
        let entry = GasSnapshotEntry::from_str(s).unwrap();
        assert_eq!(
            entry,
            GasSnapshotEntry {
                contract_name: "ERC20Invariants".to_string(),
                signature: "invariantBalanceSum()".to_string(),
                gas_used: TestKindReport::Invariant {
                    runs: 256,
                    calls: 3840,
                    reverts: 2388,
                    metrics: HashMap::default(),
                    failed_corpus_replays: 0,
                }
            }
        );
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/cache.rs | crates/forge/src/cmd/cache.rs | use cache::Cache;
use clap::{
Arg, Command, Parser, Subcommand,
builder::{PossibleValuesParser, TypedValueParser},
};
use eyre::Result;
use foundry_config::{Chain, Config, NamedChain, cache};
use std::{ffi::OsStr, str::FromStr};
use strum::VariantNames;
/// CLI arguments for `forge cache`.
#[derive(Debug, Parser)]
pub struct CacheArgs {
    // Dispatches to `clean` or `ls`; see [`CacheSubcommands`].
    #[command(subcommand)]
    pub sub: CacheSubcommands,
}
// Subcommands of `forge cache`; variant doc comments double as clap help text.
#[derive(Debug, Subcommand)]
pub enum CacheSubcommands {
    /// Cleans cached data from the global foundry directory.
    Clean(CleanArgs),
    /// Shows cached data from the global foundry directory.
    Ls(LsArgs),
}
/// CLI arguments for `forge clean`.
// The `etherscan-blocks` group makes `--blocks` and `--etherscan` mutually exclusive.
#[command(group = clap::ArgGroup::new("etherscan-blocks").multiple(false))]
pub struct CleanArgs {
    /// The chains to clean the cache for.
    ///
    /// Can also be "all" to clean all chains.
    #[arg(
        env = "CHAIN",
        default_value = "all",
        value_parser = ChainOrAllValueParser::default(),
    )]
    chains: Vec<ChainOrAll>,
    /// The blocks to clean the cache for.
    #[arg(
        short,
        long,
        num_args(1..),
        value_delimiter(','),
        group = "etherscan-blocks"
    )]
    blocks: Vec<u64>,
    /// Whether to clean the Etherscan cache.
    #[arg(long, group = "etherscan-blocks")]
    etherscan: bool,
}
impl CleanArgs {
    /// Cleans the cache for every requested chain; "all" wipes the whole cache (or only the
    /// Etherscan portion of it when `--etherscan` is set).
    pub fn run(self) -> Result<()> {
        let Self { chains, blocks, etherscan } = self;
        for target in chains {
            match target {
                ChainOrAll::NamedChain(chain) => {
                    clean_chain_cache(chain, blocks.clone(), etherscan)?
                }
                ChainOrAll::All if etherscan => Config::clean_foundry_etherscan_cache()?,
                ChainOrAll::All => Config::clean_foundry_cache()?,
            }
        }
        Ok(())
    }
}
// CLI arguments for `forge cache ls`.
#[derive(Debug, Parser)]
pub struct LsArgs {
    /// The chains to list the cache for.
    ///
    /// Can also be "all" to list all chains.
    #[arg(
        env = "CHAIN",
        default_value = "all",
        value_parser = ChainOrAllValueParser::default(),
    )]
    chains: Vec<ChainOrAll>,
}
impl LsArgs {
    /// Accumulates per-chain cache listings and prints the result.
    ///
    /// NOTE(review): an "all" entry replaces everything accumulated so far with the full
    /// listing, so mixing named chains with "all" keeps only what follows the last "all".
    pub fn run(self) -> Result<()> {
        let Self { chains } = self;
        let mut cache = Cache::default();
        for chain_or_all in chains {
            match chain_or_all {
                ChainOrAll::NamedChain(chain) => {
                    cache.chains.push(Config::list_foundry_chain_cache(chain.into())?)
                }
                ChainOrAll::All => cache = Config::list_foundry_cache()?,
            }
        }
        sh_print!("{cache}")?;
        Ok(())
    }
}
/// A chain selector CLI value: either one known chain or the literal `"all"`.
#[derive(Clone, Debug)]
pub enum ChainOrAll {
    NamedChain(NamedChain),
    All,
}
impl FromStr for ChainOrAll {
    type Err = String;
    /// Parses the literal `"all"` or any [`NamedChain`] name.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "all" => Ok(Self::All),
            _ => NamedChain::from_str(s)
                .map(Self::NamedChain)
                .map_err(|_| format!("Expected known chain or all, found: {s}")),
        }
    }
}
/// Cleans the Foundry cache for a single chain.
///
/// With no specific `blocks`: the chain's Etherscan cache is always cleaned; when `etherscan`
/// is set that is all, otherwise the chain's block cache is wiped too. With explicit `blocks`,
/// only those per-block caches are removed (`etherscan` cannot be set alongside `--blocks` —
/// the CLI's `etherscan-blocks` arg group makes them mutually exclusive).
fn clean_chain_cache(chain: impl Into<Chain>, blocks: Vec<u64>, etherscan: bool) -> Result<()> {
    let chain = chain.into();
    if blocks.is_empty() {
        Config::clean_foundry_etherscan_chain_cache(chain)?;
        if etherscan {
            // Only the Etherscan cache was requested; leave the block cache alone.
            return Ok(());
        }
        Config::clean_foundry_chain_cache(chain)?;
    } else {
        for block in blocks {
            Config::clean_foundry_block_cache(chain, block)?;
        }
    }
    Ok(())
}
/// The value parser for `ChainOrAll`
#[derive(Clone, Debug)]
pub struct ChainOrAllValueParser {
    // Validates raw CLI input against the fixed set of chain names plus "all".
    inner: PossibleValuesParser,
}
impl Default for ChainOrAllValueParser {
    /// Builds a parser accepting every supported chain name plus `"all"`.
    fn default() -> Self {
        let inner = possible_chains();
        Self { inner }
    }
}
impl TypedValueParser for ChainOrAllValueParser {
    type Value = ChainOrAll;

    /// Validates `value` against the possible values (via `inner`), then
    /// converts the accepted string into a [`ChainOrAll`].
    fn parse_ref(
        &self,
        cmd: &Command,
        arg: Option<&Arg>,
        value: &OsStr,
    ) -> Result<Self::Value, clap::Error> {
        // The second parse should not fail after `inner` accepted the value;
        // if it does, the original error detail is replaced by a generic one.
        self.inner.parse_ref(cmd, arg, value)?.parse::<ChainOrAll>().map_err(|_| {
            clap::Error::raw(
                clap::error::ErrorKind::InvalidValue,
                "chain argument did not match any possible chain variant",
            )
        })
    }
}
/// All accepted values: the literal `"all"` followed by every `NamedChain`
/// variant name.
fn possible_chains() -> PossibleValuesParser {
    std::iter::once(&"all").chain(NamedChain::VARIANTS).into()
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_parse_cache_ls() {
        // `cache ls` must map to the `Ls` subcommand.
        let args: CacheArgs = CacheArgs::parse_from(["cache", "ls"]);
        assert!(matches!(args.sub, CacheSubcommands::Ls(_)));
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/eip712.rs | crates/forge/src/cmd/eip712.rs | use alloy_primitives::{B256, keccak256};
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::{opts::BuildOpts, utils::LoadConfig};
use foundry_common::{compile::ProjectCompiler, shell};
use serde::Serialize;
use solar::sema::{
Gcx, Hir,
hir::StructId,
ty::{Ty, TyKind},
};
use std::{
collections::BTreeMap,
fmt::{Display, Formatter, Result as FmtResult, Write},
ops::ControlFlow,
path::{Path, PathBuf},
};
foundry_config::impl_figment_convert!(Eip712Args, build);

/// CLI arguments for `forge eip712`.
#[derive(Clone, Debug, Parser)]
pub struct Eip712Args {
    /// The path to the file from which to read struct definitions.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")]
    pub target_path: PathBuf,

    /// Standard build options (compiler settings, paths, etc.).
    #[command(flatten)]
    build: BuildOpts,
}
/// One resolved struct: its source path, EIP-712 `encodeType` string, and the
/// keccak256 hash of that string.
#[derive(Debug, Serialize)]
struct Eip712Output {
    // Human-readable location, e.g. `file.sol > Contract > Struct`.
    path: String,
    // The `encodeType` string; serialized as "type" in JSON output.
    #[serde(rename = "type")]
    ty: String,
    // keccak256 of `ty`.
    hash: B256,
}
impl Display for Eip712Output {
    // Human-readable (non-JSON) rendering used by `forge eip712`.
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        writeln!(f, "{}:", self.path)?;
        writeln!(f, " - type: {}", self.ty)?;
        writeln!(f, " - hash: {}", self.hash)
    }
}
impl Eip712Args {
    /// Compiles the target file, lowers it to HIR, and prints the EIP-712
    /// `encodeType` string and type hash for every struct it defines.
    pub fn run(self) -> Result<()> {
        let config = self.build.load_config()?;
        let project = config.solar_project()?;
        let mut output = ProjectCompiler::new().files([self.target_path]).compile(&project)?;
        let compiler = output.parser_mut().solc_mut().compiler_mut();

        compiler.enter_mut(|compiler| -> Result<()> {
            // Lowering can bail early (e.g. on parse errors); the buffered
            // diagnostics are surfaced after the closure returns.
            let Ok(ControlFlow::Continue(())) = compiler.lower_asts() else { return Ok(()) };
            let gcx = compiler.gcx();
            let resolver = Resolver::new(gcx);

            // Structs using constructs unsupported by EIP-712 resolve to
            // `None` and are silently skipped.
            let outputs = resolver
                .struct_ids()
                .filter_map(|id| {
                    let resolved = resolver.resolve_struct_eip712(id)?;
                    Some(Eip712Output {
                        path: resolver.get_struct_path(id),
                        hash: keccak256(resolved.as_bytes()),
                        ty: resolved,
                    })
                })
                .collect::<Vec<_>>();

            if shell::is_json() {
                sh_println!("{json}", json = serde_json::to_string_pretty(&outputs)?)?;
            } else {
                for output in &outputs {
                    sh_println!("{output}")?;
                }
            }

            Ok(())
        })?;

        // `compiler.sess()` inside of `ProjectCompileOutput` is built with `with_buffer_emitter`.
        let diags = compiler.sess().dcx.emitted_diagnostics().unwrap();
        if compiler.sess().dcx.has_errors().is_err() {
            eyre::bail!("{diags}");
        } else {
            // Non-fatal diagnostics (e.g. warnings) are still printed.
            let _ = sh_print!("{diags}");
        }

        Ok(())
    }
}
/// Generates the EIP-712 `encodeType` string for a given struct.
///
/// Requires a reference to the source HIR.
pub struct Resolver<'gcx> {
    // Global compiler context giving access to the HIR and type information.
    gcx: Gcx<'gcx>,
}
impl<'gcx> Resolver<'gcx> {
    /// Constructs a new [`Resolver`] for the supplied [`Hir`] instance.
    pub fn new(gcx: Gcx<'gcx>) -> Self {
        Self { gcx }
    }

    /// Shorthand accessor for the HIR stored in the global context.
    #[inline]
    fn hir(&self) -> &'gcx Hir<'gcx> {
        &self.gcx.hir
    }

    /// Returns the [`StructId`]s of every user-defined struct in source order.
    pub fn struct_ids(&self) -> impl Iterator<Item = StructId> {
        self.hir().strukt_ids()
    }

    /// Returns the path for a struct, with the format: `file.sol > MyContract > MyStruct`
    pub fn get_struct_path(&self, id: StructId) -> String {
        let strukt = self.hir().strukt(id).name.as_str();
        match self.hir().strukt(id).contract {
            Some(cid) => {
                let full_name = self.gcx.contract_fully_qualified_name(cid).to_string();
                // Keep only the final path component of `dir/file.sol:Contract`.
                let relevant = Path::new(&full_name)
                    .file_name()
                    .and_then(|s| s.to_str())
                    .unwrap_or(&full_name);

                if let Some((file, contract)) = relevant.rsplit_once(':') {
                    format!("{file} > {contract} > {strukt}")
                } else {
                    format!("{relevant} > {strukt}")
                }
            }
            // File-level struct: no contract prefix.
            None => strukt.to_string(),
        }
    }

    /// Converts a given struct into its EIP-712 `encodeType` representation.
    ///
    /// Returns `None` if the struct, or any of its fields, contains constructs
    /// not supported by EIP-712 (mappings, function types, errors, etc).
    pub fn resolve_struct_eip712(&self, id: StructId) -> Option<String> {
        // Seed the subtype map with the root struct so it is never appended
        // to its own encoding below.
        let mut subtypes = BTreeMap::new();
        subtypes.insert(self.hir().strukt(id).name.as_str().into(), id);
        self.resolve_eip712_inner(id, &mut subtypes, true, None)
    }

    /// Encodes one struct as `Name(type1 name1,type2 name2,...)`.
    ///
    /// When `append_subtypes` is set, the encodings of all referenced struct
    /// types are appended afterwards, in the name-sorted order the `BTreeMap`
    /// yields. `rename` overrides the struct's own name (used for
    /// deduplicated subtype names like `Foo_1`).
    fn resolve_eip712_inner(
        &self,
        id: StructId,
        subtypes: &mut BTreeMap<String, StructId>,
        append_subtypes: bool,
        rename: Option<&str>,
    ) -> Option<String> {
        let def = self.hir().strukt(id);
        let mut result = format!("{}(", rename.unwrap_or(def.name.as_str()));

        for (idx, field_id) in def.fields.iter().enumerate() {
            let field = self.hir().variable(*field_id);
            let ty = self.resolve_type(self.gcx.type_of_hir_ty(&field.ty), subtypes)?;

            write!(result, "{ty} {name}", name = field.name?.as_str()).ok()?;

            // Comma-separate all but the last field.
            if idx < def.fields.len() - 1 {
                result.push(',');
            }
        }

        result.push(')');

        if append_subtypes {
            // Snapshot the map into a Vec: encoding a subtype can register
            // further subtypes, which would otherwise invalidate iteration.
            for (subtype_name, subtype_id) in
                subtypes.iter().map(|(name, id)| (name.clone(), *id)).collect::<Vec<_>>()
            {
                if subtype_id == id {
                    continue;
                }
                let encoded_subtype =
                    self.resolve_eip712_inner(subtype_id, subtypes, false, Some(&subtype_name))?;

                result.push_str(&encoded_subtype);
            }
        }

        Some(result)
    }

    /// Maps a semantic type to its EIP-712 type string, registering any
    /// struct types encountered along the way in `subtypes`.
    fn resolve_type(
        &self,
        ty: Ty<'gcx>,
        subtypes: &mut BTreeMap<String, StructId>,
    ) -> Option<String> {
        let ty = ty.peel_refs();
        match ty.kind {
            TyKind::Elementary(elem_ty) => Some(elem_ty.to_abi_str().to_string()),
            TyKind::Array(element_ty, size) => {
                let inner_type = self.resolve_type(element_ty, subtypes)?;
                let size = size.to_string();
                Some(format!("{inner_type}[{size}]"))
            }
            TyKind::DynArray(element_ty) => {
                let inner_type = self.resolve_type(element_ty, subtypes)?;
                Some(format!("{inner_type}[]"))
            }
            // User-defined value types encode as their underlying type.
            TyKind::Udvt(ty, _) => self.resolve_type(ty, subtypes),
            TyKind::Struct(id) => {
                let def = self.hir().strukt(id);
                // Reuse the cached name if this struct was seen before.
                let name = match subtypes.iter().find(|(_, cached_id)| id == **cached_id) {
                    Some((name, _)) => name.to_string(),
                    None => {
                        // Otherwise, assign new name
                        let mut i = 0;
                        let mut name = def.name.as_str().into();
                        while subtypes.contains_key(&name) {
                            i += 1;
                            name = format!("{}_{i}", def.name.as_str());
                        }

                        subtypes.insert(name.clone(), id);

                        // Recursively resolve fields to populate subtypes
                        for &field_id in def.fields {
                            let field_ty = self.gcx.type_of_item(field_id.into());
                            self.resolve_type(field_ty, subtypes)?;
                        }
                        name
                    }
                };

                Some(name)
            }
            // For now, map enums to `uint8`
            TyKind::Enum(_) => Some("uint8".to_string()),
            // For now, map contracts to `address`
            TyKind::Contract(_) => Some("address".to_string()),
            // EIP-712 doesn't support tuples (should use structs), functions, mappings, nor errors
            _ => None,
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/remove.rs | crates/forge/src/cmd/remove.rs | use crate::Lockfile;
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::{
opts::Dependency,
utils::{Git, LoadConfig},
};
use foundry_config::impl_figment_convert_basic;
use std::path::PathBuf;
/// CLI arguments for `forge remove`.
#[derive(Clone, Debug, Parser)]
pub struct RemoveArgs {
    /// The dependencies you want to remove.
    #[arg(required = true)]
    dependencies: Vec<Dependency>,

    /// The project's root path.
    ///
    /// By default root of the Git repository, if in one,
    /// or the current working directory.
    #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
    root: Option<PathBuf>,

    /// Override the up-to-date check.
    #[arg(short, long)]
    force: bool,
}
impl_figment_convert_basic!(RemoveArgs);

impl RemoveArgs {
    /// Removes the given dependencies: unregisters them via `git rm`, deletes
    /// their `.git/modules` state, and updates the lockfile.
    pub fn run(self) -> Result<()> {
        let config = self.load_config()?;
        let (root, paths, _) = super::update::dependencies_paths(&self.dependencies, &config)?;
        let git_modules = root.join(".git/modules");
        let git = Git::new(&root);

        let mut lockfile = Lockfile::new(&config.root).with_git(&git);
        let _synced = lockfile.sync(config.install_lib_dir())?;

        // remove all the dependencies by invoking `git rm` only once with all the paths
        git.rm(self.force, &paths)?;

        // remove all the dependencies from .git/modules
        for (Dependency { name, tag, .. }, path) in self.dependencies.iter().zip(&paths) {
            // Get the URL from git submodule config instead of using the parsed dependency URL
            let url = git.submodule_url(path).unwrap_or(None);

            sh_println!(
                "Removing '{name}' in {}, (url: {}, tag: {})",
                path.display(),
                url.as_deref().unwrap_or("None"),
                tag.as_deref().unwrap_or("None")
            )?;
            // Failure to drop the lockfile entry is non-fatal.
            let _ = lockfile.remove(path);
            std::fs::remove_dir_all(git_modules.join(path))?;
        }

        lockfile.write()?;

        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/create.rs | crates/forge/src/cmd/create.rs | use crate::cmd::install;
use alloy_chains::Chain;
use alloy_dyn_abi::{DynSolValue, JsonAbiExt, Specifier};
use alloy_json_abi::{Constructor, JsonAbi};
use alloy_network::{AnyNetwork, AnyTransactionReceipt, EthereumWallet, TransactionBuilder};
use alloy_primitives::{Address, Bytes, hex};
use alloy_provider::{PendingTransactionError, Provider, ProviderBuilder};
use alloy_rpc_types::TransactionRequest;
use alloy_serde::WithOtherFields;
use alloy_signer::Signer;
use alloy_transport::TransportError;
use clap::{Parser, ValueHint};
use eyre::{Context, Result};
use forge_verify::{RetryArgs, VerifierArgs, VerifyArgs};
use foundry_cli::{
opts::{BuildOpts, EthereumOpts, EtherscanOpts, TransactionOpts},
utils::{self, LoadConfig, find_contract_artifacts, read_constructor_args_file},
};
use foundry_common::{
compile::{self},
fmt::parse_tokens,
shell,
};
use foundry_compilers::{
ArtifactId, artifacts::BytecodeObject, info::ContractInfo, utils::canonicalize,
};
use foundry_config::{
Config,
figment::{
self, Metadata, Profile,
value::{Dict, Map},
},
merge_impl_figment_convert,
};
use serde_json::json;
use std::{borrow::Borrow, marker::PhantomData, path::PathBuf, sync::Arc, time::Duration};
merge_impl_figment_convert!(CreateArgs, build, eth);

/// CLI arguments for `forge create`.
#[derive(Clone, Debug, Parser)]
pub struct CreateArgs {
    /// The contract identifier in the form `<path>:<contractname>`.
    contract: ContractInfo,

    /// The constructor arguments.
    #[arg(
        long,
        num_args(1..),
        conflicts_with = "constructor_args_path",
        value_name = "ARGS",
        allow_hyphen_values = true,
    )]
    constructor_args: Vec<String>,

    /// The path to a file containing the constructor arguments.
    #[arg(
        long,
        value_hint = ValueHint::FilePath,
        value_name = "PATH",
    )]
    constructor_args_path: Option<PathBuf>,

    /// Broadcast the transaction.
    #[arg(long)]
    pub broadcast: bool,

    /// Verify contract after creation.
    #[arg(long)]
    verify: bool,

    /// Send via `eth_sendTransaction` using the `--from` argument or `$ETH_FROM` as sender
    #[arg(long, requires = "from")]
    unlocked: bool,

    /// Prints the standard json compiler input if `--verify` is provided.
    ///
    /// The standard json compiler input can be used to manually submit contract verification in
    /// the browser.
    #[arg(long, requires = "verify")]
    show_standard_json_input: bool,

    /// Timeout to use for broadcasting transactions.
    #[arg(long, env = "ETH_TIMEOUT")]
    pub timeout: Option<u64>,

    /// Build options (compiler version, optimizer, libraries, ...).
    #[command(flatten)]
    build: BuildOpts,

    /// Transaction options (gas, nonce, value, legacy flag, ...).
    #[command(flatten)]
    tx: TransactionOpts,

    /// Ethereum connection and wallet options.
    #[command(flatten)]
    eth: EthereumOpts,

    /// Verification provider options.
    #[command(flatten)]
    pub verifier: VerifierArgs,

    /// Retry options for verification requests.
    #[command(flatten)]
    retry: RetryArgs,
}
impl CreateArgs {
    /// Executes the command to create a contract
    pub async fn run(mut self) -> Result<()> {
        let mut config = self.load_config()?;

        // Install missing dependencies.
        if install::install_missing_dependencies(&mut config).await && config.auto_detect_remappings
        {
            // need to re-configure here to also catch additional remappings
            config = self.load_config()?;
        }

        // Find Project & Compile
        let project = config.project()?;

        let target_path = if let Some(ref mut path) = self.contract.path {
            canonicalize(project.root().join(path))?
        } else {
            project.find_contract_path(&self.contract.name)?
        };

        let output = compile::compile_target(&target_path, &project, shell::is_json())?;

        let (abi, bin, id) = find_contract_artifacts(output, &target_path, &self.contract.name)?;

        // Unlinked bytecode cannot be deployed; list the libraries that must
        // be linked (and thus deployed) first.
        let bin = match bin.object {
            BytecodeObject::Bytecode(_) => bin.object,
            _ => {
                let link_refs = bin
                    .link_references
                    .iter()
                    .flat_map(|(path, names)| {
                        names.keys().map(move |name| format!("\t{name}: {path}"))
                    })
                    .collect::<Vec<String>>()
                    .join("\n");
                eyre::bail!(
                    "Dynamic linking not supported in `create` command - deploy the following library contracts first, then provide the address to link at compile time\n{}",
                    link_refs
                )
            }
        };

        // Add arguments to constructor
        let params = if let Some(constructor) = &abi.constructor {
            // A file-based argument list takes precedence over inline args.
            let constructor_args =
                self.constructor_args_path.clone().map(read_constructor_args_file).transpose()?;
            self.parse_constructor_args(
                constructor,
                constructor_args.as_deref().unwrap_or(&self.constructor_args),
            )?
        } else {
            vec![]
        };

        let provider = utils::get_provider(&config)?;

        // respect chain, if set explicitly via cmd args
        let chain_id = if let Some(chain_id) = self.chain_id() {
            chain_id
        } else {
            provider.get_chain_id().await?
        };

        // Whether to broadcast the transaction or not
        let dry_run = !self.broadcast;

        if self.unlocked {
            // Deploy with unlocked account
            let sender = self.eth.wallet.from.expect("required");
            self.deploy(
                abi,
                bin,
                params,
                provider,
                chain_id,
                sender,
                config.transaction_timeout,
                id,
                dry_run,
            )
            .await
        } else {
            // Deploy with signer
            let signer = self.eth.wallet.signer().await?;
            let deployer = signer.address();
            let provider = ProviderBuilder::<_, _, AnyNetwork>::default()
                .wallet(EthereumWallet::new(signer))
                .connect_provider(provider);
            self.deploy(
                abi,
                bin,
                params,
                provider,
                chain_id,
                deployer,
                config.transaction_timeout,
                id,
                dry_run,
            )
            .await
        }
    }

    /// Returns the provided chain id, if any.
    fn chain_id(&self) -> Option<u64> {
        self.eth.etherscan.chain.map(|chain| chain.id())
    }

    /// Ensures the verify command can be executed.
    ///
    /// This is supposed to check any things that might go wrong when preparing a verify request
    /// before the contract is deployed. This should prevent situations where a contract is deployed
    /// successfully, but we fail to prepare a verify request which would require manual
    /// verification.
    async fn verify_preflight_check(
        &self,
        constructor_args: Option<String>,
        chain: u64,
        id: &ArtifactId,
    ) -> Result<()> {
        // NOTE: this does not represent the same `VerifyArgs` that would be sent after deployment,
        // since we don't know the address yet.
        let mut verify = VerifyArgs {
            address: Default::default(),
            contract: Some(self.contract.clone()),
            compiler_version: Some(id.version.to_string()),
            constructor_args,
            constructor_args_path: None,
            no_auto_detect: false,
            use_solc: None,
            num_of_optimizations: None,
            etherscan: EtherscanOpts {
                key: self.eth.etherscan.key.clone(),
                chain: Some(chain.into()),
            },
            rpc: Default::default(),
            flatten: false,
            force: false,
            skip_is_verified_check: true,
            watch: true,
            retry: self.retry,
            libraries: self.build.libraries.clone(),
            root: None,
            verifier: self.verifier.clone(),
            via_ir: self.build.via_ir,
            evm_version: self.build.compiler.evm_version,
            show_standard_json_input: self.show_standard_json_input,
            guess_constructor_args: false,
            compilation_profile: Some(id.profile.to_string()),
            language: None,
            creation_transaction_hash: None,
        };

        // Check config for Etherscan API Keys to avoid preflight check failing if no
        // ETHERSCAN_API_KEY value set.
        let config = verify.load_config()?;
        verify.etherscan.key =
            config.get_etherscan_config_with_chain(Some(chain.into()))?.map(|c| c.key);

        let context = verify.resolve_context().await?;

        verify.verification_provider()?.preflight_verify_check(verify, context).await?;
        Ok(())
    }

    /// Deploys the contract
    ///
    /// Builds and fee-populates the deployment transaction; with `dry_run` it
    /// only prints the transaction, otherwise it broadcasts it, reports the
    /// deployed address, and optionally kicks off verification.
    #[expect(clippy::too_many_arguments)]
    async fn deploy<P: Provider<AnyNetwork>>(
        self,
        abi: JsonAbi,
        bin: BytecodeObject,
        args: Vec<DynSolValue>,
        provider: P,
        chain: u64,
        deployer_address: Address,
        timeout: u64,
        id: ArtifactId,
        dry_run: bool,
    ) -> Result<()> {
        let bin = bin.into_bytes().unwrap_or_default();
        if bin.is_empty() {
            eyre::bail!("no bytecode found in bin object for {}", self.contract.name)
        }
        let provider = Arc::new(provider);
        let factory = ContractFactory::new(abi.clone(), bin.clone(), provider.clone(), timeout);

        let is_args_empty = args.is_empty();
        let mut deployer =
            factory.deploy_tokens(args.clone()).context("failed to deploy contract").map_err(|e| {
                if is_args_empty {
                    e.wrap_err("no arguments provided for contract constructor; consider --constructor-args or --constructor-args-path")
                } else {
                    e
                }
            })?;
        let is_legacy = self.tx.legacy || Chain::from(chain).is_legacy();

        deployer.tx.set_from(deployer_address);
        deployer.tx.set_chain_id(chain);
        // `to` field must be set explicitly, cannot be None.
        if deployer.tx.to.is_none() {
            deployer.tx.set_create();
        }
        // Nonce and gas limit fall back to provider queries when not given.
        deployer.tx.set_nonce(if let Some(nonce) = self.tx.nonce {
            Ok(nonce.to())
        } else {
            provider.get_transaction_count(deployer_address).await
        }?);

        // set tx value if specified
        if let Some(value) = self.tx.value {
            deployer.tx.set_value(value);
        }

        deployer.tx.set_gas_limit(if let Some(gas_limit) = self.tx.gas_limit {
            Ok(gas_limit.to())
        } else {
            provider.estimate_gas(deployer.tx.clone()).await
        }?);

        if is_legacy {
            let gas_price = if let Some(gas_price) = self.tx.gas_price {
                gas_price.to()
            } else {
                provider.get_gas_price().await?
            };
            deployer.tx.set_gas_price(gas_price);
        } else {
            // EIP-1559 fees; explicit CLI values override the estimate.
            let estimate = provider.estimate_eip1559_fees().await.wrap_err("Failed to estimate EIP1559 fees. This chain might not support EIP1559, try adding --legacy to your command.")?;
            let priority_fee = if let Some(priority_fee) = self.tx.priority_gas_price {
                priority_fee.to()
            } else {
                estimate.max_priority_fee_per_gas
            };
            let max_fee = if let Some(max_fee) = self.tx.gas_price {
                max_fee.to()
            } else {
                estimate.max_fee_per_gas
            };

            deployer.tx.set_max_fee_per_gas(max_fee);
            deployer.tx.set_max_priority_fee_per_gas(priority_fee);
        }

        // Before we actually deploy the contract we try check if the verify settings are valid
        let mut constructor_args = None;
        if self.verify {
            if !args.is_empty() {
                let encoded_args = abi
                    .constructor()
                    .ok_or_else(|| eyre::eyre!("could not find constructor"))?
                    .abi_encode_input(&args)?;
                constructor_args = Some(hex::encode(encoded_args));
            }

            self.verify_preflight_check(constructor_args.clone(), chain, &id).await?;
        }

        if dry_run {
            if !shell::is_json() {
                sh_warn!("Dry run enabled, not broadcasting transaction\n")?;

                sh_println!("Contract: {}", self.contract.name)?;
                sh_println!(
                    "Transaction: {}",
                    serde_json::to_string_pretty(&deployer.tx.clone())?
                )?;
                sh_println!("ABI: {}\n", serde_json::to_string_pretty(&abi)?)?;

                sh_warn!(
                    "To broadcast this transaction, add --broadcast to the previous command. See forge create --help for more."
                )?;
            } else {
                let output = json!({
                    "contract": self.contract.name,
                    "transaction": &deployer.tx,
                    "abi":&abi
                });
                sh_println!("{}", serde_json::to_string_pretty(&output)?)?;
            }

            return Ok(());
        }

        // Deploy the actual contract
        let (deployed_contract, receipt) = deployer.send_with_receipt().await?;

        let address = deployed_contract;
        if shell::is_json() {
            let output = json!({
                "deployer": deployer_address.to_string(),
                "deployedTo": address.to_string(),
                "transactionHash": receipt.transaction_hash
            });
            sh_println!("{}", serde_json::to_string_pretty(&output)?)?;
        } else {
            sh_println!("Deployer: {deployer_address}")?;
            sh_println!("Deployed to: {address}")?;
            sh_println!("Transaction hash: {:?}", receipt.transaction_hash)?;
        };

        if !self.verify {
            return Ok(());
        }

        sh_println!("Starting contract verification...")?;

        // When the optimizer flag is given explicitly, the run count defaults
        // to 200 (solc's default); otherwise pass through whatever was set.
        let num_of_optimizations = if let Some(optimizer) = self.build.compiler.optimize {
            if optimizer { Some(self.build.compiler.optimizer_runs.unwrap_or(200)) } else { None }
        } else {
            self.build.compiler.optimizer_runs
        };
        let verify = VerifyArgs {
            address,
            contract: Some(self.contract),
            compiler_version: Some(id.version.to_string()),
            constructor_args,
            constructor_args_path: None,
            no_auto_detect: false,
            use_solc: None,
            num_of_optimizations,
            etherscan: EtherscanOpts { key: self.eth.etherscan.key(), chain: Some(chain.into()) },
            rpc: Default::default(),
            flatten: false,
            force: false,
            skip_is_verified_check: true,
            watch: true,
            retry: self.retry,
            libraries: self.build.libraries.clone(),
            root: None,
            verifier: self.verifier,
            via_ir: self.build.via_ir,
            evm_version: self.build.compiler.evm_version,
            show_standard_json_input: self.show_standard_json_input,
            guess_constructor_args: false,
            compilation_profile: Some(id.profile.to_string()),
            language: None,
            creation_transaction_hash: Some(receipt.transaction_hash),
        };
        sh_println!("Waiting for {} to detect contract deployment...", verify.verifier.verifier)?;
        verify.run().await
    }

    /// Parses the given constructor arguments into a vector of `DynSolValue`s, by matching them
    /// against the constructor's input params.
    ///
    /// Returns a list of parsed values that match the constructor's input params.
    fn parse_constructor_args(
        &self,
        constructor: &Constructor,
        constructor_args: &[String],
    ) -> Result<Vec<DynSolValue>> {
        let mut params = Vec::with_capacity(constructor.inputs.len());
        for (input, arg) in constructor.inputs.iter().zip(constructor_args) {
            // resolve the input type directly
            let ty = input
                .resolve()
                .wrap_err_with(|| format!("Could not resolve constructor arg: input={input}"))?;
            params.push((ty, arg));
        }

        let params = params.iter().map(|(ty, arg)| (ty, arg.as_str()));
        parse_tokens(params).map_err(Into::into)
    }
}
impl figment::Provider for CreateArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("Create Args Provider")
    }

    /// Exposes `--timeout` to the config figment as `transaction_timeout`.
    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let mut dict = Dict::default();
        if let Some(t) = self.timeout {
            dict.insert(String::from("transaction_timeout"), t.into());
        }
        let mut map = Map::new();
        map.insert(Config::selected_profile(), dict);
        Ok(map)
    }
}
/// `ContractFactory` is a [`DeploymentTxFactory`] object with an
/// [`Arc`] middleware. This type alias exists to preserve backwards
/// compatibility with less-abstract Contracts.
///
/// For full usage docs, see [`DeploymentTxFactory`].
// NOTE(review): currently a plain alias — the `Arc` wrapping happens at the
// call site (`Arc::new(provider)` in `deploy`), not in this type.
pub type ContractFactory<P> = DeploymentTxFactory<P>;
/// Helper which manages the deployment transaction of a smart contract. It
/// wraps a deployment transaction, and retrieves the contract address output
/// by it.
#[derive(Debug)]
#[must_use = "ContractDeploymentTx does nothing unless you `send` it"]
pub struct ContractDeploymentTx<P, C> {
    /// the actual deployer, exposed for overriding the defaults
    pub deployer: Deployer<P>,
    /// marker for the `Contract` type to create afterwards
    ///
    /// this type will be used to construct it via `From::from(Contract)`
    _contract: PhantomData<C>,
}
impl<P: Clone, C> Clone for ContractDeploymentTx<P, C> {
fn clone(&self) -> Self {
Self { deployer: self.deployer.clone(), _contract: self._contract }
}
}
impl<P, C> From<Deployer<P>> for ContractDeploymentTx<P, C> {
    // Wraps a raw `Deployer`, fixing the target contract type `C`.
    fn from(deployer: Deployer<P>) -> Self {
        Self { deployer, _contract: PhantomData }
    }
}
/// Helper which manages the deployment transaction of a smart contract
#[derive(Clone, Debug)]
#[must_use = "Deployer does nothing unless you `send` it"]
pub struct Deployer<P> {
    /// The deployer's transaction, exposed for overriding the defaults
    pub tx: WithOtherFields<TransactionRequest>,
    // Provider used to broadcast the transaction.
    client: P,
    // Number of confirmations to wait for before returning.
    confs: usize,
    // Receipt-wait timeout, in seconds (see `Duration::from_secs` below).
    timeout: u64,
}
impl<P: Provider<AnyNetwork>> Deployer<P> {
    /// Broadcasts the contract deployment transaction and after waiting for it to
    /// be sufficiently confirmed (default: 1), it returns a tuple with the [`Address`] at the
    /// deployed contract's address and the corresponding [`AnyTransactionReceipt`].
    ///
    /// Fails with [`ContractDeploymentError::ContractNotDeployed`] when the
    /// receipt carries no contract address.
    pub async fn send_with_receipt(
        self,
    ) -> Result<(Address, AnyTransactionReceipt), ContractDeploymentError> {
        let receipt = self
            .client
            .borrow()
            .send_transaction(self.tx)
            .await?
            .with_required_confirmations(self.confs as u64)
            .with_timeout(Some(Duration::from_secs(self.timeout)))
            .get_receipt()
            .await?;

        let address =
            receipt.contract_address.ok_or(ContractDeploymentError::ContractNotDeployed)?;

        Ok((address, receipt))
    }
}
/// To deploy a contract to the Ethereum network, a [`ContractFactory`] can be
/// created which manages the Contract bytecode and Application Binary Interface
/// (ABI), usually generated from the Solidity compiler.
#[derive(Clone, Debug)]
pub struct DeploymentTxFactory<P> {
    // Provider handed to the resulting `Deployer`.
    client: P,
    // ABI used to locate and encode the constructor.
    abi: JsonAbi,
    // Contract creation bytecode (constructor args are appended to it).
    bytecode: Bytes,
    // Receipt-wait timeout (seconds), forwarded to the `Deployer`.
    timeout: u64,
}
impl<P: Provider<AnyNetwork> + Clone> DeploymentTxFactory<P> {
    /// Creates a factory for deployment of the Contract with bytecode, and the
    /// constructor defined in the abi. The client will be used to send any deployment
    /// transaction.
    pub fn new(abi: JsonAbi, bytecode: Bytes, client: P, timeout: u64) -> Self {
        Self { client, abi, bytecode, timeout }
    }

    /// Create a deployment tx using the provided tokens as constructor
    /// arguments
    ///
    /// Errors with `ConstructorError` when args are supplied but the ABI has
    /// no constructor.
    pub fn deploy_tokens(
        self,
        params: Vec<DynSolValue>,
    ) -> Result<Deployer<P>, ContractDeploymentError> {
        // Encode the constructor args & concatenate with the bytecode if necessary
        let data: Bytes = match (self.abi.constructor(), params.is_empty()) {
            (None, false) => return Err(ContractDeploymentError::ConstructorError),
            (None, true) => self.bytecode.clone(),
            (Some(constructor), _) => {
                let input: Bytes = constructor
                    .abi_encode_input(&params)
                    .map_err(ContractDeploymentError::DetokenizationError)?
                    .into();
                // Concatenate the bytecode and abi-encoded constructor call.
                self.bytecode.iter().copied().chain(input).collect()
            }
        };

        // create the tx object. Since we're deploying a contract, `to` is `None`
        let tx = WithOtherFields::new(TransactionRequest::default().input(data.into()));

        Ok(Deployer { client: self.client.clone(), tx, confs: 1, timeout: self.timeout })
    }
}
#[derive(thiserror::Error, Debug)]
/// An Error which is thrown when interacting with a smart contract
pub enum ContractDeploymentError {
    /// Constructor args were supplied but the ABI declares no constructor.
    #[error("constructor is not defined in the ABI")]
    ConstructorError,
    /// Constructor argument ABI-encoding failed.
    #[error(transparent)]
    DetokenizationError(#[from] alloy_dyn_abi::Error),
    /// The receipt contained no contract address.
    #[error("contract was not deployed")]
    ContractNotDeployed,
    /// Underlying transport/RPC failure.
    #[error(transparent)]
    RpcError(#[from] TransportError),
}
impl From<PendingTransactionError> for ContractDeploymentError {
    // NOTE(review): every pending-transaction failure (timeout, dropped tx,
    // watch errors) is collapsed into `ContractNotDeployed`, discarding the
    // underlying cause.
    fn from(_err: PendingTransactionError) -> Self {
        Self::ContractNotDeployed
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::I256;

    #[test]
    fn can_parse_create() {
        // Retry flags must flow into `RetryArgs`.
        let args: CreateArgs = CreateArgs::parse_from([
            "foundry-cli",
            "src/Domains.sol:Domains",
            "--verify",
            "--retries",
            "10",
            "--delay",
            "30",
        ]);
        assert_eq!(args.retry.retries, 10);
        assert_eq!(args.retry.delay, 30);
    }

    #[test]
    fn can_parse_chain_id() {
        // `--chain-id` must be reflected by the `chain_id()` accessor.
        let args: CreateArgs = CreateArgs::parse_from([
            "foundry-cli",
            "src/Domains.sol:Domains",
            "--verify",
            "--retries",
            "10",
            "--delay",
            "30",
            "--chain-id",
            "9999",
        ]);
        assert_eq!(args.chain_id(), Some(9999));
    }

    #[test]
    fn test_parse_constructor_args() {
        // A single string arg parses into `DynSolValue::String`.
        let args: CreateArgs = CreateArgs::parse_from([
            "foundry-cli",
            "src/Domains.sol:Domains",
            "--constructor-args",
            "Hello",
        ]);
        let constructor: Constructor = serde_json::from_str(r#"{"type":"constructor","inputs":[{"name":"_name","type":"string","internalType":"string"}],"stateMutability":"nonpayable"}"#).unwrap();
        let params = args.parse_constructor_args(&constructor, &args.constructor_args).unwrap();
        assert_eq!(params, vec![DynSolValue::String("Hello".to_string())]);
    }

    #[test]
    fn test_parse_tuple_constructor_args() {
        // An array of tuples must parse against a `tuple[]` constructor input.
        let args: CreateArgs = CreateArgs::parse_from([
            "foundry-cli",
            "src/Domains.sol:Domains",
            "--constructor-args",
            "[(1,2), (2,3), (3,4)]",
        ]);
        let constructor: Constructor = serde_json::from_str(r#"{"type":"constructor","inputs":[{"name":"_points","type":"tuple[]","internalType":"struct Point[]","components":[{"name":"x","type":"uint256","internalType":"uint256"},{"name":"y","type":"uint256","internalType":"uint256"}]}],"stateMutability":"nonpayable"}"#).unwrap();
        let _params = args.parse_constructor_args(&constructor, &args.constructor_args).unwrap();
    }

    #[test]
    fn test_parse_int_constructor_args() {
        // Negative values are allowed thanks to `allow_hyphen_values`.
        let args: CreateArgs = CreateArgs::parse_from([
            "foundry-cli",
            "src/Domains.sol:Domains",
            "--constructor-args",
            "-5",
        ]);
        let constructor: Constructor = serde_json::from_str(r#"{"type":"constructor","inputs":[{"name":"_name","type":"int256","internalType":"int256"}],"stateMutability":"nonpayable"}"#).unwrap();
        let params = args.parse_constructor_args(&constructor, &args.constructor_args).unwrap();
        assert_eq!(params, vec![DynSolValue::Int(I256::unchecked_from(-5), 256)]);
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/inspect.rs | crates/forge/src/cmd/inspect.rs | use alloy_json_abi::{EventParam, InternalType, JsonAbi, Param};
use alloy_primitives::{hex, keccak256};
use clap::Parser;
use comfy_table::{Cell, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN};
use eyre::{Result, eyre};
use foundry_cli::opts::{BuildOpts, CompilerOpts};
use foundry_common::{
compile::{PathOrContractInfo, ProjectCompiler},
find_matching_contract_artifact, find_target_path, shell,
};
use foundry_compilers::{
artifacts::{
StorageLayout,
output_selection::{
BytecodeOutputSelection, ContractOutputSelection, DeployedBytecodeOutputSelection,
EvmOutputSelection, EwasmOutputSelection,
},
},
solc::SolcLanguage,
};
use regex::Regex;
use serde_json::{Map, Value};
use std::{collections::BTreeMap, fmt, str::FromStr, sync::LazyLock};
/// CLI arguments for `forge inspect`.
#[derive(Clone, Debug, Parser)]
pub struct InspectArgs {
    /// The identifier of the contract to inspect in the form `(<path>:)?<contractname>`.
    #[arg(value_parser = PathOrContractInfo::from_str)]
    pub contract: PathOrContractInfo,

    /// The contract artifact field to inspect.
    #[arg(value_enum)]
    pub field: ContractArtifactField,

    /// All build arguments are supported
    #[command(flatten)]
    build: BuildOpts,

    /// Whether to remove comments when inspecting `ir` and `irOptimized` artifact fields.
    #[arg(long, short, help_heading = "Display options")]
    pub strip_yul_comments: bool,

    /// Whether to wrap the table to the terminal width.
    #[arg(long, short, help_heading = "Display options")]
    pub wrap: bool,
}
impl InspectArgs {
    /// Compiles the target contract — requesting any extra compiler output the
    /// selected `field` needs — and pretty-prints that field of the artifact.
    ///
    /// Output is JSON when the shell is in JSON mode, otherwise human-readable
    /// (tables for ABI/storage/method identifiers, raw text for bytecode/IR).
    pub fn run(self) -> Result<()> {
        let Self { contract, field, build, strip_yul_comments, wrap } = self;
        trace!(target: "forge", ?field, ?contract, "running forge inspect");
        // Map field to ContractOutputSelection so the compiler actually emits it
        // (skipped for fields that need no extra output, see `can_skip_field`).
        let mut cos = build.compiler.extra_output;
        if !field.can_skip_field() && !cos.iter().any(|selected| field == *selected) {
            cos.push(field.try_into()?);
        }
        // Run Optimized? `assemblyOptimized` forces the optimizer on.
        let optimized = if field == ContractArtifactField::AssemblyOptimized {
            Some(true)
        } else {
            build.compiler.optimize
        };
        // Get the solc version if specified
        let solc_version = build.use_solc.clone();
        // Build modified Args
        let modified_build_args = BuildOpts {
            compiler: CompilerOpts { extra_output: cos, optimize: optimized, ..build.compiler },
            ..build
        };
        // Build the project, compiling only the target file.
        let project = modified_build_args.project()?;
        let compiler = ProjectCompiler::new().quiet(true);
        let target_path = find_target_path(&project, &contract)?;
        let mut output = compiler.files([target_path.clone()]).compile(&project)?;
        // Find the artifact
        let artifact = find_matching_contract_artifact(&mut output, &target_path, contract.name())?;
        // Match on ContractArtifactFields and pretty-print
        match field {
            ContractArtifactField::Abi => {
                let abi = artifact.abi.as_ref().ok_or_else(|| missing_error("ABI"))?;
                print_abi(abi, wrap)?;
            }
            ContractArtifactField::Bytecode => {
                print_json_str(&artifact.bytecode, Some("object"))?;
            }
            ContractArtifactField::DeployedBytecode => {
                print_json_str(&artifact.deployed_bytecode, Some("object"))?;
            }
            ContractArtifactField::Assembly | ContractArtifactField::AssemblyOptimized => {
                print_json_str(&artifact.assembly, None)?;
            }
            ContractArtifactField::LegacyAssembly => {
                print_json_str(&artifact.legacy_assembly, None)?;
            }
            ContractArtifactField::MethodIdentifiers => {
                print_method_identifiers(&artifact.method_identifiers, wrap)?;
            }
            ContractArtifactField::GasEstimates => {
                print_json(&artifact.gas_estimates)?;
            }
            ContractArtifactField::StorageLayout => {
                print_storage_layout(artifact.storage_layout.as_ref(), wrap)?;
            }
            ContractArtifactField::DevDoc => {
                print_json(&artifact.devdoc)?;
            }
            ContractArtifactField::Ir => {
                print_yul(artifact.ir.as_deref(), strip_yul_comments)?;
            }
            ContractArtifactField::IrOptimized => {
                print_yul(artifact.ir_optimized.as_deref(), strip_yul_comments)?;
            }
            ContractArtifactField::Metadata => {
                print_json(&artifact.metadata)?;
            }
            ContractArtifactField::UserDoc => {
                print_json(&artifact.userdoc)?;
            }
            ContractArtifactField::Ewasm => {
                print_json_str(&artifact.ewasm, None)?;
            }
            // Errors/events are derived client-side from the ABI.
            ContractArtifactField::Errors => {
                let out = artifact.abi.as_ref().map_or(Map::new(), parse_errors);
                print_errors_events(&out, true, wrap)?;
            }
            ContractArtifactField::Events => {
                let out = artifact.abi.as_ref().map_or(Map::new(), parse_events);
                print_errors_events(&out, false, wrap)?;
            }
            ContractArtifactField::StandardJson => {
                // Normalize to the explicitly requested solc version, if any.
                let standard_json = if let Some(version) = solc_version {
                    let version = version.parse()?;
                    let mut standard_json =
                        project.standard_json_input(&target_path)?.normalize_evm_version(&version);
                    standard_json.settings.sanitize(&version, SolcLanguage::Solidity);
                    standard_json
                } else {
                    project.standard_json_input(&target_path)?
                };
                print_json(&standard_json)?;
            }
            ContractArtifactField::Libraries => {
                // Collect all link references as `path:LibName` strings.
                let all_libs: Vec<String> = artifact
                    .all_link_references()
                    .into_iter()
                    .flat_map(|(path, libs)| {
                        libs.into_keys().map(move |lib| format!("{path}:{lib}"))
                    })
                    .collect();
                if shell::is_json() {
                    return print_json(&all_libs);
                } else {
                    sh_println!(
                        "Dynamically linked libraries:\n{}",
                        all_libs
                            .iter()
                            .map(|v| format!(" {v}"))
                            .collect::<Vec<String>>()
                            .join("\n")
                    )?;
                }
            }
        };
        Ok(())
    }
}
fn parse_errors(abi: &JsonAbi) -> Map<String, Value> {
let mut out = serde_json::Map::new();
for er in abi.errors.iter().flat_map(|(_, errors)| errors) {
let types = get_ty_sig(&er.inputs);
let sig = format!("{:x}", er.selector());
let sig_trimmed = &sig[0..8];
out.insert(format!("{}({})", er.name, types), sig_trimmed.to_string().into());
}
out
}
fn parse_events(abi: &JsonAbi) -> Map<String, Value> {
let mut out = serde_json::Map::new();
for ev in abi.events.iter().flat_map(|(_, events)| events) {
let types = parse_event_params(&ev.inputs);
let topic = hex::encode(keccak256(ev.signature()));
out.insert(format!("{}({})", ev.name, types), format!("0x{topic}").into());
}
out
}
/// Renders event parameter types as a comma-separated list, preferring the
/// Solidity-level (internal) type name over the raw ABI type when available.
fn parse_event_params(ev_params: &[EventParam]) -> String {
    let mut parts = Vec::with_capacity(ev_params.len());
    for param in ev_params {
        let rendered = match param.internal_type() {
            Some(ty) => internal_ty(ty),
            None => param.ty.clone(),
        };
        parts.push(rendered);
    }
    parts.join(",")
}
/// Pretty-prints a contract ABI: JSON in JSON mode, otherwise a three-column
/// table of (type, signature, selector) covering events, errors, functions,
/// constructor, fallback and receive.
fn print_abi(abi: &JsonAbi, should_wrap: bool) -> Result<()> {
    if shell::is_json() {
        return print_json(abi);
    }

    let headers = vec![Cell::new("Type"), Cell::new("Signature"), Cell::new("Selector")];
    print_table(
        headers,
        |table| {
            // Print events
            for ev in abi.events.iter().flat_map(|(_, events)| events) {
                let types = parse_event_params(&ev.inputs);
                let selector = ev.selector().to_string();
                table.add_row(["event", &format!("{}({})", ev.name, types), &selector]);
            }

            // Print errors
            for er in abi.errors.iter().flat_map(|(_, errors)| errors) {
                let selector = er.selector().to_string();
                table.add_row([
                    "error",
                    &format!("{}({})", er.name, get_ty_sig(&er.inputs)),
                    &selector,
                ]);
            }

            // Print functions, including state mutability and return types.
            for func in abi.functions.iter().flat_map(|(_, f)| f) {
                let selector = func.selector().to_string();
                let state_mut = func.state_mutability.as_json_str();
                let func_sig = if !func.outputs.is_empty() {
                    format!(
                        "{}({}) {state_mut} returns ({})",
                        func.name,
                        get_ty_sig(&func.inputs),
                        get_ty_sig(&func.outputs)
                    )
                } else {
                    format!("{}({}) {state_mut}", func.name, get_ty_sig(&func.inputs))
                };
                table.add_row(["function", &func_sig, &selector]);
            }

            // Constructor, fallback and receive have no 4-byte selector.
            if let Some(constructor) = abi.constructor() {
                let state_mut = constructor.state_mutability.as_json_str();
                table.add_row([
                    "constructor",
                    &format!("constructor({}) {state_mut}", get_ty_sig(&constructor.inputs)),
                    "",
                ]);
            }

            if let Some(fallback) = &abi.fallback {
                let state_mut = fallback.state_mutability.as_json_str();
                table.add_row(["fallback", &format!("fallback() {state_mut}"), ""]);
            }

            if let Some(receive) = &abi.receive {
                let state_mut = receive.state_mutability.as_json_str();
                table.add_row(["receive", &format!("receive() {state_mut}"), ""]);
            }
        },
        should_wrap,
    )
}
fn get_ty_sig(inputs: &[Param]) -> String {
inputs
.iter()
.map(|p| {
if let Some(ty) = p.internal_type() {
return internal_ty(ty);
}
p.ty.clone()
})
.collect::<Vec<_>>()
.join(",")
}
/// Pretty-prints a Solidity internal type, qualifying enums/structs/other
/// user-defined types with their defining contract (`Contract.Type`) when known.
fn internal_ty(ty: &InternalType) -> String {
    match ty {
        InternalType::AddressPayable(addr) => addr.clone(),
        InternalType::Contract(contract) => contract.clone(),
        InternalType::Enum { contract, ty }
        | InternalType::Struct { contract, ty }
        | InternalType::Other { contract, ty } => match contract {
            Some(c) => format!("{c}.{ty}"),
            None => ty.clone(),
        },
    }
}
/// Prints a contract's storage layout: JSON in JSON mode, otherwise a table of
/// (name, type, slot, offset, bytes, contract) rows.
///
/// Errors if the artifact did not include a storage layout.
pub fn print_storage_layout(
    storage_layout: Option<&StorageLayout>,
    should_wrap: bool,
) -> Result<()> {
    let Some(storage_layout) = storage_layout else {
        return Err(missing_error("storage layout"));
    };

    if shell::is_json() {
        return print_json(&storage_layout);
    }

    let headers = vec![
        Cell::new("Name"),
        Cell::new("Type"),
        Cell::new("Slot"),
        Cell::new("Offset"),
        Cell::new("Bytes"),
        Cell::new("Contract"),
    ];

    print_table(
        headers,
        |table| {
            for slot in &storage_layout.storage {
                // Type metadata lives in a side table keyed by `storage_type`;
                // fall back to "?" if the key is missing.
                let storage_type = storage_layout.types.get(&slot.storage_type);
                table.add_row([
                    slot.label.as_str(),
                    storage_type.map_or("?", |t| &t.label),
                    &slot.slot,
                    &slot.offset.to_string(),
                    storage_type.map_or("?", |t| &t.number_of_bytes),
                    &slot.contract,
                ]);
            }
        },
        should_wrap,
    )
}
/// Prints a contract's method identifiers (4-byte selectors) either as JSON or
/// as a two-column table, depending on the active shell mode.
///
/// Errors if the artifact did not include method identifiers.
fn print_method_identifiers(
    method_identifiers: &Option<BTreeMap<String, String>>,
    should_wrap: bool,
) -> Result<()> {
    let Some(ids) = method_identifiers else {
        return Err(missing_error("method identifiers"));
    };
    if shell::is_json() {
        return print_json(ids);
    }
    print_table(
        vec![Cell::new("Method"), Cell::new("Identifier")],
        |table| {
            for (method, identifier) in ids {
                table.add_row([method, identifier]);
            }
        },
        should_wrap,
    )
}
/// Prints the error- or event-selector map either as JSON or as a two-column
/// table. `is_err` only selects the header labels (errors have 4-byte
/// selectors, events have 32-byte topic hashes).
fn print_errors_events(map: &Map<String, Value>, is_err: bool, should_wrap: bool) -> Result<()> {
    if shell::is_json() {
        return print_json(map);
    }
    let (kind, id) = if is_err { ("Error", "Selector") } else { ("Event", "Topic") };
    print_table(
        vec![Cell::new(kind), Cell::new(id)],
        |table| {
            for (method, selector) in map {
                // Values are always JSON strings here, inserted by
                // `parse_errors` / `parse_events`.
                table.add_row([method, selector.as_str().unwrap()]);
            }
        },
        should_wrap,
    )
}
/// Builds and prints a `comfy_table::Table` with the given headers, letting
/// `add_rows` populate the body.
///
/// Markdown shells get the ASCII-markdown preset, everything else rounded
/// UTF-8 corners. `should_wrap` enables wrapping to the terminal width.
fn print_table(
    headers: Vec<Cell>,
    add_rows: impl FnOnce(&mut Table),
    should_wrap: bool,
) -> Result<()> {
    let mut table = Table::new();
    if !shell::is_markdown() {
        table.apply_modifier(UTF8_ROUND_CORNERS);
    } else {
        table.load_preset(ASCII_MARKDOWN);
    }
    table.set_header(headers);
    if should_wrap {
        table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic);
    }
    add_rows(&mut table);
    sh_println!("\n{table}\n")?;
    Ok(())
}
/// Contract level output selection
///
/// The set of artifact fields `forge inspect` can display. CLI names and
/// aliases for each variant are declared in the `impl_value_enum!` invocation
/// below.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ContractArtifactField {
    Abi,
    Bytecode,
    DeployedBytecode,
    Assembly,
    AssemblyOptimized,
    LegacyAssembly,
    MethodIdentifiers,
    GasEstimates,
    StorageLayout,
    DevDoc,
    Ir,
    IrOptimized,
    Metadata,
    UserDoc,
    Ewasm,
    // Derived client-side from the ABI output rather than a dedicated
    // compiler output selection.
    Errors,
    Events,
    // Synthesized without any extra compiler output selection (see
    // `can_skip_field` and the `TryFrom` impl).
    StandardJson,
    Libraries,
}
/// Generates `as_str`/`aliases` helpers plus `clap::ValueEnum` and `FromStr`
/// impls for a plain enum from a `Variant => "main" | "alias" ...` mapping.
macro_rules! impl_value_enum {
    (enum $name:ident { $($field:ident => $main:literal $(| $alias:literal)*),+ $(,)? }) => {
        impl $name {
            /// All the variants of this enum.
            pub const ALL: &'static [Self] = &[$(Self::$field),+];

            /// Returns the string representation of `self`.
            pub const fn as_str(&self) -> &'static str {
                match self {
                    $(
                        Self::$field => $main,
                    )+
                }
            }

            /// Returns all the aliases of `self`.
            pub const fn aliases(&self) -> &'static [&'static str] {
                match self {
                    $(
                        Self::$field => &[$($alias),*],
                    )+
                }
            }
        }

        impl ::clap::ValueEnum for $name {
            fn value_variants<'a>() -> &'a [Self] {
                Self::ALL
            }

            fn to_possible_value(&self) -> Option<::clap::builder::PossibleValue> {
                Some(::clap::builder::PossibleValue::new(Self::as_str(self)).aliases(Self::aliases(self)))
            }

            fn from_str(input: &str, ignore_case: bool) -> Result<Self, String> {
                // Parsing is always case-sensitive; the alias lists below cover
                // the accepted spellings instead.
                let _ = ignore_case;
                <Self as ::std::str::FromStr>::from_str(input)
            }
        }

        impl ::std::str::FromStr for $name {
            type Err = String;

            fn from_str(s: &str) -> Result<Self, Self::Err> {
                match s {
                    $(
                        $main $(| $alias)* => Ok(Self::$field),
                    )+
                    _ => Err(format!(concat!("Invalid ", stringify!($name), " value: {}"), s)),
                }
            }
        }
    };
}
// Maps each artifact field to its canonical CLI name plus all accepted aliases
// (matched case-sensitively; see the macro's `from_str`).
impl_value_enum! {
    enum ContractArtifactField {
        Abi => "abi",
        Bytecode => "bytecode" | "bytes" | "b",
        DeployedBytecode => "deployedBytecode" | "deployed_bytecode" | "deployed-bytecode"
            | "deployed" | "deployedbytecode",
        Assembly => "assembly" | "asm",
        LegacyAssembly => "legacyAssembly" | "legacyassembly" | "legacy_assembly",
        AssemblyOptimized => "assemblyOptimized" | "asmOptimized" | "assemblyoptimized"
            | "assembly_optimized" | "asmopt" | "assembly-optimized"
            | "asmo" | "asm-optimized" | "asmoptimized" | "asm_optimized",
        MethodIdentifiers => "methodIdentifiers" | "methodidentifiers" | "methods"
            | "method_identifiers" | "method-identifiers" | "mi",
        GasEstimates => "gasEstimates" | "gas" | "gas_estimates" | "gas-estimates"
            | "gasestimates",
        StorageLayout => "storageLayout" | "storage_layout" | "storage-layout"
            | "storagelayout" | "storage",
        DevDoc => "devdoc" | "dev-doc" | "devDoc",
        Ir => "ir" | "iR" | "IR",
        IrOptimized => "irOptimized" | "ir-optimized" | "iroptimized" | "iro" | "iropt",
        Metadata => "metadata" | "meta",
        UserDoc => "userdoc" | "userDoc" | "user-doc",
        Ewasm => "ewasm" | "e-wasm",
        Errors => "errors" | "er",
        Events => "events" | "ev",
        StandardJson => "standardJson" | "standard-json" | "standard_json",
        Libraries => "libraries" | "lib" | "libs",
    }
}
impl TryFrom<ContractArtifactField> for ContractOutputSelection {
type Error = eyre::Error;
fn try_from(field: ContractArtifactField) -> Result<Self, Self::Error> {
type Caf = ContractArtifactField;
match field {
Caf::Abi => Ok(Self::Abi),
Caf::Bytecode => {
Ok(Self::Evm(EvmOutputSelection::ByteCode(BytecodeOutputSelection::All)))
}
Caf::DeployedBytecode => Ok(Self::Evm(EvmOutputSelection::DeployedByteCode(
DeployedBytecodeOutputSelection::All,
))),
Caf::Assembly | Caf::AssemblyOptimized => Ok(Self::Evm(EvmOutputSelection::Assembly)),
Caf::LegacyAssembly => Ok(Self::Evm(EvmOutputSelection::LegacyAssembly)),
Caf::MethodIdentifiers => Ok(Self::Evm(EvmOutputSelection::MethodIdentifiers)),
Caf::GasEstimates => Ok(Self::Evm(EvmOutputSelection::GasEstimates)),
Caf::StorageLayout => Ok(Self::StorageLayout),
Caf::DevDoc => Ok(Self::DevDoc),
Caf::Ir => Ok(Self::Ir),
Caf::IrOptimized => Ok(Self::IrOptimized),
Caf::Metadata => Ok(Self::Metadata),
Caf::UserDoc => Ok(Self::UserDoc),
Caf::Ewasm => Ok(Self::Ewasm(EwasmOutputSelection::All)),
Caf::Errors => Ok(Self::Abi),
Caf::Events => Ok(Self::Abi),
Caf::StandardJson => {
Err(eyre!("StandardJson is not supported for ContractOutputSelection"))
}
Caf::Libraries => Err(eyre!("Libraries is not supported for ContractOutputSelection")),
}
}
}
impl PartialEq<ContractOutputSelection> for ContractArtifactField {
fn eq(&self, other: &ContractOutputSelection) -> bool {
type Cos = ContractOutputSelection;
type Eos = EvmOutputSelection;
matches!(
(self, other),
(Self::Abi | Self::Events, Cos::Abi)
| (Self::Errors, Cos::Abi)
| (Self::Bytecode, Cos::Evm(Eos::ByteCode(_)))
| (Self::DeployedBytecode, Cos::Evm(Eos::DeployedByteCode(_)))
| (Self::Assembly | Self::AssemblyOptimized, Cos::Evm(Eos::Assembly))
| (Self::LegacyAssembly, Cos::Evm(Eos::LegacyAssembly))
| (Self::MethodIdentifiers, Cos::Evm(Eos::MethodIdentifiers))
| (Self::GasEstimates, Cos::Evm(Eos::GasEstimates))
| (Self::StorageLayout, Cos::StorageLayout)
| (Self::DevDoc, Cos::DevDoc)
| (Self::Ir, Cos::Ir)
| (Self::IrOptimized, Cos::IrOptimized)
| (Self::Metadata, Cos::Metadata)
| (Self::UserDoc, Cos::UserDoc)
| (Self::Ewasm, Cos::Ewasm(_))
)
}
}
impl fmt::Display for ContractArtifactField {
    /// Displays the canonical CLI name (same as `as_str`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
impl ContractArtifactField {
    /// Returns true if this field does not need to be passed to the compiler.
    pub const fn can_skip_field(&self) -> bool {
        match self {
            Self::Bytecode | Self::DeployedBytecode | Self::StandardJson | Self::Libraries => true,
            _ => false,
        }
    }
}
fn print_json(obj: &impl serde::Serialize) -> Result<()> {
sh_println!("{}", serde_json::to_string_pretty(obj)?)?;
Ok(())
}
fn print_json_str(obj: &impl serde::Serialize, key: Option<&str>) -> Result<()> {
sh_println!("{}", get_json_str(obj, key)?)?;
Ok(())
}
/// Prints Yul IR output, optionally stripping `///` line comments and
/// `/** ... */` block comments.
///
/// Errors if the compiler did not emit IR for this artifact.
fn print_yul(yul: Option<&str>, strip_comments: bool) -> Result<()> {
    let Some(yul) = yul else {
        return Err(missing_error("IR output"));
    };

    // NOTE(review): without the `(?s)` flag `.` does not match newlines, so
    // multi-line `/** ... */` blocks are NOT stripped — confirm whether that
    // is intentional before changing the pattern.
    static YUL_COMMENTS: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(///.*\n\s*)|(\s*/\*\*.*?\*/)").unwrap());

    if strip_comments {
        sh_println!("{}", YUL_COMMENTS.replace_all(yul, ""))?;
    } else {
        sh_println!("{yul}")?;
    }

    Ok(())
}
fn get_json_str(obj: &impl serde::Serialize, key: Option<&str>) -> Result<String> {
let value = serde_json::to_value(obj)?;
let value = if let Some(key) = key
&& let Some(value) = value.get(key)
{
value
} else {
&value
};
Ok(match value.as_str() {
Some(s) => s.to_string(),
None => format!("{value:#}"),
})
}
/// Builds the standard error for an artifact field the compiler did not emit,
/// suggesting `forge clean` since stale cache entries are the usual cause.
fn missing_error(field: &str) -> eyre::Error {
    eyre!("{field} missing from artifact; this could be a spurious caching issue, consider running `forge clean`")
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Every field must round-trip through its string name and aliases, and
    /// map onto a `ContractOutputSelection` — except the two synthesized
    /// fields, which must produce their dedicated error messages.
    #[test]
    fn contract_output_selection() {
        for &field in ContractArtifactField::ALL {
            match field {
                ContractArtifactField::StandardJson | ContractArtifactField::Libraries => {
                    let err = ContractOutputSelection::try_from(field).unwrap_err();
                    assert_eq!(
                        err.to_string(),
                        format!("{field:?} is not supported for ContractOutputSelection")
                    );
                }
                _ => {
                    let selection = ContractOutputSelection::try_from(field).unwrap();
                    assert_eq!(field, selection);

                    let s = field.as_str();
                    assert_eq!(s, field.to_string());
                    assert_eq!(s.parse::<ContractArtifactField>().unwrap(), field);
                    for alias in field.aliases() {
                        assert_eq!(alias.parse::<ContractArtifactField>().unwrap(), field);
                    }
                }
            }
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/fmt.rs | crates/forge/src/cmd/fmt.rs | use super::watch::WatchArgs;
use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::utils::{FoundryPathExt, LoadConfig};
use foundry_common::{errors::convert_solar_errors, fs};
use foundry_compilers::{compilers::solc::SolcLanguage, solc::SOLC_EXTENSIONS};
use foundry_config::{filter::expand_globs, impl_figment_convert_basic};
use rayon::prelude::*;
use similar::{ChangeTag, TextDiff};
use solar::sema::Compiler;
use std::{
fmt::{self, Write},
io,
io::Write as _,
path::{Path, PathBuf},
sync::Arc,
};
use yansi::{Color, Paint, Style};
/// CLI arguments for `forge fmt`.
// NOTE: the `///` doc comments on fields below are user-visible clap help text.
#[derive(Clone, Debug, Parser)]
pub struct FmtArgs {
    /// Path to the file, directory or '-' to read from stdin.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH", num_args(1..))]
    paths: Vec<PathBuf>,

    /// The project's root path.
    ///
    /// By default root of the Git repository, if in one,
    /// or the current working directory.
    #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
    root: Option<PathBuf>,

    /// Run in 'check' mode.
    ///
    /// Exits with 0 if input is formatted correctly.
    /// Exits with 1 if formatting is required.
    #[arg(long)]
    check: bool,

    /// In 'check' and stdin modes, outputs raw formatted code instead of the diff.
    #[arg(long, short)]
    raw: bool,

    // Watch-mode flags shared with other watchable commands.
    #[command(flatten)]
    pub watch: WatchArgs,
}

// Generates the figment/config conversion for `FmtArgs` (uses `root`).
impl_figment_convert_basic!(FmtArgs);
impl FmtArgs {
    /// Formats the selected Solidity sources.
    ///
    /// Input resolution: no paths -> all project sources (minus ignore globs
    /// and libs); `-` -> stdin; otherwise the given files/directories
    /// (explicit files are always included, even inside libs).
    ///
    /// In `--check` mode prints diffs and exits with code 1 when formatting is
    /// needed; otherwise rewrites files in place.
    pub fn run(self) -> Result<()> {
        let config = self.load_config()?;
        let cwd = std::env::current_dir()?;

        // Expand ignore globs and canonicalize from the get go
        let ignored = expand_globs(&config.root, config.fmt.ignore.iter())?
            .iter()
            .flat_map(fs::canonicalize_path)
            .collect::<Vec<_>>();

        // Expand lib globs separately - we only exclude these during discovery, not explicit paths
        let libs = expand_globs(&config.root, config.libs.iter().filter_map(|p| p.to_str()))?
            .iter()
            .flat_map(fs::canonicalize_path)
            .collect::<Vec<_>>();

        // Helper to check if a file path is under any ignored or lib directory.
        // Compares the path as given, relative to cwd, and canonicalized.
        let is_under_ignored_dir = |file_path: &Path, include_libs: bool| -> bool {
            let check_against_dir = |dir: &PathBuf| {
                file_path.starts_with(dir)
                    || cwd.join(file_path).starts_with(dir)
                    || fs::canonicalize_path(file_path).is_ok_and(|p| p.starts_with(dir))
            };
            ignored.iter().any(&check_against_dir)
                || (include_libs && libs.iter().any(&check_against_dir))
        };

        let input = match &self.paths[..] {
            [] => {
                // Retrieve the project paths, and filter out the ignored ones and libs.
                let project_paths: Vec<PathBuf> = config
                    .project_paths::<SolcLanguage>()
                    .input_files_iter()
                    .filter(|p| {
                        !(ignored.contains(p)
                            || ignored.contains(&cwd.join(p))
                            || is_under_ignored_dir(p, true))
                    })
                    .collect();
                Input::Paths(project_paths)
            }
            [one] if one == Path::new("-") => Input::Stdin,
            paths => {
                let mut inputs = Vec::with_capacity(paths.len());
                for path in paths {
                    // Check if path is in ignored directories
                    if !ignored.is_empty()
                        && ((path.is_absolute() && ignored.contains(path))
                            || ignored.contains(&cwd.join(path)))
                    {
                        continue;
                    }
                    if path.is_dir() {
                        // If the input directory is not a lib directory, make sure to ignore libs.
                        let exclude_libs = !is_under_ignored_dir(path, true);
                        inputs.extend(
                            foundry_compilers::utils::source_files_iter(path, SOLC_EXTENSIONS)
                                .filter(|p| {
                                    !(ignored.contains(p)
                                        || ignored.contains(&cwd.join(p))
                                        || is_under_ignored_dir(p, exclude_libs))
                                }),
                        );
                    } else if path.is_sol() {
                        // Explicit file paths are always included, even if in a lib
                        inputs.push(path.to_path_buf());
                    } else {
                        warn!("Cannot process path {}", path.display());
                    }
                }
                Input::Paths(inputs)
            }
        };

        let mut compiler = Compiler::new(
            solar::interface::Session::builder().with_buffer_emitter(Default::default()).build(),
        );

        // Parse, format, and check the diffs.
        compiler.enter_mut(|compiler| {
            let mut pcx = compiler.parse();
            // Formatting only needs the ASTs of the given files, not their imports.
            pcx.set_resolve_imports(false);
            match input {
                Input::Paths(paths) if paths.is_empty() => {
                    sh_warn!(
                        "Nothing to format.\n\
                         HINT: If you are working outside of the project, \
                         try providing paths to your source files: `forge fmt <paths>`"
                    )?;
                    return Ok(());
                }
                Input::Paths(paths) => _ = pcx.par_load_files(paths),
                Input::Stdin => _ = pcx.load_stdin(),
            }
            pcx.parse();

            let gcx = compiler.gcx();
            let fmt_config = Arc::new(config.fmt);
            // Format every source unit in parallel; collect diff summaries (or
            // raw output) for the check/stdin paths, write files in place
            // otherwise.
            let diffs: Vec<String> = gcx
                .sources
                .raw
                .par_iter()
                .filter_map(|source_unit| {
                    // `path` is None exactly when the source came from stdin.
                    let path = source_unit.file.name.as_real();
                    let original = source_unit.file.src.as_str();
                    let formatted = forge_fmt::format_ast(gcx, source_unit, fmt_config.clone())?;
                    let from_stdin = path.is_none();
                    // Return formatted code when read from stdin and raw enabled.
                    // <https://github.com/foundry-rs/foundry/issues/11871>
                    if from_stdin && self.raw {
                        return Some(Ok(formatted));
                    }
                    if original == formatted {
                        return None;
                    }
                    if self.check || from_stdin {
                        let summary = if self.raw {
                            formatted
                        } else {
                            let name = match path {
                                Some(path) => path
                                    .strip_prefix(&config.root)
                                    .unwrap_or(path)
                                    .display()
                                    .to_string(),
                                None => "stdin".to_string(),
                            };
                            format_diff_summary(&name, &TextDiff::from_lines(original, &formatted))
                        };
                        Some(Ok(summary))
                    } else if let Some(path) = path {
                        match fs::write(path, formatted) {
                            Ok(()) => {}
                            Err(e) => return Some(Err(e.into())),
                        }
                        let _ = sh_println!("Formatted {}", path.display());
                        None
                    } else {
                        unreachable!()
                    }
                })
                .collect::<Result<_>>()?;

            if !diffs.is_empty() {
                // Only reachable in `--check` mode or when formatting stdin
                // (those are the only branches that push into `diffs`).
                let mut stdout = io::stdout().lock();
                for (i, diff) in diffs.iter().enumerate() {
                    if i > 0 {
                        let _ = stdout.write_all(b"\n");
                    }
                    let _ = stdout.write_all(diff.as_bytes());
                }
                if self.check {
                    std::process::exit(1);
                }
            }

            convert_solar_errors(compiler.dcx())
        })
    }

    /// Returns whether `FmtArgs` was configured with `--watch`
    pub fn is_watch(&self) -> bool {
        self.watch.watch.is_some()
    }
}
/// Where `forge fmt` reads its input from.
#[derive(Debug)]
enum Input {
    // Source piped via stdin (`forge fmt -`).
    Stdin,
    // Explicit or discovered source file paths.
    Paths(Vec<PathBuf>),
}
/// A zero-based line-number cell for diff output; `None` renders as blank
/// four-character padding so gutters stay aligned.
struct Line(Option<usize>);

impl fmt::Display for Line {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Some(idx) = self.0 {
            // Stored zero-based, printed one-based, left-aligned in 4 columns.
            write!(f, "{:<4}", idx + 1)
        } else {
            f.write_str("    ")
        }
    }
}
/// Renders a colorized diff (hunks with 3 lines of context) for a single file,
/// headed by `Diff in <name>:`.
fn format_diff_summary<'a>(name: &str, diff: &'a TextDiff<'a, 'a, '_, str>) -> String {
    let cap = 128;
    let mut diff_summary = String::with_capacity(cap);
    let _ = writeln!(diff_summary, "Diff in {name}:");
    for (j, group) in diff.grouped_ops(3).into_iter().enumerate() {
        // Separate non-adjacent hunks with a horizontal rule.
        if j > 0 {
            let s =
                "--------------------------------------------------------------------------------";
            diff_summary.push_str(s);
        }
        for op in group {
            for change in diff.iter_inline_changes(&op) {
                let dimmed = Style::new().dim();
                // Gutter sign and base color per change kind.
                let (sign, s) = match change.tag() {
                    ChangeTag::Delete => ("-", Color::Red.foreground()),
                    ChangeTag::Insert => ("+", Color::Green.foreground()),
                    ChangeTag::Equal => (" ", dimmed),
                };
                // Old/new line numbers, then the colored +/-/space gutter.
                let _ = write!(
                    diff_summary,
                    "{}{} |{}",
                    Line(change.old_index()).paint(dimmed),
                    Line(change.new_index()).paint(dimmed),
                    sign.paint(s.bold()),
                );
                // Emphasized spans highlight the intra-line differences.
                for (emphasized, value) in change.iter_strings_lossy() {
                    let s = if emphasized { s.underline().bg(Color::Black) } else { s };
                    let _ = write!(diff_summary, "{}", value.paint(s));
                }
                if change.missing_newline() {
                    diff_summary.push('\n');
                }
            }
        }
    }
    diff_summary
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/remappings.rs | crates/forge/src/cmd/remappings.rs | use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::utils::LoadConfig;
use foundry_config::impl_figment_convert_basic;
use std::{collections::BTreeMap, path::PathBuf};
/// CLI arguments for `forge remappings`.
// NOTE: the `///` doc comments on fields below are user-visible clap help text.
#[derive(Clone, Debug, Parser)]
pub struct RemappingArgs {
    /// The project's root path.
    ///
    /// By default root of the Git repository, if in one,
    /// or the current working directory.
    #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
    root: Option<PathBuf>,

    /// Pretty-print the remappings, grouping each of them by context.
    #[arg(long)]
    pretty: bool,
}

// Generates the figment/config conversion for `RemappingArgs` (uses `root`).
impl_figment_convert_basic!(RemappingArgs);
impl RemappingArgs {
    /// Loads the project config and prints its resolved remappings.
    ///
    /// With `--pretty`, remappings are grouped by context (or "Global:") and
    /// the context is stripped from each entry so it is not printed twice.
    pub fn run(self) -> Result<()> {
        let config = self.load_config()?;

        if !self.pretty {
            for remapping in config.remappings {
                sh_println!("{remapping}")?;
            }
            return Ok(());
        }

        // Bucket remappings by their context; BTreeMap gives stable ordering.
        let mut groups = BTreeMap::<_, Vec<_>>::new();
        for remapping in config.remappings {
            groups.entry(remapping.context.clone()).or_default().push(remapping);
        }

        for (context, remappings) in groups {
            match context {
                Some(ctx) => sh_println!("Context: {ctx}")?,
                None => sh_println!("Global:")?,
            }
            for mut remapping in remappings {
                remapping.context = None; // avoid writing context twice
                sh_println!("- {remapping}")?;
            }
            sh_println!()?;
        }
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/compiler.rs | crates/forge/src/cmd/compiler.rs | use clap::{Parser, Subcommand, ValueHint};
use eyre::Result;
use foundry_common::shell;
use foundry_compilers::{Graph, artifacts::EvmVersion};
use foundry_config::Config;
use semver::Version;
use serde::Serialize;
use std::{collections::BTreeMap, path::PathBuf};
/// CLI arguments for `forge compiler`.
#[derive(Debug, Parser)]
pub struct CompilerArgs {
    // The subcommand to dispatch to; currently only `resolve`.
    #[command(subcommand)]
    pub sub: CompilerSubcommands,
}

impl CompilerArgs {
    // Dispatches to the selected subcommand.
    pub fn run(self) -> Result<()> {
        match self.sub {
            CompilerSubcommands::Resolve(args) => args.run(),
        }
    }
}

#[derive(Debug, Subcommand)]
pub enum CompilerSubcommands {
    /// Retrieves the resolved version(s) of the compiler within the project.
    #[command(visible_alias = "r")]
    Resolve(ResolveArgs),
}
/// Resolved compiler within the project.
///
/// Serialized for `--json` output; see `ResolveArgs::run` for how fields are
/// populated per verbosity level.
#[derive(Serialize)]
struct ResolvedCompiler {
    /// Compiler version.
    version: Version,
    /// Max supported EVM version of compiler.
    ///
    /// Only populated when verbosity > 1.
    #[serde(skip_serializing_if = "Option::is_none")]
    evm_version: Option<EvmVersion>,
    /// Source paths.
    ///
    /// Cleared (and thus not serialized) at verbosity 0.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    paths: Vec<String>,
}
/// CLI arguments for `forge compiler resolve`.
// NOTE: the `///` doc comments on fields below are user-visible clap help text.
#[derive(Debug, Parser)]
pub struct ResolveArgs {
    /// The root directory
    #[arg(long, short, value_hint = ValueHint::DirPath, value_name = "PATH")]
    root: Option<PathBuf>,

    /// Skip files that match the given regex pattern.
    #[arg(long, short, value_name = "REGEX")]
    skip: Option<regex::Regex>,
}
impl ResolveArgs {
    /// Resolves which compiler version(s) the project's sources require and
    /// prints them grouped by language, as JSON in JSON mode or as text.
    ///
    /// Verbosity controls detail: 0 = versions only, 1 = per-version source
    /// paths, >1 = additionally the max supported EVM version.
    pub fn run(self) -> Result<()> {
        let Self { root, skip } = self;
        let root = root.unwrap_or_else(|| PathBuf::from("."));
        let config = Config::load_with_root(&root)?;
        let project = config.project()?;
        let graph = Graph::resolve(&project.paths)?;
        let sources = graph.into_sources_by_version(&project)?.sources;

        let mut output: BTreeMap<String, Vec<ResolvedCompiler>> = BTreeMap::new();

        for (language, sources) in sources {
            let mut versions_with_paths: Vec<ResolvedCompiler> = sources
                .iter()
                .map(|(version, sources, _)| {
                    let paths: Vec<String> = sources
                        .iter()
                        .filter_map(|(path_file, _)| {
                            // Show paths relative to the project root when possible.
                            let path_str = path_file
                                .strip_prefix(&project.paths.root)
                                .unwrap_or(path_file)
                                .to_path_buf()
                                .display()
                                .to_string();

                            // Skip files that match the given regex pattern.
                            if let Some(ref regex) = skip
                                && regex.is_match(&path_str)
                            {
                                return None;
                            }

                            Some(path_str)
                        })
                        .collect();

                    // EVM version lookup is only needed at verbosity > 1.
                    let evm_version = if shell::verbosity() > 1 {
                        let evm = EvmVersion::default()
                            .normalize_version_solc(version)
                            .unwrap_or_default();
                        Some(evm)
                    } else {
                        None
                    };

                    ResolvedCompiler { version: version.clone(), evm_version, paths }
                })
                .filter(|version| !version.paths.is_empty())
                .collect();

            // Sort by SemVer version.
            versions_with_paths.sort_by(|v1, v2| Version::cmp(&v1.version, &v2.version));

            // Skip language if no paths are found after filtering.
            if !versions_with_paths.is_empty() {
                // Clear paths if verbosity is 0, performed only after filtering to avoid being
                // skipped.
                if shell::verbosity() == 0 {
                    versions_with_paths.iter_mut().for_each(|version| version.paths.clear());
                }
                output.insert(language.to_string(), versions_with_paths);
            }
        }

        if shell::is_json() {
            sh_println!("{}", serde_json::to_string(&output)?)?;
            return Ok(());
        }

        for (language, compilers) in &output {
            match shell::verbosity() {
                0 => sh_println!("{language}:")?,
                _ => sh_println!("{language}:\n")?,
            }

            for resolved_compiler in compilers {
                let version = &resolved_compiler.version;
                match shell::verbosity() {
                    0 => sh_println!("- {version}")?,
                    _ => {
                        if let Some(evm) = &resolved_compiler.evm_version {
                            sh_println!("{version} (<= {evm}):")?
                        } else {
                            sh_println!("{version}:")?
                        }
                    }
                }

                // Tree-style path listing at verbosity > 0.
                if shell::verbosity() > 0 {
                    let paths = &resolved_compiler.paths;
                    for (idx, path) in paths.iter().enumerate() {
                        if idx == paths.len() - 1 {
                            sh_println!("└── {path}\n")?
                        } else {
                            sh_println!("├── {path}")?
                        }
                    }
                }
            }

            if shell::verbosity() == 0 {
                sh_println!()?
            }
        }

        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/generate/mod.rs | crates/forge/src/cmd/generate/mod.rs | use clap::{Parser, Subcommand};
use eyre::Result;
use foundry_common::fs;
use std::path::Path;
use yansi::Paint;
/// CLI arguments for `forge generate`.
#[derive(Debug, Parser)]
pub struct GenerateArgs {
    // Which generator to run; currently only `test`.
    #[command(subcommand)]
    pub sub: GenerateSubcommands,
}

#[derive(Debug, Subcommand)]
pub enum GenerateSubcommands {
    /// Scaffolds test file for given contract.
    Test(GenerateTestArgs),
}
// Arguments for `forge generate test`.
#[derive(Debug, Parser)]
pub struct GenerateTestArgs {
    /// Contract name for test generation.
    #[arg(long, short, value_name = "CONTRACT_NAME")]
    pub contract_name: String,
}
impl GenerateTestArgs {
    /// Generates `test/<ContractName>.t.sol` from the bundled test template.
    ///
    /// The given contract name is converted to PascalCase for the type name
    /// and camelCase for the instance variable before substitution.
    pub fn run(self) -> Result<()> {
        sh_warn!("`forge generate` is deprecated and will be removed in a future version")?;
        let contract_name = format_identifier(&self.contract_name, true);
        let instance_name = format_identifier(&self.contract_name, false);

        // Fill in the scaffolding template bundled with the binary.
        let test_content = include_str!("../../../assets/generated/TestTemplate.t.sol")
            .replace("{contract_name}", &contract_name)
            .replace("{instance_name}", &instance_name);

        // Create the test directory if it doesn't exist.
        fs::create_dir_all("test")?;

        // Define the test file path and write the rendered template to it.
        let test_file_path = Path::new("test").join(format!("{contract_name}.t.sol"));
        fs::write(&test_file_path, test_content)?;

        // Use `display()` rather than `to_str().unwrap()` so a non-UTF-8 path
        // cannot panic.
        sh_println!("{} test file: {}", "Generated".green(), test_file_path.display())?;
        Ok(())
    }
}
/// Utility function to convert a whitespace-separated identifier to PascalCase
/// (`is_pascal_case`) or camelCase, preserving the rest of each word verbatim.
///
/// Operates on `char`s instead of a byte-based `split_at(1)`, so words whose
/// first character is multi-byte UTF-8 (e.g. `é`) no longer panic on a
/// non-char-boundary split.
fn format_identifier(input: &str, is_pascal_case: bool) -> String {
    let mut result = String::new();
    // camelCase lowercases the very first word's first letter.
    let mut capitalize_next = is_pascal_case;

    for word in input.split_whitespace() {
        let mut chars = word.chars();
        if let Some(first) = chars.next() {
            // Adjust only the first character; keep the remainder untouched.
            if capitalize_next {
                result.extend(first.to_uppercase());
            } else {
                result.extend(first.to_lowercase());
            }
            result.push_str(chars.as_str());
            capitalize_next = true;
        }
    }
    result
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/test/filter.rs | crates/forge/src/cmd/test/filter.rs | use clap::Parser;
use foundry_common::TestFilter;
use foundry_compilers::{FileFilter, ProjectPathsConfig};
use foundry_config::{Config, filter::GlobMatcher};
use std::{fmt, path::Path};
/// The filter to use during testing.
///
/// See also `FileFilter`.
// NOTE: the `///` doc comments on fields below are user-visible clap help text.
#[derive(Clone, Parser)]
#[command(next_help_heading = "Test filtering")]
pub struct FilterArgs {
    /// Only run test functions matching the specified regex pattern.
    #[arg(long = "match-test", visible_alias = "mt", value_name = "REGEX")]
    pub test_pattern: Option<regex::Regex>,

    /// Only run test functions that do not match the specified regex pattern.
    #[arg(long = "no-match-test", visible_alias = "nmt", value_name = "REGEX")]
    pub test_pattern_inverse: Option<regex::Regex>,

    /// Only run tests in contracts matching the specified regex pattern.
    #[arg(long = "match-contract", visible_alias = "mc", value_name = "REGEX")]
    pub contract_pattern: Option<regex::Regex>,

    /// Only run tests in contracts that do not match the specified regex pattern.
    #[arg(long = "no-match-contract", visible_alias = "nmc", value_name = "REGEX")]
    pub contract_pattern_inverse: Option<regex::Regex>,

    /// Only run tests in source files matching the specified glob pattern.
    #[arg(long = "match-path", visible_alias = "mp", value_name = "GLOB")]
    pub path_pattern: Option<GlobMatcher>,

    /// Only run tests in source files that do not match the specified glob pattern.
    #[arg(
        id = "no-match-path",
        long = "no-match-path",
        visible_alias = "nmp",
        value_name = "GLOB"
    )]
    pub path_pattern_inverse: Option<GlobMatcher>,

    /// Only show coverage for files that do not match the specified regex pattern.
    #[arg(long = "no-match-coverage", visible_alias = "nmco", value_name = "REGEX")]
    pub coverage_pattern_inverse: Option<regex::Regex>,
}
impl FilterArgs {
    /// Returns true if no test-selection pattern is configured.
    ///
    /// `coverage_pattern_inverse` is deliberately not considered — it only
    /// affects coverage display, not which tests run.
    pub fn is_empty(&self) -> bool {
        !(self.test_pattern.is_some()
            || self.test_pattern_inverse.is_some()
            || self.contract_pattern.is_some()
            || self.contract_pattern_inverse.is_some()
            || self.path_pattern.is_some()
            || self.path_pattern_inverse.is_some())
    }

    /// Merges the set filter globs with the config's values.
    ///
    /// CLI-provided patterns always win; config values are only used as
    /// fallbacks for patterns the user did not set.
    pub fn merge_with_config(mut self, config: &Config) -> ProjectPathsAwareFilter {
        // Fills `dst` from the config fallback when the CLI left it unset.
        fn fill<T>(dst: &mut Option<T>, fallback: Option<T>) {
            if dst.is_none() {
                *dst = fallback;
            }
        }
        fill(&mut self.test_pattern, config.test_pattern.clone().map(Into::into));
        fill(&mut self.test_pattern_inverse, config.test_pattern_inverse.clone().map(Into::into));
        fill(&mut self.contract_pattern, config.contract_pattern.clone().map(Into::into));
        fill(
            &mut self.contract_pattern_inverse,
            config.contract_pattern_inverse.clone().map(Into::into),
        );
        fill(&mut self.path_pattern, config.path_pattern.clone().map(Into::into));
        fill(&mut self.path_pattern_inverse, config.path_pattern_inverse.clone().map(Into::into));
        fill(
            &mut self.coverage_pattern_inverse,
            config.coverage_pattern_inverse.clone().map(Into::into),
        );
        ProjectPathsAwareFilter { args_filter: self, paths: config.project_paths() }
    }
}
impl fmt::Debug for FilterArgs {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render each pattern as its source string (compact, human-readable),
        // keyed by the kebab-case CLI flag name.
        let mut s = f.debug_struct("FilterArgs");
        s.field("match-test", &self.test_pattern.as_ref().map(|r| r.as_str()));
        s.field("no-match-test", &self.test_pattern_inverse.as_ref().map(|r| r.as_str()));
        s.field("match-contract", &self.contract_pattern.as_ref().map(|r| r.as_str()));
        s.field("no-match-contract", &self.contract_pattern_inverse.as_ref().map(|r| r.as_str()));
        s.field("match-path", &self.path_pattern.as_ref().map(|g| g.as_str()));
        s.field("no-match-path", &self.path_pattern_inverse.as_ref().map(|g| g.as_str()));
        s.field("no-match-coverage", &self.coverage_pattern_inverse.as_ref().map(|g| g.as_str()));
        s.finish_non_exhaustive()
    }
}
impl FileFilter for FilterArgs {
    /// Checks `file` against the configured path globs.
    ///
    /// Returns `true` by default when no path pattern is set.
    fn is_match(&self, file: &Path) -> bool {
        self.matches_path(file)
    }
}
impl TestFilter for FilterArgs {
    /// A signature matches when it satisfies the positive pattern (if any) and
    /// does not satisfy the inverse pattern (if any); unset patterns pass.
    fn matches_test(&self, test_signature: &str) -> bool {
        self.test_pattern.as_ref().map_or(true, |re| re.is_match(test_signature))
            && self.test_pattern_inverse.as_ref().map_or(true, |re| !re.is_match(test_signature))
    }

    fn matches_contract(&self, contract_name: &str) -> bool {
        self.contract_pattern.as_ref().map_or(true, |re| re.is_match(contract_name))
            && self.contract_pattern_inverse.as_ref().map_or(true, |re| !re.is_match(contract_name))
    }

    fn matches_path(&self, path: &Path) -> bool {
        self.path_pattern.as_ref().map_or(true, |glob| glob.is_match(path))
            && self.path_pattern_inverse.as_ref().map_or(true, |glob| !glob.is_match(path))
    }
}
impl fmt::Display for FilterArgs {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Prints one tab-indented `<flag>: `pattern`` line per configured
        // filter; unset filters produce no output.
        let mut line = |label: &str, pattern: Option<&str>| match pattern {
            Some(p) => writeln!(f, "\t{label}: `{p}`"),
            None => Ok(()),
        };
        line("match-test", self.test_pattern.as_ref().map(|p| p.as_str()))?;
        line("no-match-test", self.test_pattern_inverse.as_ref().map(|p| p.as_str()))?;
        line("match-contract", self.contract_pattern.as_ref().map(|p| p.as_str()))?;
        line("no-match-contract", self.contract_pattern_inverse.as_ref().map(|p| p.as_str()))?;
        line("match-path", self.path_pattern.as_ref().map(|p| p.as_str()))?;
        line("no-match-path", self.path_pattern_inverse.as_ref().map(|p| p.as_str()))?;
        line("no-match-coverage", self.coverage_pattern_inverse.as_ref().map(|p| p.as_str()))?;
        Ok(())
    }
}
/// A filter that combines all command line arguments and the paths of the current projects
#[derive(Clone, Debug)]
pub struct ProjectPathsAwareFilter {
    // The merged CLI/config filter patterns.
    args_filter: FilterArgs,
    // Project path configuration, used to relativize candidate paths against
    // the project root and to exclude library sources from test matching.
    paths: ProjectPathsConfig,
}
impl ProjectPathsAwareFilter {
    /// Returns true if the filter is empty.
    ///
    /// Delegates to [`FilterArgs::is_empty`], which does not consider the
    /// coverage-only `coverage_pattern_inverse`.
    pub fn is_empty(&self) -> bool {
        self.args_filter.is_empty()
    }

    /// Returns the CLI arguments.
    pub fn args(&self) -> &FilterArgs {
        &self.args_filter
    }

    /// Returns the CLI arguments mutably.
    pub fn args_mut(&mut self) -> &mut FilterArgs {
        &mut self.args_filter
    }

    /// Returns the project paths.
    pub fn paths(&self) -> &ProjectPathsConfig {
        &self.paths
    }
}
impl FileFilter for ProjectPathsAwareFilter {
    /// Matches `file` against the CLI filter after stripping the project root,
    /// so glob patterns apply to project-relative paths.
    ///
    /// Returns `true` by default when no path pattern is set.
    fn is_match(&self, file: &Path) -> bool {
        let relative = file.strip_prefix(&self.paths.root).unwrap_or(file);
        self.args_filter.is_match(relative)
    }
}
impl TestFilter for ProjectPathsAwareFilter {
    fn matches_test(&self, test_signature: &str) -> bool {
        self.args_filter.matches_test(test_signature)
    }

    fn matches_contract(&self, contract_name: &str) -> bool {
        self.args_filter.matches_contract(contract_name)
    }

    /// Matches project-relative paths, additionally rejecting anything under a
    /// library directory — files that belong to a library are never tested.
    fn matches_path(&self, path: &Path) -> bool {
        let relative = path.strip_prefix(&self.paths.root).unwrap_or(path);
        self.args_filter.matches_path(relative) && !self.paths.has_library_ancestor(relative)
    }
}
impl fmt::Display for ProjectPathsAwareFilter {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate rendering to the underlying CLI filter.
        fmt::Display::fmt(&self.args_filter, f)
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/test/summary.rs | crates/forge/src/cmd/test/summary.rs | use crate::cmd::test::TestOutcome;
use comfy_table::{
Cell, Color, Row, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN,
};
use foundry_common::shell;
use foundry_evm::executors::invariant::InvariantMetrics;
use itertools::Itertools;
use serde_json::json;
use std::{collections::HashMap, fmt::Display};
/// Represents a test summary report.
pub struct TestSummaryReport {
    /// Whether the report should be detailed.
    is_detailed: bool,
    /// The test outcome to report.
    outcome: TestOutcome,
}

impl TestSummaryReport {
    /// Creates a report over `outcome`; `is_detailed` adds file-path and
    /// duration columns/fields to the rendered output.
    pub fn new(is_detailed: bool, outcome: TestOutcome) -> Self {
        Self { is_detailed, outcome }
    }
}
impl Display for TestSummaryReport {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        // JSON mode emits machine-readable output; otherwise render a table,
        // preceded by a blank line to separate it from the preceding output.
        if shell::is_json() {
            writeln!(f, "{}", self.format_json_output(&self.is_detailed, &self.outcome))
        } else {
            writeln!(f, "\n{}", self.format_table_output(&self.is_detailed, &self.outcome))
        }
    }
}
impl TestSummaryReport {
    // Helper function to format the JSON output.
    //
    // Emits `{ "results": [ { "suite", "passed", "failed", "skipped", ... } ] }`,
    // with `file_path` and `duration` appended per suite in detailed mode.
    fn format_json_output(&self, is_detailed: &bool, outcome: &TestOutcome) -> String {
        let output = json!({
            "results": outcome.results.iter().map(|(contract, suite)| {
                // Suite keys look like `path:ContractName`; a key without `:`
                // would panic here — assumed invariant of how suites are keyed.
                // TODO(review): confirm against the suite-key producer.
                let (suite_path, suite_name) = contract.split_once(':').unwrap();
                let passed = suite.successes().count();
                let failed = suite.failures().count();
                let skipped = suite.skips().count();
                let mut result = json!({
                    "suite": suite_name,
                    "passed": passed,
                    "failed": failed,
                    "skipped": skipped,
                });
                if *is_detailed {
                    result["file_path"] = serde_json::Value::String(suite_path.to_string());
                    result["duration"] = serde_json::Value::String(format!("{:.2?}", suite.duration));
                }
                result
            }).collect::<Vec<serde_json::Value>>(),
        });

        serde_json::to_string_pretty(&output).unwrap()
    }

    // Helper function to build the summary table: one row per suite, with
    // pass/fail/skip counts colored only when non-zero.
    //
    // NOTE(review): takes `is_detailed: &bool` but the row-building loop below
    // reads `self.is_detailed` — both are the same value at the only call site
    // (`Display::fmt`), but the inconsistency is worth cleaning up.
    fn format_table_output(&self, is_detailed: &bool, outcome: &TestOutcome) -> Table {
        let mut table = Table::new();
        if shell::is_markdown() {
            table.load_preset(ASCII_MARKDOWN);
        } else {
            table.apply_modifier(UTF8_ROUND_CORNERS);
        }

        let mut row = Row::from(vec![
            Cell::new("Test Suite"),
            Cell::new("Passed").fg(Color::Green),
            Cell::new("Failed").fg(Color::Red),
            Cell::new("Skipped").fg(Color::Yellow),
        ]);
        if *is_detailed {
            row.add_cell(Cell::new("File Path").fg(Color::Cyan));
            row.add_cell(Cell::new("Duration").fg(Color::Cyan));
        }
        table.set_header(row);

        // Traverse the test_results vector and build the table
        for (contract, suite) in &outcome.results {
            let mut row = Row::new();
            // Same `path:ContractName` key invariant as in `format_json_output`.
            let (suite_path, suite_name) = contract.split_once(':').unwrap();

            let passed = suite.successes().count();
            let mut passed_cell = Cell::new(passed);

            let failed = suite.failures().count();
            let mut failed_cell = Cell::new(failed);

            let skipped = suite.skips().count();
            let mut skipped_cell = Cell::new(skipped);

            row.add_cell(Cell::new(suite_name));

            // Color a count cell only when it is non-zero.
            if passed > 0 {
                passed_cell = passed_cell.fg(Color::Green);
            }
            row.add_cell(passed_cell);

            if failed > 0 {
                failed_cell = failed_cell.fg(Color::Red);
            }
            row.add_cell(failed_cell);

            if skipped > 0 {
                skipped_cell = skipped_cell.fg(Color::Yellow);
            }
            row.add_cell(skipped_cell);

            if self.is_detailed {
                row.add_cell(Cell::new(suite_path));
                row.add_cell(Cell::new(format!("{:.2?}", suite.duration)));
            }

            table.add_row(row);
        }

        table
    }
}
/// Helper function to create the invariant metrics table.
///
/// ╭-----------------------+----------------+-------+---------+----------╮
/// | Contract | Selector | Calls | Reverts | Discards |
/// +=====================================================================+
/// | AnotherCounterHandler | doWork | 7451 | 123 | 4941 |
/// |-----------------------+----------------+-------+---------+----------|
/// | AnotherCounterHandler | doWorkThing | 7279 | 137 | 4849 |
/// |-----------------------+----------------+-------+---------+----------|
/// | CounterHandler | doAnotherThing | 7302 | 150 | 4794 |
/// |-----------------------+----------------+-------+---------+----------|
/// | CounterHandler | doSomething | 7382 | 160 |4794 |
/// ╰-----------------------+----------------+-------+---------+----------╯
pub(crate) fn format_invariant_metrics_table(
test_metrics: &HashMap<String, InvariantMetrics>,
) -> Table {
let mut table = Table::new();
if shell::is_markdown() {
table.load_preset(ASCII_MARKDOWN);
} else {
table.apply_modifier(UTF8_ROUND_CORNERS);
}
table.set_header(vec![
Cell::new("Contract"),
Cell::new("Selector"),
Cell::new("Calls").fg(Color::Green),
Cell::new("Reverts").fg(Color::Red),
Cell::new("Discards").fg(Color::Yellow),
]);
for name in test_metrics.keys().sorted() {
if let Some((contract, selector)) =
name.split_once(':').map_or(name.as_str(), |(_, contract)| contract).split_once('.')
{
let mut row = Row::new();
row.add_cell(Cell::new(contract));
row.add_cell(Cell::new(selector));
if let Some(metrics) = test_metrics.get(name) {
let calls_cell = Cell::new(metrics.calls).fg(if metrics.calls > 0 {
Color::Green
} else {
Color::White
});
let reverts_cell = Cell::new(metrics.reverts).fg(if metrics.reverts > 0 {
Color::Red
} else {
Color::White
});
let discards_cell = Cell::new(metrics.discards).fg(if metrics.discards > 0 {
Color::Yellow
} else {
Color::White
});
row.add_cell(calls_cell);
row.add_cell(reverts_cell);
row.add_cell(discards_cell);
}
table.add_row(row);
}
}
table
}
#[cfg(test)]
mod tests {
    use crate::cmd::test::summary::format_invariant_metrics_table;
    use foundry_evm::executors::invariant::InvariantMetrics;
    use std::collections::HashMap;

    // Verifies key parsing (optional `path:` prefix is stripped, `.` splits
    // contract from selector) and that rows come out sorted by key.
    #[test]
    fn test_invariant_metrics_table() {
        let mut test_metrics = HashMap::new();
        // Bare `Contract.selector` key.
        test_metrics.insert(
            "SystemConfig.setGasLimit".to_string(),
            InvariantMetrics { calls: 10, reverts: 1, discards: 1 },
        );
        // Key with a `path:` prefix, which should be stripped before display.
        test_metrics.insert(
            "src/universal/Proxy.sol:Proxy.changeAdmin".to_string(),
            InvariantMetrics { calls: 20, reverts: 2, discards: 2 },
        );

        let table = format_invariant_metrics_table(&test_metrics);
        assert_eq!(table.row_count(), 2);

        // Keys sort with `SystemConfig…` first ('S' < 's' in byte order).
        let mut first_row_content = table.row(0).unwrap().cell_iter();
        assert_eq!(first_row_content.next().unwrap().content(), "SystemConfig");
        assert_eq!(first_row_content.next().unwrap().content(), "setGasLimit");
        assert_eq!(first_row_content.next().unwrap().content(), "10");
        assert_eq!(first_row_content.next().unwrap().content(), "1");
        assert_eq!(first_row_content.next().unwrap().content(), "1");

        let mut second_row_content = table.row(1).unwrap().cell_iter();
        assert_eq!(second_row_content.next().unwrap().content(), "Proxy");
        assert_eq!(second_row_content.next().unwrap().content(), "changeAdmin");
        assert_eq!(second_row_content.next().unwrap().content(), "20");
        assert_eq!(second_row_content.next().unwrap().content(), "2");
        assert_eq!(second_row_content.next().unwrap().content(), "2");
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/test/mod.rs | crates/forge/src/cmd/test/mod.rs | use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
use crate::{
MultiContractRunner, MultiContractRunnerBuilder,
decode::decode_console_logs,
gas_report::GasReport,
multi_runner::matches_artifact,
result::{SuiteResult, TestOutcome, TestStatus},
traces::{
CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
debug::{ContractSources, DebugTraceIdentifier},
decode_trace_arena, folded_stack_trace,
identifier::SignaturesIdentifier,
},
};
use alloy_primitives::U256;
use chrono::Utc;
use clap::{Parser, ValueHint};
use eyre::{Context, OptionExt, Result, bail};
use foundry_cli::{
opts::{BuildOpts, EvmArgs, GlobalArgs},
utils::{self, LoadConfig},
};
use foundry_common::{EmptyTestFilter, TestFunctionExt, compile::ProjectCompiler, fs, shell};
use foundry_compilers::{
ProjectCompileOutput,
artifacts::output_selection::OutputSelection,
compilers::{
Language,
multi::{MultiCompiler, MultiCompilerLanguage},
},
utils::source_files_iter,
};
use foundry_config::{
Config, figment,
figment::{
Metadata, Profile, Provider,
value::{Dict, Map},
},
filter::GlobMatcher,
};
use foundry_debugger::Debugger;
use foundry_evm::{
opts::EvmOpts,
traces::{backtrace::BacktraceBuilder, identifier::TraceIdentifiers, prune_trace_depth},
};
use regex::Regex;
use std::{
collections::{BTreeMap, BTreeSet},
fmt::Write,
path::{Path, PathBuf},
sync::{Arc, mpsc::channel},
time::{Duration, Instant},
};
use yansi::Paint;
mod filter;
mod summary;
use crate::{result::TestKind, traces::render_trace_arena_inner};
pub use filter::FilterArgs;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
use summary::{TestSummaryReport, format_invariant_metrics_table};
// Loads project's figment and merges the build cli arguments into it
foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);

/// CLI arguments for `forge test`.
// NOTE: field `///` docs below are rendered as `--help` text by clap.
#[derive(Clone, Debug, Parser)]
#[command(next_help_heading = "Test options")]
pub struct TestArgs {
    // Include global options for users of this struct.
    #[command(flatten)]
    pub global: GlobalArgs,

    /// The contract file you want to test, it's a shortcut for --match-path.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: Option<GlobMatcher>,

    /// Run a single test in the debugger.
    ///
    /// The matching test will be opened in the debugger regardless of the outcome of the test.
    ///
    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
    debug: bool,

    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
    ///
    /// A flame graph is used to visualize which functions or operations within the smart contract
    /// are consuming the most gas overall in a sorted manner.
    #[arg(long)]
    flamegraph: bool,

    /// Generate a flamechart for a single test. Implies `--decode-internal`.
    ///
    /// A flame chart shows the gas usage over time, illustrating when each function is
    /// called (execution order) and how much gas it consumes at each point in the timeline.
    #[arg(long, conflicts_with = "flamegraph")]
    flamechart: bool,

    /// Identify internal functions in traces.
    ///
    /// This will trace internal functions and decode stack parameters.
    ///
    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
    /// single function is matched, similarly to `--debug`, for performance reasons.
    #[arg(long)]
    decode_internal: bool,

    /// Dumps all debugger steps to file.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    dump: Option<PathBuf>,

    /// Print a gas report.
    #[arg(long, env = "FORGE_GAS_REPORT")]
    gas_report: bool,

    /// Check gas snapshots against previous runs.
    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
    gas_snapshot_check: Option<bool>,

    /// Enable/disable recording of gas snapshot results.
    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
    gas_snapshot_emit: Option<bool>,

    /// Exit with code 0 even if a test fails.
    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
    allow_failure: bool,

    /// Suppress successful test traces and show only traces for failures.
    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
    suppress_successful_traces: bool,

    /// Defines the depth of a trace
    #[arg(long)]
    trace_depth: Option<usize>,

    /// Output test results as JUnit XML report.
    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
    pub junit: bool,

    /// Stop running tests after the first failure.
    #[arg(long)]
    pub fail_fast: bool,

    /// The Etherscan (or equivalent) API key.
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    etherscan_api_key: Option<String>,

    /// List tests instead of running them.
    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
    list: bool,

    /// Set seed used to generate randomness during your fuzz runs.
    #[arg(long)]
    pub fuzz_seed: Option<U256>,

    // NOTE(review): no `///` doc comment, so `--fuzz-runs` has no help text;
    // presumably overrides the configured number of fuzz runs — confirm and
    // consider adding a doc comment.
    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
    pub fuzz_runs: Option<u64>,

    /// Timeout for each fuzz run in seconds.
    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
    pub fuzz_timeout: Option<u64>,

    /// File to rerun fuzz failures from.
    #[arg(long)]
    pub fuzz_input_file: Option<String>,

    /// Show test execution progress.
    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
    pub show_progress: bool,

    /// Re-run recorded test failures from last run.
    /// If no failure recorded then regular test run is performed.
    #[arg(long)]
    pub rerun: bool,

    /// Print test summary table.
    #[arg(long, help_heading = "Display options")]
    pub summary: bool,

    /// Print detailed test summary table.
    #[arg(long, help_heading = "Display options", requires = "summary")]
    pub detailed: bool,

    /// Disables the labels in the traces.
    #[arg(long, help_heading = "Display options")]
    pub disable_labels: bool,

    #[command(flatten)]
    filter: FilterArgs,

    #[command(flatten)]
    evm: EvmArgs,

    #[command(flatten)]
    pub build: BuildOpts,

    #[command(flatten)]
    pub watch: WatchArgs,
}
impl TestArgs {
    /// Entry point for `forge test`: compiles the project and runs all tests
    /// matching the configured filter.
    pub async fn run(mut self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.compile_and_run().await
    }
    /// Returns a list of files that need to be compiled in order to run all the tests that match
    /// the given filter.
    ///
    /// This means that it will return all sources that are not test contracts or that match the
    /// filter. We want to compile all non-test sources always because tests might depend on them
    /// dynamically through cheatcodes.
    #[instrument(target = "forge::test", skip_all)]
    pub fn get_sources_to_compile(
        &self,
        config: &Config,
        test_filter: &ProjectPathsAwareFilter,
    ) -> Result<BTreeSet<PathBuf>> {
        // An empty filter doesn't filter out anything.
        // We can still optimize slightly by excluding scripts.
        if test_filter.is_empty() {
            return Ok(source_files_iter(&config.src, MultiCompilerLanguage::FILE_EXTENSIONS)
                .chain(source_files_iter(&config.test, MultiCompilerLanguage::FILE_EXTENSIONS))
                .collect());
        }

        // ABI-only output selection: enough to decide which artifacts match
        // the filter without paying for full compilation output.
        // NOTE(review): `create_project(true, true)` — confirm the meaning of
        // the two boolean flags before changing them.
        let mut project = config.create_project(true, true)?;
        project.update_output_selection(|selection| {
            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
        });

        let output = project.compile()?;

        if output.has_compiler_errors() {
            sh_println!("{output}")?;
            eyre::bail!("Compilation failed");
        }

        // Keep every artifact under `src` (tests may depend on it via
        // cheatcodes) plus any artifact that matches the test filter.
        Ok(output
            .artifact_ids()
            .filter_map(|(id, artifact)| artifact.abi.as_ref().map(|abi| (id, abi)))
            .filter(|(id, abi)| {
                id.source.starts_with(&config.src) || matches_artifact(test_filter, id, abi)
            })
            .map(|(id, _)| id.source)
            .collect())
    }
    /// Executes all the tests in the project.
    ///
    /// This will trigger the build process first. On success all test contracts that match the
    /// configured filter will be executed
    ///
    /// Returns the test results for all matching tests.
    pub async fn compile_and_run(&mut self) -> Result<TestOutcome> {
        // Merge all configs.
        let (mut config, evm_opts) = self.load_config_and_evm_opts()?;

        // Install missing dependencies.
        if install::install_missing_dependencies(&mut config).await && config.auto_detect_remappings
        {
            // need to re-configure here to also catch additional remappings
            config = self.load_config()?;
        }

        // Set up the project.
        let project = config.project()?;

        // Build the combined (CLI + config) test filter.
        let filter = self.filter(&config)?;
        trace!(target: "forge::test", ?filter, "using filter");

        // Compile only the sources needed for the filtered tests; suppress
        // compiler output when emitting machine-readable (JSON/JUnit) results.
        let compiler = ProjectCompiler::new()
            .dynamic_test_linking(config.dynamic_test_linking)
            .quiet(shell::is_json() || self.junit)
            .files(self.get_sources_to_compile(&config, &filter)?);

        let output = compiler.compile(&project)?;

        self.run_tests(&project.paths.root, config, evm_opts, &output, &filter, false).await
    }
    /// Executes all the tests in the project.
    ///
    /// See [`Self::compile_and_run`] for more details.
    pub async fn run_tests(
        &mut self,
        project_root: &Path,
        mut config: Config,
        mut evm_opts: EvmOpts,
        output: &ProjectCompileOutput,
        filter: &ProjectPathsAwareFilter,
        coverage: bool,
    ) -> Result<TestOutcome> {
        // Explicitly enable isolation for gas reports for more correct gas accounting.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            // Do not collect gas report traces if gas report is not enabled.
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Create test options from general project settings and compiler output.
        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Determine print verbosity and executor verbosity.
        // The user-facing level is remembered in `verbosity`; the executor level
        // is bumped to 3 so traces are collected for gas reports / flame output.
        let verbosity = evm_opts.verbosity;
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        let env = evm_opts.evm_env().await?;

        // Enable internal tracing for more informative flamegraph.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        // Choose the internal function tracing mode, if --decode-internal is provided.
        let decode_internal = if self.decode_internal {
            // If more than one function matched, we enable simple tracing.
            // If only one function matched, we enable full tracing. This is done in `run_tests`.
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        // Prepare the test builder.
        let config = Arc::new(config);

        let runner = MultiContractRunnerBuilder::new(config.clone())
            .set_debug(should_debug)
            .set_decode_internal(decode_internal)
            .initial_balance(evm_opts.initial_balance)
            .evm_spec(config.evm_spec_id())
            .sender(evm_opts.sender)
            .with_fork(evm_opts.get_fork(&config, env.clone()))
            .enable_isolation(evm_opts.isolate)
            .networks(evm_opts.networks)
            .fail_fast(self.fail_fast)
            .set_coverage(coverage)
            .build::<MultiCompiler>(output, env, evm_opts)?;

        // Keep the linked libraries around for source mapping in the debugger.
        let libraries = runner.libraries.clone();
        let mut outcome = self.run_tests_inner(runner, config, verbosity, filter, output).await?;

        if should_draw {
            // `run_tests_inner` enforces that exactly one test matched, so the
            // first (and only) result is the one to render.
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            // Decode traces.
            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                options.flame_chart = true;
                // Flame charts expect the stacks in reverse (chronological) order.
                fst.reverse();
            }

            // Generate SVG.
            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Open SVG in default program.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Get first non-empty suite result. We will have only one such entry.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;
            let sources =
                ContractSources::from_project_output(output, project_root, Some(&libraries))?;

            // Run the debugger.
            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            if let Some(dump_path) = &self.dump {
                // --dump: write the debugger steps to file instead of opening the TUI.
                debugger.dump_to_file(dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }
/// Run all tests that matches the filter predicate from a test runner
async fn run_tests_inner(
&self,
mut runner: MultiContractRunner,
config: Arc<Config>,
verbosity: u8,
filter: &ProjectPathsAwareFilter,
output: &ProjectCompileOutput,
) -> eyre::Result<TestOutcome> {
if self.list {
return list(runner, filter);
}
trace!(target: "forge::test", "running all tests");
// If we need to render to a serialized format, we should not print anything else to stdout.
let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();
let num_filtered = runner.matching_test_functions(filter).count();
if num_filtered == 0 {
let mut total_tests = num_filtered;
if !filter.is_empty() {
total_tests = runner.matching_test_functions(&EmptyTestFilter::default()).count();
}
if total_tests == 0 {
sh_println!(
"No tests found in project! Forge looks for functions that start with `test`"
)?;
} else {
let mut msg = format!("no tests match the provided pattern:\n{filter}");
// Try to suggest a test when there's no match.
if let Some(test_pattern) = &filter.args().test_pattern {
let test_name = test_pattern.as_str();
// Filter contracts but not test functions.
let candidates = runner.all_test_functions(filter).map(|f| &f.name);
if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
write!(msg, "\nDid you mean `{suggestion}`?")?;
}
}
sh_warn!("{msg}")?;
}
return Ok(TestOutcome::empty(Some(runner), false));
}
if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
let action = if self.flamegraph {
"generate a flamegraph"
} else if self.flamechart {
"generate a flamechart"
} else {
"run the debugger"
};
let filter = if filter.is_empty() {
String::new()
} else {
format!("\n\nFilter used:\n{filter}")
};
eyre::bail!(
"{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
Use --match-contract and --match-path to further limit the search.{filter}",
);
}
// If exactly one test matched, we enable full tracing.
if num_filtered == 1 && self.decode_internal {
runner.decode_internal = InternalTraceMode::Full;
}
// Run tests in a non-streaming fashion and collect results for serialization.
if !self.gas_report && !self.summary && shell::is_json() {
let mut results = runner.test_collect(filter)?;
results.values_mut().for_each(|suite_result| {
for test_result in suite_result.test_results.values_mut() {
if verbosity >= 2 {
// Decode logs at level 2 and above.
test_result.decoded_logs = decode_console_logs(&test_result.logs);
} else {
// Empty logs for non verbose runs.
test_result.logs = vec![];
}
}
});
sh_println!("{}", serde_json::to_string(&results)?)?;
return Ok(TestOutcome::new(Some(runner), results, self.allow_failure));
}
if self.junit {
let results = runner.test_collect(filter)?;
sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
return Ok(TestOutcome::new(Some(runner), results, self.allow_failure));
}
let remote_chain =
if runner.fork.is_some() { runner.env.tx.chain_id.map(Into::into) } else { None };
let known_contracts = runner.known_contracts.clone();
let libraries = runner.libraries.clone();
// Run tests in a streaming fashion.
let (tx, rx) = channel::<(String, SuiteResult)>();
let timer = Instant::now();
let show_progress = config.show_progress;
let handle = tokio::task::spawn_blocking({
let filter = filter.clone();
move || runner.test(&filter, tx, show_progress).map(|()| runner)
});
// Set up trace identifiers.
let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);
// Avoid using external identifiers for gas report as we decode more traces and this will be
// expensive.
if !self.gas_report {
identifier = identifier.with_external(&config, remote_chain)?;
}
// Build the trace decoder.
let mut builder = CallTraceDecoderBuilder::new()
.with_known_contracts(&known_contracts)
.with_label_disabled(self.disable_labels)
.with_verbosity(verbosity);
// Signatures are of no value for gas reports.
if !self.gas_report {
builder =
builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
}
if self.decode_internal {
let sources =
ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
}
let mut decoder = builder.build();
let mut gas_report = self.gas_report.then(|| {
GasReport::new(
config.gas_reports.clone(),
config.gas_reports_ignore.clone(),
config.gas_reports_include_tests,
)
});
let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();
let mut outcome = TestOutcome::empty(None, self.allow_failure);
let mut any_test_failed = false;
let mut backtrace_builder = None;
for (contract_name, mut suite_result) in rx {
let tests = &mut suite_result.test_results;
let has_tests = !tests.is_empty();
// Clear the addresses and labels from previous test.
decoder.clear_addresses();
// We identify addresses if we're going to print *any* trace or gas report.
let identify_addresses = verbosity >= 3
|| self.gas_report
|| self.debug
|| self.flamegraph
|| self.flamechart;
// Print suite header.
if !silent {
sh_println!()?;
for warning in &suite_result.warnings {
sh_warn!("{warning}")?;
}
if has_tests {
let len = tests.len();
let tests = if len > 1 { "tests" } else { "test" };
sh_println!("Ran {len} {tests} for {contract_name}")?;
}
}
// Process individual test results, printing logs and traces when necessary.
for (name, result) in tests {
let show_traces =
!self.suppress_successful_traces || result.status == TestStatus::Failure;
if !silent {
sh_println!("{}", result.short_result(name))?;
// Display invariant metrics if invariant kind.
if let TestKind::Invariant { metrics, .. } = &result.kind
&& !metrics.is_empty()
{
let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
}
// We only display logs at level 2 and above
if verbosity >= 2 && show_traces {
// We only decode logs from Hardhat and DS-style console events
let console_logs = decode_console_logs(&result.logs);
if !console_logs.is_empty() {
sh_println!("Logs:")?;
for log in console_logs {
sh_println!(" {log}")?;
}
sh_println!()?;
}
}
}
// We shouldn't break out of the outer loop directly here so that we finish
// processing the remaining tests and print the suite summary.
any_test_failed |= result.status == TestStatus::Failure;
// Clear the addresses and labels from previous runs.
decoder.clear_addresses();
decoder.labels.extend(result.labels.iter().map(|(k, v)| (*k, v.clone())));
// Identify addresses and decode traces.
let mut decoded_traces = Vec::with_capacity(result.traces.len());
for (kind, arena) in &mut result.traces {
if identify_addresses {
decoder.identify(arena, &mut identifier);
}
// verbosity:
// - 0..3: nothing
// - 3: only display traces for failed tests
// - 4: also display the setup trace for failed tests
// - 5..: display all traces for all tests, including storage changes
let should_include = match kind {
TraceKind::Execution => {
(verbosity == 3 && result.status.is_failure()) || verbosity >= 4
}
TraceKind::Setup => {
(verbosity == 4 && result.status.is_failure()) || verbosity >= 5
}
TraceKind::Deployment => false,
};
if should_include {
decode_trace_arena(arena, &decoder).await;
if let Some(trace_depth) = self.trace_depth {
prune_trace_depth(arena, trace_depth);
}
decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
}
}
if !silent && show_traces && !decoded_traces.is_empty() {
sh_println!("Traces:")?;
for trace in &decoded_traces {
sh_println!("{trace}")?;
}
}
// Extract and display backtrace for failed tests when verbosity >= 3
if !silent
&& result.status.is_failure()
&& verbosity >= 3
&& !result.traces.is_empty()
&& let Some((_, arena)) =
result.traces.iter().find(|(kind, _)| matches!(kind, TraceKind::Execution))
{
// Lazily initialize the backtrace builder on first failure
let builder = backtrace_builder.get_or_insert_with(|| {
BacktraceBuilder::new(
output,
config.root.clone(),
config.parsed_libraries().ok(),
config.via_ir,
)
});
let backtrace = builder.from_traces(arena);
if !backtrace.is_empty() {
sh_println!("{}", backtrace)?;
}
}
if let Some(gas_report) = &mut gas_report {
gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;
for trace in &result.gas_report_traces {
decoder.clear_addresses();
// Re-execute setup and deployment traces to collect identities created in
// setUp and constructor.
for (kind, arena) in &result.traces {
if !matches!(kind, TraceKind::Execution) {
decoder.identify(arena, &mut identifier);
}
}
for arena in trace {
decoder.identify(arena, &mut identifier);
gas_report.analyze([arena], &decoder).await;
}
}
}
// Clear memory.
result.gas_report_traces = Default::default();
// Collect and merge gas snapshots.
for (group, new_snapshots) in &result.gas_snapshots {
gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
}
}
// Write gas snapshots to disk if any were collected.
if !gas_snapshots.is_empty() {
// By default `gas_snapshot_check` is set to `false` in the config.
//
// The user can either:
// - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
// - Pass `--gas-snapshot-check=true` as a CLI argument.
// - Set `gas_snapshot_check = true` in the config.
//
// If the user passes `--gas-snapshot-check=<bool>` then it will override the config
// and the environment variable, disabling the check if `false` is passed.
//
// Exiting early with code 1 if differences are found.
if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
let differences_found = gas_snapshots.clone().into_iter().fold(
false,
|mut found, (group, snapshots)| {
// If the snapshot file doesn't exist, we can't compare so we skip.
if !&config.snapshots.join(format!("{group}.json")).exists() {
return false;
}
let previous_snapshots: BTreeMap<String, String> =
fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
.expect("Failed to read snapshots from disk");
let diff: BTreeMap<_, _> = snapshots
.iter()
.filter_map(|(k, v)| {
previous_snapshots.get(k).and_then(|previous_snapshot| {
if previous_snapshot != v {
Some((
k.clone(),
(previous_snapshot.clone(), v.clone()),
))
} else {
None
}
})
})
.collect();
if !diff.is_empty() {
let _ = sh_eprintln!(
"{}",
format!("\n[{group}] Failed to match snapshots:").red().bold()
);
for (key, (previous_snapshot, snapshot)) in &diff {
let _ = sh_eprintln!(
"{}",
format!("- [{key}] {previous_snapshot} → {snapshot}").red()
);
}
found = true;
}
found
},
);
if differences_found {
sh_eprintln!()?;
eyre::bail!("Snapshots differ from previous run");
}
}
// By default `gas_snapshot_emit` is set to `true` in the config.
//
// The user can either:
// - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
// - Pass `--gas-snapshot-emit=false` as a CLI argument.
// - Set `gas_snapshot_emit = false` in the config.
//
// If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
// and the environment variable, enabling the check if `true` is passed.
if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
// Create `snapshots` directory if it doesn't exist.
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/doc/mod.rs | crates/forge/src/cmd/doc/mod.rs | use super::watch::WatchArgs;
use clap::{Parser, ValueHint};
use eyre::Result;
use forge_doc::{
ContractInheritance, Deployments, DocBuilder, GitSource, InferInlineHyperlinks, Inheritdoc,
};
use foundry_cli::opts::GH_REPO_PREFIX_REGEX;
use foundry_common::compile::ProjectCompiler;
use foundry_config::{Config, load_config_with_root};
use std::{path::PathBuf, process::Command};
mod server;
use server::Server;
// CLI arguments for `forge doc`.
//
// NOTE: `///` doc comments on this struct and its fields are rendered by clap
// as user-facing `--help` text, so maintainer notes are kept in `//` comments
// to avoid changing CLI output.
#[derive(Clone, Debug, Parser)]
pub struct DocArgs {
    /// The project's root path.
    ///
    /// By default root of the Git repository, if in one,
    /// or the current working directory.
    #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
    pub root: Option<PathBuf>,
    /// The doc's output path.
    ///
    /// By default, it is the `docs/` in project root.
    #[arg(
        long,
        short,
        value_hint = ValueHint::DirPath,
        value_name = "PATH",
    )]
    out: Option<PathBuf>,
    /// Build the `mdbook` from generated files.
    #[arg(long, short)]
    build: bool,
    /// Serve the documentation.
    #[arg(long, short)]
    serve: bool,
    /// Open the documentation in a browser after serving.
    #[arg(long, requires = "serve")]
    open: bool,
    /// Hostname for serving documentation.
    #[arg(long, requires = "serve")]
    hostname: Option<String>,
    #[command(flatten)]
    pub watch: WatchArgs,
    /// Port for serving documentation.
    #[arg(long, short, requires = "serve")]
    port: Option<usize>,
    /// The relative path to the `hardhat-deploy` or `forge-deploy` artifact directory. Leave blank
    /// for default.
    // Double-Option: outer `Some` when the flag is present at all, inner `Some`
    // when a path value was supplied with it (clap `Option<Option<T>>` semantics).
    #[arg(long)]
    deployments: Option<Option<PathBuf>>,
    /// Whether to create docs for external libraries.
    #[arg(long, short)]
    include_libraries: bool,
}
impl DocArgs {
    /// Compiles the project, generates the documentation, and — depending on the
    /// flags — builds the mdbook and/or serves it over HTTP.
    pub async fn run(self) -> Result<()> {
        let config = self.config()?;
        let root = &config.root;
        let project = config.project()?;
        // Quiet compile: only the compiler output is needed for docgen.
        let compiler = ProjectCompiler::new().quiet(true);
        let mut output = compiler.compile(&project)?;
        let compiler = output.parser_mut().solc_mut().compiler_mut();
        // CLI `--out` takes precedence over the `[doc]` config section.
        let mut doc_config = config.doc;
        if let Some(out) = self.out {
            doc_config.out = out;
        }
        if doc_config.repository.is_none() {
            // Attempt to read repo from git: derive the repository URL from the
            // `origin` remote when the config does not specify one.
            if let Ok(output) = Command::new("git").args(["remote", "get-url", "origin"]).output()
                && !output.stdout.is_empty()
            {
                let remote = String::from_utf8(output.stdout)?.trim().to_owned();
                if let Some(captures) = GH_REPO_PREFIX_REGEX.captures(&remote) {
                    let brand = captures.name("brand").unwrap().as_str();
                    let tld = captures.name("tld").unwrap().as_str();
                    let project = GH_REPO_PREFIX_REGEX.replace(&remote, "");
                    doc_config.repository =
                        Some(format!("https://{brand}.{tld}/{}", project.trim_end_matches(".git")));
                }
            }
        }
        // Best-effort HEAD commit hash; `.ok()` tolerates running outside a git repo.
        let commit = foundry_cli::utils::Git::new(root).commit_hash(false, "HEAD").ok();
        let mut builder = DocBuilder::new(
            root.clone(),
            project.paths.sources,
            project.paths.libraries,
            self.include_libraries,
        )
        .with_should_build(self.build)
        .with_config(doc_config.clone())
        .with_fmt(config.fmt)
        .with_preprocessor(ContractInheritance { include_libraries: self.include_libraries })
        .with_preprocessor(Inheritdoc::default())
        .with_preprocessor(InferInlineHyperlinks::default())
        .with_preprocessor(GitSource {
            root: root.clone(),
            commit,
            repository: doc_config.repository.clone(),
        });
        // If deployment docgen is enabled, add the [Deployments] preprocessor
        if let Some(deployments) = self.deployments {
            builder = builder.with_preprocessor(Deployments { root: root.clone(), deployments });
        }
        builder.build(compiler)?;
        if self.serve {
            // Blocks until the server thread exits; defaults to localhost:3000.
            Server::new(doc_config.out)
                .with_hostname(self.hostname.unwrap_or_else(|| "localhost".into()))
                .with_port(self.port.unwrap_or(3000))
                .open(self.open)
                .serve()?;
        }
        Ok(())
    }
    /// Returns whether watch mode is enabled
    pub fn is_watch(&self) -> bool {
        self.watch.watch.is_some()
    }
    /// Loads the project [`Config`], rooted at `--root` when provided.
    pub fn config(&self) -> Result<Config> {
        load_config_with_root(self.root.as_deref())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/doc/server.rs | crates/forge/src/cmd/doc/server.rs | use axum::{Router, routing::get_service};
use forge_doc::mdbook::{MDBook, utils::fs::get_404_output_file};
use std::{
io,
net::{SocketAddr, ToSocketAddrs},
path::PathBuf,
};
use tower_http::services::{ServeDir, ServeFile};
/// The HTTP endpoint for the websocket used to trigger reloads when a file changes.
// The leading `/` is stripped before it is handed to mdbook's
// `output.html.live-reload-endpoint` setting (see `Server::serve`).
const LIVE_RELOAD_ENDPOINT: &str = "/__livereload";
/// Basic mdbook server. Given a path, hostname and port, serves the mdbook.
#[derive(Debug)]
pub struct Server {
    /// Path to the mdbook root that gets loaded and built before serving.
    path: PathBuf,
    /// Hostname to bind to (default: `localhost`).
    hostname: String,
    /// Port to bind to (default: `3000`).
    port: usize,
    /// Whether to open the served URL in the default browser.
    open: bool,
}
impl Default for Server {
    /// Default server: empty book path, `localhost:3000`, browser not opened.
    fn default() -> Self {
        Self {
            path: PathBuf::new(),
            hostname: String::from("localhost"),
            port: 3000,
            open: false,
        }
    }
}
impl Server {
    /// Creates a server for the book at `path` with default host/port settings.
    pub fn new(path: PathBuf) -> Self {
        Self { path, ..Self::default() }
    }

    /// Sets the host to serve on.
    pub fn with_hostname(self, hostname: String) -> Self {
        Self { hostname, ..self }
    }

    /// Sets the port to serve on.
    pub fn with_port(self, port: usize) -> Self {
        Self { port, ..self }
    }

    /// Sets whether to open the browser after serving.
    pub fn open(self, open: bool) -> Self {
        Self { open, ..self }
    }

    /// Loads and builds the mdbook, then serves it, blocking until the server
    /// thread terminates.
    pub fn serve(self) -> eyre::Result<()> {
        let Self { path, hostname, port, open: open_browser } = self;

        let mut book =
            MDBook::load(&path).map_err(|err| eyre::eyre!("failed to load book: {err:?}"))?;

        // Enable live reload and override site-url for local serving of the 404 file.
        let endpoint = LIVE_RELOAD_ENDPOINT.strip_prefix('/').unwrap();
        book.config.set("output.html.live-reload-endpoint", endpoint).unwrap();
        book.config.set("output.html.site-url", "/").unwrap();

        book.build().map_err(|err| eyre::eyre!("failed to build book: {err:?}"))?;

        let address = format!("{hostname}:{port}");
        let sockaddr: SocketAddr = address
            .to_socket_addrs()?
            .next()
            .ok_or_else(|| eyre::eyre!("no address found for {}", address))?;

        let input_404 = book
            .config
            .get("output.html.input-404")
            .and_then(|v| v.as_str())
            .map(ToString::to_string);
        let file_404 = get_404_output_file(&input_404);
        let build_dir = book.build_dir_for("html");

        let serving_url = format!("http://{address}");
        sh_println!("Serving on: {serving_url}")?;

        let handle = std::thread::spawn(move || serve(build_dir, sockaddr, &file_404));

        if open_browser {
            open(serving_url);
        }

        match handle.join() {
            Ok(res) => res.map_err(Into::into),
            Err(panic) => std::panic::resume_unwind(panic),
        }
    }
}
/// Blocking entry point that serves `build_dir` over HTTP on `address`,
/// falling back to the book's 404 page for unknown paths.
#[tokio::main]
async fn serve(build_dir: PathBuf, address: SocketAddr, file_404: &str) -> io::Result<()> {
    let fallback = ServeFile::new(build_dir.join(file_404));
    let static_files = ServeDir::new(build_dir).not_found_service(fallback);
    let router = Router::new().fallback_service(get_service(static_files));
    let listener = tokio::net::TcpListener::bind(address).await?;
    axum::serve(listener, router.into_make_service()).await
}
fn open<P: AsRef<std::ffi::OsStr>>(path: P) {
info!("Opening web browser");
if let Err(e) = opener::open(path) {
error!("Error opening web browser: {}", e);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/ui.rs | crates/forge/tests/ui.rs | use foundry_test_utils::ui_runner;
use std::{env, path::Path};
// Absolute path to the compiled `forge` binary, resolved by Cargo at build time.
const FORGE_CMD: &str = env!("CARGO_BIN_EXE_forge");
// Root of the `forge` crate (the directory containing its `Cargo.toml`).
const FORGE_DIR: &str = env!("CARGO_MANIFEST_DIR");
/// Runs the lint UI tests against the fixtures in the sibling `lint` crate.
fn main() -> impl std::process::Termination {
    let forge_dir = Path::new(FORGE_DIR);
    // Fixtures live at `crates/lint/testdata`, next to this crate.
    let testdata = forge_dir.parent().unwrap().join("lint").join("testdata");
    ui_runner::run_tests("lint", Path::new(FORGE_CMD), &testdata)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/config.rs | crates/forge/tests/cli/config.rs | //! Contains various tests for checking forge commands related to config values
use alloy_primitives::{Address, B256, U256};
use foundry_cli::utils as forge_utils;
use foundry_compilers::{
artifacts::{BytecodeHash, OptimizerDetails, RevertStrings, YulDetails},
solc::Solc,
};
use foundry_config::{
CompilationRestrictions, Config, FsPermissions, FuzzConfig, FuzzCorpusConfig, InvariantConfig,
SettingsOverrides, SolcReq,
cache::{CachedChains, CachedEndpoints, StorageCachingConfig},
filter::GlobMatcher,
fs_permissions::{FsAccessPermission, PathPermission},
};
use foundry_evm::opts::EvmOpts;
use foundry_test_utils::{
foundry_compilers::artifacts::{EvmVersion, remappings::Remapping},
util::{OTHER_SOLC_VERSION, OutputExt, TestCommand, pretty_err},
};
use path_slash::PathBufExt;
use semver::VersionReq;
use serde_json::Value;
use similar_asserts::assert_eq;
use std::{
fs,
path::{Path, PathBuf},
str::FromStr,
thread,
};
// Pretty-printed TOML of the default profile — presumably the baseline that
// `forge config` output is compared against elsewhere in this file (confirm
// against its usages). NOTE(review): must be kept in sync with `Config`'s
// defaults whenever a field is added, removed, or its default value changes.
const DEFAULT_CONFIG: &str = r#"[profile.default]
src = "src"
test = "test"
script = "script"
out = "out"
libs = ["lib"]
remappings = ["forge-std/=lib/forge-std/src/"]
auto_detect_remappings = true
libraries = []
cache = true
cache_path = "cache"
dynamic_test_linking = false
snapshots = "snapshots"
gas_snapshot_check = false
gas_snapshot_emit = true
broadcast = "broadcast"
allow_paths = []
include_paths = []
skip = []
force = false
evm_version = "prague"
gas_reports = ["*"]
gas_reports_ignore = []
gas_reports_include_tests = false
auto_detect_solc = true
offline = false
optimizer = false
optimizer_runs = 200
verbosity = 0
eth_rpc_accept_invalid_certs = false
ignored_error_codes = [
"license",
"code-size",
"init-code-size",
"transient-storage",
]
ignored_warnings_from = []
deny = "never"
test_failures_file = "cache/test-failures"
show_progress = false
ffi = false
allow_internal_expect_revert = false
always_use_create_2_factory = false
prompt_timeout = 120
sender = "0x1804c8ab1f12e6bbf3894d4083f33e07309d1f38"
tx_origin = "0x1804c8ab1f12e6bbf3894d4083f33e07309d1f38"
initial_balance = "0xffffffffffffffffffffffff"
block_number = 1
gas_limit = 1073741824
block_base_fee_per_gas = 0
block_coinbase = "0x0000000000000000000000000000000000000000"
block_timestamp = 1
block_difficulty = 0
block_prevrandao = "0x0000000000000000000000000000000000000000000000000000000000000000"
memory_limit = 134217728
extra_output = []
extra_output_files = []
names = false
sizes = false
via_ir = false
ast = false
no_storage_caching = false
no_rpc_rate_limit = false
use_literal_content = false
bytecode_hash = "ipfs"
cbor_metadata = true
sparse_mode = false
build_info = false
isolate = false
disable_block_gas_limit = false
enable_tx_gas_limit = false
unchecked_cheatcode_artifacts = false
create2_library_salt = "0x0000000000000000000000000000000000000000000000000000000000000000"
create2_deployer = "0x4e59b44847b379578588920ca78fbf26c0b4956c"
assertions_revert = true
legacy_assertions = false
celo = false
bypass_prevrandao = false
transaction_timeout = 120
additional_compiler_profiles = []
compilation_restrictions = []
script_execution_protection = true
[profile.default.rpc_storage_caching]
chains = "all"
endpoints = "all"
[[profile.default.fs_permissions]]
access = "read"
path = "out"
[fmt]
line_length = 120
tab_width = 4
style = "space"
bracket_spacing = false
int_types = "long"
multiline_func_header = "attributes_first"
quote_style = "double"
number_underscore = "preserve"
hex_underscore = "remove"
single_line_statement_blocks = "preserve"
override_spacing = false
wrap_comments = false
docs_style = "preserve"
ignore = []
contract_new_lines = false
sort_imports = false
pow_no_space = false
prefer_compact = "all"
single_line_imports = false
[lint]
severity = [
"high",
"medium",
"low",
]
exclude_lints = []
ignore = []
lint_on_build = true
mixed_case_exceptions = [
"ERC",
"URI",
]
[doc]
out = "docs"
title = ""
book = "book.toml"
homepage = "README.md"
ignore = []
[fuzz]
runs = 256
fail_on_revert = true
max_test_rejects = 65536
dictionary_weight = 40
include_storage = true
include_push_bytes = true
max_fuzz_dictionary_addresses = 15728640
max_fuzz_dictionary_values = 9830400
max_fuzz_dictionary_literals = 6553600
gas_report_samples = 256
corpus_gzip = true
corpus_min_mutations = 5
corpus_min_size = 0
show_edge_coverage = false
failure_persist_dir = "cache/fuzz"
show_logs = false
[invariant]
runs = 256
depth = 500
fail_on_revert = false
call_override = false
dictionary_weight = 80
include_storage = true
include_push_bytes = true
max_fuzz_dictionary_addresses = 15728640
max_fuzz_dictionary_values = 9830400
max_fuzz_dictionary_literals = 6553600
shrink_run_limit = 5000
max_assume_rejects = 65536
gas_report_samples = 256
corpus_gzip = true
corpus_min_mutations = 5
corpus_min_size = 0
show_edge_coverage = false
failure_persist_dir = "cache/invariant"
show_metrics = true
show_solidity = false
[labels]
[vyper]
[bind_json]
out = "utils/JsonBindings.sol"
include = []
exclude = []
"#;
// tests all config values that are in use
forgetest!(can_extract_config_values, |prj, cmd| {
    // explicitly set all values
    let input = Config {
        profile: Config::DEFAULT_PROFILE,
        // `profiles` is not serialized.
        profiles: vec![],
        root: ".".into(),
        extends: None,
        src: "test-src".into(),
        test: "test-test".into(),
        script: "test-script".into(),
        out: "out-test".into(),
        libs: vec!["lib-test".into()],
        cache: true,
        dynamic_test_linking: false,
        cache_path: "test-cache".into(),
        snapshots: "snapshots".into(),
        gas_snapshot_check: false,
        gas_snapshot_emit: true,
        broadcast: "broadcast".into(),
        force: true,
        evm_version: EvmVersion::Byzantium,
        gas_reports: vec!["Contract".to_string()],
        gas_reports_ignore: vec![],
        gas_reports_include_tests: false,
        solc: Some(SolcReq::Local(PathBuf::from("custom-solc"))),
        auto_detect_solc: false,
        auto_detect_remappings: true,
        offline: true,
        optimizer: Some(false),
        optimizer_runs: Some(1000),
        optimizer_details: Some(OptimizerDetails {
            yul: Some(false),
            yul_details: Some(YulDetails { stack_allocation: Some(true), ..Default::default() }),
            ..Default::default()
        }),
        model_checker: None,
        extra_output: Default::default(),
        extra_output_files: Default::default(),
        names: true,
        sizes: true,
        test_pattern: None,
        test_pattern_inverse: None,
        contract_pattern: None,
        contract_pattern_inverse: None,
        path_pattern: None,
        path_pattern_inverse: None,
        coverage_pattern_inverse: None,
        test_failures_file: "test-cache/test-failures".into(),
        threads: None,
        show_progress: false,
        fuzz: FuzzConfig {
            runs: 1000,
            max_test_rejects: 100203,
            seed: Some(U256::from(1000)),
            failure_persist_dir: Some("test-cache/fuzz".into()),
            show_logs: false,
            ..Default::default()
        },
        invariant: InvariantConfig {
            runs: 256,
            failure_persist_dir: Some("test-cache/fuzz".into()),
            corpus: FuzzCorpusConfig {
                corpus_dir: Some("cache/invariant/corpus".into()),
                ..Default::default()
            },
            ..Default::default()
        },
        ffi: true,
        allow_internal_expect_revert: false,
        always_use_create_2_factory: false,
        prompt_timeout: 0,
        sender: "00a329c0648769A73afAc7F9381D08FB43dBEA72".parse().unwrap(),
        tx_origin: "00a329c0648769A73afAc7F9F81E08FB43dBEA72".parse().unwrap(),
        initial_balance: U256::from(0xffffffffffffffffffffffffu128),
        block_number: U256::from(10),
        fork_block_number: Some(200),
        chain: Some(9999.into()),
        gas_limit: 99_000_000u64.into(),
        code_size_limit: Some(100000),
        gas_price: Some(999),
        block_base_fee_per_gas: 10,
        block_coinbase: Address::random(),
        block_timestamp: U256::from(10),
        block_difficulty: 10,
        block_prevrandao: B256::random(),
        block_gas_limit: Some(100u64.into()),
        disable_block_gas_limit: false,
        enable_tx_gas_limit: false,
        memory_limit: 1 << 27,
        eth_rpc_url: Some("localhost".to_string()),
        eth_rpc_accept_invalid_certs: false,
        eth_rpc_jwt: None,
        eth_rpc_timeout: None,
        eth_rpc_headers: None,
        etherscan_api_key: None,
        etherscan: Default::default(),
        verbosity: 4,
        remappings: vec![Remapping::from_str("forge-std/=lib/forge-std/").unwrap().into()],
        libraries: vec![
            "src/DssSpell.sol:DssExecLib:0x8De6DDbCd5053d32292AAA0D2105A32d108484a6".to_string(),
        ],
        ignored_error_codes: vec![],
        ignored_file_paths: vec![],
        deny: foundry_config::DenyLevel::Never,
        deny_warnings: false,
        via_ir: true,
        ast: false,
        rpc_storage_caching: StorageCachingConfig {
            chains: CachedChains::None,
            endpoints: CachedEndpoints::Remote,
        },
        no_storage_caching: true,
        no_rpc_rate_limit: true,
        use_literal_content: false,
        bytecode_hash: Default::default(),
        cbor_metadata: true,
        revert_strings: Some(RevertStrings::Strip),
        sparse_mode: true,
        allow_paths: vec![],
        include_paths: vec![],
        rpc_endpoints: Default::default(),
        build_info: false,
        build_info_path: None,
        fmt: Default::default(),
        lint: Default::default(),
        doc: Default::default(),
        bind_json: Default::default(),
        fs_permissions: Default::default(),
        labels: Default::default(),
        isolate: true,
        unchecked_cheatcode_artifacts: false,
        create2_library_salt: Config::DEFAULT_CREATE2_LIBRARY_SALT,
        create2_deployer: Config::DEFAULT_CREATE2_DEPLOYER,
        vyper: Default::default(),
        skip: vec![],
        dependencies: Default::default(),
        soldeer: Default::default(),
        warnings: vec![],
        assertions_revert: true,
        legacy_assertions: false,
        extra_args: vec![],
        networks: Default::default(),
        transaction_timeout: 120,
        additional_compiler_profiles: Default::default(),
        compilation_restrictions: Default::default(),
        script_execution_protection: true,
        _non_exhaustive: (),
    };
    // Round-trip: serialize to `foundry.toml`, reload via the CLI, and expect
    // an exact match with the in-memory value.
    prj.write_config(input.clone());
    let config = cmd.config();
    similar_asserts::assert_eq!(input, config);
});
// tests config gets printed to std out
forgetest!(can_show_config, |prj, cmd| {
    // `forge config` must print exactly the pretty-printed config loaded from disk.
    let on_disk = Config::load_with_root(prj.root()).unwrap();
    let expected = on_disk.to_string_pretty().unwrap().trim().to_string();
    let printed = cmd.arg("config").assert_success().get_output().stdout_lossy();
    assert_eq!(expected, printed.trim().to_string());
});
// checks that config works
// - foundry.toml is properly generated
// - paths are resolved properly
// - config supports overrides from env, and cli
forgetest_init!(can_override_config, |prj, cmd| {
    prj.initialize_default_contracts();
    cmd.set_current_dir(prj.root());
    let foundry_toml = prj.root().join(Config::FILE_NAME);
    assert!(foundry_toml.exists());
    let profile = Config::load_with_root(prj.root()).unwrap();
    // ensure that the auto-generated internal remapping for forge-std's ds-test exists
    assert_eq!(profile.remappings.len(), 1);
    assert_eq!("forge-std/=lib/forge-std/src/", profile.remappings[0].to_string());
    // the remapping converts losslessly into a compiler `Remapping`
    assert_eq!(
        "forge-std/=lib/forge-std/src/",
        Remapping::from(profile.remappings[0].clone()).to_string()
    );
    let expected = profile.to_string_pretty().unwrap().trim().to_string();
    let output = cmd.arg("config").assert_success().get_output().stdout_lossy().trim().to_string();
    assert_eq!(expected, output);
    // remappings work
    let remappings_txt =
        prj.create_file("remappings.txt", "ds-test/=lib/forge-std/lib/ds-test/from-file/");
    let config = forge_utils::load_config_with_root(Some(prj.root())).unwrap();
    assert_eq!(
        format!(
            "ds-test/={}/",
            prj.root().join("lib/forge-std/lib/ds-test/from-file").to_slash_lossy()
        ),
        Remapping::from(config.remappings[0].clone()).to_string()
    );
    let config =
        prj.config_from_output(["--remappings", "ds-test/=lib/forge-std/lib/ds-test/from-cli"]);
    assert_eq!(
        format!(
            "ds-test/={}/",
            prj.root().join("lib/forge-std/lib/ds-test/from-cli").to_slash_lossy()
        ),
        Remapping::from(config.remappings[0].clone()).to_string()
    );
    let config = prj.config_from_output(["--remappings", "other-key/=lib/other/"]);
    assert_eq!(config.remappings.len(), 3);
    assert_eq!(
        format!("other-key/={}/", prj.root().join("lib/other").to_slash_lossy()),
        // As CLI has the higher priority, it'll be found at the first slot.
        Remapping::from(config.remappings[0].clone()).to_string()
    );
    pretty_err(&remappings_txt, fs::remove_file(&remappings_txt));
    let expected = profile.into_basic().to_string_pretty().unwrap().trim().to_string();
    let output = cmd
        .forge_fuse()
        .args(["config", "--basic"])
        .assert_success()
        .get_output()
        .stdout_lossy()
        .trim()
        .to_string();
    assert_eq!(expected, output);
});
// checks that remappings from `foundry.toml`, auto-detection, and `remappings.txt`
// are parsed and printed consistently. NOTE: installs `solmate` from GitHub, so
// this test requires network access.
forgetest_init!(can_parse_remappings_correctly, |prj, cmd| {
    prj.initialize_default_contracts();
    cmd.set_current_dir(prj.root());
    let foundry_toml = prj.root().join(Config::FILE_NAME);
    assert!(foundry_toml.exists());
    let profile = Config::load_with_root(prj.root()).unwrap();
    // ensure that the auto-generated internal remapping for forge-std's ds-test exists
    assert_eq!(profile.remappings.len(), 1);
    let r = &profile.remappings[0];
    assert_eq!("forge-std/=lib/forge-std/src/", r.to_string());
    // the loaded config has resolved, absolute paths
    assert_eq!("forge-std/=lib/forge-std/src/", Remapping::from(r.clone()).to_string());
    let expected = profile.to_string_pretty().unwrap().trim().to_string();
    let output = cmd.arg("config").assert_success().get_output().stdout_lossy().trim().to_string();
    assert_eq!(expected, output);
    let install = |cmd: &mut TestCommand, dep: &str| {
        cmd.forge_fuse().args(["install", dep]).assert_success().stdout_eq(str![[r#"
Installing solmate in [..] (url: https://github.com/transmissions11/solmate, tag: None)
Installed solmate[..]
"#]]);
    };
    install(&mut cmd, "transmissions11/solmate");
    let profile = Config::load_with_root(prj.root()).unwrap();
    // remappings work
    let remappings_txt = prj.create_file(
        "remappings.txt",
        "solmate/=lib/solmate/src/\nsolmate-contracts/=lib/solmate/src/",
    );
    let config = forge_utils::load_config_with_root(Some(prj.root())).unwrap();
    // trailing slashes are removed on windows `to_slash_lossy`
    let path = prj.root().join("lib/solmate/src/").to_slash_lossy().into_owned();
    #[cfg(windows)]
    let path = path + "/";
    assert_eq!(
        format!("solmate/={path}"),
        Remapping::from(config.remappings[0].clone()).to_string()
    );
    // As this is an user-generated remapping, it is not removed, even if it points to the same
    // location.
    assert_eq!(
        format!("solmate-contracts/={path}"),
        Remapping::from(config.remappings[1].clone()).to_string()
    );
    pretty_err(&remappings_txt, fs::remove_file(&remappings_txt));
    let expected = profile.into_basic().to_string_pretty().unwrap().trim().to_string();
    let output = cmd
        .forge_fuse()
        .args(["config", "--basic"])
        .assert_success()
        .get_output()
        .stdout_lossy()
        .trim()
        .to_string();
    assert_eq!(expected, output);
});
// checks that values set via CLI flags and via `foundry.toml` (including
// hyphenated key aliases like `eth-rpc-url`) are picked up.
forgetest_init!(can_detect_config_vals, |prj, _cmd| {
    prj.initialize_default_contracts();
    let url = "http://127.0.0.1:8545";
    let config = prj.config_from_output(["--no-auto-detect", "--rpc-url", url]);
    assert!(!config.auto_detect_solc);
    assert_eq!(config.eth_rpc_url, Some(url.to_string()));
    let mut config = Config::load_with_root(prj.root()).unwrap();
    config.eth_rpc_url = Some("http://127.0.0.1:8545".to_string());
    config.auto_detect_solc = false;
    // write to `foundry.toml`
    // Rewrite the key in kebab-case to verify alias handling during load.
    prj.create_file(
        Config::FILE_NAME,
        &config.to_string_pretty().unwrap().replace("eth_rpc_url", "eth-rpc-url"),
    );
    let config = prj.config_from_output(["--force"]);
    assert!(!config.auto_detect_solc);
    assert_eq!(config.eth_rpc_url, Some(url.to_string()));
});
// checks that `EvmOpts` can be extracted from the figment, with the fork URL
// coming from the CLI and from the `FOUNDRY_ETH_RPC_URL` env var.
forgetest_init!(can_get_evm_opts, |prj, _cmd| {
    prj.initialize_default_contracts();
    let url = "http://127.0.0.1:8545";
    let config = prj.config_from_output(["--rpc-url", url, "--ffi"]);
    assert_eq!(config.eth_rpc_url, Some(url.to_string()));
    assert!(config.ffi);
    // `set_var`/`remove_var` mutate process-global state, hence the `unsafe`
    // blocks; the var is removed again below to avoid leaking into other tests.
    unsafe {
        std::env::set_var("FOUNDRY_ETH_RPC_URL", url);
    }
    let figment = Config::figment_with_root(prj.root()).merge(("debug", false));
    let evm_opts: EvmOpts = figment.extract().unwrap();
    assert_eq!(evm_opts.fork_url, Some(url.to_string()));
    unsafe {
        std::env::remove_var("FOUNDRY_ETH_RPC_URL");
    }
});
// checks that we can set various config values via CLI flags
forgetest_init!(can_set_config_values, |prj, _cmd| {
    prj.initialize_default_contracts();
    let config = prj.config_from_output(["--via-ir", "--no-metadata"]);
    assert!(config.via_ir);
    // `--no-metadata` disables CBOR metadata and zeroes the bytecode hash.
    assert!(!config.cbor_metadata);
    assert_eq!(config.bytecode_hash, BytecodeHash::None);
});
// tests that solc can be explicitly set
forgetest!(can_set_solc_explicitly, |prj, cmd| {
    // `pragma solidity *` accepts any compiler, so the pinned `solc` below decides.
    prj.add_source(
        "Foo",
        r"
pragma solidity *;
contract Greeter {}
",
    );
    prj.update_config(|config| {
        config.solc = Some(OTHER_SOLC_VERSION.into());
    });
    cmd.arg("build").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
});
// tests that `--use <solc>` works
// Covers: bare version, the `solc:<version>` form, a nonexistent path (failure),
// and an absolute path to an installed solc binary.
forgetest!(can_use_solc, |prj, cmd| {
    prj.add_raw_source(
        "Foo",
        r"
pragma solidity *;
contract Foo {}
",
    );
    cmd.args(["build", "--use", OTHER_SOLC_VERSION]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
    cmd.forge_fuse()
        .args(["build", "--force", "--use", &format!("solc:{OTHER_SOLC_VERSION}")])
        .root_arg()
        .assert_success()
        .stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
    // fails to use solc that does not exist
    cmd.forge_fuse().args(["build", "--use", "this/solc/does/not/exist"]);
    cmd.assert_failure().stderr_eq(str![[r#"
Error: `solc` this/solc/does/not/exist does not exist
"#]]);
    // `OTHER_SOLC_VERSION` was installed in previous step, so we can use the path to this directly
    let local_solc = Solc::find_or_install(&OTHER_SOLC_VERSION.parse().unwrap()).unwrap();
    cmd.forge_fuse()
        .args(["build", "--force", "--use"])
        .arg(local_solc.solc)
        .root_arg()
        .assert_success()
        .stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
});
// test to ensure yul optimizer can be set as intended
// `msize()` is rejected when the Yul optimizer is on, so the build must fail
// first, then succeed once the Yul optimizer is explicitly disabled.
forgetest!(can_set_yul_optimizer, |prj, cmd| {
    prj.update_config(|config| config.optimizer = Some(true));
    prj.add_source(
        "foo.sol",
        r"
contract Foo {
    function bar() public pure {
        assembly {
            let result_start := msize()
        }
    }
}
",
    );
    cmd.arg("build").assert_failure().stderr_eq(str![[r#"
Error: Compiler run failed:
Error (6553): The msize instruction cannot be used when the Yul optimizer is activated because it can change its semantics. Either disable the Yul optimizer or do not use the instruction.
[FILE]:6:8:
|
6 |        assembly {
|        ^ (Relevant source part starts here and spans across multiple lines).
"#]]);
    // disable yul optimizer explicitly
    prj.update_config(|config| config.optimizer_details.get_or_insert_default().yul = Some(false));
    cmd.assert_success();
});
// tests that the lib triple can be parsed
forgetest_init!(can_parse_dapp_libraries, |_prj, cmd| {
cmd.env(
"DAPP_LIBRARIES",
"src/DssSpell.sol:DssExecLib:0x8De6DDbCd5053d32292AAA0D2105A32d108484a6",
);
let config = cmd.config();
assert_eq!(
config.libraries,
vec!["src/DssSpell.sol:DssExecLib:0x8De6DDbCd5053d32292AAA0D2105A32d108484a6".to_string(),]
);
});
// test that optimizer runs works
forgetest!(can_set_optimizer_runs, |prj, cmd| {
// explicitly set optimizer runs
prj.update_config(|config| config.optimizer_runs = Some(1337));
let config = cmd.config();
assert_eq!(config.optimizer_runs, Some(1337));
let config = prj.config_from_output(["--optimizer-runs", "300"]);
assert_eq!(config.optimizer_runs, Some(300));
});
// test that use_literal_content works
forgetest!(can_set_use_literal_content, |prj, cmd| {
// explicitly set use_literal_content
prj.update_config(|config| config.use_literal_content = false);
let config = cmd.config();
assert_eq!(config.use_literal_content, false);
let config = prj.config_from_output(["--use-literal-content"]);
assert_eq!(config.use_literal_content, true);
});
// <https://github.com/foundry-rs/foundry/issues/9665>
forgetest!(enable_optimizer_when_runs_set, |prj, cmd| {
// explicitly set optimizer runs
prj.update_config(|config| config.optimizer_runs = Some(1337));
let config = cmd.config();
assert!(config.optimizer.unwrap());
});
// test `optimizer_runs` set to 200 by default if optimizer enabled
forgetest!(optimizer_runs_default, |prj, cmd| {
// explicitly set optimizer
prj.update_config(|config| config.optimizer = Some(true));
let config = cmd.config();
assert_eq!(config.optimizer_runs, Some(200));
});
// test that gas_price can be set
forgetest!(can_set_gas_price, |prj, cmd| {
// explicitly set gas_price
prj.update_config(|config| config.gas_price = Some(1337));
let config = cmd.config();
assert_eq!(config.gas_price, Some(1337));
let config = prj.config_from_output(["--gas-price", "300"]);
assert_eq!(config.gas_price, Some(300));
});
// test that we can detect remappings from foundry.toml
forgetest_init!(can_detect_lib_foundry_toml, |prj, cmd| {
prj.initialize_default_contracts();
let config = cmd.config();
let remappings = config.remappings.iter().cloned().map(Remapping::from).collect::<Vec<_>>();
similar_asserts::assert_eq!(
remappings,
vec![
// global
"forge-std/=lib/forge-std/src/".parse().unwrap(),
]
);
// create a new lib directly in the `lib` folder with a remapping
let mut config = config;
config.remappings = vec![Remapping::from_str("nested/=lib/nested").unwrap().into()];
let nested = prj.paths().libraries[0].join("nested-lib");
pretty_err(&nested, fs::create_dir_all(&nested));
let toml_file = nested.join("foundry.toml");
pretty_err(&toml_file, fs::write(&toml_file, config.to_string_pretty().unwrap()));
let config = cmd.config();
let remappings = config.remappings.iter().cloned().map(Remapping::from).collect::<Vec<_>>();
similar_asserts::assert_eq!(
remappings,
vec![
// default
"forge-std/=lib/forge-std/src/".parse().unwrap(),
// remapping is local to the lib
"nested-lib/=lib/nested-lib/src/".parse().unwrap(),
// global
"nested/=lib/nested-lib/lib/nested/".parse().unwrap(),
]
);
// nest another lib under the already nested lib
let mut config = config;
config.remappings = vec![Remapping::from_str("nested-twice/=lib/nested-twice").unwrap().into()];
let nested = nested.join("lib/another-lib");
pretty_err(&nested, fs::create_dir_all(&nested));
let toml_file = nested.join("foundry.toml");
pretty_err(&toml_file, fs::write(&toml_file, config.to_string_pretty().unwrap()));
let another_config = cmd.config();
let remappings =
another_config.remappings.iter().cloned().map(Remapping::from).collect::<Vec<_>>();
similar_asserts::assert_eq!(
remappings,
vec![
// local to the lib
"another-lib/=lib/nested-lib/lib/another-lib/src/".parse().unwrap(),
// global
"forge-std/=lib/forge-std/src/".parse().unwrap(),
"nested-lib/=lib/nested-lib/src/".parse().unwrap(),
// remappings local to the lib
"nested-twice/=lib/nested-lib/lib/another-lib/lib/nested-twice/".parse().unwrap(),
"nested/=lib/nested-lib/lib/nested/".parse().unwrap(),
]
);
config.src = "custom-source-dir".into();
pretty_err(&toml_file, fs::write(&toml_file, config.to_string_pretty().unwrap()));
let config = cmd.config();
let remappings = config.remappings.iter().cloned().map(Remapping::from).collect::<Vec<_>>();
similar_asserts::assert_eq!(
remappings,
vec![
// local to the lib
"another-lib/=lib/nested-lib/lib/another-lib/custom-source-dir/".parse().unwrap(),
// global
"forge-std/=lib/forge-std/src/".parse().unwrap(),
"nested-lib/=lib/nested-lib/src/".parse().unwrap(),
// remappings local to the lib
"nested-twice/=lib/nested-lib/lib/another-lib/lib/nested-twice/".parse().unwrap(),
"nested/=lib/nested-lib/lib/nested/".parse().unwrap(),
]
);
// check if lib path is absolute, it should deteect nested lib
let mut config = cmd.config();
config.libs = vec![nested];
let remappings = config.remappings.iter().cloned().map(Remapping::from).collect::<Vec<_>>();
similar_asserts::assert_eq!(
remappings,
vec![
// local to the lib
"another-lib/=lib/nested-lib/lib/another-lib/custom-source-dir/".parse().unwrap(),
// global
"forge-std/=lib/forge-std/src/".parse().unwrap(),
"nested-lib/=lib/nested-lib/src/".parse().unwrap(),
// remappings local to the lib
"nested-twice/=lib/nested-lib/lib/another-lib/lib/nested-twice/".parse().unwrap(),
"nested/=lib/nested-lib/lib/nested/".parse().unwrap(),
]
);
});
// test remappings with closer paths are prioritised
// so that `dep/=lib/a/src` will take precedent over `dep/=lib/a/lib/b/src`
forgetest_init!(can_prioritise_closer_lib_remappings, |prj, cmd| {
prj.initialize_default_contracts();
let config = cmd.config();
// create a new lib directly in the `lib` folder with conflicting remapping `forge-std/`
let mut config = config;
config.remappings = vec![Remapping::from_str("forge-std/=lib/forge-std/src/").unwrap().into()];
let nested = prj.paths().libraries[0].join("dep1");
pretty_err(&nested, fs::create_dir_all(&nested));
let toml_file = nested.join("foundry.toml");
pretty_err(&toml_file, fs::write(&toml_file, config.to_string_pretty().unwrap()));
let config = cmd.config();
let remappings = config.get_all_remappings().collect::<Vec<_>>();
similar_asserts::assert_eq!(
remappings,
vec![
"dep1/=lib/dep1/src/".parse().unwrap(),
"forge-std/=lib/forge-std/src/".parse().unwrap()
]
);
});
// Test that remappings within root of the project have priority over remappings of sub-projects.
// E.g. `@utils/libraries` mapping from library shouldn't be added if project already has `@utils`
// remapping.
// See <https://github.com/foundry-rs/foundry/issues/9146>
// Test that
// - single file remapping is properly added, see
// <https://github.com/foundry-rs/foundry/issues/6706> and <https://github.com/foundry-rs/foundry/issues/8499>
// - project defined `@openzeppelin/contracts` remapping is added
// - library defined `@openzeppelin/contracts-upgradeable` remapping is added
// - library defined `@openzeppelin/contracts/upgradeable` remapping is not added as it conflicts
// with project defined `@openzeppelin/contracts` remapping
// See <https://github.com/foundry-rs/foundry/issues/9271>
forgetest_init!(can_prioritise_project_remappings, |prj, cmd| {
prj.initialize_default_contracts();
let mut config = cmd.config();
// Add `@utils/` remapping in project config.
config.remappings = vec![
Remapping::from_str("@utils/libraries/Contract.sol=src/Contract.sol").unwrap().into(),
Remapping::from_str("@utils/=src/").unwrap().into(),
Remapping::from_str("@openzeppelin/contracts=lib/openzeppelin-contracts/").unwrap().into(),
];
let proj_toml_file = prj.paths().root.join("foundry.toml");
pretty_err(&proj_toml_file, fs::write(&proj_toml_file, config.to_string_pretty().unwrap()));
// Create a new lib in the `lib` folder with conflicting `@utils/libraries` remapping.
// This should be filtered out from final remappings as root project already has `@utils/`.
let nested = prj.paths().libraries[0].join("dep1");
pretty_err(&nested, fs::create_dir_all(&nested));
let mut lib_config = Config::load_with_root(&nested).unwrap();
lib_config.remappings = vec![
Remapping::from_str("@utils/libraries/=src/").unwrap().into(),
Remapping::from_str("@openzeppelin/contracts-upgradeable/=lib/openzeppelin-upgradeable/")
.unwrap()
.into(),
Remapping::from_str(
"@openzeppelin/contracts/upgradeable/=lib/openzeppelin-contracts/upgradeable/",
)
.unwrap()
.into(),
];
let lib_toml_file = nested.join("foundry.toml");
pretty_err(&lib_toml_file, fs::write(&lib_toml_file, lib_config.to_string_pretty().unwrap()));
cmd.args(["remappings", "--pretty"]).assert_success().stdout_eq(str![[r#"
Global:
- @utils/libraries/Contract.sol=src/Contract.sol
- @utils/=src/
- @openzeppelin/contracts/=lib/openzeppelin-contracts/
- @openzeppelin/contracts-upgradeable/=lib/dep1/lib/openzeppelin-upgradeable/
- dep1/=lib/dep1/src/
- forge-std/=lib/forge-std/src/
"#]]);
});
// test to check that foundry.toml libs section updates on install
forgetest!(can_update_libs_section, |prj, cmd| {
cmd.git_init();
// explicitly set gas_price
prj.update_config(|config| config.libs = vec!["node_modules".into()]);
cmd.args(["install", "foundry-rs/forge-std"]).assert_success().stdout_eq(str![[r#"
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
"#]]);
let config = cmd.forge_fuse().config();
// `lib` was added automatically
let expected = vec![PathBuf::from("node_modules"), PathBuf::from("lib")];
assert_eq!(config.libs, expected);
// additional install don't edit `libs`
cmd.forge_fuse().args(["install", "dapphub/ds-test"]).assert_success().stdout_eq(str![[r#"
Installing ds-test in [..] (url: https://github.com/dapphub/ds-test, tag: None)
Installed ds-test
"#]]);
let config = cmd.forge_fuse().config();
assert_eq!(config.libs, expected);
});
// test to check that loading the config emits warnings on the root foundry.toml and
// is silent for any libs
forgetest!(config_emit_warnings, |prj, cmd| {
cmd.git_init();
cmd.args(["install", "foundry-rs/forge-std"]).assert_success().stdout_eq(str![[r#"
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
"#]]);
let faulty_toml = r"[default]
src = 'src'
out = 'out'
libs = ['lib']";
fs::write(prj.root().join("foundry.toml"), faulty_toml).unwrap();
fs::write(prj.root().join("lib").join("forge-std").join("foundry.toml"), faulty_toml).unwrap();
cmd.forge_fuse().args(["config"]).assert_success().stderr_eq(str![[r#"
Warning: Found unknown config section in foundry.toml: [default]
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/bind.rs | crates/forge/tests/cli/bind.rs | // <https://github.com/foundry-rs/foundry/issues/9482>
forgetest!(bind_unlinked_bytecode, |prj, cmd| {
prj.add_source(
"SomeLibContract.sol",
r#"
library SomeLib {
function add(uint256 a, uint256 b) external pure returns (uint256) {
return a + b;
}
}
contract SomeLibContract {
function add(uint256 a, uint256 b) public pure returns (uint256) {
return SomeLib.add(a, b);
}
}
"#,
);
cmd.args(["bind", "--select", "SomeLibContract"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Generating bindings for 1 contracts
Bindings have been generated to [..]
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/coverage.rs | crates/forge/tests/cli/coverage.rs | use foundry_common::fs::{self, files_with_ext};
use foundry_test_utils::{
TestCommand, TestProject,
snapbox::{Data, IntoData},
};
use std::path::Path;
#[track_caller]
fn assert_lcov(cmd: &mut TestCommand, data: impl IntoData) {
cmd.args(["--report=lcov", "--report-file"]).assert_file(data.into_data());
}
fn basic_base(prj: TestProject, mut cmd: TestCommand) {
cmd.args(["coverage", "--report=lcov", "--report=summary"]).assert_success().stdout_eq(str![[
r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Analysing contracts...
Running tests...
Ran 2 tests for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS])
[PASS] test_Increment() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
Wrote LCOV report.
╭----------------------+---------------+---------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+======================================================================================+
| script/Counter.s.sol | 0.00% (0/5) | 0.00% (0/3) | 100.00% (0/0) | 0.00% (0/2) |
|----------------------+---------------+---------------+---------------+---------------|
| src/Counter.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
|----------------------+---------------+---------------+---------------+---------------|
| Total | 44.44% (4/9) | 40.00% (2/5) | 100.00% (0/0) | 50.00% (2/4) |
╰----------------------+---------------+---------------+---------------+---------------╯
"#
]]);
let lcov = prj.root().join("lcov.info");
assert!(lcov.exists(), "lcov.info was not created");
let default_lcov = str![[r#"
TN:
SF:script/Counter.s.sol
DA:10,0
FN:10,CounterScript.setUp
FNDA:0,CounterScript.setUp
DA:12,0
FN:12,CounterScript.run
FNDA:0,CounterScript.run
DA:13,0
DA:15,0
DA:17,0
FNF:2
FNH:0
LF:5
LH:0
BRF:0
BRH:0
end_of_record
TN:
SF:src/Counter.sol
DA:7,258
FN:7,Counter.setNumber
FNDA:258,Counter.setNumber
DA:8,258
DA:11,1
FN:11,Counter.increment
FNDA:1,Counter.increment
DA:12,1
FNF:2
FNH:2
LF:4
LH:4
BRF:0
BRH:0
end_of_record
"#]];
assert_data_eq!(Data::read_from(&lcov, None), default_lcov.clone());
assert_lcov(
cmd.forge_fuse().args(["coverage", "--report=lcov", "--lcov-version=1"]),
default_lcov,
);
assert_lcov(
cmd.forge_fuse().args(["coverage", "--report=lcov", "--lcov-version=2"]),
str![[r#"
TN:
SF:script/Counter.s.sol
DA:10,0
FN:10,10,CounterScript.setUp
FNDA:0,CounterScript.setUp
DA:12,0
FN:12,18,CounterScript.run
FNDA:0,CounterScript.run
DA:13,0
DA:15,0
DA:17,0
FNF:2
FNH:0
LF:5
LH:0
BRF:0
BRH:0
end_of_record
TN:
SF:src/Counter.sol
DA:7,258
FN:7,9,Counter.setNumber
FNDA:258,Counter.setNumber
DA:8,258
DA:11,1
FN:11,13,Counter.increment
FNDA:1,Counter.increment
DA:12,1
FNF:2
FNH:2
LF:4
LH:4
BRF:0
BRH:0
end_of_record
"#]],
);
assert_lcov(
cmd.forge_fuse().args(["coverage", "--report=lcov", "--lcov-version=2.2"]),
str![[r#"
TN:
SF:script/Counter.s.sol
DA:10,0
FNL:0,10,10
FNA:0,0,CounterScript.setUp
DA:12,0
FNL:1,12,18
FNA:1,0,CounterScript.run
DA:13,0
DA:15,0
DA:17,0
FNF:2
FNH:0
LF:5
LH:0
BRF:0
BRH:0
end_of_record
TN:
SF:src/Counter.sol
DA:7,258
FNL:2,7,9
FNA:2,258,Counter.setNumber
DA:8,258
DA:11,1
FNL:3,11,13
FNA:3,1,Counter.increment
DA:12,1
FNF:2
FNH:2
LF:4
LH:4
BRF:0
BRH:0
end_of_record
"#]],
);
}
forgetest_init!(basic, |prj, cmd| {
prj.initialize_default_contracts();
basic_base(prj, cmd);
});
forgetest_init!(basic_crlf, |prj, cmd| {
prj.initialize_default_contracts();
// Manually replace `\n` with `\r\n` in the source file.
let make_crlf = |path: &Path| {
fs::write(path, fs::read_to_string(path).unwrap().replace('\n', "\r\n")).unwrap()
};
make_crlf(&prj.paths().sources.join("Counter.sol"));
make_crlf(&prj.paths().scripts.join("Counter.s.sol"));
// Should have identical stdout and lcov output.
basic_base(prj, cmd);
});
forgetest!(setup, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"AContract.sol",
r#"
contract AContract {
int public i;
function init() public {
i = 0;
}
function foo() public {
i = 1;
}
}
"#,
);
prj.add_source(
"AContractTest.sol",
r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
contract AContractTest is DSTest {
AContract a;
function setUp() public {
a = new AContract();
a.init();
}
function testFoo() public {
a.foo();
}
}
"#,
);
// Assert 100% coverage (init function coverage called in setUp is accounted).
cmd.arg("coverage").assert_success().stdout_eq(str![[r#"
...
╭-------------------+---------------+---------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===================================================================================+
| src/AContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
|-------------------+---------------+---------------+---------------+---------------|
| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
╰-------------------+---------------+---------------+---------------+---------------╯
"#]]);
});
forgetest!(setup_md, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"AContract.sol",
r#"
contract AContract {
int public i;
function init() public {
i = 0;
}
function foo() public {
i = 1;
}
}
"#,
);
prj.add_source(
"AContractTest.sol",
r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
contract AContractTest is DSTest {
AContract a;
function setUp() public {
a = new AContract();
a.init();
}
function testFoo() public {
a.foo();
}
}
"#,
);
// Assert 100% coverage (init function coverage called in setUp is accounted).
cmd.arg("coverage").args(["--md"]).assert_success().stdout_eq(str![[r#"
...
| File | % Lines | % Statements | % Branches | % Funcs |
|-------------------|---------------|---------------|---------------|---------------|
| src/AContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
"#]]);
});
forgetest!(no_match, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"AContract.sol",
r#"
contract AContract {
int public i;
function init() public {
i = 0;
}
function foo() public {
i = 1;
}
}
"#,
);
prj.add_source(
"AContractTest.sol",
r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
contract AContractTest is DSTest {
AContract a;
function setUp() public {
a = new AContract();
a.init();
}
function testFoo() public {
a.foo();
}
}
"#,
);
prj.add_source(
"BContract.sol",
r#"
contract BContract {
int public i;
function init() public {
i = 0;
}
function foo() public {
i = 1;
}
}
"#,
);
prj.add_source(
"BContractTest.sol",
r#"
import "./test.sol";
import {BContract} from "./BContract.sol";
contract BContractTest is DSTest {
BContract a;
function setUp() public {
a = new BContract();
a.init();
}
function testFoo() public {
a.foo();
}
}
"#,
);
// Assert AContract is not included in report.
cmd.arg("coverage").arg("--no-match-coverage=AContract").assert_success().stdout_eq(str![[
r#"
...
╭-------------------+---------------+---------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===================================================================================+
| src/BContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
|-------------------+---------------+---------------+---------------+---------------|
| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
╰-------------------+---------------+---------------+---------------+---------------╯
"#
]]);
});
forgetest!(assert, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"AContract.sol",
r#"
contract AContract {
function checkA(uint256 a) external pure returns (bool) {
assert(a > 2);
return true;
}
}
"#,
);
prj.add_source(
"AContractTest.sol",
r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
interface Vm {
function expectRevert() external;
}
contract AContractTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
AContract a = new AContract();
function testAssertBranch() external {
bool result = a.checkA(10);
assertTrue(result);
}
function testAssertRevertBranch() external {
vm.expectRevert();
a.checkA(1);
}
}
"#,
);
// Assert 50% statement coverage for assert failure (assert not considered a branch).
cmd.arg("coverage").args(["--mt", "testAssertRevertBranch"]).assert_success().stdout_eq(str![
[r#"
...
╭-------------------+--------------+--------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+=================================================================================+
| src/AContract.sol | 66.67% (2/3) | 50.00% (1/2) | 100.00% (0/0) | 100.00% (1/1) |
|-------------------+--------------+--------------+---------------+---------------|
| Total | 66.67% (2/3) | 50.00% (1/2) | 100.00% (0/0) | 100.00% (1/1) |
╰-------------------+--------------+--------------+---------------+---------------╯
"#]
]);
// Assert 100% statement coverage for proper assert (assert not considered a branch).
cmd.forge_fuse().arg("coverage").args(["--mt", "testAssertBranch"]).assert_success().stdout_eq(
str![[r#"
...
╭-------------------+---------------+---------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===================================================================================+
| src/AContract.sol | 100.00% (3/3) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (1/1) |
|-------------------+---------------+---------------+---------------+---------------|
| Total | 100.00% (3/3) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (1/1) |
╰-------------------+---------------+---------------+---------------+---------------╯
"#]],
);
});
forgetest!(require, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"AContract.sol",
r#"
contract AContract {
function checkRequire(bool doNotRevert) public view {
require(doNotRevert, "reverted");
}
}
"#,
);
prj.add_source(
"AContractTest.sol",
r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
interface Vm {
function expectRevert(bytes calldata revertData) external;
}
contract AContractTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
AContract a = new AContract();
function testRequireRevert() external {
vm.expectRevert(abi.encodePacked("reverted"));
a.checkRequire(false);
}
function testRequireNoRevert() external {
a.checkRequire(true);
}
}
"#,
);
// Assert 50% branch coverage if only revert tested.
cmd.arg("coverage").args(["--mt", "testRequireRevert"]).assert_success().stdout_eq(str![[r#"
...
╭-------------------+---------------+---------------+--------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+==================================================================================+
| src/AContract.sol | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) |
|-------------------+---------------+---------------+--------------+---------------|
| Total | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) |
╰-------------------+---------------+---------------+--------------+---------------╯
"#]]);
// Assert 50% branch coverage if only happy path tested.
cmd.forge_fuse()
.arg("coverage")
.args(["--mt", "testRequireNoRevert"])
.assert_success()
.stdout_eq(str![[r#"
...
╭-------------------+---------------+---------------+--------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+==================================================================================+
| src/AContract.sol | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) |
|-------------------+---------------+---------------+--------------+---------------|
| Total | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) |
╰-------------------+---------------+---------------+--------------+---------------╯
"#]]);
// Assert 100% branch coverage.
cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
...
╭-------------------+---------------+---------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===================================================================================+
| src/AContract.sol | 100.00% (2/2) | 100.00% (1/1) | 100.00% (2/2) | 100.00% (1/1) |
|-------------------+---------------+---------------+---------------+---------------|
| Total | 100.00% (2/2) | 100.00% (1/1) | 100.00% (2/2) | 100.00% (1/1) |
╰-------------------+---------------+---------------+---------------+---------------╯
"#]]);
});
forgetest!(line_hit_not_doubled, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"AContract.sol",
r#"
contract AContract {
int public i;
function foo() public {
i = 1;
}
}
"#,
);
prj.add_source(
"AContractTest.sol",
r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
contract AContractTest is DSTest {
AContract a = new AContract();
function testFoo() public {
a.foo();
}
}
"#,
);
// We want to make sure DA:8,1 is added only once so line hit is not doubled.
assert_lcov(
cmd.arg("coverage"),
str![[r#"
TN:
SF:src/AContract.sol
DA:7,1
FN:7,AContract.foo
FNDA:1,AContract.foo
DA:8,1
FNF:1
FNH:1
LF:2
LH:2
BRF:0
BRH:0
end_of_record
"#]],
);
});
forgetest!(branch, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"Foo.sol",
r#"
contract Foo {
error Gte1(uint256 number, uint256 firstElement);
enum Status {
NULL,
OPEN,
CLOSED
}
struct Item {
Status status;
uint256 value;
}
mapping(uint256 => Item) internal items;
uint256 public nextId = 1;
function getItem(uint256 id) public view returns (Item memory item) {
item = items[id];
}
function addItem(uint256 value) public returns (uint256 id) {
id = nextId;
items[id] = Item(Status.OPEN, value);
nextId++;
}
function closeIfEqValue(uint256 id, uint256 value) public {
if (items[id].value == value) {
items[id].status = Status.CLOSED;
}
}
function incrementIfEqValue(uint256 id, uint256 value) public {
if (items[id].value == value) {
items[id].value = value + 1;
}
}
function foo(uint256 a) external pure {
if (a < 10) {
if (a < 3) {
assert(a == 1);
} else {
assert(a == 5);
}
} else {
assert(a == 60);
}
}
function countOdd(uint256[] memory arr) external pure returns (uint256 count) {
uint256 length = arr.length;
for (uint256 i = 0; i < length; ++i) {
if (arr[i] % 2 == 1) {
count++;
arr[0];
}
}
}
function checkLt(uint256 number, uint256[] memory arr) external pure returns (bool) {
if (number >= arr[0]) {
revert Gte1(number, arr[0]);
}
return true;
}
function checkEmptyStatements(uint256 number, uint256[] memory arr) external pure returns (bool) {
// Check that empty statements are covered.
if (number >= arr[0]) {
// Do nothing
} else {
// Do nothing.
}
if (number >= arr[0]) {}
return true;
}
function singlePathCoverage(uint256 number) external pure {
if (number < 10) {
if (number < 5) {
number++;
}
number++;
}
}
}
"#,
);
prj.add_source(
"FooTest.sol",
r#"
import "./test.sol";
import {Foo} from "./Foo.sol";
interface Vm {
function expectRevert(bytes calldata revertData) external;
function expectRevert() external;
}
contract FooTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
Foo internal foo = new Foo();
function test_issue_7784() external {
foo.foo(1);
vm.expectRevert();
foo.foo(2);
vm.expectRevert();
foo.foo(4);
foo.foo(5);
foo.foo(60);
vm.expectRevert();
foo.foo(70);
}
function test_issue_4310() external {
uint256[] memory arr = new uint256[](3);
arr[0] = 78;
arr[1] = 493;
arr[2] = 700;
uint256 count = foo.countOdd(arr);
assertEq(count, 1);
arr = new uint256[](4);
arr[0] = 78;
arr[1] = 493;
arr[2] = 700;
arr[3] = 1729;
count = foo.countOdd(arr);
assertEq(count, 2);
}
function test_issue_4315() external {
uint256 value = 42;
uint256 id = foo.addItem(value);
assertEq(id, 1);
assertEq(foo.nextId(), 2);
Foo.Item memory item = foo.getItem(id);
assertEq(uint8(item.status), uint8(Foo.Status.OPEN));
assertEq(item.value, value);
foo = new Foo();
id = foo.addItem(value);
foo.closeIfEqValue(id, 903);
item = foo.getItem(id);
assertEq(uint8(item.status), uint8(Foo.Status.OPEN));
foo = new Foo();
foo.addItem(value);
foo.closeIfEqValue(id, 42);
item = foo.getItem(id);
assertEq(uint8(item.status), uint8(Foo.Status.CLOSED));
foo = new Foo();
id = foo.addItem(value);
foo.incrementIfEqValue(id, 903);
item = foo.getItem(id);
assertEq(item.value, 42);
foo = new Foo();
id = foo.addItem(value);
foo.incrementIfEqValue(id, 42);
item = foo.getItem(id);
assertEq(item.value, 43);
}
function test_issue_4309() external {
uint256[] memory arr = new uint256[](1);
arr[0] = 1;
uint256 number = 2;
vm.expectRevert(abi.encodeWithSelector(Foo.Gte1.selector, number, arr[0]));
foo.checkLt(number, arr);
number = 1;
vm.expectRevert(abi.encodeWithSelector(Foo.Gte1.selector, number, arr[0]));
foo.checkLt(number, arr);
number = 0;
bool result = foo.checkLt(number, arr);
assertTrue(result);
}
function test_issue_4314() external {
uint256[] memory arr = new uint256[](1);
arr[0] = 1;
foo.checkEmptyStatements(0, arr);
}
function test_single_path_child_branch() external {
foo.singlePathCoverage(1);
}
function test_single_path_parent_branch() external {
foo.singlePathCoverage(9);
}
function test_single_path_branch() external {
foo.singlePathCoverage(15);
}
}
"#,
);
// Assert no coverage for single path branch. 2 branches (parent and child) not covered.
cmd.arg("coverage")
.args(["--nmt", "test_single_path_child_branch|test_single_path_parent_branch"])
.assert_success()
.stdout_eq(str![[r#"
...
╭-------------+----------------+----------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===============================================================================+
| src/Foo.sol | 91.67% (33/36) | 90.00% (27/30) | 80.00% (8/10) | 100.00% (9/9) |
|-------------+----------------+----------------+---------------+---------------|
| Total | 91.67% (33/36) | 90.00% (27/30) | 80.00% (8/10) | 100.00% (9/9) |
╰-------------+----------------+----------------+---------------+---------------╯
"#]]);
// Assert no coverage for single path child branch. 1 branch (child) not covered.
cmd.forge_fuse()
.arg("coverage")
.args(["--nmt", "test_single_path_child_branch"])
.assert_success()
.stdout_eq(str![[r#"
...
╭-------------+----------------+----------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===============================================================================+
| src/Foo.sol | 97.22% (35/36) | 96.67% (29/30) | 90.00% (9/10) | 100.00% (9/9) |
|-------------+----------------+----------------+---------------+---------------|
| Total | 97.22% (35/36) | 96.67% (29/30) | 90.00% (9/10) | 100.00% (9/9) |
╰-------------+----------------+----------------+---------------+---------------╯
"#]]);
// Assert 100% coverage.
cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
...
╭-------------+-----------------+-----------------+-----------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+===================================================================================+
| src/Foo.sol | 100.00% (36/36) | 100.00% (30/30) | 100.00% (10/10) | 100.00% (9/9) |
|-------------+-----------------+-----------------+-----------------+---------------|
| Total | 100.00% (36/36) | 100.00% (30/30) | 100.00% (10/10) | 100.00% (9/9) |
╰-------------+-----------------+-----------------+-----------------+---------------╯
"#]]);
});
// Coverage: bare type conversions / struct constructions used as expression
// statements must NOT be counted as statements, while the same constructs used
// in assignments — and every function kind, bodyless or not — must be counted.
forgetest!(function_call, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "AContract.sol",
        r#"
contract AContract {
    struct Custom {
        bool a;
        uint256 b;
    }
    function coverMe() external returns (bool) {
        // Next lines should not be counted in coverage.
        string("");
        uint256(1);
        address(this);
        bool(false);
        Custom(true, 10);
        // Next lines should be counted in coverage.
        uint256 a = uint256(1);
        Custom memory cust = Custom(false, 100);
        privateWithNoBody();
        privateWithBody();
        publicWithNoBody();
        publicWithBody();
        return true;
    }
    function privateWithNoBody() private {}
    function privateWithBody() private returns (bool) {
        return true;
    }
    function publicWithNoBody() private {}
    function publicWithBody() private returns (bool) {
        return true;
    }
}
"#,
    );
    prj.add_source(
        "AContractTest.sol",
        r#"
import "./test.sol";
import {AContract} from "./AContract.sol";
contract AContractTest is DSTest {
    AContract a = new AContract();
    function testTypeConversionCoverage() external {
        a.coverMe();
    }
}
"#,
    );
    // Assert 100% coverage.
    cmd.arg("coverage").assert_success().stdout_eq(str![[r#"
...
╭-------------------+-----------------+---------------+---------------+---------------╮
| File              | % Lines         | % Statements  | % Branches    | % Funcs       |
+=====================================================================================+
| src/AContract.sol | 100.00% (14/14) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (5/5) |
|-------------------+-----------------+---------------+---------------+---------------|
| Total             | 100.00% (14/14) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (5/5) |
╰-------------------+-----------------+---------------+---------------+---------------╯

"#]]);
});
// Coverage for try/catch: every clause — success block, `Error(string)`,
// raw `bytes`, and bodyless catch-alls — must be tracked as a branch, and the
// constructor's require/assert failure paths are exercised via `expectRevert`.
forgetest!(try_catch, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "Foo.sol",
        r#"
contract Foo {
    address public owner;
    constructor(address _owner) {
        require(_owner != address(0), "invalid address");
        assert(_owner != 0x0000000000000000000000000000000000000001);
        owner = _owner;
    }
    function myFunc(uint256 x) public pure returns (string memory) {
        require(x != 0, "require failed");
        return "my func was called";
    }
}
contract Bar {
    event Log(string message);
    event LogBytes(bytes data);
    Foo public foo;
    constructor() {
        foo = new Foo(msg.sender);
    }
    function tryCatchExternalCall(uint256 _i) public {
        try foo.myFunc(_i) returns (string memory result) {
            emit Log(result);
        } catch {
            emit Log("external call failed");
        }
    }
    function tryCatchNewContract(address _owner) public {
        try new Foo(_owner) returns (Foo foo_) {
            emit Log("Foo created");
        } catch Error(string memory reason) {
            emit Log(reason);
        } catch (bytes memory reason) {}
    }
    function tryCatchAllNewContract(address _owner) public {
        try new Foo(_owner) returns (Foo foo_) {} catch {}
    }
}
"#,
    );
    prj.add_source(
        "FooTest.sol",
        r#"
import "./test.sol";
import {Bar, Foo} from "./Foo.sol";
interface Vm {
    function expectRevert() external;
}
contract FooTest is DSTest {
    Vm constant vm = Vm(HEVM_ADDRESS);
    function test_happy_foo_coverage() external {
        vm.expectRevert();
        Foo foo = new Foo(address(0));
        vm.expectRevert();
        foo = new Foo(address(1));
        foo = new Foo(address(2));
    }
    function test_happy_path_coverage() external {
        Bar bar = new Bar();
        bar.tryCatchNewContract(0x0000000000000000000000000000000000000002);
        bar.tryCatchAllNewContract(0x0000000000000000000000000000000000000002);
        bar.tryCatchExternalCall(1);
    }
    function test_coverage() external {
        Bar bar = new Bar();
        bar.tryCatchNewContract(0x0000000000000000000000000000000000000000);
        bar.tryCatchNewContract(0x0000000000000000000000000000000000000001);
        bar.tryCatchAllNewContract(0x0000000000000000000000000000000000000001);
        bar.tryCatchExternalCall(0);
    }
}
"#,
    );
    // Assert coverage not 100% for happy paths only.
    cmd.arg("coverage").args(["--mt", "happy"]).assert_success().stdout_eq(str![[r#"
...
╭-------------+----------------+----------------+--------------+---------------╮
| File        | % Lines        | % Statements   | % Branches   | % Funcs       |
+==============================================================================+
| src/Foo.sol | 77.27% (17/22) | 78.57% (11/14) | 66.67% (6/9) | 100.00% (6/6) |
|-------------+----------------+----------------+--------------+---------------|
| Total       | 77.27% (17/22) | 78.57% (11/14) | 66.67% (6/9) | 100.00% (6/6) |
╰-------------+----------------+----------------+--------------+---------------╯

"#]]);
    // Assert 100% branch coverage (including clauses without body).
    cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
...
╭-------------+-----------------+-----------------+---------------+---------------╮
| File        | % Lines         | % Statements    | % Branches    | % Funcs       |
+=================================================================================+
| src/Foo.sol | 100.00% (22/22) | 100.00% (14/14) | 100.00% (9/9) | 100.00% (6/6) |
|-------------+-----------------+-----------------+---------------+---------------|
| Total       | 100.00% (22/22) | 100.00% (14/14) | 100.00% (9/9) | 100.00% (6/6) |
╰-------------+-----------------+-----------------+---------------+---------------╯

"#]]);
});
// Coverage for inline assembly (Yul): storage `sload`, `switch`/`if`
// statements, `for` loops (with and without init blocks), literal
// assignments, and user-defined Yul functions must all be tracked.
forgetest!(yul, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "Foo.sol",
        r#"
contract Foo {
    uint256[] dynamicArray;
    function readDynamicArrayLength() public view returns (uint256 length) {
        assembly {
            length := sload(dynamicArray.slot)
        }
    }
    function switchAndIfStatements(uint256 n) public pure {
        uint256 y;
        assembly {
            switch n
            case 0 { y := 0 }
            case 1 { y := 1 }
            default { y := n }
            if y { y := 2 }
        }
    }
    function yulForLoop(uint256 n) public {
        uint256 y;
        assembly {
            for { let i := 0 } lt(i, n) { i := add(i, 1) } { y := add(y, 1) }
            let j := 0
            for {} lt(j, n) { j := add(j, 1) } { j := add(j, 2) }
        }
    }
    function hello() public pure returns (bool, uint256, bytes32) {
        bool x;
        uint256 y;
        bytes32 z;
        assembly {
            x := 1
            y := 0xa
            z := "Hello World!"
        }
        return (x, y, z);
    }
    function inlineFunction() public returns (uint256) {
        uint256 result;
        assembly {
            function sum(a, b) -> c {
                c := add(a, b)
            }
            function multiply(a, b) -> c {
                for { let i := 0 } lt(i, b) { i := add(i, 1) } { c := add(c, a) }
            }
            result := sum(2, 3)
            result := multiply(result, 5)
        }
        return result;
    }
}
"#,
    );
    prj.add_source(
        "FooTest.sol",
        r#"
import "./test.sol";
import {Foo} from "./Foo.sol";
contract FooTest is DSTest {
    function test_foo_coverage() external {
        Foo foo = new Foo();
        foo.switchAndIfStatements(0);
        foo.switchAndIfStatements(1);
        foo.switchAndIfStatements(2);
        foo.yulForLoop(2);
        foo.hello();
        foo.readDynamicArrayLength();
        foo.inlineFunction();
    }
}
"#,
    );
    // Hitting all switch cases and loop bodies above yields full coverage.
    cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
...
╭-------------+-----------------+-----------------+---------------+---------------╮
| File        | % Lines         | % Statements    | % Branches    | % Funcs       |
+=================================================================================+
| src/Foo.sol | 100.00% (30/30) | 100.00% (40/40) | 100.00% (1/1) | 100.00% (7/7) |
|-------------+-----------------+-----------------+---------------+---------------|
| Total       | 100.00% (30/30) | 100.00% (40/40) | 100.00% (1/1) | 100.00% (7/7) |
╰-------------+-----------------+-----------------+---------------+---------------╯

"#]]);
});
forgetest!(misc, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"Foo.sol",
r#"
struct Custom {
int256 f1;
}
contract A {
function f(Custom memory custom) public returns (int256) {
return custom.f1;
}
}
contract B {
uint256 public x;
constructor(uint256 a) payable {
x = a;
}
}
contract C {
function create() public {
B b = new B{value: 1}(2);
b = new B{value: 1}(2);
b = (new B){value: 1}(2);
}
}
contract D {
uint256 index;
function g() public {
(uint256 x,, uint256 y) = (7, true, 2);
(x, y) = (y, x);
(index,,) = (7, true, 2);
}
}
"#,
);
prj.add_source(
"FooTest.sol",
r#"
import "./test.sol";
import "./Foo.sol";
interface Vm {
function deal(address account, uint256 newBalance) external;
}
contract FooTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
function test_member_access_coverage() external {
A a = new A();
Custom memory cust = Custom(1);
a.f(cust);
}
function test_new_expression_coverage() external {
B b = new B(1);
b.x();
C c = new C();
vm.deal(address(c), 100 ether);
c.create();
}
function test_tuple_coverage() external {
D d = new D();
d.g();
}
}
"#,
);
cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
...
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/verify_bytecode.rs | crates/forge/tests/cli/verify_bytecode.rs | use foundry_compilers::artifacts::{BytecodeHash, EvmVersion};
use foundry_config::Config;
use foundry_test_utils::{
TestCommand, TestProject, forgetest_async,
rpc::{next_etherscan_api_key, next_http_archive_rpc_url},
util::OutputExt,
};
/// Shared driver for `forge verify-bytecode` tests.
///
/// Pulls the flattened, verified source for `addr` from the verifier via
/// `cast source`, writes it into the test project under `config`, and runs
/// `forge verify-bytecode`, asserting that creation and runtime bytecode
/// match with the statuses given in `expected_matches` (`(creation, runtime)`).
#[expect(clippy::too_many_arguments)]
fn test_verify_bytecode(
    prj: TestProject,
    mut cmd: TestCommand,
    addr: &str,
    contract_name: &str,
    constructor_args: Option<Vec<&str>>,
    config: Config,
    verifier: &str,
    verifier_url: &str,
    expected_matches: (&str, &str),
) {
    let etherscan_key = next_etherscan_api_key();
    let rpc_url = next_http_archive_rpc_url();
    // fetch and flatten source code
    let source_code = cmd
        .cast_fuse()
        .args(["source", addr, "--flatten", "--etherscan-api-key", &etherscan_key])
        .assert_success()
        .get_output()
        .stdout_lossy();
    prj.add_source(contract_name, &source_code);
    prj.write_config(config);
    // NOTE(review): this re-fetch shadows the key obtained above — presumably
    // deliberate, to rotate keys between the two API calls and dodge rate
    // limits; confirm before collapsing into a single fetch.
    let etherscan_key = next_etherscan_api_key();
    let mut args = vec![
        "verify-bytecode",
        addr,
        contract_name,
        "--etherscan-api-key",
        &etherscan_key,
        "--verifier",
        verifier,
        "--verifier-url",
        verifier_url,
        "--rpc-url",
        &rpc_url,
    ];
    if let Some(constructor_args) = constructor_args {
        args.push("--constructor-args");
        args.extend(constructor_args.iter());
    }
    let output = cmd.forge_fuse().args(args).assert_success().get_output().stdout_lossy();
    // Include the captured output in the panic message so flaky CI failures
    // are debuggable from the log alone.
    assert!(
        output
            .contains(format!("Creation code matched with status {}", expected_matches.0).as_str()),
        "creation code did not match with status {}; output:\n{output}",
        expected_matches.0
    );
    assert!(
        output
            .contains(format!("Runtime code matched with status {}", expected_matches.1).as_str()),
        "runtime code did not match with status {}; output:\n{output}",
        expected_matches.1
    );
}
/// Like [`test_verify_bytecode`], but passes `--ignore` and asserts that the
/// ignored artifact (`"creation"` or `"runtime"`) is *absent* from the output
/// while the other artifact is still reported with its expected match status.
#[expect(clippy::too_many_arguments)]
fn test_verify_bytecode_with_ignore(
    prj: TestProject,
    mut cmd: TestCommand,
    addr: &str,
    contract_name: &str,
    config: Config,
    verifier: &str,
    verifier_url: &str,
    expected_matches: (&str, &str),
    ignore: &str,
    chain: &str,
) {
    let etherscan_key = next_etherscan_api_key();
    let rpc_url = next_http_archive_rpc_url();
    // fetch and flatten source code
    let source_code = cmd
        .cast_fuse()
        .args([
            "source",
            addr,
            "--flatten",
            "--etherscan-api-key",
            &etherscan_key,
            "--chain",
            chain,
        ])
        .assert_success()
        .get_output()
        .stdout_lossy();
    prj.add_source(contract_name, &source_code);
    prj.write_config(config);
    let output = cmd
        .forge_fuse()
        .args([
            "verify-bytecode",
            addr,
            contract_name,
            "--etherscan-api-key",
            &etherscan_key,
            "--verifier",
            verifier,
            "--verifier-url",
            verifier_url,
            "--rpc-url",
            &rpc_url,
            "--ignore",
            ignore,
        ])
        .assert_success()
        .get_output()
        .stdout_lossy();
    let creation_msg = format!("Creation code matched with status {}", expected_matches.0);
    let runtime_msg = format!("Runtime code matched with status {}", expected_matches.1);
    // Each side must be reported iff it was not the ignored one. This replaces
    // the previous duplicated if/else assert blocks with the equivalent
    // boolean comparison, and adds the output to the failure message.
    assert_eq!(
        output.contains(creation_msg.as_str()),
        ignore != "creation",
        "unexpected creation-code result (ignore = {ignore}); output:\n{output}"
    );
    assert_eq!(
        output.contains(runtime_msg.as_str()),
        ignore != "runtime",
        "unexpected runtime-code result (ignore = {ignore}); output:\n{output}"
    );
}
// Contract compiled without CBOR metadata (`cbor_metadata: false`,
// `bytecode_hash: None`), so both artifacts verify with a "partial" match.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_verify_bytecode_no_metadata,
    |prj, cmd| {
        test_verify_bytecode(
            prj,
            cmd,
            "0xba2492e52F45651B60B8B38d4Ea5E2390C64Ffb1",
            "SystemConfig",
            None,
            Config {
                evm_version: EvmVersion::London,
                optimizer_runs: Some(999999),
                optimizer: Some(true),
                cbor_metadata: false,
                bytecode_hash: BytecodeHash::None,
                ..Default::default()
            },
            "etherscan",
            "https://api.etherscan.io/v2/api?chainid=1",
            ("partial", "partial"),
        );
    }
);
// Contract built with default (metadata-appending) settings; verified against
// Etherscan and still expected to yield "partial" matches.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_verify_bytecode_with_metadata,
    |prj, cmd| {
        test_verify_bytecode(
            prj,
            cmd,
            "0xb8901acb165ed027e32754e0ffe830802919727f",
            "L1_ETH_Bridge",
            None,
            Config {
                evm_version: EvmVersion::Paris,
                optimizer_runs: Some(50000),
                optimizer: Some(true),
                ..Default::default()
            },
            "etherscan",
            "https://api.etherscan.io/v2/api?chainid=1",
            ("partial", "partial"),
        );
    }
);
// Test non-CREATE2 deployed contract with blockscout as the verifier backend.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_verify_bytecode_with_blockscout,
    |prj, cmd| {
        test_verify_bytecode(
            prj,
            cmd,
            "0x70f44C13944d49a236E3cD7a94f48f5daB6C619b",
            "StrategyManager",
            None,
            Config {
                evm_version: EvmVersion::London,
                optimizer: Some(true),
                optimizer_runs: Some(200),
                ..Default::default()
            },
            "blockscout",
            "https://eth.blockscout.com/api",
            ("partial", "partial"),
        );
    }
);
// Test CREATE2 deployed contract with blockscout as the verifier backend.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_vb_create2_with_blockscout,
    |prj, cmd| {
        test_verify_bytecode(
            prj,
            cmd,
            "0xba2492e52F45651B60B8B38d4Ea5E2390C64Ffb1",
            "SystemConfig",
            None,
            Config {
                evm_version: EvmVersion::London,
                optimizer_runs: Some(999999),
                optimizer: Some(true),
                cbor_metadata: false,
                bytecode_hash: BytecodeHash::None,
                ..Default::default()
            },
            "blockscout",
            "https://eth.blockscout.com/api",
            ("partial", "partial"),
        );
    }
);
// Test `--constructor-args`: the three addresses below are forwarded verbatim
// to `forge verify-bytecode` so creation-code comparison can account for them.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_verify_bytecode_with_constructor_args,
    |prj, cmd| {
        let constructor_args = vec![
            "0x39053D51B77DC0d36036Fc1fCc8Cb819df8Ef37A",
            "0x91E677b07F7AF907ec9a428aafA9fc14a0d3A338",
            "0xD92145c07f8Ed1D392c1B88017934E301CC1c3Cd",
        ];
        test_verify_bytecode(
            prj,
            cmd,
            "0x70f44C13944d49a236E3cD7a94f48f5daB6C619b",
            "StrategyManager",
            Some(constructor_args),
            Config {
                evm_version: EvmVersion::London,
                optimizer: Some(true),
                optimizer_runs: Some(200),
                ..Default::default()
            },
            "etherscan",
            "https://api.etherscan.io/v2/api?chainid=1",
            ("partial", "partial"),
        );
    }
);
// `--ignore` tests
// With `--ignore creation`, only the runtime bytecode should be reported.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_ignore_creation,
    |prj, cmd| {
        test_verify_bytecode_with_ignore(
            prj,
            cmd,
            "0xba2492e52F45651B60B8B38d4Ea5E2390C64Ffb1",
            "SystemConfig",
            Config {
                evm_version: EvmVersion::London,
                optimizer_runs: Some(999999),
                optimizer: Some(true),
                cbor_metadata: false,
                bytecode_hash: BytecodeHash::None,
                ..Default::default()
            },
            "etherscan",
            "https://api.etherscan.io/v2/api?chainid=1",
            ("ignored", "partial"),
            "creation",
            "1",
        );
    }
);
// With `--ignore runtime`, only the creation bytecode should be reported.
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_ignore_runtime,
    |prj, cmd| {
        test_verify_bytecode_with_ignore(
            prj,
            cmd,
            "0xba2492e52F45651B60B8B38d4Ea5E2390C64Ffb1",
            "SystemConfig",
            Config {
                evm_version: EvmVersion::London,
                optimizer_runs: Some(999999),
                optimizer: Some(true),
                cbor_metadata: false,
                bytecode_hash: BytecodeHash::None,
                ..Default::default()
            },
            "etherscan",
            "https://api.etherscan.io/v2/api?chainid=1",
            ("partial", "ignored"),
            "runtime",
            "1",
        );
    }
);
// Test that verification fails when source code doesn't match deployed bytecode
forgetest_async!(
    #[ignore = "flaky due to rate limits"]
    can_verify_bytecode_fails_on_source_mismatch,
    |prj, cmd| {
        let etherscan_key = next_etherscan_api_key();
        let rpc_url = next_http_archive_rpc_url();
        // Fetch the real, verified source code so the initial build succeeds.
        let real_source = cmd
            .cast_fuse()
            .args([
                "source",
                "0xba2492e52F45651B60B8B38d4Ea5E2390C64Ffb1",
                "--flatten",
                "--etherscan-api-key",
                &etherscan_key,
            ])
            .assert_success()
            .get_output()
            .stdout_lossy();
        prj.add_source("SystemConfig", &real_source);
        prj.write_config(Config {
            evm_version: EvmVersion::London,
            optimizer_runs: Some(999999),
            optimizer: Some(true),
            cbor_metadata: false,
            bytecode_hash: BytecodeHash::None,
            ..Default::default()
        });
        // Build once with correct source (creates cache)
        cmd.forge_fuse().arg("build").assert_success();
        let source_code = r#"
        contract SystemConfig {
            uint256 public constant MODIFIED_VALUE = 999;
            function someFunction() public pure returns (uint256) {
                return MODIFIED_VALUE;
            }
        }
        "#;
        // Now replace with different incorrect source code
        prj.add_source("SystemConfig", source_code);
        let args = vec![
            "verify-bytecode",
            "0xba2492e52F45651B60B8B38d4Ea5E2390C64Ffb1",
            "SystemConfig",
            "--etherscan-api-key",
            &etherscan_key,
            "--verifier",
            "etherscan",
            "--verifier-url",
            "https://api.etherscan.io/v2/api?chainid=1",
            "--rpc-url",
            &rpc_url,
        ];
        // The command still exits successfully; the mismatch is reported on stderr.
        let output = cmd.forge_fuse().args(args).assert_success().get_output().stderr_lossy();
        // Verify that bytecode does NOT match (recompiled with incorrect source).
        // `contains` accepts a `&str` pattern directly — the previous
        // `"…".to_string().as_str()` allocated for nothing.
        assert!(
            output.contains("Error: Creation code did not match"),
            "expected creation mismatch error; stderr:\n{output}"
        );
        assert!(
            output.contains("Error: Runtime code did not match"),
            "expected runtime mismatch error; stderr:\n{output}"
        );
    }
);
// Test predeploy contracts
// TODO: Add test utils for base such as basescan keys and alchemy keys.
// WETH9 Predeploy
// forgetest_async!(can_verify_predeploys, |prj, cmd| {
// test_verify_bytecode_with_ignore(
// prj,
// cmd,
// "0x4200000000000000000000000000000000000006",
// "WETH9",
// Config {
// evm_version: EvmVersion::default(),
// optimizer: Some(true),
// optimizer_runs: 10000,
// cbor_metadata: true,
// bytecode_hash: BytecodeHash::Bzzr1,
// ..Default::default()
// },
// "etherscan",
// "https://api.basescan.org/api",
// ("ignored", "partial"),
// "creation",
// "base",
// );
// });
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/svm.rs | crates/forge/tests/cli/svm.rs | //! svm sanity checks
use semver::Version;
use svm::Platform;
/// The latest Solc release.
///
/// Solc to Foundry release process:
/// 1. new solc release
/// 2. svm updated with all build info
/// 3. svm bumped in foundry-compilers
/// 4. foundry-compilers update with any breaking changes
/// 5. upgrade the `LATEST_SOLC`
///
/// The `ensure_svm_releases` tests below fail for any platform whose build
/// info for this version has not yet been published by svm.
const LATEST_SOLC: Version = Version::new(0, 8, 30);
/// Generates one multi-threaded tokio test per `name => Platform` pair, each
/// asserting that svm's release index for that platform contains
/// [`LATEST_SOLC`].
macro_rules! ensure_svm_releases {
    ($($test:ident => $platform:ident),* $(,)?) => {$(
        #[tokio::test(flavor = "multi_thread")]
        async fn $test() {
            ensure_latest_release(Platform::$platform).await
        }
    )*};
}
/// Fetches svm's release index for `platform` and asserts that it contains
/// build info for [`LATEST_SOLC`].
async fn ensure_latest_release(platform: Platform) {
    let all_releases = match svm::all_releases(platform).await {
        Ok(releases) => releases,
        Err(err) => panic!("Could not fetch releases for {platform}: {err:?}"),
    };
    let has_latest = all_releases.releases.contains_key(&LATEST_SOLC);
    assert!(has_latest, "platform {platform:?} is missing solc info for v{LATEST_SOLC}");
}
// ensures all platforms have the latest solc release version available in svm
ensure_svm_releases!(
    test_svm_releases_linux_amd64 => LinuxAmd64,
    test_svm_releases_linux_aarch64 => LinuxAarch64,
    test_svm_releases_macos_amd64 => MacOsAmd64,
    test_svm_releases_macos_aarch64 => MacOsAarch64,
    test_svm_releases_windows_amd64 => WindowsAmd64
);
// Ensures we can always test with the latest solc build
forgetest_init!(can_test_with_latest_solc, |prj, cmd| {
    prj.initialize_default_contracts();
    // Pin this test's pragma to `LATEST_SOLC` so the run is forced to compile
    // (and therefore download) that exact compiler version.
    prj.add_test(
        "Counter.2.t.sol",
        &format!(
            r#"
pragma solidity ={LATEST_SOLC};
import "forge-std/Test.sol";
contract CounterTest is Test {{
    function testAssert() public {{
        assert(true);
    }}
}}
   "#
        ),
    );
    // we need to remove the pinned solc version for this
    prj.update_config(|c| {
        c.solc.take();
    });
    cmd.arg("test").assert_success().stdout_eq(str![[r#"
...
Ran 1 test for test/Counter.2.t.sol:CounterTest
[PASS] testAssert() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
...
Ran 2 tests for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS])
[PASS] test_Increment() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]

Ran 2 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests)

"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/version.rs | crates/forge/tests/cli/version.rs | use foundry_test_utils::{forgetest, str};
// `forge -V` prints the single-line short version string.
forgetest!(print_short_version, |_prj, cmd| {
    cmd.arg("-V").assert_success().stdout_eq(str![[r#"
forge [..]-[..] ([..] [..])
"#]]);
});
// `forge --version` prints the multi-line build information block.
forgetest!(print_long_version, |_prj, cmd| {
    cmd.arg("--version").assert_success().stdout_eq(str![[r#"
forge Version: [..]
Commit SHA: [..]
Build Timestamp: [..]
Build Profile: [..]
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/soldeer.rs | crates/forge/tests/cli/soldeer.rs | //! Contains various tests related to `forge soldeer`.
use std::{fs, path::Path};
use foundry_test_utils::forgesoldeer;
// Installs a registry dependency and verifies it is materialized on disk,
// recorded in `soldeer.lock`, and appended under `[dependencies]` in
// `foundry.toml`.
forgesoldeer!(install_dependency, |prj, cmd| {
    let command = "install";
    let dependency = "forge-std~1.8.1";
    let mut foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_contents).unwrap();
    cmd.arg("soldeer").args([command, dependency]).assert_success();
    // Making sure the path was created to the dependency and that foundry.toml exists
    // meaning that the dependencies were installed correctly
    let path_dep_forge =
        prj.root().join("dependencies").join("forge-std-1.8.1").join("foundry.toml");
    assert!(path_dep_forge.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the foundry contents are the right ones
    // (install appends `forge-std = "1.8.1"` under `[dependencies]`)
    foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
forge-std = "1.8.1"
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#;
    assert_data_eq!(read_file_to_string(&foundry_file), foundry_contents);
});
// Installs a dependency from a git URL (`--git`); the resolved HEAD revision
// is pinned in the rewritten `foundry.toml` entry.
forgesoldeer!(install_dependency_git, |prj, cmd| {
    let command = "install";
    let dependency = "forge-std~1.8.1";
    let git_arg = "--git";
    let git = "https://gitlab.com/mario4582928/Mario.git";
    let mut foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_contents).unwrap();
    cmd.arg("soldeer").args([command, dependency, git_arg, git]).assert_success();
    // Making sure the path was created to the dependency and that README.md exists
    // meaning that the dependencies were installed correctly
    let path_dep_forge = prj.root().join("dependencies").join("forge-std-1.8.1").join("README.md");
    assert!(path_dep_forge.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the foundry contents are the right ones
    foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
forge-std = { version = "1.8.1", git = "https://gitlab.com/mario4582928/Mario.git", rev = "22868f426bd4dd0e682b5ec5f9bd55507664240c" }
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#;
    assert_data_eq!(read_file_to_string(&foundry_file), foundry_contents);
});
// Installs a git dependency pinned to an explicit commit (`--rev`); the
// checkout is verified via a file that only exists at that commit.
forgesoldeer!(install_dependency_git_commit, |prj, cmd| {
    let command = "install";
    let dependency = "forge-std~1.8.1";
    let git_arg = "--git";
    let git = "https://gitlab.com/mario4582928/Mario.git";
    let rev_flag = "--rev";
    let commit = "7a0663eaf7488732f39550be655bad6694974cb3";
    let mut foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_contents).unwrap();
    cmd.arg("soldeer").args([command, dependency, git_arg, git, rev_flag, commit]).assert_success();
    // Making sure the path was created to the dependency and that README.md exists
    // meaning that the dependencies were installed correctly
    // (JustATest2.md is only present at the pinned commit)
    let path_dep_forge =
        prj.root().join("dependencies").join("forge-std-1.8.1").join("JustATest2.md");
    assert!(path_dep_forge.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the foundry contents are the right ones
    foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
forge-std = { version = "1.8.1", git = "https://gitlab.com/mario4582928/Mario.git", rev = "7a0663eaf7488732f39550be655bad6694974cb3" }
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
"#;
    assert_data_eq!(read_file_to_string(&foundry_file), foundry_contents);
});
// `soldeer update` installs every dependency flavor declared in
// `foundry.toml` (zip URL, registry version, git rev, git tag/branch) and
// leaves the config file unchanged.
forgesoldeer!(update_dependencies, |prj, cmd| {
    let command = "update";
    // We need to write this into the foundry.toml to make the update install the dependency
    // NOTE(review): `mario-custom-branch` uses `tag = "custom-branch"` — looks
    // like it was meant to be `branch = ...`; confirm soldeer's semantics
    // before changing.
    let foundry_updates = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[dependencies]
"@tt" = {version = "1.6.1", url = "https://soldeer-revisions.s3.amazonaws.com/@openzeppelin-contracts/3_3_0-rc_2_22-01-2024_13:12:57_contracts.zip"}
forge-std = { version = "1.8.1" }
solmate = "6.7.0"
mario = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", rev = "22868f426bd4dd0e682b5ec5f9bd55507664240c" }
mario-custom-tag = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", tag = "custom-tag" }
mario-custom-branch = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", tag = "custom-branch" }
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_updates).unwrap();
    cmd.arg("soldeer").arg(command).assert_success();
    // Making sure the path was created to the dependency and that foundry.toml exists
    // meaning that the dependencies were installed correctly
    let dep1 = prj.root().join("dependencies").join("@tt-1.6.1");
    let dep2 = prj.root().join("dependencies").join("forge-std-1.8.1");
    let dep3 = prj.root().join("dependencies").join("mario-1.0");
    let dep4 = prj.root().join("dependencies").join("solmate-6.7.0");
    let dep5 = prj.root().join("dependencies").join("mario-custom-tag-1.0");
    let dep6 = prj.root().join("dependencies").join("mario-custom-branch-1.0");
    assert!(dep1.exists());
    assert!(dep2.exists());
    assert!(dep3.exists());
    assert!(dep4.exists());
    assert!(dep5.exists());
    assert!(dep6.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    // assert_data_eq!(lock_contents, read_file_to_string(&path_lock_file));
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the foundry contents are the right ones
    // (update must not rewrite the config)
    let foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[dependencies]
"@tt" = {version = "1.6.1", url = "https://soldeer-revisions.s3.amazonaws.com/@openzeppelin-contracts/3_3_0-rc_2_22-01-2024_13:12:57_contracts.zip"}
forge-std = { version = "1.8.1" }
solmate = "6.7.0"
mario = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", rev = "22868f426bd4dd0e682b5ec5f9bd55507664240c" }
mario-custom-tag = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", tag = "custom-tag" }
mario-custom-branch = { version = "1.0", git = "https://gitlab.com/mario4582928/Mario.git", tag = "custom-branch" }
"#;
    assert_data_eq!(read_file_to_string(&foundry_file), foundry_contents);
});
// `soldeer update` with the shorthand `name = "version"` dependency syntax.
forgesoldeer!(update_dependencies_simple_version, |prj, cmd| {
    let command = "update";
    // We need to write this into the foundry.toml to make the update install the dependency, this
    // is the simplified version of version specification
    let foundry_updates = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[dependencies]
forge-std = "1.8.1"
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_updates).unwrap();
    cmd.arg("soldeer").arg(command).assert_success();
    // Making sure the path was created to the dependency and that foundry.toml exists
    // meaning that the dependencies were installed correctly
    let path_dep_forge =
        prj.root().join("dependencies").join("forge-std-1.8.1").join("foundry.toml");
    assert!(path_dep_forge.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the foundry contents are the right ones
    // (update must not rewrite the config)
    let foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[dependencies]
forge-std = "1.8.1"
"#;
    assert_data_eq!(read_file_to_string(&foundry_file), foundry_contents);
});
// With `remappings_location = "config"`, installing a dependency must write a
// prefixed remapping into the `remappings` array inside `foundry.toml`.
forgesoldeer!(install_dependency_with_remappings_config, |prj, cmd| {
    let command = "install";
    let dependency = "forge-std~1.8.1";
    let foundry_updates = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[soldeer]
remappings_generate = true
remappings_prefix = "@custom-f@"
remappings_location = "config"
remappings_regenerate = true
[dependencies]
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_updates).unwrap();
    cmd.arg("soldeer").args([command, dependency]).assert_success();
    // Making sure the path was created to the dependency and that foundry.toml exists
    // meaning that the dependencies were installed correctly
    let path_dep_forge =
        prj.root().join("dependencies").join("forge-std-1.8.1").join("foundry.toml");
    assert!(path_dep_forge.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the foundry contents are the right ones
    // (note the generated, prefixed `remappings` entry)
    let foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
remappings = ["@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[soldeer]
remappings_generate = true
remappings_prefix = "@custom-f@"
remappings_location = "config"
remappings_regenerate = true
[dependencies]
forge-std = "1.8.1"
"#;
    assert_data_eq!(read_file_to_string(&foundry_file), foundry_contents);
});
// With `remappings_location = "txt"`, installing a dependency must write the
// prefixed remapping into `remappings.txt` instead of `foundry.toml`.
forgesoldeer!(install_dependency_with_remappings_txt, |prj, cmd| {
    let command = "install";
    let dependency = "forge-std~1.8.1";
    let foundry_updates = r#"
[dependencies]
[soldeer]
remappings_generate = true
remappings_prefix = "@custom-f@"
remappings_location = "txt"
remappings_regenerate = true
"#;
    let foundry_file = prj.root().join("foundry.toml");
    fs::write(&foundry_file, foundry_updates).unwrap();
    cmd.arg("soldeer").args([command, dependency]).assert_success();
    // Making sure the path was created to the dependency and that foundry.toml exists
    // meaning that the dependencies were installed correctly
    let path_dep_forge =
        prj.root().join("dependencies").join("forge-std-1.8.1").join("foundry.toml");
    assert!(path_dep_forge.exists());
    // Making sure the lock contents are the right ones
    let path_lock_file = prj.root().join("soldeer.lock");
    let actual_lock_contents = read_file_to_string(&path_lock_file);
    assert!(actual_lock_contents.contains("forge-std"));
    // Making sure the remappings.txt contents are the right ones
    let remappings_content = r#"@custom-f@forge-std-1.8.1/=dependencies/forge-std-1.8.1/
"#;
    let remappings_file = prj.root().join("remappings.txt");
    assert_data_eq!(read_file_to_string(&remappings_file), remappings_content);
});
forgesoldeer!(login, |prj, cmd| {
let command = "login";
let _ = cmd.arg("soldeer").arg(command).assert_failure();
});
forgesoldeer!(clean, |prj, cmd| {
let dependency = "forge-std~1.8.1";
let foundry_contents = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
[dependencies]
"#;
let foundry_file = prj.root().join("foundry.toml");
fs::write(&foundry_file, foundry_contents).unwrap();
cmd.args(["soldeer", "install", dependency]).assert_success();
cmd.forge_fuse(); // reset command
// Making sure the path was created to the dependency and that foundry.toml exists
// meaning that the dependencies were installed correctly
let path_dep_forge =
prj.root().join("dependencies").join("forge-std-1.8.1").join("foundry.toml");
assert!(path_dep_forge.exists());
let command = "clean";
cmd.arg("soldeer").args([command]).assert_success();
// Dependencies should have been removed from disk
assert!(!prj.root().join("dependencies").exists());
});
forgesoldeer!(detect_project_root, |prj, cmd| {
let command = "update";
let foundry_updates = r#"[profile.default]
src = "src"
out = "out"
libs = ["lib", "dependencies"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options
[dependencies]
forge-std = "1.8.1"
"#;
let foundry_file = prj.root().join("foundry.toml");
fs::write(&foundry_file, foundry_updates).unwrap();
// run command from sub-directory
cmd.set_current_dir(prj.root().join("src"));
cmd.arg("soldeer").arg(command).assert_success();
// Making sure the path was created to the dependency and that foundry.toml exists
// meaning that the dependencies were installed correctly
let path_dep_forge =
prj.root().join("dependencies").join("forge-std-1.8.1").join("foundry.toml");
assert!(path_dep_forge.exists());
});
fn read_file_to_string(path: &Path) -> String {
let contents: String = fs::read_to_string(path).unwrap_or_default();
contents
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/doc.rs | crates/forge/tests/cli/doc.rs | use foundry_test_utils::util::{RemoteProject, setup_forge_remote};
#[test]
fn can_generate_solmate_docs() {
let (prj, _) =
setup_forge_remote(RemoteProject::new("transmissions11/solmate").set_build(false));
prj.forge_command().args(["doc", "--build"]).assert_success();
}
// Test that overloaded functions in interfaces inherit the correct NatSpec comments
// fixes <https://github.com/foundry-rs/foundry/issues/11823>
forgetest_init!(can_generate_docs_for_overloaded_functions, |prj, cmd| {
prj.add_source(
"IExample.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
interface IExample {
/// @notice Process a single address
/// @param addr The address to process
function process(address addr) external;
/// @notice Process multiple addresses
/// @param addrs The addresses to process
function process(address[] calldata addrs) external;
/// @notice Process an address with a value
/// @param addr The address to process
/// @param value The value to use
function process(address addr, uint256 value) external;
}
"#,
);
prj.add_source(
"Example.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import "./IExample.sol";
contract Example is IExample {
/// @inheritdoc IExample
function process(address addr) external {
// Implementation for single address
}
/// @inheritdoc IExample
function process(address[] calldata addrs) external {
// Implementation for multiple addresses
}
/// @inheritdoc IExample
function process(address addr, uint256 value) external {
// Implementation for address with value
}
}
"#,
);
cmd.args(["doc", "--build"]).assert_success();
let doc_path = prj.root().join("docs/src/src/Example.sol/contract.Example.md");
let content = std::fs::read_to_string(&doc_path).unwrap();
assert!(content.contains("Process a single address"));
assert!(content.contains("Process multiple addresses"));
assert!(content.contains("Process an address with a value"));
});
// Test that hyperlinks use relative paths, not absolute paths
// fixes <https://github.com/foundry-rs/foundry/issues/12361>
forgetest_init!(hyperlinks_use_relative_paths, |prj, cmd| {
prj.add_source(
"IBase.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
interface IBase {
function baseFunction() external;
}
"#,
);
prj.add_source(
"Derived.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import "./IBase.sol";
/// @dev Inherits: {IBase}
contract Derived is IBase {
function baseFunction() external override {}
}
"#,
);
cmd.args(["doc", "--build"]).assert_success();
let doc_path = prj.root().join("docs/src/src/Derived.sol/contract.Derived.md");
let content = std::fs::read_to_string(&doc_path).unwrap();
assert!(
content.contains("[IBase](/src/IBase.sol/interface.IBase.md")
|| content.contains("[IBase](\\src\\IBase.sol\\interface.IBase.md"),
"Hyperlink should use relative path but found: {:?}",
content.lines().find(|line| line.contains("[IBase]")).unwrap_or("not found")
);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/fmt_integration.rs | crates/forge/tests/cli/fmt_integration.rs | use foundry_test_utils::util::ExtTester;
/// Test `forge fmt` immutability.
/// TODO: make sure original fmt is not changed after projects format and rev available.
macro_rules! fmt_test {
($name:ident, $org:expr, $repo:expr, $commit:expr) => {
#[test]
fn $name() {
let (_, mut cmd) = ExtTester::new($org, $repo, $commit).setup_forge_prj(false);
cmd.arg("fmt").assert_success();
cmd.arg("--check").assert_success();
}
};
}
fmt_test!(fmt_ithaca_account, "ithacaxyz", "account", "213c04ee1808784c18609607d85feba7730538fd");
fmt_test!(fmt_univ4_core, "Uniswap", "v4-core", "59d3ecf53afa9264a16bba0e38f4c5d2231f80bc");
fmt_test!(
fmt_evk_periphery,
"euler-xyz",
"evk-periphery",
"e41f2b9b7ed677ca03ff7bd7221a4e2fdd55504f"
);
fmt_test!(fmt_0x_settler, "0xProject", "0x-settler", "a388c8251ab6c4bedce1641b31027d7b1136daef");
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/lint.rs | crates/forge/tests/cli/lint.rs | use forge_lint::{linter::Lint, sol::med::REGISTERED_LINTS};
use foundry_config::{DenyLevel, LintSeverity, LinterConfig};
mod geiger;
const CONTRACT: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
struct _PascalCaseInfo { uint256 a; }
uint256 constant screaming_snake_case_info = 0;
contract ContractWithLints {
uint256 VARIABLE_MIXED_CASE_INFO;
function incorrectShiftHigh() public {
uint256 localValue = 50;
uint256 result = 8 >> localValue;
}
function divideBeforeMultiplyMedium() public {
(1 / 2) * 3;
}
function unoptimizedHashGas(uint256 a, uint256 b) public view {
keccak256(abi.encodePacked(a, b));
}
function FUNCTION_MIXED_CASE_INFO() public {}
}
"#;
const OTHER_CONTRACT: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
// forge-lint: disable-next-line
import { ContractWithLints } from "./ContractWithLints.sol";
contract OtherContractWithLints {
function functionMIXEDCaseInfo() public {}
}
"#;
const ONLY_IMPORTS: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
// forge-lint: disable-next-line
import { ContractWithLints } from "./ContractWithLints.sol";
import { _PascalCaseInfo } from "./ContractWithLints.sol";
import "./ContractWithLints.sol";
contract Dummy {
bool foo;
}
"#;
const COUNTER_A: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract CounterA {
uint256 public CounterA_Fail_Lint;
}
"#;
const COUNTER_B: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract CounterB {
uint256 public CounterB_Fail_Lint;
}
"#;
const COUNTER_WITH_CONST: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.13;
uint256 constant MAX = 1000000;
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#;
const COUNTER_TEST_WITH_CONST: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.13;
import { Counter, MAX } from "../src/Counter.sol";
contract CounterTest {
Counter public counter;
function setUp() public {
counter = new Counter();
}
function testFuzz_setNumber(uint256[MAX] calldata numbers) public {
for (uint256 i = 0; i < numbers.length; ++i) {
counter.setNumber(numbers[i]);
}
}
}
"#;
forgetest!(can_use_config, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContractWithLints", OTHER_CONTRACT);
// Check config for `severity` and `exclude`
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::High, LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
cmd.arg("lint").assert_success().stderr_eq(str![[r#"
warning[divide-before-multiply]: multiplication should occur before division to avoid loss of precision
[FILE]:16:9
|
16 | (1 / 2) * 3;
| ^^^^^^^^^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#divide-before-multiply
"#]]);
});
forgetest!(can_use_config_ignore, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContract", OTHER_CONTRACT);
// Check config for `ignore`
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![],
exclude_lints: vec![],
ignore: vec!["src/ContractWithLints.sol".into()],
lint_on_build: true,
..Default::default()
};
});
cmd.arg("lint").assert_success().stderr_eq(str![[r#"
note[mixed-case-function]: function names should use mixedCase
[FILE]:9:14
|
9 | function functionMIXEDCaseInfo() public {}
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `functionMixedCaseInfo`
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#mixed-case-function
"#]]);
// Check config again, ignoring all files
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![],
exclude_lints: vec![],
ignore: vec!["src/ContractWithLints.sol".into(), "src/OtherContract.sol".into()],
lint_on_build: true,
..Default::default()
};
});
cmd.arg("lint").assert_success().stderr_eq(str![[""]]);
});
forgetest!(can_use_config_mixed_case_exception, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContract", OTHER_CONTRACT);
// Check config for `ignore`
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![],
exclude_lints: vec![],
ignore: vec!["src/ContractWithLints.sol".into()],
lint_on_build: true,
mixed_case_exceptions: vec!["MIXED".to_string()],
};
});
cmd.arg("lint").assert_success().stderr_eq(str![[""]]);
});
forgetest!(can_override_config_severity, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContractWithLints", OTHER_CONTRACT);
// Override severity
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::High, LintSeverity::Med],
exclude_lints: vec![],
ignore: vec!["src/ContractWithLints.sol".into()],
lint_on_build: true,
..Default::default()
};
});
cmd.arg("lint").args(["--severity", "info"]).assert_success().stderr_eq(str![[r#"
note[mixed-case-function]: function names should use mixedCase
[FILE]:9:14
|
9 | function functionMIXEDCaseInfo() public {}
| ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `functionMixedCaseInfo`
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#mixed-case-function
"#]]);
});
forgetest!(can_override_config_path, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContractWithLints", OTHER_CONTRACT);
// Override excluded files
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::High, LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec!["src/ContractWithLints.sol".into()],
lint_on_build: true,
..Default::default()
};
});
cmd.arg("lint").arg("src/ContractWithLints.sol").assert_success().stderr_eq(str![[r#"
warning[divide-before-multiply]: multiplication should occur before division to avoid loss of precision
[FILE]:16:9
|
16 | (1 / 2) * 3;
| ^^^^^^^^^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#divide-before-multiply
"#]]);
});
forgetest!(can_override_config_lint, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContractWithLints", OTHER_CONTRACT);
// Override excluded lints
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::High, LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
cmd.arg("lint").args(["--only-lint", "incorrect-shift"]).assert_success().stderr_eq(str![[
r#"
warning[incorrect-shift]: the order of args in a shift operation is incorrect
[FILE]:13:26
|
13 | uint256 result = 8 >> localValue;
| ^^^^^^^^^^^^^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#incorrect-shift
"#
]]);
});
forgetest!(build_runs_linter_by_default, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
// Configure linter to show only medium severity lints
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
// Run forge build and expect linting output before compilation
cmd.arg("build").assert_success().stderr_eq(str![[r#"
warning[divide-before-multiply]: multiplication should occur before division to avoid loss of precision
[FILE]:16:9
|
16 | (1 / 2) * 3;
| ^^^^^^^^^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#divide-before-multiply
"#]]).stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
Warning (2072): Unused local variable.
[FILE]:13:9:
|
13 | uint256 result = 8 >> localValue;
| ^^^^^^^^^^^^^^
Warning (6133): Statement has no effect.
[FILE]:16:9:
|
16 | (1 / 2) * 3;
| ^^^^^^^^^^^
Warning (2018): Function state mutability can be restricted to pure
[FILE]:11:5:
|
11 | function incorrectShiftHigh() public {
| ^ (Relevant source part starts here and spans across multiple lines).
Warning (2018): Function state mutability can be restricted to pure
[FILE]:15:5:
|
15 | function divideBeforeMultiplyMedium() public {
| ^ (Relevant source part starts here and spans across multiple lines).
Warning (2018): Function state mutability can be restricted to pure
[FILE]:18:5:
|
18 | function unoptimizedHashGas(uint256 a, uint256 b) public view {
| ^ (Relevant source part starts here and spans across multiple lines).
"#]]);
});
forgetest!(build_respects_quiet_flag_for_linting, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
// Configure linter to show medium severity lints
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
// Run forge build with --quiet flag - should not show linting output
cmd.arg("build").arg("--quiet").assert_success().stderr_eq(str![[""]]).stdout_eq(str![[""]]);
});
forgetest!(build_with_json_uses_json_linter_output, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
// Configure linter to show medium severity lints
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
// Run forge build with --json flag - should use JSON formatter for linting
let output = cmd.arg("build").arg("--json").assert_success();
// Should contain JSON linting output
let stderr = String::from_utf8_lossy(&output.get_output().stderr);
assert!(stderr.contains("\"code\""));
assert!(stderr.contains("divide-before-multiply"));
// Should also contain JSON compilation output
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
assert!(stdout.contains("\"errors\""));
assert!(stdout.contains("\"sources\""));
});
forgetest!(build_respects_lint_on_build_false, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
// Configure linter with medium severity lints but disable lint_on_build
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::Med],
exclude_lints: vec!["incorrect-shift".into()],
ignore: vec![],
lint_on_build: false,
..Default::default()
};
});
// Run forge build - should NOT show linting output because lint_on_build is false
cmd.arg("build").assert_success().stderr_eq(str![[""]]).stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
Warning (2072): Unused local variable.
[FILE]:13:9:
|
13 | uint256 result = 8 >> localValue;
| ^^^^^^^^^^^^^^
Warning (6133): Statement has no effect.
[FILE]:16:9:
|
16 | (1 / 2) * 3;
| ^^^^^^^^^^^
Warning (2018): Function state mutability can be restricted to pure
[FILE]:11:5:
|
11 | function incorrectShiftHigh() public {
| ^ (Relevant source part starts here and spans across multiple lines).
Warning (2018): Function state mutability can be restricted to pure
[FILE]:15:5:
|
15 | function divideBeforeMultiplyMedium() public {
| ^ (Relevant source part starts here and spans across multiple lines).
Warning (2018): Function state mutability can be restricted to pure
[FILE]:18:5:
|
18 | function unoptimizedHashGas(uint256 a, uint256 b) public view {
| ^ (Relevant source part starts here and spans across multiple lines).
"#]]);
});
forgetest!(can_process_inline_config_regardless_of_input_order, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OtherContractWithLints", OTHER_CONTRACT);
cmd.arg("lint").assert_success();
prj.add_source("OtherContractWithLints", OTHER_CONTRACT);
prj.add_source("ContractWithLints", CONTRACT);
cmd.arg("lint").assert_success();
});
// <https://github.com/foundry-rs/foundry/issues/11080>
forgetest!(can_use_only_lint_with_multilint_passes, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
prj.add_source("OnlyImports", ONLY_IMPORTS);
cmd.arg("lint").args(["--only-lint", "unused-import"]).assert_success().stderr_eq(str![[r#"
note[unused-import]: unused imports should be removed
[FILE]:8:10
|
8 | import { _PascalCaseInfo } from "./ContractWithLints.sol";
| ^^^^^^^^^^^^^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#unused-import
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/11234>
forgetest!(can_lint_only_built_files, |prj, cmd| {
prj.add_source("CounterAWithLints", COUNTER_A);
prj.add_source("CounterBWithLints", COUNTER_B);
prj.update_config(|config| {
config.lint.severity = vec![LintSeverity::Info];
});
// Both contracts should be linted on build. Redact contract as order is not guaranteed.
cmd.forge_fuse().args(["build"]).assert_success().stderr_eq(str![[r#"
note[mixed-case-variable]: mutable variables should use mixedCase
...
note[mixed-case-variable]: mutable variables should use mixedCase
...
"#]]);
// Only contract CounterBWithLints that we build should be linted.
let args = ["build", "src/CounterBWithLints.sol"];
cmd.forge_fuse().args(args).assert_success().stderr_eq(str![[r#"
note[mixed-case-variable]: mutable variables should use mixedCase
[FILE]:6:20
|
6 | uint256 public CounterB_Fail_Lint;
| ^^^^^^^^^^^^^^^^^^ help: consider using: `counterBFailLint`
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#mixed-case-variable
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/11392>
forgetest!(can_lint_param_constants, |prj, cmd| {
prj.add_source("Counter", COUNTER_WITH_CONST);
prj.add_test("CounterTest", COUNTER_TEST_WITH_CONST);
cmd.forge_fuse().args(["build"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/11460>
forgetest!(lint_json_output_no_ansi_escape_codes, |prj, cmd| {
prj.add_source(
"UnwrappedModifierTest",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract UnwrappedModifierTest {
mapping(address => bool) isOwner;
modifier onlyOwner() {
require(isOwner[msg.sender], "Not owner");
require(msg.sender != address(0), "Zero address");
_;
}
function doSomething() public onlyOwner {}
}
"#,
);
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![LintSeverity::CodeSize],
exclude_lints: vec![],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
// should produce clean JSON without ANSI escape sequences (for the url nor the snippets)
cmd.arg("lint").arg("--json").assert_json_stderr(true,
str![[r#"
{
"$message_type": "diagnostic",
"message": "wrap modifier logic to reduce code size",
"code": {
"code": "unwrapped-modifier-logic",
"explanation": null
},
"level": "note",
"spans": [
{
"file_name": "src/UnwrappedModifierTest.sol",
"byte_start": 174,
"byte_end": 355,
"line_start": 8,
"line_end": 12,
"column_start": 13,
"column_end": 14,
"is_primary": true,
"text": [
{
"text": " modifier onlyOwner() {",
"highlight_start": 13,
"highlight_end": 35
},
{
"text": " require(isOwner[msg.sender], \"Not owner\");",
"highlight_start": 1,
"highlight_end": 59
},
{
"text": " require(msg.sender != address(0), \"Zero address\");",
"highlight_start": 1,
"highlight_end": 67
},
{
"text": " _;",
"highlight_start": 1,
"highlight_end": 19
},
{
"text": " }",
"highlight_start": 1,
"highlight_end": 14
}
],
"label": null,
"suggested_replacement": null
}
],
"children": [
{
"message": "https://book.getfoundry.sh/reference/forge/forge-lint#unwrapped-modifier-logic",
"code": null,
"level": "help",
"spans": [],
"children": [],
"rendered": null
},
{
"message": "wrap modifier logic to reduce code size",
"code": null,
"level": "help",
"spans": [
{
"file_name": "src/UnwrappedModifierTest.sol",
"byte_start": 174,
"byte_end": 355,
"line_start": 8,
"line_end": 12,
"column_start": 13,
"column_end": 14,
"is_primary": true,
"text": [
{
"text": " modifier onlyOwner() {",
"highlight_start": 13,
"highlight_end": 35
},
{
"text": " require(isOwner[msg.sender], \"Not owner\");",
"highlight_start": 1,
"highlight_end": 59
},
{
"text": " require(msg.sender != address(0), \"Zero address\");",
"highlight_start": 1,
"highlight_end": 67
},
{
"text": " _;",
"highlight_start": 1,
"highlight_end": 19
},
{
"text": " }",
"highlight_start": 1,
"highlight_end": 14
}
],
"label": null,
"suggested_replacement": "modifier onlyOwner() {\n _onlyOwner();\n _;\n }\n\n function _onlyOwner() internal {\n require(isOwner[msg.sender], \"Not owner\");\n require(msg.sender != address(0), \"Zero address\");\n }"
}
],
"children": [],
"rendered": null
}
],
"rendered": "note[unwrapped-modifier-logic]: wrap modifier logic to reduce code size\n\n --> src/UnwrappedModifierTest.sol:8:13\n |\n 8 | / modifier onlyOwner() {\n 9 | | require(isOwner[msg.sender], \"Not owner\");\n10 | | require(msg.sender != address(0), \"Zero address\");\n11 | | _;\n12 | | }\n | |_____________^\n |\nhelp: wrap modifier logic to reduce code size\n |\n 8 ~ modifier onlyOwner() {\n 9 + _onlyOwner();\n10 + _;\n11 + }\n12 + \n13 + function _onlyOwner() internal {\n14 + require(isOwner[msg.sender], \"Not owner\");\n15 + require(msg.sender != address(0), \"Zero address\");\n16 + }\n |\n = help: https://book.getfoundry.sh/reference/forge/forge-lint#unwrapped-modifier-logic\n"
}
"#]],
);
});
forgetest!(can_fail_on_lints, |prj, cmd| {
prj.add_source("ContractWithLints", CONTRACT);
// -- LINT ALL SEVERITIES [OUTPUT: WARN + NOTE] ----------------------------
cmd.forge_fuse().arg("lint").assert_success(); // DenyLevel::Never (default)
prj.update_config(|config| {
config.deny = DenyLevel::Warnings;
});
cmd.forge_fuse().arg("lint").assert_failure();
prj.update_config(|config| {
config.deny = DenyLevel::Notes;
});
cmd.forge_fuse().arg("lint").assert_failure();
// cmd flags can override config
prj.update_config(|config| {
config.deny = DenyLevel::Never;
});
cmd.forge_fuse().args(["lint", "--deny warnings"]).assert_failure();
cmd.forge_fuse().args(["lint", "--deny notes"]).assert_failure();
// usage of `--deny-warnings` flag works, but emits a warning
cmd.forge_fuse().args(["lint", "--deny-warnings"]).assert_failure().stderr_eq(str![[r#"
Warning: `--deny-warnings` is being deprecated in favor of `--deny warnings`.
...
"#]]);
// usage of `deny_warnings` config works, but emits a warning
prj.create_file(
"foundry.toml",
r#"
[profile.default]
deny_warnings = true
"#,
);
cmd.forge_fuse().arg("lint").assert_failure().stderr_eq(str![[r#"
Warning: Key `deny_warnings` is being deprecated in favor of `deny = warnings`. It will be removed in future versions.
...
"#]]);
// -- ONLY LINT LOW SEVERITIES [OUTPUT: NOTE] ------------------------------
prj.update_config(|config| {
config.deny_warnings = false;
config.deny = DenyLevel::Never;
config.lint.severity = vec![LintSeverity::Info, LintSeverity::Gas, LintSeverity::CodeSize];
});
cmd.forge_fuse().arg("lint").assert_success();
prj.update_config(|config| {
config.deny = DenyLevel::Warnings;
});
cmd.forge_fuse().arg("lint").assert_success();
prj.update_config(|config| {
config.deny = DenyLevel::Notes;
});
cmd.forge_fuse().arg("lint").assert_failure();
// cmd flags can override config
prj.update_config(|config| {
config.deny = DenyLevel::Never;
});
cmd.forge_fuse().args(["lint", "--deny notes"]).assert_failure();
});
// ------------------------------------------------------------------------------------------------
#[tokio::test]
async fn ensure_lint_rule_docs() {
const FOUNDRY_BOOK_LINT_PAGE_URL: &str =
"https://book.getfoundry.sh/reference/forge/forge-lint";
// Fetch the content of the lint reference
let content = match reqwest::get(FOUNDRY_BOOK_LINT_PAGE_URL).await {
Ok(resp) => {
if !resp.status().is_success() {
panic!(
"Failed to fetch Foundry Book lint page ({FOUNDRY_BOOK_LINT_PAGE_URL}). Status: {status}",
status = resp.status()
);
}
match resp.text().await {
Ok(text) => text,
Err(e) => {
panic!("Failed to read response text: {e}");
}
}
}
Err(e) => {
panic!("Failed to fetch Foundry Book lint page ({FOUNDRY_BOOK_LINT_PAGE_URL}): {e}",);
}
};
// Ensure no missing lints
let mut missing_lints = Vec::new();
for lint in REGISTERED_LINTS {
let selector = format!("#{}", lint.id());
if !content.contains(&selector) {
missing_lints.push(lint.id());
}
}
if !missing_lints.is_empty() {
let mut msg = String::from(
"Foundry Book lint validation failed. The following lints must be added to the docs:\n",
);
for lint in missing_lints {
msg.push_str(&format!(" - {lint}\n"));
}
msg.push_str("Please open a PR: https://github.com/foundry-rs/book");
panic!("{msg}");
}
}
#[test]
fn ensure_no_privileged_lint_id() {
for lint in REGISTERED_LINTS {
assert_ne!(lint.id(), "all", "lint-id 'all' is reserved. Please use a different id");
}
}
forgetest!(skips_linting_for_old_solidity_versions, |prj, cmd| {
const OLD_CONTRACT: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
contract OldContract {
uint256 VARIABLE_MIXED_CASE_INFO;
function FUNCTION_MIXED_CASE_INFO() public {}
}
"#;
// Add a contract with Solidity 0.7.x which has lint issues
prj.add_source("OldContract", OLD_CONTRACT);
prj.update_config(|config| {
config.lint = LinterConfig {
severity: vec![],
exclude_lints: vec![],
ignore: vec![],
lint_on_build: true,
..Default::default()
};
});
// Run forge build - should SUCCEED without linting
cmd.arg("build").assert_success().stderr_eq(str![[
r#"Warning: unable to lint. Solar only supports Solidity versions prior to 0.8.0
"#
]]);
// Run forge lint - should FAIL
cmd.forge_fuse().arg("lint").assert_failure().stderr_eq(str![[
r#"Error: unable to lint. Solar only supports Solidity versions prior to 0.8.0
"#
]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/json.rs | crates/forge/tests/cli/json.rs | // tests enhanced `vm.parseJson` and `vm.serializeJson` cheatcodes, which are not constrained to
// alphabetical ordering of struct keys, but rather respect the Solidity struct definition.
forgetest_init!(test_parse_json, |prj, cmd| {
prj.add_test(
"JsonCheats",
r#"
import {Test} from "forge-std/Test.sol";
// Definition order: color, sweetness, sourness
// Alphabetical order: color, sourness, sweetness
struct Apple {
string color;
uint8 sweetness;
uint8 sourness;
}
// Definition order: name, apples
// Alphabetical order: apples, name
struct FruitStall {
string name;
Apple[] apples;
}
contract SimpleJsonCheatsTest is Test {
function testJsonParseAndSerialize() public {
// Initial JSON has keys in a custom order, different from definition and alphabetical.
string memory originalJson =
'{"name":"Fresh Fruit","apples":[{"sweetness":7,"sourness":3,"color":"Red"},{"sweetness":5,"sourness":5,"color":"Green"}]}';
// Parse the original JSON. The parser should correctly handle the unordered keys.
bytes memory decoded = vm.parseJson(originalJson);
FruitStall memory originalType = abi.decode(decoded, (FruitStall));
// Assert initial parsing is correct
assertEq(originalType.name, "Fresh Fruit");
assertEq(originalType.apples[0].color, "Red");
assertEq(originalType.apples[0].sweetness, 7);
assertEq(originalType.apples[1].sourness, 5);
// Serialize the struct back to JSON. `vm.serializeJson` should respect the order for all keys.
string memory serializedJson = vm.serializeJsonType(
"FruitStall(Apple[] apples,string name)Apple(string color,uint8 sourness,uint8 sweetness)",
abi.encode(originalType)
);
// The expected JSON should have keys ordered according to the struct definitions.
string memory expectedJson =
'{"name":"Fresh Fruit","apples":[{"color":"Red","sweetness":7,"sourness":3},{"color":"Green","sweetness":5,"sourness":5}]}';
assertEq(serializedJson, expectedJson);
// Parse the newly serialized JSON to complete the cycle.
bytes memory redecoded = vm.parseJson(serializedJson);
FruitStall memory finalType = abi.decode(redecoded, (FruitStall));
// Assert that the struct from the full cycle is identical to the original parsed struct.
assertEq(keccak256(abi.encode(finalType)), keccak256(abi.encode(originalType)));
}
}
"#,
);
// Directly run the test. No `bind-json` or type schemas are needed.
cmd.forge_fuse().args(["test"]).assert_success();
// Should still work when the project is not compiled.
cmd.forge_fuse().args(["test"]).assert_success();
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/install.rs | crates/forge/tests/cli/install.rs | //! forge install and update tests
use forge::{DepIdentifier, FOUNDRY_LOCK, Lockfile};
use foundry_cli::utils::{Git, Submodules};
use foundry_compilers::artifacts::Remapping;
use foundry_config::Config;
use foundry_test_utils::util::{
ExtTester, FORGE_STD_REVISION, TestCommand, pretty_err, read_string,
};
use semver::Version;
use std::{
fs,
path::{Path, PathBuf},
process::Command,
str::FromStr,
};
fn lockfile_get(root: &Path, dep_path: &Path) -> Option<DepIdentifier> {
let mut l = Lockfile::new(root);
l.read().unwrap();
l.get(dep_path).cloned()
}
// checks missing dependencies are auto installed
// `forge build` on a project whose `lib/forge-std` was wiped must reinstall
// the dependency before compiling, and record its pinned rev in foundry.lock.
forgetest_init!(can_install_missing_deps_build, |prj, cmd| {
    prj.initialize_default_contracts();
    prj.clear();
    // wipe forge-std
    let forge_std_dir = prj.root().join("lib/forge-std");
    pretty_err(&forge_std_dir, fs::remove_dir_all(&forge_std_dir));
    // Build the project
    cmd.arg("build").assert_success().stdout_eq(str![[r#"
Missing dependencies found. Installing now...
[UPDATING_DEPENDENCIES]
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
    // assert lockfile
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert_eq!(forge_std.rev(), FORGE_STD_REVISION);
    // Expect compilation to be skipped as no files have changed
    cmd.forge_fuse().arg("build").assert_success().stdout_eq(str![[r#"
No files changed, compilation skipped
"#]]);
});
// checks missing dependencies are auto installed
// Same as the build variant above, but via `forge test`: the missing
// dependency must be installed, the suite must pass, and the rev must land
// in foundry.lock.
forgetest_init!(can_install_missing_deps_test, |prj, cmd| {
    prj.initialize_default_contracts();
    prj.clear();
    // wipe forge-std
    let forge_std_dir = prj.root().join("lib/forge-std");
    pretty_err(&forge_std_dir, fs::remove_dir_all(&forge_std_dir));
    cmd.arg("test").assert_success().stdout_eq(str![[r#"
Missing dependencies found. Installing now...
[UPDATING_DEPENDENCIES]
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 2 tests for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS])
[PASS] test_Increment() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
    // assert lockfile
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert_eq!(forge_std.rev(), FORGE_STD_REVISION);
});
// test to check that install/remove works properly
// Install must create the submodule dir, the `.git/modules` entry, and the
// `.gitmodules` record; remove must delete all three and drop the lockfile
// entry. Removal is exercised both by name and by relative path.
forgetest!(can_install_and_remove, |prj, cmd| {
    cmd.git_init();
    let libs = prj.root().join("lib");
    let git_mod = prj.root().join(".git/modules/lib");
    let git_mod_file = prj.root().join(".gitmodules");
    let forge_std = libs.join("forge-std");
    let forge_std_mod = git_mod.join("forge-std");
    // Installs forge-std and checks all three git artifacts exist.
    let install = |cmd: &mut TestCommand| {
        cmd.forge_fuse().args(["install", "foundry-rs/forge-std"]).assert_success().stdout_eq(
            str![[r#"
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
    Installed forge-std[..]
"#]],
        );
        assert!(forge_std.exists());
        assert!(forge_std_mod.exists());
        let submods = read_string(&git_mod_file);
        assert!(submods.contains("https://github.com/foundry-rs/forge-std"));
    };
    // Removes the dependency (by name or path) and checks all artifacts are gone.
    let remove = |cmd: &mut TestCommand, target: &str| {
        cmd.forge_fuse().args(["remove", "--force", target]).assert_success().stdout_eq(str![[
            r#"
Removing 'forge-std' in [..], (url: https://github.com/foundry-rs/forge-std, tag: None)
"#
        ]]);
        assert!(!forge_std.exists());
        assert!(!forge_std_mod.exists());
        let submods = read_string(&git_mod_file);
        assert!(!submods.contains("https://github.com/foundry-rs/forge-std"));
    };
    install(&mut cmd);
    // A plain install pins the latest release tag.
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std, DepIdentifier::Tag { .. }));
    remove(&mut cmd, "forge-std");
    // The lockfile entry must be gone after removal.
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std"));
    assert!(forge_std.is_none());
    // install again and remove via relative path
    install(&mut cmd);
    remove(&mut cmd, "lib/forge-std");
});
// test to check we can run `forge install` in an empty dir <https://github.com/foundry-rs/foundry/issues/6519>
// `forge install` with no arguments must be a silent no-op both before and
// after the repo has its first commit.
forgetest!(can_install_empty, |prj, cmd| {
    // create
    cmd.git_init();
    cmd.forge_fuse().args(["install"]);
    cmd.assert_empty_stdout();
    // create initial commit
    fs::write(prj.root().join("README.md"), "Initial commit").unwrap();
    cmd.git_add();
    cmd.git_commit("Initial commit");
    cmd.forge_fuse().args(["install"]);
    cmd.assert_empty_stdout();
});
// test to check that package can be reinstalled after manually removing the directory
// Deleting `lib/forge-std` by hand (without `forge remove`) must not prevent
// a subsequent `forge install` from succeeding and re-pinning the tag.
forgetest!(can_reinstall_after_manual_remove, |prj, cmd| {
    cmd.git_init();
    let libs = prj.root().join("lib");
    let git_mod = prj.root().join(".git/modules/lib");
    let git_mod_file = prj.root().join(".gitmodules");
    let forge_std = libs.join("forge-std");
    let forge_std_mod = git_mod.join("forge-std");
    let install = |cmd: &mut TestCommand| {
        cmd.forge_fuse().args(["install", "foundry-rs/forge-std"]).assert_success().stdout_eq(
            str![[r#"
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
    Installed forge-std tag=[..]"#]],
        );
        assert!(forge_std.exists());
        assert!(forge_std_mod.exists());
        let submods = read_string(&git_mod_file);
        assert!(submods.contains("https://github.com/foundry-rs/forge-std"));
    };
    install(&mut cmd);
    let forge_std_lock = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std_lock, DepIdentifier::Tag { .. }));
    // Simulate the user deleting the checkout directory manually.
    fs::remove_dir_all(forge_std.clone()).expect("Failed to remove forge-std");
    // install again with tag
    install(&mut cmd);
    let forge_std_lock = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std_lock, DepIdentifier::Tag { .. }));
});
// test that we can repeatedly install the same dependency without changes
// Re-running the identical `forge install` invocation must keep succeeding;
// idempotence, not output, is what is asserted here.
forgetest!(can_install_repeatedly, |_prj, cmd| {
    cmd.git_init();
    cmd.forge_fuse().args(["install", "foundry-rs/forge-std"]);
    (0..3).for_each(|_| {
        cmd.assert_success();
    });
});
// test that by default we install the latest semver release tag
// <https://github.com/openzeppelin/openzeppelin-contracts>
// The installed checkout's `git describe --tags` must resolve to a semver
// tag at least as new as the release current when this test was written.
forgetest!(can_install_latest_release_tag, |prj, cmd| {
    cmd.git_init();
    cmd.forge_fuse().args(["install", "openzeppelin/openzeppelin-contracts"]);
    cmd.assert_success();
    let dep = prj.paths().libraries[0].join("openzeppelin-contracts");
    assert!(dep.exists());
    let oz_lock = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    assert!(matches!(oz_lock, DepIdentifier::Tag { .. }));
    // the latest release at the time this test was written
    let version: Version = "4.8.0".parse().unwrap();
    let out = Command::new("git").current_dir(&dep).args(["describe", "--tags"]).output().unwrap();
    let tag = String::from_utf8_lossy(&out.stdout);
    // Tags are usually prefixed with 'v'; strip it before the semver parse.
    let current: Version = tag.as_ref().trim_start_matches('v').trim().parse().unwrap();
    assert!(current >= version);
});
// `forge update` must leave dependencies that are pinned to an explicit tag
// or rev untouched: submodule state and lockfile entries must be identical
// before and after the update.
forgetest!(can_update_and_retain_tag_revs, |prj, cmd| {
    cmd.git_init();
    // Installs oz at release tag
    cmd.forge_fuse()
        .args(["install", "openzeppelin/openzeppelin-contracts@v5.1.0"])
        .assert_success();
    // Install solady pinned to rev i.e https://github.com/Vectorized/solady/commit/513f581675374706dbe947284d6b12d19ce35a2a
    cmd.forge_fuse().args(["install", "vectorized/solady@513f581"]).assert_success();
    let out = cmd.git_submodule_status();
    let status = String::from_utf8_lossy(&out.stdout);
    let oz_init = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    let solady_init = lockfile_get(prj.root(), &PathBuf::from("lib/solady")).unwrap();
    assert_eq!(oz_init.name(), "v5.1.0");
    assert_eq!(solady_init.rev(), "513f581");
    let submodules_init: Submodules = status.parse().unwrap();
    cmd.forge_fuse().arg("update").assert_success();
    // Submodule status after the update must be byte-for-byte identical.
    let out = cmd.git_submodule_status();
    let status = String::from_utf8_lossy(&out.stdout);
    let submodules_update: Submodules = status.parse().unwrap();
    assert_eq!(submodules_init, submodules_update);
    let oz_update = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    let solady_update = lockfile_get(prj.root(), &PathBuf::from("lib/solady")).unwrap();
    assert_eq!(oz_init, oz_update);
    assert_eq!(solady_init, solady_update);
});
// `forge update dep@tag` must move only the named dependency to the new tag;
// other pinned dependencies (solady at a rev here) must remain untouched.
forgetest!(can_override_tag_in_update, |prj, cmd| {
    cmd.git_init();
    // Installs oz at release tag
    cmd.forge_fuse()
        .args(["install", "openzeppelin/openzeppelin-contracts@v5.0.2"])
        .assert_success();
    cmd.forge_fuse().args(["install", "vectorized/solady@513f581"]).assert_success();
    let out = cmd.git_submodule_status();
    let status = String::from_utf8_lossy(&out.stdout);
    let submodules_init: Submodules = status.parse().unwrap();
    let oz_init_lock =
        lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    assert_eq!(oz_init_lock.name(), "v5.0.2");
    let solady_init_lock = lockfile_get(prj.root(), &PathBuf::from("lib/solady")).unwrap();
    assert_eq!(solady_init_lock.rev(), "513f581");
    // Update oz to a different release tag
    cmd.forge_fuse()
        .args(["update", "openzeppelin/openzeppelin-contracts@v5.1.0"])
        .assert_success();
    let out = cmd.git_submodule_status();
    let status = String::from_utf8_lossy(&out.stdout);
    let submodules_update: Submodules = status.parse().unwrap();
    // Index 0 is oz (changed), index 1 is solady (unchanged).
    assert_ne!(submodules_init.0[0], submodules_update.0[0]);
    assert_eq!(submodules_init.0[1], submodules_update.0[1]);
    let oz_update_lock =
        lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    let solady_update_lock = lockfile_get(prj.root(), &PathBuf::from("lib/solady")).unwrap();
    assert_ne!(oz_init_lock, oz_update_lock);
    assert_eq!(oz_update_lock.name(), "v5.1.0");
    assert_eq!(solady_init_lock, solady_update_lock);
});
// Ref: https://github.com/foundry-rs/foundry/pull/9522#pullrequestreview-2494431518
// A tag-pinned dependency must survive `forge update` unchanged, and the
// update must not recursively pull in the dependency's own nested
// submodules (halmos-cheatcodes here).
forgetest!(should_not_update_tagged_deps, |prj, cmd| {
    cmd.git_init();
    // Installs oz at release tag
    cmd.forge_fuse()
        .args(["install", "openzeppelin/openzeppelin-contracts@tag=v4.9.4"])
        .assert_success();
    let out = cmd.git_submodule_status();
    let status = String::from_utf8_lossy(&out.stdout);
    let submodules_init: Submodules = status.parse().unwrap();
    let oz_init = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    cmd.forge_fuse().arg("update").assert_success();
    let out = cmd.git_submodule_status();
    let status = String::from_utf8_lossy(&out.stdout);
    let submodules_update: Submodules = status.parse().unwrap();
    assert_eq!(submodules_init, submodules_update);
    let oz_update = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    assert_eq!(oz_init, oz_update);
    // Check that halmos-cheatcodes dep is not added to oz deps
    let halmos_path = prj.paths().libraries[0].join("openzeppelin-contracts/lib/halmos-cheatcodes");
    assert!(!halmos_path.exists());
});
// `forge remove` must drop the removed dependency's entry from
// `foundry.lock` while leaving other entries alone.
forgetest!(can_remove_dep_from_foundry_lock, |prj, cmd| {
    cmd.git_init();
    cmd.forge_fuse()
        .args(["install", "openzeppelin/openzeppelin-contracts@tag=v4.9.4"])
        .assert_success();
    cmd.forge_fuse().args(["install", "vectorized/solady@513f581"]).assert_success();
    cmd.forge_fuse().args(["remove", "openzeppelin-contracts", "--force"]).assert_success();
    // Use the shared `lockfile_get` helper (like every other test in this
    // file) instead of reading the lockfile manually.
    assert!(lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).is_none());
});
// After deleting `foundry.lock`, a plain `forge install` must resync the
// submodules and regenerate the lockfile with the correct identifier kind:
// `@master` installs resolve to a branch, un-tagged checkouts to a rev.
forgetest!(
    #[cfg_attr(windows, ignore = "weird git fail")]
    can_sync_foundry_lock,
    |prj, cmd| {
        cmd.git_init();
        cmd.forge_fuse().args(["install", "foundry-rs/forge-std@master"]).assert_success();
        cmd.forge_fuse().args(["install", "vectorized/solady"]).assert_success();
        fs::remove_file(prj.root().join("foundry.lock")).unwrap();
        // sync submodules and write foundry.lock
        cmd.forge_fuse().arg("install").assert_success();
        // Use the shared `lockfile_get` helper and the top-level `Lockfile`
        // import instead of re-reading via the `forge::` path manually.
        let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
        assert!(matches!(forge_std, DepIdentifier::Branch { .. }));
        let solady = lockfile_get(prj.root(), &PathBuf::from("lib/solady")).unwrap();
        assert!(matches!(solady, DepIdentifier::Rev { .. }));
    }
);
// Tests that forge update doesn't break a working dependency by recursively updating nested
// dependencies
forgetest!(
    #[cfg_attr(windows, ignore = "weird git fail")]
    can_update_library_with_outdated_nested_dependency,
    |prj, cmd| {
        cmd.git_init();
        let libs = prj.root().join("lib");
        let git_mod = prj.root().join(".git/modules/lib");
        let git_mod_file = prj.root().join(".gitmodules");
        // get paths to check inside install fn
        let package = libs.join("forge-5980-test");
        let package_mod = git_mod.join("forge-5980-test");
        // install main dependency
        cmd.forge_fuse().args(["install", "evalir/forge-5980-test"]).assert_success().stdout_eq(
            str![[r#"
Installing forge-5980-test in [..] (url: https://github.com/evalir/forge-5980-test, tag: None)
    Installed forge-5980-test
"#]],
        );
        // assert paths exist
        assert!(package.exists());
        assert!(package_mod.exists());
        let submods = read_string(git_mod_file);
        assert!(submods.contains("https://github.com/evalir/forge-5980-test"));
        // try to update the top-level dependency; there should be no update for this dependency,
        // but its sub-dependency has upstream (breaking) changes; forge should not attempt to
        // update the sub-dependency
        cmd.forge_fuse().args(["update", "lib/forge-5980-test"]).assert_empty_stdout();
        // add explicit remappings for test file
        let config = Config {
            remappings: vec![
                Remapping::from_str("forge-5980-test/=lib/forge-5980-test/src/").unwrap().into(),
                // explicit remapping for sub-dependency seems necessary for some reason
                Remapping::from_str(
                    "forge-5980-test-dep/=lib/forge-5980-test/lib/forge-5980-test-dep/src/",
                )
                .unwrap()
                .into(),
            ],
            ..Default::default()
        };
        prj.write_config(config);
        // create test file that uses the top-level dependency; if the sub-dependency is updated,
        // compilation will fail
        prj.add_source(
            "CounterCopy",
            r#"
import "forge-5980-test/Counter.sol";
contract CounterCopy is Counter {
}
"#,
        );
        // build and check output
        // A successful compile proves the nested dependency was NOT bumped to
        // its breaking upstream revision.
        cmd.forge_fuse().arg("build").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
    }
);
// Syncing a real project (Uniswap v4-core) that ships submodules but no
// `foundry.lock`: `forge install` must write a lockfile whose entries match
// the existing submodule revisions, and a subsequent `forge update` must be
// a no-op since every dependency is rev-pinned.
#[tokio::test]
async fn uni_v4_core_sync_foundry_lock() {
    let (prj, mut cmd) =
        ExtTester::new("Uniswap", "v4-core", "e50237c43811bd9b526eff40f26772152a42daba")
            .setup_forge_prj(true);
    assert!(!prj.root().join(FOUNDRY_LOCK).exists());
    let git = Git::new(prj.root());
    let submodules = git.submodules().unwrap();
    let submod_forge_std =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/forge-std")).unwrap();
    let submod_oz = submodules
        .into_iter()
        .find(|s| s.path() == &PathBuf::from("lib/openzeppelin-contracts"))
        .unwrap();
    let submod_solmate =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/solmate")).unwrap();
    cmd.arg("install").assert_success();
    // Lockfile entries must mirror the pre-existing submodule revs.
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std, DepIdentifier::Rev { .. }));
    assert_eq!(forge_std.rev(), submod_forge_std.rev());
    let solmate = lockfile_get(prj.root(), &PathBuf::from("lib/solmate")).unwrap();
    assert!(matches!(solmate, DepIdentifier::Rev { .. }));
    assert_eq!(solmate.rev(), submod_solmate.rev());
    let oz = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    assert!(matches!(oz, DepIdentifier::Rev { .. }));
    assert_eq!(oz.rev(), submod_oz.rev());
    // Commit the lockfile
    git.add(&PathBuf::from(FOUNDRY_LOCK)).unwrap();
    git.commit("Foundry lock").unwrap();
    // Try update. Nothing should get updated everything is pinned tag/rev.
    cmd.forge_fuse().arg("update").assert_success();
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std, DepIdentifier::Rev { .. }));
    assert_eq!(forge_std.rev(), submod_forge_std.rev());
    let solmate = lockfile_get(prj.root(), &PathBuf::from("lib/solmate")).unwrap();
    assert!(matches!(solmate, DepIdentifier::Rev { .. }));
    assert_eq!(solmate.rev(), submod_solmate.rev());
    let oz = lockfile_get(prj.root(), &PathBuf::from("lib/openzeppelin-contracts")).unwrap();
    assert!(matches!(oz, DepIdentifier::Rev { .. }));
    assert_eq!(oz.rev(), submod_oz.rev());
}
// Same sync scenario against OpenZeppelin contracts: forge-std tracks the
// `v1` branch and so MAY move on update, while the rev-pinned deps
// (erc4626-tests, halmos-cheatcodes) must stay exactly where they were.
#[tokio::test]
async fn oz_contracts_sync_foundry_lock() {
    let (prj, mut cmd) = ExtTester::new(
        "OpenZeppelin",
        "openzeppelin-contracts",
        "840c974028316f3c8172c1b8e5ed67ad95e255ca",
    )
    .setup_forge_prj(true);
    assert!(!prj.root().join(FOUNDRY_LOCK).exists());
    let git = Git::new(prj.root());
    let submodules = git.submodules().unwrap();
    let submod_forge_std =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/forge-std")).unwrap();
    let submod_erc4626_tests =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/erc4626-tests")).unwrap();
    let submod_halmos = submodules
        .into_iter()
        .find(|s| s.path() == &PathBuf::from("lib/halmos-cheatcodes"))
        .unwrap();
    cmd.arg("install").assert_success();
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std, DepIdentifier::Branch { .. }));
    assert_eq!(forge_std.rev(), submod_forge_std.rev());
    assert_eq!(forge_std.name(), "v1");
    let erc4626_tests = lockfile_get(prj.root(), &PathBuf::from("lib/erc4626-tests")).unwrap();
    assert!(matches!(erc4626_tests, DepIdentifier::Rev { .. }));
    assert_eq!(erc4626_tests.rev(), submod_erc4626_tests.rev());
    let halmos = lockfile_get(prj.root(), &PathBuf::from("lib/halmos-cheatcodes")).unwrap();
    assert!(matches!(halmos, DepIdentifier::Rev { .. }));
    assert_eq!(halmos.rev(), submod_halmos.rev());
    // Commit the lockfile
    git.add(&PathBuf::from(FOUNDRY_LOCK)).unwrap();
    git.commit("Foundry lock").unwrap();
    // Try update. forge-std should get updated, rest should remain the same.
    cmd.forge_fuse().arg("update").assert_success();
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std, DepIdentifier::Branch { .. }));
    // assert_eq!(forge_std.rev(), submod_forge_std.rev()); // This can fail, as forge-std will get
    // updated to the latest commit on master.
    assert_eq!(forge_std.name(), "v1"); // But it stays locked on the same master
    let erc4626_tests = lockfile_get(prj.root(), &PathBuf::from("lib/erc4626-tests")).unwrap();
    assert!(matches!(erc4626_tests, DepIdentifier::Rev { .. }));
    assert_eq!(erc4626_tests.rev(), submod_erc4626_tests.rev());
    let halmos = lockfile_get(prj.root(), &PathBuf::from("lib/halmos-cheatcodes")).unwrap();
    assert!(matches!(halmos, DepIdentifier::Rev { .. }));
    assert_eq!(halmos.rev(), submod_halmos.rev());
}
// Syncing a project that vendors the same dependency at two versions
// (`lib/solady` and `lib/solady-v0.0.245`) must record each checkout's own
// revision in the lockfile.
#[tokio::test]
async fn correctly_sync_dep_with_multiple_version() {
    let (prj, mut cmd) = ExtTester::new(
        "yash-atreya",
        "sync-lockfile-multi-version-dep",
        "1ca47e73a168e54f8f7761862dbd0c603856c5c8",
    )
    .setup_forge_prj(true);
    assert!(!prj.root().join(FOUNDRY_LOCK).exists());
    let git = Git::new(prj.root());
    let submodules = git.submodules().unwrap();
    let submod_forge_std =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/forge-std")).unwrap();
    let submod_solady =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/solady")).unwrap();
    // (local names fixed: previously misspelled `solday`)
    let submod_solady_v_245 =
        submodules.into_iter().find(|s| s.path() == &PathBuf::from("lib/solady-v0.0.245")).unwrap();
    cmd.arg("install").assert_success();
    let forge_std = lockfile_get(prj.root(), &PathBuf::from("lib/forge-std")).unwrap();
    assert!(matches!(forge_std, DepIdentifier::Rev { .. }));
    assert_eq!(forge_std.rev(), submod_forge_std.rev());
    let solady = lockfile_get(prj.root(), &PathBuf::from("lib/solady")).unwrap();
    assert!(matches!(solady, DepIdentifier::Rev { .. }));
    assert_eq!(solady.rev(), submod_solady.rev());
    let solady_v_245 = lockfile_get(prj.root(), &PathBuf::from("lib/solady-v0.0.245")).unwrap();
    assert!(matches!(solady_v_245, DepIdentifier::Rev { .. }));
    assert_eq!(solady_v_245.rev(), submod_solady_v_245.rev());
}
// Moving a dependency from a tag to a branch via `forge update dep@branch`
// must re-fetch the branch head from the remote (not the locally rewound
// copy) and report the transition in the expected format.
forgetest_init!(sync_on_forge_update, |prj, cmd| {
    let git = Git::new(prj.root());
    let submodules = git.submodules().unwrap();
    assert!(submodules.0.iter().any(|s| s.rev() == FORGE_STD_REVISION));
    let mut lockfile = Lockfile::new(prj.root());
    lockfile.read().unwrap();
    let forge_std = lockfile.get(&PathBuf::from("lib/forge-std")).unwrap();
    // `assert_eq!` instead of `assert!(a == b)` for a useful diff on failure.
    assert_eq!(forge_std.rev(), FORGE_STD_REVISION);
    // cd into the forge-std submodule and reset the master branch
    let forge_std_path = prj.root().join("lib/forge-std");
    let git = Git::new(&forge_std_path);
    git.checkout(false, "master").unwrap();
    // Get the master head commit
    let origin_master_head = git.head().unwrap();
    // Reset the master branch to HEAD~1
    git.reset(true, "HEAD~1").unwrap();
    let local_master_head = git.head().unwrap();
    assert_ne!(origin_master_head, local_master_head, "Master head should have changed");
    // Now checkout back to the release tag
    git.checkout(false, forge_std.name()).unwrap();
    assert_eq!(git.head().unwrap(), forge_std.rev(), "Forge std should be at the release tag");
    // The update must report the move from the tag to the REMOTE master head,
    // proving the locally rewound master was not used.
    let expected_output = format!(
        r#"Updated dep at 'lib/forge-std', (from: tag={}@{}, to: branch=master@{})
"#,
        forge_std.name(),
        forge_std.rev(),
        origin_master_head
    );
    cmd.forge_fuse()
        .args(["update", "foundry-rs/forge-std@master"])
        .assert_success()
        .stdout_eq(expected_output);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/build.rs | crates/forge/tests/cli/build.rs | use crate::utils::generate_large_init_contract;
use foundry_test_utils::{forgetest, snapbox::IntoData, str};
use globset::Glob;
// `--skip tests scripts` must exclude those directories from compilation so
// that only the `Counter` source contract shows up in the `--names` listing.
forgetest_init!(can_parse_build_filters, |prj, cmd| {
    prj.initialize_default_contracts();
    prj.clear();
    cmd.args(["build", "--names", "--skip", "tests", "scripts"]).assert_success().stdout_eq(str![
        [r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
  compiler version: [..]
    - Counter
"#]
    ]);
});
// `--format-json` (alias of `--json`) conflicts with `--quiet`; clap must
// reject the combination with its standard conflict error.
forgetest!(throws_on_conflicting_args, |prj, cmd| {
    prj.clear();
    cmd.args(["compile", "--format-json", "--quiet"]).assert_failure().stderr_eq(str![[r#"
error: the argument '--json' cannot be used with '--quiet'
Usage: forge[..] build --json [PATHS]...
For more information, try '--help'.
"#]]);
});
// tests that json is printed when --format-json is passed
// Compiler diagnostics must land in the JSON payload on stdout (stderr stays
// empty) and the command still exits successfully despite the solc error.
forgetest!(compile_json, |prj, cmd| {
    prj.add_source(
        "jsonError",
        r"
contract Dummy {
    uint256 public number;
    function something(uint256 newNumber) public {
        number = newnumber; // error here
    }
}
",
    );
    // set up command
    cmd.args(["compile", "--format-json"]).assert_success().stderr_eq("").stdout_eq(str![[r#"
{
  "errors": [
    {
      "sourceLocation": {
        "file": "src/jsonError.sol",
        "start": 184,
        "end": 193
      },
      "type": "DeclarationError",
      "component": "general",
      "severity": "error",
      "errorCode": "7576",
      "message": "Undeclared identifier. Did you mean \"newNumber\"?",
      "formattedMessage": "DeclarationError: Undeclared identifier. Did you mean \"newNumber\"?\n [FILE]:7:18:\n  |\n7 |         number = newnumber; // error here\n  |                  ^^^^^^^^^\n\n"
    }
  ],
  "sources": {},
  "contracts": {},
  "build_infos": "{...}"
}
"#]].is_json());
});
// A contract whose initcode exceeds the EIP-3860 limit (49152 bytes) must
// fail `--sizes` (negative initcode margin) in table, JSON, and markdown
// output — and pass once `--ignore-eip-3860` is supplied.
forgetest!(initcode_size_exceeds_limit, |prj, cmd| {
    prj.add_source("LargeContract.sol", generate_large_init_contract(50_000).as_str());
    cmd.args(["build", "--sizes"]).assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
╭---------------+------------------+-------------------+--------------------+---------------------╮
| Contract      | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
+=================================================================================================+
| LargeContract | 62               | 50,125            | 24,514             | -973                |
╰---------------+------------------+-------------------+--------------------+---------------------╯
"#]]);
    cmd.forge_fuse().args(["build", "--sizes", "--json"]).assert_failure().stdout_eq(
        str![[r#"
{
  "LargeContract": {
    "runtime_size": 62,
    "init_size": 50125,
    "runtime_margin": 24514,
    "init_margin": -973
  }
}
"#]]
        .is_json(),
    );
    cmd.forge_fuse().args(["build", "--sizes", "--md"]).assert_failure().stdout_eq(str![[r#"
No files changed, compilation skipped
| Contract      | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
|---------------|------------------|-------------------|--------------------|---------------------|
| LargeContract | 62               | 50,125            | 24,514             | -973                |
"#]]);
    // Ignore EIP-3860
    // With the check disabled the same oversized contract must succeed.
    cmd.forge_fuse().args(["build", "--sizes", "--ignore-eip-3860"]).assert_success().stdout_eq(
        str![[r#"
No files changed, compilation skipped
╭---------------+------------------+-------------------+--------------------+---------------------╮
| Contract      | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
+=================================================================================================+
| LargeContract | 62               | 50,125            | 24,514             | -973                |
╰---------------+------------------+-------------------+--------------------+---------------------╯
"#]],
    );
    cmd.forge_fuse()
        .args(["build", "--sizes", "--ignore-eip-3860", "--json"])
        .assert_success()
        .stdout_eq(
            str![[r#"
{
  "LargeContract": {
    "runtime_size": 62,
    "init_size": 50125,
    "runtime_margin": 24514,
    "init_margin": -973
  }
}
"#]]
            .is_json(),
        );
    cmd.forge_fuse()
        .args(["build", "--sizes", "--ignore-eip-3860", "--md"])
        .assert_success()
        .stdout_eq(str![[r#"
No files changed, compilation skipped
| Contract      | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
|---------------|------------------|-------------------|--------------------|---------------------|
| LargeContract | 62               | 50,125            | 24,514             | -973                |
"#]]);
});
// tests build output is as expected
// `--force` guarantees a full recompile so the three-line success output is
// always produced.
forgetest_init!(exact_build_output, |prj, cmd| {
    prj.initialize_default_contracts();
    cmd.args(["build", "--force"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
});
// tests build output is as expected
// `--sizes` output for the default Counter project; solc is pinned to 0.8.27
// so the byte counts in the expected output stay stable across runs.
forgetest_init!(build_sizes_no_forge_std, |prj, cmd| {
    prj.initialize_default_contracts();
    prj.update_config(|config| {
        config.solc = Some(foundry_config::SolcReq::Version(semver::Version::new(0, 8, 27)));
    });
    cmd.args(["build", "--sizes"]).assert_success().stdout_eq(str![[r#"
...
╭----------+------------------+-------------------+--------------------+---------------------╮
| Contract | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
+============================================================================================+
| Counter  | 481              | 509               | 24,095             | 48,643              |
╰----------+------------------+-------------------+--------------------+---------------------╯
"#]]);
    cmd.forge_fuse().args(["build", "--sizes", "--json"]).assert_success().stdout_eq(
        str![[r#"
{
  "Counter": {
    "runtime_size": 481,
    "init_size": 509,
    "runtime_margin": 24095,
    "init_margin": 48643
  }
}
"#]]
        .is_json(),
    );
    cmd.forge_fuse().args(["build", "--sizes", "--md"]).assert_success().stdout_eq(str![[r#"
...
| Contract | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
|----------|------------------|-------------------|--------------------|---------------------|
| Counter  | 481              | 509               | 24,095             | 48,643              |
"#]]);
});
// tests build output --sizes handles multiple contracts with the same name
// Three `Counter` contracts in different files must be disambiguated by
// their source path in both the table and markdown output.
forgetest_init!(build_sizes_multiple_contracts, |prj, cmd| {
    prj.initialize_default_contracts();
    prj.add_source(
        "Foo",
        r"
contract Foo {
}
",
    );
    prj.add_source(
        "a/Counter",
        r"
contract Counter {
    uint256 public count;
    function increment() public {
        count++;
    }
}
",
    );
    prj.add_source(
        "b/Counter",
        r"
contract Counter {
    uint256 public count;
    function decrement() public {
        count--;
    }
}
",
    );
    cmd.args(["build", "--sizes"]).assert_success().stdout_eq(str![[r#"
...
╭-----------------------------+------------------+-------------------+--------------------+---------------------╮
| Contract                    | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
+===============================================================================================================+
| Counter (src/Counter.sol)   | 481              | 509               | 24,095             | 48,643              |
|-----------------------------+------------------+-------------------+--------------------+---------------------|
| Counter (src/a/Counter.sol) | 344              | 372               | 24,232             | 48,780              |
|-----------------------------+------------------+-------------------+--------------------+---------------------|
| Counter (src/b/Counter.sol) | 291              | 319               | 24,285             | 48,833              |
|-----------------------------+------------------+-------------------+--------------------+---------------------|
| Foo                         | 62               | 88                | 24,514             | 49,064              |
╰-----------------------------+------------------+-------------------+--------------------+---------------------╯
"#]]);
    cmd.forge_fuse().args(["build", "--sizes", "--md"]).assert_success().stdout_eq(str![[r#"
...
| Contract                    | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) |
|-----------------------------|------------------|-------------------|--------------------|---------------------|
| Counter (src/Counter.sol)   | 481              | 509               | 24,095             | 48,643              |
| Counter (src/a/Counter.sol) | 344              | 372               | 24,232             | 48,780              |
| Counter (src/b/Counter.sol) | 291              | 319               | 24,285             | 48,833              |
| Foo                         | 62               | 88                | 24,514             | 49,064              |
"#]]);
});
// tests build output --sizes --json handles multiple contracts with the same name
// JSON variant of the test above: same-name contracts become distinct keys
// suffixed with their source path.
forgetest_init!(build_sizes_multiple_contracts_json, |prj, cmd| {
    prj.initialize_default_contracts();
    prj.add_source(
        "Foo",
        r"
contract Foo {
}
",
    );
    prj.add_source(
        "a/Counter",
        r"
contract Counter {
    uint256 public count;
    function increment() public {
        count++;
    }
}
",
    );
    prj.add_source(
        "b/Counter",
        r"
contract Counter {
    uint256 public count;
    function decrement() public {
        count--;
    }
}
",
    );
    cmd.args(["build", "--sizes", "--json"]).assert_success().stdout_eq(
        str![[r#"
{
  "Counter (src/Counter.sol)":{
    "runtime_size":481,
    "init_size":509,
    "runtime_margin":24095,
    "init_margin":48643
  },
  "Counter (src/a/Counter.sol)":{
    "runtime_size":344,
    "init_size":372,
    "runtime_margin":24232,
    "init_margin":48780
  },
  "Counter (src/b/Counter.sol)":{
    "runtime_size":291,
    "init_size":319,
    "runtime_margin":24285,
    "init_margin":48833
  },
  "Foo":{
    "runtime_size":62,
    "init_size":88,
    "runtime_margin":24514,
    "init_margin":49064
  }
}
"#]]
        .is_json(),
    );
});
// tests that skip key in config can be used to skip non-compilable contract
// The glob in `config.skip` must exclude the syntactically invalid source so
// the remaining valid contract still builds.
forgetest_init!(test_can_skip_contract, |prj, cmd| {
    prj.add_source(
        "InvalidContract",
        r"
contract InvalidContract {
    some_invalid_syntax
}
",
    );
    prj.add_source(
        "ValidContract",
        r"
contract ValidContract {}
",
    );
    prj.update_config(|config| {
        config.skip = vec![Glob::new("src/InvalidContract.sol").unwrap().into()];
    });
    cmd.args(["build"]).assert_success();
});
// <https://github.com/foundry-rs/foundry/issues/11149>
// Unresolvable imports (absolute `/badpath/...` and relative `badpath/...`)
// must produce the same structured "Unable to resolve imports" diagnostic.
forgetest_init!(test_consistent_build_output, |prj, cmd| {
    prj.add_source(
        "AContract.sol",
        r#"
import {B} from "/badpath/B.sol";
contract A is B {}
"#,
    );
    prj.add_source(
        "CContract.sol",
        r#"
import {B} from "badpath/B.sol";
contract C is B {}
"#,
    );
    cmd.args(["build", "src/AContract.sol"]).assert_failure().stdout_eq(str![[r#"
...
Unable to resolve imports:
      "/badpath/B.sol" in "[..]"
with remappings:
      forge-std/=[..]
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
"#]]);
    cmd.forge_fuse().args(["build", "src/CContract.sol"]).assert_failure().stdout_eq(str![[r#"
Unable to resolve imports:
      "badpath/B.sol" in "[..]"
with remappings:
      forge-std/=[..]
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/12458>
// <https://github.com/foundry-rs/foundry/issues/12496>
// Unknown natspec tags (`@deprecated`, `@note`) must only warn — the build
// still succeeds — and commented-out imports inside natspec must not trip
// the parser.
forgetest!(build_with_invalid_natspec, |prj, cmd| {
    prj.add_source(
        "ContractWithInvalidNatspec.sol",
        r#"
contract ContractA {
    /// @deprecated quoteExactOutputSingle and exactOutput. Use QuoterV2 instead.
}
/// Some editors highlight `@note` or `@todo`
/// @note foo bar
/// @title ContractB
contract ContractB {
    /**
    some example code in a comment:
    import { Ownable } from "@openzeppelin/contracts/access/Ownable.sol";
    */
}
"#,
    );
    cmd.args(["build", "src/ContractWithInvalidNatspec.sol"]).assert_success().stderr_eq(str![[
        r#"
warning: invalid natspec tag '@deprecated', custom tags must use format '@custom:name'
  [FILE]:5:5
   |
 5 |     /// @deprecated quoteExactOutputSingle and exactOutput. Use QuoterV2 instead.
   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   |
...
warning: invalid natspec tag '@note', custom tags must use format '@custom:name'
  [FILE]:9:1
   |
 9 | /// @note foo bar
   | ^^^^^^^^^^^^^^^^^
   |
...
"#
    ]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/debug.rs | crates/forge/tests/cli/debug.rs | use itertools::Itertools;
use std::path::Path;
// Sets up a debuggable test case.
// Run with `cargo test-debugger`.
forgetest!(
#[ignore = "ran manually"]
manual_debug_setup,
|prj, cmd| {
cmd.args(["init", "--force"])
.arg(prj.root())
.assert_success()
.stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]])
.stderr_eq(str![[r#"
Warning: Target directory is not empty, but `--force` was specified
...
"#]]);
prj.add_source("Counter2.sol", r#"
contract A {
address public a;
uint public b;
int public c;
bytes32 public d;
bool public e;
bytes public f;
string public g;
constructor(address _a, uint _b, int _c, bytes32 _d, bool _e, bytes memory _f, string memory _g) {
a = _a;
b = _b;
c = _c;
d = _d;
e = _e;
f = _f;
g = _g;
}
function getA() public view returns (address) {
return a;
}
function setA(address _a) public {
a = _a;
}
}"#,
);
let script = prj.add_script("Counter.s.sol", r#"
import "../src/Counter2.sol";
import "forge-std/Script.sol";
import "forge-std/Test.sol";
contract B is A {
A public other;
address public self = address(this);
constructor(address _a, uint _b, int _c, bytes32 _d, bool _e, bytes memory _f, string memory _g)
A(_a, _b, _c, _d, _e, _f, _g)
{
other = new A(_a, _b, _c, _d, _e, _f, _g);
}
}
contract Script0 is Script, Test {
function run() external {
assertEq(uint256(1), uint256(1));
vm.startBroadcast();
B b = new B(msg.sender, 2 ** 32, -1 * (2 ** 32), keccak256(abi.encode(1)), true, "abcdef", "hello");
assertEq(b.getA(), msg.sender);
b.setA(tx.origin);
assertEq(b.getA(), tx.origin);
address _b = b.self();
bytes32 _d = b.d();
bytes32 _d2 = b.other().d();
}
}"#,
);
cmd.forge_fuse().args(["build"]).assert_success();
cmd.args([
"script",
script.to_str().unwrap(),
"--root",
prj.root().to_str().unwrap(),
"--tc=Script0",
"--debug",
]);
eprintln!("root: {}", prj.root().display());
let cmd_path = Path::new(cmd.cmd().get_program()).canonicalize().unwrap();
let args = cmd.cmd().get_args().map(|s| s.to_str().unwrap()).format(" ");
eprintln!(" cmd: {} {args}", cmd_path.display());
std::mem::forget(prj);
}
);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/verify.rs | crates/forge/tests/cli/verify.rs | //! Contains various tests for checking forge commands related to verifying contracts on Etherscan
//! and Sourcify.
use crate::utils::{self, EnvExternalities};
use foundry_common::retry::Retry;
use foundry_test_utils::{
forgetest,
util::{OutputExt, TestCommand, TestProject},
};
use std::time::Duration;
/// Adds a `Unique` contract to the source directory of the project that can be imported as
/// `import {Unique} from "./unique.sol";`
fn add_unique(prj: &TestProject) {
let timestamp = utils::millis_since_epoch();
prj.add_source(
"unique",
&format!(
r#"
contract Unique {{
uint public _timestamp = {timestamp};
}}
"#
),
);
}
fn add_verify_target(prj: &TestProject) {
prj.add_source(
"Verify.sol",
r#"
import {Unique} from "./unique.sol";
contract Verify is Unique {
function doStuff() external {}
}
"#,
);
}
fn add_single_verify_target_file(prj: &TestProject) {
let timestamp = utils::millis_since_epoch();
let contract = format!(
r#"
contract Unique {{
uint public _timestamp = {timestamp};
}}
contract Verify is Unique {{
function doStuff() external {{}}
}}
"#
);
prj.add_source("Verify.sol", &contract);
}
fn add_verify_target_with_constructor(prj: &TestProject) {
prj.add_source(
"Verify.sol",
r#"
import {Unique} from "./unique.sol";
contract Verify is Unique {
struct SomeStruct {
uint256 a;
string str;
}
constructor(SomeStruct memory st, address owner) {}
}
"#,
);
}
fn parse_verification_result(cmd: &mut TestCommand, retries: u32) -> eyre::Result<()> {
// Give Etherscan some time to verify the contract.
Retry::new(retries, Duration::from_secs(30)).run(|| -> eyre::Result<()> {
let output = cmd.execute();
let out = String::from_utf8_lossy(&output.stdout);
test_debug!("{out}");
if out.contains("Contract successfully verified") {
return Ok(());
}
eyre::bail!(
"Failed to get verification, stdout: {}, stderr: {}",
out,
String::from_utf8_lossy(&output.stderr)
)
})
}
fn verify_check(
guid: String,
chain: String,
etherscan_api_key: Option<String>,
verifier: Option<String>,
mut cmd: TestCommand,
) {
let mut args = vec!["verify-check", &guid, "--chain-id", &chain];
if let Some(etherscan_api_key) = ðerscan_api_key {
args.push("--etherscan-api-key");
args.push(etherscan_api_key);
}
if let Some(verifier) = &verifier {
args.push("--verifier");
args.push(verifier);
}
cmd.forge_fuse().args(args);
parse_verification_result(&mut cmd, 6).expect("Failed to verify check")
}
fn await_verification_response(info: EnvExternalities, mut cmd: TestCommand) {
let guid = {
// Give Etherscan some time to detect the transaction.
Retry::new(5, Duration::from_secs(60))
.run(|| -> eyre::Result<String> {
let output = cmd.execute();
let out = String::from_utf8_lossy(&output.stdout);
utils::parse_verification_guid(&out).ok_or_else(|| {
eyre::eyre!(
"Failed to get guid, stdout: {}, stderr: {}",
out,
String::from_utf8_lossy(&output.stderr)
)
})
})
.expect("Failed to get verify guid")
};
// verify-check
let etherscan = (!info.etherscan.is_empty()).then_some(info.etherscan.clone());
let verifier = (!info.verifier.is_empty()).then_some(info.verifier.clone());
verify_check(guid, info.chain.to_string(), etherscan, verifier, cmd);
}
fn deploy_contract(
info: &EnvExternalities,
contract_path: &str,
prj: TestProject,
cmd: &mut TestCommand,
) -> String {
add_unique(&prj);
add_verify_target(&prj);
let output = cmd
.forge_fuse()
.arg("create")
.args(info.create_args())
.arg(contract_path)
.assert_success()
.get_output()
.stdout_lossy();
utils::parse_deployed_address(output.as_str())
.unwrap_or_else(|| panic!("Failed to parse deployer {output}"))
}
fn verify_on_chain(info: Option<EnvExternalities>, prj: TestProject, mut cmd: TestCommand) {
// only execute if keys present
if let Some(info) = info {
test_debug!("verifying on {}", info.chain);
let contract_path = "src/Verify.sol:Verify";
let address = deploy_contract(&info, contract_path, prj, &mut cmd);
let mut args = vec![
"--chain-id".to_string(),
info.chain.to_string(),
address,
contract_path.to_string(),
];
if !info.etherscan.is_empty() {
args.push("--etherscan-api-key".to_string());
args.push(info.etherscan.clone());
}
if !info.verifier.is_empty() {
args.push("--verifier".to_string());
args.push(info.verifier.clone());
}
cmd.forge_fuse().arg("verify-contract").root_arg().args(args);
await_verification_response(info, cmd)
}
}
fn guess_constructor_args(info: Option<EnvExternalities>, prj: TestProject, mut cmd: TestCommand) {
// only execute if keys present
if let Some(info) = info {
test_debug!("verifying on {}", info.chain);
add_unique(&prj);
add_verify_target_with_constructor(&prj);
let contract_path = "src/Verify.sol:Verify";
let output = cmd
.arg("create")
.args(info.create_args())
.arg(contract_path)
.args(vec![
"--constructor-args",
"(239,SomeString)",
"0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045",
])
.assert_success()
.get_output()
.stdout_lossy();
let address = utils::parse_deployed_address(output.as_str())
.unwrap_or_else(|| panic!("Failed to parse deployer {output}"));
cmd.forge_fuse().arg("verify-contract").root_arg().args([
"--rpc-url".to_string(),
info.rpc.to_string(),
address,
contract_path.to_string(),
"--etherscan-api-key".to_string(),
info.etherscan.to_string(),
"--verifier".to_string(),
info.verifier.to_string(),
"--guess-constructor-args".to_string(),
]);
await_verification_response(info, cmd)
}
}
/// Executes create --verify on the given chain
fn create_verify_on_chain(info: Option<EnvExternalities>, prj: TestProject, mut cmd: TestCommand) {
// only execute if keys present
if let Some(info) = info {
test_debug!("verifying on {}", info.chain);
add_single_verify_target_file(&prj);
let contract_path = "src/Verify.sol:Verify";
let output = cmd
.arg("create")
.args(info.create_args())
.args([contract_path, "--etherscan-api-key", info.etherscan.as_str(), "--verify"])
.assert_success()
.get_output()
.stdout_lossy();
assert!(output.contains("Contract successfully verified"), "{}", output);
}
}
// tests `create && contract-verify && verify-check` on Fantom testnet if correct env vars are set
forgetest!(can_verify_random_contract_fantom_testnet, |prj, cmd| {
verify_on_chain(EnvExternalities::ftm_testnet(), prj, cmd);
});
// tests `create && contract-verify && verify-check` on Optimism kovan if correct env vars are set
forgetest!(can_verify_random_contract_optimism_kovan, |prj, cmd| {
verify_on_chain(EnvExternalities::optimism_kovan(), prj, cmd);
});
// tests `create && contract-verify && verify-check` on Sepolia testnet if correct env vars are set
forgetest!(can_verify_random_contract_sepolia, |prj, cmd| {
// Implicitly tests `--verifier etherscan` on Sepolia testnet
verify_on_chain(EnvExternalities::sepolia_etherscan(), prj, cmd);
});
// tests `create --verify on Sepolia testnet if correct env vars are set
// SEPOLIA_RPC_URL=https://rpc.sepolia.org
// TEST_PRIVATE_KEY=0x...
// ETHERSCAN_API_KEY=<API_KEY>
forgetest!(can_create_verify_random_contract_sepolia_etherscan, |prj, cmd| {
// Implicitly tests `--verifier etherscan` on Sepolia testnet
create_verify_on_chain(EnvExternalities::sepolia_etherscan(), prj, cmd);
});
// tests `create --verify --verifier sourcify` on Sepolia testnet
forgetest!(can_create_verify_random_contract_sepolia_sourcify, |prj, cmd| {
verify_on_chain(EnvExternalities::sepolia_sourcify(), prj, cmd);
});
// tests `create --verify --verifier sourcify` with etherscan api key set
// <https://github.com/foundry-rs/foundry/issues/10000>
forgetest!(
can_create_verify_random_contract_sepolia_sourcify_with_etherscan_api_key_set,
|prj, cmd| {
verify_on_chain(EnvExternalities::sepolia_sourcify_with_etherscan_api_key_set(), prj, cmd);
}
);
// tests `create --verify --verifier blockscout` on Sepolia testnet
forgetest!(can_create_verify_random_contract_sepolia_blockscout, |prj, cmd| {
verify_on_chain(EnvExternalities::sepolia_blockscout(), prj, cmd);
});
// tests `create --verify --verifier blockscout` on Sepolia testnet with etherscan api key set
forgetest!(
can_create_verify_random_contract_sepolia_blockscout_with_etherscan_api_key_set,
|prj, cmd| {
verify_on_chain(
EnvExternalities::sepolia_blockscout_with_etherscan_api_key_set(),
prj,
cmd,
);
}
);
// tests `create && contract-verify --guess-constructor-args && verify-check` on Goerli testnet if
// correct env vars are set
forgetest!(can_guess_constructor_args, |prj, cmd| {
guess_constructor_args(EnvExternalities::goerli(), prj, cmd);
});
// tests `create && verify-contract && verify-check` on sepolia with default sourcify verifier
forgetest!(can_verify_random_contract_sepolia_default_sourcify, |prj, cmd| {
verify_on_chain(EnvExternalities::sepolia_empty_verifier(), prj, cmd);
});
// Tests that verify properly validates verifier arguments.
// <https://github.com/foundry-rs/foundry/issues/11430>
forgetest_init!(can_validate_verifier_settings, |prj, cmd| {
prj.initialize_default_contracts();
// Build the project to create the cache.
cmd.forge_fuse().arg("build").assert_success();
// No verifier URL.
cmd.forge_fuse()
.args([
"verify-contract",
"--rpc-url",
"https://rpc.sepolia-api.lisk.com",
"--verifier",
"blockscout",
"0x19b248616E4964f43F611b5871CE1250f360E9d3",
"src/Counter.sol:Counter",
])
.assert_failure()
.stderr_eq(str![[r#"
Error: No verifier URL specified for verifier blockscout
"#]]);
// Unknown Etherscan chain.
cmd.forge_fuse()
.args([
"verify-contract",
"--rpc-url",
"https://rpc.sepolia-api.lisk.com",
"--verifier",
"etherscan",
"0x19b248616E4964f43F611b5871CE1250f360E9d3",
"src/Counter.sol:Counter",
])
.assert_failure()
.stderr_eq(str![[r#"
Error: No known Etherscan API URL for chain `4202`. To fix this, please:
1. Specify a `url` when using Etherscan verifier
2. Verify the chain `4202` is correct
"#]]);
cmd.forge_fuse().args(["verify-contract", "--rpc-url", "https://rpc.sepolia-api.lisk.com", "--verifier", "blockscout", "--verifier-url", "https://sepolia-blockscout.lisk.com/api", "0x19b248616E4964f43F611b5871CE1250f360E9d3", "src/Counter.sol:Counter"]).assert_success().stdout_eq(str![[r#"
Start verifying contract `0x19b248616E4964f43F611b5871CE1250f360E9d3` deployed on 4202
Contract [src/Counter.sol:Counter] "0x19b248616E4964f43F611b5871CE1250f360E9d3" is already verified. Skipping verification.
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/utils.rs | crates/forge/tests/cli/utils.rs | //! Various helper functions
use alloy_chains::NamedChain;
use alloy_primitives::Address;
use alloy_signer_local::PrivateKeySigner;
/// Returns the current millis since unix epoch.
///
/// This way we generate unique contracts so, etherscan will always have to verify them
pub fn millis_since_epoch() -> u128 {
let now = std::time::SystemTime::now();
now.duration_since(std::time::SystemTime::UNIX_EPOCH)
.unwrap_or_else(|err| panic!("Current time {now:?} is invalid: {err:?}"))
.as_millis()
}
pub fn etherscan_key(chain: NamedChain) -> Option<String> {
match chain {
NamedChain::Fantom | NamedChain::FantomTestnet => {
std::env::var("FTMSCAN_API_KEY").or_else(|_| std::env::var("FANTOMSCAN_API_KEY")).ok()
}
NamedChain::OptimismKovan => std::env::var("OP_KOVAN_API_KEY").ok(),
_ => std::env::var("ETHERSCAN_API_KEY").ok(),
}
}
pub fn network_rpc_key(chain: &str) -> Option<String> {
let key = format!("{}_RPC_URL", chain.to_uppercase().replace('-', "_"));
std::env::var(key).ok()
}
pub fn network_private_key(chain: &str) -> Option<String> {
let key = format!("{}_PRIVATE_KEY", chain.to_uppercase().replace('-', "_"));
std::env::var(key).or_else(|_| std::env::var("TEST_PRIVATE_KEY")).ok()
}
/// Represents external input required for executing verification requests
pub struct EnvExternalities {
pub chain: NamedChain,
pub rpc: String,
pub pk: String,
pub etherscan: String,
pub verifier: String,
}
impl EnvExternalities {
pub fn address(&self) -> Option<Address> {
let pk: PrivateKeySigner = self.pk.parse().ok()?;
Some(pk.address())
}
pub fn goerli() -> Option<Self> {
Some(Self {
chain: NamedChain::Goerli,
rpc: network_rpc_key("goerli")?,
pk: network_private_key("goerli")?,
etherscan: etherscan_key(NamedChain::Goerli)?,
verifier: "etherscan".to_string(),
})
}
pub fn ftm_testnet() -> Option<Self> {
Some(Self {
chain: NamedChain::FantomTestnet,
rpc: network_rpc_key("ftm_testnet")?,
pk: network_private_key("ftm_testnet")?,
etherscan: etherscan_key(NamedChain::FantomTestnet)?,
verifier: "etherscan".to_string(),
})
}
pub fn optimism_kovan() -> Option<Self> {
Some(Self {
chain: NamedChain::OptimismKovan,
rpc: network_rpc_key("op_kovan")?,
pk: network_private_key("op_kovan")?,
etherscan: etherscan_key(NamedChain::OptimismKovan)?,
verifier: "etherscan".to_string(),
})
}
pub fn arbitrum_goerli() -> Option<Self> {
Some(Self {
chain: NamedChain::ArbitrumGoerli,
rpc: network_rpc_key("arbitrum-goerli")?,
pk: network_private_key("arbitrum-goerli")?,
etherscan: etherscan_key(NamedChain::ArbitrumGoerli)?,
verifier: "blockscout".to_string(),
})
}
pub fn amoy() -> Option<Self> {
Some(Self {
chain: NamedChain::PolygonAmoy,
rpc: network_rpc_key("amoy")?,
pk: network_private_key("amoy")?,
etherscan: etherscan_key(NamedChain::PolygonAmoy)?,
verifier: "etherscan".to_string(),
})
}
pub fn sepolia_etherscan() -> Option<Self> {
Some(Self {
chain: NamedChain::Sepolia,
rpc: network_rpc_key("sepolia")?,
pk: network_private_key("sepolia")?,
etherscan: etherscan_key(NamedChain::Sepolia)?,
verifier: "etherscan".to_string(),
})
}
pub fn sepolia_sourcify() -> Option<Self> {
Some(Self {
chain: NamedChain::Sepolia,
rpc: network_rpc_key("sepolia")?,
pk: network_private_key("sepolia")?,
etherscan: String::new(),
verifier: "sourcify".to_string(),
})
}
pub fn sepolia_sourcify_with_etherscan_api_key_set() -> Option<Self> {
Some(Self {
chain: NamedChain::Sepolia,
rpc: network_rpc_key("sepolia")?,
pk: network_private_key("sepolia")?,
etherscan: etherscan_key(NamedChain::Sepolia)?,
verifier: "sourcify".to_string(),
})
}
pub fn sepolia_blockscout() -> Option<Self> {
Some(Self {
chain: NamedChain::Sepolia,
rpc: network_rpc_key("sepolia")?,
pk: network_private_key("sepolia")?,
etherscan: String::new(),
verifier: "blockscout".to_string(),
})
}
pub fn sepolia_blockscout_with_etherscan_api_key_set() -> Option<Self> {
Some(Self {
chain: NamedChain::Sepolia,
rpc: network_rpc_key("sepolia")?,
pk: network_private_key("sepolia")?,
etherscan: etherscan_key(NamedChain::Sepolia)?,
verifier: "blockscout".to_string(),
})
}
pub fn sepolia_empty_verifier() -> Option<Self> {
Some(Self {
chain: NamedChain::Sepolia,
rpc: network_rpc_key("sepolia")?,
pk: network_private_key("sepolia")?,
etherscan: String::new(),
verifier: String::new(),
})
}
/// Returns the arguments required to deploy the contract
pub fn create_args(&self) -> Vec<String> {
vec![
"--chain".to_string(),
self.chain.to_string(),
"--rpc-url".to_string(),
self.rpc.clone(),
"--private-key".to_string(),
self.pk.clone(),
]
}
}
/// Parses the address the contract was deployed to
pub fn parse_deployed_address(out: &str) -> Option<String> {
for line in out.lines() {
if line.starts_with("Deployed to") {
return Some(line.trim_start_matches("Deployed to: ").to_string());
}
}
None
}
pub fn parse_verification_guid(out: &str) -> Option<String> {
for line in out.lines() {
if line.contains("GUID") {
return Some(line.replace("GUID:", "").replace('`', "").trim().to_string());
}
}
None
}
/// Generates a string containing the code of a Solidity contract.
///
/// This contract compiles to a large init bytecode size, but small runtime size.
pub fn generate_large_init_contract(n: usize) -> String {
let data = vec![0xff; n];
let hex = alloy_primitives::hex::encode(data);
format!(
"\
contract LargeContract {{
constructor() {{
bytes memory data = hex\"{hex}\";
assembly {{
pop(mload(data))
}}
}}
}}
"
)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/failure_assertions.rs | crates/forge/tests/cli/failure_assertions.rs | // Tests in which we want to assert failures.
forgetest!(test_fail_deprecation, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"DeprecationTestFail.t.sol",
r#"
import "./test.sol";
contract DeprecationTestFail is DSTest {
function testFail_deprecated() public {
revert("deprecated");
}
function testFail_deprecated2() public {
revert("deprecated2");
}
}
"#,
);
cmd.forge_fuse()
.args(["test", "--mc", "DeprecationTestFail"])
.assert_failure()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
Failing tests:
Encountered 2 failing tests in src/DeprecationTestFail.t.sol:DeprecationTestFail
[FAIL: `testFail*` has been removed. Consider changing to test_Revert[If|When]_Condition and expecting a revert] testFail_deprecated() ([GAS])
[FAIL: `testFail*` has been removed. Consider changing to test_Revert[If|When]_Condition and expecting a revert] testFail_deprecated2() ([GAS])
Encountered a total of 2 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
forgetest!(expect_revert_tests_should_fail, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
let expect_revert_failure_tests = include_str!("../fixtures/ExpectRevertFailures.t.sol");
prj.add_source("ExpectRevertFailures.sol", expect_revert_failure_tests);
cmd.forge_fuse()
.args(["test", "--mc", "ExpectRevertFailureTest"])
.assert_failure()
.stdout_eq(
r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: next call did not revert as expected] testShouldFailExpectRevertAnyRevertDidNotRevert() ([GAS])
[FAIL: next call did not revert as expected] testShouldFailExpectRevertDangling() ([GAS])
[FAIL: next call did not revert as expected] testShouldFailExpectRevertDidNotRevert() ([GAS])
[FAIL: Error != expected error: but reverts with this message != should revert with this message] testShouldFailExpectRevertErrorDoesNotMatch() ([GAS])
[FAIL: next call did not revert as expected] testShouldFailRevertNotOnImmediateNextCall() ([GAS])
[FAIL: some message] testShouldFailexpectCheatcodeRevertForCreate() ([GAS])
[FAIL: revert] testShouldFailexpectCheatcodeRevertForExtCall() ([GAS])
Suite result: FAILED. 0 passed; 7 failed; 0 skipped; [ELAPSED]
...
"#,
);
cmd.forge_fuse()
.args(["test", "--mc", "ExpectRevertWithReverterFailureTest"])
.assert_failure()
.stdout_eq(
r#"No files changed, compilation skipped
...
[FAIL: next call did not revert as expected] testShouldFailExpectRevertsNotOnImmediateNextCall() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
...
"#,
);
cmd.forge_fuse()
.args(["test", "--mc", "ExpectRevertCountFailureTest"])
.assert_failure()
.stdout_eq(
r#"No files changed, compilation skipped
...
[FAIL: call reverted with 'my cool error' when it was expected not to revert] testShouldFailIfExpectRevertWrongString() ([GAS])
[FAIL: call reverted when it was expected not to revert] testShouldFailNoRevert() ([GAS])
[FAIL: expected 0 reverts with reason: revert, but got one] testShouldFailNoRevertSpecific() ([GAS])
[FAIL: next call did not revert as expected] testShouldFailRevertCountAny() ([GAS])
[FAIL: Error != expected error: wrong revert != called a function and then reverted] testShouldFailRevertCountCallsThenReverts() ([GAS])
[FAIL: Error != expected error: second-revert != revert] testShouldFailRevertCountSpecific() ([GAS])
Suite result: FAILED. 0 passed; 6 failed; 0 skipped; [ELAPSED]
...
"#,
);
cmd.forge_fuse()
.args(["test", "--mc", "ExpectRevertCountWithReverterFailures"])
.assert_failure()
.stdout_eq(r#"No files changed, compilation skipped
...
[FAIL: call reverted with 'revert' from 0x2e234DAe75C793f67A35089C9d99245E1C58470b, but expected 0 reverts from 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f] testNoRevertWithWrongReverter() ([GAS])
[FAIL: call reverted with 'revert2' from 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, but expected 0 reverts with reason 'revert' from 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f] testNoReverterCountWithData() ([GAS])
[FAIL: expected 0 reverts from address: 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, but got one] testShouldFailNoRevertWithReverter() ([GAS])
[FAIL: Reverter != expected reverter: 0x2e234DAe75C793f67A35089C9d99245E1C58470b != 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f] testShouldFailRevertCountWithReverter() ([GAS])
[FAIL: Error != expected error: wrong revert != revert] testShouldFailReverterCountWithWrongData() ([GAS])
[FAIL: Reverter != expected reverter: 0x2e234DAe75C793f67A35089C9d99245E1C58470b != 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f] testShouldFailWrongReverterCountWithData() ([GAS])
Suite result: FAILED. 0 passed; 6 failed; 0 skipped; [ELAPSED]
...
"#);
});
forgetest!(expect_call_tests_should_fail, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
let expect_call_failure_tests = include_str!("../fixtures/ExpectCallFailures.t.sol");
prj.add_source("ExpectCallFailures.sol", expect_call_failure_tests);
cmd.forge_fuse().args(["test", "--mc", "ExpectCallFailureTest"]).assert_failure().stdout_eq(
r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0xc290d6910000000000000000000000000000000000000000000000000000000000000002, value 1 to be called 1 time, but was called 0 times] testShouldFailExpectCallValue() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002 to be called 1 time, but was called 0 times] testShouldFailExpectCallWithData() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f7000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003 to be called 1 time, but was called 0 times] testShouldFailExpectCallWithMoreParameters() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001, value 0, gas 25000 to be called 1 time, but was called 0 times] testShouldFailExpectCallWithNoValueAndWrongGas() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001, value 0, minimum gas 50001 to be called 1 time, but was called 0 times] testShouldFailExpectCallWithNoValueAndWrongMinGas() ([GAS])
[FAIL: next call did not revert as expected] testShouldFailExpectCallWithRevertDisallowed() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x3fc7c698 to be called 1 time, but was called 0 times] testShouldFailExpectInnerCall() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002 to be called 3 times, but was called 2 times] testShouldFailExpectMultipleCallsWithDataAdditive() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f7 to be called 1 time, but was called 0 times] testShouldFailExpectSelectorCall() ([GAS])
Suite result: FAILED. 0 passed; 9 failed; 0 skipped; [ELAPSED]
...
"#,
);
cmd.forge_fuse()
.args(["test", "--mc", "ExpectCallCountFailureTest"])
.assert_failure()
.stdout_eq(
r#"No files changed, compilation skipped
...
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0xc290d6910000000000000000000000000000000000000000000000000000000000000002, value 1 to be called 1 time, but was called 0 times] testShouldFailExpectCallCountValue() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001, value 0, gas 25000 to be called 2 times, but was called 0 times] testShouldFailExpectCallCountWithNoValueAndWrongGas() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001, value 0, minimum gas 50001 to be called 1 time, but was called 0 times] testShouldFailExpectCallCountWithNoValueAndWrongMinGas() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x771602f700000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002 to be called 2 times, but was called 1 time] testShouldFailExpectCallCountWithWrongCount() ([GAS])
[FAIL: expected call to 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f with data 0x3fc7c698 to be called 1 time, but was called 0 times] testShouldFailExpectCountInnerCall() ([GAS])
Suite result: FAILED. 0 passed; 5 failed; 0 skipped; [ELAPSED]
...
"#,
);
cmd.forge_fuse()
.args(["test", "--mc", "ExpectCallMixedFailureTest"])
.assert_failure()
.stdout_eq(
r#"No files changed, compilation skipped
...
[FAIL: vm.expectCall: counted expected calls can only bet set once] testShouldFailOverrideCountWithCount() ([GAS])
[FAIL: vm.expectCall: cannot overwrite a counted expectCall with a non-counted expectCall] testShouldFailOverrideCountWithNoCount() ([GAS])
[FAIL: vm.expectCall: counted expected calls can only bet set once] testShouldFailOverrideNoCountWithCount() ([GAS])
Suite result: FAILED. 0 passed; 3 failed; 0 skipped; [ELAPSED]
...
"#,
);
});
forgetest!(expect_create_tests_should_fail, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
let expect_create_failures = include_str!("../fixtures/ExpectCreateFailures.t.sol");
prj.add_source("ExpectCreateFailures.t.sol", expect_create_failures);
cmd.forge_fuse().args(["test", "--mc", "ExpectCreateFailureTest"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: expected CREATE call by address 0x7fa9385be102ac3eac297483dd6233d62b3e1496 for bytecode [..] but not found] testShouldFailExpectCreate() ([GAS])
[FAIL: expected CREATE2 call by address 0x7fa9385be102ac3eac297483dd6233d62b3e1496 for bytecode [..] but not found] testShouldFailExpectCreate2() ([GAS])
[FAIL: expected CREATE2 call by address 0x7fa9385be102ac3eac297483dd6233d62b3e1496 for bytecode [..] but not found] testShouldFailExpectCreate2WrongBytecode() ([GAS])
[FAIL: expected CREATE2 call by address 0x0000000000000000000000000000000000000000 for bytecode [..] but not found] testShouldFailExpectCreate2WrongDeployer() ([GAS])
[FAIL: expected CREATE2 call by address 0x7fa9385be102ac3eac297483dd6233d62b3e1496 for bytecode [..] but not found] testShouldFailExpectCreate2WrongScheme() ([GAS])
[FAIL: expected CREATE call by address 0x7fa9385be102ac3eac297483dd6233d62b3e1496 for bytecode [..] but not found] testShouldFailExpectCreateWrongBytecode() ([GAS])
[FAIL: expected CREATE call by address 0x0000000000000000000000000000000000000000 for bytecode [..] but not found] testShouldFailExpectCreateWrongDeployer() ([GAS])
[FAIL: expected CREATE call by address 0x7fa9385be102ac3eac297483dd6233d62b3e1496 for bytecode [..] but not found] testShouldFailExpectCreateWrongScheme() ([GAS])
Suite result: FAILED. 0 passed; 8 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
// Checks the failure messages produced by unmet `expectEmit` expectations, covering event-name
// mismatches, parameter mismatches, emitter-address mismatches, and dangling expectations, for
// both the plain and the counted (`ExpectEmitCountFailureTest`) variants.
forgetest!(expect_emit_tests_should_fail, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    let expect_emit_failure_tests = include_str!("../fixtures/ExpectEmitFailures.t.sol");
    prj.add_source("ExpectEmitFailures.sol", expect_emit_failure_tests);
    // Build first so the test runs below hit the cache ("No files changed").
    cmd.forge_fuse().arg("build").assert_success();
    // NOTE(review): presumably caches local selectors so event names decode in the output —
    // confirm against the `selectors cache` subcommand.
    cmd.forge_fuse().args(["selectors", "cache"]).assert_success();
    cmd.forge_fuse().args(["test", "--mc", "ExpectEmitFailureTest"]).assert_failure().stdout_eq(str![[r#"No files changed, compilation skipped
...
[FAIL: E != expected A] testShouldFailCanMatchConsecutiveEvents() ([GAS])
[FAIL: log != expected SomethingElse] testShouldFailDifferentIndexedParameters() ([GAS])
[FAIL: log != expected log] testShouldFailEmitOnlyAppliesToNextCall() ([GAS])
[FAIL: next call did not revert as expected] testShouldFailEmitWindowWithRevertDisallowed() ([GAS])
[FAIL: E != expected A] testShouldFailEventsOnTwoCalls() ([GAS])
[FAIL: Something param mismatch at [..]: expected=[..], got=[..]; counterexample: calldata=[..] args=[..]] testShouldFailExpectEmit(bool,bool,bool,bool,uint128,uint128,uint128,uint128) (runs: 0, [AVG_GAS])
[FAIL: log emitter mismatch: expected=[..], got=[..]] testShouldFailExpectEmitAddress() ([GAS])
[FAIL: log emitter mismatch: expected=[..], got=[..]] testShouldFailExpectEmitAddressWithArgs() ([GAS])
[FAIL: Something != expected SomethingElse] testShouldFailExpectEmitCanMatchWithoutExactOrder() ([GAS])
[FAIL: expected an emit, but no logs were emitted afterwards. you might have mismatched events or not enough events were emitted] testShouldFailExpectEmitDanglingNoReference() ([GAS])
[FAIL: expected an emit, but no logs were emitted afterwards. you might have mismatched events or not enough events were emitted] testShouldFailExpectEmitDanglingWithReference() ([GAS])
[FAIL: Something param mismatch at [..]: expected=[..], got=[..]; counterexample: calldata=[..] args=[..]] testShouldFailExpectEmitNested(bool,bool,bool,bool,uint128,uint128,uint128,uint128) (runs: 0, [AVG_GAS])
[FAIL: log != expected log] testShouldFailLowLevelWithoutEmit() ([GAS])
[FAIL: log != expected log] testShouldFailMatchRepeatedEventsOutOfOrder() ([GAS])
[FAIL: log != expected log] testShouldFailNoEmitDirectlyOnNextCall() ([GAS])
Suite result: FAILED. 0 passed; 15 failed; 0 skipped; [ELAPSED]
...
"#]]);
    // Counted variant: expectations with explicit counts (including count 0) must also fail
    // with descriptive messages.
    cmd.forge_fuse()
        .args(["test", "--mc", "ExpectEmitCountFailureTest"])
        .assert_failure()
        .stdout_eq(
            r#"No files changed, compilation skipped
...
[FAIL: log != expected log] testShouldFailCountEmitsFromAddress() ([GAS])
[FAIL: log != expected log] testShouldFailCountLessEmits() ([GAS])
[FAIL: log != expected Something] testShouldFailEmitSomethingElse() ([GAS])
[FAIL: log emitted but expected 0 times] testShouldFailNoEmit() ([GAS])
[FAIL: log emitted but expected 0 times] testShouldFailNoEmitFromAddress() ([GAS])
Suite result: FAILED. 0 passed; 5 failed; 0 skipped; [ELAPSED]
...
"#,
        );
});
// Checks the per-parameter diagnostics of `expectEmit` failures: mismatches are reported by
// parameter name (or positional `param<N>` for anonymous/unnamed parameters) with expected and
// actual values.
forgetest!(expect_emit_params_tests_should_fail, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    prj.update_config(|config| {
        // NOTE(review): setting this to 0 presumably keeps fuzz inputs away from source
        // literals so the expected/actual values below stay deterministic — confirm.
        config.fuzz.dictionary.max_fuzz_dictionary_literals = 0;
    });
    // Harness (event emitter) and failing tests live in shared fixture files.
    let expect_emit_failure_src = include_str!("../fixtures/ExpectEmitParamHarness.sol");
    let expect_emit_failure_tests = include_str!("../fixtures/ExpectEmitParamFailures.t.sol");
    prj.add_source("ExpectEmitParamHarness.sol", expect_emit_failure_src);
    prj.add_source("ExpectEmitParamFailures.sol", expect_emit_failure_tests);
    cmd.forge_fuse().arg("build").assert_success();
    cmd.forge_fuse().args(["test", "--mc", "ExpectEmitParamFailures"]).assert_failure().stdout_eq(
        r#"No files changed, compilation skipped
...
[PASS] testSelectiveChecks() ([GAS])
Suite result: FAILED. 1 passed; 8 failed; 0 skipped; [ELAPSED]
...
[FAIL: anonymous log mismatch at param 0: expected=0x0000000000000000000000000000000000000000000000000000000000000064, got=0x00000000000000000000000000000000000000000000000000000000000003e7] testAnonymousEventMismatch() ([GAS])
[FAIL: ComplexEvent != expected SimpleEvent] testCompletelyDifferentEvent() ([GAS])
[FAIL: SimpleEvent param mismatch at b: expected=200, got=999] testIndexedParamMismatch() ([GAS])
[FAIL: ManyParams param mismatch at a: expected=100, got=111, b: expected=200, got=222, c: expected=300, got=333, d: expected=400, got=444, e: expected=500, got=555] testManyParameterMismatches() ([GAS])
[FAIL: SimpleEvent param mismatch at c: expected=300, got=999] testMixedEventNonIndexedMismatch() ([GAS])
[FAIL: SimpleEvent param mismatch at a: expected=100, got=999, b: expected=200, got=888, c: expected=300, got=777] testMultipleMismatches() ([GAS])
[FAIL: SimpleEvent param mismatch at c: expected=300, got=999] testNonIndexedParamMismatch() ([GAS])
[FAIL: MixedEventNumbering param mismatch at param2: expected=300, got=999] testParameterNumbering() ([GAS])
Encountered a total of 8 failing tests, 1 tests succeeded
...
"#,
    );
});
// Checks that `expectSafeMemory` violations fail with a message naming the offending write
// offset, size, and the currently allowed safe memory ranges, across the memory-writing
// opcodes (MSTORE, MSTORE8, CALL variants, CREATE variants, *COPY, LOG0, SHA3, RETURN, ...).
forgetest!(mem_safety_test_should_fail, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    // The failing test contracts live in a shared fixture file.
    let mem_safety_failure_tests = include_str!("../fixtures/MemSafetyFailures.t.sol");
    prj.add_source("MemSafetyFailures.sol", mem_safety_failure_tests);
    cmd.forge_fuse().args(["test", "--mc", "MemSafetyFailureTest"]).assert_failure().stdout_eq(
        r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: Expected call to fail] testShouldFailExpectSafeMemoryCall() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x60 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_CALL() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x60 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_CALLCODE() ([GAS])
[FAIL: memory write at offset 0xA0 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0xA0]; counterexample: calldata=[..] args=[..]] testShouldFailExpectSafeMemory_CALLDATACOPY(uint256) (runs: 0, [AVG_GAS])
[FAIL: memory write at offset 0x80 of size [..] not allowed; safe range: (0x00, 0x60] U (0x80, 0xA0]] testShouldFailExpectSafeMemory_CODECOPY() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_CREATE() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_CREATE2() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x60 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_DELEGATECALL() ([GAS])
[FAIL: memory write at offset 0xA0 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0xA0]] testShouldFailExpectSafeMemory_EXTCODECOPY() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_LOG0() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_MLOAD() ([GAS])
[FAIL: memory write at offset 0x81 of size 0x01 not allowed; safe range: (0x00, 0x60] U (0x80, 0x81]] testShouldFailExpectSafeMemory_MSTORE8_High() ([GAS])
[FAIL: memory write at offset 0x60 of size 0x01 not allowed; safe range: (0x00, 0x60] U (0x80, 0x81]] testShouldFailExpectSafeMemory_MSTORE8_Low() ([GAS])
[FAIL: memory write at offset 0xA0 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0xA0]] testShouldFailExpectSafeMemory_MSTORE_High() ([GAS])
[FAIL: memory write at offset 0x60 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0xA0]] testShouldFailExpectSafeMemory_MSTORE_Low() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_RETURN() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x60 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_RETURNDATACOPY() ([GAS])
[FAIL: EvmError: Revert] testShouldFailExpectSafeMemory_REVERT() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_SHA3() ([GAS])
[FAIL: memory write at offset 0x100 of size 0x60 not allowed; safe range: (0x00, 0x60] U (0x80, 0x100]] testShouldFailExpectSafeMemory_STATICCALL() ([GAS])
[FAIL: memory write at offset 0xA0 of size 0x20 not allowed; safe range: (0x00, 0x60] U (0x80, 0xA0]] testShouldFailStopExpectSafeMemory() ([GAS])
Suite result: FAILED. 0 passed; 21 failed; 0 skipped; [ELAPSED]
...
"#,
    );
});
// Checks DSTest-style (non-reverting) assertions: failed `assertEq`s do not abort the test, so
// all interleaved logs and both assertion error reports appear in order, including the final
// "done" log after the second failure.
forgetest!(ds_style_test_failing, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "DSStyleTest.t.sol",
        r#"
import "./test.sol";
contract DSStyleTest is DSTest {
function testDSTestFailingAssertions() public {
emit log_string("assertionOne");
assertEq(uint256(1), uint256(2));
emit log_string("assertionTwo");
assertEq(uint256(3), uint256(4));
emit log_string("done");
}
}
"#,
    );
    // `-vv` is required so the emitted logs show up in the output.
    cmd.forge_fuse().args(["test", "--mc", "DSStyleTest", "-vv"]).assert_failure().stdout_eq(
        r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL] testDSTestFailingAssertions() ([GAS])
Logs:
assertionOne
Error: a == b not satisfied [uint]
Expected: 2
Actual: 1
assertionTwo
Error: a == b not satisfied [uint]
Expected: 4
Actual: 3
done
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
...
"#,
    );
});
// Checks that a reverting `setUp()` marks the suite as failed: the failure is attributed to
// `setUp()` itself with the revert reason, and the test body never runs.
forgetest!(failing_setup, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "FailingSetupTest.t.sol",
        r#"
import "./test.sol";
contract FailingSetupTest is DSTest {
event Test(uint256 n);
function setUp() public {
emit Test(42);
require(false, "setup failed predictably");
}
function testShouldBeMarkedAsFailedBecauseOfSetup() public {
emit log("setup did not fail");
}
}
"#,
    );
    cmd.args(["test", "--mc", "FailingSetupTest"]).assert_failure().stdout_eq(str![[
        r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: setup failed predictably] setUp() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
...
"#
    ]]);
});
// Checks that declaring more than one `afterInvariant` hook fails the suite. The expected
// output shows the check matches case-insensitively: `afterInvariant` plus `afterinvariant`
// triggers the "multiple afterInvariant functions" error.
forgetest!(multiple_after_invariants, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "MultipleAfterInvariantsTest.t.sol",
        r#"
import "./test.sol";
contract MultipleAfterInvariant is DSTest {
function afterInvariant() public {}
function afterinvariant() public {}
function testFailShouldBeMarkedAsFailedBecauseOfAfterInvariant()
public
pure
{
assert(true);
}
}
"#,
    );
    cmd.args(["test", "--mc", "MultipleAfterInvariant"]).assert_failure().stdout_eq(str![[
        r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: multiple afterInvariant functions] afterInvariant() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
...
"#
    ]]);
});
// Checks that declaring more than one `setUp` hook fails the suite. As with `afterInvariant`
// above, the expected output shows the check matches case-insensitively (`setUp` + `setup`).
forgetest!(multiple_setups, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source(
        "MultipleSetupsTest.t.sol",
        r#"
import "./test.sol";
contract MultipleSetup is DSTest {
function setUp() public {}
function setup() public {}
function testFailShouldBeMarkedAsFailedBecauseOfSetup() public {
assert(true);
}
}
"#,
    );
    cmd.forge_fuse().args(["test", "--mc", "MultipleSetup"]).assert_failure().stdout_eq(str![[
        r#"[COMPILING_FILES] with [SOLC_VERSION]
...
[FAIL: multiple setUp functions] setUp() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
..."#
    ]]);
});
// Checks that `expectEmitAnonymous` fails when a different anonymous event (mismatched
// non-indexed data) is emitted than the one expected.
forgetest!(emit_diff_anonymous, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    prj.add_source(
        "EmitDiffAnonymousTest.t.sol",
        r#"
import "./test.sol";
import "./Vm.sol";
contract Target {
event AnonymousEventNonIndexed(uint256 a) anonymous;
function emitAnonymousEventNonIndexed(uint256 a) external {
emit AnonymousEventNonIndexed(a);
}
}
contract EmitDiffAnonymousTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
Target target;
event DifferentAnonymousEventNonIndexed(string a) anonymous;
function setUp() public {
target = new Target();
}
function testShouldFailEmitDifferentEventNonIndexed() public {
vm.expectEmitAnonymous(false, false, false, false, true);
emit DifferentAnonymousEventNonIndexed("1");
target.emitAnonymousEventNonIndexed(1);
}
}
"#,
    );
    cmd.forge_fuse().args(["test", "--mc", "EmitDiffAnonymousTest"]).assert_failure().stdout_eq(
        str![[r#"[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: log != expected log] testShouldFailEmitDifferentEventNonIndexed() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
...
"#]],
    );
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/ext_integration.rs | crates/forge/tests/cli/ext_integration.rs | use foundry_test_utils::util::ExtTester;
// Actively maintained tests
// Last updated: June 19th 2025
// <https://github.com/foundry-rs/forge-std>
#[test]
fn forge_std() {
    let mut tester =
        ExtTester::new("foundry-rs", "forge-std", "b69e66b0ff79924d487d49bf7fb47c9ec326acba");
    // Fork tests are excluded.
    tester = tester.args(["--nmc", "Fork"]);
    tester.verbosity(2).run();
}
// <https://github.com/PaulRBerg/prb-math>
#[test]
#[cfg_attr(windows, ignore = "Windows cannot find installed programs")]
fn prb_math() {
    let mut tester =
        ExtTester::new("PaulRBerg", "prb-math", "aad73cfc6cdc2c9b660199b5b1e9db391ea48640");
    tester = tester.install_command(&["bun", "install", "--prefer-offline"]);
    // npm is the fallback when bun fails or is not installed.
    tester = tester.install_command(&["npm", "install", "--prefer-offline"]);
    tester.run();
}
// <https://github.com/PaulRBerg/prb-proxy>
#[test]
#[cfg_attr(windows, ignore = "Windows cannot find installed programs")]
fn prb_proxy() {
    let mut tester =
        ExtTester::new("PaulRBerg", "prb-proxy", "e45f5325d4b6003227a6c4bdaefac9453f89de2e");
    tester = tester.install_command(&["bun", "install", "--prefer-offline"]);
    // npm is the fallback when bun fails or is not installed.
    tester = tester.install_command(&["npm", "install", "--prefer-offline"]);
    tester.run();
}
// <https://github.com/sablier-labs/v2-core>
#[test]
#[cfg_attr(windows, ignore = "Windows cannot find installed programs")]
fn sablier_v2_core() {
    let gas_limit = u64::MAX.to_string();
    let mut tester =
        ExtTester::new("sablier-labs", "v2-core", "d85521f5615f6c19612ff250ee89c57b9afa6aa2");
    // Fork tests are skipped.
    tester = tester.args(["--nmc", "Fork"]);
    // Increase the gas limit: https://github.com/sablier-labs/v2-core/issues/956
    tester = tester.args(["--gas-limit", &gas_limit]);
    // Run tests without optimizations.
    tester = tester.env("FOUNDRY_PROFILE", "lite");
    tester = tester.install_command(&["bun", "install", "--prefer-offline"]);
    // npm is the fallback when bun fails or is not installed.
    tester = tester.install_command(&["npm", "install", "--prefer-offline"]);
    tester = tester.verbosity(2);
    // This test reverts due to memory limit without isolation. This revert is not reached with
    // isolation because memory is divided between separate EVMs created by inner calls.
    if cfg!(feature = "isolate-by-default") {
        tester = tester.args(["--nmt", "test_RevertWhen_LoopCalculationOverflowsBlockGasLimit"]);
    }
    tester.run();
}
// <https://github.com/Vectorized/solady>
#[test]
fn solady() {
    let tester = ExtTester::new("Vectorized", "solady", "cbcfe0009477aa329574f17e8db0a05703bb8bdd");
    tester.run();
}
// <https://github.com/pcaversaccio/snekmate>
#[test]
#[cfg_attr(windows, ignore = "Windows cannot find installed programs")]
#[cfg(not(feature = "isolate-by-default"))]
fn snekmate() {
    let mut tester =
        ExtTester::new("pcaversaccio", "snekmate", "601031d244475b160a00f73053532528bf665cc3");
    tester = tester.install_command(&["pnpm", "install", "--prefer-offline"]);
    // npm is the fallback when pnpm fails or is not installed.
    tester = tester.install_command(&["npm", "install", "--prefer-offline"]);
    tester.run();
}
// <https://github.com/mds1/multicall>
#[test]
fn mds1_multicall3() {
    let tester = ExtTester::new("mds1", "multicall", "5f90062160aedb7c807fadca469ac783a0557b57");
    tester.run();
}
// Legacy tests
// <https://github.com/Arachnid/solidity-stringutils>
#[test]
fn solidity_stringutils() {
    let tester =
        ExtTester::new("Arachnid", "solidity-stringutils", "4b2fcc43fa0426e19ce88b1f1ec16f5903a2e461");
    tester.run();
}
// <https://github.com/m1guelpf/lil-web3>
#[test]
fn lil_web3() {
    let tester = ExtTester::new("m1guelpf", "lil-web3", "7346bd28c2586da3b07102d5290175a276949b15");
    tester.run();
}
// <https://github.com/makerdao/multicall>
#[test]
fn makerdao_multicall() {
    let tester = ExtTester::new("makerdao", "multicall", "103a8a28e4e372d582d6539b30031bda4cd48e21");
    tester.run();
}
// Legacy forking tests
// <https://github.com/hexonaut/guni-lev>
#[test]
fn gunilev() {
    let tester = ExtTester::new("hexonaut", "guni-lev", "15ee8b4c2d28e553c5cd5ba9a2a274af97563bc4");
    // Pinned fork block keeps the on-chain state deterministic.
    tester.fork_block(13633752).run();
}
// <https://github.com/mds1/convex-shutdown-simulation>
#[test]
fn convex_shutdown_simulation() {
    let tester = ExtTester::new(
        "mds1",
        "convex-shutdown-simulation",
        "2537cdebce4396753225c5e616c8e00547d2fcea",
    );
    // Pinned fork block keeps the on-chain state deterministic.
    tester.fork_block(14445961).run();
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
// Checks that inline `forge-config` comments override `fuzz.runs` per test function — both the
// `/** ... */` form and the whitespace-heavy `///`-with-tabs form — and that `forge coverage`
// honors the same inline configuration.
forgetest!(runs, |prj, cmd| {
    prj.add_test(
        "inline.sol",
        "
contract Inline {
/** forge-config: default.fuzz.runs = 2 */
function test1(bool) public {}
\t///\t forge-config:\tdefault.fuzz.runs=\t3 \t
function test2(bool) public {}
}
",
    );
    cmd.arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 2 tests for test/inline.sol:Inline
[PASS] test1(bool) (runs: 2, [AVG_GAS])
[PASS] test2(bool) (runs: 3, [AVG_GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
    // Make sure inline config is parsed in coverage too.
    cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Analysing contracts...
Running tests...
Ran 2 tests for test/inline.sol:Inline
[PASS] test1(bool) (runs: 2, [AVG_GAS])
[PASS] test2(bool) (runs: 3, [AVG_GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
╭-------+---------------+---------------+---------------+---------------╮
| File  | % Lines       | % Statements  | % Branches    | % Funcs       |
+=======================================================================+
| Total | 100.00% (0/0) | 100.00% (0/0) | 100.00% (0/0) | 100.00% (0/0) |
╰-------+---------------+---------------+---------------+---------------╯
"#]]);
});
// Checks that an inline config referencing an unknown profile is a hard error (reported on
// stderr with the source location), rather than being silently ignored.
forgetest!(invalid_profile, |prj, cmd| {
    prj.add_test(
        "inline.sol",
        "
/** forge-config: unknown.fuzz.runs = 2 */
contract Inline {
function test(bool) public {}
}
",
    );
    cmd.arg("test").assert_failure().stderr_eq(str![[r#"
Error: Inline config error at test/inline.sol:4:9: invalid profile `unknown.fuzz.runs = 2`; valid profiles: default
"#]]);
});
// TODO: Uncomment once this is done for normal config too.
/*
forgetest!(invalid_key, |prj, cmd| {
prj.add_test(
"inline.sol",
"
/** forge-config: default.fuzzz.runs = 2 */
contract Inline {
function test(bool) public {}
}
",
);
cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/inline.sol:Inline
[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/inline.sol:Inline
[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
"#]]);
});
forgetest!(invalid_key_2, |prj, cmd| {
prj.add_test(
"inline.sol",
"
/** forge-config: default.fuzz.runss = 2 */
contract Inline {
function test(bool) public {}
}
",
);
cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/inline.sol:Inline
[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/inline.sol:Inline
[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
"#]]);
});
*/
// Checks that an inline config value of the wrong TOML type (a sequence where a `u32` is
// expected) fails the test; the expected output shows the error is attributed to `setUp()`.
forgetest!(invalid_value, |prj, cmd| {
    prj.add_test(
        "inline.sol",
        "
/** forge-config: default.fuzz.runs = [2] */
contract Inline {
function test(bool) public {}
}
",
    );
    cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/inline.sol:Inline
[FAIL: invalid type: found sequence, expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/inline.sol:Inline
[FAIL: invalid type: found sequence, expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Same as `invalid_value` above, but with a string literal (`'2'`) where a `u32` is expected —
// string-to-number coercion is not performed for inline config values.
forgetest!(invalid_value_2, |prj, cmd| {
    prj.add_test(
        "inline.sol",
        "
/** forge-config: default.fuzz.runs = '2' */
contract Inline {
function test(bool) public {}
}
",
    );
    cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/inline.sol:Inline
[FAIL: invalid type: found string "2", expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/inline.sol:Inline
[FAIL: invalid type: found string "2", expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Checks inline `default.isolate = true` at both function level and contract level, using gas
// snapshots as the observable: isolated runs record higher gas than non-isolated ones, and
// function-level and contract-level isolation record the same gas for equivalent calls.
#[cfg(not(feature = "isolate-by-default"))]
forgetest_init!(config_inline_isolate, |prj, cmd| {
    use serde::{Deserialize, Deserializer};
    use std::{fs, path::Path};
    prj.add_test(
        "inline.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract Dummy {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
}
contract FunctionConfig is Test {
Dummy dummy;
function setUp() public {
dummy = new Dummy();
}
/// forge-config: default.isolate = true
function test_isolate() public {
vm.startSnapshotGas("testIsolatedFunction");
dummy.setNumber(1);
vm.stopSnapshotGas();
}
function test_non_isolate() public {
vm.startSnapshotGas("testNonIsolatedFunction");
dummy.setNumber(2);
vm.stopSnapshotGas();
}
}
/// forge-config: default.isolate = true
contract ContractConfig is Test {
Dummy dummy;
function setUp() public {
dummy = new Dummy();
}
function test_non_isolate() public {
vm.startSnapshotGas("testIsolatedContract");
dummy.setNumber(3);
vm.stopSnapshotGas();
}
}
"#,
    );
    // `-j1` forces single-threaded execution so the suite output order is deterministic.
    cmd.args(["test", "-j1"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/inline.sol:ContractConfig
[PASS] test_non_isolate() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 2 tests for test/inline.sol:FunctionConfig
[PASS] test_isolate() ([GAS])
[PASS] test_non_isolate() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 2 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests)
"#]]);
    // Gas snapshot files are written per contract.
    assert!(prj.root().join("snapshots/FunctionConfig.json").exists());
    assert!(prj.root().join("snapshots/ContractConfig.json").exists());
    // Deserialized shape of snapshots/FunctionConfig.json.
    #[derive(Debug, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct FunctionConfig {
        #[serde(deserialize_with = "string_to_u64")]
        test_isolated_function: u64,
        #[serde(deserialize_with = "string_to_u64")]
        test_non_isolated_function: u64,
    }
    // Deserialized shape of snapshots/ContractConfig.json.
    #[derive(Debug, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct ContractConfig {
        #[serde(deserialize_with = "string_to_u64")]
        test_isolated_contract: u64,
    }
    /// Accepts a gas value serialized either as a JSON string or as a JSON number.
    fn string_to_u64<'de, D>(deserializer: D) -> Result<u64, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s: serde_json::Value = Deserialize::deserialize(deserializer)?;
        match s {
            serde_json::Value::String(s) => s.parse::<u64>().map_err(serde::de::Error::custom),
            serde_json::Value::Number(n) if n.is_u64() => Ok(n.as_u64().unwrap()),
            _ => Err(serde::de::Error::custom("Expected a string or number")),
        }
    }
    /// Reads and deserializes a snapshot JSON file, panicking with context on failure.
    fn read_snapshot<T: for<'de> Deserialize<'de>>(path: &Path) -> T {
        let content = fs::read_to_string(path).expect("Failed to read file");
        serde_json::from_str(&content).expect("Failed to parse snapshot")
    }
    let function_config: FunctionConfig =
        read_snapshot(&prj.root().join("snapshots/FunctionConfig.json"));
    let contract_config: ContractConfig =
        read_snapshot(&prj.root().join("snapshots/ContractConfig.json"));
    // Example observed values:
    // FunctionConfig {
    //     test_isolated_function: 48926,
    //     test_non_isolated_function: 27722,
    // }
    // ContractConfig {
    //     test_isolated_contract: 48926,
    // }
    // Isolation adds overhead, so the isolated snapshot must record more gas; function-level
    // and contract-level isolation must agree for the same call.
    assert!(function_config.test_isolated_function > function_config.test_non_isolated_function);
    assert_eq!(function_config.test_isolated_function, contract_config.test_isolated_contract);
});
// Checks inline `default.evm_version` overrides at function and contract level, and that a
// function-level override beats a contract-level one. `block.blobbasefee` is the probe: it
// reverts pre-Cancun ("shanghai") and succeeds on Cancun.
forgetest_init!(config_inline_evm_version, |prj, cmd| {
    prj.add_test(
        "inline.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract Dummy {
function getBlobBaseFee() public returns (uint256) {
return block.blobbasefee;
}
}
contract FunctionConfig is Test {
Dummy dummy;
function setUp() public {
dummy = new Dummy();
}
/// forge-config: default.evm_version = "shanghai"
function test_old() public {
vm.expectRevert();
dummy.getBlobBaseFee();
}
function test_new() public {
dummy.getBlobBaseFee();
}
}
/// forge-config: default.evm_version = "shanghai"
contract ContractConfig is Test {
Dummy dummy;
function setUp() public {
dummy = new Dummy();
}
function test_old() public {
vm.expectRevert();
dummy.getBlobBaseFee();
}
/// forge-config: default.evm_version = "cancun"
function test_new() public {
dummy.getBlobBaseFee();
}
}
"#,
    );
    // CLI sets cancun globally; inline configs downgrade (and re-upgrade) selectively.
    // `-j1` keeps the suite output order deterministic.
    cmd.args(["test", "--evm-version=cancun", "-j1"]).assert_success().stdout_eq(str![[r#"
...
Ran 2 tests for test/inline.sol:ContractConfig
[PASS] test_new() ([GAS])
[PASS] test_old() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 2 tests for test/inline.sol:FunctionConfig
[PASS] test_new() ([GAS])
[PASS] test_old() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 2 test suites [ELAPSED]: 4 tests passed, 0 failed, 0 skipped (4 total tests)
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/bind_json.rs | crates/forge/tests/cli/bind_json.rs | use foundry_test_utils::snapbox;
// Tests the complete `forge bind-json` workflow.
// Ensures that `forge bind-json` can run even when files depend on not-yet-existing bindings,
// that the generated `utils/JsonBindings.sol` matches the expected snapshot (covering both a
// file-level struct and a contract-level struct), and that the tests using the generated
// bindings then pass.
forgetest_init!(test_bind_json, |prj, cmd| {
    prj.add_test(
        "JsonBindings",
        r#"
import {JsonBindings} from "utils/JsonBindings.sol";
import {Test} from "forge-std/Test.sol";
struct TopLevelStruct {
uint256 param1;
int8 param2;
}
contract BindJsonTest is Test {
using JsonBindings for *;
struct ContractLevelStruct {
address[][] param1;
address addrParam;
}
function testTopLevel() public pure {
string memory json = '{"param1": 1, "param2": -1}';
TopLevelStruct memory topLevel = json.deserializeTopLevelStruct();
assertEq(topLevel.param1, 1);
assertEq(topLevel.param2, -1);
json = topLevel.serialize();
TopLevelStruct memory deserialized = json.deserializeTopLevelStruct();
assertEq(keccak256(abi.encode(deserialized)), keccak256(abi.encode(topLevel)));
}
function testContractLevel() public pure {
ContractLevelStruct memory contractLevel = ContractLevelStruct({
param1: new address[][](2),
addrParam: address(0xBEEF)
});
string memory json = contractLevel.serialize();
assertEq(json, '{"param1":[[],[]],"addrParam":"0x000000000000000000000000000000000000bEEF"}');
ContractLevelStruct memory deserialized = json.deserializeContractLevelStruct();
assertEq(keccak256(abi.encode(deserialized)), keccak256(abi.encode(contractLevel)));
}
}
"#,
    );
    cmd.arg("bind-json").assert_success();
    // The generated library must match this snapshot exactly.
    snapbox::assert_data_eq!(
        snapbox::Data::read_from(&prj.root().join("utils/JsonBindings.sol"), None),
        snapbox::str![[r#"
// Automatically generated by forge bind-json.
pragma solidity >=0.6.2 <0.9.0;
pragma experimental ABIEncoderV2;
import {BindJsonTest, TopLevelStruct} from "test/JsonBindings.sol";
interface Vm {
function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory);
function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);
function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);
function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);
}
...
library JsonBindings {
Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));
string constant schema_TopLevelStruct = "TopLevelStruct(uint256 param1,int8 param2)";
string constant schema_ContractLevelStruct = "ContractLevelStruct(address[][] param1,address addrParam)";
function serialize(TopLevelStruct memory value) internal pure returns (string memory) {
return vm.serializeJsonType(schema_TopLevelStruct, abi.encode(value));
}
function serialize(TopLevelStruct memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {
return vm.serializeJsonType(objectKey, valueKey, schema_TopLevelStruct, abi.encode(value));
}
function deserializeTopLevelStruct(string memory json) public pure returns (TopLevelStruct memory) {
return abi.decode(vm.parseJsonType(json, schema_TopLevelStruct), (TopLevelStruct));
}
function deserializeTopLevelStruct(string memory json, string memory path) public pure returns (TopLevelStruct memory) {
return abi.decode(vm.parseJsonType(json, path, schema_TopLevelStruct), (TopLevelStruct));
}
function deserializeTopLevelStructArray(string memory json, string memory path) public pure returns (TopLevelStruct[] memory) {
return abi.decode(vm.parseJsonTypeArray(json, path, schema_TopLevelStruct), (TopLevelStruct[]));
}
function serialize(BindJsonTest.ContractLevelStruct memory value) internal pure returns (string memory) {
return vm.serializeJsonType(schema_ContractLevelStruct, abi.encode(value));
}
function serialize(BindJsonTest.ContractLevelStruct memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {
return vm.serializeJsonType(objectKey, valueKey, schema_ContractLevelStruct, abi.encode(value));
}
function deserializeContractLevelStruct(string memory json) public pure returns (BindJsonTest.ContractLevelStruct memory) {
return abi.decode(vm.parseJsonType(json, schema_ContractLevelStruct), (BindJsonTest.ContractLevelStruct));
}
function deserializeContractLevelStruct(string memory json, string memory path) public pure returns (BindJsonTest.ContractLevelStruct memory) {
return abi.decode(vm.parseJsonType(json, path, schema_ContractLevelStruct), (BindJsonTest.ContractLevelStruct));
}
function deserializeContractLevelStructArray(string memory json, string memory path) public pure returns (BindJsonTest.ContractLevelStruct[] memory) {
return abi.decode(vm.parseJsonTypeArray(json, path, schema_ContractLevelStruct), (BindJsonTest.ContractLevelStruct[]));
}
}
"#]],
    );
    // The test file that imports the generated bindings must now compile and pass.
    cmd.forge_fuse().args(["test"]).assert_success();
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_optimizer.rs | crates/forge/tests/cli/test_optimizer.rs | //! Tests for the `forge test` with preprocessed cache.
// Test that the cache is invalidated on `forge build` when the dynamic test linking option is toggled.
forgetest_init!(toggle_invalidate_cache_on_build, |prj, cmd| {
prj.initialize_default_contracts();
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
// All files are built with optimized tests.
cmd.args(["build"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 23 files with [..]
...
"#]]);
// No files are rebuilt.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
No files changed, compilation skipped
...
"#]]);
// Toggle test optimizer off.
prj.update_config(|config| {
config.dynamic_test_linking = false;
});
// All files are rebuilt with preprocessed cache false.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 23 files with [..]
...
"#]]);
});
// Test that the cache is invalidated on `forge test` when the dynamic test linking option is toggled.
forgetest_init!(toggle_invalidate_cache_on_test, |prj, cmd| {
prj.initialize_default_contracts();
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
// All files are built with optimized tests.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 21 files with [..]
...
"#]]);
// No files are rebuilt.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
No files changed, compilation skipped
...
"#]]);
// Toggle test optimizer off.
prj.update_config(|config| {
config.dynamic_test_linking = false;
});
// All files are rebuilt with preprocessed cache false.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 21 files with [..]
...
"#]]);
});
// Counter contract without interface instantiated in CounterTest
//
// ├── src
// │ └── Counter.sol
// └── test
// └── Counter.t.sol
forgetest_init!(preprocess_contract_with_no_interface, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTest is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
counter.setNumber(0);
}
function test_Increment() public {
counter.increment();
assertEq(counter.number(), 1);
}
function test_SetNumber() public {
counter.setNumber(1);
assertEq(counter.number(), 1);
}
}
"#,
);
// All files are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 21 files with [..]
...
"#]]);
// Change Counter implementation to fail both tests.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = 12345;
}
function increment() public {
number++;
number++;
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and both tests fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 12347 != 1] test_Increment() (gas: [..])
[FAIL: assertion failed: 12345 != 1] test_SetNumber() (gas: [..])
...
"#]]);
// Change Counter implementation to fail single test.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = 1;
}
function increment() public {
number++;
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and only one test fails.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 2 != 1] test_Increment() (gas: [..])
[PASS] test_SetNumber() (gas: [..])
...
"#]]);
});
// Counter contract with interface instantiated in CounterTest
//
// ├── src
// │ ├── Counter.sol
// │ └── interface
// │ └── CounterIf.sol
// └── test
// └── Counter.t.sol
forgetest_init!(preprocess_contract_with_interface, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"interface/CounterIf.sol",
r#"
interface CounterIf {
function number() external returns (uint256);
function setNumber(uint256 newNumber) external;
function increment() external;
}
"#,
);
prj.add_source(
"Counter.sol",
r#"
import {CounterIf} from "./interface/CounterIf.sol";
contract Counter is CounterIf {
uint256 public number;
uint256 public anotherNumber;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTest is Test {
Counter public counter;
function setUp() public {
counter = Counter(address(new Counter()));
counter.setNumber(0);
}
function test_Increment() public {
counter.increment();
assertEq(counter.number(), 1);
}
function test_SetNumber() public {
counter.setNumber(1);
assertEq(counter.number(), 1);
}
}
"#,
);
// All 21 files are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 22 files with [..]
...
"#]]);
// Change only CounterIf interface.
prj.add_source(
"interface/CounterIf.sol",
r#"
interface CounterIf {
function anotherNumber() external returns (uint256);
function number() external returns (uint256);
function setNumber(uint256 newNumber) external;
function increment() external;
}
"#,
);
// All 3 files (interface, implementation and test) are compiled.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 3 files with [..]
...
"#]]);
// Change Counter implementation to fail both tests.
prj.add_source(
"Counter.sol",
r#"
import {CounterIf} from "./interface/CounterIf.sol";
contract Counter is CounterIf {
uint256 public number;
uint256 public anotherNumber;
function setNumber(uint256 newNumber) public {
number = 12345;
}
function increment() public {
number++;
number++;
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and both tests fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 12347 != 1] test_Increment() (gas: [..])
[FAIL: assertion failed: 12345 != 1] test_SetNumber() (gas: [..])
...
"#]]);
});
// - Counter contract instantiated in CounterMock
// - CounterMock instantiated in CounterTest
//
// ├── src
// │ └── Counter.sol
// └── test
// ├── Counter.t.sol
// └── mock
// └── CounterMock.sol
forgetest_init!(preprocess_mock_without_inheritance, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"mock/CounterMock.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "src/Counter.sol";
contract CounterMock {
Counter counter = new Counter();
function setNumber(uint256 newNumber) public {
counter.setNumber(newNumber);
}
function increment() public {
counter.increment();
}
function number() public returns (uint256) {
return counter.number();
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {CounterMock} from "./mock/CounterMock.sol";
contract CounterTest is Test {
CounterMock public counter;
function setUp() public {
counter = new CounterMock();
counter.setNumber(0);
}
function test_Increment() public {
counter.increment();
assertEq(counter.number(), 1);
}
function test_SetNumber() public {
counter.setNumber(1);
assertEq(counter.number(), 1);
}
}
"#,
);
// 20 files plus one mock file are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 22 files with [..]
...
"#]]);
// Change Counter contract implementation to fail both tests.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = 12345;
}
function increment() public {
number++;
number++;
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and both tests fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 12347 != 1] test_Increment() (gas: [..])
[FAIL: assertion failed: 12345 != 1] test_SetNumber() (gas: [..])
...
"#]]);
// Change CounterMock contract implementation to pass both tests.
prj.add_test(
"mock/CounterMock.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "src/Counter.sol";
contract CounterMock {
Counter counter = new Counter();
function setNumber(uint256 newNumber) public {
}
function increment() public {
}
function number() public returns (uint256) {
return 1;
}
}
"#,
);
// Assert that mock and test files are compiled and no test fails.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 2 files with [..]
...
[PASS] test_Increment() (gas: [..])
[PASS] test_SetNumber() (gas: [..])
...
"#]]);
});
// - CounterMock contract is Counter contract
// - CounterMock instantiated in CounterTest
//
// ├── src
// │ └── Counter.sol
// └── test
// ├── Counter.t.sol
// └── mock
// └── CounterMock.sol
forgetest_init!(preprocess_mock_with_inheritance, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"mock/CounterMock.sol",
r#"
import {Counter} from "src/Counter.sol";
contract CounterMock is Counter {
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {CounterMock} from "./mock/CounterMock.sol";
contract CounterTest is Test {
CounterMock public counter;
function setUp() public {
counter = new CounterMock();
counter.setNumber(0);
}
function test_Increment() public {
counter.increment();
assertEq(counter.number(), 1);
}
function test_SetNumber() public {
counter.setNumber(1);
assertEq(counter.number(), 1);
}
}
"#,
);
// 20 files plus one mock file are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 22 files with [..]
...
"#]]);
// Change Counter contract implementation to fail both tests.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256) public virtual {
number = 12345;
}
function increment() public virtual {
number++;
number++;
}
}
"#,
);
// Assert Counter source contract and CounterTest test contract (as it imports mock) are
// compiled and both tests fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 3 files with [..]
...
[FAIL: assertion failed: 12347 != 1] test_Increment() (gas: [..])
[FAIL: assertion failed: 12345 != 1] test_SetNumber() (gas: [..])
...
"#]]);
// Change mock implementation to pass both tests.
prj.add_test(
"mock/CounterMock.sol",
r#"
import {Counter} from "src/Counter.sol";
contract CounterMock is Counter {
function setNumber(uint256 newNumber) public override {
number = newNumber;
}
function increment() public override {
number++;
}
}
"#,
);
// Assert that CounterMock and CounterTest files are compiled and no test fails.
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 2 files with [..]
...
[PASS] test_Increment() (gas: [..])
[PASS] test_SetNumber() (gas: [..])
...
"#]]);
});
// - CounterMock contract is Counter contract
// - CounterMock instantiated in CounterTest
//
// ├── src
// │ └── Counter.sol
// └── test
// ├── Counter.t.sol
// └── mock
// └── CounterMock.sol
forgetest_init!(preprocess_mock_to_non_mock, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"mock/CounterMock.sol",
r#"
import {Counter} from "src/Counter.sol";
contract CounterMock is Counter {
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {CounterMock} from "./mock/CounterMock.sol";
contract CounterTest is Test {
CounterMock public counter;
function setUp() public {
counter = new CounterMock();
counter.setNumber(0);
}
function test_Increment() public {
counter.increment();
assertEq(counter.number(), 1);
}
function test_SetNumber() public {
counter.setNumber(1);
assertEq(counter.number(), 1);
}
}
"#,
);
// 20 files plus one mock file are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 22 files with [..]
...
"#]]);
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
No files changed, compilation skipped
...
"#]]);
// Change mock implementation to fail tests, no inherit from Counter.
prj.add_test(
"mock/CounterMock.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "src/Counter.sol";
contract CounterMock {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = 1234;
}
function increment() public {
number = 5678;
}
}
"#,
);
// Assert that CounterMock and CounterTest files are compiled and tests fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 2 files with [..]
...
[FAIL: assertion failed: 5678 != 1] test_Increment() (gas: [..])
[FAIL: assertion failed: 1234 != 1] test_SetNumber() (gas: [..])
...
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/12452>
// - CounterMock contract is Counter contract
// - CounterMock declared in CounterTest
//
// ├── src
// │ └── Counter.sol
// └── test
// ├── Counter.t.sol
forgetest_init!(preprocess_mock_declared_in_test_contract, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
function add(uint256 x, uint256 y) public pure returns (uint256) {
return x + y;
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "src/Counter.sol";
contract CounterMock is Counter {}
contract CounterTest is Test {
function test_add() public {
CounterMock impl = new CounterMock();
assertEq(impl.add(2, 2), 4);
}
}
"#,
);
// 20 files plus one mock file are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 21 files with [..]
...
"#]]);
cmd.with_no_redact().assert_success().stdout_eq(str![[r#"
...
No files changed, compilation skipped
...
"#]]);
// Change Counter implementation to fail tests.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
function add(uint256 x, uint256 y) public pure returns (uint256) {
return x + y + 1;
}
}
"#,
);
// Assert that Counter and CounterTest files are compiled and tests fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 2 files with [..]
...
[FAIL: assertion failed: 5 != 4] test_add() (gas: [..])
...
"#]]);
});
// ├── src
// │ ├── CounterA.sol
// │ ├── CounterB.sol
// │ ├── Counter.sol
// │ └── v1
// │ └── Counter.sol
// └── test
// └── Counter.t.sol
forgetest_init!(preprocess_multiple_contracts_with_constructors, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_source(
"CounterA.sol",
r#"
contract CounterA {
uint256 public number;
address public owner;
constructor(uint256 _newNumber, address _owner) {
number = _newNumber;
owner = _owner;
}
function increment() public {
number++;
}
}
"#,
);
// Contract with constructor args without name.
prj.add_source(
"CounterB.sol",
r#"
contract CounterB {
uint256 public number;
constructor(uint256) {
number = 1;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_source(
"v1/Counter.sol",
r#"
contract Counter {
uint256 public number;
constructor(uint256 _number) {
number = _number;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "src/Counter.sol";
import "src/CounterA.sol";
import "src/CounterB.sol";
import {Counter as CounterV1} from "src/v1/Counter.sol";
contract CounterTest is Test {
function test_Increment_In_Counter() public {
Counter counter = new Counter();
counter.increment();
assertEq(counter.number(), 1);
}
function test_Increment_In_Counter_V1() public {
CounterV1 counter = new CounterV1(1234);
counter.increment();
assertEq(counter.number(), 1235);
}
function test_Increment_In_Counter_A() public {
CounterA counter = new CounterA(1234, address(this));
counter.increment();
assertEq(counter.number(), 1235);
}
function test_Increment_In_Counter_A_with_named_args() public {
CounterA counter = new CounterA({_newNumber: 1234, _owner: address(this)});
counter.increment();
assertEq(counter.number(), 1235);
}
function test_Increment_In_Counter_B() public {
CounterB counter = new CounterB(1234);
counter.increment();
assertEq(counter.number(), 2);
}
}
"#,
);
// 22 files plus one mock file are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 24 files with [..]
...
[PASS] test_Increment_In_Counter() (gas: [..])
[PASS] test_Increment_In_Counter_A() (gas: [..])
[PASS] test_Increment_In_Counter_A_with_named_args() (gas: [..])
[PASS] test_Increment_In_Counter_B() (gas: [..])
[PASS] test_Increment_In_Counter_V1() (gas: [..])
...
"#]]);
// Change v1/Counter to fail test.
prj.add_source(
"v1/Counter.sol",
r#"
contract Counter {
uint256 public number;
constructor(uint256 _number) {
number = _number;
}
function increment() public {
number = 12345;
}
}
"#,
);
// Only v1/Counter should be compiled and test should fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[PASS] test_Increment_In_Counter() (gas: [..])
[PASS] test_Increment_In_Counter_A() (gas: [..])
[PASS] test_Increment_In_Counter_A_with_named_args() (gas: [..])
[PASS] test_Increment_In_Counter_B() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_V1() (gas: [..])
...
"#]]);
// Change CounterA to fail test.
prj.add_source(
"CounterA.sol",
r#"
contract CounterA {
uint256 public number;
address public owner;
constructor(uint256 _newNumber, address _owner) {
number = _newNumber;
owner = _owner;
}
function increment() public {
number = 12345;
}
}
"#,
);
// Only CounterA should be compiled and test should fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[PASS] test_Increment_In_Counter() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_A() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_A_with_named_args() (gas: [..])
[PASS] test_Increment_In_Counter_B() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_V1() (gas: [..])
...
"#]]);
// Change CounterB to fail test.
prj.add_source(
"CounterB.sol",
r#"
contract CounterB {
uint256 public number;
constructor(uint256) {
number = 100;
}
function increment() public {
number++;
}
}
"#,
);
// Only CounterB should be compiled and test should fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[PASS] test_Increment_In_Counter() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_A() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_A_with_named_args() (gas: [..])
[FAIL: assertion failed: 101 != 2] test_Increment_In_Counter_B() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_V1() (gas: [..])
...
"#]]);
// Change Counter to fail test.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number = 12345;
}
}
"#,
);
// Only Counter should be compiled and test should fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 12345 != 1] test_Increment_In_Counter() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_A() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_A_with_named_args() (gas: [..])
[FAIL: assertion failed: 101 != 2] test_Increment_In_Counter_B() (gas: [..])
[FAIL: assertion failed: 12345 != 1235] test_Increment_In_Counter_V1() (gas: [..])
...
"#]]);
});
// Test preprocessing contracts with payable constructor, value and salt named args.
forgetest_init!(preprocess_contracts_with_payable_constructor_and_salt, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
constructor(uint256 _number) payable {
number = msg.value;
}
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_source(
"CounterWithSalt.sol",
r#"
contract CounterWithSalt {
uint256 public number;
constructor(uint256 _number) payable {
number = msg.value;
}
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "src/Counter.sol";
import {CounterWithSalt} from "src/CounterWithSalt.sol";
contract CounterTest is Test {
function test_Increment_In_Counter() public {
Counter counter = Counter(address(new Counter{value: 111}(1)));
counter.increment();
assertEq(counter.number(), 112);
}
function test_Increment_In_Counter_With_Salt() public {
CounterWithSalt counter = new CounterWithSalt{value: 111, salt: bytes32("preprocess_counter_with_salt")}(1);
assertEq(address(counter), 0x223e63BE3BF01DD04f852d70f1bE217017055f49);
}
}
"#,
);
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 22 files with [..]
...
[PASS] test_Increment_In_Counter() (gas: [..])
[PASS] test_Increment_In_Counter_With_Salt() (gas: [..])
...
"#]]);
// Change contract to fail test.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
constructor(uint256 _number) payable {
number = msg.value + _number;
}
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
// Only Counter should be compiled and test should fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 113 != 112] test_Increment_In_Counter() (gas: [..])
[PASS] test_Increment_In_Counter_With_Salt() (gas: [..])
...
"#]]);
// Change contract with salt to fail test too.
prj.add_source(
"CounterWithSalt.sol",
r#"
contract CounterWithSalt {
uint256 public number;
constructor(uint256 _number) payable {
number = msg.value + _number;
}
function setNumber(uint256 newNumber) public {
number = newNumber;
}
function increment() public {
number++;
}
}
"#,
);
// Only Counter should be compiled and test should fail.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: assertion failed: 113 != 112] test_Increment_In_Counter() (gas: [..])
[FAIL: assertion failed: 0x11acEfcD29A1BA964A05C0E7F3901054BEfb17c0 != 0x223e63BE3BF01DD04f852d70f1bE217017055f49] test_Increment_In_Counter_With_Salt() (gas: [..])
...
"#]]);
});
// Counter contract with constructor reverts and emitted events.
forgetest_init!(preprocess_contract_with_require_and_emit, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
event CounterCreated(uint256 number);
uint256 public number;
constructor(uint256 no) {
require(no != 1, "ctor revert");
emit CounterCreated(10);
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTest is Test {
function test_assert_constructor_revert() public {
vm.expectRevert("ctor revert");
new Counter(1);
}
function test_assert_constructor_emit() public {
vm.expectEmit(true, true, true, true);
emit Counter.CounterCreated(10);
new Counter(11);
}
}
"#,
);
// All 20 files are compiled on first run.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 21 files with [..]
...
"#]]);
// Change Counter implementation to revert with different message.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
event CounterCreated(uint256 number);
uint256 public number;
constructor(uint256 no) {
require(no != 1, "ctor revert update");
emit CounterCreated(10);
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and revert test fails.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[PASS] test_assert_constructor_emit() (gas: [..])
[FAIL: Error != expected error: ctor revert update != ctor revert] test_assert_constructor_revert() (gas: [..])
...
"#]]);
// Change Counter implementation and don't revert.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
event CounterCreated(uint256 number);
uint256 public number;
constructor(uint256 no) {
require(no != 0, "ctor revert");
emit CounterCreated(10);
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and revert test fails.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[PASS] test_assert_constructor_emit() (gas: [..])
[FAIL: next call did not revert as expected] test_assert_constructor_revert() (gas: [..])
...
"#]]);
// Change Counter implementation to emit different event.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
event CounterCreated(uint256 number);
uint256 public number;
constructor(uint256 no) {
require(no != 0, "ctor revert");
emit CounterCreated(100);
}
}
"#,
);
// Assert that only 1 file is compiled (Counter source contract) and emit test fails.
cmd.with_no_redact().assert_failure().stdout_eq(str![[r#"
...
Compiling 1 files with [..]
...
[FAIL: expected an emit, but no logs were emitted afterwards. you might have mismatched events or not enough events were emitted] test_assert_constructor_emit() (gas: [..])
[FAIL: next call did not revert as expected] test_assert_constructor_revert() (gas: [..])
...
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/10312>
forgetest_init!(preprocess_contract_with_constructor_args_struct, |prj, cmd| {
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
struct ConstructorArgs {
uint256 _number;
}
constructor(uint256 no) {
}
}
"#,
);
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTest is Test {
function test_assert_constructor_revert() public {
Counter counter = new Counter(1);
}
}
"#,
);
// All 20 files should properly compile.
cmd.args(["test"]).with_no_redact().assert_success().stdout_eq(str![[r#"
...
Compiling 21 files with [..]
...
"#]]);
});
// Test preprocessed contracts with decode internal fns.
#[cfg(not(feature = "isolate-by-default"))]
forgetest_init!(preprocess_contract_with_decode_internal, |prj, cmd| {
prj.initialize_default_contracts();
prj.update_config(|config| {
config.dynamic_test_linking = true;
});
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTest is Test {
Counter public counter;
function setUp() public {
create_counter(0);
}
function test_Increment() public {
create_counter(0);
counter.increment();
assertEq(counter.number(), 1);
}
function create_counter(uint256 number) internal {
counter = new Counter();
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/script.rs | crates/forge/tests/cli/script.rs | //! Contains various tests related to `forge script`.
use crate::constants::TEMPLATE_CONTRACT;
use alloy_hardforks::EthereumHardfork;
use alloy_primitives::{Address, Bytes, address, hex};
use anvil::{NodeConfig, spawn};
use forge_script_sequence::ScriptSequence;
use foundry_test_utils::{
ScriptOutcome, ScriptTester,
rpc::{self, next_http_archive_rpc_url},
snapbox::IntoData,
util::{OTHER_SOLC_VERSION, SOLC_VERSION},
};
use regex::Regex;
use serde_json::Value;
use std::{env, fs, path::PathBuf};
// Tests that fork cheat codes can be used in script
forgetest_init!(
#[ignore]
can_use_fork_cheat_codes_in_script,
|prj, cmd| {
let script = prj.add_source(
"Foo",
r#"
import "forge-std/Script.sol";
contract ContractScript is Script {
function setUp() public {}
function run() public {
uint256 fork = vm.activeFork();
vm.rollFork(11469702);
}
}
"#,
);
let rpc = foundry_test_utils::rpc::next_http_rpc_endpoint();
cmd.arg("script").arg(script).args(["--fork-url", rpc.as_str(), "-vvvvv"]).assert_success();
}
);
// Tests that the `run` command works correctly
forgetest!(can_execute_script_command2, |prj, cmd| {
let script = prj.add_source(
"Foo",
r#"
contract Demo {
event log_string(string);
function run() external {
emit log_string("script ran");
}
}
"#,
);
cmd.arg("script").arg(script).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Script ran successfully.
[GAS]
== Logs ==
script ran
"#]]);
});
// Tests that the `run` command works correctly when path *and* script name is specified
forgetest!(can_execute_script_command_fqn, |prj, cmd| {
let script = prj.add_source(
"Foo",
r#"
contract Demo {
event log_string(string);
function run() external {
emit log_string("script ran");
}
}
"#,
);
cmd.arg("script").arg(format!("{}:Demo", script.display())).assert_success().stdout_eq(str![[
r#"
...
Script ran successfully.
[GAS]
== Logs ==
script ran
...
"#
]]);
});
// Tests that the run command can run arbitrary functions
forgetest!(can_execute_script_command_with_sig, |prj, cmd| {
let script = prj.add_source(
"Foo",
r#"
contract Demo {
event log_string(string);
function myFunction() external {
emit log_string("script ran");
}
}
"#,
);
cmd.arg("script").arg(script).arg("--sig").arg("myFunction()").assert_success().stdout_eq(
str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Script ran successfully.
[GAS]
== Logs ==
script ran
"#]],
);
});
static FAILING_SCRIPT: &str = r#"
import "forge-std/Script.sol";
contract FailingScript is Script {
function run() external {
revert("failed");
}
}
"#;
// Tests that execution throws upon encountering a revert in the script.
forgetest_async!(assert_exit_code_error_on_failure_script, |prj, cmd| {
    foundry_test_utils::util::initialize(prj.root());
    let failing = prj.add_source("FailingScript", FAILING_SCRIPT);

    // Running the reverting script must exit non-zero and surface the
    // revert reason on stderr.
    cmd.arg("script")
        .arg(failing)
        .assert_failure()
        .stderr_eq(str![[r#"
Error: script failed: failed
"#]]);
});
// Tests that execution throws upon encountering a revert in the script with --json option.
// <https://github.com/foundry-rs/foundry/issues/2508>
forgetest_async!(assert_exit_code_error_on_failure_script_with_json, |prj, cmd| {
    foundry_test_utils::util::initialize(prj.root());
    let failing = prj.add_source("FailingScript", FAILING_SCRIPT);

    // Even in JSON output mode the revert must produce a non-zero exit code
    // and the plain error line on stderr.
    cmd.arg("script")
        .arg(failing)
        .arg("--json")
        .assert_failure()
        .stderr_eq(str![[r#"
Error: script failed: failed
"#]]);
});
// Tests that the manually specified gas limit is used when using the --unlocked option
forgetest_async!(can_execute_script_command_with_manual_gas_limit_unlocked, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
// Script forwards an explicit `{gas: 500000}` stipend to `wasteGas`, which
// requires at least `minGas` gas to still be available when it executes.
let deploy_script = prj.add_source(
"Foo",
r#"
import "forge-std/Script.sol";
contract GasWaster {
function wasteGas(uint256 minGas) public {
require(gasleft() >= minGas, "Gas left needs to be higher");
}
}
contract DeployScript is Script {
function run() external {
vm.startBroadcast();
GasWaster gasWaster = new GasWaster();
gasWaster.wasteGas{gas: 500000}(200000);
}
}
"#,
);
let deploy_contract = deploy_script.display().to_string() + ":DeployScript";
// Fork an archive node through a local anvil instance and broadcast from one
// of its unlocked dev accounts — no private key is passed to the CLI.
let node_config = NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url()));
let (_api, handle) = spawn(node_config).await;
let dev = handle.dev_accounts().next().unwrap();
cmd.set_current_dir(prj.root());
cmd.args([
"script",
&deploy_contract,
"--root",
prj.root().to_str().unwrap(),
"--fork-url",
&handle.http_endpoint(),
"--sender",
format!("{dev:?}").as_str(),
"-vvvvv",
"--slow",
"--broadcast",
"--unlocked",
"--ignored-error-codes=2018", // `wasteGas` can be restricted to view
])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Traces:
[..] DeployScript::run()
├─ [0] VM::startBroadcast()
│ └─ ← [Return]
├─ [..] → new GasWaster@[..]
│ └─ ← [Return] 415 bytes of code
├─ [..] GasWaster::wasteGas(200000 [2e5])
│ └─ ← [Stop]
└─ ← [Stop]
Script ran successfully.
## Setting up 1 EVM.
==========================
Simulated On-chain Traces:
[..] → new GasWaster@[..]
└─ ← [Return] 415 bytes of code
[..] GasWaster::wasteGas(200000 [2e5])
└─ ← [Stop]
==========================
Chain 1
[ESTIMATED_GAS_PRICE]
[ESTIMATED_TOTAL_GAS_USED]
[ESTIMATED_AMOUNT_REQUIRED]
==========================
==========================
ONCHAIN EXECUTION COMPLETE & SUCCESSFUL.
[SAVED_TRANSACTIONS]
[SAVED_SENSITIVE_VALUES]
"#]]);
});
// Tests that the manually specified gas limit is used.
forgetest_async!(can_execute_script_command_with_manual_gas_limit, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
// Same GasWaster fixture as the unlocked variant: an explicit `{gas: 500000}`
// stipend must survive simulation and broadcast.
let deploy_script = prj.add_source(
"Foo",
r#"
import "forge-std/Script.sol";
contract GasWaster {
function wasteGas(uint256 minGas) public {
require(gasleft() >= minGas, "Gas left needs to be higher");
}
}
contract DeployScript is Script {
function run() external {
vm.startBroadcast();
GasWaster gasWaster = new GasWaster();
gasWaster.wasteGas{gas: 500000}(200000);
}
}
"#,
);
let deploy_contract = deploy_script.display().to_string() + ":DeployScript";
// Broadcast with an explicit private key. Error code 2018 is NOT ignored here,
// so the "can be restricted to view" compiler warning appears in the snapshot.
let node_config = NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url()));
let (_api, handle) = spawn(node_config).await;
let private_key =
"ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80".to_string();
cmd.set_current_dir(prj.root());
cmd.args([
"script",
&deploy_contract,
"--root",
prj.root().to_str().unwrap(),
"--fork-url",
&handle.http_endpoint(),
"-vvvvv",
"--slow",
"--broadcast",
"--private-key",
&private_key,
])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
Warning (2018): Function state mutability can be restricted to view
[FILE]:7:5:
|
7 | function wasteGas(uint256 minGas) public {
| ^ (Relevant source part starts here and spans across multiple lines).
Traces:
[..] DeployScript::run()
├─ [0] VM::startBroadcast()
│ └─ ← [Return]
├─ [..] → new GasWaster@[..]
│ └─ ← [Return] 415 bytes of code
├─ [..] GasWaster::wasteGas(200000 [2e5])
│ └─ ← [Stop]
└─ ← [Stop]
Script ran successfully.
## Setting up 1 EVM.
==========================
Simulated On-chain Traces:
[..] → new GasWaster@[..]
└─ ← [Return] 415 bytes of code
[..] GasWaster::wasteGas(200000 [2e5])
└─ ← [Stop]
==========================
Chain 1
[ESTIMATED_GAS_PRICE]
[ESTIMATED_TOTAL_GAS_USED]
[ESTIMATED_AMOUNT_REQUIRED]
==========================
==========================
ONCHAIN EXECUTION COMPLETE & SUCCESSFUL.
[SAVED_TRANSACTIONS]
[SAVED_SENSITIVE_VALUES]
"#]]);
});
// Tests that the run command can run functions with arguments
forgetest!(can_execute_script_command_with_args, |prj, cmd| {
// Entrypoint takes two uints and echoes them back via events.
let script = prj.add_source(
"Foo",
r#"
contract Demo {
event log_string(string);
event log_uint(uint);
function run(uint256 a, uint256 b) external {
emit log_string("script ran");
emit log_uint(a);
emit log_uint(b);
}
}
"#,
);
// `--sig` selects the overload; the trailing CLI args "1" and "2" are
// ABI-encoded as `a` and `b`.
cmd.arg("script")
.arg(script)
.arg("--sig")
.arg("run(uint256,uint256)")
.arg("1")
.arg("2")
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Script ran successfully.
[GAS]
== Logs ==
script ran
1
2
"#]]);
});
// Tests that the run command can run functions with arguments without specifying the signature
// <https://github.com/foundry-rs/foundry/issues/11240>
forgetest!(can_execute_script_command_with_args_no_sig, |prj, cmd| {
let script = prj.add_source(
"Foo",
r#"
contract Demo {
event log_string(string);
event log_uint(uint);
function run(uint256 a, uint256 b) external {
emit log_string("script ran");
emit log_uint(a);
emit log_uint(b);
}
}
"#,
);
// No `--sig`: bare positional args "1" and "2" must still be matched to the
// default `run(uint256,uint256)` entrypoint.
cmd.arg("script").arg(script).arg("1").arg("2").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Script ran successfully.
[GAS]
== Logs ==
script ran
1
2
"#]]);
});
// Tests that the run command can run functions with return values
forgetest!(can_execute_script_command_with_returned, |prj, cmd| {
// The entrypoint returns one named value and one unnamed value; the snapshot
// below pins how each is rendered under "== Return ==" (unnamed values are
// shown by positional index).
let script = prj.add_source(
"Foo",
r#"
contract Demo {
event log_string(string);
function run() external returns (uint256 result, uint8) {
emit log_string("script ran");
return (255, 3);
}
}"#,
);
cmd.arg("script").arg(script).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Script ran successfully.
[GAS]
== Return ==
result: uint256 255
1: uint8 3
== Logs ==
script ran
"#]]);
});
// End-to-end check of `--skip-simulation`: deploy a contract whose follow-up
// script would fail under on-chain simulation (it relies on `vm.roll` and fresh
// blockhashes), then broadcast that follow-up script with simulation disabled.
forgetest_async!(can_broadcast_script_skipping_simulation, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
// This example script would fail in on-chain simulation
let deploy_script = prj.add_source(
"DeployScript",
r#"
import "forge-std/Script.sol";
contract HashChecker {
bytes32 public lastHash;
function update() public {
bytes32 newHash = blockhash(block.number - 1);
require(newHash != lastHash, "Hash didn't change");
lastHash = newHash;
}
function checkLastHash() public view {
require(lastHash != bytes32(0), "Hash shouldn't be zero");
}
}
contract DeployScript is Script {
HashChecker public hashChecker;
function run() external {
vm.startBroadcast();
hashChecker = new HashChecker();
}
}"#,
);
let deploy_contract = deploy_script.display().to_string() + ":DeployScript";
let node_config = NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url()));
let (_api, handle) = spawn(node_config).await;
let private_key =
"ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80".to_string();
cmd.set_current_dir(prj.root());
// Step 1: deploy HashChecker with --skip-simulation; the snapshot confirms the
// simulation section is replaced by "SKIPPING ON CHAIN SIMULATION.".
cmd.args([
"script",
&deploy_contract,
"--root",
prj.root().to_str().unwrap(),
"--fork-url",
&handle.http_endpoint(),
"-vvvvv",
"--broadcast",
"--slow",
"--skip-simulation",
"--private-key",
&private_key,
])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Traces:
[..] DeployScript::run()
├─ [0] VM::startBroadcast()
│ └─ ← [Return]
├─ [..] → new HashChecker@[..]
│ └─ ← [Return] 718 bytes of code
└─ ← [Stop]
Script ran successfully.
SKIPPING ON CHAIN SIMULATION.
==========================
ONCHAIN EXECUTION COMPLETE & SUCCESSFUL.
[SAVED_TRANSACTIONS]
[SAVED_SENSITIVE_VALUES]
"#]]);
// Recover the deployed address from the broadcast artifact so the follow-up
// script can be templated against it.
let run_log = std::fs::read_to_string("broadcast/DeployScript.sol/1/run-latest.json").unwrap();
let run_object: Value = serde_json::from_str(&run_log).unwrap();
let contract_address = &run_object["receipts"][0]["contractAddress"]
.as_str()
.unwrap()
.parse::<Address>()
.unwrap()
.to_string();
// Step 2: a script that rolls the block number back and forth while calling
// `update()`; CONTRACT_ADDRESS is substituted with the address recovered above.
let run_code = r#"
import "forge-std/Script.sol";
import { HashChecker } from "./DeployScript.sol";
contract RunScript is Script {
HashChecker public hashChecker;
function run() external {
vm.startBroadcast();
hashChecker = HashChecker(CONTRACT_ADDRESS);
uint numUpdates = 8;
vm.roll(block.number - numUpdates);
for(uint i = 0; i < numUpdates; i++) {
vm.roll(block.number + 1);
hashChecker.update();
hashChecker.checkLastHash();
}
}
}"#
.replace("CONTRACT_ADDRESS", contract_address);
let run_script = prj.add_source("RunScript", &run_code);
let run_contract = run_script.display().to_string() + ":RunScript";
cmd.forge_fuse()
.args([
"script",
&run_contract,
"--root",
prj.root().to_str().unwrap(),
"--fork-url",
&handle.http_endpoint(),
"-vvvvv",
"--broadcast",
"--slow",
"--skip-simulation",
"--gas-estimate-multiplier",
"200",
"--private-key",
&private_key,
])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Traces:
[..] RunScript::run()
├─ [0] VM::startBroadcast()
│ └─ ← [Return]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
├─ [0] VM::roll([..])
│ └─ ← [Return]
├─ [..] [..]::update()
│ └─ ← [Stop]
├─ [..] [..]::checkLastHash() [staticcall]
│ └─ ← [Stop]
└─ ← [Stop]
Script ran successfully.
SKIPPING ON CHAIN SIMULATION.
==========================
ONCHAIN EXECUTION COMPLETE & SUCCESSFUL.
[SAVED_TRANSACTIONS]
[SAVED_SENSITIVE_VALUES]
"#]]);
});
forgetest_async!(can_deploy_script_without_lib, |prj, cmd| {
    // Spin up a local node and point the script tester at it.
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Simulate then broadcast `deployDoesntPanic()` and verify both senders'
    // nonces advanced by the expected amounts.
    script_tester.load_private_keys(&[0, 1]).await;
    script_tester
        .add_sig("BroadcastTestNoLinking", "deployDoesntPanic()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 1), (1, 2)])
        .await;
});
forgetest_async!(can_deploy_script_with_lib, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Simulate then broadcast `deploy()` and check the resulting nonce bumps.
    script_tester.load_private_keys(&[0, 1]).await;
    script_tester
        .add_sig("BroadcastTest", "deploy()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 2), (1, 1)])
        .await;
});
forgetest_async!(can_deploy_script_private_key, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());
    let sender = address!("0x90F79bf6EB2c4f870365E785982E1f101E93b906");

    // Track the sender address and expect its nonce to advance by three after
    // `deployPrivateKey()` is simulated and broadcast.
    script_tester.load_addresses(&[sender]).await;
    script_tester
        .add_sig("BroadcastTest", "deployPrivateKey()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment_addresses(&[(sender, 3)])
        .await;
});
forgetest_async!(can_deploy_unlocked, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Broadcast via an unlocked node account instead of a locally-managed key.
    script_tester
        .sender("0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266".parse().unwrap())
        .unlocked()
        .add_sig("BroadcastTest", "deployOther()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast);
});
forgetest_async!(can_deploy_script_remember_key, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());
    let remembered = address!("0x90F79bf6EB2c4f870365E785982E1f101E93b906");

    // `deployRememberKey()` should result in two transactions from the
    // remembered address.
    script_tester.load_addresses(&[remembered]).await;
    script_tester
        .add_sig("BroadcastTest", "deployRememberKey()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment_addresses(&[(remembered, 2)])
        .await;
});
forgetest_async!(can_deploy_script_remember_key_and_resume, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());
    let remembered = address!("0x90F79bf6EB2c4f870365E785982E1f101E93b906");

    // First pass: simulation succeeds, but resuming without the deployer's
    // wallet loaded reports a missing wallet.
    script_tester.add_deployer(0).load_addresses(&[remembered]).await;
    script_tester
        .add_sig("BroadcastTest", "deployRememberKeyResume()")
        .simulate(ScriptOutcome::OkSimulation)
        .resume(ScriptOutcome::MissingWallet);

    // Provide the missing wallet and rerun: both the remembered address and
    // account 0 must see their nonces advance.
    script_tester.load_private_keys(&[0]).await;
    script_tester
        .run(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment_addresses(&[(remembered, 1)])
        .await;
    script_tester.assert_nonce_increment(&[(0, 2)]).await;
});
forgetest_async!(can_resume_script, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Only account 0 is loaded, so resuming reports the second wallet missing.
    script_tester.load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastTest", "deploy()")
        .simulate(ScriptOutcome::OkSimulation)
        .resume(ScriptOutcome::MissingWallet);

    // After loading the missing key the resumed run broadcasts successfully.
    script_tester.load_private_keys(&[1]).await;
    script_tester
        .run(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 2), (1, 1)])
        .await;
});
forgetest_async!(can_deploy_broadcast_wrap, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Account 2 is the designated deployer; all three accounts end up sending
    // transactions during `deployOther()`.
    script_tester.add_deployer(2).load_private_keys(&[0, 1, 2]).await;
    script_tester
        .add_sig("BroadcastTest", "deployOther()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 4), (1, 4), (2, 1)])
        .await;
});
forgetest_async!(panic_no_deployer_set, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Two keys loaded but no explicit deployer: simulation warns that a
    // deployer must be specified, and the broadcast fails for lack of a sender.
    script_tester.load_private_keys(&[0, 1]).await;
    script_tester
        .add_sig("BroadcastTest", "deployOther()")
        .simulate(ScriptOutcome::WarnSpecifyDeployer)
        .broadcast(ScriptOutcome::MissingSender);
});
forgetest_async!(can_deploy_no_arg_broadcast, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Deployer and signer are both account 0; `deployNoArgs()` must produce
    // three transactions from it.
    script_tester.add_deployer(0).load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastTest", "deployNoArgs()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 3)])
        .await;
});
forgetest_async!(can_deploy_with_create2, |prj, cmd| {
    let (api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Install the default CREATE2 deployer's runtime code on the node.
    let deployer_code =
        Bytes::from_static(foundry_evm::constants::DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE);
    api.anvil_set_code(foundry_evm::constants::DEFAULT_CREATE2_DEPLOYER, deployer_code)
        .await
        .unwrap();

    script_tester.add_deployer(0).load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastTestNoLinking", "deployCreate2()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 2)])
        .await
        // Running again results in error, since we're repeating the salt passed to CREATE2
        .run(ScriptOutcome::ScriptFailed);
});
forgetest_async!(can_deploy_with_custom_create2, |prj, cmd| {
    let (api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());
    let create2 = address!("0x0000000000000000000000000000000000b4956c");

    // Install the default deployer runtime code at a non-standard address.
    let deployer_code =
        Bytes::from_static(foundry_evm::constants::DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE);
    api.anvil_set_code(create2, deployer_code).await.unwrap();

    // The custom deployer address is registered with the tester and also passed
    // through to the script itself as a call argument.
    script_tester.add_deployer(0).load_private_keys(&[0]).await;
    script_tester
        .add_create2_deployer(create2)
        .add_sig("BroadcastTestNoLinking", "deployCreate2(address)")
        .arg(&create2.to_string())
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 2)])
        .await;
});
forgetest_async!(can_deploy_with_custom_create2_notmatched_bytecode, |prj, cmd| {
    let (api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());
    let create2 = address!("0x0000000000000000000000000000000000b4956c");

    // Install bytecode at the custom CREATE2 address that does not match the
    // expected deployer runtime code; the script must fail in both phases.
    api.anvil_set_code(
        create2,
        Bytes::from_static(&hex!("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cef")),
    )
    .await
    .unwrap();

    script_tester.add_deployer(0).load_private_keys(&[0]).await;
    script_tester
        .add_create2_deployer(create2)
        .add_sig("BroadcastTestNoLinking", "deployCreate2()")
        .simulate(ScriptOutcome::ScriptFailed)
        .broadcast(ScriptOutcome::ScriptFailed);
});
forgetest_async!(cannot_deploy_with_nonexist_create2, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());
    let create2 = address!("0x0000000000000000000000000000000000b4956c");

    // No code is ever installed at the custom CREATE2 address, so the script
    // fails in both simulation and broadcast.
    script_tester.add_deployer(0).load_private_keys(&[0]).await;
    script_tester
        .add_create2_deployer(create2)
        .add_sig("BroadcastTestNoLinking", "deployCreate2()")
        .simulate(ScriptOutcome::ScriptFailed)
        .broadcast(ScriptOutcome::ScriptFailed);
});
forgetest_async!(can_deploy_and_simulate_25_txes_concurrently, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // `deployMany()` queues 25 transactions; the sender's nonce must advance by 25.
    script_tester.load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastTestNoLinking", "deployMany()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 25)])
        .await;
});
forgetest_async!(can_deploy_and_simulate_mixed_broadcast_modes, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // `deployMix()` mixes broadcast styles; 15 transactions are expected in total.
    script_tester.load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastMix", "deployMix()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 15)])
        .await;
});
forgetest_async!(deploy_with_setup, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // `BroadcastTestSetup` uses a setUp phase; six transactions are expected.
    script_tester.load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastTestSetup", "run()")
        .simulate(ScriptOutcome::OkSimulation)
        .broadcast(ScriptOutcome::OkBroadcast)
        .assert_nonce_increment(&[(0, 6)])
        .await;
});
forgetest_async!(fail_broadcast_staticcall, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Broadcasting a static call is rejected already at simulation time.
    script_tester.load_private_keys(&[0]).await;
    script_tester
        .add_sig("BroadcastTestNoLinking", "errorStaticCall()")
        .simulate(ScriptOutcome::StaticCallNotAllowed);
});
// Broadcasts `BroadcastTestSetup::run()` and compares the produced broadcast /
// sensitive logs against checked-in fixtures, after stripping fields that vary
// between runs.
forgetest_async!(check_broadcast_log, |prj, cmd| {
let (api, handle) = spawn(NodeConfig::test()).await;
let mut tester = ScriptTester::new_broadcast(cmd, &handle.http_endpoint(), prj.root());
// Prepare CREATE2 Deployer
let addr = address!("0x4e59b44847b379578588920ca78fbf26c0b4956c");
let code = hex::decode("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cf3").expect("Could not decode create2 deployer init_code").into();
api.anvil_set_code(addr, code).await.unwrap();
tester
.load_private_keys(&[0])
.await
.add_sig("BroadcastTestSetup", "run()")
.simulate(ScriptOutcome::OkSimulation)
.broadcast(ScriptOutcome::OkBroadcast)
.assert_nonce_increment(&[(0, 6)])
.await;
// Uncomment to recreate the broadcast log
// std::fs::copy(
// "broadcast/Broadcast.t.sol/31337/run-latest.json",
// PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../testdata/fixtures/broadcast.
// log. json" ), );
// Check broadcast logs
// Ignore timestamp, blockHash, blockNumber, cumulativeGasUsed, effectiveGasPrice,
// transactionIndex and logIndex values since they can change in between runs
let re = Regex::new(r#"((timestamp":).[0-9]*)|((blockHash":).*)|((blockNumber":).*)|((cumulativeGasUsed":).*)|((effectiveGasPrice":).*)|((transactionIndex":).*)|((logIndex":).*)"#).unwrap();
let fixtures_log = std::fs::read_to_string(
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../testdata/fixtures/broadcast.log.json"),
)
.unwrap();
// NOTE(review): `_fixtures_log` / `_run_log` are computed but unused because the
// broadcast-log comparison below is commented out; the normalization is kept so
// the assertion can be re-enabled as-is.
let _fixtures_log = re.replace_all(&fixtures_log, "");
let run_log =
std::fs::read_to_string("broadcast/Broadcast.t.sol/31337/run-latest.json").unwrap();
let _run_log = re.replace_all(&run_log, "");
// similar_asserts::assert_eq!(fixtures_log, run_log);
// Uncomment to recreate the sensitive log
// std::fs::copy(
// "cache/Broadcast.t.sol/31337/run-latest.json",
// PathBuf::from(env!("CARGO_MANIFEST_DIR"))
// .join("../../testdata/fixtures/broadcast.sensitive.log.json"),
// );
// Check sensitive logs
// Ignore port number since it can change in between runs
let re = Regex::new(r":[0-9]+").unwrap();
let fixtures_log = std::fs::read_to_string(
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../testdata/fixtures/broadcast.sensitive.log.json"),
)
.unwrap();
let fixtures_log = re.replace_all(&fixtures_log, "");
let run_log = std::fs::read_to_string("cache/Broadcast.t.sol/31337/run-latest.json").unwrap();
let run_log = re.replace_all(&run_log, "");
// Clean up carriage return OS differences
let re = Regex::new(r"\r\n").unwrap();
let fixtures_log = re.replace_all(&fixtures_log, "\n");
let run_log = re.replace_all(&run_log, "\n");
similar_asserts::assert_eq!(fixtures_log, run_log);
});
forgetest_async!(test_default_sender_balance, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Expect the default sender to have uint256.max balance; the script asserts
    // this itself, so a successful simulation is the whole check.
    script_tester
        .add_sig("TestInitialBalance", "runDefaultSender()")
        .simulate(ScriptOutcome::OkSimulation);
});
forgetest_async!(test_custom_sender_balance, |prj, cmd| {
    let (_api, handle) = spawn(NodeConfig::test()).await;
    let endpoint = handle.http_endpoint();
    let mut script_tester = ScriptTester::new_broadcast(cmd, &endpoint, prj.root());

    // Expect a custom sender (deployer account 0) to keep its starting balance;
    // verified inside the script, so simulation success suffices.
    script_tester
        .add_deployer(0)
        .add_sig("TestInitialBalance", "runCustomSender()")
        .simulate(ScriptOutcome::OkSimulation);
});
// Minimal mirror of the broadcast artifact JSON: only the `transactions` array
// is deserialized; all other fields are ignored.
#[derive(serde::Deserialize)]
struct Transactions {
transactions: Vec<Transaction>,
}
// Single entry of the broadcast artifact's `transactions` array; only the
// rendered call/constructor `arguments` are of interest to the tests.
#[derive(serde::Deserialize)]
struct Transaction {
arguments: Vec<String>,
}
// test we output arguments <https://github.com/foundry-rs/foundry/issues/3053>
forgetest_async!(can_execute_script_with_arguments, |prj, cmd| {
cmd.args(["init", "--force"])
.arg(prj.root())
.assert_success()
.stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]])
.stderr_eq(str![[r#"
Warning: Target directory is not empty, but `--force` was specified
...
"#]]);
let (_api, handle) = spawn(NodeConfig::test()).await;
let script = prj.add_script(
"Counter.s.sol",
r#"
import "forge-std/Script.sol";
struct Point {
uint256 x;
uint256 y;
}
contract A {
address a;
uint b;
int c;
bytes32 d;
bool e;
bytes f;
Point g;
string h;
constructor(address _a, uint _b, int _c, bytes32 _d, bool _e, bytes memory _f, Point memory _g, string memory _h) {
a = _a;
b = _b;
c = _c;
d = _d;
e = _e;
f = _f;
g = _g;
h = _h;
}
}
contract Script0 is Script {
function run() external {
vm.broadcast();
new A(msg.sender, 2 ** 32, -1 * (2 ** 32), keccak256(abi.encode(1)), true, "abcdef", Point(10, 99), "hello");
}
}
"#,
);
cmd
.forge_fuse()
.arg("script")
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/main.rs | crates/forge/tests/cli/main.rs | #[macro_use]
extern crate foundry_test_utils;
// Shared constants and helpers used across the CLI test suites.
pub mod constants;
pub mod utils;
// One module per `forge` feature / subcommand under test (alphabetical).
mod backtrace;
mod bind;
mod bind_json;
mod build;
mod cache;
mod cmd;
mod compiler;
mod config;
mod context;
mod coverage;
mod create;
mod debug;
mod doc;
mod eip712;
mod failure_assertions;
mod inline_config;
mod install;
mod json;
mod lint;
mod multi_script;
mod precompiles;
mod script;
mod soldeer;
mod svm;
mod test_cmd;
mod verify;
mod verify_bytecode;
mod version;
// NOTE(review): the modules below fall outside the alphabetical ordering above —
// presumably a deliberate grouping; confirm before re-sorting.
mod ext_integration;
mod fmt;
mod fmt_integration;
mod test_optimizer;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/backtrace.rs | crates/forge/tests/cli/backtrace.rs | //! Tests for backtrace functionality
use foundry_test_utils::rpc::{next_etherscan_api_key, next_http_rpc_endpoint};
// Exercises revert backtraces across plain reverts, asserts, custom errors,
// nested/internal calls, delegatecall and staticcall, pinning the exact
// `at Contract.function (file:line:col)` frames for each failing test.
forgetest!(test_backtraces, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
// All contracts and the test suite come from checked-in fixture files.
prj.add_source("SimpleRevert.sol", include_str!("../fixtures/backtraces/SimpleRevert.sol"));
prj.add_source("StaticCall.sol", include_str!("../fixtures/backtraces/StaticCall.sol"));
prj.add_source("DelegateCall.sol", include_str!("../fixtures/backtraces/DelegateCall.sol"));
prj.add_source("NestedCalls.sol", include_str!("../fixtures/backtraces/NestedCalls.sol"));
prj.add_test("Backtrace.t.sol", include_str!("../fixtures/backtraces/Backtrace.t.sol"))
;
let output = cmd.args(["test", "-vvvvv"]).assert_failure();
// Every test is expected to fail; the snapshot pins each failure's backtrace,
// including frames without source locations (e.g. `SimpleRevert.doAssert`).
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
...
Ran 11 tests for test/Backtrace.t.sol:BacktraceTest
[FAIL: panic: assertion failed (0x01)] testAssertFail() ([GAS])
...
Backtrace:
at SimpleRevert.doAssert
at BacktraceTest.testAssertFail (test/Backtrace.t.sol:40:48)
[FAIL: CustomError(42, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496)] testCustomError() ([GAS])
...
Backtrace:
at SimpleRevert.doCustomError (src/SimpleRevert.sol:21:59)
at BacktraceTest.testCustomError (test/Backtrace.t.sol:45:49)
[FAIL: Delegate compute failed] testDelegateCallRequire() ([GAS])
...
Backtrace:
at DelegateTarget.compute (src/DelegateCall.sol:11:84)
at DelegateCaller.delegateCompute (src/DelegateCall.sol:32:101)
at BacktraceTest.testDelegateCallRequire (test/Backtrace.t.sol:82:57)
[FAIL: Delegate call failed] testDelegateCallRevert() ([GAS])
...
Backtrace:
at DelegateTarget.fail (src/DelegateCall.sol:7:43)
at DelegateCaller.delegateFail (src/DelegateCall.sol:26:91)
at BacktraceTest.testDelegateCallRevert (test/Backtrace.t.sol:77:56)
[FAIL: Failed at internal level 3] testInternalCallChain() ([GAS])
...
Backtrace:
at BacktraceTest.testInternalCallChain (test/Backtrace.t.sol:72:54)
[FAIL: Failed at chain level 3] testInternalCallsSameSource() ([GAS])
...
Backtrace:
at NestedCalls.callChain1 (src/NestedCalls.sol:25:51)
at BacktraceTest.testInternalCallsSameSource (test/Backtrace.t.sol:55:61)
[FAIL: Maximum depth reached] testNestedCalls() ([GAS])
...
Backtrace:
at NestedCalls.nestedCall (src/NestedCalls.sol:11:46)
at NestedCalls.nestedCall (src/NestedCalls.sol:13:19)
at NestedCalls.nestedCall (src/NestedCalls.sol:13:19)
at NestedCalls.nestedCall (src/NestedCalls.sol:13:19)
at NestedCalls.nestedCall (src/NestedCalls.sol:13:19)
at BacktraceTest.testNestedCalls (test/Backtrace.t.sol:50:49)
[FAIL: Value must be greater than zero] testRequireFail() ([GAS])
...
Backtrace:
at SimpleRevert.doRequire (src/SimpleRevert.sol:11:61)
at BacktraceTest.testRequireFail (test/Backtrace.t.sol:35:49)
[FAIL: Simple revert message] testSimpleRevert() ([GAS])
...
Backtrace:
at SimpleRevert.doRevert (src/SimpleRevert.sol:7:67)
at BacktraceTest.testSimpleRevert (test/Backtrace.t.sol:30:50)
[FAIL: Static compute failed] testStaticCallRequire() ([GAS])
...
Backtrace:
at StaticTarget.compute (src/StaticCall.sol:11:77)
at StaticCaller.staticCompute (src/StaticCall.sol:30:124)
at BacktraceTest.testStaticCallRequire (test/Backtrace.t.sol:92:60)
[FAIL: Static call reverted] testStaticCallRevert() ([GAS])
...
Backtrace:
at StaticTarget.viewFail (src/StaticCall.sol:7:47)
at StaticCaller.staticCallFail (src/StaticCall.sol:25:93)
at BacktraceTest.testStaticCallRevert (test/Backtrace.t.sol:87:59)
Suite result: FAILED. 0 passed; 11 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
// Verifies backtraces stay accurate under mixed compilation: after editing two of
// three files (shifting their line numbers) and recompiling, frames from the
// recompiled files must use the NEW locations while the cached file's frames
// stay unchanged.
forgetest!(test_backtrace_with_mixed_compilation, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
prj.add_source(
"SimpleRevert.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract SimpleRevert {
function doRevert(string memory reason) public pure {
revert(reason);
}
}
"#,
);
// Add another source file that won't be modified
prj.add_source(
"HelperContract.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract HelperContract {
function getValue() public pure returns (uint256) {
return 42;
}
function doRevert() public pure {
revert("Helper revert");
}
}
"#,
);
prj.add_test(
"BacktraceTest.t.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import "../src/test.sol";
import "../src/SimpleRevert.sol";
import "../src/HelperContract.sol";
contract BacktraceTest is DSTest {
SimpleRevert simpleRevert;
HelperContract helper;
function setUp() public {
simpleRevert = new SimpleRevert();
helper = new HelperContract();
}
function testSimpleRevert() public {
simpleRevert.doRevert("Test failure");
}
function testHelperRevert() public {
helper.doRevert();
}
}
"#,
);
// First run: all three files freshly compiled; pin the initial line/column
// numbers in both backtraces.
let output = cmd.args(["test", "-vvvvv"]).assert_failure();
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
Ran 2 tests for test/BacktraceTest.t.sol:BacktraceTest
[FAIL: Helper revert] testHelperRevert() ([GAS])
...
Backtrace:
at HelperContract.doRevert (src/HelperContract.sol:11:47)
at BacktraceTest.testHelperRevert (test/BacktraceTest.t.sol:23:50)
[FAIL: Test failure] testSimpleRevert() ([GAS])
...
Backtrace:
at SimpleRevert.doRevert (src/SimpleRevert.sol:7:67)
at BacktraceTest.testSimpleRevert (test/BacktraceTest.t.sol:19:50)
Suite result: FAILED. 0 passed; 2 failed; 0 skipped; [ELAPSED]
...
"#]]);
// Modify the source file - add a comment to change line numbers
prj.add_source(
"SimpleRevert.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract SimpleRevert {
function doRevert(string memory reason) public pure {
// Added comment to shift line numbers
revert(reason);
}
}
"#,
);
// Modify the test file as well
prj.add_test(
"BacktraceTest.t.sol",
r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import "../src/test.sol";
import "../src/SimpleRevert.sol";
import "../src/HelperContract.sol";
contract BacktraceTest is DSTest {
SimpleRevert simpleRevert;
HelperContract helper;
function setUp() public {
simpleRevert = new SimpleRevert();
helper = new HelperContract();
}
function testSimpleRevert() public {
// Added some comments
// to change line numbers
simpleRevert.doRevert("Test failure");
}
function testHelperRevert() public {
helper.doRevert();
}
}
"#,
);
// Second run - mixed compilation (SimpleRevert fresh, BacktraceTest fresh, HelperContract
// cached)
// HelperContract's frame keeps its old location (11:47), while SimpleRevert's
// and the test's frames reflect the shifted lines (8:56, 21:43, 25:50).
let output = cmd.forge_fuse().args(["test", "-vvvvv"]).assert_failure();
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
Ran 2 tests for test/BacktraceTest.t.sol:BacktraceTest
[FAIL: Helper revert] testHelperRevert() ([GAS])
...
Backtrace:
at HelperContract.doRevert (src/HelperContract.sol:11:47)
at BacktraceTest.testHelperRevert (test/BacktraceTest.t.sol:25:50)
[FAIL: Test failure] testSimpleRevert() ([GAS])
...
Backtrace:
at SimpleRevert.doRevert (src/SimpleRevert.sol:8:56)
at BacktraceTest.testSimpleRevert (test/BacktraceTest.t.sol:21:43)
Suite result: FAILED. 0 passed; 2 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
// Checks that revert backtraces resolve frames through both internal library
// calls (compiled into the consumer) and external (linked) library calls.
// All nine tests are expected to fail; only function names are asserted in the
// backtrace frames here (no file:line suffixes), unlike the file-based tests above.
forgetest!(test_library_backtrace, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
// Add library source files
prj.add_source(
"libraries/InternalMathLib.sol",
include_str!("../fixtures/backtraces/libraries/InternalMathLib.sol"),
);
prj.add_source(
"libraries/ExternalMathLib.sol",
include_str!("../fixtures/backtraces/libraries/ExternalMathLib.sol"),
);
prj.add_source(
"LibraryConsumer.sol",
include_str!("../fixtures/backtraces/LibraryConsumer.sol"),
);
// Add test file
prj.add_test(
"LibraryBacktrace.t.sol",
include_str!("../fixtures/backtraces/LibraryBacktrace.t.sol"),
);
// Add foundry.toml configuration for linked library
// NOTE(review): the address looks like a placeholder; only the fact that
// ExternalMathLib is pre-linked (external calls) matters for this test.
let config = foundry_config::Config {
libraries: vec!["src/libraries/ExternalMathLib.sol:ExternalMathLib:0x1234567890123456789012345678901234567890".to_string()],
..Default::default()
};
prj.write_config(config);
// `--ast` is passed here — presumably required for backtrace symbolization;
// compare with test_backtrace_via_ir_disables_source_lines below.
let output =
cmd.args(["test", "-vvv", "--ast", "--mc", "LibraryBacktraceTest"]).assert_failure();
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 9 tests for test/LibraryBacktrace.t.sol:LibraryBacktraceTest
[FAIL: DivisionByZero()] testExternalDivisionByZero() ([GAS])
...
Backtrace:
at ExternalMathLib.div
at LibraryConsumer.externalDivide
at LibraryBacktraceTest.testExternalDivisionByZero
[FAIL: panic: arithmetic underflow or overflow (0x11)] testExternalOverflow() ([GAS])
...
Backtrace:
at ExternalMathLib.mul
at LibraryConsumer.externalMultiply
at LibraryBacktraceTest.testExternalOverflow
[FAIL: ExternalMathLib: value must be positive] testExternalRequire() ([GAS])
...
Backtrace:
at ExternalMathLib.requirePositive
at LibraryConsumer.externalCheckPositive
at LibraryBacktraceTest.testExternalRequire
[FAIL: Underflow()] testExternalUnderflow() ([GAS])
...
Backtrace:
at ExternalMathLib.sub
at LibraryConsumer.externalSubtract
at LibraryBacktraceTest.testExternalUnderflow
[FAIL: DivisionByZero()] testInternalDivisionByZero() ([GAS])
...
Backtrace:
at LibraryConsumer.internalDivide
at LibraryBacktraceTest.testInternalDivisionByZero
[FAIL: panic: arithmetic underflow or overflow (0x11)] testInternalOverflow() ([GAS])
Traces:
...
Backtrace:
at LibraryConsumer.internalMultiply
at LibraryBacktraceTest.testInternalOverflow
[FAIL: InternalMathLib: value must be positive] testInternalRequire() ([GAS])
Traces:
...
Backtrace:
at LibraryConsumer.internalCheckPositive
at LibraryBacktraceTest.testInternalRequire
[FAIL: Underflow()] testInternalUnderflow() ([GAS])
Traces:
...
Backtrace:
at LibraryConsumer.internalSubtract
at LibraryBacktraceTest.testInternalUnderflow
[FAIL: DivisionByZero()] testMixedLibraryFailure() ([GAS])
Traces:
...
Backtrace:
at ExternalMathLib.div
at LibraryConsumer.mixedCalculation
at LibraryBacktraceTest.testMixedLibraryFailure
Suite result: FAILED. 0 passed; 9 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
// Checks backtrace resolution when several libraries are defined in a single
// source file: each frame must point at the right line/column inside
// MultipleLibraries.sol rather than collapsing to one library.
forgetest!(test_multiple_libraries_same_file, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"libraries/MultipleLibraries.sol",
include_str!("../fixtures/backtraces/libraries/MultipleLibraries.sol"),
);
prj.add_source(
"MultipleLibraryConsumer.sol",
include_str!("../fixtures/backtraces/MultipleLibraryConsumer.sol"),
);
prj.add_test(
"MultipleLibraryBacktrace.t.sol",
include_str!("../fixtures/backtraces/MultipleLibraryBacktrace.t.sol"),
);
// `--ast` enables source-location resolution; frames below carry file:line:col.
let output = cmd
.args(["test", "-vvvvv", "--ast", "--mc", "MultipleLibraryBacktraceTest"])
.assert_failure();
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 4 tests for test/MultipleLibraryBacktrace.t.sol:MultipleLibraryBacktraceTest
[FAIL: FirstLibError()] testAllLibrariesFirstFails() ([GAS])
...
Backtrace:
at MultipleLibraryConsumer.useAllLibraries (src/libraries/MultipleLibraries.sol:10:42)
at MultipleLibraryBacktraceTest.testAllLibrariesFirstFails (test/MultipleLibraryBacktrace.t.sol:31:60)
[FAIL: FirstLibError()] testFirstLibraryError() ([GAS])
Traces:
...
Backtrace:
at MultipleLibraryConsumer.useFirstLib (src/libraries/MultipleLibraries.sol:10:42)
at MultipleLibraryBacktraceTest.testFirstLibraryError (test/MultipleLibraryBacktrace.t.sol:16:55)
[FAIL: SecondLibError()] testSecondLibraryError() ([GAS])
Traces:
...
Backtrace:
at MultipleLibraryConsumer.useSecondLib (src/libraries/MultipleLibraries.sol:26:41)
at MultipleLibraryBacktraceTest.testSecondLibraryError (test/MultipleLibraryBacktrace.t.sol:21:56)
[FAIL: ThirdLibError()] testThirdLibraryError() ([GAS])
Traces:
...
Backtrace:
at MultipleLibraryConsumer.useThirdLib (src/libraries/MultipleLibraries.sol:42:42)
at MultipleLibraryBacktraceTest.testThirdLibraryError (test/MultipleLibraryBacktrace.t.sol:26:55)
Suite result: FAILED. 0 passed; 4 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
// Checks backtraces in forked-mode tests against on-chain contracts (USDC).
// First run: no Etherscan key, so on-chain frames show raw addresses.
// Second run: with `--etherscan-api-key`, the same frames resolve to contract
// names (FiatTokenV2_2 / FiatTokenProxy) fetched from verified sources.
// NOTE(review): depends on live RPC + Etherscan endpoints supplied by the
// `next_http_rpc_endpoint`/`next_etherscan_api_key` test helpers.
forgetest!(test_fork_backtrace, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
let etherscan_api_key = next_etherscan_api_key();
let fork_url = next_http_rpc_endpoint();
prj.add_source(
"ForkedERC20Wrapper.sol",
include_str!("../fixtures/backtraces/ForkedERC20Wrapper.sol"),
);
prj.add_test("ForkBacktrace.t.sol", include_str!("../fixtures/backtraces/ForkBacktrace.t.sol"));
let output = cmd
.args(["test", "-vvvvv", "--fork-url", &fork_url, "--match-contract", "ForkBacktraceTest"])
.assert_failure();
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
Ran 5 tests for test/ForkBacktrace.t.sol:ForkBacktraceTest
[FAIL: USDC transfer failed] testDirectOnChainRevert() ([GAS])
...
Backtrace:
at 0x43506849D7C04F9138D1A2050bbF3A0c054402dd.transfer
at 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48.transfer
at ForkBacktraceTest.testDirectOnChainRevert (test/ForkBacktrace.t.sol:36:126)
[FAIL: ERC20: transfer amount exceeds balance] testNestedFailure() ([GAS])
...
Backtrace:
at 0x43506849D7C04F9138D1A2050bbF3A0c054402dd.transfer
at 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48.transfer
at ForkedERC20Wrapper.nestedFailure (src/ForkedERC20Wrapper.sol:14:89)
at ForkBacktraceTest.testNestedFailure (test/ForkBacktrace.t.sol:30:51)
[FAIL: Account has zero USDC balance] testRequireNonZeroBalance() ([GAS])
...
Backtrace:
at ForkedERC20Wrapper.requireNonZeroBalance (src/ForkedERC20Wrapper.sol:23:68)
at ForkBacktraceTest.testRequireNonZeroBalance (test/ForkBacktrace.t.sol:26:64)
[FAIL: ERC20: transfer amount exceeds allowance] testTransferFromWithoutApproval() ([GAS])
...
Backtrace:
at 0x43506849D7C04F9138D1A2050bbF3A0c054402dd.transferFrom
at 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48.transferFrom
at ForkedERC20Wrapper.transferFromWithoutApproval (src/ForkedERC20Wrapper.sol:18:101)
at ForkBacktraceTest.testTransferFromWithoutApproval (test/ForkBacktrace.t.sol:22:65)
[FAIL: ERC20: transfer amount exceeds balance] testTransferWithoutBalance() ([GAS])
...
Backtrace:
at 0x43506849D7C04F9138D1A2050bbF3A0c054402dd.transfer
at 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48.transfer
at ForkedERC20Wrapper.transferWithoutBalance (src/ForkedERC20Wrapper.sol:14:89)
at ForkBacktraceTest.testTransferWithoutBalance (test/ForkBacktrace.t.sol:18:60)
Suite result: FAILED. 0 passed; 5 failed; 0 skipped; [ELAPSED]
...
"#]]);
// Re-run one test with an Etherscan key: address frames now resolve to names.
cmd.forge_fuse()
.args([
"test",
"--mt",
"testTransferFromWithoutApproval",
"-vvvvv",
"--fork-url",
&fork_url,
"--etherscan-api-key",
&etherscan_api_key,
])
.assert_failure()
.stdout_eq(str![[r#"
No files changed, compilation skipped
...
Ran 1 test for test/ForkBacktrace.t.sol:ForkBacktraceTest
[FAIL: ERC20: transfer amount exceeds allowance] testTransferFromWithoutApproval() ([GAS])
...
Backtrace:
at FiatTokenV2_2.transferFrom
at FiatTokenProxy.fallback
at ForkedERC20Wrapper.transferFromWithoutApproval (src/ForkedERC20Wrapper.sol:18:101)
at ForkBacktraceTest.testTransferFromWithoutApproval (test/ForkBacktrace.t.sol:22:65)
...
"#]]);
});
// Checks that with `via_ir = true` backtraces still print frames, but without
// file:line:col suffixes — the expected output below contains names only.
forgetest!(test_backtrace_via_ir_disables_source_lines, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
prj.add_source("SimpleRevert.sol", include_str!("../fixtures/backtraces/SimpleRevert.sol"));
prj.add_source("StaticCall.sol", include_str!("../fixtures/backtraces/StaticCall.sol"));
prj.add_source("DelegateCall.sol", include_str!("../fixtures/backtraces/DelegateCall.sol"));
prj.add_source("NestedCalls.sol", include_str!("../fixtures/backtraces/NestedCalls.sol"));
prj.add_test("Backtrace.t.sol", include_str!("../fixtures/backtraces/Backtrace.t.sol"))
prj.update_config(|c| c.via_ir = true);
let output = cmd.args(["test", "-vvvvv"]).assert_failure();
output.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
...
[FAIL: Static compute failed] testStaticCallRequire() ([GAS])
...
Backtrace:
at StaticTarget.compute
at StaticCaller.staticCompute
at BacktraceTest.testStaticCallRequire
...
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/constants.rs | crates/forge/tests/cli/constants.rs | //! various constants
/// Name of the `Counter` contract used by template-based tests.
pub const TEMPLATE_CONTRACT: &str = "Counter";
/// Name of the matching `CounterTest` test contract.
pub const TEMPLATE_TEST_CONTRACT: &str = "CounterTest";
/// Artifact path for the template contract, without the `.json` extension.
pub const TEMPLATE_CONTRACT_ARTIFACT_BASE: &str = "Counter.sol/Counter";
/// Full JSON artifact path for the template contract.
pub const TEMPLATE_CONTRACT_ARTIFACT_JSON: &str = "Counter.sol/Counter.json";
/// Full JSON artifact path for the template test contract.
pub const TEMPLATE_TEST_CONTRACT_ARTIFACT_JSON: &str = "Counter.t.sol/CounterTest.json";
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/cache.rs | crates/forge/tests/cli/cache.rs | //! Tests for various cache command.
// `forge cache ls` with no extra argument must exit successfully.
forgetest!(can_list_cache, |_prj, cmd| {
    cmd.args(["cache", "ls"]).assert_success();
});
// `forge cache ls all` must exit successfully.
forgetest!(can_list_cache_all, |_prj, cmd| {
    cmd.args(["cache", "ls", "all"]).assert_success();
});
// `forge cache ls <chain>` with a named chain (mainnet) must exit successfully.
forgetest!(can_list_specific_chain, |_prj, cmd| {
    cmd.args(["cache", "ls", "mainnet"]).assert_success();
});
// Verifies that `forge test --no-cache` skips writing the cache file, and that
// a subsequent run without the flag recreates it.
forgetest_init!(can_test_no_cache, |prj, cmd| {
prj.initialize_default_contracts();
// Start from a clean slate so the existence checks below are meaningful.
prj.clear_cache();
// First run with caching disabled: no cache file may be produced.
cmd.args(["test", "--no-cache"]).assert_success();
assert!(!prj.cache().exists(), "cache file should not exist");
// Second run with default settings: the cache file must reappear.
cmd.forge_fuse().arg("test").assert_success();
assert!(prj.cache().exists(), "cache file should exist");
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/context.rs | crates/forge/tests/cli/context.rs | //! Contains tests for checking forge execution context cheatcodes
/// Solidity test/script contract shared by all execution-context tests below.
/// Each entry point asserts, via `vm.isContext`, exactly which `ForgeContext`
/// variants are active: `testForge*Context` functions cover the test-group
/// contexts (Test / Snapshot / Coverage), while `runDryRun`/`runBroadcast`
/// cover the script-group contexts. The same source is reused as both a test
/// file (`.t.sol` naming not required — it extends DSTest) and a script.
const FORGE_TEST_CONTEXT_CONTRACT: &str = r#"
import "./test.sol";
interface Vm {
enum ForgeContext { TestGroup, Test, Coverage, Snapshot, ScriptGroup, ScriptDryRun, ScriptBroadcast, ScriptResume, Unknown }
function isContext(ForgeContext context) external view returns (bool isContext);
}
contract ForgeContextTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
function testForgeTestContext() external view {
require(vm.isContext(Vm.ForgeContext.TestGroup) && !vm.isContext(Vm.ForgeContext.ScriptGroup), "wrong context");
require(vm.isContext(Vm.ForgeContext.Test), "wrong context");
require(!vm.isContext(Vm.ForgeContext.Coverage), "wrong context");
require(!vm.isContext(Vm.ForgeContext.Snapshot), "wrong context");
}
function testForgeSnapshotContext() external view {
require(vm.isContext(Vm.ForgeContext.TestGroup) && !vm.isContext(Vm.ForgeContext.ScriptGroup), "wrong context");
require(vm.isContext(Vm.ForgeContext.Snapshot), "wrong context");
require(!vm.isContext(Vm.ForgeContext.Test), "wrong context");
require(!vm.isContext(Vm.ForgeContext.Coverage), "wrong context");
}
function testForgeCoverageContext() external view {
require(vm.isContext(Vm.ForgeContext.TestGroup) && !vm.isContext(Vm.ForgeContext.ScriptGroup), "wrong context");
require(vm.isContext(Vm.ForgeContext.Coverage), "wrong context");
require(!vm.isContext(Vm.ForgeContext.Test), "wrong context");
require(!vm.isContext(Vm.ForgeContext.Snapshot), "wrong context");
}
function runDryRun() external view {
require(vm.isContext(Vm.ForgeContext.ScriptGroup) && !vm.isContext(Vm.ForgeContext.TestGroup), "wrong context");
require(vm.isContext(Vm.ForgeContext.ScriptDryRun), "wrong context");
require(!vm.isContext(Vm.ForgeContext.ScriptBroadcast), "wrong context");
require(!vm.isContext(Vm.ForgeContext.ScriptResume), "wrong context");
}
function runBroadcast() external view {
require(vm.isContext(Vm.ForgeContext.ScriptGroup) && !vm.isContext(Vm.ForgeContext.TestGroup), "wrong context");
require(vm.isContext(Vm.ForgeContext.ScriptBroadcast), "wrong context");
require(!vm.isContext(Vm.ForgeContext.ScriptDryRun), "wrong context");
require(!vm.isContext(Vm.ForgeContext.ScriptResume), "wrong context");
}
}
"#;
// tests that context properly set for `forge test` command
// `forge test` must run with the Test/TestGroup execution context set; the
// Solidity test asserts this via `vm.isContext`.
forgetest!(can_set_forge_test_standard_context, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source("ForgeContextTest.t.sol", FORGE_TEST_CONTEXT_CONTRACT);
    let args = ["test", "--match-test", "testForgeTestContext"];
    cmd.args(args).assert_success();
});
// tests that context properly set for `forge snapshot` command
// `forge snapshot` must run with the Snapshot execution context set; the
// Solidity test asserts this via `vm.isContext`.
forgetest!(can_set_forge_test_snapshot_context, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source("ForgeContextTest.t.sol", FORGE_TEST_CONTEXT_CONTRACT);
    let args = ["snapshot", "--match-test", "testForgeSnapshotContext"];
    cmd.args(args).assert_success();
});
// tests that context properly set for `forge coverage` command
// `forge coverage` must run with the Coverage execution context set; the
// Solidity test asserts this via `vm.isContext`.
forgetest!(can_set_forge_test_coverage_context, |prj, cmd| {
    prj.insert_ds_test();
    prj.add_source("ForgeContextTest.t.sol", FORGE_TEST_CONTEXT_CONTRACT);
    let args = ["coverage", "--match-test", "testForgeCoverageContext"];
    cmd.args(args).assert_success();
});
// tests that context properly set for `forge script` command
// `forge script` (no --broadcast) must run with the ScriptDryRun context set;
// the Solidity `runDryRun` entry point asserts this via `vm.isContext`.
forgetest!(can_set_forge_script_dry_run_context, |prj, cmd| {
    prj.insert_ds_test();
    let script_path = prj.add_source("ForgeScriptContextTest.s.sol", FORGE_TEST_CONTEXT_CONTRACT);
    cmd.arg("script");
    cmd.arg(script_path);
    cmd.args(["--sig", "runDryRun()"]);
    cmd.assert_success();
});
// tests that context properly set for `forge script --broadcast` command
// `forge script --broadcast` must run with the ScriptBroadcast context set;
// the Solidity `runBroadcast` entry point asserts this via `vm.isContext`.
forgetest!(can_set_forge_script_broadcast_context, |prj, cmd| {
    prj.insert_ds_test();
    let script_path = prj.add_source("ForgeScriptContextTest.s.sol", FORGE_TEST_CONTEXT_CONTRACT);
    cmd.arg("script");
    cmd.arg(script_path);
    cmd.args(["--broadcast", "--sig", "runBroadcast()"]);
    cmd.assert_success();
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/eip712.rs | crates/forge/tests/cli/eip712.rs | use foundry_config::fs_permissions::PathPermission;
// End-to-end test of `forge eip712`: resolves EIP-712 type strings and hashes
// for structs across two libraries, including cross-library references,
// nested arrays, recursive structs, and name collisions (disambiguated with
// the `_1` suffix, e.g. `Foo_1`). Checks both human-readable and `--json`
// output, then runs `forge test` to ensure the eip712 pass left the
// compilation cache usable.
forgetest!(test_eip712, |prj, cmd| {
let path = prj.add_test(
"Structs.sol",
r#"
library Structs {
struct Foo {
Bar bar;
}
struct Bar {
Art art;
}
struct Art {
uint256 id;
}
struct Complex {
Structs2.Foo foo2;
Foo[] foos;
Rec[][] recs;
}
struct Rec {
Rec[] rec;
}
}
library Structs2 {
struct Foo {
uint256 id;
}
struct Rec {
Bar[] bar;
}
struct Bar {
Rec rec;
}
struct FooBar {
Foo[] foos;
Bar[] bars;
Structs.Foo foo;
Structs.Bar bar;
Rec[] recs;
Structs.Rec rec;
}
}
contract DummyTest {
function testDummy() public pure {
revert("test");
}
}
"#,
);
// Human-readable output: one `path > library > struct` entry per struct.
cmd.forge_fuse().args(["eip712", path.to_string_lossy().as_ref()]).assert_success().stdout_eq(
str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Structs.sol > Structs > Foo:
 - type: Foo(Bar bar)Art(uint256 id)Bar(Art art)
 - hash: 0x6d9b732373bd999fde4072274c752e03f7437067dd75521eb406d8edf1d30f7d
Structs.sol > Structs > Bar:
 - type: Bar(Art art)Art(uint256 id)
 - hash: 0xadeb03f4f98fb57c05c9a79d8dd2348220e9bd9fd332ec2fbd92479e5695a596
Structs.sol > Structs > Art:
 - type: Art(uint256 id)
 - hash: 0xbfeb9da97f9dbc2403e9d5ec3853f36414cae141d772601f24e0097d159d302b
Structs.sol > Structs > Complex:
 - type: Complex(Foo foo2,Foo_1[] foos,Rec[][] recs)Art(uint256 id)Bar(Art art)Foo(uint256 id)Foo_1(Bar bar)Rec(Rec[] rec)
 - hash: 0xfb0a234a82efcade7c031ebb4c58afd7f5f242ca67ed06f4050c60044dcee425
Structs.sol > Structs > Rec:
 - type: Rec(Rec[] rec)
 - hash: 0x5f060eb740f5aee93a910587a100458c724479d189f6dd67ac39048bf312102e
Structs.sol > Structs2 > Foo:
 - type: Foo(uint256 id)
 - hash: 0xb93d8bb2877cd5cc51979d9fe85339ab570714a6fd974225e2a763851092497e
Structs.sol > Structs2 > Rec:
 - type: Rec(Bar[] bar)Bar(Rec rec)
 - hash: 0xe9dded72c72648f27772620cb4e10b773ce31a3ea26ef980c0b39d1834242cda
Structs.sol > Structs2 > Bar:
 - type: Bar(Rec rec)Rec(Bar[] bar)
 - hash: 0x164eba932ecde04ec75feba228664d08f29c88d6a67e531757e023e6063c3b2c
Structs.sol > Structs2 > FooBar:
 - type: FooBar(Foo[] foos,Bar[] bars,Foo_1 foo,Bar_1 bar,Rec[] recs,Rec_1 rec)Art(uint256 id)Bar(Rec rec)Bar_1(Art art)Foo(uint256 id)Foo_1(Bar_1 bar)Rec(Bar[] bar)Rec_1(Rec_1[] rec)
 - hash: 0xce88f333fe5b5d4901ceb2569922ffe741cda3afc383a63d34ed2c3d565e42d8
"#]],
);
// `--json` output: same data as above, as an array of {path, type, hash}.
cmd.forge_fuse().args(["eip712", path.to_string_lossy().as_ref(), "--json"]).assert_success().stdout_eq(
str![[r#"
[
  {
    "path": "Structs.sol > Structs > Foo",
    "type": "Foo(Bar bar)Art(uint256 id)Bar(Art art)",
    "hash": "0x6d9b732373bd999fde4072274c752e03f7437067dd75521eb406d8edf1d30f7d"
  },
  {
    "path": "Structs.sol > Structs > Bar",
    "type": "Bar(Art art)Art(uint256 id)",
    "hash": "0xadeb03f4f98fb57c05c9a79d8dd2348220e9bd9fd332ec2fbd92479e5695a596"
  },
  {
    "path": "Structs.sol > Structs > Art",
    "type": "Art(uint256 id)",
    "hash": "0xbfeb9da97f9dbc2403e9d5ec3853f36414cae141d772601f24e0097d159d302b"
  },
  {
    "path": "Structs.sol > Structs > Complex",
    "type": "Complex(Foo foo2,Foo_1[] foos,Rec[][] recs)Art(uint256 id)Bar(Art art)Foo(uint256 id)Foo_1(Bar bar)Rec(Rec[] rec)",
    "hash": "0xfb0a234a82efcade7c031ebb4c58afd7f5f242ca67ed06f4050c60044dcee425"
  },
  {
    "path": "Structs.sol > Structs > Rec",
    "type": "Rec(Rec[] rec)",
    "hash": "0x5f060eb740f5aee93a910587a100458c724479d189f6dd67ac39048bf312102e"
  },
  {
    "path": "Structs.sol > Structs2 > Foo",
    "type": "Foo(uint256 id)",
    "hash": "0xb93d8bb2877cd5cc51979d9fe85339ab570714a6fd974225e2a763851092497e"
  },
  {
    "path": "Structs.sol > Structs2 > Rec",
    "type": "Rec(Bar[] bar)Bar(Rec rec)",
    "hash": "0xe9dded72c72648f27772620cb4e10b773ce31a3ea26ef980c0b39d1834242cda"
  },
  {
    "path": "Structs.sol > Structs2 > Bar",
    "type": "Bar(Rec rec)Rec(Bar[] bar)",
    "hash": "0x164eba932ecde04ec75feba228664d08f29c88d6a67e531757e023e6063c3b2c"
  },
  {
    "path": "Structs.sol > Structs2 > FooBar",
    "type": "FooBar(Foo[] foos,Bar[] bars,Foo_1 foo,Bar_1 bar,Rec[] recs,Rec_1 rec)Art(uint256 id)Bar(Rec rec)Bar_1(Art art)Foo(uint256 id)Foo_1(Bar_1 bar)Rec(Bar[] bar)Rec_1(Rec_1[] rec)",
    "hash": "0xce88f333fe5b5d4901ceb2569922ffe741cda3afc383a63d34ed2c3d565e42d8"
  }
]
"#]],
);
// Testing `solar_project` doesn't mess up cache.
cmd.forge_fuse().arg("test").assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Structs.sol:DummyTest
[FAIL: test] testDummy() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/Structs.sol:DummyTest
[FAIL: test] testDummy() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Checks `forge eip712` output for structs declared at file scope: a
// free-standing struct is listed by bare name only, while structs inside a
// contract or library keep the `file > scope > struct` path prefix.
forgetest!(test_eip712_free_standing_structs, |prj, cmd| {
let path = prj.add_source(
"FreeStandingStructs.sol",
r#"
// free-standing struct (outside a contract and lib)
struct FreeStanding {
uint256 id;
string name;
}
contract InsideContract {
struct ContractStruct {
uint256 value;
}
}
library InsideLibrary {
struct LibraryStruct {
bytes32 hash;
}
}
"#,
);
cmd.forge_fuse().args(["eip712", path.to_string_lossy().as_ref()]).assert_success().stdout_eq(
str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
FreeStanding:
 - type: FreeStanding(uint256 id,string name)
 - hash: 0xfb3c934b2382873277133498bde6eb3914ab323e3bef8b373ebcd423969bf1a2
FreeStandingStructs.sol > InsideContract > ContractStruct:
 - type: ContractStruct(uint256 value)
 - hash: 0xfb63263e7cf823ff50385a991cb1bd5c1ff46b58011119984d52f8736331e3fe
FreeStandingStructs.sol > InsideLibrary > LibraryStruct:
 - type: LibraryStruct(bytes32 hash)
 - hash: 0x81d6d25f4d37549244d76a68f23ecdcbf3ae81e5a361ed6c492b6a2e126a2843
"#]],
);
});
// Checks the `vm.eip712HashType` cheatcode on the flat EIP712Domain struct:
// it must canonicalize a messy type definition (extra whitespace) and also
// resolve a bare type name via bindings generated by `forge bind-json`.
forgetest!(test_eip712_cheatcode_simple, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
prj.insert_console();
prj.add_source(
"Eip712",
r#"
contract Eip712Structs {
struct EIP712Domain {
string name;
string version;
uint256 chainId;
address verifyingContract;
}
}
"#,
);
prj.add_source("Eip712Cheat.sol", r#"
import "./test.sol";
import "./Vm.sol";
import "./console.sol";
string constant CANONICAL = "EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)";
contract Eip712Test is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
function testEip712HashType() public {
bytes32 canonicalHash = keccak256(bytes(CANONICAL));
console.logBytes32(canonicalHash);
// Can figure out the canonical type from a messy string representation of the type,
// with an invalid order and extra whitespaces
bytes32 fromTypeDef = vm.eip712HashType(
"EIP712Domain(string name, string version, uint256 chainId, address verifyingContract)"
);
assertEq(fromTypeDef, canonicalHash);
// Can figure out the canonical type from the previously generated bindings
bytes32 fromTypeName = vm.eip712HashType("EIP712Domain");
assertEq(fromTypeName, canonicalHash);
}
}
"#,
);
// Generate JSON bindings first — type-name lookup depends on them — and grant
// the test read permission on the generated file.
cmd.forge_fuse().args(["bind-json"]).assert_success();
let bindings = prj.root().join("utils").join("JsonBindings.sol");
assert!(bindings.exists(), "'JsonBindings.sol' was not generated at {bindings:?}");
prj.update_config(|config| config.fs_permissions.add(PathPermission::read(bindings)));
cmd.forge_fuse().args(["test", "--mc", "Eip712Test", "-vv"]).assert_success().stdout_eq(str![
[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/Eip712Cheat.sol:Eip712Test
[PASS] testEip712HashType() ([GAS])
Logs:
0x8b73c3c69bb8fe3d512ecc4cf759cc79239f7b179b0ffacaa9a75d522b39400f
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]
]);
});
// Exercises `vm.eip712HashType` with nested struct types (Transaction ->
// Person/Asset) and the full bind-json workflow:
//  1. type-definition lookup works without any bindings;
//  2. type-name lookup fails until `forge bind-json` has run, then succeeds;
//  3. an unknown type name reverts (caught via `_expectCheatcodeRevert`);
//  4. the (path, name) overload requires bindings at that custom path.
forgetest!(test_eip712_cheatcode_nested, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
prj.insert_console();
prj.add_source(
"Eip712",
r#"
contract Eip712Structs {
struct Transaction {
Person from;
Person to;
Asset tx;
}
struct Person {
address wallet;
string name;
}
struct Asset {
address token;
uint256 amount;
}
}
"#,
);
prj.add_source("Eip712Cheat.sol", r#"
import "./test.sol";
import "./Vm.sol";
string constant CANONICAL = "Transaction(Person from,Person to,Asset tx)Asset(address token,uint256 amount)Person(address wallet,string name)";
contract Eip712Test is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
function testEip712HashType_byDefinition() public {
bytes32 canonicalHash = keccak256(bytes(CANONICAL));
// Can figure out the canonical type from a messy string representation of the type,
// with an invalid order and extra whitespaces
bytes32 fromTypeDef = vm.eip712HashType(
"Person(address wallet, string name) Asset(address token, uint256 amount) Transaction(Person from, Person to, Asset tx)"
);
assertEq(fromTypeDef, canonicalHash);
}
function testEip712HashType_byTypeName() public {
bytes32 canonicalHash = keccak256(bytes(CANONICAL));
// Can figure out the canonical type from the previously generated bindings
bytes32 fromTypeName = vm.eip712HashType("Transaction");
assertEq(fromTypeName, canonicalHash);
}
function testReverts_Eip712HashType_invalidName() public {
// Reverts if the input type is not found in the bindings
vm._expectCheatcodeRevert();
bytes32 fromTypeName = vm.eip712HashType("InvalidTypeName");
}
function testEip712HashType_byCustomPathAndTypeName() public {
bytes32 canonicalHash = keccak256(bytes(CANONICAL));
// Can figure out the canonical type from the previously generated bindings
bytes32 fromTypeName = vm.eip712HashType("utils/CustomJsonBindings.sol", "Transaction");
assertEq(fromTypeName, canonicalHash);
}
}
"#,
);
// cheatcode by type definition can run without bindings
cmd.forge_fuse()
.args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byDefinition"])
.assert_success();
let bindings = prj.root().join("utils").join("JsonBindings.sol");
prj.update_config(|config| config.fs_permissions.add(PathPermission::read(&bindings)));
// cheatcode by type name fails if bindings haven't been generated
cmd.forge_fuse()
.args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byTypeName"])
.assert_failure()
.stdout_eq(str![[r#"
...
Ran 1 test for src/Eip712Cheat.sol:Eip712Test
[FAIL: vm.eip712HashType: failed to read from [..] testEip712HashType_byTypeName() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in src/Eip712Cheat.sol:Eip712Test
[FAIL: vm.eip712HashType: failed to read from [..] testEip712HashType_byTypeName() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
cmd.forge_fuse().args(["bind-json"]).assert_success();
assert!(bindings.exists(), "'JsonBindings.sol' was not generated at {bindings:?}");
// with generated bindings, cheatcode by type name works
cmd.forge_fuse()
.args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byTypeName"])
.assert_success();
// even with generated bindings, cheatcode by type name fails if name is not present
cmd.forge_fuse()
.args([
"test",
"--mc",
"Eip712Test",
"--match-test",
"testReverts_Eip712HashType_invalidName",
])
.assert_success();
let bindings_2 = prj.root().join("utils").join("CustomJsonBindings.sol");
prj.update_config(|config| {
config.fs_permissions.add(PathPermission::read(&bindings_2));
});
// cheatcode by custom path and type name fails if bindings haven't been generated for that path
cmd.forge_fuse()
.args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byCustomPathAndTypeName"])
.assert_failure()
.stdout_eq(str![[r#"
...
Ran 1 test for src/Eip712Cheat.sol:Eip712Test
[FAIL: vm.eip712HashType: failed to read from [..] testEip712HashType_byCustomPathAndTypeName() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in src/Eip712Cheat.sol:Eip712Test
[FAIL: vm.eip712HashType: failed to read from [..] testEip712HashType_byCustomPathAndTypeName() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
cmd.forge_fuse().args(["bind-json", "utils/CustomJsonBindings.sol"]).assert_success();
assert!(bindings_2.exists(), "'CustomJsonBindings.sol' was not generated at {bindings_2:?}");
// with generated bindings, cheatcode by custom path and type name works
cmd.forge_fuse()
.args([
"test",
"--mc",
"Eip712Test",
"--match-test",
"testEip712HashType_byCustomPathAndTypeName",
])
.assert_success();
});
// Checks `vm.eip712HashStruct` on a flat EIP712Domain value: the cheatcode's
// struct hash must equal a hand-rolled hashStruct computed in Solidity
// (typehash ++ abi-encoded members, with dynamic members pre-hashed).
forgetest!(test_eip712_hash_struct_simple, |prj, cmd| {
prj.insert_ds_test();
prj.insert_vm();
prj.insert_console();
prj.add_source(
"Eip712HashStructDomainTest.sol",
r#"
import "./Vm.sol";
import "./test.sol";
import "./console.sol";
struct EIP712Domain {
string name;
string version;
uint256 chainId;
address verifyingContract;
}
string constant _EIP712_DOMAIN_TYPE_DEF = "EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)";
bytes32 constant _EIP712_DOMAIN_TYPE_HASH = keccak256(bytes(_EIP712_DOMAIN_TYPE_DEF));
contract Eip712HashStructDomainTest is DSTest {
Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));
function testHashEIP712Domain() public {
EIP712Domain memory domain = EIP712Domain({
name: "Foo",
version: "Bar",
chainId: 1,
verifyingContract: 0xdEADBEeF00000000000000000000000000000000
});
// simulate user-computed domain hash
bytes memory encodedData = abi.encode(
keccak256(bytes(domain.name)),
keccak256(bytes(domain.version)),
bytes32(domain.chainId),
bytes32(uint256(uint160(domain.verifyingContract)))
);
bytes32 userStructHash = keccak256(abi.encodePacked(_EIP712_DOMAIN_TYPE_HASH, encodedData));
// cheatcode-computed domain hash
bytes32 cheatStructHash = vm.eip712HashStruct(_EIP712_DOMAIN_TYPE_DEF, abi.encode(domain));
console.log("EIP712Domain struct hash from cheatcode:");
console.logBytes32(cheatStructHash);
assertEq(cheatStructHash, userStructHash, "EIP712Domain struct hash mismatch");
}
}
"#,
);
cmd.forge_fuse().args(["test", "--mc", "Eip712HashStructDomainTest", "-vvvv"]).assert_success();
});
// Tests the `vm.eip712HashStruct` cheatcode on nested struct types, comparing its output
// with hand-rolled reference hashing libraries (a permit2-style PermitSingle and a custom
// Transaction type). Exercises both lookup by bare type name (requires `forge bind-json`
// bindings) and by explicit EIP-712 type-definition string; the snapshot pins the logged
// PermitSingle hash so both code paths must agree with the reference implementation.
forgetest!(test_eip712_hash_struct_complex, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    prj.insert_console();
    // Reference implementation #1: permit2-style structs + hashing library.
    prj.add_source(
        "Eip712Permit.sol",
        r#"
struct PermitDetails {
address token;
uint160 amount;
uint48 expiration;
uint48 nonce;
}
bytes32 constant _PERMIT_DETAILS_TYPEHASH = keccak256(
"PermitDetails(address token,uint160 amount,uint48 expiration,uint48 nonce)"
);
struct PermitSingle {
PermitDetails details;
address spender;
uint256 sigDeadline;
}
bytes32 constant _PERMIT_SINGLE_TYPEHASH = keccak256(
"PermitSingle(PermitDetails details,address spender,uint256 sigDeadline)PermitDetails(address token,uint160 amount,uint48 expiration,uint48 nonce)"
);
// borrowed from https://github.com/Uniswap/permit2/blob/main/src/libraries/PermitHash.sol
library PermitHash {
function hash(PermitSingle memory permitSingle) internal pure returns (bytes32) {
bytes32 permitHash = _hashDetails(permitSingle.details);
return
keccak256(abi.encode(_PERMIT_SINGLE_TYPEHASH, permitHash, permitSingle.spender, permitSingle.sigDeadline));
}
function _hashDetails(PermitDetails memory details) internal pure returns (bytes32) {
return keccak256(abi.encode(_PERMIT_DETAILS_TYPEHASH, details));
}
}
"#,
    );
    // Reference implementation #2: a Transaction type nesting two other struct types.
    prj.add_source(
        "Eip712Transaction.sol",
        r#"
struct Asset {
address token;
uint256 amount;
}
bytes32 constant _ASSET_TYPEHASH = keccak256(
"Asset(address token,uint256 amount)"
);
struct Person {
address wallet;
string name;
}
bytes32 constant _PERSON_TYPEHASH = keccak256(
"Person(address wallet,string name)"
);
struct Transaction {
Person from;
Person to;
Asset tx;
}
bytes32 constant _TRANSACTION_TYPEHASH = keccak256(
"Transaction(Person from,Person to,Asset tx)Asset(address token,uint256 amount)Person(address wallet,string name)"
);
library TransactionHash {
function hash(Transaction memory t) internal pure returns (bytes32) {
bytes32 fromHash = _hashPerson(t.from);
bytes32 toHash = _hashPerson(t.to);
bytes32 assetHash = _hashAsset(t.tx);
return
keccak256(abi.encode(_TRANSACTION_TYPEHASH, fromHash, toHash, assetHash));
}
function _hashPerson(Person memory person) internal pure returns (bytes32) {
return keccak256(
abi.encode(_PERSON_TYPEHASH, person.wallet, keccak256(bytes(person.name)))
);
}
function _hashAsset(Asset memory asset) internal pure returns (bytes32) {
return keccak256(abi.encode(_ASSET_TYPEHASH, asset));
}
}
"#,
    );
    // Generate the JSON bindings used by the type-name variant of the cheatcode, and
    // grant read permission on the generated bindings file.
    let bindings = prj.root().join("utils").join("JsonBindings.sol");
    prj.update_config(|config| config.fs_permissions.add(PathPermission::read(&bindings)));
    cmd.forge_fuse().args(["bind-json"]).assert_success();
    // The actual test contract: every cheatcode-computed hash must equal the
    // corresponding reference-library hash.
    prj.add_source(
        "Eip712HashStructTest.sol",
        r#"
import "./Vm.sol";
import "./test.sol";
import "./console.sol";
import "./Eip712Permit.sol";
import "./Eip712Transaction.sol";
contract Eip712HashStructTest is DSTest {
Vm constant vm = Vm(HEVM_ADDRESS);
function testHashPermitSingle_withTypeName() public {
PermitDetails memory details = PermitDetails({
token: 0x1111111111111111111111111111111111111111,
amount: 1000 ether,
expiration: 12345,
nonce: 1
});
// user-computed permit (using uniswap hash library)
bytes32 userStructHash = PermitHash._hashDetails(details);
// cheatcode-computed permit
bytes32 cheatStructHash = vm.eip712HashStruct("PermitDetails", abi.encode(details));
assertEq(cheatStructHash, userStructHash, "details struct hash mismatch");
PermitSingle memory permit = PermitSingle({
details: details,
spender: 0x2222222222222222222222222222222222222222,
sigDeadline: 12345
});
// user-computed permit (using uniswap hash library)
userStructHash = PermitHash.hash(permit);
// cheatcode-computed permit
cheatStructHash = vm.eip712HashStruct("PermitSingle", abi.encode(permit));
console.log("PermitSingle struct hash from cheatcode:");
console.logBytes32(cheatStructHash);
assertEq(cheatStructHash, userStructHash, "permit struct hash mismatch");
}
function testHashPermitSingle_withTypeDefinition() public {
PermitDetails memory details = PermitDetails({
token: 0x1111111111111111111111111111111111111111,
amount: 1000 ether,
expiration: 12345,
nonce: 1
});
// user-computed permit (using uniswap hash library)
bytes32 userStructHash = PermitHash._hashDetails(details);
// cheatcode-computed permit
bytes32 cheatStructHash = vm.eip712HashStruct("PermitDetails(address token, uint160 amount, uint48 expiration, uint48 nonce)", abi.encode(details));
assertEq(cheatStructHash, userStructHash, "details struct hash mismatch");
PermitSingle memory permit = PermitSingle({
details: details,
spender: 0x2222222222222222222222222222222222222222,
sigDeadline: 12345
});
// user-computed permit (using uniswap hash library)
userStructHash = PermitHash.hash(permit);
// cheatcode-computed permit (previously encoding)
cheatStructHash = vm.eip712HashStruct("PermitDetails(address token, uint160 amount, uint48 expiration, uint48 nonce) PermitSingle(PermitDetails details,address spender,uint256 sigDeadline)", abi.encode(permit));
console.log("PermitSingle struct hash from cheatcode:");
console.logBytes32(cheatStructHash);
assertEq(cheatStructHash, userStructHash, "permit struct hash mismatch");
}
function testHashTransaction_withTypeName() public {
Asset memory asset = Asset ({ token: 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2, amount: 100 ether });
bytes32 user = TransactionHash._hashAsset(asset);
bytes32 cheat = vm.eip712HashStruct("Asset", abi.encode(asset));
assertEq(user, cheat, "asset struct hash mismatch");
Person memory from = Person ({ wallet: 0x0000000000000000000000000000000000000001, name: "alice" });
Person memory to = Person ({ wallet: 0x0000000000000000000000000000000000000002, name: "bob" });
user = TransactionHash._hashPerson(from);
cheat = vm.eip712HashStruct("Person", abi.encode(from));
assertEq(user, cheat, "person struct hash mismatch");
Transaction memory t = Transaction ({ from: from, to: to, tx: asset });
user = TransactionHash.hash(t);
cheat = vm.eip712HashStruct("Transaction", abi.encode(t));
assertEq(user, cheat, "transaction struct hash mismatch");
}
function testHashTransaction_withTypeDefinition() public {
Asset memory asset = Asset ({ token: 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2, amount: 100 ether });
bytes32 user = TransactionHash._hashAsset(asset);
bytes32 cheat = vm.eip712HashStruct("Asset(address token, uint256 amount)", abi.encode(asset));
assertEq(user, cheat, "asset struct hash mismatch");
Person memory from = Person ({ wallet: 0x0000000000000000000000000000000000000001, name: "alice" });
Person memory to = Person ({ wallet: 0x0000000000000000000000000000000000000002, name: "bob" });
user = TransactionHash._hashPerson(from);
cheat = vm.eip712HashStruct("Person(address wallet, string name)", abi.encode(from));
assertEq(user, cheat, "person struct hash mismatch");
Transaction memory t = Transaction ({ from: from, to: to, tx: asset });
user = TransactionHash.hash(t);
cheat = vm.eip712HashStruct("Person(address wallet, string name) Asset(address token, uint256 amount) Transaction(Person from, Person to, Asset tx)", abi.encode(t));
assertEq(user, cheat, "transaction struct hash mismatch");
}
}
"#,
    );
    // Run only the struct-hash contract; the snapshot pins the logged PermitSingle hash
    // for both the type-name and the type-definition variants.
    cmd.forge_fuse()
        .args(["test", "--mc", "Eip712HashStructTest", "-vv"])
        .assert_success()
        .stdout_eq(str![[r#"
...
[PASS] testHashPermitSingle_withTypeDefinition() ([GAS])
Logs:
PermitSingle struct hash from cheatcode:
0x3ed744fdcea02b6b9ad45a9db6e648bf6f18c221909f9ee425191f2a02f9e4a8
[PASS] testHashPermitSingle_withTypeName() ([GAS])
Logs:
PermitSingle struct hash from cheatcode:
0x3ed744fdcea02b6b9ad45a9db6e648bf6f18c221909f9ee425191f2a02f9e4a8
...
"#]]);
});
// Tests `vm.eip712HashTypedData` on a full EIP-712 typed-data JSON payload
// (domain-only message), checking the digest against a known-good value.
forgetest!(test_eip712_hash_typed_data, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    prj.insert_console();
    prj.add_source(
        "Eip712HashTypedData.sol",
        r#"
import "./Vm.sol";
import "./test.sol";
import "./console.sol";
contract Eip712HashTypedDataTest is DSTest {
Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));
function testHashEIP712Message() public {
string memory jsonData =
'{"types":{"EIP712Domain":[{"name":"name","type":"string"},{"name":"version","type":"string"},{"name":"chainId","type":"uint256"},{"name":"verifyingContract","type":"address"},{"name":"salt","type":"bytes32"}]},"primaryType":"EIP712Domain","domain":{"name":"example.metamask.io","version":"1","chainId":1,"verifyingContract":"0x0000000000000000000000000000000000000000"},"message":{}}';
// since this cheatcode simply exposes an alloy fn, the test has been borrowed from:
// <https://github.com/alloy-rs/core/blob/e0727c2224a5a83664d4ca1fb2275090d29def8b/crates/dyn-abi/src/eip712/typed_data.rs#L256>
bytes32 expectedHash = hex"122d1c8ef94b76dad44dcb03fa772361e20855c63311a15d5afe02d1b38f6077";
assertEq(vm.eip712HashTypedData(jsonData), expectedHash, "EIP712Domain struct hash mismatch");
}
}
"#,
    );
    // A passing run implies the digest matched the expected hash inside the contract.
    cmd.forge_fuse().args(["test", "--mc", "Eip712HashTypedDataTest"]).assert_success();
});
// repro: <https://github.com/foundry-rs/foundry/issues/11366>
// Builds a typed-data JSON payload at runtime with `vm.serializeJsonType` and feeds it
// to `vm.eip712HashTypedData`; the test only asserts that hashing does not revert.
forgetest!(test_eip712_hash_typed_data_repro, |prj, cmd| {
    prj.insert_ds_test();
    prj.insert_vm();
    prj.insert_console();
    prj.add_source(
        "Eip712HashTypedData.sol",
        r#"
import "./Vm.sol";
import "./test.sol";
import "./console.sol";
contract CounterStrike {
bytes32 public constant ATTACK_TYPEHASH = keccak256("Attack(address player,uint128 x,uint128 y,uint40 shootTime)");
bytes32 public constant DOMAIN_TYPEHASH =
keccak256("EIP712Domain(string name,uint256 chainId,address verifyingContract)");
string public constant PROTOCOL_NAME = "CounterStrike";
}
contract CounterStrike_Test is DSTest {
Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code")))));
struct EIP712Domain {
string name;
uint256 chainId;
address verifyingContract;
}
struct Attack {
address player;
uint128 x;
uint128 y;
uint40 shootTime;
}
string constant SCHEMA_EIP712_DOMAIN = "EIP712Domain(string name,uint256 chainId,address verifyingContract)";
string constant SCHEMA_ATTACK = "Attack(address player,uint128 x,uint128 y,uint40 shootTime)";
CounterStrike public counterStrike;
address public player;
uint256 public playerPrivateKey;
uint128 public x = 10_000e18;
uint128 public y = 20_000e18;
uint40 public shootTime = 12_345_678;
function setUp() public {
counterStrike = new CounterStrike();
}
function test_Attack() public view {
string memory domainJson = vm.serializeJsonType(
SCHEMA_EIP712_DOMAIN,
abi.encode(
EIP712Domain({
name: counterStrike.PROTOCOL_NAME(),
chainId: block.chainid,
verifyingContract: address(counterStrike)
})
)
);
string memory messageJson = vm.serializeJsonType(
SCHEMA_ATTACK, abi.encode(Attack({ player: player, x: x, y: y, shootTime: shootTime }))
);
string memory typesJson = string.concat(
'{"EIP712Domain":[{"name":"name","type":"string"},{"name":"chainId","type":"uint256"},{"name":"verifyingContract","type":"address"}],"Attack":[{"name":"player","type":"address"},{"name":"x","type":"uint128"},{"name":"y","type":"uint128"},{"name":"shootTime","type":"uint40"}]}'
);
string memory primaryType = '"Attack"';
string memory typedDataJson = string.concat(
'{"types":',
typesJson,
',"primaryType":',
primaryType,
',"domain":',
domainJson,
',"message":',
messageJson,
"}"
);
bytes32 digest = vm.eip712HashTypedData(typedDataJson);
console.logBytes32(digest);
}
}
"#,
    );
    // `-vvv` surfaces traces if the hashing ever starts reverting again.
    cmd.forge_fuse().args(["test", "-vvv"]).assert_success();
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/cmd.rs | crates/forge/tests/cli/cmd.rs | //! Contains various tests for checking forge's commands
use crate::constants::*;
use foundry_compilers::artifacts::{ConfigurableContractArtifact, Metadata, remappings::Remapping};
use foundry_config::{
BasicConfig, Chain, Config, DenyLevel, FuzzConfig, InvariantConfig, SolidityErrorCode,
parse_with_profile,
};
use foundry_test_utils::{
foundry_compilers::PathStyle,
rpc::next_etherscan_api_key,
snapbox::IntoData,
util::{OutputExt, read_string},
};
use std::{
fs,
path::Path,
process::{Command, Stdio},
str::FromStr,
};
// tests `--help` is printed to std out and matches the expected usage/options summary
forgetest!(print_help, |_prj, cmd| {
    cmd.arg("--help").assert_success().stdout_eq(str![[r#"
Build, test, fuzz, debug and deploy Solidity contracts
Usage: forge[..] <COMMAND>
Commands:
...
Options:
-h, --help
Print help (see a summary with '-h')
-j, --threads <THREADS>
Number of threads to use. Specifying 0 defaults to the number of logical cores
[aliases: --jobs]
-V, --version
Print version
Display options:
--color <COLOR>
The color of the log messages
Possible values:
- auto: Intelligently guess whether to use color output (default)
- always: Force color output
- never: Force disable color output
--json
Format log messages as JSON
--md
Format log messages as Markdown
-q, --quiet
Do not print log messages
-v, --verbosity...
Verbosity level of the log messages.
Pass multiple times to increase the verbosity (e.g. -v, -vv, -vvv).
Depending on the context the verbosity levels have different meanings.
For example, the verbosity levels of the EVM are:
- 2 (-vv): Print logs for all tests.
- 3 (-vvv): Print execution traces for failing tests.
- 4 (-vvvv): Print execution traces for all tests, and setup traces for failing tests.
- 5 (-vvvvv): Print execution and setup traces for all tests, including storage changes
and
backtraces with line numbers.
Find more information in the book: https://getfoundry.sh/forge/overview
"#]]);
});
// `forge clean` must succeed (and stay silent) even when neither the out
// directory nor the cache exist yet.
forgetest!(can_clean_non_existing, |prj, cmd| {
    cmd.arg("clean").assert_empty_stdout();
    prj.assert_cleaned();
});
// checks that `clean` doesn't output warnings: a deprecated `[default]` profile
// section makes `forge build` warn, but `forge clean` must keep stderr empty.
forgetest_init!(can_clean_without_warnings, |prj, cmd| {
    prj.add_source(
        "Simple.sol",
        r#"
pragma solidity ^0.8.5;
contract Simple {
uint public value = 42;
}
"#,
    );
    // `[default]` (instead of `[profile.default]`) is the deprecated profile notation.
    prj.create_file(
        "foundry.toml",
        r#"
[default]
evm_version = "cancun"
solc = "0.8.5"
"#,
    );
    // `forge build` warns
    cmd.forge_fuse().arg("build").assert_success().stderr_eq(str![[r#"
Warning: Found unknown config section in foundry.toml: [default]
This notation for profiles has been deprecated and may result in the profile not being registered in future versions.
Please use [profile.default] instead or run `forge config --fix`.
"#]]);
    // `forge clear` should not warn
    cmd.forge_fuse().arg("clean").assert_success().stderr_eq(str![[r#"
"#]]);
});
// checks that `cache ls` can be invoked and displays the foundry cache
// (ignored: it touches the shared ~/.foundry cache, so it is not isolated)
forgetest!(
    #[ignore]
    can_cache_ls,
    |_prj, cmd| {
        let chain = Chain::mainnet();
        let block1 = 100;
        let block2 = 101;
        // Materialize two block caches and an etherscan cache for mainnet so that
        // `cache ls` has something deterministic to display.
        let block1_cache_dir = Config::foundry_block_cache_dir(chain, block1).unwrap();
        let block1_file = Config::foundry_block_cache_file(chain, block1).unwrap();
        let block2_cache_dir = Config::foundry_block_cache_dir(chain, block2).unwrap();
        let block2_file = Config::foundry_block_cache_file(chain, block2).unwrap();
        let etherscan_cache_dir = Config::foundry_etherscan_chain_cache_dir(chain).unwrap();
        fs::create_dir_all(block1_cache_dir).unwrap();
        fs::write(block1_file, "{}").unwrap();
        fs::create_dir_all(block2_cache_dir).unwrap();
        fs::write(block2_file, "{}").unwrap();
        fs::create_dir_all(etherscan_cache_dir).unwrap();
        let output = cmd.args(["cache", "ls"]).assert_success().get_output().stdout_lossy();
        let output_lines = output.split('\n').collect::<Vec<_>>();
        println!("{output}");
        // Expected layout: chain header, explorer line, blank, two block lines, blank.
        assert_eq!(output_lines.len(), 6);
        assert!(output_lines[0].starts_with("-️ mainnet ("));
        assert!(output_lines[1].starts_with("\t-️ Block Explorer ("));
        assert_eq!(output_lines[2], "");
        assert!(output_lines[3].starts_with("\t-️ Block 100 ("));
        assert!(output_lines[4].starts_with("\t-️ Block 101 ("));
        assert_eq!(output_lines[5], "");
        // Clean up the shared cache this test polluted.
        Config::clean_foundry_cache().unwrap();
    }
);
// checks that `cache clean` can be invoked and cleans the foundry cache
// this test is not isolated and modifies ~ so it is ignored
forgetest!(
    #[ignore]
    can_cache_clean,
    |_prj, cmd| {
        // Ensure the global cache dir exists, then wipe it via the CLI.
        let cache_dir = Config::foundry_cache_dir().unwrap();
        fs::create_dir_all(&cache_dir).unwrap();
        cmd.args(["cache", "clean"]).assert_empty_stdout();
        assert!(!cache_dir.exists());
    }
);
// checks that `cache clean --etherscan` can be invoked and only cleans the foundry etherscan cache
// this test is not isolated and modifies ~ so it is ignored
forgetest!(
    #[ignore]
    can_cache_clean_etherscan,
    |_prj, cmd| {
        let cache_dir = Config::foundry_cache_dir().unwrap();
        let etherscan_cache_dir = Config::foundry_etherscan_cache_dir().unwrap();
        fs::create_dir_all(&etherscan_cache_dir).unwrap();
        // Only the etherscan subtree may be removed; the cache root must survive.
        cmd.args(["cache", "clean", "--etherscan"]).assert_empty_stdout();
        assert!(cache_dir.exists());
        assert!(!etherscan_cache_dir.exists());
        Config::clean_foundry_cache().unwrap();
    }
);
// checks that `cache clean all --etherscan` can be invoked and only cleans the foundry etherscan
// cache. This test is not isolated and modifies ~ so it is ignored
forgetest!(
    #[ignore]
    can_cache_clean_all_etherscan,
    |_prj, cmd| {
        let rpc_cache_dir = Config::foundry_rpc_cache_dir().unwrap();
        let etherscan_cache_dir = Config::foundry_etherscan_cache_dir().unwrap();
        fs::create_dir_all(&rpc_cache_dir).unwrap();
        fs::create_dir_all(&etherscan_cache_dir).unwrap();
        // `--etherscan` restricts `clean all` to the etherscan cache only.
        cmd.args(["cache", "clean", "all", "--etherscan"]).assert_empty_stdout();
        assert!(rpc_cache_dir.exists());
        assert!(!etherscan_cache_dir.exists());
        Config::clean_foundry_cache().unwrap();
    }
);
// checks that `cache clean <chain>` can be invoked and cleans the chain cache
// this test is not isolated and modifies ~ so it is ignored
forgetest!(
    #[ignore]
    can_cache_clean_chain,
    |_prj, cmd| {
        let chain = Chain::mainnet();
        let chain_cache_dir = Config::foundry_chain_cache_dir(chain).unwrap();
        let etherscan_cache_dir = Config::foundry_etherscan_chain_cache_dir(chain).unwrap();
        fs::create_dir_all(&chain_cache_dir).unwrap();
        fs::create_dir_all(&etherscan_cache_dir).unwrap();
        // Cleaning a chain removes both its RPC cache and its etherscan cache.
        cmd.args(["cache", "clean", "mainnet"]).assert_empty_stdout();
        assert!(!chain_cache_dir.exists());
        assert!(!etherscan_cache_dir.exists());
        Config::clean_foundry_cache().unwrap();
    }
);
// checks that `cache clean <chain> --blocks 100,101` can be invoked and cleans the chain block
// caches this test is not isolated and modifies ~ so it is ignored
forgetest!(
    #[ignore]
    can_cache_clean_blocks,
    |_prj, cmd| {
        let chain = Chain::mainnet();
        // Create caches for three blocks; only the first two are cleaned below.
        let block_dirs =
            [100, 101, 102].map(|b| Config::foundry_block_cache_dir(chain, b).unwrap());
        let etherscan_cache_dir = Config::foundry_etherscan_chain_cache_dir(chain).unwrap();
        for dir in &block_dirs {
            fs::create_dir_all(dir).unwrap();
        }
        fs::create_dir_all(&etherscan_cache_dir).unwrap();
        cmd.args(["cache", "clean", "mainnet", "--blocks", "100,101"]).assert_empty_stdout();
        // Blocks 100 and 101 are gone; block 102 and the etherscan cache remain.
        assert!(!block_dirs[0].exists());
        assert!(!block_dirs[1].exists());
        assert!(block_dirs[2].exists());
        assert!(etherscan_cache_dir.exists());
        Config::clean_foundry_cache().unwrap();
    }
);
// checks that `cache clean <chain> --etherscan` can be invoked and cleans the etherscan chain cache
// this test is not isolated and modifies ~ so it is ignored
forgetest!(
    #[ignore]
    can_cache_clean_chain_etherscan,
    |_prj, cmd| {
        let cache_dir = Config::foundry_chain_cache_dir(Chain::mainnet()).unwrap();
        let etherscan_cache_dir =
            Config::foundry_etherscan_chain_cache_dir(Chain::mainnet()).unwrap();
        fs::create_dir_all(&cache_dir).unwrap();
        fs::create_dir_all(&etherscan_cache_dir).unwrap();
        // Only the chain's etherscan cache may be removed; the RPC cache must survive.
        cmd.args(["cache", "clean", "mainnet", "--etherscan"]).assert_empty_stdout();
        assert!(cache_dir.exists());
        assert!(!etherscan_cache_dir.exists());
        Config::clean_foundry_cache().unwrap();
    }
);
// checks that init works: `forge init --force` must emit the expected output and
// produce a foundry.toml that round-trips through the config parser.
forgetest!(can_init_repo_with_config, |prj, cmd| {
    let foundry_toml = prj.root().join(Config::FILE_NAME);
    assert!(!foundry_toml.exists());
    cmd.args(["init", "--force"])
        .arg(prj.root())
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]])
        .stderr_eq(str![[r#"
Warning: Target directory is not empty, but `--force` was specified
...
"#]]);
    // The generated config must parse as a valid BasicConfig.
    let s = read_string(&foundry_toml);
    let _config: BasicConfig = parse_with_profile(&s).unwrap().unwrap().1;
});
// Checks that a forge project fails to initialise if dir is already git repo and dirty
forgetest!(can_detect_dirty_git_status_on_init, |prj, cmd| {
    prj.wipe();
    // initialize new git repo
    cmd.git_init();
    // An untracked file makes the enclosing repository "dirty".
    std::fs::write(prj.root().join("untracked.text"), "untracked").unwrap();
    // create nested dir and execute init in nested dir
    let nested = prj.root().join("nested");
    fs::create_dir_all(&nested).unwrap();
    cmd.current_dir(&nested);
    cmd.args(["init", "--commit"]).assert_failure().stderr_eq(str![[r#"
Error: The target directory is a part of or on its own an already initialized git repository,
and it requires clean working and staging areas, including no untracked files.
Check the current git repository's status with `git status`.
Then, you can track files with `git add ...` and then commit them with `git commit`,
ignore them in the `.gitignore` file.
"#]]);
    // ensure nothing was emitted, dir is empty
    assert!(!nested.read_dir().map(|mut i| i.next().is_some()).unwrap_or_default());
});
// Checks that a forge project can be initialized without creating a git repository
forgetest!(can_init_no_git, |prj, cmd| {
    prj.wipe();
    cmd.arg("init").arg(prj.root()).arg("--no-git").assert_success().stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]]);
    prj.assert_config_exists();
    // No git metadata anywhere: neither the project root nor the vendored dependency.
    assert!(!prj.root().join(".git").exists());
    assert!(prj.root().join("lib/forge-std").exists());
    assert!(!prj.root().join("lib/forge-std/.git").exists());
});
// Checks that quiet mode does not print anything
forgetest!(can_init_quiet, |prj, cmd| {
    prj.wipe();
    // `-q` must suppress all of init's usual progress output.
    cmd.arg("init").arg(prj.root()).arg("-q").assert_empty_stdout();
});
// `forge init foobar` works with dir argument: init succeeds into a subdirectory
// even when the enclosing directory is non-empty.
forgetest!(can_init_with_dir, |prj, cmd| {
    prj.create_file("README.md", "non-empty dir");
    cmd.args(["init", "foobar"]).assert_success();
    assert!(prj.root().join("foobar").exists());
});
// `forge init foobar --template [template]` works with dir argument
forgetest!(can_init_with_dir_and_template, |prj, cmd| {
    cmd.args(["init", "foobar", "--template", "foundry-rs/forge-template"])
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..] from https://github.com/foundry-rs/forge-template...
Initialized forge project
"#]]);
    // The template's layout must be materialized under the `foobar` subdirectory.
    assert!(prj.root().join("foobar/.git").exists());
    assert!(prj.root().join("foobar/foundry.toml").exists());
    assert!(prj.root().join("foobar/lib/forge-std").exists());
    // assert that gitmodules were correctly initialized
    assert!(prj.root().join("foobar/.git/modules").exists());
    assert!(prj.root().join("foobar/src").exists());
    assert!(prj.root().join("foobar/test").exists());
});
// `forge init foobar --template [template] --branch [branch]` works with dir argument
forgetest!(can_init_with_dir_and_template_and_branch, |prj, cmd| {
    cmd.args([
        "init",
        "foobar",
        "--template",
        "foundry-rs/forge-template",
        "--branch",
        "test/deployments",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
Initializing [..] from https://github.com/foundry-rs/forge-template...
Initialized forge project
"#]]);
    // This branch of the template ships `.dapprc`, `lib/ds-test` and a `scripts`
    // directory instead of the default layout — proving the branch was checked out.
    assert!(prj.root().join("foobar/.dapprc").exists());
    assert!(prj.root().join("foobar/lib/ds-test").exists());
    // assert that gitmodules were correctly initialized
    assert!(prj.root().join("foobar/.git/modules").exists());
    assert!(prj.root().join("foobar/src").exists());
    assert!(prj.root().join("foobar/scripts").exists());
});
// `forge init --force` works on non-empty dirs
forgetest!(can_init_non_empty, |prj, cmd| {
    prj.create_file("README.md", "non-empty dir");
    // Without `--force`, init on a non-empty directory must refuse to run.
    cmd.arg("init").arg(prj.root()).assert_failure().stderr_eq(str![[r#"
Error: Cannot run `init` on a non-empty directory.
Run with the `--force` flag to initialize regardless.
"#]]);
    // Re-running the same invocation with `--force` appended succeeds with a warning.
    cmd.arg("--force")
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]])
        .stderr_eq(str![[r#"
Warning: Target directory is not empty, but `--force` was specified
...
"#]]);
    assert!(prj.root().join(".git").exists());
    assert!(prj.root().join("lib/forge-std").exists());
});
// `forge init --force` works on already initialized git repository
forgetest!(can_init_in_empty_repo, |prj, cmd| {
    let root = prj.root();
    // initialize new git repo
    let status = Command::new("git")
        .arg("init")
        .current_dir(root)
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .expect("could not run git init");
    assert!(status.success());
    assert!(root.join(".git").exists());
    // A pre-existing `.git` counts as a non-empty directory, so plain init fails...
    cmd.arg("init").arg(root).assert_failure().stderr_eq(str![[r#"
Error: Cannot run `init` on a non-empty directory.
Run with the `--force` flag to initialize regardless.
"#]]);
    // ...while `--force` proceeds with a warning.
    cmd.arg("--force")
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]])
        .stderr_eq(str![[r#"
Warning: Target directory is not empty, but `--force` was specified
...
"#]]);
    assert!(root.join("lib/forge-std").exists());
});
// `forge init --force` works on already initialized git repository
// (non-empty working tree; pre-existing files like .gitignore must be preserved)
forgetest!(can_init_in_non_empty_repo, |prj, cmd| {
    let root = prj.root();
    // initialize new git repo
    let status = Command::new("git")
        .arg("init")
        .current_dir(root)
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .expect("could not run git init");
    assert!(status.success());
    assert!(root.join(".git").exists());
    prj.create_file("README.md", "non-empty dir");
    prj.create_file(".gitignore", "not foundry .gitignore");
    // Without `--force`, init on the non-empty repo must refuse to run.
    cmd.arg("init").arg(root).assert_failure().stderr_eq(str![[r#"
Error: Cannot run `init` on a non-empty directory.
Run with the `--force` flag to initialize regardless.
"#]]);
    cmd.arg("--force")
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]])
        .stderr_eq(str![[r#"
Warning: Target directory is not empty, but `--force` was specified
...
"#]]);
    assert!(root.join("lib/forge-std").exists());
    // not overwritten
    let gitignore = root.join(".gitignore");
    let gitignore = fs::read_to_string(gitignore).unwrap();
    assert_eq!(gitignore, "not foundry .gitignore");
});
// `forge init --use-parent-git` works on already initialized git repository:
// the new project is created in a subfolder and its submodules are registered in the
// parent repository's .gitmodules rather than in a nested repo.
forgetest!(can_init_using_parent_repo, |prj, cmd| {
    let root = prj.root();
    // initialize new git repo
    let status = Command::new("git")
        .arg("init")
        .current_dir(root)
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .expect("could not run git init");
    assert!(status.success());
    assert!(root.join(".git").exists());
    prj.create_file("README.md", "non-empty dir");
    prj.create_file(".gitignore", "not foundry .gitignore");
    let folder = "foundry-folder";
    cmd.arg("init").arg(folder).arg("--force").arg("--use-parent-git").assert_success().stdout_eq(
        str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]],
    );
    assert!(root.join(folder).join("lib/forge-std").exists());
    // not overwritten
    let gitignore = root.join(".gitignore");
    let gitignore = fs::read_to_string(gitignore).unwrap();
    assert_eq!(gitignore, "not foundry .gitignore");
    // submodules are registered at root
    let gitmodules = root.join(".gitmodules");
    let gitmodules = fs::read_to_string(gitmodules).unwrap();
    assert!(gitmodules.contains(
        "
path = foundry-folder/lib/forge-std
url = https://github.com/foundry-rs/forge-std
"
    ));
});
// Checks that remappings.txt and .vscode/settings.json are generated when
// `forge init --vscode` is used, with the expected contents.
forgetest!(can_init_vscode, |prj, cmd| {
    prj.wipe();
    cmd.arg("init").arg(prj.root()).arg("--vscode").assert_success().stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]]);
    // The VSCode Solidity-extension settings must point at src/ and lib/.
    let settings_path = prj.root().join(".vscode/settings.json");
    assert!(settings_path.is_file());
    let parsed: serde_json::Value =
        foundry_compilers::utils::read_json_file(&settings_path).unwrap();
    let expected = serde_json::json!({
        "solidity.packageDefaultDependenciesContractsDirectory": "src",
        "solidity.packageDefaultDependenciesDirectory": "lib"
    });
    assert_eq!(parsed, expected);
    // A remappings.txt mapping forge-std must also be emitted.
    let remappings_path = prj.root().join("remappings.txt");
    assert!(remappings_path.is_file());
    assert_eq!(fs::read_to_string(remappings_path).unwrap(), "forge-std/=lib/forge-std/src/");
});
// checks that forge can init with template
forgetest!(can_init_template, |prj, cmd| {
    prj.wipe();
    cmd.args(["init", "--template", "foundry-rs/forge-template"])
        .arg(prj.root())
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..] from https://github.com/foundry-rs/forge-template...
Initialized forge project
"#]]);
    // Template contents are checked out directly into the project root.
    assert!(prj.root().join(".git").exists());
    assert!(prj.root().join("foundry.toml").exists());
    assert!(prj.root().join("lib/forge-std").exists());
    // assert that gitmodules were correctly initialized
    assert!(prj.root().join(".git/modules").exists());
    assert!(prj.root().join("src").exists());
    assert!(prj.root().join("test").exists());
});
// checks that forge can init with template and branch
forgetest!(can_init_template_with_branch, |prj, cmd| {
    prj.wipe();
    cmd.args(["init", "--template", "foundry-rs/forge-template", "--branch", "test/deployments"])
        .arg(prj.root())
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..] from https://github.com/foundry-rs/forge-template...
Initialized forge project
"#]]);
    assert!(prj.root().join(".git").exists());
    // This branch contains `.dapprc`, `lib/ds-test` and a `scripts` directory
    // instead of the default layout — proving the branch was checked out.
    assert!(prj.root().join(".dapprc").exists());
    assert!(prj.root().join("lib/ds-test").exists());
    // assert that gitmodules were correctly initialized
    assert!(prj.root().join(".git/modules").exists());
    assert!(prj.root().join("src").exists());
    assert!(prj.root().join("scripts").exists());
});
// checks that init fails when the provided template doesn't exist
forgetest!(fail_init_nonexistent_template, |prj, cmd| {
    prj.wipe();
    // git's own fetch error is surfaced verbatim, followed by forge's exit-code error.
    cmd.args(["init", "--template", "a"]).arg(prj.root()).assert_failure().stderr_eq(str![[r#"
remote: Not Found
fatal: repository 'https://github.com/a/' not found
Error: git fetch exited with code 128
"#]]);
});
// checks that `forge init --template [template]` works by default i.e. without committing:
// initializing from a template must not add a "chore: init from …" commit on top of the
// template's history.
forgetest!(can_init_template_with_no_commit, |prj, cmd| {
    prj.wipe();
    cmd.args(["init", "--template", "foundry-rs/forge-template"])
        .arg(prj.root())
        .assert_success()
        .stdout_eq(str![[r#"
Initializing [..] from https://github.com/foundry-rs/forge-template...
Initialized forge project
"#]]);
    // show the latest commit message was not changed
    //
    // NOTE: `current_dir(prj.root())` is required — without it, the command queries
    // whatever repository the test process happens to run in (if any), which would
    // make the assertion below trivially and meaninglessly pass.
    let output = Command::new("git")
        .args(["log", "-1", "--pretty=%s"]) // Get the latest commit message
        .current_dir(prj.root())
        .output()
        .expect("Failed to execute git command");
    let commit_message = String::from_utf8_lossy(&output.stdout);
    assert!(
        !commit_message.starts_with("chore: init from foundry-rs/forge-template"),
        "Commit message should not start with 'chore: init from foundry-rs/forge-template'"
    );
});
// checks that clone works: download a verified mainnet contract from Etherscan,
// initialize a project around it, and compile it successfully.
forgetest!(can_clone, |prj, cmd| {
    prj.wipe();
    let foundry_toml = prj.root().join(Config::FILE_NAME);
    assert!(!foundry_toml.exists());
    cmd.args([
        "clone",
        "--etherscan-api-key",
        next_etherscan_api_key().as_str(),
        "0x044b75f554b886A065b9567891e45c79542d7357",
    ])
    .arg(prj.root())
    .assert_success()
    .stdout_eq(str![[r#"
Downloading the source code of 0x044b75f554b886A065b9567891e45c79542d7357 from Etherscan...
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
Collecting the creation information of 0x044b75f554b886A065b9567891e45c79542d7357 from Etherscan...
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
    // The generated config must parse as a valid BasicConfig.
    let s = read_string(&foundry_toml);
    let _config: BasicConfig = parse_with_profile(&s).unwrap().unwrap().1;
});
// Checks that quiet mode does not print anything for clone
forgetest!(can_clone_quiet, |prj, cmd| {
    prj.wipe();
    // `--quiet` must suppress every stage of clone's progress output.
    cmd.args([
        "clone",
        "--etherscan-api-key",
        next_etherscan_api_key().as_str(),
        "--quiet",
        "0xDb53f47aC61FE54F456A4eb3E09832D08Dd7BEec",
    ])
    .arg(prj.root())
    .assert_empty_stdout();
});
// checks that clone works with --no-remappings-txt (skips generating remappings.txt;
// the resulting foundry.toml must still parse)
forgetest!(can_clone_no_remappings_txt, |prj, cmd| {
    prj.wipe();
    let foundry_toml = prj.root().join(Config::FILE_NAME);
    assert!(!foundry_toml.exists());
    cmd.args([
        "clone",
        "--etherscan-api-key",
        next_etherscan_api_key().as_str(),
        "--no-remappings-txt",
        "0x33e690aEa97E4Ef25F0d140F1bf044d663091DAf",
    ])
    .arg(prj.root())
    .assert_success()
    .stdout_eq(str![[r#"
Downloading the source code of 0x33e690aEa97E4Ef25F0d140F1bf044d663091DAf from Etherscan...
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
Collecting the creation information of 0x33e690aEa97E4Ef25F0d140F1bf044d663091DAf from Etherscan...
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
    let s = read_string(&foundry_toml);
    let _config: BasicConfig = parse_with_profile(&s).unwrap().unwrap().1;
});
// checks that clone works with --keep-directory-structure
forgetest!(can_clone_keep_directory_structure, |prj, cmd| {
prj.wipe();
let foundry_toml = prj.root().join(Config::FILE_NAME);
assert!(!foundry_toml.exists());
let output = cmd
.forge_fuse()
.args([
"clone",
"--etherscan-api-key",
next_etherscan_api_key().as_str(),
"--keep-directory-structure",
"0x33e690aEa97E4Ef25F0d140F1bf044d663091DAf",
])
.arg(prj.root())
.assert_success()
.get_output()
.stdout_lossy();
if output.contains("502 Bad Gateway") {
// etherscan nginx proxy issue, skip this test:
//
// stdout:
// Downloading the source code of 0x33e690aEa97E4Ef25F0d140F1bf044d663091DAf from
// Etherscan... 2024-07-05T11:40:11.801765Z ERROR etherscan: Failed to deserialize
// response: expected value at line 1 column 1 res="<html>\r\n<head><title>502 Bad
// Gateway</title></head>\r\n<body>\r\n<center><h1>502 Bad
// Gateway</h1></center>\r\n<hr><center>nginx</center>\r\n</body>\r\n</html>\r\n"
eprintln!("Skipping test due to 502 Bad Gateway");
return;
}
let s = read_string(&foundry_toml);
let _config: BasicConfig = parse_with_profile(&s).unwrap().unwrap().1;
});
// checks that `forge init` works.
forgetest!(can_init_project, |prj, cmd| {
prj.wipe();
cmd.args(["init"]).arg(prj.root()).assert_success().stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]]);
assert!(prj.root().join("foundry.toml").exists());
assert!(prj.root().join("lib/forge-std").exists());
assert!(prj.root().join("src").exists());
assert!(prj.root().join("src").join("Counter.sol").exists());
assert!(prj.root().join("test").exists());
assert!(prj.root().join("test").join("Counter.t.sol").exists());
assert!(prj.root().join("script").exists());
assert!(prj.root().join("script").join("Counter.s.sol").exists());
assert!(prj.root().join(".github").join("workflows").exists());
assert!(prj.root().join(".github").join("workflows").join("test.yml").exists());
});
// checks that `forge init --vyper` works.
forgetest!(can_init_vyper_project, |prj, cmd| {
prj.wipe();
cmd.args(["init", "--vyper"]).arg(prj.root()).assert_success().stdout_eq(str![[r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
"#]]);
assert!(prj.root().join("foundry.toml").exists());
assert!(prj.root().join("lib/forge-std").exists());
assert!(prj.root().join("src").exists());
assert!(prj.root().join("src").join("Counter.vy").exists());
assert!(prj.root().join("src").join("ICounter.sol").exists());
assert!(prj.root().join("test").exists());
assert!(prj.root().join("test").join("Counter.t.sol").exists());
assert!(prj.root().join("script").exists());
assert!(prj.root().join("script").join("Counter.s.sol").exists());
assert!(prj.root().join(".github").join("workflows").exists());
assert!(prj.root().join(".github").join("workflows").join("test.yml").exists());
});
// checks that `forge init --network tempo` works.
forgetest!(can_init_tempo_project, |prj, cmd| {
prj.wipe();
cmd.args(["init", "--network", "tempo"]).arg(prj.root()).assert_success().stdout_eq(str![[
r#"
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Installing tempo-std in [..] (url: https://github.com/tempoxyz/tempo-std, tag: None)
Installed tempo-std[..]
Initialized forge project
"#
]]);
assert!(prj.root().join("foundry.toml").exists());
assert!(prj.root().join("lib/forge-std").exists());
assert!(prj.root().join("lib/tempo-std").exists());
assert!(prj.root().join("src").exists());
assert!(prj.root().join("src").join("Mail.sol").exists());
assert!(prj.root().join("test").exists());
assert!(prj.root().join("test").join("Mail.t.sol").exists());
assert!(prj.root().join("script").exists());
assert!(prj.root().join("script").join("Mail.s.sol").exists());
assert!(prj.root().join(".github").join("workflows").exists());
assert!(prj.root().join(".github").join("workflows").join("test.yml").exists());
assert!(prj.root().join("README.md").exists());
});
// checks that clone works with raw src containing `node_modules`
// <https://github.com/foundry-rs/foundry/issues/10115>
forgetest!(can_clone_with_node_modules, |prj, cmd| {
prj.wipe();
let foundry_toml = prj.root().join(Config::FILE_NAME);
assert!(!foundry_toml.exists());
cmd.args([
"clone",
"--etherscan-api-key",
next_etherscan_api_key().as_str(),
"0xA3E217869460bEf59A1CfD0637e2875F9331e823",
])
.arg(prj.root())
.assert_success()
.stdout_eq(str![[r#"
Downloading the source code of 0xA3E217869460bEf59A1CfD0637e2875F9331e823 from Etherscan...
Initializing [..]...
Installing forge-std in [..] (url: https://github.com/foundry-rs/forge-std, tag: None)
Installed forge-std[..]
Initialized forge project
Collecting the creation information of 0xA3E217869460bEf59A1CfD0637e2875F9331e823 from Etherscan...
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
});
// checks that `clean` removes dapptools style paths
forgetest!(can_clean, |prj, cmd| {
prj.assert_create_dirs_exists();
prj.assert_style_paths_exist(PathStyle::Dapptools);
cmd.arg("clean");
cmd.assert_empty_stdout();
prj.assert_cleaned();
});
// checks that `clean` removes hardhat style paths
forgetest!(can_clean_hardhat, PathStyle::HardHat, |prj, cmd| {
prj.assert_create_dirs_exists();
prj.assert_style_paths_exist(PathStyle::HardHat);
cmd.arg("clean");
cmd.assert_empty_stdout();
prj.assert_cleaned();
});
// checks that `clean` also works with the "out" value set in Config
forgetest_init!(can_clean_config, |prj, cmd| {
prj.initialize_default_contracts();
prj.update_config(|config| config.out = "custom-out".into());
cmd.arg("build").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
"#]]);
// default test contract is written in custom out directory
let artifact = prj.root().join(format!("custom-out/{TEMPLATE_TEST_CONTRACT_ARTIFACT_JSON}"));
assert!(artifact.exists());
cmd.forge_fuse().arg("clean").assert_empty_stdout();
assert!(!artifact.exists());
});
// checks that `clean` removes fuzz and invariant cache dirs
forgetest_init!(can_clean_test_cache, |prj, cmd| {
prj.initialize_default_contracts();
prj.update_config(|config| {
config.fuzz = FuzzConfig::new("cache/fuzz".into());
config.invariant = InvariantConfig::new("cache/invariant".into());
});
// default test contract is written in custom out directory
let fuzz_cache_dir = prj.root().join("cache/fuzz");
let _ = fs::create_dir(fuzz_cache_dir.clone());
let invariant_cache_dir = prj.root().join("cache/invariant");
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/create.rs | crates/forge/tests/cli/create.rs | //! Contains various tests for checking the `forge create` subcommand
use crate::{
constants::*,
utils::{self, EnvExternalities},
};
use alloy_primitives::{Address, hex};
use anvil::{NodeConfig, spawn};
use foundry_compilers::artifacts::{BytecodeHash, remappings::Remapping};
use foundry_test_utils::{
forgetest, forgetest_async,
snapbox::IntoData,
str,
util::{OutputExt, TestCommand, TestProject},
};
use std::str::FromStr;
/// This will insert _dummy_ contract that uses a library
///
/// **NOTE** This is intended to be linked against a random address and won't actually work. The
/// purpose of this is _only_ to make sure we can deploy contracts linked against addresses.
///
/// This will create a library `remapping/MyLib.sol:MyLib`
///
/// returns the contract argument for the create command
fn setup_with_simple_remapping(prj: &TestProject) -> String {
// explicitly set remapping and libraries
prj.update_config(|config| {
config.remappings = vec![Remapping::from_str("remapping/=lib/remapping/").unwrap().into()];
config.libraries = vec![format!("remapping/MyLib.sol:MyLib:{:?}", Address::random())];
});
prj.add_source(
"LinkTest",
r#"
import "remapping/MyLib.sol";
contract LinkTest {
function foo() public returns (uint256) {
return MyLib.foobar(1);
}
}
"#,
);
prj.add_lib(
"remapping/MyLib",
r"
library MyLib {
function foobar(uint256 a) public view returns (uint256) {
return a * 100;
}
}
",
);
"src/LinkTest.sol:LinkTest".to_string()
}
fn setup_oracle(prj: &TestProject) -> String {
prj.update_config(|c| {
c.libraries = vec![format!(
"./src/libraries/ChainlinkTWAP.sol:ChainlinkTWAP:{:?}",
Address::random()
)];
});
prj.add_source(
"Contract",
r#"
import {ChainlinkTWAP} from "./libraries/ChainlinkTWAP.sol";
contract Contract {
function getPrice() public view returns (int latest) {
latest = ChainlinkTWAP.getLatestPrice(0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE);
}
}
"#,
);
prj.add_source(
"libraries/ChainlinkTWAP",
r"
library ChainlinkTWAP {
function getLatestPrice(address base) public view returns (int256) {
return 0;
}
}
",
);
"src/Contract.sol:Contract".to_string()
}
/// configures the `TestProject` with the given closure and calls the `forge create` command
fn create_on_chain<F>(info: Option<EnvExternalities>, prj: TestProject, mut cmd: TestCommand, f: F)
where
F: FnOnce(&TestProject) -> String,
{
if let Some(info) = info {
let contract_path = f(&prj);
let output = cmd
.arg("create")
.args(info.create_args())
.arg(contract_path)
.assert_success()
.get_output()
.stdout_lossy();
let _address = utils::parse_deployed_address(output.as_str())
.unwrap_or_else(|| panic!("Failed to parse deployer {output}"));
}
}
// tests `forge` create on goerli if correct env vars are set
forgetest!(can_create_simple_on_goerli, |prj, cmd| {
create_on_chain(EnvExternalities::goerli(), prj, cmd, setup_with_simple_remapping);
});
// tests `forge` create on goerli if correct env vars are set
forgetest!(can_create_oracle_on_goerli, |prj, cmd| {
create_on_chain(EnvExternalities::goerli(), prj, cmd, setup_oracle);
});
// tests `forge` create on amoy if correct env vars are set
forgetest!(can_create_oracle_on_amoy, |prj, cmd| {
create_on_chain(EnvExternalities::amoy(), prj, cmd, setup_oracle);
});
// tests that we can deploy the template contract
forgetest_async!(can_create_template_contract, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
prj.initialize_default_contracts();
let (_api, handle) = spawn(NodeConfig::test()).await;
let rpc = handle.http_endpoint();
let wallet = handle.dev_wallets().next().unwrap();
let pk = hex::encode(wallet.credential().to_bytes());
// explicitly byte code hash for consistent checks
prj.update_config(|c| c.bytecode_hash = BytecodeHash::None);
// Dry-run without the `--broadcast` flag
cmd.forge_fuse().args([
"create",
format!("./src/{TEMPLATE_CONTRACT}.sol:{TEMPLATE_CONTRACT}").as_str(),
"--rpc-url",
rpc.as_str(),
"--private-key",
pk.as_str(),
]);
// Dry-run
cmd.assert().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Contract: Counter
Transaction: {
"from": "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
"to": null,
"maxFeePerGas": "0x77359401",
"maxPriorityFeePerGas": "0x1",
"gas": "0x241e7",
"input": "[..]",
"nonce": "0x0",
"chainId": "0x7a69"
}
ABI: [
{
"type": "function",
"name": "increment",
"inputs": [],
"outputs": [],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "number",
"inputs": [],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "setNumber",
"inputs": [
{
"name": "newNumber",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [],
"stateMutability": "nonpayable"
}
]
"#]]);
// Dry-run with `--json` flag
cmd.arg("--json").assert().stdout_eq(
str![[r#"
{
"contract": "Counter",
"transaction": {
"from": "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
"to": null,
"maxFeePerGas": "0x77359401",
"maxPriorityFeePerGas": "0x1",
"gas": "0x241e7",
"input": "[..]",
"nonce": "0x0",
"chainId": "0x7a69"
},
"abi": [
{
"type": "function",
"name": "increment",
"inputs": [],
"outputs": [],
"stateMutability": "nonpayable"
},
{
"type": "function",
"name": "number",
"inputs": [],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "setNumber",
"inputs": [
{
"name": "newNumber",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [],
"stateMutability": "nonpayable"
}
]
}
"#]]
.is_json(),
);
cmd.forge_fuse().args([
"create",
format!("./src/{TEMPLATE_CONTRACT}.sol:{TEMPLATE_CONTRACT}").as_str(),
"--rpc-url",
rpc.as_str(),
"--private-key",
pk.as_str(),
"--broadcast",
]);
cmd.assert().stdout_eq(str![[r#"
No files changed, compilation skipped
Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3
[TX_HASH]
"#]]);
});
// tests that we can deploy the template contract
forgetest_async!(can_create_using_unlocked, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
prj.initialize_default_contracts();
let (_api, handle) = spawn(NodeConfig::test()).await;
let rpc = handle.http_endpoint();
let dev = handle.dev_accounts().next().unwrap();
// explicitly byte code hash for consistent checks
prj.update_config(|c| c.bytecode_hash = BytecodeHash::None);
cmd.forge_fuse().args([
"create",
format!("./src/{TEMPLATE_CONTRACT}.sol:{TEMPLATE_CONTRACT}").as_str(),
"--rpc-url",
rpc.as_str(),
"--from",
format!("{dev:?}").as_str(),
"--unlocked",
"--broadcast",
]);
cmd.assert().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3
[TX_HASH]
"#]]);
cmd.assert().stdout_eq(str![[r#"
No files changed, compilation skipped
Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Deployed to: 0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512
[TX_HASH]
"#]]);
});
// tests that we can deploy with constructor args
forgetest_async!(can_create_with_constructor_args, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
let (_api, handle) = spawn(NodeConfig::test()).await;
let rpc = handle.http_endpoint();
let wallet = handle.dev_wallets().next().unwrap();
let pk = hex::encode(wallet.credential().to_bytes());
// explicitly byte code hash for consistent checks
prj.update_config(|c| c.bytecode_hash = BytecodeHash::None);
prj.add_source(
"ConstructorContract",
r#"
contract ConstructorContract {
string public name;
constructor(string memory _name) {
name = _name;
}
}
"#,
);
cmd.forge_fuse()
.args([
"create",
"./src/ConstructorContract.sol:ConstructorContract",
"--rpc-url",
rpc.as_str(),
"--private-key",
pk.as_str(),
"--broadcast",
"--constructor-args",
"My Constructor",
])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3
[TX_HASH]
"#]]);
prj.add_source(
"TupleArrayConstructorContract",
r#"
struct Point {
uint256 x;
uint256 y;
}
contract TupleArrayConstructorContract {
constructor(Point[] memory _points) {}
}
"#,
);
cmd.forge_fuse()
.args([
"create",
"./src/TupleArrayConstructorContract.sol:TupleArrayConstructorContract",
"--rpc-url",
rpc.as_str(),
"--private-key",
pk.as_str(),
"--broadcast",
"--constructor-args",
"[(1,2), (2,3), (3,4)]",
])
.assert()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Deployed to: 0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512
[TX_HASH]
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/6332>
forgetest_async!(can_create_and_call, |prj, cmd| {
foundry_test_utils::util::initialize(prj.root());
let (_api, handle) = spawn(NodeConfig::test()).await;
let rpc = handle.http_endpoint();
let wallet = handle.dev_wallets().next().unwrap();
let pk = hex::encode(wallet.credential().to_bytes());
// explicitly byte code hash for consistent checks
prj.update_config(|c| c.bytecode_hash = BytecodeHash::None);
prj.add_source(
"UniswapV2Swap",
r#"
contract UniswapV2Swap {
function pairInfo() public view returns (uint reserveA, uint reserveB, uint totalSupply) {
(reserveA, reserveB, totalSupply) = (0,0,0);
}
}
"#,
);
cmd.forge_fuse()
.args([
"create",
"./src/UniswapV2Swap.sol:UniswapV2Swap",
"--rpc-url",
rpc.as_str(),
"--private-key",
pk.as_str(),
"--broadcast",
])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
Warning (2018): Function state mutability can be restricted to pure
[FILE]:6:5:
|
6 | function pairInfo() public view returns (uint reserveA, uint reserveB, uint totalSupply) {
| ^ (Relevant source part starts here and spans across multiple lines).
Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3
[TX_HASH]
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/10156>
forgetest_async!(should_err_if_no_bytecode, |prj, cmd| {
let (_api, handle) = spawn(NodeConfig::test()).await;
let rpc = handle.http_endpoint();
prj.add_source(
"AbstractCounter.sol",
r#"
abstract contract AbstractCounter {
uint256 public number;
function setNumberV1(uint256 newNumber) public {
number = newNumber;
}
function incrementV1() public {
number++;
}
}
"#,
);
cmd.args([
"create",
"./src/AbstractCounter.sol:AbstractCounter",
"--rpc-url",
rpc.as_str(),
"--private-key",
"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80",
"--broadcast",
])
.assert_failure()
.stderr_eq(str![[r#"
Error: no bytecode found in bin object for AbstractCounter
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/fmt.rs | crates/forge/tests/cli/fmt.rs | //! Integration tests for `forge fmt` command
use foundry_test_utils::{forgetest, forgetest_init};
const UNFORMATTED: &str = r#"// SPDX-License-Identifier: MIT
pragma solidity =0.8.30 ;
contract Test {
uint256 public value ;
function setValue ( uint256 _value ) public {
value = _value ;
}
}"#;
const FORMATTED: &str = r#"// SPDX-License-Identifier: MIT
pragma solidity =0.8.30;
contract Test {
uint256 public value;
function setValue(uint256 _value) public {
value = _value;
}
}
"#;
forgetest_init!(fmt_exclude_libs_in_recursion, |prj, cmd| {
prj.update_config(|config| config.fmt.ignore = vec!["src/ignore/".to_string()]);
prj.add_lib("SomeLib.sol", UNFORMATTED);
prj.add_raw_source("ignore/IgnoredContract.sol", UNFORMATTED);
cmd.args(["fmt", ".", "--check"]);
cmd.assert_success();
cmd.forge_fuse().args(["fmt", "lib/SomeLib.sol", "--check"]);
cmd.assert_failure();
});
// Test that fmt can format a simple contract file
forgetest_init!(fmt_file, |prj, cmd| {
prj.add_raw_source("FmtTest.sol", UNFORMATTED);
cmd.arg("fmt").arg("src/FmtTest.sol");
cmd.assert_success().stdout_eq(str![[r#"
Formatted [..]/src/FmtTest.sol
"#]]);
assert_data_eq!(
std::fs::read_to_string(prj.root().join("src/FmtTest.sol")).unwrap(),
FORMATTED,
);
});
// Test that fmt can format from stdin
forgetest!(fmt_stdin, |_prj, cmd| {
cmd.args(["fmt", "-", "--raw"]);
cmd.stdin(UNFORMATTED.as_bytes());
cmd.assert_success().stdout_eq(FORMATTED);
// stdin with `--raw` returns formatted code
cmd.stdin(FORMATTED.as_bytes());
cmd.assert_success().stdout_eq(FORMATTED);
// stdin with `--check` and without `--raw`returns diff
cmd.forge_fuse().args(["fmt", "-", "--check"]);
cmd.assert_success().stdout_eq("");
});
forgetest_init!(fmt_check_mode, |prj, cmd| {
// Run fmt --check on a well-formatted file
prj.add_raw_source("Test.sol", FORMATTED);
cmd.arg("fmt").arg("--check").arg("src/Test.sol");
cmd.assert_success().stderr_eq("").stdout_eq("");
// Run fmt --check on a mal-formatted file
prj.add_raw_source("Test2.sol", UNFORMATTED);
cmd.forge_fuse().arg("fmt").arg("--check").arg("src/Test2.sol");
cmd.assert_failure();
});
forgetest!(fmt_check_mode_stdin, |_prj, cmd| {
// Run fmt --check with well-formatted stdin input
cmd.arg("fmt").arg("-").arg("--check");
cmd.stdin(FORMATTED.as_bytes());
cmd.assert_success().stderr_eq("").stdout_eq("");
// Run fmt --check with mal-formatted stdin input
cmd.stdin(UNFORMATTED.as_bytes());
cmd.assert_failure().stderr_eq("").stdout_eq(str![[r#"
Diff in stdin:
1 1 | // SPDX-License-Identifier: MIT
2 |-pragma solidity =0.8.30 ;
2 |+pragma solidity =0.8.30;
...
4 |-contract Test {
5 |- uint256 public value ;
6 |- function setValue ( uint256 _value ) public {
7 |- value = _value ;
4 |+contract Test {
5 |+ uint256 public value;
...
7 |+ function setValue(uint256 _value) public {
8 |+ value = _value;
8 9 | }
9 |-}
10 |+}
"#]]);
});
// Test that original is returned if read from stdin and no diff.
// <https://github.com/foundry-rs/foundry/issues/11871>
forgetest!(fmt_stdin_original, |_prj, cmd| {
cmd.args(["fmt", "-", "--raw"]);
cmd.stdin(FORMATTED.as_bytes());
cmd.assert_success().stdout_eq(FORMATTED.as_bytes());
});
// Test that fmt can format a simple contract file
forgetest_init!(fmt_file_config_parms_first, |prj, cmd| {
prj.create_file(
"foundry.toml",
r#"
[fmt]
multiline_func_header = 'params_first'
"#,
);
prj.add_raw_source("FmtTest.sol", FORMATTED);
cmd.forge_fuse().args(["fmt", "--check"]).arg("src/FmtTest.sol");
cmd.assert_failure().stdout_eq(str![[r#"
Diff in src/FmtTest.sol:
...
7 |- function setValue(uint256 _value) public {
7 |+ function setValue(
8 |+ uint256 _value
9 |+ ) public {
...
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/12000
forgetest_init!(fmt_only_cmnts_file, |prj, cmd| {
// Only line breaks
prj.add_raw_source("FmtTest.sol", "\n\n");
cmd.forge_fuse().args(["fmt", "src/FmtTest.sol"]);
cmd.assert_success();
assert_data_eq!(std::fs::read_to_string(prj.root().join("src/FmtTest.sol")).unwrap(), "",);
cmd.forge_fuse().args(["fmt", "--check", "src/FmtTest.sol"]);
cmd.assert_success();
// Only cmnts
prj.add_raw_source("FmtTest.sol", "\n\n// this is a cmnt");
cmd.forge_fuse().args(["fmt", "src/FmtTest.sol"]);
cmd.assert_success();
assert_data_eq!(
std::fs::read_to_string(prj.root().join("src/FmtTest.sol")).unwrap(),
"// this is a cmnt\n",
);
cmd.forge_fuse().args(["fmt", "--check", "src/FmtTest.sol"]);
cmd.assert_success();
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/multi_script.rs | crates/forge/tests/cli/multi_script.rs | //! Contains various tests related to forge script
use anvil::{NodeConfig, spawn};
use foundry_test_utils::{ScriptOutcome, ScriptTester};
forgetest_async!(can_deploy_multi_chain_script_without_lib, |prj, cmd| {
let (api1, handle1) = spawn(NodeConfig::test()).await;
let (api2, handle2) = spawn(NodeConfig::test()).await;
let mut tester = ScriptTester::new_broadcast_without_endpoint(cmd, prj.root());
tester
.load_private_keys(&[0, 1])
.await
.add_sig("MultiChainBroadcastNoLink", "deploy(string memory,string memory)")
.args(&[&handle1.http_endpoint(), &handle2.http_endpoint()])
.broadcast(ScriptOutcome::OkBroadcast);
assert_eq!(api1.transaction_count(tester.accounts_pub[0], None).await.unwrap().to::<u32>(), 1);
assert_eq!(api1.transaction_count(tester.accounts_pub[1], None).await.unwrap().to::<u32>(), 1);
assert_eq!(api2.transaction_count(tester.accounts_pub[0], None).await.unwrap().to::<u32>(), 2);
assert_eq!(api2.transaction_count(tester.accounts_pub[1], None).await.unwrap().to::<u32>(), 3);
});
forgetest_async!(can_not_deploy_multi_chain_script_with_lib, |prj, cmd| {
let (_, handle1) = spawn(NodeConfig::test()).await;
let (_, handle2) = spawn(NodeConfig::test()).await;
let mut tester = ScriptTester::new_broadcast_without_endpoint(cmd, prj.root());
tester
.load_private_keys(&[0, 1])
.await
.add_deployer(0)
.add_sig("MultiChainBroadcastLink", "deploy(string memory,string memory)")
.args(&[&handle1.http_endpoint(), &handle2.http_endpoint()])
.broadcast(ScriptOutcome::UnsupportedLibraries);
});
forgetest_async!(can_not_change_fork_during_broadcast, |prj, cmd| {
let (_, handle1) = spawn(NodeConfig::test()).await;
let (_, handle2) = spawn(NodeConfig::test()).await;
let mut tester = ScriptTester::new_broadcast_without_endpoint(cmd, prj.root());
tester
.load_private_keys(&[0, 1])
.await
.add_deployer(0)
.add_sig("MultiChainBroadcastNoLink", "deployError(string memory,string memory)")
.args(&[&handle1.http_endpoint(), &handle2.http_endpoint()])
.broadcast(ScriptOutcome::ErrorSelectForkOnBroadcast);
});
forgetest_async!(can_resume_multi_chain_script, |prj, cmd| {
let (_, handle1) = spawn(NodeConfig::test()).await;
let (_, handle2) = spawn(NodeConfig::test()).await;
let mut tester = ScriptTester::new_broadcast_without_endpoint(cmd, prj.root());
tester
.add_sig("MultiChainBroadcastNoLink", "deploy(string memory,string memory)")
.args(&[&handle1.http_endpoint(), &handle2.http_endpoint()])
.broadcast(ScriptOutcome::MissingWallet)
.load_private_keys(&[0, 1])
.await
.arg("--multi")
.resume(ScriptOutcome::OkBroadcast);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/precompiles.rs | crates/forge/tests/cli/precompiles.rs | //! Contains various tests for `forge test` with precompiles.
use foundry_evm_networks::NetworkConfigs;
use foundry_test_utils::str;
// tests transfer using celo precompile.
// <https://github.com/foundry-rs/foundry/issues/11622>
forgetest_init!(celo_transfer, |prj, cmd| {
prj.update_config(|config| {
config.networks = NetworkConfigs::with_celo();
});
prj.add_test(
"CeloTransfer.t.sol",
r#"
import "forge-std/Test.sol";
interface IERC20 {
function balanceOf(address account) external view returns (uint256);
function transfer(address to, uint256 amount) external returns (bool);
}
contract CeloTransferTest is Test {
IERC20 celo = IERC20(0x471EcE3750Da237f93B8E339c536989b8978a438);
IERC20 usdc = IERC20(0xcebA9300f2b948710d2653dD7B07f33A8B32118C);
IERC20 usdt = IERC20(0x48065fbBE25f71C9282ddf5e1cD6D6A887483D5e);
address binanceAccount = 0xf6436829Cf96EA0f8BC49d300c536FCC4f84C4ED;
address recipient = makeAddr("recipient");
function setUp() public {
vm.createSelectFork("https://forno.celo.org");
}
function testCeloBalance() external {
console2.log("recipient balance before", celo.balanceOf(recipient));
vm.prank(binanceAccount);
celo.transfer(recipient, 100);
console2.log("recipient balance after", celo.balanceOf(recipient));
assertEq(celo.balanceOf(recipient), 100);
}
}
"#,
);
cmd.args(["test", "--mt", "testCeloBalance", "-vvv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/CeloTransfer.t.sol:CeloTransferTest
[PASS] testCeloBalance() ([GAS])
Logs:
recipient balance before 0
recipient balance after 100
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/compiler.rs | crates/forge/tests/cli/compiler.rs | //! Tests for the `forge compiler` command.
use foundry_test_utils::snapbox::IntoData;
const CONTRACT_A: &str = r#"
// SPDX-license-identifier: MIT
pragma solidity 0.8.4;
contract ContractA {}
"#;
const CONTRACT_B: &str = r#"
// SPDX-license-identifier: MIT
pragma solidity 0.8.11;
contract ContractB {}
"#;
const CONTRACT_C: &str = r#"
// SPDX-license-identifier: MIT
pragma solidity 0.8.30;
contract ContractC {}
"#;
const CONTRACT_D: &str = r#"
// SPDX-license-identifier: MIT
pragma solidity 0.8.30;
contract ContractD {}
"#;
const VYPER_INTERFACE: &str = r#"
# pragma version >=0.4.0
@external
@view
def number() -> uint256:
return empty(uint256)
@external
def set_number(new_number: uint256):
pass
@external
def increment() -> uint256:
return empty(uint256)
"#;
const VYPER_CONTRACT: &str = r#"
import ICounter
implements: ICounter
number: public(uint256)
@external
def set_number(new_number: uint256):
self.number = new_number
@external
def increment() -> uint256:
self.number += 1
return self.number
"#;
forgetest!(can_resolve_path, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
cmd.args(["compiler", "resolve", "--root", prj.root().to_str().unwrap()])
.assert_success()
.stdout_eq(str![[r#"
Solidity:
- 0.8.4
"#]]);
});
forgetest!(can_list_resolved_compiler_versions, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
cmd.args(["compiler", "resolve"]).assert_success().stdout_eq(str![[r#"
Solidity:
- 0.8.4
"#]]);
});
forgetest!(can_list_resolved_compiler_versions_json, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
cmd.args(["compiler", "resolve", "--json"]).assert_success().stdout_eq(
str![[r#"
{
"Solidity":[
{
"version":"0.8.4"
}
]
}
"#]]
.is_json(),
);
});
forgetest!(can_list_resolved_compiler_versions_verbose, |prj, cmd| {
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
cmd.args(["compiler", "resolve", "-v"]).assert_success().stdout_eq(str![[r#"
Solidity:
0.8.30:
├── src/ContractC.sol
└── src/ContractD.sol
"#]]);
});
forgetest!(can_list_resolved_compiler_versions_verbose_json, |prj, cmd| {
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
cmd.args(["compiler", "resolve", "--json", "-v"]).assert_success().stdout_eq(
str![[r#"
{
"Solidity": [
{
"version": "0.8.30",
"paths": [
"src/ContractC.sol",
"src/ContractD.sol"
]
}
]
}
"#]]
.is_json(),
);
});
forgetest!(can_list_resolved_multiple_compiler_versions, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
prj.add_source("ContractB", CONTRACT_B);
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
prj.add_raw_source("ICounter.vyi", VYPER_INTERFACE);
prj.add_raw_source("Counter.vy", VYPER_CONTRACT);
cmd.args(["compiler", "resolve"]).assert_success().stdout_eq(str![[r#"
Solidity:
- 0.8.4
- 0.8.11
- 0.8.30
Vyper:
- 0.4.3
"#]]);
});
forgetest!(can_list_resolved_multiple_compiler_versions_skipped, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
prj.add_source("ContractB", CONTRACT_B);
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
prj.add_raw_source("ICounter.vyi", VYPER_INTERFACE);
prj.add_raw_source("Counter.vy", VYPER_CONTRACT);
cmd.args(["compiler", "resolve", "--skip", ".sol", "-v"]).assert_success().stdout_eq(str![[
r#"
Vyper:
0.4.3:
├── src/Counter.vy
└── src/ICounter.vyi
"#
]]);
});
forgetest!(can_list_resolved_multiple_compiler_versions_skipped_json, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
prj.add_source("ContractB", CONTRACT_B);
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
prj.add_raw_source("ICounter.vyi", VYPER_INTERFACE);
prj.add_raw_source("Counter.vy", VYPER_CONTRACT);
cmd.args(["compiler", "resolve", "--skip", "Contract(A|B|C)", "--json", "-v"])
.assert_success()
.stdout_eq(
str![[r#"
{
"Solidity": [
{
"version": "0.8.30",
"paths": [
"src/ContractD.sol"
]
}
],
"Vyper": [
{
"version": "0.4.3",
"paths": [
"src/Counter.vy",
"src/ICounter.vyi"
]
}
]
}
"#]]
.is_json(),
);
});
forgetest!(can_list_resolved_multiple_compiler_versions_verbose, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
prj.add_source("ContractB", CONTRACT_B);
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
prj.add_raw_source("ICounter.vyi", VYPER_INTERFACE);
prj.add_raw_source("Counter.vy", VYPER_CONTRACT);
cmd.args(["compiler", "resolve", "-vv"]).assert_success().stdout_eq(str![[r#"
Solidity:
0.8.4 (<= istanbul):
└── src/ContractA.sol
0.8.11 (<= london):
└── src/ContractB.sol
0.8.30 (<= prague):
├── src/ContractC.sol
└── src/ContractD.sol
Vyper:
0.4.3 (<= prague):
├── src/Counter.vy
└── src/ICounter.vyi
"#]]);
});
forgetest!(can_list_resolved_multiple_compiler_versions_verbose_json, |prj, cmd| {
prj.add_source("ContractA", CONTRACT_A);
prj.add_source("ContractB", CONTRACT_B);
prj.add_source("ContractC", CONTRACT_C);
prj.add_source("ContractD", CONTRACT_D);
prj.add_raw_source("ICounter.vyi", VYPER_INTERFACE);
prj.add_raw_source("Counter.vy", VYPER_CONTRACT);
cmd.args(["compiler", "resolve", "--json", "-vv"]).assert_success().stdout_eq(
str![[r#"
{
"Solidity": [
{
"version": "0.8.4",
"evm_version": "Istanbul",
"paths": [
"src/ContractA.sol"
]
},
{
"version": "0.8.11",
"evm_version": "London",
"paths": [
"src/ContractB.sol"
]
},
{
"version": "0.8.30",
"evm_version": "[..]",
"paths": [
"src/ContractC.sol",
"src/ContractD.sol"
]
}
],
"Vyper": [
{
"version": "0.4.3",
"evm_version": "[..]",
"paths": [
"src/Counter.vy",
"src/ICounter.vyi"
]
}
]
}
"#]]
.is_json(),
);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/lint/geiger.rs | crates/forge/tests/cli/lint/geiger.rs | forgetest_init!(call, |prj, cmd| {
prj.add_test(
"call.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract A is Test {
function do_ffi() public {
string[] memory inputs = new string[](1);
vm.ffi(inputs);
}
}
"#,
);
cmd.arg("geiger").assert_failure().stderr_eq(str![[r#"
...
note[unsafe-cheatcode]: usage of unsafe cheatcodes that can perform dangerous operations
[FILE]:9:20
|
9 | vm.ffi(inputs);
| ^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#unsafe-cheatcode
Error: aborting due to 1 linter note(s)
...
"#]]);
});
forgetest_init!(assignment, |prj, cmd| {
prj.add_test(
"assignment.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract A is Test {
function do_ffi() public returns (bytes memory) {
string[] memory inputs = new string[](1);
bytes memory stuff = vm.ffi(inputs);
return stuff;
}
}
"#,
);
cmd.arg("geiger").assert_failure().stderr_eq(str![[r#"
...
note[unsafe-cheatcode]: usage of unsafe cheatcodes that can perform dangerous operations
[FILE]:9:41
|
9 | bytes memory stuff = vm.ffi(inputs);
| ^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#unsafe-cheatcode
Error: aborting due to 1 linter note(s)
...
"#]]);
});
forgetest_init!(exit_code, |prj, cmd| {
prj.add_test(
"multiple.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract A is Test {
function do_ffi() public {
string[] memory inputs = new string[](1);
vm.ffi(inputs);
vm.ffi(inputs);
vm.ffi(inputs);
}
}
"#,
);
cmd.arg("geiger").assert_failure().stderr_eq(str![[r#"
...
note[unsafe-cheatcode]: usage of unsafe cheatcodes that can perform dangerous operations
[FILE]:9:20
|
9 | vm.ffi(inputs);
| ^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#unsafe-cheatcode
note[unsafe-cheatcode]: usage of unsafe cheatcodes that can perform dangerous operations
[FILE]:10:20
|
10 | vm.ffi(inputs);
| ^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#unsafe-cheatcode
note[unsafe-cheatcode]: usage of unsafe cheatcodes that can perform dangerous operations
[FILE]:11:20
|
11 | vm.ffi(inputs);
| ^^^
|
= help: https://book.getfoundry.sh/reference/forge/forge-lint#unsafe-cheatcode
Error: aborting due to 3 linter note(s)
...
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/fuzz.rs | crates/forge/tests/cli/test_cmd/fuzz.rs | use alloy_primitives::U256;
use foundry_test_utils::{TestCommand, forgetest_init, str};
use regex::Regex;
forgetest_init!(test_can_scrape_bytecode, |prj, cmd| {
prj.update_config(|config| config.optimizer = Some(true));
prj.add_source(
"FuzzerDict.sol",
r#"
// https://github.com/foundry-rs/foundry/issues/1168
contract FuzzerDict {
// Immutables should get added to the dictionary.
address public immutable immutableOwner;
// Regular storage variables should also get added to the dictionary.
address public storageOwner;
constructor(address _immutableOwner, address _storageOwner) {
immutableOwner = _immutableOwner;
storageOwner = _storageOwner;
}
}
"#,
);
prj.add_test(
"FuzzerDictTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import "src/FuzzerDict.sol";
contract FuzzerDictTest is Test {
FuzzerDict fuzzerDict;
function setUp() public {
fuzzerDict = new FuzzerDict(address(100), address(200));
}
/// forge-config: default.fuzz.runs = 2000
function testImmutableOwner(address who) public {
assertTrue(who != fuzzerDict.immutableOwner());
}
/// forge-config: default.fuzz.runs = 2000
function testStorageOwner(address who) public {
assertTrue(who != fuzzerDict.storageOwner());
}
}
"#,
);
// Test that immutable address is used as fuzzed input, causing test to fail.
cmd.args(["test", "--fuzz-seed", "119", "--mt", "testImmutableOwner"]).assert_failure();
// Test that storage address is used as fuzzed input, causing test to fail.
cmd.forge_fuse()
.args(["test", "--fuzz-seed", "119", "--mt", "testStorageOwner"])
.assert_failure();
});
// tests that inline max-test-rejects config is properly applied
forgetest_init!(test_inline_max_test_rejects, |prj, cmd| {
prj.add_test(
"Contract.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract InlineMaxRejectsTest is Test {
/// forge-config: default.fuzz.max-test-rejects = 1
function test_fuzz_bound(uint256 a) public {
vm.assume(false);
}
}
"#,
);
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: `vm.assume` rejected too many inputs (1 allowed)] test_fuzz_bound(uint256) (runs: 0, [AVG_GAS])
...
"#]]);
});
// Tests that test timeout config is properly applied.
// If test doesn't timeout after one second, then test will fail with `rejected too many inputs`.
forgetest_init!(test_fuzz_timeout, |prj, cmd| {
prj.add_test(
"Contract.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract FuzzTimeoutTest is Test {
/// forge-config: default.fuzz.max-test-rejects = 50000
/// forge-config: default.fuzz.timeout = 1
function test_fuzz_bound(uint256 a) public pure {
vm.assume(a == 0);
}
}
"#,
);
cmd.args(["test"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Contract.t.sol:FuzzTimeoutTest
[PASS] test_fuzz_bound(uint256) (runs: [..], [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
forgetest_init!(test_fuzz_fail_on_revert, |prj, cmd| {
prj.update_config(|config| {
config.fuzz.fail_on_revert = false;
config.fuzz.seed = Some(U256::from(100u32));
});
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
require(number > 10000000000, "low number");
number = newNumber;
}
}
"#,
);
prj.add_test(
"CounterTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import "src/Counter.sol";
contract CounterTest is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
}
function testFuzz_SetNumberRequire(uint256 x) public {
counter.setNumber(x);
require(counter.number() == 1);
}
function testFuzz_SetNumberAssert(uint256 x) public {
counter.setNumber(x);
assertEq(counter.number(), 1);
}
}
"#,
);
// Tests should not fail as revert happens in Counter contract.
cmd.args(["test", "--mc", "CounterTest"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 2 tests for test/CounterTest.t.sol:CounterTest
[PASS] testFuzz_SetNumberAssert(uint256) (runs: 256, [AVG_GAS])
[PASS] testFuzz_SetNumberRequire(uint256) (runs: 256, [AVG_GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
// Tested contract does not revert.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 newNumber) public {
number = newNumber;
}
}
"#,
);
// Tests should fail as revert happens in cheatcode (assert) and test (require) contract.
cmd.assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 2 tests for test/CounterTest.t.sol:CounterTest
[FAIL: assertion failed: [..]] testFuzz_SetNumberAssert(uint256) (runs: 0, [AVG_GAS])
[FAIL: EvmError: Revert; [..]] testFuzz_SetNumberRequire(uint256) (runs: 0, [AVG_GAS])
Suite result: FAILED. 0 passed; 2 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
// Test 256 runs regardless number of test rejects.
// <https://github.com/foundry-rs/foundry/issues/9054>
forgetest_init!(test_fuzz_runs_with_rejects, |prj, cmd| {
prj.add_test(
"FuzzWithRejectsTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract FuzzWithRejectsTest is Test {
function testFuzzWithRejects(uint256 x) public pure {
vm.assume(x < 1_000_000);
}
}
"#,
);
// Tests should not fail as revert happens in Counter contract.
cmd.args(["test", "--mc", "FuzzWithRejectsTest"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/FuzzWithRejectsTest.t.sol:FuzzWithRejectsTest
[PASS] testFuzzWithRejects(uint256) (runs: 256, [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// Test that counterexample is not replayed if test changes.
// <https://github.com/foundry-rs/foundry/issues/11927>
forgetest_init!(test_fuzz_replay_with_changed_test, |prj, cmd| {
prj.update_config(|config| config.fuzz.seed = Some(U256::from(100u32)));
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract CounterTest is Test {
function testFuzz_SetNumber(uint256 x) public pure {
require(x > 200);
}
}
"#,
);
// Tests should fail and record counterexample with value 200.
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
...
Failing tests:
Encountered 1 failing test in test/Counter.t.sol:CounterTest
[FAIL: EvmError: Revert; counterexample: calldata=0x5c7f60d700000000000000000000000000000000000000000000000000000000000000c8 args=[200]] testFuzz_SetNumber(uint256) (runs: 6, [AVG_GAS])
...
"#]]);
// Change test to assume counterexample 2 is discarded.
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract CounterTest is Test {
function testFuzz_SetNumber(uint256 x) public pure {
vm.assume(x != 200);
}
}
"#,
);
// Test should pass when replay failure with changed assume logic.
cmd.forge_fuse().args(["test"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// Change test signature.
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract CounterTest is Test {
function testFuzz_SetNumber(uint8 x) public pure {
}
}
"#,
);
// Test should pass when replay failure with changed function signature.
cmd.forge_fuse().args(["test"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint8) (runs: 256, [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// Change test back to the original one that produced the counterexample.
prj.add_test(
"Counter.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract CounterTest is Test {
function testFuzz_SetNumber(uint256 x) public pure {
require(x > 200);
}
}
"#,
);
// Test should fail with replayed counterexample 200 (0 runs).
cmd.forge_fuse().args(["test"]).assert_failure().stdout_eq(str![[r#"
...
Failing tests:
Encountered 1 failing test in test/Counter.t.sol:CounterTest
[FAIL: EvmError: Revert; counterexample: calldata=0x5c7f60d700000000000000000000000000000000000000000000000000000000000000c8 args=[200]] testFuzz_SetNumber(uint256) (runs: 0, [AVG_GAS])
...
"#]]);
});
forgetest_init!(fuzz_basic, |prj, cmd| {
prj.add_test(
"Fuzz.t.sol",
r#"
import "forge-std/Test.sol";
contract FuzzTest is Test {
constructor() {
emit log("constructor");
}
function setUp() public {
emit log("setUp");
}
function testShouldFailFuzz(uint8 x) public {
emit log("testFailFuzz");
require(x > 128, "should revert");
}
function testSuccessfulFuzz(uint128 a, uint128 b) public {
emit log("testSuccessfulFuzz");
assertEq(uint256(a) + uint256(b), uint256(a) + uint256(b));
}
function testToStringFuzz(bytes32 data) public {
vm.toString(data);
}
}
"#,
);
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
...
Ran 3 tests for test/Fuzz.t.sol:FuzzTest
[FAIL: should revert; counterexample: calldata=[..] args=[..]] testShouldFailFuzz(uint8) (runs: [..], [AVG_GAS])
[PASS] testSuccessfulFuzz(uint128,uint128) (runs: 256, [AVG_GAS])
[PASS] testToStringFuzz(bytes32) (runs: 256, [AVG_GAS])
Suite result: FAILED. 2 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 1 failed, 0 skipped (3 total tests)
Failing tests:
Encountered 1 failing test in test/Fuzz.t.sol:FuzzTest
[FAIL: should revert; counterexample: calldata=[..] args=[..]] testShouldFailFuzz(uint8) (runs: [..], [AVG_GAS])
Encountered a total of 1 failing tests, 2 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Test that showcases PUSH collection on normal fuzzing.
// Ignored until we collect them in a smarter way.
forgetest_init!(
#[ignore]
fuzz_collection,
|prj, cmd| {
prj.update_config(|config| {
config.invariant.depth = 100;
config.invariant.runs = 1000;
config.fuzz.runs = 1000;
config.fuzz.seed = Some(U256::from(6u32));
});
prj.add_test(
"FuzzCollection.t.sol",
r#"
import "forge-std/Test.sol";
contract SampleContract {
uint256 public counter;
uint256 public counterX2;
address public owner = address(0xBEEF);
bool public found_needle;
event Incremented(uint256 counter);
modifier onlyOwner() {
require(msg.sender == owner, "ONLY_OWNER");
_;
}
function compare(uint256 val) public {
if (val == 0x4446) {
found_needle = true;
}
}
function incrementBy(uint256 numToIncrement) public onlyOwner {
counter += numToIncrement;
counterX2 += numToIncrement * 2;
emit Incremented(counter);
}
function breakTheInvariant(uint256 x) public {
if (x == 0x5556) {
counterX2 = 0;
}
}
}
contract SampleContractTest is Test {
event Incremented(uint256 counter);
SampleContract public sample;
function setUp() public {
sample = new SampleContract();
}
function testIncrement(address caller) public {
vm.startPrank(address(caller));
vm.expectRevert("ONLY_OWNER");
sample.incrementBy(1);
}
function testNeedle(uint256 needle) public {
sample.compare(needle);
require(!sample.found_needle(), "needle found.");
}
function invariantCounter() public {
require(sample.counter() * 2 == sample.counterX2(), "broken counter.");
}
}
"#,
);
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#""#]]);
}
);
forgetest_init!(fuzz_failure_persist, |prj, cmd| {
let persist_dir = prj.cache().parent().unwrap().join("persist");
assert!(!persist_dir.exists());
prj.update_config(|config| {
config.fuzz.failure_persist_dir = Some(persist_dir.clone());
});
prj.add_test(
"FuzzFailurePersist.t.sol",
r#"
import "forge-std/Test.sol";
struct TestTuple {
address user;
uint256 amount;
}
contract FuzzFailurePersistTest is Test {
function test_persist_fuzzed_failure(
uint256 x,
int256 y,
address addr,
bool cond,
string calldata test,
TestTuple calldata tuple,
address[] calldata addresses
) public {
// dummy assume to trigger runs
vm.assume(x > 1 && x < 1111111111111111111111111111);
vm.assume(y > 1 && y < 1111111111111111111111111111);
require(false);
}
}
"#,
);
let mut calldata = None;
let expected = str![[r#"
...
Ran 1 test for test/FuzzFailurePersist.t.sol:FuzzFailurePersistTest
[FAIL: EvmError: Revert; counterexample: calldata=[..] args=[..]] test_persist_fuzzed_failure(uint256,int256,address,bool,string,(address,uint256),address[]) (runs: 0, [AVG_GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
...
"#]];
let mut check = |cmd: &mut TestCommand, same: bool| {
let assert = cmd.assert_failure();
let output = assert.get_output();
let stdout = String::from_utf8_lossy(&output.stdout);
let calldata = calldata.get_or_insert_with(|| {
let re = Regex::new(r"calldata=(0x[0-9a-fA-F]+)").unwrap();
re.captures(&stdout).unwrap().get(1).unwrap().as_str().to_string()
});
assert_eq!(stdout.contains(calldata.as_str()), same, "\n{stdout}");
assert.stdout_eq(expected.clone());
};
cmd.arg("test");
// Run several times, asserting that the failure persists and is the same.
for _ in 0..3 {
check(&mut cmd, true);
assert!(persist_dir.exists());
}
// Change dir and run again, asserting that the failure persists. It should be a new failure.
let new_persist_dir = prj.cache().parent().unwrap().join("persist2");
assert!(!new_persist_dir.exists());
prj.update_config(|config| {
config.fuzz.failure_persist_dir = Some(new_persist_dir.clone());
});
check(&mut cmd, false);
assert!(new_persist_dir.exists());
});
// https://github.com/foundry-rs/foundry/pull/735 behavior changed with https://github.com/foundry-rs/foundry/issues/3521
// random values (instead edge cases) are generated if no fixtures defined
forgetest_init!(fuzz_int, |prj, cmd| {
prj.add_test(
"FuzzInt.t.sol",
r#"
import "forge-std/Test.sol";
contract FuzzNumbersTest is Test {
function testPositive(int256) public {
assertTrue(true);
}
function testNegativeHalf(int256 val) public {
assertTrue(val < 2 ** 128 - 1);
}
function testNegative0(int256 val) public {
assertTrue(val == 0);
}
function testNegative1(int256 val) public {
assertTrue(val == -1);
}
function testNegative2(int128 val) public {
assertTrue(val == 1);
}
function testNegativeMax0(int256 val) public {
assertTrue(val == type(int256).max);
}
function testNegativeMax1(int256 val) public {
assertTrue(val == type(int256).max - 2);
}
function testNegativeMin0(int256 val) public {
assertTrue(val == type(int256).min);
}
function testNegativeMin1(int256 val) public {
assertTrue(val == type(int256).min + 2);
}
function testEquality(int256 x, int256 y) public {
int256 xy;
unchecked {
xy = x * y;
}
if ((x != 0 && xy / x != y)) {
return;
}
assertEq(((xy - 1) / 1e18) + 1, (xy - 1) / (1e18 + 1));
}
}
"#,
);
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
...
Ran 10 tests for test/FuzzInt.t.sol:FuzzNumbersTest
[FAIL: assertion failed[..]] testEquality(int256,int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegative0(int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegative1(int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegative2(int128) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeHalf(int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeMax0(int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeMax1(int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeMin0(int256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeMin1(int256) (runs: [..], [AVG_GAS])
[PASS] testPositive(int256) (runs: 256, [AVG_GAS])
Suite result: FAILED. 1 passed; 9 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 9 failed, 0 skipped (10 total tests)
...
"#]]);
});
forgetest_init!(fuzz_positive, |prj, cmd| {
prj.add_test(
"FuzzPositive.t.sol",
r#"
import "forge-std/Test.sol";
contract FuzzPositive is Test {
function testSuccessChecker(uint256 val) public {
assertTrue(true);
}
function testSuccessChecker2(int256 val) public {
assert(val == val);
}
function testSuccessChecker3(uint32 val) public {
assert(val + 0 == val);
}
}
"#,
);
cmd.args(["test"]).assert_success().stdout_eq(str![[r#"
...
Ran 3 tests for test/FuzzPositive.t.sol:FuzzPositive
[PASS] testSuccessChecker(uint256) (runs: 256, [AVG_GAS])
[PASS] testSuccessChecker2(int256) (runs: 256, [AVG_GAS])
[PASS] testSuccessChecker3(uint32) (runs: 256, [AVG_GAS])
Suite result: ok. 3 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests)
"#]]);
});
// https://github.com/foundry-rs/foundry/pull/735 behavior changed with https://github.com/foundry-rs/foundry/issues/3521
// random values (instead edge cases) are generated if no fixtures defined
forgetest_init!(fuzz_uint, |prj, cmd| {
prj.update_config(|config| {
config.fuzz.seed = Some(U256::from(100u32));
});
prj.add_test(
"FuzzUint.t.sol",
r#"
import "forge-std/Test.sol";
contract FuzzNumbersTest is Test {
function testPositive(uint256) public {
assertTrue(true);
}
function testNegativeHalf(uint256 val) public {
assertTrue(val < 2 ** 128 - 1);
}
function testNegative0(uint256 val) public {
assertTrue(val == 0);
}
function testNegative2(uint256 val) public {
assertTrue(val == 2);
}
function testNegative2Max(uint256 val) public {
assertTrue(val == type(uint256).max - 2);
}
function testNegativeMax(uint256 val) public {
assertTrue(val == type(uint256).max);
}
function testEquality(uint256 x, uint256 y) public {
uint256 xy;
unchecked {
xy = x * y;
}
if ((x != 0 && xy / x != y)) {
return;
}
assertEq(((xy - 1) / 1e18) + 1, (xy - 1) / (1e18 + 1));
}
}
"#,
);
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
...
Ran 7 tests for test/FuzzUint.t.sol:FuzzNumbersTest
[FAIL: assertion failed[..]] testEquality(uint256,uint256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegative0(uint256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegative2(uint256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegative2Max(uint256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeHalf(uint256) (runs: [..], [AVG_GAS])
[FAIL: assertion failed[..]] testNegativeMax(uint256) (runs: [..], [AVG_GAS])
[PASS] testPositive(uint256) (runs: 256, [AVG_GAS])
Suite result: FAILED. 1 passed; 6 failed; 0 skipped; [ELAPSED]
...
"#]]);
});
forgetest_init!(should_fuzz_literals, |prj, cmd| {
// Add a source with magic (literal) values
prj.add_source(
"Magic.sol",
r#"
contract Magic {
// plain literals
address constant DAI = 0x6B175474E89094C44Da98b954EedeAC495271d0F;
uint64 constant MAGIC_NUMBER = 1122334455;
int32 constant MAGIC_INT = -777;
bytes32 constant MAGIC_WORD = "abcd1234";
bytes constant MAGIC_BYTES = hex"deadbeef";
string constant MAGIC_STRING = "xyzzy";
function checkAddr(address v) external pure { assert(v != DAI); }
function checkWord(bytes32 v) external pure { assert(v != MAGIC_WORD); }
function checkNumber(uint64 v) external pure { assert(v != MAGIC_NUMBER); }
function checkInteger(int32 v) external pure { assert(v != MAGIC_INT); }
function checkString(string memory v) external pure { assert(keccak256(abi.encodePacked(v)) != keccak256(abi.encodePacked(MAGIC_STRING))); }
function checkBytesFromHex(bytes memory v) external pure { assert(keccak256(v) != keccak256(MAGIC_BYTES)); }
function checkBytesFromString(bytes memory v) external pure { assert(keccak256(v) != keccak256(abi.encodePacked(MAGIC_STRING))); }
}
"#,
);
prj.add_test(
"MagicFuzz.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Magic} from "src/Magic.sol";
contract MagicTest is Test {
Magic public magic;
function setUp() public { magic = new Magic(); }
function testFuzz_Addr(address v) public view { magic.checkAddr(v); }
function testFuzz_Number(uint64 v) public view { magic.checkNumber(v); }
function testFuzz_Integer(int32 v) public view { magic.checkInteger(v); }
function testFuzz_Word(bytes32 v) public view { magic.checkWord(v); }
function testFuzz_String(string memory v) public view { magic.checkString(v); }
function testFuzz_BytesFromHex(bytes memory v) public view { magic.checkBytesFromHex(v); }
function testFuzz_BytesFromString(bytes memory v) public view { magic.checkBytesFromString(v); }
}
"#,
);
// Helper to create expected output for a test failure
let expected_fail = |test_name: &str, type_sig: &str, value: &str, runs: u32| -> String {
format!(
r#"No files changed, compilation skipped
Ran 1 test for test/MagicFuzz.t.sol:MagicTest
[FAIL: panic: assertion failed (0x01); counterexample: calldata=[..] args=[{value}]] {test_name}({type_sig}) (runs: {runs}, [AVG_GAS])
[..]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
...
Encountered a total of 1 failing tests, 0 tests succeeded
...
"#
)
};
// Test address literal fuzzing
let mut test_literal = |seed: u32,
test_name: &'static str,
type_sig: &'static str,
expected_value: &'static str,
expected_runs: u32| {
// the fuzzer is UNABLE to find a breaking input (fast) when NOT seeding from the AST
prj.update_config(|config| {
config.fuzz.runs = 100;
config.fuzz.dictionary.max_fuzz_dictionary_literals = 0;
config.fuzz.seed = Some(U256::from(seed));
});
cmd.forge_fuse().args(["test", "--match-test", test_name]).assert_success();
// the fuzzer is ABLE to find a breaking input when seeding from the AST
prj.update_config(|config| {
config.fuzz.dictionary.max_fuzz_dictionary_literals = 10_000;
});
let expected_output = expected_fail(test_name, type_sig, expected_value, expected_runs);
cmd.forge_fuse()
.args(["test", "--match-test", test_name])
.assert_failure()
.stdout_eq(expected_output);
};
test_literal(100, "testFuzz_Addr", "address", "0x6B175474E89094C44Da98b954EedeAC495271d0F", 28);
test_literal(200, "testFuzz_Number", "uint64", "1122334455 [1.122e9]", 5);
test_literal(300, "testFuzz_Integer", "int32", "-777", 0);
test_literal(
400,
"testFuzz_Word",
"bytes32",
"0x6162636431323334000000000000000000000000000000000000000000000000", /* bytes32("abcd1234") */
7,
);
test_literal(500, "testFuzz_BytesFromHex", "bytes", "0xdeadbeef", 5);
test_literal(600, "testFuzz_String", "string", "\"xyzzy\"", 35);
test_literal(999, "testFuzz_BytesFromString", "bytes", "0x78797a7a79", 19); // abi.encodePacked("xyzzy")
});
// Tests that `vm.randomUint()` produces different values across fuzz runs.
// Regression test for https://github.com/foundry-rs/foundry/issues/12817
//
// The issue was that `vm.randomUint()` would produce the same sequence of values
// in every fuzz run because the RNG was seeded identically for each run.
// This test verifies that with many fuzz runs and a small range, we eventually
// hit value 0, which proves the RNG varies across runs.
forgetest_init!(test_fuzz_random_uint_varies_across_runs, |prj, cmd| {
prj.add_test(
"RandomFuzzTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract RandomFuzzTest is Test {
function testFuzz_randomUint_shouldFail(uint256) public {
uint256 rand = vm.randomUint(0, 4);
assertTrue(rand != 0, "hit value 0");
}
}
"#,
);
cmd.args(["test", "--fuzz-seed", "1", "--mt", "testFuzz_randomUint_shouldFail"])
.assert_failure()
.stdout_eq(str![[r#"
...
Ran 1 test for test/RandomFuzzTest.t.sol:RandomFuzzTest
[FAIL: hit value 0; counterexample: [..]] testFuzz_randomUint_shouldFail(uint256) (runs: [..], [AVG_GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
...
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/trace.rs | crates/forge/tests/cli/test_cmd/trace.rs | //! Tests for tracing functionality
use foundry_test_utils::str;
forgetest_init!(conflicting_signatures, |prj, cmd| {
prj.add_test(
"ConflictingSignatures.t.sol",
r#"
pragma solidity ^0.8.18;
import "forge-std/Test.sol";
contract ReturnsNothing {
function func() public pure {}
}
contract ReturnsString {
function func() public pure returns (string memory) {
return "string";
}
}
contract ReturnsUint {
function func() public pure returns (uint256) {
return 1;
}
}
contract ConflictingSignaturesTest is Test {
ReturnsNothing retsNothing;
ReturnsString retsString;
ReturnsUint retsUint;
function setUp() public {
retsNothing = new ReturnsNothing();
retsString = new ReturnsString();
retsUint = new ReturnsUint();
}
/// Tests that traces are decoded properly when multiple
/// functions have the same 4byte signature, but different
/// return values.
function testTraceWithConflictingSignatures() public {
retsNothing.func();
retsString.func();
retsUint.func();
}
}
"#,
);
cmd.args(["test", "-vvvvv"]).assert_success().stdout_eq(str![[r#"
...
Ran 1 test for test/ConflictingSignatures.t.sol:ConflictingSignaturesTest
[PASS] testTraceWithConflictingSignatures() ([GAS])
Traces:
[..] ConflictingSignaturesTest::setUp()
├─ [..] → new ReturnsNothing@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f
│ └─ ← [Return] 106 bytes of code
├─ [..] → new ReturnsString@0x2e234DAe75C793f67A35089C9d99245E1C58470b
│ └─ ← [Return] 334 bytes of code
├─ [..] → new ReturnsUint@0xF62849F9A0B5Bf2913b396098F7c7019b51A820a
│ └─ ← [Return] 175 bytes of code
└─ ← [Stop]
[..] ConflictingSignaturesTest::testTraceWithConflictingSignatures()
├─ [..] ReturnsNothing::func() [staticcall]
│ └─ ← [Stop]
├─ [..] ReturnsString::func() [staticcall]
│ └─ ← [Return] 0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000006737472696e670000000000000000000000000000000000000000000000000000
├─ [..] ReturnsUint::func() [staticcall]
│ └─ ← [Return] 0x0000000000000000000000000000000000000000000000000000000000000001
└─ ← [Stop]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
#[cfg(not(feature = "isolate-by-default"))]
// Checks the full rendered call-trace tree at -vvvvv for two nesting shapes:
// deep recursive external self-calls (testRecurseCall) and nested factory
// creations with vm.label-ed children (testRecurseCreate). The snapshot also
// covers event emission, return values, and storage-change annotations.
// Skipped under the isolate-by-default feature (the snapshot below assumes
// non-isolated execution — trace shape would presumably differ; confirm).
forgetest_init!(trace_test, |prj, cmd| {
// RecursiveCall always recurses via *external* calls (this.recurseCall /
// factory.create), so every recursion level appears as its own trace frame.
prj.add_test(
"Trace.t.sol",
r#"
pragma solidity ^0.8.18;
import "forge-std/Test.sol";
contract RecursiveCall {
TraceTest factory;
event Depth(uint256 depth);
event ChildDepth(uint256 childDepth);
event CreatedChild(uint256 childDepth);
constructor(address _factory) {
factory = TraceTest(_factory);
}
function recurseCall(uint256 neededDepth, uint256 depth) public returns (uint256) {
if (depth == neededDepth) {
this.negativeNum();
return neededDepth;
}
uint256 childDepth = this.recurseCall(neededDepth, depth + 1);
emit ChildDepth(childDepth);
this.someCall();
emit Depth(depth);
return depth;
}
function recurseCreate(uint256 neededDepth, uint256 depth) public returns (uint256) {
if (depth == neededDepth) {
return neededDepth;
}
RecursiveCall child = factory.create();
emit CreatedChild(depth + 1);
uint256 childDepth = child.recurseCreate(neededDepth, depth + 1);
emit ChildDepth(childDepth);
emit Depth(depth);
return depth;
}
function someCall() public pure {}
function negativeNum() public pure returns (int256) {
return -1000000000;
}
}
contract TraceTest is Test {
uint256 nodeId = 0;
RecursiveCall first;
function setUp() public {
first = this.create();
}
function create() public returns (RecursiveCall) {
RecursiveCall node = new RecursiveCall(address(this));
vm.label(address(node), string(abi.encodePacked("Node ", uintToString(nodeId++))));
return node;
}
function testRecurseCall() public {
first.recurseCall(8, 0);
}
function testRecurseCreate() public {
first.recurseCreate(8, 0);
}
}
function uintToString(uint256 value) pure returns (string memory) {
// Taken from OpenZeppelin
if (value == 0) {
return "0";
}
uint256 temp = value;
uint256 digits;
while (temp != 0) {
digits++;
temp /= 10;
}
bytes memory buffer = new bytes(digits);
while (value != 0) {
digits -= 1;
buffer[digits] = bytes1(uint8(48 + uint256(value % 10)));
value /= 10;
}
return string(buffer);
}
"#,
);
// -vvvvv: traces (with events, returns, storage changes) are printed even for
// passing tests; `[..]`/[GAS]/[ELAPSED] are snapbox wildcards for volatile values.
cmd.args(["test", "-vvvvv"]).assert_success().stdout_eq(str![[r#"
...
Ran 2 tests for test/Trace.t.sol:TraceTest
[PASS] testRecurseCall() ([GAS])
Traces:
[..] TraceTest::setUp()
├─ [..] TraceTest::create()
│ ├─ [..] → new Node 0@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f
│ │ └─ ← [Return] 1911 bytes of code
│ ├─ [0] VM::label(Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f], "Node 0")
│ │ └─ ← [Return]
│ └─ ← [Return] Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f]
└─ ← [Stop]
[..] TraceTest::testRecurseCall()
├─ [..] Node 0::recurseCall(8, 0)
│ ├─ [..] Node 0::recurseCall(8, 1)
│ │ ├─ [..] Node 0::recurseCall(8, 2)
│ │ │ ├─ [..] Node 0::recurseCall(8, 3)
│ │ │ │ ├─ [..] Node 0::recurseCall(8, 4)
│ │ │ │ │ ├─ [..] Node 0::recurseCall(8, 5)
│ │ │ │ │ │ ├─ [..] Node 0::recurseCall(8, 6)
│ │ │ │ │ │ │ ├─ [..] Node 0::recurseCall(8, 7)
│ │ │ │ │ │ │ │ ├─ [..] Node 0::recurseCall(8, 8)
│ │ │ │ │ │ │ │ │ ├─ [..] Node 0::negativeNum() [staticcall]
│ │ │ │ │ │ │ │ │ │ └─ ← [Return] -1000000000 [-1e9]
│ │ │ │ │ │ │ │ │ └─ ← [Return] 8
│ │ │ │ │ │ │ │ ├─ emit ChildDepth(childDepth: 8)
│ │ │ │ │ │ │ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ │ │ │ │ │ │ └─ ← [Stop]
│ │ │ │ │ │ │ │ ├─ emit Depth(depth: 7)
│ │ │ │ │ │ │ │ └─ ← [Return] 7
│ │ │ │ │ │ │ ├─ emit ChildDepth(childDepth: 7)
│ │ │ │ │ │ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ │ │ │ │ │ └─ ← [Stop]
│ │ │ │ │ │ │ ├─ emit Depth(depth: 6)
│ │ │ │ │ │ │ └─ ← [Return] 6
│ │ │ │ │ │ ├─ emit ChildDepth(childDepth: 6)
│ │ │ │ │ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ │ │ │ │ └─ ← [Stop]
│ │ │ │ │ │ ├─ emit Depth(depth: 5)
│ │ │ │ │ │ └─ ← [Return] 5
│ │ │ │ │ ├─ emit ChildDepth(childDepth: 5)
│ │ │ │ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ │ │ │ └─ ← [Stop]
│ │ │ │ │ ├─ emit Depth(depth: 4)
│ │ │ │ │ └─ ← [Return] 4
│ │ │ │ ├─ emit ChildDepth(childDepth: 4)
│ │ │ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ │ │ └─ ← [Stop]
│ │ │ │ ├─ emit Depth(depth: 3)
│ │ │ │ └─ ← [Return] 3
│ │ │ ├─ emit ChildDepth(childDepth: 3)
│ │ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ │ └─ ← [Stop]
│ │ │ ├─ emit Depth(depth: 2)
│ │ │ └─ ← [Return] 2
│ │ ├─ emit ChildDepth(childDepth: 2)
│ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ └─ ← [Stop]
│ │ ├─ emit Depth(depth: 1)
│ │ └─ ← [Return] 1
│ ├─ emit ChildDepth(childDepth: 1)
│ ├─ [..] Node 0::someCall() [staticcall]
│ │ └─ ← [Stop]
│ ├─ emit Depth(depth: 0)
│ └─ ← [Return] 0
└─ ← [Stop]
[PASS] testRecurseCreate() ([GAS])
Traces:
[..] TraceTest::setUp()
├─ [..] TraceTest::create()
│ ├─ [..] → new Node 0@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f
│ │ └─ ← [Return] 1911 bytes of code
│ ├─ [0] VM::label(Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f], "Node 0")
│ │ └─ ← [Return]
│ └─ ← [Return] Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f]
└─ ← [Stop]
[..] TraceTest::testRecurseCreate()
├─ [..] Node 0::recurseCreate(8, 0)
│ ├─ [..] TraceTest::create()
│ │ ├─ [..] → new Node 1@0x2e234DAe75C793f67A35089C9d99245E1C58470b
│ │ │ ├─ storage changes:
│ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ └─ ← [Return] 1911 bytes of code
│ │ ├─ [0] VM::label(Node 1: [0x2e234DAe75C793f67A35089C9d99245E1C58470b], "Node 1")
│ │ │ └─ ← [Return]
│ │ ├─ storage changes:
│ │ │ @ 32: 1 → 2
│ │ └─ ← [Return] Node 1: [0x2e234DAe75C793f67A35089C9d99245E1C58470b]
│ ├─ emit CreatedChild(childDepth: 1)
│ ├─ [..] Node 1::recurseCreate(8, 1)
│ │ ├─ [..] TraceTest::create()
│ │ │ ├─ [..] → new Node 2@0xF62849F9A0B5Bf2913b396098F7c7019b51A820a
│ │ │ │ ├─ storage changes:
│ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ ├─ [0] VM::label(Node 2: [0xF62849F9A0B5Bf2913b396098F7c7019b51A820a], "Node 2")
│ │ │ │ └─ ← [Return]
│ │ │ ├─ storage changes:
│ │ │ │ @ 32: 2 → 3
│ │ │ └─ ← [Return] Node 2: [0xF62849F9A0B5Bf2913b396098F7c7019b51A820a]
│ │ ├─ emit CreatedChild(childDepth: 2)
│ │ ├─ [..] Node 2::recurseCreate(8, 2)
│ │ │ ├─ [..] TraceTest::create()
│ │ │ │ ├─ [..] → new Node 3@0x5991A2dF15A8F6A256D3Ec51E99254Cd3fb576A9
│ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ │ ├─ [0] VM::label(Node 3: [0x5991A2dF15A8F6A256D3Ec51E99254Cd3fb576A9], "Node 3")
│ │ │ │ │ └─ ← [Return]
│ │ │ │ ├─ storage changes:
│ │ │ │ │ @ 32: 3 → 4
│ │ │ │ └─ ← [Return] Node 3: [0x5991A2dF15A8F6A256D3Ec51E99254Cd3fb576A9]
│ │ │ ├─ emit CreatedChild(childDepth: 3)
│ │ │ ├─ [..] Node 3::recurseCreate(8, 3)
│ │ │ │ ├─ [..] TraceTest::create()
│ │ │ │ │ ├─ [..] → new Node 4@0xc7183455a4C133Ae270771860664b6B7ec320bB1
│ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ │ │ ├─ [0] VM::label(Node 4: [0xc7183455a4C133Ae270771860664b6B7ec320bB1], "Node 4")
│ │ │ │ │ │ └─ ← [Return]
│ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ @ 32: 4 → 5
│ │ │ │ │ └─ ← [Return] Node 4: [0xc7183455a4C133Ae270771860664b6B7ec320bB1]
│ │ │ │ ├─ emit CreatedChild(childDepth: 4)
│ │ │ │ ├─ [..] Node 4::recurseCreate(8, 4)
│ │ │ │ │ ├─ [..] TraceTest::create()
│ │ │ │ │ │ ├─ [..] → new Node 5@0xa0Cb889707d426A7A386870A03bc70d1b0697598
│ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ │ │ │ ├─ [0] VM::label(Node 5: [0xa0Cb889707d426A7A386870A03bc70d1b0697598], "Node 5")
│ │ │ │ │ │ │ └─ ← [Return]
│ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ @ 32: 5 → 6
│ │ │ │ │ │ └─ ← [Return] Node 5: [0xa0Cb889707d426A7A386870A03bc70d1b0697598]
│ │ │ │ │ ├─ emit CreatedChild(childDepth: 5)
│ │ │ │ │ ├─ [..] Node 5::recurseCreate(8, 5)
│ │ │ │ │ │ ├─ [..] TraceTest::create()
│ │ │ │ │ │ │ ├─ [..] → new Node 6@0x1d1499e622D69689cdf9004d05Ec547d650Ff211
│ │ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ │ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ │ │ │ │ ├─ [0] VM::label(Node 6: [0x1d1499e622D69689cdf9004d05Ec547d650Ff211], "Node 6")
│ │ │ │ │ │ │ │ └─ ← [Return]
│ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ @ 32: 6 → 7
│ │ │ │ │ │ │ └─ ← [Return] Node 6: [0x1d1499e622D69689cdf9004d05Ec547d650Ff211]
│ │ │ │ │ │ ├─ emit CreatedChild(childDepth: 6)
│ │ │ │ │ │ ├─ [..] Node 6::recurseCreate(8, 6)
│ │ │ │ │ │ │ ├─ [..] TraceTest::create()
│ │ │ │ │ │ │ │ ├─ [..] → new Node 7@0xA4AD4f68d0b91CFD19687c881e50f3A00242828c
│ │ │ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ │ │ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ │ │ │ │ │ ├─ [0] VM::label(Node 7: [0xA4AD4f68d0b91CFD19687c881e50f3A00242828c], "Node 7")
│ │ │ │ │ │ │ │ │ └─ ← [Return]
│ │ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ │ @ 32: 7 → 8
│ │ │ │ │ │ │ │ └─ ← [Return] Node 7: [0xA4AD4f68d0b91CFD19687c881e50f3A00242828c]
│ │ │ │ │ │ │ ├─ emit CreatedChild(childDepth: 7)
│ │ │ │ │ │ │ ├─ [..] Node 7::recurseCreate(8, 7)
│ │ │ │ │ │ │ │ ├─ [..] TraceTest::create()
│ │ │ │ │ │ │ │ │ ├─ [..] → new Node 8@0x03A6a84cD762D9707A21605b548aaaB891562aAb
│ │ │ │ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ │ │ │ │ │ │ │ └─ ← [Return] 1911 bytes of code
│ │ │ │ │ │ │ │ │ ├─ [0] VM::label(Node 8: [0x03A6a84cD762D9707A21605b548aaaB891562aAb], "Node 8")
│ │ │ │ │ │ │ │ │ │ └─ ← [Return]
│ │ │ │ │ │ │ │ │ ├─ storage changes:
│ │ │ │ │ │ │ │ │ │ @ 32: 8 → 9
│ │ │ │ │ │ │ │ │ └─ ← [Return] Node 8: [0x03A6a84cD762D9707A21605b548aaaB891562aAb]
│ │ │ │ │ │ │ │ ├─ emit CreatedChild(childDepth: 8)
│ │ │ │ │ │ │ │ ├─ [..] Node 8::recurseCreate(8, 8)
│ │ │ │ │ │ │ │ │ └─ ← [Return] 8
│ │ │ │ │ │ │ │ ├─ emit ChildDepth(childDepth: 8)
│ │ │ │ │ │ │ │ ├─ emit Depth(depth: 7)
│ │ │ │ │ │ │ │ └─ ← [Return] 7
│ │ │ │ │ │ │ ├─ emit ChildDepth(childDepth: 7)
│ │ │ │ │ │ │ ├─ emit Depth(depth: 6)
│ │ │ │ │ │ │ └─ ← [Return] 6
│ │ │ │ │ │ ├─ emit ChildDepth(childDepth: 6)
│ │ │ │ │ │ ├─ emit Depth(depth: 5)
│ │ │ │ │ │ └─ ← [Return] 5
│ │ │ │ │ ├─ emit ChildDepth(childDepth: 5)
│ │ │ │ │ ├─ emit Depth(depth: 4)
│ │ │ │ │ └─ ← [Return] 4
│ │ │ │ ├─ emit ChildDepth(childDepth: 4)
│ │ │ │ ├─ emit Depth(depth: 3)
│ │ │ │ └─ ← [Return] 3
│ │ │ ├─ emit ChildDepth(childDepth: 3)
│ │ │ ├─ emit Depth(depth: 2)
│ │ │ └─ ← [Return] 2
│ │ ├─ emit ChildDepth(childDepth: 2)
│ │ ├─ emit Depth(depth: 1)
│ │ └─ ← [Return] 1
│ ├─ emit ChildDepth(childDepth: 1)
│ ├─ emit Depth(depth: 0)
│ └─ ← [Return] 0
└─ ← [Stop]
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
});
#[cfg(not(feature = "isolate-by-default"))]
// Same recursive setup as `trace_test`, but run with `--trace-depth 3`: frames
// deeper than three levels are collapsed out of the rendered tree (e.g.
// recurseCall(8, 2) shows only its own return, not its inner frames).
// Renamed from `trace_test_detph` (typo in the original test name).
forgetest_init!(trace_test_depth, |prj, cmd| {
prj.add_test(
"Trace.t.sol",
r#"
pragma solidity ^0.8.18;
import "forge-std/Test.sol";
contract RecursiveCall {
TraceTest factory;
event Depth(uint256 depth);
event ChildDepth(uint256 childDepth);
event CreatedChild(uint256 childDepth);
constructor(address _factory) {
factory = TraceTest(_factory);
}
function recurseCall(uint256 neededDepth, uint256 depth) public returns (uint256) {
if (depth == neededDepth) {
this.negativeNum();
return neededDepth;
}
uint256 childDepth = this.recurseCall(neededDepth, depth + 1);
emit ChildDepth(childDepth);
this.someCall();
emit Depth(depth);
return depth;
}
function recurseCreate(uint256 neededDepth, uint256 depth) public returns (uint256) {
if (depth == neededDepth) {
return neededDepth;
}
RecursiveCall child = factory.create();
emit CreatedChild(depth + 1);
uint256 childDepth = child.recurseCreate(neededDepth, depth + 1);
emit ChildDepth(childDepth);
emit Depth(depth);
return depth;
}
function someCall() public pure {}
function negativeNum() public pure returns (int256) {
return -1000000000;
}
}
contract TraceTest is Test {
uint256 nodeId = 0;
RecursiveCall first;
function setUp() public {
first = this.create();
}
function create() public returns (RecursiveCall) {
RecursiveCall node = new RecursiveCall(address(this));
vm.label(address(node), string(abi.encodePacked("Node ", uintToString(nodeId++))));
return node;
}
function testRecurseCall() public {
first.recurseCall(8, 0);
}
function testRecurseCreate() public {
first.recurseCreate(8, 0);
}
}
function uintToString(uint256 value) pure returns (string memory) {
// Taken from OpenZeppelin
if (value == 0) {
return "0";
}
uint256 temp = value;
uint256 digits;
while (temp != 0) {
digits++;
temp /= 10;
}
bytes memory buffer = new bytes(digits);
while (value != 0) {
digits -= 1;
buffer[digits] = bytes1(uint8(48 + uint256(value % 10)));
value /= 10;
}
return string(buffer);
}
"#,
);
// The hard-coded gas value that previously appeared for the `new Node 1` frame
// is wildcarded with `[..]`, matching every sibling line, so the snapshot no
// longer breaks when codegen/gas accounting changes.
cmd.args(["test", "-vvvvv", "--trace-depth", "3"]).assert_success().stdout_eq(str![[r#"
...
Ran 2 tests for test/Trace.t.sol:TraceTest
[PASS] testRecurseCall() ([GAS])
Traces:
[..] TraceTest::setUp()
├─ [..] TraceTest::create()
│ ├─ [..] → new Node 0@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f
│ │ └─ ← [Return] 1911 bytes of code
│ ├─ [0] VM::label(Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f], "Node 0")
│ │ └─ ← [Return]
│ └─ ← [Return] Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f]
└─ ← [Stop]
[..] TraceTest::testRecurseCall()
├─ [..] Node 0::recurseCall(8, 0)
│ ├─ [..] Node 0::recurseCall(8, 1)
│ │ ├─ [..] Node 0::recurseCall(8, 2)
│ │ │ └─ ← [Return] 2
│ │ ├─ emit ChildDepth(childDepth: 2)
│ │ ├─ [..] Node 0::someCall() [staticcall]
│ │ │ └─ ← [Stop]
│ │ ├─ emit Depth(depth: 1)
│ │ └─ ← [Return] 1
│ ├─ emit ChildDepth(childDepth: 1)
│ ├─ [..] Node 0::someCall() [staticcall]
│ │ └─ ← [Stop]
│ ├─ emit Depth(depth: 0)
│ └─ ← [Return] 0
└─ ← [Stop]
[PASS] testRecurseCreate() ([GAS])
Traces:
[..] TraceTest::setUp()
├─ [..] TraceTest::create()
│ ├─ [..] → new Node 0@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f
│ │ └─ ← [Return] 1911 bytes of code
│ ├─ [0] VM::label(Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f], "Node 0")
│ │ └─ ← [Return]
│ └─ ← [Return] Node 0: [0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f]
└─ ← [Stop]
[..] TraceTest::testRecurseCreate()
├─ [..] Node 0::recurseCreate(8, 0)
│ ├─ [..] TraceTest::create()
│ │ ├─ [..] → new Node 1@0x2e234DAe75C793f67A35089C9d99245E1C58470b
│ │ │ ├─ storage changes:
│ │ │ │ @ 0: 0 → 0x0000000000000000000000007fa9385be102ac3eac297483dd6233d62b3e1496
│ │ │ └─ ← [Return] 1911 bytes of code
│ │ ├─ [0] VM::label(Node 1: [0x2e234DAe75C793f67A35089C9d99245E1C58470b], "Node 1")
│ │ │ └─ ← [Return]
│ │ ├─ storage changes:
│ │ │ @ 32: 1 → 2
│ │ └─ ← [Return] Node 1: [0x2e234DAe75C793f67A35089C9d99245E1C58470b]
│ ├─ emit CreatedChild(childDepth: 1)
│ ├─ [..] Node 1::recurseCreate(8, 1)
│ │ ├─ [..] TraceTest::create()
│ │ │ ├─ storage changes:
│ │ │ │ @ 32: 2 → 3
│ │ │ └─ ← [Return] Node 2: [0xF62849F9A0B5Bf2913b396098F7c7019b51A820a]
│ │ ├─ emit CreatedChild(childDepth: 2)
│ │ ├─ [..] Node 2::recurseCreate(8, 2)
│ │ │ └─ ← [Return] 2
│ │ ├─ emit ChildDepth(childDepth: 2)
│ │ ├─ emit Depth(depth: 1)
│ │ └─ ← [Return] 1
│ ├─ emit ChildDepth(childDepth: 1)
│ ├─ emit Depth(depth: 0)
│ └─ ← [Return] 0
└─ ← [Stop]
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/table.rs | crates/forge/tests/cli/test_cmd/table.rs | //! Table tests.
use foundry_test_utils::{forgetest_init, str};
// End-to-end check of table tests: `fixture<Param>` arrays/functions supply the
// values for same-named parameters of `table*` functions. Valid combinations run
// once per fixture entry; invalid declarations (no parameters, a parameter with
// no fixture, fixtures of mismatched lengths) must fail with descriptive errors.
forgetest_init!(should_run_table_tests, |prj, cmd| {
prj.initialize_default_contracts();
prj.add_test(
"CounterTable.t.sol",
r#"
import "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTableTest is Test {
Counter counter = new Counter();
uint256[] public fixtureAmount = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
bool[] public fixtureSwap = [true, true, false, true, false, true, false, true, false, true];
bool[] public fixtureDiffSwap = [true, false];
function fixtureNoFixture() public returns (address[] memory) {
}
function tableWithNoParamFail() public {
counter.increment();
}
function tableWithParamNoFixtureFail(uint256 noFixture) public {
require(noFixture != 100);
counter.increment();
}
function tableSingleParamPass(uint256 amount) public {
require(amount != 100, "Amount cannot be 100");
counter.increment();
}
function tableSingleParamFail(uint256 amount) public {
require(amount != 10, "Amount cannot be 10");
counter.increment();
}
function tableMultipleParamsNoParamFail(uint256 amount, bool noSwap) public {
require(amount != 100 && noSwap, "Amount cannot be 100");
counter.increment();
}
function tableMultipleParamsDifferentFixturesFail(uint256 amount, bool diffSwap) public {
require(amount != 100 && diffSwap, "Amount cannot be 100");
counter.increment();
}
function tableMultipleParamsFail(uint256 amount, bool swap) public {
require(amount == 3 && swap, "Cannot swap");
counter.increment();
}
function tableMultipleParamsPass(uint256 amount, bool swap) public {
if (amount == 3 && swap) {
revert();
}
counter.increment();
}
}
"#,
);
// -vvvvv: the snapshot also pins traces and backtraces for the failing entries.
cmd.args(["test", "--mc", "CounterTable", "-vvvvv"]).assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 8 tests for test/CounterTable.t.sol:CounterTableTest
[FAIL: 2 fixtures defined for diffSwap (expected 10)] tableMultipleParamsDifferentFixturesFail(uint256,bool) ([GAS])
[FAIL: Cannot swap; counterexample: calldata=0x717892ca00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001 args=[1, true]] tableMultipleParamsFail(uint256,bool) (runs: 1, [AVG_GAS])
Traces:
[..] CounterTableTest::tableMultipleParamsFail(1, true)
└─ ← [Revert] Cannot swap
Backtrace:
at CounterTableTest.tableMultipleParamsFail (test/CounterTable.t.sol:[..]:[..])
[FAIL: No fixture defined for param noSwap] tableMultipleParamsNoParamFail(uint256,bool) ([GAS])
[PASS] tableMultipleParamsPass(uint256,bool) (runs: 10, [AVG_GAS])
Traces:
[..] CounterTableTest::tableMultipleParamsPass(10, true)
├─ [..] Counter::increment()
│ ├─ storage changes:
│ │ @ 0: 0 → 1
│ └─ ← [Stop]
└─ ← [Stop]
[FAIL: Amount cannot be 10; counterexample: calldata=0x44fa2375000000000000000000000000000000000000000000000000000000000000000a args=[10]] tableSingleParamFail(uint256) (runs: 10, [AVG_GAS])
Traces:
[..] CounterTableTest::tableSingleParamFail(10)
└─ ← [Revert] Amount cannot be 10
Backtrace:
at CounterTableTest.tableSingleParamFail (test/CounterTable.t.sol:[..]:[..])
[PASS] tableSingleParamPass(uint256) (runs: 10, [AVG_GAS])
Traces:
[..] CounterTableTest::tableSingleParamPass(10)
├─ [..] Counter::increment()
│ ├─ storage changes:
│ │ @ 0: 0 → 1
│ └─ ← [Stop]
└─ ← [Stop]
[FAIL: Table test should have at least one parameter] tableWithNoParamFail() ([GAS])
[FAIL: Table test should have at least one fixture] tableWithParamNoFixtureFail(uint256) ([GAS])
Suite result: FAILED. 2 passed; 6 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 6 failed, 0 skipped (8 total tests)
Failing tests:
Encountered 6 failing tests in test/CounterTable.t.sol:CounterTableTest
[FAIL: 2 fixtures defined for diffSwap (expected 10)] tableMultipleParamsDifferentFixturesFail(uint256,bool) ([GAS])
[FAIL: Cannot swap; counterexample: calldata=0x717892ca00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001 args=[1, true]] tableMultipleParamsFail(uint256,bool) (runs: 1, [AVG_GAS])
[FAIL: No fixture defined for param noSwap] tableMultipleParamsNoParamFail(uint256,bool) ([GAS])
[FAIL: Amount cannot be 10; counterexample: calldata=0x44fa2375000000000000000000000000000000000000000000000000000000000000000a args=[10]] tableSingleParamFail(uint256) (runs: 10, [AVG_GAS])
[FAIL: Table test should have at least one parameter] tableWithNoParamFail() ([GAS])
[FAIL: Table test should have at least one fixture] tableWithParamNoFixtureFail(uint256) ([GAS])
Encountered a total of 6 failing tests, 2 tests succeeded
Tip: Run `forge test --rerun` to retry only the 6 failed tests
"#]]);
});
// Table tests should show logs and contribute to coverage.
// <https://github.com/foundry-rs/foundry/issues/11066>
forgetest_init!(should_show_logs_and_add_coverage, |prj, cmd| {
// Source with four branches, so 100% branch coverage requires all four table entries.
prj.add_source(
"Counter.sol",
r#"
contract Counter {
uint256 public number;
function setNumber(uint256 a, uint256 b) public {
if (a == 1) {
number = b + 1;
} else if (a == 2) {
number = b + 2;
} else if (a == 3) {
number = b + 3;
} else {
number = a + b;
}
}
}
"#,
);
// A struct-typed fixture: one table entry per TestCase, each hitting a different branch.
prj.add_test(
"CounterTest.t.sol",
r#"
import "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract CounterTest is Test {
struct TestCase {
uint256 a;
uint256 b;
uint256 expected;
}
Counter public counter;
function setUp() public {
counter = new Counter();
}
function fixtureNumbers() public pure returns (TestCase[] memory) {
TestCase[] memory entries = new TestCase[](4);
entries[0] = TestCase(1, 5, 6);
entries[1] = TestCase(2, 10, 12);
entries[2] = TestCase(3, 11, 14);
entries[3] = TestCase(4, 11, 15);
return entries;
}
function tableSetNumberTest(TestCase memory numbers) public {
console.log("expected", numbers.expected);
counter.setNumber(numbers.a, numbers.b);
require(counter.number() == numbers.expected, "test failed");
}
}
"#,
);
// First run: console.log output from every table entry must appear under "Logs:".
cmd.args(["test", "-vvv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/CounterTest.t.sol:CounterTest
[PASS] tableSetNumberTest((uint256,uint256,uint256)) (runs: 4, [AVG_GAS])
Logs:
expected 6
expected 12
expected 14
expected 15
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// Second run: the same table test must count toward coverage (100% on all metrics).
cmd.forge_fuse().args(["coverage"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Analysing contracts...
Running tests...
Ran 1 test for test/CounterTest.t.sol:CounterTest
[PASS] tableSetNumberTest((uint256,uint256,uint256)) (runs: 4, [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
╭-----------------+---------------+---------------+---------------+---------------╮
| File | % Lines | % Statements | % Branches | % Funcs |
+=================================================================================+
| src/Counter.sol | 100.00% (8/8) | 100.00% (7/7) | 100.00% (6/6) | 100.00% (1/1) |
|-----------------+---------------+---------------+---------------+---------------|
| Total | 100.00% (8/8) | 100.00% (7/7) | 100.00% (6/6) | 100.00% (1/1) |
╰-----------------+---------------+---------------+---------------+---------------╯
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/core.rs | crates/forge/tests/cli/test_cmd/core.rs | //! Core test functionality tests
use foundry_test_utils::str;
// A failing assert in setUp() aborts the suite: neither test* function runs and
// only the setUp() failure is reported (1 total test in the summary).
forgetest_init!(failing_test_after_failed_setup, |prj, cmd| {
prj.add_test(
"FailingTestAfterFailedSetup.t.sol",
r#"
import "forge-std/Test.sol";
contract FailingTestAfterFailedSetupTest is Test {
function setUp() public {
assertTrue(false);
}
function testAssertSuccess() public {
assertTrue(true);
}
function testAssertFailure() public {
assertTrue(false);
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
...
Ran 1 test for test/FailingTestAfterFailedSetup.t.sol:FailingTestAfterFailedSetupTest
[FAIL: assertion failed] setUp() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/FailingTestAfterFailedSetup.t.sol:FailingTestAfterFailedSetupTest
[FAIL: assertion failed] setUp() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Contrasts default vs legacy assertion behavior:
// - NoAssertionsRevertTest (defaults): the first failed vm.assert* aborts the test,
//   so only the "1 != 2" message appears (the second assert never reports).
// - LegacyAssertionsTest (`legacy_assertions = true` via in-line forge-config):
//   failure is signaled through the public `failed` flag, so testFlagSetFailure
//   fails with no message and testFlagNotSetSuccess passes.
forgetest_init!(legacy_assertions, |prj, cmd| {
prj.add_test(
"LegacyAssertions.t.sol",
r#"
import "forge-std/Test.sol";
contract NoAssertionsRevertTest is Test {
function testMultipleAssertFailures() public {
vm.assertEq(uint256(1), uint256(2));
vm.assertLt(uint256(5), uint256(4));
}
}
/// forge-config: default.legacy_assertions = true
contract LegacyAssertionsTest {
bool public failed;
function testFlagNotSetSuccess() public {}
function testFlagSetFailure() public {
failed = true;
}
}
"#,
);
// -j1: presumably to keep the two suites' output order deterministic — confirm.
cmd.args(["test", "-j1"]).assert_failure().stdout_eq(str![[r#"
...
Ran 2 tests for test/LegacyAssertions.t.sol:LegacyAssertionsTest
[PASS] testFlagNotSetSuccess() ([GAS])
[FAIL] testFlagSetFailure() ([GAS])
Suite result: FAILED. 1 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test for test/LegacyAssertions.t.sol:NoAssertionsRevertTest
[FAIL: assertion failed: 1 != 2] testMultipleAssertFailures() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 2 test suites [ELAPSED]: 1 tests passed, 2 failed, 0 skipped (3 total tests)
Failing tests:
Encountered 1 failing test in test/LegacyAssertions.t.sol:LegacyAssertionsTest
[FAIL] testFlagSetFailure() ([GAS])
Encountered 1 failing test in test/LegacyAssertions.t.sol:NoAssertionsRevertTest
[FAIL: assertion failed: 1 != 2] testMultipleAssertFailures() ([GAS])
Encountered a total of 2 failing tests, 1 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
// Sending value from a pranked sender must fail when the payment cannot be made:
// address(1) is pranked as msg.sender and the 1-wei call reverts (presumably an
// insufficient-balance failure — surfaced as a plain EvmError: Revert; confirm).
forgetest_init!(payment_failure, |prj, cmd| {
prj.add_test(
"PaymentFailure.t.sol",
r#"
import "forge-std/Test.sol";
contract Payable {
function pay() public payable {}
}
contract PaymentFailureTest is Test {
function testCantPay() public {
Payable target = new Payable();
vm.prank(address(1));
target.pay{value: 1}();
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/PaymentFailure.t.sol:PaymentFailureTest
[FAIL: EvmError: Revert] testCantPay() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/PaymentFailure.t.sol:PaymentFailureTest
[FAIL: EvmError: Revert] testCantPay() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/repros.rs | crates/forge/tests/cli/test_cmd/repros.rs | //! Regression tests for specific GitHub issues
use foundry_test_utils::str;
// https://github.com/foundry-rs/foundry/issues/3055
// With `assertions_revert = false`, a failed assertion marks the test failed
// without reverting, so code after the assert (vm.revertToState) still runs —
// and vm.expectRevert is NOT satisfied by the recorded failure: test_snapshot3
// fails with "next call did not revert as expected".
forgetest_init!(issue_3055, |prj, cmd| {
prj.add_test(
"Issue3055.t.sol",
r#"
import "forge-std/Test.sol";
/// forge-config: default.assertions_revert = false
contract Issue3055Test is Test {
function test_snapshot() external {
uint256 snapshotId = vm.snapshotState();
assertEq(uint256(0), uint256(1));
vm.revertToState(snapshotId);
}
function test_snapshot2() public {
uint256 snapshotId = vm.snapshotState();
assertTrue(false);
vm.revertToState(snapshotId);
assertTrue(true);
}
function test_snapshot3(uint256) public {
vm.expectRevert();
// Call exposed_snapshot3() using this to perform an external call,
// so we can properly test for reverts.
this.exposed_snapshot3();
}
function exposed_snapshot3() public {
uint256 snapshotId = vm.snapshotState();
assertTrue(false);
vm.revertToState(snapshotId);
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 3 tests for test/Issue3055.t.sol:Issue3055Test
[FAIL] test_snapshot() ([GAS])
[FAIL] test_snapshot2() ([GAS])
[FAIL: next call did not revert as expected; counterexample: calldata=[..] args=[..] test_snapshot3(uint256) (runs: 0, [AVG_GAS])
Suite result: FAILED. 0 passed; 3 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 3 failed, 0 skipped (3 total tests)
Failing tests:
Encountered 3 failing tests in test/Issue3055.t.sol:Issue3055Test
[FAIL] test_snapshot() ([GAS])
[FAIL] test_snapshot2() ([GAS])
[FAIL: next call did not revert as expected; counterexample: calldata=[..] args=[..] test_snapshot3(uint256) (runs: 0, [AVG_GAS])
Encountered a total of 3 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 3 failed tests
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/3189
// An assertEq failure inside a *nested* non-test contract must propagate to the
// test with its full message: foo() returns arg + 2 (125) but asserts arg + 1
// (124), so the failure reads "Invariant failed: 125 != 124".
forgetest_init!(issue_3189, |prj, cmd| {
prj.add_test(
"Issue3189.t.sol",
r#"
import "forge-std/Test.sol";
contract MyContract {
function foo(uint256 arg) public returns (uint256) {
return arg + 2;
}
}
contract MyContractUser is Test {
MyContract immutable myContract;
constructor() {
myContract = new MyContract();
}
function foo(uint256 arg) public returns (uint256 ret) {
ret = myContract.foo(arg);
assertEq(ret, arg + 1, "Invariant failed");
}
}
contract Issue3189Test is Test {
function testFoo() public {
MyContractUser user = new MyContractUser();
user.foo(123);
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
...
Ran 1 test for test/Issue3189.t.sol:Issue3189Test
[FAIL: Invariant failed: 125 != 124] testFoo() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/Issue3189.t.sol:Issue3189Test
[FAIL: Invariant failed: 125 != 124] testFoo() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/3596
// A revert thrown two calls deep (doStuff -> doRevert) after vm.deal + a large
// ETH transfer under startPrank must still surface as a test failure
// (EvmError: Revert) rather than being swallowed.
forgetest_init!(issue_3596, |prj, cmd| {
prj.add_test(
"Issue3596.t.sol",
r#"
import "forge-std/Test.sol";
contract Issue3596Test is Test {
function testDealTransfer() public {
address addr = vm.addr(1337);
vm.startPrank(addr);
vm.deal(addr, 20000001 ether);
payable(address(this)).transfer(20000000 ether);
Nested nested = new Nested();
nested.doStuff();
vm.stopPrank();
}
}
contract Nested {
function doStuff() public {
doRevert();
}
function doRevert() public {
revert("This fails");
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
...
Ran 1 test for test/Issue3596.t.sol:Issue3596Test
[FAIL: EvmError: Revert] testDealTransfer() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/Issue3596.t.sol:Issue3596Test
[FAIL: EvmError: Revert] testDealTransfer() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/2851
// With a pinned fuzz seed ("111"), the invariant runner must find the backdoor
// input (newNumber - 1 == 6912213124124531) that zeroes `number`, failing
// invariantNotZero with "assertion failed: 0 != 1".
forgetest_init!(issue_2851, |prj, cmd| {
prj.add_test(
"Issue2851.t.sol",
r#"
import "forge-std/Test.sol";
contract Backdoor {
uint256 public number = 1;
function backdoor(uint256 newNumber) public payable {
uint256 x = newNumber - 1;
if (x == 6912213124124531) {
number = 0;
}
}
}
contract Issue2851Test is Test {
Backdoor back;
function setUp() public {
back = new Backdoor();
}
/// forge-config: default.fuzz.seed = "111"
function invariantNotZero() public {
assertEq(back.number(), 1);
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
...
Ran 1 test for test/Issue2851.t.sol:Issue2851Test
[FAIL: assertion failed: 0 != 1]
...
invariantNotZero() ([..])
...
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
...
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/6170
// The emitter declares BOTH event params indexed, while the test's expected
// event indexes only `a` — the topic layout therefore differs, and expectEmit
// must report "log != expected log" instead of matching.
forgetest_init!(issue_6170, |prj, cmd| {
prj.add_test(
"Issue6170.t.sol",
r#"
import "forge-std/Test.sol";
contract Emitter {
event Values(uint256 indexed a, uint256 indexed b);
function plsEmit(uint256 a, uint256 b) external {
emit Values(a, b);
}
}
contract Issue6170Test is Test {
event Values(uint256 indexed a, uint256 b);
Emitter e = new Emitter();
function test() public {
vm.expectEmit(true, true, false, true);
emit Values(69, 420);
e.plsEmit(69, 420);
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Issue6170.t.sol:Issue6170Test
[FAIL: log != expected log] test() ([GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/Issue6170.t.sol:Issue6170Test
[FAIL: log != expected log] test() ([GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/6355
// setUp snapshots state BEFORE deploying Target; one test reverts to that
// snapshot after a failed assert. This must not leak into the other tests:
// test_shouldPass still passes and both failing variants report "3 != 2".
forgetest_init!(issue_6355, |prj, cmd| {
prj.add_test(
"Issue6355.t.sol",
r#"
import "forge-std/Test.sol";
contract Issue6355Test is Test {
uint256 snapshotId;
Target targ;
function setUp() public {
snapshotId = vm.snapshotState();
targ = new Target();
}
// this non-deterministically fails sometimes and passes sometimes
function test_shouldPass() public {
assertEq(2, targ.num());
}
// always fails
function test_shouldFailWithRevertToState() public {
assertEq(3, targ.num());
vm.revertToState(snapshotId);
}
// always fails
function test_shouldFail() public {
assertEq(3, targ.num());
}
}
contract Target {
function num() public pure returns (uint256) {
return 2;
}
}
"#,
);
cmd.arg("test").assert_failure().stdout_eq(str![[r#"
...
Ran 3 tests for test/Issue6355.t.sol:Issue6355Test
[FAIL: assertion failed: 3 != 2] test_shouldFail() ([GAS])
[FAIL: assertion failed: 3 != 2] test_shouldFailWithRevertToState() ([GAS])
[PASS] test_shouldPass() ([GAS])
Suite result: FAILED. 1 passed; 2 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 2 failed, 0 skipped (3 total tests)
Failing tests:
Encountered 2 failing tests in test/Issue6355.t.sol:Issue6355Test
[FAIL: assertion failed: 3 != 2] test_shouldFail() ([GAS])
[FAIL: assertion failed: 3 != 2] test_shouldFailWithRevertToState() ([GAS])
Encountered a total of 2 failing tests, 1 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/3347
// Repro: events with UNNAMED parameters must still decode in traces; the trace
// snapshot pins the `log2(: 1, : 2)` rendering (empty names, decoded values).
forgetest_init!(issue_3347, |prj, cmd| {
prj.add_test(
"Issue3347.t.sol",
r#"
import "forge-std/Test.sol";
contract Issue3347Test is Test {
event log2(uint256, uint256);
function test() public {
emit log2(1, 2);
}
}
"#,
);
// -vvvv so the trace (and thus the event decoding) is printed.
cmd.args(["test", "-vvvv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Issue3347.t.sol:Issue3347Test
[PASS] test() ([GAS])
Traces:
[..] Issue3347Test::test()
├─ emit log2(: 1, : 2)
└─ ← [Stop]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/6501
// Make sure we decode Hardhat-style `console.log`s correctly, in both logs and traces.
// Pins both the `Logs:` section and the `console::log` trace call decoding for
// string, uint, and mixed-argument overloads.
forgetest_init!(issue_6501, |prj, cmd| {
prj.add_test(
"Issue6501.t.sol",
r#"
import "forge-std/Test.sol";
contract Issue6501Test is Test {
function test_hhLogs() public {
console.log("a");
console.log(uint256(1));
console.log("b", uint256(2));
}
}
"#,
);
cmd.args(["test", "-vvvv"]).assert_success().stdout_eq(str![[r#"
...
Ran 1 test for test/Issue6501.t.sol:Issue6501Test
[PASS] test_hhLogs() ([GAS])
Logs:
a
1
b 2
Traces:
[..] Issue6501Test::test_hhLogs()
├─ [0] console::log("a") [staticcall]
│ └─ ← [Stop]
├─ [0] console::log(1) [staticcall]
│ └─ ← [Stop]
├─ [0] console::log("b", 2) [staticcall]
│ └─ ← [Stop]
└─ ← [Stop]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/8383
// Repro: gas metering with `vm.pauseGasMetering`/`vm.resumeGasMetering` around
// calls into a low-level assembly P256 verifier must report a stable gas value.
// `with_no_redact` disables the usual `[GAS]` redaction so the exact
// `gas: 3139` figure is pinned — a metering regression changes this number.
forgetest_init!(issue_8383, |prj, cmd| {
// The fixture was reported against an optimized build; keep optimizer settings fixed.
prj.update_config(|config| {
config.optimizer = Some(true);
config.optimizer_runs = Some(200);
});
// NOTE(review): the embedded Solidity/Yul below is a byte-exact repro fixture
// (see the inline "POC Note" comments); do not reformat it.
prj.add_test(
"Issue8383.t.sol",
r#"
import "forge-std/Test.sol";
contract Issue8383Test is Test {
address internal _verifier;
mapping(bytes32 => bool) internal _vectorTested;
mapping(bytes32 => bool) internal _vectorResult;
function setUp() public {
_verifier = address(new P256Verifier());
}
function _verifyViaVerifier(bytes32 hash, uint256 r, uint256 s, uint256 x, uint256 y) internal returns (bool) {
return _verifyViaVerifier(hash, bytes32(r), bytes32(s), bytes32(x), bytes32(y));
}
function _verifyViaVerifier(bytes32 hash, bytes32 r, bytes32 s, bytes32 x, bytes32 y) internal returns (bool) {
bytes memory payload = abi.encode(hash, r, s, x, y);
if (uint256(y) & 0xff == 0) {
bytes memory truncatedPayload = abi.encodePacked(hash, r, s, x, bytes31(y));
_verifierCall(truncatedPayload);
}
if (uint256(keccak256(abi.encode(payload, "1"))) & 0x1f == 0) {
uint256 r = uint256(keccak256(abi.encode(payload, "2")));
payload = abi.encodePacked(payload, new bytes(r & 0xff));
}
bytes32 payloadHash = keccak256(payload);
if (_vectorTested[payloadHash]) return _vectorResult[payloadHash];
_vectorTested[payloadHash] = true;
return (_vectorResult[payloadHash] = _verifierCall(payload));
}
function _verifierCall(bytes memory payload) internal returns (bool) {
(bool success, bytes memory result) = _verifier.call(payload);
return abi.decode(result, (bool));
}
function testP256VerifyOutOfBounds() public {
vm.pauseGasMetering();
uint256 p = 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF;
_verifyViaVerifier(bytes32(0), 1, 1, 1, 1);
_verifyViaVerifier(bytes32(0), 1, 1, 0, 1);
_verifyViaVerifier(bytes32(0), 1, 1, 1, 0);
_verifyViaVerifier(bytes32(0), 1, 1, 1, p);
_verifyViaVerifier(bytes32(0), 1, 1, p, 1);
_verifyViaVerifier(bytes32(0), 1, 1, p - 1, 1);
vm.resumeGasMetering();
}
}
contract P256Verifier {
uint256 private constant GX = 0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296;
uint256 private constant GY = 0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5;
uint256 private constant P = 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF; // `A = P - 3`.
uint256 private constant N = 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551;
uint256 private constant B = 0x5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B;
fallback() external payable {
assembly {
// For this implementation, we will use the memory without caring about
// the free memory pointer or zero pointer.
// The slots `0x00`, `0x20`, `0x40`, `0x60`, will not be accessed for the `Points[16]` array,
// and can be used for storing other variables.
mstore(0x40, P) // Set `0x40` to `P`.
function jAdd(x1, y1, z1, x2, y2, z2) -> x3, y3, z3 {
if iszero(z1) {
x3 := x2
y3 := y2
z3 := z2
leave
}
if iszero(z2) {
x3 := x1
y3 := y1
z3 := z1
leave
}
let p := mload(0x40)
let zz1 := mulmod(z1, z1, p)
let zz2 := mulmod(z2, z2, p)
let u1 := mulmod(x1, zz2, p)
let u2 := mulmod(x2, zz1, p)
let s1 := mulmod(y1, mulmod(zz2, z2, p), p)
let s2 := mulmod(y2, mulmod(zz1, z1, p), p)
let h := addmod(u2, sub(p, u1), p)
let hh := mulmod(h, h, p)
let hhh := mulmod(h, hh, p)
let r := addmod(s2, sub(p, s1), p)
x3 := addmod(addmod(mulmod(r, r, p), sub(p, hhh), p), sub(p, mulmod(2, mulmod(u1, hh, p), p)), p)
y3 := addmod(mulmod(r, addmod(mulmod(u1, hh, p), sub(p, x3), p), p), sub(p, mulmod(s1, hhh, p)), p)
z3 := mulmod(h, mulmod(z1, z2, p), p)
}
function setJPoint(i, x, y, z) {
// We will multiply by `0x80` (i.e. `shl(7, i)`) instead
// since the memory expansion costs are cheaper than doing `mul(0x60, i)`.
// Also help combine the lookup expression for `u1` and `u2` in `jMultShamir`.
i := shl(7, i)
mstore(i, x)
mstore(add(i, returndatasize()), y)
mstore(add(i, 0x40), z)
}
function setJPointDouble(i, j) {
j := shl(7, j)
let x := mload(j)
let y := mload(add(j, returndatasize()))
let z := mload(add(j, 0x40))
let p := mload(0x40)
let yy := mulmod(y, y, p)
let zz := mulmod(z, z, p)
let s := mulmod(4, mulmod(x, yy, p), p)
let m := addmod(mulmod(3, mulmod(x, x, p), p), mulmod(mload(returndatasize()), mulmod(zz, zz, p), p), p)
let x2 := addmod(mulmod(m, m, p), sub(p, mulmod(2, s, p)), p)
let y2 := addmod(mulmod(m, addmod(s, sub(p, x2), p), p), sub(p, mulmod(8, mulmod(yy, yy, p), p)), p)
let z2 := mulmod(2, mulmod(y, z, p), p)
setJPoint(i, x2, y2, z2)
}
function setJPointAdd(i, j, k) {
j := shl(7, j)
k := shl(7, k)
let x, y, z :=
jAdd(
mload(j),
mload(add(j, returndatasize())),
mload(add(j, 0x40)),
mload(k),
mload(add(k, returndatasize())),
mload(add(k, 0x40))
)
setJPoint(i, x, y, z)
}
let r := calldataload(0x20)
let n := N
{
let s := calldataload(0x40)
if lt(shr(1, n), s) { s := sub(n, s) }
// Perform `modExp(s, N - 2, N)`.
// After which, we can abuse `returndatasize()` to get `0x20`.
mstore(0x800, 0x20)
mstore(0x820, 0x20)
mstore(0x840, 0x20)
mstore(0x860, s)
mstore(0x880, sub(n, 2))
mstore(0x8a0, n)
let p := mload(0x40)
mstore(0x20, xor(3, p)) // Set `0x20` to `A`.
let Qx := calldataload(0x60)
let Qy := calldataload(0x80)
if iszero(
and( // The arguments of `and` are evaluated last to first.
and(
and(gt(calldatasize(), 0x9f), and(lt(iszero(r), lt(r, n)), lt(iszero(s), lt(s, n)))),
eq(
mulmod(Qy, Qy, p),
addmod(mulmod(addmod(mulmod(Qx, Qx, p), mload(returndatasize()), p), Qx, p), B, p)
)
),
and(
// We need to check that the `returndatasize` is indeed 32,
// so that we can return false if the chain does not have the modexp precompile.
eq(returndatasize(), 0x20),
staticcall(gas(), 0x05, 0x800, 0xc0, returndatasize(), 0x20)
)
)
) {
// POC Note:
// Changing this to `return(0x80, 0x20)` fixes it.
// Alternatively, adding `if mload(0x8c0) { invalid() }` just before the return also fixes it.
return(0x8c0, 0x20)
}
setJPoint(0x01, Qx, Qy, 1)
setJPoint(0x04, GX, GY, 1)
setJPointDouble(0x02, 0x01)
setJPointDouble(0x08, 0x04)
setJPointAdd(0x03, 0x01, 0x02)
setJPointAdd(0x05, 0x01, 0x04)
setJPointAdd(0x06, 0x02, 0x04)
setJPointAdd(0x07, 0x03, 0x04)
setJPointAdd(0x09, 0x01, 0x08)
setJPointAdd(0x0a, 0x02, 0x08)
setJPointAdd(0x0b, 0x03, 0x08)
setJPointAdd(0x0c, 0x04, 0x08)
setJPointAdd(0x0d, 0x01, 0x0c)
setJPointAdd(0x0e, 0x02, 0x0c)
setJPointAdd(0x0f, 0x03, 0x0c)
}
let i := 0
let u1 := mulmod(calldataload(0x00), mload(0x00), n)
let u2 := mulmod(r, mload(0x00), n)
let y := 0
let z := 0
let x := 0
let p := mload(0x40)
for {} 1 {} {
if z {
let yy := mulmod(y, y, p)
let zz := mulmod(z, z, p)
let s := mulmod(4, mulmod(x, yy, p), p)
let m :=
addmod(mulmod(3, mulmod(x, x, p), p), mulmod(mload(returndatasize()), mulmod(zz, zz, p), p), p)
let x2 := addmod(mulmod(m, m, p), sub(p, mulmod(2, s, p)), p)
let y2 := addmod(mulmod(m, addmod(s, sub(p, x2), p), p), sub(p, mulmod(8, mulmod(yy, yy, p), p)), p)
let z2 := mulmod(2, mulmod(y, z, p), p)
yy := mulmod(y2, y2, p)
zz := mulmod(z2, z2, p)
s := mulmod(4, mulmod(x2, yy, p), p)
m := addmod(
mulmod(3, mulmod(x2, x2, p), p),
mulmod(mload(returndatasize()), mulmod(zz, zz, p), p),
p
)
x := addmod(mulmod(m, m, p), sub(p, mulmod(2, s, p)), p)
z := mulmod(2, mulmod(y2, z2, p), p)
y := addmod(mulmod(m, addmod(s, sub(p, x), p), p), sub(p, mulmod(8, mulmod(yy, yy, p), p)), p)
}
for { let o := or(and(shr(245, shl(i, u1)), 0x600), and(shr(247, shl(i, u2)), 0x180)) } o {} {
let z2 := mload(add(o, 0x40))
if iszero(z2) { break }
if iszero(z) {
x := mload(o)
y := mload(add(o, returndatasize()))
z := z2
break
}
let zz1 := mulmod(z, z, p)
let zz2 := mulmod(z2, z2, p)
let u1_ := mulmod(x, zz2, p)
let s1 := mulmod(y, mulmod(zz2, z2, p), p)
let h := addmod(mulmod(mload(o), zz1, p), sub(p, u1_), p)
let hh := mulmod(h, h, p)
let hhh := mulmod(h, hh, p)
let r_ := addmod(mulmod(mload(add(o, returndatasize())), mulmod(zz1, z, p), p), sub(p, s1), p)
x := addmod(addmod(mulmod(r_, r_, p), sub(p, hhh), p), sub(p, mulmod(2, mulmod(u1_, hh, p), p)), p)
y := addmod(mulmod(r_, addmod(mulmod(u1_, hh, p), sub(p, x), p), p), sub(p, mulmod(s1, hhh, p)), p)
z := mulmod(h, mulmod(z, z2, p), p)
break
}
// Just unroll twice. Fully unrolling will only save around 1% to 2% gas, but make the
// bytecode very bloated, which may incur more runtime costs after Verkle.
// See: https://notes.ethereum.org/%40vbuterin/verkle_tree_eip
// It's very unlikely that Verkle will come before the P256 precompile. But who knows?
if z {
let yy := mulmod(y, y, p)
let zz := mulmod(z, z, p)
let s := mulmod(4, mulmod(x, yy, p), p)
let m :=
addmod(mulmod(3, mulmod(x, x, p), p), mulmod(mload(returndatasize()), mulmod(zz, zz, p), p), p)
let x2 := addmod(mulmod(m, m, p), sub(p, mulmod(2, s, p)), p)
let y2 := addmod(mulmod(m, addmod(s, sub(p, x2), p), p), sub(p, mulmod(8, mulmod(yy, yy, p), p)), p)
let z2 := mulmod(2, mulmod(y, z, p), p)
yy := mulmod(y2, y2, p)
zz := mulmod(z2, z2, p)
s := mulmod(4, mulmod(x2, yy, p), p)
m := addmod(
mulmod(3, mulmod(x2, x2, p), p),
mulmod(mload(returndatasize()), mulmod(zz, zz, p), p),
p
)
x := addmod(mulmod(m, m, p), sub(p, mulmod(2, s, p)), p)
z := mulmod(2, mulmod(y2, z2, p), p)
y := addmod(mulmod(m, addmod(s, sub(p, x), p), p), sub(p, mulmod(8, mulmod(yy, yy, p), p)), p)
}
for { let o := or(and(shr(243, shl(i, u1)), 0x600), and(shr(245, shl(i, u2)), 0x180)) } o {} {
let z2 := mload(add(o, 0x40))
if iszero(z2) { break }
if iszero(z) {
x := mload(o)
y := mload(add(o, returndatasize()))
z := z2
break
}
let zz1 := mulmod(z, z, p)
let zz2 := mulmod(z2, z2, p)
let u1_ := mulmod(x, zz2, p)
let s1 := mulmod(y, mulmod(zz2, z2, p), p)
let h := addmod(mulmod(mload(o), zz1, p), sub(p, u1_), p)
let hh := mulmod(h, h, p)
let hhh := mulmod(h, hh, p)
let r_ := addmod(mulmod(mload(add(o, returndatasize())), mulmod(zz1, z, p), p), sub(p, s1), p)
x := addmod(addmod(mulmod(r_, r_, p), sub(p, hhh), p), sub(p, mulmod(2, mulmod(u1_, hh, p), p)), p)
y := addmod(mulmod(r_, addmod(mulmod(u1_, hh, p), sub(p, x), p), p), sub(p, mulmod(s1, hhh, p)), p)
z := mulmod(h, mulmod(z, z2, p), p)
break
}
i := add(i, 4)
if eq(i, 256) { break }
}
if iszero(z) {
mstore(returndatasize(), iszero(r))
return(returndatasize(), 0x20)
}
// Perform `modExp(z, P - 2, P)`.
// `0x800`, `0x820, `0x840` are still set to `0x20`.
mstore(0x860, z)
mstore(0x880, sub(p, 2))
mstore(0x8a0, p)
mstore(
returndatasize(),
and( // The arguments of `and` are evaluated last to first.
eq(mod(mulmod(x, mulmod(mload(returndatasize()), mload(returndatasize()), p), p), n), r),
staticcall(gas(), 0x05, 0x800, 0xc0, returndatasize(), returndatasize())
)
)
return(returndatasize(), returndatasize())
}
}
}
"#,
);
// `with_no_redact` keeps the literal gas value in the snapshot so a metering
// regression changes the `gas: 3139` line and fails this test.
cmd.arg("test").with_no_redact().assert_success().stdout_eq(str![[r#"
...
Ran 1 test for test/Issue8383.t.sol:Issue8383Test
[PASS] testP256VerifyOutOfBounds() (gas: 3139)
...
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/9272
// Repro: with `allow_paths` pointing outside the project root, a missing import
// must still surface the proper "Source not found" compiler error (with the
// searched locations) instead of some other failure mode.
forgetest_init!(issue_9272, |prj, cmd| {
// Allow imports from the parent directory so the `../Missing.sol` path is resolvable in principle.
prj.update_config(|config| {
config.allow_paths.push("..".into());
});
prj.add_source(
"Contract.sol",
r#"
pragma solidity ^0.8.0;
import '../Missing.sol';
contract Contract {}
"#,
);
// We expect a compilation error due to the missing import
cmd.arg("build").assert_failure().stderr_eq(str![[r#"
Error: Compiler run failed:
Error (6275): Source "Missing.sol" not found: File not found. Searched the following locations: [..]
ParserError: Source "Missing.sol" not found: File not found. Searched the following locations: [..]
[FILE]:4:1:
|
4 | import '../Missing.sol';
| ^^^^^^^^^^^^^^^^^^^^^^^^
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
//! Contains various tests for `forge test`.
use alloy_primitives::U256;
use anvil::{NodeConfig, spawn};
use foundry_test_utils::{
TestCommand,
rpc::{self, rpc_endpoints},
str,
util::{OTHER_SOLC_VERSION, OutputExt, SOLC_VERSION},
};
use similar_asserts::assert_eq;
use std::{io::Write, path::PathBuf, str::FromStr};
// Submodules grouping the `forge test` CLI tests by area.
mod core;
mod fuzz;
mod invariant;
mod logs;
mod repros;
mod spec;
mod table;
mod trace;
// Run `forge test` on `/testdata`.
forgetest!(testdata, |_prj, cmd| {
let testdata =
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../testdata").canonicalize().unwrap();
cmd.current_dir(&testdata);
// Write every configured RPC endpoint into `testdata/.env` as `RPC_<NAME>=<url>`
// so the Solidity tests can resolve their fork URLs via env vars.
let mut dotenv = std::fs::File::create(testdata.join(".env")).unwrap();
for (name, endpoint) in rpc_endpoints().iter() {
if let Some(url) = endpoint.endpoint.as_url() {
let key = format!("RPC_{}", name.to_uppercase());
// cmd.env(&key, url);
writeln!(dotenv, "{key}={url}").unwrap();
}
}
// Close the file so the contents are flushed before `forge` reads it.
drop(dotenv);
let mut args = vec!["test"];
if cfg!(feature = "isolate-by-default") {
// Skip contracts known to be incompatible with isolation mode.
args.push(
"--nmc=(LastCallGasDefaultTest|MockFunctionTest|WithSeed|StateDiff|GetStorageSlotsTest|RecordAccount)",
);
}
let orig_assert = cmd.args(args).assert();
if orig_assert.get_output().status.success() {
return;
}
// On failure, log the tail of the output (from the last suite summary) for debugging.
let stdout = orig_assert.get_output().stdout_lossy();
if let Some(i) = stdout.rfind("Suite result:") {
test_debug!("--- short stdout ---\n\n{}\n\n---", &stdout[i..]);
}
// Retry failed tests.
// Fork tests depend on live RPCs and can be flaky; rerun only the failures.
cmd.args(["--rerun"]);
let n = 3;
for i in 1..=n {
test_debug!("retrying failed tests... ({i}/{n})");
let assert = cmd.assert();
if assert.get_output().status.success() {
return;
}
}
// All retries exhausted: assert on the ORIGINAL run so its full output is reported.
orig_assert.success();
});
// tests that test filters are handled correctly
// Round-trips explicitly set filter patterns through the config: each `Some`
// value must come back unchanged and each `None` must stay `None`.
forgetest!(can_set_filter_values, |prj, cmd| {
let patt = regex::Regex::new("test*").unwrap();
let glob = globset::Glob::from_str("foo/bar/baz*").unwrap();
// explicitly set patterns
prj.update_config(|config| {
config.test_pattern = Some(patt.clone().into());
config.test_pattern_inverse = None;
config.contract_pattern = Some(patt.clone().into());
config.contract_pattern_inverse = None;
config.path_pattern = Some(glob.clone());
config.path_pattern_inverse = None;
config.coverage_pattern_inverse = None;
});
let config = cmd.config();
assert_eq!(config.test_pattern.unwrap().as_str(), patt.as_str());
assert_eq!(config.test_pattern_inverse, None);
assert_eq!(config.contract_pattern.unwrap().as_str(), patt.as_str());
assert_eq!(config.contract_pattern_inverse, None);
assert_eq!(config.path_pattern.unwrap(), glob);
assert_eq!(config.path_pattern_inverse, None);
assert_eq!(config.coverage_pattern_inverse, None);
});
/// Applies a dummy set of match/no-match filters (test, contract, and path)
/// to `cmd`, used by the filter-related tests below.
fn dummy_test_filter(cmd: &mut TestCommand) {
    cmd.args([
        "test",
        "--match-test",
        "testA.*",
        "--no-match-test",
        "testB.*",
        "--match-contract",
        "TestC.*",
        "--no-match-contract",
        "TestD.*",
        "--match-path",
        "*TestE*",
        "--no-match-path",
        "*TestF*",
    ]);
}
// tests that a warning is displayed when there are no tests in project, regardless of filters
forgetest!(warn_no_tests, |prj, cmd| {
// Must add at least one source to not fail earlier.
prj.add_source(
"dummy",
r"
contract Dummy {}
",
);
// No filters: the "No tests found" notice must appear.
cmd.arg("test").assert_success().stdout_eq(str![[r#"
...
No tests found in project! Forge looks for functions that start with `test`
"#]]);
cmd.forge_fuse();
// With filters applied, the same notice must still appear.
dummy_test_filter(&mut cmd);
cmd.assert_success().stdout_eq(str![[r#"
...
No tests found in project! Forge looks for functions that start with `test`
"#]]);
});
// tests that a warning is displayed if there are tests but none match a non-empty filter
// Also pins the "Did you mean `test1`?" suggestion for the near-miss test name.
forgetest!(suggest_when_no_tests_match, |prj, cmd| {
prj.add_source(
"TestE.t.sol",
r"
contract TestC {
function test1() public {
}
}
",
);
dummy_test_filter(&mut cmd);
cmd.assert_success().stderr_eq(str![[r#"
Warning: no tests match the provided pattern:
match-test: `testA.*`
no-match-test: `testB.*`
match-contract: `TestC.*`
no-match-contract: `TestD.*`
match-path: `*TestE*`
no-match-path: `*TestF*`
Did you mean `test1`?
"#]]);
});
// tests that direct (relative) import paths are handled correctly and that
// fixed-size array parameters (`uint64[2] calldata`) can be fuzzed.
forgetest!(can_fuzz_array_params, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"ATest.t.sol",
r#"
import "./test.sol";
contract ATest is DSTest {
function testArray(uint64[2] calldata) external {
assertTrue(true);
}
}
"#,
);
cmd.arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/ATest.t.sol:ATest
[PASS] testArray(uint64[2]) (runs: 256, [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// tests that `bytecode_hash` will be sanitized
// Uses solc 0.5.17, which predates the `bytecode_hash` setting (introduced in 0.6.0),
// so the setting must be stripped before compiling.
forgetest!(can_test_pre_bytecode_hash, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"ATest.t.sol",
r#"
// pre bytecode hash version, was introduced in 0.6.0
pragma solidity 0.5.17;
import "./test.sol";
contract ATest is DSTest {
function testArray(uint64[2] calldata) external {
assertTrue(true);
}
}
"#,
);
cmd.arg("test").assert_success().stdout_eq(str![[r#"
...
Compiler run successful!
Ran 1 test for src/ATest.t.sol:ATest
[PASS] testArray(uint64[2]) (runs: 256, [AVG_GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// tests that using the --match-path option only runs files matching the path
// Two test files exist; only `ATest` must run — if `FailTest` ran it would fail
// the suite, so the pinned "1 passed" summary proves the filter worked.
forgetest!(can_test_with_match_path, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"ATest.t.sol",
r#"
import "./test.sol";
contract ATest is DSTest {
function testPass() external {
assertTrue(true);
}
}
"#,
);
prj.add_source(
"FailTest.t.sol",
r#"
import "./test.sol";
contract FailTest is DSTest {
function testNothing() external {
assertTrue(false);
}
}
"#,
);
cmd.args(["test", "--match-path", "*src/ATest.t.sol"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/ATest.t.sol:ATest
[PASS] testPass() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// tests that using the --match-path option works with absolute paths
// Same setup as `can_test_with_match_path`, but the filter is the file's full
// absolute path instead of a glob.
forgetest!(can_test_with_match_path_absolute, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"ATest.t.sol",
r#"
import "./test.sol";
contract ATest is DSTest {
function testPass() external {
assertTrue(true);
}
}
"#,
);
prj.add_source(
"FailTest.t.sol",
r#"
import "./test.sol";
contract FailTest is DSTest {
function testNothing() external {
assertTrue(false);
}
}
"#,
);
let test_path = prj.root().join("src/ATest.t.sol");
let test_path = test_path.to_string_lossy();
cmd.args(["test", "--match-path", test_path.as_ref()]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/ATest.t.sol:ATest
[PASS] testPass() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// Solidity fixture shared by the JSON-output tests below: a contract with a
// setter plus a test that exercises it and emits one `console.log`.
const SIMPLE_CONTRACT: &str = r#"
import "./test.sol";
import "./console.sol";
contract SimpleContract {
uint256 public num;
function setValues(uint256 _num) public {
num = _num;
}
}
contract SimpleContractTest is DSTest {
function test() public {
SimpleContract c = new SimpleContract();
c.setValues(100);
console.logUint(100);
}
}
"#;
// Gas/trace output differs under isolation, so only check the fixture without it.
#[cfg(not(feature = "isolate-by-default"))]
forgetest!(can_run_test_with_json_output_verbose, |prj, cmd| {
prj.insert_ds_test();
prj.insert_console();
prj.add_source("Simple.t.sol", SIMPLE_CONTRACT);
// Assert that with verbose output the json output includes the traces
cmd.args(["test", "-vvvvv", "--json"])
.assert_success()
.stdout_eq(file!["../../fixtures/SimpleContractTestVerbose.json": Json]);
});
// Counterpart to the verbose test above: compares against the non-verbose JSON fixture.
forgetest!(can_run_test_with_json_output_non_verbose, |prj, cmd| {
prj.insert_ds_test();
prj.insert_console();
prj.add_source("Simple.t.sol", SIMPLE_CONTRACT);
// Assert that without verbose output the json output does not include the traces
cmd.args(["test", "--json"])
.assert_success()
.stdout_eq(file!["../../fixtures/SimpleContractTestNonVerbose.json": Json]);
});
// tests that `forge test` will pick up tests that are stored in the `test = <path>` config value
forgetest!(can_run_test_in_custom_test_folder, |prj, cmd| {
prj.insert_ds_test();
// explicitly set the test folder
prj.update_config(|config| config.test = "nested/forge-tests".into());
// Sanity-check the config round-trips the custom folder.
let config = cmd.config();
assert_eq!(config.test, PathBuf::from("nested/forge-tests"));
prj.add_source(
"nested/forge-tests/MyTest.t.sol",
r#"
import "../../test.sol";
contract MyTest is DSTest {
function testTrue() public {
assertTrue(true);
}
}
"#,
);
cmd.arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/nested/forge-tests/MyTest.t.sol:MyTest
[PASS] testTrue() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// checks that forge test repeatedly produces the same output
#[cfg(not(feature = "isolate-by-default"))]
forgetest_init!(can_test_repeatedly, |prj, cmd| {
prj.initialize_default_contracts();
prj.clear();
// First run compiles; output is pinned.
cmd.arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 2 tests for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS])
[PASS] test_Increment() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
// Subsequent runs must skip compilation and reproduce identical results.
for _ in 0..5 {
cmd.assert_success().stdout_eq(str![[r#"
No files changed, compilation skipped
Ran 2 tests for test/Counter.t.sol:CounterTest
[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS])
[PASS] test_Increment() ([GAS])
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
}
});
// tests that `forge test` will run a test only once after changing the version
// The contract uses `pragma solidity *` so it compiles under both pinned versions.
forgetest!(runs_tests_exactly_once_with_changed_versions, |prj, cmd| {
prj.insert_ds_test();
prj.add_source(
"Contract.t.sol",
r#"
pragma solidity *;
import "./test.sol";
contract ContractTest is DSTest {
function setUp() public {}
function testExample() public {
assertTrue(true);
}
}
"#,
);
// pin version
prj.update_config(|config| {
config.solc = Some(SOLC_VERSION.into());
});
// "Ran 1 test" pins that the test runs exactly once for this version.
cmd.arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/Contract.t.sol:ContractTest
[PASS] testExample() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// pin version
// Switch to a different solc version; the test must again run exactly once.
prj.update_config(|config| {
config.solc = Some(OTHER_SOLC_VERSION.into());
});
cmd.forge_fuse().arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for src/Contract.t.sol:ContractTest
[PASS] testExample() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// tests that libraries are handled correctly in multiforking mode
// The constructor calls into an external library, so deployment only succeeds
// if the library is correctly deployed/linked on the selected fork.
#[cfg(not(feature = "isolate-by-default"))]
forgetest_init!(can_use_libs_in_multi_fork, |prj, cmd| {
prj.add_source(
"Contract.sol",
r"
library Library {
function f(uint256 a, uint256 b) public pure returns (uint256) {
return a + b;
}
}
contract Contract {
uint256 c;
constructor() {
c = Library.f(1, 2);
}
}
",
);
let endpoint = rpc::next_http_archive_rpc_url();
prj.add_test(
"Contract.t.sol",
&r#"
import "forge-std/Test.sol";
import "src/Contract.sol";
contract ContractTest is Test {
function setUp() public {
vm.createSelectFork("<url>");
}
function test() public {
new Contract();
}
}
"#
.replace("<url>", &endpoint),
);
cmd.arg("test").assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Contract.t.sol:ContractTest
[PASS] test() ([GAS])
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// Solidity fixture with a single always-failing test, shared by the fail-fast tests below.
static FAILING_TEST: &str = r#"
import "forge-std/Test.sol";
contract FailingTest is Test {
function testShouldFail() public {
assertTrue(false);
}
}
"#;
// With `--fail-fast`, a failing run must keep stderr empty (the failure is
// reported on stdout); `assert_empty_stderr` presumably also checks the error
// exit code — see the helper's definition to confirm.
forgetest_init!(exit_code_error_on_fail_fast, |prj, cmd| {
prj.add_source("failing_test", FAILING_TEST);
cmd.args(["test", "--fail-fast"]);
cmd.assert_empty_stderr();
});
// Same as `exit_code_error_on_fail_fast`, but with `--json` output enabled:
// JSON mode must not change the stderr/exit-code behavior.
forgetest_init!(exit_code_error_on_fail_fast_with_json, |prj, cmd| {
prj.add_source("failing_test", FAILING_TEST);
cmd.args(["test", "--fail-fast", "--json"]);
cmd.assert_empty_stderr();
});
// Verify that --show-progress doesn't stop tests after first failure
forgetest_init!(show_progress_runs_all_tests, |prj, cmd| {
prj.add_test(
"MultiTest.t.sol",
r#"
import "forge-std/Test.sol";
contract MultiTest is Test {
function test_1_Fail() public {
assertTrue(false);
}
function test_2_Pass() public {
assertTrue(true);
}
function test_3_Pass() public {
assertTrue(true);
}
}
"#,
);
// With --show-progress, all 3 tests should run despite first one failing
// -j1 forces sequential execution so "all tests still ran" is meaningful.
let output = cmd.args(["test", "--show-progress", "-j1"]).assert_failure();
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
// Verify all 3 tests were executed
assert!(stdout.contains("test_1_Fail"), "test_1_Fail should run");
assert!(stdout.contains("test_2_Pass"), "test_2_Pass should run");
assert!(stdout.contains("test_3_Pass"), "test_3_Pass should run");
assert!(stdout.contains("2 passed; 1 failed"), "Should show 2 passed and 1 failed");
});
// Verify that --show-progress with --fail-fast DOES stop after first failure
forgetest_init!(show_progress_with_fail_fast_exits_early, |prj, cmd| {
prj.add_test(
"MultiTest.t.sol",
r#"
import "forge-std/Test.sol";
contract MultiTest is Test {
function test_1_Fail() public {
assertTrue(false);
}
function test_2_SlowPass() public {
vm.sleep(60000); // Sleep for 60 seconds to ensure fail-fast stops before this completes
assertTrue(true);
}
function test_3_SlowPass() public {
vm.sleep(60000); // Sleep for 60 seconds to ensure fail-fast stops before this completes
assertTrue(true);
}
}
"#,
);
// With both --show-progress and --fail-fast, should stop after first failure
let output = cmd.args(["test", "--show-progress", "--fail-fast", "-j1"]).assert_failure();
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
// Verify first test ran and failed
assert!(stdout.contains("test_1_Fail"), "test_1_Fail should run");
// With -j1 (sequential execution) and fail-fast, the slow tests should not run
// since test_1_Fail will fail first
// `< 2` (rather than `== 0`) tolerates one slow test already being in flight.
let slow_tests_count = (if stdout.contains("test_2_SlowPass") { 1 } else { 0 })
+ (if stdout.contains("test_3_SlowPass") { 1 } else { 0 });
assert!(
slow_tests_count < 2,
"With --fail-fast and sequential execution, not all slow tests should run after first failure"
);
});
// https://github.com/foundry-rs/foundry/pull/6531
// Calls USDT's `name()` on a mainnet fork and pins the trace, including the
// decoded `"Tether USD"` return value and the cheatcode call rendering.
forgetest_init!(fork_traces, |prj, cmd| {
let endpoint = rpc::next_http_archive_rpc_url();
prj.add_test(
"Contract.t.sol",
&r#"
import {Test} from "forge-std/Test.sol";
interface IERC20 {
function name() external view returns (string memory);
}
contract USDTCallingTest is Test {
function test() public {
vm.createSelectFork("<url>");
IERC20(0xdAC17F958D2ee523a2206206994597C13D831ec7).name();
}
}
"#
.replace("<url>", &endpoint),
);
cmd.args(["test", "-vvvv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Contract.t.sol:USDTCallingTest
[PASS] test() ([GAS])
Traces:
[..] USDTCallingTest::test()
├─ [0] VM::createSelectFork("[..]")
│ └─ ← [Return] 0
├─ [3110] 0xdAC17F958D2ee523a2206206994597C13D831ec7::name() [staticcall]
│ └─ ← [Return] "Tether USD"
└─ ← [Stop]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// Validates BPO1 blob gas price calculation during fork transaction replay.
// Block 24127158 has a blob tx at index 0, target tx at index 1.
// Forking at the target tx replays the blob tx with correct BPO1 blob base fee calculation.
// Only success is asserted — a wrong blob base fee would make the replay revert.
forgetest_init!(fork_tx_replay_bpo1_blob_base_fee, |prj, cmd| {
let endpoint = rpc::next_http_archive_rpc_url();
prj.add_test(
"BlobFork.t.sol",
&r#"
import {Test} from "forge-std/Test.sol";
contract BlobForkTest is Test {
function test_fork_with_blob_replay() public {
// Fork at tx index 1 in block 24127158, which replays blob tx at index 0
bytes32 txHash = 0xa0f349b16e0f338ee760a9954ff5dbf2a402cff3320f3fe2c3755aee8babc335;
vm.createSelectFork("<url>", txHash);
// If we get here, blob tx replay succeeded
assertTrue(true);
}
}
"#
.replace("<url>", &endpoint),
);
cmd.args(["test", "-vvvv"]).assert_success();
});
// https://github.com/foundry-rs/foundry/issues/6579
// User-defined errors and events declared at file scope (outside any contract)
// must be decoded by name in traces (`PoolNotInitialized()`, `MyEvent(a: 100)`).
forgetest_init!(include_custom_types_in_traces, |prj, cmd| {
prj.add_test(
"Contract.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
error PoolNotInitialized();
event MyEvent(uint256 a);
contract CustomTypesTest is Test {
function testErr() public pure {
revert PoolNotInitialized();
}
function testEvent() public {
emit MyEvent(100);
}
}
"#,
);
// One test fails on purpose so the revert decoding path is exercised too.
cmd.args(["test", "-vvvvv"]).assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 2 tests for test/Contract.t.sol:CustomTypesTest
[FAIL: PoolNotInitialized()] testErr() ([GAS])
Traces:
[247] CustomTypesTest::testErr()
└─ ← [Revert] PoolNotInitialized()
Backtrace:
at CustomTypesTest.testErr (test/Contract.t.sol:[..]:[..])
[PASS] testEvent() ([GAS])
Traces:
[1524] CustomTypesTest::testEvent()
├─ emit MyEvent(a: 100)
└─ ← [Stop]
Suite result: FAILED. 1 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 1 failed, 0 skipped (2 total tests)
Failing tests:
Encountered 1 failing test in test/Contract.t.sol:CustomTypesTest
[FAIL: PoolNotInitialized()] testErr() ([GAS])
Encountered a total of 1 failing tests, 1 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Transient storage (EIP-1153 tload/tstore) with `--isolate`: each top-level
// call runs as its own transaction, so the reentrancy lock set via `tstore`
// must be cleared between the two `maybeReentrant` calls.
forgetest_init!(can_test_transient_storage_with_isolation, |prj, cmd| {
prj.add_test(
"Contract.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract TransientTester {
function locked() public view returns (bool isLocked) {
assembly {
isLocked := tload(0)
}
}
modifier lock() {
require(!locked(), "locked");
assembly {
tstore(0, 1)
}
_;
}
function maybeReentrant(address target, bytes memory data) public lock {
(bool success, bytes memory ret) = target.call(data);
if (!success) {
// forwards revert reason
assembly {
let ret_size := mload(ret)
revert(add(32, ret), ret_size)
}
}
}
}
contract TransientTest is Test {
function test() public {
TransientTester t = new TransientTester();
vm.expectRevert(bytes("locked"));
t.maybeReentrant(address(t), abi.encodeCall(TransientTester.maybeReentrant, (address(0), new bytes(0))));
t.maybeReentrant(address(0), new bytes(0));
assertEq(t.locked(), false);
}
}
"#,
);
// tload/tstore require the Cancun EVM version.
cmd.args(["test", "-vvvv", "--isolate", "--evm-version", "cancun"]).assert_success();
});
// `--disable-block-gas-limit` should let an isolated (per-transaction) test
// burn more gas than the forked block's limit without reverting.
forgetest_init!(
#[ignore = "Too slow"]
can_disable_block_gas_limit,
|prj, cmd| {
let endpoint = rpc::next_http_archive_rpc_url();
prj.add_test(
"Contract.t.sol",
&r#"
import {Test} from "forge-std/Test.sol";
contract C is Test {}
contract GasWaster {
function waste() public {
for (uint256 i = 0; i < 100; i++) {
new C();
}
}
}
contract GasLimitTest is Test {
function test() public {
vm.createSelectFork("<rpc>");
GasWaster waster = new GasWaster();
waster.waste();
}
}
"#
.replace("<rpc>", &endpoint),
);
// `--isolate` makes the gas-limit check apply per call, which is what the flag disables.
cmd.args(["test", "-vvvv", "--isolate", "--disable-block-gas-limit"]).assert_success();
}
);
// `--match-path` should select and run the tests found in the given source file.
forgetest!(test_match_path, |prj, cmd| {
// A single dummy test contract under `src/` is enough to exercise the filter.
prj.add_source(
"dummy",
r"
contract Dummy {
function testDummy() public {}
}
",
);
// Filtering by the file path must still discover and pass the test.
cmd.args(["test", "--match-path", "src/dummy.sol"]).assert_success();
});
// A fuzz failure should be reported with the raw counterexample instead of
// being shrunk through many extra runs.
forgetest_init!(should_not_shrink_fuzz_failure, |prj, cmd| {
// Fixed seed so the overflow counterexample is always hit on the same run
// (`runs: 27` in the snapshot below), keeping the output deterministic.
prj.update_config(|config| {
config.fuzz.runs = 256;
config.fuzz.seed = Some(U256::from(100));
});
prj.add_test(
"CounterFuzz.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract Counter {
uint256 public number = 0;
function addOne(uint256 x) external pure returns (uint256) {
return x + 100_000_000;
}
}
contract CounterTest is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
}
function testAddOne(uint256 x) public view {
assertEq(counter.addOne(x), x + 100_000_000);
}
}
"#,
);
// The failure is reported immediately at `runs: 27` — proptest-style shrinking
// would take many more runs before reporting the same failure.
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/CounterFuzz.t.sol:CounterTest
[FAIL: panic: arithmetic underflow or overflow (0x11); counterexample: calldata=0xa76d58f5fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe args=[115792089237316195423570985008687907853269984665640564039457584007913129639934 [1.157e77]]] testAddOne(uint256) (runs: 27, [AVG_GAS])
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/CounterFuzz.t.sol:CounterTest
[FAIL: panic: arithmetic underflow or overflow (0x11); counterexample: calldata=0xa76d58f5fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe args=[115792089237316195423570985008687907853269984665640564039457584007913129639934 [1.157e77]]] testAddOne(uint256) (runs: 27, [AVG_GAS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// An invariant that already fails in the initial state (before any fuzzed
// calls) should abort the campaign during environment setup with 0 runs.
forgetest_init!(should_exit_early_on_invariant_failure, |prj, cmd| {
prj.add_test(
"CounterInvariant.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract Counter {
uint256 public number = 0;
function inc() external {
number += 1;
}
}
contract CounterTest is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
}
function invariant_early_exit() public view {
assertTrue(counter.number() == 10, "wrong count");
}
}
"#,
);
// make sure invariant test exit early with 0 runs
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/CounterInvariant.t.sol:CounterTest
[FAIL: failed to set up invariant testing environment: wrong count] invariant_early_exit() (runs: 0, calls: 0, reverts: 0)
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/CounterInvariant.t.sol:CounterTest
[FAIL: failed to set up invariant testing environment: wrong count] invariant_early_exit() (runs: 0, calls: 0, reverts: 0)
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// `forge test --rerun` should persist the failed-test filter to the cache and
// re-run only the tests that failed in the previous invocation.
forgetest_init!(should_replay_failures_only, |prj, cmd| {
prj.add_test(
"ReplayFailures.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract ReplayFailuresTest is Test {
function testA() public pure {
require(2 > 1);
}
function testB() public pure {
require(1 > 2, "testB failed");
}
function testC() public pure {
require(2 > 1);
}
function testD() public pure {
require(1 > 2, "testD failed");
}
}
"#,
);
// First run: 2 of 4 tests fail and get recorded.
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 4 tests for test/ReplayFailures.t.sol:ReplayFailuresTest
[PASS] testA() ([GAS])
[FAIL: testB failed] testB() ([GAS])
[PASS] testC() ([GAS])
[FAIL: testD failed] testD() ([GAS])
Suite result: FAILED. 2 passed; 2 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 2 failed, 0 skipped (4 total tests)
Failing tests:
Encountered 2 failing tests in test/ReplayFailures.t.sol:ReplayFailuresTest
[FAIL: testB failed] testB() ([GAS])
[FAIL: testD failed] testD() ([GAS])
Encountered a total of 2 failing tests, 2 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
// Test failure filter should be persisted.
assert!(prj.root().join("cache/test-failures").exists());
// Perform only the 2 failing tests from last run.
cmd.forge_fuse().args(["test", "--rerun"]).assert_failure().stdout_eq(str![[r#"
No files changed, compilation skipped
Ran 2 tests for test/ReplayFailures.t.sol:ReplayFailuresTest
[FAIL: testB failed] testB() ([GAS])
[FAIL: testD failed] testD() ([GAS])
Suite result: FAILED. 0 passed; 2 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 2 failed, 0 skipped (2 total tests)
Failing tests:
Encountered 2 failing tests in test/ReplayFailures.t.sol:ReplayFailuresTest
[FAIL: testB failed] testB() ([GAS])
[FAIL: testD failed] testD() ([GAS])
Encountered a total of 2 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/9285>
// A passing run must not leave a failure filter behind; despite the file name,
// `setUp` here succeeds, so nothing should be written to the cache.
forgetest_init!(should_not_record_setup_failures, |prj, cmd| {
prj.add_test(
"ReplayFailures.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract SetupFailureTest is Test {
function setUp() public {
require(2 > 1);
}
function testA() public pure {
}
}
"#,
);
cmd.args(["test"]).assert_success();
// Test failure filter should not be persisted if `setUp` failed.
assert!(!prj.root().join("cache/test-failures").exists());
});
// https://github.com/foundry-rs/foundry/issues/7530
// Well-known addresses (cheatcode VM, console, Create2 deployer, default
// sender, the test contract itself, and precompiles 0x01..0x0A) must be shown
// with their labels in traces rather than bare addresses.
forgetest_init!(should_show_precompile_labels, |prj, cmd| {
prj.add_test(
"Contract.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract PrecompileLabelsTest is Test {
function testPrecompileLabels() public {
vm.deal(address(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D), 1 ether);
vm.deal(address(0x000000000000000000636F6e736F6c652e6c6f67), 1 ether);
vm.deal(address(0x4e59b44847b379578588920cA78FbF26c0B4956C), 1 ether);
vm.deal(address(0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38), 1 ether);
vm.deal(address(0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496), 1 ether);
vm.deal(address(1), 1 ether);
vm.deal(address(2), 1 ether);
vm.deal(address(3), 1 ether);
vm.deal(address(4), 1 ether);
vm.deal(address(5), 1 ether);
vm.deal(address(6), 1 ether);
vm.deal(address(7), 1 ether);
vm.deal(address(8), 1 ether);
vm.deal(address(9), 1 ether);
vm.deal(address(10), 1 ether);
}
}
"#,
);
// Each `vm.deal` target appears in the trace with its human-readable label.
cmd.args(["test", "-vvvv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/Contract.t.sol:PrecompileLabelsTest
[PASS] testPrecompileLabels() ([GAS])
Traces:
[..] PrecompileLabelsTest::testPrecompileLabels()
├─ [0] VM::deal(VM: [0x7109709ECfa91a80626fF3989D68f67F5b1DD12D], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(console: [0x000000000000000000636F6e736F6c652e6c6f67], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(Create2Deployer: [0x4e59b44847b379578588920cA78FbF26c0B4956C], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(DefaultSender: [0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(PrecompileLabelsTest: [0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(ECRecover: [0x0000000000000000000000000000000000000001], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(SHA-256: [0x0000000000000000000000000000000000000002], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(RIPEMD-160: [0x0000000000000000000000000000000000000003], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(Identity: [0x0000000000000000000000000000000000000004], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(ModExp: [0x0000000000000000000000000000000000000005], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(ECAdd: [0x0000000000000000000000000000000000000006], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(ECMul: [0x0000000000000000000000000000000000000007], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(ECPairing: [0x0000000000000000000000000000000000000008], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(Blake2F: [0x0000000000000000000000000000000000000009], 1000000000000000000 [1e18])
│ └─ ← [Return]
├─ [0] VM::deal(PointEvaluation: [0x000000000000000000000000000000000000000A], 1000000000000000000 [1e18])
│ └─ ← [Return]
└─ ← [Stop]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// tests that `forge test` with config `show_logs: true` for fuzz tests will
// display `console.log` info
forgetest_init!(should_show_logs_when_fuzz_test, |prj, cmd| {
// run fuzz test 3 times
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/logs.rs | crates/forge/tests/cli/test_cmd/logs.rs | //! Tests for various logging functionality
use foundry_test_utils::str;
// DSTest-style `emit log_*` events: with `-vv` every test's "Logs:" section
// must include the constructor log (0) and the `setUp` log (1) followed by the
// test's own logs, and each log type must be rendered in its expected format.
forgetest_init!(debug_logs, |prj, cmd| {
prj.add_test(
"DebugLogs.t.sol",
r#"
import "forge-std/Test.sol";
contract DebugLogsTest is Test {
constructor() {
emit log_uint(0);
}
function setUp() public {
emit log_uint(1);
}
function test1() public {
emit log_uint(2);
}
function test2() public {
emit log_uint(3);
}
function testRevertIfWithRevert() public {
Fails fails = new Fails();
emit log_uint(4);
vm.expectRevert();
fails.failure();
}
/// forge-config: default.allow_internal_expect_revert = true
function testRevertIfWithRequire() public {
emit log_uint(5);
vm.expectRevert();
require(false);
}
function testLog() public {
emit log("Error: Assertion Failed");
}
function testLogs() public {
emit logs(bytes("abcd"));
}
function testLogAddress() public {
emit log_address(address(1));
}
function testLogBytes32() public {
emit log_bytes32(bytes32("abcd"));
}
function testLogInt() public {
emit log_int(int256(-31337));
}
function testLogBytes() public {
emit log_bytes(bytes("abcd"));
}
function testLogString() public {
emit log_string("here");
}
function testLogNamedAddress() public {
emit log_named_address("address", address(1));
}
function testLogNamedBytes32() public {
emit log_named_bytes32("abcd", bytes32("abcd"));
}
function testLogNamedDecimalInt() public {
emit log_named_decimal_int("amount", int256(-31337), uint256(18));
}
function testLogNamedDecimalUint() public {
emit log_named_decimal_uint("amount", uint256(1 ether), uint256(18));
}
function testLogNamedInt() public {
emit log_named_int("amount", int256(-31337));
}
function testLogNamedUint() public {
emit log_named_uint("amount", uint256(1 ether));
}
function testLogNamedBytes() public {
emit log_named_bytes("abcd", bytes("abcd"));
}
function testLogNamedString() public {
emit log_named_string("key", "val");
}
}
contract Fails is Test {
function failure() public {
emit log_uint(100);
revert();
}
}
"#,
);
// Note: testRevertIfWithRevert also shows log 100 emitted before the expected revert.
cmd.args(["test", "-vv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 19 tests for test/DebugLogs.t.sol:DebugLogsTest
[PASS] test1() ([GAS])
Logs:
0
1
2
[PASS] test2() ([GAS])
Logs:
0
1
3
[PASS] testLog() ([GAS])
Logs:
0
1
Error: Assertion Failed
[PASS] testLogAddress() ([GAS])
Logs:
0
1
0x0000000000000000000000000000000000000001
[PASS] testLogBytes() ([GAS])
Logs:
0
1
0x61626364
[PASS] testLogBytes32() ([GAS])
Logs:
0
1
0x6162636400000000000000000000000000000000000000000000000000000000
[PASS] testLogInt() ([GAS])
Logs:
0
1
-31337
[PASS] testLogNamedAddress() ([GAS])
Logs:
0
1
address: 0x0000000000000000000000000000000000000001
[PASS] testLogNamedBytes() ([GAS])
Logs:
0
1
abcd: 0x61626364
[PASS] testLogNamedBytes32() ([GAS])
Logs:
0
1
abcd: 0x6162636400000000000000000000000000000000000000000000000000000000
[PASS] testLogNamedDecimalInt() ([GAS])
Logs:
0
1
amount: -0.000000000000031337
[PASS] testLogNamedDecimalUint() ([GAS])
Logs:
0
1
amount: 1.000000000000000000
[PASS] testLogNamedInt() ([GAS])
Logs:
0
1
amount: -31337
[PASS] testLogNamedString() ([GAS])
Logs:
0
1
key: val
[PASS] testLogNamedUint() ([GAS])
Logs:
0
1
amount: 1000000000000000000
[PASS] testLogString() ([GAS])
Logs:
0
1
here
[PASS] testLogs() ([GAS])
Logs:
0
1
0x61626364
[PASS] testRevertIfWithRequire() ([GAS])
Logs:
0
1
5
[PASS] testRevertIfWithRevert() ([GAS])
Logs:
0
1
4
100
Suite result: ok. 19 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 19 tests passed, 0 failed, 0 skipped (19 total tests)
"#]]);
});
// Hardhat-style `console.log` / `console.log*` calls: covers all fixed-size
// bytes1..bytes32 variants, format strings (%s/%d, %% escaping, argument
// spill) and the plain typed loggers. The constructor log must appear in every
// test's output; the snapshot starts with `...` to skip the compile header.
forgetest_init!(hardhat_logs, |prj, cmd| {
prj.add_test(
"HardhatLogs.t.sol",
r#"
import "forge-std/console.sol";
contract HardhatLogsTest {
constructor() {
console.log("constructor");
}
string testStr;
int256 testInt;
uint256 testUint;
bool testBool;
address testAddr;
bytes testBytes;
function setUp() public {
testStr = "test";
testInt = -31337;
testUint = 1;
testBool = false;
testAddr = 0x0000000000000000000000000000000000000001;
testBytes = "a";
}
function testInts() public view {
console.log(uint256(0));
console.log(uint256(1));
console.log(uint256(2));
console.log(uint256(3));
}
function testStrings() public view {
console.log("testStrings");
}
function testMisc() public view {
console.log("testMisc", address(1));
console.log("testMisc", uint256(42));
}
function testConsoleLog() public view {
console.log(testStr);
}
function testLogInt() public view {
console.logInt(testInt);
}
function testLogUint() public view {
console.logUint(testUint);
}
function testLogString() public view {
console.logString(testStr);
}
function testLogBool() public view {
console.logBool(testBool);
}
function testLogAddress() public view {
console.logAddress(testAddr);
}
function testLogBytes() public view {
console.logBytes(testBytes);
}
function testLogBytes1() public view {
console.logBytes1(bytes1(testBytes));
}
function testLogBytes2() public view {
console.logBytes2(bytes2(testBytes));
}
function testLogBytes3() public view {
console.logBytes3(bytes3(testBytes));
}
function testLogBytes4() public view {
console.logBytes4(bytes4(testBytes));
}
function testLogBytes5() public view {
console.logBytes5(bytes5(testBytes));
}
function testLogBytes6() public view {
console.logBytes6(bytes6(testBytes));
}
function testLogBytes7() public view {
console.logBytes7(bytes7(testBytes));
}
function testLogBytes8() public view {
console.logBytes8(bytes8(testBytes));
}
function testLogBytes9() public view {
console.logBytes9(bytes9(testBytes));
}
function testLogBytes10() public view {
console.logBytes10(bytes10(testBytes));
}
function testLogBytes11() public view {
console.logBytes11(bytes11(testBytes));
}
function testLogBytes12() public view {
console.logBytes12(bytes12(testBytes));
}
function testLogBytes13() public view {
console.logBytes13(bytes13(testBytes));
}
function testLogBytes14() public view {
console.logBytes14(bytes14(testBytes));
}
function testLogBytes15() public view {
console.logBytes15(bytes15(testBytes));
}
function testLogBytes16() public view {
console.logBytes16(bytes16(testBytes));
}
function testLogBytes17() public view {
console.logBytes17(bytes17(testBytes));
}
function testLogBytes18() public view {
console.logBytes18(bytes18(testBytes));
}
function testLogBytes19() public view {
console.logBytes19(bytes19(testBytes));
}
function testLogBytes20() public view {
console.logBytes20(bytes20(testBytes));
}
function testLogBytes21() public view {
console.logBytes21(bytes21(testBytes));
}
function testLogBytes22() public view {
console.logBytes22(bytes22(testBytes));
}
function testLogBytes23() public view {
console.logBytes23(bytes23(testBytes));
}
function testLogBytes24() public view {
console.logBytes24(bytes24(testBytes));
}
function testLogBytes25() public view {
console.logBytes25(bytes25(testBytes));
}
function testLogBytes26() public view {
console.logBytes26(bytes26(testBytes));
}
function testLogBytes27() public view {
console.logBytes27(bytes27(testBytes));
}
function testLogBytes28() public view {
console.logBytes28(bytes28(testBytes));
}
function testLogBytes29() public view {
console.logBytes29(bytes29(testBytes));
}
function testLogBytes30() public view {
console.logBytes30(bytes30(testBytes));
}
function testLogBytes31() public view {
console.logBytes31(bytes31(testBytes));
}
function testLogBytes32() public view {
console.logBytes32(bytes32(testBytes));
}
function testConsoleLogUint() public view {
console.log(testUint);
}
function testConsoleLogString() public view {
console.log(testStr);
}
function testConsoleLogBool() public view {
console.log(testBool);
}
function testConsoleLogAddress() public view {
console.log(testAddr);
}
function testConsoleLogFormatString() public view {
console.log("formatted log str=%s", testStr);
}
function testConsoleLogFormatUint() public view {
console.log("formatted log uint=%s", testUint);
}
function testConsoleLogFormatAddress() public view {
console.log("formatted log addr=%s", testAddr);
}
function testConsoleLogFormatMulti() public view {
console.log("formatted log str=%s uint=%d", testStr, testUint);
}
function testConsoleLogFormatEscape() public view {
console.log("formatted log %% %s", testStr);
}
function testConsoleLogFormatSpill() public view {
console.log("formatted log %s", testStr, testUint);
}
}
"#,
);
cmd.args(["test", "-vv"]).assert_success().stdout_eq(str![[r#"
...
Ran 52 tests for test/HardhatLogs.t.sol:HardhatLogsTest
[PASS] testConsoleLog() ([GAS])
Logs:
constructor
test
[PASS] testConsoleLogAddress() ([GAS])
Logs:
constructor
0x0000000000000000000000000000000000000001
[PASS] testConsoleLogBool() ([GAS])
Logs:
constructor
false
[PASS] testConsoleLogFormatAddress() ([GAS])
Logs:
constructor
formatted log addr=0x0000000000000000000000000000000000000001
[PASS] testConsoleLogFormatEscape() ([GAS])
Logs:
constructor
formatted log % test
[PASS] testConsoleLogFormatMulti() ([GAS])
Logs:
constructor
formatted log str=test uint=1
[PASS] testConsoleLogFormatSpill() ([GAS])
Logs:
constructor
formatted log test 1
[PASS] testConsoleLogFormatString() ([GAS])
Logs:
constructor
formatted log str=test
[PASS] testConsoleLogFormatUint() ([GAS])
Logs:
constructor
formatted log uint=1
[PASS] testConsoleLogString() ([GAS])
Logs:
constructor
test
[PASS] testConsoleLogUint() ([GAS])
Logs:
constructor
1
[PASS] testInts() ([GAS])
Logs:
constructor
0
1
2
3
[PASS] testLogAddress() ([GAS])
Logs:
constructor
0x0000000000000000000000000000000000000001
[PASS] testLogBool() ([GAS])
Logs:
constructor
false
[PASS] testLogBytes() ([GAS])
Logs:
constructor
0x61
[PASS] testLogBytes1() ([GAS])
Logs:
constructor
0x61
[PASS] testLogBytes10() ([GAS])
Logs:
constructor
0x61000000000000000000
[PASS] testLogBytes11() ([GAS])
Logs:
constructor
0x6100000000000000000000
[PASS] testLogBytes12() ([GAS])
Logs:
constructor
0x610000000000000000000000
[PASS] testLogBytes13() ([GAS])
Logs:
constructor
0x61000000000000000000000000
[PASS] testLogBytes14() ([GAS])
Logs:
constructor
0x6100000000000000000000000000
[PASS] testLogBytes15() ([GAS])
Logs:
constructor
0x610000000000000000000000000000
[PASS] testLogBytes16() ([GAS])
Logs:
constructor
0x61000000000000000000000000000000
[PASS] testLogBytes17() ([GAS])
Logs:
constructor
0x6100000000000000000000000000000000
[PASS] testLogBytes18() ([GAS])
Logs:
constructor
0x610000000000000000000000000000000000
[PASS] testLogBytes19() ([GAS])
Logs:
constructor
0x61000000000000000000000000000000000000
[PASS] testLogBytes2() ([GAS])
Logs:
constructor
0x6100
[PASS] testLogBytes20() ([GAS])
Logs:
constructor
0x6100000000000000000000000000000000000000
[PASS] testLogBytes21() ([GAS])
Logs:
constructor
0x610000000000000000000000000000000000000000
[PASS] testLogBytes22() ([GAS])
Logs:
constructor
0x61000000000000000000000000000000000000000000
[PASS] testLogBytes23() ([GAS])
Logs:
constructor
0x6100000000000000000000000000000000000000000000
[PASS] testLogBytes24() ([GAS])
Logs:
constructor
0x610000000000000000000000000000000000000000000000
[PASS] testLogBytes25() ([GAS])
Logs:
constructor
0x61000000000000000000000000000000000000000000000000
[PASS] testLogBytes26() ([GAS])
Logs:
constructor
0x6100000000000000000000000000000000000000000000000000
[PASS] testLogBytes27() ([GAS])
Logs:
constructor
0x610000000000000000000000000000000000000000000000000000
[PASS] testLogBytes28() ([GAS])
Logs:
constructor
0x61000000000000000000000000000000000000000000000000000000
[PASS] testLogBytes29() ([GAS])
Logs:
constructor
0x6100000000000000000000000000000000000000000000000000000000
[PASS] testLogBytes3() ([GAS])
Logs:
constructor
0x610000
[PASS] testLogBytes30() ([GAS])
Logs:
constructor
0x610000000000000000000000000000000000000000000000000000000000
[PASS] testLogBytes31() ([GAS])
Logs:
constructor
0x61000000000000000000000000000000000000000000000000000000000000
[PASS] testLogBytes32() ([GAS])
Logs:
constructor
0x6100000000000000000000000000000000000000000000000000000000000000
[PASS] testLogBytes4() ([GAS])
Logs:
constructor
0x61000000
[PASS] testLogBytes5() ([GAS])
Logs:
constructor
0x6100000000
[PASS] testLogBytes6() ([GAS])
Logs:
constructor
0x610000000000
[PASS] testLogBytes7() ([GAS])
Logs:
constructor
0x61000000000000
[PASS] testLogBytes8() ([GAS])
Logs:
constructor
0x6100000000000000
[PASS] testLogBytes9() ([GAS])
Logs:
constructor
0x610000000000000000
[PASS] testLogInt() ([GAS])
Logs:
constructor
-31337
[PASS] testLogString() ([GAS])
Logs:
constructor
test
[PASS] testLogUint() ([GAS])
Logs:
constructor
1
[PASS] testMisc() ([GAS])
Logs:
constructor
testMisc 0x0000000000000000000000000000000000000001
testMisc 42
[PASS] testStrings() ([GAS])
Logs:
constructor
testStrings
Suite result: ok. 52 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 52 tests passed, 0 failed, 0 skipped (52 total tests)
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/spec.rs | crates/forge/tests/cli/test_cmd/spec.rs | use foundry_test_utils::rpc;
// Test evm version switch during tests / scripts.
// <https://github.com/foundry-rs/foundry/issues/9840>
// <https://github.com/foundry-rs/foundry/issues/6228>
// Three scenarios: switching EVM versions mid-test via the `setEvmVersion`
// cheatcode, setting the version in `setUp`, and setting it in the test
// contract's constructor — the chosen version must apply to subsequent calls.
forgetest_init!(test_set_evm_version, |prj, cmd| {
let endpoint = rpc::next_http_archive_rpc_url();
prj.add_test(
"TestEvmVersion.t.sol",
&r#"
import {Test} from "forge-std/Test.sol";
interface EvmVm {
function getEvmVersion() external pure returns (string memory evm);
function setEvmVersion(string calldata evm) external;
}
interface ICreate2Deployer {
function computeAddress(bytes32 salt, bytes32 codeHash) external view returns (address);
}
contract TestEvmVersion is Test {
function test_evm_version() public {
EvmVm evm = EvmVm(address(bytes20(uint160(uint256(keccak256("hevm cheat code"))))));
vm.createSelectFork("<rpc>");
evm.setEvmVersion("istanbul");
evm.getEvmVersion();
// revert with NotActivated for istanbul
vm.expectRevert();
compute();
evm.setEvmVersion("shanghai");
evm.getEvmVersion();
compute();
// switch to Paris, expect revert with NotActivated
evm.setEvmVersion("paris");
vm.expectRevert();
compute();
}
function compute() internal view {
ICreate2Deployer(0x35Da41c476fA5c6De066f20556069096A1F39364).computeAddress(bytes32(0), bytes32(0));
}
}
"#.replace("<rpc>", &endpoint),
);
// The target deployer bytecode uses an opcode unavailable pre-Shanghai, so
// `compute()` reverts with NotActivated on istanbul/paris but succeeds on shanghai.
cmd.args(["test", "--mc", "TestEvmVersion", "-vvvv"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/TestEvmVersion.t.sol:TestEvmVersion
[PASS] test_evm_version() ([GAS])
Traces:
[..] TestEvmVersion::test_evm_version()
├─ [0] VM::createSelectFork("<rpc url>")
│ └─ ← [Return] 0
├─ [0] VM::setEvmVersion("istanbul")
│ └─ ← [Return]
├─ [0] VM::getEvmVersion() [staticcall]
│ └─ ← [Return] "istanbul"
├─ [0] VM::expectRevert(custom error 0xf4844814)
│ └─ ← [Return]
├─ [..] 0x35Da41c476fA5c6De066f20556069096A1F39364::computeAddress(0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000) [staticcall]
│ └─ ← [NotActivated] EvmError: NotActivated
├─ [0] VM::setEvmVersion("shanghai")
│ └─ ← [Return]
├─ [0] VM::getEvmVersion() [staticcall]
│ └─ ← [Return] "shanghai"
├─ [..] 0x35Da41c476fA5c6De066f20556069096A1F39364::computeAddress(0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000) [staticcall]
│ └─ ← [Return] 0x0f40d7B7669e3a6683EaB25358318fd42a9F2342
├─ [0] VM::setEvmVersion("paris")
│ └─ ← [Return]
├─ [0] VM::expectRevert(custom error 0xf4844814)
│ └─ ← [Return]
├─ [..] 0x35Da41c476fA5c6De066f20556069096A1F39364::computeAddress(0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000) [staticcall]
│ └─ ← [NotActivated] EvmError: NotActivated
└─ ← [Stop]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// Test evm version set in `setUp` is accounted in test.
prj.add_test(
"TestSetupEvmVersion.t.sol",
&r#"
import {Test} from "forge-std/Test.sol";
interface EvmVm {
function getEvmVersion() external pure returns (string memory evm);
function setEvmVersion(string calldata evm) external;
}
interface ICreate2Deployer {
function computeAddress(bytes32 salt, bytes32 codeHash) external view returns (address);
}
EvmVm constant evm = EvmVm(address(bytes20(uint160(uint256(keccak256("hevm cheat code"))))));
contract TestSetupEvmVersion is Test {
function setUp() public {
evm.setEvmVersion("istanbul");
}
function test_evm_version_in_setup() public {
vm.createSelectFork("<rpc>");
// revert with NotActivated for istanbul
ICreate2Deployer(0x35Da41c476fA5c6De066f20556069096A1F39364).computeAddress(bytes32(0), bytes32(0));
}
}
"#.replace("<rpc>", &endpoint),
);
cmd.forge_fuse()
.args(["test", "--mc", "TestSetupEvmVersion", "-vvvv"])
.assert_failure()
.stdout_eq(str![[r#"
...
[FAIL: EvmError: NotActivated] test_evm_version_in_setup() ([GAS])
Traces:
[..] TestSetupEvmVersion::setUp()
├─ [0] VM::setEvmVersion("istanbul")
│ └─ ← [Return]
└─ ← [Stop]
[..] TestSetupEvmVersion::test_evm_version_in_setup()
└─ ← [NotActivated] EvmError: NotActivated
...
"#]]);
// Test evm version set in constructor is accounted in test.
prj.add_test(
"TestConstructorEvmVersion.t.sol",
&r#"
import {Test} from "forge-std/Test.sol";
interface EvmVm {
function getEvmVersion() external pure returns (string memory evm);
function setEvmVersion(string calldata evm) external;
}
interface ICreate2Deployer {
function computeAddress(bytes32 salt, bytes32 codeHash) external view returns (address);
}
EvmVm constant evm = EvmVm(address(bytes20(uint160(uint256(keccak256("hevm cheat code"))))));
contract TestConstructorEvmVersion is Test {
constructor() {
evm.setEvmVersion("istanbul");
}
function test_evm_version_in_constructor() public {
vm.createSelectFork("<rpc>");
// revert with NotActivated for istanbul
ICreate2Deployer(0x35Da41c476fA5c6De066f20556069096A1F39364).computeAddress(bytes32(0), bytes32(0));
}
}
"#.replace("<rpc>", &endpoint),
);
cmd.forge_fuse()
.args(["test", "--mc", "TestConstructorEvmVersion", "-vvvv"])
.assert_failure()
.stdout_eq(str![[r#"
...
[FAIL: EvmError: NotActivated] test_evm_version_in_constructor() ([GAS])
Traces:
[..] TestConstructorEvmVersion::test_evm_version_in_constructor()
└─ ← [NotActivated] EvmError: NotActivated
...
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/invariant/target.rs | crates/forge/tests/cli/test_cmd/invariant/target.rs | use super::*;
// Exercises every invariant fuzzing filter hook in one project — `targetContracts`,
// `excludeContracts`, `targetSelectors`, `excludeSelectors`, `targetSenders`,
// `excludeSenders`, artifact targeting/exclusion and interface targeting — and
// asserts which invariants pass or fail as a consequence of each filter.
forgetest!(filters, |prj, cmd| {
    prj.insert_vm();
    prj.insert_ds_test();
    // Keep the campaign small so the whole filter matrix runs quickly.
    prj.update_config(|config| {
        config.invariant.runs = 50;
        config.invariant.depth = 10;
    });
    // `excludeContracts`: the excluded `hello` instance must never be fuzzed,
    // so its `world` flag stays true and the invariant holds.
    prj.add_test(
        "ExcludeContracts.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
contract Hello {
bool public world = true;
function change() public {
world = false;
}
}
contract ExcludeContracts is Test {
Hello hello;
function setUp() public {
hello = new Hello();
new Hello();
}
function excludeContracts() public view returns (address[] memory) {
address[] memory addrs = new address[](1);
addrs[0] = address(hello);
return addrs;
}
function invariantTrueWorld() public {
require(hello.world() == true, "false world");
}
}
"#,
    );
    // `excludeSelectors`: only the excluded `change()` could flip `world` to true,
    // so the "false world" invariant must hold.
    prj.add_test(
        "ExcludeSelectors.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
struct FuzzSelector {
address addr;
bytes4[] selectors;
}
contract Hello {
bool public world = false;
function change() public {
world = true;
}
function real_change() public {
world = false;
}
}
contract ExcludeSelectors is Test {
Hello hello;
function setUp() public {
hello = new Hello();
}
function excludeSelectors() public view returns (FuzzSelector[] memory) {
FuzzSelector[] memory targets = new FuzzSelector[](1);
bytes4[] memory selectors = new bytes4[](1);
selectors[0] = Hello.change.selector;
targets[0] = FuzzSelector(address(hello), selectors);
return targets;
}
function invariantFalseWorld() public {
require(hello.world() == false, "true world");
}
}
"#,
    );
    // `excludeSenders`: exclusion wins even when the same sender is also targeted,
    // so neither guarded mutator can ever fire.
    prj.add_test(
        "ExcludeSenders.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
contract Hello {
address seed_address = address(0xdeadbeef);
bool public world = true;
function changeBeef() public {
require(msg.sender == address(0xdeadbeef));
world = false;
}
// address(0) should be automatically excluded
function change0() public {
require(msg.sender == address(0));
world = false;
}
}
contract ExcludeSenders is Test {
Hello hello;
function setUp() public {
hello = new Hello();
}
function excludeSenders() public view returns (address[] memory) {
address[] memory addrs = new address[](1);
addrs[0] = address(0xdeadbeef);
return addrs;
}
// Tests clashing. Exclusion takes priority.
function targetSenders() public view returns (address[] memory) {
address[] memory addrs = new address[](1);
addrs[0] = address(0xdeadbeef);
return addrs;
}
function invariantTrueWorld() public {
require(hello.world() == true, "false world");
}
}
"#,
    );
    // `targetContracts`: only `hello1` is fuzzed, so the invariant over the
    // untargeted `hello2` must keep holding.
    prj.add_test(
        "TargetContracts.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
contract Hello {
bool public world = true;
function change() public {
world = false;
}
}
contract TargetContracts is Test {
Hello hello1;
Hello hello2;
function setUp() public {
hello1 = new Hello();
hello2 = new Hello();
}
function targetContracts() public view returns (address[] memory) {
address[] memory addrs = new address[](1);
addrs[0] = address(hello1);
return addrs;
}
function invariantTrueWorld() public {
require(hello2.world() == true, "false world");
}
}
"#,
    );
    // `targetInterfaces`: fuzzing the proxy through the `IHello` ABI must reach
    // `changeWorld()` via delegatecall and break the invariant.
    prj.add_test(
        "TargetInterfaces.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
struct FuzzInterface {
address target;
string[] artifacts;
}
contract Hello {
bool public world;
function changeWorld() external {
world = true;
}
}
interface IHello {
function world() external view returns (bool);
function changeWorld() external;
}
contract HelloProxy {
address internal immutable _implementation;
constructor(address implementation_) {
_implementation = implementation_;
}
function _delegate(address implementation) internal {
assembly {
calldatacopy(0, 0, calldatasize())
let result := delegatecall(gas(), implementation, 0, calldatasize(), 0, 0)
returndatacopy(0, 0, returndatasize())
switch result
case 0 { revert(0, returndatasize()) }
default { return(0, returndatasize()) }
}
}
fallback() external payable {
_delegate(_implementation);
}
}
contract TargetWorldInterfaces is Test {
IHello proxy;
function setUp() public {
Hello hello = new Hello();
proxy = IHello(address(new HelloProxy(address(hello))));
}
function targetInterfaces() public view returns (FuzzInterface[] memory) {
FuzzInterface[] memory targets = new FuzzInterface[](1);
string[] memory artifacts = new string[](1);
artifacts[0] = "IHello";
targets[0] = FuzzInterface(address(proxy), artifacts);
return targets;
}
function invariantTrueWorld() public {
require(proxy.world() == false, "false world");
}
}
"#,
    );
    // `targetSelectors`: only the harmless `change()` (keeps `world` true) is
    // targeted, so the invariant holds.
    prj.add_test(
        "TargetSelectors.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
struct FuzzSelector {
address addr;
bytes4[] selectors;
}
contract Hello {
bool public world = true;
function change() public {
world = true;
}
function real_change() public {
world = false;
}
}
contract TargetSelectors is Test {
Hello hello;
function setUp() public {
hello = new Hello();
}
function targetSelectors() public view returns (FuzzSelector[] memory) {
FuzzSelector[] memory targets = new FuzzSelector[](1);
bytes4[] memory selectors = new bytes4[](1);
selectors[0] = Hello.change.selector;
targets[0] = FuzzSelector(address(hello), selectors);
return targets;
}
function invariantTrueWorld() public {
require(hello.world() == true, "false world");
}
}
"#,
    );
    // `targetSenders`: forcing the 0xdeadbeef sender lets the guarded `change()`
    // succeed, so the invariant is expected to break.
    prj.add_test(
        "TargetSenders.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
contract Hello {
bool public world = true;
function change() public {
require(msg.sender == address(0xdeadbeef));
world = false;
}
}
contract TargetSenders is Test {
Hello hello;
function setUp() public {
hello = new Hello();
}
function targetSenders() public view returns (address[] memory) {
address[] memory addrs = new address[](1);
addrs[0] = address(0xdeadbeef);
return addrs;
}
function invariantTrueWorld() public {
require(hello.world() == true, "false world");
}
}
"#,
    );
    // `excludeArtifacts`: the excluded artifact is never fuzzed, and a contract
    // with no mutable functions is skipped automatically instead of erroring.
    prj.add_test(
        "ExcludeArtifacts.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
// Will get automatically excluded. Otherwise it would throw error.
contract NoMutFunctions {
function no_change() public pure {}
}
contract Excluded {
bool public world = true;
function change() public {
world = false;
}
}
contract Hello {
bool public world = true;
function change() public {
world = false;
}
}
contract ExcludeArtifacts is Test {
Excluded excluded;
function setUp() public {
excluded = new Excluded();
new Hello();
new NoMutFunctions();
}
function excludeArtifacts() public returns (string[] memory) {
string[] memory abis = new string[](1);
abis[0] = "test/ExcludeArtifacts.t.sol:Excluded";
return abis;
}
function invariantShouldPass() public {
require(excluded.world() == true, "false world");
}
}
"#,
    );
    // `targetArtifactSelectors` over two artifacts: selectors apply to contracts
    // created mid-campaign (`Child`), which can then break the parent invariant.
    prj.add_test(
        "TargetArtifactSelectors2.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
struct FuzzArtifactSelector {
string artifact;
bytes4[] selectors;
}
contract Parent {
bool public should_be_true = true;
address public child;
function change() public {
child = msg.sender;
should_be_true = false;
}
function create() public {
new Child();
}
}
contract Child {
Parent parent;
bool public changed = false;
constructor() {
parent = Parent(msg.sender);
}
function change_parent() public {
parent.change();
}
function tracked_change_parent() public {
parent.change();
}
}
contract TargetArtifactSelectors2 is Test {
Parent parent;
function setUp() public {
parent = new Parent();
}
function targetArtifactSelectors() public returns (FuzzArtifactSelector[] memory) {
FuzzArtifactSelector[] memory targets = new FuzzArtifactSelector[](2);
bytes4[] memory selectors_child = new bytes4[](1);
selectors_child[0] = Child.change_parent.selector;
targets[0] = FuzzArtifactSelector(
"test/TargetArtifactSelectors2.t.sol:Child", selectors_child
);
bytes4[] memory selectors_parent = new bytes4[](1);
selectors_parent[0] = Parent.create.selector;
targets[1] = FuzzArtifactSelector(
"test/TargetArtifactSelectors2.t.sol:Parent", selectors_parent
);
return targets;
}
function invariantShouldFail() public {
if (!parent.should_be_true()) {
require(!Child(address(parent.child())).changed(), "should have not happened");
}
require(parent.should_be_true() == true, "it's false");
}
}
"#,
    );
    // `targetArtifactSelectors` restricted to the harmless `no_change()`:
    // the invariant must hold.
    prj.add_test(
        "TargetArtifactSelectors.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
struct FuzzArtifactSelector {
string artifact;
bytes4[] selectors;
}
contract Hi {
bool public world = true;
function no_change() public {
world = true;
}
function change() public {
world = false;
}
}
contract TargetArtifactSelectors is Test {
Hi hello;
function setUp() public {
hello = new Hi();
}
function targetArtifactSelectors() public returns (FuzzArtifactSelector[] memory) {
FuzzArtifactSelector[] memory targets = new FuzzArtifactSelector[](1);
bytes4[] memory selectors = new bytes4[](1);
selectors[0] = Hi.no_change.selector;
targets[0] =
FuzzArtifactSelector("test/TargetArtifactSelectors.t.sol:Hi", selectors);
return targets;
}
function invariantShouldPass() public {
require(hello.world() == true, "false world");
}
}
"#,
    );
    // `targetArtifacts`: both `Targeted` instances are fuzzed while `Hello` is
    // not, so one invariant passes and the other fails.
    prj.add_test(
        "TargetArtifacts.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
contract Targeted {
bool public world = true;
function change() public {
world = false;
}
}
contract Hello {
bool public world = true;
function no_change() public {}
}
contract TargetArtifacts is Test {
Targeted target1;
Targeted target2;
Hello hello;
function setUp() public {
target1 = new Targeted();
target2 = new Targeted();
hello = new Hello();
}
function targetArtifacts() public returns (string[] memory) {
string[] memory abis = new string[](1);
abis[0] = "test/TargetArtifacts.t.sol:Targeted";
return abis;
}
function invariantShouldPass() public {
require(target2.world() == true || target1.world() == true || hello.world() == true, "false world");
}
function invariantShouldFail() public {
require(target2.world() == true || target1.world() == true, "false world");
}
}
"#,
    );
    // Test ExcludeContracts
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "ExcludeContracts"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/ExcludeContracts.t.sol:ExcludeContracts
[PASS] invariantTrueWorld() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test ExcludeSelectors
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "ExcludeSelectors"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/ExcludeSelectors.t.sol:ExcludeSelectors
[PASS] invariantFalseWorld() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test ExcludeSenders
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "ExcludeSenders"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/ExcludeSenders.t.sol:ExcludeSenders
[PASS] invariantTrueWorld() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test TargetContracts
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "TargetContracts"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/TargetContracts.t.sol:TargetContracts
[PASS] invariantTrueWorld() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test TargetInterfaces (should fail)
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "TargetWorldInterfaces"]))
        .failure()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/TargetInterfaces.t.sol:TargetWorldInterfaces
[FAIL: false world]
[SEQUENCE]
 invariantTrueWorld() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/TargetInterfaces.t.sol:TargetWorldInterfaces
[FAIL: false world]
[SEQUENCE]
 invariantTrueWorld() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
    // Test TargetSelectors
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "TargetSelectors"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/TargetSelectors.t.sol:TargetSelectors
[PASS] invariantTrueWorld() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test TargetSenders (should fail)
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "TargetSenders"])).failure().stdout_eq(
        str![[r#"
...
Ran 1 test for test/TargetSenders.t.sol:TargetSenders
[FAIL: false world]
[SEQUENCE]
 invariantTrueWorld() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/TargetSenders.t.sol:TargetSenders
[FAIL: false world]
[SEQUENCE]
 invariantTrueWorld() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]],
    );
    // Test ExcludeArtifacts
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "ExcludeArtifacts"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/ExcludeArtifacts.t.sol:ExcludeArtifacts
[PASS] invariantShouldPass() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test TargetArtifactSelectors2 (should fail)
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "TargetArtifactSelectors2"]))
        .failure()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/TargetArtifactSelectors2.t.sol:TargetArtifactSelectors2
[FAIL: it's false]
[SEQUENCE]
 invariantShouldFail() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/TargetArtifactSelectors2.t.sol:TargetArtifactSelectors2
[FAIL: it's false]
[SEQUENCE]
 invariantShouldFail() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
    // Test TargetArtifactSelectors
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "^TargetArtifactSelectors$"]))
        .success()
        .stdout_eq(str![[r#"
...
Ran 1 test for test/TargetArtifactSelectors.t.sol:TargetArtifactSelectors
[PASS] invariantShouldPass() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
    // Test TargetArtifacts
    assert_invariant(cmd.forge_fuse().args(["test", "--mc", "^TargetArtifacts$"]))
        .failure()
        .stdout_eq(str![[r#"
...
Ran 2 tests for test/TargetArtifacts.t.sol:TargetArtifacts
[FAIL: false world]
[SEQUENCE]
 invariantShouldFail() ([RUNS])
[STATS]
[PASS] invariantShouldPass() ([RUNS])
[STATS]
Suite result: FAILED. 1 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 1 failed, 0 skipped (2 total tests)
Failing tests:
Encountered 1 failing test in test/TargetArtifacts.t.sol:TargetArtifacts
[FAIL: false world]
[SEQUENCE]
 invariantShouldFail() ([RUNS])
Encountered a total of 1 failing tests, 1 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/5625
// https://github.com/foundry-rs/foundry/issues/6166
// `Target.wrongSelector` is not called when handler added as `targetContract`
// `Target.wrongSelector` is called (and test fails) when no `targetContract` set
forgetest!(fuzzed_selected_targets, |prj, cmd| {
    prj.insert_vm();
    prj.insert_ds_test();
    // `fail_on_revert` makes any call to `wrongSelector` fail the invariant run.
    prj.update_config(|config| {
        config.invariant.depth = 10;
        config.invariant.fail_on_revert = true;
    });
    // Two suites over the same etched `Target`: one restricts fuzzing to the
    // handler via `targetContracts`, the other leaves target selection dynamic.
    prj.add_test(
        "FuzzedTargetContracts.t.sol",
        r#"
import { DSTest as Test } from "src/test.sol";
import "src/Vm.sol";
contract Target {
uint256 count;
function wrongSelector() external {
revert("wrong target selector called");
}
function goodSelector() external {
count++;
}
}
contract Handler is Test {
function increment() public {
Target(0x6B175474E89094C44Da98b954EedeAC495271d0F).goodSelector();
}
}
contract ExplicitTargetContract is Test {
Vm constant vm = Vm(HEVM_ADDRESS);
Handler handler;
function setUp() public {
Target target = new Target();
bytes memory targetCode = address(target).code;
vm.etch(address(0x6B175474E89094C44Da98b954EedeAC495271d0F), targetCode);
handler = new Handler();
}
function targetContracts() public view returns (address[] memory) {
address[] memory addrs = new address[](1);
addrs[0] = address(handler);
return addrs;
}
function invariant_explicit_target() public {}
}
contract DynamicTargetContract is Test {
Vm constant vm = Vm(HEVM_ADDRESS);
Handler handler;
function setUp() public {
Target target = new Target();
bytes memory targetCode = address(target).code;
vm.etch(address(0x6B175474E89094C44Da98b954EedeAC495271d0F), targetCode);
handler = new Handler();
}
function invariant_dynamic_targets() public {}
}
"#,
    );
    // The explicit-target suite passes (only the handler is fuzzed); the dynamic
    // one eventually hits `wrongSelector` on the etched target and fails.
    assert_invariant(cmd.args(["test", "-j1"])).failure().stdout_eq(str![[r#"
...
[PASS] invariant_explicit_target() ([RUNS])
...
[FAIL: wrong target selector called]
[SEQUENCE]
 invariant_dynamic_targets() ([RUNS])
...
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/invariant/storage.rs | crates/forge/tests/cli/test_cmd/invariant/storage.rs | use super::*;
// Checks that the invariant fuzzer can mutate storage slots of several value
// kinds — address, string, uint, and a write guarded by a magic constant —
// breaking all four corresponding invariants.
forgetest_init!(storage, |prj, cmd| {
    prj.add_test(
        "name",
        r#"
import "forge-std/Test.sol";
contract Contract {
address public addr = address(0xbeef);
string public str = "hello";
uint256 public num = 1337;
uint256 public pushNum;
function changeAddress(address _addr) public {
if (_addr == addr) {
addr = address(0);
}
}
function changeString(string memory _str) public {
if (keccak256(bytes(_str)) == keccak256(bytes(str))) {
str = "";
}
}
function changeUint(uint256 _num) public {
if (_num == num) {
num = 0;
}
}
function push(uint256 _num) public {
if (_num == 68) {
pushNum = 69;
}
}
}
contract InvariantStorageTest is Test {
Contract c;
function setUp() public {
c = new Contract();
}
function invariantChangeAddress() public view {
require(c.addr() == address(0xbeef), "changedAddr");
}
function invariantChangeString() public view {
require(keccak256(bytes(c.str())) == keccak256(bytes("hello")), "changedStr");
}
function invariantChangeUint() public view {
require(c.num() == 1337, "changedUint");
}
function invariantPush() public view {
require(c.pushNum() == 0, "pushUint");
}
}
"#,
    );
    // All four invariants are expected to be violated.
    assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Suite result: FAILED. 0 passed; 4 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 4 failed, 0 skipped (4 total tests)
Failing tests:
Encountered 4 failing tests in test/name.sol:InvariantStorageTest
...
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/invariant/mod.rs | crates/forge/tests/cli/test_cmd/invariant/mod.rs | use alloy_primitives::U256;
use foundry_test_utils::{TestCommand, forgetest_init, snapbox::cmd::OutputAssert, str};
mod common;
mod storage;
mod target;
/// Runs `cmd` and returns an [`OutputAssert`] with the non-deterministic parts
/// of invariant-test output redacted, so snapshot comparisons stay stable.
fn assert_invariant(cmd: &mut TestCommand) -> OutputAssert {
    // Placeholder -> regex pairs replacing run-dependent output fragments.
    let redactions: [(&str, &str); 3] = [
        // "runs: N, calls: N, reverts: N" counters.
        ("[RUNS]", r"runs: \d+, calls: \d+, reverts: \d+"),
        // The failing call-sequence listing.
        ("[SEQUENCE]", r"\[Sequence\].*(\n\t\t.*)*"),
        // The selector metrics table (box-drawing bordered).
        ("[STATS]", r"╭[\s\S]*?╰.*"),
    ];
    cmd.assert_with(&redactions)
}
// Tests that a persisted failure doesn't fail due to assume revert if test driver is changed.
forgetest_init!(should_not_fail_replay_assume, |prj, cmd| {
    prj.update_config(|config| {
        config.invariant.fail_on_revert = true;
        config.invariant.max_assume_rejects = 10;
    });
    // Add initial test that breaks invariant.
    prj.add_test(
        "AssumeTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract AssumeHandler is Test {
function fuzzMe(uint256 a) public {
require(false, "Invariant failure");
}
}
contract AssumeTest is Test {
function setUp() public {
AssumeHandler handler = new AssumeHandler();
}
function invariant_assume() public {}
}
"#,
    );
    // First run records a persisted failure for `invariant_assume`.
    cmd.args(["test", "--mt", "invariant_assume"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: Invariant failure]
...
"#]]);
    // Change test to use assume instead require. Same test should fail with too many inputs
    // rejected message instead persisted failure revert.
    prj.add_test(
        "AssumeTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract AssumeHandler is Test {
function fuzzMe(uint256 a) public {
vm.assume(false);
}
}
contract AssumeTest is Test {
function setUp() public {
AssumeHandler handler = new AssumeHandler();
}
function invariant_assume() public {}
}
"#,
    );
    cmd.assert_failure().stdout_eq(str![[r#"
...
[FAIL: `vm.assume` rejected too many inputs (10 allowed)] invariant_assume() (runs: 0, calls: 0, reverts: 0)
...
"#]]);
});
// Test too many inputs rejected for `assumePrecompile`/`assumeForgeAddress`.
// <https://github.com/foundry-rs/foundry/issues/9054>
forgetest_init!(should_revert_with_assume_code, |prj, cmd| {
    // Fixed seed keeps the reject count deterministic across runs.
    prj.update_config(|config| {
        config.invariant.fail_on_revert = true;
        config.invariant.max_assume_rejects = 10;
        config.fuzz.seed = Some(U256::from(100u32));
    });
    // Add initial test that breaks invariant.
    prj.add_test(
        "AssumeTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract BalanceTestHandler is Test {
address public ref = address(1412323);
address alice;
constructor(address _alice) {
alice = _alice;
}
function increment(uint256 amount_, address addr) public {
assumeNotPrecompile(addr);
assumeNotForgeAddress(addr);
assertEq(alice.balance, 100_000 ether);
}
}
contract BalanceAssumeTest is Test {
function setUp() public {
address alice = makeAddr("alice");
vm.deal(alice, 100_000 ether);
targetSender(alice);
BalanceTestHandler handler = new BalanceTestHandler(alice);
targetContract(address(handler));
}
function invariant_balance() public {}
}
"#,
    );
    // The precompile/forge-address assumes reject more than the allowed 10 inputs.
    cmd.args(["test", "--mt", "invariant_balance"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: `vm.assume` rejected too many inputs (10 allowed)] invariant_balance() (runs: 2, calls: 1000, reverts: 0)
...
"#]]);
});
// Test proper message displayed if `targetSelector`/`excludeSelector` called with empty selectors.
// <https://github.com/foundry-rs/foundry/issues/9066>
forgetest_init!(should_not_panic_if_no_selectors, |prj, cmd| {
    // Registering empty selector lists must produce a setup error, not a panic.
    prj.add_test(
        "NoSelectorTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract TestHandler is Test {}
contract NoSelectorTest is Test {
bytes4[] selectors;
function setUp() public {
TestHandler handler = new TestHandler();
targetSelector(FuzzSelector({addr: address(handler), selectors: selectors}));
excludeSelector(FuzzSelector({addr: address(handler), selectors: selectors}));
}
function invariant_panic() public {}
}
"#,
    );
    cmd.args(["test", "--mt", "invariant_panic"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: failed to set up invariant testing environment: No contracts to fuzz.] invariant_panic() (runs: 0, calls: 0, reverts: 0)
...
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/3607>
forgetest_init!(should_show_invariant_metrics, |prj, cmd| {
    // Two handlers with two fuzzed selectors each; metrics tables are enabled
    // per test via inline `show-metrics` config.
    prj.add_test(
        "SelectorMetricsTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract CounterTest is Test {
function setUp() public {
CounterHandler handler = new CounterHandler();
AnotherCounterHandler handler1 = new AnotherCounterHandler();
// targetContract(address(handler1));
}
/// forge-config: default.invariant.runs = 10
/// forge-config: default.invariant.show-metrics = true
function invariant_counter() public {}
/// forge-config: default.invariant.runs = 10
/// forge-config: default.invariant.show-metrics = true
function invariant_counter2() public {}
}
contract CounterHandler is Test {
function doSomething(uint256 a) public {
vm.assume(a < 10_000_000);
require(a < 100_000);
}
function doAnotherThing(uint256 a) public {
vm.assume(a < 10_000_000);
require(a < 100_000);
}
}
contract AnotherCounterHandler is Test {
function doWork(uint256 a) public {
vm.assume(a < 10_000_000);
require(a < 100_000);
}
function doWorkThing(uint256 a) public {
vm.assume(a < 10_000_000);
require(a < 100_000);
}
}
"#,
    );
    // Each passing invariant prints a per-selector calls/reverts/discards table.
    cmd.args(["test", "--mt", "invariant_"]).assert_success().stdout_eq(str![[r#"
...
[PASS] invariant_counter() (runs: 10, calls: 5000, reverts: [..])
╭-----------------------+----------------+-------+---------+----------╮
| Contract              | Selector       | Calls | Reverts | Discards |
+=====================================================================+
| AnotherCounterHandler | doWork         | [..]  | [..]    | [..]     |
|-----------------------+----------------+-------+---------+----------|
| AnotherCounterHandler | doWorkThing    | [..]  | [..]    | [..]     |
|-----------------------+----------------+-------+---------+----------|
| CounterHandler        | doAnotherThing | [..]  | [..]    | [..]     |
|-----------------------+----------------+-------+---------+----------|
| CounterHandler        | doSomething    | [..]  | [..]    | [..]     |
╰-----------------------+----------------+-------+---------+----------╯
[PASS] invariant_counter2() (runs: 10, calls: 5000, reverts: [..])
╭-----------------------+----------------+-------+---------+----------╮
| Contract              | Selector       | Calls | Reverts | Discards |
+=====================================================================+
| AnotherCounterHandler | doWork         | [..]  | [..]    | [..]     |
|-----------------------+----------------+-------+---------+----------|
| AnotherCounterHandler | doWorkThing    | [..]  | [..]    | [..]     |
|-----------------------+----------------+-------+---------+----------|
| CounterHandler        | doAnotherThing | [..]  | [..]    | [..]     |
|-----------------------+----------------+-------+---------+----------|
| CounterHandler        | doSomething    | [..]  | [..]    | [..]     |
╰-----------------------+----------------+-------+---------+----------╯
Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
"#]]);
});
// Tests that invariant exists with success after configured timeout.
forgetest_init!(should_apply_configured_timeout, |prj, cmd| {
    // Add initial test that breaks invariant.
    prj.add_test(
        "TimeoutTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
contract TimeoutHandler is Test {
uint256 public count;
function increment() public {
count++;
}
}
contract TimeoutTest is Test {
TimeoutHandler handler;
function setUp() public {
handler = new TimeoutHandler();
}
/// forge-config: default.invariant.runs = 10000
/// forge-config: default.invariant.depth = 20000
/// forge-config: default.invariant.timeout = 1
function invariant_counter_timeout() public view {
// Invariant will fail if more than 10000 increments.
// Make sure test timeouts after one second and remaining runs are canceled.
require(handler.count() < 10000);
}
}
"#,
    );
    // The 1s timeout cancels the huge campaign before the counter can break the
    // invariant, so the test passes with 0 completed runs.
    cmd.args(["test", "--mt", "invariant_counter_timeout"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/TimeoutTest.t.sol:TimeoutTest
[PASS] invariant_counter_timeout() (runs: 0, calls: 0, reverts: 0)
╭----------------+-----------+-------+---------+----------╮
| Contract       | Selector  | Calls | Reverts | Discards |
+=========================================================+
| TimeoutHandler | increment | [..]  | [..]    | [..]     |
╰----------------+-----------+-------+---------+----------╯
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// Tests that selector hits are uniformly distributed
// <https://github.com/foundry-rs/foundry/issues/2986>
forgetest_init!(invariant_selectors_weight, |prj, cmd| {
    // A single run of depth 10 over 5 selectors; with the fixed seed below the
    // hit distribution is deterministic and checked in `afterInvariant`.
    prj.update_config(|config| {
        config.invariant.runs = 1;
        config.invariant.depth = 10;
    });
    prj.add_source(
        "InvariantHandlers.sol",
        r#"
contract HandlerOne {
uint256 public hit1;
function selector1() external {
hit1 += 1;
}
}
contract HandlerTwo {
uint256 public hit2;
uint256 public hit3;
uint256 public hit4;
uint256 public hit5;
function selector2() external {
hit2 += 1;
}
function selector3() external {
hit3 += 1;
}
function selector4() external {
hit4 += 1;
}
function selector5() external {
hit5 += 1;
}
}
"#,
    );
    prj.add_test(
        "InvariantSelectorsWeightTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
import "src/InvariantHandlers.sol";
contract InvariantSelectorsWeightTest is Test {
HandlerOne handlerOne;
HandlerTwo handlerTwo;
function setUp() public {
handlerOne = new HandlerOne();
handlerTwo = new HandlerTwo();
}
function afterInvariant() public {
assertEq(handlerOne.hit1(), 2);
assertEq(handlerTwo.hit2(), 2);
assertEq(handlerTwo.hit3(), 2);
assertEq(handlerTwo.hit4(), 1);
assertEq(handlerTwo.hit5(), 3);
}
function invariant_selectors_weight() public view {}
}
"#,
    );
    cmd.args(["test", "--fuzz-seed", "119", "--mt", "invariant_selectors_weight"]).assert_success();
});
// Tests original and new counterexample lengths are displayed on failure.
// Tests switch from regular sequence output to solidity.
forgetest_init!(invariant_sequence_len, |prj, cmd| {
    prj.initialize_default_contracts();
    // Fixed seed so the failing sequence (and its length) is reproducible.
    prj.update_config(|config| {
        config.fuzz.seed = Some(U256::from(10u32));
    });
    prj.add_test(
        "InvariantSequenceLenTest.t.sol",
        r#"
import {Test} from "forge-std/Test.sol";
import "src/Counter.sol";
contract InvariantSequenceLenTest is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
targetContract(address(counter));
}
function invariant_increment() public {
require(counter.number() / 2 < 100000000000000000000000000000000, "invariant increment failure");
}
}
"#,
    );
    // With shrinking on, the original 3-call sequence shrinks to 1 call.
    cmd.args(["test", "--mt", "invariant_increment"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: invariant increment failure]
	[Sequence] (original: 3, shrunk: 1)
...
"#]]);
    // Check regular sequence output. Shrink disabled to show several lines.
    cmd.forge_fuse().arg("clean").assert_success();
    prj.update_config(|config| {
        config.invariant.shrink_run_limit = 0;
    });
    cmd.forge_fuse().args(["test", "--mt", "invariant_increment"]).assert_failure().stdout_eq(
        str![[r#"
...
Failing tests:
Encountered 1 failing test in test/InvariantSequenceLenTest.t.sol:InvariantSequenceLenTest
[FAIL: invariant increment failure]
	[Sequence] (original: 3, shrunk: 3)
		sender=0x00000000000000000000000000000000000014aD addr=[src/Counter.sol:Counter]0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f calldata=increment() args=[]
		sender=0x8ef7F804bAd9183981A366EA618d9D47D3124649 addr=[src/Counter.sol:Counter]0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f calldata=increment() args=[]
		sender=0x00000000000000000000000000000000000016Ac addr=[src/Counter.sol:Counter]0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f calldata=setNumber(uint256) args=[284406551521730736391345481857560031052359183671404042152984097777 [2.844e65]]
 invariant_increment() (runs: 0, calls: 0, reverts: 0)
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]],
    );
    // Check solidity sequence output on same failure.
    cmd.forge_fuse().arg("clean").assert_success();
    prj.update_config(|config| {
        config.invariant.show_solidity = true;
    });
    cmd.forge_fuse().args(["test", "--mt", "invariant_increment"]).assert_failure().stdout_eq(
        str![[r#"
...
Failing tests:
Encountered 1 failing test in test/InvariantSequenceLenTest.t.sol:InvariantSequenceLenTest
[FAIL: invariant increment failure]
	[Sequence] (original: 3, shrunk: 3)
		vm.prank(0x00000000000000000000000000000000000014aD);
		Counter(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f).increment();
		vm.prank(0x8ef7F804bAd9183981A366EA618d9D47D3124649);
		Counter(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f).increment();
		vm.prank(0x00000000000000000000000000000000000016Ac);
		Counter(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f).setNumber(284406551521730736391345481857560031052359183671404042152984097777);
 invariant_increment() (runs: 0, calls: 0, reverts: 0)
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]],
    );
    // Persisted failures should be able to switch output.
    prj.update_config(|config| {
        config.invariant.show_solidity = false;
    });
    cmd.forge_fuse().args(["test", "--mt", "invariant_increment"]).assert_failure().stdout_eq(
        str![[r#"
...
Failing tests:
Encountered 1 failing test in test/InvariantSequenceLenTest.t.sol:InvariantSequenceLenTest
[FAIL: invariant_increment replay failure]
	[Sequence] (original: 3, shrunk: 3)
		sender=0x00000000000000000000000000000000000014aD addr=[src/Counter.sol:Counter]0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f calldata=increment() args=[]
		sender=0x8ef7F804bAd9183981A366EA618d9D47D3124649 addr=[src/Counter.sol:Counter]0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f calldata=increment() args=[]
		sender=0x00000000000000000000000000000000000016Ac addr=[src/Counter.sol:Counter]0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f calldata=setNumber(uint256) args=[284406551521730736391345481857560031052359183671404042152984097777 [2.844e65]]
 invariant_increment() (runs: 1, calls: 1, reverts: 1)
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]],
    );
});
// Tests that persisted failure is discarded if test contract was modified.
// <https://github.com/foundry-rs/foundry/issues/9965>
forgetest_init!(invariant_replay_with_different_bytecode, |prj, cmd| {
prj.update_config(|config| {
config.invariant.runs = 5;
config.invariant.depth = 5;
});
// `backdoor` ignores its argument and always sets owner to address(888),
// so the invariant below is guaranteed to break once it is called.
prj.add_source(
"Ownable.sol",
r#"
contract Ownable {
address public owner = address(777);
function backdoor(address _owner) external {
owner = address(888);
}
function changeOwner(address _owner) external {
}
}
"#,
);
prj.add_test(
"OwnableTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import "src/Ownable.sol";
contract OwnableTest is Test {
Ownable ownable;
function setUp() public {
ownable = new Ownable();
}
function invariant_never_owner() public {
require(ownable.owner() != address(888), "never owner");
}
}
"#,
);
// First run finds and persists the failing sequence.
cmd.args(["test", "--mt", "invariant_never_owner"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: never owner]
...
"#]]);
// Should replay failure if same test.
cmd.assert_failure().stdout_eq(str![[r#"
...
[FAIL: invariant_never_owner replay failure]
...
"#]]);
// Different test driver that should not fail the invariant.
prj.add_test(
"OwnableTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import "src/Ownable.sol";
contract OwnableTest is Test {
Ownable ownable;
function setUp() public {
ownable = new Ownable();
// Ignore selector that fails invariant.
bytes4[] memory selectors = new bytes4[](1);
selectors[0] = Ownable.changeOwner.selector;
targetSelector(FuzzSelector({addr: address(ownable), selectors: selectors}));
}
function invariant_never_owner() public {
require(ownable.owner() != address(888), "never owner");
}
}
"#,
);
// The modified test contract compiles to different bytecode, so the persisted
// failure file is ignored (warning on stderr) and the run now passes.
cmd.assert_success().stderr_eq(str![[r#"
...
Warning: Failure from "[..]/invariant/failures/OwnableTest/invariant_never_owner" file was ignored because test contract bytecode has changed.
...
"#]])
.stdout_eq(str![[r#"
...
[PASS] invariant_never_owner() (runs: 5, calls: 25, reverts: 0)
...
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/10253>
forgetest_init!(invariant_test_target, |prj, cmd| {
prj.update_config(|config| {
config.invariant.runs = 5;
config.invariant.depth = 5;
});
prj.add_test(
"InvariantTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract InvariantTest is Test {
uint256 count;
function setCount(uint256 _count) public {
count = _count;
}
function setUp() public {
}
function invariant_check_count() public {
}
}
"#,
);
// Without an explicit target the test contract itself is not fuzzed, so the
// invariant setup fails with "No contracts to fuzz".
cmd.args(["test", "--mt", "invariant_check_count"]).assert_failure().stdout_eq(str![[r#"
...
[FAIL: failed to set up invariant testing environment: No contracts to fuzz.] invariant_check_count() (runs: 0, calls: 0, reverts: 0)
...
"#]]);
prj.add_test(
"InvariantTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract InvariantTest is Test {
uint256 count;
function setCount(uint256 _count) public {
count = _count;
}
function setUp() public {
targetContract(address(this));
}
function invariant_check_count() public {
}
}
"#,
);
// After `targetContract(address(this))` the test contract is a valid fuzz
// target and the invariant runs successfully.
cmd.forge_fuse().args(["test", "--mt", "invariant_check_count"]).assert_success().stdout_eq(
str![[r#"
...
[PASS] invariant_check_count() (runs: 5, calls: 25, reverts: 0)
...
"#]],
);
});
// Tests that reserved test functions are not fuzzed when test is set as target.
// <https://github.com/foundry-rs/foundry/issues/10469>
forgetest_init!(invariant_target_test_contract_selectors, |prj, cmd| {
prj.update_config(|config| {
config.invariant.runs = 10;
config.invariant.depth = 100;
});
prj.add_test(
"InvariantTargetTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract InvariantTargetTest is Test {
bool fooCalled;
bool testSanityCalled;
bool testTableCalled;
uint256 invariantCalledNum;
uint256 setUpCalledNum;
function setUp() public {
targetContract(address(this));
}
function beforeTestSetup() public {
}
// Only this selector should be targeted.
function foo() public {
fooCalled = true;
}
function fixtureCalled() public returns (bool[] memory) {
}
function table_sanity(bool called) public {
testTableCalled = called;
}
function test_sanity() public {
testSanityCalled = true;
}
function afterInvariant() public {
}
function invariant_foo_called() public view {
}
function invariant_testSanity_considered_target() public {
}
function invariant_setUp_considered_target() public {
setUpCalledNum++;
}
function invariant_considered_target() public {
invariantCalledNum++;
}
}
"#,
);
// Every call summary table must list only `foo`: reserved functions (setUp,
// beforeTestSetup, afterInvariant, test_*, table_*, fixture*, invariant_*)
// are filtered out even though the test contract itself is the target.
cmd.args(["test", "--mc", "InvariantTargetTest", "--mt", "invariant"])
.assert_success()
.stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 4 tests for test/InvariantTargetTest.t.sol:InvariantTargetTest
[PASS] invariant_considered_target() (runs: 10, calls: 1000, reverts: 0)
╭---------------------+----------+-------+---------+----------╮
| Contract            | Selector | Calls | Reverts | Discards |
+=============================================================+
| InvariantTargetTest | foo      | 1000  | 0       | 0        |
╰---------------------+----------+-------+---------+----------╯
[PASS] invariant_foo_called() (runs: 10, calls: 1000, reverts: 0)
╭---------------------+----------+-------+---------+----------╮
| Contract            | Selector | Calls | Reverts | Discards |
+=============================================================+
| InvariantTargetTest | foo      | 1000  | 0       | 0        |
╰---------------------+----------+-------+---------+----------╯
[PASS] invariant_setUp_considered_target() (runs: 10, calls: 1000, reverts: 0)
╭---------------------+----------+-------+---------+----------╮
| Contract            | Selector | Calls | Reverts | Discards |
+=============================================================+
| InvariantTargetTest | foo      | 1000  | 0       | 0        |
╰---------------------+----------+-------+---------+----------╯
[PASS] invariant_testSanity_considered_target() (runs: 10, calls: 1000, reverts: 0)
╭---------------------+----------+-------+---------+----------╮
| Contract            | Selector | Calls | Reverts | Discards |
+=============================================================+
| InvariantTargetTest | foo      | 1000  | 0       | 0        |
╰---------------------+----------+-------+---------+----------╯
Suite result: ok. 4 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 4 tests passed, 0 failed, 0 skipped (4 total tests)
"#]]);
});
// Tests that `targetSelector` and `excludeSelector` applied on test contract selectors are
// applied.
// <https://github.com/foundry-rs/foundry/issues/11006>
forgetest_init!(invariant_target_test_include_exclude_selectors, |prj, cmd| {
prj.update_config(|config| {
config.invariant.runs = 10;
config.invariant.depth = 100;
});
prj.add_test(
"InvariantTargetTest.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
contract InvariantTargetIncludeTest is Test {
bool include = true;
function setUp() public {
targetContract(address(this));
bytes4[] memory selectors = new bytes4[](2);
selectors[0] = this.shouldInclude1.selector;
selectors[1] = this.shouldInclude2.selector;
targetSelector(FuzzSelector({addr: address(this), selectors: selectors}));
}
function shouldExclude1() public {
include = false;
}
function shouldInclude1() public {
include = true;
}
function shouldExclude2() public {
include = false;
}
function shouldInclude2() public {
include = true;
}
function invariant_include() public view {
require(include, "does not include");
}
}
contract InvariantTargetExcludeTest is Test {
bool include = true;
function setUp() public {
targetContract(address(this));
bytes4[] memory selectors = new bytes4[](2);
selectors[0] = this.shouldExclude1.selector;
selectors[1] = this.shouldExclude2.selector;
excludeSelector(FuzzSelector({addr: address(this), selectors: selectors}));
}
function shouldExclude1() public {
include = false;
}
function shouldInclude1() public {
include = true;
}
function shouldExclude2() public {
include = false;
}
function shouldInclude2() public {
include = true;
}
function invariant_exclude() public view {
require(include, "does not include");
}
}
"#,
);
// `targetSelector` restricts fuzzing to the two shouldInclude* functions, so
// only those appear in the call summary and the invariant holds.
cmd.args(["test", "--mt", "invariant_include"]).assert_success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful!
Ran 1 test for test/InvariantTargetTest.t.sol:InvariantTargetIncludeTest
[PASS] invariant_include() (runs: 10, calls: 1000, reverts: 0)
╭----------------------------+----------------+-------+---------+----------╮
| Contract                   | Selector       | Calls | Reverts | Discards |
+==========================================================================+
| InvariantTargetIncludeTest | shouldInclude1 | [..]  | 0       | 0        |
|----------------------------+----------------+-------+---------+----------|
| InvariantTargetIncludeTest | shouldInclude2 | [..]  | 0       | 0        |
╰----------------------------+----------------+-------+---------+----------╯
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// `excludeSelector` removes the two shouldExclude* functions, leaving only
// the include variants as fuzz targets.
cmd.forge_fuse().args(["test", "--mt", "invariant_exclude"]).assert_success().stdout_eq(str![
[r#"
No files changed, compilation skipped
Ran 1 test for test/InvariantTargetTest.t.sol:InvariantTargetExcludeTest
[PASS] invariant_exclude() (runs: 10, calls: 1000, reverts: 0)
╭----------------------------+----------------+-------+---------+----------╮
| Contract                   | Selector       | Calls | Reverts | Discards |
+==========================================================================+
| InvariantTargetExcludeTest | shouldInclude1 | [..]  | 0       | 0        |
|----------------------------+----------------+-------+---------+----------|
| InvariantTargetExcludeTest | shouldInclude2 | [..]  | 0       | 0        |
╰----------------------------+----------------+-------+---------+----------╯
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]
]);
// Same runs with `--md`: the call summary renders as a markdown table.
cmd.forge_fuse()
.args(["test", "--mt", "invariant_include", "--md"])
.assert_success()
.stdout_eq(str![[r#"
No files changed, compilation skipped
Ran 1 test for test/InvariantTargetTest.t.sol:InvariantTargetIncludeTest
[PASS] invariant_include() (runs: 10, calls: 1000, reverts: 0)
| Contract                   | Selector       | Calls | Reverts | Discards |
|----------------------------|----------------|-------|---------|----------|
| InvariantTargetIncludeTest | shouldInclude1 | [..]  | 0       | 0        |
| InvariantTargetIncludeTest | shouldInclude2 | [..]  | 0       | 0        |
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
cmd.forge_fuse()
.args(["test", "--mt", "invariant_exclude", "--md"])
.assert_success()
.stdout_eq(str![[r#"
No files changed, compilation skipped
Ran 1 test for test/InvariantTargetTest.t.sol:InvariantTargetExcludeTest
[PASS] invariant_exclude() (runs: 10, calls: 1000, reverts: 0)
| Contract                   | Selector       | Calls | Reverts | Discards |
|----------------------------|----------------|-------|---------|----------|
| InvariantTargetExcludeTest | shouldInclude1 | [..]  | 0       | 0        |
| InvariantTargetExcludeTest | shouldInclude2 | [..]  | 0       | 0        |
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/11453>
forgetest_init!(corpus_dir, |prj, cmd| {
prj.initialize_default_contracts();
prj.update_config(|config| {
config.invariant.runs = 10;
config.invariant.depth = 10;
config.invariant.corpus.corpus_dir = Some("invariant_corpus".into());
config.fuzz.runs = 10;
config.fuzz.corpus.corpus_dir = Some("fuzz_corpus".into());
});
prj.add_test(
"CounterTests.t.sol",
r#"
import {Test} from "forge-std/Test.sol";
import {Counter} from "../src/Counter.sol";
contract Counter1Test is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
counter.setNumber(0);
}
function testFuzz_SetNumber(uint256 x) public {
counter.setNumber(x);
assertEq(counter.number(), x);
}
function invariant_counter_called() public view {
}
}
contract Counter2Test is Test {
Counter public counter;
function setUp() public {
counter = new Counter();
counter.setNumber(0);
}
function testFuzz_SetNumber(uint256 x) public {
counter.setNumber(x);
assertEq(counter.number(), x);
}
function invariant_counter_called() public view {
}
}
"#,
);
// Running all tests should populate both configured corpus directories.
cmd.args(["test"]).assert_success().stdout_eq(str![[r#"
...
Ran 3 test suites [ELAPSED]: 6 tests passed, 0 failed, 0 skipped (6 total tests)
"#]]);
// A corpus sub-directory is created per test contract and per test function,
// for both invariant and fuzz corpora.
assert!(
prj.root()
.join("invariant_corpus")
.join("Counter1Test")
.join("invariant_counter_called")
.exists()
);
assert!(
prj.root()
.join("invariant_corpus")
.join("Counter2Test")
.join("invariant_counter_called")
.exists()
);
assert!(
prj.root().join("fuzz_corpus").join("Counter1Test").join("testFuzz_SetNumber").exists()
);
assert!(
prj.root().join("fuzz_corpus").join("Counter2Test").join("testFuzz_SetNumber").exists()
);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/tests/cli/test_cmd/invariant/common.rs | crates/forge/tests/cli/test_cmd/invariant/common.rs | use super::*;
// Tests that `afterInvariant` hook failures are reported as test failures and
// independently from regular invariant failures (per-test depth/runs are set
// via inline `forge-config` comments so each invariant trips at a different
// handler call count).
forgetest!(invariant_after_invariant, |prj, cmd| {
prj.insert_vm();
prj.insert_ds_test();
prj.add_test(
"InvariantAfterInvariant.t.sol",
r#"
import { DSTest as Test } from "src/test.sol";
struct FuzzSelector {
address addr;
bytes4[] selectors;
}
contract AfterInvariantHandler {
uint256 public count;
function inc() external {
count += 1;
}
}
contract InvariantAfterInvariantTest is Test {
AfterInvariantHandler handler;
function setUp() public {
handler = new AfterInvariantHandler();
}
function targetSelectors() public returns (FuzzSelector[] memory) {
FuzzSelector[] memory targets = new FuzzSelector[](1);
bytes4[] memory selectors = new bytes4[](1);
selectors[0] = handler.inc.selector;
targets[0] = FuzzSelector(address(handler), selectors);
return targets;
}
function afterInvariant() public {
require(handler.count() < 10, "afterInvariant failure");
}
/// forge-config: default.invariant.runs = 1
/// forge-config: default.invariant.depth = 11
function invariant_after_invariant_failure() public view {
require(handler.count() < 20, "invariant after invariant failure");
}
/// forge-config: default.invariant.runs = 1
/// forge-config: default.invariant.depth = 11
function invariant_failure() public view {
require(handler.count() < 9, "invariant failure");
}
/// forge-config: default.invariant.runs = 1
/// forge-config: default.invariant.depth = 5
function invariant_success() public view {
require(handler.count() < 11, "invariant should not fail");
}
}
"#,
);
// Expect two failures (one from afterInvariant, one from the invariant body)
// and one pass.
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 3 tests for test/InvariantAfterInvariant.t.sol:InvariantAfterInvariantTest
[FAIL: afterInvariant failure]
[SEQUENCE]
invariant_after_invariant_failure() ([RUNS])
[STATS]
[FAIL: invariant failure]
[SEQUENCE]
invariant_failure() ([RUNS])
[STATS]
[PASS] invariant_success() ([RUNS])
[STATS]
Suite result: FAILED. 1 passed; 2 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 2 failed, 0 skipped (3 total tests)
Failing tests:
Encountered 2 failing tests in test/InvariantAfterInvariant.t.sol:InvariantAfterInvariantTest
[FAIL: afterInvariant failure]
[SEQUENCE]
invariant_after_invariant_failure() ([RUNS])
[FAIL: invariant failure]
[SEQUENCE]
invariant_failure() ([RUNS])
Encountered a total of 2 failing tests, 1 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
// Tests `vm.assume` behavior in invariant runs: rejected inputs are not
// counted as reverts even with `fail_on_revert`, but exceeding
// `max_assume_rejects` fails the test.
forgetest_init!(invariant_assume, |prj, cmd| {
prj.update_config(|config| {
config.invariant.runs = 1;
config.invariant.depth = 10;
// Should not treat vm.assume as revert.
config.invariant.fail_on_revert = true;
});
prj.add_test(
"InvariantAssume.t.sol",
r#"
import "forge-std/Test.sol";
contract Handler is Test {
function doSomething(uint256 param) public {
vm.assume(param == 0);
}
}
contract InvariantAssume is Test {
Handler handler;
function setUp() public {
handler = new Handler();
}
function invariant_dummy() public {}
}
"#,
);
// With no rejection cap the assume-heavy handler still passes.
assert_invariant(cmd.args(["test"])).success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
Warning (2018): Function state mutability can be restricted to pure
[FILE]:7:5:
|
7 | function doSomething(uint256 param) public {
| ^ (Relevant source part starts here and spans across multiple lines).
Ran 1 test for test/InvariantAssume.t.sol:InvariantAssume
[PASS] invariant_dummy() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
// Test that max_assume_rejects is respected.
prj.update_config(|config| {
config.invariant.max_assume_rejects = 1;
});
assert_invariant(&mut cmd).failure().stdout_eq(str![[r#"
No files changed, compilation skipped
Ran 1 test for test/InvariantAssume.t.sol:InvariantAssume
[FAIL: `vm.assume` rejected too many inputs (1 allowed)] invariant_dummy() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantAssume.t.sol:InvariantAssume
[FAIL: `vm.assume` rejected too many inputs (1 allowed)] invariant_dummy() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// https://github.com/foundry-rs/foundry/issues/5868
// The handler's fuzzed calldata (sender/candidate values come from the
// fixture arrays) should let the engine chain `transferOwnership` +
// `acceptOwnership` with matching addresses and break the ownership
// invariant — hence the expected failure below.
forgetest!(invariant_calldata_dictionary, |prj, cmd| {
prj.insert_utils();
prj.update_config(|config| {
config.fuzz.seed = Some(U256::from(1));
config.invariant.depth = 10;
});
prj.add_test(
"InvariantCalldataDictionary.t.sol",
r#"
import "./utils/Test.sol";
struct FuzzSelector {
address addr;
bytes4[] selectors;
}
contract Owned {
address public owner;
address private ownerCandidate;
constructor() {
owner = msg.sender;
}
modifier onlyOwner() {
require(msg.sender == owner);
_;
}
modifier onlyOwnerCandidate() {
require(msg.sender == ownerCandidate);
_;
}
function transferOwnership(address candidate) external onlyOwner {
ownerCandidate = candidate;
}
function acceptOwnership() external onlyOwnerCandidate {
owner = ownerCandidate;
}
}
contract Handler is Test {
Owned owned;
constructor(Owned _owned) {
owned = _owned;
}
function transferOwnership(address sender, address candidate) external {
vm.assume(sender != address(0));
vm.prank(sender);
owned.transferOwnership(candidate);
}
function acceptOwnership(address sender) external {
vm.assume(sender != address(0));
vm.prank(sender);
owned.acceptOwnership();
}
}
contract InvariantCalldataDictionary is Test {
address owner;
Owned owned;
Handler handler;
address[] actors;
function setUp() public {
owner = address(this);
owned = new Owned();
handler = new Handler(owned);
actors.push(owner);
actors.push(address(777));
}
function targetSelectors() public returns (FuzzSelector[] memory) {
FuzzSelector[] memory targets = new FuzzSelector[](1);
bytes4[] memory selectors = new bytes4[](2);
selectors[0] = handler.transferOwnership.selector;
selectors[1] = handler.acceptOwnership.selector;
targets[0] = FuzzSelector(address(handler), selectors);
return targets;
}
function fixtureSender() external returns (address[] memory) {
return actors;
}
function fixtureCandidate() external returns (address[] memory) {
return actors;
}
function invariant_owner_never_changes() public {
assertEq(owned.owner(), owner);
}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantCalldataDictionary.t.sol:InvariantCalldataDictionary
[FAIL: <empty revert data>]
[SEQUENCE]
invariant_owner_never_changes() ([RUNS])
...
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantCalldataDictionary.t.sol:InvariantCalldataDictionary
[FAIL: <empty revert data>]
[SEQUENCE]
invariant_owner_never_changes() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Tests that a custom error raised by a fuzzed handler is decoded with its
// arguments (`InvariantCustomError(111, "custom")`) in the failure summary
// when `fail_on_revert` is enabled.
forgetest_init!(invariant_custom_error, |prj, cmd| {
prj.update_config(|config| {
config.invariant.depth = 10;
config.invariant.fail_on_revert = true;
});
prj.add_test(
"InvariantCustomError.t.sol",
r#"
import "forge-std/Test.sol";
contract ContractWithCustomError {
error InvariantCustomError(uint256, string);
function revertWithInvariantCustomError() external {
revert InvariantCustomError(111, "custom");
}
}
contract Handler is Test {
ContractWithCustomError target;
constructor() {
target = new ContractWithCustomError();
}
function revertTarget() external {
target.revertWithInvariantCustomError();
}
}
contract InvariantCustomError is Test {
Handler handler;
function setUp() external {
handler = new Handler();
}
function invariant_decode_error() public {}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantCustomError.t.sol:InvariantCustomError
[FAIL: InvariantCustomError(111, "custom")]
[SEQUENCE]
invariant_decode_error() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantCustomError.t.sol:InvariantCustomError
[FAIL: InvariantCustomError(111, "custom")]
[SEQUENCE]
invariant_decode_error() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Tests that the cheatcode address, the console address and the CREATE2
// deployer are never used as fuzzed senders: `checkSender` reverts for any of
// them, and with `fail_on_revert` enabled the test still passes.
forgetest_init!(invariant_excluded_senders, |prj, cmd| {
prj.update_config(|config| {
config.invariant.depth = 10;
config.invariant.fail_on_revert = true;
});
prj.add_test(
"InvariantExcludedSenders.t.sol",
r#"
import "forge-std/Test.sol";
contract InvariantSenders {
function checkSender() external {
require(msg.sender != 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D, "sender cannot be cheatcode address");
require(msg.sender != 0x000000000000000000636F6e736F6c652e6c6f67, "sender cannot be console address");
require(msg.sender != 0x4e59b44847b379578588920cA78FbF26c0B4956C, "sender cannot be CREATE2 deployer");
}
}
contract InvariantExcludedSendersTest is Test {
InvariantSenders target;
function setUp() public {
target = new InvariantSenders();
}
function invariant_check_sender() public view {}
}
"#,
);
assert_invariant(cmd.args(["test"])).success().stdout_eq(str![[r#"
[COMPILING_FILES] with [SOLC_VERSION]
[SOLC_VERSION] [ELAPSED]
Compiler run successful with warnings:
Warning (2018): Function state mutability can be restricted to view
[FILE]:7:5:
|
7 | function checkSender() external {
| ^ (Relevant source part starts here and spans across multiple lines).
Ran 1 test for test/InvariantExcludedSenders.t.sol:InvariantExcludedSendersTest
[PASS] invariant_check_sender() ([RUNS])
[STATS]
Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests)
"#]]);
});
// Tests that user-defined fixtures — both `fixture*` storage arrays and
// `fixture*()` functions — are used to generate fuzz inputs. Dictionary
// literals are disabled in the config, so the only way the fuzzer can hit all
// six magic values and "compromise" the target is via the fixtures; the test
// therefore expects the invariant to fail.
forgetest_init!(invariant_fixtures, |prj, cmd| {
prj.update_config(|config| {
config.invariant.runs = 1;
config.invariant.depth = 100;
// disable literals to test fixtures
config.invariant.dictionary.max_fuzz_dictionary_literals = 0;
config.fuzz.dictionary.max_fuzz_dictionary_literals = 0;
});
prj.add_test(
"InvariantFixtures.t.sol",
r#"
import "forge-std/Test.sol";
contract Target {
bool ownerFound;
bool amountFound;
bool magicFound;
bool keyFound;
bool backupFound;
bool extraStringFound;
function fuzzWithFixtures(
address owner_,
uint256 _amount,
int32 magic,
bytes32 key,
bytes memory backup,
string memory extra
) external {
if (owner_ == address(0x6B175474E89094C44Da98b954EedeAC495271d0F)) {
ownerFound = true;
}
if (_amount == 1122334455) amountFound = true;
if (magic == -777) magicFound = true;
if (key == "abcd1234") keyFound = true;
if (keccak256(backup) == keccak256("qwerty1234")) backupFound = true;
if (keccak256(abi.encodePacked(extra)) == keccak256(abi.encodePacked("112233aabbccdd"))) {
extraStringFound = true;
}
}
function isCompromised() public view returns (bool) {
return ownerFound && amountFound && magicFound && keyFound && backupFound && extraStringFound;
}
}
/// Try to compromise target contract by finding all accepted values using fixtures.
contract InvariantFixtures is Test {
Target target;
address[] public fixture_owner_ = [address(0x6B175474E89094C44Da98b954EedeAC495271d0F)];
uint256[] public fixture_amount = [1, 2, 1122334455];
function setUp() public {
target = new Target();
}
function fixtureMagic() external returns (int32[2] memory) {
int32[2] memory magic;
magic[0] = -777;
magic[1] = 777;
return magic;
}
function fixtureKey() external pure returns (bytes32[] memory) {
bytes32[] memory keyFixture = new bytes32[](1);
keyFixture[0] = "abcd1234";
return keyFixture;
}
function fixtureBackup() external pure returns (bytes[] memory) {
bytes[] memory backupFixture = new bytes[](1);
backupFixture[0] = "qwerty1234";
return backupFixture;
}
function fixtureExtra() external pure returns (string[] memory) {
string[] memory extraFixture = new string[](1);
extraFixture[0] = "112233aabbccdd";
return extraFixture;
}
function invariant_target_not_compromised() public {
assertEq(target.isCompromised(), false);
}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantFixtures.t.sol:InvariantFixtures
[FAIL: assertion failed: true != false]
[SEQUENCE]
invariant_target_not_compromised() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantFixtures.t.sol:InvariantFixtures
[FAIL: assertion failed: true != false]
[SEQUENCE]
invariant_target_not_compromised() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Counterpart to `invariant_fixtures`: the same target contract but with no
// fixtures declared and literals left at their default config. With a fixed
// seed the fuzzer is still expected to find all six accepted values and break
// the invariant.
forgetest_init!(invariant_breaks_without_fixtures, |prj, cmd| {
prj.update_config(|config| {
config.fuzz.seed = Some(U256::from(1));
config.invariant.runs = 1;
config.invariant.depth = 100;
});
prj.add_test(
"InvariantLiterals.t.sol",
r#"
import "forge-std/Test.sol";
contract Target {
bool ownerFound;
bool amountFound;
bool magicFound;
bool keyFound;
bool backupFound;
bool extraStringFound;
function fuzzWithoutFixtures(
address owner_,
uint256 _amount,
int32 magic,
bytes32 key,
bytes memory backup,
string memory extra
) external {
if (owner_ == address(0x6B175474E89094C44Da98b954EedeAC495271d0F)) {
ownerFound = true;
}
if (_amount == 1122334455) amountFound = true;
if (magic == -777) magicFound = true;
if (key == "abcd1234") keyFound = true;
if (keccak256(backup) == keccak256("qwerty1234")) backupFound = true;
if (keccak256(abi.encodePacked(extra)) == keccak256(abi.encodePacked("112233aabbccdd"))) {
extraStringFound = true;
}
}
function isCompromised() public view returns (bool) {
return ownerFound && amountFound && magicFound && keyFound && backupFound && extraStringFound;
}
}
/// Try to compromise target contract by finding all accepted values without using fixtures.
contract InvariantLiterals is Test {
Target target;
function setUp() public {
target = new Target();
}
function invariant_target_not_compromised() public {
assertEq(target.isCompromised(), false);
}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantLiterals.t.sol:InvariantLiterals
[FAIL: assertion failed: true != false]
[SEQUENCE]
invariant_target_not_compromised() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantLiterals.t.sol:InvariantLiterals
[FAIL: assertion failed: true != false]
[SEQUENCE]
invariant_target_not_compromised() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Tests that with `fail_on_revert` a handler that always reverts fails the
// invariant test (here declared with the `statefulFuzz_` prefix) and its
// revert reason ("failed on revert") is surfaced in the failure summary.
forgetest!(invariant_handler_failure, |prj, cmd| {
prj.insert_utils();
prj.update_config(|config| {
config.invariant.fail_on_revert = true;
config.invariant.runs = 1;
config.invariant.depth = 10;
});
prj.add_test(
"InvariantHandlerFailure.t.sol",
r#"
import "./utils/Test.sol";
struct FuzzSelector {
address addr;
bytes4[] selectors;
}
contract Handler is Test {
function doSomething() public {
require(false, "failed on revert");
}
}
contract InvariantHandlerFailure is Test {
bytes4[] internal selectors;
Handler handler;
function targetSelectors() public returns (FuzzSelector[] memory) {
FuzzSelector[] memory targets = new FuzzSelector[](1);
bytes4[] memory selectors = new bytes4[](1);
selectors[0] = handler.doSomething.selector;
targets[0] = FuzzSelector(address(handler), selectors);
return targets;
}
function setUp() public {
handler = new Handler();
}
function statefulFuzz_BrokenInvariant() public {}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantHandlerFailure.t.sol:InvariantHandlerFailure
[FAIL: failed on revert]
[SEQUENCE]
statefulFuzz_BrokenInvariant() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantHandlerFailure.t.sol:InvariantHandlerFailure
[FAIL: failed on revert]
[SEQUENCE]
statefulFuzz_BrokenInvariant() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// Here we test that the fuzz engine can include a contract created during the fuzz
// in its fuzz dictionary and eventually break the invariant.
// Specifically, can Judas, a created contract from Jesus, break Jesus contract
// by revealing his identity.
forgetest_init!(
#[cfg_attr(windows, ignore = "for some reason there's different rng")]
invariant_inner_contract,
|prj, cmd| {
prj.update_config(|config| {
config.invariant.depth = 10;
});
prj.add_test(
"InvariantInnerContract.t.sol",
r#"
import "forge-std/Test.sol";
contract Jesus {
address fren;
bool public identity_revealed;
function create_fren() public {
fren = address(new Judas());
}
function kiss() public {
require(msg.sender == fren);
identity_revealed = true;
}
}
contract Judas {
Jesus jesus;
constructor() {
jesus = Jesus(msg.sender);
}
function betray() public {
jesus.kiss();
}
}
contract InvariantInnerContract is Test {
Jesus jesus;
function setUp() public {
jesus = new Jesus();
}
function invariantHideJesus() public {
require(jesus.identity_revealed() == false, "jesus betrayed");
}
}
"#,
);
// First run: the fuzzer must discover `Judas` (deployed mid-run) and call
// `betray()` on it to break the invariant.
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantInnerContract.t.sol:InvariantInnerContract
[FAIL: jesus betrayed]
[SEQUENCE]
invariantHideJesus() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantInnerContract.t.sol:InvariantInnerContract
[FAIL: jesus betrayed]
[SEQUENCE]
invariantHideJesus() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
// `fuzz_seed` at 119 makes this sequence shrinkable from 4 to 2.
prj.update_config(|config| {
config.fuzz.seed = Some(U256::from(119u32));
// Disable persisted failures for rerunning the test.
config.invariant.failure_persist_dir = Some(
config
.invariant
.failure_persist_dir
.as_ref()
.unwrap()
.parent()
.unwrap()
.join("persistence2"),
);
});
// Second run: with this seed the failure shrinks to the minimal 2-call
// sequence create_fren() followed by betray() from the spawned Judas.
cmd.assert_failure().stdout_eq(str![[r#"
No files changed, compilation skipped
Ran 1 test for test/InvariantInnerContract.t.sol:InvariantInnerContract
[FAIL: jesus betrayed]
[Sequence] (original: 2, shrunk: 2)
sender=[..] addr=[test/InvariantInnerContract.t.sol:Jesus][..] calldata=create_fren() args=[]
sender=[..] addr=[test/InvariantInnerContract.t.sol:Judas][..] calldata=betray() args=[]
invariantHideJesus() (runs: 0, calls: 0, reverts: 1)
...
"#]]);
}
);
// https://github.com/foundry-rs/foundry/issues/7219
forgetest!(invariant_preserve_state, |prj, cmd| {
prj.insert_utils();
prj.update_config(|config| {
config.invariant.depth = 10;
config.invariant.fail_on_revert = true;
});
prj.add_test(
"InvariantPreserveState.t.sol",
r#"
import "./utils/Test.sol";
struct FuzzSelector {
address addr;
bytes4[] selectors;
}
contract Handler is Test {
function thisFunctionReverts() external {
if (block.number < 10) {} else {
revert();
}
}
function advanceTime(uint256 blocks) external {
blocks = blocks % 10;
vm.roll(block.number + blocks);
vm.warp(block.timestamp + blocks * 12);
}
}
contract InvariantPreserveState is Test {
Handler handler;
function setUp() public {
handler = new Handler();
}
function targetSelectors() public returns (FuzzSelector[] memory) {
FuzzSelector[] memory targets = new FuzzSelector[](1);
bytes4[] memory selectors = new bytes4[](2);
selectors[0] = handler.thisFunctionReverts.selector;
selectors[1] = handler.advanceTime.selector;
targets[0] = FuzzSelector(address(handler), selectors);
return targets;
}
function invariant_preserve_state() public {
assertTrue(true);
}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantPreserveState.t.sol:InvariantPreserveState
[FAIL: EvmError: Revert]
[SEQUENCE]
invariant_preserve_state() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantPreserveState.t.sol:InvariantPreserveState
[FAIL: EvmError: Revert]
[SEQUENCE]
invariant_preserve_state() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
// add code so contract is accounted as valid sender
// see https://github.com/foundry-rs/foundry/issues/4245
forgetest!(invariant_reentrancy, |prj, cmd| {
prj.insert_utils();
prj.update_config(|config| {
config.invariant.depth = 10;
config.invariant.fail_on_revert = false;
config.invariant.call_override = true;
});
prj.add_test(
"InvariantReentrancy.t.sol",
r#"
import "./utils/Test.sol";
contract Malicious {
function world() public {
payable(msg.sender).call("");
}
}
contract Vulnerable {
bool public open_door = false;
bool public stolen = false;
Malicious mal;
constructor(address _mal) {
mal = Malicious(_mal);
}
function hello() public {
open_door = true;
mal.world();
open_door = false;
}
function backdoor() public {
require(open_door, "");
stolen = true;
}
}
contract InvariantReentrancy is Test {
Vulnerable vuln;
Malicious mal;
function setUp() public {
mal = new Malicious();
vuln = new Vulnerable(address(mal));
}
// do not include `mal` in identified contracts
// see https://github.com/foundry-rs/foundry/issues/4245
function targetContracts() public view returns (address[] memory) {
address[] memory targets = new address[](1);
targets[0] = address(vuln);
return targets;
}
function invariantNotStolen() public {
require(vuln.stolen() == false, "stolen");
}
}
"#,
);
assert_invariant(cmd.args(["test"])).failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantReentrancy.t.sol:InvariantReentrancy
[FAIL: stolen]
[SEQUENCE]
invariantNotStolen() ([RUNS])
[STATS]
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantReentrancy.t.sol:InvariantReentrancy
[FAIL: stolen]
[SEQUENCE]
invariantNotStolen() ([RUNS])
Encountered a total of 1 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 1 failed test
"#]]);
});
forgetest_init!(invariant_roll_fork, |prj, cmd| {
prj.add_rpc_endpoints();
prj.update_config(|config| {
config.fuzz.seed = Some(U256::from(119u32));
config.invariant.shrink_run_limit = 0;
});
prj.add_test(
"InvariantRollFork.t.sol",
r#"
import "forge-std/Test.sol";
interface IERC20 {
function totalSupply() external view returns (uint256 supply);
}
contract RollForkHandler is Test {
uint256 public totalSupply;
function work() external {
vm.rollFork(block.number + 1);
totalSupply = IERC20(0x6B175474E89094C44Da98b954EedeAC495271d0F).totalSupply();
}
}
contract InvariantRollForkBlockTest is Test {
RollForkHandler forkHandler;
function setUp() public {
vm.createSelectFork("mainnet", 19812632);
forkHandler = new RollForkHandler();
}
/// forge-config: default.invariant.runs = 2
/// forge-config: default.invariant.depth = 4
function invariant_fork_handler_block() public {
require(block.number < 19812634, "too many blocks mined");
}
}
contract InvariantRollForkStateTest is Test {
RollForkHandler forkHandler;
function setUp() public {
vm.createSelectFork("mainnet", 19812632);
forkHandler = new RollForkHandler();
}
/// forge-config: default.invariant.runs = 1
function invariant_fork_handler_state() public {
require(forkHandler.totalSupply() < 3254378807384273078310283461, "wrong supply");
}
}
"#,
);
assert_invariant(cmd.args(["test", "-j1"])).failure().stdout_eq(str![[r#"
...
Ran 2 test suites [ELAPSED]: 0 tests passed, 2 failed, 0 skipped (2 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantRollFork.t.sol:InvariantRollForkBlockTest
[FAIL: too many blocks mined]
...
invariant_fork_handler_block() ([RUNS])
Encountered 1 failing test in test/InvariantRollFork.t.sol:InvariantRollForkStateTest
[FAIL: wrong supply]
...
invariant_fork_handler_state() ([RUNS])
Encountered a total of 2 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
forgetest_init!(invariant_scrape_values, |prj, cmd| {
prj.update_config(|config| {
config.invariant.depth = 10;
config.fuzz.seed = Some(U256::from(100u32));
});
prj.add_test(
"InvariantScrapeValues.t.sol",
r#"
import "forge-std/Test.sol";
contract FindFromReturnValue {
bool public found = false;
function seed() public returns (int256) {
int256 mystery = 13337;
return (1337 + mystery);
}
function find(int256 i) public {
int256 mystery = 13337;
if (i == 1337 + mystery) {
found = true;
}
}
}
contract FindFromReturnValueTest is Test {
FindFromReturnValue target;
function setUp() public {
target = new FindFromReturnValue();
}
/// forge-config: default.invariant.runs = 50
/// forge-config: default.invariant.depth = 300
/// forge-config: default.invariant.fail-on-revert = true
function invariant_value_not_found() public view {
require(!target.found(), "value from return found");
}
}
contract FindFromLogValue {
event FindFromLog(int256 indexed mystery, bytes32 rand);
bool public found = false;
function seed() public {
int256 mystery = 13337;
emit FindFromLog(1337 + mystery, keccak256(abi.encodePacked("mystery")));
}
function find(int256 i) public {
int256 mystery = 13337;
if (i == 1337 + mystery) {
found = true;
}
}
}
contract FindFromLogValueTest is Test {
FindFromLogValue target;
function setUp() public {
target = new FindFromLogValue();
}
/// forge-config: default.invariant.runs = 50
/// forge-config: default.invariant.depth = 300
/// forge-config: default.invariant.fail-on-revert = true
function invariant_value_not_found() public view {
require(!target.found(), "value from logs found");
}
}
"#,
);
assert_invariant(cmd.args(["test", "-j1"])).failure().stdout_eq(str![[r#"
...
Ran 2 test suites [ELAPSED]: 0 tests passed, 2 failed, 0 skipped (2 total tests)
Failing tests:
Encountered 1 failing test in test/InvariantScrapeValues.t.sol:FindFromLogValueTest
[FAIL: value from logs found]
[SEQUENCE]
invariant_value_not_found() ([RUNS])
Encountered 1 failing test in test/InvariantScrapeValues.t.sol:FindFromReturnValueTest
[FAIL: value from return found]
[SEQUENCE]
invariant_value_not_found() ([RUNS])
Encountered a total of 2 failing tests, 0 tests succeeded
Tip: Run `forge test --rerun` to retry only the 2 failed tests
"#]]);
});
forgetest_init!(invariant_sequence_no_reverts, |prj, cmd| {
prj.update_config(|config| {
config.invariant.depth = 15;
config.invariant.fail_on_revert = false;
// Use original counterexample to test sequence len.
config.invariant.shrink_run_limit = 0;
});
prj.add_test(
"InvariantSequenceNoReverts.t.sol",
r#"
import "forge-std/Test.sol";
contract SequenceNoReverts {
uint256 public count;
function work(uint256 x) public {
require(x % 2 != 0);
count++;
}
}
contract SequenceNoRevertsTest is Test {
SequenceNoReverts target;
function setUp() public {
target = new SequenceNoReverts();
}
function invariant_no_reverts() public view {
require(target.count() < 10, "condition met");
}
}
"#,
);
// ensure original counterexample len is 10 (even without shrinking)
cmd.args(["test"]).assert_failure().stdout_eq(str![[r#"
...
Ran 1 test for test/InvariantSequenceNoReverts.t.sol:SequenceNoRevertsTest
[FAIL: condition met]
[Sequence] (original: 10, shrunk: 10)
...
invariant_no_reverts() ([..])
...
Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED]
Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests)
...
"#]]);
});
forgetest_init!(
#[cfg_attr(windows, ignore = "for some reason there's different rng")]
invariant_shrink_big_sequence,
|prj, cmd| {
prj.update_config(|config| {
config.fuzz.seed = Some(U256::from(119u32));
config.invariant.runs = 1;
config.invariant.depth = 1000;
config.invariant.shrink_run_limit = 425;
});
prj.add_test(
"InvariantShrinkBigSequence.t.sol",
r#"
import "forge-std/Test.sol";
contract ShrinkBigSequence {
uint256 cond;
function work(uint256 x) public {
if (x % 2 != 0 && x < 9000) {
cond++;
}
}
function checkCond() public view {
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/bin/main.rs | crates/forge/bin/main.rs | //! The `forge` CLI: build, test, fuzz, debug and deploy Solidity contracts, like Hardhat, Brownie,
//! Ape.
use forge::args::run;
#[global_allocator]
static ALLOC: foundry_cli::utils::Allocator = foundry_cli::utils::new_allocator();
fn main() {
if let Err(err) = run() {
let _ = foundry_common::sh_err!("{err:?}");
std::process::exit(1);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/solidity_helper.rs | crates/chisel/src/solidity_helper.rs | //! This module contains the `SolidityHelper`, a [rustyline::Helper] implementation for
//! usage in Chisel. It was originally ported from [soli](https://github.com/jpopesculian/soli/blob/master/src/main.rs).
use crate::{
dispatcher::PROMPT_ARROW,
prelude::{COMMAND_LEADER, ChiselCommand, PROMPT_ARROW_STR},
};
use rustyline::{
Helper,
completion::Completer,
highlight::{CmdKind, Highlighter},
hint::Hinter,
validate::{ValidationContext, ValidationResult, Validator},
};
use solar::parse::{
Cursor, Lexer,
interface::Session,
lexer::token::{RawLiteralKind, RawTokenKind},
token::Token,
};
use std::{borrow::Cow, cell::RefCell, fmt, ops::Range, rc::Rc};
use yansi::{Color, Style};
/// The maximum length of an ANSI prefix + suffix characters using [SolidityHelper].
///
/// * 5 - prefix:
/// * 2 - start: `\x1B[`
/// * 2 - fg: `3<fg_code>`
/// * 1 - end: `m`
/// * 4 - suffix: `\x1B[0m`
const MAX_ANSI_LEN: usize = 9;
/// A rustyline helper for Solidity code.
#[derive(Clone)]
pub struct SolidityHelper {
inner: Rc<RefCell<Inner>>,
}
struct Inner {
errored: bool,
do_paint: bool,
sess: Session,
}
impl Default for SolidityHelper {
fn default() -> Self {
Self::new()
}
}
impl fmt::Debug for SolidityHelper {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let this = self.inner.borrow();
f.debug_struct("SolidityHelper")
.field("errored", &this.errored)
.field("do_paint", &this.do_paint)
.finish_non_exhaustive()
}
}
impl SolidityHelper {
/// Create a new SolidityHelper.
pub fn new() -> Self {
Self {
inner: Rc::new(RefCell::new(Inner {
errored: false,
do_paint: yansi::is_enabled(),
sess: Session::builder().with_silent_emitter(None).build(),
})),
}
}
/// Returns whether the helper is in an errored state.
pub fn errored(&self) -> bool {
self.inner.borrow().errored
}
/// Set the errored field.
pub fn set_errored(&mut self, errored: bool) -> &mut Self {
self.inner.borrow_mut().errored = errored;
self
}
/// Highlights a Solidity source string.
pub fn highlight<'a>(&self, input: &'a str) -> Cow<'a, str> {
if !self.do_paint() {
return Cow::Borrowed(input);
}
// Highlight commands separately
if let Some(full_cmd) = input.strip_prefix(COMMAND_LEADER) {
let (cmd, rest) = match input.split_once(' ') {
Some((cmd, rest)) => (cmd, Some(rest)),
None => (input, None),
};
let cmd = cmd.strip_prefix(COMMAND_LEADER).unwrap_or(cmd);
let mut out = String::with_capacity(input.len() + MAX_ANSI_LEN);
// cmd
out.push(COMMAND_LEADER);
let cmd_res = ChiselCommand::parse(full_cmd);
let style = (if cmd_res.is_ok() { Color::Green } else { Color::Red }).foreground();
Self::paint_unchecked(cmd, style, &mut out);
// rest
match rest {
Some(rest) if !rest.is_empty() => {
out.push(' ');
out.push_str(rest);
}
_ => {}
}
Cow::Owned(out)
} else {
let mut out = String::with_capacity(input.len() * 2);
self.with_contiguous_styles(input, |style, range| {
Self::paint_unchecked(&input[range], style, &mut out);
});
Cow::Owned(out)
}
}
/// Returns a list of styles and the ranges they should be applied to.
///
/// Covers the entire source string, including any whitespace.
fn with_contiguous_styles(&self, input: &str, mut f: impl FnMut(Style, Range<usize>)) {
self.enter(|sess| {
let len = input.len();
let mut index = 0;
for token in Lexer::new(sess, input) {
let range = token.span.lo().to_usize()..token.span.hi().to_usize();
let style = token_style(&token);
if index < range.start {
f(Style::default(), index..range.start);
}
index = range.end;
f(style, range);
}
if index < len {
f(Style::default(), index..len);
}
});
}
/// Validate that a source snippet is closed (i.e., all braces and parenthesis are matched).
fn validate_closed(&self, input: &str) -> ValidationResult {
use RawLiteralKind::*;
use RawTokenKind::*;
let mut stack = vec![];
for token in Cursor::new(input) {
match token.kind {
OpenDelim(delim) => stack.push(delim),
CloseDelim(delim) => match (stack.pop(), delim) {
(Some(open), close) if open == close => {}
(Some(wanted), _) => {
let wanted = wanted.to_open_str();
return ValidationResult::Invalid(Some(format!(
"Mismatched brackets: `{wanted}` is not properly closed"
)));
}
(None, c) => {
let c = c.to_close_str();
return ValidationResult::Invalid(Some(format!(
"Mismatched brackets: `{c}` is unpaired"
)));
}
},
Literal { kind: Str { terminated, .. } } => {
if !terminated {
return ValidationResult::Incomplete;
}
}
BlockComment { terminated, .. } => {
if !terminated {
return ValidationResult::Incomplete;
}
}
_ => {}
}
}
// There are open brackets that are not properly closed.
if !stack.is_empty() {
return ValidationResult::Incomplete;
}
ValidationResult::Valid(None)
}
/// Formats `input` with `style` into `out`, without checking `style.wrapping` or
/// `self.do_paint`.
fn paint_unchecked(string: &str, style: Style, out: &mut String) {
if style == Style::default() {
out.push_str(string);
} else {
let _ = style.fmt_prefix(out);
out.push_str(string);
let _ = style.fmt_suffix(out);
}
}
fn paint_unchecked_owned(string: &str, style: Style) -> String {
let mut out = String::with_capacity(MAX_ANSI_LEN + string.len());
Self::paint_unchecked(string, style, &mut out);
out
}
/// Returns whether to color the output.
fn do_paint(&self) -> bool {
self.inner.borrow().do_paint
}
/// Enters the session.
fn enter(&self, f: impl FnOnce(&Session)) {
let this = self.inner.borrow();
this.sess.enter_sequential(|| f(&this.sess));
}
}
impl Highlighter for SolidityHelper {
fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> {
self.highlight(line)
}
fn highlight_char(&self, line: &str, pos: usize, _kind: CmdKind) -> bool {
pos == line.len()
}
fn highlight_prompt<'b, 's: 'b, 'p: 'b>(
&'s self,
prompt: &'p str,
_default: bool,
) -> Cow<'b, str> {
if !self.do_paint() {
return Cow::Borrowed(prompt);
}
let mut out = prompt.to_string();
// `^(\(ID: .*?\) )? ➜ `
if prompt.starts_with("(ID: ") {
let id_end = prompt.find(')').unwrap();
let id_span = 5..id_end;
let id = &prompt[id_span.clone()];
out.replace_range(
id_span,
&Self::paint_unchecked_owned(id, Color::Yellow.foreground()),
);
out.replace_range(1..=2, &Self::paint_unchecked_owned("ID", Color::Cyan.foreground()));
}
if let Some(i) = out.find(PROMPT_ARROW) {
let style =
if self.errored() { Color::Red.foreground() } else { Color::Green.foreground() };
out.replace_range(i..=i + 2, &Self::paint_unchecked_owned(PROMPT_ARROW_STR, style));
}
Cow::Owned(out)
}
}
impl Validator for SolidityHelper {
fn validate(&self, ctx: &mut ValidationContext<'_>) -> rustyline::Result<ValidationResult> {
Ok(self.validate_closed(ctx.input()))
}
}
impl Completer for SolidityHelper {
type Candidate = String;
}
impl Hinter for SolidityHelper {
type Hint = String;
}
impl Helper for SolidityHelper {}
#[expect(non_upper_case_globals)]
#[deny(unreachable_patterns)]
fn token_style(token: &Token) -> Style {
use solar::parse::{
interface::kw::*,
token::{TokenKind::*, TokenLitKind::*},
};
match token.kind {
Literal(Str | HexStr | UnicodeStr, _) => Color::Green.foreground(),
Literal(..) => Color::Yellow.foreground(),
Ident(
Memory | Storage | Calldata | Public | Private | Internal | External | Constant | Pure
| View | Payable | Anonymous | Indexed | Abstract | Virtual | Override | Modifier
| Immutable | Unchecked,
) => Color::Cyan.foreground(),
Ident(s) if s.is_elementary_type() => Color::Blue.foreground(),
Ident(Mapping) => Color::Blue.foreground(),
Ident(s) if s.is_used_keyword() || s.is_yul_keyword() => Color::Magenta.foreground(),
Arrow | FatArrow => Color::Magenta.foreground(),
Comment(..) => Color::Primary.dim(),
_ => Color::Primary.foreground(),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn validate() {
let helper = SolidityHelper::new();
let dbg_r = |r: ValidationResult| match r {
ValidationResult::Incomplete => "Incomplete".to_string(),
ValidationResult::Invalid(inner) => format!("Invalid({inner:?})"),
ValidationResult::Valid(inner) => format!("Valid({inner:?})"),
_ => "Unknown result".to_string(),
};
let valid = |input: &str| {
let r = helper.validate_closed(input);
assert!(matches!(r, ValidationResult::Valid(None)), "{input:?}: {}", dbg_r(r))
};
let incomplete = |input: &str| {
let r = helper.validate_closed(input);
assert!(matches!(r, ValidationResult::Incomplete), "{input:?}: {}", dbg_r(r))
};
let invalid = |input: &str| {
let r = helper.validate_closed(input);
assert!(matches!(r, ValidationResult::Invalid(Some(_))), "{input:?}: {}", dbg_r(r))
};
valid("1");
valid("1 + 2");
valid("()");
valid("{}");
valid("[]");
incomplete("(");
incomplete("((");
incomplete("[");
incomplete("{");
incomplete("({");
valid("({})");
invalid(")");
invalid("]");
invalid("}");
invalid("(}");
invalid("(})");
invalid("[}");
invalid("[}]");
incomplete("\"");
incomplete("\'");
valid("\"\"");
valid("\'\'");
incomplete("/*");
incomplete("/*/*");
valid("/* */");
valid("/* /* */");
valid("/* /* */ */");
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/source.rs | crates/chisel/src/source.rs | //! Session Source
//!
//! This module contains the `SessionSource` struct, which is a minimal wrapper around
//! the REPL contract's source code. It provides simple compilation, parsing, and
//! execution helpers.
use eyre::Result;
use forge_doc::solang_ext::{CodeLocationExt, SafeUnwrap};
use foundry_common::fs;
use foundry_compilers::{
Artifact, ProjectCompileOutput,
artifacts::{ConfigurableContractArtifact, Source, Sources},
project::ProjectCompiler,
solc::Solc,
};
use foundry_config::{Config, SolcReq};
use foundry_evm::{backend::Backend, core::bytecode::InstIter, opts::EvmOpts};
use semver::Version;
use serde::{Deserialize, Serialize};
use solang_parser::pt;
use solar::interface::diagnostics::EmittedDiagnostics;
use std::{cell::OnceCell, collections::HashMap, fmt, path::PathBuf};
use walkdir::WalkDir;
/// The minimum Solidity version of the `Vm` interface.
pub const MIN_VM_VERSION: Version = Version::new(0, 6, 2);
/// Solidity source for the `Vm` interface in [forge-std](https://github.com/foundry-rs/forge-std)
static VM_SOURCE: &str = include_str!("../../../testdata/utils/Vm.sol");
/// [`SessionSource`] build output.
pub struct GeneratedOutput {
output: ProjectCompileOutput,
pub(crate) intermediate: IntermediateOutput,
}
pub struct GeneratedOutputRef<'a> {
output: &'a ProjectCompileOutput,
// compiler: &'b solar::sema::CompilerRef<'c>,
pub(crate) intermediate: &'a IntermediateOutput,
}
/// Intermediate output for the compiled [SessionSource]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct IntermediateOutput {
/// All expressions within the REPL contract's run function and top level scope.
pub repl_contract_expressions: HashMap<String, pt::Expression>,
/// Intermediate contracts
pub intermediate_contracts: IntermediateContracts,
}
/// A refined intermediate parse tree for a contract that enables easy lookups
/// of definitions.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct IntermediateContract {
/// All function definitions within the contract
pub function_definitions: HashMap<String, Box<pt::FunctionDefinition>>,
/// All event definitions within the contract
pub event_definitions: HashMap<String, Box<pt::EventDefinition>>,
/// All struct definitions within the contract
pub struct_definitions: HashMap<String, Box<pt::StructDefinition>>,
/// All variable definitions within the top level scope of the contract
pub variable_definitions: HashMap<String, Box<pt::VariableDefinition>>,
}
/// A defined type for a map of contract names to [IntermediateContract]s
type IntermediateContracts = HashMap<String, IntermediateContract>;
impl fmt::Debug for GeneratedOutput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GeneratedOutput").finish_non_exhaustive()
}
}
impl GeneratedOutput {
pub fn enter<T: Send>(&self, f: impl FnOnce(GeneratedOutputRef<'_>) -> T + Send) -> T {
// TODO(dani): once intermediate is removed
// self.output
// .parser()
// .solc()
// .compiler()
// .enter(|compiler| f(GeneratedOutputRef { output: &self.output, compiler }))
f(GeneratedOutputRef { output: &self.output, intermediate: &self.intermediate })
}
}
impl GeneratedOutputRef<'_> {
pub fn repl_contract(&self) -> Option<&ConfigurableContractArtifact> {
self.output.find_first("REPL")
}
}
impl std::ops::Deref for GeneratedOutput {
type Target = IntermediateOutput;
fn deref(&self) -> &Self::Target {
&self.intermediate
}
}
impl std::ops::Deref for GeneratedOutputRef<'_> {
type Target = IntermediateOutput;
fn deref(&self) -> &Self::Target {
self.intermediate
}
}
impl IntermediateOutput {
pub fn get_event(&self, input: &str) -> Option<&pt::EventDefinition> {
self.intermediate_contracts
.get("REPL")
.and_then(|contract| contract.event_definitions.get(input).map(std::ops::Deref::deref))
}
pub fn final_pc(&self, contract: &ConfigurableContractArtifact) -> Result<Option<usize>> {
let deployed_bytecode = contract
.get_deployed_bytecode()
.ok_or_else(|| eyre::eyre!("No deployed bytecode found for `REPL` contract"))?;
let deployed_bytecode_bytes = deployed_bytecode
.bytes()
.ok_or_else(|| eyre::eyre!("No deployed bytecode found for `REPL` contract"))?;
let run_func_statements = self.run_func_body()?;
// Record loc of first yul block return statement (if any).
// This is used to decide which is the final statement within the `run()` method.
// see <https://github.com/foundry-rs/foundry/issues/4617>.
let last_yul_return = run_func_statements.iter().find_map(|statement| {
if let pt::Statement::Assembly { loc: _, dialect: _, flags: _, block } = statement
&& let Some(statement) = block.statements.last()
&& let pt::YulStatement::FunctionCall(yul_call) = statement
&& yul_call.id.name == "return"
{
return Some(statement.loc());
}
None
});
// Find the last statement within the "run()" method and get the program
// counter via the source map.
let Some(final_statement) = run_func_statements.last() else { return Ok(None) };
// If the final statement is some type of block (assembly, unchecked, or regular),
// we need to find the final statement within that block. Otherwise, default to
// the source loc of the final statement of the `run()` function's block.
//
// There is some code duplication within the arms due to the difference between
// the [pt::Statement] type and the [pt::YulStatement] types.
let mut source_loc = match final_statement {
pt::Statement::Assembly { loc: _, dialect: _, flags: _, block } => {
// Select last non variable declaration statement, see <https://github.com/foundry-rs/foundry/issues/4938>.
let last_statement = block.statements.iter().rev().find(|statement| {
!matches!(statement, pt::YulStatement::VariableDeclaration(_, _, _))
});
if let Some(statement) = last_statement {
statement.loc()
} else {
// In the case where the block is empty, attempt to grab the statement
// before the asm block. Because we use saturating sub to get the second
// to last index, this can always be safely unwrapped.
run_func_statements
.get(run_func_statements.len().saturating_sub(2))
.unwrap()
.loc()
}
}
pt::Statement::Block { loc: _, unchecked: _, statements } => {
if let Some(statement) = statements.last() {
statement.loc()
} else {
// In the case where the block is empty, attempt to grab the statement
// before the block. Because we use saturating sub to get the second to
// last index, this can always be safely unwrapped.
run_func_statements
.get(run_func_statements.len().saturating_sub(2))
.unwrap()
.loc()
}
}
_ => final_statement.loc(),
};
// Consider yul return statement as final statement (if it's loc is lower) .
if let Some(yul_return) = last_yul_return
&& yul_return.end() < source_loc.start()
{
source_loc = yul_return;
}
// Map the source location of the final statement of the `run()` function to its
// corresponding runtime program counter
let final_pc = {
let offset = source_loc.start() as u32;
let length = (source_loc.end() - source_loc.start()) as u32;
trace!(%offset, %length, "find pc");
contract
.get_source_map_deployed()
.unwrap()
.unwrap()
.into_iter()
.zip(InstIter::new(deployed_bytecode_bytes).with_pc().map(|(pc, _)| pc))
.filter(|(s, _)| s.offset() == offset && s.length() == length)
.map(|(_, pc)| pc)
.max()
};
trace!(?final_pc);
Ok(final_pc)
}
pub fn run_func_body(&self) -> Result<&Vec<pt::Statement>> {
match self
.intermediate_contracts
.get("REPL")
.ok_or_else(|| eyre::eyre!("Could not find REPL intermediate contract!"))?
.function_definitions
.get("run")
.ok_or_else(|| eyre::eyre!("Could not find run function definition in REPL contract!"))?
.body
.as_ref()
.ok_or_else(|| eyre::eyre!("Could not find run function body!"))?
{
pt::Statement::Block { statements, .. } => Ok(statements),
_ => eyre::bail!("Could not find statements within run function body!"),
}
}
}
// TODO(dani): further migration blocked on upstream work
#[cfg(false)]
impl<'gcx> GeneratedOutputRef<'_, '_, 'gcx> {
pub fn gcx(&self) -> Gcx<'gcx> {
self.compiler.gcx()
}
pub fn repl_contract(&self) -> Option<&ConfigurableContractArtifact> {
self.output.find_first("REPL")
}
pub fn get_event(&self, input: &str) -> Option<hir::EventId> {
self.gcx().hir.events_enumerated().find(|(_, e)| e.name.as_str() == input).map(|(id, _)| id)
}
pub fn final_pc(&self, contract: &ConfigurableContractArtifact) -> Result<Option<usize>> {
let deployed_bytecode = contract
.get_deployed_bytecode()
.ok_or_else(|| eyre::eyre!("No deployed bytecode found for `REPL` contract"))?;
let deployed_bytecode_bytes = deployed_bytecode
.bytes()
.ok_or_else(|| eyre::eyre!("No deployed bytecode found for `REPL` contract"))?;
// Fetch the run function's body statement
let run_body = self.run_func_body();
// Record loc of first yul block return statement (if any).
// This is used to decide which is the final statement within the `run()` method.
// see <https://github.com/foundry-rs/foundry/issues/4617>.
let last_yul_return_span: Option<Span> = run_body.iter().find_map(|stmt| {
// TODO(dani): Yul is not yet lowered to HIR.
let _ = stmt;
/*
if let hir::StmtKind::Assembly { block, .. } = stmt {
if let Some(stmt) = block.last() {
if let pt::YulStatement::FunctionCall(yul_call) = stmt {
if yul_call.id.name == "return" {
return Some(stmt.loc())
}
}
}
}
*/
None
});
// Find the last statement within the "run()" method and get the program
// counter via the source map.
let Some(last_stmt) = run_body.last() else { return Ok(None) };
// If the final statement is some type of block (assembly, unchecked, or regular),
// we need to find the final statement within that block. Otherwise, default to
// the source loc of the final statement of the `run()` function's block.
//
// There is some code duplication within the arms due to the difference between
// the [pt::Statement] type and the [pt::YulStatement] types.
let source_stmt = match &last_stmt.kind {
// TODO(dani): Yul is not yet lowered to HIR.
/*
pt::Statement::Assembly { loc: _, dialect: _, flags: _, block } => {
// Select last non variable declaration statement, see <https://github.com/foundry-rs/foundry/issues/4938>.
let last_statement = block.statements.iter().rev().find(|statement| {
!matches!(statement, pt::YulStatement::VariableDeclaration(_, _, _))
});
if let Some(stmt) = last_statement {
stmt
} else {
// In the case where the block is empty, attempt to grab the statement
// before the block. Because we use saturating sub to get the second to
// last index, this can always be safely unwrapped.
&run_body[run_body.len().saturating_sub(2)]
}
}
*/
hir::StmtKind::UncheckedBlock(stmts) | hir::StmtKind::Block(stmts) => {
if let Some(stmt) = stmts.last() {
stmt
} else {
// In the case where the block is empty, attempt to grab the statement
// before the block. Because we use saturating sub to get the second to
// last index, this can always be safely unwrapped.
&run_body[run_body.len().saturating_sub(2)]
}
}
_ => last_stmt,
};
let mut source_span = self.stmt_span_without_semicolon(source_stmt);
// Consider yul return statement as final statement (if it's loc is lower) .
if let Some(yul_return_span) = last_yul_return_span
&& yul_return_span.hi() < source_span.lo()
{
source_span = yul_return_span;
}
// Map the source location of the final statement of the `run()` function to its
// corresponding runtime program counter
let (_sf, range) = self.compiler.sess().source_map().span_to_source(source_span).unwrap();
dbg!(source_span, &range, &_sf.src[range.clone()]);
let offset = range.start as u32;
let length = range.len() as u32;
let final_pc = deployed_bytecode
.source_map()
.ok_or_else(|| eyre::eyre!("No source map found for `REPL` contract"))??
.into_iter()
.zip(InstructionIter::new(deployed_bytecode_bytes))
.filter(|(s, _)| s.offset() == offset && s.length() == length)
.map(|(_, i)| i.pc)
.max()
.unwrap_or_default();
Ok(Some(final_pc))
}
/// Statements' ranges in the solc source map do not include the semicolon.
///
/// Returns a [`Span`] trimmed to match what solc's source map reports:
/// declarations end at their initializer (or at the declaration itself when
/// there is none) and expression statements end at the expression.
fn stmt_span_without_semicolon(&self, stmt: &hir::Stmt<'_>) -> Span {
match stmt.kind {
// Single-variable declaration — end the span at the initializer, if any.
hir::StmtKind::DeclSingle(id) => {
let decl = self.gcx().hir.variable(id);
if let Some(expr) = decl.initializer {
stmt.span.with_hi(expr.span.hi())
} else {
stmt.span
}
}
// Multi-variable declaration — end the span at the trailing expression.
hir::StmtKind::DeclMulti(_, expr) => stmt.span.with_hi(expr.span.hi()),
// Bare expression statement — use the expression's own span.
hir::StmtKind::Expr(expr) => expr.span,
// Every other statement keeps its full span.
_ => stmt.span,
}
}
/// Returns the HIR body of the REPL contract's `run()` function.
///
/// # Panics
///
/// Panics if the `REPL` contract, its `run()` function, or that function's
/// body cannot be found in the HIR.
fn run_func_body(&self) -> hir::Block<'_> {
let c = self.repl_contract_hir().expect("REPL contract not found in HIR");
let f = c
.functions()
.find(|&f| self.gcx().hir.function(f).name.as_ref().map(|n| n.as_str()) == Some("run"))
.expect("`run()` function not found in REPL contract");
self.gcx().hir.function(f).body.expect("`run()` function does not have a body")
}
/// Looks up the contract named `REPL` in the HIR, if present.
fn repl_contract_hir(&self) -> Option<&hir::Contract<'_>> {
self.gcx().hir.contracts().find(|c| c.name.as_str() == "REPL")
}
}
/// Configuration for the [SessionSource]
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct SessionSourceConfig {
/// Foundry configuration
pub foundry_config: Config,
/// EVM Options
pub evm_opts: EvmOpts,
/// Disable the default `Vm` import.
pub no_vm: bool,
/// In-memory REVM db for the session's runner.
///
/// Not serialized: the backend is runtime-only state.
#[serde(skip)]
pub backend: Option<Backend>,
/// Optionally enable traces for the REPL contract execution
pub traces: bool,
/// Optionally set calldata for the REPL contract execution
pub calldata: Option<Vec<u8>>,
/// Enable viaIR with minimum optimization
///
/// This can fix most of the "stack too deep" errors while resulting in a
/// relatively accurate source map.
pub ir_minimum: bool,
}
impl SessionSourceConfig {
    /// Ensures a Solc version is configured and decides whether `Vm` can be injected.
    ///
    /// Installs any available solc when none is configured, then disables VM
    /// injection if the resolved version is older than [`MIN_VM_VERSION`].
    pub fn detect_solc(&mut self) -> Result<()> {
        if self.foundry_config.solc.is_none() {
            let installed = Solc::ensure_installed(&"*".parse().unwrap())?;
            self.foundry_config.solc = Some(SolcReq::Version(installed));
        }
        if !self.no_vm {
            if let Some(version) = self.foundry_config.solc_version() {
                if version < MIN_VM_VERSION {
                    info!(%version, minimum=%MIN_VM_VERSION, "Disabling VM injection");
                    self.no_vm = true;
                }
            }
        }
        Ok(())
    }
}
/// REPL Session Source wrapper
///
/// Heavily based on soli's [`ConstructedSource`](https://github.com/jpopesculian/soli/blob/master/src/main.rs#L166)
#[derive(Debug, Serialize, Deserialize)]
pub struct SessionSource {
/// The file name
pub file_name: String,
/// The contract name
pub contract_name: String,
/// Session Source configuration
pub config: SessionSourceConfig,
/// Global level Solidity code.
///
/// Above and outside all contract declarations, in the global context.
pub global_code: String,
/// Top level Solidity code.
///
/// Within the contract declaration, but outside of the `run()` function.
pub contract_code: String,
/// The code to be executed in the `run()` function.
pub run_code: String,
/// Cached VM source code.
///
/// Not serialized; repopulated via `vm_source()` on deserialization.
#[serde(skip, default = "vm_source")]
vm_source: Source,
/// The generated output
///
/// Invalidated (emptied) whenever any code section changes; see `clear_output`.
#[serde(skip)]
output: OnceCell<GeneratedOutput>,
}
/// Returns the embedded `Vm.sol` source; default for `SessionSource::vm_source`
/// (also used by serde when deserializing).
fn vm_source() -> Source {
Source::new(VM_SOURCE)
}
impl Clone for SessionSource {
fn clone(&self) -> Self {
Self {
file_name: self.file_name.clone(),
contract_name: self.contract_name.clone(),
global_code: self.global_code.clone(),
contract_code: self.contract_code.clone(),
run_code: self.run_code.clone(),
config: self.config.clone(),
vm_source: self.vm_source.clone(),
output: Default::default(),
}
}
}
impl SessionSource {
/// Creates a new source given a solidity compiler version
///
/// # Panics
///
/// If no Solc binary is set, cannot be found or the `--version` command fails
///
/// ### Takes
///
/// - An instance of [Solc]
/// - An instance of [SessionSourceConfig]
///
/// ### Returns
///
/// A new instance of [SessionSource]
pub fn new(mut config: SessionSourceConfig) -> Result<Self> {
config.detect_solc()?;
Ok(Self {
file_name: "ReplContract.sol".to_string(),
contract_name: "REPL".to_string(),
config,
global_code: Default::default(),
contract_code: Default::default(),
run_code: Default::default(),
vm_source: vm_source(),
output: Default::default(),
})
}
/// Clones the [SessionSource] and appends a new line of code.
///
/// Returns `true` if the new line was added to `run()`.
///
/// The input is tried in three forms, in order: as-is, with a trailing `;`
/// appended, and finally with any trailing semicolons stripped. The first
/// form that parses is kept; otherwise the (trimmed) input is surfaced as
/// the error message.
pub fn clone_with_new_line(&self, mut content: String) -> Result<(Self, bool)> {
if let Some((new_source, fragment)) = self
.parse_fragment(&content)
.or_else(|| {
// Retry with an appended semicolon (common omission at the REPL).
content.push(';');
self.parse_fragment(&content)
})
.or_else(|| {
// Retry with trailing semicolons removed (e.g. for items).
content = content.trim_end().trim_end_matches(';').to_string();
self.parse_fragment(&content)
})
{
Ok((new_source, matches!(fragment, ParseTreeFragment::Function)))
} else {
eyre::bail!("\"{}\"", content.trim());
}
}
/// Parses a fragment of Solidity code in memory and assigns it a scope within the
/// [`SessionSource`].
fn parse_fragment(&self, buffer: &str) -> Option<(Self, ParseTreeFragment)> {
#[track_caller]
fn debug_errors(errors: &EmittedDiagnostics) {
debug!("{errors}");
}
let mut this = self.clone();
match this.add_run_code(buffer).parse() {
Ok(()) => return Some((this, ParseTreeFragment::Function)),
Err(e) => debug_errors(&e),
}
this = self.clone();
match this.add_contract_code(buffer).parse() {
Ok(()) => return Some((this, ParseTreeFragment::Contract)),
Err(e) => debug_errors(&e),
}
this = self.clone();
match this.add_global_code(buffer).parse() {
Ok(()) => return Some((this, ParseTreeFragment::Source)),
Err(e) => debug_errors(&e),
}
None
}
/// Appends a trimmed snippet (plus a trailing newline) to the global scope
/// and invalidates the cached build.
pub fn add_global_code(&mut self, content: &str) -> &mut Self {
    self.global_code += content.trim();
    self.global_code += "\n";
    self.clear_output();
    self
}
/// Appends a trimmed snippet (plus a trailing newline) to the contract scope
/// and invalidates the cached build.
pub fn add_contract_code(&mut self, content: &str) -> &mut Self {
    self.contract_code += content.trim();
    self.contract_code += "\n";
    self.clear_output();
    self
}
/// Appends a trimmed snippet (plus a trailing newline) to the body of the
/// REPL contract's `run()` function and invalidates the cached build.
pub fn add_run_code(&mut self, content: &str) -> &mut Self {
    self.run_code += content.trim();
    self.run_code += "\n";
    self.clear_output();
    self
}
/// Wipes the global, contract, and `run()` sections, plus the cached build.
pub fn clear(&mut self) {
    self.global_code.clear();
    self.contract_code.clear();
    self.run_code.clear();
    self.clear_output();
}
/// Clears only the global-scope code (and the cached build).
pub fn clear_global(&mut self) -> &mut Self {
    self.global_code.clear();
    self.clear_output();
    self
}
/// Clears only the contract-scope code (and the cached build).
pub fn clear_contract(&mut self) -> &mut Self {
    self.contract_code.clear();
    self.clear_output();
    self
}
/// Clears only the `run()` function code (and the cached build).
pub fn clear_run(&mut self) -> &mut Self {
    self.run_code.clear();
    self.clear_output();
    self
}
/// Invalidates the cached compilation output.
fn clear_output(&mut self) {
    let _ = self.output.take();
}
/// Compiles the source if necessary.
///
/// Returns the cached [`GeneratedOutput`] when one is present; otherwise
/// compiles the project, generates the intermediate output, and caches the
/// combined result.
pub fn build(&self) -> Result<&GeneratedOutput> {
// TODO: mimics `get_or_try_init`
if let Some(output) = self.output.get() {
return Ok(output);
}
let output = self.compile()?;
let intermediate = self.generate_intermediate_output()?;
let output = GeneratedOutput { output, intermediate };
Ok(self.output.get_or_init(|| output))
}
/// Compiles the source.
///
/// Builds an ephemeral project from the current session sources and bails
/// with the rendered diagnostics on any compiler error.
#[cold]
fn compile(&self) -> Result<ProjectCompileOutput> {
let sources = self.get_sources();
let mut project = self.config.foundry_config.ephemeral_project()?;
// `ir_minimum` lowers optimization (via-IR) to work around "stack too deep"
// while keeping a relatively accurate source map (see SessionSourceConfig).
self.config.foundry_config.disable_optimizations(&mut project, self.config.ir_minimum);
let mut output = ProjectCompiler::with_sources(&project, sources)?.compile()?;
if output.has_compiler_errors() {
eyre::bail!("{output}");
}
// TODO(dani): re-enable
// Intentionally compiled out: AST lowering is not wired up here yet.
if cfg!(false) {
output.parser_mut().solc_mut().compiler_mut().enter_mut(|c| {
let _ = c.lower_asts();
});
}
Ok(output)
}
/// Collects the sources for compilation: the rendered REPL source plus, when
/// needed, the bundled `Vm.sol`.
fn get_sources(&self) -> Sources {
    let mut sources = Sources::new();
    sources.insert(self.file_name.clone().into(), Source::new(self.to_repl_source()));
    // Fall back to the bundled Vm.sol only when VM injection is enabled and no
    // `forge-std` remapping is available.
    if !self.config.no_vm
        && !self
            .config
            .foundry_config
            .get_all_remappings()
            .any(|remapping| remapping.name.starts_with("forge-std"))
    {
        sources.insert("forge-std/Vm.sol".into(), self.vm_source.clone());
    }
    sources
}
/// Generate intermediate contracts for all contract definitions in the
/// compilation sources.
///
/// ### Returns
///
/// A map of contract names to their [IntermediateContract]s.
pub fn generate_intermediate_contracts(&self) -> Result<HashMap<String, IntermediateContract>> {
    let mut contracts = HashMap::default();
    for source in self.get_sources().values() {
        Self::get_intermediate_contract(&source.content, &mut contracts);
    }
    Ok(contracts)
}
/// Generate intermediate output for the REPL contract
///
/// Builds an [IntermediateOutput] holding all parsed intermediate contracts
/// plus a map of the REPL contract's variable names to their parse-tree types,
/// extended with definitions gathered from the `run()` function body.
pub fn generate_intermediate_output(&self) -> Result<IntermediateOutput> {
// Parse generate intermediate contracts
let intermediate_contracts = self.generate_intermediate_contracts()?;
// Construct variable definitions (name -> declared type) for the REPL contract
let variable_definitions = intermediate_contracts
.get("REPL")
.ok_or_else(|| eyre::eyre!("Could not find intermediate REPL contract!"))?
.variable_definitions
.clone()
.into_iter()
.map(|(k, v)| (k, v.ty))
.collect::<HashMap<String, pt::Expression>>();
// Construct intermediate output
let mut intermediate_output = IntermediateOutput {
repl_contract_expressions: variable_definitions,
intermediate_contracts,
};
// Add all statements within the run function to the repl_contract_expressions map
for (key, val) in intermediate_output
.run_func_body()?
.clone()
.iter()
.flat_map(Self::get_statement_definitions)
{
intermediate_output.repl_contract_expressions.insert(key, val);
}
Ok(intermediate_output)
}
/// Construct the source as a valid Forge script.
///
/// Renders the session's code sections into a `forge-std` `Script` contract
/// so the session can be exported and run with `forge script`.
pub fn to_script_source(&self) -> String {
let Self {
contract_name,
global_code,
contract_code: top_level_code,
run_code,
config,
..
} = self;
// Only pull in `Script.sol` when VM injection is enabled.
let script_import =
if !config.no_vm { "import {Script} from \"forge-std/Script.sol\";\n" } else { "" };
format!(
r#"
// SPDX-License-Identifier: UNLICENSED
pragma solidity 0;
{script_import}
{global_code}
contract {contract_name} is Script {{
{top_level_code}
/// @notice Script entry point
function run() public {{
{run_code}
}}
}}"#,
)
}
/// Construct the REPL source.
///
/// Renders the session's code sections into the `REPL` contract. When VM
/// injection is enabled and a `forge-std` remapping exists, the real `Vm.sol`
/// is imported and a `vm` constant is declared at the HEVM cheatcode address.
pub fn to_repl_source(&self) -> String {
let Self {
contract_name,
global_code,
contract_code: top_level_code,
run_code,
config,
..
} = self;
let (mut vm_import, mut vm_constant) = (String::new(), String::new());
// Check if there's any `forge-std` remapping and determine proper path to it by
// searching remapping path.
if !config.no_vm
&& let Some(remapping) = config
.foundry_config
.remappings
.iter()
.find(|remapping| remapping.name == "forge-std/")
&& let Some(vm_path) = WalkDir::new(&remapping.path.path)
.into_iter()
.filter_map(|e| e.ok())
.find(|e| e.file_name() == "Vm.sol")
{
vm_import = format!("import {{Vm}} from \"{}\";\n", vm_path.path().display());
vm_constant = "Vm internal constant vm = Vm(address(uint160(uint256(keccak256(\"hevm cheat code\")))));\n".to_string();
}
format!(
r#"
// SPDX-License-Identifier: UNLICENSED
pragma solidity 0;
{vm_import}
{global_code}
contract {contract_name} {{
{vm_constant}
{top_level_code}
/// @notice REPL contract entry point
function run() public {{
{run_code}
}}
}}"#,
)
}
/// Parse the current source in memory using Solar.
///
/// Builds a throwaway Solar session with a buffering diagnostics emitter and
/// parses the rendered REPL source; buffered errors are returned to the
/// caller as [`EmittedDiagnostics`].
pub(crate) fn parse(&self) -> Result<(), EmittedDiagnostics> {
let sess =
solar::interface::Session::builder().with_buffer_emitter(Default::default()).build();
let _ = sess.enter_sequential(|| -> solar::interface::Result<()> {
let arena = solar::ast::Arena::new();
let filename = self.file_name.clone().into();
let src = self.to_repl_source();
let mut parser = solar::parse::Parser::from_source_code(&sess, &arena, filename, src)?;
let _ast = parser.parse_file().map_err(|e| e.emit())?;
Ok(())
});
// Any errors were captured by the buffer emitter; surface them here.
sess.dcx.emitted_errors().unwrap()
}
/// Gets the [IntermediateContract] for a Solidity source string and inserts it into the
/// passed `res_map`. In addition, recurses on any imported files as well.
///
/// ### Takes
/// - `content` - A Solidity source string
/// - `res_map` - A mutable reference to a map of contract names to [IntermediateContract]s
pub fn get_intermediate_contract(
content: &str,
res_map: &mut HashMap<String, IntermediateContract>,
) {
if let Ok((pt::SourceUnit(source_unit_parts), _)) = solang_parser::parse(content, 0) {
let func_defs = source_unit_parts
.into_iter()
.filter_map(|sup| match sup {
pt::SourceUnitPart::ImportDirective(i) => match i {
pt::Import::Plain(s, _)
| pt::Import::Rename(s, _, _)
| pt::Import::GlobalSymbol(s, _, _) => {
let s = match s {
pt::ImportPath::Filename(s) => s.string,
pt::ImportPath::Path(p) => p.to_string(),
};
let path = PathBuf::from(s);
match fs::read_to_string(path) {
Ok(source) => {
Self::get_intermediate_contract(&source, res_map);
None
}
Err(_) => None,
}
}
},
pt::SourceUnitPart::ContractDefinition(cd) => {
let mut intermediate = IntermediateContract::default();
cd.parts.into_iter().for_each(|part| match part {
pt::ContractPart::FunctionDefinition(def) => {
// Only match normal function definitions here.
if matches!(def.ty, pt::FunctionTy::Function) {
intermediate
.function_definitions
.insert(def.name.clone().unwrap().name, def);
}
}
pt::ContractPart::EventDefinition(def) => {
let event_name = def.name.safe_unwrap().name.clone();
intermediate.event_definitions.insert(event_name, def);
}
pt::ContractPart::StructDefinition(def) => {
let struct_name = def.name.safe_unwrap().name.clone();
intermediate.struct_definitions.insert(struct_name, def);
}
pt::ContractPart::VariableDefinition(def) => {
let var_name = def.name.safe_unwrap().name.clone();
intermediate.variable_definitions.insert(var_name, def);
}
_ => {}
});
Some((cd.name.safe_unwrap().name.clone(), intermediate))
}
_ => None,
})
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/lib.rs | crates/chisel/src/lib.rs | //! Chisel is a fast, utilitarian, and verbose Solidity REPL.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate foundry_common;
#[macro_use]
extern crate tracing;
pub mod args;
pub mod cmd;
pub mod dispatcher;
pub mod executor;
pub mod opts;
pub mod runner;
pub mod session;
pub mod source;
mod solidity_helper;
pub use solidity_helper::SolidityHelper;
pub mod prelude {
pub use crate::{cmd::*, dispatcher::*, runner::*, session::*, solidity_helper::*, source::*};
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/runner.rs | crates/chisel/src/runner.rs | //! ChiselRunner
//!
//! This module contains the `ChiselRunner` struct, which assists with deploying
//! and calling the REPL contract on a in-memory REVM instance.
use alloy_primitives::{Address, Bytes, Log, U256, map::AddressHashMap};
use eyre::Result;
use foundry_evm::{
executors::{DeployResult, Executor, RawCallResult},
traces::{TraceKind, Traces},
};
/// The function selector of the REPL contract's entrypoint, the `run()` function.
// `const` rather than `static`: a tiny immutable value gains nothing from a
// fixed memory location and `const` is the idiomatic choice.
const RUN_SELECTOR: [u8; 4] = [0xc0, 0x40, 0x62, 0x26];
/// The Chisel Runner
///
/// Based off of foundry's `forge script` runner (`runner.rs` in the script
/// crate).
#[derive(Debug)]
pub struct ChiselRunner {
/// The Executor
pub executor: Executor,
/// An initial balance
pub initial_balance: U256,
/// The sender
pub sender: Address,
/// Input calldata appended to `RUN_SELECTOR`
pub input: Option<Vec<u8>>,
}
/// Represents the result of a Chisel REPL run
#[derive(Debug, Default)]
pub struct ChiselResult {
/// Was the run a success?
pub success: bool,
/// Transaction logs
pub logs: Vec<Log>,
/// Call traces
pub traces: Traces,
/// Amount of gas used in the transaction
pub gas_used: u64,
/// Map of addresses to their labels
pub labeled_addresses: AddressHashMap<String>,
/// Return data
pub returned: Bytes,
/// Called address
pub address: Address,
/// EVM State at the final instruction of the `run()` function, as a
/// (word stack, memory bytes) pair.
pub state: Option<(Vec<U256>, Vec<u8>)>,
}
/// ChiselRunner implementation
impl ChiselRunner {
    /// Builds a [ChiselRunner] from an [Executor], the sender's initial
    /// balance, the sender [Address], and optional calldata that will be
    /// appended to [RUN_SELECTOR].
    pub fn new(
        executor: Executor,
        initial_balance: U256,
        sender: Address,
        input: Option<Vec<u8>>,
    ) -> Self {
        Self { executor, initial_balance, sender, input }
    }

    /// Deploys `bytecode` as the REPL contract and calls its `run()`
    /// entrypoint, returning the collected [ChiselResult].
    pub fn run(&mut self, bytecode: Bytes) -> Result<ChiselResult> {
        // Fund the deployment with the maximum balance; traces/logs of the
        // deployment itself are intentionally discarded.
        self.executor.set_balance(self.sender, U256::MAX)?;
        let DeployResult { address, .. } = self
            .executor
            .deploy(self.sender, bytecode, U256::ZERO, None)
            .map_err(|err| eyre::eyre!("Failed to deploy REPL contract:\n{}", err))?;
        // Restore the configured balance before executing `run()`.
        self.executor.set_balance(self.sender, self.initial_balance)?;
        // Calldata = run() selector ++ optional user-provided input.
        let calldata: Vec<u8> = RUN_SELECTOR
            .iter()
            .copied()
            .chain(self.input.clone().unwrap_or_default())
            .collect();
        let RawCallResult { result, reverted, logs, traces, labels, chisel_state, gas_used, .. } =
            self.executor.transact_raw(self.sender, address, calldata.into(), U256::ZERO)?;
        Ok(ChiselResult {
            success: !reverted,
            logs,
            traces: traces.map(|t| vec![(TraceKind::Execution, t)]).unwrap_or_default(),
            gas_used,
            labeled_addresses: labels,
            returned: result,
            address,
            state: chisel_state,
        })
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/session.rs | crates/chisel/src/session.rs | //! ChiselSession
//!
//! This module contains the `ChiselSession` struct, which is the top-level
//! wrapper for a serializable REPL session.
use crate::prelude::{SessionSource, SessionSourceConfig};
use eyre::Result;
use serde::{Deserialize, Serialize};
use std::path::Path;
use time::{OffsetDateTime, format_description};
/// A Chisel REPL Session
#[derive(Debug, Serialize, Deserialize)]
pub struct ChiselSession {
/// The `SessionSource` object that houses the REPL session.
pub source: SessionSource,
/// The current session's identifier
///
/// `None` until the session is first written to the cache.
pub id: Option<String>,
}
// ChiselSession Common Associated Functions
impl ChiselSession {
/// Create a new `ChiselSession` with a specified `solc` version and configuration.
///
/// ### Takes
///
/// An instance of [SessionSourceConfig]
///
/// ### Returns
///
/// A new instance of [ChiselSession]
pub fn new(config: SessionSourceConfig) -> Result<Self> {
// Return initialized ChiselSession with set solc version
Ok(Self { source: SessionSource::new(config)?, id: None })
}
/// Render the full source code for the current session.
///
/// ### Returns
///
/// Returns the full, flattened source code for the current session.
///
/// ### Notes
///
/// This function will not panic, but will return a blank string if the
/// session's [SessionSource] is None.
pub fn contract_source(&self) -> String {
self.source.to_repl_source()
}
/// Clears the cache directory
///
/// ### WARNING
///
/// This will delete all sessions from the cache.
/// There is no method of recovering these deleted sessions.
pub fn clear_cache() -> Result<()> {
let cache_dir = Self::cache_dir()?;
for entry in std::fs::read_dir(cache_dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
std::fs::remove_dir_all(path)?;
} else {
std::fs::remove_file(path)?;
}
}
Ok(())
}
/// Writes the ChiselSession to a file by serializing it to a JSON string
///
/// ### Returns
///
/// Returns the path of the new cache file
pub fn write(&mut self) -> Result<String> {
// Try to create the cache directory
let cache_dir = Self::cache_dir()?;
std::fs::create_dir_all(&cache_dir)?;
let cache_file_name = match self.id.as_ref() {
Some(id) => {
// ID is already set- use the existing cache file.
format!("{cache_dir}chisel-{id}.json")
}
None => {
// Get the next session cache ID / file
let (id, file_name) = Self::next_cached_session()?;
// Set the session's ID
self.id = Some(id);
// Return the new session's cache file name
file_name
}
};
// Write the current ChiselSession to that file
let serialized_contents = serde_json::to_string_pretty(self)?;
std::fs::write(&cache_file_name, serialized_contents)?;
// Return the full cache file path
// Ex: /home/user/.foundry/cache/chisel/chisel-0.json
Ok(cache_file_name)
}
/// Get the next default session cache file name
///
/// ### Returns
///
/// Optionally, returns a tuple containing the next cached session's id and file name.
pub fn next_cached_session() -> Result<(String, String)> {
let cache_dir = Self::cache_dir()?;
let mut entries = std::fs::read_dir(&cache_dir)?;
// If there are no existing cached sessions, just create the first one: "chisel-0.json"
let mut latest = if let Some(e) = entries.next() {
e?
} else {
return Ok((String::from("0"), format!("{cache_dir}chisel-0.json")));
};
let mut session_num = 1;
// Get the latest cached session
for entry in entries {
let entry = entry?;
if entry.metadata()?.modified()? >= latest.metadata()?.modified()? {
latest = entry;
}
// Increase session_num counter rather than cloning the iterator and using `.count`
session_num += 1;
}
Ok((format!("{session_num}"), format!("{cache_dir}chisel-{session_num}.json")))
}
/// The Chisel Cache Directory
///
/// ### Returns
///
/// Optionally, the directory of the chisel cache.
pub fn cache_dir() -> Result<String> {
let home_dir =
dirs::home_dir().ok_or_else(|| eyre::eyre!("Failed to grab home directory"))?;
let home_dir_str = home_dir
.to_str()
.ok_or_else(|| eyre::eyre!("Failed to convert home directory to string"))?;
Ok(format!("{home_dir_str}/.foundry/cache/chisel/"))
}
/// Create the cache directory if it does not exist
///
/// ### Returns
///
/// The unit type if the operation was successful.
pub fn create_cache_dir() -> Result<()> {
let cache_dir = Self::cache_dir()?;
if !Path::new(&cache_dir).exists() {
std::fs::create_dir_all(&cache_dir)?;
}
Ok(())
}
/// Returns a list of all available cached sessions.
pub fn get_sessions() -> Result<Vec<(String, String)>> {
// Read the cache directory entries
let cache_dir = Self::cache_dir()?;
let entries = std::fs::read_dir(cache_dir)?;
// For each entry, get the file name and modified time
let mut sessions = Vec::new();
for entry in entries {
let entry = entry?;
let modified_time = entry.metadata()?.modified()?;
let file_name = entry.file_name();
let file_name = file_name
.into_string()
.map_err(|e| eyre::eyre!(format!("{}", e.to_string_lossy())))?;
sessions.push((
systemtime_strftime(modified_time, "[year]-[month]-[day] [hour]:[minute]:[second]")
.unwrap(),
file_name,
));
}
Ok(sessions)
}
/// Loads a specific ChiselSession from the specified cache file
///
/// ### Takes
///
/// The ID of the chisel session that you wish to load.
///
/// ### Returns
///
/// Optionally, an owned instance of the loaded chisel session.
pub fn load(id: &str) -> Result<Self> {
let cache_dir = Self::cache_dir()?;
let contents = std::fs::read_to_string(Path::new(&format!("{cache_dir}chisel-{id}.json")))?;
let chisel_env: Self = serde_json::from_str(&contents)?;
Ok(chisel_env)
}
/// Gets the most recent chisel session from the cache dir
///
/// ### Returns
///
/// Optionally, the file name of the most recently modified cached session.
pub fn latest_cached_session() -> Result<String> {
let cache_dir = Self::cache_dir()?;
let mut entries = std::fs::read_dir(cache_dir)?;
let mut latest = entries.next().ok_or_else(|| eyre::eyre!("No entries found!"))??;
for entry in entries {
let entry = entry?;
if entry.metadata()?.modified()? > latest.metadata()?.modified()? {
latest = entry;
}
}
Ok(latest
.path()
.to_str()
.ok_or_else(|| eyre::eyre!("Failed to get session path!"))?
.to_string())
}
/// Loads the latest ChiselSession from the cache file
///
/// ### Returns
///
/// Optionally, an owned instance of the most recently modified cached session.
pub fn latest() -> Result<Self> {
let last_session = Self::latest_cached_session()?;
let last_session_contents = std::fs::read_to_string(Path::new(&last_session))?;
let chisel_env: Self = serde_json::from_str(&last_session_contents)?;
Ok(chisel_env)
}
}
/// Formats any value convertible to [`OffsetDateTime`] using a `time`
/// format-description string.
fn systemtime_strftime<T: Into<OffsetDateTime>>(dt: T, format: &str) -> Result<String> {
    let description = format_description::parse(format)?;
    Ok(dt.into().format(&description)?)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/args.rs | crates/chisel/src/args.rs | use crate::{
opts::{Chisel, ChiselSubcommand},
prelude::{ChiselCommand, ChiselDispatcher, SolidityHelper},
};
use clap::Parser;
use eyre::{Context, Result};
use foundry_cli::utils::{self, LoadConfig};
use foundry_common::fs;
use rustyline::{Editor, config::Configurer, error::ReadlineError};
use std::{ops::ControlFlow, path::PathBuf};
use yansi::Paint;
/// Run the `chisel` command line interface.
///
/// Parses CLI arguments, applies global options, and drives the async entry
/// point on the configured tokio runtime.
pub fn run() -> Result<()> {
setup()?;
let args = Chisel::parse();
args.global.init()?;
args.global.tokio_runtime().block_on(run_command(args))
}
/// Setup the global logger and other utilities.
pub fn setup() -> Result<()> {
// Common process setup shared across foundry binaries.
utils::common_setup();
// Install the global log subscriber.
utils::subscriber();
Ok(())
}
/// Propagates `ControlFlow::Break` out of the surrounding function by
/// returning `Ok(())`; falls through on `ControlFlow::Continue`.
macro_rules! try_cf {
($e:expr) => {
match $e {
ControlFlow::Continue(()) => {}
ControlFlow::Break(()) => return Ok(()),
}
};
}
/// Run the subcommand.
///
/// Builds the dispatcher from the loaded configuration, evaluates any prelude
/// sources, then either executes a one-shot subcommand or enters the
/// interactive REPL loop.
pub async fn run_command(args: Chisel) -> Result<()> {
// Load configuration
let (config, evm_opts) = args.load_config_and_evm_opts()?;
// Create a new cli dispatcher
let mut dispatcher = ChiselDispatcher::new(crate::source::SessionSourceConfig {
// Enable traces if any level of verbosity was passed
traces: config.verbosity > 0,
foundry_config: config,
no_vm: args.no_vm,
evm_opts,
backend: None,
calldata: None,
ir_minimum: args.ir_minimum,
})?;
// Execute prelude Solidity source files
evaluate_prelude(&mut dispatcher, args.prelude).await?;
// One-shot mode: run the given subcommand and exit instead of starting the REPL.
if let Some(cmd) = args.cmd {
try_cf!(handle_cli_command(&mut dispatcher, cmd).await?);
return Ok(());
}
// Set up the line editor with the Solidity helper and persistent history.
let mut rl = Editor::<SolidityHelper, _>::new()?;
rl.set_helper(Some(dispatcher.helper.clone()));
rl.set_auto_add_history(true);
if let Some(path) = chisel_history_file() {
let _ = rl.load_history(&path);
}
sh_println!("Welcome to Chisel! Type `{}` to show available commands.", "!help".green())?;
// REPL loop.
let mut interrupt = false;
loop {
match rl.readline(&dispatcher.get_prompt()) {
Ok(line) => {
debug!("dispatching next line: {line}");
// Clear interrupt flag.
interrupt = false;
// Dispatch and match results.
let r = dispatcher.dispatch(&line).await;
dispatcher.helper.set_errored(r.is_err());
match r {
Ok(ControlFlow::Continue(())) => {}
Ok(ControlFlow::Break(())) => break,
Err(e) => {
sh_err!("{}", foundry_common::errors::display_chain(&e))?;
}
}
}
// First Ctrl+C warns; a second consecutive Ctrl+C exits.
Err(ReadlineError::Interrupted) => {
if interrupt {
break;
} else {
sh_println!("(To exit, press Ctrl+C again)")?;
interrupt = true;
}
}
Err(ReadlineError::Eof) => break,
Err(err) => {
sh_err!("{err}")?;
break;
}
}
}
// Persist history (best effort) before exiting.
if let Some(path) = chisel_history_file() {
let _ = rl.save_history(&path);
}
Ok(())
}
/// Evaluate multiple Solidity source files contained within a
/// Chisel prelude directory.
///
/// A single file may also be passed instead of a directory; in that case only
/// that file is loaded. A `None` prelude is a no-op.
async fn evaluate_prelude(
dispatcher: &mut ChiselDispatcher,
maybe_prelude: Option<PathBuf>,
) -> Result<()> {
let Some(prelude_dir) = maybe_prelude else { return Ok(()) };
if prelude_dir.is_file() {
sh_println!("{} {}", "Loading prelude source file:".yellow(), prelude_dir.display())?;
try_cf!(load_prelude_file(dispatcher, prelude_dir).await?);
sh_println!("{}\n", "Prelude source file loaded successfully!".green())?;
} else {
// Load every `.sol` file found in the prelude directory.
let prelude_sources = fs::files_with_ext(&prelude_dir, "sol");
let mut print_success_msg = false;
for source_file in prelude_sources {
print_success_msg = true;
sh_println!("{} {}", "Loading prelude source file:".yellow(), source_file.display())?;
try_cf!(load_prelude_file(dispatcher, source_file).await?);
}
// Only celebrate when at least one file was actually loaded.
if print_success_msg {
sh_println!("{}\n", "All prelude source files loaded successfully!".green())?;
}
}
Ok(())
}
/// Reads a single Solidity source file and dispatches its contents as if it
/// had been typed at the REPL.
async fn load_prelude_file(
    dispatcher: &mut ChiselDispatcher,
    file: PathBuf,
) -> Result<ControlFlow<()>> {
    let source = fs::read_to_string(file)
        .wrap_err("Could not load source file. Are you sure this path is correct?")?;
    dispatcher.dispatch(&source).await
}
/// Executes a one-shot `chisel` subcommand against the dispatcher, returning
/// the resulting control flow.
async fn handle_cli_command(
d: &mut ChiselDispatcher,
cmd: ChiselSubcommand,
) -> Result<ControlFlow<()>> {
match cmd {
ChiselSubcommand::List => d.dispatch_command(ChiselCommand::ListSessions).await,
ChiselSubcommand::Load { id } => d.dispatch_command(ChiselCommand::Load { id }).await,
// `view` = load the session, then print its source.
ChiselSubcommand::View { id } => {
let ControlFlow::Continue(()) = d.dispatch_command(ChiselCommand::Load { id }).await?
else {
return Ok(ControlFlow::Break(()));
};
d.dispatch_command(ChiselCommand::Source).await
}
ChiselSubcommand::ClearCache => d.dispatch_command(ChiselCommand::ClearCache).await,
// Evaluate an arbitrary line as if typed at the REPL prompt.
ChiselSubcommand::Eval { command } => d.dispatch(&command).await,
}
}
/// Path of the persistent readline history file (`.chisel_history` inside the
/// Foundry directory), when that directory can be determined.
fn chisel_history_file() -> Option<PathBuf> {
    let foundry_dir = foundry_config::Config::foundry_dir()?;
    Some(foundry_dir.join(".chisel_history"))
}
#[cfg(test)]
mod tests {
use super::*;
use clap::CommandFactory;
/// Ensures the clap CLI definition is internally consistent.
#[test]
fn verify_cli() {
Chisel::command().debug_assert();
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/dispatcher.rs | crates/chisel/src/dispatcher.rs | //! Dispatcher
//!
//! This module contains the `ChiselDispatcher` struct, which handles the dispatching
//! of both builtin commands and Solidity snippets.
use crate::{
prelude::{ChiselCommand, ChiselResult, ChiselSession, SessionSourceConfig, SolidityHelper},
source::SessionSource,
};
use alloy_primitives::{Address, hex};
use eyre::{Context, Result};
use forge_fmt::FormatterConfig;
use foundry_cli::utils::fetch_abi_from_etherscan;
use foundry_config::RpcEndpointUrl;
use foundry_evm::{
decode::decode_console_logs,
traces::{
CallTraceDecoder, CallTraceDecoderBuilder, TraceKind, decode_trace_arena,
identifier::{SignaturesIdentifier, TraceIdentifiers},
render_trace_arena,
},
};
use reqwest::Url;
use solar::{
parse::lexer::token::{RawLiteralKind, RawTokenKind},
sema::ast::Base,
};
use std::{
borrow::Cow,
io::Write,
ops::ControlFlow,
path::{Path, PathBuf},
process::Command,
};
use tempfile::Builder;
use yansi::Paint;
/// Prompt arrow character.
pub const PROMPT_ARROW: char = '➜';
/// Prompt arrow string.
pub const PROMPT_ARROW_STR: &str = "➜";
/// Default prompt (arrow plus trailing space), shown when no session ID is set.
const DEFAULT_PROMPT: &str = "➜ ";
/// Command leader character
pub const COMMAND_LEADER: char = '!';
/// Chisel character
pub const CHISEL_CHAR: &str = "⚒️";
/// Chisel input dispatcher
#[derive(Debug)]
pub struct ChiselDispatcher {
    /// The current REPL session: its source, configuration, and optional cache ID.
    pub session: ChiselSession,
    /// Helper used to syntax-highlight Solidity source before printing it.
    pub helper: SolidityHelper,
}
/// Helper function that formats solidity source with the given [FormatterConfig]
pub fn format_source(source: &str, config: FormatterConfig) -> eyre::Result<String> {
    Ok(forge_fmt::format(source, config).into_result()?)
}
impl ChiselDispatcher {
    /// Associated public function to create a new Dispatcher instance
    pub fn new(config: SessionSourceConfig) -> eyre::Result<Self> {
        let session = ChiselSession::new(config)?;
        Ok(Self { session, helper: Default::default() })
    }
    /// Returns the optional ID of the current session.
    pub fn id(&self) -> Option<&str> {
        self.session.id.as_deref()
    }
    /// Returns the [`SessionSource`].
    pub fn source(&self) -> &SessionSource {
        &self.session.source
    }
    /// Returns the [`SessionSource`].
    pub fn source_mut(&mut self) -> &mut SessionSource {
        &mut self.session.source
    }
    /// Formats the session's REPL source using the session's `fmt` configuration.
    fn format_source(&self) -> eyre::Result<String> {
        format_source(
            &self.source().to_repl_source(),
            self.source().config.foundry_config.fmt.clone(),
        )
    }
    /// Returns the prompt based on the current status of the Dispatcher
    pub fn get_prompt(&self) -> Cow<'static, str> {
        match self.session.id.as_deref() {
            // `(ID: {id}) ➜ `
            Some(id) => {
                // Pre-size for the fixed parts ("(ID: " + ") ") plus the ID and prompt.
                let mut prompt = String::with_capacity(DEFAULT_PROMPT.len() + id.len() + 7);
                prompt.push_str("(ID: ");
                prompt.push_str(id);
                prompt.push_str(") ");
                prompt.push_str(DEFAULT_PROMPT);
                Cow::Owned(prompt)
            }
            // `➜ `
            None => Cow::Borrowed(DEFAULT_PROMPT),
        }
    }
    /// Dispatches an input as a command via [Self::dispatch_command] or as a Solidity snippet.
    pub async fn dispatch(&mut self, mut input: &str) -> Result<ControlFlow<()>> {
        // Inputs starting with the command leader (`!`) are builtin commands.
        if let Some(command) = input.strip_prefix(COMMAND_LEADER) {
            return match ChiselCommand::parse(command) {
                Ok(cmd) => self.dispatch_command(cmd).await,
                Err(e) => eyre::bail!("unrecognized command: {e}"),
            };
        }
        let source = self.source_mut();
        input = input.trim();
        // Checksum any address-sized hex literals and detect trivia-only input.
        let (only_trivia, new_input) = preprocess(input);
        input = &*new_input;
        // If the input is a comment, add it to the run code so we avoid running with empty input
        if only_trivia {
            debug!(?input, "matched trivia");
            if !input.is_empty() {
                source.add_run_code(input);
            }
            return Ok(ControlFlow::Continue(()));
        }
        // Create new source with exact input appended and parse
        let (new_source, do_execute) = source.clone_with_new_line(input.to_string())?;
        // First try to inspect the input as an expression; if inspection succeeds
        // (`Break`), the value was printed and the session source stays unchanged.
        let (cf, res) = source.inspect(input).await?;
        if let Some(res) = &res {
            let _ = sh_println!("{res}");
        }
        if cf.is_break() {
            debug!(%input, ?res, "inspect success");
            return Ok(ControlFlow::Continue(()));
        }
        if do_execute {
            self.execute_and_replace(new_source).await.map(ControlFlow::Continue)
        } else {
            // Inputs that don't need execution are only built before being committed.
            let out = new_source.build()?;
            debug!(%input, ?out, "skipped execute and rebuild source");
            *self.source_mut() = new_source;
            Ok(ControlFlow::Continue(()))
        }
    }
    /// Decodes traces in the given [`ChiselResult`].
    // TODO: Add `known_contracts` back in.
    pub async fn decode_traces(
        session_config: &SessionSourceConfig,
        result: &mut ChiselResult,
        // known_contracts: &ContractsByArtifact,
    ) -> eyre::Result<CallTraceDecoder> {
        let mut decoder = CallTraceDecoderBuilder::new()
            .with_labels(result.labeled_addresses.clone())
            .with_signature_identifier(SignaturesIdentifier::from_config(
                &session_config.foundry_config,
            )?)
            .build();
        // External identifiers need the remote chain id (fetched from the fork RPC, if any).
        let mut identifier = TraceIdentifiers::new().with_external(
            &session_config.foundry_config,
            session_config.evm_opts.get_remote_chain_id().await,
        )?;
        if !identifier.is_empty() {
            for (_, trace) in &mut result.traces {
                decoder.identify(trace, &mut identifier);
            }
        }
        Ok(decoder)
    }
    /// Display the gathered traces of a REPL execution.
    pub async fn show_traces(
        decoder: &CallTraceDecoder,
        result: &mut ChiselResult,
    ) -> eyre::Result<()> {
        if result.traces.is_empty() {
            return Ok(());
        }
        sh_println!("{}", "Traces:".green())?;
        for (kind, trace) in &mut result.traces {
            // Display all Setup + Execution traces.
            if matches!(kind, TraceKind::Setup | TraceKind::Execution) {
                decode_trace_arena(trace, decoder).await;
                sh_println!("{}", render_trace_arena(trace))?;
            }
        }
        Ok(())
    }
    /// Executes `new_source` and, on success, replaces the current session source with it.
    /// On failure the current source is kept and an error is returned. Traces and console
    /// logs are shown when tracing is enabled or the run failed.
    async fn execute_and_replace(&mut self, mut new_source: SessionSource) -> Result<()> {
        let mut res = new_source.execute().await?;
        let failed = !res.success;
        if new_source.config.traces || failed {
            if let Ok(decoder) = Self::decode_traces(&new_source.config, &mut res).await {
                Self::show_traces(&decoder, &mut res).await?;
                // Show console logs, if there are any
                let decoded_logs = decode_console_logs(&res.logs);
                if !decoded_logs.is_empty() {
                    let _ = sh_println!("{}", "Logs:".green());
                    for log in decoded_logs {
                        let _ = sh_println!("  {log}");
                    }
                }
            }
            if failed {
                // If the contract execution failed, continue on without
                // updating the source.
                eyre::bail!("Failed to execute edited contract!");
            }
        }
        // the code could be compiled, save it
        *self.source_mut() = new_source;
        Ok(())
    }
}
/// [`ChiselCommand`] implementations.
impl ChiselDispatcher {
    /// Dispatches a [`ChiselCommand`].
    pub async fn dispatch_command(&mut self, cmd: ChiselCommand) -> Result<ControlFlow<()>> {
        match cmd {
            // `Break` signals the caller (the REPL loop) to exit.
            ChiselCommand::Quit => Ok(ControlFlow::Break(())),
            cmd => self.dispatch_command_impl(cmd).await.map(ControlFlow::Continue),
        }
    }
    /// Routes every non-`Quit` command to its handler method.
    async fn dispatch_command_impl(&mut self, cmd: ChiselCommand) -> Result<()> {
        match cmd {
            ChiselCommand::Help => self.show_help(),
            // `Quit` is intercepted in `dispatch_command` before reaching this point.
            ChiselCommand::Quit => unreachable!(),
            ChiselCommand::Clear => self.clear_source(),
            ChiselCommand::Save { id } => self.save_session(id),
            ChiselCommand::Load { id } => self.load_session(&id),
            ChiselCommand::ListSessions => self.list_sessions(),
            ChiselCommand::Source => self.show_source(),
            ChiselCommand::ClearCache => self.clear_cache(),
            ChiselCommand::Fork { url } => self.set_fork(url),
            ChiselCommand::Traces => self.toggle_traces(),
            ChiselCommand::Calldata { data } => self.set_calldata(data.as_deref()),
            ChiselCommand::MemDump => self.show_mem_dump().await,
            ChiselCommand::StackDump => self.show_stack_dump().await,
            ChiselCommand::Export => self.export(),
            ChiselCommand::Fetch { addr, name } => self.fetch_interface(addr, name).await,
            ChiselCommand::Exec { command, args } => self.exec_command(command, args),
            ChiselCommand::Edit => self.edit_session().await,
            ChiselCommand::RawStack { var } => self.show_raw_stack(var).await,
        }
    }
    /// Prints the formatted help text for all builtin commands.
    pub(crate) fn show_help(&self) -> Result<()> {
        sh_println!("{}", ChiselCommand::format_help())
    }
    /// Clears the current session source.
    pub(crate) fn clear_source(&mut self) -> Result<()> {
        self.source_mut().clear();
        sh_println!("Cleared session!")
    }
    /// Persists the current session to the cache, optionally under a new ID.
    pub(crate) fn save_session(&mut self, id: Option<String>) -> Result<()> {
        // If a new name was supplied, overwrite the ID of the current session.
        if let Some(id) = id {
            // TODO: Should we delete the old cache file if the id of the session changes?
            self.session.id = Some(id);
        }
        self.session.write()?;
        sh_println!("Saved session to cache with ID = {}", self.session.id.as_ref().unwrap())
    }
    /// Loads a cached session by ID (`"latest"` loads the most recent one), saving the
    /// current session first when it is non-empty.
    pub(crate) fn load_session(&mut self, id: &str) -> Result<()> {
        // Try to save the current session before loading another.
        // Don't save an empty session.
        if !self.source().run_code.is_empty() {
            self.session.write()?;
            sh_println!("{}", "Saved current session!".green())?;
        }
        let new_session = match id {
            "latest" => ChiselSession::latest(),
            id => ChiselSession::load(id),
        }
        .wrap_err("failed to load session")?;
        // Validate the loaded source by building it before swapping it in.
        new_session.source.build()?;
        self.session = new_session;
        sh_println!("Loaded Chisel session! (ID = {})", self.session.id.as_ref().unwrap())
    }
    /// Lists all cached sessions along with their timestamps.
    pub(crate) fn list_sessions(&self) -> Result<()> {
        let sessions = ChiselSession::get_sessions()?;
        if sessions.is_empty() {
            eyre::bail!("No sessions found. Use the `!save` command to save a session.");
        }
        sh_println!(
            "{}\n{}",
            format!("{CHISEL_CHAR} Chisel Sessions").cyan(),
            sessions
                .iter()
                .map(|(time, name)| format!("{} - {}", format!("{time:?}").blue(), name))
                .collect::<Vec<String>>()
                .join("\n")
        )
    }
    /// Prints the formatted and syntax-highlighted session source.
    pub(crate) fn show_source(&self) -> Result<()> {
        let formatted = self.format_source().wrap_err("failed to format session source")?;
        let highlighted = self.helper.highlight(&formatted);
        sh_println!("{highlighted}")
    }
    /// Clears the on-disk session cache and detaches the current session from its ID.
    pub(crate) fn clear_cache(&mut self) -> Result<()> {
        ChiselSession::clear_cache().wrap_err("failed to clear cache")?;
        self.session.id = None;
        sh_println!("Cleared chisel cache!")
    }
    /// Sets the fork URL for the session's EVM, or switches back to the local
    /// environment when `url` is `None`.
    pub(crate) fn set_fork(&mut self, url: Option<String>) -> Result<()> {
        let Some(url) = url else {
            self.source_mut().config.evm_opts.fork_url = None;
            sh_println!("Now using local environment.")?;
            return Ok(());
        };
        // If the argument is an RPC alias designated in the
        // `[rpc_endpoints]` section of the `foundry.toml` within
        // the pwd, use the URL matched to the key.
        let endpoint = if let Some(endpoint) =
            self.source_mut().config.foundry_config.rpc_endpoints.get(&url)
        {
            endpoint.clone()
        } else {
            RpcEndpointUrl::Env(url).into()
        };
        let fork_url = endpoint.resolve().url()?;
        if let Err(e) = Url::parse(&fork_url) {
            eyre::bail!("invalid fork URL: {e}");
        }
        sh_println!("Set fork URL to {}", fork_url.yellow())?;
        self.source_mut().config.evm_opts.fork_url = Some(fork_url);
        // Clear the backend so that it is re-instantiated with the new fork
        // upon the next execution of the session source.
        self.source_mut().config.backend = None;
        Ok(())
    }
    /// Toggles trace display for session executions.
    pub(crate) fn toggle_traces(&mut self) -> Result<()> {
        let t = &mut self.source_mut().config.traces;
        *t = !*t;
        sh_println!("{} traces!", if *t { "Enabled" } else { "Disabled" })
    }
    /// Sets the calldata used when running the session; empty or absent input clears it.
    pub(crate) fn set_calldata(&mut self, data: Option<&str>) -> Result<()> {
        // remove empty space, double quotes, and 0x prefix
        let arg = data
            .map(|s| s.trim_matches(|c: char| c.is_whitespace() || c == '"' || c == '\''))
            .map(|s| s.strip_prefix("0x").unwrap_or(s))
            .unwrap_or("");
        if arg.is_empty() {
            self.source_mut().config.calldata = None;
            sh_println!("Calldata cleared.")?;
            return Ok(());
        }
        let calldata = hex::decode(arg);
        match calldata {
            Ok(calldata) => {
                self.source_mut().config.calldata = Some(calldata);
                sh_println!("Set calldata to '{}'", arg.yellow())
            }
            Err(e) => {
                eyre::bail!("Invalid calldata: {e}")
            }
        }
    }
    /// Executes the session and dumps the resulting memory, one 32-byte word per line.
    pub(crate) async fn show_mem_dump(&mut self) -> Result<()> {
        let res = self.source_mut().execute().await?;
        let Some((_, mem)) = res.state.as_ref() else {
            eyre::bail!("Run function is empty.");
        };
        // NOTE(review): assumes `mem.len()` is a multiple of 32; the slice below would
        // panic otherwise — confirm the executor always word-aligns memory.
        for i in (0..mem.len()).step_by(32) {
            let _ = sh_println!(
                "{}: {}",
                format!("[0x{:02x}:0x{:02x}]", i, i + 32).yellow(),
                hex::encode_prefixed(&mem[i..i + 32]).cyan()
            );
        }
        Ok(())
    }
    /// Executes the session and dumps the resulting stack, iterating items in reverse
    /// order and labeling them from 0 upward.
    pub(crate) async fn show_stack_dump(&mut self) -> Result<()> {
        let res = self.source_mut().execute().await?;
        let Some((stack, _)) = res.state.as_ref() else {
            eyre::bail!("Run function is empty.");
        };
        for i in (0..stack.len()).rev() {
            let _ = sh_println!(
                "{}: {}",
                format!("[{}]", stack.len() - i - 1).yellow(),
                format!("0x{:02x}", stack[i]).cyan()
            );
        }
        Ok(())
    }
    /// Exports the formatted session source to `script/REPL.s.sol` in the current
    /// foundry project (the working directory must contain a `foundry.toml`).
    pub(crate) fn export(&self) -> Result<()> {
        // Check if the pwd is a foundry project
        if !Path::new("foundry.toml").exists() {
            eyre::bail!("Must be in a foundry project to export source to script.");
        }
        // Create "script" dir if it does not already exist.
        if !Path::new("script").exists() {
            std::fs::create_dir_all("script")?;
        }
        let formatted_source = self.format_source()?;
        std::fs::write(PathBuf::from("script/REPL.s.sol"), formatted_source)?;
        sh_println!("Exported session source to script/REPL.s.sol!")
    }
    /// Fetches an interface from Etherscan
    pub(crate) async fn fetch_interface(&mut self, address: Address, name: String) -> Result<()> {
        let abis = fetch_abi_from_etherscan(address, &self.source().config.foundry_config)
            .await
            .wrap_err("Failed to fetch ABI from Etherscan")?;
        // Only the first returned ABI is used.
        let (abi, _) = abis
            .into_iter()
            .next()
            .ok_or_else(|| eyre::eyre!("No ABI found for address {address} on Etherscan"))?;
        let code = forge_fmt::format(&abi.to_sol(&name, None), FormatterConfig::default())
            .into_result()?;
        self.source_mut().add_global_code(&code);
        sh_println!("Added {address}'s interface to source as `{name}`")
    }
    /// Runs an arbitrary shell command with the given args; its exit status is ignored.
    pub(crate) fn exec_command(&self, command: String, args: Vec<String>) -> Result<()> {
        let mut cmd = Command::new(command);
        cmd.args(args);
        let _ = cmd.status()?;
        Ok(())
    }
    /// Opens the `run()` body in `$EDITOR` (default `vim`); if the editor exits
    /// successfully and the edited code executes, it replaces the session source.
    pub(crate) async fn edit_session(&mut self) -> Result<()> {
        // create a temp file with the content of the run code
        let mut tmp = Builder::new()
            .prefix("chisel-")
            .suffix(".sol")
            .tempfile()
            .wrap_err("Could not create temporary file")?;
        tmp.as_file_mut()
            .write_all(self.source().run_code.as_bytes())
            .wrap_err("Could not write to temporary file")?;
        // open the temp file with the editor
        let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vim".to_string());
        let mut cmd = Command::new(editor);
        cmd.arg(tmp.path());
        let st = cmd.status()?;
        if !st.success() {
            eyre::bail!("Editor exited with {st}");
        }
        let edited_code = std::fs::read_to_string(tmp.path())?;
        let mut new_source = self.source().clone();
        new_source.clear_run();
        new_source.add_run_code(&edited_code);
        // if the editor exited successfully, try to compile the new code
        self.execute_and_replace(new_source).await?;
        sh_println!("Successfully edited `run()` function's body!")
    }
    /// Prints the raw value of a `run()` variable by assigning it to a `bytes32`
    /// via inline assembly and inspecting the result.
    pub(crate) async fn show_raw_stack(&mut self, var: String) -> Result<()> {
        let source = self.source_mut();
        let line = format!("bytes32 __raw__; assembly {{ __raw__ := {var} }}");
        if let Ok((new_source, _)) = source.clone_with_new_line(line)
            && let (_, Some(res)) = new_source.inspect("__raw__").await?
        {
            sh_println!("{res}")?;
            return Ok(());
        }
        eyre::bail!("Variable must exist within `run()` function.")
    }
}
/// Preprocesses addresses to ensure they are correctly checksummed and returns whether the input
/// only contained trivia (comments, whitespace).
fn preprocess(input: &str) -> (bool, Cow<'_, str>) {
let mut only_trivia = true;
let mut new_input = Cow::Borrowed(input);
for (pos, token) in solar::parse::Cursor::new(input).with_position() {
use RawTokenKind::*;
if matches!(token.kind, Whitespace | LineComment { .. } | BlockComment { .. }) {
continue;
}
only_trivia = false;
// Ensure that addresses are correctly checksummed.
if let Literal { kind: RawLiteralKind::Int { base: Base::Hexadecimal, .. } } = token.kind
&& token.len == 42
{
let range = pos..pos + 42;
if let Ok(addr) = input[range.clone()].parse::<Address>() {
new_input.to_mut().replace_range(range, addr.to_checksum_buffer(None).as_str());
}
}
}
(only_trivia, new_input)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// `preprocess` must flag inputs made only of comments/whitespace as trivia,
    /// and reject inputs containing any other token.
    #[test]
    fn test_trivia() {
        let only_trivia = |s: &str| preprocess(s).0;
        assert!(only_trivia("// line comment"));
        assert!(only_trivia(" \n// line \tcomment\n"));
        assert!(!only_trivia("// line \ncomment"));
        assert!(only_trivia("/* block comment */"));
        assert!(only_trivia(" \t\n /* block \n \t comment */\n"));
        assert!(!only_trivia("/* block \n \t comment */\nwith \tother"));
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/executor.rs | crates/chisel/src/executor.rs | //! Executor
//!
//! This module contains the execution logic for the [SessionSource].
use crate::{
prelude::{ChiselDispatcher, ChiselResult, ChiselRunner, SessionSource, SolidityHelper},
source::IntermediateOutput,
};
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_json_abi::EventParam;
use alloy_primitives::{Address, B256, U256, hex};
use eyre::{Result, WrapErr};
use foundry_compilers::Artifact;
use foundry_evm::{
backend::Backend, decode::decode_console_logs, executors::ExecutorBuilder,
inspectors::CheatsConfig, traces::TraceMode,
};
use solang_parser::pt;
use std::ops::ControlFlow;
use yansi::Paint;
/// Executor implementation for [SessionSource]
impl SessionSource {
    /// Runs the source with the [ChiselRunner]
    pub async fn execute(&mut self) -> Result<ChiselResult> {
        // Recompile the project and ensure no errors occurred.
        let output = self.build()?;
        let (bytecode, final_pc) = output.enter(|output| -> Result<_> {
            let contract = output
                .repl_contract()
                .ok_or_else(|| eyre::eyre!("failed to find REPL contract"))?;
            trace!(?contract, "REPL contract");
            let bytecode = contract
                .get_bytecode_bytes()
                .ok_or_else(|| eyre::eyre!("No bytecode found for `REPL` contract"))?;
            Ok((bytecode.into_owned(), output.final_pc(contract)?))
        })?;
        let final_pc = final_pc.unwrap_or_default();
        let mut runner = self.build_runner(final_pc).await?;
        runner.run(bytecode)
    }
    /// Inspect a contract element inside of the current session
    ///
    /// ### Takes
    ///
    /// A solidity snippet
    ///
    /// ### Returns
    ///
    /// If the input is valid `Ok((continue, formatted_output))` where:
    /// - `continue` is true if the input should be appended to the source
    /// - `formatted_output` is the formatted value, if any
    pub async fn inspect(&self, input: &str) -> Result<(ControlFlow<()>, Option<String>)> {
        // Wrap the input in `abi.encode(...)` so its value can be read back from memory.
        let line = format!("bytes memory inspectoor = abi.encode({input});");
        let mut source = match self.clone_with_new_line(line) {
            Ok((source, _)) => source,
            Err(err) => {
                debug!(%err, "failed to build new source for inspection");
                return Ok((ControlFlow::Continue(()), None));
            }
        };
        let mut source_without_inspector = self.clone();
        // Events and tuples fails compilation due to it not being able to be encoded in
        // `inspectoor`. If that happens, try executing without the inspector.
        let (mut res, err) = match source.execute().await {
            Ok(res) => (res, None),
            Err(err) => {
                debug!(?err, %input, "execution failed");
                match source_without_inspector.execute().await {
                    Ok(res) => (res, Some(err)),
                    Err(_) => {
                        if self.config.foundry_config.verbosity >= 3 {
                            sh_err!("Could not inspect: {err}")?;
                        }
                        return Ok((ControlFlow::Continue(()), None));
                    }
                }
            }
        };
        // If abi-encoding the input failed, check whether it is an event
        if let Some(err) = err {
            let output = source_without_inspector.build()?;
            let formatted_event =
                output.enter(|output| output.get_event(input).map(format_event_definition));
            if let Some(formatted_event) = formatted_event {
                return Ok((ControlFlow::Break(()), Some(formatted_event?)));
            }
            // we were unable to check the event
            if self.config.foundry_config.verbosity >= 3 {
                sh_err!("Failed eval: {err}")?;
            }
            debug!(%err, %input, "failed abi encode input");
            return Ok((ControlFlow::Break(()), None));
        }
        drop(source_without_inspector);
        let Some((stack, memory)) = &res.state else {
            // Show traces and logs, if there are any, and return an error
            if let Ok(decoder) = ChiselDispatcher::decode_traces(&source.config, &mut res).await {
                ChiselDispatcher::show_traces(&decoder, &mut res).await?;
            }
            let decoded_logs = decode_console_logs(&res.logs);
            if !decoded_logs.is_empty() {
                sh_println!("{}", "Logs:".green())?;
                for log in decoded_logs {
                    sh_println!("  {log}")?;
                }
            }
            return Err(eyre::eyre!("Failed to inspect expression"));
        };
        // Either the expression referred to by `input`, or the last expression,
        // which was wrapped in `abi.encode`.
        let generated_output = source.build()?;
        // If the expression is a variable declaration within the REPL contract, use its type;
        // otherwise, attempt to infer the type.
        let contract_expr = generated_output
            .intermediate
            .repl_contract_expressions
            .get(input)
            .or_else(|| source.infer_inner_expr_type());
        // If the current action is a function call, we get its return type
        // otherwise it returns None
        let function_call_return_type =
            Type::get_function_return_type(contract_expr, &generated_output.intermediate);
        let (contract_expr, ty) = if let Some(function_call_return_type) = function_call_return_type
        {
            (function_call_return_type.0, function_call_return_type.1)
        } else {
            match contract_expr.and_then(|e| {
                Type::ethabi(e, Some(&generated_output.intermediate)).map(|ty| (e, ty))
            }) {
                Some(res) => res,
                // this type was denied for inspection, continue
                None => return Ok((ControlFlow::Continue(()), None)),
            }
        };
        // the file compiled correctly, thus the last stack item must be the memory offset of
        // the `bytes memory inspectoor` value
        let data = (|| -> Option<_> {
            // Layout at `offset`: 32-byte length word followed by `len` bytes of content.
            let mut offset: usize = stack.last()?.try_into().ok()?;
            debug!("inspect memory @ {offset}: {}", hex::encode(memory));
            let mem_offset = memory.get(offset..offset + 32)?;
            let len: usize = U256::try_from_be_slice(mem_offset)?.try_into().ok()?;
            offset += 32;
            memory.get(offset..offset + len)
        })();
        let Some(data) = data else {
            eyre::bail!("Failed to inspect last expression: could not retrieve data from memory")
        };
        let token = ty.abi_decode(data).wrap_err("Could not decode inspected values")?;
        let c = if should_continue(contract_expr) {
            ControlFlow::Continue(())
        } else {
            ControlFlow::Break(())
        };
        Ok((c, Some(format_token(token))))
    }
    /// Gracefully attempts to extract the type of the expression within the `abi.encode(...)`
    /// call inserted by the inspect function.
    ///
    /// ### Takes
    ///
    /// A reference to a [SessionSource]
    ///
    /// ### Returns
    ///
    /// Optionally, a [Type]
    fn infer_inner_expr_type(&self) -> Option<&pt::Expression> {
        let out = self.build().ok()?;
        // The statement of interest is the last one in `run()` — the `inspectoor` line.
        let run = out.run_func_body().ok()?.last();
        match run {
            Some(pt::Statement::VariableDefinition(
                _,
                _,
                Some(pt::Expression::FunctionCall(_, _, args)),
            )) => {
                // We can safely unwrap the first expression because this function
                // will only be called on a session source that has just had an
                // `inspectoor` variable appended to it.
                Some(args.first().unwrap())
            }
            _ => None,
        }
    }
    /// Builds a [ChiselRunner] whose executor halts chisel state collection at `final_pc`.
    async fn build_runner(&mut self, final_pc: usize) -> Result<ChiselRunner> {
        let env = self.config.evm_opts.evm_env().await?;
        // Reuse the cached backend when present; otherwise spawn one (optionally forked)
        // and cache it for subsequent executions.
        let backend = match self.config.backend.clone() {
            Some(backend) => backend,
            None => {
                let fork = self.config.evm_opts.get_fork(&self.config.foundry_config, env.clone());
                let backend = Backend::spawn(fork)?;
                self.config.backend = Some(backend.clone());
                backend
            }
        };
        let executor = ExecutorBuilder::new()
            .inspectors(|stack| {
                stack.chisel_state(final_pc).trace_mode(TraceMode::Call).cheatcodes(
                    CheatsConfig::new(
                        &self.config.foundry_config,
                        self.config.evm_opts.clone(),
                        None,
                        None,
                    )
                    .into(),
                )
            })
            .gas_limit(self.config.evm_opts.gas_limit())
            .spec_id(self.config.foundry_config.evm_spec_id())
            .legacy_assertions(self.config.foundry_config.legacy_assertions)
            .build(env, backend);
        Ok(ChiselRunner::new(executor, U256::MAX, Address::ZERO, self.config.calldata.clone()))
    }
}
/// Formats a value into an inspection message
// TODO: Verbosity option
fn format_token(token: DynSolValue) -> String {
    match token {
        DynSolValue::Address(a) => {
            format!("Type: {}\n└ Data: {}", "address".red(), a.cyan())
        }
        DynSolValue::FixedBytes(b, byte_len) => {
            format!(
                "Type: {}\n└ Data: {}",
                format!("bytes{byte_len}").red(),
                hex::encode_prefixed(b).cyan()
            )
        }
        DynSolValue::Int(i, bit_len) => {
            format!(
                "Type: {}\n├ Hex: {}\n├ Hex (full word): {}\n└ Decimal: {}",
                format!("int{bit_len}").red(),
                // Negative ints are sign-extended to the full 64 hex chars; skip the
                // extension so only `bit_len / 4` nibbles are printed.
                format!(
                    "0x{}",
                    format!("{i:x}")
                        .chars()
                        .skip(if i.is_negative() { 64 - bit_len / 4 } else { 0 })
                        .collect::<String>()
                )
                .cyan(),
                hex::encode_prefixed(B256::from(i)).cyan(),
                i.cyan()
            )
        }
        DynSolValue::Uint(i, bit_len) => {
            format!(
                "Type: {}\n├ Hex: {}\n├ Hex (full word): {}\n└ Decimal: {}",
                format!("uint{bit_len}").red(),
                format!("0x{i:x}").cyan(),
                hex::encode_prefixed(B256::from(i)).cyan(),
                i.cyan()
            )
        }
        DynSolValue::Bool(b) => {
            format!("Type: {}\n└ Value: {}", "bool".red(), b.cyan())
        }
        DynSolValue::String(_) | DynSolValue::Bytes(_) => {
            // ABI encoding of a single dynamic value: word 0 = pointer/offset,
            // word 1 = length, remaining words = contents (as labeled below).
            let hex = hex::encode(token.abi_encode());
            let s = token.as_str();
            format!(
                "Type: {}\n{}├ Hex (Memory):\n├─ Length ({}): {}\n├─ Contents ({}): {}\n├ Hex (Tuple Encoded):\n├─ Pointer ({}): {}\n├─ Length ({}): {}\n└─ Contents ({}): {}",
                if s.is_some() { "string" } else { "dynamic bytes" }.red(),
                if let Some(s) = s {
                    format!("├ UTF-8: {}\n", s.cyan())
                } else {
                    String::default()
                },
                "[0x00:0x20]".yellow(),
                format!("0x{}", &hex[64..128]).cyan(),
                "[0x20:..]".yellow(),
                format!("0x{}", &hex[128..]).cyan(),
                "[0x00:0x20]".yellow(),
                format!("0x{}", &hex[..64]).cyan(),
                "[0x20:0x40]".yellow(),
                format!("0x{}", &hex[64..128]).cyan(),
                "[0x40:..]".yellow(),
                format!("0x{}", &hex[128..]).cyan(),
            )
        }
        DynSolValue::FixedArray(tokens) | DynSolValue::Array(tokens) => {
            let mut out = format!(
                "{}({}) = {}",
                "array".red(),
                format!("{}", tokens.len()).yellow(),
                '['.red()
            );
            // Each element is formatted recursively and indented under the array header.
            for token in tokens {
                out.push_str("\n  ├ ");
                out.push_str(&format_token(token).replace('\n', "\n  "));
                out.push('\n');
            }
            out.push_str(&']'.red().to_string());
            out
        }
        DynSolValue::Tuple(tokens) => {
            let displayed_types = tokens
                .iter()
                .map(|t| t.sol_type_name().unwrap_or_default())
                .collect::<Vec<_>>()
                .join(", ");
            let mut out =
                format!("{}({}) = {}", "tuple".red(), displayed_types.yellow(), '('.red());
            // Each component is formatted recursively and indented under the tuple header.
            for token in tokens {
                out.push_str("\n  ├ ");
                out.push_str(&format_token(token).replace('\n', "\n  "));
                out.push('\n');
            }
            out.push_str(&')'.red().to_string());
            out
        }
        _ => {
            unimplemented!()
        }
    }
}
/// Formats a [pt::EventDefinition] into an inspection message
///
/// ### Takes
///
/// An borrowed [pt::EventDefinition]
///
/// ### Returns
///
/// A formatted [pt::EventDefinition] for use in inspection output.
// TODO: Verbosity option
fn format_event_definition(event_definition: &pt::EventDefinition) -> Result<String> {
    let event_name = event_definition.name.as_ref().expect("Event has a name").to_string();
    // Convert each parse-tree field into an ABI `EventParam`; fails if any field's
    // type is not a builtin ABI type.
    let inputs = event_definition
        .fields
        .iter()
        .map(|param| {
            let name = param
                .name
                .as_ref()
                .map(ToString::to_string)
                .unwrap_or_else(|| "<anonymous>".to_string());
            let kind = Type::from_expression(&param.ty)
                .and_then(Type::into_builtin)
                .ok_or_else(|| eyre::eyre!("Invalid type in event {event_name}"))?;
            Ok(EventParam {
                name,
                ty: kind.to_string(),
                components: vec![],
                indexed: param.indexed,
                internal_type: None,
            })
        })
        .collect::<Result<Vec<_>>>()?;
    let event =
        alloy_json_abi::Event { name: event_name, inputs, anonymous: event_definition.anonymous };
    // Render a highlighted Solidity-style declaration plus the derived signature/selector.
    Ok(format!(
        "Type: {}\n├ Name: {}\n├ Signature: {:?}\n└ Selector: {:?}",
        "event".red(),
        SolidityHelper::new().highlight(&format!(
            "{}({})",
            &event.name,
            &event
                .inputs
                .iter()
                .map(|param| format!(
                    "{}{}{}",
                    param.ty,
                    if param.indexed { " indexed" } else { "" },
                    if param.name.is_empty() {
                        String::default()
                    } else {
                        format!(" {}", &param.name)
                    },
                ))
                .collect::<Vec<_>>()
                .join(", ")
        )),
        event.signature().cyan(),
        event.selector().cyan(),
    ))
}
// =============================================
// Modified from
// [soli](https://github.com/jpopesculian/soli)
// =============================================
#[derive(Clone, Debug, PartialEq)]
enum Type {
    /// A builtin ABI type. (type)
    Builtin(DynSolType),
    /// A dynamically-sized array. (type)
    Array(Box<Self>),
    /// A statically-sized array. (type, length)
    FixedArray(Box<Self>, usize),
    /// An index into an array; the index is `None` when it is not a usize literal. (type, index)
    ArrayIndex(Box<Self>, Option<usize>),
    /// A tuple; unresolvable element types are `None`. (types)
    Tuple(Vec<Option<Self>>),
    /// A function; unresolvable param/return types are `None`. (name, params, returns)
    Function(Box<Self>, Vec<Option<Self>>, Vec<Option<Self>>),
    /// A member access `lhs.rhs`. (lhs, rhs)
    Access(Box<Self>, String),
    /// A custom (non-builtin) type referenced by identifier(s). (types)
    Custom(Vec<String>),
}
impl Type {
/// Convert a [pt::Expression] to a [Type]
///
/// ### Takes
///
/// A reference to a [pt::Expression] to convert.
///
/// ### Returns
///
/// Optionally, an owned [Type]
fn from_expression(expr: &pt::Expression) -> Option<Self> {
match expr {
pt::Expression::Type(_, ty) => Self::from_type(ty),
pt::Expression::Variable(ident) => Some(Self::Custom(vec![ident.name.clone()])),
// array
pt::Expression::ArraySubscript(_, expr, num) => {
// if num is Some then this is either an index operation (arr[<num>])
// or a FixedArray statement (new uint256[<num>])
Self::from_expression(expr).and_then(|ty| {
let boxed = Box::new(ty);
let num = num.as_deref().and_then(parse_number_literal).and_then(|n| {
usize::try_from(n).ok()
});
match expr.as_ref() {
// statement
pt::Expression::Type(_, _) => {
if let Some(num) = num {
Some(Self::FixedArray(boxed, num))
} else {
Some(Self::Array(boxed))
}
}
// index
pt::Expression::Variable(_) => {
Some(Self::ArrayIndex(boxed, num))
}
_ => None
}
})
}
pt::Expression::ArrayLiteral(_, values) => {
values.first().and_then(Self::from_expression).map(|ty| {
Self::FixedArray(Box::new(ty), values.len())
})
}
// tuple
pt::Expression::List(_, params) => Some(Self::Tuple(map_parameters(params))),
// <lhs>.<rhs>
pt::Expression::MemberAccess(_, lhs, rhs) => {
Self::from_expression(lhs).map(|lhs| {
Self::Access(Box::new(lhs), rhs.name.clone())
})
}
// <inner>
pt::Expression::Parenthesis(_, inner) | // (<inner>)
pt::Expression::New(_, inner) | // new <inner>
pt::Expression::UnaryPlus(_, inner) | // +<inner>
// ops
pt::Expression::BitwiseNot(_, inner) | // ~<inner>
pt::Expression::ArraySlice(_, inner, _, _) | // <inner>[*start*:*end*]
// assign ops
pt::Expression::PreDecrement(_, inner) | // --<inner>
pt::Expression::PostDecrement(_, inner) | // <inner>--
pt::Expression::PreIncrement(_, inner) | // ++<inner>
pt::Expression::PostIncrement(_, inner) | // <inner>++
pt::Expression::Assign(_, inner, _) | // <inner> = ...
pt::Expression::AssignAdd(_, inner, _) | // <inner> += ...
pt::Expression::AssignSubtract(_, inner, _) | // <inner> -= ...
pt::Expression::AssignMultiply(_, inner, _) | // <inner> *= ...
pt::Expression::AssignDivide(_, inner, _) | // <inner> /= ...
pt::Expression::AssignModulo(_, inner, _) | // <inner> %= ...
pt::Expression::AssignAnd(_, inner, _) | // <inner> &= ...
pt::Expression::AssignOr(_, inner, _) | // <inner> |= ...
pt::Expression::AssignXor(_, inner, _) | // <inner> ^= ...
pt::Expression::AssignShiftLeft(_, inner, _) | // <inner> <<= ...
pt::Expression::AssignShiftRight(_, inner, _) // <inner> >>= ...
=> Self::from_expression(inner),
// *condition* ? <if_true> : <if_false>
pt::Expression::ConditionalOperator(_, _, if_true, if_false) => {
Self::from_expression(if_true).or_else(|| Self::from_expression(if_false))
}
// address
pt::Expression::AddressLiteral(_, _) => Some(Self::Builtin(DynSolType::Address)),
pt::Expression::HexNumberLiteral(_, s, _) => {
match s.parse::<Address>() {
Ok(addr) => {
if *s == addr.to_checksum(None) {
Some(Self::Builtin(DynSolType::Address))
} else {
Some(Self::Builtin(DynSolType::Uint(256)))
}
},
_ => {
Some(Self::Builtin(DynSolType::Uint(256)))
}
}
}
// uint and int
// invert
pt::Expression::Negate(_, inner) => Self::from_expression(inner).map(Self::invert_int),
// int if either operand is int
// TODO: will need an update for Solidity v0.8.18 user defined operators:
// https://github.com/ethereum/solidity/issues/13718#issuecomment-1341058649
pt::Expression::Add(_, lhs, rhs) |
pt::Expression::Subtract(_, lhs, rhs) |
pt::Expression::Multiply(_, lhs, rhs) |
pt::Expression::Divide(_, lhs, rhs) => {
match (Self::ethabi(lhs, None), Self::ethabi(rhs, None)) {
(Some(DynSolType::Int(_)), Some(DynSolType::Int(_))) |
(Some(DynSolType::Int(_)), Some(DynSolType::Uint(_))) |
(Some(DynSolType::Uint(_)), Some(DynSolType::Int(_))) => {
Some(Self::Builtin(DynSolType::Int(256)))
}
_ => {
Some(Self::Builtin(DynSolType::Uint(256)))
}
}
}
// always assume uint
pt::Expression::Modulo(_, _, _) |
pt::Expression::Power(_, _, _) |
pt::Expression::BitwiseOr(_, _, _) |
pt::Expression::BitwiseAnd(_, _, _) |
pt::Expression::BitwiseXor(_, _, _) |
pt::Expression::ShiftRight(_, _, _) |
pt::Expression::ShiftLeft(_, _, _) |
pt::Expression::NumberLiteral(_, _, _, _) => Some(Self::Builtin(DynSolType::Uint(256))),
// TODO: Rational numbers
pt::Expression::RationalNumberLiteral(_, _, _, _, _) => {
Some(Self::Builtin(DynSolType::Uint(256)))
}
// bool
pt::Expression::BoolLiteral(_, _) |
pt::Expression::And(_, _, _) |
pt::Expression::Or(_, _, _) |
pt::Expression::Equal(_, _, _) |
pt::Expression::NotEqual(_, _, _) |
pt::Expression::Less(_, _, _) |
pt::Expression::LessEqual(_, _, _) |
pt::Expression::More(_, _, _) |
pt::Expression::MoreEqual(_, _, _) |
pt::Expression::Not(_, _) => Some(Self::Builtin(DynSolType::Bool)),
// string
pt::Expression::StringLiteral(_) => Some(Self::Builtin(DynSolType::String)),
// bytes
pt::Expression::HexLiteral(_) => Some(Self::Builtin(DynSolType::Bytes)),
// function
pt::Expression::FunctionCall(_, name, args) => {
Self::from_expression(name).map(|name| {
let args = args.iter().map(Self::from_expression).collect();
Self::Function(Box::new(name), args, vec![])
})
}
pt::Expression::NamedFunctionCall(_, name, args) => {
Self::from_expression(name).map(|name| {
let args = args.iter().map(|arg| Self::from_expression(&arg.expr)).collect();
Self::Function(Box::new(name), args, vec![])
})
}
// explicitly None
pt::Expression::Delete(_, _) | pt::Expression::FunctionCallBlock(_, _, _) => None,
}
}
/// Convert a [pt::Type] to a [Type]
///
/// ### Takes
///
/// A reference to a [pt::Type] to convert.
///
/// ### Returns
///
/// Optionally, an owned [Type]
fn from_type(ty: &pt::Type) -> Option<Self> {
let ty = match ty {
pt::Type::Address | pt::Type::AddressPayable | pt::Type::Payable => {
Self::Builtin(DynSolType::Address)
}
pt::Type::Bool => Self::Builtin(DynSolType::Bool),
pt::Type::String => Self::Builtin(DynSolType::String),
pt::Type::Int(size) => Self::Builtin(DynSolType::Int(*size as usize)),
pt::Type::Uint(size) => Self::Builtin(DynSolType::Uint(*size as usize)),
pt::Type::Bytes(size) => Self::Builtin(DynSolType::FixedBytes(*size as usize)),
pt::Type::DynamicBytes => Self::Builtin(DynSolType::Bytes),
pt::Type::Mapping { value, .. } => Self::from_expression(value)?,
pt::Type::Function { params, returns, .. } => {
let params = map_parameters(params);
let returns = returns
.as_ref()
.map(|(returns, _)| map_parameters(returns))
.unwrap_or_default();
Self::Function(
Box::new(Self::Custom(vec!["__fn_type__".to_string()])),
params,
returns,
)
}
// TODO: Rational numbers
pt::Type::Rational => return None,
};
Some(ty)
}
    /// Handle special expressions like [global variables](https://docs.soliditylang.org/en/latest/cheatsheet.html#global-variables)
    ///
    /// See: <https://github.com/ethereum/solidity/blob/81268e336573721819e39fbb3fefbc9344ad176c/libsolidity/ast/Types.cpp#L4106>
    fn map_special(self) -> Self {
        // Only function calls, member accesses and custom identifiers can be "special";
        // anything else (builtins, tuples, ...) passes through untouched.
        if !matches!(self, Self::Function(_, _, _) | Self::Access(_, _) | Self::Custom(_)) {
            return self;
        }
        // Flatten the expression into its identifier path and capture the first
        // non-empty argument list encountered, if any (see `recurse`).
        let mut types = Vec::with_capacity(5);
        let mut args = None;
        self.recurse(&mut types, &mut args);
        let len = types.len();
        if len == 0 {
            return self;
        }
        // Type members, like array, bytes etc
        #[expect(clippy::single_match)]
        match &self {
            Self::Access(inner, access) => {
                if let Some(ty) = inner.as_ref().clone().try_as_ethabi(None) {
                    // Array / bytes members
                    let ty = Self::Builtin(ty);
                    match access.as_str() {
                        // `.length` on arrays / bytes is a uint256.
                        "length" if ty.is_dynamic() || ty.is_array() || ty.is_fixed_bytes() => {
                            return Self::Builtin(DynSolType::Uint(256));
                        }
                        // `.pop()` on a dynamic array yields the element type.
                        "pop" if ty.is_dynamic_array() => return ty,
                        _ => {}
                    }
                }
            }
            _ => {}
        }
        let this = {
            // `recurse` pushes members before recursing inward, so the LAST ident is the
            // root of the access chain (e.g. `block` in `block.timestamp`).
            let name = types.last().unwrap().as_str();
            match len {
                0 => unreachable!(),
                // Bare global builtin functions.
                1 => match name {
                    "gasleft" | "addmod" | "mulmod" => Some(DynSolType::Uint(256)),
                    "keccak256" | "sha256" | "blockhash" => Some(DynSolType::FixedBytes(32)),
                    "ripemd160" => Some(DynSolType::FixedBytes(20)),
                    "ecrecover" => Some(DynSolType::Address),
                    _ => None,
                },
                // `<root>.<member>` global accesses; `access` is the member name.
                2 => {
                    let access = types.first().unwrap().as_str();
                    match name {
                        "block" => match access {
                            "coinbase" => Some(DynSolType::Address),
                            "timestamp" | "difficulty" | "prevrandao" | "number" | "gaslimit"
                            | "chainid" | "basefee" | "blobbasefee" => Some(DynSolType::Uint(256)),
                            _ => None,
                        },
                        "msg" => match access {
                            "sender" => Some(DynSolType::Address),
                            "gas" => Some(DynSolType::Uint(256)),
                            "value" => Some(DynSolType::Uint(256)),
                            "data" => Some(DynSolType::Bytes),
                            "sig" => Some(DynSolType::FixedBytes(4)),
                            _ => None,
                        },
                        "tx" => match access {
                            "origin" => Some(DynSolType::Address),
                            "gasprice" => Some(DynSolType::Uint(256)),
                            _ => None,
                        },
                        "abi" => match access {
                            "decode" => {
                                // args = Some([Bytes(_), Tuple(args)])
                                // unwrapping is safe because this is first compiled by solc so
                                // it is guaranteed to be a valid call
                                let mut args = args.unwrap();
                                let last = args.pop().unwrap();
                                match last {
                                    Some(ty) => {
                                        // abi.decode yields the requested tuple type directly.
                                        return match ty {
                                            Self::Tuple(_) => ty,
                                            ty => Self::Tuple(vec![Some(ty)]),
                                        };
                                    }
                                    None => None,
                                }
                            }
                            // abi.encode / encodePacked / encodeWithSelector / ... all yield bytes.
                            s if s.starts_with("encode") => Some(DynSolType::Bytes),
                            _ => None,
                        },
                        "address" => match access {
                            "balance" => Some(DynSolType::Uint(256)),
                            "code" => Some(DynSolType::Bytes),
                            "codehash" => Some(DynSolType::FixedBytes(32)),
                            "send" => Some(DynSolType::Bool),
                            _ => None,
                        },
                        "type" => match access {
                            "name" => Some(DynSolType::String),
                            "creationCode" | "runtimeCode" => Some(DynSolType::Bytes),
                            "interfaceId" => Some(DynSolType::FixedBytes(4)),
                            "min" | "max" => Some(
                                // Either a builtin or an enum
                                (|| args?.pop()??.into_builtin())()
                                    .unwrap_or(DynSolType::Uint(256)),
                            ),
                            _ => None,
                        },
                        "string" => match access {
                            "concat" => Some(DynSolType::String),
                            _ => None,
                        },
                        "bytes" => match access {
                            "concat" => Some(DynSolType::Bytes),
                            _ => None,
                        },
                        _ => None,
                    }
                }
                _ => None,
            }
        };
        // No special mapping matched: `this`/`super` and plain idents/accesses become custom
        // types; function expressions are kept as-is.
        this.map(Self::Builtin).unwrap_or_else(|| match types.last().unwrap().as_str() {
            "this" | "super" => Self::Custom(types),
            _ => match self {
                Self::Custom(_) | Self::Access(_, _) => Self::Custom(types),
                Self::Function(_, _, _) => self,
                _ => unreachable!(),
            },
        })
    }
/// Recurses over itself, appending all the idents and function arguments in the order that they
/// are found
fn recurse(&self, types: &mut Vec<String>, args: &mut Option<Vec<Option<Self>>>) {
match self {
Self::Builtin(ty) => types.push(ty.to_string()),
Self::Custom(tys) => types.extend(tys.clone()),
Self::Access(expr, name) => {
types.push(name.clone());
expr.recurse(types, args);
}
Self::Function(fn_name, fn_args, _fn_ret) => {
if args.is_none() && !fn_args.is_empty() {
*args = Some(fn_args.clone());
}
fn_name.recurse(types, args);
}
_ => {}
}
}
/// Infers a custom type's true type by recursing up the parse tree
///
/// ### Takes
/// - A reference to the [IntermediateOutput]
/// - An array of custom types generated by the `MemberAccess` arm of [Self::from_expression]
/// - An optional contract name. This should always be `None` when this function is first
/// called.
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/opts.rs | crates/chisel/src/opts.rs | use clap::{Parser, Subcommand};
use foundry_cli::opts::{BuildOpts, EvmArgs, GlobalArgs};
use foundry_common::version::{LONG_VERSION, SHORT_VERSION};
use std::path::PathBuf;
foundry_config::impl_figment_convert!(Chisel, build, evm);
/// Fast, utilitarian, and verbose Solidity REPL.
#[derive(Debug, Parser)]
#[command(name = "chisel", version = SHORT_VERSION, long_version = LONG_VERSION)]
pub struct Chisel {
/// Include the global arguments.
#[command(flatten)]
pub global: GlobalArgs,
#[command(subcommand)]
pub cmd: Option<ChiselSubcommand>,
/// Path to a directory containing Solidity files to import, or path to a single Solidity file.
///
/// These files will be evaluated before the top-level of the
/// REPL, therefore functioning as a prelude
#[arg(long, help_heading = "REPL options")]
pub prelude: Option<PathBuf>,
/// Disable the default `Vm` import.
#[arg(long, help_heading = "REPL options", long_help = format!(
"Disable the default `Vm` import.\n\n\
The import is disabled by default if the Solc version is less than {}.",
crate::source::MIN_VM_VERSION
))]
pub no_vm: bool,
/// Enable viaIR with minimum optimization
///
/// This can fix most of the "stack too deep" errors while resulting a
/// relatively accurate source map.
#[arg(long, help_heading = "REPL options")]
pub ir_minimum: bool,
#[command(flatten)]
pub build: BuildOpts,
#[command(flatten)]
pub evm: EvmArgs,
}
/// Chisel binary subcommands
#[derive(Debug, Subcommand)]
pub enum ChiselSubcommand {
/// List all cached sessions.
List,
/// Load a cached session.
Load {
/// The ID of the session to load.
id: String,
},
/// View the source of a cached session.
View {
/// The ID of the session to load.
id: String,
},
/// Clear all cached chisel sessions from the cache directory.
ClearCache,
/// Simple evaluation of a command without entering the REPL.
Eval {
/// The command to be evaluated.
command: String,
},
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/src/cmd.rs | crates/chisel/src/cmd.rs | use crate::prelude::CHISEL_CHAR;
use alloy_primitives::Address;
use clap::{CommandFactory, Parser};
use itertools::Itertools;
use yansi::Paint;
/// Chisel REPL commands.
#[derive(Debug, Parser)]
#[command(disable_help_flag = true, disable_help_subcommand = true)]
pub enum ChiselCommand {
/// Display all commands.
#[command(visible_alias = "h", next_help_heading = "General")]
Help,
/// Quit the REPL.
#[command(visible_alias = "q")]
Quit,
/// Executes a shell command.
#[command(visible_alias = "e")]
Exec {
/// Command to execute.
command: String,
/// Command arguments.
#[arg(trailing_var_arg = true)]
args: Vec<String>,
},
/// Clear the current session source.
#[command(visible_alias = "c", next_help_heading = "Session")]
Clear,
/// Print the generated source contract.
#[command(visible_alias = "so")]
Source,
/// Save the current session to the cache.
#[command(visible_alias = "s")]
Save {
/// Optional session ID.
id: Option<String>,
},
/// Load a previous session from cache.
/// WARNING: This will overwrite the current session (though the current session will be
/// optimistically cached).
#[command(visible_alias = "l")]
Load {
/// Session ID to load.
id: String,
},
/// List all cached sessions.
#[command(name = "list", visible_alias = "ls")]
ListSessions,
/// Clear the cache of all stored sessions.
#[command(name = "clearcache", visible_alias = "cc")]
ClearCache,
/// Export the current REPL session source to a Script file.
#[command(visible_alias = "ex")]
Export,
/// Fetch an interface of a verified contract on Etherscan.
#[command(visible_alias = "fe")]
Fetch {
/// Contract address.
addr: Address,
/// Interface name.
name: String,
},
/// Open the current session in an editor.
Edit,
/// Fork an RPC in the current session.
#[command(visible_alias = "f", next_help_heading = "Environment")]
Fork {
/// Fork URL, environment variable, or RPC endpoints alias (empty to return to local
/// network).
url: Option<String>,
},
/// Enable / disable traces for the current session.
#[command(visible_alias = "t")]
Traces,
/// Set calldata (`msg.data`) for the current session (appended after function selector). Clears
/// it if no argument provided.
#[command(visible_alias = "cd")]
Calldata {
/// Calldata (empty to clear).
data: Option<String>,
},
/// Dump the raw memory.
#[command(name = "memdump", visible_alias = "md", next_help_heading = "Debug")]
MemDump,
/// Dump the raw stack.
#[command(name = "stackdump", visible_alias = "sd")]
StackDump,
/// Display the raw value of a variable's stack allocation. For variables that are > 32 bytes in
/// length, this will display their memory pointer.
#[command(name = "rawstack", visible_alias = "rs")]
RawStack {
/// Variable name.
var: String,
},
}
impl ChiselCommand {
    /// Parses a REPL input line (without the leading `!`) into a [`ChiselCommand`].
    ///
    /// The input is split on whitespace and prefixed with a synthetic binary name so it
    /// can be fed to clap. On failure, only the clap error kind is reported, pointing
    /// the user to `!help` for details.
    pub fn parse(input: &str) -> eyre::Result<Self> {
        let args = input.split_whitespace();
        // clap expects argv[0] to be the binary name.
        let args = std::iter::once("chisel").chain(args);
        Self::try_parse_from(args)
            .map_err(|e| eyre::eyre!("{}; for more information, see `!help`", e.kind()))
    }
    /// Renders the colored `!help` text, grouping subcommands by their
    /// `next_help_heading` category in declaration order.
    pub fn format_help() -> String {
        let cmd = Self::command();
        let mut categories = Vec::new();
        let mut cat = None;
        // A new help heading opens a new category; commands without their own heading
        // join the most recently opened one.
        for sub in cmd.get_subcommands() {
            if let Some(cat_) = sub.get_next_help_heading()
                && Some(cat_) != cat
            {
                cat = Some(cat_);
                categories.push((cat_, vec![]));
            }
            categories.last_mut().unwrap().1.push(sub);
        }
        // Per command: `!name | !alias… [usage] - about`.
        format!(
            "{}\n{}",
            format!("{CHISEL_CHAR} Chisel help\n=============").cyan(),
            categories
                .iter()
                .map(|(cat, cat_cmds)| {
                    format!(
                        "{}\n{}\n",
                        cat.magenta(),
                        cat_cmds
                            .iter()
                            .map(|&cmd| format!(
                                "\t{}{} - {}",
                                std::iter::once(cmd.get_name())
                                    .chain(cmd.get_visible_aliases())
                                    .map(|s| format!("!{}", s.green()))
                                    .format(" | "),
                                {
                                    let usage = get_usage(cmd);
                                    if usage.is_empty() {
                                        String::new()
                                    } else {
                                        format!(" {usage}")
                                    }
                                }
                                .green(),
                                cmd.get_about().expect("command is missing about"),
                            ))
                            .format("\n")
                    )
                })
                .format("\n")
        )
    }
}
/// Extracts the argument portion of a command's rendered usage string.
///
/// Clap renders usage as e.g. `Usage: chisel fetch <ADDR> <NAME>`; everything before the
/// first `[` or `<` is boilerplate, so only the tail starting at that character is kept.
/// Returns an empty string for commands that take no arguments.
fn get_usage(cmd: &clap::Command) -> String {
    let rendered = cmd.clone().render_usage().to_string();
    match rendered.find(['[', '<']) {
        Some(start) => rendered[start..].to_string(),
        None => String::new(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: rendering the help text must not panic (e.g. on a missing `about`).
    #[test]
    fn print_help() {
        let _ = sh_eprintln!("{}", ChiselCommand::format_help());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/tests/it/main.rs | crates/chisel/tests/it/main.rs | #[cfg(unix)]
mod repl;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/tests/it/repl/session.rs | crates/chisel/tests/it/repl/session.rs | use foundry_compilers::PathStyle;
use foundry_test_utils::TestProject;
use rexpect::{reader::Options, session::PtySession, spawn_with_options};
const TIMEOUT_SECS: u64 = 3;
const PROMPT: &str = "➜ ";
/// Testing session for Chisel.
pub struct ChiselSession {
    /// Handle to the spawned chisel process' pseudo-terminal.
    session: Box<PtySession>,
    /// The temporary test project chisel runs in; kept alive for the session's lifetime.
    project: Box<TestProject>,
    /// Whether this session runs the interactive REPL (no subcommand given).
    is_repl: bool,
}
static SUBCOMMANDS: &[&str] = &["list", "load", "view", "clear-cache", "eval", "help"];
/// Returns true when the given CLI arguments start an interactive REPL session,
/// i.e. when the argument list is empty or contains no known subcommand.
fn is_repl(args: &[String]) -> bool {
    if args.is_empty() {
        return true;
    }
    !args.iter().any(|arg| SUBCOMMANDS.contains(&arg.as_str()))
}
#[allow(dead_code)]
impl ChiselSession {
    /// Spawns a chisel process in a fresh test project and returns the session.
    ///
    /// - `name`: name of the temporary test project.
    /// - `flags`: whitespace-separated CLI flags/arguments passed to the binary.
    /// - `init`: when true, initializes the project and its default contracts.
    pub fn new(name: &str, flags: &str, init: bool) -> Self {
        let project = foundry_test_utils::TestProject::new(name, PathStyle::Dapptools);
        if init {
            foundry_test_utils::util::initialize(project.root());
            project.initialize_default_contracts();
        }
        let bin = env!("CARGO_BIN_EXE_chisel");
        let mut command = std::process::Command::new(bin);
        // TODO: TTY works but logs become unreadable.
        command.current_dir(project.root());
        // Disable colors and terminal features so expected strings match raw output.
        command.env("NO_COLOR", "1");
        command.env("TERM", "dumb");
        command.env("ETHERSCAN_API_KEY", foundry_test_utils::rpc::next_etherscan_api_key());
        if !flags.is_empty() {
            command.args(flags.split_whitespace());
        }
        // Capture argv to detect (below) whether a subcommand was given.
        let args = command.get_args().map(|s| s.to_str().unwrap().to_string()).collect::<Vec<_>>();
        let session = spawn_with_options(
            command,
            Options {
                timeout_ms: Some(TIMEOUT_SECS * 1000),
                strip_ansi_escape_codes: false,
                encoding: rexpect::Encoding::UTF8,
            },
        )
        .unwrap();
        let is_repl = is_repl(&args);
        let mut session = Self { session: Box::new(session), project: Box::new(project), is_repl };
        // Expect initial prompt only if we're in the REPL.
        if session.is_repl() {
            session.expect("Welcome to Chisel!");
        }
        session
    }
    /// Returns a reference to the underlying test project.
    pub fn project(&self) -> &TestProject {
        &self.project
    }
    /// Returns whether this session runs the interactive REPL (as opposed to a subcommand).
    pub fn is_repl(&self) -> bool {
        self.is_repl
    }
    /// Send a line to the REPL and expects the prompt to appear.
    #[track_caller]
    pub fn sendln(&mut self, line: &str) {
        self.sendln_raw(line);
        if self.is_repl() {
            self.expect_prompt();
        }
    }
    /// Send a line to the REPL without expecting the prompt to appear.
    ///
    /// You might want to call `expect_prompt` after this.
    // `#[track_caller]` makes panics point at the test call site, not this helper.
    #[track_caller]
    pub fn sendln_raw(&mut self, line: &str) {
        match self.session.send_line(line) {
            Ok(_) => (),
            Err(e) => {
                panic!("failed to send line {line:?}: {e}")
            }
        }
    }
    /// Expect the needle to appear.
    #[track_caller]
    pub fn expect(&mut self, needle: &str) {
        match self.session.exp_string(needle) {
            Ok(_) => (),
            Err(e) => {
                panic!("failed to expect {needle:?}: {e}")
            }
        }
    }
    /// Expect the prompt to appear.
    #[track_caller]
    pub fn expect_prompt(&mut self) {
        self.expect(PROMPT);
    }
    /// Expect the prompt to appear `n` times.
    #[track_caller]
    pub fn expect_prompts(&mut self, n: usize) {
        for _ in 0..n {
            self.expect_prompt();
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/tests/it/repl/mod.rs | crates/chisel/tests/it/repl/mod.rs | mod session;
use session::ChiselSession;
/// Declares a chisel REPL integration test.
///
/// Arms (each shorter form forwards to the full one, filling in defaults):
/// - `(name, |cmd| test)`: no extra CLI flags, no project initialization.
/// - `(name, flags, |cmd| test)`: custom CLI flags, no project initialization.
/// - `(name, flags, init = bool, |cmd| test)`: full form; `init` controls whether the
///   test project is initialized before spawning the session.
macro_rules! repl_test {
    ($name:ident, | $cmd:ident | $test:expr) => {
        repl_test!($name, "", |$cmd| $test);
    };
    ($name:ident, $flags:expr, | $cmd:ident | $test:expr) => {
        repl_test!($name, $flags, init = false, |$cmd| $test);
    };
    ($name:ident, $flags:expr,init = $init:expr, | $cmd:ident | $test:expr) => {
        #[test]
        #[allow(unused_mut)]
        fn $name() {
            // The session name doubles as the temp project name.
            let mut $cmd = ChiselSession::new(stringify!($name), $flags, $init);
            $test;
            return (); // Fix "go to definition" due to `tokio::test`.
        }
    };
}
repl_test!(repl_help, |repl| {
repl.sendln_raw("!h");
repl.expect("Chisel help");
repl.expect_prompt();
});
// Test abi encode/decode.
repl_test!(abi_encode_decode, |repl| {
repl.sendln("bytes memory encoded = abi.encode(42, \"hello\")");
repl.sendln("(uint num, string memory str) = abi.decode(encoded, (uint, string))");
repl.sendln("num");
repl.expect("42");
repl.sendln("str");
repl.expect("hello");
});
// Test 0x prefixed strings.
repl_test!(hex_string_interpretation, |repl| {
repl.sendln("string memory s = \"0x1234\"");
repl.sendln("s");
// Should be treated as string, not hex literal.
repl.expect("0x1234");
});
// Test cheatcodes availability.
repl_test!(cheatcodes_available, "", init = true, |repl| {
repl.sendln("address alice = address(0x1)");
repl.sendln("alice.balance");
repl.expect("Decimal: 0");
repl.sendln("vm.deal();");
repl.expect("Wrong argument count for function call");
repl.sendln("vm.deal(alice, 1 ether);");
repl.sendln("alice.balance");
repl.expect("Decimal: 1000000000000000000");
});
// Test empty inputs.
repl_test!(empty_input, |repl| {
repl.sendln(" \n \n\n \t \t \n \n\t\t\t\t \n \n");
});
// Issue #4130: Test type(intN).min correctness.
repl_test!(int_min_values, |repl| {
repl.sendln("type(int8).min");
repl.expect("-128");
repl.sendln("type(int256).min");
repl.expect("-57896044618658097711785492504343953926634992332820282019728792003956564819968");
});
// Issue #4393: Test edit command with traces.
// TODO: test `!edit`
// repl_test!(edit_with_traces, |repl| {
// repl.sendln("!traces");
// repl.sendln("uint x = 42");
// repl.sendln("!edit");
// // Should open editor without errors.
// repl.expect("Running");
// });
// Test tuple support.
repl_test!(tuples, |repl| {
repl.sendln("(uint a, uint b) = (1, 2)");
repl.sendln("a");
repl.expect("Decimal: 1");
repl.sendln("b");
repl.expect("Decimal: 2");
});
// Issue #4467: Test import.
repl_test!(import, "", init = true, |repl| {
repl.sendln("import {Counter} from \"src/Counter.sol\"");
repl.sendln("Counter c = new Counter()");
// TODO: pre-existing inspection failure.
// repl.sendln("c.number()");
repl.sendln("uint x = c.number();\nx");
repl.expect("Decimal: 0");
repl.sendln("c.increment();");
// repl.sendln("c.number()");
repl.sendln("x = c.number();\nx");
repl.expect("Decimal: 1");
});
// Issue #4617: Test code after assembly return.
repl_test!(assembly_return, |repl| {
repl.sendln("uint x = 1;");
repl.sendln("assembly { mstore(0x0, 0x1337) return(0x0, 0x20) }");
repl.sendln("x = 2;");
repl.sendln("!md");
// Should work without errors.
repl.expect("[0x00:0x20]: 0x0000000000000000000000000000000000000000000000000000000000001337");
});
// Issue #4652: Test commands with trailing whitespace.
repl_test!(trailing_whitespace, |repl| {
repl.sendln("uint x = 42 ");
repl.sendln("x");
repl.expect("Decimal: 42");
});
// Issue #4652: Test that solc flags are respected.
repl_test!(solc_flags, "--use 0.8.23", |repl| {
repl.sendln("pragma solidity 0.8.24;");
repl.expect("invalid solc version");
});
// Issue #4915: `chisel eval`
repl_test!(eval_subcommand, "eval type(uint8).max", |repl| {
repl.expect("Decimal: 255");
});
// Issue #4938: Test memory/stack dumps with assembly.
repl_test!(assembly_memory_dump, |repl| {
let input = r#"
uint256 value = 12345;
string memory str;
assembly {
str := add(mload(0x40), 0x80)
mstore(0x40, add(str, 0x20))
mstore(str, 0)
let end := str
}
"#;
repl.sendln_raw(input.trim());
repl.expect_prompts(3);
repl.sendln("value");
repl.expect("Decimal: 12345");
repl.sendln("!md");
repl.expect("[0x00:0x20]");
});
// Issue #5051, #8978: Test EVM version normalization.
repl_test!(evm_version_normalization, "--use 0.7.6 --evm-version london", |repl| {
repl.sendln("uint x;\nx");
repl.expect("Decimal: 0");
});
// Issue #5481: Test function return values are displayed.
repl_test!(function_return_display, |repl| {
repl.sendln("function add(uint a, uint b) public pure returns (uint) { return a + b; }");
repl.sendln("add(2, 3)");
repl.expect("Decimal: 5");
});
// Issue #5737: Test bytesN return types.
repl_test!(bytes_length_type, |repl| {
repl.sendln("bytes10 b = bytes10(0)");
repl.sendln("b.length");
repl.expect("Decimal: 10");
});
// Issue #5737: Test bytesN indexing return type.
repl_test!(bytes_index_type, |repl| {
repl.sendln("bytes32 b = bytes32(uint256(0x0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20))");
repl.sendln("b[3]");
repl.expect("Data: 0x0400000000000000000000000000000000000000000000000000000000000000");
});
// Issue #6618: Test fetching interface with structs.
repl_test!(fetch_interface_with_structs, |repl| {
repl.sendln_raw("!fe 0x5ff137d4b0fdcd49dca30c7cf57e578a026d2789 IEntryPoint");
repl.expect(
"Added 0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789's interface to source as `IEntryPoint`",
);
repl.expect_prompt();
repl.sendln("uint256 x = 1;\nx");
repl.expect("Decimal: 1");
});
// Issue #7035: Test that hex strings aren't checksummed as addresses.
repl_test!(hex_string_no_checksum, |repl| {
repl.sendln("function test(string memory s) public pure returns (string memory) { return s; }");
repl.sendln("test(\"0xe5f3af50fe5d0bf402a3c6f55ccc47d4307922d4\")");
// Should return the exact string, not checksummed.
repl.expect("0xe5f3af50fe5d0bf402a3c6f55ccc47d4307922d4");
});
// Issue #7050: Test enum min/max operations.
repl_test!(enum_min_max, |repl| {
repl.sendln("enum Color { Red, Green, Blue }");
repl.sendln("type(Color).min");
repl.expect("Decimal: 0");
repl.sendln("type(Color).max");
repl.expect("Decimal: 2");
});
// Issue #9377: Test correct hex formatting for uint256.
repl_test!(uint256_hex_formatting, |repl| {
repl.sendln("uint256 x = 42");
// Full word hex should be 64 chars (256 bits).
repl.sendln("x");
repl.expect("0x000000000000000000000000000000000000000000000000000000000000002a");
});
// Issue #9377: Test that full words are printed correctly.
repl_test!(full_word_hex_formatting, |repl| {
repl.sendln(r#"keccak256(abi.encode(uint256(keccak256("AgoraStableSwapStorage.OracleStorage")) - 1)) & ~bytes32(uint256(0xff))"#);
repl.expect(
"Hex (full word): 0x0a6b316b47a0cd26c1b582ae3dcffbd175283c221c3cb3d1c614e3e47f62a700",
);
});
// Test that uint is printed properly with any size.
repl_test!(uint_formatting, |repl| {
for size in (8..=256).step_by(8) {
repl.sendln(&format!("type(uint{size}).max"));
repl.expect(&format!("Hex: 0x{}", "f".repeat(size / 4)));
repl.sendln(&format!("uint{size}(2)"));
repl.expect("Hex: 0x2");
}
});
// Test that int is printed properly with any size.
repl_test!(int_formatting, |repl| {
for size in (8..=256).step_by(8) {
let size_minus_1: usize = size / 4 - 1;
repl.sendln(&format!("type(int{size}).max"));
repl.expect(&format!("Hex: 0x7{}", "f".repeat(size_minus_1)));
repl.sendln(&format!("int{size}(2)"));
repl.expect("Hex: 0x2");
repl.sendln(&format!("type(int{size}).min"));
repl.expect(&format!("Hex: 0x8{}", "0".repeat(size_minus_1)));
repl.sendln(&format!("int{size}(-2)"));
repl.expect(&format!("Hex: 0x{}e", "f".repeat(size_minus_1)));
}
});
repl_test!(uninitialized_variables, |repl| {
repl.sendln("uint256 x;");
repl.sendln("address y;");
repl.sendln("assembly { y := not(x) }");
repl.sendln("x");
repl.expect("Hex: 0x0");
repl.sendln("y");
repl.expect("Data: 0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF");
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/chisel/bin/main.rs | crates/chisel/bin/main.rs | //! The `chisel` CLI: a fast, utilitarian, and verbose Solidity REPL.
use chisel::args::run;
#[global_allocator]
static ALLOC: foundry_cli::utils::Allocator = foundry_cli::utils::new_allocator();
/// Binary entry point: runs the chisel CLI and exits with status 1 on failure.
fn main() {
    match run() {
        Ok(_) => {}
        Err(err) => {
            // Best-effort error report; a failure writing to stderr is ignored.
            let _ = foundry_common::sh_err!("{err:?}");
            std::process::exit(1);
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/config.rs | crates/cheatcodes/src/config.rs | use super::Result;
use crate::Vm::Rpc;
use alloy_primitives::{U256, map::AddressHashMap};
use foundry_common::{ContractsByArtifact, fs::normalize_path};
use foundry_compilers::{ArtifactId, ProjectPathsConfig, utils::canonicalize};
use foundry_config::{
Config, FsPermissions, ResolvedRpcEndpoint, ResolvedRpcEndpoints, RpcEndpoint, RpcEndpointUrl,
cache::StorageCachingConfig, fs_permissions::FsAccessKind,
};
use foundry_evm_core::opts::EvmOpts;
use std::{
collections::HashMap,
path::{Path, PathBuf},
time::Duration,
};
/// Additional, configurable context the `Cheatcodes` inspector has access to
///
/// This is essentially a subset of various `Config` settings `Cheatcodes` needs to know.
#[derive(Clone, Debug)]
pub struct CheatsConfig {
/// Whether the FFI cheatcode is enabled.
pub ffi: bool,
/// Use the create 2 factory in all cases including tests and non-broadcasting scripts.
pub always_use_create_2_factory: bool,
/// Sets a timeout for vm.prompt cheatcodes
pub prompt_timeout: Duration,
/// RPC storage caching settings determines what chains and endpoints to cache
pub rpc_storage_caching: StorageCachingConfig,
/// Disables storage caching entirely.
pub no_storage_caching: bool,
/// All known endpoints and their aliases
pub rpc_endpoints: ResolvedRpcEndpoints,
/// Project's paths as configured
pub paths: ProjectPathsConfig,
/// Path to the directory that contains the bindings generated by `forge bind-json`.
pub bind_json_path: PathBuf,
/// Filesystem permissions for cheatcodes like `writeFile`, `readFile`
pub fs_permissions: FsPermissions,
/// Project root
pub root: PathBuf,
/// Absolute Path to broadcast dir i.e project_root/broadcast
pub broadcast: PathBuf,
/// Paths (directories) where file reading/writing is allowed
pub allowed_paths: Vec<PathBuf>,
/// How the evm was configured by the user
pub evm_opts: EvmOpts,
/// Address labels from config
pub labels: AddressHashMap<String>,
/// Artifacts which are guaranteed to be fresh (either recompiled or cached).
/// If Some, `vm.getDeployedCode` invocations are validated to be in scope of this list.
/// If None, no validation is performed.
pub available_artifacts: Option<ContractsByArtifact>,
/// Currently running artifact.
pub running_artifact: Option<ArtifactId>,
/// Whether to enable legacy (non-reverting) assertions.
pub assertions_revert: bool,
/// Optional seed for the RNG algorithm.
pub seed: Option<U256>,
/// Whether to allow `expectRevert` to work for internal calls.
pub internal_expect_revert: bool,
/// Mapping of chain aliases to chain data
pub chains: HashMap<String, ChainData>,
/// Mapping of chain IDs to their aliases
pub chain_id_to_alias: HashMap<u64, String>,
}
/// Chain data for getChain cheatcodes
#[derive(Clone, Debug)]
pub struct ChainData {
    /// Human-readable chain name.
    pub name: String,
    /// Numeric chain ID.
    pub chain_id: u64,
    /// Default RPC URL for this chain.
    pub default_rpc_url: String, // Store default RPC URL
}
impl CheatsConfig {
    /// Extracts the necessary settings from the Config
    pub fn new(
        config: &Config,
        evm_opts: EvmOpts,
        available_artifacts: Option<ContractsByArtifact>,
        running_artifact: Option<ArtifactId>,
    ) -> Self {
        // File access is rooted at the project root plus configured libs and allow_paths.
        let mut allowed_paths = vec![config.root.clone()];
        allowed_paths.extend(config.libs.iter().cloned());
        allowed_paths.extend(config.allow_paths.iter().cloned());
        let rpc_endpoints = config.rpc_endpoints.clone().resolved();
        trace!(?rpc_endpoints, "using resolved rpc endpoints");
        // If user explicitly disabled safety checks, do not set available_artifacts
        let available_artifacts =
            if config.unchecked_cheatcode_artifacts { None } else { available_artifacts };
        Self {
            ffi: evm_opts.ffi,
            always_use_create_2_factory: evm_opts.always_use_create_2_factory,
            prompt_timeout: Duration::from_secs(config.prompt_timeout),
            rpc_storage_caching: config.rpc_storage_caching.clone(),
            no_storage_caching: config.no_storage_caching,
            rpc_endpoints,
            paths: config.project_paths(),
            bind_json_path: config.bind_json.out.clone(),
            // Relative fs permissions are resolved against the project root.
            fs_permissions: config.fs_permissions.clone().joined(config.root.as_ref()),
            root: config.root.clone(),
            broadcast: config.root.clone().join(&config.broadcast),
            allowed_paths,
            evm_opts,
            labels: config.labels.clone(),
            available_artifacts,
            running_artifact,
            assertions_revert: config.assertions_revert,
            seed: config.fuzz.seed,
            internal_expect_revert: config.allow_internal_expect_revert,
            chains: HashMap::new(),
            chain_id_to_alias: HashMap::new(),
        }
    }
    /// Returns a new `CheatsConfig` configured with the given `Config` and `EvmOpts`.
    pub fn clone_with(&self, config: &Config, evm_opts: EvmOpts) -> Self {
        Self::new(config, evm_opts, self.available_artifacts.clone(), self.running_artifact.clone())
    }
    /// Attempts to canonicalize (see [std::fs::canonicalize]) the path.
    ///
    /// Canonicalization fails for non-existing paths, in which case we just normalize the path.
    pub fn normalized_path(&self, path: impl AsRef<Path>) -> PathBuf {
        let path = self.root.join(path);
        canonicalize(&path).unwrap_or_else(|_| normalize_path(&path))
    }
    /// Returns true if the given path is allowed, if any path `allowed_paths` is an ancestor of the
    /// path
    ///
    /// We only allow paths that are inside allowed paths. To prevent path traversal
    /// ("../../etc/passwd") we canonicalize/normalize the path first. We always join with the
    /// configured root directory.
    pub fn is_path_allowed(&self, path: impl AsRef<Path>, kind: FsAccessKind) -> bool {
        self.is_normalized_path_allowed(&self.normalized_path(path), kind)
    }
    // Checks an already-normalized path against the configured fs permissions.
    fn is_normalized_path_allowed(&self, path: &Path, kind: FsAccessKind) -> bool {
        self.fs_permissions.is_path_allowed(path, kind)
    }
    /// Returns an error if no access is granted to access `path`, See also [Self::is_path_allowed]
    ///
    /// Returns the normalized version of `path`, see [`CheatsConfig::normalized_path`]
    pub fn ensure_path_allowed(
        &self,
        path: impl AsRef<Path>,
        kind: FsAccessKind,
    ) -> Result<PathBuf> {
        let path = path.as_ref();
        let normalized = self.normalized_path(path);
        ensure!(
            self.is_normalized_path_allowed(&normalized, kind),
            "the path {} is not allowed to be accessed for {kind} operations",
            // Report the path relative to the project root where possible.
            normalized.strip_prefix(&self.root).unwrap_or(path).display()
        );
        Ok(normalized)
    }
    /// Returns true if the given `path` is the project's foundry.toml file
    ///
    /// Note: this should be called with normalized path
    pub fn is_foundry_toml(&self, path: impl AsRef<Path>) -> bool {
        // Path methods that do not access the filesystem, such as [`Path::starts_with`], are
        // case-sensitive no matter the platform or filesystem. To make this check
        // case-insensitive we compare the lowercased `OsStr` representations; we cannot
        // compare file identity (e.g. via file descriptors) because the file may not exist.
        let foundry_toml = self.root.join(Config::FILE_NAME);
        Path::new(&foundry_toml.to_string_lossy().to_lowercase())
            .starts_with(Path::new(&path.as_ref().to_string_lossy().to_lowercase()))
    }
    /// Same as [`Self::is_foundry_toml`] but returns an `Err` if [`Self::is_foundry_toml`] returns
    /// true
    pub fn ensure_not_foundry_toml(&self, path: impl AsRef<Path>) -> Result<()> {
        ensure!(!self.is_foundry_toml(path), "access to `foundry.toml` is not allowed");
        Ok(())
    }
    /// Returns the RPC to use
    ///
    /// If `url_or_alias` is a known alias in the `ResolvedRpcEndpoints` then it returns the
    /// corresponding URL of that alias. otherwise this assumes `url_or_alias` is itself a URL
    /// if it starts with a `http` or `ws` scheme.
    ///
    /// If the url is a path to an existing file, it is also considered a valid RPC URL, IPC path.
    ///
    /// # Errors
    ///
    /// - Returns an error if `url_or_alias` is a known alias but references an unresolved env var.
    /// - Returns an error if `url_or_alias` is not an alias but does not start with a `http` or
    ///   `ws` `scheme` and is not a path to an existing file
    pub fn rpc_endpoint(&self, url_or_alias: &str) -> Result<ResolvedRpcEndpoint> {
        if let Some(endpoint) = self.rpc_endpoints.get(url_or_alias) {
            Ok(endpoint.clone().try_resolve())
        } else {
            // check if it's a URL or a path to an existing file to an ipc socket
            if url_or_alias.starts_with("http") ||
                url_or_alias.starts_with("ws") ||
                // check for existing ipc file
                Path::new(url_or_alias).exists()
            {
                let url = RpcEndpointUrl::Env(url_or_alias.to_string());
                Ok(RpcEndpoint::new(url).resolve())
            } else {
                Err(fmt_err!("invalid rpc url: {url_or_alias}"))
            }
        }
    }
    /// Returns all the RPC urls and their alias.
    pub fn rpc_urls(&self) -> Result<Vec<Rpc>> {
        let mut urls = Vec::with_capacity(self.rpc_endpoints.len());
        for alias in self.rpc_endpoints.keys() {
            // Propagates resolution errors (e.g. unresolved env vars) for any alias.
            let url = self.rpc_endpoint(alias)?.url()?;
            urls.push(Rpc { key: alias.clone(), url });
        }
        Ok(urls)
    }
}
impl Default for CheatsConfig {
    /// Conservative defaults: `ffi` disabled, empty endpoints/permissions/artifacts, reverting
    /// assertions, and a 2 minute interactive prompt timeout.
    fn default() -> Self {
        Self {
            ffi: false,
            always_use_create_2_factory: false,
            // Two minutes before an interactive prompt cheatcode gives up.
            prompt_timeout: Duration::from_secs(120),
            rpc_storage_caching: Default::default(),
            no_storage_caching: false,
            rpc_endpoints: Default::default(),
            paths: ProjectPathsConfig::builder().build_with_root("./"),
            fs_permissions: Default::default(),
            root: Default::default(),
            bind_json_path: PathBuf::default().join("utils").join("jsonBindings.sol"),
            broadcast: Default::default(),
            allowed_paths: vec![],
            evm_opts: Default::default(),
            labels: Default::default(),
            available_artifacts: Default::default(),
            running_artifact: Default::default(),
            assertions_revert: true,
            seed: None,
            internal_expect_revert: false,
            chains: HashMap::new(),
            chain_id_to_alias: HashMap::new(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::fs_permissions::PathPermission;

    /// Builds a `CheatsConfig` rooted at `root` with the given fs permissions and defaults
    /// for everything else.
    fn config(root: &str, fs_permissions: FsPermissions) -> CheatsConfig {
        CheatsConfig::new(
            &Config { root: root.into(), fs_permissions, ..Default::default() },
            Default::default(),
            None,
            None,
        )
    }

    #[test]
    fn test_allowed_paths() {
        let root = "/my/project/root/";
        let config = config(root, FsPermissions::new(vec![PathPermission::read_write("./")]));
        // Paths inside the root are allowed, including ones that normalize back into it.
        assert!(config.ensure_path_allowed("./t.txt", FsAccessKind::Read).is_ok());
        assert!(config.ensure_path_allowed("./t.txt", FsAccessKind::Write).is_ok());
        assert!(config.ensure_path_allowed("../root/t.txt", FsAccessKind::Read).is_ok());
        assert!(config.ensure_path_allowed("../root/t.txt", FsAccessKind::Write).is_ok());
        // Paths escaping the project root are rejected.
        assert!(config.ensure_path_allowed("../../root/t.txt", FsAccessKind::Read).is_err());
        assert!(config.ensure_path_allowed("../../root/t.txt", FsAccessKind::Write).is_err());
    }

    #[test]
    fn test_is_foundry_toml() {
        let root = "/my/project/root/";
        let config = config(root, FsPermissions::new(vec![PathPermission::read_write("./")]));
        // The project's own config file matches, case-insensitively.
        let f = format!("{root}foundry.toml");
        assert!(config.is_foundry_toml(f));
        let f = format!("{root}Foundry.toml");
        assert!(config.is_foundry_toml(f));
        // A similarly named file elsewhere in the tree does not.
        let f = format!("{root}lib/other/foundry.toml");
        assert!(!config.is_foundry_toml(f));
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/test.rs | crates/cheatcodes/src/test.rs | //! Implementations of [`Testing`](spec::Group::Testing) cheatcodes.
use crate::{Cheatcode, Cheatcodes, CheatsCtxt, Result, Vm::*};
use alloy_chains::Chain as AlloyChain;
use alloy_primitives::{Address, U256};
use alloy_sol_types::SolValue;
use foundry_common::version::SEMVER_VERSION;
use foundry_evm_core::constants::MAGIC_SKIP;
use std::str::FromStr;
pub(crate) mod assert;
pub(crate) mod assume;
pub(crate) mod expect;
pub(crate) mod revert_handlers;
// `breakpoint(string)`: registers breakpoint `char` for the current caller at the current pc.
impl Cheatcode for breakpoint_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { char } = self;
        breakpoint(ccx.state, &ccx.caller, char, true)
    }
}

// `breakpoint(string, bool)`: adds (`value == true`) or removes the breakpoint.
impl Cheatcode for breakpoint_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { char, value } = self;
        breakpoint(ccx.state, &ccx.caller, char, *value)
    }
}

// Returns the forge semver version string, ABI-encoded.
impl Cheatcode for getFoundryVersionCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        Ok(SEMVER_VERSION.abi_encode())
    }
}

// Resolves `rpcAlias` against the configured endpoints and returns its URL.
impl Cheatcode for rpcUrlCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { rpcAlias } = self;
        let url = state.config.rpc_endpoint(rpcAlias)?.url()?.abi_encode();
        Ok(url)
    }
}

// Returns all configured (alias, url) pairs.
impl Cheatcode for rpcUrlsCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.config.rpc_urls().map(|urls| urls.abi_encode())
    }
}

// Same data as `rpcUrls`, exposed to Solidity as an array of `Rpc` structs.
impl Cheatcode for rpcUrlStructsCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.config.rpc_urls().map(|urls| urls.abi_encode())
    }
}
// Blocks the executing thread for `duration` milliseconds (wall clock).
impl Cheatcode for sleepCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { duration } = self;
        // `saturating_to` clamps the U256 to u64::MAX instead of panicking on overflow.
        let sleep_duration = std::time::Duration::from_millis(duration.saturating_to());
        std::thread::sleep(sleep_duration);
        Ok(Default::default())
    }
}

// `skip(bool)`: delegates to the reasoned variant with an empty reason string.
impl Cheatcode for skip_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { skipTest } = *self;
        skip_1Call { skipTest, reason: String::new() }.apply_stateful(ccx)
    }
}

// `skip(bool, string)`: aborts the test with the magic skip marker when `skipTest` is true.
impl Cheatcode for skip_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { skipTest, reason } = self;
        if *skipTest {
            // Skip should not work if called deeper than at test level.
            // Since we're not returning the magic skip bytes, this will cause a test failure.
            ensure!(ccx.ecx.journaled_state.depth <= 1, "`skip` can only be used at test level");
            // The runner detects the MAGIC_SKIP prefix and marks the test as skipped.
            Err([MAGIC_SKIP, reason.as_bytes()].concat().into())
        } else {
            Ok(Default::default())
        }
    }
}
// `getChain(string)`: looks up chain info by alias (or a numeric id given as a string).
impl Cheatcode for getChain_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { chainAlias } = self;
        get_chain(state, chainAlias)
    }
}

// `getChain(uint256)`: looks up chain info by numeric chain id.
impl Cheatcode for getChain_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { chainId } = self;
        // Convert the chainId to a string and use the existing get_chain function
        let chain_id_str = chainId.to_string();
        get_chain(state, &chain_id_str)
    }
}
/// Adds or removes the given breakpoint to the state.
///
/// `s` must be a single alphabetic character; when `add` is true the breakpoint is registered
/// for `caller` at the current program counter, otherwise it is removed.
fn breakpoint(state: &mut Cheatcodes, caller: &Address, s: &str, add: bool) -> Result {
    let mut it = s.chars();
    // Accept exactly one character: a first char must exist and a second must not.
    let point = match (it.next(), it.next()) {
        (Some(c), None) => c,
        _ => bail!("breakpoints must be exactly one character"),
    };
    ensure!(point.is_alphabetic(), "only alphabetic characters are accepted as breakpoints");
    if add {
        state.breakpoints.insert(point, (*caller, state.pc));
    } else {
        state.breakpoints.remove(&point);
    }
    Ok(Default::default())
}
/// Gets chain information for the given alias.
///
/// Returns an ABI-encoded `Chain` struct (name, id, alias, rpc url); the rpc url is empty when
/// no endpoint for the chain is configured in `foundry.toml`.
fn get_chain(state: &mut Cheatcodes, chain_alias: &str) -> Result {
    // Parse the chain alias - works for both chain names and IDs
    let alloy_chain = AlloyChain::from_str(chain_alias)
        .map_err(|_| fmt_err!("invalid chain alias: {chain_alias}"))?;
    let chain_name = alloy_chain.to_string();
    let chain_id = alloy_chain.id();
    // Check if this is an unknown chain ID by comparing the name to the chain ID
    // When a numeric ID is passed for an unknown chain, alloy_chain.to_string() will return the ID
    // So if they match, it's likely an unknown chain ID
    if chain_name == chain_id.to_string() {
        return Err(fmt_err!("invalid chain alias: {chain_alias}"));
    }
    // Try to retrieve RPC URL and chain alias from user's config in foundry.toml.
    let (rpc_url, chain_alias) = if let Some(rpc_url) =
        state.config.rpc_endpoint(&chain_name).ok().and_then(|e| e.url().ok())
    {
        (rpc_url, chain_name.clone())
    } else {
        // No configured endpoint: keep the caller-supplied alias and an empty url.
        (String::new(), chain_alias.to_string())
    };
    let chain_struct = Chain {
        name: chain_name,
        chainId: U256::from(chain_id),
        chainAlias: chain_alias,
        rpcUrl: rpc_url,
    };
    Ok(chain_struct.abi_encode())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/evm.rs | crates/cheatcodes/src/evm.rs | //! Implementations of [`Evm`](spec::Group::Evm) cheatcodes.
use crate::{
BroadcastableTransaction, Cheatcode, Cheatcodes, CheatcodesExecutor, CheatsCtxt, Error, Result,
Vm::*,
inspector::{Ecx, RecordDebugStepInfo},
};
use alloy_consensus::TxEnvelope;
use alloy_genesis::{Genesis, GenesisAccount};
use alloy_network::eip2718::EIP4844_TX_TYPE_ID;
use alloy_primitives::{
Address, B256, U256, hex, keccak256,
map::{B256Map, HashMap},
};
use alloy_rlp::Decodable;
use alloy_sol_types::SolValue;
use foundry_common::{
fs::{read_json_file, write_json_file},
slot_identifier::{
ENCODING_BYTES, ENCODING_DYN_ARRAY, ENCODING_INPLACE, ENCODING_MAPPING, SlotIdentifier,
SlotInfo,
},
};
use foundry_compilers::artifacts::EvmVersion;
use foundry_evm_core::{
ContextExt,
backend::{DatabaseExt, RevertStateSnapshotAction},
constants::{CALLER, CHEATCODE_ADDRESS, HARDHAT_CONSOLE_ADDRESS, TEST_CONTRACT_ADDRESS},
utils::get_blob_base_fee_update_fraction_by_spec_id,
};
use foundry_evm_traces::TraceMode;
use itertools::Itertools;
use rand::Rng;
use revm::{
bytecode::Bytecode,
context::{Block, JournalTr},
primitives::{KECCAK_EMPTY, hardfork::SpecId},
state::Account,
};
use std::{
collections::{BTreeMap, HashSet, btree_map::Entry},
fmt::Display,
path::Path,
str::FromStr,
};
mod record_debug_step;
use foundry_common::fmt::format_token_raw;
use foundry_config::evm_spec_id;
use record_debug_step::{convert_call_trace_ctx_to_debug_step, flatten_call_trace};
use serde::Serialize;
mod fork;
pub(crate) mod mapping;
pub(crate) mod mock;
pub(crate) mod prank;
/// Records storage slots reads and writes.
///
/// Slots are appended in access order and may contain duplicates; a write additionally
/// records a read (see [`RecordAccess::record_write`]).
#[derive(Clone, Debug, Default)]
pub struct RecordAccess {
    /// Storage slots reads.
    pub reads: HashMap<Address, Vec<U256>>,
    /// Storage slots writes.
    pub writes: HashMap<Address, Vec<U256>>,
}
impl RecordAccess {
    /// Records a read access of `slot` on `target`.
    pub fn record_read(&mut self, target: Address, slot: U256) {
        let slots = self.reads.entry(target).or_default();
        slots.push(slot);
    }

    /// Records a write access of `slot` on `target`.
    ///
    /// Since `SSTORE` performs an implicit `SLOAD`, the slot is first recorded as a read
    /// and then as a write.
    pub fn record_write(&mut self, target: Address, slot: U256) {
        self.record_read(target, slot);
        let slots = self.writes.entry(target).or_default();
        slots.push(slot);
    }

    /// Clears the recorded reads and writes.
    pub fn clear(&mut self) {
        // Replacing the whole struct also drops the old allocations.
        *self = Self::default();
    }
}
/// Records the `snapshotGas*` cheatcodes.
#[derive(Clone, Debug)]
pub struct GasRecord {
    /// The group name of the gas snapshot.
    pub group: String,
    /// The name of the gas snapshot.
    pub name: String,
    /// The total gas used in the gas snapshot.
    pub gas_used: u64,
    /// Depth at which the gas snapshot was taken.
    pub depth: usize,
}

/// Records `deal` cheatcodes, i.e. balance overrides applied during the test.
#[derive(Clone, Debug)]
pub struct DealRecord {
    /// Target of the deal.
    pub address: Address,
    /// The balance of the address before deal was applied
    pub old_balance: U256,
    /// Balance after deal was applied
    pub new_balance: U256,
}
/// Storage slot diff info.
#[derive(Serialize, Default)]
#[serde(rename_all = "camelCase")]
struct SlotStateDiff {
    /// Initial storage value.
    previous_value: B256,
    /// Current storage value.
    new_value: B256,
    /// Storage layout metadata (variable name, type, offset).
    /// Only present when contract has storage layout output.
    /// This includes decoded values when available.
    #[serde(skip_serializing_if = "Option::is_none", flatten)]
    slot_info: Option<SlotInfo>,
}

/// Balance diff info.
#[derive(Serialize, Default)]
#[serde(rename_all = "camelCase")]
struct BalanceDiff {
    /// Initial balance value.
    previous_value: U256,
    /// Current balance value.
    new_value: U256,
}

/// Nonce diff info.
#[derive(Serialize, Default)]
#[serde(rename_all = "camelCase")]
struct NonceDiff {
    /// Initial nonce value.
    previous_value: u64,
    /// Current nonce value.
    new_value: u64,
}

/// Account state diff info.
#[derive(Serialize, Default)]
#[serde(rename_all = "camelCase")]
struct AccountStateDiffs {
    /// Address label, if any set.
    label: Option<String>,
    /// Contract identifier from artifact. e.g "src/Counter.sol:Counter"
    contract: Option<String>,
    /// Account balance changes.
    balance_diff: Option<BalanceDiff>,
    /// Account nonce changes.
    nonce_diff: Option<NonceDiff>,
    /// State changes, per slot.
    state_diff: BTreeMap<B256, SlotStateDiff>,
}
impl Display for AccountStateDiffs {
    /// Renders the recorded diffs in a human-readable, line-per-change format: optional label
    /// and contract identifier, then balance/nonce diffs (only when the value actually
    /// changed), then one line per changed storage slot.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Print changed account.
        if let Some(label) = &self.label {
            writeln!(f, "label: {label}")?;
        }
        if let Some(contract) = &self.contract {
            writeln!(f, "contract: {contract}")?;
        }
        // Print balance diff if changed.
        if let Some(balance_diff) = &self.balance_diff
            && balance_diff.previous_value != balance_diff.new_value
        {
            writeln!(
                f,
                "- balance diff: {} → {}",
                balance_diff.previous_value, balance_diff.new_value
            )?;
        }
        // Print nonce diff if changed.
        if let Some(nonce_diff) = &self.nonce_diff
            && nonce_diff.previous_value != nonce_diff.new_value
        {
            writeln!(f, "- nonce diff: {} → {}", nonce_diff.previous_value, nonce_diff.new_value)?;
        }
        // Print state diff if any. (Note: the original `!&self.state_diff.is_empty()` carried a
        // redundant borrow; `is_empty` already takes `&self`.)
        if !self.state_diff.is_empty() {
            writeln!(f, "- state diff:")?;
            for (slot, slot_changes) in &self.state_diff {
                match &slot_changes.slot_info {
                    Some(slot_info) => {
                        // Prefer decoded values when the layout allowed decoding; otherwise
                        // fall back to the raw hex values. Both branches share one format
                        // string so the layout cannot drift apart.
                        let (prev, new) = match &slot_info.decoded {
                            Some(decoded) => (
                                format_token_raw(&decoded.previous_value),
                                format_token_raw(&decoded.new_value),
                            ),
                            None => (
                                slot_changes.previous_value.to_string(),
                                slot_changes.new_value.to_string(),
                            ),
                        };
                        writeln!(
                            f,
                            "@ {slot} ({}, {}): {prev} → {new}",
                            slot_info.label, slot_info.slot_type.dyn_sol_type
                        )?;
                    }
                    None => {
                        // No slot info - show raw hex values
                        writeln!(
                            f,
                            "@ {slot}: {} → {}",
                            slot_changes.previous_value, slot_changes.new_value
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
}
// Derives the address corresponding to the given private key.
impl Cheatcode for addrCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { privateKey } = self;
        let wallet = super::crypto::parse_wallet(privateKey)?;
        Ok(wallet.address().abi_encode())
    }
}

// `getNonce(address)`: returns the account's current nonce.
impl Cheatcode for getNonce_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { account } = self;
        get_nonce(ccx, account)
    }
}

// `getNonce(Wallet)`: returns the nonce of the wallet's address.
impl Cheatcode for getNonce_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { wallet } = self;
        get_nonce(ccx, &wallet.addr)
    }
}
// Reads storage slot `slot` of `target`, lazily backfilling arbitrary storage when enabled.
impl Cheatcode for loadCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { target, slot } = *self;
        ccx.ensure_not_precompile(&target)?;
        let (db, journal, _) = ccx.ecx.as_db_env_and_journal();
        journal.load_account(db, target)?;
        let mut val = journal
            .sload(db, target, slot.into(), false)
            .map_err(|e| fmt_err!("failed to load storage slot: {:?}", e))?;
        // Only untouched (cold) zero-valued slots are candidates for arbitrary-storage backfill.
        if val.is_cold && val.data.is_zero() {
            if ccx.state.has_arbitrary_storage(&target) {
                // If storage slot is untouched and load from a target with arbitrary storage,
                // then set random value for current slot.
                let rand_value = ccx.state.rng().random();
                ccx.state.arbitrary_storage.as_mut().unwrap().save(
                    ccx.ecx,
                    target,
                    slot.into(),
                    rand_value,
                );
                val.data = rand_value;
            } else if ccx.state.is_arbitrary_storage_copy(&target) {
                // If storage slot is untouched and load from a target that copies storage from
                // a source address with arbitrary storage, then copy existing arbitrary value.
                // If no arbitrary value generated yet, then the random one is saved and set.
                let rand_value = ccx.state.rng().random();
                val.data = ccx.state.arbitrary_storage.as_mut().unwrap().copy(
                    ccx.ecx,
                    target,
                    slot.into(),
                    rand_value,
                );
            }
        }
        Ok(val.abi_encode())
    }
}
// Loads a genesis-allocs JSON file (or a full genesis file) into the current database.
impl Cheatcode for loadAllocsCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { pathToAllocsJson } = self;
        let path = Path::new(pathToAllocsJson);
        ensure!(path.exists(), "allocs file does not exist: {pathToAllocsJson}");
        // Let's first assume we're reading a file with only the allocs.
        let allocs: BTreeMap<Address, GenesisAccount> = match read_json_file(path) {
            Ok(allocs) => allocs,
            Err(_) => {
                // Let's try and read from a genesis file, and extract allocs.
                let genesis = read_json_file::<Genesis>(path)?;
                genesis.alloc
            }
        };
        // Then, load the allocs into the database.
        let (db, journal, _) = ccx.ecx.as_db_env_and_journal();
        db.load_allocs(&allocs, journal)
            .map(|()| Vec::default())
            .map_err(|e| fmt_err!("failed to load allocs: {e}"))
    }
}
// Copies `source`'s full account state (balance, nonce, code, storage) onto `target`.
impl Cheatcode for cloneAccountCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { source, target } = self;
        let (db, journal, _) = ccx.ecx.as_db_env_and_journal();
        let account = journal.load_account(db, *source)?;
        let genesis = &genesis_account(account.data);
        db.clone_account(genesis, target, journal)?;
        // Cloned account should persist in forked envs.
        ccx.ecx.journaled_state.database.add_persistent_account(*target);
        Ok(Default::default())
    }
}
// Serializes the current EVM state (minus system and empty accounts) to a JSON allocs file.
impl Cheatcode for dumpStateCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { pathToStateJson } = self;
        let path = Path::new(pathToStateJson);
        // Do not include system account or empty accounts in the dump.
        let skip = |key: &Address, val: &Account| {
            key == &CHEATCODE_ADDRESS
                || key == &CALLER
                || key == &HARDHAT_CONSOLE_ADDRESS
                || key == &TEST_CONTRACT_ADDRESS
                || key == &ccx.caller
                || key == &ccx.state.config.evm_opts.sender
                || val.is_empty()
        };
        // BTreeMap keeps the dump deterministic (sorted by address).
        let alloc = ccx
            .ecx
            .journaled_state
            .state()
            .iter_mut()
            .filter(|(key, val)| !skip(key, val))
            .map(|(key, val)| (key, genesis_account(val)))
            .collect::<BTreeMap<_, _>>();
        write_json_file(path, &alloc)?;
        Ok(Default::default())
    }
}
// Starts recording storage reads/writes, clearing any previous recording.
impl Cheatcode for recordCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.recording_accesses = true;
        state.accesses.clear();
        Ok(Default::default())
    }
}

// Stops recording storage accesses; already recorded data stays queryable via `accesses`.
impl Cheatcode for stopRecordCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        state.recording_accesses = false;
        Ok(Default::default())
    }
}

// Returns the recorded (reads, writes) slot lists for `target`.
impl Cheatcode for accessesCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { target } = *self;
        // `entry().or_default()` yields empty slices for never-accessed targets.
        let result = (
            state.accesses.reads.entry(target).or_default().as_slice(),
            state.accesses.writes.entry(target).or_default().as_slice(),
        );
        Ok(result.abi_encode_params())
    }
}

// Starts recording emitted logs.
impl Cheatcode for recordLogsCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.recorded_logs = Some(Default::default());
        Ok(Default::default())
    }
}

// Returns the recorded logs and resets the recording buffer to empty.
impl Cheatcode for getRecordedLogsCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        Ok(state.recorded_logs.replace(Default::default()).unwrap_or_default().abi_encode())
    }
}
// Pauses gas metering; gas consumed while paused is not charged to the test.
impl Cheatcode for pauseGasMeteringCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.gas_metering.paused = true;
        Ok(Default::default())
    }
}

// Resumes gas metering after a pause.
impl Cheatcode for resumeGasMeteringCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.gas_metering.resume();
        Ok(Default::default())
    }
}

// Resets the gas metering state entirely.
impl Cheatcode for resetGasMeteringCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        state.gas_metering.reset();
        Ok(Default::default())
    }
}

// Returns gas usage info of the most recent external call; errors if none was made yet.
impl Cheatcode for lastCallGasCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        let Some(last_call_gas) = &state.gas_metering.last_call_gas else {
            bail!("no external call was made yet");
        };
        Ok(last_call_gas.abi_encode())
    }
}
// Returns the currently configured chain id.
impl Cheatcode for getChainIdCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        Ok(U256::from(ccx.ecx.cfg.chain_id).abi_encode())
    }
}

// Sets the chain id; the value must fit in a u64.
impl Cheatcode for chainIdCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newChainId } = self;
        ensure!(*newChainId <= U256::from(u64::MAX), "chain ID must be less than 2^64");
        ccx.ecx.cfg.chain_id = newChainId.to();
        Ok(Default::default())
    }
}

// Sets `block.coinbase`.
impl Cheatcode for coinbaseCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newCoinbase } = self;
        ccx.ecx.block.beneficiary = *newCoinbase;
        Ok(Default::default())
    }
}

// Sets `block.difficulty`; only valid pre-Merge (use `prevrandao` afterwards).
impl Cheatcode for difficultyCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newDifficulty } = self;
        ensure!(
            ccx.ecx.cfg.spec < SpecId::MERGE,
            "`difficulty` is not supported after the Paris hard fork, use `prevrandao` instead; \
             see EIP-4399: https://eips.ethereum.org/EIPS/eip-4399"
        );
        ccx.ecx.block.difficulty = *newDifficulty;
        Ok(Default::default())
    }
}

// Sets `block.basefee`; the value must fit in a u64.
impl Cheatcode for feeCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newBasefee } = self;
        ensure!(*newBasefee <= U256::from(u64::MAX), "base fee must be less than 2^64");
        ccx.ecx.block.basefee = newBasefee.saturating_to();
        Ok(Default::default())
    }
}

// `prevrandao(bytes32)`: only valid post-Merge (use `difficulty` before).
impl Cheatcode for prevrandao_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newPrevrandao } = self;
        ensure!(
            ccx.ecx.cfg.spec >= SpecId::MERGE,
            "`prevrandao` is not supported before the Paris hard fork, use `difficulty` instead; \
             see EIP-4399: https://eips.ethereum.org/EIPS/eip-4399"
        );
        ccx.ecx.block.prevrandao = Some(*newPrevrandao);
        Ok(Default::default())
    }
}

// `prevrandao(uint256)`: same as above, converting the integer into a 32-byte value.
impl Cheatcode for prevrandao_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newPrevrandao } = self;
        ensure!(
            ccx.ecx.cfg.spec >= SpecId::MERGE,
            "`prevrandao` is not supported before the Paris hard fork, use `difficulty` instead; \
             see EIP-4399: https://eips.ethereum.org/EIPS/eip-4399"
        );
        ccx.ecx.block.prevrandao = Some((*newPrevrandao).into());
        Ok(Default::default())
    }
}
// Sets the transaction's blob hashes (Cancun+), forcing the EIP-4844 transaction type.
impl Cheatcode for blobhashesCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { hashes } = self;
        ensure!(
            ccx.ecx.cfg.spec >= SpecId::CANCUN,
            "`blobhashes` is not supported before the Cancun hard fork; \
             see EIP-4844: https://eips.ethereum.org/EIPS/eip-4844"
        );
        ccx.ecx.tx.blob_hashes.clone_from(hashes);
        // force this as 4844 txtype
        ccx.ecx.tx.tx_type = EIP4844_TX_TYPE_ID;
        Ok(Default::default())
    }
}

// Returns the transaction's blob hashes (Cancun+).
impl Cheatcode for getBlobhashesCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        ensure!(
            ccx.ecx.cfg.spec >= SpecId::CANCUN,
            "`getBlobhashes` is not supported before the Cancun hard fork; \
             see EIP-4844: https://eips.ethereum.org/EIPS/eip-4844"
        );
        Ok(ccx.ecx.tx.blob_hashes.clone().abi_encode())
    }
}

// Sets `block.number`.
impl Cheatcode for rollCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newHeight } = self;
        ccx.ecx.block.number = *newHeight;
        Ok(Default::default())
    }
}

// Returns `block.number`.
impl Cheatcode for getBlockNumberCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        Ok(ccx.ecx.block.number.abi_encode())
    }
}
// Sets `tx.gasprice`; the value must fit in a u64.
impl Cheatcode for txGasPriceCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newGasPrice } = self;
        ensure!(*newGasPrice <= U256::from(u64::MAX), "gas price must be less than 2^64");
        ccx.ecx.tx.gas_price = newGasPrice.saturating_to();
        Ok(Default::default())
    }
}

// Sets `block.timestamp`.
impl Cheatcode for warpCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newTimestamp } = self;
        ccx.ecx.block.timestamp = *newTimestamp;
        Ok(Default::default())
    }
}

// Returns `block.timestamp`.
impl Cheatcode for getBlockTimestampCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        Ok(ccx.ecx.block.timestamp.abi_encode())
    }
}

// Sets the blob base fee (Cancun+) by updating the block's blob excess gas and price.
impl Cheatcode for blobBaseFeeCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { newBlobBaseFee } = self;
        ensure!(
            ccx.ecx.cfg.spec >= SpecId::CANCUN,
            "`blobBaseFee` is not supported before the Cancun hard fork; \
             see EIP-4844: https://eips.ethereum.org/EIPS/eip-4844"
        );
        ccx.ecx.block.set_blob_excess_gas_and_price(
            (*newBlobBaseFee).to(),
            get_blob_base_fee_update_fraction_by_spec_id(ccx.ecx.cfg.spec),
        );
        Ok(Default::default())
    }
}

// NOTE(review): this returns the block's blob *excess gas* (0 when unset), not a derived
// base fee — confirm this matches the documented `getBlobBaseFee` semantics.
impl Cheatcode for getBlobBaseFeeCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        Ok(ccx.ecx.block.blob_excess_gas().unwrap_or(0).abi_encode())
    }
}
// Sets `account`'s balance and records the old/new values for later inspection.
impl Cheatcode for dealCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { account: address, newBalance: new_balance } = *self;
        let account = journaled_account(ccx.ecx, address)?;
        let old_balance = std::mem::replace(&mut account.info.balance, new_balance);
        let record = DealRecord { address, old_balance, new_balance };
        ccx.state.eth_deals.push(record);
        Ok(Default::default())
    }
}

// Replaces `target`'s runtime bytecode.
impl Cheatcode for etchCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { target, newRuntimeBytecode } = self;
        ccx.ensure_not_precompile(target)?;
        let (db, journal, _) = ccx.ecx.as_db_env_and_journal();
        journal.load_account(db, *target)?;
        let bytecode = Bytecode::new_raw_checked(newRuntimeBytecode.clone())
            .map_err(|e| fmt_err!("failed to create bytecode: {e}"))?;
        journal.set_code(*target, bytecode);
        Ok(Default::default())
    }
}

// Resets `account`'s nonce: 0 for EOAs, 1 for contracts.
impl Cheatcode for resetNonceCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { account } = self;
        let account = journaled_account(ccx.ecx, *account)?;
        // Per EIP-161, EOA nonces start at 0, but contract nonces
        // start at 1. Comparing by code_hash instead of code
        // to avoid hitting the case where account's code is None.
        let empty = account.info.code_hash == KECCAK_EMPTY;
        let nonce = if empty { 0 } else { 1 };
        account.info.nonce = nonce;
        debug!(target: "cheatcodes", nonce, "reset");
        Ok(Default::default())
    }
}

// Sets `account`'s nonce; refuses to decrease it below the current value.
impl Cheatcode for setNonceCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { account, newNonce } = *self;
        let account = journaled_account(ccx.ecx, account)?;
        // nonce must increment only
        let current = account.info.nonce;
        ensure!(
            newNonce >= current,
            "new nonce ({newNonce}) must be strictly equal to or higher than the \
             account's current nonce ({current})"
        );
        account.info.nonce = newNonce;
        Ok(Default::default())
    }
}

// Sets `account`'s nonce without the monotonicity check.
impl Cheatcode for setNonceUnsafeCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { account, newNonce } = *self;
        let account = journaled_account(ccx.ecx, account)?;
        account.info.nonce = newNonce;
        Ok(Default::default())
    }
}

// Writes `value` into storage slot `slot` of `target`.
impl Cheatcode for storeCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { target, slot, value } = *self;
        ccx.ensure_not_precompile(&target)?;
        ensure_loaded_account(ccx.ecx, target)?;
        let (db, journal, _) = ccx.ecx.as_db_env_and_journal();
        journal
            .sstore(db, target, slot.into(), value.into(), false)
            .map_err(|e| fmt_err!("failed to store storage slot: {:?}", e))?;
        Ok(Default::default())
    }
}
// Marks `target`'s account untouched and all its loaded storage slots cold.
impl Cheatcode for coolCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { target } = self;
        if let Some(account) = ccx.ecx.journaled_state.state.get_mut(target) {
            account.unmark_touch();
            account.storage.values_mut().for_each(|slot| slot.mark_cold());
        }
        Ok(Default::default())
    }
}

// Sets the EIP-2930 access list applied to subsequent transactions.
impl Cheatcode for accessListCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { access } = self;
        let access_list = access
            .iter()
            .map(|item| {
                let keys = item.storageKeys.iter().map(|key| B256::from(*key)).collect_vec();
                alloy_rpc_types::AccessListItem { address: item.target, storage_keys: keys }
            })
            .collect_vec();
        state.access_list = Some(alloy_rpc_types::AccessList::from(access_list));
        Ok(Default::default())
    }
}

// Clears a previously applied access list by overriding it with an empty one.
impl Cheatcode for noAccessListCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        // Set to empty option in order to override previous applied access list.
        if state.access_list.is_some() {
            state.access_list = Some(alloy_rpc_types::AccessList::default());
        }
        Ok(Default::default())
    }
}

// Marks a single storage slot of `target` as warm.
impl Cheatcode for warmSlotCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { target, slot } = *self;
        set_cold_slot(ccx, target, slot.into(), false);
        Ok(Default::default())
    }
}

// Marks a single storage slot of `target` as cold.
impl Cheatcode for coolSlotCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { target, slot } = *self;
        set_cold_slot(ccx, target, slot.into(), true);
        Ok(Default::default())
    }
}

// Returns the current caller mode and relevant sender addresses.
impl Cheatcode for readCallersCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        read_callers(ccx.state, &ccx.ecx.tx.caller, ccx.ecx.journaled_state.depth())
    }
}
// `snapshotValue(name, value)`: snapshots an arbitrary value under the default group.
impl Cheatcode for snapshotValue_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { name, value } = self;
        inner_value_snapshot(ccx, None, Some(name.clone()), value.to_string())
    }
}

// `snapshotValue(group, name, value)`: snapshots an arbitrary value under a named group.
impl Cheatcode for snapshotValue_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { group, name, value } = self;
        inner_value_snapshot(ccx, Some(group.clone()), Some(name.clone()), value.to_string())
    }
}

// Snapshots the gas used by the most recent external call; errors if none was made yet.
impl Cheatcode for snapshotGasLastCall_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { name } = self;
        let Some(last_call_gas) = &ccx.state.gas_metering.last_call_gas else {
            bail!("no external call was made yet");
        };
        inner_last_gas_snapshot(ccx, None, Some(name.clone()), last_call_gas.gasTotalUsed)
    }
}

// Grouped variant of `snapshotGasLastCall`.
impl Cheatcode for snapshotGasLastCall_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { name, group } = self;
        let Some(last_call_gas) = &ccx.state.gas_metering.last_call_gas else {
            bail!("no external call was made yet");
        };
        inner_last_gas_snapshot(
            ccx,
            Some(group.clone()),
            Some(name.clone()),
            last_call_gas.gasTotalUsed,
        )
    }
}

// Starts a named gas measurement region.
impl Cheatcode for startSnapshotGas_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { name } = self;
        inner_start_gas_snapshot(ccx, None, Some(name.clone()))
    }
}

// Starts a named gas measurement region under a named group.
impl Cheatcode for startSnapshotGas_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { group, name } = self;
        inner_start_gas_snapshot(ccx, Some(group.clone()), Some(name.clone()))
    }
}

// Stops the most recently started gas measurement region.
impl Cheatcode for stopSnapshotGas_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        inner_stop_gas_snapshot(ccx, None, None)
    }
}

// Stops the named gas measurement region.
impl Cheatcode for stopSnapshotGas_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { name } = self;
        inner_stop_gas_snapshot(ccx, None, Some(name.clone()))
    }
}

// Stops the named gas measurement region in the named group.
impl Cheatcode for stopSnapshotGas_2Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { group, name } = self;
        inner_stop_gas_snapshot(ccx, Some(group.clone()), Some(name.clone()))
    }
}
// Deprecated in favor of `snapshotStateCall`
impl Cheatcode for snapshotCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        inner_snapshot_state(ccx)
    }
}

// Takes a snapshot of the full EVM state and returns its id.
impl Cheatcode for snapshotStateCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        inner_snapshot_state(ccx)
    }
}

// Deprecated in favor of `revertToStateCall`
impl Cheatcode for revertToCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { snapshotId } = self;
        inner_revert_to_state(ccx, *snapshotId)
    }
}

// Restores the state to the given snapshot, keeping the snapshot for reuse.
impl Cheatcode for revertToStateCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { snapshotId } = self;
        inner_revert_to_state(ccx, *snapshotId)
    }
}

// Deprecated in favor of `revertToStateAndDeleteCall`
impl Cheatcode for revertToAndDeleteCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { snapshotId } = self;
        inner_revert_to_state_and_delete(ccx, *snapshotId)
    }
}

// Restores the state to the given snapshot and deletes the snapshot.
impl Cheatcode for revertToStateAndDeleteCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { snapshotId } = self;
        inner_revert_to_state_and_delete(ccx, *snapshotId)
    }
}

// Deprecated in favor of `deleteStateSnapshotCall`
impl Cheatcode for deleteSnapshotCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { snapshotId } = self;
        inner_delete_state_snapshot(ccx, *snapshotId)
    }
}

// Deletes the given state snapshot.
impl Cheatcode for deleteStateSnapshotCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { snapshotId } = self;
        inner_delete_state_snapshot(ccx, *snapshotId)
    }
}

// Deprecated in favor of `deleteStateSnapshotsCall`
impl Cheatcode for deleteSnapshotsCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        inner_delete_state_snapshots(ccx)
    }
}

// Deletes all state snapshots.
impl Cheatcode for deleteStateSnapshotsCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        inner_delete_state_snapshots(ccx)
    }
}
impl Cheatcode for startStateDiffRecordingCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self {} = self;
state.recorded_account_diffs_stack = Some(Default::default());
// Enable mapping recording to track mapping slot accesses
state.mapping_slots.get_or_insert_default();
Ok(Default::default())
}
}
impl Cheatcode for stopAndReturnStateDiffCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self {} = self;
get_state_diff(state)
}
}
impl Cheatcode for getStateDiffCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let mut diffs = String::new();
let state_diffs = get_recorded_state_diffs(ccx);
for (address, state_diffs) in state_diffs {
diffs.push_str(&format!("{address}\n"));
diffs.push_str(&format!("{state_diffs}\n"));
}
Ok(diffs.abi_encode())
}
}
impl Cheatcode for getStateDiffJsonCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let state_diffs = get_recorded_state_diffs(ccx);
Ok(serde_json::to_string(&state_diffs)?.abi_encode())
}
}
impl Cheatcode for getStorageSlotsCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { target, variableName } = self;
let storage_layout = get_contract_data(ccx, *target)
.and_then(|(_, data)| data.storage_layout.as_ref().map(|layout| layout.clone()))
.ok_or_else(|| fmt_err!("Storage layout not available for contract at {target}. Try compiling contracts with `--extra-output storageLayout`"))?;
trace!(storage = ?storage_layout.storage, "fetched storage");
let storage = storage_layout
.storage
.iter()
.find(|s| s.label.to_lowercase() == *variableName.to_lowercase())
.ok_or_else(|| fmt_err!("variable '{variableName}' not found in storage layout"))?;
let storage_type = storage_layout
.types
.get(&storage.storage_type)
.ok_or_else(|| fmt_err!("storage type not found for variable {variableName}"))?;
if storage_type.encoding == ENCODING_MAPPING || storage_type.encoding == ENCODING_DYN_ARRAY
{
return Err(fmt_err!(
"cannot get storage slots for variables with mapping or dynamic array types"
));
}
let slot = U256::from_str(&storage.slot).map_err(|_| {
fmt_err!("invalid slot {} format for variable {variableName}", storage.slot)
})?;
let mut slots = Vec::new();
// Always push the base slot
slots.push(slot);
if storage_type.encoding == ENCODING_INPLACE {
// For inplace encoding, calculate the number of slots needed
let num_bytes = U256::from_str(&storage_type.number_of_bytes).map_err(|_| {
fmt_err!(
"invalid number_of_bytes {} for variable {variableName}",
storage_type.number_of_bytes
)
})?;
let num_slots = num_bytes.div_ceil(U256::from(32));
// Start from 1 since base slot is already added
for i in 1..num_slots.to::<usize>() {
slots.push(slot + U256::from(i));
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/lib.rs | crates/cheatcodes/src/lib.rs | //! # foundry-cheatcodes
//!
//! Foundry cheatcodes implementations.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![allow(elided_lifetimes_in_paths)] // Cheats context uses 3 lifetimes
#[macro_use]
extern crate foundry_common;
#[macro_use]
pub extern crate foundry_cheatcodes_spec as spec;
#[macro_use]
extern crate tracing;
use alloy_evm::eth::EthEvmContext;
use alloy_primitives::Address;
use foundry_evm_core::backend::DatabaseExt;
use spec::Status;
pub use Vm::ForgeContext;
pub use config::CheatsConfig;
pub use error::{Error, ErrorKind, Result};
pub use inspector::{
BroadcastableTransaction, BroadcastableTransactions, Cheatcodes, CheatcodesExecutor,
};
pub use spec::{CheatcodeDef, Vm};
#[macro_use]
mod error;
mod base64;
mod config;
mod crypto;
mod version;
mod env;
pub use env::set_execution_context;
mod evm;
mod fs;
mod inspector;
pub use inspector::CheatcodeAnalysis;
mod json;
mod script;
pub use script::{Wallets, WalletsInner};
mod string;
mod test;
pub use test::expect::ExpectedCallTracker;
mod toml;
mod utils;
/// Cheatcode implementation.
pub(crate) trait Cheatcode: CheatcodeDef + DynCheatcode {
/// Applies this cheatcode to the given state.
///
/// Implement this function if you don't need access to the EVM data.
fn apply(&self, state: &mut Cheatcodes) -> Result {
let _ = state;
unimplemented!("{}", Self::CHEATCODE.func.id)
}
/// Applies this cheatcode to the given context.
///
/// Implement this function if you need access to the EVM data.
#[inline(always)]
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
self.apply(ccx.state)
}
/// Applies this cheatcode to the given context and executor.
///
/// Implement this function if you need access to the executor.
#[inline(always)]
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let _ = executor;
self.apply_stateful(ccx)
}
}
pub(crate) trait DynCheatcode: 'static + std::fmt::Debug {
fn cheatcode(&self) -> &'static spec::Cheatcode<'static>;
fn dyn_apply(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result;
}
impl<T: Cheatcode> DynCheatcode for T {
fn cheatcode(&self) -> &'static spec::Cheatcode<'static> {
Self::CHEATCODE
}
fn dyn_apply(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
self.apply_full(ccx, executor)
}
}
impl dyn DynCheatcode {
pub(crate) fn name(&self) -> &'static str {
self.cheatcode().func.signature.split('(').next().unwrap()
}
pub(crate) fn id(&self) -> &'static str {
self.cheatcode().func.id
}
pub(crate) fn signature(&self) -> &'static str {
self.cheatcode().func.signature
}
pub(crate) fn status(&self) -> &Status<'static> {
&self.cheatcode().status
}
}
/// The cheatcode context, used in `Cheatcode`.
pub struct CheatsCtxt<'cheats, 'evm, 'db, 'db2> {
/// The cheatcodes inspector state.
pub(crate) state: &'cheats mut Cheatcodes,
/// The EVM data.
pub(crate) ecx: &'evm mut EthEvmContext<&'db mut (dyn DatabaseExt + 'db2)>,
/// The original `msg.sender`.
pub(crate) caller: Address,
/// Gas limit of the current cheatcode call.
pub(crate) gas_limit: u64,
}
impl<'db, 'db2> std::ops::Deref for CheatsCtxt<'_, '_, 'db, 'db2> {
type Target = EthEvmContext<&'db mut (dyn DatabaseExt + 'db2)>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
self.ecx
}
}
impl std::ops::DerefMut for CheatsCtxt<'_, '_, '_, '_> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut *self.ecx
}
}
impl CheatsCtxt<'_, '_, '_, '_> {
pub(crate) fn ensure_not_precompile(&self, address: &Address) -> Result<()> {
if self.is_precompile(address) { Err(precompile_error(address)) } else { Ok(()) }
}
pub(crate) fn is_precompile(&self, address: &Address) -> bool {
self.ecx.journaled_state.warm_addresses.precompiles().contains(address)
}
}
#[cold]
fn precompile_error(address: &Address) -> Error {
fmt_err!("cannot use precompile {address} as an argument")
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/version.rs | crates/cheatcodes/src/version.rs | use crate::{Cheatcode, Cheatcodes, Result, Vm::*};
use alloy_sol_types::SolValue;
use foundry_common::version::SEMVER_VERSION;
use semver::Version;
use std::cmp::Ordering;
impl Cheatcode for foundryVersionCmpCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { version } = self;
foundry_version_cmp(version).map(|cmp| (cmp as i8).abi_encode())
}
}
impl Cheatcode for foundryVersionAtLeastCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { version } = self;
foundry_version_cmp(version).map(|cmp| cmp.is_ge().abi_encode())
}
}
fn foundry_version_cmp(version: &str) -> Result<Ordering> {
version_cmp(SEMVER_VERSION.split('-').next().unwrap(), version)
}
fn version_cmp(version_a: &str, version_b: &str) -> Result<Ordering> {
let version_a = parse_version(version_a)?;
let version_b = parse_version(version_b)?;
Ok(version_a.cmp(&version_b))
}
fn parse_version(version: &str) -> Result<Version> {
let version =
Version::parse(version).map_err(|e| fmt_err!("invalid version `{version}`: {e}"))?;
if !version.pre.is_empty() {
return Err(fmt_err!(
"invalid version `{version}`: pre-release versions are not supported"
));
}
if !version.build.is_empty() {
return Err(fmt_err!("invalid version `{version}`: build metadata is not supported"));
}
Ok(version)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/string.rs | crates/cheatcodes/src/string.rs | //! Implementations of [`String`](spec::Group::String) cheatcodes.
use crate::{Cheatcode, Cheatcodes, Result, Vm::*};
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_primitives::{U256, hex};
use alloy_sol_types::SolValue;
// address
impl Cheatcode for toString_0Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { value } = self;
Ok(value.to_string().abi_encode())
}
}
// bytes
impl Cheatcode for toString_1Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { value } = self;
Ok(value.to_string().abi_encode())
}
}
// bytes32
impl Cheatcode for toString_2Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { value } = self;
Ok(value.to_string().abi_encode())
}
}
// bool
impl Cheatcode for toString_3Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { value } = self;
Ok(value.to_string().abi_encode())
}
}
// uint256
impl Cheatcode for toString_4Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { value } = self;
Ok(value.to_string().abi_encode())
}
}
// int256
impl Cheatcode for toString_5Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { value } = self;
Ok(value.to_string().abi_encode())
}
}
impl Cheatcode for parseBytesCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { stringifiedValue } = self;
parse(stringifiedValue, &DynSolType::Bytes)
}
}
impl Cheatcode for parseAddressCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { stringifiedValue } = self;
parse(stringifiedValue, &DynSolType::Address)
}
}
impl Cheatcode for parseUintCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { stringifiedValue } = self;
parse(stringifiedValue, &DynSolType::Uint(256))
}
}
impl Cheatcode for parseIntCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { stringifiedValue } = self;
parse(stringifiedValue, &DynSolType::Int(256))
}
}
impl Cheatcode for parseBytes32Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { stringifiedValue } = self;
parse(stringifiedValue, &DynSolType::FixedBytes(32))
}
}
impl Cheatcode for parseBoolCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { stringifiedValue } = self;
parse(stringifiedValue, &DynSolType::Bool)
}
}
impl Cheatcode for toLowercaseCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { input } = self;
Ok(input.to_lowercase().abi_encode())
}
}
impl Cheatcode for toUppercaseCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { input } = self;
Ok(input.to_uppercase().abi_encode())
}
}
impl Cheatcode for trimCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { input } = self;
Ok(input.trim().abi_encode())
}
}
impl Cheatcode for replaceCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { input, from, to } = self;
Ok(input.replace(from, to).abi_encode())
}
}
impl Cheatcode for splitCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { input, delimiter } = self;
let parts: Vec<&str> = input.split(delimiter).collect();
Ok(parts.abi_encode())
}
}
impl Cheatcode for indexOfCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { input, key } = self;
Ok(input.find(key).map(U256::from).unwrap_or(U256::MAX).abi_encode())
}
}
impl Cheatcode for containsCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { subject, search } = self;
Ok(subject.contains(search).abi_encode())
}
}
pub(super) fn parse(s: &str, ty: &DynSolType) -> Result {
parse_value(s, ty).map(|v| v.abi_encode())
}
pub(super) fn parse_array<I, S>(values: I, ty: &DynSolType) -> Result
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
let mut values = values.into_iter();
match values.next() {
Some(first) if !first.as_ref().is_empty() => std::iter::once(first)
.chain(values)
.map(|s| parse_value(s.as_ref(), ty))
.collect::<Result<Vec<_>, _>>()
.map(|vec| DynSolValue::Array(vec).abi_encode()),
// return the empty encoded Bytes when values is empty or the first element is empty
_ => Ok("".abi_encode()),
}
}
pub(super) fn parse_value(s: &str, ty: &DynSolType) -> Result<DynSolValue> {
match ty.coerce_str(s) {
Ok(value) => Ok(value),
Err(e) => match parse_value_fallback(s, ty) {
Some(Ok(value)) => Ok(value),
Some(Err(e2)) => Err(fmt_err!("failed parsing {s:?} as type `{ty}`: {e2}")),
None => Err(fmt_err!("failed parsing {s:?} as type `{ty}`: {e}")),
},
}
}
// More lenient parsers than `coerce_str`.
fn parse_value_fallback(s: &str, ty: &DynSolType) -> Option<Result<DynSolValue, &'static str>> {
match ty {
DynSolType::Bool => {
let b = match s {
"1" => true,
"0" => false,
s if s.eq_ignore_ascii_case("true") => true,
s if s.eq_ignore_ascii_case("false") => false,
_ => return None,
};
return Some(Ok(DynSolValue::Bool(b)));
}
DynSolType::Int(_)
| DynSolType::Uint(_)
| DynSolType::FixedBytes(_)
| DynSolType::Bytes => {
if !s.starts_with("0x") && hex::check_raw(s) {
return Some(Err("missing hex prefix (\"0x\") for hex string"));
}
}
_ => {}
}
None
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/fs.rs | crates/cheatcodes/src/fs.rs | //! Implementations of [`Filesystem`](spec::Group::Filesystem) cheatcodes.
use super::string::parse;
use crate::{Cheatcode, Cheatcodes, CheatcodesExecutor, CheatsCtxt, Result, Vm::*};
use alloy_dyn_abi::DynSolType;
use alloy_json_abi::ContractObject;
use alloy_network::AnyTransactionReceipt;
use alloy_primitives::{Bytes, U256, hex, map::Entry};
use alloy_provider::network::ReceiptResponse;
use alloy_sol_types::SolValue;
use dialoguer::{Input, Password};
use forge_script_sequence::{BroadcastReader, TransactionWithMetadata};
use foundry_common::fs;
use foundry_config::fs_permissions::FsAccessKind;
use revm::{
context::{CreateScheme, JournalTr},
interpreter::CreateInputs,
};
use revm_inspectors::tracing::types::CallKind;
use semver::Version;
use std::{
io::{BufRead, BufReader},
path::{Path, PathBuf},
process::Command,
sync::mpsc,
thread,
time::{SystemTime, UNIX_EPOCH},
};
use walkdir::WalkDir;
impl Cheatcode for existsCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
Ok(path.exists().abi_encode())
}
}
impl Cheatcode for fsMetadataCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
let metadata = path.metadata()?;
// These fields not available on all platforms; default to 0
let [modified, accessed, created] =
[metadata.modified(), metadata.accessed(), metadata.created()].map(|time| {
time.unwrap_or(UNIX_EPOCH).duration_since(UNIX_EPOCH).unwrap_or_default().as_secs()
});
Ok(FsMetadata {
isDir: metadata.is_dir(),
isSymlink: metadata.is_symlink(),
length: U256::from(metadata.len()),
readOnly: metadata.permissions().readonly(),
modified: U256::from(modified),
accessed: U256::from(accessed),
created: U256::from(created),
}
.abi_encode())
}
}
impl Cheatcode for isDirCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
Ok(path.is_dir().abi_encode())
}
}
impl Cheatcode for isFileCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
Ok(path.is_file().abi_encode())
}
}
impl Cheatcode for projectRootCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self {} = self;
Ok(state.config.root.display().to_string().abi_encode())
}
}
impl Cheatcode for unixTimeCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self {} = self;
let difference = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map_err(|e| fmt_err!("failed getting Unix timestamp: {e}"))?;
Ok(difference.as_millis().abi_encode())
}
}
impl Cheatcode for closeFileCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
state.test_context.opened_read_files.remove(&path);
Ok(Default::default())
}
}
impl Cheatcode for copyFileCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { from, to } = self;
let from = state.config.ensure_path_allowed(from, FsAccessKind::Read)?;
let to = state.config.ensure_path_allowed(to, FsAccessKind::Write)?;
state.config.ensure_not_foundry_toml(&to)?;
let n = fs::copy(from, to)?;
Ok(n.abi_encode())
}
}
impl Cheatcode for createDirCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, recursive } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Write)?;
if *recursive { fs::create_dir_all(path) } else { fs::create_dir(path) }?;
Ok(Default::default())
}
}
impl Cheatcode for readDir_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
read_dir(state, path.as_ref(), 1, false)
}
}
impl Cheatcode for readDir_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, maxDepth } = self;
read_dir(state, path.as_ref(), *maxDepth, false)
}
}
impl Cheatcode for readDir_2Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, maxDepth, followLinks } = self;
read_dir(state, path.as_ref(), *maxDepth, *followLinks)
}
}
impl Cheatcode for readFileCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
Ok(fs::locked_read_to_string(path)?.abi_encode())
}
}
impl Cheatcode for readFileBinaryCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
Ok(fs::locked_read(path)?.abi_encode())
}
}
impl Cheatcode for readLineCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
// Get reader for previously opened file to continue reading OR initialize new reader
let reader = match state.test_context.opened_read_files.entry(path.clone()) {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => entry.insert(BufReader::new(fs::open(path)?)),
};
let mut line: String = String::new();
reader.read_line(&mut line)?;
// Remove trailing newline character, preserving others for cases where it may be important
if line.ends_with('\n') {
line.pop();
if line.ends_with('\r') {
line.pop();
}
}
Ok(line.abi_encode())
}
}
impl Cheatcode for readLinkCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { linkPath: path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
let target = fs::read_link(path)?;
Ok(target.display().to_string().abi_encode())
}
}
impl Cheatcode for removeDirCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, recursive } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Write)?;
if *recursive { fs::remove_dir_all(path) } else { fs::remove_dir(path) }?;
Ok(Default::default())
}
}
impl Cheatcode for removeFileCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Write)?;
state.config.ensure_not_foundry_toml(&path)?;
// also remove from the set if opened previously
state.test_context.opened_read_files.remove(&path);
if state.fs_commit {
fs::remove_file(&path)?;
}
Ok(Default::default())
}
}
impl Cheatcode for writeFileCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, data } = self;
write_file(state, path.as_ref(), data.as_bytes())
}
}
impl Cheatcode for writeFileBinaryCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, data } = self;
write_file(state, path.as_ref(), data)
}
}
impl Cheatcode for writeLineCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { path, data: line } = self;
let path = state.config.ensure_path_allowed(path, FsAccessKind::Write)?;
state.config.ensure_not_foundry_toml(&path)?;
if state.fs_commit {
fs::locked_write_line(path, line)?;
}
Ok(Default::default())
}
}
impl Cheatcode for getArtifactPathByCodeCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { code } = self;
let (artifact_id, _) = state
.config
.available_artifacts
.as_ref()
.and_then(|artifacts| artifacts.find_by_creation_code(code))
.ok_or_else(|| fmt_err!("no matching artifact found"))?;
Ok(artifact_id.path.to_string_lossy().abi_encode())
}
}
impl Cheatcode for getArtifactPathByDeployedCodeCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { deployedCode } = self;
let (artifact_id, _) = state
.config
.available_artifacts
.as_ref()
.and_then(|artifacts| artifacts.find_by_deployed_code(deployedCode))
.ok_or_else(|| fmt_err!("no matching artifact found"))?;
Ok(artifact_id.path.to_string_lossy().abi_encode())
}
}
impl Cheatcode for getCodeCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { artifactPath: path } = self;
Ok(get_artifact_code(state, path, false)?.abi_encode())
}
}
impl Cheatcode for getDeployedCodeCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { artifactPath: path } = self;
Ok(get_artifact_code(state, path, true)?.abi_encode())
}
}
impl Cheatcode for deployCode_0Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path } = self;
deploy_code(ccx, executor, path, None, None, None)
}
}
impl Cheatcode for deployCode_1Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, constructorArgs: args } = self;
deploy_code(ccx, executor, path, Some(args), None, None)
}
}
impl Cheatcode for deployCode_2Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, value } = self;
deploy_code(ccx, executor, path, None, Some(*value), None)
}
}
impl Cheatcode for deployCode_3Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, constructorArgs: args, value } = self;
deploy_code(ccx, executor, path, Some(args), Some(*value), None)
}
}
impl Cheatcode for deployCode_4Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, salt } = self;
deploy_code(ccx, executor, path, None, None, Some((*salt).into()))
}
}
impl Cheatcode for deployCode_5Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, constructorArgs: args, salt } = self;
deploy_code(ccx, executor, path, Some(args), None, Some((*salt).into()))
}
}
impl Cheatcode for deployCode_6Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, value, salt } = self;
deploy_code(ccx, executor, path, None, Some(*value), Some((*salt).into()))
}
}
impl Cheatcode for deployCode_7Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { artifactPath: path, constructorArgs: args, value, salt } = self;
deploy_code(ccx, executor, path, Some(args), Some(*value), Some((*salt).into()))
}
}
/// Helper function to deploy contract from artifact code.
/// Uses CREATE2 scheme if salt specified.
fn deploy_code(
ccx: &mut CheatsCtxt,
executor: &mut dyn CheatcodesExecutor,
path: &str,
constructor_args: Option<&Bytes>,
value: Option<U256>,
salt: Option<U256>,
) -> Result {
let mut bytecode = get_artifact_code(ccx.state, path, false)?.to_vec();
// If active broadcast then set flag to deploy from code.
if let Some(broadcast) = &mut ccx.state.broadcast {
broadcast.deploy_from_code = true;
}
if let Some(args) = constructor_args {
bytecode.extend_from_slice(args);
}
let scheme =
if let Some(salt) = salt { CreateScheme::Create2 { salt } } else { CreateScheme::Create };
// If prank active at current depth, then use it as caller for create input.
let caller = ccx
.state
.get_prank(ccx.ecx.journaled_state.depth())
.map_or(ccx.caller, |prank| prank.new_caller);
let outcome = executor.exec_create(
CreateInputs {
caller,
scheme,
value: value.unwrap_or(U256::ZERO),
init_code: bytecode.into(),
gas_limit: ccx.gas_limit,
},
ccx,
)?;
if !outcome.result.result.is_ok() {
return Err(crate::Error::from(outcome.result.output));
}
let address = outcome.address.ok_or_else(|| fmt_err!("contract creation failed"))?;
Ok(address.abi_encode())
}
/// Returns the bytecode from a JSON artifact file.
///
/// Can parse following input formats:
/// - `path/to/artifact.json`
/// - `path/to/contract.sol`
/// - `path/to/contract.sol:ContractName`
/// - `path/to/contract.sol:ContractName:0.8.23`
/// - `path/to/contract.sol:0.8.23`
/// - `ContractName`
/// - `ContractName:0.8.23`
///
/// This function is safe to use with contracts that have library dependencies.
/// `alloy_json_abi::ContractObject` validates bytecode during JSON parsing and will
/// reject artifacts with unlinked library placeholders.
fn get_artifact_code(state: &Cheatcodes, path: &str, deployed: bool) -> Result<Bytes> {
let path = if path.ends_with(".json") {
PathBuf::from(path)
} else {
let mut parts = path.split(':');
let mut file = None;
let mut contract_name = None;
let mut version = None;
let path_or_name = parts.next().unwrap();
if path_or_name.contains('.') {
file = Some(PathBuf::from(path_or_name));
if let Some(name_or_version) = parts.next() {
if name_or_version.contains('.') {
version = Some(name_or_version);
} else {
contract_name = Some(name_or_version);
version = parts.next();
}
}
} else {
contract_name = Some(path_or_name);
version = parts.next();
}
let version = if let Some(version) = version {
Some(Version::parse(version).map_err(|e| fmt_err!("failed parsing version: {e}"))?)
} else {
None
};
// Use available artifacts list if present
if let Some(artifacts) = &state.config.available_artifacts {
let filtered = artifacts
.iter()
.filter(|(id, _)| {
// name might be in the form of "Counter.0.8.23"
let id_name = id.name.split('.').next().unwrap();
if let Some(path) = &file
&& !id.source.ends_with(path)
{
return false;
}
if let Some(name) = contract_name
&& id_name != name
{
return false;
}
if let Some(ref version) = version
&& (id.version.minor != version.minor
|| id.version.major != version.major
|| id.version.patch != version.patch)
{
return false;
}
true
})
.collect::<Vec<_>>();
let artifact = match &filtered[..] {
[] => None,
[artifact] => Some(Ok(*artifact)),
filtered => {
let mut filtered = filtered.to_vec();
// If we know the current script/test contract solc version, try to filter by it
Some(
state
.config
.running_artifact
.as_ref()
.and_then(|running| {
// Firstly filter by version
filtered.retain(|(id, _)| id.version == running.version);
// Return artifact if only one matched
if filtered.len() == 1 {
return Some(filtered[0]);
}
// Try filtering by profile as well
filtered.retain(|(id, _)| id.profile == running.profile);
if filtered.len() == 1 { Some(filtered[0]) } else { None }
})
.ok_or_else(|| fmt_err!("multiple matching artifacts found")),
)
}
};
if let Some(artifact) = artifact {
let artifact = artifact?;
let maybe_bytecode = if deployed {
artifact.1.deployed_bytecode().cloned()
} else {
artifact.1.bytecode().cloned()
};
return maybe_bytecode.ok_or_else(|| {
fmt_err!("no bytecode for contract; is it abstract or unlinked?")
});
}
}
// Fallback: construct path manually when no artifacts list or no match found
let path_in_artifacts = match (file.map(|f| f.to_string_lossy().to_string()), contract_name)
{
(Some(file), Some(contract_name)) => {
PathBuf::from(format!("{file}/{contract_name}.json"))
}
(None, Some(contract_name)) => {
PathBuf::from(format!("{contract_name}.sol/{contract_name}.json"))
}
(Some(file), None) => {
let name = file.replace(".sol", "");
PathBuf::from(format!("{file}/{name}.json"))
}
_ => bail!("invalid artifact path"),
};
state.config.paths.artifacts.join(path_in_artifacts)
};
let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
let data = fs::read_to_string(path).map_err(|e| {
if state.config.available_artifacts.is_some() {
fmt_err!("no matching artifact found")
} else {
e.into()
}
})?;
let artifact = serde_json::from_str::<ContractObject>(&data)?;
let maybe_bytecode = if deployed { artifact.deployed_bytecode } else { artifact.bytecode };
maybe_bytecode.ok_or_else(|| fmt_err!("no bytecode for contract; is it abstract or unlinked?"))
}
impl Cheatcode for ffiCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { commandInput: input } = self;
let output = ffi(state, input)?;
// Check the exit code of the command.
if output.exitCode != 0 {
// If the command failed, return an error with the exit code and stderr.
return Err(fmt_err!(
"ffi command {:?} exited with code {}. stderr: {}",
input,
output.exitCode,
String::from_utf8_lossy(&output.stderr)
));
}
// If the command succeeded but still wrote to stderr, log it as a warning.
if !output.stderr.is_empty() {
let stderr = String::from_utf8_lossy(&output.stderr);
warn!(target: "cheatcodes", ?input, ?stderr, "ffi command wrote to stderr");
}
// We already hex-decoded the stdout in the `ffi` helper function.
Ok(output.stdout.abi_encode())
}
}
impl Cheatcode for tryFfiCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { commandInput: input } = self;
ffi(state, input).map(|res| res.abi_encode())
}
}
impl Cheatcode for promptCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { promptText: text } = self;
prompt(state, text, prompt_input).map(|res| res.abi_encode())
}
}
impl Cheatcode for promptSecretCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { promptText: text } = self;
prompt(state, text, prompt_password).map(|res| res.abi_encode())
}
}
impl Cheatcode for promptSecretUintCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { promptText: text } = self;
parse(&prompt(state, text, prompt_password)?, &DynSolType::Uint(256))
}
}
impl Cheatcode for promptAddressCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { promptText: text } = self;
parse(&prompt(state, text, prompt_input)?, &DynSolType::Address)
}
}
impl Cheatcode for promptUintCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { promptText: text } = self;
parse(&prompt(state, text, prompt_input)?, &DynSolType::Uint(256))
}
}
/// Writes `contents` to `path` after validating filesystem write permissions.
///
/// Writing to `foundry.toml` is always rejected. When `fs_commit` is disabled the
/// write is skipped entirely but still reported as success.
pub(super) fn write_file(state: &Cheatcodes, path: &Path, contents: &[u8]) -> Result {
    let path = state.config.ensure_path_allowed(path, FsAccessKind::Write)?;
    // write access to foundry.toml is not allowed
    state.config.ensure_not_foundry_toml(&path)?;
    if state.fs_commit {
        fs::locked_write(path, contents)?;
    }
    Ok(Default::default())
}
/// Walks the directory tree under `path` up to `max_depth` levels and returns the
/// ABI-encoded list of `DirEntry` values.
///
/// Walk errors are not fatal: a failing entry is surfaced with a non-empty
/// `errorMessage` so callers can still inspect the successful portion of the walk.
fn read_dir(state: &Cheatcodes, path: &Path, max_depth: u64, follow_links: bool) -> Result {
    let root = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
    let paths: Vec<DirEntry> = WalkDir::new(root)
        // min_depth(1) excludes the root directory itself from the results.
        .min_depth(1)
        .max_depth(max_depth.try_into().unwrap_or(usize::MAX))
        .follow_links(follow_links)
        .contents_first(false)
        .same_file_system(true)
        .sort_by_file_name()
        .into_iter()
        .map(|entry| match entry {
            Ok(entry) => DirEntry {
                errorMessage: String::new(),
                path: entry.path().display().to_string(),
                depth: entry.depth() as u64,
                isDir: entry.file_type().is_dir(),
                isSymlink: entry.path_is_symlink(),
            },
            Err(e) => DirEntry {
                errorMessage: e.to_string(),
                path: e.path().map(|p| p.display().to_string()).unwrap_or_default(),
                depth: e.depth() as u64,
                isDir: false,
                isSymlink: false,
            },
        })
        .collect();
    Ok(paths.abi_encode())
}
/// Shared implementation for `ffi`/`tryFfi`: runs `input` as an external command from
/// the project root and captures its output.
///
/// Requires the `ffi` setting to be enabled. The command's stdout must be valid
/// UTF-8; if the trimmed stdout parses as hex it is decoded, otherwise the raw bytes
/// of the trimmed string are returned.
fn ffi(state: &Cheatcodes, input: &[String]) -> Result<FfiResult> {
    ensure!(
        state.config.ffi,
        "FFI is disabled; add the `--ffi` flag to allow tests to call external commands"
    );
    ensure!(!input.is_empty() && !input[0].is_empty(), "can't execute empty command");
    let mut cmd = Command::new(&input[0]);
    cmd.args(&input[1..]);
    debug!(target: "cheatcodes", ?cmd, "invoking ffi");
    let output = cmd
        .current_dir(&state.config.root)
        .output()
        .map_err(|err| fmt_err!("failed to execute command {cmd:?}: {err}"))?;
    // The stdout might be encoded on valid hex, or it might just be a string,
    // so we need to determine which it is to avoid improperly encoding later.
    let trimmed_stdout = String::from_utf8(output.stdout)?;
    let trimmed_stdout = trimmed_stdout.trim();
    let encoded_stdout = if let Ok(hex) = hex::decode(trimmed_stdout) {
        hex
    } else {
        trimmed_stdout.as_bytes().to_vec()
    };
    Ok(FfiResult {
        // `status.code()` is `None` when the process was terminated by a signal;
        // 69 is an arbitrary non-zero sentinel for that case.
        exitCode: output.status.code().unwrap_or(69),
        stdout: encoded_stdout.into(),
        stderr: output.stderr.into(),
    })
}
/// Reads a line of visible text from the terminal; empty input is allowed.
fn prompt_input(prompt_text: &str) -> Result<String, dialoguer::Error> {
    Input::new().allow_empty(true).with_prompt(prompt_text).interact_text()
}
/// Reads a hidden (non-echoed) line from the terminal.
fn prompt_password(prompt_text: &str) -> Result<String, dialoguer::Error> {
    Password::new().with_prompt(prompt_text).interact()
}
/// Runs the given interactive `input` function on a background thread and waits up to
/// the configured `prompt_timeout` for the user's answer.
///
/// NOTE(review): the spawned thread is detached — if the timeout elapses it may keep
/// blocking on terminal input, but its eventual result is discarded.
fn prompt(
    state: &Cheatcodes,
    prompt_text: &str,
    input: fn(&str) -> Result<String, dialoguer::Error>,
) -> Result<String> {
    let text_clone = prompt_text.to_string();
    let timeout = state.config.prompt_timeout;
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        let _ = tx.send(input(&text_clone));
    });
    match rx.recv_timeout(timeout) {
        Ok(res) => res.map_err(|err| {
            // Emit a newline so the error is not appended to the prompt line.
            let _ = sh_println!();
            err.to_string().into()
        }),
        Err(_) => {
            let _ = sh_eprintln!();
            Err("Prompt timed out".into())
        }
    }
}
impl Cheatcode for getBroadcastCall {
    /// Returns the most recent broadcast transaction of the given type for
    /// `contractName` on `chainId`.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { contractName, chainId, txType } = self;
        let latest_broadcast = latest_broadcast(
            contractName,
            *chainId,
            &state.config.broadcast,
            vec![map_broadcast_tx_type(*txType)],
        )?;
        Ok(latest_broadcast.abi_encode())
    }
}
impl Cheatcode for getBroadcasts_0Call {
    /// Returns all broadcast transactions of the given type for a contract on a chain.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { contractName, chainId, txType } = self;
        let reader = BroadcastReader::new(contractName.clone(), *chainId, &state.config.broadcast)?
            .with_tx_type(map_broadcast_tx_type(*txType));
        let broadcasts = reader.read()?;
        let summaries = broadcasts
            .into_iter()
            .flat_map(|broadcast| {
                let results = reader.into_tx_receipts(broadcast);
                parse_broadcast_results(results)
            })
            .collect::<Vec<_>>();
        Ok(summaries.abi_encode())
    }
}
impl Cheatcode for getBroadcasts_1Call {
    /// Returns all broadcast transactions (any type) for a contract on a chain.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { contractName, chainId } = self;
        let reader = BroadcastReader::new(contractName.clone(), *chainId, &state.config.broadcast)?;
        let broadcasts = reader.read()?;
        let summaries = broadcasts
            .into_iter()
            .flat_map(|broadcast| {
                let results = reader.into_tx_receipts(broadcast);
                parse_broadcast_results(results)
            })
            .collect::<Vec<_>>();
        Ok(summaries.abi_encode())
    }
}
impl Cheatcode for getDeployment_0Call {
    /// Returns the address of the latest CREATE/CREATE2 deployment on the chain
    /// currently configured in the EVM environment (hence the stateful variant).
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { contractName } = self;
        let chain_id = ccx.ecx.cfg.chain_id;
        let latest_broadcast = latest_broadcast(
            contractName,
            chain_id,
            &ccx.state.config.broadcast,
            vec![CallKind::Create, CallKind::Create2],
        )?;
        Ok(latest_broadcast.contractAddress.abi_encode())
    }
}
impl Cheatcode for getDeployment_1Call {
    /// Returns the address of the latest CREATE/CREATE2 deployment for a contract on
    /// the given chain.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { contractName, chainId } = self;
        let latest_broadcast = latest_broadcast(
            contractName,
            *chainId,
            &state.config.broadcast,
            vec![CallKind::Create, CallKind::Create2],
        )?;
        Ok(latest_broadcast.contractAddress.abi_encode())
    }
}
impl Cheatcode for getDeploymentsCall {
    /// Returns the addresses of all recorded CREATE/CREATE2 deployments for a
    /// contract on the given chain.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { contractName, chainId } = self;
        let reader = BroadcastReader::new(contractName.clone(), *chainId, &state.config.broadcast)?
            .with_tx_type(CallKind::Create)
            .with_tx_type(CallKind::Create2);
        let broadcasts = reader.read()?;
        let summaries = broadcasts
            .into_iter()
            .flat_map(|broadcast| {
                let results = reader.into_tx_receipts(broadcast);
                parse_broadcast_results(results)
            })
            .collect::<Vec<_>>();
        let deployed_addresses =
            summaries.into_iter().map(|summary| summary.contractAddress).collect::<Vec<_>>();
        Ok(deployed_addresses.abi_encode())
    }
}
/// Maps the Solidity-facing `BroadcastTxType` enum onto the internal `CallKind`.
fn map_broadcast_tx_type(tx_type: BroadcastTxType) -> CallKind {
    match tx_type {
        BroadcastTxType::Call => CallKind::Call,
        BroadcastTxType::Create => CallKind::Create,
        BroadcastTxType::Create2 => CallKind::Create2,
        // Catch-all for any generated placeholder variant; never produced by valid calldata.
        _ => unreachable!("invalid tx type"),
    }
}
/// Flattens (transaction, receipt) pairs into Solidity-facing `BroadcastTxSummary` values.
fn parse_broadcast_results(
    results: Vec<(TransactionWithMetadata, AnyTransactionReceipt)>,
) -> Vec<BroadcastTxSummary> {
    results
        .into_iter()
        .map(|(tx, receipt)| BroadcastTxSummary {
            txHash: receipt.transaction_hash,
            blockNumber: receipt.block_number.unwrap_or_default(),
            // Inverse of `map_broadcast_tx_type`.
            // NOTE(review): assumes recorded broadcasts only ever use
            // Call/Create/Create2; any other `CallKind` would panic here.
            txType: match tx.opcode {
                CallKind::Call => BroadcastTxType::Call,
                CallKind::Create => BroadcastTxType::Create,
                CallKind::Create2 => BroadcastTxType::Create2,
                _ => unreachable!("invalid tx type"),
            },
            contractAddress: tx.contract_address.unwrap_or_default(),
            success: receipt.status(),
        })
        .collect()
}
/// Reads the most recent broadcast entry for `contract_name` on `chain_id`,
/// restricted to the given transaction kinds, and returns its summary.
///
/// # Errors
/// Returns an error if the broadcast log cannot be read, or if no matching
/// transaction exists for the contract on that chain.
fn latest_broadcast(
    // `&str` instead of `&String`: callers passing `&String` still work via deref coercion.
    contract_name: &str,
    chain_id: u64,
    broadcast_path: &Path,
    filters: Vec<CallKind>,
) -> Result<BroadcastTxSummary> {
    let mut reader = BroadcastReader::new(contract_name.to_string(), chain_id, broadcast_path)?;
    for filter in filters {
        reader = reader.with_tx_type(filter);
    }
    let broadcast = reader.read_latest()?;
    let results = reader.into_tx_receipts(broadcast);
    let summaries = parse_broadcast_results(results);
    // Take ownership of the first summary instead of cloning it.
    summaries
        .into_iter()
        .next()
        .ok_or_else(|| fmt_err!("no deployment found for {contract_name} on chain {chain_id}"))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::CheatsConfig;
use std::sync::Arc;
fn cheats() -> Cheatcodes {
let config = CheatsConfig {
ffi: true,
root: PathBuf::from(&env!("CARGO_MANIFEST_DIR")),
..Default::default()
};
Cheatcodes::new(Arc::new(config))
}
#[test]
fn test_ffi_hex() {
let msg = b"gm";
let cheats = cheats();
let args = ["echo".to_string(), hex::encode(msg)];
let output = ffi(&cheats, &args).unwrap();
assert_eq!(output.stdout, Bytes::from(msg));
}
#[test]
fn test_ffi_string() {
let msg = "gm";
let cheats = cheats();
let args = ["echo".to_string(), msg.to_string()];
let output = ffi(&cheats, &args).unwrap();
assert_eq!(output.stdout, Bytes::from(msg.as_bytes()));
}
#[test]
fn test_ffi_fails_on_error_code() {
let mut cheats = cheats();
// Use a command that is guaranteed to fail with a non-zero exit code on any platform.
#[cfg(unix)]
let args = vec!["false".to_string()];
#[cfg(windows)]
let args = vec!["cmd".to_string(), "/c".to_string(), "exit 1".to_string()];
let result = ffiCall { commandInput: args }.apply(&mut cheats);
// Assert that the cheatcode returned an error.
assert!(result.is_err(), "Expected ffi cheatcode to fail, but it succeeded");
// Assert that the error message contains the expected information.
let err_msg = result.unwrap_err().to_string();
assert!(
err_msg.contains("exited with code 1"),
"Error message did not contain exit code: {err_msg}"
);
}
#[test]
fn test_artifact_parsing() {
let s = include_str!("../../evm/test-data/solc-obj.json");
let artifact: ContractObject = serde_json::from_str(s).unwrap();
assert!(artifact.bytecode.is_some());
let artifact: ContractObject = serde_json::from_str(s).unwrap();
assert!(artifact.deployed_bytecode.is_some());
}
#[test]
fn test_alloy_json_abi_rejects_unlinked_bytecode() {
let artifact_json = r#"{
"abi": [],
"bytecode": "0x73__$987e73aeca5e61ce83e4cb0814d87beda9$__63baf2f868"
}"#;
let result: Result<ContractObject, _> = serde_json::from_str(artifact_json);
assert!(result.is_err(), "should reject unlinked bytecode with placeholders");
let err = result.unwrap_err().to_string();
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/base64.rs | crates/cheatcodes/src/base64.rs | use crate::{Cheatcode, Cheatcodes, Result, Vm::*};
use alloy_sol_types::SolValue;
use base64::prelude::*;
impl Cheatcode for toBase64_0Call {
    /// Encodes `data` with the standard (padded) base64 alphabet.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { data } = self;
        Ok(BASE64_STANDARD.encode(data).abi_encode())
    }
}
impl Cheatcode for toBase64_1Call {
    /// Overload of `toBase64` for the alternate input type; same encoding.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { data } = self;
        Ok(BASE64_STANDARD.encode(data).abi_encode())
    }
}
impl Cheatcode for toBase64URL_0Call {
    /// Encodes `data` with the URL-safe base64 alphabet.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { data } = self;
        Ok(BASE64_URL_SAFE.encode(data).abi_encode())
    }
}
impl Cheatcode for toBase64URL_1Call {
    /// Overload of `toBase64URL` for the alternate input type; same encoding.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { data } = self;
        Ok(BASE64_URL_SAFE.encode(data).abi_encode())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/env.rs | crates/cheatcodes/src/env.rs | //! Implementations of [`Environment`](spec::Group::Environment) cheatcodes.
use crate::{Cheatcode, Cheatcodes, Error, Result, Vm::*, string};
use alloy_dyn_abi::DynSolType;
use alloy_sol_types::SolValue;
use std::{env, sync::OnceLock};
/// Stores the forge execution context for the duration of the program.
pub static FORGE_CONTEXT: OnceLock<ForgeContext> = OnceLock::new();
impl Cheatcode for setEnvCall {
    /// Sets a process environment variable after validating the key and value.
    ///
    /// Keys must be non-empty and must not contain `=` or NUL; values must not
    /// contain NUL — the conditions `std::env::set_var` would otherwise panic on.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name: key, value } = self;
        if key.is_empty() {
            Err(fmt_err!("environment variable key can't be empty"))
        } else if key.contains('=') {
            Err(fmt_err!("environment variable key can't contain equal sign `=`"))
        } else if key.contains('\0') {
            Err(fmt_err!("environment variable key can't contain NUL character `\\0`"))
        } else if value.contains('\0') {
            Err(fmt_err!("environment variable value can't contain NUL character `\\0`"))
        } else {
            // `set_var` is `unsafe` because mutating the environment is not
            // thread-safe; cheatcode execution accepts that risk here.
            unsafe {
                env::set_var(key, value);
            }
            Ok(Default::default())
        }
    }
}
impl Cheatcode for resolveEnvCall {
    /// Resolves environment-variable references in `input` via
    /// `foundry_config::resolve::interpolate` and returns the resulting string.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { input } = self;
        let resolved = foundry_config::resolve::interpolate(input)
            .map_err(|e| fmt_err!("failed to resolve env var: {e}"))?;
        Ok(resolved.abi_encode())
    }
}
impl Cheatcode for envExistsCall {
    /// Returns whether `name` is set in the environment (and is valid unicode —
    /// `env::var` errors on non-unicode values).
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        Ok(env::var(name).is_ok().abi_encode())
    }
}
// Scalar `env*` cheatcodes: read a single environment variable and parse it as the
// requested Solidity type (see `env`).
impl Cheatcode for envBool_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::Bool)
    }
}
impl Cheatcode for envUint_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::Uint(256))
    }
}
impl Cheatcode for envInt_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::Int(256))
    }
}
impl Cheatcode for envAddress_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::Address)
    }
}
impl Cheatcode for envBytes32_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::FixedBytes(32))
    }
}
impl Cheatcode for envString_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::String)
    }
}
impl Cheatcode for envBytes_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name } = self;
        env(name, &DynSolType::Bytes)
    }
}
// Array `env*` cheatcodes: split the variable's value on `delim` and parse each
// element as the requested Solidity type (see `env_array`).
impl Cheatcode for envBool_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::Bool)
    }
}
impl Cheatcode for envUint_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::Uint(256))
    }
}
impl Cheatcode for envInt_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::Int(256))
    }
}
impl Cheatcode for envAddress_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::Address)
    }
}
impl Cheatcode for envBytes32_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::FixedBytes(32))
    }
}
impl Cheatcode for envString_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::String)
    }
}
impl Cheatcode for envBytes_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim } = self;
        env_array(name, delim, &DynSolType::Bytes)
    }
}
// `envOr` cheatcodes: like `env*`, but any failure (variable missing or value not
// parseable) yields the provided default instead of an error — see `env_default`
// and `env_array_default`. The per-overload comments name the Solidity type.
// bool
impl Cheatcode for envOr_0Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::Bool)
    }
}
// uint256
impl Cheatcode for envOr_1Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::Uint(256))
    }
}
// int256
impl Cheatcode for envOr_2Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::Int(256))
    }
}
// address
impl Cheatcode for envOr_3Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::Address)
    }
}
// bytes32
impl Cheatcode for envOr_4Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::FixedBytes(32))
    }
}
// string
impl Cheatcode for envOr_5Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::String)
    }
}
// bytes
impl Cheatcode for envOr_6Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, defaultValue } = self;
        env_default(name, defaultValue, &DynSolType::Bytes)
    }
}
// bool[]
impl Cheatcode for envOr_7Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        env_array_default(name, delim, defaultValue, &DynSolType::Bool)
    }
}
// uint256[]
impl Cheatcode for envOr_8Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        env_array_default(name, delim, defaultValue, &DynSolType::Uint(256))
    }
}
// int256[]
impl Cheatcode for envOr_9Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        env_array_default(name, delim, defaultValue, &DynSolType::Int(256))
    }
}
// address[]
impl Cheatcode for envOr_10Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        env_array_default(name, delim, defaultValue, &DynSolType::Address)
    }
}
// bytes32[]
impl Cheatcode for envOr_11Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        env_array_default(name, delim, defaultValue, &DynSolType::FixedBytes(32))
    }
}
// string[]
impl Cheatcode for envOr_12Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        env_array_default(name, delim, defaultValue, &DynSolType::String)
    }
}
// bytes[]
impl Cheatcode for envOr_13Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { name, delim, defaultValue } = self;
        // Convert to an owned Vec so the default can be ABI-encoded uniformly.
        let default = defaultValue.to_vec();
        env_array_default(name, delim, &default, &DynSolType::Bytes)
    }
}
impl Cheatcode for isContextCall {
    /// Returns whether the current forge execution context matches `context`.
    /// `false` when no context has been set yet.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { context } = self;
        Ok((FORGE_CONTEXT.get() == Some(context)).abi_encode())
    }
}
/// Set `forge` command current execution context for the duration of the program.
/// Execution context is immutable, subsequent calls of this function won't change the context.
pub fn set_execution_context(context: ForgeContext) {
    // `OnceLock::set` fails silently if already set, enforcing immutability.
    let _ = FORGE_CONTEXT.set(context);
}
/// Reads env var `key` and parses it as `ty`, sanitizing parse errors via
/// `map_env_err` so they do not leak the variable's value.
fn env(key: &str, ty: &DynSolType) -> Result {
    get_env(key).and_then(|val| string::parse(&val, ty).map_err(map_env_err(key, &val)))
}
/// Like [`env`], but any failure (missing variable or parse error) yields the
/// ABI-encoded `default` instead of an error.
fn env_default<T: SolValue>(key: &str, default: &T, ty: &DynSolType) -> Result {
    Ok(env(key, ty).unwrap_or_else(|_| default.abi_encode()))
}
/// Reads env var `key`, splits its value on `delim` (trimming each element), and
/// parses the elements as an array of `ty`.
fn env_array(key: &str, delim: &str, ty: &DynSolType) -> Result {
    get_env(key).and_then(|val| {
        string::parse_array(val.split(delim).map(str::trim), ty).map_err(map_env_err(key, &val))
    })
}
/// Like [`env_array`], but any failure yields the ABI-encoded `default` instead.
fn env_array_default<T: SolValue>(key: &str, delim: &str, default: &T, ty: &DynSolType) -> Result {
    Ok(env_array(key, delim, ty).unwrap_or_else(|_| default.abi_encode()))
}
/// Reads the environment variable `key`, mapping the two `env::var` failure modes
/// (variable unset, value not valid unicode) to descriptive errors.
fn get_env(key: &str) -> Result<String> {
    env::var(key).map_err(|err| match err {
        env::VarError::NotPresent => fmt_err!("environment variable {key:?} not found"),
        env::VarError::NotUnicode(s) => {
            fmt_err!("environment variable {key:?} was not valid unicode: {s:?}")
        }
    })
}
/// Converts the error message of a failed parsing attempt to a more user-friendly message that
/// doesn't leak the value.
fn map_env_err<'a>(key: &'a str, value: &'a str) -> impl FnOnce(Error) -> Error + 'a {
move |e| {
// failed parsing <value> as type `uint256`: parser error:
// <value>
// ^
// expected at least one digit
let mut e = e.to_string();
e = e.replacen(&format!("\"{value}\""), &format!("${key}"), 1);
e = e.replacen(&format!("\n{value}\n"), &format!("\n${key}\n"), 1);
Error::from(e)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parse_env_uint() {
        // Parsing a non-numeric value must yield an error that redacts the raw value,
        // mentioning `$<key>` instead — once for the quoted form and once for the
        // line-isolated form, hence the expected count of 2.
        let key = "parse_env_uint";
        let value = "t";
        unsafe {
            env::set_var(key, value);
        }
        let err = env(key, &DynSolType::Uint(256)).unwrap_err().to_string();
        assert_eq!(err.matches("$parse_env_uint").count(), 2, "{err:?}");
        unsafe {
            env::remove_var(key);
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/json.rs | crates/cheatcodes/src/json.rs | //! Implementations of [`Json`](spec::Group::Json) cheatcodes.
use crate::{Cheatcode, Cheatcodes, Result, Vm::*, string};
use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712_parser::EncodeType};
use alloy_primitives::{Address, B256, I256, U256, hex};
use alloy_sol_types::SolValue;
use foundry_common::{fmt::StructDefinitions, fs};
use foundry_config::fs_permissions::FsAccessKind;
use serde_json::{Map, Value};
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet},
};
impl Cheatcode for keyExistsCall {
    /// Returns whether `key` selects at least one value in the `json` document.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        check_json_key_exists(json, key)
    }
}
impl Cheatcode for keyExistsJsonCall {
    /// Same behavior as `keyExists`.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        check_json_key_exists(json, key)
    }
}
impl Cheatcode for parseJson_0Call {
    /// Parses the whole `json` document ("$" root path) with inferred types.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json } = self;
        parse_json(json, "$", state.struct_defs())
    }
}
impl Cheatcode for parseJson_1Call {
    /// Parses the value(s) selected by `key` with inferred types.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json(json, key, state.struct_defs())
    }
}
// Typed `parseJson*` cheatcodes: coerce the single value selected by `key` to a
// fixed Solidity type (or an array thereof) instead of inferring it.
impl Cheatcode for parseJsonUintCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Uint(256))
    }
}
impl Cheatcode for parseJsonUintArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Uint(256))))
    }
}
impl Cheatcode for parseJsonIntCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Int(256))
    }
}
impl Cheatcode for parseJsonIntArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Int(256))))
    }
}
impl Cheatcode for parseJsonBoolCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Bool)
    }
}
impl Cheatcode for parseJsonBoolArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bool)))
    }
}
impl Cheatcode for parseJsonAddressCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Address)
    }
}
impl Cheatcode for parseJsonAddressArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Address)))
    }
}
impl Cheatcode for parseJsonStringCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::String)
    }
}
impl Cheatcode for parseJsonStringArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::String)))
    }
}
impl Cheatcode for parseJsonBytesCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Bytes)
    }
}
impl Cheatcode for parseJsonBytesArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bytes)))
    }
}
impl Cheatcode for parseJsonBytes32Call {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::FixedBytes(32))
    }
}
impl Cheatcode for parseJsonBytes32ArrayCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::FixedBytes(32))))
    }
}
// `parseJsonType*`: coerce against a caller-supplied type description, resolved to a
// concrete type via `resolve_type` using the state's struct definitions.
impl Cheatcode for parseJsonType_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json, typeDescription } = self;
        parse_json_coerce(json, "$", &resolve_type(typeDescription, state.struct_defs())?)
            .map(|v| v.abi_encode())
    }
}
impl Cheatcode for parseJsonType_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json, key, typeDescription } = self;
        parse_json_coerce(json, key, &resolve_type(typeDescription, state.struct_defs())?)
            .map(|v| v.abi_encode())
    }
}
impl Cheatcode for parseJsonTypeArrayCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json, key, typeDescription } = self;
        let ty = resolve_type(typeDescription, state.struct_defs())?;
        parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode())
    }
}
impl Cheatcode for parseJsonKeysCall {
    /// Returns the keys of the JSON object selected by `key`.
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { json, key } = self;
        parse_json_keys(json, key)
    }
}
impl Cheatcode for serializeJsonCall {
    /// Replaces the in-memory JSON object stored under `objectKey` with the parsed
    /// `value`, returning the raw value string back to the caller.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, value } = self;
        *state.serialized_jsons.entry(objectKey.into()).or_default() = serde_json::from_str(value)?;
        Ok(value.abi_encode())
    }
}
// Scalar `serialize*` cheatcodes: insert a single typed value under `valueKey` into
// the in-memory JSON object identified by `objectKey` (see `serialize_json`).
impl Cheatcode for serializeBool_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, (*value).into())
    }
}
impl Cheatcode for serializeUint_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, (*value).into())
    }
}
impl Cheatcode for serializeInt_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, (*value).into())
    }
}
impl Cheatcode for serializeAddress_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, (*value).into())
    }
}
impl Cheatcode for serializeBytes32_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, DynSolValue::FixedBytes(*value, 32))
    }
}
impl Cheatcode for serializeString_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, value.clone().into())
    }
}
impl Cheatcode for serializeBytes_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        serialize_json(state, objectKey, valueKey, value.to_vec().into())
    }
}
// Array `serialize*` overloads: insert an array of typed values under `valueKey`.
impl Cheatcode for serializeBool_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(values.iter().copied().map(DynSolValue::Bool).collect()),
        )
    }
}
impl Cheatcode for serializeUint_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(values.iter().map(|v| DynSolValue::Uint(*v, 256)).collect()),
        )
    }
}
impl Cheatcode for serializeInt_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(values.iter().map(|v| DynSolValue::Int(*v, 256)).collect()),
        )
    }
}
impl Cheatcode for serializeAddress_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(values.iter().copied().map(DynSolValue::Address).collect()),
        )
    }
}
impl Cheatcode for serializeBytes32_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(values.iter().map(|v| DynSolValue::FixedBytes(*v, 32)).collect()),
        )
    }
}
impl Cheatcode for serializeString_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(values.iter().cloned().map(DynSolValue::String).collect()),
        )
    }
}
impl Cheatcode for serializeBytes_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, values } = self;
        serialize_json(
            state,
            objectKey,
            valueKey,
            DynSolValue::Array(
                values.iter().cloned().map(Into::into).map(DynSolValue::Bytes).collect(),
            ),
        )
    }
}
impl Cheatcode for serializeJsonType_0Call {
    /// ABI-decodes `value` against the described type and returns it serialized as a
    /// JSON string (no object state is touched).
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { typeDescription, value } = self;
        let ty = resolve_type(typeDescription, state.struct_defs())?;
        let value = ty.abi_decode(value)?;
        let value = foundry_common::fmt::serialize_value_as_json(value, state.struct_defs())?;
        Ok(value.to_string().abi_encode())
    }
}
impl Cheatcode for serializeJsonType_1Call {
    /// ABI-decodes `value` against the described type and inserts it under `valueKey`
    /// in the object identified by `objectKey`.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, typeDescription, value } = self;
        let ty = resolve_type(typeDescription, state.struct_defs())?;
        let value = ty.abi_decode(value)?;
        serialize_json(state, objectKey, valueKey, value)
    }
}
impl Cheatcode for serializeUintToHexCall {
    /// Serializes the uint as a `0x`-prefixed lowercase hex string.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { objectKey, valueKey, value } = self;
        let hex = format!("0x{value:x}");
        serialize_json(state, objectKey, valueKey, hex.into())
    }
}
impl Cheatcode for writeJson_0Call {
    /// Pretty-prints `json` to the file at `path`; input that fails to parse as JSON
    /// is written as a JSON string value instead.
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json, path } = self;
        let json = serde_json::from_str(json).unwrap_or_else(|_| Value::String(json.to_owned()));
        let json_string = serde_json::to_string_pretty(&json)?;
        super::fs::write_file(state, path.as_ref(), json_string.as_bytes())
    }
}
impl Cheatcode for writeJson_1Call {
    /// Updates the value at `valueKey` inside the JSON file at `path`, writing the
    /// pretty-printed document back (read and write permissions are both checked).
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { json: value, path, valueKey } = self;
        // Read, parse, and update the JSON object
        let data_path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
        let data_string = fs::locked_read_to_string(&data_path)?;
        let mut data =
            serde_json::from_str(&data_string).unwrap_or_else(|_| Value::String(data_string));
        upsert_json_value(&mut data, value, valueKey)?;
        // Write the updated content back to the file
        let json_string = serde_json::to_string_pretty(&data)?;
        super::fs::write_file(state, path.as_ref(), json_string.as_bytes())
    }
}
/// Returns the ABI-encoded boolean of whether `key` selects at least one value in
/// the `json` document.
pub(super) fn check_json_key_exists(json: &str, key: &str) -> Result {
    let document = parse_json_str(json)?;
    let matches = select(&document, key)?;
    Ok((!matches.is_empty()).abi_encode())
}
/// Parses the values selected by `path` from the `json` document with inferred types
/// (structs resolved via `defs`), returning the double-ABI-encoded result.
pub(super) fn parse_json(json: &str, path: &str, defs: Option<&StructDefinitions>) -> Result {
    let value = parse_json_str(json)?;
    let selected = select(&value, path)?;
    let sol = json_to_sol(defs, &selected)?;
    Ok(encode(sol))
}
/// Parses exactly one value selected by `path` from `json`, coercing it to `ty`.
/// Errors if the path matches zero or multiple values.
pub(super) fn parse_json_coerce(json: &str, path: &str, ty: &DynSolType) -> Result {
    let json = parse_json_str(json)?;
    let [value] = select(&json, path)?[..] else {
        bail!("path {path:?} must return exactly one JSON value");
    };
    parse_json_as(value, ty).map(|v| v.abi_encode())
}
/// Parses given [serde_json::Value] as a [DynSolValue].
pub(super) fn parse_json_as(value: &Value, ty: &DynSolType) -> Result<DynSolValue> {
let to_string = |v: &Value| {
let mut s = v.to_string();
s.retain(|c: char| c != '"');
s
};
match (value, ty) {
(Value::Array(array), ty) => parse_json_array(array, ty),
(Value::Object(object), ty) => parse_json_map(object, ty),
(Value::String(s), DynSolType::String) => Ok(DynSolValue::String(s.clone())),
(Value::String(s), DynSolType::Uint(_) | DynSolType::Int(_)) => string::parse_value(s, ty),
_ => string::parse_value(&to_string(value), ty),
}
}
/// Parses a JSON array as a tuple, dynamic array, or fixed array of `ty`.
///
/// Tuple and fixed-array targets require the JSON array length to match exactly.
pub(super) fn parse_json_array(array: &[Value], ty: &DynSolType) -> Result<DynSolValue> {
    match ty {
        DynSolType::Tuple(types) => {
            ensure!(array.len() == types.len(), "array length mismatch");
            let values = array
                .iter()
                .zip(types)
                .map(|(e, ty)| parse_json_as(e, ty))
                .collect::<Result<Vec<_>>>()?;
            Ok(DynSolValue::Tuple(values))
        }
        DynSolType::Array(inner) => {
            let values =
                array.iter().map(|e| parse_json_as(e, inner)).collect::<Result<Vec<_>>>()?;
            Ok(DynSolValue::Array(values))
        }
        DynSolType::FixedArray(inner, len) => {
            ensure!(array.len() == *len, "array length mismatch");
            let values =
                array.iter().map(|e| parse_json_as(e, inner)).collect::<Result<Vec<_>>>()?;
            Ok(DynSolValue::FixedArray(values))
        }
        _ => bail!("expected {ty}, found array"),
    }
}
/// Parses a JSON object as the custom struct type `ty`, matching fields by name.
///
/// Extra JSON keys are ignored; a missing struct field is an error.
pub(super) fn parse_json_map(map: &Map<String, Value>, ty: &DynSolType) -> Result<DynSolValue> {
    let Some((name, fields, types)) = ty.as_custom_struct() else {
        bail!("expected {ty}, found JSON object");
    };
    let mut values = Vec::with_capacity(fields.len());
    for (field, ty) in fields.iter().zip(types.iter()) {
        let Some(value) = map.get(field) else { bail!("field {field:?} not found in JSON object") };
        values.push(parse_json_as(value, ty)?);
    }
    Ok(DynSolValue::CustomStruct {
        name: name.to_string(),
        prop_names: fields.to_vec(),
        tuple: values,
    })
}
pub(super) fn parse_json_keys(json: &str, key: &str) -> Result {
let json = parse_json_str(json)?;
let values = select(&json, key)?;
let [value] = values[..] else {
bail!("key {key:?} must return exactly one JSON object");
};
let Value::Object(object) = value else {
bail!("JSON value at {key:?} is not an object");
};
let keys = object.keys().collect::<Vec<_>>();
Ok(keys.abi_encode())
}
/// Deserializes a JSON string, mapping failures to a cheatcode error.
fn parse_json_str(json: &str) -> Result<Value> {
    match serde_json::from_str(json) {
        Ok(value) => Ok(value),
        Err(e) => Err(fmt_err!("failed parsing JSON: {e}")),
    }
}
/// Converts the selected JSON values into [DynSolValue]s via type guessing.
fn json_to_sol(defs: Option<&StructDefinitions>, json: &[&Value]) -> Result<Vec<DynSolValue>> {
    json.iter().copied().map(|value| json_value_to_token(value, defs)).collect()
}
/// Runs a JSONPath query against `value`, after normalizing the path to start at the root.
fn select<'a>(value: &'a Value, path: &str) -> Result<Vec<&'a Value>> {
    // "." is accepted as an alias for the document root.
    let path = if path == "." { "$" } else { path };
    match jsonpath_lib::select(value, &canonicalize_json_path(path)) {
        Ok(values) => Ok(values),
        // format error with debug string because json_path errors may contain newlines
        Err(e) => Err(fmt_err!("failed selecting from JSON: {:?}", e.to_string())),
    }
}
/// ABI-encodes a list of values: empty -> empty bytes, one value -> the value itself,
/// several -> an array. The result is then encoded once more as `bytes`.
fn encode(values: Vec<DynSolValue>) -> Vec<u8> {
    // Double `abi_encode` is intentional
    let bytes = if values.is_empty() {
        Vec::new()
    } else if values.len() == 1 {
        values.into_iter().next().unwrap().abi_encode()
    } else {
        DynSolValue::Array(values).abi_encode()
    };
    bytes.abi_encode()
}
/// Canonicalize a json path key to always start from the root of the document.
/// Read more about json path syntax: <https://goessner.net/articles/JsonPath/>
pub(super) fn canonicalize_json_path(path: &str) -> Cow<'_, str> {
    match path.strip_prefix('$') {
        // Already rooted: return the input unchanged, borrowed.
        Some(_) => Cow::Borrowed(path),
        // Otherwise, prepend the root marker (allocates).
        None => Cow::Owned(format!("${path}")),
    }
}
/// Converts a JSON [`Value`] to a [`DynSolValue`] by trying to guess encoded type. For safer
/// decoding, use [`parse_json_as`].
///
/// The function is designed to run recursively, so that in case of an object
/// it will call itself to convert each of its values and encode the whole as a
/// Tuple
#[instrument(target = "cheatcodes", level = "trace", ret)]
pub(super) fn json_value_to_token(
    value: &Value,
    defs: Option<&StructDefinitions>,
) -> Result<DynSolValue> {
    match defs {
        Some(defs) => _json_value_to_token(value, defs),
        None => _json_value_to_token(value, &StructDefinitions::default()),
    }
}
/// Recursive worker for [`json_value_to_token`]: guesses a [`DynSolValue`] for `value`,
/// using `defs` to recover source-code field ordering for objects that match a known struct.
fn _json_value_to_token(value: &Value, defs: &StructDefinitions) -> Result<DynSolValue> {
    match value {
        // `null` is encoded as 32 zero bytes.
        Value::Null => Ok(DynSolValue::FixedBytes(B256::ZERO, 32)),
        Value::Bool(boolean) => Ok(DynSolValue::Bool(*boolean)),
        Value::Array(array) => array
            .iter()
            .map(|v| _json_value_to_token(v, defs))
            .collect::<Result<_>>()
            .map(DynSolValue::Array),
        Value::Object(map) => {
            // Try to find a struct definition that matches the object keys.
            let keys: BTreeSet<_> = map.keys().map(|s| s.as_str()).collect();
            let matching_def = defs.values().find(|fields| {
                fields.len() == keys.len()
                    && fields.iter().map(|(name, _)| name.as_str()).collect::<BTreeSet<_>>() == keys
            });
            if let Some(fields) = matching_def {
                // Found a struct with matching field names, use the order from the definition.
                fields
                    .iter()
                    .map(|(name, _)| {
                        // unwrap is safe because we know the key exists.
                        _json_value_to_token(map.get(name).unwrap(), defs)
                    })
                    .collect::<Result<_>>()
                    .map(DynSolValue::Tuple)
            } else {
                // Fallback to alphabetical sorting if no matching struct is found.
                // See: [#3647](https://github.com/foundry-rs/foundry/pull/3647)
                let ordered_object: BTreeMap<_, _> =
                    map.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
                ordered_object
                    .values()
                    .map(|value| _json_value_to_token(value, defs))
                    .collect::<Result<_>>()
                    .map(DynSolValue::Tuple)
            }
        }
        Value::Number(number) => {
            if let Some(f) = number.as_f64() {
                // Check if the number has decimal digits because the EVM does not support floating
                // point math
                if f.fract() == 0.0 {
                    // Use the string representation of the `serde_json` Number type instead of
                    // calling f.to_string(), because some numbers are wrongly rounded up after
                    // being converted to f64.
                    // Example: 18446744073709551615 becomes 18446744073709552000 after parsing it
                    // to f64.
                    let s = number.to_string();
                    // Coerced to scientific notation, so short-circuit to using fallback.
                    // This will not have a problem with hex numbers, as for parsing these
                    // We'd need to prefix this with 0x.
                    // See also <https://docs.soliditylang.org/en/latest/types.html#rational-and-integer-literals>
                    if s.contains('e') {
                        // Calling Number::to_string with powers of ten formats the number using
                        // scientific notation and causes from_dec_str to fail. Using format! with
                        // f64 keeps the full number representation.
                        // Example: 100000000000000000000 becomes 1e20 when Number::to_string is
                        // used.
                        let fallback_s = f.to_string();
                        if let Ok(n) = fallback_s.parse() {
                            return Ok(DynSolValue::Uint(n, 256));
                        }
                        if let Ok(n) = I256::from_dec_str(&fallback_s) {
                            return Ok(DynSolValue::Int(n, 256));
                        }
                    }
                    // Prefer an unsigned, then a signed, 256-bit decimal interpretation.
                    if let Ok(n) = s.parse() {
                        return Ok(DynSolValue::Uint(n, 256));
                    }
                    if let Ok(n) = s.parse() {
                        return Ok(DynSolValue::Int(n, 256));
                    }
                }
            }
            Err(fmt_err!("unsupported JSON number: {number}"))
        }
        Value::String(string) => {
            // Handle hex strings.
            if let Some(mut val) = string.strip_prefix("0x") {
                let s;
                // 39 hex chars is one nibble short of an address; padding would silently
                // produce a different address, so reject instead.
                if val.len() == 39 {
                    return Err(format!("Cannot parse \"{val}\" as an address. If you want to specify address, prepend zero to the value.").into());
                }
                // Pad odd-length hex to an even number of nibbles so it can be decoded.
                if !val.len().is_multiple_of(2) {
                    s = format!("0{val}");
                    val = &s[..];
                }
                if let Ok(bytes) = hex::decode(val) {
                    return Ok(match bytes.len() {
                        20 => DynSolValue::Address(Address::from_slice(&bytes)),
                        32 => DynSolValue::FixedBytes(B256::from_slice(&bytes), 32),
                        _ => DynSolValue::Bytes(bytes),
                    });
                }
            }
            // Handle large numbers that were potentially encoded as strings because they exceed the
            // capacity of a 64-bit integer.
            // Note that number-like strings that *could* fit in an `i64`/`u64` will fall through
            // and be treated as literal strings.
            if let Ok(n) = string.parse::<I256>()
                && i64::try_from(n).is_err()
            {
                return Ok(DynSolValue::Int(n, 256));
            } else if let Ok(n) = string.parse::<U256>()
                && u64::try_from(n).is_err()
            {
                return Ok(DynSolValue::Uint(n, 256));
            }
            // Otherwise, treat as a regular string
            Ok(DynSolValue::String(string.to_owned()))
        }
    }
}
/// Serializes a key:value pair to a specific object. If the key is valueKey, the value is
/// expected to be an object, which will be set as the root object for the provided object key,
/// overriding the whole root object if the object key already exists. By calling this function
/// multiple times, the user can serialize multiple KV pairs to the same object. The value can be
/// of any type, even a new object in itself. The function will return a stringified version of
/// the object, so that the user can use that as a value to a new invocation of the same function
/// with a new object key. This enables the user to reuse the same function to create arbitrarily
/// complex object structures (JSON).
fn serialize_json(
    state: &mut Cheatcodes,
    object_key: &str,
    value_key: &str,
    value: DynSolValue,
) -> Result {
    let json = foundry_common::fmt::serialize_value_as_json(value, state.struct_defs())?;
    let object = state.serialized_jsons.entry(object_key.into()).or_default();
    object.insert(value_key.into(), json);
    // Serializing a map of already-valid JSON values cannot fail.
    let stringified = serde_json::to_string(object).unwrap();
    Ok(stringified.abi_encode())
}
/// Resolves a [DynSolType] from user input.
///
/// Accepts either a plain Solidity type string (e.g. `uint256[]`) or an EIP-712
/// `encodeType` string (e.g. `Foo(uint256 a,bytes32 b)`). In both cases, struct fields
/// are reordered to the source-code order from `struct_defs`, when available.
pub(super) fn resolve_type(
    type_description: &str,
    struct_defs: Option<&StructDefinitions>,
) -> Result<DynSolType> {
    // Restores source-code field order; the EIP-712 resolver sorts fields alphabetically.
    let ordered_ty = |ty| -> Result<DynSolType> {
        if let Some(defs) = struct_defs { reorder_type(ty, defs) } else { Ok(ty) }
    };
    // Fast path: a plain Solidity type string.
    if let Ok(ty) = DynSolType::parse(type_description) {
        return ordered_ty(ty);
    };
    // Otherwise, try it as an EIP-712 `encodeType` string; the first listed type is
    // treated as the primary type.
    if let Ok(encoded) = EncodeType::parse(type_description) {
        let main_type = encoded.types[0].type_name;
        let mut resolver = Resolver::default();
        for t in &encoded.types {
            resolver.ingest(t.to_owned());
        }
        // Get the alphabetically-sorted type from the resolver, and reorder if necessary.
        return ordered_ty(resolver.resolve(main_type)?);
    }
    bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string")
}
/// Upserts a value into a JSON object based on a dot-separated key.
///
/// This function navigates through a mutable `serde_json::Value` object using a
/// path-like key. It creates nested JSON objects if they do not exist along the path.
/// The value is inserted at the final key in the path.
///
/// # Arguments
///
/// * `data` - A mutable reference to the `serde_json::Value` to be modified.
/// * `value` - The string representation of the value to upsert. This string is first parsed as
///   JSON, and if that fails, it's treated as a plain JSON string.
/// * `key` - A dot-separated string representing the path to the location for upserting.
pub(super) fn upsert_json_value(data: &mut Value, value: &str, key: &str) -> Result<()> {
    // Parse the path key into segments.
    let canonical_key = canonicalize_json_path(key);
    // NOTE(review): when the canonical form has no "$." prefix (e.g. "foo" canonicalizes
    // to "$foo"), the split falls back to the original `key` — presumably intentional so
    // bare keys still yield their own segments; confirm against callers.
    let parts: Vec<&str> = canonical_key
        .strip_prefix("$.")
        .unwrap_or(key)
        .split('.')
        .filter(|s| !s.is_empty())
        .collect();
    if parts.is_empty() {
        return Err(fmt_err!("'valueKey' cannot be empty or just '$'"));
    }
    // Separate the final key from the path.
    // Traverse the objects, creating intermediary ones if necessary.
    if let Some((key_to_insert, path_to_parent)) = parts.split_last() {
        let mut current_level = data;
        for segment in path_to_parent {
            // Refuse to descend through non-objects (arrays, scalars).
            if !current_level.is_object() {
                return Err(fmt_err!("path segment '{segment}' does not resolve to an object."));
            }
            current_level = current_level
                .as_object_mut()
                .unwrap()
                .entry(segment.to_string())
                .or_insert(Value::Object(Map::new()));
        }
        // Upsert the new value
        if let Some(parent_obj) = current_level.as_object_mut() {
            parent_obj.insert(
                key_to_insert.to_string(),
                // Prefer parsing `value` as JSON; fall back to a plain JSON string.
                serde_json::from_str(value).unwrap_or_else(|_| Value::String(value.to_owned())),
            );
        } else {
            return Err(fmt_err!("final destination is not an object, cannot insert key."));
        }
    }
    Ok(())
}
/// Recursively traverses a `DynSolType` and reorders the fields of any
/// `CustomStruct` variants according to the provided `StructDefinitions`.
///
/// This is necessary because the EIP-712 resolver sorts struct fields alphabetically,
/// but we want to respect the order defined in the Solidity source code.
fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> Result<DynSolType> {
    match ty {
        DynSolType::CustomStruct { name, prop_names, tuple } => {
            if let Some(def) = struct_defs.get(&name)? {
                // The incoming `prop_names` and `tuple` are alphabetically sorted.
                let type_map: std::collections::HashMap<String, DynSolType> =
                    prop_names.into_iter().zip(tuple).collect();
                let mut sorted_props = Vec::with_capacity(def.len());
                let mut sorted_tuple = Vec::with_capacity(def.len());
                for (field_name, _) in def {
                    sorted_props.push(field_name.clone());
                    if let Some(field_ty) = type_map.get(field_name) {
                        // Recurse so nested structs are reordered too.
                        sorted_tuple.push(reorder_type(field_ty.clone(), struct_defs)?);
                    } else {
                        bail!(
                            "mismatch between struct definition and type description: field '{field_name}' not found in provided type for struct '{name}'"
                        );
                    }
                }
                Ok(DynSolType::CustomStruct { name, prop_names: sorted_props, tuple: sorted_tuple })
            } else {
                // No definition found, so we can't reorder. However, we still reorder its children
                // in case they have known structs.
                let new_tuple = tuple
                    .into_iter()
                    .map(|t| reorder_type(t, struct_defs))
                    .collect::<Result<Vec<_>>>()?;
                Ok(DynSolType::CustomStruct { name, prop_names, tuple: new_tuple })
            }
        }
        // Container types: reorder the element/member types in place.
        DynSolType::Array(inner) => {
            Ok(DynSolType::Array(Box::new(reorder_type(*inner, struct_defs)?)))
        }
        DynSolType::FixedArray(inner, len) => {
            Ok(DynSolType::FixedArray(Box::new(reorder_type(*inner, struct_defs)?), len))
        }
        DynSolType::Tuple(inner) => Ok(DynSolType::Tuple(
            inner.into_iter().map(|t| reorder_type(t, struct_defs)).collect::<Result<Vec<_>>>()?,
        )),
        // Elementary types have no fields to reorder.
        _ => Ok(ty),
    }
}
#[cfg(test)]
mod tests {
use super::*;
use alloy_primitives::FixedBytes;
use foundry_common::fmt::{TypeDefMap, serialize_value_as_json};
use proptest::{arbitrary::any, prop_oneof, strategy::Strategy};
use std::collections::HashSet;
    /// Returns `true` if `value` is usable in the round-trip property tests:
    /// rejects the literal string `"{}"`, tuples and custom structs, arrays with
    /// any invalid element, and values whose type cannot be determined.
    fn valid_value(value: &DynSolValue) -> bool {
        (match value {
            DynSolValue::String(s) if s == "{}" => false,
            DynSolValue::Tuple(_) | DynSolValue::CustomStruct { .. } => false,
            DynSolValue::Array(v) | DynSolValue::FixedArray(v) => v.iter().all(valid_value),
            _ => true,
        }) && value.as_type().is_some()
    }
    /// [DynSolValue::Bytes] of length 32 and 20 are converted to [DynSolValue::FixedBytes] and
    /// [DynSolValue::Address] respectively. Thus, we can't distinguish between address and bytes of
    /// length 20 during decoding. Because of that, there are issues with handling of arrays of
    /// those types.
    fn fixup_guessable(value: DynSolValue) -> DynSolValue {
        match value {
            DynSolValue::Array(mut v) | DynSolValue::FixedArray(mut v) => {
                // Drop 20- and 32-byte elements from byte arrays: guessing would turn them
                // into addresses / fixed bytes and change the array's element type.
                if let Some(DynSolValue::Bytes(_)) = v.first() {
                    v.retain(|v| {
                        let len = v.as_bytes().unwrap().len();
                        len != 32 && len != 20
                    })
                }
                DynSolValue::Array(v.into_iter().map(fixup_guessable).collect())
            }
            // Normalize fixed-bytes width to 32, the only width guessing produces.
            DynSolValue::FixedBytes(v, _) => DynSolValue::FixedBytes(v, 32),
            DynSolValue::Bytes(v) if v.len() == 32 => {
                DynSolValue::FixedBytes(FixedBytes::from_slice(&v), 32)
            }
            DynSolValue::Bytes(v) if v.len() == 20 => DynSolValue::Address(Address::from_slice(&v)),
            _ => value,
        }
    }
    /// Strategy producing arbitrary [DynSolValue]s that survive guess-based decoding
    /// (see [fixup_guessable] and [valid_value]).
    fn guessable_types() -> impl proptest::strategy::Strategy<Value = DynSolValue> {
        any::<DynSolValue>().prop_map(fixup_guessable).prop_filter("invalid value", valid_value)
    }
/// A proptest strategy for generating a (simple) `DynSolValue::CustomStruct`
/// and its corresponding `StructDefinitions` object.
fn custom_struct_strategy() -> impl Strategy<Value = (StructDefinitions, DynSolValue)> {
// Define a strategy for basic field names and values.
let field_name_strat = "[a-z]{4,12}";
let field_value_strat = prop_oneof![
any::<bool>().prop_map(DynSolValue::Bool),
any::<u32>().prop_map(|v| DynSolValue::Uint(U256::from(v), 256)),
any::<[u8; 20]>().prop_map(Address::from).prop_map(DynSolValue::Address),
any::<[u8; 32]>().prop_map(B256::from).prop_map(|b| DynSolValue::FixedBytes(b, 32)),
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/error.rs | crates/cheatcodes/src/error.rs | use crate::Vm;
use alloy_primitives::{Bytes, hex};
use alloy_signer::Error as SignerError;
use alloy_signer_local::LocalSignerError;
use alloy_sol_types::SolError;
use foundry_common::errors::FsPathError;
use foundry_config::UnresolvedEnvVarError;
use foundry_evm_core::backend::{BackendError, DatabaseError};
use foundry_wallets::error::WalletSignerError;
use k256::ecdsa::signature::Error as SignatureError;
use revm::context_interface::result::EVMError;
use std::{borrow::Cow, fmt};
/// Cheatcode result type.
///
/// Type alias with a default Ok type of [`Vec<u8>`], and default Err type of [`Error`].
pub type Result<T = Vec<u8>, E = Error> = std::result::Result<T, E>;
/// Creates a cheatcode [`Error`] from a plain string literal (stored without allocating),
/// a single expression (converted via `From`), or a format string with arguments.
macro_rules! fmt_err {
    // Literal-only message: `format_args!` yields a `&'static str`, so no allocation.
    ($msg:literal $(,)?) => {
        $crate::Error::fmt(::std::format_args!($msg))
    };
    // Single expression: converted with `From`, e.g. another error type or a `String`.
    ($err:expr $(,)?) => {
        <$crate::Error as ::std::convert::From<_>>::from($err)
    };
    // Format string plus arguments: formats into an owned `String`.
    ($fmt:expr, $($arg:tt)*) => {
        $crate::Error::fmt(::std::format_args!($fmt, $($arg)*))
    };
}
/// Early-returns `Err(fmt_err!(...))` from the enclosing function; accepts the same
/// argument forms as [`fmt_err!`].
macro_rules! bail {
    ($msg:literal $(,)?) => {
        return ::std::result::Result::Err(fmt_err!($msg))
    };
    ($err:expr $(,)?) => {
        return ::std::result::Result::Err(fmt_err!($err))
    };
    ($fmt:expr, $($arg:tt)*) => {
        return ::std::result::Result::Err(fmt_err!($fmt, $($arg)*))
    };
}
/// Early-returns an error when `$cond` is false — like `assert!`, but producing an `Err`
/// instead of panicking. Without a message, the stringified condition is reported.
macro_rules! ensure {
    // Bare condition: report `Condition failed: \`<cond>\``.
    ($cond:expr $(,)?) => {
        if !$cond {
            return ::std::result::Result::Err($crate::Error::custom(
                ::std::concat!("Condition failed: `", ::std::stringify!($cond), "`")
            ));
        }
    };
    // Condition with a literal message.
    ($cond:expr, $msg:literal $(,)?) => {
        if !$cond {
            return ::std::result::Result::Err(fmt_err!($msg));
        }
    };
    // Condition with an expression converted to an error.
    ($cond:expr, $err:expr $(,)?) => {
        if !$cond {
            return ::std::result::Result::Err(fmt_err!($err));
        }
    };
    // Condition with a format string and arguments.
    ($cond:expr, $fmt:expr, $($arg:tt)*) => {
        if !$cond {
            return ::std::result::Result::Err(fmt_err!($fmt, $($arg)*));
        }
    };
}
/// Error thrown by cheatcodes.
// This uses a custom repr to minimize the size of the error.
// The repr is basically `enum { Cow<'static, str>, Cow<'static, [u8]> }`
pub struct Error {
    /// If true, encode `data` as `Error(string)`, otherwise encode it directly as `bytes`.
    is_str: bool,
    /// Whether this was constructed from an owned byte vec, which means we have to drop the data
    /// in `impl Drop`.
    drop: bool,
    /// The error data. Always a valid pointer, and never modified.
    // When `drop` is true this points at a leaked `Box<[u8]>` owned by this value
    // (created in `new_string`/`new_vec`); otherwise it borrows `'static` data.
    data: *const [u8],
}
impl std::error::Error for Error {}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("Error::")?;
self.kind().fmt(f)
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.kind().fmt(f)
}
}
/// Kind of cheatcode errors.
///
/// Constructed by [`Error::kind`].
#[derive(Debug)]
pub enum ErrorKind<'a> {
/// A string error, ABI-encoded as `CheatcodeError(string)`.
String(&'a str),
/// A raw bytes error. Does not get encoded.
Bytes(&'a [u8]),
}
impl fmt::Display for ErrorKind<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Self::String(ss) => f.write_str(ss),
Self::Bytes(b) => f.write_str(&hex::encode_prefixed(b)),
}
}
}
impl Error {
    /// Creates a new error and ABI encodes it as `CheatcodeError(string)`; byte errors
    /// are returned as their raw bytes (see [`Self::abi_encode`]).
    pub fn encode(error: impl Into<Self>) -> Bytes {
        error.into().abi_encode().into()
    }
    /// Creates a new error with a custom message.
    pub fn display(msg: impl fmt::Display) -> Self {
        Self::fmt(format_args!("{msg}"))
    }
    /// Creates a new error with a custom [`fmt::Arguments`] message.
    pub fn fmt(args: fmt::Arguments<'_>) -> Self {
        // `as_str` is `Some` only for literal format strings with no arguments;
        // those are stored borrowed instead of allocating.
        match args.as_str() {
            Some(s) => Self::new_str(s),
            None => Self::new_string(std::fmt::format(args)),
        }
    }
    /// ABI-encodes this error as `CheatcodeError(string)` if the inner message is a string,
    /// otherwise returns the raw bytes.
    pub fn abi_encode(&self) -> Vec<u8> {
        match self.kind() {
            ErrorKind::String(string) => Vm::CheatcodeError { message: string.into() }.abi_encode(),
            ErrorKind::Bytes(bytes) => bytes.into(),
        }
    }
    /// Returns the kind of this error.
    pub fn kind(&self) -> ErrorKind<'_> {
        let data = self.data();
        if self.is_str {
            debug_assert!(std::str::from_utf8(data).is_ok());
            // SAFETY: `is_str` is only set by `new_str`/`new_string`, both of which start
            // from valid UTF-8 (`&str` / `String`), and `data` is never modified.
            ErrorKind::String(unsafe { std::str::from_utf8_unchecked(data) })
        } else {
            ErrorKind::Bytes(data)
        }
    }
    /// Returns the raw data of this error.
    pub fn data(&self) -> &[u8] {
        // SAFETY: `data` is always a valid, never-modified pointer (see field docs); it
        // either borrows `'static` data or owns an allocation freed only in `Drop`.
        unsafe { &*self.data }
    }
    /// Returns `true` if this error is a human-readable string.
    pub fn is_str(&self) -> bool {
        self.is_str
    }
    // Borrowed string: nothing to free on drop.
    fn new_str(data: &'static str) -> Self {
        Self::_new(true, false, data.as_bytes())
    }
    // Owned string: the allocation is leaked into `data` and reclaimed in `Drop`.
    fn new_string(data: String) -> Self {
        Self::_new(true, true, Box::into_raw(data.into_boxed_str().into_boxed_bytes()))
    }
    // Borrowed bytes: nothing to free on drop.
    fn new_bytes(data: &'static [u8]) -> Self {
        Self::_new(false, false, data)
    }
    // Owned bytes: the allocation is leaked into `data` and reclaimed in `Drop`.
    fn new_vec(data: Vec<u8>) -> Self {
        Self::_new(false, true, Box::into_raw(data.into_boxed_slice()))
    }
    fn _new(is_str: bool, drop: bool, data: *const [u8]) -> Self {
        debug_assert!(!data.is_null());
        Self { is_str, drop, data }
    }
}
impl Drop for Error {
    fn drop(&mut self) {
        if self.drop {
            // SAFETY: `drop` is only set by `new_string`/`new_vec`, which obtained `data`
            // via `Box::into_raw`, so reconstructing the box frees it exactly once.
            drop(unsafe { Box::<[u8]>::from_raw(self.data.cast_mut()) });
        }
    }
}
impl From<Cow<'static, str>> for Error {
    fn from(value: Cow<'static, str>) -> Self {
        // Preserve borrowed-ness: `'static` strings are stored without allocating.
        match value {
            Cow::Borrowed(str) => Self::new_str(str),
            Cow::Owned(string) => Self::new_string(string),
        }
    }
}
impl From<String> for Error {
fn from(value: String) -> Self {
Self::new_string(value)
}
}
impl From<&'static str> for Error {
fn from(value: &'static str) -> Self {
Self::new_str(value)
}
}
impl From<Cow<'static, [u8]>> for Error {
    fn from(value: Cow<'static, [u8]>) -> Self {
        // Preserve borrowed-ness: `'static` byte slices are stored without allocating.
        match value {
            Cow::Borrowed(bytes) => Self::new_bytes(bytes),
            Cow::Owned(vec) => Self::new_vec(vec),
        }
    }
}
impl From<&'static [u8]> for Error {
fn from(value: &'static [u8]) -> Self {
Self::new_bytes(value)
}
}
impl<const N: usize> From<&'static [u8; N]> for Error {
fn from(value: &'static [u8; N]) -> Self {
Self::new_bytes(value)
}
}
impl From<Vec<u8>> for Error {
fn from(value: Vec<u8>) -> Self {
Self::new_vec(value)
}
}
impl From<Bytes> for Error {
fn from(value: Bytes) -> Self {
Self::new_vec(value.into())
}
}
// So we can use `?` on `Result<_, Error>`.
macro_rules! impl_from {
($($t:ty),* $(,)?) => {$(
impl From<$t> for Error {
fn from(value: $t) -> Self {
Self::display(value)
}
}
)*};
}
impl_from!(
alloy_sol_types::Error,
alloy_dyn_abi::Error,
alloy_primitives::SignatureError,
alloy_consensus::crypto::RecoveryError,
FsPathError,
hex::FromHexError,
BackendError,
DatabaseError,
jsonpath_lib::JsonPathError,
serde_json::Error,
SignatureError,
std::io::Error,
std::num::TryFromIntError,
std::str::Utf8Error,
std::string::FromUtf8Error,
UnresolvedEnvVarError,
LocalSignerError,
SignerError,
WalletSignerError,
);
impl<T: Into<BackendError>> From<EVMError<T>> for Error {
fn from(err: EVMError<T>) -> Self {
Self::display(BackendError::from(err))
}
}
impl From<eyre::Report> for Error {
    fn from(err: eyre::Report) -> Self {
        // Flatten the full error chain into a single string so no context is lost.
        Self::new_string(foundry_common::errors::display_chain(&err))
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn encode() {
let error = Vm::CheatcodeError { message: "hello".into() }.abi_encode();
assert_eq!(Error::from("hello").abi_encode(), error);
assert_eq!(Error::encode("hello"), error);
assert_eq!(Error::from(b"hello").abi_encode(), b"hello");
assert_eq!(Error::encode(b"hello"), b"hello"[..]);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/utils.rs | crates/cheatcodes/src/utils.rs | //! Implementations of [`Utilities`](spec::Group::Utilities) cheatcodes.
use crate::{Cheatcode, Cheatcodes, CheatcodesExecutor, CheatsCtxt, Result, Vm::*};
use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, TypedData, eip712_parser::EncodeType};
use alloy_ens::namehash;
use alloy_primitives::{B64, Bytes, I256, U256, aliases::B32, keccak256, map::HashMap};
use alloy_rlp::{Decodable, Encodable};
use alloy_sol_types::SolValue;
use foundry_common::{TYPE_BINDING_PREFIX, fs};
use foundry_config::fs_permissions::FsAccessKind;
use foundry_evm_core::constants::DEFAULT_CREATE2_DEPLOYER;
use foundry_evm_fuzz::strategies::BoundMutator;
use proptest::prelude::Strategy;
use rand::{Rng, RngCore, seq::SliceRandom};
use revm::context::JournalTr;
use std::path::PathBuf;
/// Contains locations of traces ignored via cheatcodes.
///
/// The way we identify location in traces is by (node_idx, item_idx) tuple where node_idx is an
/// index of a call trace node, and item_idx is a value between 0 and `node.ordering.len()` where i
/// represents point after ith item, and 0 represents the beginning of the node trace.
#[derive(Debug, Default, Clone)]
pub struct IgnoredTraces {
/// Mapping from (start_node_idx, start_item_idx) to (end_node_idx, end_item_idx) representing
/// ranges of trace nodes to ignore.
pub ignored: HashMap<(usize, usize), (usize, usize)>,
/// Keeps track of (start_node_idx, start_item_idx) of the last `vm.pauseTracing` call.
pub last_pause_call: Option<(usize, usize)>,
}
impl Cheatcode for labelCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        // Record a human-readable label for the account, replacing any previous one.
        state.labels.insert(self.account, self.newLabel.clone());
        Ok(Default::default())
    }
}
impl Cheatcode for getLabelCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let account = &self.account;
        // Unlabeled accounts get a deterministic placeholder instead of an error.
        let encoded = state.labels.get(account).map_or_else(
            || format!("unlabeled:{account}").abi_encode(),
            |label| label.abi_encode(),
        );
        Ok(encoded)
    }
}
impl Cheatcode for computeCreateAddressCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        let Self { nonce, deployer } = self;
        // CREATE addresses derive from (deployer, nonce); on-chain nonces are 64-bit,
        // so reject anything larger before narrowing with `to()`.
        ensure!(*nonce <= U256::from(u64::MAX), "nonce must be less than 2^64");
        Ok(deployer.create(nonce.to()).abi_encode())
    }
}
impl Cheatcode for computeCreate2Address_0Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { salt, initCodeHash, deployer } = self;
Ok(deployer.create2(salt, initCodeHash).abi_encode())
}
}
impl Cheatcode for computeCreate2Address_1Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { salt, initCodeHash } = self;
Ok(DEFAULT_CREATE2_DEPLOYER.create2(salt, initCodeHash).abi_encode())
}
}
impl Cheatcode for ensNamehashCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { name } = self;
Ok(namehash(name).abi_encode())
}
}
impl Cheatcode for bound_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { current, min, max } = *self;
let Some(mutated) = U256::bound(current, min, max, state.test_runner()) else {
bail!("cannot bound {current} in [{min}, {max}] range")
};
Ok(mutated.abi_encode())
}
}
impl Cheatcode for bound_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { current, min, max } = *self;
let Some(mutated) = I256::bound(current, min, max, state.test_runner()) else {
bail!("cannot bound {current} in [{min}, {max}] range")
};
Ok(mutated.abi_encode())
}
}
impl Cheatcode for randomUint_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
random_uint(state, None, None)
}
}
impl Cheatcode for randomUint_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { min, max } = *self;
random_uint(state, None, Some((min, max)))
}
}
impl Cheatcode for randomUint_2Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { bits } = *self;
random_uint(state, Some(bits), None)
}
}
impl Cheatcode for randomAddressCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
Ok(DynSolValue::type_strategy(&DynSolType::Address)
.new_tree(state.test_runner())
.unwrap()
.current()
.abi_encode())
}
}
impl Cheatcode for randomInt_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
random_int(state, None)
}
}
impl Cheatcode for randomInt_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { bits } = *self;
random_int(state, Some(bits))
}
}
impl Cheatcode for randomBoolCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let rand_bool: bool = state.rng().random();
Ok(rand_bool.abi_encode())
}
}
impl Cheatcode for randomBytesCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { len } = *self;
ensure!(
len <= U256::from(usize::MAX),
format!("bytes length cannot exceed {}", usize::MAX)
);
let mut bytes = vec![0u8; len.to::<usize>()];
state.rng().fill_bytes(&mut bytes);
Ok(bytes.abi_encode())
}
}
impl Cheatcode for randomBytes4Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let rand_u32 = state.rng().next_u32();
Ok(B32::from(rand_u32).abi_encode())
}
}
impl Cheatcode for randomBytes8Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let rand_u64 = state.rng().next_u64();
Ok(B64::from(rand_u64).abi_encode())
}
}
impl Cheatcode for pauseTracingCall {
    fn apply_full(
        &self,
        ccx: &mut crate::CheatsCtxt,
        executor: &mut dyn CheatcodesExecutor,
    ) -> Result {
        let Some(tracer) = executor.tracing_inspector() else {
            // No tracer -> nothing to pause
            return Ok(Default::default());
        };
        // If paused earlier, ignore the call
        if ccx.state.ignored_traces.last_pause_call.is_some() {
            return Ok(Default::default());
        }
        // Remember the pause start as (node index, position within the node's ordering);
        // a later `resumeTracing` turns this into an ignored range.
        let cur_node = &tracer.traces().nodes().last().expect("no trace nodes");
        ccx.state.ignored_traces.last_pause_call = Some((cur_node.idx, cur_node.ordering.len()));
        Ok(Default::default())
    }
}
impl Cheatcode for resumeTracingCall {
    fn apply_full(
        &self,
        ccx: &mut crate::CheatsCtxt,
        executor: &mut dyn CheatcodesExecutor,
    ) -> Result {
        let Some(tracer) = executor.tracing_inspector() else {
            // No tracer -> nothing to unpause
            return Ok(Default::default());
        };
        // `take` clears the pending pause so a following pause/resume pair starts fresh.
        let Some(start) = ccx.state.ignored_traces.last_pause_call.take() else {
            // Nothing to unpause
            return Ok(Default::default());
        };
        // Close the ignored range at the current trace position.
        let node = &tracer.traces().nodes().last().expect("no trace nodes");
        ccx.state.ignored_traces.ignored.insert(start, (node.idx, node.ordering.len()));
        Ok(Default::default())
    }
}
impl Cheatcode for interceptInitcodeCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self {} = self;
        // Arm interception of the next CREATE; calling twice without a CREATE in
        // between is a user error.
        if state.intercept_next_create_call {
            bail!("vm.interceptInitcode() has already been called")
        }
        state.intercept_next_create_call = true;
        Ok(Default::default())
    }
}
impl Cheatcode for setArbitraryStorage_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { target } = self;
ccx.state.arbitrary_storage().mark_arbitrary(target, false);
Ok(Default::default())
}
}
impl Cheatcode for setArbitraryStorage_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { target, overwrite } = self;
ccx.state.arbitrary_storage().mark_arbitrary(target, *overwrite);
Ok(Default::default())
}
}
impl Cheatcode for copyStorageCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { from, to } = self;
        // Refuse to overwrite accounts marked for arbitrary (fuzzed) storage.
        ensure!(
            !ccx.state.has_arbitrary_storage(to),
            "target address cannot have arbitrary storage"
        );
        // NOTE(review): silently no-ops if either account fails to load — confirm this
        // best-effort behavior is intended rather than an error.
        if let Ok(from_account) = ccx.ecx.journaled_state.load_account(*from) {
            let from_storage = from_account.storage.clone();
            if ccx.ecx.journaled_state.load_account(*to).is_ok() {
                // SAFETY: We ensured the account was already loaded.
                ccx.ecx.journaled_state.state.get_mut(to).unwrap().storage = from_storage;
                // Keep the arbitrary-storage bookkeeping in sync with the copy.
                if let Some(arbitrary_storage) = &mut ccx.state.arbitrary_storage {
                    arbitrary_storage.mark_copy(from, to);
                }
            }
        }
        Ok(Default::default())
    }
}
impl Cheatcode for sortCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        // Sort a copy; the cheatcode never mutates its input in place.
        let mut sorted = self.array.clone();
        sorted.sort();
        Ok(sorted.abi_encode())
    }
}
impl Cheatcode for shuffleCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        // Shuffle a copy of the input using the cheatcode RNG.
        let mut shuffled = self.array.clone();
        shuffled.shuffle(state.rng());
        Ok(shuffled.abi_encode())
    }
}
impl Cheatcode for setSeedCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { seed } = self;
ccx.state.set_seed(*seed);
Ok(Default::default())
}
}
/// Helper to generate a random `uint` value (with given bits or bounded if specified)
/// from type strategy.
fn random_uint(state: &mut Cheatcodes, bits: Option<U256>, bounds: Option<(U256, U256)>) -> Result {
    if let Some(bits) = bits {
        // Generate random with specified bits.
        ensure!(bits <= U256::from(256), "number of bits cannot exceed 256");
        return Ok(DynSolValue::type_strategy(&DynSolType::Uint(bits.to::<usize>()))
            .new_tree(state.test_runner())
            .unwrap()
            .current()
            .abi_encode());
    }
    if let Some((min, max)) = bounds {
        ensure!(min <= max, "min must be less than or equal to max");
        // Generate random between range min..=max
        let exclusive_modulo = max - min;
        let mut random_number: U256 = state.rng().random();
        // When the range spans all of U256 (`max - min == U256::MAX`), every value is
        // already in range and `exclusive_modulo + 1` would overflow, so skip reduction.
        if exclusive_modulo != U256::MAX {
            let inclusive_modulo = exclusive_modulo + U256::from(1);
            // NOTE: modulo reduction is slightly biased towards lower values unless the
            // range size divides 2^256 evenly; acceptable for test-input randomness.
            random_number %= inclusive_modulo;
        }
        // `random_number <= max - min` here, so adding `min` cannot overflow.
        random_number += min;
        return Ok(random_number.abi_encode());
    }
    // Generate random `uint256` value.
    Ok(DynSolValue::type_strategy(&DynSolType::Uint(256))
        .new_tree(state.test_runner())
        .unwrap()
        .current()
        .abi_encode())
}
/// Helper to generate a random `int` value (with given bits if specified) from type strategy.
fn random_int(state: &mut Cheatcodes, bits: Option<U256>) -> Result {
    // Default to a full-width 256-bit signed integer.
    let bits = bits.unwrap_or(U256::from(256));
    ensure!(bits <= U256::from(256), "number of bits cannot exceed 256");
    Ok(DynSolValue::type_strategy(&DynSolType::Int(bits.to::<usize>()))
        .new_tree(state.test_runner())
        .unwrap()
        .current()
        .abi_encode())
}
impl Cheatcode for eip712HashType_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        // Resolve the canonical type definition (from an inline definition or a
        // named binding) and hash it to get the EIP-712 type hash.
        let Self { typeNameOrDefinition } = self;
        let canonical = get_canonical_type_def(typeNameOrDefinition, state, None)?;
        Ok(keccak256(canonical.as_bytes()).to_vec())
    }
}
impl Cheatcode for eip712HashType_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        // Look the type up in an explicit bindings file and hash its canonical
        // definition. The path is validated against the fs access policy first.
        let Self { bindingsPath, typeName } = self;
        let allowed_path = state.config.ensure_path_allowed(bindingsPath, FsAccessKind::Read)?;
        let canonical = get_type_def_from_bindings(typeName, allowed_path, &state.config.root)?;
        Ok(keccak256(canonical.as_bytes()).to_vec())
    }
}
impl Cheatcode for eip712HashStruct_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { typeNameOrDefinition, abiEncodedData } = self;
        let type_def = get_canonical_type_def(typeNameOrDefinition, state, None)?;
        // The primary type name is everything before the first '(' of the
        // canonical definition (the whole string if there is none).
        let primary = type_def.split('(').next().unwrap_or(&type_def);
        get_struct_hash(primary, &type_def, abiEncodedData)
    }
}
impl Cheatcode for eip712HashStruct_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        // Same as the 0-arg variant, but the type definition comes from an
        // explicit bindings file (path validated against the fs access policy).
        let Self { bindingsPath, typeName, abiEncodedData } = self;
        let allowed_path = state.config.ensure_path_allowed(bindingsPath, FsAccessKind::Read)?;
        let type_def = get_type_def_from_bindings(typeName, allowed_path, &state.config.root)?;
        get_struct_hash(typeName, &type_def, abiEncodedData)
    }
}
impl Cheatcode for eip712HashTypedDataCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        // Parse a full EIP-712 typed-data JSON payload and return its signing digest.
        let Self { jsonData } = self;
        let typed: TypedData = serde_json::from_str(jsonData)?;
        Ok(typed.eip712_signing_hash()?.to_vec())
    }
}
/// Returns the EIP-712 canonical type definition from the provided string.
///
/// If the input contains `'('` it is treated as an inline type definition and
/// canonicalized; otherwise it is treated as a type name and looked up in the
/// bindings file generated by `forge bind-json` (or in `path`, when given).
fn get_canonical_type_def(
    name_or_def: &String,
    state: &mut Cheatcodes,
    path: Option<PathBuf>,
) -> Result<String> {
    if name_or_def.contains('(') {
        // Inline definition: parse and canonicalize it directly.
        return Ok(EncodeType::parse(name_or_def).and_then(|parsed| parsed.canonicalize())?);
    }
    // Type name: resolve via the bindings file, honoring the fs access policy.
    let bindings_path = path.as_ref().unwrap_or(&state.config.bind_json_path);
    let bindings_path = state.config.ensure_path_allowed(bindings_path, FsAccessKind::Read)?;
    get_type_def_from_bindings(name_or_def, bindings_path, &state.config.root)
}
/// Returns the EIP-712 type definition for `name` from the bindings file at `path`.
///
/// Assumes read access to `path` has already been validated. On a miss, the
/// error lists every binding found in the file.
fn get_type_def_from_bindings(name: &String, path: PathBuf, root: &PathBuf) -> Result<String> {
    let content = fs::read_to_string(&path)?;

    // Collect `<prefix><name> = "<def>";` lines into a name -> definition map,
    // silently skipping lines that don't match the expected shape.
    let mut type_defs: HashMap<&str, &str> = HashMap::new();
    for line in content.lines() {
        let Some(rest) = line.trim().strip_prefix(TYPE_BINDING_PREFIX) else { continue };
        let Some((key, raw_def)) = rest.split_once('=') else { continue };
        let Some(def) = raw_def.trim().strip_prefix('"').and_then(|d| d.strip_suffix("\";"))
        else {
            continue;
        };
        type_defs.insert(key.trim(), def);
    }

    if let Some(def) = type_defs.get(name.as_str()) {
        return Ok((*def).to_string());
    }

    let bindings =
        type_defs.keys().map(|k| format!(" - {k}")).collect::<Vec<String>>().join("\n");
    bail!(
        "'{}' not found in '{}'.{}",
        name,
        path.strip_prefix(root).unwrap_or(&path).to_string_lossy(),
        if bindings.is_empty() {
            String::new()
        } else {
            format!("\nAvailable bindings:\n{bindings}\n")
        }
    )
}
/// Returns the EIP-712 struct hash for the given primary type name, canonical
/// type definition, and ABI-encoded struct data.
///
/// Pipeline: ingest the definition into a `Resolver`, resolve the primary type,
/// ABI-decode the data, EIP-712-encode it, then hash `typeHash || encodeData`.
fn get_struct_hash(primary: &str, type_def: &String, abi_encoded_data: &Bytes) -> Result {
    let mut resolver = Resolver::default();

    // Teach the resolver the canonical type definition so it can resolve the
    // primary type (and any nested struct types) below.
    resolver
        .ingest_string(type_def)
        .map_err(|e| fmt_err!("Resolver failed to ingest type definition: {e}"))?;
    let sol_type = resolver
        .resolve(primary)
        .map_err(|e| fmt_err!("Failed to resolve EIP-712 primary type '{primary}': {e}"))?;

    // ABI-decode the raw bytes into the resolved value (a `CustomStruct`).
    let sol_value = sol_type.abi_decode(abi_encoded_data.as_ref()).map_err(|e| {
        fmt_err!("Failed to ABI decode using resolved_sol_type directly for '{primary}': {e}.")
    })?;

    // EIP-712 `encodeData` of the decoded value.
    let encoded_data: Vec<u8> = resolver
        .encode_data(&sol_value)
        .map_err(|e| fmt_err!("Failed to EIP-712 encode data for struct '{primary}': {e}"))?
        .ok_or_else(|| fmt_err!("EIP-712 data encoding returned 'None' for struct '{primary}'"))?;

    // `typeHash` of the primary type.
    let type_hash = resolver
        .type_hash(primary)
        .map_err(|e| fmt_err!("Failed to compute typeHash for EIP712 type '{primary}': {e}"))?;

    // structHash = keccak256(typeHash || encodeData).
    let preimage = [type_hash.as_slice(), encoded_data.as_slice()].concat();
    Ok(keccak256(&preimage).to_vec())
}
impl Cheatcode for toRlpCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        // RLP-encode the input and return the encoding as ABI-encoded bytes.
        let Self { data } = self;
        let mut encoded = Vec::new();
        data.encode(&mut encoded);
        Ok(Bytes::from(encoded).abi_encode())
    }
}
impl Cheatcode for fromRlpCall {
    fn apply(&self, _state: &mut Cheatcodes) -> Result {
        // RLP-decode the input into a list of byte strings and ABI-encode it.
        let Self { rlp } = self;
        let mut buf = rlp.as_ref();
        let items = Vec::<Bytes>::decode(&mut buf)
            .map_err(|e| fmt_err!("Failed to decode RLP: {e}"))?;
        Ok(items.abi_encode())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.