repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/cli/shared.rs | forc-plugins/forc-migrate/src/cli/shared.rs | use std::path::PathBuf;
use anyhow::{bail, Ok, Result};
use clap::Parser;
use forc_pkg::{self as pkg, PackageManifestFile};
use forc_pkg::{
manifest::{GenericManifestFile, ManifestFile},
source::IPFSNode,
};
use forc_tracing::println_action_green;
use sway_core::{BuildTarget, Engines};
use sway_error::diagnostic::*;
use sway_features::{ExperimentalFeatures, Feature};
use sway_types::SourceEngine;
use crate::migrations::{MigrationStepKind, MigrationStepsWithOccurrences, Occurrence};
use crate::{
instructive_error,
migrations::{MigrationStep, MigrationStepExecution, ProgramInfo},
};
/// Args that can be shared between all commands that `compile` a package. E.g. `check`, `run`.
#[derive(Debug, Default, Parser)]
pub(crate) struct Compile {
    /// Path to the project.
    ///
    /// If not specified, current working directory will be used.
    #[clap(short, long)]
    pub path: Option<String>,
    /// Offline mode, prevents Forc from using the network when managing dependencies.
    /// Meaning it will only try to use previously downloaded dependencies.
    #[clap(long = "offline")]
    pub offline: bool,
    /// Requires that the Forc.lock file is up-to-date. If the lock file is missing, or it
    /// needs to be updated, Forc will exit with an error.
    #[clap(long)]
    pub locked: bool,
    /// The IPFS Node to use for fetching IPFS sources.
    ///
    /// Possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>
    #[clap(long)]
    pub ipfs_node: Option<IPFSNode>,
    // Experimental feature flags shared with the compiler
    // (`--experimental`/`--no-experimental`).
    #[clap(flatten)]
    pub experimental: sway_features::CliFields,
    /// Silent mode. Don't output any warnings or errors to the command line.
    #[clap(long = "silent", short = 's')]
    pub silent: bool,
}
impl Compile {
    /// Returns the [Compile::path] if provided, otherwise the current directory.
    pub(crate) fn manifest_dir(&self) -> std::io::Result<PathBuf> {
        // `std::result::Result::Ok` is spelled out because anyhow's `Ok` is
        // imported at the top of this file.
        match &self.path {
            Some(path) => std::result::Result::Ok(PathBuf::from(path)),
            None => std::env::current_dir(),
        }
    }
    /// Returns the cumulative [ExperimentalFeatures], from the package manifest
    /// file and the CLI experimental flag.
    pub(crate) fn experimental_features(&self) -> Result<ExperimentalFeatures> {
        let manifest = ManifestFile::from_dir(self.manifest_dir()?)?;
        let pkg_manifest = get_pkg_manifest_file(&manifest)?;
        // Return the mapped result directly instead of the redundant
        // `Ok(<expr>?)` wrapping (clippy: `needless_question_mark`).
        ExperimentalFeatures::new(
            &pkg_manifest.project.experimental,
            &self.experimental.experimental,
            &self.experimental.no_experimental,
        )
        .map_err(|err| anyhow::anyhow!("{err}"))
    }
}
// Clippy issue. It erroneously assumes that `vec!`s in `instructive_error` calls are not needed.
#[allow(clippy::useless_vec)]
/// Returns the package manifest behind `manifest`, or an instructive error
/// if `manifest` is a workspace — `forc migrate` only supports single packages.
fn get_pkg_manifest_file(manifest: &ManifestFile) -> Result<&PackageManifestFile> {
    match manifest {
        ManifestFile::Package(pkg_manifest) => Ok(pkg_manifest),
        ManifestFile::Workspace(_) => Err(anyhow::anyhow!(instructive_error(
            "`forc migrate` does not support migrating workspaces.",
            &vec![
                &format!("\"{}\" is a workspace.", manifest.dir().to_string_lossy()),
                "Please migrate each workspace member individually.",
            ]
        ))),
    }
}
// Clippy issue. It erroneously assumes that `vec!`s in `instructive_error` calls are not needed.
#[allow(clippy::useless_vec)]
/// Type-checks the package located at `build_instructions.manifest_dir()` and
/// returns its [ProgramInfo] (package name, lexed and typed programs, engines).
///
/// Tests are always included in the check, because migrations must migrate
/// test code as well. Fails with an instructive error if the project is a
/// workspace or if it cannot be compiled.
pub(crate) fn compile_package<'a>(
    engines: &'a Engines,
    build_instructions: &Compile,
) -> Result<ProgramInfo<'a>> {
    let manifest_dir = build_instructions.manifest_dir()?;
    let manifest = ManifestFile::from_dir(manifest_dir.clone())?;
    let pkg_manifest = get_pkg_manifest_file(&manifest)?;
    let pkg_name = pkg_manifest.project_name();
    println_action_green(
        "Compiling",
        &format!("{} ({})", pkg_name, manifest.dir().to_string_lossy()),
    );
    let member_manifests = manifest.member_manifests()?;
    let lock_path = manifest.lock_path()?;
    let ipfs_node = build_instructions.ipfs_node.clone().unwrap_or_default();
    let plan = pkg::BuildPlan::from_lock_and_manifests(
        &lock_path,
        &member_manifests,
        build_instructions.locked,
        build_instructions.offline,
        &ipfs_node,
    )?;
    let include_tests = true; // We want to migrate the tests as well.
    let mut compile_results = pkg::check(
        &plan,
        BuildTarget::default(),
        build_instructions.silent,
        None,
        include_tests,
        engines,
        None,
        &build_instructions.experimental.experimental,
        &build_instructions.experimental.no_experimental,
        sway_core::DbgGeneration::Full,
    )?;
    // The last entry of the check results belongs to the package itself.
    // Treat both compilation errors and a missing result as a failed compilation.
    let Some(programs) =
        compile_results
            .pop()
            .and_then(|(programs, handler)| if handler.has_errors() { None } else { programs })
    else {
        bail!(instructive_compilation_error(
            &pkg_manifest.path().to_string_lossy()
        ));
    };
    // Without a typed program the project did not fully type-check.
    let core::result::Result::Ok(ty_program) = programs.typed else {
        bail!(instructive_compilation_error(
            &pkg_manifest.path().to_string_lossy()
        ));
    };
    return Ok(ProgramInfo {
        pkg_name: pkg_name.to_string(),
        lexed_program: programs.lexed,
        ty_program,
        engines,
    });

    // Builds the instructive error message shown when the project cannot be compiled.
    fn instructive_compilation_error(manifest_dir: &str) -> String {
        instructive_error("The Sway project cannot be compiled.", &vec![
            &format!("`forc migrate` could not compile the Sway project located at \"{manifest_dir}\"."),
            "To see the compilation errors, run `forc build` on the project.",
            "Did you maybe forget to specify experimental features?",
            "If the project uses experimental features, they need to be specified when running `forc migrate`.",
            "E.g.:",
            " forc migrate run --experimental <feature_1>,<feature_2>",
        ])
    }
}
/// Message shown when no occurrences requiring migration are found in the project.
pub(crate) const PROJECT_IS_COMPATIBLE: &str =
    "Project is compatible with the next breaking change version of Sway";
/// Prints the summary of `features_and_migration_steps`: each feature, its
/// migration steps with their execution kind (`[M]`anual, `[S]`emiautomatic,
/// `[A]`utomatic), and — when occurrence counts are available for *all*
/// steps — a rough migration-effort estimate per step and in total.
pub(crate) fn print_features_and_migration_steps(
    features_and_migration_steps: MigrationStepsWithOccurrences,
) {
    // The effort can be shown only if every step comes with an occurrence count.
    let show_migration_effort = features_and_migration_steps
        .iter()
        .flat_map(|(_, steps)| steps.iter().map(|step| step.1))
        .all(|occurrences| occurrences.is_some());
    let mut total_migration_effort = 0;
    for (feature, migration_steps) in features_and_migration_steps {
        println!("{}", feature.name());
        for (migration_step, occurrence) in migration_steps.iter() {
            println!(
                " {} {}",
                match migration_step.execution() {
                    MigrationStepExecution::Manual => "[M]",
                    MigrationStepExecution::Semiautomatic => "[S]",
                    MigrationStepExecution::Automatic => "[A]",
                },
                migration_step.title
            );
            if show_migration_effort {
                let count = occurrence
                    .expect("if the `show_migration_effort` is true, all occurrences are `Some`");
                // For automatic steps **that have occurrences**, plan ~10 minutes
                // for the review of the automatically changed code.
                let migration_effort_in_mins = if migration_step.duration == 0 && count > 0 {
                    10
                } else {
                    // Otherwise, a very simple linear calculation will give
                    // a decent and useful rough estimate.
                    count * migration_step.duration
                };
                println!(
                    " Occurrences: {count:>5} Migration effort (hh::mm): ~{}\n",
                    duration_to_str(migration_effort_in_mins)
                );
                total_migration_effort += migration_effort_in_mins;
            }
        }
        if !show_migration_effort {
            println!();
        }
    }
    if show_migration_effort {
        println!(
            "Total migration effort (hh::mm): ~{}",
            duration_to_str(total_migration_effort)
        );
        // If there are no occurrences in code that require migration,
        // inform that the project is compatible with the next breaking change version of Sway.
        let num_of_occurrences = features_and_migration_steps
            .iter()
            .flat_map(|(_, steps)| steps.iter().map(|step| step.1.unwrap_or(0)))
            .sum::<usize>();
        if num_of_occurrences == 0 {
            println!();
            println!("{PROJECT_IS_COMPATIBLE}.");
        }
    }
}
/// Creates a single migration [Diagnostic] that shows **all the occurrences** in code
/// that require migration effort expected by the `migration_step`.
///
/// Returns `None` if the migration step is not necessary, in other words, if there
/// are no occurrences in code that require this particular migration.
pub(crate) fn create_migration_diagnostic(
    source_engine: &SourceEngine,
    feature: &Feature,
    migration_step: &MigrationStep,
    occurrences: &[Occurrence],
) -> Option<Diagnostic> {
    if occurrences.is_empty() {
        return None;
    }
    let description = format!("[{}] {}", feature.name(), migration_step.title);
    Some(Diagnostic {
        reason: Some(Reason::new(Code::migrations(1), description)),
        // The first occurrence becomes the main issue; all others become hints.
        issue: Issue::info(
            source_engine,
            occurrences[0].span.clone(),
            occurrences[0].msg_or_empty(),
        ),
        hints: occurrences
            .iter()
            .skip(1)
            .map(|occurrence| {
                Hint::info(
                    source_engine,
                    occurrence.span.clone(),
                    occurrence.msg_or_empty(),
                )
            })
            .collect(),
        // Help text: the step's own help lines, then kind-specific notes,
        // and finally the pointer to the detailed migration guide.
        help: migration_step
            .help
            .iter()
            .map(|help| help.to_string())
            .chain(if migration_step.help.is_empty() {
                vec![]
            } else {
                vec![Diagnostic::help_empty_line()]
            })
            .chain(match migration_step.kind {
                MigrationStepKind::Instruction(_) => vec![],
                MigrationStepKind::CodeModification(_, [], _) => vec![],
                MigrationStepKind::CodeModification(_, manual_migration_actions, _) => {
                    get_manual_migration_actions_help(manual_migration_actions)
                }
                MigrationStepKind::Interaction(_, _, [], _) => vec![
                    "This migration step will interactively modify the code, based on your input."
                        .to_string(),
                    Diagnostic::help_empty_line(),
                ],
                MigrationStepKind::Interaction(_, _, manual_migration_actions, _) => vec![
                    "This migration step will interactively modify the code, based on your input."
                        .to_string(),
                    Diagnostic::help_empty_line(),
                ]
                .into_iter()
                .chain(get_manual_migration_actions_help(manual_migration_actions))
                .collect(),
            })
            .chain(vec![detailed_migration_guide_msg(feature)])
            .collect(),
    })
}
/// Builds the help lines that list the manual actions still required after
/// the migration, terminated by an empty help line.
fn get_manual_migration_actions_help(manual_migration_actions: &[&str]) -> Vec<String> {
    let mut help = Vec::with_capacity(manual_migration_actions.len() + 2);
    help.push("After the migration, you will still need to:".to_string());
    for action in manual_migration_actions {
        help.push(format!("- {action}"));
    }
    help.push(Diagnostic::help_empty_line());
    help
}
/// Returns the standard pointer to the detailed migration guide of `feature`.
pub(crate) fn detailed_migration_guide_msg(feature: &Feature) -> String {
    let url = feature.url();
    format!("For a detailed migration guide see: {url}")
}
/// Formats `duration_in_mins` as a zero-padded `hh:mm` string, e.g., `01:15`.
///
/// Hours are not capped at 24 — e.g., 1500 minutes render as `25:00`.
fn duration_to_str(duration_in_mins: usize) -> String {
    let hours = duration_in_mins / 60;
    let minutes = duration_in_mins % 60;
    // `{:02}` zero-pads to two digits. The previous `{:#02}` additionally
    // carried the `#` (alternate form) flag, which is a no-op for integer
    // `Display` and only obscured the intent.
    format!("{hours:02}:{minutes:02}")
}
/// Returns the length of the longest feature name in `features`,
/// or zero if `features` is empty.
pub(crate) fn max_feature_name_len<T>(features: &[(Feature, T)]) -> usize {
    let mut max_len = 0;
    for (feature, _) in features {
        max_len = max_len.max(feature.name().len());
    }
    max_len
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/cli/commands/check.rs | forc-plugins/forc-migrate/src/cli/commands/check.rs | use clap::Parser;
use crate::{
cli::{
self,
shared::{
compile_package, create_migration_diagnostic, print_features_and_migration_steps,
},
},
get_migration_steps_or_return,
migrations::{DryRun, MigrationStepKind},
};
use anyhow::{Ok, Result};
use forc_util::format_diagnostic;
use itertools::Itertools;
use sway_core::Engines;
// CLI usage examples rendered into the `forc migrate check --help` output.
forc_util::cli_examples! {
    crate::cli::Opt {
        [ Check the project in the current path => "forc migrate check"]
        [ Check the project located in another path => "forc migrate check --path {path}" ]
    }
}
/// Check the project for code that needs to be migrated.
///
/// Dry-runs the migration steps and prints places in code that need to be reviewed or changed.
#[derive(Debug, Parser)]
pub(crate) struct Command {
    // Shared compilation args (path, offline, locked, IPFS node, experimental flags).
    #[clap(flatten)]
    pub check: cli::shared::Compile,
}
/// Entry point of `forc migrate check`.
///
/// Compiles the package, dry-runs all migration steps, prints the found
/// occurrences as diagnostics, and finally prints the migration-effort summary.
pub(crate) fn exec(command: Command) -> Result<()> {
    let migration_steps = get_migration_steps_or_return!();
    let engines = Engines::default();
    let build_instructions = command.check;
    let mut program_info = compile_package(&engines, &build_instructions)?;
    // Dry-run all the migration steps.
    let mut check_result = vec![];
    for (feature, migration_steps) in migration_steps.iter() {
        for migration_step in migration_steps.iter() {
            let migration_point_spans = match migration_step.kind {
                MigrationStepKind::Instruction(instruction) => instruction(&program_info)?,
                MigrationStepKind::CodeModification(modification, ..) => {
                    // Dry-run: the code is analyzed but never written back to disk.
                    modification(&mut program_info.as_mut(), DryRun::Yes)?
                }
                MigrationStepKind::Interaction(instruction, ..) => instruction(&program_info)?,
            };
            check_result.push((feature, migration_step, migration_point_spans));
        }
    }
    // For every migration step, display the found occurrences in code that require migration effort, if any.
    for (feature, migration_step, occurrences_spans) in check_result.iter() {
        if let Some(diagnostic) =
            create_migration_diagnostic(engines.se(), feature, migration_step, occurrences_spans)
        {
            format_diagnostic(&diagnostic);
        }
    }
    // Display the summary of the migration effort.
    // Group the steps by feature, attaching the occurrence count to each step.
    let features_and_migration_steps = check_result
        .iter()
        .chunk_by(|(feature, _, _)| feature)
        .into_iter()
        .map(|(key, chunk)| {
            (
                **key,
                chunk
                    .map(|(_, migration_step, migration_point_spans)| {
                        (*migration_step, Some(migration_point_spans.len()))
                    })
                    .collect::<Vec<_>>(),
            )
        })
        .collect::<Vec<_>>();
    println!("Migration effort:");
    println!();
    print_features_and_migration_steps(&features_and_migration_steps);
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/cli/commands/mod.rs | forc-plugins/forc-migrate/src/cli/commands/mod.rs | pub(crate) mod check;
pub(crate) mod run;
pub(crate) mod show;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/cli/commands/run.rs | forc-plugins/forc-migrate/src/cli/commands/run.rs | use std::{
collections::HashSet,
path::{Path, PathBuf},
};
use anyhow::{bail, Ok, Result};
use clap::Parser;
use forc_tracing::{println_action_green, println_action_yellow, println_yellow_bold};
use forc_util::{format_diagnostic, fs_locking::is_file_dirty};
use itertools::Itertools;
use sway_ast::Module;
use sway_core::{
language::lexed::{LexedModule, LexedProgram},
Engines,
};
use sway_error::formatting::*;
use sway_features::{ExperimentalFeatures, Feature};
use sway_types::{SourceEngine, Span};
use swayfmt::Formatter;
use crate::{
cli::{
self,
shared::{
compile_package, create_migration_diagnostic, detailed_migration_guide_msg,
max_feature_name_len, PROJECT_IS_COMPATIBLE,
},
},
get_migration_steps_or_return, instructive_error,
migrations::{
ContinueMigrationProcess, DryRun, InteractionResponse, MigrationStep, MigrationStepKind,
MigrationSteps, Occurrence, ProgramInfo,
},
};
// CLI usage examples rendered into the `forc migrate run --help` output.
forc_util::cli_examples! {
    crate::cli::Opt {
        [ Migrate the project in the current path => "forc migrate run"]
        [ Migrate the project located in another path => "forc migrate run --path {path}" ]
        [ Migrate the project offline without downloading any dependencies => "forc migrate run --offline" ]
    }
}
/// Migrate the project.
///
/// Runs the migration steps and guides you through the migration process.
#[derive(Debug, Parser)]
pub(crate) struct Command {
    // Shared compilation args (path, offline, locked, IPFS node, experimental flags).
    #[clap(flatten)]
    pub run: cli::shared::Compile,
}
/// Contains information about lexed [Module]s that are modified
/// during a migration step.
struct ModifiedModules<'a> {
    source_engine: &'a SourceEngine,
    // Deduplicated paths of all source files touched by the migration step.
    modified_modules_paths: HashSet<PathBuf>,
}
impl<'a> ModifiedModules<'a> {
    /// Collects the paths of all modules that at least one of the
    /// `occurrences_spans` points into, skipping autogenerated sources.
    fn new(source_engine: &'a SourceEngine, occurrences_spans: &[Span]) -> Self {
        Self {
            source_engine,
            modified_modules_paths: occurrences_spans
                .iter()
                .filter_map(|span| span.source_id().copied())
                .filter(|source_id| !source_engine.is_source_id_autogenerated(source_id))
                .map(|source_id| source_engine.get_path(&source_id))
                .collect(),
        }
    }
    /// Returns the `module`s path, if the `module` was modified.
    fn get_path_if_modified(&self, module: &Module) -> Option<PathBuf> {
        module.source_id().and_then(|source_id| {
            let path = self.source_engine.get_path(&source_id);
            if self.modified_modules_paths.contains(&path) {
                Some(path)
            } else {
                None
            }
        })
    }
    /// Returns the paths of modified modules, that are at the same
    /// time marked as "dirty", means in-use by some other programs
    /// like IDEs.
    fn get_dirty_modified_modules_paths(&self) -> Vec<&PathBuf> {
        self.modified_modules_paths
            .iter()
            .filter(|path| is_file_dirty(path))
            .collect()
    }
}
/// Entry point of `forc migrate run`.
///
/// Compiles the package, executes the migration steps, writes changed modules
/// back to disk, and guides the developer through the manual parts of the
/// migration process.
pub(crate) fn exec(command: Command) -> Result<()> {
    let migration_steps = get_migration_steps_or_return!();
    let engines = Engines::default();
    let build_instructions = command.run;
    let experimental = build_instructions.experimental_features()?;
    let mut program_info = compile_package(&engines, &build_instructions)?;
    // For migrations, we go with the following workflow.
    // We have three possible situations:
    //  - we skip a migration step if it doesn't have any occurrences in code.
    //    We say that the step is *checked*.
    //  - we *check* an instruction migration step if it does have occurrences in code.
    //    We print those occurrences.
    //  - we *migrate* a code transformation step if it does have changes in code.
    //    We rewrite original code files with the changed code.
    //    We print just the number of the applied transformations.
    //
    // Skipping (checked) and checking will move to the next migration step.
    //
    // Migrating will stop the further execution of migration steps **if there are manual migration actions**
    // to be done by developers. In that case, it will ask for manual action and instruct developers to review
    // the changes before continuing migration.
    //
    // Migrating **without manual migration actions** will move to the next migration step **in the same feature**.
    // If that was the last migration step in the feature, the migration will stop, and instruct the developer
    // to review the migrations done in that feature, before continuing to migrate the next experimental feature.
    print_migrating_action(migration_steps);
    let max_len = max_feature_name_len(migration_steps);
    let last_migration_feature = migration_steps
        .last()
        .expect(
            "`get_migration_steps_or_return!` guarantees that the `migration_steps` are not empty",
        )
        .0;
    let mut current_feature_migration_has_code_changes = false;
    let mut num_of_postponed_steps = 0;
    for (feature, migration_steps) in migration_steps.iter() {
        for migration_step in migration_steps.iter() {
            match migration_step.kind {
                MigrationStepKind::Instruction(instruction) => {
                    let occurrences = instruction(&program_info)?;
                    print_instruction_result(
                        &engines,
                        max_len,
                        feature,
                        migration_step,
                        &occurrences,
                    );
                    if !occurrences.is_empty() {
                        println_yellow_bold("If you've already reviewed the above points, you can ignore this info.");
                    }
                }
                MigrationStepKind::CodeModification(
                    modification,
                    manual_migration_actions,
                    continue_migration_process,
                ) => {
                    // A real run (not a dry-run): apply the modification and
                    // write the affected modules back to their files.
                    let occurrences = modification(&mut program_info.as_mut(), DryRun::No)?;
                    output_modified_modules(
                        &build_instructions.manifest_dir()?,
                        &program_info,
                        &occurrences,
                        experimental,
                    )?;
                    let stop_migration_process = print_modification_result(
                        max_len,
                        feature,
                        migration_step,
                        manual_migration_actions,
                        continue_migration_process,
                        &occurrences,
                        InteractionResponse::None,
                        &mut current_feature_migration_has_code_changes,
                    );
                    if stop_migration_process == StopMigrationProcess::Yes {
                        return Ok(());
                    }
                }
                MigrationStepKind::Interaction(
                    instruction,
                    interaction,
                    manual_migration_actions,
                    continue_migration_process,
                ) => {
                    let instruction_occurrences_spans = instruction(&program_info)?;
                    print_instruction_result(
                        &engines,
                        max_len,
                        feature,
                        migration_step,
                        &instruction_occurrences_spans,
                    );
                    // We have occurrences, let's continue with the interaction.
                    if !instruction_occurrences_spans.is_empty() {
                        let (interaction_response, interaction_occurrences_spans) =
                            interaction(&mut program_info.as_mut())?;
                        if interaction_response == InteractionResponse::PostponeStep {
                            num_of_postponed_steps += 1;
                        }
                        output_modified_modules(
                            &build_instructions.manifest_dir()?,
                            &program_info,
                            &interaction_occurrences_spans,
                            experimental,
                        )?;
                        let stop_migration_process = print_modification_result(
                            max_len,
                            feature,
                            migration_step,
                            manual_migration_actions,
                            continue_migration_process,
                            &interaction_occurrences_spans,
                            interaction_response,
                            &mut current_feature_migration_has_code_changes,
                        );
                        if stop_migration_process == StopMigrationProcess::Yes {
                            return Ok(());
                        }
                    }
                }
            };
        }
        // If there were code changes and this is not the last feature,
        // stop for a review before continuing with the next feature.
        if current_feature_migration_has_code_changes {
            if *feature == last_migration_feature {
                print_migration_finished_action(num_of_postponed_steps);
            } else {
                print_continue_migration_action("Review the changed code");
            }
            return Ok(());
        }
    }
    // We've run through all the migration steps.
    // Print the confirmation message, even if there were maybe infos
    // displayed for manual reviews.
    print_migration_finished_action(num_of_postponed_steps);
    Ok(())
}
/// Tells whether the migration process should stop after the current step,
/// e.g., to let the developer review changes or perform manual actions.
#[derive(PartialEq, Eq)]
enum StopMigrationProcess {
    Yes,
    No,
}
#[allow(clippy::too_many_arguments)]
/// Prints the outcome of a code-modifying migration step and decides whether
/// the migration process should stop for a developer review.
///
/// Sets `current_feature_migration_has_code_changes` to `true` when the code
/// was changed and the process may continue within the same feature.
fn print_modification_result(
    max_len: usize,
    feature: &Feature,
    migration_step: &MigrationStep,
    manual_migration_actions: &[&str],
    continue_migration_process: ContinueMigrationProcess,
    occurrences: &[Occurrence],
    interaction_response: InteractionResponse,
    current_feature_migration_has_code_changes: &mut bool,
) -> StopMigrationProcess {
    if occurrences.is_empty() {
        if interaction_response == InteractionResponse::PostponeStep {
            print_postponed_action(max_len, feature, migration_step);
        } else {
            print_checked_action(max_len, feature, migration_step);
        }
        StopMigrationProcess::No
    } else {
        print_changing_code_action(max_len, feature, migration_step);
        // Print the confirmation.
        println!(
            "Source code successfully changed ({} change{}).",
            occurrences.len(),
            plural_s(occurrences.len())
        );
        // Check if we can proceed with the next migration step,
        // or we have a mandatory stop, or a stop for completing manual actions.
        match continue_migration_process {
            ContinueMigrationProcess::Never => {
                print_continue_migration_action("Review the changed code");
                StopMigrationProcess::Yes
            }
            ContinueMigrationProcess::IfNoManualMigrationActionsNeeded => {
                if !migration_step.has_manual_actions() {
                    // Mark the feature as having made code changes in the migration, and proceed with the
                    // next migration step *within the same feature*, if any.
                    *current_feature_migration_has_code_changes = true;
                    StopMigrationProcess::No
                } else {
                    // Display the manual migration actions and stop the further execution of the migration steps.
                    println!();
                    println!("You still need to manually:");
                    manual_migration_actions
                        .iter()
                        .for_each(|help| println!("- {help}"));
                    println!();
                    println!("{}", detailed_migration_guide_msg(feature));
                    print_continue_migration_action("Do the above manual changes");
                    StopMigrationProcess::Yes
                }
            }
        }
    }
}
/// Prints the outcome of an instruction migration step: "Checked" when the
/// step found nothing, otherwise "Review" followed by a diagnostic that lists
/// every occurrence in code.
fn print_instruction_result(
    engines: &Engines,
    max_len: usize,
    feature: &Feature,
    migration_step: &MigrationStep,
    occurrences: &[Occurrence],
) {
    if occurrences.is_empty() {
        print_checked_action(max_len, feature, migration_step);
        return;
    }
    print_review_action(max_len, feature, migration_step);
    let diagnostic =
        create_migration_diagnostic(engines.se(), feature, migration_step, occurrences);
    if let Some(diagnostic) = diagnostic {
        format_diagnostic(&diagnostic);
    }
}
/// Outputs modified modules, if any, to their original files.
///
/// A module is considered modified, if any of the [Span]s in `occurrences_spans`
/// has that module as its source.
fn output_modified_modules(
    manifest_dir: &Path,
    program_info: &ProgramInfo,
    occurrences: &[Occurrence],
    experimental: ExperimentalFeatures,
) -> Result<()> {
    if occurrences.is_empty() {
        return Ok(());
    }
    let modified_modules = ModifiedModules::new(
        program_info.engines.se(),
        &occurrences
            .iter()
            .map(|o| o.span.clone())
            .collect::<Vec<_>>(),
    );
    // Refuse to overwrite files that have unsaved changes in an editor.
    check_that_modified_modules_are_not_dirty(&modified_modules)?;
    output_changed_lexed_program(
        manifest_dir,
        &modified_modules,
        &program_info.lexed_program,
        experimental,
    )?;
    Ok(())
}
/// Fails with an instructive error if any of the modified modules is "dirty",
/// i.e., open in an editor with unsaved changes.
fn check_that_modified_modules_are_not_dirty(modified_modules: &ModifiedModules) -> Result<()> {
    let dirty_modules = modified_modules.get_dirty_modified_modules_paths();
    if !dirty_modules.is_empty() {
        bail!(instructive_error("Files cannot be changed, because they are open in an editor and contain unsaved changes.",
            &[
                "The below files are open in an editor and contain unsaved changes:".to_string(),
            ]
            .into_iter()
            .chain(dirty_modules.iter().map(|file| format!(" - {}", file.display())))
            .chain(vec!["Please save the open files before running the migrations.".to_string()])
            .collect::<Vec<_>>()
        ));
    }
    Ok(())
}
/// Formats and writes back every modified module of `lexed_program`
/// (the root module and all submodules, recursively) to its original file.
fn output_changed_lexed_program(
    manifest_dir: &Path,
    modified_modules: &ModifiedModules,
    lexed_program: &LexedProgram,
    experimental: ExperimentalFeatures,
) -> Result<()> {
    // Writes `lexed_module` to disk if it was modified, then recurses into its submodules.
    fn output_modules_rec(
        manifest_dir: &Path,
        modified_modules: &ModifiedModules,
        lexed_module: &LexedModule,
        experimental: ExperimentalFeatures,
    ) -> Result<()> {
        if let Some(path) = modified_modules.get_path_if_modified(&lexed_module.tree.value) {
            let mut formatter = Formatter::from_dir(manifest_dir, experimental)?;
            let code = formatter.format_module(&lexed_module.tree.clone())?;
            std::fs::write(path, code)?;
        }
        for (_, lexed_submodule) in lexed_module.submodules.iter() {
            output_modules_rec(
                manifest_dir,
                modified_modules,
                &lexed_submodule.module,
                experimental,
            )?;
        }
        Ok(())
    }
    output_modules_rec(
        manifest_dir,
        modified_modules,
        &lexed_program.root,
        experimental,
    )
}
/// Prints the green "Migrating" action line, listing the names of all the
/// breaking change features covered by `migration_steps`.
fn print_migrating_action(migration_steps: MigrationSteps) {
    println_action_green(
        "Migrating",
        &format!(
            "Breaking change feature{} {}",
            plural_s(migration_steps.len()),
            sequence_to_str(
                &migration_steps
                    .iter()
                    .map(|(feature, _)| feature.name())
                    .collect_vec(),
                Enclosing::None,
                4
            ),
        ),
    );
}
/// Prints the yellow "Changing" action line for the given migration step.
fn print_changing_code_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) {
    let title = full_migration_step_title(max_len, feature, migration_step);
    println_action_yellow("Changing", &title);
}
/// Prints the green "Checked" action line for the given migration step.
fn print_checked_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) {
    let title = full_migration_step_title(max_len, feature, migration_step);
    println_action_green("Checked", &title);
}
/// Prints the yellow "Review" action line for the given migration step.
fn print_review_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) {
    let title = full_migration_step_title(max_len, feature, migration_step);
    println_action_yellow("Review", &title);
}
/// Prints the yellow "Postponed" action line for the given migration step.
fn print_postponed_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) {
    let title = full_migration_step_title(max_len, feature, migration_step);
    println_action_yellow("Postponed", &title);
}
/// Prints the green "Finished" action line: either the project-is-compatible
/// message, or a reminder to resolve the postponed migration steps later.
fn print_migration_finished_action(num_of_postponed_steps: usize) {
    if num_of_postponed_steps == 0 {
        println_action_green("Finished", PROJECT_IS_COMPATIBLE);
        return;
    }
    let reminder = format!(
        "Run `forc migrate` at a later point to resolve {} postponed migration step{}",
        num_to_str(num_of_postponed_steps),
        plural_s(num_of_postponed_steps),
    );
    println_action_green("Finished", &reminder)
}
/// Prints the yellow "Continue" action line, asking the developer to perform
/// `txt` and then re-run the tool.
fn print_continue_migration_action(txt: &str) {
    let message = format!("{txt} and re-run `forc migrate` to finish the migration process");
    println_action_yellow("Continue", &message);
}
/// Returns the [MigrationStep::title] prefixed by its [Feature::name].
fn full_migration_step_title(
    max_len: usize,
    feature: &Feature,
    migration_step: &MigrationStep,
) -> String {
    // Two extra columns account for the square brackets around the feature name.
    let width = max_len + 2;
    let feature_tag = format!("[{}]", feature.name());
    format!("{feature_tag:<width$} {}", migration_step.title)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/cli/commands/show.rs | forc-plugins/forc-migrate/src/cli/commands/show.rs | use std::collections::HashMap;
use crate::cli::shared::{max_feature_name_len, print_features_and_migration_steps};
use crate::get_migration_steps_or_return;
use crate::migrations::MigrationStepExecution;
use anyhow::{Ok, Result};
use clap::Parser;
use itertools::Itertools;
use sway_error::formatting::{sequence_to_list, sequence_to_str, Enclosing, Indent};
// CLI usage examples rendered into the `forc migrate show --help` output.
forc_util::cli_examples! {
    crate::cli::Opt {
        [ Show the upcoming breaking change features and their migration steps => "forc migrate show"]
    }
}
/// Show the upcoming breaking change features and their migration steps.
#[derive(Debug, Parser)]
pub(crate) struct Command {}
/// Entry point of `forc migrate show`.
///
/// Prints the upcoming breaking change features, their migration steps
/// (without effort estimates), and the experimental feature flags to put
/// in `Forc.toml` or pass on the CLI.
pub(crate) fn exec(_command: Command) -> Result<()> {
    let migration_steps = get_migration_steps_or_return!();
    let feature_name_len = max_feature_name_len(migration_steps);
    // Convert migration steps to form suitable for printing (adding `None` for time estimates.)
    let migration_steps = migration_steps
        .iter()
        .map(|(feature, steps)| {
            (
                *feature,
                steps.iter().map(|step| (step, None)).collect::<Vec<_>>(),
            )
        })
        .collect::<Vec<_>>();
    // Print the list of breaking change features.
    println!("Breaking change features:");
    println!(
        "{}",
        sequence_to_list(
            &migration_steps
                .iter()
                .map(|(feature, _)| format!(
                    "{:feature_name_len$} ({})",
                    feature.name(),
                    feature.url()
                ))
                .collect_vec(),
            Indent::Single,
            usize::MAX
        )
        .join("\n")
    );
    println!();
    // Print migration steps.
    // Count the steps per execution kind (manual/semiautomatic/automatic).
    let mut num_of_steps_per_execution_kind = HashMap::<MigrationStepExecution, usize>::new();
    migration_steps
        .iter()
        .flat_map(|(_, steps)| steps)
        .for_each(|(step, _)| {
            *num_of_steps_per_execution_kind
                .entry(step.execution())
                .or_insert(0) += 1
        });
    let num_of_steps_per_execution_kind = num_of_steps_per_execution_kind
        .into_iter()
        .filter(|(_, count)| *count > 0)
        .sorted_by_key(|(execution, _)| *execution)
        .map(|(execution, count)| {
            format!(
                "{count} {}",
                match execution {
                    MigrationStepExecution::Manual => "manual",
                    MigrationStepExecution::Semiautomatic => "semiautomatic",
                    MigrationStepExecution::Automatic => "automatic",
                },
            )
        })
        .collect_vec();
    println!(
        "Migration steps ({}):",
        sequence_to_str(
            &num_of_steps_per_execution_kind,
            Enclosing::None,
            usize::MAX
        )
    );
    print_features_and_migration_steps(&migration_steps);
    // Print experimental feature flags.
    let features = migration_steps.iter().map(|(feature, _)| feature.name());
    println!("Experimental feature flags:");
    println!(
        "- for Forc.toml: experimental = {{ {} }}",
        features
            .clone()
            .map(|feature| format!("{feature} = true"))
            .collect::<Vec<_>>()
            .join(", ")
    );
    println!(
        "- for CLI: --experimental {}",
        features.collect::<Vec<_>>().join(",")
    );
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/attribute.rs | forc-plugins/forc-migrate/src/modifying/attribute.rs | use sway_ast::{
attribute::{Attribute, AttributeArg, CFG_ATTRIBUTE_NAME, DOC_COMMENT_ATTRIBUTE_NAME},
brackets::SquareBrackets,
keywords::{HashBangToken, HashToken, Token},
AttributeDecl, Literal, Parens, Punctuated,
};
use sway_types::{Ident, Span, Spanned};
use crate::assert_insert_span;
use super::{Modifier, New};
#[allow(dead_code)]
impl Modifier<'_, Attribute> {
    /// Renames the attribute to `name`, keeping the span of the current name.
    pub(crate) fn set_name<S: AsRef<str> + ?Sized>(&mut self, name: &S) -> &mut Self {
        // We preserve the current span of the name.
        let insert_span = self.element.name.span();
        self.element.name = Ident::new_with_override(name.as_ref().into(), insert_span);
        self
    }
}
#[allow(dead_code)]
impl New {
/// Creates an [Attribute] with a single [AttributeArg]. E.g. `attribute_name(arg_name = value)` or `attribute_name(arg_name)`.
pub(crate) fn attribute_with_arg<S: AsRef<str> + ?Sized>(
    insert_span: Span,
    attribute_name: &S,
    arg_name: &S,
    value: Option<Literal>,
) -> Attribute {
    assert_insert_span!(insert_span);
    Attribute {
        name: Ident::new_with_override(attribute_name.as_ref().into(), insert_span.clone()),
        args: Some(Parens {
            inner: Punctuated {
                // A single argument: it is the final value, with no separator pairs.
                value_separator_pairs: vec![],
                final_value_opt: Some(Box::new(AttributeArg {
                    name: Ident::new_with_override(
                        arg_name.as_ref().into(),
                        insert_span.clone(),
                    ),
                    value,
                })),
            },
            span: insert_span,
        }),
    }
}
/// Creates an [AttributeDecl] with a single [Attribute] that has a single [AttributeArg]. E.g. `#[attribute_name(arg_name = value)]` or `#[attribute_name(arg_name)]`.
pub(crate) fn attribute_decl_with_arg<S: AsRef<str> + ?Sized>(
insert_span: Span,
attribute_name: &S,
arg_name: &S,
value: Option<Literal>,
) -> AttributeDecl {
assert_insert_span!(insert_span);
AttributeDecl {
hash_kind: sway_ast::attribute::AttributeHashKind::Outer(HashToken::new(
insert_span.clone(),
)),
attribute: SquareBrackets {
inner: Punctuated {
value_separator_pairs: vec![],
final_value_opt: Some(Box::new(New::attribute_with_arg(
insert_span.clone(),
attribute_name,
arg_name,
value,
))),
},
span: insert_span,
},
}
}
/// Creates an [AttributeDecl] representing a single `cfg` experimental attribute. E.g. `#[cfg(experimental_flag = value)]`.
pub(crate) fn cfg_experimental_attribute_decl(
insert_span: Span,
feature_name: &str,
value: bool,
) -> AttributeDecl {
assert_insert_span!(insert_span);
AttributeDecl {
hash_kind: sway_ast::attribute::AttributeHashKind::Outer(HashToken::new(
insert_span.clone(),
)),
attribute: SquareBrackets {
inner: Punctuated {
value_separator_pairs: vec![],
final_value_opt: Some(Box::new(New::attribute_with_arg(
insert_span.clone(),
CFG_ATTRIBUTE_NAME,
&format!("experimental_{feature_name}"),
Some(New::literal_bool(insert_span.clone(), value)),
))),
},
span: insert_span,
},
}
}
/// Creates a `doc-comment` [AttributeDecl] that defines a single doc-comment line.
/// It automatically inserts the leading space.
///
/// E.g., `comment` "This is a comment." will create an [AttributeDecl] that corresponds to:
/// ```ignore
/// //! This is a comment.
/// ```
pub(crate) fn doc_comment_attribute_decl<S: AsRef<str> + ?Sized>(
insert_span: Span,
comment: &S,
) -> AttributeDecl {
assert_insert_span!(insert_span);
AttributeDecl {
hash_kind: sway_ast::attribute::AttributeHashKind::Inner(HashBangToken::new(
insert_span.clone(),
)),
attribute: SquareBrackets {
inner: Punctuated {
value_separator_pairs: vec![],
final_value_opt: Some(Box::new(New::attribute_with_arg(
insert_span.clone(),
DOC_COMMENT_ATTRIBUTE_NAME,
&format!(" {}", comment.as_ref()),
None,
))),
},
span: insert_span,
},
}
}
/// Creates `doc-comment` [AttributeDecl]s that define multiple doc-comment lines.
/// It automatically inserts the leading space into each line.
///
/// E.g., `comments` \["This is a comment.", "This is the second line."\] will create
/// [AttributeDecl]s that corresponds to:
/// ```ignore
/// //! This is a comment.
/// //! This is the second line.
/// ```
pub(crate) fn doc_comments_attribute_decls<S: AsRef<str> + ?Sized>(
insert_span: Span,
comments: &[&S],
) -> Vec<AttributeDecl> {
comments
.iter()
.map(|comment| New::doc_comment_attribute_decl(insert_span.clone(), comment))
.collect()
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/expr.rs | forc-plugins/forc-migrate/src/modifying/expr.rs | use sway_ast::{
keywords::{DotToken, Token},
Expr, Parens, PathExprSegment, Punctuated,
};
use sway_types::{Ident, Span};
use crate::assert_insert_span;
use super::New;
impl New {
/// Creates an [Expr] representing a call to a non-generic method with the name `method_name`.
/// The method does not accepts any arguments.
pub(crate) fn method_call<S: AsRef<str> + ?Sized>(
insert_span: Span,
target: Expr,
method_name: &S,
) -> Expr {
assert_insert_span!(insert_span);
Expr::MethodCall {
target: Box::new(target),
dot_token: DotToken::new(insert_span.clone()),
path_seg: PathExprSegment {
name: Ident::new_with_override(method_name.as_ref().into(), insert_span.clone()),
generics_opt: None,
},
contract_args_opt: None,
args: Parens {
inner: Punctuated {
value_separator_pairs: vec![],
final_value_opt: None,
},
span: insert_span,
},
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/path_expression_segment.rs | forc-plugins/forc-migrate/src/modifying/path_expression_segment.rs | use sway_ast::PathExprSegment;
use sway_types::{Ident, Spanned};
use super::Modifier;
impl Modifier<'_, PathExprSegment> {
pub(crate) fn set_name<S: AsRef<str> + ?Sized>(&mut self, name: &S) -> &mut Self {
// We preserve the current span of the name.
let insert_span = self.element.name.span();
self.element.name = Ident::new_with_override(name.as_ref().into(), insert_span);
self
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/module.rs | forc-plugins/forc-migrate/src/modifying/module.rs | use std::cmp::min;
use sway_ast::{attribute::Annotated, ItemFn, ItemKind, Module};
use sway_types::{Span, Spanned};
use super::Modifier;
#[allow(dead_code)]
impl Modifier<'_, Module> {
/// Removes an [Annotated<ItemKind>] from `self`.
/// The item to remove is identified by its [Span], `annotated_item_span`.
pub(crate) fn remove_annotated_item(&mut self, annotated_item_span: &Span) -> &mut Self {
self.element
.items
.retain(|annotated| annotated.span() != *annotated_item_span);
self
}
/// Inserts `annotated_item` after the first already existing item whose [Span::end]
/// is greater than or equal to `annotated_item`'s [Span::start].
pub(crate) fn insert_annotated_item_after(
&mut self,
annotated_item: Annotated<ItemKind>,
) -> &mut Self {
let first_existing_preceding_item_index = self
.element
.items
.iter()
.position(|annotated| annotated.span().end() >= annotated_item.span().start())
.unwrap_or(0)
+ 1;
let index = min(
first_existing_preceding_item_index,
self.element.items.len(),
);
self.element.items.insert(index, annotated_item);
self
}
/// Appends `annotated_item` at the end of the [Module].
pub(crate) fn append_annotated_item(
&mut self,
annotated_item: Annotated<ItemKind>,
) -> &mut Self {
self.element.items.push(annotated_item);
self
}
pub(crate) fn append_function(&mut self, function: ItemFn) -> &mut Self {
let function = Annotated {
attributes: vec![],
value: ItemKind::Fn(function),
};
self.append_annotated_item(function)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/function.rs | forc-plugins/forc-migrate/src/modifying/function.rs | use sway_ast::{
keywords::{FnToken, Keyword},
Braces, CodeBlockContents, FnSignature, ItemFn, Parens, Punctuated,
};
use sway_types::{Ident, Span, Spanned};
use crate::assert_insert_span;
use super::{Modifier, New};
impl Modifier<'_, ItemFn> {
pub(crate) fn set_name<S: AsRef<str> + ?Sized>(&mut self, name: &S) -> &mut Self {
// We preserve the current span of the name.
let insert_span = self.element.fn_signature.name.span();
self.element.fn_signature.name =
Ident::new_with_override(name.as_ref().into(), insert_span);
self
}
}
impl New {
/// Creates an [ItemFn] representing and empty function without arguments that is named `name`.
pub(crate) fn function<S: AsRef<str> + ?Sized>(insert_span: Span, name: &S) -> ItemFn {
assert_insert_span!(insert_span);
ItemFn {
fn_signature: FnSignature {
visibility: None,
fn_token: FnToken::new(insert_span.clone()),
name: Ident::new_with_override(name.as_ref().into(), insert_span.clone()),
generics: None,
arguments: Parens {
inner: sway_ast::FnArgs::Static(Punctuated {
value_separator_pairs: vec![],
final_value_opt: None,
}),
span: insert_span.clone(),
},
return_type_opt: None,
where_clause_opt: None,
},
body: Braces {
inner: CodeBlockContents {
statements: vec![],
final_expr_opt: None,
span: insert_span.clone(),
},
span: insert_span,
},
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/literal.rs | forc-plugins/forc-migrate/src/modifying/literal.rs | use sway_ast::{
literal::{LitBool, LitBoolType},
Literal,
};
use sway_types::Span;
use crate::assert_insert_span;
use super::New;
impl New {
/// Creates a [Literal] representing bool `value`.
pub(crate) fn literal_bool(insert_span: Span, value: bool) -> Literal {
assert_insert_span!(insert_span);
Literal::Bool(LitBool {
span: insert_span,
kind: if value {
LitBoolType::True
} else {
LitBoolType::False
},
})
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/mod.rs | forc-plugins/forc-migrate/src/modifying/mod.rs | //! This module contains common API for building new and modifying existing
//! elements within a lexed tree.
use sway_types::Span;
mod annotated;
mod attribute;
mod expr;
mod function;
mod literal;
mod module;
mod path_expression_segment;
mod storage_field;
/// A wrapper around a lexed tree element that will be modified.
pub(crate) struct Modifier<'a, T> {
element: &'a mut T,
}
impl<'a, T> Modifier<'a, T> {
// Private, so that we enforce creating modifiers with the
// `modify` function.
fn new(element: &'a mut T) -> Self {
Self { element }
}
}
pub(crate) fn modify<T>(element: &mut T) -> Modifier<'_, T> {
Modifier::new(element)
}
// Empty struct for creating new lexed elements.
// Constructors for each lexed element are in separate modules,
// grouped by lexed elements they construct, and each module
// has its own `New` impl.
pub(crate) struct New {}
/// Trait for setting all spans within `Self` to the same insert span.
///
/// New elements inserted into lexed tree should have their spans set
/// to the same zero-sized [Span]. This ensures that they will always
/// be properly rendered. Sometimes, new elements are copied from existing
/// elements and modified. Such new elements might not have all spans
/// set to the same, zero-sized insert span. Implementing this trait
/// ensures proper setting of the insert span.
// TODO: Implement `SetInsertSpan` for lexed tree elements.
#[allow(dead_code)]
pub(crate) trait SetInsertSpan {
fn set_insert_span(&mut self, insert_span: Span);
}
#[macro_export]
macro_rules! assert_insert_span {
($insert_span: ident) => {
assert!(
stringify!($insert_span) == "insert_span",
"the insert span function argument must be called `insert_span`"
);
assert!($insert_span.is_empty(), "`insert_span` must be empty");
assert!(
!$insert_span.is_dummy(),
"`insert_span` must not be a `Span::dummy()`"
);
};
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/annotated.rs | forc-plugins/forc-migrate/src/modifying/annotated.rs | #[allow(unused_imports)] // Used in doc-comments.
use sway_ast::{
attribute::{Annotated, Attribute},
AttributeDecl,
};
use sway_types::{Span, Spanned};
use super::Modifier;
impl<T> Modifier<'_, Annotated<T>> {
/// From `self`, removes [AttributeDecl] that contains an [Attribute]
/// whose span equals `attribute_span`.
///
/// Method **removes the whole [AttributeDecl]**, even if it contains
/// other attributes, aside from the `attribute_span` matching one.
pub(crate) fn remove_attribute_decl_containing_attribute(
&mut self,
attribute_span: &Span,
) -> &mut Self {
self.element.attributes.retain(|attribute_decl| {
attribute_decl
.attribute
.inner
.iter()
.all(|attribute| attribute.span() != *attribute_span)
});
self
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-migrate/src/modifying/storage_field.rs | forc-plugins/forc-migrate/src/modifying/storage_field.rs | use num_bigint::BigUint;
use sway_ast::{
keywords::{InToken, Keyword},
Expr, LitInt, StorageField,
};
use sway_types::{Span, Spanned};
use super::Modifier;
pub(crate) trait ToInKey {
fn to_in_key(self, span: Span) -> Expr;
}
impl ToInKey for BigUint {
fn to_in_key(self, span: Span) -> Expr {
Expr::Literal(sway_ast::Literal::Int(LitInt {
span,
parsed: self,
ty_opt: None,
is_generated_b256: true,
}))
}
}
impl ToInKey for Expr {
fn to_in_key(self, _span: Span) -> Expr {
// TODO: Provide infrastructure for replacing spans on the elements
// of a lexed tree. This will be useful in modifications in
// which we generate new tree elements by copying existing.
//
// Until then, in this demo on how to develop `Modifier`s,
// just return `self`, without the spans replaced.
self
}
}
#[allow(dead_code)]
impl Modifier<'_, StorageField> {
pub(crate) fn set_in_key<K: ToInKey>(&mut self, key: K) -> &mut Self {
// If the `in` token already exists, just replace the key and leave the `in`
// token as is. Place the key after the `in` token.
let insert_span = if let Some(in_token) = &self.element.in_token {
Span::empty_at_end(&in_token.span())
} else {
// Otherwise, place the `in` token after the name.
Span::empty_at_end(&self.element.name.span())
};
if self.element.in_token.is_none() {
self.element.in_token = Some(InToken::new(insert_span.clone()));
}
self.element.key_expr = Some(key.to_in_key(insert_span));
self
}
pub(crate) fn remove_in_key(&mut self) -> &mut Self {
self.element.in_token = None;
self.element.key_expr = None;
self
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/lib.rs | forc-plugins/forc-doc/src/lib.rs | pub mod doc;
pub mod render;
pub mod search;
use anyhow::{bail, Result};
use clap::Parser;
use doc::{module::ModuleInfo, Documentation};
use forc_pkg::{
self as pkg,
manifest::{GenericManifestFile, ManifestFile},
source::IPFSNode,
PackageManifestFile, Programs,
};
use forc_tracing::println_action_green;
use forc_util::default_output_directory;
use render::{
index::{LibraryInfo, WorkspaceIndex},
HTMLString, Renderable, RenderedDocumentation,
};
use std::{
fs,
path::{Path, PathBuf},
};
use sway_core::{
language::ty::{TyProgram, TyProgramKind},
BuildTarget, Engines,
};
use sway_features::ExperimentalFeatures;
pub const DOC_DIR_NAME: &str = "doc";
pub const ASSETS_DIR_NAME: &str = "static.files";
forc_util::cli_examples! {
crate::Command {
[ Build the docs for a project or workspace in the current path => "forc doc"]
[ Build the docs for a project or workspace in the current path and open it in the browser => "forc doc --open" ]
[ Build the docs for a project located in another path => "forc doc --path {path}" ]
[ Build the docs for the current project exporting private types => "forc doc --document-private-items" ]
[ Build the docs offline without downloading any dependencies => "forc doc --offline" ]
}
}
/// Forc plugin for building a Sway package's documentation
#[derive(Debug, Parser, Default)]
#[clap(
name = "forc-doc",
after_help = help(),
version
)]
pub struct Command {
/// Path to the project.
///
/// If not specified, current working directory will be used.
#[clap(short, long, alias = "manifest-path")]
pub path: Option<String>,
/// Include non-public items in the documentation.
#[clap(long)]
pub document_private_items: bool,
/// Open the docs in a browser after building them.
#[clap(long)]
pub open: bool,
/// Offline mode, prevents Forc from using the network when managing dependencies.
/// Meaning it will only try to use previously downloaded dependencies.
#[clap(long)]
pub offline: bool,
/// Requires that the Forc.lock file is up-to-date. If the lock file is missing, or it
/// needs to be updated, Forc will exit with an error.
#[clap(long)]
pub locked: bool,
/// Do not build documentation for dependencies.
#[clap(long)]
pub no_deps: bool,
/// The IPFS Node to use for fetching IPFS sources.
///
/// Possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>
#[clap(long)]
pub ipfs_node: Option<IPFSNode>,
/// The path to the documentation output directory.
///
/// If not specified, the default documentation output directory will be used.
#[clap(long)]
pub doc_path: Option<String>,
#[clap(flatten)]
pub experimental: sway_features::CliFields,
/// Silent mode. Don't output any warnings or errors to the command line.
#[clap(long, short = 's', action)]
pub silent: bool,
}
/// Result of documentation generation, either for a single package or a workspace.
#[derive(Debug, Clone)]
pub enum DocResult {
Package(Box<PackageManifestFile>),
Workspace {
name: String,
libraries: Vec<LibraryInfo>,
},
}
/// Generate documentation for a given package or workspace.
pub fn generate_docs(opts: &Command) -> Result<(PathBuf, DocResult)> {
let ctx = DocContext::from_options(opts)?;
let mut compile_results = compile(&ctx, opts)?.collect::<Vec<_>>();
let doc_result = compile_html(opts, &ctx, &mut compile_results)?;
Ok((ctx.doc_path, doc_result))
}
/// Information passed to the render phase to get TypeInfo, CallPath or visibility for type anchors.
#[derive(Clone)]
pub struct RenderPlan<'e> {
no_deps: bool,
document_private_items: bool,
engines: &'e Engines,
}
impl<'e> RenderPlan<'e> {
pub fn new(
no_deps: bool,
document_private_items: bool,
engines: &'e Engines,
) -> RenderPlan<'e> {
Self {
no_deps,
document_private_items,
engines,
}
}
}
pub struct DocContext {
pub manifest: ManifestFile,
pub doc_path: PathBuf,
pub engines: Engines,
pub build_plan: pkg::BuildPlan,
pub workspace_name: String,
}
impl DocContext {
pub fn is_workspace(&self) -> bool {
matches!(self.manifest, ManifestFile::Workspace(_))
}
/// package manifest for single packages. Returns None for workspaces.
pub fn pkg_manifest(&self) -> Option<&PackageManifestFile> {
match &self.manifest {
ManifestFile::Package(pkg) => Some(pkg),
ManifestFile::Workspace(_) => None,
}
}
pub fn from_options(opts: &Command) -> Result<Self> {
// get manifest directory
let dir = if let Some(ref path) = opts.path {
PathBuf::from(path)
} else {
std::env::current_dir()?
};
let manifest = ManifestFile::from_dir(dir)?;
// Get workspace name for later use
let workspace_name = manifest
.dir()
.file_name()
.and_then(|name| name.to_str())
.unwrap_or("workspace")
.to_string();
// create doc path
let out_path = default_output_directory(manifest.dir());
let doc_dir = opts
.doc_path
.clone()
.unwrap_or_else(|| DOC_DIR_NAME.to_string());
let doc_path = out_path.join(doc_dir);
if doc_path.exists() {
std::fs::remove_dir_all(&doc_path)?;
}
fs::create_dir_all(&doc_path)?;
// Build Plan
let member_manifests = manifest.member_manifests()?;
let lock_path = manifest.lock_path()?;
// Check for empty workspaces
if matches!(manifest, ManifestFile::Workspace(_)) && member_manifests.is_empty() {
bail!("Workspace contains no members");
}
let ipfs_node = opts.ipfs_node.clone().unwrap_or_default();
let build_plan = pkg::BuildPlan::from_lock_and_manifests(
&lock_path,
&member_manifests,
opts.locked,
opts.offline,
&ipfs_node,
)?;
Ok(Self {
manifest,
doc_path,
engines: Engines::default(),
build_plan,
workspace_name,
})
}
}
pub fn compile(ctx: &DocContext, opts: &Command) -> Result<impl Iterator<Item = Option<Programs>>> {
if ctx.is_workspace() {
println_action_green(
"Compiling",
&format!("workspace ({})", ctx.manifest.dir().to_string_lossy()),
);
} else if let Some(pkg_manifest) = ctx.pkg_manifest() {
println_action_green(
"Compiling",
&format!(
"{} ({})",
pkg_manifest.project_name(),
ctx.manifest.dir().to_string_lossy()
),
);
}
let tests_enabled = opts.document_private_items;
pkg::check(
&ctx.build_plan,
BuildTarget::default(),
opts.silent,
None,
tests_enabled,
&ctx.engines,
None,
&opts.experimental.experimental,
&opts.experimental.no_experimental,
sway_core::DbgGeneration::Full,
)
.map(|results| results.into_iter().map(|(programs, _handler)| programs))
}
pub fn compile_html(
opts: &Command,
ctx: &DocContext,
compile_results: &mut Vec<Option<Programs>>,
) -> Result<DocResult> {
let mut documented_libraries = Vec::new();
let raw_docs = if opts.no_deps {
if let Some(pkg_manifest) = ctx.pkg_manifest() {
// Single package mode
let Some(ty_program) = compile_results
.pop()
.and_then(|programs| programs)
.and_then(|p| p.typed.ok())
else {
bail! {
"documentation could not be built from manifest located at '{}'",
pkg_manifest.path().display()
}
};
// Only document if it's a library
if matches!(ty_program.kind, TyProgramKind::Library { .. }) {
let lib_info = LibraryInfo {
name: pkg_manifest.project_name().to_string(),
description: pkg_manifest
.project
.description
.clone()
.unwrap_or_else(|| format!("Library {}", pkg_manifest.project_name())),
};
documented_libraries.push(lib_info);
build_docs(opts, ctx, &ty_program, &ctx.manifest, pkg_manifest)?
} else {
bail!(
"forc-doc only supports libraries. '{}' is not a library.",
pkg_manifest.project_name()
);
}
} else {
// Workspace mode with no_deps
bail!("--no-deps flag is not meaningful for workspaces");
}
} else {
let (order, graph, manifest_map) = (
ctx.build_plan.compilation_order(),
ctx.build_plan.graph(),
ctx.build_plan.manifest_map(),
);
let mut raw_docs = Documentation(Vec::new());
for (node, compile_result) in order.iter().zip(compile_results) {
let id = &graph[*node].id();
if let Some(pkg_manifest_file) = manifest_map.get(id) {
let manifest_file = ManifestFile::from_dir(pkg_manifest_file.path())?;
let ty_program = compile_result
.as_ref()
.and_then(|programs| programs.typed.clone().ok())
.ok_or_else(|| {
anyhow::anyhow!(
"documentation could not be built from manifest located at '{}'",
pkg_manifest_file.path().display()
)
})?;
// Only document libraries that are workspace members
if matches!(ty_program.kind, TyProgramKind::Library { .. }) {
// Check if this package is a workspace member
let is_workspace_member = if ctx.is_workspace() {
ctx.manifest.member_manifests()?.iter().any(|(_, member)| {
member.project_name() == pkg_manifest_file.project_name()
})
} else {
true // For single packages, always include
};
if is_workspace_member {
let lib_info = LibraryInfo {
name: pkg_manifest_file.project_name().to_string(),
description: pkg_manifest_file
.project
.description
.clone()
.unwrap_or_else(|| {
format!("Library {}", pkg_manifest_file.project_name())
}),
};
documented_libraries.push(lib_info);
raw_docs.0.extend(
build_docs(opts, ctx, &ty_program, &manifest_file, pkg_manifest_file)?
.0,
);
}
}
}
}
raw_docs
};
// Create workspace index if this is a workspace
if ctx.is_workspace() && !documented_libraries.is_empty() {
// Sort libraries alphabetically for consistent display
documented_libraries.sort_by(|a, b| a.name.cmp(&b.name));
create_workspace_index(
&ctx.doc_path,
&documented_libraries,
&ctx.engines,
&ctx.workspace_name,
)?;
}
search::write_search_index(&ctx.doc_path, &raw_docs)?;
let result = if ctx.is_workspace() {
DocResult::Workspace {
name: ctx.workspace_name.clone(),
libraries: documented_libraries,
}
} else if let Some(pkg_manifest) = ctx.pkg_manifest() {
DocResult::Package(Box::new(pkg_manifest.clone()))
} else {
unreachable!("Should have either workspace or package")
};
Ok(result)
}
fn build_docs(
opts: &Command,
ctx: &DocContext,
ty_program: &TyProgram,
manifest: &ManifestFile,
pkg_manifest: &PackageManifestFile,
) -> Result<Documentation> {
let experimental = ExperimentalFeatures::new(
&pkg_manifest.project.experimental,
&opts.experimental.experimental,
&opts.experimental.no_experimental,
)
.map_err(|err| anyhow::anyhow!("{err}"))?;
println_action_green(
"Building",
&format!(
"documentation for {} ({})",
pkg_manifest.project_name(),
manifest.dir().to_string_lossy()
),
);
let raw_docs = Documentation::from_ty_program(
&ctx.engines,
pkg_manifest.project_name(),
ty_program,
opts.document_private_items,
experimental,
)?;
let root_attributes = (!ty_program.root_module.attributes.is_empty())
.then_some(ty_program.root_module.attributes.clone());
let forc_version = pkg_manifest
.project
.forc_version
.as_ref()
.map(|ver| format!("Forc v{}.{}.{}", ver.major, ver.minor, ver.patch));
// render docs to HTML
let rendered_docs = RenderedDocumentation::from_raw_docs(
raw_docs.clone(),
RenderPlan::new(opts.no_deps, opts.document_private_items, &ctx.engines),
root_attributes,
&ty_program.kind,
forc_version,
)?;
// write file contents to doc folder
write_content(rendered_docs, &ctx.doc_path)?;
println_action_green("Finished", pkg_manifest.project_name());
Ok(raw_docs)
}
fn write_content(rendered_docs: RenderedDocumentation, doc_path: &Path) -> Result<()> {
for doc in rendered_docs.0 {
let mut doc_path = doc_path.to_path_buf();
for prefix in doc.module_info.module_prefixes {
doc_path.push(prefix);
}
fs::create_dir_all(&doc_path)?;
doc_path.push(doc.html_filename);
fs::write(&doc_path, doc.file_contents.0.as_bytes())?;
}
Ok(())
}
fn create_workspace_index(
doc_path: &Path,
documented_libraries: &[LibraryInfo],
engines: &Engines,
workspace_name: &str,
) -> Result<()> {
// Create a workspace module info with the actual directory name
let workspace_info = ModuleInfo::from_ty_module(vec![workspace_name.to_string()], None);
// Create the workspace index
let workspace_index = WorkspaceIndex::new(workspace_info, documented_libraries.to_vec());
let render_plan = RenderPlan::new(false, false, engines);
let rendered_content = workspace_index.render(render_plan)?;
let html_content = HTMLString::from_rendered_content(rendered_content)?;
fs::write(doc_path.join("index.html"), html_content.0.as_bytes())?;
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/search.rs | forc-plugins/forc-doc/src/search.rs | use crate::doc::{module::ModuleInfo, Document, Documentation};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, HashMap},
fs,
path::Path,
};
const JS_SEARCH_FILE_NAME: &str = "search.js";
/// Creates the search index javascript file for the search bar.
pub fn write_search_index(doc_path: &Path, docs: &Documentation) -> Result<()> {
let json_data = docs.to_search_index_json_value()?;
let module_export =
"\"object\"==typeof exports&&\"undefined\"!=typeof module&&(module.exports=SEARCH_INDEX);";
let js_data = format!("var SEARCH_INDEX={json_data};\n{module_export}");
Ok(fs::write(doc_path.join(JS_SEARCH_FILE_NAME), js_data)?)
}
impl Documentation {
/// Generates a mapping of program name to a vector of documentable items within the program
/// and returns the map as a `serde_json::Value`.
fn to_search_index_json_value(&self) -> Result<serde_json::Value, serde_json::Error> {
let mut map = HashMap::with_capacity(self.len());
let mut modules = BTreeMap::new();
for doc in self.iter() {
let project_name = doc.module_info.project_name().to_string();
map.entry(project_name)
.or_insert_with(Vec::new)
.push(JsonSearchItem::from(doc));
modules.insert(
doc.module_info.module_prefixes.join("::"),
doc.module_info.clone(),
);
}
// Insert the modules themselves into the map.
for (_, module) in modules.iter() {
let project_name = module.project_name().to_string();
map.entry(project_name)
.or_insert_with(Vec::new)
.push(JsonSearchItem::from(module));
}
serde_json::to_value(map)
}
}
/// Item information used in the `search_pool.json`.
/// The item name is what the fuzzy search will be
/// matching on, all other information will be used
/// in generating links to the item.
#[derive(Clone, Debug, Serialize, Deserialize)]
struct JsonSearchItem {
name: String,
html_filename: String,
module_info: Vec<String>,
preview: String,
type_name: String,
}
impl<'a> From<&'a Document> for JsonSearchItem {
fn from(value: &'a Document) -> Self {
Self {
name: value.item_body.item_name.to_string(),
html_filename: value.html_filename(),
module_info: value.module_info.module_prefixes.clone(),
preview: value
.preview_opt()
.unwrap_or_default()
.replace("<br>", "")
.replace("<p>", "")
.replace("</p>", ""),
type_name: value.item_body.ty.friendly_type_name().into(),
}
}
}
impl<'a> From<&'a ModuleInfo> for JsonSearchItem {
fn from(value: &'a ModuleInfo) -> Self {
Self {
name: value
.module_prefixes
.last()
.unwrap_or(&String::new())
.to_string(),
html_filename: "index.html".into(),
module_info: value.module_prefixes.clone(),
preview: value
.preview_opt()
.unwrap_or_default()
.replace("<br>", "")
.replace("<p>", "")
.replace("</p>", ""),
type_name: "module".into(),
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/main.rs | forc-plugins/forc-doc/src/main.rs | use anyhow::{bail, Result};
use clap::Parser;
use forc_doc::{generate_docs, render::INDEX_FILENAME, Command, DocResult, ASSETS_DIR_NAME};
use include_dir::{include_dir, Dir};
use std::{
process::Command as Process,
{fs, path::PathBuf},
};
pub fn main() -> Result<()> {
let build_instructions = Command::parse();
let (doc_path, doc_result) = generate_docs(&build_instructions)?;
// CSS, icons and logos
static ASSETS_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/src/static.files");
let assets_path = doc_path.join(ASSETS_DIR_NAME);
fs::create_dir_all(&assets_path)?;
for file in ASSETS_DIR.files() {
let asset_path = assets_path.join(file.path());
fs::write(asset_path, file.contents())?;
}
// Sway syntax highlighting file
const SWAY_HJS_FILENAME: &str = "highlight.js";
let sway_hjs = std::include_bytes!("static.files/highlight.js");
fs::write(assets_path.join(SWAY_HJS_FILENAME), sway_hjs)?;
// check if the user wants to open the doc in the browser
// if opening in the browser fails, attempt to open using a file explorer
if build_instructions.open {
const BROWSER_ENV_VAR: &str = "BROWSER";
let path = match &doc_result {
DocResult::Workspace { .. } => {
// For workspaces, open the root index.html
doc_path.join(INDEX_FILENAME)
}
DocResult::Package(pkg_manifest) => {
// For single packages, open the package-specific index
doc_path
.join(pkg_manifest.project_name())
.join(INDEX_FILENAME)
}
};
let default_browser_opt = std::env::var_os(BROWSER_ENV_VAR);
match default_browser_opt {
Some(def_browser) => {
let browser = PathBuf::from(def_browser);
if let Err(e) = Process::new(&browser).arg(path).status() {
bail!(
"Couldn't open docs with {}: {}",
browser.to_string_lossy(),
e
);
}
}
None => {
if let Err(e) = opener::open(&path) {
bail!("Couldn't open docs: {}", e);
}
}
}
}
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/link.rs | forc-plugins/forc-doc/src/render/link.rs | //! Handles creation of links for modules.
use crate::{
doc::module::ModuleInfo,
render::{BlockTitle, DocStyle, Renderable},
RenderPlan,
};
use anyhow::Result;
use horrorshow::{box_html, Raw, RenderBox, Template};
use std::collections::BTreeMap;
/// Used for creating links between docs.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub struct DocLink {
pub name: String,
pub module_info: ModuleInfo,
pub html_filename: String,
pub preview_opt: Option<String>,
}
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq)]
pub struct DocLinks {
pub style: DocStyle,
/// The title and link info for each doc item.
pub links: BTreeMap<BlockTitle, Vec<DocLink>>,
}
impl Renderable for DocLinks {
    /// Renders the grouped links as HTML tables, one section per
    /// [BlockTitle], with a layout chosen by `self.style`.
    fn render(self, _render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let mut links_vec = Vec::new();
        // sort the doc links alphabetically
        // for the AllDoc page, sort based on the module prefix and name
        match self.style {
            DocStyle::AllDoc(_) => {
                for (block_title, mut doc_link) in self.links {
                    doc_link.sort_by(|a, b| {
                        let first = a
                            .module_info
                            .to_path_literal_string(&a.name, a.module_info.project_name());
                        let second = b
                            .module_info
                            .to_path_literal_string(&b.name, b.module_info.project_name());
                        first.cmp(&second)
                    });
                    links_vec.push((block_title, doc_link));
                }
            }
            _ => {
                for (block_title, mut doc_link) in self.links {
                    doc_link.sort();
                    // `block_title` and `doc_link` are already owned here, so
                    // push them directly instead of the previous redundant
                    // `block_title.clone()` / `doc_link.to_vec()` copies.
                    links_vec.push((block_title, doc_link));
                }
            }
        }
        // Each style renders the same item-table structure; they differ only
        // in how the link path and display name are computed.
        let doc_links = match self.style {
            DocStyle::AllDoc(_) => box_html! {
                @ for (title, list_items) in links_vec {
                    @ if !list_items.is_empty() {
                        h2(id=format!("{}", title.html_title_string())) { : title.as_str(); }
                        div(class="item-table") {
                            @ for item in list_items {
                                div(class="item-row") {
                                    div(class=format!("item-left {}-item", title.item_title_str())) {
                                        a(
                                            class=title.class_title_str(),
                                            href=item.module_info.file_path_at_location(&item.html_filename, item.module_info.project_name())
                                        ) {
                                            : item.module_info.to_path_literal_string(
                                                &item.name,
                                                item.module_info.project_name()
                                            );
                                        }
                                    }
                                    @ if item.preview_opt.is_some() {
                                        div(class="item-right docblock-short") {
                                            : Raw(item.preview_opt.clone().unwrap());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            .into_string()
            .unwrap(),
            DocStyle::ProjectIndex(_) => box_html! {
                @ for (title, list_items) in links_vec {
                    @ if !list_items.is_empty() {
                        h2(id=format!("{}", title.html_title_string())) { : title.as_str(); }
                        div(class="item-table") {
                            @ for item in list_items {
                                div(class="item-row") {
                                    div(class=format!("item-left {}-item", title.item_title_str())) {
                                        a(
                                            class=title.class_title_str(),
                                            href=item.module_info.file_path_at_location(&item.html_filename, item.module_info.project_name())
                                        ) {
                                            @ if title == BlockTitle::Modules {
                                                : item.name.clone();
                                            } else {
                                                : item.module_info.to_path_literal_string(
                                                    &item.name,
                                                    item.module_info.project_name()
                                                );
                                            }
                                        }
                                    }
                                    @ if item.preview_opt.is_some() {
                                        div(class="item-right docblock-short") {
                                            : Raw(item.preview_opt.clone().unwrap());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            .into_string()
            .unwrap(),
            _ => box_html! {
                @ for (title, list_items) in links_vec {
                    @ if !list_items.is_empty() {
                        h2(id=format!("{}", title.html_title_string())) { : title.as_str(); }
                        div(class="item-table") {
                            @ for item in list_items {
                                div(class="item-row") {
                                    div(class=format!("item-left {}-item", title.item_title_str())) {
                                        a(
                                            class=title.class_title_str(),
                                            href=item.module_info.file_path_at_location(&item.html_filename, item.module_info.location())
                                        ) {
                                            : item.module_info.to_path_literal_string(
                                                &item.name,
                                                item.module_info.location()
                                            );
                                        }
                                    }
                                    @ if item.preview_opt.is_some() {
                                        div(class="item-right docblock-short") {
                                            : Raw(item.preview_opt.clone().unwrap());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            .into_string()
            .unwrap(),
        };
        Ok(box_html! {
            : Raw(doc_links);
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/index.rs | forc-plugins/forc-doc/src/render/index.rs | //! Handles creation of `index.html` files.
use crate::{
doc::module::ModuleInfo,
render::{
link::DocLinks, search::generate_searchbar, sidebar::*, BlockTitle, DocStyle, Renderable,
IDENTITY,
},
RenderPlan, ASSETS_DIR_NAME,
};
use anyhow::Result;
use horrorshow::{box_html, Raw, RenderBox};
use std::collections::BTreeMap;
/// Information about a documented library including its name and description
#[derive(Clone, Debug, PartialEq)]
pub struct LibraryInfo {
    /// Library name; also the directory its `index.html` is linked under.
    pub name: String,
    /// Short description shown next to the library on the workspace index.
    pub description: String,
}
/// Workspace level index page
#[derive(Clone)]
pub(crate) struct WorkspaceIndex {
    /// The workspace root module info
    workspace_info: ModuleInfo,
    /// All documented libraries in the workspace with their descriptions.
    /// Rendered as the library table on the landing page.
    documented_libraries: Vec<LibraryInfo>,
}
impl WorkspaceIndex {
pub(crate) fn new(workspace_info: ModuleInfo, documented_libraries: Vec<LibraryInfo>) -> Self {
Self {
workspace_info,
documented_libraries,
}
}
}
impl SidebarNav for WorkspaceIndex {
    /// Builds the sidebar for the workspace landing page.
    fn sidebar(&self) -> Sidebar {
        // The workspace index has no per-module items to navigate to, so
        // its link map starts out empty.
        let empty_links = DocLinks {
            style: DocStyle::WorkspaceIndex,
            links: BTreeMap::new(),
        };
        Sidebar::new(
            None,
            DocStyle::WorkspaceIndex,
            self.workspace_info.clone(),
            empty_links,
        )
    }
}
impl Renderable for WorkspaceIndex {
    /// Renders the workspace landing page: a sidebar, a workspace-specific
    /// searchbar, and a table listing every documented library with a link
    /// to its own `index.html`.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let sidebar = self.sidebar().render(render_plan)?;
        // For workspace index, we're at the root level, so no path prefix needed
        let assets_path = ASSETS_DIR_NAME.to_string();
        // Create a custom searchbar for workspace
        // NOTE(review): this inlines a workspace variant of the shared
        // searchbar script in `search.rs` — keep the two in sync.
        let workspace_searchbar = box_html! {
            script(src="search.js", type="text/javascript");
            script {
                : Raw(r#"
                function onSearchFormSubmit(event) {
                    event.preventDefault();
                    const searchQuery = document.getElementById("search-input").value;
                    const url = new URL(window.location.href);
                    if (searchQuery) {
                        url.searchParams.set('search', searchQuery);
                    } else {
                        url.searchParams.delete('search');
                    }
                    history.pushState({ search: searchQuery }, "", url);
                    window.dispatchEvent(new HashChangeEvent("hashchange"));
                }
                document.addEventListener('DOMContentLoaded', () => {
                    const searchbar = document.getElementById("search-input");
                    searchbar.addEventListener("keyup", function(event) {
                        onSearchFormSubmit(event);
                    });
                    searchbar.addEventListener("search", function(event) {
                        onSearchFormSubmit(event);
                    });
                    function onQueryParamsChange() {
                        const searchParams = new URLSearchParams(window.location.search);
                        const query = searchParams.get("search");
                        const searchSection = document.getElementById('search');
                        const mainSection = document.getElementById('main-content');
                        const searchInput = document.getElementById('search-input');
                        if (query) {
                            searchInput.value = query;
                            const results = Object.values(SEARCH_INDEX).flat().filter(item => {
                                const lowerQuery = query.toLowerCase();
                                return item.name.toLowerCase().includes(lowerQuery);
                            });
                            const header = `<h1>Results for ${query}</h1>`;
                            if (results.length > 0) {
                                const resultList = results.map(item => {
                                    const formattedName = `<span class="type ${item.type_name}">${item.name}</span>`;
                                    const name = item.type_name === "module"
                                        ? [...item.module_info.slice(0, -1), formattedName].join("::")
                                        : [...item.module_info, formattedName].join("::");
                                    // Fix path generation for workspace - no leading slash, proper relative path
                                    const path = [...item.module_info, item.html_filename].join("/");
                                    const left = `<td><span>${name}</span></td>`;
                                    const right = `<td><p>${item.preview}</p></td>`;
                                    return `<tr onclick="window.location='${path}';">${left}${right}</tr>`;
                                }).join('');
                                searchSection.innerHTML = `${header}<table>${resultList}</table>`;
                            } else {
                                searchSection.innerHTML = `${header}<p>No results found.</p>`;
                            }
                            searchSection.setAttribute("class", "search-results");
                            mainSection.setAttribute("class", "content hidden");
                        } else {
                            searchSection.setAttribute("class", "search-results hidden");
                            mainSection.setAttribute("class", "content");
                        }
                    }
                    window.addEventListener('hashchange', onQueryParamsChange);
                    onQueryParamsChange();
                });
                "#)
            }
            nav(class="sub") {
                form(id="search-form", class="search-form", onsubmit="onSearchFormSubmit(event)") {
                    div(class="search-container") {
                        input(
                            id="search-input",
                            class="search-input",
                            name="search",
                            autocomplete="off",
                            spellcheck="false",
                            placeholder="Search the docs...",
                            type="search"
                        );
                    }
                }
            }
        };
        Ok(box_html! {
            head {
                meta(charset="utf-8");
                meta(name="viewport", content="width=device-width, initial-scale=1.0");
                meta(name="generator", content="swaydoc");
                meta(
                    name="description",
                    content="Workspace documentation index"
                );
                meta(name="keywords", content="sway, swaylang, sway-lang, workspace");
                link(rel="icon", href=format!("{}/sway-logo.svg", assets_path));
                title: "Workspace Documentation";
                link(rel="stylesheet", type="text/css", href=format!("{}/normalize.css", assets_path));
                link(rel="stylesheet", type="text/css", href=format!("{}/swaydoc.css", assets_path), id="mainThemeStyle");
                link(rel="stylesheet", type="text/css", href=format!("{}/ayu.css", assets_path));
                link(rel="stylesheet", href=format!("{}/ayu.min.css", assets_path));
            }
            body(class="swaydoc mod") {
                : sidebar;
                main {
                    div(class="width-limiter") {
                        : *workspace_searchbar;
                        section(id="main-content", class="content") {
                            div(class="main-heading") {
                                p { : "This workspace contains the following libraries:" }
                            }
                            h2(class="small-section-header") {
                                : "Libraries";
                            }
                            div(class="item-table") {
                                // One row per documented library, linking into
                                // the library's own doc subdirectory.
                                @ for lib in &self.documented_libraries {
                                    div(class="item-row") {
                                        div(class="item-left module-item") {
                                            a(class="mod", href=format!("{}/index.html", lib.name)) {
                                                : &lib.name;
                                            }
                                        }
                                        div(class="item-right docblock-short") {
                                            : &lib.description;
                                        }
                                    }
                                }
                            }
                        }
                        section(id="search", class="search-results");
                    }
                }
                script(src=format!("{}/highlight.js", assets_path));
                script {
                    : "hljs.highlightAll();";
                }
            }
        })
    }
}
/// Project level, all items belonging to a project
#[derive(Clone)]
pub(crate) struct AllDocIndex {
    /// A [ModuleInfo] with only the project name.
    project_name: ModuleInfo,
    /// All doc items.
    all_docs: DocLinks,
}
impl AllDocIndex {
pub(crate) fn new(project_name: ModuleInfo, all_docs: DocLinks) -> Self {
Self {
project_name,
all_docs,
}
}
}
impl SidebarNav for AllDocIndex {
    /// Builds the sidebar for the "all items" page, reusing the page's own
    /// link map (and its style) for navigation.
    fn sidebar(&self) -> Sidebar {
        Sidebar::new(
            None,
            self.all_docs.style.clone(),
            self.project_name.clone(),
            self.all_docs.clone(),
        )
    }
}
impl Renderable for AllDocIndex {
    /// Renders the `all.html` page: a flat listing of every documented item
    /// in the project, plus the shared searchbar and sidebar.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let doc_links = self.all_docs.clone().render(render_plan.clone())?;
        let sidebar = self.sidebar().render(render_plan)?;
        // This page sits one directory below the doc root, hence the `../`
        // prefixes on all asset paths.
        Ok(box_html! {
            head {
                meta(charset="utf-8");
                meta(name="viewport", content="width=device-width, initial-scale=1.0");
                meta(name="generator", content="swaydoc");
                meta(
                    name="description",
                    content="List of all items in this project"
                );
                meta(name="keywords", content="sway, swaylang, sway-lang");
                link(rel="icon", href=format!("../{ASSETS_DIR_NAME}/sway-logo.svg"));
                title: "List of all items in this project";
                link(rel="stylesheet", type="text/css", href=format!("../{ASSETS_DIR_NAME}/normalize.css"));
                link(rel="stylesheet", type="text/css", href=format!("../{ASSETS_DIR_NAME}/swaydoc.css"), id="mainThemeStyle");
                link(rel="stylesheet", type="text/css", href=format!("../{ASSETS_DIR_NAME}/ayu.css"));
                link(rel="stylesheet", href=format!("../{ASSETS_DIR_NAME}/ayu.min.css"));
            }
            body(class="swaydoc mod") {
                : sidebar;
                main {
                    div(class="width-limiter") {
                        : generate_searchbar(&self.project_name);
                        section(id="main-content", class="content") {
                            h1(class="fqn") {
                                span(class="in-band") { : "List of all items" }
                            }
                            : doc_links;
                        }
                        section(id="search", class="search-results");
                    }
                }
                script(src=format!("../{ASSETS_DIR_NAME}/highlight.js"));
                script {
                    : "hljs.highlightAll();";
                }
            }
        })
    }
}
/// The index for each module in a Sway project.
pub(crate) struct ModuleIndex {
    /// used only for the root module
    version_opt: Option<String>,
    /// Info for the module this index page belongs to.
    module_info: ModuleInfo,
    /// Links to the documented items listed on this page.
    module_docs: DocLinks,
}
impl ModuleIndex {
pub(crate) fn new(
version_opt: Option<String>,
module_info: ModuleInfo,
module_docs: DocLinks,
) -> Self {
Self {
version_opt,
module_info,
module_docs,
}
}
}
impl SidebarNav for ModuleIndex {
    fn sidebar(&self) -> Sidebar {
        // The root module keeps the page's own style (e.g. ProjectIndex);
        // every other module gets the plain module-index style.
        let style = if self.module_info.is_root_module() {
            self.module_docs.style.clone()
        } else {
            DocStyle::ModuleIndex
        };
        Sidebar::new(
            self.version_opt.clone(),
            style,
            self.module_info.clone(),
            self.module_docs.clone(),
        )
    }
}
impl Renderable for ModuleIndex {
    /// Renders the `index.html` page of a project or module: breadcrumb
    /// heading, optional expanded module docs, and the item tables.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let doc_links = self.module_docs.clone().render(render_plan.clone())?;
        let sidebar = self.sidebar().render(render_plan)?;
        let title_prefix = match self.module_docs.style {
            DocStyle::ProjectIndex(ref program_type) => format!("{program_type} "),
            DocStyle::ModuleIndex => "Module ".to_string(),
            _ => unreachable!("Module Index can only be either a project or module at this time."),
        };
        // All static assets live in one directory; compute every relative
        // path through a single helper instead of repeating the
        // format!/shorthand pair six times.
        let asset_path = |asset: &str| {
            self.module_info
                .to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/{asset}"))
        };
        let favicon = asset_path("sway-logo.svg");
        let normalize = asset_path("normalize.css");
        let swaydoc = asset_path("swaydoc.css");
        let ayu = asset_path("ayu.css");
        let sway_hjs = asset_path("highlight.js");
        let ayu_hjs = asset_path("ayu.min.css");
        let mut rendered_module_anchors = self.module_info.get_anchors()?;
        // Drop the final anchor — presumably the entry for this module
        // itself, which is rendered as the non-link tail of the breadcrumb
        // below. TODO(review): confirm against `ModuleInfo::get_anchors`.
        rendered_module_anchors.pop();
        Ok(box_html! {
            head {
                meta(charset="utf-8");
                meta(name="viewport", content="width=device-width, initial-scale=1.0");
                meta(name="generator", content="swaydoc");
                meta(
                    name="description",
                    content=format!(
                        "API documentation for the Sway `{}` module in `{}`.",
                        self.module_info.location(), self.module_info.project_name(),
                    )
                );
                meta(name="keywords", content=format!("sway, swaylang, sway-lang, {}", self.module_info.location()));
                link(rel="icon", href=favicon);
                title: format!("{} in {} - Sway", self.module_info.location(), self.module_info.project_name());
                link(rel="stylesheet", type="text/css", href=normalize);
                link(rel="stylesheet", type="text/css", href=swaydoc, id="mainThemeStyle");
                link(rel="stylesheet", type="text/css", href=ayu);
                link(rel="stylesheet", href=ayu_hjs);
            }
            body(class="swaydoc mod") {
                : sidebar;
                main {
                    div(class="width-limiter") {
                        : generate_searchbar(&self.module_info);
                        section(id="main-content", class="content") {
                            div(class="main-heading") {
                                h1(class="fqn") {
                                    span(class="in-band") {
                                        : title_prefix;
                                        @ for anchor in rendered_module_anchors {
                                            : Raw(anchor);
                                        }
                                        a(class=BlockTitle::Modules.class_title_str(), href=IDENTITY) {
                                            : self.module_info.location();
                                        }
                                    }
                                }
                            }
                            @ if self.module_info.attributes.is_some() {
                                details(class="swaydoc-toggle top-doc", open) {
                                    summary(class="hideme") {
                                        span { : "Expand description" }
                                    }
                                    div(class="docblock") {
                                        : Raw(self.module_info.attributes.unwrap())
                                    }
                                }
                            }
                            : doc_links;
                        }
                        section(id="search", class="search-results");
                    }
                }
                script(src=sway_hjs);
                script {
                    : "hljs.highlightAll();";
                }
            }
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/title.rs | forc-plugins/forc-doc/src/render/title.rs | use sway_core::{language::ty::TyDecl, TypeInfo};
/// Common interface for anything that appears as a titled block in the
/// generated docs (typed declarations, primitive types, ...).
pub trait DocBlock {
    /// Returns the title of the block that the user will see.
    fn title(&self) -> BlockTitle;
    /// Returns the name of the block that will be used in the html and css.
    fn name(&self) -> &str;
}
/// Represents all of the possible titles
/// belonging to an index or sidebar.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum BlockTitle {
    Modules,
    Structs,
    Enums,
    Traits,
    Abi,
    ContractStorage,
    Constants,
    Functions,
    Fields,
    Variants,
    RequiredMethods,
    ImplMethods,
    ImplTraits,
    Primitives,
}
impl BlockTitle {
    /// Human-readable section heading, e.g. "Contract Storage".
    pub fn as_str(&self) -> &str {
        match self {
            BlockTitle::Modules => "Modules",
            BlockTitle::Structs => "Structs",
            BlockTitle::Enums => "Enums",
            BlockTitle::Traits => "Traits",
            BlockTitle::Abi => "Abi",
            BlockTitle::ContractStorage => "Contract Storage",
            BlockTitle::Constants => "Constants",
            BlockTitle::Functions => "Functions",
            BlockTitle::Fields => "Fields",
            BlockTitle::Variants => "Variants",
            BlockTitle::RequiredMethods => "Required Methods",
            BlockTitle::ImplMethods => "Methods",
            BlockTitle::ImplTraits => "Trait Implementations",
            BlockTitle::Primitives => "Primitives",
        }
    }
    /// Singular form of the heading, used for per-item labels.
    pub fn item_title_str(&self) -> &str {
        match self {
            BlockTitle::Modules => "Module",
            BlockTitle::Structs => "Struct",
            BlockTitle::Enums => "Enum",
            BlockTitle::Traits => "Trait",
            BlockTitle::Abi => "Abi",
            BlockTitle::ContractStorage => "Contract Storage",
            BlockTitle::Constants => "Constant",
            BlockTitle::Functions => "Function",
            BlockTitle::Fields => "Fields",
            BlockTitle::Variants => "Variants",
            BlockTitle::RequiredMethods => "Required Methods",
            BlockTitle::ImplMethods => "Methods",
            BlockTitle::ImplTraits => "Trait Implementations",
            BlockTitle::Primitives => "Primitive",
        }
    }
    /// CSS class for the title. Only the listed kinds are ever styled;
    /// anything else is a programming error.
    pub fn class_title_str(&self) -> &str {
        match self {
            BlockTitle::Modules => "mod",
            BlockTitle::Structs => "struct",
            BlockTitle::Enums => "enum",
            BlockTitle::Traits => "trait",
            BlockTitle::Abi => "abi",
            BlockTitle::ContractStorage => "storage",
            BlockTitle::Constants => "constant",
            BlockTitle::Functions => "fn",
            BlockTitle::Primitives => "primitive",
            _ => unimplemented!(
                "BlockTitle {:?} is unimplemented, and should not be used this way.",
                self
            ),
        }
    }
    /// Kebab-cased heading used as a stable HTML id, e.g.
    /// "Contract Storage" -> "contract-storage".
    pub fn html_title_string(&self) -> String {
        // All headings are single-space-separated words, so lowercasing and
        // hyphenating spaces is equivalent to the former
        // split_whitespace/join round-trip.
        self.as_str().to_lowercase().replace(' ', "-")
    }
}
impl DocBlock for TyDecl {
    /// Section the declaration is listed under. Panics for declaration
    /// kinds that are never documented; callers must filter those first.
    fn title(&self) -> BlockTitle {
        match self {
            TyDecl::StructDecl { .. } => BlockTitle::Structs,
            TyDecl::EnumDecl { .. } => BlockTitle::Enums,
            TyDecl::TraitDecl { .. } => BlockTitle::Traits,
            TyDecl::AbiDecl { .. } => BlockTitle::Abi,
            TyDecl::StorageDecl { .. } => BlockTitle::ContractStorage,
            TyDecl::ConstantDecl { .. } => BlockTitle::Constants,
            TyDecl::FunctionDecl { .. } => BlockTitle::Functions,
            _ => {
                unreachable!(
                    "TyDecls {:?} is non-documentable and should never be matched on.",
                    self
                )
            }
        }
    }
    /// Snake-cased kind name used in html/css.
    /// NOTE(review): this accepts `ImplSelfOrTrait` and `TypeAliasDecl`,
    /// which `title()` above rejects — confirm whether that asymmetry is
    /// intentional.
    fn name(&self) -> &str {
        match self {
            TyDecl::StructDecl(_) => "struct",
            TyDecl::EnumDecl(_) => "enum",
            TyDecl::TraitDecl(_) => "trait",
            TyDecl::AbiDecl(_) => "abi",
            TyDecl::StorageDecl(_) => "contract_storage",
            TyDecl::ImplSelfOrTrait(_) => "impl_trait",
            TyDecl::FunctionDecl(_) => "fn",
            TyDecl::ConstantDecl(_) => "constant",
            TyDecl::TypeAliasDecl(_) => "type_alias",
            _ => {
                unreachable!(
                    "TyDecl {:?} is non-documentable and should never be matched on.",
                    self
                )
            }
        }
    }
}
impl DocBlock for TypeInfo {
    /// Only built-in primitive types are documented as blocks; every other
    /// `TypeInfo` variant is a programming error here.
    fn title(&self) -> BlockTitle {
        match self {
            sway_core::TypeInfo::StringSlice
            | sway_core::TypeInfo::StringArray(_)
            | sway_core::TypeInfo::Boolean
            | sway_core::TypeInfo::B256
            | sway_core::TypeInfo::UnsignedInteger(_) => BlockTitle::Primitives,
            _ => {
                unimplemented!(
                    "TypeInfo {:?} is non-documentable and should never be matched on.",
                    self
                )
            }
        }
    }
    /// Html/css kind name; all documentable type infos are "primitive".
    fn name(&self) -> &str {
        match self {
            sway_core::TypeInfo::StringSlice
            | sway_core::TypeInfo::StringArray(_)
            | sway_core::TypeInfo::Boolean
            | sway_core::TypeInfo::B256
            | sway_core::TypeInfo::UnsignedInteger(_) => "primitive",
            _ => {
                unimplemented!(
                    "TypeInfo {:?} is non-documentable and should never be matched on.",
                    self
                )
            }
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/search.rs | forc-plugins/forc-doc/src/render/search.rs | //! Generates the searchbar.
use crate::doc::module::ModuleInfo;
use horrorshow::{box_html, Raw, RenderBox};
use minifier::js::minify;
/// Renders the shared searchbar plus the client-side search script.
///
/// The script filters the pre-built `SEARCH_INDEX` (loaded from
/// `search.js`) on the `?search=` query parameter and swaps the results
/// section in for the main content while a query is active.
pub(crate) fn generate_searchbar(module_info: &ModuleInfo) -> Box<dyn RenderBox> {
    let path_to_root = module_info.path_to_root();
    // Since this searchbar is rendered on all pages, we need to inject the path the root into the script.
    // Therefore, we can't simply import this script from a javascript file.
    let minified_script = minify(&format!(r#"
        function onSearchFormSubmit(event) {{
            event.preventDefault();
            const searchQuery = document.getElementById("search-input").value;
            const url = new URL(window.location.href);
            if (searchQuery) {{
                url.searchParams.set('search', searchQuery);
            }} else {{
                url.searchParams.delete('search');
            }}
            history.pushState({{ search: searchQuery }}, "", url);
            window.dispatchEvent(new HashChangeEvent("hashchange"));
        }}
        document.addEventListener('DOMContentLoaded', () => {{
            const searchbar = document.getElementById("search-input");
            const searchForm = document.getElementById("search-form");
            searchbar.addEventListener("keyup", function(event) {{
                onSearchFormSubmit(event);
            }});
            searchbar.addEventListener("search", function(event) {{
                onSearchFormSubmit(event);
            }});
            function onQueryParamsChange() {{
                const searchParams = new URLSearchParams(window.location.search);
                const query = searchParams.get("search");
                const searchSection = document.getElementById('search');
                const mainSection = document.getElementById('main-content');
                const searchInput = document.getElementById('search-input');
                if (query) {{
                    searchInput.value = query;
                    const results = Object.values(SEARCH_INDEX).flat().filter(item => {{
                        const lowerQuery = query.toLowerCase();
                        return item.name.toLowerCase().includes(lowerQuery);
                    }});
                    const header = `<h1>Results for ${{query}}</h1>`;
                    if (results.length > 0) {{
                        const resultList = results.map(item => {{
                            const formattedName = `<span class="type ${{item.type_name}}">${{item.name}}</span>`;
                            const name = item.type_name === "module"
                                ? [...item.module_info.slice(0, -1), formattedName].join("::")
                                : [...item.module_info, formattedName].join("::");
                            const path = ["{path_to_root}", ...item.module_info, item.html_filename].join("/");
                            const left = `<td><span>${{name}}</span></td>`;
                            const right = `<td><p>${{item.preview}}</p></td>`;
                            return `<tr onclick="window.location='${{path}}';">${{left}}${{right}}</tr>`;
                        }}).join('');
                        searchSection.innerHTML = `${{header}}<table>${{resultList}}</table>`;
                    }} else {{
                        searchSection.innerHTML = `${{header}}<p>No results found.</p>`;
                    }}
                    searchSection.setAttribute("class", "search-results");
                    mainSection.setAttribute("class", "content hidden");
                }} else {{
                    searchSection.setAttribute("class", "search-results hidden");
                    mainSection.setAttribute("class", "content");
                }}
            }}
            window.addEventListener('hashchange', onQueryParamsChange);
            // Check for any query parameters initially
            onQueryParamsChange();
        }}
    );"#)).to_string();
    box_html! {
        script(src=format!("{}/search.js", path_to_root), type="text/javascript");
        script {
            : Raw(minified_script)
        }
        nav(class="sub") {
            form(id="search-form", class="search-form", onsubmit="onSearchFormSubmit(event)") {
                div(class="search-container") {
                    input(
                        id="search-input",
                        class="search-input",
                        name="search",
                        autocomplete="off",
                        spellcheck="false",
                        placeholder="Search the docs...",
                        type="search"
                    );
                }
            }
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/mod.rs | forc-plugins/forc-doc/src/render/mod.rs | //! Renders [Documentation] to HTML.
use crate::{
doc::{
module::{ModuleInfo, ModulePrefixes},
Document, Documentation,
},
render::{
index::{AllDocIndex, ModuleIndex},
link::{DocLink, DocLinks},
title::BlockTitle,
util::format::docstring::DocStrings,
},
RenderPlan,
};
use anyhow::Result;
use horrorshow::{box_html, helper::doctype, html, prelude::*};
use rayon::prelude::*;
use std::{
collections::BTreeMap,
ops::{Deref, DerefMut},
};
use sway_core::{language::ty::TyProgramKind, transform::Attributes};
use sway_types::BaseIdent;
pub mod index;
pub mod item;
pub mod link;
mod search;
mod sidebar;
mod title;
pub mod util;
/// File name of the "all items" page.
pub const ALL_DOC_FILENAME: &str = "all.html";
/// File name of every project/module index page.
pub const INDEX_FILENAME: &str = "index.html";
/// Href for anchors that link back to the current page.
pub const IDENTITY: &str = "#";
/// Doc links grouped under their section title.
type DocLinkMap = BTreeMap<BlockTitle, Vec<DocLink>>;
/// Grouped doc links keyed by module prefix.
type ModuleMap = BTreeMap<ModulePrefixes, DocLinkMap>;
/// Output of rendering one document in the parallel pass.
type RenderResult = (RenderedDocument, ModuleMap, DocLinks);
/// Something that can be rendered to HTML.
pub(crate) trait Renderable {
    /// Consumes `self` and produces its HTML under the given render plan.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>>;
}
impl Renderable for Document {
    /// Renders an item page as its header immediately followed by its body.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let rendered_header = self.item_header.render(render_plan.clone())?;
        let rendered_body = self.item_body.render(render_plan)?;
        Ok(box_html! {
            : rendered_header;
            : rendered_body;
        })
    }
}
/// A [Document] rendered to HTML.
#[derive(Debug)]
pub struct RenderedDocument {
    /// Module the document belongs to.
    pub module_info: ModuleInfo,
    /// File name of the generated page (e.g. `index.html`).
    pub html_filename: String,
    /// The finished HTML for the page.
    pub file_contents: HTMLString,
}
impl RenderedDocument {
    /// Renders a single [Document] into its final page, keeping the module
    /// info and file name needed to place it in the output tree.
    fn from_doc(doc: &Document, render_plan: RenderPlan) -> Result<Self> {
        Ok(Self {
            module_info: doc.module_info.clone(),
            html_filename: doc.html_filename(),
            // `render` consumes the document, so clone it first.
            file_contents: HTMLString::from_rendered_content(doc.clone().render(render_plan)?)?,
        })
    }
}
// Newtype over all rendered pages produced for a program.
#[derive(Default)]
pub struct RenderedDocumentation(pub Vec<RenderedDocument>);
impl RenderedDocumentation {
    /// Top level HTML rendering for all [Documentation] of a program.
    ///
    /// Renders every item page in parallel, then sequentially merges the
    /// per-document link maps and emits the project index, one index per
    /// module, and the "all items" page.
    pub fn from_raw_docs(
        raw_docs: Documentation,
        render_plan: RenderPlan,
        root_attributes: Option<Attributes>,
        program_kind: &TyProgramKind,
        forc_version: Option<String>,
    ) -> Result<RenderedDocumentation> {
        let mut rendered_docs: RenderedDocumentation = RenderedDocumentation::default();
        // NOTE(review): empty `raw_docs` panics here — callers must
        // guarantee at least one documented module exists.
        let root_module = match raw_docs.0.first() {
            Some(doc) => ModuleInfo::from_ty_module(
                vec![doc.module_info.project_name().to_owned()],
                root_attributes.map(|attrs_map| attrs_map.to_html_string()),
            ),
            None => panic!("Project does not contain a root module"),
        };
        let mut all_docs = DocLinks {
            style: DocStyle::AllDoc(program_kind.as_title_str().to_string()),
            links: BTreeMap::default(),
        };
        // Parallel document rendering
        let rendered_results: Result<Vec<RenderResult>, anyhow::Error> = raw_docs
            .0
            .par_iter()
            .map(|doc| {
                let rendered_doc = RenderedDocument::from_doc(doc, render_plan.clone())?;
                // Each worker builds its own link maps; they are merged
                // below, after the parallel pass.
                let mut local_module_map = ModuleMap::new();
                let mut local_all_docs = DocLinks {
                    style: DocStyle::AllDoc(program_kind.as_title_str().to_string()),
                    links: BTreeMap::default(),
                };
                populate_decls(doc, &mut local_module_map);
                populate_modules(doc, &mut local_module_map);
                populate_doc_links(doc, &mut local_all_docs.links);
                Ok((rendered_doc, local_module_map, local_all_docs))
            })
            .collect();
        // Merge results sequentially
        let mut module_map = ModuleMap::new();
        for (rendered_doc, local_module_map, local_all_docs) in rendered_results? {
            rendered_docs.0.push(rendered_doc);
            // Merge module maps without overwriting existing categories; append and dedup links.
            for (key, value) in local_module_map {
                let entry = module_map.entry(key).or_default();
                for (block, mut links) in value {
                    let list = entry.entry(block).or_default();
                    // Append new links while avoiding duplicates.
                    for link in links.drain(..) {
                        if !list.contains(&link) {
                            list.push(link);
                        }
                    }
                }
            }
            // Merge "all docs" links similarly, preserving existing items.
            for (block, mut links) in local_all_docs.links {
                let list = all_docs.links.entry(block).or_default();
                for link in links.drain(..) {
                    if !list.contains(&link) {
                        list.push(link);
                    }
                }
            }
        }
        // ProjectIndex
        match module_map.get(&root_module.module_prefixes) {
            Some(doc_links) => rendered_docs.push(RenderedDocument {
                module_info: root_module.clone(),
                html_filename: INDEX_FILENAME.to_string(),
                file_contents: HTMLString::from_rendered_content(
                    ModuleIndex::new(
                        forc_version,
                        root_module.clone(),
                        DocLinks {
                            style: DocStyle::ProjectIndex(program_kind.as_title_str().to_string()),
                            links: doc_links.to_owned(),
                        },
                    )
                    .render(render_plan.clone())?,
                )?,
            }),
            None => panic!("Project does not contain a root module."),
        }
        if module_map.len() > 1 {
            // The root was rendered above as the project index; only the
            // remaining entries become module indexes.
            module_map.remove_entry(&root_module.module_prefixes);
            // ModuleIndex(s)
            for (module_prefixes, doc_links) in module_map {
                let module_info_opt = match doc_links.values().last() {
                    Some(doc_links) => doc_links
                        .first()
                        .map(|doc_link| doc_link.module_info.clone()),
                    // No module to be documented
                    None => None,
                };
                if let Some(module_info) = module_info_opt {
                    rendered_docs.push(RenderedDocument {
                        module_info: module_info.clone(),
                        html_filename: INDEX_FILENAME.to_string(),
                        file_contents: HTMLString::from_rendered_content(
                            ModuleIndex::new(
                                None,
                                module_info.clone(),
                                DocLinks {
                                    style: DocStyle::ModuleIndex,
                                    links: doc_links.to_owned(),
                                },
                            )
                            .render(render_plan.clone())?,
                        )?,
                    });
                    // If the representative link's module differs from the
                    // map key, also emit an index under the key's prefixes.
                    if module_info.module_prefixes != module_prefixes {
                        let module_info = ModuleInfo::from_ty_module(module_prefixes, None);
                        rendered_docs.push(RenderedDocument {
                            module_info: module_info.clone(),
                            html_filename: INDEX_FILENAME.to_string(),
                            file_contents: HTMLString::from_rendered_content(
                                ModuleIndex::new(
                                    None,
                                    module_info,
                                    DocLinks {
                                        style: DocStyle::ModuleIndex,
                                        links: doc_links.clone(),
                                    },
                                )
                                .render(render_plan.clone())?,
                            )?,
                        });
                    }
                }
            }
        }
        // AllDocIndex
        rendered_docs.push(RenderedDocument {
            module_info: root_module.clone(),
            html_filename: ALL_DOC_FILENAME.to_string(),
            file_contents: HTMLString::from_rendered_content(
                AllDocIndex::new(root_module, all_docs).render(render_plan)?,
            )?,
        });
        Ok(rendered_docs)
    }
}
// Let the newtype be used directly as a Vec of rendered documents.
impl Deref for RenderedDocumentation {
    type Target = Vec<RenderedDocument>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for RenderedDocumentation {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Adds a document's link to the appropriate category in the doc links map.
fn populate_doc_links(doc: &Document, doc_links: &mut DocLinkMap) {
    let key = doc.item_body.ty.as_block_title();
    // `entry` does a single map lookup, creating an empty bucket the first
    // time a category is seen (replaces the previous get_mut/insert pair).
    doc_links.entry(key).or_default().push(doc.link());
}
/// Organizes document links by module prefix for navigation.
fn populate_decls(doc: &Document, module_map: &mut ModuleMap) {
    let module_prefixes = &doc.module_info.module_prefixes;
    // `entry` replaces the previous get_mut-or-insert dance with a single
    // lookup; the key clone matches what the insert path did before.
    let doc_links = module_map.entry(module_prefixes.clone()).or_default();
    populate_doc_links(doc, doc_links);
}
/// Creates links to parent modules for hierarchical navigation.
///
/// Walks up the module tree: for every ancestor of `doc`'s module, a link
/// to the current level is registered under that ancestor's "Modules"
/// section.
fn populate_modules(doc: &Document, module_map: &mut ModuleMap) {
    let mut module_clone = doc.module_info.clone();
    while module_clone.parent().is_some() {
        // Presumably modules below depth 2 share the root's directory while
        // deeper modules live in their own subdirectory — TODO(review)
        // confirm against `ModuleInfo::depth`/`location` semantics.
        let html_filename = if module_clone.depth() > 2 {
            format!("{}/{INDEX_FILENAME}", module_clone.location())
        } else {
            INDEX_FILENAME.to_string()
        };
        let module_link = DocLink {
            name: module_clone.location().to_owned(),
            module_info: module_clone.clone(),
            html_filename,
            preview_opt: doc.module_info.preview_opt(),
        };
        // Prefixes of the parent module: everything but the last segment.
        // The `unwrap` assumes the prefix list is non-empty whenever
        // `parent()` is `Some` — TODO(review) confirm in `ModuleInfo`.
        let module_prefixes = module_clone
            .module_prefixes
            .clone()
            .split_last()
            .unwrap()
            .1
            .to_vec();
        if let Some(doc_links) = module_map.get_mut(&module_prefixes) {
            match doc_links.get_mut(&BlockTitle::Modules) {
                Some(links) => {
                    // Several documents share a module; only record it once.
                    if !links.contains(&module_link) {
                        links.push(module_link);
                    }
                }
                None => {
                    doc_links.insert(BlockTitle::Modules, vec![module_link]);
                }
            }
        } else {
            let mut doc_links = DocLinkMap::new();
            doc_links.insert(BlockTitle::Modules, vec![module_link]);
            module_map.insert(module_prefixes.clone(), doc_links);
        }
        // Step up to the parent for the next iteration.
        module_clone.module_prefixes.pop();
    }
}
/// The finalized HTML file contents.
// Newtype over the complete text of one generated page.
#[derive(Debug)]
pub struct HTMLString(pub String);
impl HTMLString {
    /// Final rendering of a [Document] HTML page to String.
    pub fn from_rendered_content(rendered_content: Box<dyn RenderBox>) -> Result<Self> {
        // Wrap the page content in the doctype and root <html> element.
        Ok(Self(
            html! {
                : doctype::HTML;
                html {
                    : rendered_content
                }
            }
            .into_string()?,
        ))
    }
}
/// The type of document. Helpful in determining what to represent in
/// the sidebar & page content.
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum DocStyle {
    /// The "all items" listing; the string is the program-kind title.
    AllDoc(String),
    /// The root index of a single project; the string is the program-kind title.
    ProjectIndex(String),
    /// The root index of a workspace.
    WorkspaceIndex,
    /// The index page of a non-root module.
    ModuleIndex,
    /// A page for one documented item.
    Item {
        title: Option<BlockTitle>,
        name: Option<BaseIdent>,
    },
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/sidebar.rs | forc-plugins/forc-doc/src/render/sidebar.rs | use crate::ASSETS_DIR_NAME;
use std::path::PathBuf;
use crate::{
doc::module::ModuleInfo,
render::{
BlockTitle, DocLinks, DocStyle, Renderable, {ALL_DOC_FILENAME, IDENTITY, INDEX_FILENAME},
},
RenderPlan,
};
use anyhow::Result;
use horrorshow::{box_html, Raw, RenderBox, Template};
// Implemented by every page type that carries a navigation sidebar.
pub(crate) trait SidebarNav {
    /// Create sidebar component.
    fn sidebar(&self) -> Sidebar;
}
/// Sidebar component for quick navigation.
pub(crate) struct Sidebar {
    /// Forc version to display, when known (root module pages).
    version_opt: Option<String>,
    /// Layout the sidebar is rendered for.
    style: DocStyle,
    /// Module the current page belongs to.
    module_info: ModuleInfo,
    /// support for page navigation
    nav: DocLinks,
}
impl Sidebar {
pub(crate) fn new(
version_opt: Option<String>,
style: DocStyle,
module_info: ModuleInfo,
nav: DocLinks,
) -> Self {
Self {
version_opt,
style,
module_info,
nav,
}
}
}
impl Renderable for Sidebar {
fn render(self, _render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
// For workspace sidebar, use direct path to logo (no path prefix needed)
let path_to_logo = if matches!(self.style, DocStyle::WorkspaceIndex) {
format!("{ASSETS_DIR_NAME}/sway-logo.svg")
} else {
self.module_info
.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/sway-logo.svg"))
};
let style = self.style.clone();
let version_opt = self.version_opt.clone();
let location_with_prefix = match &style {
DocStyle::AllDoc(project_kind) | DocStyle::ProjectIndex(project_kind) => {
format!("{project_kind} {}", self.module_info.location())
}
DocStyle::WorkspaceIndex => {
format!("Workspace {}", self.module_info.location())
}
DocStyle::ModuleIndex => format!(
"{} {}",
BlockTitle::Modules.item_title_str(),
self.module_info.location()
),
DocStyle::Item { title, name } => {
let title = title.clone().expect("Expected a BlockTitle");
let name = name.clone().expect("Expected a BaseIdent");
format!("{} {}", title.item_title_str(), name.as_str())
}
};
let root_path = self.module_info.to_html_shorthand_path_string(
PathBuf::from(self.module_info.project_name())
.join(INDEX_FILENAME)
.to_str()
.ok_or_else(|| anyhow::anyhow!(
"found invalid root file path for {}\nmake sure your project's name contains only valid unicode characters",
self.module_info.project_name(),
))?,
);
let logo_path_to_root = match style {
DocStyle::AllDoc(_) | DocStyle::Item { .. } | DocStyle::ModuleIndex => root_path,
DocStyle::ProjectIndex(_) | DocStyle::WorkspaceIndex => IDENTITY.to_owned(),
};
// Unfortunately, match arms that return a closure, even if they are the same
// type, are incompatible. The work around is to return a String instead,
// and render it from Raw in the final output.
let styled_content = match &self.style {
DocStyle::WorkspaceIndex => {
let nav_links = &self.nav.links;
box_html! {
div(class="sidebar-elems") {
section {
div(class="block") {
ul {
@ for (_title, doc_links) in nav_links {
@ for doc_link in doc_links {
li {
a(href=format!("{}", doc_link.html_filename)) {
: doc_link.name.clone();
}
}
}
}
}
}
}
}
}
.into_string()
.unwrap()
}
DocStyle::ProjectIndex(_) => {
let nav_links = &self.nav.links;
let all_items = format!("See all {}'s items", self.module_info.project_name());
box_html! {
div(class="sidebar-elems") {
a(id="all-types", href=ALL_DOC_FILENAME) {
p: all_items;
}
section {
div(class="block") {
ul {
@ for (title, _) in nav_links {
li {
a(href=format!("{}{}", IDENTITY, title.html_title_string())) {
: title.as_str();
}
}
}
}
}
}
}
}
.into_string()
.unwrap()
}
DocStyle::AllDoc(_) => {
let nav_links = &self.nav.links;
box_html! {
div(class="sidebar-elems") {
a(id="all-types", href=INDEX_FILENAME) {
p: "Back to index";
}
section {
div(class="block") {
ul {
@ for (title, _) in nav_links {
li {
a(href=format!("{}{}", IDENTITY, title.html_title_string())) {
: title.as_str();
}
}
}
}
}
}
}
}
.into_string()
.unwrap()
}
_ => box_html! {
div(class="sidebar-elems") {
@ for (title, doc_links) in &self.nav.links {
section {
h3 {
a(href=format!("{}{}", IDENTITY, title.html_title_string())) {
: title.as_str();
}
}
ul(class="block method") {
@ for doc_link in doc_links {
li {
a(href=format!("{}", doc_link.html_filename)) {
: doc_link.name.clone();
}
}
}
}
}
}
}
}
.into_string()
.unwrap(),
};
Ok(box_html! {
nav(class="sidebar") {
a(class="sidebar-logo", href=&logo_path_to_root) {
div(class="logo-container") {
img(class="sway-logo", src=path_to_logo, alt="logo");
}
}
h2(class="location") {
: location_with_prefix;
}
@ if let DocStyle::ProjectIndex(_) = style.clone() {
@ if version_opt.is_some() {
div(class="version") {
p: version_opt.unwrap();
}
}
}
: Raw(styled_content);
}
})
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/util/mod.rs | forc-plugins/forc-doc/src/render/util/mod.rs | //! Utilities for managing edge cases in rendering types and their corresponding documentation.
pub mod format;
/// Strip the generic suffix from a type name. For example, `Foo<T>` would become `Foo`.
pub fn strip_generic_suffix(input: &str) -> &str {
input.split_once('<').map_or(input, |(head, _)| head)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/util/format/docstring.rs | forc-plugins/forc-doc/src/render/util/format/docstring.rs | //! Rendering and formatting for Sway doc attributes.
use crate::render::util::format::constant::*;
use comrak::{markdown_to_html, ComrakOptions};
use std::fmt::Write;
use sway_core::transform::{AttributeKind, Attributes};
use sway_lsp::utils::markdown::format_docs;
pub(crate) trait DocStrings {
fn to_html_string(&self) -> String;
fn to_raw_string(&self) -> String;
}
/// Creates an HTML String from [Attributes].
impl DocStrings for Attributes {
fn to_html_string(&self) -> String {
let docs = self.to_raw_string();
let mut options = ComrakOptions::default();
options.render.hardbreaks = true;
options.extension.strikethrough = true;
options.extension.table = true;
options.extension.autolink = true;
options.extension.superscript = true;
options.extension.footnotes = true;
options.parse.smart = true;
options.parse.default_info_string = Some(SWAY_FILEINE.into());
markdown_to_html(&format_docs(&docs), &options)
}
fn to_raw_string(&self) -> String {
let mut docs = String::new();
// TODO: Change this logic once https://github.com/FuelLabs/sway/issues/6938 gets implemented.
for arg in self
.of_kind(AttributeKind::DocComment)
.flat_map(|attribute| &attribute.args)
{
writeln!(docs, "{}", arg.name.as_str())
.expect("problem appending `arg.name.as_str()` to `docs` with `writeln` macro.");
}
docs
}
}
/// Create a docstring preview from raw html attributes.
///
/// Returns `None` if there are no attributes.
pub(crate) fn create_preview(raw_attributes: Option<String>) -> Option<String> {
raw_attributes.as_ref().map(|description| {
let preview = split_at_markdown_header(description);
if preview.chars().count() > MAX_PREVIEW_CHARS && preview.contains(CLOSING_PARAGRAPH_TAG) {
let closing_tag_index = preview
.find(CLOSING_PARAGRAPH_TAG)
.expect("closing tag out of range");
// We add 1 here to get the index of the char after the closing tag.
// This ensures we retain the closing tag and don't break the html.
let (preview, _) =
preview.split_at(closing_tag_index + CLOSING_PARAGRAPH_TAG.len() + 1);
if preview.chars().count() > MAX_PREVIEW_CHARS && preview.contains(NEWLINE_CHAR) {
let newline_index = preview
.find(NEWLINE_CHAR)
.expect("new line char out of range");
preview.split_at(newline_index).0.to_string()
} else {
preview.to_string()
}
} else {
preview.to_string()
}
})
}
/// Checks if some raw html (rendered from markdown) contains a header.
/// If it does, it splits at the header and returns the slice that preceded it.
pub(crate) fn split_at_markdown_header(raw_html: &str) -> &str {
for header in HTML_HEADERS {
if raw_html.contains(header) {
let v: Vec<_> = raw_html.split(header).collect();
return v.first().expect("expected non-empty &str");
}
continue;
}
raw_html
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/util/format/mod.rs | forc-plugins/forc-doc/src/render/util/format/mod.rs | //! Formatting utility, mainly for use in complex HTML string manipulation.
mod constant;
pub mod docstring;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/util/format/constant.rs | forc-plugins/forc-doc/src/render/util/format/constant.rs | pub(crate) const MAX_PREVIEW_CHARS: usize = 100;
pub(crate) const CLOSING_PARAGRAPH_TAG: &str = "</p>";
const H1: &str = "<h1>";
const H2: &str = "<h2>";
const H3: &str = "<h3>";
const H4: &str = "<h4>";
const H5: &str = "<h5>";
pub(crate) const HTML_HEADERS: &[&str] = &[H1, H2, H3, H4, H5];
pub(crate) const NEWLINE_CHAR: char = '\n';
pub(crate) const SWAY_FILEINE: &str = "sway";
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/item/mod.rs | forc-plugins/forc-doc/src/render/item/mod.rs | pub mod components;
pub mod context;
pub mod documentable_type;
mod type_anchor;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/item/type_anchor.rs | forc-plugins/forc-doc/src/render/item/type_anchor.rs | //! Creation of HTML anchors for types that can be linked.
use crate::{doc::module::ModuleInfo, RenderPlan};
use anyhow::{anyhow, Result};
use horrorshow::{box_html, RenderBox};
use sway_core::{AbiName, TypeInfo};
use sway_types::{Named, Spanned};
/// Handles types & nested types that should have links
/// eg. (`[]` represent types with links).
///
/// ```sway
/// struct Foo {
/// foo: ([Foo], (u32, [Foo], ([Foo], [Foo])))
/// }
/// ```
//
// TODO: Add checks for multiline types
pub(crate) fn render_type_anchor(
type_info: TypeInfo,
render_plan: &RenderPlan,
current_module_info: &ModuleInfo,
) -> Result<Box<dyn RenderBox>> {
match type_info {
TypeInfo::Array(ty_arg, len) => {
let inner = render_type_anchor(
(*render_plan.engines.te().get(ty_arg.type_id)).clone(),
render_plan,
current_module_info,
)?;
let len_string = format!("{:?}", render_plan.engines.help_out(len.expr()));
Ok(box_html! {
: "[";
: inner;
: format!("; {}]", len_string);
})
}
TypeInfo::Slice(ty_arg) => {
let inner = render_type_anchor(
(*render_plan.engines.te().get(ty_arg.type_id)).clone(),
render_plan,
current_module_info,
)?;
Ok(box_html! {
: "__slice[";
: inner;
: "]";
})
}
TypeInfo::Tuple(ty_args) => {
let mut rendered_args: Vec<_> = Vec::new();
for ty_arg in ty_args {
rendered_args.push(render_type_anchor(
(*render_plan.engines.te().get(ty_arg.type_id)).clone(),
render_plan,
current_module_info,
)?);
}
Ok(box_html! {
: "(";
@ for arg in rendered_args {
: arg;
}
: ")";
})
}
TypeInfo::Enum(decl_id) => {
let enum_decl = render_plan.engines.de().get_enum(&decl_id);
if !render_plan.document_private_items && enum_decl.visibility.is_private() {
Ok(box_html! {
: enum_decl.name().as_str();
})
} else {
let module_info = ModuleInfo::from_call_path(&enum_decl.call_path);
let file_name = format!("enum.{}.html", enum_decl.name().as_str());
let href =
module_info.file_path_from_location(&file_name, current_module_info, false)?;
Ok(box_html! {
a(class="enum", href=href) {
: enum_decl.name().as_str();
}
})
}
}
TypeInfo::Struct(decl_id) => {
let struct_decl = render_plan.engines.de().get_struct(&decl_id);
if !render_plan.document_private_items && struct_decl.visibility.is_private() {
Ok(box_html! {
: struct_decl.name().as_str();
})
} else {
let module_info = ModuleInfo::from_call_path(&struct_decl.call_path);
let file_name = format!("struct.{}.html", struct_decl.name().as_str());
let href =
module_info.file_path_from_location(&file_name, current_module_info, false)?;
Ok(box_html! {
a(class="struct", href=href) {
: struct_decl.name().as_str();
}
})
}
}
TypeInfo::UnknownGeneric { name, .. } => Ok(box_html! {
: name.as_str();
}),
TypeInfo::StringArray(len) => Ok(box_html! {
: len.expr().span().as_str();
}),
TypeInfo::UnsignedInteger(int_bits) => {
use sway_types::integer_bits::IntegerBits;
let uint = match int_bits {
IntegerBits::Eight => "u8",
IntegerBits::Sixteen => "u16",
IntegerBits::ThirtyTwo => "u32",
IntegerBits::SixtyFour => "u64",
IntegerBits::V256 => "u256",
};
Ok(box_html! {
: uint;
})
}
TypeInfo::Boolean => Ok(box_html! {
: "bool";
}),
TypeInfo::ContractCaller { abi_name, .. } => {
// TODO: determine whether we should give a link to this
if let AbiName::Known(name) = abi_name {
Ok(box_html! {
: name.suffix.as_str();
})
} else {
Err(anyhow!("Deferred AbiName is unhandled"))
}
}
TypeInfo::Custom {
qualified_call_path,
..
} => Ok(box_html! {
: qualified_call_path.call_path.suffix.as_str();
}),
TypeInfo::B256 => Ok(box_html! {
: "b256";
}),
_ => Err(anyhow!("Undetermined or unusable TypeInfo")),
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/item/components.rs | forc-plugins/forc-doc/src/render/item/components.rs | //! Handles creation of the head and body of an HTML doc.
use crate::{
doc::module::ModuleInfo,
render::{
item::context::ItemContext,
search::generate_searchbar,
sidebar::{Sidebar, SidebarNav},
DocStyle, Renderable, IDENTITY,
},
RenderPlan, ASSETS_DIR_NAME,
};
use anyhow::Result;
use horrorshow::{box_html, Raw, RenderBox};
use sway_types::BaseIdent;
use super::documentable_type::DocumentableType;
// Asset file names to avoid repeated string formatting
const SWAY_LOGO_FILE: &str = "sway-logo.svg";
const NORMALIZE_CSS_FILE: &str = "normalize.css";
const SWAYDOC_CSS_FILE: &str = "swaydoc.css";
const AYU_CSS_FILE: &str = "ayu.css";
const AYU_MIN_CSS_FILE: &str = "ayu.min.css";
/// All necessary components to render the header portion of
/// the item html doc.
#[derive(Clone, Debug)]
pub struct ItemHeader {
pub module_info: ModuleInfo,
pub friendly_name: &'static str,
pub item_name: BaseIdent,
}
impl Renderable for ItemHeader {
/// Basic HTML header component
fn render(self, _render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
let ItemHeader {
module_info,
friendly_name,
item_name,
} = self;
let favicon = module_info
.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/{SWAY_LOGO_FILE}"));
let normalize = module_info
.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/{NORMALIZE_CSS_FILE}"));
let swaydoc = module_info
.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/{SWAYDOC_CSS_FILE}"));
let ayu =
module_info.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/{AYU_CSS_FILE}"));
let ayu_hjs = module_info
.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/{AYU_MIN_CSS_FILE}"));
Ok(box_html! {
head {
meta(charset="utf-8");
meta(name="viewport", content="width=device-width, initial-scale=1.0");
meta(name="generator", content="swaydoc");
meta(
name="description",
content=format!(
"API documentation for the Sway `{}` {} in `{}`.",
item_name.as_str(), friendly_name, module_info.location(),
)
);
meta(name="keywords", content=format!("sway, swaylang, sway-lang, {}", item_name.as_str()));
link(rel="icon", href=favicon);
title: format!("{} in {} - Sway", item_name.as_str(), module_info.location());
link(rel="stylesheet", type="text/css", href=normalize);
link(rel="stylesheet", type="text/css", href=swaydoc, id="mainThemeStyle");
link(rel="stylesheet", type="text/css", href=ayu);
link(rel="stylesheet", href=ayu_hjs);
// TODO: Add links for fonts
}
})
}
}
/// All necessary components to render the body portion of
/// the item html doc. Many parts of the HTML body structure will be the same
/// for each item, but things like struct fields vs trait methods will be different.
#[derive(Clone, Debug)]
pub struct ItemBody {
pub module_info: ModuleInfo,
pub ty: DocumentableType,
/// The item name varies depending on type.
/// We store it during info gathering to avoid
/// multiple match statements.
pub item_name: BaseIdent,
pub code_str: String,
pub attrs_opt: Option<String>,
pub item_context: ItemContext,
}
impl SidebarNav for ItemBody {
fn sidebar(&self) -> Sidebar {
let style = DocStyle::Item {
title: Some(self.ty.as_block_title()),
name: Some(self.item_name.clone()),
};
Sidebar::new(
None,
style,
self.module_info.clone(),
self.item_context.to_doclinks(),
)
}
}
impl Renderable for ItemBody {
/// HTML body component
fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
let sidebar = self.sidebar();
let ItemBody {
module_info,
ty,
item_name,
code_str,
attrs_opt,
item_context,
} = self;
let doc_name = ty.doc_name().to_string();
let block_title = ty.as_block_title();
let sidebar = sidebar.render(render_plan.clone())?;
let item_context = (item_context.context_opt.is_some()
|| item_context.impl_traits.is_some())
.then(|| -> Result<Box<dyn RenderBox>> { item_context.render(render_plan.clone()) });
let sway_hjs =
module_info.to_html_shorthand_path_string(&format!("{ASSETS_DIR_NAME}/highlight.js"));
let rendered_module_anchors = module_info.get_anchors()?;
Ok(box_html! {
body(class=format!("swaydoc {doc_name}")) {
: sidebar;
// this is the main code block
main {
div(class="width-limiter") {
: generate_searchbar(&module_info);
section(id="main-content", class="content") {
div(class="main-heading") {
h1(class="fqn") {
span(class="in-band") {
: format!("{} ", block_title.item_title_str());
@ for anchor in rendered_module_anchors {
: Raw(anchor);
}
a(class=&doc_name, href=IDENTITY) {
: item_name.as_str();
}
}
}
}
div(class="docblock item-decl") {
pre(class=format!("sway {}", &doc_name)) {
code { : code_str; }
}
}
@ if attrs_opt.is_some() {
// expand or hide description of main code block
details(class="swaydoc-toggle top-doc", open) {
summary(class="hideme") {
span { : "Expand description" }
}
// this is the description
div(class="docblock") {
: Raw(attrs_opt.unwrap())
}
}
}
@ if item_context.is_some() {
: item_context.unwrap();
}
}
section(id="search", class="search-results");
}
}
script(src=sway_hjs);
script {
: "hljs.highlightAll();";
}
}
})
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/item/documentable_type.rs | forc-plugins/forc-doc/src/render/item/documentable_type.rs | use crate::render::title::{BlockTitle, DocBlock};
use sway_core::{language::ty::TyDecl, TypeInfo};
/// The compiler type that can be documented.
#[derive(Clone, Debug)]
pub enum DocumentableType {
/// Any type that is declared in the Sway source code can be documented.
Declared(TyDecl),
/// Primitive types are not declared in the Sway source code, so they must be documented
/// without a declaration.
Primitive(TypeInfo),
}
impl DocumentableType {
pub fn doc_name(&self) -> &str {
match self {
DocumentableType::Declared(decl) => decl.name(),
DocumentableType::Primitive(ty) => ty.name(),
}
}
pub fn as_block_title(&self) -> BlockTitle {
match self {
DocumentableType::Declared(decl) => decl.title(),
DocumentableType::Primitive(ty) => ty.title(),
}
}
pub fn friendly_type_name(&self) -> &str {
match self {
DocumentableType::Declared(decl) => decl.friendly_type_name(),
DocumentableType::Primitive(_) => "primitive",
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/render/item/context.rs | forc-plugins/forc-doc/src/render/item/context.rs | //! Manages how the context of Sway types are rendered on corresponding item pages.
use crate::{
doc::module::ModuleInfo,
render::{
item::type_anchor::render_type_anchor,
link::{DocLink, DocLinks},
title::BlockTitle,
title::DocBlock,
util::format::docstring::DocStrings,
DocStyle, Renderable, IDENTITY,
},
RenderPlan,
};
use anyhow::Result;
use horrorshow::{box_html, Raw, RenderBox, Template};
use std::{collections::BTreeMap, fmt::Write};
use sway_core::language::ty::{
TyConstantDecl, TyEnumVariant, TyFunctionDecl, TyImplSelfOrTrait, TyStorageField,
TyStructField, TyTraitFn, TyTraitItem, TyTraitType,
};
use sway_types::Spanned;
/// The actual context of the item displayed by [ItemContext].
/// This uses [ContextType] to determine how to represent the context of an item.
///
/// Example:
/// ```sw
/// struct Foo {}
/// trait Foo {
/// fn foo() -> Foo;
/// }
/// ```
/// Becomes:
/// ```ignore
/// Context {
/// module_info: ModuleInfo, /* cloned from item origin to create links */
/// context_type: ContextType::RequiredMethods(Vec<TyTraitFn>), /* trait fn foo() stored here */
/// }
/// ```
#[derive(Clone, Debug)]
pub struct Context {
module_info: ModuleInfo,
context_type: ContextType,
}
impl Context {
pub fn new(module_info: ModuleInfo, context_type: ContextType) -> Self {
Self {
module_info,
context_type,
}
}
}
impl Renderable for Context {
fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
let mut rendered_list: Vec<String> = Vec::new();
let mut is_method_block = false;
match &self.context_type {
ContextType::StructFields(fields) => {
for field in fields {
let struct_field_id = format!("structfield.{}", field.name.as_str());
let type_anchor = render_type_anchor(
(*render_plan.engines.te().get(field.type_argument.type_id)).clone(),
&render_plan,
&self.module_info,
);
rendered_list.push(box_html! {
span(id=&struct_field_id, class="structfield small-section-header") {
a(class="anchor field", href=format!("{IDENTITY}{struct_field_id}"));
code {
: format!("{}: ", field.name.as_str());
@ if let Ok(type_anchor) = type_anchor {
: type_anchor;
} else {
: field.type_argument.span().as_str();
}
}
}
@ if !field.attributes.is_empty() {
div(class="docblock") {
: Raw(field.attributes.to_html_string());
}
}
}.into_string()?);
}
}
ContextType::StorageFields(fields) => {
for field in fields {
let storage_field_id = format!("storagefield.{}", field.name.as_str());
let type_anchor = render_type_anchor(
(*render_plan.engines.te().get(field.type_argument.type_id)).clone(),
&render_plan,
&self.module_info,
);
rendered_list.push(box_html! {
span(id=&storage_field_id, class="storagefield small-section-header") {
a(class="anchor field", href=format!("{IDENTITY}{storage_field_id}"));
code {
: format!("{}: ", field.name.as_str());
@ if let Ok(type_anchor) = type_anchor {
: type_anchor;
} else {
: field.type_argument.span().as_str();
}
}
}
@ if !field.attributes.is_empty() {
div(class="docblock") {
: Raw(field.attributes.to_html_string());
}
}
}.into_string()?);
}
}
ContextType::EnumVariants(variants) => {
for variant in variants {
let enum_variant_id = format!("variant.{}", variant.name.as_str());
let type_anchor = render_type_anchor(
(*render_plan.engines.te().get(variant.type_argument.type_id)).clone(),
&render_plan,
&self.module_info,
);
rendered_list.push(box_html! {
h3(id=&enum_variant_id, class="variant small-section-header") {
a(class="anchor field", href=format!("{IDENTITY}{enum_variant_id}"));
code {
: format!("{}: ", variant.name.as_str());
@ if let Ok(type_anchor) = type_anchor {
: type_anchor;
} else {
: variant.type_argument.span().as_str();
}
}
}
@ if !variant.attributes.is_empty() {
div(class="docblock") {
: Raw(variant.attributes.to_html_string());
}
}
}.into_string()?);
}
}
ContextType::RequiredMethods(methods) => {
is_method_block = true;
for method in methods {
let mut fn_sig = format!("fn {}(", method.name.as_str());
for param in &method.parameters {
let mut param_str = String::new();
if param.is_reference {
write!(param_str, "ref ")?;
}
if param.is_mutable {
write!(param_str, "mut ")?;
}
if param.is_self() {
write!(param_str, "self,")?;
} else {
write!(
fn_sig,
"{} {},",
param.name.as_str(),
param.type_argument.span().as_str()
)?;
}
}
write!(fn_sig, ") -> {}", method.return_type.span().as_str())?;
let multiline = fn_sig.chars().count() >= 60;
let fn_sig = format!("fn {}(", method.name);
let method_id = format!("tymethod.{}", method.name.as_str());
let method_attrs = method.attributes.clone();
let rendered_method = box_html! {
div(id=&method_id, class="method has-srclink") {
a(href=format!("{IDENTITY}{method_id}"), class="anchor");
h4(class="code-header") {
: "fn ";
a(class="fnname", href=format!("{IDENTITY}{method_id}")) {
: method.name.as_str();
}
: "(";
@ if multiline {
@ for param in &method.parameters {
br;
: " ";
@ if param.is_reference {
: "ref";
}
@ if param.is_mutable {
: "mut ";
}
@ if param.is_self() {
: "self,"
} else {
: param.name.as_str();
: ": ";
: param.type_argument.span().as_str();
: ","
}
}
br;
: ")";
} else {
@ for param in &method.parameters {
@ if param.is_reference {
: "ref";
}
@ if param.is_mutable {
: "mut ";
}
@ if param.is_self() {
: "self"
} else {
: param.name.as_str();
: ": ";
: param.type_argument.span().as_str();
}
@ if param.name.as_str()
!= method.parameters.last()
.expect("no last element in trait method parameters list")
.name.as_str() {
: ", ";
}
}
: ")";
}
@ if !method.return_type.span().as_str().contains(&fn_sig) {
: " -> ";
: method.return_type.span().as_str();
}
}
}
}.into_string()?;
rendered_list.push(
box_html! {
@ if !method_attrs.is_empty() {
details(class="swaydoc-toggle open") {
summary {
: Raw(rendered_method);
}
div(class="docblock") {
: Raw(method_attrs.to_html_string());
}
}
} else {
: Raw(rendered_method);
}
}
.into_string()?,
);
}
}
};
Ok(box_html! {
@ if is_method_block {
div(class="methods") {
@ for item in rendered_list {
: Raw(item);
}
}
} else {
@ for item in rendered_list {
: Raw(item);
}
}
})
}
}
#[derive(Debug, Clone)]
pub struct DocImplTrait {
pub impl_for_module: ModuleInfo,
pub impl_trait: TyImplSelfOrTrait,
pub module_info_override: Option<Vec<String>>,
}
impl DocImplTrait {
pub fn short_name(&self) -> String {
self.impl_trait.trait_name.suffix.as_str().to_string()
}
pub fn type_args(&self) -> Vec<String> {
self.impl_trait
.trait_type_arguments
.iter()
.map(|arg| arg.span().as_str().to_string())
.collect()
}
pub fn name_with_type_args(&self) -> String {
let type_args = self.type_args();
if !type_args.is_empty() {
format!("{}<{}>", self.short_name(), type_args.join(", "))
} else {
self.short_name()
}
}
// If the trait name is the same as the declaration's name, it's an inherent implementation.
// Otherwise, it's a trait implementation.
pub fn is_inherent(&self) -> bool {
self.short_name() == self.impl_trait.implementing_for.span().as_str()
|| self.short_name() == "r#Self"
}
}
#[derive(Clone, Debug, Default)]
/// The context section of an item that appears in the page [ItemBody].
pub struct ItemContext {
/// [Context] can be fields on a struct, variants of an enum, etc.
pub context_opt: Option<Context>,
// The implementations for this type.
pub inherent_impls: Option<Vec<DocImplTrait>>,
/// The traits implemented for this type.
pub impl_traits: Option<Vec<DocImplTrait>>,
}
impl ItemContext {
pub fn to_doclinks(&self) -> DocLinks {
let mut links: BTreeMap<BlockTitle, Vec<DocLink>> = BTreeMap::new();
if let Some(context) = &self.context_opt {
match context.context_type.clone() {
ContextType::StructFields(fields) => {
let doc_links = fields
.iter()
.map(|field| DocLink {
name: field.name.as_str().to_string(),
module_info: ModuleInfo::from_ty_module(vec![], None),
html_filename: format!(
"{}structfield.{}",
IDENTITY,
field.name.as_str()
),
preview_opt: None,
})
.collect();
links.insert(BlockTitle::Fields, doc_links);
}
ContextType::StorageFields(fields) => {
let doc_links = fields
.iter()
.map(|field| DocLink {
name: field.name.as_str().to_string(),
module_info: ModuleInfo::from_ty_module(vec![], None),
html_filename: format!(
"{}storagefield.{}",
IDENTITY,
field.name.as_str()
),
preview_opt: None,
})
.collect();
links.insert(BlockTitle::Fields, doc_links);
}
ContextType::EnumVariants(variants) => {
let doc_links = variants
.iter()
.map(|variant| DocLink {
name: variant.name.as_str().to_string(),
module_info: ModuleInfo::from_ty_module(vec![], None),
html_filename: format!("{}variant.{}", IDENTITY, variant.name.as_str()),
preview_opt: None,
})
.collect();
links.insert(BlockTitle::Variants, doc_links);
}
ContextType::RequiredMethods(methods) => {
let doc_links = methods
.iter()
.map(|method| DocLink {
name: method.name.as_str().to_string(),
module_info: ModuleInfo::from_ty_module(vec![], None),
html_filename: format!(
"{}structfield.{}",
IDENTITY,
method.name.as_str()
),
preview_opt: None,
})
.collect();
links.insert(BlockTitle::RequiredMethods, doc_links);
}
}
}
if let Some(inherent_impls) = &self.inherent_impls {
let mut doc_links = Vec::new();
for inherent_impl in inherent_impls {
for item in &inherent_impl.impl_trait.items {
if let TyTraitItem::Fn(item_fn) = item {
let method_name = item_fn.name().to_string();
doc_links.push(DocLink {
name: method_name.clone(),
module_info: inherent_impl.impl_for_module.clone(),
html_filename: format!("{IDENTITY}method.{method_name}"),
preview_opt: None,
})
}
}
}
links.insert(BlockTitle::ImplMethods, doc_links);
}
if let Some(impl_traits) = &self.impl_traits {
let doc_links = impl_traits
.iter()
.map(|impl_trait| DocLink {
name: impl_trait.name_with_type_args(),
module_info: impl_trait.impl_for_module.clone(),
html_filename: format!("{}impl-{}", IDENTITY, impl_trait.name_with_type_args()),
preview_opt: None,
})
.collect();
links.insert(BlockTitle::ImplTraits, doc_links);
}
DocLinks {
style: DocStyle::Item {
title: None,
name: None,
},
links,
}
}
}
impl Renderable for ItemContext {
fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
let context_opt = match self.context_opt {
Some(context) => {
let title = context.context_type.title();
let rendered_list = context.render(render_plan.clone())?;
let lct = title.html_title_string();
Some(
box_html! {
h2(id=&lct, class=format!("{} small-section-header", &lct)) {
: title.as_str();
a(class="anchor", href=format!("{IDENTITY}{lct}"));
}
: rendered_list;
}
.into_string()?,
)
}
None => None,
};
let impl_traits = match self.impl_traits {
Some(impl_traits) => {
let mut impl_trait_vec: Vec<_> = Vec::with_capacity(impl_traits.len());
for impl_trait in impl_traits {
impl_trait_vec.push(impl_trait.render(render_plan.clone())?);
}
impl_trait_vec
}
None => vec![],
};
let inherent_impls = match self.inherent_impls {
Some(inherent_impls) => {
let mut inherent_impl_vec: Vec<_> = Vec::with_capacity(inherent_impls.len());
for inherent_impl in inherent_impls {
inherent_impl_vec.push(inherent_impl.render(render_plan.clone())?);
}
inherent_impl_vec
}
None => vec![],
};
Ok(box_html! {
@ if let Some(context) = context_opt {
: Raw(context);
}
@ if !inherent_impls.is_empty() {
h2(id="methods", class="small-section-header") {
: "Implementations";
a(href=format!("{IDENTITY}methods"), class="anchor");
}
div(id="methods-list") {
@ for inherent_impl in inherent_impls {
: inherent_impl;
}
}
}
@ if !impl_traits.is_empty() {
h2(id="trait-implementations", class="small-section-header") {
: "Trait Implementations";
a(href=format!("{IDENTITY}trait-implementations"), class="anchor");
}
div(id="trait-implementations-list") {
@ for impl_trait in impl_traits {
: impl_trait;
}
}
}
})
}
}
impl Renderable for DocImplTrait {
    /// Renders the header of an `impl` block (e.g. `impl Trait for Type`) plus
    /// its rendered items, wrapped in a collapsible section when items exist.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let TyImplSelfOrTrait {
            trait_name,
            items,
            implementing_for,
            ..
        } = &self.impl_trait;
        let short_name = self.short_name();
        let name_with_type_args = self.name_with_type_args();
        let type_args = self.type_args();
        let is_inherent = self.is_inherent();
        let impl_for_module = &self.impl_for_module;
        let no_deps = render_plan.no_deps;
        // The trait counts as "external" when the root of its call path differs
        // from the project name of the implementing module.
        let is_external_item = if let Some(project_root) = trait_name.prefixes.first() {
            project_root.as_str() != impl_for_module.project_name()
        } else {
            false
        };
        // Resolve the relative link to the trait's documentation page, using the
        // recorded module override when one exists (see `module_info_override`).
        let trait_link = if let Some(module_prefixes) = &self.module_info_override {
            ModuleInfo::from_vec_str(module_prefixes).file_path_from_location(
                &format!("trait.{short_name}.html"),
                impl_for_module,
                is_external_item,
            )?
        } else {
            ModuleInfo::from_call_path(trait_name).file_path_from_location(
                &format!("trait.{short_name}.html"),
                impl_for_module,
                is_external_item,
            )?
        };
        let mut rendered_items = Vec::with_capacity(items.len());
        for item in items {
            rendered_items.push(item.clone().render(render_plan.clone())?)
        }
        let impl_for = box_html! {
            div(id=format!("impl-{}", name_with_type_args), class="impl has-srclink") {
                a(href=format!("{IDENTITY}impl-{}", name_with_type_args), class="anchor");
                h3(class="code-header in-band") {
                    : "impl ";
                    @ if !is_inherent {
                        // With `--no-deps`, external traits have no generated page to link to.
                        @ if no_deps && is_external_item {
                            : name_with_type_args;
                        } else {
                            a(class="trait", href=format!("{trait_link}")) {
                                : short_name;
                            }
                            // Emit the generic arguments as `<A, B, …>`; the angle
                            // brackets are attached to the first and last argument.
                            // NOTE(review): first/last positions are detected by value
                            // equality, so duplicate type arguments would duplicate the
                            // brackets — confirm args are unique here.
                            @ for arg in &type_args {
                                @ if arg == type_args.first().unwrap() {
                                    : "<";
                                }
                                : arg;
                                @ if arg != type_args.last().unwrap() {
                                    : ", ";
                                }
                                @ if arg == type_args.last().unwrap() {
                                    : ">";
                                }
                            }
                        }
                        : " for ";
                    }
                    : implementing_for.span().as_str();
                }
            }
        }
        .into_string()?;
        Ok(box_html! {
            // check if the implementation has methods
            @ if !rendered_items.is_empty() {
                details(class="swaydoc-toggle implementors-toggle", open) {
                    summary {
                        : Raw(impl_for);
                    }
                    div(class="impl-items") {
                        @ for item in rendered_items {
                            : item;
                        }
                    }
                }
            } else {
                : Raw(impl_for);
            }
        })
    }
}
impl Renderable for TyTraitItem {
    /// Renders a single trait item by resolving its declaration through the
    /// decl engine and delegating to that declaration's own renderer.
    fn render(self, render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        match self {
            TyTraitItem::Fn(decl_ref) => {
                let fn_decl =
                    (*render_plan.engines.de().get_function(decl_ref.id())).clone();
                fn_decl.render(render_plan)
            }
            TyTraitItem::Constant(decl_ref) => {
                let const_decl =
                    (*render_plan.engines.de().get_constant(decl_ref.id())).clone();
                const_decl.render(render_plan)
            }
            TyTraitItem::Type(decl_ref) => {
                let type_decl =
                    (*render_plan.engines.de().get_type(decl_ref.id())).clone();
                type_decl.render(render_plan)
            }
        }
    }
}
impl Renderable for TyFunctionDecl {
    /// Renders a function from an `impl` block: an anchored signature header,
    /// wrapped in a collapsible toggle when the function has doc comments.
    fn render(self, _render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let attributes = self.attributes.to_html_string();
        // Build a plain-text signature used only to decide whether the HTML
        // signature must be wrapped over multiple lines. Previously the
        // `ref `/`mut `/`self,` fragments were written into a scratch string
        // (`param_str`) that was then discarded, so they never counted toward
        // the length; write them directly into `fn_sig` instead.
        let mut fn_sig = format!("fn {}(", self.name.as_str());
        for param in self.parameters.iter() {
            if param.is_reference {
                write!(fn_sig, "ref ")?;
            }
            if param.is_mutable {
                write!(fn_sig, "mut ")?;
            }
            if param.is_self() {
                write!(fn_sig, "self,")?;
            } else {
                write!(
                    fn_sig,
                    "{} {},",
                    param.name.as_str(),
                    param.type_argument.span().as_str()
                )?;
            }
        }
        write!(fn_sig, ") -> {}", self.return_type.span().as_str())?;
        let multiline = fn_sig.chars().count() >= 60;
        let method_id = format!("method.{}", self.name.as_str());
        let impl_list = box_html! {
            div(id=format!("{method_id}"), class="method trait-impl") {
                a(href=format!("{IDENTITY}{method_id}"), class="anchor");
                h4(class="code-header") {
                    @ if self.visibility.is_public() {
                        : "pub ";
                    }
                    : "fn ";
                    a(class="fnname", href=format!("{IDENTITY}{method_id}")) {
                        : self.name.as_str();
                    }
                    : "(";
                    @ if multiline {
                        @ for param in self.parameters.iter() {
                            br;
                            : "    ";
                            @ if param.is_reference {
                                // Trailing space so `ref` doesn't fuse with the
                                // following token (previously rendered "refmut"/"refself").
                                : "ref ";
                            }
                            @ if param.is_mutable {
                                : "mut ";
                            }
                            @ if param.is_self() {
                                : "self,"
                            } else {
                                : param.name.as_str();
                                : ": ";
                                : param.type_argument.span().as_str();
                                : ","
                            }
                        }
                        br;
                        : ")";
                    } else {
                        @ for param in self.parameters.iter() {
                            @ if param.is_reference {
                                : "ref ";
                            }
                            @ if param.is_mutable {
                                : "mut ";
                            }
                            @ if param.is_self() {
                                : "self"
                            } else {
                                : param.name.as_str();
                                : ": ";
                                : param.type_argument.span().as_str();
                            }
                            // Comma-separate parameters except after the last one.
                            @ if param.name.as_str()
                                != self.parameters.last()
                                    .expect("no last element in trait method parameters list")
                                    .name.as_str() {
                                : ", ";
                            }
                        }
                        : ")";
                    }
                    // Only show a return type when the source signature declared one.
                    @ if self.span.as_str().contains("->") {
                        : " -> ";
                        : self.return_type.span().as_str();
                    }
                }
            }
        }
        .into_string()?;
        Ok(box_html! {
            @ if !attributes.is_empty() {
                details(class="swaydoc-toggle method-toggle", open) {
                    summary {
                        : Raw(impl_list);
                    }
                    div(class="docblock") {
                        : Raw(attributes);
                    }
                }
            } else {
                : Raw(impl_list);
            }
        })
    }
}
impl Renderable for TyTraitType {
    /// Renders an associated type from an implementation block; when the type
    /// carries doc comments they go into a collapsible details section.
    fn render(self, _render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let docs = self.attributes.to_html_string();
        let anchor_id = format!("traittype.{}", self.name.as_str());
        let rendered_decl = box_html! {
            div(id=format!("{anchor_id}"), class="type trait-impl") {
                a(href=format!("{IDENTITY}{anchor_id}"), class="anchor");
                h4(class="code-header") {
                    : self.span.as_str();
                }
            }
        }
        .into_string()?;
        Ok(box_html! {
            @ if docs.is_empty() {
                : Raw(rendered_decl);
            } else {
                details(class="swaydoc-toggle method-toggle", open) {
                    summary {
                        : Raw(rendered_decl);
                    }
                    div(class="docblock") {
                        : Raw(docs);
                    }
                }
            }
        })
    }
}
impl Renderable for TyConstantDecl {
    /// Renders an associated constant from an implementation block; when the
    /// constant carries doc comments they go into a collapsible details section.
    fn render(self, _render_plan: RenderPlan) -> Result<Box<dyn RenderBox>> {
        let docs = self.attributes.to_html_string();
        let anchor_id = format!("const.{}", self.call_path.suffix.as_str());
        let rendered_decl = box_html! {
            div(id=format!("{anchor_id}"), class="const trait-impl") {
                a(href=format!("{IDENTITY}{anchor_id}"), class="anchor");
                h4(class="code-header") {
                    : self.span.as_str();
                }
            }
        }
        .into_string()?;
        Ok(box_html! {
            @ if docs.is_empty() {
                : Raw(rendered_decl);
            } else {
                details(class="swaydoc-toggle method-toggle", open) {
                    summary {
                        : Raw(rendered_decl);
                    }
                    div(class="docblock") {
                        : Raw(docs);
                    }
                }
            }
        })
    }
}
#[derive(Clone, Debug)]
/// Represents the type of [Context] for item declarations that have
/// fields, variants or methods, and acts as a wrapper for those values for rendering.
///
/// Each variant carries the typed declarations the renderer turns into one
/// section of the item's documentation page.
pub enum ContextType {
    /// Stores the fields on a struct to be rendered.
    StructFields(Vec<TyStructField>),
    /// Stores the fields in storage to be rendered.
    StorageFields(Vec<TyStorageField>),
    /// Stores the variants of an enum to be rendered.
    EnumVariants(Vec<TyEnumVariant>),
    /// Stores the methods of a trait or abi to be rendered.
    RequiredMethods(Vec<TyTraitFn>),
}
impl DocBlock for ContextType {
fn title(&self) -> BlockTitle {
match self {
ContextType::StructFields(_) | ContextType::StorageFields(_) => BlockTitle::Fields,
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/doc/descriptor.rs | forc-plugins/forc-doc/src/doc/descriptor.rs | //! Determine whether a [Declaration] is documentable.
use crate::{
doc::{module::ModuleInfo, Document},
render::{
item::{
components::*,
context::{Context, ContextType, ItemContext},
documentable_type::DocumentableType,
},
util::format::docstring::DocStrings,
},
};
use anyhow::Result;
use sway_core::{
decl_engine::*,
language::ty::{self, TyTraitFn, TyTraitInterfaceItem},
Engines, TypeInfo,
};
use sway_features::ExperimentalFeatures;
use sway_types::{integer_bits::IntegerBits, Ident};
use swayfmt::parse;
/// Converts a collection of trait-function declaration references into the
/// corresponding owned [TyTraitFn] declarations.
trait RequiredMethods {
    fn to_methods(&self, decl_engine: &DeclEngine) -> Vec<TyTraitFn>;
}
impl RequiredMethods for Vec<DeclRefTraitFn> {
    /// Resolves each reference through the decl engine and clones the
    /// resulting trait-function declaration.
    fn to_methods(&self, decl_engine: &DeclEngine) -> Vec<TyTraitFn> {
        let mut methods = Vec::with_capacity(self.len());
        for decl_ref in self {
            methods.push(decl_engine.get_trait_fn(decl_ref).as_ref().clone());
        }
        methods
    }
}
/// Used in deciding whether or not a [Declaration] is documentable.
#[allow(clippy::large_enum_variant)]
pub(crate) enum Descriptor {
    /// The declaration produced a fully formed [Document] to render.
    Documentable(Document),
    /// The declaration is skipped (e.g. a private item when
    /// `document_private_items` is not set, or a decl kind with no doc page).
    NonDocumentable,
}
impl Descriptor {
    /// Decides whether a [ty::TyDecl] is [Descriptor::Documentable] and returns a [Document] if so.
    ///
    /// Private items only become documentable when `document_private_items` is
    /// set; declaration kinds that have no documentation page fall through to
    /// [Descriptor::NonDocumentable].
    pub(crate) fn from_typed_decl(
        decl_engine: &DeclEngine,
        ty_decl: &ty::TyDecl,
        module_info: ModuleInfo,
        document_private_items: bool,
        experimental: ExperimentalFeatures,
    ) -> Result<Self> {
        // Display name for the anonymous `storage` declaration, which has no Ident.
        const CONTRACT_STORAGE: &str = "Contract Storage";
        match ty_decl {
            ty::TyDecl::StructDecl(ty::StructDecl { decl_id, .. }) => {
                let struct_decl = decl_engine.get_struct(decl_id);
                if !document_private_items && struct_decl.visibility.is_private() {
                    Ok(Descriptor::NonDocumentable)
                } else {
                    let item_name = struct_decl.call_path.suffix.clone();
                    let attrs_opt = (!struct_decl.attributes.is_empty())
                        .then(|| struct_decl.attributes.to_html_string());
                    let context = (!struct_decl.fields.is_empty()).then_some(Context::new(
                        module_info.clone(),
                        ContextType::StructFields(struct_decl.fields.clone()),
                    ));
                    Ok(Descriptor::Documentable(Document {
                        module_info: module_info.clone(),
                        item_header: ItemHeader {
                            module_info: module_info.clone(),
                            friendly_name: ty_decl.friendly_type_name(),
                            item_name: item_name.clone(),
                        },
                        item_body: ItemBody {
                            module_info,
                            ty: DocumentableType::Declared(ty_decl.clone()),
                            item_name,
                            code_str: parse::parse_format::<sway_ast::ItemStruct>(
                                struct_decl.span.as_str(),
                                experimental,
                            )?,
                            attrs_opt: attrs_opt.clone(),
                            item_context: ItemContext {
                                context_opt: context,
                                ..Default::default()
                            },
                        },
                        raw_attributes: attrs_opt,
                    }))
                }
            }
            ty::TyDecl::EnumDecl(ty::EnumDecl { decl_id, .. }) => {
                let enum_decl = decl_engine.get_enum(decl_id);
                if !document_private_items && enum_decl.visibility.is_private() {
                    Ok(Descriptor::NonDocumentable)
                } else {
                    let item_name = enum_decl.call_path.suffix.clone();
                    let attrs_opt = (!enum_decl.attributes.is_empty())
                        .then(|| enum_decl.attributes.to_html_string());
                    let context = (!enum_decl.variants.is_empty()).then_some(Context::new(
                        module_info.clone(),
                        ContextType::EnumVariants(enum_decl.variants.clone()),
                    ));
                    Ok(Descriptor::Documentable(Document {
                        module_info: module_info.clone(),
                        item_header: ItemHeader {
                            module_info: module_info.clone(),
                            friendly_name: ty_decl.friendly_type_name(),
                            item_name: item_name.clone(),
                        },
                        item_body: ItemBody {
                            module_info,
                            ty: DocumentableType::Declared(ty_decl.clone()),
                            item_name,
                            code_str: parse::parse_format::<sway_ast::ItemEnum>(
                                enum_decl.span.as_str(),
                                experimental,
                            )?,
                            attrs_opt: attrs_opt.clone(),
                            item_context: ItemContext {
                                context_opt: context,
                                ..Default::default()
                            },
                        },
                        raw_attributes: attrs_opt,
                    }))
                }
            }
            ty::TyDecl::TraitDecl(ty::TraitDecl { decl_id, .. }) => {
                let trait_decl = (*decl_engine.get_trait(decl_id)).clone();
                if !document_private_items && trait_decl.visibility.is_private() {
                    Ok(Descriptor::NonDocumentable)
                } else {
                    let item_name = trait_decl.name;
                    let attrs_opt = (!trait_decl.attributes.is_empty())
                        .then(|| trait_decl.attributes.to_html_string());
                    // Only the function items of the interface surface become
                    // "required methods"; constants and types are skipped here.
                    let context =
                        (!trait_decl.interface_surface.is_empty()).then_some(Context::new(
                            module_info.clone(),
                            ContextType::RequiredMethods(
                                trait_decl
                                    .interface_surface
                                    .into_iter()
                                    .filter_map(|item| match item {
                                        TyTraitInterfaceItem::TraitFn(fn_decl) => Some(fn_decl),
                                        _ => None,
                                    })
                                    .collect::<Vec<_>>()
                                    .to_methods(decl_engine),
                            ),
                        ));
                    Ok(Descriptor::Documentable(Document {
                        module_info: module_info.clone(),
                        item_header: ItemHeader {
                            module_info: module_info.clone(),
                            friendly_name: ty_decl.friendly_type_name(),
                            item_name: item_name.clone(),
                        },
                        item_body: ItemBody {
                            module_info,
                            ty: DocumentableType::Declared(ty_decl.clone()),
                            item_name,
                            code_str: parse::parse_format::<sway_ast::ItemTrait>(
                                trait_decl.span.as_str(),
                                experimental,
                            )?,
                            attrs_opt: attrs_opt.clone(),
                            item_context: ItemContext {
                                context_opt: context,
                                ..Default::default()
                            },
                        },
                        raw_attributes: attrs_opt,
                    }))
                }
            }
            ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
                let abi_decl = (*decl_engine.get_abi(decl_id)).clone();
                let item_name = abi_decl.name;
                let attrs_opt =
                    (!abi_decl.attributes.is_empty()).then(|| abi_decl.attributes.to_html_string());
                let context = (!abi_decl.interface_surface.is_empty()).then_some(Context::new(
                    module_info.clone(),
                    ContextType::RequiredMethods(
                        abi_decl
                            .interface_surface
                            .into_iter()
                            // `filter_map` (not `flat_map` over an Option) for
                            // consistency with the trait branch above.
                            .filter_map(|item| match item {
                                TyTraitInterfaceItem::TraitFn(fn_decl) => Some(fn_decl),
                                _ => None,
                            })
                            .collect::<Vec<_>>()
                            .to_methods(decl_engine),
                    ),
                ));
                Ok(Descriptor::Documentable(Document {
                    module_info: module_info.clone(),
                    item_header: ItemHeader {
                        module_info: module_info.clone(),
                        friendly_name: ty_decl.friendly_type_name(),
                        item_name: item_name.clone(),
                    },
                    item_body: ItemBody {
                        module_info,
                        ty: DocumentableType::Declared(ty_decl.clone()),
                        item_name,
                        code_str: parse::parse_format::<sway_ast::ItemAbi>(
                            abi_decl.span.as_str(),
                            experimental,
                        )?,
                        attrs_opt: attrs_opt.clone(),
                        item_context: ItemContext {
                            context_opt: context,
                            ..Default::default()
                        },
                    },
                    raw_attributes: attrs_opt,
                }))
            }
            ty::TyDecl::StorageDecl(ty::StorageDecl { decl_id, .. }) => {
                let storage_decl = decl_engine.get_storage(decl_id);
                // Storage has no Ident of its own, so synthesize one.
                let item_name = sway_types::BaseIdent::new_no_trim(
                    sway_types::span::Span::from_string(CONTRACT_STORAGE.to_string()),
                );
                let attrs_opt = (!storage_decl.attributes.is_empty())
                    .then(|| storage_decl.attributes.to_html_string());
                let context = (!storage_decl.fields.is_empty()).then_some(Context::new(
                    module_info.clone(),
                    ContextType::StorageFields(storage_decl.fields.clone()),
                ));
                Ok(Descriptor::Documentable(Document {
                    module_info: module_info.clone(),
                    item_header: ItemHeader {
                        module_info: module_info.clone(),
                        friendly_name: ty_decl.friendly_type_name(),
                        item_name: item_name.clone(),
                    },
                    item_body: ItemBody {
                        module_info,
                        ty: DocumentableType::Declared(ty_decl.clone()),
                        item_name,
                        code_str: parse::parse_format::<sway_ast::ItemStorage>(
                            storage_decl.span.as_str(),
                            experimental,
                        )?,
                        attrs_opt: attrs_opt.clone(),
                        item_context: ItemContext {
                            context_opt: context,
                            ..Default::default()
                        },
                    },
                    raw_attributes: attrs_opt,
                }))
            }
            ty::TyDecl::FunctionDecl(ty::FunctionDecl { decl_id, .. }) => {
                let fn_decl = decl_engine.get_function(decl_id);
                if !document_private_items && fn_decl.visibility.is_private() {
                    Ok(Descriptor::NonDocumentable)
                } else {
                    let item_name = fn_decl.name.clone();
                    let attrs_opt = (!fn_decl.attributes.is_empty())
                        .then(|| fn_decl.attributes.to_html_string());
                    Ok(Descriptor::Documentable(Document {
                        module_info: module_info.clone(),
                        item_header: ItemHeader {
                            module_info: module_info.clone(),
                            friendly_name: ty_decl.friendly_type_name(),
                            item_name: item_name.clone(),
                        },
                        item_body: ItemBody {
                            module_info,
                            ty: DocumentableType::Declared(ty_decl.clone()),
                            item_name,
                            // Only the signature is documented; the body is trimmed off.
                            code_str: trim_fn_body(parse::parse_format::<sway_ast::ItemFn>(
                                fn_decl.span.as_str(),
                                experimental,
                            )?),
                            attrs_opt: attrs_opt.clone(),
                            item_context: ItemContext {
                                context_opt: None,
                                ..Default::default()
                            },
                        },
                        raw_attributes: attrs_opt,
                    }))
                }
            }
            ty::TyDecl::ConstantDecl(ty::ConstantDecl { decl_id, .. }) => {
                let const_decl = decl_engine.get_constant(decl_id);
                if !document_private_items && const_decl.visibility.is_private() {
                    Ok(Descriptor::NonDocumentable)
                } else {
                    let item_name = const_decl.call_path.suffix.clone();
                    let attrs_opt = (!const_decl.attributes.is_empty())
                        .then(|| const_decl.attributes.to_html_string());
                    Ok(Descriptor::Documentable(Document {
                        module_info: module_info.clone(),
                        item_header: ItemHeader {
                            module_info: module_info.clone(),
                            friendly_name: ty_decl.friendly_type_name(),
                            item_name: item_name.clone(),
                        },
                        item_body: ItemBody {
                            module_info,
                            ty: DocumentableType::Declared(ty_decl.clone()),
                            item_name,
                            code_str: parse::parse_format::<sway_ast::ItemConst>(
                                const_decl.span.as_str(),
                                experimental,
                            )?,
                            attrs_opt: attrs_opt.clone(),
                            item_context: Default::default(),
                        },
                        raw_attributes: attrs_opt,
                    }))
                }
            }
            _ => Ok(Descriptor::NonDocumentable),
        }
    }
    /// Decides whether a [TypeInfo] is [Descriptor::Documentable] and returns a [Document] if so.
    pub(crate) fn from_type_info(
        type_info: &TypeInfo,
        engines: &Engines,
        module_info: ModuleInfo,
    ) -> Result<Self> {
        // Only primitive types will result in a documentable item. All other type documentation should come
        // from the a declaration. Since primitive types do not have sway declarations, we can only generate
        // documentation from their implementations.
        let item_name = Ident::new_no_span(format!("{}", engines.help_out(type_info)));
        // Build a fake module info for the primitive type.
        let module_info = ModuleInfo {
            module_prefixes: vec![module_info.project_name().into()],
            attributes: None,
        };
        // TODO: Find a way to add descriptions without hardcoding them.
        let description = match type_info {
            TypeInfo::StringSlice => "string slice",
            TypeInfo::StringArray(_) => "fixed-length string",
            TypeInfo::Boolean => "Boolean true or false",
            TypeInfo::B256 => "256 bits (32 bytes), i.e. a hash",
            TypeInfo::UnsignedInteger(bits) => match bits {
                IntegerBits::Eight => "8-bit unsigned integer",
                IntegerBits::Sixteen => "16-bit unsigned integer",
                IntegerBits::ThirtyTwo => "32-bit unsigned integer",
                IntegerBits::SixtyFour => "64-bit unsigned integer",
                IntegerBits::V256 => "256-bit unsigned integer",
            },
            _ => return Ok(Descriptor::NonDocumentable),
        };
        let attrs_opt = Some(description.to_string());
        match type_info {
            TypeInfo::StringSlice
            | TypeInfo::StringArray(_)
            | TypeInfo::Boolean
            | TypeInfo::B256
            | TypeInfo::UnsignedInteger(_) => Ok(Descriptor::Documentable(Document {
                module_info: module_info.clone(),
                item_header: ItemHeader {
                    module_info: module_info.clone(),
                    friendly_name: "primitive",
                    item_name: item_name.clone(),
                },
                item_body: ItemBody {
                    module_info,
                    ty: DocumentableType::Primitive(type_info.clone()),
                    item_name: item_name.clone(),
                    code_str: item_name.to_string(),
                    attrs_opt: attrs_opt.clone(),
                    item_context: Default::default(),
                },
                raw_attributes: attrs_opt,
            })),
            _ => Ok(Descriptor::NonDocumentable),
        }
    }
}
/// Takes a formatted function signature & body and returns only the signature.
///
/// Everything from the first `{` onward is dropped; input without a body is
/// returned unchanged.
fn trim_fn_body(f: String) -> String {
    if let Some(index) = f.find('{') {
        f[..index].to_string()
    } else {
        f
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/doc/module.rs | forc-plugins/forc-doc/src/doc/module.rs | //! Handles the gathering of module information used in navigation and documentation of modules.
use crate::render::{util::format::docstring::create_preview, INDEX_FILENAME};
use anyhow::Result;
use horrorshow::{box_html, Template};
use std::{fmt::Write, path::PathBuf};
use sway_core::language::CallPath;
/// The chain of module names leading to (and including) the current module,
/// starting at the project root.
pub(crate) type ModulePrefixes = Vec<String>;
/// Information about a Sway module.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub struct ModuleInfo {
    /// The preceding module names, used in navigating between modules.
    pub module_prefixes: ModulePrefixes,
    /// Doc attributes of a module.
    /// Renders into the module level docstrings.
    ///
    /// ```sway
    /// //! Module level docstring
    /// library;
    /// ```
    pub(crate) attributes: Option<String>,
}
impl ModuleInfo {
    /// The current module.
    ///
    /// Panics if there are no modules.
    pub(crate) fn location(&self) -> &str {
        self.module_prefixes
            .last()
            .expect("Expected Some module location, found None")
    }
    /// The name of the project.
    ///
    /// Panics if the project root is missing.
    pub(crate) fn project_name(&self) -> &str {
        self.module_prefixes
            .first()
            .expect("Expected root module, project root missing")
    }
    /// The location of the parent of the current module.
    ///
    /// Returns `None` if there is no parent.
    pub(crate) fn parent(&self) -> Option<&String> {
        if self.has_parent() {
            // Skip the current module (last element), then take the one before it.
            let mut iter = self.module_prefixes.iter();
            iter.next_back();
            iter.next_back()
        } else {
            None
        }
    }
    /// Determines if the current module has a parent module.
    fn has_parent(&self) -> bool {
        self.depth() > 1
    }
    pub(crate) fn is_root_module(&self) -> bool {
        self.location() == self.project_name()
    }
    /// Create a qualified path literal String that represents the full path to an item.
    ///
    /// Example: `project_name::module::Item`
    pub(crate) fn to_path_literal_string(&self, item_name: &str, location: &str) -> String {
        let prefix = self.to_path_literal_prefix(location);
        match prefix.is_empty() {
            true => item_name.to_owned(),
            false => format!("{prefix}::{item_name}"),
        }
    }
    /// Create a path literal prefix from the module prefixes.
    /// Use in `to_path_literal_string()` to create a full literal path string.
    ///
    /// Example: `module::submodule`
    fn to_path_literal_prefix(&self, location: &str) -> String {
        // Advance past `location` (inclusive); only the prefixes after it are joined.
        let mut iter = self.module_prefixes.iter();
        for prefix in iter.by_ref() {
            if prefix == location {
                break;
            }
        }
        iter.map(String::as_str).collect::<Vec<&str>>().join("::")
    }
    /// Renders the [ModuleInfo] into a [CallPath] with anchors. We return this as a `Result<Vec<String>>`
    /// since the `box_html!` macro returns a closure and no two closures are considered the same type.
    pub(crate) fn get_anchors(&self) -> Result<Vec<String>> {
        // `count` tracks how many levels up each successive anchor must climb;
        // the first prefix is furthest from the current page.
        let mut count = self.depth();
        let mut rendered_module_anchors = Vec::with_capacity(self.depth());
        for prefix in &self.module_prefixes {
            let mut href = (1..count).map(|_| "../").collect::<String>();
            href.push_str(INDEX_FILENAME);
            rendered_module_anchors.push(
                box_html! {
                    a(class="mod", href=href) {
                        : prefix;
                    }
                    span: "::";
                }
                .into_string()?,
            );
            count -= 1;
        }
        Ok(rendered_module_anchors)
    }
    /// Creates a String version of the path to an item,
    /// used in navigation between pages. The location given is the break point.
    ///
    /// This is only used for full path syntax, e.g `module/submodule/file_name.html`.
    pub(crate) fn file_path_at_location(&self, file_name: &str, location: &str) -> Result<String> {
        // Skip everything up to and including `location`, then join the rest.
        let mut iter = self.module_prefixes.iter();
        for prefix in iter.by_ref() {
            if prefix == location {
                break;
            }
        }
        let mut file_path = iter.collect::<PathBuf>();
        file_path.push(file_name);
        file_path
            .to_str()
            .map(|file_path_str| file_path_str.to_string())
            // NOTE(review): this error fires when the path is not valid UTF-8;
            // the message reads like it belongs elsewhere — confirm intent.
            .ok_or_else(|| anyhow::anyhow!("There will always be at least the item name"))
    }
    /// Compares the current `module_info` to the next `module_info` to determine how many directories to go back to make
    /// the next file path valid, and returns that path as a `String`.
    ///
    /// Example:
    /// ```
    /// // number of dirs:       [match][    2    ][  1  ]
    /// let current_location = "project_root/module/submodule1/submodule2/struct.Name.html";
    /// let next_location = "module/other_submodule/enum.Name.html";
    /// let result = "../../other_submodule/enum.Name.html";
    /// ```
    /// In this case the first module to match is "module", so we have no need to go back further than that.
    pub(crate) fn file_path_from_location(
        &self,
        file_name: &str,
        current_module_info: &ModuleInfo,
        is_external_item: bool,
    ) -> Result<String> {
        if is_external_item {
            // External items live under their own project tree: climb all the
            // way out of the current project, then descend the item's full path.
            let mut new_path = (0..current_module_info.module_prefixes.len())
                .map(|_| "../")
                .collect::<String>();
            write!(new_path, "{}/{}", self.module_prefixes.join("/"), file_name)?;
            Ok(new_path)
        } else {
            let mut mid = 0; // the index to split the module_info from call_path at
            let mut offset = 0; // the number of directories to go back
            // Scan every prefix of the target path (from the leaf inward) against
            // the current path (also from the leaf); the last match found wins.
            let mut next_location_iter = self.module_prefixes.iter().rev().enumerate().peekable();
            while let Some((index, prefix)) = next_location_iter.peek() {
                for (count, module) in current_module_info.module_prefixes.iter().rev().enumerate()
                {
                    if module == *prefix {
                        offset = count;
                        mid = self.module_prefixes.len() - index;
                        break;
                    }
                }
                next_location_iter.next();
            }
            // Climb `offset` directories, then descend the unmatched tail of the target path.
            let mut new_path = (0..offset).map(|_| "../").collect::<String>();
            write!(
                new_path,
                "{}/{}",
                self.module_prefixes.split_at(mid).1.join("/"),
                file_name
            )?;
            Ok(new_path)
        }
    }
    /// Returns the relative path to the root of the project.
    ///
    /// Example:
    /// ```
    /// let current_location = "project_root/module/submodule1/submodule2/struct.Name.html";
    /// let result = "../..";
    /// ```
    /// In this case the first module to match is "module", so we have no need to go back further than that.
    pub(crate) fn path_to_root(&self) -> String {
        (0..self.module_prefixes.len())
            .map(|_| "..")
            .collect::<Vec<_>>()
            .join("/")
    }
    /// Create a path `&str` for navigation from the `module.depth()` & `file_name`.
    ///
    /// This is only used for shorthand path syntax, e.g `../../file_name.html`.
    pub(crate) fn to_html_shorthand_path_string(&self, file_name: &str) -> String {
        format!("{}{}", self.to_html_path_prefix(), file_name)
    }
    /// Create a path prefix `&str` for navigation from the `module.depth()`.
    fn to_html_path_prefix(&self) -> String {
        (0..self.depth()).map(|_| "../").collect::<String>()
    }
    /// The depth of a module as `usize`.
    pub(crate) fn depth(&self) -> usize {
        self.module_prefixes.len()
    }
    /// Create a new [ModuleInfo] from a `TyModule`.
    pub(crate) fn from_ty_module(module_prefixes: Vec<String>, attributes: Option<String>) -> Self {
        Self {
            module_prefixes,
            attributes,
        }
    }
    /// Create a new [ModuleInfo] from a `CallPath`.
    pub(crate) fn from_call_path(call_path: &CallPath) -> Self {
        let module_prefixes = call_path
            .prefixes
            .iter()
            .map(|p| p.as_str().to_string())
            .collect::<Vec<String>>();
        Self {
            module_prefixes,
            attributes: None,
        }
    }
    /// Create a new [ModuleInfo] from a `&[String]`.
    pub(crate) fn from_vec_str(module_prefixes: &[String]) -> Self {
        Self {
            module_prefixes: module_prefixes.to_owned(),
            attributes: None,
        }
    }
    /// Returns a preview snippet of the module-level doc attributes, if any.
    pub(crate) fn preview_opt(&self) -> Option<String> {
        create_preview(self.attributes.clone())
    }
}
#[cfg(test)]
mod tests {
    use super::ModuleInfo;
    #[test]
    fn test_parent() {
        let project = String::from("project_name");
        let module = String::from("module_name");
        let mut module_vec = vec![project.clone(), module];

        // With two prefixes, the parent of the module is the project root.
        let info = ModuleInfo::from_ty_module(module_vec.clone(), None);
        assert_eq!(info.parent(), Some(&project));

        // With a single prefix (the root itself) there is no parent.
        module_vec.pop();
        let info = ModuleInfo::from_ty_module(module_vec, None);
        assert_eq!(info.parent(), None);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/src/doc/mod.rs | forc-plugins/forc-doc/src/doc/mod.rs | //! Handles conversion of compiled typed Sway programs into [Document]s that can be rendered into HTML.
mod descriptor;
pub mod module;
use crate::{
doc::{descriptor::Descriptor, module::ModuleInfo},
render::{
item::{components::*, context::DocImplTrait, documentable_type::DocumentableType},
link::DocLink,
util::{
format::docstring::{create_preview, DocStrings},
strip_generic_suffix,
},
},
};
use anyhow::Result;
use rayon::prelude::*;
use std::{
collections::HashMap,
ops::{Deref, DerefMut},
option::Option,
};
use sway_core::{
decl_engine::DeclEngine,
language::ty::{TyAstNodeContent, TyDecl, TyImplSelfOrTrait, TyModule, TyProgram, TySubmodule},
Engines,
};
use sway_features::ExperimentalFeatures;
use sway_types::{BaseIdent, Spanned};
/// The ordered collection of [Document]s gathered from a compiled program.
#[derive(Default, Clone)]
pub struct Documentation(pub Vec<Document>);
impl Documentation {
    /// Gather [Documentation] from the [TyProgram].
    ///
    /// Walks the root module and all submodules collecting documentable
    /// declarations, then attaches each `impl` block to the page of the type it
    /// implements for (including synthesized pages for primitive types).
    pub fn from_ty_program(
        engines: &Engines,
        project_name: &str,
        typed_program: &TyProgram,
        document_private_items: bool,
        experimental: ExperimentalFeatures,
    ) -> Result<Documentation> {
        // the first module prefix will always be the project name
        let mut docs = Documentation::default();
        let mut impl_traits: Vec<(TyImplSelfOrTrait, ModuleInfo)> = Vec::new();
        let module_info = ModuleInfo::from_ty_module(vec![project_name.to_owned()], None);
        Documentation::from_ty_module(
            engines.de(),
            &module_info,
            &typed_program.root_module,
            &mut docs,
            &mut impl_traits,
            document_private_items,
            experimental,
        )?;
        // this is the same process as before but for submodules
        for (_, ref typed_submodule) in &typed_program.root_module.submodules {
            let attributes = (!typed_submodule.module.attributes.is_empty())
                .then(|| typed_submodule.module.attributes.to_html_string());
            let module_prefix =
                ModuleInfo::from_ty_module(vec![project_name.to_owned()], attributes);
            Documentation::from_ty_submodule(
                engines.de(),
                typed_submodule,
                &mut docs,
                &mut impl_traits,
                &module_prefix,
                document_private_items,
                experimental,
            )?;
        }
        // Index of documented traits by name, used to resolve trait links below.
        // NOTE(review): keyed by trait name alone — two traits with the same
        // name in different modules would collide; confirm this is acceptable.
        let trait_decls = docs
            .iter()
            .filter_map(|d| {
                (d.item_header.friendly_name == "trait").then_some((
                    d.item_header.item_name.clone(),
                    d.item_header.module_info.clone(),
                ))
            })
            .collect::<HashMap<BaseIdent, ModuleInfo>>();
        // Add one documentation page for each primitive type that has an implementation.
        let primitive_docs: Vec<_> = impl_traits
            .par_iter()
            .filter_map(|(impl_trait, module_info)| {
                let impl_for_type = engines.te().get(impl_trait.implementing_for.type_id);
                if let Ok(Descriptor::Documentable(doc)) =
                    Descriptor::from_type_info(impl_for_type.as_ref(), engines, module_info.clone())
                {
                    Some(doc)
                } else {
                    None
                }
            })
            .collect();
        // Add unique primitive docs
        for doc in primitive_docs {
            if !docs.contains(&doc) {
                docs.push(doc);
            }
        }
        // match for the spans to add the impl_traits to their corresponding doc:
        // currently this compares the spans as str, but this needs to change
        // to compare the actual types
        for doc in docs.iter_mut() {
            let mut impl_trait_vec: Vec<DocImplTrait> = Vec::new();
            let mut inherent_impl_vec: Vec<DocImplTrait> = Vec::new();
            // Check for implementations of the current struct/enum/primitive.
            match doc.item_body.ty {
                DocumentableType::Declared(TyDecl::StructDecl(_))
                | DocumentableType::Declared(TyDecl::EnumDecl(_))
                | DocumentableType::Primitive(_) => {
                    let item_name = &doc.item_header.item_name;
                    for (impl_trait, _) in impl_traits.iter_mut() {
                        // Check if this implementation is for this struct/enum.
                        if item_name.as_str()
                            == strip_generic_suffix(impl_trait.implementing_for.span().as_str())
                        {
                            // Link to the trait's own page when it is documented
                            // here; otherwise canonicalize the trait's call path.
                            let module_info_override = if let Some(decl_module_info) =
                                trait_decls.get(&impl_trait.trait_name.suffix)
                            {
                                Some(decl_module_info.module_prefixes.clone())
                            } else {
                                impl_trait.trait_name = impl_trait
                                    .trait_name
                                    .to_canonical_path(engines, &typed_program.namespace);
                                None
                            };
                            let doc_impl_trait = DocImplTrait {
                                impl_for_module: doc.module_info.clone(),
                                impl_trait: impl_trait.clone(),
                                module_info_override,
                            };
                            if doc_impl_trait.is_inherent() {
                                inherent_impl_vec.push(doc_impl_trait);
                            } else {
                                impl_trait_vec.push(doc_impl_trait);
                            }
                        }
                    }
                }
                _ => {}
            }
            if !impl_trait_vec.is_empty() {
                doc.item_body.item_context.impl_traits = Some(impl_trait_vec);
            }
            if !inherent_impl_vec.is_empty() {
                doc.item_body.item_context.inherent_impls = Some(inherent_impl_vec);
            }
        }
        Ok(docs)
    }
    /// Collects documentable declarations from one module's AST nodes, in
    /// parallel. `impl` blocks are set aside into `impl_traits` for later
    /// attachment to the pages of the types they implement for.
    fn from_ty_module(
        decl_engine: &DeclEngine,
        module_info: &ModuleInfo,
        ty_module: &TyModule,
        docs: &mut Documentation,
        impl_traits: &mut Vec<(TyImplSelfOrTrait, ModuleInfo)>,
        document_private_items: bool,
        experimental: ExperimentalFeatures,
    ) -> Result<()> {
        // Each node yields either an impl block or (maybe) a document;
        // the tuple keeps both cases flowing through one parallel map.
        let results: Result<Vec<_>, anyhow::Error> = ty_module
            .all_nodes
            .par_iter()
            .filter_map(|ast_node| {
                if let TyAstNodeContent::Declaration(ref decl) = ast_node.content {
                    Some(decl)
                } else {
                    None
                }
            })
            .map(|decl| {
                if let TyDecl::ImplSelfOrTrait(impl_trait) = decl {
                    let impl_data = (
                        (*decl_engine.get_impl_self_or_trait(&impl_trait.decl_id)).clone(),
                        module_info.clone(),
                    );
                    Ok((Some(impl_data), None))
                } else {
                    let desc = Descriptor::from_typed_decl(
                        decl_engine,
                        decl,
                        module_info.clone(),
                        document_private_items,
                        experimental,
                    )?;
                    let doc = match desc {
                        Descriptor::Documentable(doc) => Some(doc),
                        Descriptor::NonDocumentable => None,
                    };
                    Ok((None, doc))
                }
            })
            .collect();
        for (impl_trait_opt, doc_opt) in results? {
            if let Some(impl_trait) = impl_trait_opt {
                impl_traits.push(impl_trait);
            }
            if let Some(doc) = doc_opt {
                docs.push(doc);
            }
        }
        Ok(())
    }
    /// Recursively collects documentation from a submodule and all of its
    /// nested submodules, extending the module prefix chain at each level.
    fn from_ty_submodule(
        decl_engine: &DeclEngine,
        typed_submodule: &TySubmodule,
        docs: &mut Documentation,
        impl_traits: &mut Vec<(TyImplSelfOrTrait, ModuleInfo)>,
        module_info: &ModuleInfo,
        document_private_items: bool,
        experimental: ExperimentalFeatures,
    ) -> Result<()> {
        let mut module_info = module_info.to_owned();
        module_info
            .module_prefixes
            .push(typed_submodule.mod_name_span.as_str().to_owned());
        Documentation::from_ty_module(
            decl_engine,
            &module_info.clone(),
            &typed_submodule.module,
            docs,
            impl_traits,
            document_private_items,
            experimental,
        )?;
        for (_, submodule) in &typed_submodule.module.submodules {
            Documentation::from_ty_submodule(
                decl_engine,
                submodule,
                docs,
                impl_traits,
                &module_info,
                document_private_items,
                experimental,
            )?;
        }
        Ok(())
    }
}
/// A finalized Document ready to be rendered. We want to retain all
/// information including spans, fields on structs, variants on enums etc.
#[derive(Clone, Debug)]
pub struct Document {
    /// The module this item was declared in.
    pub module_info: ModuleInfo,
    /// Name, friendly type name and module info for the page header.
    pub item_header: ItemHeader,
    /// The page body: code snippet, attributes and item context.
    pub item_body: ItemBody,
    /// Raw rendered doc attributes; used for preview generation.
    pub raw_attributes: Option<String>,
}
impl Document {
    /// Creates an HTML file name from the [Document].
    pub fn html_filename(&self) -> String {
        use sway_core::language::ty::TyDecl::StorageDecl;
        // Storage declarations are anonymous, so their file name carries no item name.
        let name = if matches!(
            self.item_body.ty,
            DocumentableType::Declared(StorageDecl { .. })
        ) {
            None
        } else {
            Some(self.item_header.item_name.as_str())
        };
        Document::create_html_filename(self.item_body.ty.doc_name(), name)
    }
    fn create_html_filename(ty: &str, name: Option<&str>) -> String {
        match name {
            // storage does not have an Ident
            None => format!("{ty}.html"),
            Some(name) => format!("{ty}.{name}.html"),
        }
    }
    /// Generate link info used in navigation between docs.
    pub fn link(&self) -> DocLink {
        DocLink {
            name: self.item_header.item_name.as_str().to_owned(),
            module_info: self.module_info.clone(),
            html_filename: self.html_filename(),
            preview_opt: self.preview_opt(),
        }
    }
    /// A preview snippet built from the item's raw doc attributes, if any.
    pub fn preview_opt(&self) -> Option<String> {
        create_preview(self.raw_attributes.clone())
    }
}
impl PartialEq for Document {
    /// Two documents are considered equal when they share both an item name
    /// and the same module prefix path.
    fn eq(&self, other: &Self) -> bool {
        let same_name = self.item_header.item_name == other.item_header.item_name;
        let same_path = self.item_header.module_info.module_prefixes
            == other.item_header.module_info.module_prefixes;
        same_name && same_path
    }
}
// Lets `Documentation` be used directly as a `Vec<Document>` for reads.
impl Deref for Documentation {
    type Target = Vec<Document>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Mutable counterpart of the `Deref` impl above; enables `push` etc. on `Documentation`.
impl DerefMut for Documentation {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/tests/lib.rs | forc-plugins/forc-doc/tests/lib.rs | use dir_indexer::get_relative_file_paths_set;
use expect_test::{expect, Expect};
use forc_doc::{self, generate_docs, Command, DocResult};
use std::{
collections::HashSet,
path::{Path, PathBuf},
};
/// The path to the generated HTML of the type the traits are implemented on.
const IMPL_FOR: &str = "bar/struct.Bar.html";
/// Directory containing the fixture projects used by these tests.
const DATA_DIR: &str = "tests/fixtures";
/// File name of the generated JavaScript search index.
const JS_SEARCH_FILE_PATH: &str = "search.js";
#[test]
fn builds_lib_std_docs() {
    // Smoke test: doc generation over the standard library must succeed.
    let std_lib_path = Path::new("./../../sway-lib-std");
    let build_instructions = Command {
        path: Some(std_lib_path.to_str().unwrap().to_string()),
        ..Default::default()
    };
    println!("Building docs for {:?}", build_instructions.path);
    assert!(generate_docs(&build_instructions).is_ok());
}
#[test]
fn test_impl_traits_default() {
    // Snapshot test: generate docs for the `impl_traits` fixture (dependencies
    // included) and pin the rendered HTML page, the search index, and the
    // exact set of files produced under the doc root.
    let doc_dir_name: &str = "impl_traits_default";
    let project_name = "impl_traits";
    let command = Command {
        path: Some(format!("{DATA_DIR}/{project_name}")),
        doc_path: Some(doc_dir_name.into()),
        ..Default::default()
    };
    let (doc_path, _doc_result) = generate_docs(&command).unwrap();
    // Pin the generated page for the `Bar` struct (path given by `IMPL_FOR`).
    assert_index_html(
        &doc_path,
        project_name,
        &expect![[r##"
            <!DOCTYPE html><html><head><meta charset="utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0"><meta name="generator" content="swaydoc"><meta name="description" content="API documentation for the Sway `Bar` struct in `bar`."><meta name="keywords" content="sway, swaylang, sway-lang, Bar"><link rel="icon" href="../../static.files/sway-logo.svg"><title>Bar in bar - Sway</title><link rel="stylesheet" type="text/css" href="../../static.files/normalize.css"><link rel="stylesheet" type="text/css" href="../../static.files/swaydoc.css" id="mainThemeStyle"><link rel="stylesheet" type="text/css" href="../../static.files/ayu.css"><link rel="stylesheet" href="../../static.files/ayu.min.css"></head><body class="swaydoc struct"><nav class="sidebar"><a class="sidebar-logo" href="../../impl_traits/index.html"><div class="logo-container"><img class="sway-logo" src="../../static.files/sway-logo.svg" alt="logo"></div></a><h2 class="location">Struct Bar</h2><div class="sidebar-elems"><section><h3><a href="#methods">Methods</a></h3><ul class="block method"><li><a href="#method.foo_bar">foo_bar</a></li></ul></section><section><h3><a href="#trait-implementations">Trait Implementations</a></h3><ul class="block method"><li><a href="#impl-Foo">Foo</a></li><li><a href="#impl-Baz">Baz</a></li><li><a href="#impl-Add">Add</a></li><li><a href="#impl-Subtract">Subtract</a></li></ul></section></div></nav><main><div class="width-limiter"><script src="../../search.js" type="text/javascript"></script><script>function onSearchFormSubmit(event){event.preventDefault();const searchQuery=document.getElementById("search-input").value;const url=new URL(window.location.href);if(searchQuery){url.searchParams.set('search',searchQuery);}else{url.searchParams.delete('search');}history.pushState({search:searchQuery},"",url);window.dispatchEvent(new HashChangeEvent("hashchange"));}document.addEventListener('DOMContentLoaded',()=>{const searchbar=document.getElementById("search-input");const 
searchForm=document.getElementById("search-form");searchbar.addEventListener("keyup",function(event){onSearchFormSubmit(event);});searchbar.addEventListener("search",function(event){onSearchFormSubmit(event);});function onQueryParamsChange(){const searchParams=new URLSearchParams(window.location.search);const query=searchParams.get("search");const searchSection=document.getElementById('search');const mainSection=document.getElementById('main-content');const searchInput=document.getElementById('search-input');if(query){searchInput.value=query;const results=Object.values(SEARCH_INDEX).flat().filter(item=>{const lowerQuery=query.toLowerCase();return item.name.toLowerCase().includes(lowerQuery);});const header=`<h1>Results for ${query}</h1>`;if(results.length>0){const resultList=results.map(item=>{const formattedName=`<span class="type ${item.type_name}">${item.name}</span>`;const name=item.type_name==="module"?[...item.module_info.slice(0,-1),formattedName].join("::"):[...item.module_info,formattedName].join("::");const path=["../..",...item.module_info,item.html_filename].join("/");const left=`<td><span>${name}</span></td>`;const right=`<td><p>${item.preview}</p></td>`;return`<tr onclick="window.location='${path}';">${left}${right}</tr>`;}).join('');searchSection.innerHTML=`${header}<table>${resultList}</table>`;}else{searchSection.innerHTML=`${header}<p>No results found.</p>`;}searchSection.setAttribute("class","search-results");mainSection.setAttribute("class","content hidden");}else{searchSection.setAttribute("class","search-results hidden");mainSection.setAttribute("class","content");}}window.addEventListener('hashchange',onQueryParamsChange);onQueryParamsChange();});</script><nav class="sub"><form id="search-form" class="search-form" onsubmit="onSearchFormSubmit(event)"><div class="search-container"><input id="search-input" class="search-input" name="search" autocomplete="off" spellcheck="false" placeholder="Search the docs..." 
type="search"></div></form></nav><section id="main-content" class="content"><div class="main-heading"><h1 class="fqn"><span class="in-band">Struct <a class="mod" href="../index.html">impl_traits</a><span>::</span><a class="mod" href="index.html">bar</a><span>::</span><a class="struct" href="#">Bar</a></span></h1></div><div class="docblock item-decl"><pre class="sway struct"><code>pub struct Bar {}</code></pre></div><h2 id="methods" class="small-section-header">Implementations<a href="#methods" class="anchor"></a></h2><div id="methods-list"><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Bar" class="impl has-srclink"><a href="#impl-Bar" class="anchor"></a><h3 class="code-header in-band">impl Bar</h3></div></summary><div class="impl-items"><div id="method.foo_bar" class="method trait-impl"><a href="#method.foo_bar" class="anchor"></a><h4 class="code-header">fn <a class="fnname" href="#method.foo_bar">foo_bar</a>()</h4></div></div></details></div><h2 id="trait-implementations" class="small-section-header">Trait Implementations<a href="#trait-implementations" class="anchor"></a></h2><div id="trait-implementations-list"><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Foo" class="impl has-srclink"><a href="#impl-Foo" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="../foo/trait.Foo.html">Foo</a> for Bar</h3></div></summary><div class="impl-items"><details class="swaydoc-toggle method-toggle" open><summary><div id="method.foo" class="method trait-impl"><a href="#method.foo" class="anchor"></a><h4 class="code-header">pub fn <a class="fnname" href="#method.foo">foo</a>()</h4></div></summary><div class="docblock"><p>something more about foo();</p>
</div></details></div></details><div id="impl-Baz" class="impl has-srclink"><a href="#impl-Baz" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="../foo/trait.Baz.html">Baz</a> for Bar</h3></div><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Add" class="impl has-srclink"><a href="#impl-Add" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="..//trait.Add.html">Add</a> for Bar</h3></div></summary><div class="impl-items"><div id="method.add" class="method trait-impl"><a href="#method.add" class="anchor"></a><h4 class="code-header">pub fn <a class="fnname" href="#method.add">add</a>(self, other: Self) -> Self</h4></div></div></details><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Subtract" class="impl has-srclink"><a href="#impl-Subtract" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="../../ops/trait.Subtract.html">Subtract</a> for Bar</h3></div></summary><div class="impl-items"><div id="method.subtract" class="method trait-impl"><a href="#method.subtract" class="anchor"></a><h4 class="code-header">pub fn <a class="fnname" href="#method.subtract">subtract</a>(self, other: Self) -> Self</h4></div></div></details></div></section><section id="search" class="search-results"></section></div></main><script src="../../static.files/highlight.js"></script><script>hljs.highlightAll();</script></body></html>"##]],
    );
    // Pin the generated `search.js` index contents.
    assert_search_js(
        &doc_path,
        &expect![[
            r#"var SEARCH_INDEX={"impl_traits":[{"html_filename":"trait.Foo.html","module_info":["impl_traits","foo"],"name":"Foo","preview":"","type_name":"trait"},{"html_filename":"trait.Baz.html","module_info":["impl_traits","foo"],"name":"Baz","preview":"","type_name":"trait"},{"html_filename":"struct.Bar.html","module_info":["impl_traits","bar"],"name":"Bar","preview":"","type_name":"struct"},{"html_filename":"index.html","module_info":["impl_traits","bar"],"name":"bar","preview":"","type_name":"module"},{"html_filename":"index.html","module_info":["impl_traits","foo"],"name":"foo","preview":"","type_name":"module"}],"ops":[{"html_filename":"trait.Add.html","module_info":["ops"],"name":"Add","preview":"","type_name":"trait"},{"html_filename":"trait.Subtract.html","module_info":["ops"],"name":"Subtract","preview":"","type_name":"trait"},{"html_filename":"index.html","module_info":["ops"],"name":"ops","preview":"","type_name":"module"}]};
"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=SEARCH_INDEX);"#
        ]],
    );
    // Pin the exact set of emitted files (no extras, none missing).
    assert_file_tree(
        doc_dir_name,
        project_name,
        vec![
            "impl_traits/foo/trait.Foo.html",
            "impl_traits/foo/index.html",
            "impl_traits/all.html",
            "ops/trait.Subtract.html",
            "ops/all.html",
            "impl_traits/bar/struct.Bar.html",
            "impl_traits/bar/index.html",
            "ops/trait.Add.html",
            "search.js",
            "impl_traits/index.html",
            "ops/index.html",
            "impl_traits/foo/trait.Baz.html",
        ],
    );
}
#[test]
fn test_workspace_docs() {
    // End-to-end check of workspace doc generation: the result metadata, the
    // per-library doc trees, the shared search index, and the workspace index
    // page linking to each member library.
    let doc_dir_name: &str = "workspace_docs";
    let workspace_name = "sample_workspace";
    let command = Command {
        path: Some(format!("{DATA_DIR}/{workspace_name}")),
        doc_path: Some(doc_dir_name.into()),
        ..Default::default()
    };
    let (doc_path, doc_result) = generate_docs(&command).unwrap();
    // Verify that we got a workspace result
    match &doc_result {
        DocResult::Workspace { name, libraries } => {
            assert_eq!(name, workspace_name);
            assert_eq!(
                libraries.len(),
                2,
                "Expected 2 libraries, found {}: {:?}",
                libraries.len(),
                libraries
            );
            assert!(libraries.iter().any(|lib| lib.name == "lib_a"));
            assert!(libraries.iter().any(|lib| lib.name == "lib_b"));
        }
        DocResult::Package(_) => panic!("Expected workspace result, got package"),
    }
    // Check that workspace index.html was created
    let workspace_index_path = doc_path.join("index.html");
    assert!(
        workspace_index_path.exists(),
        "Workspace index.html should exist"
    );
    // Check that library-specific docs were created
    let lib_a_index = doc_path.join("lib_a").join("index.html");
    let lib_b_index = doc_path.join("lib_b").join("index.html");
    assert!(lib_a_index.exists(), "lib_a index.html should exist");
    assert!(lib_b_index.exists(), "lib_b index.html should exist");
    // Check that search.js was created
    let search_js = doc_path.join("search.js");
    assert!(search_js.exists(), "search.js should exist");
    // Read and verify the workspace index contains library links
    let workspace_content = std::fs::read_to_string(&workspace_index_path).unwrap();
    assert!(
        workspace_content.contains("lib_a/index.html"),
        "Workspace index should link to lib_a"
    );
    assert!(
        workspace_content.contains("lib_b/index.html"),
        "Workspace index should link to lib_b"
    );
    assert!(
        workspace_content.contains("This workspace contains the following libraries"),
        "Should contain workspace description"
    );
}
#[test]
fn test_impl_traits_no_deps() {
let doc_dir_name: &str = "impl_traits_no_deps";
let project_name: &str = "impl_traits_generic";
let command = Command {
path: Some(format!("{DATA_DIR}/{project_name}")),
doc_path: Some(doc_dir_name.into()),
no_deps: true,
..Default::default()
};
let (doc_path, _doc_result) = generate_docs(&command).unwrap();
assert_index_html(
&doc_path,
project_name,
&expect![[r##"
<!DOCTYPE html><html><head><meta charset="utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0"><meta name="generator" content="swaydoc"><meta name="description" content="API documentation for the Sway `Bar` struct in `bar`."><meta name="keywords" content="sway, swaylang, sway-lang, Bar"><link rel="icon" href="../../static.files/sway-logo.svg"><title>Bar in bar - Sway</title><link rel="stylesheet" type="text/css" href="../../static.files/normalize.css"><link rel="stylesheet" type="text/css" href="../../static.files/swaydoc.css" id="mainThemeStyle"><link rel="stylesheet" type="text/css" href="../../static.files/ayu.css"><link rel="stylesheet" href="../../static.files/ayu.min.css"></head><body class="swaydoc struct"><nav class="sidebar"><a class="sidebar-logo" href="../../impl_traits_generic/index.html"><div class="logo-container"><img class="sway-logo" src="../../static.files/sway-logo.svg" alt="logo"></div></a><h2 class="location">Struct Bar</h2><div class="sidebar-elems"><section><h3><a href="#trait-implementations">Trait Implementations</a></h3><ul class="block method"><li><a href="#impl-Foo">Foo</a></li><li><a href="#impl-Baz">Baz</a></li><li><a href="#impl-Bar">Bar</a></li><li><a href="#impl-Add">Add</a></li><li><a href="#impl-Subtract">Subtract</a></li></ul></section></div></nav><main><div class="width-limiter"><script src="../../search.js" type="text/javascript"></script><script>function onSearchFormSubmit(event){event.preventDefault();const searchQuery=document.getElementById("search-input").value;const url=new URL(window.location.href);if(searchQuery){url.searchParams.set('search',searchQuery);}else{url.searchParams.delete('search');}history.pushState({search:searchQuery},"",url);window.dispatchEvent(new HashChangeEvent("hashchange"));}document.addEventListener('DOMContentLoaded',()=>{const searchbar=document.getElementById("search-input");const 
searchForm=document.getElementById("search-form");searchbar.addEventListener("keyup",function(event){onSearchFormSubmit(event);});searchbar.addEventListener("search",function(event){onSearchFormSubmit(event);});function onQueryParamsChange(){const searchParams=new URLSearchParams(window.location.search);const query=searchParams.get("search");const searchSection=document.getElementById('search');const mainSection=document.getElementById('main-content');const searchInput=document.getElementById('search-input');if(query){searchInput.value=query;const results=Object.values(SEARCH_INDEX).flat().filter(item=>{const lowerQuery=query.toLowerCase();return item.name.toLowerCase().includes(lowerQuery);});const header=`<h1>Results for ${query}</h1>`;if(results.length>0){const resultList=results.map(item=>{const formattedName=`<span class="type ${item.type_name}">${item.name}</span>`;const name=item.type_name==="module"?[...item.module_info.slice(0,-1),formattedName].join("::"):[...item.module_info,formattedName].join("::");const path=["../..",...item.module_info,item.html_filename].join("/");const left=`<td><span>${name}</span></td>`;const right=`<td><p>${item.preview}</p></td>`;return`<tr onclick="window.location='${path}';">${left}${right}</tr>`;}).join('');searchSection.innerHTML=`${header}<table>${resultList}</table>`;}else{searchSection.innerHTML=`${header}<p>No results found.</p>`;}searchSection.setAttribute("class","search-results");mainSection.setAttribute("class","content hidden");}else{searchSection.setAttribute("class","search-results hidden");mainSection.setAttribute("class","content");}}window.addEventListener('hashchange',onQueryParamsChange);onQueryParamsChange();});</script><nav class="sub"><form id="search-form" class="search-form" onsubmit="onSearchFormSubmit(event)"><div class="search-container"><input id="search-input" class="search-input" name="search" autocomplete="off" spellcheck="false" placeholder="Search the docs..." 
type="search"></div></form></nav><section id="main-content" class="content"><div class="main-heading"><h1 class="fqn"><span class="in-band">Struct <a class="mod" href="../index.html">impl_traits_generic</a><span>::</span><a class="mod" href="index.html">bar</a><span>::</span><a class="struct" href="#">Bar</a></span></h1></div><div class="docblock item-decl"><pre class="sway struct"><code>pub struct Bar<T> {}</code></pre></div><h2 id="trait-implementations" class="small-section-header">Trait Implementations<a href="#trait-implementations" class="anchor"></a></h2><div id="trait-implementations-list"><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Foo" class="impl has-srclink"><a href="#impl-Foo" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="../foo/trait.Foo.html">Foo</a> for Bar<T></h3></div></summary><div class="impl-items"><details class="swaydoc-toggle method-toggle" open><summary><div id="method.foo" class="method trait-impl"><a href="#method.foo" class="anchor"></a><h4 class="code-header">pub fn <a class="fnname" href="#method.foo">foo</a>()</h4></div></summary><div class="docblock"><p>something more about foo();</p>
</div></details></div></details><div id="impl-Baz" class="impl has-srclink"><a href="#impl-Baz" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="../foo/trait.Baz.html">Baz</a> for Bar<T></h3></div><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Bar" class="impl has-srclink"><a href="#impl-Bar" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="../bar/trait.Bar.html">Bar</a> for Bar<T></h3></div></summary><div class="impl-items"><div id="method.foo_bar" class="method trait-impl"><a href="#method.foo_bar" class="anchor"></a><h4 class="code-header">fn <a class="fnname" href="#method.foo_bar">foo_bar</a>()</h4></div></div></details><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Add" class="impl has-srclink"><a href="#impl-Add" class="anchor"></a><h3 class="code-header in-band">impl <a class="trait" href="..//trait.Add.html">Add</a> for Bar<T></h3></div></summary><div class="impl-items"><div id="method.add" class="method trait-impl"><a href="#method.add" class="anchor"></a><h4 class="code-header">pub fn <a class="fnname" href="#method.add">add</a>(self, other: Self) -> Self</h4></div></div></details><details class="swaydoc-toggle implementors-toggle" open><summary><div id="impl-Subtract" class="impl has-srclink"><a href="#impl-Subtract" class="anchor"></a><h3 class="code-header in-band">impl Subtract for Bar<T></h3></div></summary><div class="impl-items"><div id="method.subtract" class="method trait-impl"><a href="#method.subtract" class="anchor"></a><h4 class="code-header">pub fn <a class="fnname" href="#method.subtract">subtract</a>(self, other: Self) -> Self</h4></div></div></details></div></section><section id="search" class="search-results"></section></div></main><script src="../../static.files/highlight.js"></script><script>hljs.highlightAll();</script></body></html>"##]],
);
assert_search_js(
&doc_path,
&expect![[
r#"var SEARCH_INDEX={"impl_traits_generic":[{"html_filename":"trait.Foo.html","module_info":["impl_traits_generic","foo"],"name":"Foo","preview":"","type_name":"trait"},{"html_filename":"trait.Baz.html","module_info":["impl_traits_generic","foo"],"name":"Baz","preview":"","type_name":"trait"},{"html_filename":"struct.Bar.html","module_info":["impl_traits_generic","bar"],"name":"Bar","preview":"","type_name":"struct"},{"html_filename":"index.html","module_info":["impl_traits_generic","bar"],"name":"bar","preview":"","type_name":"module"},{"html_filename":"index.html","module_info":["impl_traits_generic","foo"],"name":"foo","preview":"","type_name":"module"}]};
"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=SEARCH_INDEX);"#
]],
);
assert_file_tree(
doc_dir_name,
project_name,
vec![
"impl_traits_generic/index.html",
"impl_traits_generic/all.html",
"impl_traits_generic/foo/trait.Foo.html",
"impl_traits_generic/bar/index.html",
"impl_traits_generic/foo/index.html",
"impl_traits_generic/foo/trait.Baz.html",
"search.js",
"impl_traits_generic/bar/struct.Bar.html",
],
);
}
/// Asserts the snapshot of the generated HTML page for the type the traits
/// are implemented on (`IMPL_FOR`) within the given project.
fn assert_index_html(doc_path: &Path, project_name: &str, expect: &Expect) {
    let relative = PathBuf::from(format!("{project_name}/{IMPL_FOR}"));
    check_file(doc_path, &relative, expect);
}
/// Asserts the snapshot of the generated `search.js` index file.
fn assert_search_js(doc_path: &Path, expect: &Expect) {
    check_file(doc_path, &PathBuf::from(JS_SEARCH_FILE_PATH), expect);
}
/// Reads the file at `doc_path`/`path_to_file` and asserts its contents
/// match the `expect` snapshot.
///
/// # Panics
/// Panics if the file cannot be read.
fn check_file(doc_path: &Path, path_to_file: &PathBuf, expect: &Expect) {
    let path = doc_path.join(path_to_file);
    // Borrow the path instead of cloning it: `read_to_string` takes
    // `impl AsRef<Path>`, so the `PathBuf` clone was redundant.
    let actual = std::fs::read_to_string(&path)
        .unwrap_or_else(|_| panic!("failed to read file: {path:?}"));
    expect.assert_eq(&actual)
}
/// Asserts that the set of files under the generated doc root is exactly
/// `expected_files` (relative paths) — no missing files, no extras.
fn assert_file_tree(doc_dir_name: &str, project_name: &str, expected_files: Vec<&str>) {
    let doc_root: PathBuf = format!("{DATA_DIR}/{project_name}/out/{doc_dir_name}").into();
    let expected = expected_files
        .iter()
        .map(PathBuf::from)
        .collect::<HashSet<PathBuf>>();
    let files = get_relative_file_paths_set(doc_root.clone());
    // Compute the symmetric difference only on mismatch so the assertion
    // message pinpoints exactly which paths differ.
    if files != expected {
        let diffs = files.symmetric_difference(&expected);
        assert_eq!(
            files, expected,
            "Symmetric Difference: {diffs:?} at {doc_root:?}"
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-doc/benches/bench_main.rs | forc-plugins/forc-doc/benches/bench_main.rs | use codspeed_criterion_compat::{criterion_group, criterion_main, Criterion};
use forc_doc::{compile, compile_html, Command, DocContext};
use std::path::Path;
/// Criterion benchmark: compiles the std library docs model once up front,
/// then measures only the HTML rendering step (`compile_html`).
fn benchmarks(c: &mut Criterion) {
    let path = Path::new("./../../sway-lib-std");
    let opts = Command {
        path: Some(path.to_str().unwrap().to_string()),
        ..Default::default()
    };
    let ctx = DocContext::from_options(&opts).unwrap();
    // Compilation happens outside the timed loop so the benchmark isolates rendering.
    let compile_results = compile(&ctx, &opts).unwrap().collect::<Vec<_>>();
    c.bench_function("build_std_lib_docs", |b| {
        b.iter(|| {
            // `compile_html` takes `&mut`, so give each iteration a fresh clone
            // to keep iterations independent.
            let mut results = compile_results.clone();
            let _ = compile_html(&opts, &ctx, &mut results);
        });
    });
}
criterion_group! {
    name = benches;
    // Longer measurement window: each iteration renders the full std-lib docs.
    config = Criterion::default().measurement_time(std::time::Duration::from_secs(10));
    targets = benchmarks
}
criterion_main!(benches);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-fmt/src/main.rs | forc-plugins/forc-fmt/src/main.rs | //! A `forc` plugin for running the Sway code formatter.
use anyhow::{bail, Result};
use clap::Parser;
use forc_pkg::{
manifest::{GenericManifestFile, ManifestFile},
WorkspaceManifestFile,
};
use forc_tracing::{init_tracing_subscriber, println_error, println_green, println_red};
use forc_util::fs_locking::is_file_dirty;
use prettydiff::{basic::DiffOp, diff_lines};
use std::{
collections::HashMap,
default::Default,
fs,
path::{Path, PathBuf},
};
use sway_features::ExperimentalFeatures;
use sway_utils::{constants, find_parent_manifest_dir, get_sway_files, is_sway_file};
use swayfmt::Formatter;
use taplo::formatter as taplo_fmt;
use tracing::{debug, info};
// Example invocations shown in `forc fmt --help` (wired in via `after_help = help()`
// on the `App` derive below).
forc_util::cli_examples! {
    crate::App {
        [ Run the formatter in check mode on the current directory => "forc fmt --check"]
        [ Run the formatter in check mode on the current directory with short format => "forc fmt -c"]
        [ Run formatter against a given file => "forc fmt --file {path}/src/main.sw"]
        [ Run formatter against a given file with short format => "forc fmt -f {path}/src/main.sw"]
        [ Run formatter against a given dir => "forc fmt --path {path}"]
        [ Run formatter against a given dir with short format => "forc fmt -p {path}"]
    }
}
// CLI definition for the `forc-fmt` plugin.
// NOTE: the `///` doc comments on fields below double as clap's help text;
// editing them changes the `forc fmt --help` output.
#[derive(Debug, Parser)]
#[clap(
    name = "forc-fmt",
    about = "Forc plugin for running the Sway code formatter",
    after_help = help(),
    version
)]
pub struct App {
    /// Run in 'check' mode.
    ///
    /// - Exits with `0` if input is formatted correctly.
    /// - Exits with `1` and prints a diff if formatting is required.
    #[clap(short, long)]
    pub check: bool,
    /// Path to the project.
    ///
    /// If not specified, current working directory will be used.
    #[clap(short, long)]
    pub path: Option<String>,
    #[clap(short, long)]
    /// Formats a single .sw file with the default settings.
    /// If not specified, current working directory will be formatted using a Forc.toml
    /// configuration.
    pub file: Option<String>,
    // Experimental feature flags shared across forc tooling.
    #[command(flatten)]
    experimental: sway_features::CliFields,
}
/// Entry point: set up tracing, run the formatter, and exit non-zero on failure.
fn main() {
    init_tracing_subscriber(Default::default());
    let outcome = run();
    if let Err(err) = outcome {
        println_error("Formatting skipped due to error.");
        println_error(&format!("{err}"));
        std::process::exit(1);
    }
}
/// Parses CLI args and dispatches formatting: a single file (`--file`),
/// a whole workspace, or a single package, depending on the manifest found.
fn run() -> Result<()> {
    let app = App::parse();
    // Default to the current working directory when no --path is given.
    let dir = match app.path.as_ref() {
        Some(path) => PathBuf::from(path),
        None => std::env::current_dir()?,
    };
    // Resolve experimental feature flags from the CLI flags.
    // NOTE(review): the first argument appears to be manifest-declared
    // features and is empty here — confirm against `ExperimentalFeatures::new`.
    let experimental = ExperimentalFeatures::new(
        &HashMap::default(),
        &app.experimental.experimental,
        &app.experimental.no_experimental,
    )
    .map_err(|err| anyhow::anyhow!("{err}"))?;
    let mut formatter = Formatter::from_dir(&dir, experimental)?;
    // --file: format exactly one Sway file, then return early.
    if let Some(f) = app.file.as_ref() {
        let file_path = &PathBuf::from(f);
        if is_sway_file(file_path) {
            format_file(&app, file_path.to_path_buf(), &mut formatter)?;
            return Ok(());
        }
        bail!(
            "Provided file '{}' is not a valid Sway file",
            file_path.display()
        );
    };
    // Otherwise, the manifest kind at `dir` decides workspace vs. package mode.
    let manifest_file = forc_pkg::manifest::ManifestFile::from_dir(&dir)?;
    match manifest_file {
        ManifestFile::Workspace(ws) => {
            format_workspace_at_dir(&app, &ws, &dir, experimental)?;
        }
        ManifestFile::Package(_) => {
            format_pkg_at_dir(&app, &dir, &mut formatter)?;
        }
    }
    Ok(())
}
/// Recursively get a Vec<PathBuf> of subdirectories that contains a Forc.toml.
///
/// Traverses the tree iteratively with an explicit work stack; unreadable
/// directories are silently skipped.
fn get_sway_dirs(workspace_dir: PathBuf) -> Vec<PathBuf> {
    let mut found = Vec::new();
    let mut pending = vec![workspace_dir];
    while let Some(current) = pending.pop() {
        let Ok(entries) = fs::read_dir(current) else {
            continue;
        };
        for entry in entries.flatten() {
            let path = entry.path();
            if !path.is_dir() {
                continue;
            }
            // Keep descending regardless; record the dir if it holds a manifest.
            pending.push(path.clone());
            if path.join(constants::MANIFEST_FILE_NAME).exists() {
                found.push(path);
            }
        }
    }
    found
}
/// Format a file, given its path.
/// Returns:
/// - Ok(true) if executed successfully and formatted,
/// - Ok(false) if executed successfully and not formatted,
/// - Err if it fails to execute at all.
///
/// In `--check` mode the file is never written; a diff is printed instead
/// and `Ok(true)` signals a formatting violation.
fn format_file(app: &App, file: PathBuf, formatter: &mut Formatter) -> Result<bool> {
    let file = file.canonicalize()?;
    // Refuse to format a file with unsaved editor changes, to avoid clobbering them.
    if is_file_dirty(&file) {
        bail!(
            "The below file is open in an editor and contains unsaved changes.\n \
            Please save it before formatting.\n \
            {}",
            file.display()
        );
    }
    if let Ok(file_content) = fs::read_to_string(&file) {
        let mut edited = false;
        match Formatter::format(formatter, file_content.as_str().into()) {
            Ok(formatted_content) => {
                if app.check {
                    // Check mode: report the diff, do not write.
                    if *file_content != formatted_content {
                        info!("File was edited by formatter: \n{:?}\n", file);
                        display_file_diff(&file_content, &formatted_content)?;
                        edited = true;
                    }
                } else {
                    // Normal mode: overwrite the file in place.
                    write_file_formatted(&file, &formatted_content)?;
                }
                return Ok(edited);
            }
            Err(err) => {
                // TODO: Support formatting for incomplete/invalid sway code.
                // https://github.com/FuelLabs/sway/issues/5012
                debug!("{}", err);
                if let Some(file) = file.to_str() {
                    bail!("Failed to compile {}\n{}", file, err);
                } else {
                    bail!("Failed to compile.\n{}", err);
                }
            }
        }
    }
    bail!("Could not read file: {:?}", file)
}
/// Format the workspace at the given directory.
///
/// Formats loose Sway files at the workspace root, then every subdirectory
/// containing a `Forc.toml` (via `format_pkg_at_dir`), and finally the root
/// manifest itself. In `--check` mode, bails here only for root-manifest
/// violations; member violations bail inside `format_pkg_at_dir`.
fn format_workspace_at_dir(
    app: &App,
    workspace: &WorkspaceManifestFile,
    dir: &Path,
    experimental: ExperimentalFeatures,
) -> Result<()> {
    let mut contains_edits = false;
    let mut formatter = Formatter::from_dir(dir, experimental)?;
    // NOTE(review): `members` is collected but never read below; kept because
    // `member_paths()?` also surfaces manifest errors — confirm whether this
    // loop is dead code.
    let mut members = vec![];
    for member_path in workspace.member_paths()? {
        members.push(member_path)
    }
    // Format files at the root - we do not want to start calling format_pkg_at_dir() here,
    // since this would mean we format twice on each subdirectory.
    if let Ok(read_dir) = fs::read_dir(dir) {
        for entry in read_dir.filter_map(|res| res.ok()) {
            let path = entry.path();
            if is_sway_file(&path) {
                format_file(app, path, &mut formatter)?;
            }
        }
    }
    // Format subdirectories. We do not call format on members directly here, since
    // in workspaces, it is perfectly valid to have subdirectories containing Sway files,
    // yet not be a member of the workspace.
    for sub_dir in get_sway_dirs(dir.to_path_buf()) {
        if sub_dir.join(constants::MANIFEST_FILE_NAME).exists() {
            // Here, we cannot simply call Formatter::from_dir() and rely on defaults
            // if there is no swayfmt.toml in the sub directory because we still want
            // to use the swayfmt.toml at the workspace root (if any).
            // In order of priority: member > workspace > default.
            formatter = Formatter::from_dir(&sub_dir, experimental)?;
        }
        format_pkg_at_dir(app, &sub_dir, &mut formatter)?;
    }
    let manifest_file = dir.join(constants::MANIFEST_FILE_NAME);
    // Finally, format the root manifest using taplo formatter
    contains_edits |= format_manifest(app, manifest_file)?;
    if app.check && contains_edits {
        // One or more files are not formatted, exit with error
        bail!("Files contain formatting violations.");
    }
    Ok(())
}
/// Format the given manifest at a path.
/// Returns:
/// - Ok(true) if executed successfully and formatted,
/// - Ok(false) if executed successfully and not formatted,
/// - Err if it fails to execute at all.
///
/// Uses the taplo TOML formatter with default options. In `--check` mode the
/// manifest is never written; a diff is printed on mismatch instead.
fn format_manifest(app: &App, manifest_file: PathBuf) -> Result<bool> {
    if let Ok(manifest_content) = fs::read_to_string(&manifest_file) {
        let mut edited = false;
        // TODO: Alphabetize tables excluding the project table when https://github.com/tamasfe/taplo/issues/763 is supported
        let formatted_content = taplo_fmt::format(&manifest_content, taplo_fmt::Options::default());
        if !app.check {
            // Normal mode: always (re)write; writing identical content is harmless.
            write_file_formatted(&manifest_file, &formatted_content)?;
        } else if formatted_content != manifest_content {
            // Check mode with a violation: report and flag as edited.
            edited = true;
            println_error(&format!(
                "Improperly formatted manifest file: {}",
                manifest_file.display()
            ));
            display_file_diff(&manifest_content, &formatted_content)?;
        } else {
            info!(
                "Manifest Forc.toml formatted correctly: {}",
                manifest_file.display()
            )
        }
        return Ok(edited);
    };
    bail!("failed to format manifest: {:?}", manifest_file)
}
/// Format the package at the given directory.
///
/// Locates the enclosing manifest directory, formats every Sway file under it
/// plus the `Forc.toml` manifest, and in `--check` mode bails if any of them
/// needed changes.
fn format_pkg_at_dir(app: &App, dir: &Path, formatter: &mut Formatter) -> Result<()> {
    match find_parent_manifest_dir(dir) {
        Some(path) => {
            // Build the manifest path before `path` is moved into
            // `get_sway_files`; the previous `path.clone()` was redundant.
            let manifest_file = path.join(constants::MANIFEST_FILE_NAME);
            let files = get_sway_files(path);
            let mut contains_edits = false;
            for file in files {
                contains_edits |= format_file(app, file, formatter)?;
            }
            // format manifest using taplo formatter
            contains_edits |= format_manifest(app, manifest_file)?;
            if app.check && contains_edits {
                // One or more files are not formatted, exit with error
                bail!("Files contain formatting violations.");
            }
            Ok(())
        }
        _ => bail!("Manifest file does not exist"),
    }
}
/// Print a unified-style colored diff between the original and formatted
/// content: unchanged lines via `info!`, additions in green, removals in red.
fn display_file_diff(file_content: &str, formatted_content: &str) -> Result<()> {
    let changeset = diff_lines(file_content, formatted_content);
    // Cap the number of reported change hunks so enormous diffs stay readable.
    let mut reported_hunks = 0;
    for op in changeset.diff() {
        // max 100 updates
        if reported_hunks >= 100 {
            break;
        }
        match op {
            DiffOp::Equal(old) => {
                // Context lines do not count towards the hunk limit.
                for o in old {
                    info!("{}", o)
                }
            }
            DiffOp::Insert(new) => {
                reported_hunks += 1;
                for n in new {
                    println_green(&format!("+{n}"));
                }
            }
            DiffOp::Remove(old) => {
                reported_hunks += 1;
                for o in old {
                    println_red(&format!("-{o}"));
                }
            }
            DiffOp::Replace(old, new) => {
                // A replace hunk shows removals before additions.
                reported_hunks += 1;
                for o in old {
                    println_red(&format!("-{o}"));
                }
                for n in new {
                    println_green(&format!("+{n}"));
                }
            }
        }
    }
    Ok(())
}
/// Persist `formatted_content` to `file`, overwriting any previous contents.
fn write_file_formatted(file: &Path, formatted_content: &str) -> Result<()> {
    Ok(fs::write(file, formatted_content)?)
}
#[cfg(test)]
mod tests {
use super::taplo_fmt;
use std::default::Default;
#[test]
fn test_forc_indentation() {
let correct_forc_manifest = r#"
[project]
authors = ["Fuel Labs <contact@fuel.sh>"]
license = "Apache-2.0"
name = "Fuel example project"
[dependencies]
std = { git = "https://github.com/FuelLabs/sway-lib-std", version = "v0.0.1" }
"#;
let taplo_alphabetize = taplo_fmt::Options {
reorder_keys: true,
..Default::default()
};
let formatted_content = taplo_fmt::format(correct_forc_manifest, taplo_alphabetize.clone());
assert_eq!(formatted_content, correct_forc_manifest);
let indented_forc_manifest = r#"
[project]
authors = ["Fuel Labs <contact@fuel.sh>"]
license = "Apache-2.0"
name = "Fuel example project"
[dependencies]
std = { git = "https://github.com/FuelLabs/sway-lib-std", version = "v0.0.1" }
"#;
let formatted_content =
taplo_fmt::format(indented_forc_manifest, taplo_alphabetize.clone());
assert_eq!(formatted_content, correct_forc_manifest);
let whitespace_forc_manifest = r#"
[project]
authors=["Fuel Labs <contact@fuel.sh>"]
license = "Apache-2.0"
name = "Fuel example project"
[dependencies]
std = { git = "https://github.com/FuelLabs/sway-lib-std" , version = "v0.0.1" }
"#;
let formatted_content = taplo_fmt::format(whitespace_forc_manifest, taplo_alphabetize);
assert_eq!(formatted_content, correct_forc_manifest);
}
#[test]
fn test_forc_alphabetization() {
let correct_forc_manifest = r#"
[project]
authors = ["Fuel Labs <contact@fuel.sh>"]
license = "Apache-2.0"
name = "Fuel example project"
[dependencies]
std = { git = "https://github.com/FuelLabs/sway-lib-std", version = "v0.0.1" }
"#;
let taplo_alphabetize = taplo_fmt::Options {
reorder_keys: true,
..Default::default()
};
let formatted_content = taplo_fmt::format(correct_forc_manifest, taplo_alphabetize.clone());
assert_eq!(formatted_content, correct_forc_manifest);
let disordered_forc_manifest = r#"
[project]
name = "Fuel example project"
license = "Apache-2.0"
authors = ["Fuel Labs <contact@fuel.sh>"]
[dependencies]
std = { git = "https://github.com/FuelLabs/sway-lib-std", version = "v0.0.1" }
"#;
let formatted_content = taplo_fmt::format(disordered_forc_manifest, taplo_alphabetize);
assert_eq!(formatted_content, correct_forc_manifest);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/lib.rs | forc-plugins/forc-publish/src/lib.rs | pub mod credentials;
/// Error types shared across the plugin.
pub mod error;
/// HTTP client for the forc.pub registry API.
pub mod forc_pub_client;
/// README `{{#include}}` flattening (internal).
mod md_pre_process;
/// Project tarball creation.
pub mod tarball;
/// Pre-publish project validation.
pub mod validate;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/forc_pub_client.rs | forc-plugins/forc-publish/src/forc_pub_client.rs | use crate::error::Error;
use crate::error::Result;
use reqwest::StatusCode;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use url::Url;
use uuid::Uuid;
/// The publish request.
#[derive(Serialize, Debug)]
pub struct PublishRequest {
/// Identifier returned by a prior `upload_project` call; tells the server
/// which uploaded tarball to publish.
pub upload_id: Uuid,
}
/// The publish response.
#[derive(Serialize, Deserialize, Debug)]
pub struct PublishResponse {
/// Name of the published package.
pub name: String,
/// Semver version that was published.
pub version: Version,
}
/// The response to an upload_project request.
#[derive(Deserialize, Debug)]
pub struct UploadResponse {
/// Server-assigned id for the uploaded tarball; passed to `publish` later.
pub upload_id: Uuid,
}
/// Thin HTTP client around the forc.pub registry API.
pub struct ForcPubClient {
// Reusable reqwest client (connection pooling).
client: reqwest::Client,
// Base URL of the registry; endpoints are joined onto it.
uri: Url,
}
impl ForcPubClient {
pub fn new(uri: Url) -> Self {
let client = reqwest::Client::new();
Self { client, uri }
}
/// Uploads the given file to the server
pub async fn upload<P: AsRef<Path>>(&self, file_path: P, forc_version: &str) -> Result<Uuid> {
use futures_util::StreamExt;
use std::io::{stdout, Write};
let url = self
.uri
.join(&format!("upload_project?forc_version={forc_version}"))?;
let file_bytes = fs::read(file_path)?;
let response = self
.client
.post(url)
.header("Content-Type", "application/gzip")
.body(file_bytes)
.send()
.await;
if let Ok(response) = response {
let mut stream = response.bytes_stream();
// Process the SSE stream.
// The server sends events in the format: "data: <event>\n\n" or
// ": <event>\n\n" for keep-alive events.
// The first event is usually a progress event, and the last one contains the upload_id
// or an error message. If the stream is open for more than 60 seconds, it will be closed
// by the server, and we will return an HTTPError.
while let Some(chunk) = stream.next().await {
match chunk {
Ok(bytes) => {
let event_str = String::from_utf8_lossy(&bytes);
for event in event_str.split("\n\n") {
if let Some(stripped) = event.strip_prefix("data:") {
let data = &stripped.trim();
if let Ok(upload_response) =
serde_json::from_str::<UploadResponse>(data)
{
return Ok(upload_response.upload_id);
} else if data.starts_with("{") {
// Attempt to parse error from JSON
return Err(Error::ApiResponseError {
status: StatusCode::INTERNAL_SERVER_ERROR,
error: data.to_string(),
});
} else {
// Print the event data, replacing the previous message.
print!("\r\x1b[2K => {data}");
stdout().flush().unwrap();
}
}
// else if event.starts_with(":") {
// These are keep-alive events. Uncomment if you need to debug them.
// println!("Keep-alive event: {}", event);
// }
}
}
Err(e) => {
return Err(Error::HttpError(e));
}
}
}
Err(Error::ServerError)
} else {
eprintln!("Error during upload initiation: {response:?}");
Err(Error::ServerError)
}
}
/// Publishes the given upload_id to the registry
pub async fn publish(&self, upload_id: Uuid, auth_token: &str) -> Result<PublishResponse> {
let url = self.uri.join("publish")?;
let publish_request = PublishRequest { upload_id };
let response = self
.client
.post(url)
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {auth_token}"))
.json(&publish_request)
.send()
.await?;
let status = response.status();
if status.is_success() {
let publish_response: PublishResponse = response.json().await?;
Ok(publish_response)
} else {
Err(Error::from_response(response).await)
}
}
}
#[cfg(test)]
mod test {
use super::*;
use reqwest::StatusCode;
use serde_json::json;
use std::fs;
use tempfile::NamedTempFile;
use uuid::Uuid;
use wiremock::matchers::{method, path, query_param};
use wiremock::{Mock, MockServer, ResponseTemplate};
// Spins up a wiremock server and a client pointed at it.
async fn get_mock_client_server() -> (ForcPubClient, MockServer) {
let mock_server = MockServer::start().await;
let url = Url::parse(&mock_server.uri()).expect("url");
let mock_client = ForcPubClient::new(url);
(mock_client, mock_server)
}
// Happy path: the SSE stream ends with an `upload_id` payload.
#[tokio::test]
async fn test_upload_success() {
let (client, mock_server) = get_mock_client_server().await;
let upload_id = Uuid::new_v4();
// Simulate SSE response with a progress event and a final upload_id event
let sse_body = format!(
"data: uploading...\n\n\
data: {{\"upload_id\":\"{upload_id}\"}}\n\n"
);
Mock::given(method("POST"))
.and(path("/upload_project"))
.and(query_param("forc_version", "0.66.5"))
.respond_with(
ResponseTemplate::new(200)
.insert_header("Content-Type", "text/event-stream")
.set_body_string(sse_body),
)
.mount(&mock_server)
.await;
// Create a temporary gzip file
let temp_file = NamedTempFile::new().unwrap();
fs::write(temp_file.path(), b"test content").unwrap();
let result = client.upload(temp_file.path(), "0.66.5").await;
assert!(result.is_ok());
assert_eq!(result.unwrap(), upload_id);
}
// A JSON data event that is not an UploadResponse is surfaced as an
// ApiResponseError carrying the raw payload.
#[tokio::test]
async fn test_upload_server_error() {
let (client, mock_server) = get_mock_client_server().await;
// Simulate SSE error event
let sse_body = "data: {\"error\":\"Internal Server Error\"}\n\n";
Mock::given(method("POST"))
.and(path("/upload_project"))
.respond_with(
ResponseTemplate::new(200)
.insert_header("Content-Type", "text/event-stream")
.set_body_string(sse_body),
)
.mount(&mock_server)
.await;
let temp_file = NamedTempFile::new().unwrap();
fs::write(temp_file.path(), b"test content").unwrap();
let result = client.upload(temp_file.path(), "0.66.5").await;
assert!(result.is_err());
match result {
Err(Error::ApiResponseError { status, error }) => {
assert_eq!(status, StatusCode::INTERNAL_SERVER_ERROR);
assert_eq!(error, "{\"error\":\"Internal Server Error\"}");
}
_ => panic!("Expected ApiResponseError"),
}
}
// Happy path for publish: 200 with a JSON PublishResponse body.
#[tokio::test]
async fn test_publish_success() {
let (client, mock_server) = get_mock_client_server().await;
let publish_response = json!({
"name": "test_project",
"version": "1.0.0"
});
Mock::given(method("POST"))
.and(path("/publish"))
.respond_with(ResponseTemplate::new(200).set_body_json(&publish_response))
.mount(&mock_server)
.await;
let upload_id = Uuid::new_v4();
let result = client.publish(upload_id, "valid_auth_token").await;
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(response.name, "test_project");
assert_eq!(response.version.to_string(), "1.0.0");
}
// A 401 is mapped to ApiResponseError with the server's error string.
#[tokio::test]
async fn test_publish_unauthorized() {
let (client, mock_server) = get_mock_client_server().await;
Mock::given(method("POST"))
.and(path("/publish"))
.respond_with(ResponseTemplate::new(401).set_body_json(json!({
"error": "Unauthorized"
})))
.mount(&mock_server)
.await;
let upload_id = Uuid::new_v4();
let result = client.publish(upload_id, "invalid_token").await;
assert!(result.is_err());
match result {
Err(Error::ApiResponseError { status, error }) => {
assert_eq!(status, StatusCode::UNAUTHORIZED);
assert_eq!(error, "Unauthorized");
}
_ => panic!("Expected ApiResponseError"),
}
}
// A 500 is mapped the same way, preserving the status code.
#[tokio::test]
async fn test_publish_server_error() {
let (client, mock_server) = get_mock_client_server().await;
Mock::given(method("POST"))
.and(path("/publish"))
.respond_with(ResponseTemplate::new(500).set_body_json(json!({
"error": "Internal Server Error"
})))
.mount(&mock_server)
.await;
let upload_id = Uuid::new_v4();
let result = client.publish(upload_id, "valid_token").await;
assert!(result.is_err());
match result {
Err(Error::ApiResponseError { status, error }) => {
assert_eq!(status, StatusCode::INTERNAL_SERVER_ERROR);
assert_eq!(error, "Internal Server Error");
}
_ => panic!("Expected ApiResponseError"),
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/tarball.rs | forc-plugins/forc-publish/src/tarball.rs | use crate::error::Result;
use crate::md_pre_process::flatten_markdown;
use crate::validate::validate_dir;
use flate2::write::GzEncoder;
use flate2::Compression;
use forc_tracing::println_warning;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use tar::Builder;
use tempfile::{tempdir, TempDir};
use walkdir::WalkDir;
// File name of the tarball produced inside the caller-supplied temp dir.
const TARBALL_FILE_NAME: &str = "sway-project.tgz";

/// Creates a .tgz tarball from the current directory in a temporary location.
/// Returns the path to the created tarball.
///
/// Pipeline: validate the project layout, copy it (minus `/out/`) into a
/// scratch directory, flatten README includes, then gzip-tar the scratch
/// directory into `temp_tarball_dir`.
pub fn create_tarball_from_current_dir(temp_tarball_dir: &TempDir) -> Result<PathBuf> {
    let current_dir = std::env::current_dir()?;
    // NOTE: the stored source had `¤t_dir` here — a mis-decoded HTML
    // entity for `&current_dir`; restored so the code compiles.
    validate_dir(&current_dir)?;
    // Copy project to a temporary directory, excluding `/out/`
    let temp_project_dir = tempdir()?;
    copy_project_excluding_out(temp_project_dir.path())?;
    // Process README.md if it exists
    process_readme(temp_project_dir.path())?;
    // Pack the temp directory into a tarball
    let tarball_path = temp_tarball_dir.path().join(TARBALL_FILE_NAME);
    let tar_gz = File::create(&tarball_path)?;
    let enc = GzEncoder::new(tar_gz, Compression::default());
    let mut tar = Builder::new(enc);
    tar.append_dir_all(".", &temp_project_dir)?;
    tar.finish()?;
    // Return the tarball path
    Ok(tarball_path)
}
// Process README.md by flattening includes, if it exists.
// Flattening failures are reported as warnings and never abort the publish.
fn process_readme(temp_project_dir: &Path) -> Result<()> {
    let readme_path = temp_project_dir.join("README.md");
    // Nothing to do for projects without a README.
    if !readme_path.exists() {
        return Ok(());
    }
    match flatten_markdown(&readme_path) {
        Ok(flattened) => fs::write(&readme_path, flattened)?,
        // Log warning but don't fail the publish
        Err(e) => println_warning(&format!("Failed to flatten README.md includes: {e}")),
    }
    Ok(())
}
/// Copies the current directory (excluding `/out/`) to a temporary directory.
///
/// Directory structure is preserved; walker entries that cannot be read are
/// skipped silently.
fn copy_project_excluding_out(temp_project_dir: &Path) -> Result<()> {
    let current_dir = std::env::current_dir()?;
    // NOTE: the stored source had `¤t_dir` below — mis-decoded HTML
    // entities for `&current_dir`; restored so the code compiles.
    for entry in WalkDir::new(&current_dir)
        .into_iter()
        .filter_map(|e| e.ok())
    {
        let path = entry.path();
        let relative_path = path.strip_prefix(&current_dir)?;
        // Skip the `/out` directory (build artifacts are not published).
        // `Path::starts_with` matches whole components, so e.g. `output.txt`
        // is NOT skipped.
        if relative_path.starts_with("out") {
            continue;
        }
        let new_path = temp_project_dir.join(relative_path);
        if path.is_dir() {
            fs::create_dir_all(&new_path)?;
        } else {
            fs::copy(path, &new_path)?;
        }
    }
    Ok(())
}
#[cfg(test)]
mod test {
use super::*;
use crate::error::Error;
use flate2::read::GzDecoder;
use serial_test::serial;
use std::{env, fs};
use tar::Archive;
use tempfile::tempdir;
// These tests mutate the process-wide working directory via
// `env::set_current_dir`, hence `#[serial]` to avoid cross-test races.
#[test]
#[serial]
fn test_create_tarball_success() {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let tests_path = manifest_dir.join("tests").join("data");
let src_dir = tests_path.join("success_with_no_deps");
// Create another temporary directory for storing the tarball
let temp_output_dir = tempdir().unwrap();
// Run the function
env::set_current_dir(&src_dir).unwrap();
let result = create_tarball_from_current_dir(&temp_output_dir);
assert!(result.is_ok());
// Check that the tarball file was created
let tarball_path = result.unwrap();
assert!(tarball_path.exists());
// Verify that the tarball contains Forc.toml
let tar_file = fs::File::open(&tarball_path).unwrap();
let tar = GzDecoder::new(tar_file);
let mut archive = Archive::new(tar);
let mut contains_forc_toml = false;
for entry in archive.entries().unwrap() {
let entry = entry.unwrap();
let path = entry.path().unwrap().to_path_buf();
if path.ends_with("Forc.toml") {
contains_forc_toml = true;
break;
}
}
assert!(contains_forc_toml, "Tarball should contain Forc.toml");
}
// Validation must reject a directory with no Forc.toml before any packing.
#[test]
#[serial]
fn test_create_tarball_fails_without_forc_toml() {
// Create a temporary directory that DOES NOT contain Forc.toml
let temp_project_dir = tempdir().unwrap();
// Create another temporary directory for storing the tarball
let temp_output_dir = tempdir().unwrap();
// Run the function, expecting an error
env::set_current_dir(&temp_project_dir).unwrap();
let result = create_tarball_from_current_dir(&temp_output_dir);
assert!(matches!(result, Err(Error::ForcTomlNotFound)));
}
// Build artifacts under `out/` must be excluded; sources must be kept.
#[test]
#[serial]
fn test_create_tarball_excludes_out_dir() {
let temp_project_dir = tempdir().unwrap();
// Create necessary files
fs::write(
temp_project_dir.path().join("Forc.toml"),
"[project]\nname = \"test_project\"\nversion = \"0.0.0\"\nentry = \"main.sw\"\nlicense=\"Apache-2.0\"",
)
.unwrap();
fs::create_dir(temp_project_dir.path().join("src/")).unwrap();
fs::write(temp_project_dir.path().join("src/main.sw"), "fn main() {}").unwrap();
// Create an `out/debug/` directory with a dummy file
let out_dir = temp_project_dir.path().join("out/debug/");
fs::create_dir_all(&out_dir).unwrap();
fs::write(out_dir.join("compiled.bin"), "binary content").unwrap();
// Create temp dir for tarball storage
let temp_output_dir = tempdir().unwrap();
// Change working directory to our fake project
std::env::set_current_dir(temp_project_dir.path()).unwrap();
// Run the function
let result = create_tarball_from_current_dir(&temp_output_dir);
assert!(result.is_ok());
let tarball_path = result.unwrap();
assert!(tarball_path.exists());
// Verify that the tarball does NOT contain `out/`
let tar_file = fs::File::open(&tarball_path).unwrap();
let tar = GzDecoder::new(tar_file);
let mut archive = Archive::new(tar);
let mut contains_forc_toml = false;
let mut contains_main_sw = false;
let mut contains_out_dir = false;
for entry in archive.entries().unwrap() {
let entry = entry.unwrap();
let path = entry.path().unwrap().to_path_buf();
if path.starts_with("out") {
contains_out_dir = true;
} else if path.ends_with("Forc.toml") {
contains_forc_toml = true;
} else if path.ends_with("src/main.sw") {
contains_main_sw = true;
}
}
assert!(
!contains_out_dir,
"Tarball should not contain the `out/` directory"
);
assert!(contains_forc_toml, "Tarball should contain Forc.toml");
assert!(contains_main_sw, "Tarball should contain main.sw");
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/error.rs | forc-plugins/forc-publish/src/error.rs | use reqwest::StatusCode;
use serde::Deserialize;
/// Crate-wide result alias: every fallible forc-publish operation uses [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
/// All error cases surfaced by the forc-publish plugin.
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("I/O error: {0}")]
IoError(#[from] std::io::Error),
#[error("Json error: {0}")]
JsonError(#[from] serde_json::Error),
#[error("HTTP error: {0}")]
HttpError(#[from] reqwest::Error),
#[error("TOML error: {0}")]
TomlError(#[from] toml::ser::Error),
#[error("URL error: {0}")]
UrlError(#[from] url::ParseError),
#[error("Failed to get relative path")]
RelativePathError(#[from] std::path::StripPrefixError),
/// Structured error returned by the registry API (built in `Error::from_response`).
#[error("{error}")]
ApiResponseError { status: StatusCode, error: String },
#[error("Forc.toml not found in the current directory")]
ForcTomlNotFound,
#[error("Invalid forc.toml: {0}")]
InvalidForcToml(#[from] anyhow::Error),
#[error("missing version field in [project] section of Forc.toml")]
MissingVersionField,
#[error("Workspace is not supported yet, deploy each member separately")]
WorkspaceNotSupported,
/// Dependency declared by git/path instead of a registry version.
#[error("{0} is not a forc.pub dependency, depend on it using version.")]
DependencyMissingVersion(String),
/// Fallback when an upload stream ends without a usable result.
#[error("Server error")]
ServerError,
#[error("Readme pre-process error: {0}")]
MDPreProcessError(#[from] crate::md_pre_process::error::MDPreProcessError),
}
/// JSON body the registry returns on failure: `{"error": "..."}`.
#[derive(Deserialize)]
pub struct ApiErrorResponse {
error: String,
}
impl Error {
/// Converts a `reqwest::Response` into an `ApiError`
pub async fn from_response(response: reqwest::Response) -> Self {
let status = response.status();
match response.json::<ApiErrorResponse>().await {
Ok(parsed_error) => Error::ApiResponseError {
status,
error: parsed_error.error,
},
Err(err) => Error::ApiResponseError {
status,
error: format!("Unexpected API error: {err}"),
},
}
}
}
#[cfg(test)]
mod test {
use super::*;
use reqwest::StatusCode;
use serde_json::json;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
// A well-formed `{"error": ...}` body is unwrapped into the error string.
#[tokio::test]
async fn test_error_from_response_with_valid_json() {
let mock_server = MockServer::start().await;
// Simulated JSON API error response
let error_json = json!({
"error": "Invalid request data"
});
Mock::given(method("POST"))
.and(path("/test"))
.respond_with(ResponseTemplate::new(400).set_body_json(&error_json))
.mount(&mock_server)
.await;
let client = reqwest::Client::new();
let response = client
.post(format!("{}/test", mock_server.uri()))
.send()
.await
.unwrap();
let error = Error::from_response(response).await;
match error {
Error::ApiResponseError { status, error } => {
assert_eq!(status, StatusCode::BAD_REQUEST);
assert_eq!(error, "Invalid request data");
}
_ => panic!("Expected ApiResponseError"),
}
}
// A non-JSON body falls back to the "Unexpected API error" message while
// still preserving the HTTP status.
#[tokio::test]
async fn test_error_from_response_with_invalid_json() {
let mock_server = MockServer::start().await;
// Simulated invalid JSON response (causing deserialization failure)
let invalid_json = "not a json object";
Mock::given(method("POST"))
.and(path("/test"))
.respond_with(ResponseTemplate::new(500).set_body_string(invalid_json))
.mount(&mock_server)
.await;
let client = reqwest::Client::new();
let response = client
.post(format!("{}/test", mock_server.uri()))
.send()
.await
.unwrap();
let error = Error::from_response(response).await;
match error {
Error::ApiResponseError { status, error } => {
assert_eq!(status, StatusCode::INTERNAL_SERVER_ERROR);
assert_eq!(error, "Unexpected API error: error decoding response body");
}
_ => panic!("Expected ApiResponseError"),
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/credentials.rs | forc-plugins/forc-publish/src/credentials.rs | use crate::error::Result;
use forc_util::user_forc_directory;
use serde::{Deserialize, Serialize};
use std::fs;
use std::io::{self};
use std::path::PathBuf;
use toml;
// File name under the user's forc directory that stores the registry token.
const CREDENTIALS_FILE: &str = "credentials.toml";
/// The `[registry]` table of `credentials.toml`.
#[derive(Serialize, Deserialize)]
struct Registry {
// The forc.pub auth token.
token: String,
}
/// On-disk schema of `credentials.toml`.
#[derive(Serialize, Deserialize)]
struct Credentials {
registry: Registry,
}
/// Gets the user's auth token.
/// - First checks CLI arguments.
/// - Then checks `~/.forc/credentials.toml` inside the `[registry]` section.
/// - If neither are found, prompts the user and saves it to `credentials.toml`.
pub fn get_auth_token(
opt_token: Option<String>,
credentials_dir: Option<PathBuf>,
) -> Result<String> {
if let Some(token) = opt_token {
return Ok(token);
}
if let Ok(token) = std::env::var("FORC_PUB_TOKEN") {
return Ok(token);
}
let credentials_path = credentials_dir
.unwrap_or(user_forc_directory())
.join(CREDENTIALS_FILE);
if let Some(token) = get_auth_token_from_file(&credentials_path)? {
return Ok(token);
}
let auth_token =
get_auth_token_from_user_input(&credentials_path, io::stdin().lock(), io::stdout())?;
Ok(auth_token)
}
// Check if credentials file exists and read the stored token from it.
// A malformed credentials file is treated the same as a missing one.
fn get_auth_token_from_file(path: &PathBuf) -> Result<Option<String>> {
    if !path.exists() {
        return Ok(None);
    }
    let content = fs::read_to_string(path)?;
    let token = toml::from_str::<Credentials>(&content)
        .ok()
        .map(|credentials| credentials.registry.token);
    Ok(token)
}
// Prompt user for input and save to credentials file.
// Generic over reader/writer so tests can drive it with in-memory buffers.
fn get_auth_token_from_user_input<R, W>(
credentials_path: &PathBuf,
mut reader: R,
mut writer: W,
) -> Result<String>
where
R: io::BufRead,
W: io::Write,
{
tracing::info!("Paste your auth token found on https://forc.pub/tokens below: ")
;
writer.flush()?;
let mut auth_token = String::new();
reader.read_line(&mut auth_token)?;
// Strip the trailing newline (and any surrounding whitespace).
let auth_token = auth_token.trim().to_string();
// Save the token to the credentials file
if let Some(parent_path) = credentials_path.parent() {
fs::create_dir_all(parent_path)?;
let credentials = Credentials {
registry: Registry {
token: auth_token.clone(),
},
};
fs::write(credentials_path, toml::to_string(&credentials)?)?;
tracing::info!("Auth token saved to {}", credentials_path.display());
}
Ok(auth_token)
}
#[cfg(test)]
mod test {
use super::*;
use serial_test::serial;
use std::fs;
use tempfile::tempdir;
// `#[serial]` because the env-var test mutates process-global state
// (FORC_PUB_TOKEN) that the other lookups could observe.
#[test]
#[serial]
fn test_get_auth_token_from_cli_arg() {
let token = Some("cli_token".to_string());
let result = get_auth_token(token, None).unwrap();
assert_eq!(result, "cli_token");
}
#[test]
#[serial]
fn test_get_auth_token_from_env() {
std::env::set_var("FORC_PUB_TOKEN", "env_token");
let result = get_auth_token(None, None).unwrap();
std::env::remove_var("FORC_PUB_TOKEN");
assert_eq!(result, "env_token");
}
// Reads the token from a pre-written credentials.toml in a temp dir.
#[test]
#[serial]
fn test_get_auth_token_from_file() {
let temp_dir = tempdir().unwrap();
let cred_path = temp_dir.path().join("credentials.toml");
let credentials = r#"
[registry]
token = "file_token"
"#;
fs::write(&cred_path, credentials).unwrap();
let result = get_auth_token(None, Some(temp_dir.path().into())).unwrap();
assert_eq!(result, "file_token".to_string());
}
// Drives the prompt with an in-memory reader and checks the token is both
// returned and persisted.
#[test]
#[serial]
fn test_get_auth_token_from_user_input() {
let temp_dir = tempdir().unwrap();
let cred_path = temp_dir.path().join("credentials.toml");
let reader = io::Cursor::new(b"user_token");
let result =
get_auth_token_from_user_input(&cred_path.clone(), reader, io::sink()).unwrap();
assert_eq!(result, "user_token");
// Ensure the token is saved in the credentials file
let saved_content = fs::read_to_string(&cred_path).unwrap();
assert!(saved_content.contains("token = \"user_token\""));
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/main.rs | forc-plugins/forc-publish/src/main.rs | use clap::{crate_version, Parser};
use forc_publish::credentials::get_auth_token;
use forc_publish::error::Result;
use forc_publish::forc_pub_client::ForcPubClient;
use forc_publish::tarball::create_tarball_from_current_dir;
use forc_tracing::{
init_tracing_subscriber, println_action_green, println_error, TracingSubscriberOptions,
};
use tempfile::tempdir;
use url::Url;
// Default production endpoint of the forc.pub registry API.
const FORC_PUB_URL: &str = "https://api.forc.pub";
#[derive(Parser, Debug)]
#[clap(name = "forc-publish", version)]
/// Forc plugin for uploading packages to the registry.
pub struct Opt {
/// Token to use when uploading
#[clap(long)]
pub token: Option<String>,
/// The registry URL to use
#[clap(long, default_value = FORC_PUB_URL)]
pub registry_url: String,
}
#[tokio::main]
async fn main() {
    init_tracing_subscriber(TracingSubscriberOptions::default());
    // Report a failure and exit non-zero; success needs no output here.
    match run().await {
        Ok(()) => {}
        Err(err) => {
            println!();
            println_error(&format!("{err}"));
            std::process::exit(1);
        }
    }
}
/// Drives a publish end-to-end: resolve credentials, tar up the current
/// project, upload the tarball, then ask the registry to publish the upload.
async fn run() -> Result<()> {
    let opts = Opt::parse();
    let auth_token = get_auth_token(opts.token, None)?;
    let client = ForcPubClient::new(Url::parse(&opts.registry_url)?);
    // Create the compressed tarball in a scratch dir that lives until the
    // upload finishes.
    let temp_dir = tempdir()?;
    let tarball_path = create_tarball_from_current_dir(&temp_dir)?;
    // Upload the tarball and publish it.
    let upload_id = client.upload(tarball_path, crate_version!()).await?;
    let published = client.publish(upload_id, &auth_token).await?;
    println!();
    println_action_green(
        "Published",
        &format!("{} {}", published.name, published.version),
    );
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/validate.rs | forc-plugins/forc-publish/src/validate.rs | use crate::error::{Error, Result};
use forc_pkg::manifest::{GenericManifestFile, ManifestFile};
use std::path::Path;
/// Checks the following cases for an early error generation:
/// 1. Target dir doesn't contain a Forc.toml
/// 2. Target manifest file doesn't contain a version
/// 3. Target project's dependencies are not all version based (git or path
/// based dep detection)
pub fn validate_dir(path: &Path) -> Result<()> {
// Check if Forc.toml exists
let forc_toml_path = path.join("Forc.toml");
if !forc_toml_path.exists() {
return Err(Error::ForcTomlNotFound);
}
let manifest_file = ManifestFile::from_file(forc_toml_path)?;
match manifest_file {
ManifestFile::Package(package_manifest_file) => {
// Check if the version exists
if package_manifest_file.as_ref().project.version.is_none() {
return Err(Error::MissingVersionField);
}
// Check if all the dependencies are declared with dep
for (dep_name, dep) in package_manifest_file
.as_ref()
.dependencies
.iter()
.flat_map(|deps| deps.iter())
{
if dep.version().is_none() {
return Err(Error::DependencyMissingVersion(dep_name.to_string()));
}
}
}
ManifestFile::Workspace(_) => {
return Err(Error::WorkspaceNotSupported);
}
}
Ok(())
}
#[cfg(test)]
mod test {
use super::validate_dir;
use std::path::PathBuf;
// Fixtures live under tests/data; each test points validate_dir at one.
#[test]
fn without_version_should_fail() {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let tests_path = manifest_dir.join("tests").join("data");
let no_version_project_test = tests_path.join("without_version");
let res = validate_dir(&no_version_project_test);
assert!(res.is_err());
assert!(res.err().is_some_and(|err| {
err.to_string() == "missing version field in [project] section of Forc.toml"
}));
}
// Git/path dependencies without a version must be rejected by name.
#[test]
fn deps_without_version_should_fail() {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let tests_path = manifest_dir.join("tests").join("data");
let no_version_project_test = tests_path.join("deps_without_version");
let res = validate_dir(&no_version_project_test);
assert!(res.is_err());
assert!(res.err().is_some_and(|err| {
err.to_string() == "dep_a is not a forc.pub dependency, depend on it using version."
}));
}
#[test]
fn success_without_deps() {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let tests_path = manifest_dir.join("tests").join("data");
let success_without_deps = tests_path.join("success_with_no_deps");
validate_dir(&success_without_deps).unwrap()
}
#[test]
fn success_with_deps() {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let tests_path = manifest_dir.join("tests").join("data");
let success_without_deps = tests_path.join("success_with_deps");
validate_dir(&success_without_deps).unwrap()
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/md_pre_process/error.rs | forc-plugins/forc-publish/src/md_pre_process/error.rs | use regex::Error as RegexError;
use std::io;
use std::path::PathBuf;
use thiserror::Error;
/// Errors that can occur while flattening `{{#include ...}}` directives in
/// markdown files.
#[derive(Error, Debug)]
pub enum MDPreProcessError {
#[error("I/O error: {0}")]
Io(#[from] io::Error),
#[error("Regex error: {0}")]
Regex(#[from] RegexError),
#[error("Missing include file: {0}")]
MissingInclude(PathBuf),
/// The include graph is not a DAG (raised by the topological sort).
#[error("Cycle detected in includes!")]
Cycle,
#[error("Failed to canonicalize path: {0}")]
Canonicalize(PathBuf),
#[error("Other error: {0}")]
Other(String),
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-publish/src/md_pre_process/mod.rs | forc-plugins/forc-publish/src/md_pre_process/mod.rs | pub(crate) mod error;
use error::MDPreProcessError;
use regex::Regex;
use std::{
collections::{HashMap, HashSet, VecDeque},
fs,
path::{Path, PathBuf},
};
/// A markdown file plus the set of files it directly `{{#include}}`s.
#[derive(Debug)]
struct MarkdownFile {
// Canonicalized path of the file itself.
path: PathBuf,
// Canonicalized paths of its direct includes.
includes: HashSet<PathBuf>,
}
impl MarkdownFile {
/// Reads `path` and collects the canonical paths of its direct
/// `{{#include <rel-path>}}` targets.
fn parse<P: AsRef<Path>>(path: P) -> Result<Self, MDPreProcessError> {
let path = path
.as_ref()
.canonicalize()
.map_err(|_| MDPreProcessError::Canonicalize(path.as_ref().to_path_buf()))?;
let content = fs::read_to_string(&path)?;
// Include paths are resolved relative to the file's own directory.
let dir = path.parent().unwrap_or(Path::new("."));
let re = Regex::new(r"\{\{#include\s+([^\}]+)\}\}")?;
let includes = re
.captures_iter(&content)
.filter_map(|caps| {
let inc_rel = caps[1].trim();
let inc_path = dir.join(inc_rel);
// NOTE(review): includes that fail to canonicalize (e.g. missing
// files) are silently dropped here rather than raising
// `MissingInclude` — confirm this leniency is intended.
inc_path.canonicalize().ok()
})
.collect();
Ok(MarkdownFile { path, includes })
}
}
/// Maps each markdown file to the set of files it directly includes.
#[derive(Debug, Default)]
struct MarkdownDepGraph {
graph: HashMap<PathBuf, HashSet<PathBuf>>,
}
impl MarkdownDepGraph {
    /// Builds the include-dependency graph reachable from `entry`.
    fn build(entry: &Path) -> Result<Self, MDPreProcessError> {
        let mut graph = HashMap::new();
        let mut visited = HashSet::new();
        Self::build_recursive(entry, &mut graph, &mut visited)?;
        Ok(MarkdownDepGraph { graph })
    }
    /// Depth-first walk recording each file's direct includes exactly once.
    #[allow(clippy::iter_over_hash_type)]
    fn build_recursive(
        path: &Path,
        graph: &mut HashMap<PathBuf, HashSet<PathBuf>>,
        visited: &mut HashSet<PathBuf>,
    ) -> Result<(), MDPreProcessError> {
        let file = MarkdownFile::parse(path)?;
        // `insert` returns false when the file was already seen, which both
        // memoizes the walk and stops include cycles from recursing forever.
        if visited.insert(file.path.clone()) {
            for dep in &file.includes {
                Self::build_recursive(dep, graph, visited)?;
            }
            graph.insert(file.path.clone(), file.includes);
        }
        Ok(())
    }
    /// Kahn's algorithm: orders files so every file precedes the files it
    /// includes, or fails with `Cycle` when the graph is not a DAG.
    #[allow(clippy::iter_over_hash_type)]
    fn topological_sort(&self) -> Result<Vec<PathBuf>, MDPreProcessError> {
        let mut in_degree = HashMap::new();
        for (node, deps) in &self.graph {
            in_degree.entry(node.clone()).or_insert(0);
            for dep in deps {
                *in_degree.entry(dep.clone()).or_insert(0) += 1;
            }
        }
        // Seed the queue with files nothing includes (in-degree zero).
        // NOTE: the stored source had `|&(_, °)|` here — a mis-decoded HTML
        // entity for `&deg`; restored so the code compiles.
        let mut queue: VecDeque<_> = in_degree
            .iter()
            .filter(|&(_, &deg)| deg == 0)
            .map(|(n, _)| n.clone())
            .collect();
        let mut sorted = Vec::new();
        let mut processed = 0;
        while let Some(node) = queue.pop_front() {
            sorted.push(node.clone());
            processed += 1;
            if let Some(deps) = self.graph.get(&node) {
                for dep in deps {
                    let deg = in_degree.get_mut(dep).unwrap();
                    *deg -= 1;
                    if *deg == 0 {
                        queue.push_back(dep.clone());
                    }
                }
            }
        }
        // Any node never dequeued kept a positive in-degree: there is a cycle.
        if processed != in_degree.len() {
            return Err(MDPreProcessError::Cycle);
        }
        Ok(sorted)
    }
}
/// Holds the fully-expanded (include-free) content of every processed file.
#[derive(Debug)]
struct MarkdownFlattener {
    // canonical path -> content with all `{{#include ...}}` directives expanded.
    file_contents: HashMap<PathBuf, String>,
}
impl MarkdownFlattener {
    /// Expands every file in `order`, walking from the leaves upwards so each
    /// include target is already flattened before it is spliced in.
    fn flatten_files(order: &[PathBuf]) -> Result<Self, MDPreProcessError> {
        let include_re = Regex::new(r"\{\{#include\s+([^\}]+)\}\}")?;
        let mut flattened = HashMap::new();
        // `order` is topological (roots first), so iterate in reverse:
        // dependencies before dependents.
        for file in order.iter().rev() {
            let raw = fs::read_to_string(file)?;
            let expanded = Self::expand_includes(&raw, file, &flattened, &include_re)?;
            flattened.insert(file.clone(), expanded);
        }
        Ok(MarkdownFlattener {
            file_contents: flattened,
        })
    }
    /// Replaces every `{{#include ...}}` directive in `content` with the
    /// already-flattened content of the referenced file.
    fn expand_includes(
        content: &str,
        file: &Path,
        file_contents: &HashMap<PathBuf, String>,
        re: &Regex,
    ) -> Result<String, MDPreProcessError> {
        let base_dir = file.parent().unwrap_or(Path::new("."));
        let mut output = String::new();
        let mut cursor = 0;
        for caps in re.captures_iter(content) {
            let whole = caps.get(0).unwrap();
            // Copy the literal text between the previous directive and this one.
            output.push_str(&content[cursor..whole.start()]);
            let target = base_dir.join(caps[1].trim());
            match target.canonicalize() {
                Err(_) => return Err(MDPreProcessError::Canonicalize(target)),
                Ok(canonical) => match file_contents.get(&canonical) {
                    None => return Err(MDPreProcessError::MissingInclude(canonical)),
                    Some(included) => output.push_str(included),
                },
            }
            cursor = whole.end();
        }
        // Append whatever trails the final directive.
        output.push_str(&content[cursor..]);
        Ok(output)
    }
    /// Returns the flattened content for `entry`, if it was processed.
    fn get_file(&self, entry: &Path) -> Option<&str> {
        let canonical = entry.canonicalize().ok()?;
        self.file_contents.get(&canonical).map(String::as_str)
    }
}
/// Flattens `entry` by recursively expanding every `{{#include ...}}`
/// directive, returning the fully-inlined markdown text.
pub fn flatten_markdown(entry: &Path) -> Result<String, MDPreProcessError> {
    // Build the include graph, order it (dependencies last), then expand
    // every file leaves-first and return the flattened entry document.
    let order = MarkdownDepGraph::build(entry)?.topological_sort()?;
    let flattener = MarkdownFlattener::flatten_files(&order)?;
    match flattener.get_file(entry) {
        Some(content) => Ok(content.to_string()),
        None => Err(MDPreProcessError::Other(
            "Could not flatten entry file".to_string(),
        )),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::path::Path;
    use tempfile::tempdir;
    /// Writes `content` to `path`, creating parent directories as needed.
    fn write_file<P: AsRef<Path>>(path: P, content: &str) -> Result<(), MDPreProcessError> {
        if let Some(parent) = path.as_ref().parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(path, content)?;
        Ok(())
    }
    #[test]
    fn test_flatten_single_file_no_includes() -> Result<(), MDPreProcessError> {
        let tmp = tempdir()?;
        let entry = tmp.path().join("README.md");
        write_file(&entry, "# Title\n\nHello world!")?;
        assert_eq!(flatten_markdown(&entry)?, "# Title\n\nHello world!");
        Ok(())
    }
    #[test]
    fn test_flatten_simple_include() -> Result<(), MDPreProcessError> {
        let tmp = tempdir()?;
        let entry = tmp.path().join("README.md");
        let included = tmp.path().join("inc.md");
        write_file(&included, "This is included.")?;
        write_file(&entry, "# Main\n\n{{#include inc.md}}\n\nEnd.")?;
        assert_eq!(flatten_markdown(&entry)?, "# Main\n\nThis is included.\n\nEnd.");
        Ok(())
    }
    #[test]
    fn test_flatten_nested_includes() -> Result<(), MDPreProcessError> {
        let tmp = tempdir()?;
        let entry = tmp.path().join("README.md");
        let mid = tmp.path().join("sub.md");
        let leaf = tmp.path().join("deep.md");
        write_file(&leaf, "Deep content.")?;
        write_file(&mid, "Subhead\n\n{{#include deep.md}}")?;
        write_file(&entry, "# Root\n\n{{#include sub.md}}\n\nEnd.")?;
        assert_eq!(
            flatten_markdown(&entry)?,
            "# Root\n\nSubhead\n\nDeep content.\n\nEnd."
        );
        Ok(())
    }
    #[test]
    fn test_flatten_multiple_includes() -> Result<(), MDPreProcessError> {
        let tmp = tempdir()?;
        let entry = tmp.path().join("README.md");
        let first = tmp.path().join("a.md");
        let second = tmp.path().join("b.md");
        write_file(&first, "Alpha!")?;
        write_file(&second, "Bravo!")?;
        write_file(
            &entry,
            "# Combo\n\n{{#include a.md}}\n\n{{#include b.md}}\nDone.",
        )?;
        assert_eq!(flatten_markdown(&entry)?, "# Combo\n\nAlpha!\n\nBravo!\nDone.");
        Ok(())
    }
    #[test]
    fn test_flatten_missing_include() -> Result<(), MDPreProcessError> {
        let tmp = tempdir()?;
        let entry = tmp.path().join("README.md");
        write_file(&entry, "# Main\n\n{{#include missing.md}}\nEnd.")?;
        // Either error is acceptable depending on where resolution fails.
        assert!(matches!(
            flatten_markdown(&entry),
            Err(MDPreProcessError::Canonicalize(_)) | Err(MDPreProcessError::MissingInclude(_))
        ));
        Ok(())
    }
    #[test]
    fn test_cycle_detection() -> Result<(), MDPreProcessError> {
        let tmp = tempdir()?;
        let first = tmp.path().join("a.md");
        let second = tmp.path().join("b.md");
        write_file(&first, "A here\n{{#include b.md}}")?;
        write_file(&second, "B here\n{{#include a.md}}")?;
        assert!(matches!(
            flatten_markdown(&first),
            Err(MDPreProcessError::Cycle)
        ));
        Ok(())
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-lsp/src/main.rs | forc-plugins/forc-lsp/src/main.rs | //! A simple `forc` plugin for starting the sway language server.
//!
//! Once installed and available via `PATH`, can be executed via `forc lsp`.
#![recursion_limit = "256"]
// Use Jemalloc for main binary
// (swaps the default system allocator for the whole process).
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
use clap::Parser;
/// CLI definition. The plugin takes no arguments; clap is used only so that
/// `--help`/`--version` behave consistently with other forc plugins.
#[derive(Debug, Parser)]
#[clap(
    name = "forc-lsp",
    about = "Forc plugin for the Sway LSP (Language Server Protocol) implementation",
    version
)]
struct App {}
#[tokio::main]
async fn main() {
    // Parse only to honor `--help`/`--version`; there are no other flags.
    App::parse();
    sway_lsp::start().await
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-tx/src/lib.rs | forc-plugins/forc-tx/src/lib.rs | //! A simple tool for constructing transactions from the command line.
use clap::{Args, Parser};
use devault::Devault;
use forc_util::tx_utils::Salt;
use fuel_tx::{
output,
policies::{Policies, PolicyType},
Buildable, Chargeable, ConsensusParameters,
};
use fuels_core::types::transaction::TxPolicies;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use thiserror::Error;
// `--help` examples for `forc tx`, validated against the custom
// `Command::try_parse_from_args` entry point.
// NOTE(review): the "Multiple inputs" example uses `{name}/out/debug/name.bin`
// while every other example uses `{path}/...` — confirm whether `{name}` is a
// valid placeholder for `cli_examples!` or a typo.
forc_util::cli_examples! {
    {
        // This parser has a custom parser
        super::Command::try_parse_from_args
    } {
        [ Script example => r#"forc tx script --bytecode "{path}/out/debug/name.bin" --data "{path}/data.bin" \
        --receipts-root 0x2222222222222222222222222222222222222222222222222222222222222222"# ]
        [ Multiple inputs => r#"forc tx create --bytecode "{name}/out/debug/name.bin"
        --storage-slots "{path}/out/debug/name-storage_slots.json"
        --script-gas-limit 100 \
        --gas-price 0 \
        --maturity 0 \
        --witness ADFD \
        --witness DFDA \
        input coin \
        --utxo-id 0 \
        --output-ix 0 \
        --owner 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --tx-ptr 89ACBDEFBDEF \
        --witness-ix 0 \
        --maturity 0 \
        input contract \
        --utxo-id 1 \
        --output-ix 1 \
        --balance-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --state-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --tx-ptr 89ACBDEFBDEF \
        --contract-id 0xCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC \
        output coin \
        --to 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output contract \
        --input-ix 1 \
        --balance-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --state-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output change \
        --to 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output variable \
        --to 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output contract-created \
        --contract-id 0xCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC \
        --state-root 0x0000000000000000000000000000000000000000000000000000000000000000
        "#
        ]
        [ An example constructing a create transaction => r#"forc tx create \
        --bytecode {path}/out/debug/name.bin \
        --storage-slots {path}/out/debug/name-storage_slots.json \
        --script-gas-limit 100 \
        --gas-price 0 \
        --maturity 0 \
        --witness ADFD \
        --witness DFDA \
        input coin \
        --utxo-id 0 \
        --output-ix 0 \
        --owner 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --tx-ptr 89ACBDEFBDEF \
        --witness-ix 0 \
        --maturity 0 \
        input contract \
        --utxo-id 1 \
        --output-ix 1 \
        --balance-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --state-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --tx-ptr 89ACBDEFBDEF \
        --contract-id 0xCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC \
        input message \
        --sender 0x1111111111111111111111111111111111111111111111111111111111111111 \
        --recipient 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 1 \
        --nonce 0xBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB \
        --msg-data {path}/message.dat \
        --predicate {path}/my-predicate2.bin \
        --predicate-data {path}/my-predicate2.dat \
        output coin \
        --to 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output contract \
        --input-ix 1 \
        --balance-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        --state-root 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output change \
        --to 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output variable \
        --to 0x2222222222222222222222222222222222222222222222222222222222222222 \
        --amount 100 \
        --asset-id 0x0000000000000000000000000000000000000000000000000000000000000000 \
        output contract-created \
        --contract-id 0xCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC \
        --state-root 0x0000000000000000000000000000000000000000000000000000000000000000"#
        ]
    }
}
/// The top-level `forc tx` command.
#[derive(Debug, Parser, Deserialize, Serialize)]
#[clap(about, version, after_help = help())]
pub struct Command {
    /// Where to write the JSON-encoded transaction; stdout when omitted.
    #[clap(long, short = 'o')]
    pub output_path: Option<PathBuf>,
    /// The transaction kind (`create` or `script`) plus its arguments.
    #[clap(subcommand)]
    pub tx: Transaction,
}
/// Construct a transaction.
#[derive(Debug, Parser, Deserialize, Serialize)]
#[clap(name = "transaction")]
pub enum Transaction {
    /// Deploy a contract.
    Create(Create),
    /// Run a script.
    Script(Script),
}
/// Construct a `Create` transaction for deploying a contract.
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct Create {
    // Shared gas-related flags (`--gas-price`, `--script-gas-limit`, ...).
    #[clap(flatten)]
    pub gas: Gas,
    // Shared `--maturity` flag.
    #[clap(flatten)]
    pub maturity: Maturity,
    // Shared `--salt` flag (from `forc_util`).
    #[clap(flatten)]
    pub salt: Salt,
    /// Path to the contract bytecode.
    #[clap(long)]
    pub bytecode: PathBuf,
    /// Witness index of contract bytecode to create.
    #[clap(long, default_value_t = 0)]
    pub bytecode_witness_index: u16,
    /// Path to a JSON file with a list of storage slots to initialize (key, value).
    #[clap(long)]
    pub storage_slots: PathBuf,
    /// An arbitrary length string of hex-encoded bytes (e.g. "1F2E3D4C5B6A")
    ///
    /// Can be specified multiple times.
    // NOTE(review): `num_args(0..255)` here vs `num_args(0..=255)` on
    // `Script` below — the two bounds likely should match; confirm.
    #[clap(long = "witness", num_args(0..255))]
    pub witnesses: Vec<String>,
    // Inputs and outputs must follow all other arguments and are parsed separately.
    #[clap(skip)]
    pub inputs: Vec<Input>,
    // Inputs and outputs must follow all other arguments and are parsed separately.
    #[clap(skip)]
    pub outputs: Vec<Output>,
}
/// Construct a `Script` transaction for running a script.
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct Script {
    // Shared gas-related flags (`--gas-price`, `--script-gas-limit`, ...).
    #[clap(flatten)]
    pub gas: Gas,
    // Shared `--maturity` flag.
    #[clap(flatten)]
    pub maturity: Maturity,
    /// Script to execute.
    #[clap(long)]
    pub bytecode: PathBuf,
    /// Script input data (parameters). Specified file is loaded as raw bytes.
    #[clap(long)]
    pub data: PathBuf,
    /// Merkle root of receipts.
    #[clap(long)]
    pub receipts_root: fuel_tx::Bytes32,
    /// An arbitrary length string of hex-encoded bytes (e.g. "1F2E3D4C5B6A")
    ///
    /// Can be specified multiple times.
    #[clap(long = "witness", num_args(0..=255))]
    pub witnesses: Vec<String>,
    // Inputs and outputs must follow all other arguments and are parsed separately.
    #[clap(skip)]
    pub inputs: Vec<Input>,
    // Inputs and outputs must follow all other arguments and are parsed separately.
    #[clap(skip)]
    pub outputs: Vec<Output>,
}
/// Flag set for specifying gas price and limit.
#[derive(Debug, Devault, Clone, Parser, Deserialize, Serialize)]
pub struct Gas {
    /// Gas price for the transaction.
    // NOTE(review): downstream conversion feeds this into the `Tip` policy —
    // see `TryFrom<Create>`/`TryFrom<Script>`; confirm the mapping is intended.
    #[clap(long = "gas-price")]
    pub price: Option<u64>,
    /// Gas limit for the transaction.
    #[clap(long = "script-gas-limit")]
    pub script_gas_limit: Option<u64>,
    /// Max fee for the transaction.
    #[clap(long)]
    pub max_fee: Option<u64>,
    /// The tip for the transaction.
    #[clap(long)]
    pub tip: Option<u64>,
}
/// Block until which tx cannot be included.
#[derive(Debug, Args, Default, Deserialize, Serialize)]
pub struct Maturity {
    /// Block height until which tx cannot be included.
    #[clap(long = "maturity", default_value_t = 0)]
    pub maturity: u32,
}
/// Transaction input.
#[derive(Debug, Parser, Deserialize, Serialize)]
#[clap(name = "input")]
pub enum Input {
    /// Spend a coin UTXO.
    Coin(InputCoin),
    /// Reference a contract's state.
    Contract(InputContract),
    /// Spend a bridged message.
    Message(InputMessage),
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct InputCoin {
    /// Hash of the unspent transaction.
    #[clap(long)]
    pub utxo_id: fuel_tx::UtxoId,
    /// Index of transaction output.
    #[clap(long)]
    pub output_ix: u8,
    /// Owning address or predicate root.
    #[clap(long)]
    pub owner: fuel_tx::Address,
    /// Amount of coins.
    #[clap(long)]
    pub amount: u64,
    /// Asset ID of the coins.
    #[clap(long)]
    pub asset_id: fuel_tx::AssetId,
    /// Points to the TX whose output is being spent. Includes block height, tx index.
    #[clap(long)]
    pub tx_ptr: fuel_tx::TxPointer,
    /// Index of witness that authorizes spending the coin.
    // Mutually exclusive with `--predicate`/`--predicate-data`; enforced in
    // `TryFrom<Input>`.
    #[clap(long)]
    pub witness_ix: Option<u16>,
    /// UTXO being spent must have been created at least this many blocks ago.
    #[clap(long)]
    pub maturity: u32,
    /// Gas used by predicates.
    #[clap(long, default_value_t = 0)]
    pub predicate_gas_used: u64,
    // Optional `--predicate`/`--predicate-data` pair.
    #[clap(flatten)]
    pub predicate: Predicate,
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct InputContract {
    /// Hash of the unspent transaction.
    #[clap(long)]
    pub utxo_id: fuel_tx::UtxoId,
    /// Index of transaction output.
    #[clap(long)]
    pub output_ix: u8,
    /// Root of the amount of coins owned by the contract before transaction execution.
    #[clap(long)]
    pub balance_root: fuel_tx::Bytes32,
    /// State root of contract before transaction execution.
    #[clap(long)]
    pub state_root: fuel_tx::Bytes32,
    /// Points to the TX whose output is being spent. Includes block height, tx index.
    #[clap(long)]
    pub tx_ptr: fuel_tx::TxPointer,
    /// The ID of the contract.
    #[clap(long)]
    pub contract_id: fuel_tx::ContractId,
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct InputMessage {
    /// The address of the message sender.
    #[clap(long)]
    pub sender: fuel_tx::Address,
    /// The address or predicate root of the message recipient.
    #[clap(long)]
    pub recipient: fuel_tx::Address,
    /// Amount of base asset coins sent with message.
    #[clap(long)]
    pub amount: u64,
    /// The message nonce.
    #[clap(long)]
    pub nonce: fuel_types::Nonce,
    /// The message data.
    #[clap(long)]
    pub msg_data: PathBuf,
    /// Index of witness that authorizes the message.
    // Mutually exclusive with `--predicate`/`--predicate-data`; enforced in
    // `TryFrom<Input>`.
    #[clap(long)]
    pub witness_ix: Option<u16>,
    /// Gas used by predicates.
    #[clap(long, default_value_t = 0)]
    pub predicate_gas_used: u64,
    // Optional `--predicate`/`--predicate-data` pair.
    #[clap(flatten)]
    pub predicate: Predicate,
}
/// Grouped arguments related to an input's predicate.
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct Predicate {
    /// The predicate bytecode.
    #[clap(long = "predicate")]
    pub bytecode: Option<PathBuf>,
    /// The predicate's input data (parameters). Specified file is loaded as raw bytes.
    #[clap(long = "predicate-data")]
    pub data: Option<PathBuf>,
}
/// The location of the transaction in the block.
// NOTE(review): this CLI-side `TxPointer` appears unused in this file —
// inputs parse `fuel_tx::TxPointer` directly via `--tx-ptr`; confirm before
// removing.
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct TxPointer {
    /// The transaction block height.
    #[clap(long = "tx-ptr-block-height")]
    pub block_height: u32,
    /// Transaction index.
    #[clap(long = "tx-ptr-ix")]
    pub tx_ix: u16,
}
/// Transaction output.
#[derive(Debug, Parser, Deserialize, Serialize)]
#[clap(name = "output")]
pub enum Output {
    /// Send coins to an address.
    Coin(OutputCoin),
    /// Updated state of an input contract.
    Contract(OutputContract),
    /// Return unspent change to an address.
    Change(OutputChange),
    /// Output whose amount is decided at runtime.
    Variable(OutputVariable),
    /// Record a newly deployed contract.
    ContractCreated(OutputContractCreated),
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct OutputCoin {
    /// Hash of the unspent transaction.
    #[clap(long)]
    pub to: fuel_tx::Address,
    /// Amount of coins.
    #[clap(long)]
    pub amount: fuel_tx::Word,
    /// Asset ID of the coins.
    #[clap(long)]
    pub asset_id: fuel_tx::AssetId,
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct OutputContract {
    /// Index of input contract.
    #[clap(long)]
    pub input_ix: u16,
    /// Root of amount of coins owned by contract after transaction execution.
    #[clap(long)]
    pub balance_root: fuel_tx::Bytes32,
    /// State root of contract after transaction execution.
    #[clap(long)]
    pub state_root: fuel_tx::Bytes32,
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct OutputChange {
    /// Receiving address or predicate root.
    #[clap(long)]
    pub to: fuel_tx::Address,
    /// Amount of coins to send.
    #[clap(long)]
    pub amount: fuel_tx::Word,
    /// Asset ID of coins.
    #[clap(long)]
    pub asset_id: fuel_tx::AssetId,
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct OutputVariable {
    /// Receiving address or predicate root.
    #[clap(long)]
    pub to: fuel_tx::Address,
    /// Amount of coins to send.
    #[clap(long)]
    pub amount: fuel_tx::Word,
    /// Asset ID of coins.
    #[clap(long)]
    pub asset_id: fuel_tx::AssetId,
}
#[derive(Debug, Parser, Deserialize, Serialize)]
pub struct OutputContractCreated {
    /// Contract ID
    #[clap(long)]
    pub contract_id: fuel_tx::ContractId,
    /// Initial state root of contract.
    #[clap(long)]
    pub state_root: fuel_tx::Bytes32,
}
/// Errors that can occur while parsing the `Command`.
#[derive(Debug, Error)]
pub enum ParseError {
    #[error("Failed to parse the command")]
    Command {
        #[source]
        err: clap::Error,
    },
    #[error("Failed to parse transaction `input`")]
    Input {
        #[source]
        err: clap::Error,
    },
    #[error("Failed to parse transaction `output`")]
    Output {
        #[source]
        err: clap::Error,
    },
    /// Trailing args began with something other than `input`/`output`.
    #[error("Unrecognized argument {arg:?}, expected `input` or `output`")]
    UnrecognizedArgumentExpectedInputOutput { arg: String, remaining: Vec<String> },
    #[error("Found argument `input` which isn't valid for a mint transaction")]
    MintTxHasInput,
}
/// Errors that can occur during conversion from the CLI transaction
/// representation to the `fuel-tx` representation.
#[derive(Debug, Error)]
pub enum ConvertTxError {
    #[error("failed to convert create transaction")]
    Create(#[from] ConvertCreateTxError),
    #[error("failed to convert script transaction")]
    Script(#[from] ConvertScriptTxError),
}
/// Errors that can occur during "create" transaction conversion.
#[derive(Debug, Error)]
pub enum ConvertCreateTxError {
    #[error("failed to open `--storage-slots` from {path:?}")]
    StorageSlotsOpen {
        path: PathBuf,
        #[source]
        err: std::io::Error,
    },
    #[error("failed to deserialize storage slots file")]
    StorageSlotsDeserialize(#[source] serde_json::Error),
    #[error("failed to convert an input")]
    Input(#[from] ConvertInputError),
}
/// Errors that can occur during "script" transaction conversion.
#[derive(Debug, Error)]
pub enum ConvertScriptTxError {
    #[error("failed to read `--bytecode` from {path:?}")]
    BytecodeRead {
        path: PathBuf,
        #[source]
        err: std::io::Error,
    },
    #[error("failed to read `--data` from {path:?}")]
    DataRead {
        path: PathBuf,
        #[source]
        err: std::io::Error,
    },
    #[error("failed to convert an input")]
    Input(#[from] ConvertInputError),
}
/// Errors that can occur during transaction input conversion.
#[derive(Debug, Error)]
pub enum ConvertInputError {
    #[error("failed to read `--msg-data` from {path:?}")]
    MessageDataRead {
        path: PathBuf,
        #[source]
        err: std::io::Error,
    },
    #[error("failed to read `--predicate` from {path:?}")]
    PredicateRead {
        path: PathBuf,
        #[source]
        err: std::io::Error,
    },
    #[error("failed to read `--predicate-data` from {path:?}")]
    PredicateDataRead {
        path: PathBuf,
        #[source]
        err: std::io::Error,
    },
    /// Exactly one of `--witness-ix` or the predicate pair must be given.
    #[error("input accepts either witness index or predicate, not both")]
    WitnessPredicateMismatch,
}
impl ParseError {
    /// Print the error with clap's fancy formatting.
    pub fn print(&self) -> Result<(), clap::Error> {
        match self {
            // clap errors already carry their own formatted output.
            ParseError::Command { err }
            | ParseError::Input { err }
            | ParseError::Output { err } => err.print()?,
            ParseError::UnrecognizedArgumentExpectedInputOutput { .. } => {
                use clap::CommandFactory;
                // Create a type as a hack to produce consistent-looking clap help output.
                #[derive(Parser)]
                enum ForcTxIo {
                    #[clap(subcommand)]
                    Input(Input),
                    #[clap(subcommand)]
                    Output(Output),
                }
                println!("{self}\n");
                ForcTxIo::command().print_long_help()?;
            }
            ParseError::MintTxHasInput => println!("{self}"),
        }
        Ok(())
    }
}
impl Command {
    /// Emulates `clap::Parser::parse` behaviour, but returns the parsed inputs and outputs.
    ///
    /// If parsing fails, prints the error along with the help output and exits with an error code.
    ///
    /// We provide this custom `parse` function solely due to clap's limitations around parsing
    /// trailing subcommands.
    pub fn parse() -> Self {
        let err = match Self::try_parse() {
            Err(err) => err,
            Ok(cmd) => return cmd,
        };
        // Best-effort error output; exit non-zero regardless.
        let _ = err.print();
        std::process::exit(1);
    }
    /// Parse a full `Transaction` including trailing inputs and outputs.
    pub fn try_parse() -> Result<Self, ParseError> {
        Self::try_parse_from_args(std::env::args())
    }
    /// Parse a full `Transaction` including trailing inputs and outputs from an iterator yielding
    /// whitespace-separate string arguments.
    pub fn try_parse_from_args(args: impl IntoIterator<Item = String>) -> Result<Self, ParseError> {
        const INPUT: &str = "input";
        const OUTPUT: &str = "output";
        // `input`/`output` act as sentinels splitting the raw argument
        // stream into per-subcommand chunks.
        fn is_input_or_output(s: &str) -> bool {
            s == INPUT || s == OUTPUT
        }
        // Returns `Result` even though both current tx kinds accept inputs —
        // presumably so a future kind (cf. `ParseError::MintTxHasInput`)
        // can reject them here; confirm before simplifying.
        fn push_input(cmd: &mut Transaction, input: Input) -> Result<(), ParseError> {
            match cmd {
                Transaction::Create(ref mut create) => create.inputs.push(input),
                Transaction::Script(ref mut script) => script.inputs.push(input),
            }
            Ok(())
        }
        fn push_output(cmd: &mut Transaction, output: Output) {
            match cmd {
                Transaction::Create(ref mut create) => create.outputs.push(output),
                Transaction::Script(ref mut script) => script.outputs.push(output),
            }
        }
        let mut args = args.into_iter().peekable();
        // Collect args until the first `input` or `output` is reached.
        let mut cmd = {
            let cmd_args = std::iter::from_fn(|| args.next_if(|s| !is_input_or_output(s)));
            Command::try_parse_from(cmd_args).map_err(|err| ParseError::Command { err })?
        };
        // The remaining args (if any) are the inputs and outputs.
        while let Some(arg) = args.next() {
            // One chunk: the sentinel itself plus everything up to (but not
            // including) the next sentinel.
            let args_til_next = std::iter::once(arg.clone()).chain(std::iter::from_fn(|| {
                args.next_if(|s| !is_input_or_output(s))
            }));
            match &arg[..] {
                INPUT => {
                    let input = Input::try_parse_from(args_til_next)
                        .map_err(|err| ParseError::Input { err })?;
                    push_input(&mut cmd.tx, input)?
                }
                OUTPUT => {
                    let output = Output::try_parse_from(args_til_next)
                        .map_err(|err| ParseError::Output { err })?;
                    push_output(&mut cmd.tx, output)
                }
                arg => {
                    return Err(ParseError::UnrecognizedArgumentExpectedInputOutput {
                        arg: arg.to_string(),
                        remaining: args.collect(),
                    })
                }
            }
        }
        // If there are args remaining, report them.
        // NOTE(review): unreachable — the `while let` loop above only exits
        // once `args` is exhausted, so `peek()` is always `None` here.
        if args.peek().is_some() {
            return Err(ParseError::UnrecognizedArgumentExpectedInputOutput {
                arg: args.peek().unwrap().to_string(),
                remaining: args.collect(),
            });
        }
        Ok(cmd)
    }
}
impl TryFrom<Transaction> for fuel_tx::Transaction {
    type Error = ConvertTxError;
    /// Dispatches to the variant-specific conversion and wraps the result in
    /// the matching `fuel_tx::Transaction` variant.
    fn try_from(tx: Transaction) -> Result<Self, Self::Error> {
        match tx {
            Transaction::Create(create) => Ok(Self::Create(fuel_tx::Create::try_from(create)?)),
            Transaction::Script(script) => Ok(Self::Script(fuel_tx::Script::try_from(script)?)),
        }
    }
}
impl TryFrom<Create> for fuel_tx::Create {
    type Error = ConvertCreateTxError;
    fn try_from(create: Create) -> Result<Self, Self::Error> {
        // Storage slots are loaded from the JSON file given via `--storage-slots`.
        let storage_slots = {
            let file = std::fs::File::open(&create.storage_slots).map_err(|err| {
                ConvertCreateTxError::StorageSlotsOpen {
                    path: create.storage_slots,
                    err,
                }
            })?;
            let reader = std::io::BufReader::new(file);
            serde_json::from_reader(reader)
                .map_err(ConvertCreateTxError::StorageSlotsDeserialize)?
        };
        let inputs = create
            .inputs
            .into_iter()
            .map(fuel_tx::Input::try_from)
            .collect::<Result<Vec<_>, _>>()?;
        let outputs = create
            .outputs
            .into_iter()
            .map(fuel_tx::Output::from)
            .collect();
        // NOTE(review): each witness string is embedded as its raw UTF-8
        // bytes (`s.as_bytes()`), although the CLI help describes witnesses
        // as hex-encoded — confirm whether hex-decoding was intended.
        let witnesses = create
            .witnesses
            .into_iter()
            .map(|s| fuel_tx::Witness::from(s.as_bytes()))
            .collect();
        // A zero `--maturity` means "no maturity policy".
        let maturity = (create.maturity.maturity != 0).then_some(create.maturity.maturity.into());
        let mut policies = Policies::default();
        // NOTE(review): `--gas-price` is fed into the `Tip` policy — confirm
        // this mapping is intentional.
        policies.set(PolicyType::Tip, create.gas.price);
        policies.set(PolicyType::Maturity, maturity);
        let create = fuel_tx::Transaction::create(
            create.bytecode_witness_index,
            policies,
            create.salt.salt.unwrap_or_default(),
            storage_slots,
            inputs,
            outputs,
            witnesses,
        );
        Ok(create)
    }
}
impl TryFrom<Script> for fuel_tx::Script {
    type Error = ConvertScriptTxError;
    fn try_from(script: Script) -> Result<Self, Self::Error> {
        // Script bytecode and its input data are both loaded as raw bytes.
        let script_bytecode =
            std::fs::read(&script.bytecode).map_err(|err| ConvertScriptTxError::BytecodeRead {
                path: script.bytecode,
                err,
            })?;
        let script_data =
            std::fs::read(&script.data).map_err(|err| ConvertScriptTxError::DataRead {
                path: script.data,
                err,
            })?;
        let inputs = script
            .inputs
            .into_iter()
            .map(fuel_tx::Input::try_from)
            .collect::<Result<Vec<_>, _>>()?;
        let outputs = script
            .outputs
            .into_iter()
            .map(fuel_tx::Output::from)
            .collect();
        // NOTE(review): witness strings are embedded as raw UTF-8 bytes
        // (`s.as_bytes()`) despite the "hex-encoded" help text — confirm.
        let witnesses = script
            .witnesses
            .into_iter()
            .map(|s| fuel_tx::Witness::from(s.as_bytes()))
            .collect();
        // Unlike `Create`, maturity is always set here, even when zero.
        let mut policies = Policies::default().with_maturity(script.maturity.maturity.into());
        // NOTE(review): `--gas-price` is fed into the `Tip` policy — confirm.
        policies.set(PolicyType::Tip, script.gas.price);
        let mut script_tx = fuel_tx::Transaction::script(
            0, // Temporary value. Will be replaced below
            script_bytecode,
            script_data,
            policies,
            inputs,
            outputs,
            witnesses,
        );
        if let Some(script_gas_limit) = script.gas.script_gas_limit {
            script_tx.set_script_gas_limit(script_gas_limit)
        } else {
            let consensus_params = ConsensusParameters::default();
            // Get `max_gas` used by everything except the script execution. Add `1` because of rounding.
            let max_gas =
                script_tx.max_gas(consensus_params.gas_costs(), consensus_params.fee_params()) + 1;
            // Increase `script_gas_limit` to the maximum allowed value.
            script_tx.set_script_gas_limit(consensus_params.tx_params().max_gas_per_tx() - max_gas);
        }
        Ok(script_tx)
    }
}
impl TryFrom<Input> for fuel_tx::Input {
    type Error = ConvertInputError;
    /// Converts a CLI input into the `fuel-tx` input, choosing the signed vs.
    /// predicate flavor from which of `--witness-ix` / `--predicate` +
    /// `--predicate-data` was supplied (exactly one side must be present).
    fn try_from(input: Input) -> Result<Self, Self::Error> {
        let input = match input {
            Input::Coin(coin) => {
                let InputCoin {
                    utxo_id,
                    // TODO: Should this be verified / checked in some way?
                    output_ix: _,
                    owner,
                    amount,
                    asset_id,
                    tx_ptr: tx_pointer,
                    maturity: _,
                    predicate_gas_used,
                    predicate,
                    witness_ix,
                } = coin;
                // Witness index and predicate are mutually exclusive.
                match (witness_ix, predicate.bytecode, predicate.data) {
                    (Some(witness_index), None, None) => fuel_tx::Input::coin_signed(
                        utxo_id,
                        owner,
                        amount,
                        asset_id,
                        tx_pointer,
                        witness_index,
                    ),
                    (None, Some(predicate), Some(predicate_data)) => {
                        fuel_tx::Input::coin_predicate(
                            utxo_id,
                            owner,
                            amount,
                            asset_id,
                            tx_pointer,
                            predicate_gas_used,
                            std::fs::read(&predicate).map_err(|err| {
                                ConvertInputError::PredicateRead {
                                    path: predicate,
                                    err,
                                }
                            })?,
                            std::fs::read(&predicate_data).map_err(|err| {
                                ConvertInputError::PredicateDataRead {
                                    path: predicate_data,
                                    err,
                                }
                            })?,
                        )
                    }
                    _ => return Err(ConvertInputError::WitnessPredicateMismatch),
                }
            }
            Input::Contract(contract) => fuel_tx::Input::contract(
                contract.utxo_id,
                contract.balance_root,
                contract.state_root,
                contract.tx_ptr,
                contract.contract_id,
            ),
            Input::Message(msg) => {
                let InputMessage {
                    sender,
                    recipient,
                    amount,
                    nonce,
                    msg_data,
                    witness_ix,
                    predicate_gas_used,
                    predicate,
                } = msg;
                let data =
                    std::fs::read(&msg_data).map_err(|err| ConvertInputError::MessageDataRead {
                        path: msg_data,
                        err,
                    })?;
                // Same exclusivity rule as coins; additionally, an empty
                // `--msg-data` file selects the coin (data-less) variants.
                match (witness_ix, predicate.bytecode, predicate.data) {
                    (Some(witness_index), None, None) => {
                        if data.is_empty() {
                            fuel_tx::Input::message_coin_signed(
                                sender,
                                recipient,
                                amount,
                                nonce,
                                witness_index,
                            )
                        } else {
                            fuel_tx::Input::message_data_signed(
                                sender,
                                recipient,
                                amount,
                                nonce,
                                witness_index,
                                data,
                            )
                        }
                    }
                    (None, Some(predicate), Some(predicate_data)) => {
                        let predicate = std::fs::read(&predicate).map_err(|err| {
                            ConvertInputError::PredicateRead {
                                path: predicate,
                                err,
                            }
                        })?;
                        let predicate_data = std::fs::read(&predicate_data).map_err(|err| {
                            ConvertInputError::PredicateDataRead {
                                path: predicate_data,
                                err,
                            }
                        })?;
                        if data.is_empty() {
                            fuel_tx::Input::message_coin_predicate(
                                sender,
                                recipient,
                                amount,
                                nonce,
                                predicate_gas_used,
                                predicate,
                                predicate_data,
                            )
                        } else {
                            fuel_tx::Input::message_data_predicate(
                                sender,
                                recipient,
                                amount,
                                nonce,
                                predicate_gas_used,
                                data,
                                predicate,
                                predicate_data,
                            )
                        }
                    }
                    _ => return Err(ConvertInputError::WitnessPredicateMismatch),
                }
            }
        };
        Ok(input)
    }
}
impl From<Output> for fuel_tx::Output {
fn from(output: Output) -> Self {
match output {
Output::Coin(coin) => fuel_tx::Output::Coin {
to: coin.to,
amount: coin.amount,
asset_id: coin.asset_id,
},
Output::Contract(contract) => fuel_tx::Output::Contract(output::contract::Contract {
input_index: contract.input_ix,
balance_root: contract.balance_root,
state_root: contract.state_root,
}),
Output::Change(change) => fuel_tx::Output::Change {
to: change.to,
amount: change.amount,
asset_id: change.asset_id,
},
Output::Variable(variable) => fuel_tx::Output::Variable {
to: variable.to,
amount: variable.amount,
asset_id: variable.asset_id,
},
Output::ContractCreated(contract_created) => fuel_tx::Output::ContractCreated {
contract_id: contract_created.contract_id,
state_root: contract_created.state_root,
},
}
}
}
impl From<&Gas> for TxPolicies {
fn from(gas: &Gas) -> Self {
let mut policies = TxPolicies::default();
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-tx/src/main.rs | forc-plugins/forc-tx/src/main.rs | fn main() -> anyhow::Result<()> {
    // Parse the plugin's CLI arguments.
    let cmd = forc_tx::Command::parse();
    // Convert the parsed CLI fields into a `fuel_tx::Transaction`; `?`
    // propagates conversion errors (e.g. inconsistent input combinations).
    let tx = fuel_tx::Transaction::try_from(cmd.tx)?;
    match cmd.output_path {
        None => {
            // No output path given: pretty-print the JSON transaction to stdout.
            let string = serde_json::to_string_pretty(&tx)?;
            println!("{string}");
        }
        Some(path) => {
            // Write the pretty-printed JSON to the requested file, buffered to
            // avoid a syscall per serialized fragment.
            let file = std::fs::File::create(path)?;
            let writer = std::io::BufWriter::new(file);
            serde_json::to_writer_pretty(writer, &tx)?;
        }
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/consts.rs | forc-plugins/forc-node/src/consts.rs | /// Minimum fuel-core version supported.
pub const MIN_FUEL_CORE_VERSION: &str = "0.43.0";
// Lower bound enforced on the process's open-file-descriptor limit before
// starting a node (see `util::check_open_fds_limit`).
pub const MINIMUM_OPEN_FILE_DESCRIPTOR_LIMIT: u64 = 51200;
// --- Testnet (Sepolia) node parameters ---
pub const TESTNET_SERVICE_NAME: &str = "fuel-sepolia-testnet-node";
pub const TESTNET_SYNC_HEADER_BATCH_SIZE: u32 = 100;
// Ethereum address the relayer listens to on Sepolia — presumably the bridge
// contract; confirm against the deployed contract before changing.
pub const TESTNET_RELAYER_LISTENING_CONTRACT: &str = "0x01855B78C1f8868DE70e84507ec735983bf262dA";
// DA-layer block height at which the listening contract was deployed.
pub const TESTNET_RELAYER_DA_DEPLOY_HEIGHT: u32 = 5827607;
pub const TESTNET_RELAYER_LOG_PAGE_SIZE: u32 = 500;
pub const TESTNET_SYNC_BLOCK_STREAM_BUFFER_SIZE: u32 = 30;
// Multiaddrs of the well-known bootstrap nodes for each network.
pub const TESTNET_BOOTSTRAP_NODE: &str = "/dnsaddr/testnet.fuel.network.";
pub const MAINNET_BOOTSTRAP_NODE: &str = "/dnsaddr/mainnet.fuel.network.";
// --- Mainnet (Ignition) node parameters ---
pub const MAINNET_SERVICE_NAME: &str = "fuel-mainnet-node";
pub const MAINNET_SYNC_HEADER_BATCH_SIZE: u32 = 30;
pub const MAINNET_RELAYER_LISTENING_CONTRACT: &str = "0xAEB0c00D0125A8a788956ade4f4F12Ead9f65DDf";
pub const MAINNET_RELAYER_DA_DEPLOY_HEIGHT: u32 = 20620434;
pub const MAINNET_RELAYER_LOG_PAGE_SIZE: u32 = 100;
/// Name of the folder for testnet at the configuration repo:
/// https://github.com/fuelLabs/chain-configuration/
/// And name of the db path if persistent db is used.
pub const TESTNET_CONFIG_FOLDER_NAME: &str = "ignition-test";
/// Name of the folder for ignition mainnet at the configuration repo:
/// https://github.com/fuelLabs/chain-configuration/
/// And name of the db path if persistent db is used.
pub const IGNITION_CONFIG_FOLDER_NAME: &str = "ignition";
/// Name of the folder for local configuration repo:
/// And name of the db path if persistent db is used.
pub const LOCAL_CONFIG_FOLDER_NAME: &str = "local";
/// Name of the github repository that hosts chain-configurations.
pub const CHAIN_CONFIG_REPO_NAME: &str = "chain-configuration";
// Default GraphQL service port and p2p peering port for the spawned node.
pub const DEFAULT_PORT: u16 = 4000;
pub const DEFAULT_PEERING_PORT: u16 = 30333;
// Subfolders of the user's forc directory for chain specs and databases.
pub const CONFIG_FOLDER: &str = "chainspecs";
pub const DB_FOLDER: &str = "db";
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/run_opts.rs | forc-plugins/forc-node/src/run_opts.rs | use std::{fmt::Display, path::PathBuf};
/// Possible parameters to set while integrating with `fuel-core run`.
///
/// `Option`/`bool` fields are only emitted as CLI arguments when set; the
/// lowering into argv happens in `RunOpts::generate_params`.
#[derive(Debug, Default)]
pub struct RunOpts {
    /// Name of this node's service, forwarded as `--service-name`.
    pub(crate) service_name: Option<String>,
    /// DB type, possible options are: `["in-memory", "rocksdb"]`.
    pub(crate) db_type: DbType,
    /// Should be used for local development only. Enabling debug mode:
    /// - Allows GraphQL Endpoints to arbitrarily advance blocks.
    /// - Enables debugger GraphQL Endpoints.
    /// - Allows setting `utxo_validation` to `false`.
    pub(crate) debug: bool,
    /// Snapshot from which to do (re)genesis.
    pub(crate) snapshot: PathBuf,
    /// Peering private key from generated key-pair.
    pub(crate) keypair: Option<String>,
    /// Ethereum RPC endpoint.
    pub(crate) relayer: Option<String>,
    /// The IP address to bind the GraphQL service to.
    pub(crate) ip: Option<std::net::IpAddr>,
    /// The port to bind the GraphQL service to.
    pub(crate) port: Option<u16>,
    /// p2p network's TCP port.
    pub(crate) peering_port: Option<u16>,
    /// The path to the database, only relevant if the db type is not
    /// "in-memory".
    pub(crate) db_path: Option<PathBuf>,
    /// Enable full utxo stateful validation.
    pub(crate) utxo_validation: bool,
    /// Use instant block production mode.
    /// Newly submitted txs will immediately trigger the production of the next block.
    pub(crate) poa_instant: bool,
    /// Enable P2P. By default, P2P is disabled.
    pub(crate) enable_p2p: bool,
    /// Addresses of the bootstrap nodes
    /// They should contain PeerId within their `Multiaddr`.
    pub(crate) bootstrap_nodes: Option<String>,
    /// The maximum number of headers to request in a single batch.
    pub(crate) sync_header_batch_size: Option<u32>,
    /// Enable the Relayer. By default, the Relayer is disabled.
    pub(crate) enable_relayer: bool,
    /// Ethereum contract address for the relayer. Requires conversion of EthAddress into fuel_types.
    pub(crate) relayer_listener: Option<String>,
    /// Number of da block that the contract is deployed at.
    pub(crate) relayer_da_deploy_height: Option<u32>,
    /// Number of pages or blocks containing logs that
    /// should be downloaded in a single call to the da layer
    pub(crate) relayer_log_page_size: Option<u32>,
    /// The maximum number of get transaction requests to make in a single batch.
    pub(crate) sync_block_stream_buffer_size: Option<u32>,
}
/// Database backend passed to `fuel-core run` as the `--db-type` value.
#[derive(Debug)]
pub enum DbType {
    InMemory,
    RocksDb,
}
impl Default for DbType {
    /// By default fuel-core interprets lack of explicit db-type declaration as
    /// db-type = rocks-db.
    fn default() -> Self {
        DbType::RocksDb
    }
}
impl Display for DbType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Emit exactly the token fuel-core's CLI expects for each backend.
        f.write_str(match self {
            Self::InMemory => "in-memory",
            Self::RocksDb => "rocks-db",
        })
    }
}
impl RunOpts {
    /// Lowers these options into the argv fragment passed to `fuel-core run`.
    ///
    /// Each flag and its value are pushed as *separate* vector entries so the
    /// OS-level argv is parsed correctly (e.g. `"--db-type"`, `"rocks-db"`).
    ///
    /// Previously the flags were formatted into single `"--flag value"`
    /// strings and the whole vector was re-split on whitespace; that corrupted
    /// any value containing a space (e.g. a `--db-path`/`--snapshot` under a
    /// directory with spaces). Pushing pairs directly fixes that while
    /// producing identical output for space-free values.
    pub fn generate_params(self) -> Vec<String> {
        // Helper: push `--flag value` as two argv entries.
        fn push_kv(params: &mut Vec<String>, flag: &str, value: impl ToString) {
            params.push(flag.to_string());
            params.push(value.to_string());
        }
        let mut params = Vec::new();
        if let Some(service_name) = self.service_name {
            push_kv(&mut params, "--service-name", service_name);
        }
        if self.debug {
            params.push("--debug".to_string());
        }
        if let Some(keypair) = self.keypair {
            push_kv(&mut params, "--keypair", keypair);
        }
        if let Some(relayer) = self.relayer {
            push_kv(&mut params, "--relayer", relayer);
        }
        if let Some(ip) = self.ip {
            push_kv(&mut params, "--ip", ip);
        }
        if let Some(port) = self.port {
            push_kv(&mut params, "--port", port);
        }
        if let Some(peering_port) = self.peering_port {
            push_kv(&mut params, "--peering-port", peering_port);
        }
        if let Some(db_path) = self.db_path {
            push_kv(&mut params, "--db-path", db_path.display());
        }
        push_kv(&mut params, "--snapshot", self.snapshot.display());
        push_kv(&mut params, "--db-type", self.db_type);
        if self.utxo_validation {
            params.push("--utxo-validation".to_string());
        }
        // --poa-instant accepts `true` or `false` as param, and it is not a
        // flag.
        push_kv(&mut params, "--poa-instant", self.poa_instant);
        if self.enable_p2p {
            params.push("--enable-p2p".to_string());
        }
        if let Some(nodes) = self.bootstrap_nodes {
            // Callers may supply several whitespace-separated addresses; keep
            // the historical behavior of passing each as its own argument.
            params.push("--bootstrap-nodes".to_string());
            params.extend(nodes.split_whitespace().map(str::to_string));
        }
        if let Some(batch_size) = self.sync_header_batch_size {
            push_kv(&mut params, "--sync-header-batch-size", batch_size);
        }
        if self.enable_relayer {
            params.push("--enable-relayer".to_string());
        }
        if let Some(listener) = self.relayer_listener {
            push_kv(&mut params, "--relayer-v2-listening-contracts", listener);
        }
        if let Some(height) = self.relayer_da_deploy_height {
            push_kv(&mut params, "--relayer-da-deploy-height", height);
        }
        if let Some(page_size) = self.relayer_log_page_size {
            push_kv(&mut params, "--relayer-log-page-size", page_size);
        }
        if let Some(buffer_size) = self.sync_block_stream_buffer_size {
            push_kv(&mut params, "--sync-block-stream-buffer-size", buffer_size);
        }
        params
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/chain_config.rs | forc-plugins/forc-node/src/chain_config.rs | use crate::{
consts::{
CHAIN_CONFIG_REPO_NAME, CONFIG_FOLDER, IGNITION_CONFIG_FOLDER_NAME,
LOCAL_CONFIG_FOLDER_NAME, TESTNET_CONFIG_FOLDER_NAME,
},
util::ask_user_yes_no_question,
};
use anyhow::{bail, Result};
use forc_tracing::{println_action_green, println_warning};
use forc_util::user_forc_directory;
use serde::{Deserialize, Serialize};
use sha1::{Digest, Sha1};
use std::{
collections::{HashMap, HashSet},
fmt::Display,
fs,
path::PathBuf,
};
/// Different chain configuration options.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum ChainConfig {
    Local,
    Testnet,
    Ignition,
}
impl Display for ChainConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The lowercase name doubles as the user-facing label for each network.
        f.write_str(match self {
            Self::Local => "local",
            Self::Testnet => "testnet",
            Self::Ignition => "ignition",
        })
    }
}
impl From<ChainConfig> for PathBuf {
    /// Resolves the on-disk configuration directory for a network:
    /// `<forc dir>/chainspecs/<network folder>`.
    fn from(value: ChainConfig) -> Self {
        user_forc_directory().join(CONFIG_FOLDER).join(match value {
            ChainConfig::Local => LOCAL_CONFIG_FOLDER_NAME,
            ChainConfig::Testnet => TESTNET_CONFIG_FOLDER_NAME,
            ChainConfig::Ignition => IGNITION_CONFIG_FOLDER_NAME,
        })
    }
}
/// A github api, content query response.
/// Mainly used for fetching a download url and hash for configuration files.
#[derive(Serialize, Deserialize, Debug)]
struct GithubContentDetails {
    // Entry name (file or directory) as reported by the API.
    name: String,
    // Git blob SHA-1 of the entry; compared against locally computed hashes.
    sha: String,
    // Direct raw-content URL; presumably `None` for non-file entries such as
    // directories — the callers only use it when `content_type == "file"`.
    download_url: Option<String>,
    // Entry kind, e.g. "file"; renamed because `type` is a Rust keyword.
    #[serde(rename = "type")]
    content_type: String,
}
/// `ConfigFetcher` is responsible for github api integration related to the
/// configuration operations.
/// Basically checks remote hash of the corresponding chain configuration.
/// If there is a mismatch between local and remote instance, overrides the
/// local instance with remote changes for testnet and mainnet configurations.
///
/// For local chain configuration, we only check for existence of it locally.
/// If the local chain configuration is missing in user's local,
/// `ConfigFetcher` fetches it but remote updates are not tracked for it.
pub struct ConfigFetcher {
    // HTTP client reused across all API calls.
    client: reqwest::Client,
    // Root of the github API; overridable in tests to point at a mock server.
    base_url: String,
    // Local directory that stores the downloaded chain configurations.
    config_vault: PathBuf,
}
impl Default for ConfigFetcher {
    /// Creates a new fetcher to interact with github.
    /// By default user's chain configuration vault is at: `~/.forc/chainspecs`
    fn default() -> Self {
        Self {
            client: reqwest::Client::new(),
            base_url: "https://api.github.com".to_string(),
            config_vault: user_forc_directory().join(CONFIG_FOLDER),
        }
    }
}
impl ConfigFetcher {
    #[cfg(test)]
    /// Override the base url, to be used in tests.
    pub fn with_base_url(base_url: String) -> Self {
        Self {
            client: reqwest::Client::new(),
            base_url,
            config_vault: user_forc_directory().join(CONFIG_FOLDER),
        }
    }
    #[cfg(test)]
    /// Override both the base url and the local vault, to be used in tests.
    pub fn with_test_config(base_url: String, config_vault: PathBuf) -> Self {
        Self {
            client: reqwest::Client::new(),
            base_url,
            config_vault,
        }
    }
    fn get_base_url(&self) -> &str {
        &self.base_url
    }
    /// Builds the github "contents" API endpoint for the given folder of the
    /// chain-configuration repository, honoring the configured base url.
    fn build_api_endpoint(&self, folder_name: &str) -> String {
        format!(
            "{}/repos/FuelLabs/{}/contents/{}",
            self.get_base_url(),
            CHAIN_CONFIG_REPO_NAME,
            folder_name,
        )
    }
    /// Fetches contents from github to get hashes and download urls for
    /// contents of the remote configuration repo at:
    /// https://github.com/FuelLabs/chain-configuration/
    async fn check_github_files(
        &self,
        conf: &ChainConfig,
    ) -> anyhow::Result<Vec<GithubContentDetails>> {
        let folder_name = match conf {
            ChainConfig::Local => LOCAL_CONFIG_FOLDER_NAME,
            ChainConfig::Testnet => TESTNET_CONFIG_FOLDER_NAME,
            ChainConfig::Ignition => IGNITION_CONFIG_FOLDER_NAME,
        };
        let api_endpoint = self.build_api_endpoint(folder_name);
        let response = self
            .client
            .get(&api_endpoint)
            // Github's API rejects requests that carry no user agent.
            .header("User-Agent", "forc-node")
            .send()
            .await?;
        if !response.status().is_success() {
            bail!("failed to fetch updates from github")
        }
        let contents: Vec<GithubContentDetails> = response.json().await?;
        Ok(contents)
    }
    /// Calculates the hash for the local configuration instance.
    /// The hash calculation is based on github's hash calculation to match the
    /// github api response.
    ///
    /// Returns `Ok(None)` when the local folder does not exist yet.
    fn check_local_files(&self, conf: &ChainConfig) -> Result<Option<HashMap<String, String>>> {
        let folder_name = match conf {
            ChainConfig::Local => bail!("Local configuration should not be checked"),
            ChainConfig::Testnet => TESTNET_CONFIG_FOLDER_NAME,
            ChainConfig::Ignition => IGNITION_CONFIG_FOLDER_NAME,
        };
        let folder_path = self.config_vault.join(folder_name);
        if !folder_path.exists() {
            return Ok(None);
        }
        let mut files = HashMap::new();
        for entry in std::fs::read_dir(&folder_path)? {
            let entry = entry?;
            if entry.path().is_file() {
                let content = std::fs::read(entry.path())?;
                // Calculate SHA1 the same way github does for blobs:
                // sha1("blob <len>\0<content>").
                let mut hasher = Sha1::new();
                hasher.update(b"blob ");
                hasher.update(content.len().to_string().as_bytes());
                hasher.update([0]);
                hasher.update(&content);
                let sha = format!("{:x}", hasher.finalize());
                // Lossy conversion instead of panicking on a non-UTF-8 file
                // name; such a name can never match a github entry anyway.
                let name = entry.file_name().to_string_lossy().into_owned();
                files.insert(name, sha);
            }
        }
        Ok(Some(files))
    }
    /// Checks if a fetch is required by comparing the hashes of individual files
    /// of the given chain config in the local instance to the one in github by
    /// utilizing the github content api.
    pub async fn check_fetch_required(&self, conf: &ChainConfig) -> anyhow::Result<bool> {
        if *conf == ChainConfig::Local {
            return Ok(false);
        }
        let local_files = match self.check_local_files(conf)? {
            Some(files) => files,
            None => return Ok(true), // No local files, need to fetch
        };
        let github_files = self.check_github_files(conf).await?;
        // Compare files
        for github_file in &github_files {
            if github_file.content_type == "file" {
                match local_files.get(&github_file.name) {
                    Some(local_sha) if local_sha == &github_file.sha => continue,
                    _ => return Ok(true), // SHA mismatch or file doesn't exist locally
                }
            }
        }
        // Also check if we have any extra files locally that aren't on GitHub
        let github_filenames: HashSet<_> = github_files
            .iter()
            .filter(|f| f.content_type == "file")
            .map(|f| &f.name)
            .collect();
        let local_filenames: HashSet<_> = local_files.keys().collect();
        if local_filenames != github_filenames {
            return Ok(true);
        }
        Ok(false)
    }
    /// Download the chain config for given mode. Fetches the corresponding
    /// directory from: https://github.com/FuelLabs/chain-configuration/.
    pub async fn download_config(&self, conf: &ChainConfig) -> anyhow::Result<()> {
        let folder_name = match conf {
            ChainConfig::Local => LOCAL_CONFIG_FOLDER_NAME,
            ChainConfig::Testnet => TESTNET_CONFIG_FOLDER_NAME,
            ChainConfig::Ignition => IGNITION_CONFIG_FOLDER_NAME,
        };
        // Use the configured base url instead of a hard-coded production
        // endpoint, so the test constructors (`with_base_url`,
        // `with_test_config`) exercise this path too — consistent with
        // `check_fetch_required`.
        let api_endpoint = self.build_api_endpoint(folder_name);
        let contents = self.fetch_folder_contents(&api_endpoint).await?;
        // Write into this fetcher's vault (defaults to `~/.forc/chainspecs`),
        // i.e. the same directory `check_local_files` hashes against.
        let target_dir = self.config_vault.join(folder_name);
        fs::create_dir_all(&target_dir)?;
        // Download each file
        for item in contents {
            if item.content_type == "file" {
                if let Some(download_url) = item.download_url {
                    let file_path = target_dir.join(&item.name);
                    let response = self.client.get(&download_url).send().await?;
                    if !response.status().is_success() {
                        bail!("Failed to download file: {}", item.name);
                    }
                    let content = response.bytes().await?;
                    fs::write(file_path, content)?;
                }
            }
        }
        Ok(())
    }
    /// Helper function to fetch folder contents from github.
    async fn fetch_folder_contents(&self, url: &str) -> anyhow::Result<Vec<GithubContentDetails>> {
        let response = self
            .client
            .get(url)
            .header("User-Agent", "forc-node")
            .send()
            .await?;
        if !response.status().is_success() {
            bail!("failed to fetch contents from github");
        }
        Ok(response.json().await?)
    }
}
/// Local configuration is validated based on its existence. Meaning that if
/// the configuration exists in user's local it is validated. If it is missing
/// the configuration files are fetched from remote.
async fn validate_local_chainconfig(fetcher: &ConfigFetcher) -> anyhow::Result<()> {
    let local_conf_dir = user_forc_directory()
        .join(CONFIG_FOLDER)
        .join(LOCAL_CONFIG_FOLDER_NAME);
    // An existing local configuration is accepted as-is; remote updates are
    // not tracked for the local network.
    if local_conf_dir.exists() {
        return Ok(());
    }
    println_warning(&format!(
        "Local node configuration files are missing at {}",
        local_conf_dir.display()
    ));
    // Ask user if they want to update the chain config.
    if !ask_user_yes_no_question("Would you like to download network configuration?")? {
        bail!(
            "Missing local network configuration, create one at {}",
            local_conf_dir.display()
        );
    }
    fetcher.download_config(&ChainConfig::Local).await?;
    Ok(())
}
/// Testnet and mainnet chain configurations are validated against the remote
/// versions from github. If local files exist for these configurations, hash
/// values are collected from remote and compared to find out whether remote
/// introduced changes. If so, the chain configuration is fetched again so the
/// bootstrapped node can sync with the rest of the network without issues
/// caused by a diverging chain configuration.
async fn validate_remote_chainconfig(
    fetcher: &ConfigFetcher,
    conf: &ChainConfig,
) -> anyhow::Result<()> {
    // For testnet and mainnet configs, we need to check online.
    println_action_green("Checking", "for network configuration updates.");
    if !fetcher.check_fetch_required(conf).await? {
        println_action_green(&format!("{conf}"), "is up-to-date.");
        return Ok(());
    }
    println_warning(&format!(
        "A network configuration update detected for {conf}, this might create problems while syncing with rest of the network"
    ));
    // Ask user if they want to update the chain config.
    if ask_user_yes_no_question("Would you like to update network configuration?")? {
        println_action_green("Updating", &format!("configuration files for {conf}"));
        fetcher.download_config(conf).await?;
        println_action_green(
            "Finished",
            &format!("updating configuration files for {conf}"),
        );
    }
    Ok(())
}
/// Check local state of the configuration files in the vault (if they exist)
/// and compare them to the remote ones on github. If a change is detected,
/// asks the user whether to update, and performs the update for them.
pub async fn check_and_update_chain_config(conf: ChainConfig) -> anyhow::Result<()> {
    let fetcher = ConfigFetcher::default();
    if conf == ChainConfig::Local {
        // Local configs are only checked for existence, never for updates.
        validate_local_chainconfig(&fetcher).await
    } else {
        validate_remote_chainconfig(&fetcher, &conf).await
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use wiremock::{
        matchers::{method, path},
        Mock, MockServer, ResponseTemplate,
    };
    // Helper function to create dummy github response
    // Each entry's sha is computed exactly like github's blob hash
    // (sha1("blob <len>\0<content>")) so the fetcher's local/remote
    // comparison can be exercised deterministically.
    fn create_github_response(files: &[(&str, &str)]) -> Vec<GithubContentDetails> {
        files
            .iter()
            .map(|(name, content)| {
                let mut hasher = Sha1::new();
                hasher.update(b"blob ");
                hasher.update(content.len().to_string().as_bytes());
                hasher.update([0]);
                hasher.update(content.as_bytes());
                let sha = format!("{:x}", hasher.finalize());
                GithubContentDetails {
                    name: name.to_string(),
                    sha,
                    download_url: Some(format!("https://raw.githubusercontent.com/test/{name}")),
                    content_type: "file".to_string(),
                }
            })
            .collect()
    }
    #[tokio::test]
    async fn test_fetch_not_required_when_files_match() {
        let mock_server = MockServer::start().await;
        let test_files = [
            ("config.json", "test config content"),
            ("metadata.json", "test metadata content"),
        ];
        // Create test directory and files
        let test_dir = TempDir::new().unwrap();
        let config_path = test_dir.path().to_path_buf();
        let test_folder = config_path.join(TESTNET_CONFIG_FOLDER_NAME);
        fs::create_dir_all(&test_folder).unwrap();
        for (name, content) in &test_files {
            fs::write(test_folder.join(name), content).unwrap();
        }
        // Setup mock response
        let github_response = create_github_response(&test_files);
        Mock::given(method("GET"))
            .and(path(format!(
                "/repos/FuelLabs/{CHAIN_CONFIG_REPO_NAME}/contents/{TESTNET_CONFIG_FOLDER_NAME}"
            )))
            .respond_with(ResponseTemplate::new(200).set_body_json(&github_response))
            .mount(&mock_server)
            .await;
        // Vault is redirected at the temp dir, so local and remote hashes match.
        let fetcher = ConfigFetcher::with_test_config(mock_server.uri(), config_path);
        let needs_fetch = fetcher
            .check_fetch_required(&ChainConfig::Testnet)
            .await
            .unwrap();
        assert!(
            !needs_fetch,
            "Fetch should not be required when files match"
        );
    }
    #[tokio::test]
    async fn test_fetch_required_when_files_differ() {
        let mock_server = MockServer::start().await;
        // Create local test files
        let test_dir = TempDir::new().unwrap();
        let config_path = test_dir.path().join("fuel").join("configs");
        let test_folder = config_path.join(TESTNET_CONFIG_FOLDER_NAME);
        fs::create_dir_all(&test_folder).unwrap();
        let local_files = [
            ("config.json", "old config content"),
            ("metadata.json", "old metadata content"),
        ];
        for (name, content) in &local_files {
            fs::write(test_folder.join(name), content).unwrap();
        }
        // Setup mock GitHub response with different content
        let github_files = [
            ("config.json", "new config content"),
            ("metadata.json", "new metadata content"),
        ];
        let github_response = create_github_response(&github_files);
        Mock::given(method("GET"))
            .and(path(format!(
                "/repos/FuelLabs/{CHAIN_CONFIG_REPO_NAME}/contents/{TESTNET_CONFIG_FOLDER_NAME}"
            )))
            .respond_with(ResponseTemplate::new(200).set_body_json(&github_response))
            .mount(&mock_server)
            .await;
        // NOTE(review): `with_base_url` keeps the user's real vault, not the
        // temp dir written above; the assert still holds either way (missing
        // local files -> fetch, mismatched hashes -> fetch) — confirm intent.
        let fetcher = ConfigFetcher::with_base_url(mock_server.uri());
        let needs_fetch = fetcher
            .check_fetch_required(&ChainConfig::Testnet)
            .await
            .unwrap();
        assert!(needs_fetch, "Fetch should be required when files differ");
    }
    #[tokio::test]
    async fn test_fetch_required_when_files_missing() {
        let mock_server = MockServer::start().await;
        // Create local test files (missing one file)
        let test_dir = TempDir::new().unwrap();
        let config_path = test_dir.path().join("fuel").join("configs");
        let test_folder = config_path.join(TESTNET_CONFIG_FOLDER_NAME);
        fs::create_dir_all(&test_folder).unwrap();
        let local_files = [("config.json", "test config content")];
        for (name, content) in &local_files {
            fs::write(test_folder.join(name), content).unwrap();
        }
        // Setup mock GitHub response with extra file
        let github_files = [
            ("config.json", "test config content"),
            ("metadata.json", "test metadata content"),
        ];
        let github_response = create_github_response(&github_files);
        Mock::given(method("GET"))
            .and(path(format!(
                "/repos/FuelLabs/{CHAIN_CONFIG_REPO_NAME}/contents/{TESTNET_CONFIG_FOLDER_NAME}"
            )))
            .respond_with(ResponseTemplate::new(200).set_body_json(&github_response))
            .mount(&mock_server)
            .await;
        let fetcher = ConfigFetcher::with_base_url(mock_server.uri());
        let needs_fetch = fetcher
            .check_fetch_required(&ChainConfig::Testnet)
            .await
            .unwrap();
        assert!(
            needs_fetch,
            "Fetch should be required when files are missing"
        );
    }
    #[tokio::test]
    async fn test_local_configuration_never_needs_fetch() {
        // `check_fetch_required` short-circuits for the local network, so no
        // mock server is needed here.
        let fetcher = ConfigFetcher::default();
        let needs_fetch = fetcher
            .check_fetch_required(&ChainConfig::Local)
            .await
            .unwrap();
        assert!(!needs_fetch, "Local configuration should never need fetch");
    }
    #[tokio::test]
    async fn test_fetch_required_when_extra_local_files() {
        let mock_server = MockServer::start().await;
        // Create local test files (with extra file)
        let test_dir = TempDir::new().unwrap();
        let config_path = test_dir.path().join("fuel").join("configs");
        let test_folder = config_path.join(TESTNET_CONFIG_FOLDER_NAME);
        fs::create_dir_all(&test_folder).unwrap();
        let local_files = [
            ("config.json", "test config content"),
            ("metadata.json", "test metadata content"),
            ("extra.json", "extra file content"),
        ];
        for (name, content) in &local_files {
            fs::write(test_folder.join(name), content).unwrap();
        }
        // Setup mock GitHub response with fewer files
        let github_files = [
            ("config.json", "test config content"),
            ("metadata.json", "test metadata content"),
        ];
        let github_response = create_github_response(&github_files);
        Mock::given(method("GET"))
            .and(path(format!(
                "/repos/FuelLabs/{CHAIN_CONFIG_REPO_NAME}/contents/{TESTNET_CONFIG_FOLDER_NAME}"
            )))
            .respond_with(ResponseTemplate::new(200).set_body_json(&github_response))
            .mount(&mock_server)
            .await;
        let fetcher = ConfigFetcher::with_base_url(mock_server.uri());
        let needs_fetch = fetcher
            .check_fetch_required(&ChainConfig::Testnet)
            .await
            .unwrap();
        assert!(
            needs_fetch,
            "Fetch should be required when there are extra local files"
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/lib.rs | forc-plugins/forc-node/src/lib.rs | pub mod chain_config;
// CLI command definitions — presumably; module body not in view here.
pub mod cmd;
// Shared constants: minimum versions, network parameters, folder names, ports.
pub mod consts;
// Ignition (mainnet) startup flow — presumably; module body not in view here.
pub mod ignition;
// Local dev-node startup flow — presumably; module body not in view here.
pub mod local;
// `fuel-core run` option builder (`RunOpts::generate_params`).
pub mod run_opts;
// Testnet startup flow — presumably; module body not in view here.
pub mod testnet;
// Keypair generation, interactive prompts, fd-limit checks and misc helpers.
pub mod util;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/util.rs | forc-plugins/forc-node/src/util.rs | use crate::consts::{
DB_FOLDER, IGNITION_CONFIG_FOLDER_NAME, LOCAL_CONFIG_FOLDER_NAME, TESTNET_CONFIG_FOLDER_NAME,
};
use anyhow::{anyhow, Result};
use dialoguer::{theme::ColorfulTheme, Confirm, Input, Password};
use forc_util::user_forc_directory;
use fuel_crypto::{
rand::{prelude::StdRng, SeedableRng},
SecretKey,
};
use libp2p_identity::{secp256k1, Keypair, PeerId};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::{
fmt::Display,
path::PathBuf,
process::{Command, Stdio},
};
use std::{
io::{Read, Write},
ops::Deref,
};
/// Selects which network's persistent database directory to use; converted to
/// a concrete path via `From<DbConfig> for PathBuf`.
pub enum DbConfig {
    Local,
    Testnet,
    Ignition,
}
impl From<DbConfig> for PathBuf {
    /// Resolves the persistent database directory for a network:
    /// `<forc dir>/db/<network folder>`.
    fn from(value: DbConfig) -> Self {
        let folder = match value {
            DbConfig::Local => LOCAL_CONFIG_FOLDER_NAME,
            DbConfig::Testnet => TESTNET_CONFIG_FOLDER_NAME,
            DbConfig::Ignition => IGNITION_CONFIG_FOLDER_NAME,
        };
        user_forc_directory().join(DB_FOLDER).join(folder)
    }
}
/// Given a `Command`, wrap it to enable generating the actual string that would
/// create this command.
/// Example:
/// ```rust
/// use std::process::Command;
/// use forc_node::util::HumanReadableCommand;
///
/// let mut command = Command::new("fuel-core");
/// command.arg("run");
/// let command = HumanReadableCommand::from(&command);
/// let formatted = format!("{command}");
/// assert_eq!(&formatted, "fuel-core run");
/// ```
pub struct HumanReadableCommand<'a>(&'a Command);
impl Display for HumanReadableCommand<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `Command`'s `Debug` output looks like `"prog" "arg1" "arg2"`;
        // collapse the inter-argument quoting and strip the remaining quotes
        // to recover a shell-like rendering.
        let quoted = format!("{:?}", self.0);
        let human = quoted.replace("\" \"", " ").replace('"', "");
        write!(f, "{human}")
    }
}
impl<'a> From<&'a Command> for HumanReadableCommand<'a> {
    fn from(value: &'a Command) -> Self {
        Self(value)
    }
}
/// Display a fuel_core::service::Config in a human-readable format
pub struct HumanReadableConfig<'a>(pub &'a fuel_core::service::Config);
impl Display for HumanReadableConfig<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // One labeled line per field; this exact layout is pinned by the
        // `test_human_readable_config` unit test in this module.
        writeln!(f, "Fuel Core Configuration:")?;
        writeln!(f, "  GraphQL Address: {}", self.0.graphql_config.addr)?;
        writeln!(f, "  Continue on Error: {}", self.0.continue_on_error)?;
        writeln!(f, "  Debug Mode: {}", self.0.debug)?;
        writeln!(f, "  UTXO Validation: {}", self.0.utxo_validation)?;
        writeln!(f, "  Snapshot Reader: {:?}", self.0.snapshot_reader)?;
        writeln!(
            f,
            "  Database Type: {:?}",
            self.0.combined_db_config.database_type
        )?;
        writeln!(
            f,
            "  Database Path: {}",
            self.0.combined_db_config.database_path.display()
        )?;
        Ok(())
    }
}
impl<'a> From<&'a fuel_core::service::Config> for HumanReadableConfig<'a> {
    fn from(value: &'a fuel_core::service::Config) -> Self {
        Self(value)
    }
}
#[derive(Serialize, Deserialize, Debug)]
pub struct KeyPair {
    // String form of the libp2p `PeerId` derived from the key's public half.
    pub peer_id: String,
    // String form of the secp256k1 secret key (via `SecretKey`'s `Display`);
    // sensitive — never log this.
    pub secret: String,
}
impl KeyPair {
    /// Generates a fresh random keypair using an entropy-seeded RNG and
    /// derives the matching libp2p peer id from it.
    pub fn random() -> Self {
        let mut rng = StdRng::from_entropy();
        let secret = SecretKey::random(&mut rng);
        // Re-wrap the raw secret bytes as a libp2p secp256k1 key so the PeerId
        // can be computed from the same secret.
        let mut bytes = *secret.deref();
        let p2p_secret = secp256k1::SecretKey::try_from_bytes(&mut bytes)
            .expect("Should be a valid private key");
        let p2p_keypair = secp256k1::Keypair::from(p2p_secret);
        let libp2p_keypair = Keypair::from(p2p_keypair);
        let peer_id = PeerId::from_public_key(&libp2p_keypair.public());
        Self {
            peer_id: format!("{peer_id}"),
            secret: format!("{secret}"),
        }
    }
}
/// Yes/no confirmation prompt; defaults to "no" and hides the default hint.
pub(crate) fn ask_user_yes_no_question(question: &str) -> anyhow::Result<bool> {
    Ok(Confirm::with_theme(&ColorfulTheme::default())
        .with_prompt(question)
        .default(false)
        .show_default(false)
        .interact()?)
}
/// Hidden-input prompt for sensitive values (input is not echoed).
pub(crate) fn ask_user_discreetly(question: &str) -> anyhow::Result<String> {
    Ok(Password::with_theme(&ColorfulTheme::default())
        .with_prompt(question)
        .interact()?)
}
/// Free-form text prompt.
pub(crate) fn ask_user_string(question: &str) -> anyhow::Result<String> {
    Ok(Input::with_theme(&ColorfulTheme::default())
        .with_prompt(question)
        .interact_text()?)
}
/// Print a string to an alternate screen, so the string isn't printed to the terminal.
pub(crate) fn display_string_discreetly(
    discreet_string: &str,
    continue_message: &str,
) -> Result<()> {
    use termion::screen::IntoAlternateScreen;
    // The alternate screen is restored when `screen` is dropped at the end of
    // this function, so the secret never lands in the scrollback buffer.
    let mut screen = std::io::stdout().into_alternate_screen()?;
    writeln!(screen, "{discreet_string}")?;
    screen.flush()?;
    println!("{continue_message}");
    // Keep the alternate screen up until the user acknowledges.
    wait_for_keypress();
    Ok(())
}
// Blocks until a single byte can be read from stdin.
// NOTE(review): on a line-buffered terminal this effectively waits for Enter —
// confirm whether raw-mode input is expected here.
pub(crate) fn wait_for_keypress() {
    let mut single_key = [0u8];
    std::io::stdin().read_exact(&mut single_key).unwrap();
}
/// Ask if the user has a keypair generated and if so, collect the details.
/// Otherwise a fresh keypair is generated and shown once on a discreet
/// (alternate) screen before being returned.
pub(crate) fn ask_user_keypair() -> Result<KeyPair> {
    let has_keypair = ask_user_yes_no_question("Do you have a keypair in hand?")?;
    if has_keypair {
        // ask the keypair
        let peer_id = ask_user_string("Peer Id:")?;
        let secret = ask_user_discreetly("Secret:")?;
        Ok(KeyPair { peer_id, secret })
    } else {
        println!("Generating new keypair...");
        let pair = KeyPair::random();
        // Show the secret on the alternate screen only, so it is not left in
        // the terminal's scrollback.
        display_string_discreetly(
            &format!(
                "Generated keypair:\n PeerID: {}, secret: {}",
                pair.peer_id, pair.secret
            ),
            "### Do not share or lose this private key! Press any key to complete. ###",
        )?;
        Ok(pair)
    }
}
/// Checks the local fuel-core's version that `forc-node` will be running.
///
/// Runs `fuel-core --version` and parses the trailing token of its output
/// (`fuel-core <SEMVER VERSION>`) as semver.
///
/// # Errors
/// Returns an error if `fuel-core` cannot be executed (e.g. not installed),
/// or if its version output cannot be parsed as semver.
pub fn get_fuel_core_version() -> anyhow::Result<Version> {
    let output = Command::new("fuel-core")
        .arg("--version")
        .stdout(Stdio::piped())
        .output()
        // Return a proper error instead of panicking when the binary is
        // missing, so callers of this `Result`-returning API can recover.
        .map_err(|err| {
            anyhow!("failed to run fuel-core, make sure that it is installed: {err}")
        })?;
    let version_output = String::from_utf8_lossy(&output.stdout).to_string();
    // Version output is `fuel-core <SEMVER VERSION>`. We should split it to only
    // get the version part of it before parsing as semver.
    let version = version_output
        .split_whitespace()
        .last()
        .ok_or_else(|| anyhow!("fuel-core version parse failed"))?;
    Ok(Version::parse(version)?)
}
// Ensures the process may hold at least `max_files` open file descriptors,
// raising the soft (and, if permitted, hard) RLIMIT_NOFILE when it is lower.
// Returns an error when the limit cannot be raised high enough.
#[cfg(unix)]
pub fn check_open_fds_limit(max_files: u64) -> Result<(), Box<dyn std::error::Error>> {
    use std::mem;
    // SAFETY: `rlimit` is a plain-old-data libc struct, so an all-zero value
    // is a valid initial state, and the raw pointers handed to
    // `getrlimit`/`setrlimit` point at this live local for the whole call.
    unsafe {
        let mut fd_limit = mem::zeroed();
        let mut err = libc::getrlimit(libc::RLIMIT_NOFILE, &mut fd_limit);
        if err != 0 {
            return Err("check_open_fds_limit failed".into());
        }
        // Current soft limit already sufficient — nothing to do.
        if fd_limit.rlim_cur >= max_files {
            return Ok(());
        }
        let prev_limit = fd_limit.rlim_cur;
        fd_limit.rlim_cur = max_files;
        if fd_limit.rlim_max < max_files {
            // If the process is not started by privileged user, this will fail.
            fd_limit.rlim_max = max_files;
        }
        err = libc::setrlimit(libc::RLIMIT_NOFILE, &fd_limit);
        if err == 0 {
            return Ok(());
        }
        Err(format!(
            "the maximum number of open file descriptors is too \
             small, got {prev_limit}, expect greater or equal to {max_files}"
        )
        .into())
    }
}
// Non-unix platforms have no RLIMIT_NOFILE equivalent here; treat as OK.
#[cfg(not(unix))]
pub fn check_open_fds_limit(_max_files: u64) -> Result<(), Box<dyn std::error::Error>> {
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use fuel_core::service::Config;
    #[test]
    fn test_human_readable_config() {
        // Render a default local-node config and compare against the same
        // fields formatted manually — pins the exact line layout emitted by
        // `HumanReadableConfig`'s `Display` impl.
        let config = Config::local_node();
        let human_readable = HumanReadableConfig(&config);
        let formatted = format!("{human_readable}");
        let expected = format!(
            r#"Fuel Core Configuration:
  GraphQL Address: {}
  Continue on Error: {}
  Debug Mode: {}
  UTXO Validation: {}
  Snapshot Reader: {:?}
  Database Type: {:?}
  Database Path: {}
"#,
            config.graphql_config.addr,
            config.continue_on_error,
            config.debug,
            config.utxo_validation,
            config.snapshot_reader,
            config.combined_db_config.database_type,
            config.combined_db_config.database_path.display()
        );
        assert_eq!(formatted, expected);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/main.rs | forc-plugins/forc-node/src/main.rs | //! A forc plugin to start a fuel core instance, preconfigured for generic
//! usecases.
use clap::Parser;
use forc_node::{
cmd::{ForcNodeCmd, Mode},
consts::{MINIMUM_OPEN_FILE_DESCRIPTOR_LIMIT, MIN_FUEL_CORE_VERSION},
ignition, local, testnet,
util::{check_open_fds_limit, get_fuel_core_version},
};
use forc_tracing::init_tracing_subscriber;
use forc_util::forc_result_bail;
use semver::Version;
use std::{env, process::Child, str::FromStr};
use tracing_subscriber::{filter::EnvFilter, layer::SubscriberExt, registry, Layer};
/// Initialize logging with the same setup as fuel-core CLI.
///
/// Honors two environment variables:
/// - `RUST_LOG`: standard `EnvFilter` directives (defaults to `info`).
/// - `HUMAN_LOGGING`: `true` (default) for pretty ANSI logs, `false` for
///   machine-parseable JSON logs.
///
/// # Panics
///
/// Panics if `RUST_LOG` contains invalid directives, if `HUMAN_LOGGING` is
/// not UTF-8 or not `true`/`false`, or if a global subscriber was already
/// installed.
fn init_logging() {
    const LOG_FILTER: &str = "RUST_LOG";
    const HUMAN_LOGGING: &str = "HUMAN_LOGGING";
    let filter = match env::var_os(LOG_FILTER) {
        Some(_) => EnvFilter::try_from_default_env().expect("Invalid `RUST_LOG` provided"),
        None => EnvFilter::new("info"),
    };
    let human_logging = env::var_os(HUMAN_LOGGING)
        .map(|s| {
            bool::from_str(s.to_str().unwrap())
                .expect("Expected `true` or `false` to be provided for `HUMAN_LOGGING`")
        })
        .unwrap_or(true);
    // All log output goes to stderr, keeping stdout clean for program output.
    let layer = tracing_subscriber::fmt::Layer::default().with_writer(std::io::stderr);
    let fmt = if human_logging {
        // use pretty logs
        layer
            .with_ansi(true)
            .with_level(true)
            .with_line_number(true)
            .boxed()
    } else {
        // use machine parseable structured logs
        layer
            // disable terminal colors
            .with_ansi(false)
            .with_level(true)
            .with_line_number(true)
            // use json
            .json()
            .boxed()
    };
    let subscriber = registry::Registry::default() // provide underlying span data store
        .with(filter) // filter out low-level debug tracing (eg tokio executor)
        .with(fmt); // log to stderr (the writer configured above)
    tracing::subscriber::set_global_default(subscriber).expect("setting global default failed");
}
/// Shared startup for the testnet and ignition modes: installs the tracing
/// subscriber, verifies the installed fuel-core meets the minimum supported
/// version, and checks the open-file-descriptor limit.
fn init_cli_setup() -> anyhow::Result<()> {
    init_tracing_subscriber(Default::default());
    let installed = get_fuel_core_version()?;
    let minimum = Version::parse(MIN_FUEL_CORE_VERSION)?;
    if installed < minimum {
        forc_result_bail!(format!(
            "Minimum supported fuel core version is {MIN_FUEL_CORE_VERSION}, system version: {}",
            installed
        ));
    }
    check_open_fds_limit(MINIMUM_OPEN_FILE_DESCRIPTOR_LIMIT)
        .map_err(|e| anyhow::anyhow!("Failed to check open file descriptor limit: {}", e))?;
    Ok(())
}
/// Entry point: parses the CLI and dispatches to the selected network mode.
///
/// Local mode runs an embedded fuel-core service in-process; testnet and
/// ignition modes spawn an external `fuel-core` child process. In both
/// non-dry-run cases the process stays alive until Ctrl-C.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let command = ForcNodeCmd::parse();
    let mut handle: Option<Child> = match command.mode {
        Mode::Local(local) => {
            // Local uses embedded fuel-core
            init_logging();
            let service = local::run(local, command.dry_run).await?;
            if service.is_some() {
                // For local, we keep the service alive by waiting for ctrl-c
                tokio::signal::ctrl_c()
                    .await
                    .map_err(|e| anyhow::anyhow!("Failed to listen for ctrl-c: {e}"))?;
            }
            // Dry runs return `None`; either way local mode exits here.
            return Ok(());
        }
        Mode::Testnet(testnet) => {
            init_cli_setup()?;
            testnet::op::run(testnet, command.dry_run).await?
        }
        Mode::Ignition(ignition) => {
            init_cli_setup()?;
            ignition::op::run(ignition, command.dry_run).await?
        }
    };
    // If not dry run, wait for the kill signal and kill fuel-core process
    if let Some(handle) = &mut handle {
        // Wait for the kill signal, if that comes we should kill child fuel-core
        // process.
        tokio::signal::ctrl_c()
            .await
            .map_err(|e| anyhow::anyhow!("Failed to listen for ctrl-c: {e}"))?;
        handle.kill()?;
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/cmd.rs | forc-plugins/forc-node/src/cmd.rs | use std::net::IpAddr;
use crate::{
consts::{DEFAULT_PEERING_PORT, DEFAULT_PORT},
ignition::cmd::IgnitionCmd,
local::cmd::LocalCmd,
testnet::cmd::TestnetCmd,
};
use clap::{Parser, Subcommand};
// NOTE: `///` comments on clap items become user-visible help text, so the
// review annotations below use plain `//` comments to avoid changing output.
#[derive(Debug, Parser)]
#[clap(name = "forc node", version)]
/// Forc node is a wrapper around fuel-core with sensible defaults to provide
/// easy way of bootstrapping a node for local development, testnet or mainnet.
pub struct ForcNodeCmd {
    /// Print the fuel-core command without running it.
    #[arg(long)]
    pub dry_run: bool,
    // Which network to target (local, testnet, or ignition).
    #[command(subcommand)]
    pub mode: Mode,
}
// The network a `forc node` invocation targets; each variant carries its
// mode-specific CLI options.
#[derive(Subcommand, Debug)]
pub enum Mode {
    /// Starts a local node for development purposes.
    Local(LocalCmd),
    /// Starts a node that will connect to latest testnet.
    Testnet(TestnetCmd),
    /// Starts a node that will connect to ignition network.
    Ignition(IgnitionCmd),
}
/// Set of shared node settings, specifically related to connections.
#[derive(Parser, Debug, Clone)]
pub struct ConnectionSettings {
    // P2P identity: peer-id half of the node keypair. When either this or
    // `secret` is missing, testnet/ignition ops prompt the user instead.
    #[clap(long)]
    pub peer_id: Option<String>,
    // P2P identity: secret-key half of the node keypair.
    #[clap(long)]
    pub secret: Option<String>,
    // Ethereum RPC endpoint for the relayer; prompted for when absent.
    #[clap(long)]
    pub relayer: Option<String>,
    // Address the node binds to.
    #[clap(long, default_value = "0.0.0.0")]
    pub ip: IpAddr,
    // GraphQL port; the range parser rejects port 0.
    #[clap(long, default_value_t = DEFAULT_PORT, value_parser = clap::value_parser!(u16).range(1..=65535))]
    pub port: u16,
    // P2P peering port; the range parser rejects port 0.
    #[clap(long, default_value_t = DEFAULT_PEERING_PORT, value_parser = clap::value_parser!(u16).range(1..=65535))]
    pub peering_port: u16,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/testnet/op.rs | forc-plugins/forc-node/src/testnet/op.rs | use crate::{
chain_config::{check_and_update_chain_config, ChainConfig},
consts::{
TESTNET_RELAYER_DA_DEPLOY_HEIGHT, TESTNET_RELAYER_LISTENING_CONTRACT,
TESTNET_RELAYER_LOG_PAGE_SIZE, TESTNET_SERVICE_NAME, TESTNET_SYNC_BLOCK_STREAM_BUFFER_SIZE,
TESTNET_SYNC_HEADER_BATCH_SIZE,
},
run_opts::{DbType, RunOpts},
testnet::cmd::TestnetCmd,
util::{ask_user_keypair, ask_user_string, HumanReadableCommand, KeyPair},
};
use anyhow::Context;
use forc_tracing::println_green;
use std::{
net::IpAddr,
path::PathBuf,
process::{Child, Command},
};
/// Configures the node with testnet configuration to connect the node to latest testnet.
/// Returns `None` if this is a dry_run and no child process created for fuel-core.
///
/// # Errors
///
/// Fails if the chain config cannot be fetched/updated, if interactive
/// prompting for the keypair or relayer endpoint fails, or if the
/// `fuel-core` child process cannot be spawned.
pub async fn run(cmd: TestnetCmd, dry_run: bool) -> anyhow::Result<Option<Child>> {
    check_and_update_chain_config(ChainConfig::Testnet).await?;
    // Use the CLI-provided keypair only when both halves are present;
    // otherwise fall back to an interactive prompt.
    let keypair = if let (Some(peer_id), Some(secret)) = (
        &cmd.connection_settings.peer_id,
        &cmd.connection_settings.secret,
    ) {
        KeyPair {
            peer_id: peer_id.clone(),
            secret: secret.clone(),
        }
    } else {
        ask_user_keypair()?
    };
    // Propagate prompt failures instead of panicking (the previous
    // `unwrap_or_else` + `expect` aborted the process); this also matches
    // `ignition::op::run`.
    let relayer = cmd
        .connection_settings
        .relayer
        .map_or_else(|| ask_user_string("Ethereum RPC (Sepolia) Endpoint:"), Ok)?;
    let opts = TestnetOpts {
        keypair,
        relayer,
        ip: cmd.connection_settings.ip,
        port: cmd.connection_settings.port,
        peering_port: cmd.connection_settings.peering_port,
        db_path: cmd.db_path,
        bootstrap_node: cmd.bootstrap_node,
    };
    let run_opts = RunOpts::from(opts);
    let params = run_opts.generate_params();
    let mut fuel_core_command = Command::new("fuel-core");
    fuel_core_command.arg("run");
    fuel_core_command.args(params.as_slice());
    // Always show the exact command so users can reproduce or debug it.
    println_green(&format!(
        "{}",
        HumanReadableCommand::from(&fuel_core_command)
    ));
    if dry_run {
        return Ok(None);
    }
    // Spawn the process with proper error handling
    let handle = fuel_core_command
        .spawn()
        .with_context(|| "Failed to spawn fuel-core process:".to_string())?;
    Ok(Some(handle))
}
/// Fully-resolved settings used to build the `fuel-core run` invocation for
/// a testnet node (see `From<TestnetOpts> for RunOpts`).
#[derive(Debug)]
pub struct TestnetOpts {
    keypair: KeyPair,
    relayer: String,
    ip: IpAddr,
    port: u16,
    peering_port: u16,
    db_path: PathBuf,
    bootstrap_node: String,
}
impl From<TestnetOpts> for RunOpts {
    /// Maps testnet options onto generic run options, filling in the
    /// testnet constants (service name, relayer contract/deploy height,
    /// sync sizes) and enabling p2p, relayer, and UTXO validation.
    fn from(value: TestnetOpts) -> Self {
        Self {
            service_name: Some(TESTNET_SERVICE_NAME.to_string()),
            db_type: DbType::RocksDb,
            debug: false,
            snapshot: ChainConfig::Testnet.into(),
            keypair: Some(value.keypair.secret),
            relayer: Some(value.relayer),
            ip: Some(value.ip),
            port: Some(value.port),
            peering_port: Some(value.peering_port),
            db_path: Some(value.db_path),
            bootstrap_nodes: Some(value.bootstrap_node),
            utxo_validation: true,
            poa_instant: false,
            enable_p2p: true,
            sync_header_batch_size: Some(TESTNET_SYNC_HEADER_BATCH_SIZE),
            enable_relayer: true,
            relayer_listener: Some(TESTNET_RELAYER_LISTENING_CONTRACT.to_string()),
            relayer_da_deploy_height: Some(TESTNET_RELAYER_DA_DEPLOY_HEIGHT),
            relayer_log_page_size: Some(TESTNET_RELAYER_LOG_PAGE_SIZE),
            sync_block_stream_buffer_size: Some(TESTNET_SYNC_BLOCK_STREAM_BUFFER_SIZE),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/testnet/mod.rs | forc-plugins/forc-node/src/testnet/mod.rs | pub mod cmd;
pub mod op;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/testnet/cmd.rs | forc-plugins/forc-node/src/testnet/cmd.rs | use crate::{cmd::ConnectionSettings, consts::TESTNET_BOOTSTRAP_NODE, util::DbConfig};
use clap::Parser;
use std::path::PathBuf;
// CLI options for running a testnet node.
#[derive(Parser, Debug, Clone)]
pub struct TestnetCmd {
    #[clap(flatten)]
    pub connection_settings: ConnectionSettings,
    // Defaults to the testnet data directory derived from `DbConfig::Testnet`.
    #[clap(long, default_value = default_testnet_db_path().into_os_string())]
    pub db_path: PathBuf,
    // Bootstrap peer the node initially connects to.
    #[clap(long, default_value_t = TESTNET_BOOTSTRAP_NODE.to_string())]
    pub bootstrap_node: String,
}
// Default on-disk database directory for a testnet node.
fn default_testnet_db_path() -> PathBuf {
    let path: PathBuf = DbConfig::Testnet.into();
    path
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/local/mod.rs | forc-plugins/forc-node/src/local/mod.rs | pub mod cmd;
use crate::{
chain_config::{check_and_update_chain_config, ChainConfig},
util::HumanReadableConfig,
};
use forc_tracing::println_green;
use fuel_core::service::FuelService;
/// Local is a local node suited for local development.
/// By default, the node is in `debug` mode and the db used is `in-memory`.
/// Returns `None` if this is a dry_run and no service created for fuel-core.
///
/// # Errors
///
/// Fails if the chain config cannot be fetched/updated or the embedded
/// fuel-core service fails to start.
pub async fn run(cmd: cmd::LocalCmd, dry_run: bool) -> anyhow::Result<Option<FuelService>> {
    check_and_update_chain_config(ChainConfig::Local).await?;
    // NOTE: `Config::from(LocalCmd)` panics on malformed `--account` values
    // (see the `unwrap` in that impl).
    let config = fuel_core::service::Config::from(cmd);
    if dry_run {
        // For dry run, display the configuration that would be used
        println_green(&format!("{}", HumanReadableConfig::from(&config)));
        return Ok(None);
    }
    println_green("Starting fuel-core service...");
    let service = FuelService::new_node(config)
        .await
        .map_err(|e| anyhow::anyhow!("Failed to start fuel-core service: {}", e))?;
    println_green(&format!("Service started on: {}", service.bound_address));
    Ok(Some(service))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/local/cmd.rs | forc-plugins/forc-node/src/local/cmd.rs | use crate::consts::DEFAULT_PORT;
use anyhow;
use clap::Parser;
use fuel_core::{
chain_config::default_consensus_dev_key,
service::{
config::{DbType, Trigger},
Config,
},
};
use fuel_core_chain_config::{
coin_config_helpers::CoinConfigGenerator, ChainConfig, CoinConfig, Owner, SnapshotMetadata,
TESTNET_INITIAL_BALANCE,
};
use fuel_core_types::{
fuel_crypto::fuel_types::{Address, AssetId},
secrecy::Secret,
signer::SignMode,
};
use std::{path::PathBuf, str::FromStr};
// CLI options for running a local development node (embedded fuel-core).
// Review notes use `//` so clap help text is unchanged.
#[derive(Parser, Debug, Clone)]
pub struct LocalCmd {
    // Optional snapshot directory; falls back to the built-in local-testnet
    // chain config when missing or unreadable.
    #[clap(long)]
    pub chain_config: Option<PathBuf>,
    // GraphQL port; `From<LocalCmd> for Config` substitutes DEFAULT_PORT.
    #[clap(long)]
    pub port: Option<u16>,
    /// If a db path is provided local node runs in persistent mode.
    #[clap(long)]
    pub db_path: Option<PathBuf>,
    // Only honored as RocksDB; any other value results in an in-memory db.
    #[clap(long = "db-type", value_enum)]
    pub db_type: Option<DbType>,
    /// Fund accounts with the format: <account-id>:<asset-id>:<amount>
    /// Multiple accounts can be provided via comma separation or multiple --account flags
    #[clap(long)]
    pub account: Vec<String>,
    #[arg(long = "debug", env)]
    pub debug: bool,
    /// Allows execution of transactions based on past block, such as:
    /// - Dry run in the past
    #[arg(long = "historical-execution", env)]
    pub historical_execution: bool,
    // Instant block production when set; otherwise blocks are never
    // auto-produced (Trigger::Never).
    #[arg(long = "poa-instant", env)]
    pub poa_instant: bool,
}
/// Parses `--account` values of the form `<account-id>[:<asset-id>[:<amount>]]`
/// into funded genesis coins.
///
/// A missing asset defaults to `base_asset_id`; a missing amount defaults to
/// `TESTNET_INITIAL_BALANCE`. `current_coin_idx` is the output index assigned
/// to the first generated coin; subsequent coins get consecutive indices.
///
/// # Errors
///
/// Returns an error for malformed entries (bad account/asset hex, bad
/// amount, too many `:`-separated parts) or if an output index would
/// overflow `u16`.
fn get_coins_per_account(
    account_strings: Vec<String>,
    base_asset_id: &AssetId,
    current_coin_idx: usize,
) -> anyhow::Result<Vec<CoinConfig>> {
    let mut coin_generator = CoinConfigGenerator::new();
    let mut coins = Vec::new();
    for account_string in account_strings {
        let parts: Vec<&str> = account_string.trim().split(':').collect();
        let (owner, asset_id, amount) = match parts.as_slice() {
            [owner_str] => {
                // Only account-id provided, use default asset and amount
                let owner = Address::from_str(owner_str)
                    .map_err(|e| anyhow::anyhow!("Invalid account ID: {}", e))?;
                (owner, *base_asset_id, TESTNET_INITIAL_BALANCE)
            }
            [owner_str, asset_str] => {
                // account-id:asset-id provided, use default amount
                let owner = Address::from_str(owner_str)
                    .map_err(|e| anyhow::anyhow!("Invalid account ID: {}", e))?;
                let asset_id = AssetId::from_str(asset_str)
                    .map_err(|e| anyhow::anyhow!("Invalid asset ID: {}", e))?;
                (owner, asset_id, TESTNET_INITIAL_BALANCE)
            }
            [owner_str, asset_str, amount_str] => {
                // Full format: account-id:asset-id:amount
                let owner = Address::from_str(owner_str)
                    .map_err(|e| anyhow::anyhow!("Invalid account ID: {}", e))?;
                let asset_id = AssetId::from_str(asset_str)
                    .map_err(|e| anyhow::anyhow!("Invalid asset ID: {}", e))?;
                let amount = amount_str
                    .parse::<u64>()
                    .map_err(|e| anyhow::anyhow!("Invalid amount: {}", e))?;
                (owner, asset_id, amount)
            }
            _ => {
                return Err(anyhow::anyhow!(
                    "Invalid account format: {}. Expected format: <account-id>[:asset-id[:amount]]",
                    account_string
                ));
            }
        };
        // Fail loudly instead of silently truncating (the previous `as u16`
        // cast wrapped when more than u16::MAX coins were configured).
        let output_index = u16::try_from(current_coin_idx + coins.len())
            .map_err(|_| anyhow::anyhow!("Too many coins: output index overflows u16"))?;
        let coin = CoinConfig {
            amount,
            owner: owner.into(),
            asset_id,
            output_index,
            ..coin_generator.generate()
        };
        coins.push(coin);
    }
    Ok(coins)
}
impl From<LocalCmd> for Config {
    /// Builds an embedded fuel-core `Config` for local development: local
    /// testnet chain/state config, dev consensus key, requested db backend,
    /// block-production trigger, and a localhost GraphQL bind address.
    ///
    /// # Panics
    ///
    /// Panics on malformed `--account` values — `From` cannot return an
    /// error, so the parse result is `unwrap`ped.
    fn from(cmd: LocalCmd) -> Self {
        let LocalCmd {
            chain_config,
            port,
            db_path,
            db_type,
            account,
            debug,
            historical_execution,
            poa_instant,
            ..
        } = cmd;
        // Prefer a user-provided snapshot; fall back to the built-in local
        // testnet chain config when reading it fails.
        let chain_config = match chain_config {
            Some(path) => match SnapshotMetadata::read(&path) {
                Ok(metadata) => ChainConfig::from_snapshot_metadata(&metadata).unwrap(),
                Err(e) => {
                    tracing::error!("Failed to open snapshot reader: {}", e);
                    tracing::warn!("Using local testnet snapshot reader");
                    ChainConfig::local_testnet()
                }
            },
            None => ChainConfig::local_testnet(),
        };
        let base_asset_id = chain_config.consensus_parameters.base_asset_id();
        // Parse and validate account funding if provided
        let mut state_config = fuel_core_chain_config::StateConfig::local_testnet();
        // Re-home the pre-funded default coins onto this chain's base asset.
        state_config
            .coins
            .iter_mut()
            .for_each(|coin| coin.asset_id = *base_asset_id);
        let current_coin_idx = state_config.coins.len();
        if !account.is_empty() {
            let coins = get_coins_per_account(account, base_asset_id, current_coin_idx)
                .map_err(|e| anyhow::anyhow!("Error parsing account funding: {}", e))
                .unwrap();
            if !coins.is_empty() {
                // Log each extra funded account so users can see what the
                // genesis state will contain.
                tracing::info!("Additional accounts");
                for coin in &coins {
                    let owner_hex = match &coin.owner {
                        Owner::Address(address) => format!("{address:#x}"),
                        Owner::SecretKey(secret_key) => format!("{secret_key:#x}"),
                    };
                    tracing::info!(
                        "Address({}), Asset ID({:#x}), Balance({})",
                        owner_hex,
                        coin.asset_id,
                        coin.amount
                    );
                }
                state_config.coins.extend(coins);
            }
        }
        let mut config = Config::local_node_with_configs(chain_config, state_config);
        config.name = "fuel-core".to_string();
        // Local-specific settings
        config.debug = debug;
        // Well-known dev consensus key: acceptable for local development only.
        let key = default_consensus_dev_key();
        config.consensus_signer = SignMode::Key(Secret::new(key.into()));
        // Database configuration
        match db_type {
            Some(DbType::RocksDb) => {
                config.combined_db_config.database_type = DbType::RocksDb;
                if let Some(db_path) = db_path {
                    config.combined_db_config.database_path = db_path;
                }
                // Historical execution is only honored with RocksDB; it is
                // forced off for the in-memory db below.
                config.historical_execution = historical_execution;
            }
            _ => {
                config.combined_db_config.database_type = DbType::InMemory;
                config.historical_execution = false;
            }
        }
        config.block_production = match poa_instant {
            true => Trigger::Instant,
            false => Trigger::Never,
        };
        // Network configuration
        let ip = "127.0.0.1".parse().unwrap();
        let port = port.unwrap_or(DEFAULT_PORT);
        config.graphql_config.addr = std::net::SocketAddr::new(ip, port);
        config.utxo_validation = false; // local development
        config
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Tests for `get_coins_per_account`: default asset/amount, custom asset,
    // custom amount, multiple accounts with index offsets, and error cases.
    #[test]
    fn test_get_coins_per_account_single_account_with_defaults() {
        let base_asset_id = AssetId::default();
        let account_id = "0x0000000000000000000000000000000000000000000000000000000000000001";
        let accounts = vec![account_id.to_string()];
        let result = get_coins_per_account(accounts, &base_asset_id, 0);
        assert!(result.is_ok());
        let coins = result.unwrap();
        assert_eq!(coins.len(), 1);
        let coin = &coins[0];
        let expected_owner = Owner::Address(Address::from_str(account_id).unwrap());
        assert_eq!(coin.owner, expected_owner);
        assert_eq!(coin.asset_id, base_asset_id);
        assert_eq!(coin.amount, TESTNET_INITIAL_BALANCE);
        assert_eq!(coin.output_index, 0);
    }
    #[test]
    fn test_get_coins_per_account_with_custom_asset() {
        let base_asset_id = AssetId::default();
        let account_id = "0x0000000000000000000000000000000000000000000000000000000000000001";
        let asset_id = "0x0000000000000000000000000000000000000000000000000000000000000002";
        let accounts = vec![format!("{}:{}", account_id, asset_id)];
        let result = get_coins_per_account(accounts, &base_asset_id, 0);
        assert!(result.is_ok());
        let coins = result.unwrap();
        assert_eq!(coins.len(), 1);
        let coin = &coins[0];
        let expected_owner = Owner::Address(Address::from_str(account_id).unwrap());
        assert_eq!(coin.owner, expected_owner);
        assert_eq!(coin.asset_id, AssetId::from_str(asset_id).unwrap());
        assert_eq!(coin.amount, TESTNET_INITIAL_BALANCE);
        assert_eq!(coin.output_index, 0);
    }
    #[test]
    fn test_get_coins_per_account_with_custom_amount() {
        let base_asset_id = AssetId::default();
        let account_id = "0x0000000000000000000000000000000000000000000000000000000000000001";
        let asset_id = "0x0000000000000000000000000000000000000000000000000000000000000002";
        let amount = 5000000u64;
        let accounts = vec![format!("{}:{}:{}", account_id, asset_id, amount)];
        let result = get_coins_per_account(accounts, &base_asset_id, 0);
        assert!(result.is_ok());
        let coins = result.unwrap();
        assert_eq!(coins.len(), 1);
        let coin = &coins[0];
        let expected_owner = Owner::Address(Address::from_str(account_id).unwrap());
        assert_eq!(coin.owner, expected_owner);
        assert_eq!(coin.asset_id, AssetId::from_str(asset_id).unwrap());
        assert_eq!(coin.amount, amount);
        assert_eq!(coin.output_index, 0);
    }
    // Output indices must continue from `current_coin_idx` (here, 5).
    #[test]
    fn test_get_coins_per_account_multiple_accounts() {
        let base_asset_id = AssetId::default();
        let account1 = "0x0000000000000000000000000000000000000000000000000000000000000001";
        let account2 = "0x0000000000000000000000000000000000000000000000000000000000000002";
        let accounts = vec![account1.to_string(), account2.to_string()];
        let result = get_coins_per_account(accounts, &base_asset_id, 5);
        assert!(result.is_ok());
        let coins = result.unwrap();
        assert_eq!(coins.len(), 2);
        let coin1 = &coins[0];
        let expected_owner1 = Owner::Address(Address::from_str(account1).unwrap());
        assert_eq!(coin1.owner, expected_owner1);
        assert_eq!(coin1.output_index, 5);
        let coin2 = &coins[1];
        let expected_owner2 = Owner::Address(Address::from_str(account2).unwrap());
        assert_eq!(coin2.owner, expected_owner2);
        assert_eq!(coin2.output_index, 6);
    }
    // Error-path coverage; the asserted messages pin the exact error text.
    #[test]
    fn test_get_coins_per_account_edge_cases_and_errors() {
        let base_asset_id = AssetId::default();
        let valid_account = "0x0000000000000000000000000000000000000000000000000000000000000001";
        let valid_asset = "0x0000000000000000000000000000000000000000000000000000000000000002";
        // Test empty input
        let result = get_coins_per_account(vec![], &base_asset_id, 0);
        assert!(result.is_ok());
        let coins = result.unwrap();
        assert_eq!(coins.len(), 0);
        // Test invalid account ID
        let result =
            get_coins_per_account(vec!["invalid_account_id".to_string()], &base_asset_id, 0);
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().to_string(),
            "Invalid account ID: Invalid encoded byte in Address"
        );
        // Test invalid asset ID
        let result = get_coins_per_account(
            vec![format!("{}:invalid_asset", valid_account)],
            &base_asset_id,
            0,
        );
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().to_string(),
            "Invalid asset ID: Invalid encoded byte in AssetId"
        );
        // Test invalid amount
        let result = get_coins_per_account(
            vec![format!("{}:{}:not_a_number", valid_account, valid_asset)],
            &base_asset_id,
            0,
        );
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().to_string(),
            "Invalid amount: invalid digit found in string"
        );
        // Test too many parts
        let result = get_coins_per_account(
            vec!["part1:part2:part3:part4".to_string()],
            &base_asset_id,
            0,
        );
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().to_string(),
            "Invalid account format: part1:part2:part3:part4. Expected format: <account-id>[:asset-id[:amount]]"
        );
        // Test empty account (should fail now)
        let result = get_coins_per_account(vec!["".to_string()], &base_asset_id, 0);
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().to_string(),
            "Invalid account ID: Invalid encoded byte in Address"
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/ignition/op.rs | forc-plugins/forc-node/src/ignition/op.rs | use super::cmd::IgnitionCmd;
use crate::{
chain_config::{check_and_update_chain_config, ChainConfig},
consts::{
MAINNET_BOOTSTRAP_NODE, MAINNET_RELAYER_DA_DEPLOY_HEIGHT,
MAINNET_RELAYER_LISTENING_CONTRACT, MAINNET_RELAYER_LOG_PAGE_SIZE, MAINNET_SERVICE_NAME,
MAINNET_SYNC_HEADER_BATCH_SIZE,
},
run_opts::{DbType, RunOpts},
util::{ask_user_keypair, ask_user_string, HumanReadableCommand, KeyPair},
};
use anyhow::Context;
use forc_tracing::println_green;
use std::{
net::IpAddr,
path::PathBuf,
process::{Child, Command},
};
/// Configures the node with ignition configuration to connect the node to latest mainnet.
/// Returns `None` if this is a dry_run and no child process created for fuel-core.
///
/// # Errors
///
/// Fails if the chain config cannot be fetched/updated, if interactive
/// prompting fails, or if the `fuel-core` child process cannot be spawned.
pub async fn run(cmd: IgnitionCmd, dry_run: bool) -> anyhow::Result<Option<Child>> {
    check_and_update_chain_config(ChainConfig::Ignition).await?;
    // Use the CLI-provided keypair only when both halves are present;
    // otherwise prompt interactively.
    let keypair = if let (Some(peer_id), Some(secret)) = (
        &cmd.connection_settings.peer_id,
        &cmd.connection_settings.secret,
    ) {
        KeyPair {
            peer_id: peer_id.to_string(),
            secret: secret.to_string(),
        }
    } else {
        ask_user_keypair()?
    };
    let relayer = cmd
        .connection_settings
        .relayer
        .map_or_else(|| ask_user_string("Ethereum RPC (mainnet) Endpoint:"), Ok)?;
    // NOTE(review): `cmd.bootstrap_node` is not threaded into the options;
    // `From<IgnitionOpts>` hardcodes MAINNET_BOOTSTRAP_NODE — confirm intended.
    let opts = IgnitionOpts {
        keypair,
        relayer,
        ip: cmd.connection_settings.ip,
        port: cmd.connection_settings.port,
        peering_port: cmd.connection_settings.peering_port,
        db_path: cmd.db_path,
    };
    let run_opts = RunOpts::from(opts);
    let params = run_opts.generate_params();
    let mut fuel_core_command = Command::new("fuel-core");
    fuel_core_command.arg("run");
    fuel_core_command.args(params.as_slice());
    // Always show the exact command so users can reproduce or debug it.
    println_green(&format!(
        "{}",
        HumanReadableCommand::from(&fuel_core_command)
    ));
    if dry_run {
        return Ok(None);
    }
    // Spawn the process with proper error handling
    let handle = fuel_core_command
        .spawn()
        .with_context(|| "Failed to spawn fuel-core process:".to_string())?;
    Ok(Some(handle))
}
/// Fully-resolved settings used to build the `fuel-core run` invocation for
/// an ignition (mainnet) node.
///
/// NOTE(review): unlike `TestnetOpts` there is no `bootstrap_node` field, so
/// the `--bootstrap-node` CLI flag cannot flow through — confirm intended.
#[derive(Debug)]
pub struct IgnitionOpts {
    keypair: KeyPair,
    relayer: String,
    ip: IpAddr,
    port: u16,
    peering_port: u16,
    db_path: PathBuf,
}
impl From<IgnitionOpts> for RunOpts {
    /// Maps ignition (mainnet) options onto generic run options, filling in
    /// mainnet constants and enabling p2p, relayer, and UTXO validation.
    fn from(value: IgnitionOpts) -> Self {
        Self {
            service_name: Some(MAINNET_SERVICE_NAME.to_string()),
            db_type: DbType::RocksDb,
            debug: false,
            snapshot: ChainConfig::Ignition.into(),
            keypair: Some(value.keypair.secret),
            relayer: Some(value.relayer),
            ip: Some(value.ip),
            port: Some(value.port),
            peering_port: Some(value.peering_port),
            db_path: Some(value.db_path),
            utxo_validation: true,
            poa_instant: false,
            enable_p2p: true,
            sync_header_batch_size: Some(MAINNET_SYNC_HEADER_BATCH_SIZE),
            enable_relayer: true,
            relayer_listener: Some(MAINNET_RELAYER_LISTENING_CONTRACT.to_string()),
            relayer_da_deploy_height: Some(MAINNET_RELAYER_DA_DEPLOY_HEIGHT),
            relayer_log_page_size: Some(MAINNET_RELAYER_LOG_PAGE_SIZE),
            // NOTE(review): reuses MAINNET_SYNC_HEADER_BATCH_SIZE for the
            // block stream buffer size; the testnet mapping uses a dedicated
            // TESTNET_SYNC_BLOCK_STREAM_BUFFER_SIZE constant — confirm this
            // is intentional and not a copy-paste slip.
            sync_block_stream_buffer_size: Some(MAINNET_SYNC_HEADER_BATCH_SIZE),
            bootstrap_nodes: Some(MAINNET_BOOTSTRAP_NODE.to_string()),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/ignition/mod.rs | forc-plugins/forc-node/src/ignition/mod.rs | pub mod cmd;
pub mod op;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/src/ignition/cmd.rs | forc-plugins/forc-node/src/ignition/cmd.rs | use crate::{cmd::ConnectionSettings, consts::MAINNET_BOOTSTRAP_NODE, util::DbConfig};
use clap::Parser;
use std::path::PathBuf;
// CLI options for running an ignition (mainnet) node.
#[derive(Parser, Debug, Clone)]
pub struct IgnitionCmd {
    #[clap(flatten)]
    pub connection_settings: ConnectionSettings,
    // Defaults to the ignition data directory derived from `DbConfig::Ignition`.
    #[clap(long, default_value = default_ignition_db_path().into_os_string())]
    pub db_path: PathBuf,
    // NOTE(review): this flag currently appears unused — `ignition::op::run`
    // never reads it and `From<IgnitionOpts> for RunOpts` hardcodes
    // MAINNET_BOOTSTRAP_NODE. Confirm whether it should be threaded through.
    #[clap(long, default_value_t = MAINNET_BOOTSTRAP_NODE.to_string())]
    pub bootstrap_node: String,
}
// Default on-disk database directory for an ignition (mainnet) node.
fn default_ignition_db_path() -> PathBuf {
    let path: PathBuf = DbConfig::Ignition.into();
    path
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-node/tests/local.rs | forc-plugins/forc-node/tests/local.rs | use std::time::Duration;
use forc_node::local::{cmd::LocalCmd, run};
use serde_json::json;
use tokio::time::sleep;
// Boots a local in-process node on a random free port and asserts the
// GraphQL `health` query reports healthy.
#[ignore = "CI errors with: IO error: not a terminal"]
#[tokio::test]
async fn start_local_node_check_health() {
    let port = portpicker::pick_unused_port().expect("No ports free");
    let local_cmd = LocalCmd {
        chain_config: None,
        port: Some(port),
        db_path: None,
        account: vec![],
        db_type: None,
        debug: false,
        historical_execution: false,
        poa_instant: false,
    };
    // Keep the service handle alive for the duration of the test.
    let _service = run(local_cmd, false).await.unwrap().unwrap();
    // Wait for node to start graphql service
    sleep(Duration::from_secs(2)).await;
    let client = reqwest::Client::new();
    let response = client
        .post(format!("http://127.0.0.1:{port}/v1/graphql"))
        .header("Content-Type", "application/json")
        .json(&json!({
            "query": "{ health }"
        }))
        .send()
        .await
        .expect("Failed to send request");
    assert!(response.status().is_success());
    let body: serde_json::Value = response.json().await.expect("Failed to parse response");
    assert_eq!(body["data"]["health"], true);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/rate_limit.rs | forc-plugins/forc-mcp/src/rate_limit.rs | use axum::{
extract::Request,
http::{HeaderMap, StatusCode},
middleware::Next,
response::{IntoResponse, Response},
Json,
};
use serde_json::json;
use std::{
collections::HashMap,
net::{IpAddr, SocketAddr},
sync::Arc,
time::{Duration, Instant},
};
use tokio::sync::RwLock;
/// Rate limiter configuration
#[derive(Debug, Clone)]
pub struct RateLimitConfig {
    /// Sliding-window cap over the last 60 seconds.
    pub requests_per_minute: u32,
    /// Sliding-window cap over the last 24 hours.
    pub requests_per_day: u32,
}
/// Request tracking for a specific key (IP or IP+API key)
#[derive(Debug, Clone)]
struct RequestTracker {
    // Timestamps of accepted requests within (roughly) the last minute.
    minute_requests: Vec<Instant>,
    // Timestamps of accepted requests within (roughly) the last day.
    day_requests: Vec<Instant>,
    // Last time the periodic expiry sweep ran.
    last_cleanup: Instant,
}
impl RequestTracker {
    /// Creates an empty tracker with the cleanup clock starting now.
    fn new() -> Self {
        Self {
            minute_requests: Vec::new(),
            day_requests: Vec::new(),
            last_cleanup: Instant::now(),
        }
    }
    /// Clean up expired requests and check if new request is allowed.
    ///
    /// Returns `true` (and records the request in both windows) only when
    /// both the per-minute and per-day windows are under their limits; a
    /// rejected request is NOT recorded.
    fn check_and_add_request(&mut self, config: &RateLimitConfig) -> bool {
        let now = Instant::now();
        // Clean up old requests periodically
        if now.duration_since(self.last_cleanup) > Duration::from_secs(60) {
            self.cleanup_expired_requests(now);
            self.last_cleanup = now;
        }
        // Check minute limit (expired entries are pruned before counting)
        let minute_cutoff = now - Duration::from_secs(60);
        self.minute_requests.retain(|&time| time > minute_cutoff);
        if self.minute_requests.len() >= config.requests_per_minute as usize {
            return false;
        }
        // Check day limit
        let day_cutoff = now - Duration::from_secs(24 * 60 * 60);
        self.day_requests.retain(|&time| time > day_cutoff);
        if self.day_requests.len() >= config.requests_per_day as usize {
            return false;
        }
        // Add this request
        self.minute_requests.push(now);
        self.day_requests.push(now);
        true
    }
    /// Drops timestamps older than the minute/day windows relative to `now`.
    fn cleanup_expired_requests(&mut self, now: Instant) {
        let minute_cutoff = now - Duration::from_secs(60);
        let day_cutoff = now - Duration::from_secs(24 * 60 * 60);
        self.minute_requests.retain(|&time| time > minute_cutoff);
        self.day_requests.retain(|&time| time > day_cutoff);
    }
    /// Get current usage stats as `(last-minute count, last-day count)`.
    fn get_usage(&self, now: Instant) -> (usize, usize) {
        let minute_cutoff = now - Duration::from_secs(60);
        let day_cutoff = now - Duration::from_secs(24 * 60 * 60);
        let minute_count = self
            .minute_requests
            .iter()
            .filter(|&&time| time > minute_cutoff)
            .count();
        let day_count = self
            .day_requests
            .iter()
            .filter(|&&time| time > day_cutoff)
            .count();
        (minute_count, day_count)
    }
}
/// In-memory rate limiter
///
/// Shared across handlers as `Arc<RateLimiter>` (see the middleware below);
/// the per-key trackers live behind an async `RwLock`.
#[derive(Debug)]
pub struct RateLimiter {
    trackers: Arc<RwLock<HashMap<String, RequestTracker>>>,
    config: RateLimitConfig,
}
impl RateLimiter {
    /// Creates a limiter with the given limits and no tracked clients yet.
    pub fn new(config: RateLimitConfig) -> Self {
        Self {
            trackers: Arc::new(RwLock::new(HashMap::new())),
            config,
        }
    }
    /// Check if a request from the given key is allowed.
    ///
    /// On success the request is recorded against `key`; on failure nothing
    /// is recorded and the exceeded window is reported.
    pub async fn check_request(&self, key: &str) -> Result<(), RateLimitError> {
        let mut trackers = self.trackers.write().await;
        let tracker = trackers
            .entry(key.to_string())
            .or_insert_with(RequestTracker::new);
        if tracker.check_and_add_request(&self.config) {
            Ok(())
        } else {
            let now = Instant::now();
            let (minute_usage, day_usage) = tracker.get_usage(now);
            // Attribute the rejection to the minute window when it is full;
            // otherwise it must have been the day window.
            if minute_usage >= self.config.requests_per_minute as usize {
                Err(RateLimitError::MinuteLimit {
                    limit: self.config.requests_per_minute,
                    current: minute_usage as u32,
                })
            } else {
                Err(RateLimitError::DayLimit {
                    limit: self.config.requests_per_day,
                    current: day_usage as u32,
                })
            }
        }
    }
    /// Get current usage stats for a key as `(minute, day)` counts;
    /// `(0, 0)` for keys that were never seen.
    pub async fn get_usage(&self, key: &str) -> (u32, u32) {
        let trackers = self.trackers.read().await;
        if let Some(tracker) = trackers.get(key) {
            let now = Instant::now();
            let (minute_usage, day_usage) = tracker.get_usage(now);
            (minute_usage as u32, day_usage as u32)
        } else {
            (0, 0)
        }
    }
    /// Periodic cleanup of expired trackers: drops keys with no requests in
    /// the last day. Holds the write lock for the duration of the sweep.
    pub async fn cleanup_expired_trackers(&self) {
        let mut trackers = self.trackers.write().await;
        let now = Instant::now();
        trackers.retain(|_, tracker| {
            let (minute_usage, day_usage) = tracker.get_usage(now);
            // Keep trackers that have recent activity
            minute_usage > 0 || day_usage > 0
        });
    }
}
/// Rate limiting errors
#[derive(Debug)]
pub enum RateLimitError {
    /// Per-minute window exceeded.
    MinuteLimit { limit: u32, current: u32 },
    /// Per-day window exceeded.
    DayLimit { limit: u32, current: u32 },
}
impl IntoResponse for RateLimitError {
    /// Renders the error as HTTP 429 with a JSON body describing which
    /// limit was hit and its current usage.
    fn into_response(self) -> Response {
        let (status, message, limit, current) = match self {
            RateLimitError::MinuteLimit { limit, current } => (
                StatusCode::TOO_MANY_REQUESTS,
                "Rate limit exceeded: too many requests per minute",
                limit,
                current,
            ),
            RateLimitError::DayLimit { limit, current } => (
                StatusCode::TOO_MANY_REQUESTS,
                "Rate limit exceeded: too many requests per day",
                limit,
                current,
            ),
        };
        // NOTE(review): `retry_after` is a constant "60" even for the daily
        // limit — confirm whether a day-scale hint is wanted there.
        let body = Json(json!({
            "error": message,
            "limit": limit,
            "current": current,
            "retry_after": "60"
        }));
        (status, body).into_response()
    }
}
/// Extract client IP from request
///
/// Resolution order: first parseable hop of `X-Forwarded-For`, then
/// `X-Real-IP`, then the connection's remote address, then `127.0.0.1`.
pub fn extract_client_ip(headers: &HeaderMap, remote_addr: Option<SocketAddr>) -> IpAddr {
    // First parseable entry of X-Forwarded-For (the client as seen by proxies).
    let forwarded = headers
        .get("x-forwarded-for")
        .and_then(|value| value.to_str().ok())
        .and_then(|list| list.split(',').next())
        .and_then(|candidate| candidate.trim().parse::<IpAddr>().ok());
    // X-Real-IP, evaluated lazily only when X-Forwarded-For yielded nothing.
    let real_ip = || {
        headers
            .get("x-real-ip")
            .and_then(|value| value.to_str().ok())
            .and_then(|candidate| candidate.parse::<IpAddr>().ok())
    };
    forwarded
        .or_else(real_ip)
        .or_else(|| remote_addr.map(|addr| addr.ip()))
        // Fallback to localhost when nothing else is available.
        .unwrap_or(IpAddr::from([127, 0, 0, 1]))
}
/// Rate limiting middleware that handles both API key authenticated and public requests
///
/// Reads the shared limiter from the request extensions (installed via an
/// axum `Extension` layer) and rejects the request with a 429 response when
/// the client IP is over budget. Requests pass through untouched when no
/// limiter extension is present.
pub async fn public_rate_limit_middleware(
    request: Request,
    next: Next,
) -> Result<Response, Response> {
    let remote_addr = request.extensions().get::<SocketAddr>().copied();
    // Extract IP for rate limiting from the borrowed headers; the previous
    // version cloned the entire HeaderMap here, a needless per-request
    // allocation.
    let client_ip = extract_client_ip(request.headers(), remote_addr);
    // Apply rate limiting if rate limiter is available
    if let Some(limiter) = request.extensions().get::<Arc<RateLimiter>>().cloned() {
        if let Err(rate_limit_error) = limiter.check_request(&client_ip.to_string()).await {
            return Err(rate_limit_error.into_response());
        }
    }
    Ok(next.run(request).await)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// A single key is rejected once its per-minute budget is spent.
    #[tokio::test]
    async fn test_rate_limiter_basic() {
        let config = RateLimitConfig {
            requests_per_minute: 2,
            requests_per_day: 10,
        };
        let limiter = RateLimiter::new(config);
        // First two requests should succeed
        assert!(limiter.check_request("test_ip").await.is_ok());
        assert!(limiter.check_request("test_ip").await.is_ok());
        // Third request should fail (minute limit)
        assert!(limiter.check_request("test_ip").await.is_err());
    }
    /// Budgets are tracked per key, so one client cannot consume another's.
    #[tokio::test]
    async fn test_rate_limiter_different_ips() {
        let config = RateLimitConfig {
            requests_per_minute: 1,
            requests_per_day: 10,
        };
        let limiter = RateLimiter::new(config);
        // Different IPs should have separate limits
        assert!(limiter.check_request("ip1").await.is_ok());
        assert!(limiter.check_request("ip2").await.is_ok());
        // Second request from same IP should fail
        assert!(limiter.check_request("ip1").await.is_err());
        assert!(limiter.check_request("ip2").await.is_err());
    }
    /// X-Forwarded-For takes priority; the socket address is the fallback.
    #[tokio::test]
    async fn test_extract_client_ip() {
        use axum::http::HeaderMap;
        use std::net::{IpAddr, SocketAddr};
        let mut headers = HeaderMap::new();
        headers.insert("x-forwarded-for", "192.168.1.1, 10.0.0.1".parse().unwrap());
        let ip = extract_client_ip(&headers, None);
        assert_eq!(ip, IpAddr::from([192, 168, 1, 1]));
        // Test with socket addr fallback
        let socket_addr = SocketAddr::from(([10, 0, 0, 1], 8080));
        let ip = extract_client_ip(&HeaderMap::new(), Some(socket_addr));
        assert_eq!(ip, IpAddr::from([10, 0, 0, 1]));
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/lib.rs | forc-plugins/forc-mcp/src/lib.rs | pub mod auth;
pub mod forc_call;
pub mod rate_limit;
use axum::{
extract::State,
response::IntoResponse,
routing::{get, post},
Router,
};
use rate_limit::{public_rate_limit_middleware, RateLimitConfig, RateLimiter};
use rmcp::{
model::*,
service::RequestContext,
transport::{
sse_server::SseServer,
stdio,
streamable_http_server::{session::local::LocalSessionManager, StreamableHttpService},
},
Error as McpError, RoleServer, ServiceExt,
};
use std::{future::Future, pin::Pin, sync::Arc};
/// Trait that all MCP tool modules must implement to be registered with ForcMcpServer
///
/// This trait provides a common interface for all tool modules, allowing them to be
/// registered and managed by the main MCP server.
pub trait McpToolModule: Send + Sync + 'static {
    /// Get the name of this tool module
    fn get_module_name(&self) -> &'static str;
    /// List all tools provided by this module
    fn list_tools(
        &self,
        request: Option<PaginatedRequestParam>,
        ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<ListToolsResult, McpError>> + Send>>;
    /// Handle a tool call for this module
    fn call_tool(
        &self,
        request: CallToolRequestParam,
        ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<CallToolResult, McpError>> + Send>>;
    /// List all resources provided by this module (optional)
    ///
    /// The default implementation reports no resources, so modules without
    /// resources need not override it.
    fn list_resources(
        &self,
        _request: Option<PaginatedRequestParam>,
        _ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<ListResourcesResult, McpError>> + Send>> {
        Box::pin(async move {
            Ok(ListResourcesResult {
                resources: vec![],
                next_cursor: None,
            })
        })
    }
    /// Read a resource from this module (optional)
    ///
    /// The default implementation fails with a resource-not-found error,
    /// which the aggregating server treats as "try the next module".
    fn read_resource(
        &self,
        _request: ReadResourceRequestParam,
        _ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<ReadResourceResult, McpError>> + Send>> {
        Box::pin(async move { Err(McpError::resource_not_found("Resource not found", None)) })
    }
    /// Get server info for this module
    fn get_info(&self) -> ServerInfo;
}
/// Aggregating MCP server that fans protocol requests out to its registered
/// tool modules.
#[derive(Clone, Default)]
pub struct ForcMcpServer {
    // Registered modules, consulted in registration order.
    tool_handlers: Vec<Arc<dyn McpToolModule>>,
}
impl ForcMcpServer {
    /// Create a new empty MCP server
    ///
    /// Tool modules must be registered explicitly using `register_module()`
    pub fn new() -> Self {
        Default::default()
    }

    /// Register a tool module with the server
    ///
    /// This allows the server to route tool calls to the appropriate module.
    pub fn register_module<T: McpToolModule + 'static>(mut self, module: T) -> Self {
        let handler: Arc<dyn McpToolModule> = Arc::new(module);
        self.tool_handlers.push(handler);
        self
    }
}
impl rmcp::ServerHandler for ForcMcpServer {
    /// Advertise server identity, capabilities, and the registered module names.
    fn get_info(&self) -> ServerInfo {
        let module_names = self
            .tool_handlers
            .iter()
            .map(|handler| handler.get_module_name().to_string())
            .collect::<Vec<String>>();
        ServerInfo {
            protocol_version: ProtocolVersion::V_2024_11_05,
            capabilities: ServerCapabilities::builder()
                .enable_tools()
                .enable_resources()
                .build(),
            server_info: Implementation {
                name: "forc-mcp-server".to_string(),
                version: env!("CARGO_PKG_VERSION").to_string(),
            },
            instructions: Some(format!(
                "Forc MCP server with modules: {}",
                module_names.join(", ")
            )),
        }
    }
    /// Aggregate the tool lists of every registered module into one response.
    async fn list_tools(
        &self,
        request: Option<PaginatedRequestParam>,
        ctx: RequestContext<RoleServer>,
    ) -> Result<ListToolsResult, McpError> {
        let mut tools = Vec::new();
        for handler in &self.tool_handlers {
            let result = handler.list_tools(request.clone(), ctx.clone()).await?;
            tools.extend(result.tools);
        }
        Ok(ListToolsResult {
            tools,
            next_cursor: None,
        })
    }
    /// Route a tool call to the first module that lists a tool of that name.
    async fn call_tool(
        &self,
        request: CallToolRequestParam,
        ctx: RequestContext<RoleServer>,
    ) -> Result<CallToolResult, McpError> {
        let tool_name = request.name.to_string();
        // Find the module that has this tool
        // NOTE(review): this re-queries every module's tool list on each call;
        // fine for a handful of modules, but a cached name->module map would
        // avoid the repeated lookups if the module count grows.
        for handler in &self.tool_handlers {
            let tools_result = handler.list_tools(None, ctx.clone()).await?;
            if tools_result.tools.iter().any(|tool| tool.name == tool_name) {
                return handler.call_tool(request, ctx).await;
            }
        }
        Err(McpError::method_not_found::<CallToolRequestMethod>())
    }
    /// Aggregate the resources of every registered module into one response.
    async fn list_resources(
        &self,
        request: Option<PaginatedRequestParam>,
        ctx: RequestContext<RoleServer>,
    ) -> Result<ListResourcesResult, McpError> {
        let mut resources = Vec::new();
        for handler in &self.tool_handlers {
            let result = handler.list_resources(request.clone(), ctx.clone()).await?;
            resources.extend(result.resources);
        }
        Ok(ListResourcesResult {
            resources,
            next_cursor: None,
        })
    }
    /// Ask each module in turn to read the resource, skipping "not found".
    async fn read_resource(
        &self,
        request: ReadResourceRequestParam,
        ctx: RequestContext<RoleServer>,
    ) -> Result<ReadResourceResult, McpError> {
        // Try each handler until one successfully reads the resource
        for handler in &self.tool_handlers {
            match handler.read_resource(request.clone(), ctx.clone()).await {
                Ok(result) => return Ok(result),
                Err(e) => {
                    // Check if this is a resource_not_found error
                    // NOTE(review): JSON-RPC error codes serialize as numbers
                    // (MCP's resource-not-found is -32002), so comparing the
                    // "code" field against the string "resource_not_found" may
                    // never match — which would make every error terminal here
                    // instead of falling through to the next module. Verify
                    // against rmcp's McpError serialization.
                    if let Ok(json) = serde_json::to_value(&e) {
                        if let Some(error) = json.get("error") {
                            if let Some(code) = error.get("code") {
                                if code == "resource_not_found" {
                                    // Continue to next handler
                                    continue;
                                }
                            }
                        }
                    }
                    return Err(e);
                }
            }
        }
        Err(McpError::resource_not_found("Resource not found", None))
    }
}
// Server runner functions
/// Serve the MCP protocol over stdin/stdout until the peer disconnects.
pub async fn run_stdio_server(server: ForcMcpServer) -> anyhow::Result<()> {
    tracing::info!("Starting MCP server in STDIO mode");
    let running = server.serve(stdio()).await?;
    tracing::info!("MCP server started successfully in STDIO mode");
    // Block until the transport closes.
    running.waiting().await?;
    Ok(())
}
/// Serve MCP over Server-Sent Events on `0.0.0.0:{port}` until Ctrl+C.
///
/// When `port` is `None`, an OS-assigned free port is used.
pub async fn run_sse_server(server: ForcMcpServer, port: Option<u16>) -> anyhow::Result<()> {
    let port = if let Some(p) = port {
        p
    } else {
        find_available_port().await?
    };
    tracing::info!("Starting MCP SSE server on port {port}");
    let bind_addr = format!("0.0.0.0:{port}").parse()?;
    let cancellation = SseServer::serve(bind_addr)
        .await?
        .with_service(move || server.clone());
    tracing::info!("MCP SSE server started successfully on port: {port}");
    tracing::info!("SSE endpoint: /sse");
    tracing::info!("Messages endpoint: /message");
    // Block until Ctrl+C, then ask the SSE server to stop.
    tokio::signal::ctrl_c().await?;
    cancellation.cancel();
    tracing::info!("MCP SSE server shut down successfully");
    Ok(())
}
/// Serve MCP over streamable HTTP on `0.0.0.0:{port}` until Ctrl+C.
///
/// Wires up: optional API-key authentication (`auth_config.enabled`),
/// separate public/authenticated rate limiters with a periodic cleanup task,
/// a `/health` probe, the `/mcp` service, and (when auth is on) `/admin/*`
/// key-management routes.
pub async fn run_http_server(
    server: ForcMcpServer,
    port: Option<u16>,
    auth_config: auth::AuthConfig,
) -> anyhow::Result<()> {
    let port = match port {
        Some(p) => p,
        None => find_available_port().await?,
    };
    tracing::info!("Starting MCP HTTP streamable server on port {port}");
    let bind_addr = format!("0.0.0.0:{port}");
    // Auth manager exists only when authentication is enabled.
    let auth_manager = if auth_config.enabled {
        Some(Arc::new(auth::AuthManager::new(auth_config.clone()).await?))
    } else {
        None
    };
    let service = StreamableHttpService::new(
        move || Ok(server.clone()),
        LocalSessionManager::default().into(),
        Default::default(),
    );
    // Create separate rate limiters for public and authenticated requests
    let public_rate_limiter = Arc::new(RateLimiter::new(RateLimitConfig {
        requests_per_minute: auth_config.public_rate_limit_per_minute,
        requests_per_day: auth_config.public_rate_limit_per_day,
    }));
    let api_key_rate_limiter = Arc::new(RateLimiter::new(RateLimitConfig {
        requests_per_minute: auth_config.api_key_rate_limit_per_minute,
        requests_per_day: auth_config.api_key_rate_limit_per_day,
    }));
    // Spawn cleanup task for rate limiters
    // (detached; it lives until process exit, pruning idle trackers).
    let public_limiter_cleanup = public_rate_limiter.clone();
    let api_key_limiter_cleanup = api_key_rate_limiter.clone();
    tokio::spawn(async move {
        let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(300)); // 5 minutes
        loop {
            interval.tick().await;
            public_limiter_cleanup.cleanup_expired_trackers().await;
            api_key_limiter_cleanup.cleanup_expired_trackers().await;
        }
    });
    let mut router = Router::new().route("/health", get(|| async { "OK" }));
    tracing::info!("MCP endpoint: /mcp");
    if let Some(auth_mgr) = &auth_manager {
        tracing::info!("Authentication enabled");
        // Single /mcp endpoint with unified auth and rate limiting
        // NOTE(review): `.layer` wraps every route added so far, including
        // `/health` — confirm the auth middleware is meant to run there too.
        router = router
            .nest_service("/mcp", service.clone())
            .layer(axum::middleware::from_fn_with_state(
                auth_mgr.clone(),
                unified_api_key_auth_middleware,
            ))
            .layer(axum::Extension(public_rate_limiter.clone()))
            .layer(axum::Extension(api_key_rate_limiter.clone()));
        if !auth_config.api_keys_only {
            tracing::info!(
                "Public rate limits: {}/min, {}/day",
                auth_config.public_rate_limit_per_minute,
                auth_config.public_rate_limit_per_day
            );
        }
        tracing::info!(
            "API key rate limits: {}/min, {}/day",
            auth_config.api_key_rate_limit_per_minute,
            auth_config.api_key_rate_limit_per_day
        );
        // Admin routes with authentication
        let admin_routes = Router::new()
            .route(
                "/api-keys",
                post(auth::create_api_key).get(auth::list_api_keys),
            )
            .route(
                "/api-keys/{key_id}",
                get(auth::get_api_key).delete(auth::delete_api_key),
            )
            .route("/import", post(auth::import_api_keys))
            .layer(axum::middleware::from_fn_with_state(
                auth_mgr.clone(),
                admin_auth_middleware,
            ))
            .with_state(auth_mgr.clone());
        router = router.nest("/admin", admin_routes);
        tracing::info!("Admin endpoint: /admin/* (requires X-API-Key: <admin-api-key> header)");
    } else {
        // No auth, just basic service with public rate limiting
        router = router
            .nest_service("/mcp", service)
            .layer(axum::middleware::from_fn(public_rate_limit_middleware))
            .layer(axum::Extension(public_rate_limiter.clone()));
        tracing::info!("Authentication disabled - public endpoint only");
        tracing::info!(
            "Public rate limits: {}/min, {}/day",
            auth_config.public_rate_limit_per_minute,
            auth_config.public_rate_limit_per_day
        );
    }
    let tcp_listener = tokio::net::TcpListener::bind(bind_addr).await?;
    tracing::info!("MCP HTTP streamable server started successfully on port: {port}");
    // Run the server with proper connection info for IP extraction
    axum::serve(
        tcp_listener,
        router.into_make_service_with_connect_info::<std::net::SocketAddr>(),
    )
    .with_graceful_shutdown(async {
        tokio::signal::ctrl_c()
            .await
            .expect("Failed to install CTRL+C signal handler");
        tracing::info!("MCP HTTP streamable server shutting down...");
    })
    .await
    .map_err(|e| anyhow::anyhow!("Failed to serve HTTP streamable server: {}", e))?;
    Ok(())
}
/// Unified authentication middleware for /mcp endpoint
/// Handles both public and authenticated requests based on auth_only setting
async fn unified_api_key_auth_middleware(
State(auth_manager): axum::extract::State<Arc<auth::AuthManager>>,
req: axum::extract::Request,
next: axum::middleware::Next,
) -> Result<axum::response::Response, axum::response::Response> {
let headers = req.headers();
let api_key = auth::extract_api_key(headers);
// Check if api_keys_only mode is enabled (get from config through auth_manager)
let api_keys_only = auth_manager.config.api_keys_only;
match (api_key, api_keys_only) {
// API key provided - validate and track usage
(Some(key), _) => {
match auth_manager.check_and_track_usage(&key).await {
Ok(Some(_)) => {
// Valid API key with rate limit check passed
Ok(next.run(req).await)
}
Ok(None) => Err((
axum::http::StatusCode::UNAUTHORIZED,
axum::Json(auth::ErrorResponse {
error: "Invalid API key".to_string(),
}),
)
.into_response()),
Err(e) => {
// Check if it's a rate limit error
let error_msg = e.to_string();
if error_msg.contains("Rate limit exceeded") {
Err((
axum::http::StatusCode::TOO_MANY_REQUESTS,
axum::Json(auth::ErrorResponse { error: error_msg }),
)
.into_response())
} else {
Err((
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
axum::Json(auth::ErrorResponse {
error: "Internal server error".to_string(),
}),
)
.into_response())
}
}
}
}
// No API key, but api_keys_only mode - reject
(None, true) => Err((
axum::http::StatusCode::UNAUTHORIZED,
axum::Json(auth::ErrorResponse {
error: "X-API-Key header required".to_string(),
}),
)
.into_response()),
// No API key, public access allowed - proceed with public rate limits
(None, false) => Ok(next.run(req).await),
}
}
/// Admin authentication middleware
///
/// Requires an `X-API-Key` header that resolves to an admin-role key:
/// missing key -> 401, non-admin key -> 403, unknown key -> 401,
/// storage failure -> 500.
async fn admin_auth_middleware(
    State(auth_manager): axum::extract::State<Arc<auth::AuthManager>>,
    req: axum::extract::Request,
    next: axum::middleware::Next,
) -> Result<axum::response::Response, axum::response::Response> {
    // Guard clause: no key at all is an immediate 401.
    let Some(key) = auth::extract_api_key(req.headers()) else {
        return Err((
            axum::http::StatusCode::UNAUTHORIZED,
            axum::Json(auth::ErrorResponse {
                error: "X-API-Key header required".to_string(),
            }),
        )
            .into_response());
    };
    match auth_manager.check_and_track_usage(&key).await {
        Ok(Some(api_key)) if api_key.role == auth::Role::Admin => Ok(next.run(req).await),
        Ok(Some(_)) => Err((
            axum::http::StatusCode::FORBIDDEN,
            axum::Json(auth::ErrorResponse {
                error: "Admin access required".to_string(),
            }),
        )
            .into_response()),
        Ok(None) => Err((
            axum::http::StatusCode::UNAUTHORIZED,
            axum::Json(auth::ErrorResponse {
                error: "Invalid API key".to_string(),
            }),
        )
            .into_response()),
        Err(e) => Err((
            axum::http::StatusCode::INTERNAL_SERVER_ERROR,
            axum::Json(auth::ErrorResponse {
                error: format!("Internal server error: {e}"),
            }),
        )
            .into_response()),
    }
}
/// Ask the OS for a currently-free TCP port by binding to port 0.
async fn find_available_port() -> anyhow::Result<u16> {
    let port = tokio::net::TcpListener::bind("127.0.0.1:0")
        .await?
        .local_addr()?
        .port();
    Ok(port)
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use anyhow::{anyhow, Result};
    use forc_call::ForcCallTools;
    use rmcp::model::{CallToolRequestParam, ClientCapabilities, ClientInfo, Implementation};
    use rmcp::transport::{sse_client::SseClientTransport, StreamableHttpClientTransport};
    use rmcp::{ServerHandler, ServiceExt};
    use tokio::time::{sleep, Duration};
    /// Unified test utility for running e2e tests against MCP servers
    ///
    /// Spawns a real server task on a free port and connects an MCP client
    /// to it; the server task is aborted when the client is dropped.
    pub struct ForcMcpClient {
        // The connected MCP client driving the transport under test.
        mcp_client: rmcp::service::RunningService<rmcp::service::RoleClient, ClientInfo>,
        // Background server task, aborted in Drop.
        server_handle: tokio::task::JoinHandle<Result<()>>,
    }
    impl ForcMcpClient {
        /// Create a new MCP SSE test client
        pub async fn sse_client() -> Result<Self> {
            let port = find_available_port().await?;
            // Start the SSE server in a background task with the specific port
            let server = ForcMcpServer::new().register_module(ForcCallTools::new());
            let server_handle =
                tokio::spawn(async move { run_sse_server(server, Some(port)).await });
            // Wait a bit for the server to start
            sleep(Duration::from_millis(100)).await;
            // Check if server is still running
            if server_handle.is_finished() {
                return Err(anyhow!("Server task completed before test could run"));
            }
            let base_url = format!("http://127.0.0.1:{port}");
            // Create MCP client using SSE transport
            let transport = SseClientTransport::start(format!("{base_url}/sse")).await?;
            let client_info = ClientInfo {
                protocol_version: Default::default(),
                capabilities: ClientCapabilities::default(),
                client_info: Implementation {
                    name: "forc-mcp-sse-client".to_string(),
                    version: "0.1.0".to_string(),
                },
            };
            let mcp_client = client_info.serve(transport).await?;
            let test_client = ForcMcpClient {
                mcp_client,
                server_handle,
            };
            Ok(test_client)
        }
        /// Create a new MCP HTTP streamable test client
        pub async fn http_stream_client() -> Result<Self> {
            let port = find_available_port().await?;
            // Start the HTTP server in a background task with the specific port
            let server = ForcMcpServer::new().register_module(ForcCallTools::new());
            let server_handle = tokio::spawn(async move {
                run_http_server(server, Some(port), auth::AuthConfig::default()).await
            });
            // Wait a bit for the server to start
            sleep(Duration::from_millis(100)).await;
            // Check if server is still running
            if server_handle.is_finished() {
                return Err(anyhow!("Server task completed before test could run"));
            }
            let base_url = format!("http://127.0.0.1:{port}/mcp");
            // Create MCP client using HTTP streamable transport
            let transport = StreamableHttpClientTransport::from_uri(base_url);
            let client_info = ClientInfo {
                protocol_version: Default::default(),
                capabilities: ClientCapabilities::default(),
                client_info: Implementation {
                    name: "forc-mcp-http-client".to_string(),
                    version: "0.1.0".to_string(),
                },
            };
            let mcp_client = client_info.serve(transport).await?;
            let test_client = ForcMcpClient {
                mcp_client,
                server_handle,
            };
            Ok(test_client)
        }
        /// Names of all tools the connected server advertises.
        pub async fn list_tools(&mut self) -> Result<Vec<String>> {
            let tools = self.mcp_client.list_tools(Default::default()).await?;
            Ok(tools
                .tools
                .into_iter()
                .map(|tool| tool.name.to_string())
                .collect())
        }
        /// Invoke `tool_name` on the connected server with JSON arguments.
        pub async fn call_tool(
            &mut self,
            tool_name: &str,
            arguments: std::collections::HashMap<String, serde_json::Value>,
        ) -> Result<rmcp::model::CallToolResult> {
            let param = CallToolRequestParam {
                name: tool_name.to_string().into(),
                arguments: Some(arguments.into_iter().collect()),
            };
            let result = self.mcp_client.call_tool(param).await?;
            Ok(result)
        }
        /// URIs of all resources the connected server advertises.
        pub async fn list_resources(&mut self) -> Result<Vec<String>> {
            let resources = self.mcp_client.list_resources(Default::default()).await?;
            Ok(resources
                .resources
                .into_iter()
                .map(|resource| resource.raw.uri)
                .collect())
        }
        /// Read a resource and return its first text payload.
        pub async fn read_resource(&mut self, uri: &str) -> Result<String> {
            let param = ReadResourceRequestParam {
                uri: uri.to_string(),
            };
            let result = self.mcp_client.read_resource(param).await?;
            if let Some(content) = result.contents.first() {
                // Extract text from ResourceContents
                // (round-trips through JSON to avoid matching on the enum shape)
                let json_value = serde_json::to_value(content)?;
                if let Some(text) = json_value.get("text") {
                    if let Some(text_str) = text.as_str() {
                        return Ok(text_str.to_string());
                    }
                }
            }
            Err(anyhow!("No text content found in resource"))
        }
    }
    impl Drop for ForcMcpClient {
        // Tear down the background server task when the test client goes away.
        fn drop(&mut self) {
            self.server_handle.abort();
        }
    }
    /// `get_info` advertises tools/resources and lists registered modules.
    #[tokio::test]
    async fn test_server_info() -> Result<()> {
        let server = ForcMcpServer::new().register_module(ForcCallTools::new());
        let info = server.get_info();
        assert_eq!(info.server_info.name, "forc-mcp-server");
        assert!(info.capabilities.tools.is_some());
        assert!(info.capabilities.resources.is_some());
        assert!(info.instructions.is_some());
        assert!(info.instructions.unwrap().contains("forc-call-tools"));
        Ok(())
    }
    /// Registering a module shows up in the instructions string.
    #[test]
    fn test_server_creation() {
        let server = ForcMcpServer::new().register_module(ForcCallTools::new());
        assert_eq!(server.get_info().server_info.name, "forc-mcp-server");
        assert_eq!(
            server.get_info().instructions.unwrap(),
            "Forc MCP server with modules: forc-call-tools"
        );
    }
    /// SSE and streamable-HTTP transports expose the same tool set.
    #[tokio::test]
    async fn test_unified_client_both_transports() -> Result<()> {
        // Test SSE client
        let mut sse_client = ForcMcpClient::sse_client().await?;
        let sse_tools = sse_client.list_tools().await?;
        // Test HTTP streamable client
        let mut http_client = ForcMcpClient::http_stream_client().await?;
        let http_tools = http_client.list_tools().await?;
        // Both clients should expose the same tools
        assert_eq!(sse_tools.len(), http_tools.len());
        assert!(sse_tools.contains(&"list_contract_functions".to_string()));
        assert!(http_tools.contains(&"list_contract_functions".to_string()));
        assert!(sse_tools.contains(&"call_contract".to_string()));
        assert!(http_tools.contains(&"call_contract".to_string()));
        assert!(sse_tools.contains(&"transfer_assets".to_string()));
        assert!(http_tools.contains(&"transfer_assets".to_string()));
        assert!(sse_tools.contains(&"get_execution_trace".to_string()));
        assert!(http_tools.contains(&"get_execution_trace".to_string()));
        Ok(())
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/main.rs | forc-plugins/forc-mcp/src/main.rs | use anyhow::Result;
use clap::{Parser, Subcommand};
use forc_mcp::{
auth::AuthConfig, forc_call::ForcCallTools, run_http_server, run_sse_server, run_stdio_server,
ForcMcpServer,
};
/// Model Context Protocol (MCP) server for Forc
// Clap derives the CLI from this struct; the `about`/`version` attributes
// below take precedence over the doc comment in `--help` output.
#[derive(Parser)]
#[command(name = "forc-mcp")]
#[command(about = "MCP server plugin for Forc")]
#[command(version = env!("CARGO_PKG_VERSION"))]
struct Cli {
    // Optional transport subcommand; `None` falls back to the default
    // (`Stdio`) in `main`.
    #[command(subcommand)]
    command: Option<Commands>,
}
// Transport selection for the MCP server.
// NOTE: the `///` comments on variants and fields below are rendered by clap
// as user-visible help text, so review annotations here deliberately use `//`
// to avoid changing the CLI output.
#[derive(Subcommand, Default)]
enum Commands {
    /// Run MCP server in STDIO mode
    #[default]
    Stdio,
    /// Run MCP server in SSE mode
    Sse {
        /// Port to bind the SSE server to
        #[arg(short, long, default_value = "3001")]
        port: u16,
    },
    /// Run MCP server in HTTP streamable mode
    Http {
        /// Port to bind the HTTP server to
        #[arg(short, long, default_value = "3001")]
        port: u16,
        /// Enable authentication mode with API keys
        #[arg(long)]
        auth: bool,
        /// Require API key for all requests (no public access)
        #[arg(long, requires = "auth")]
        api_keys_only: bool,
        /// Path to persist API keys (default: in-memory only)
        #[arg(long, value_name = "FILE")]
        api_keys_file: Option<String>,
        /// Pre-configured admin API key (if not provided, one will be generated)
        #[arg(long, value_name = "KEY", requires = "auth")]
        admin_api_key: Option<String>,
        /// Public rate limit per minute (unauthenticated requests)
        #[arg(long, default_value = "10")]
        public_rate_limit_per_minute: u32,
        /// Public rate limit per day (unauthenticated requests)
        #[arg(long, default_value = "1000")]
        public_rate_limit_per_day: u32,
        /// API key rate limit per minute
        #[arg(long, default_value = "120")]
        api_key_rate_limit_per_minute: u32,
        /// API key rate limit per day
        #[arg(long, default_value = "10000")]
        api_key_rate_limit_per_day: u32,
    },
}
// Entry point: set up logging, build the MCP server, and dispatch to the
// selected transport (stdio by default).
#[tokio::main]
async fn main() -> Result<()> {
    // Log this crate at info level unless RUST_LOG overrides it.
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::from_default_env()
                .add_directive("forc_mcp=info".parse().unwrap()),
        )
        .init();
    // Create the MCP server and register tool modules
    let mcp_server = ForcMcpServer::new().register_module(ForcCallTools::new());
    let cli = Cli::parse();
    // No subcommand behaves like `forc-mcp stdio` (Commands::default()).
    match cli.command.unwrap_or_default() {
        Commands::Stdio => run_stdio_server(mcp_server).await,
        Commands::Sse { port } => run_sse_server(mcp_server, Some(port)).await,
        Commands::Http {
            port,
            auth,
            api_keys_only,
            api_keys_file,
            admin_api_key,
            public_rate_limit_per_minute,
            public_rate_limit_per_day,
            api_key_rate_limit_per_minute,
            api_key_rate_limit_per_day,
        } => {
            // Bundle the auth/rate-limit flags into the config the HTTP
            // server consumes.
            let auth_config = AuthConfig {
                enabled: auth,
                api_keys_only,
                api_keys_file,
                admin_api_key,
                public_rate_limit_per_minute,
                public_rate_limit_per_day,
                api_key_rate_limit_per_minute,
                api_key_rate_limit_per_day,
            };
            run_http_server(mcp_server, Some(port), auth_config).await
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/auth/storage.rs | forc-plugins/forc-mcp/src/auth/storage.rs | use super::ApiKey;
use anyhow::Result;
use async_trait::async_trait;
use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;
use tokio::sync::RwLock;
/// Storage trait for API keys
///
/// NOTE(review): despite the `hash` parameter name, the implementations in
/// this module key records by the raw API key string (`ApiKey::id`); nothing
/// is hashed — confirm whether hashing was intended before relying on it.
#[async_trait]
pub trait ApiKeyStorage: Send + Sync {
    /// Insert a new key record.
    async fn create(&self, key: ApiKey) -> Result<()>;
    /// Fetch a key record by its identifier, if present.
    async fn get(&self, hash: &str) -> Result<Option<ApiKey>>;
    /// Replace the record stored under `hash`.
    async fn update(&self, hash: &str, key: ApiKey) -> Result<()>;
    /// Remove the record stored under `hash` (no-op when absent).
    async fn delete(&self, hash: &str) -> Result<()>;
    /// Return every stored key record.
    async fn list(&self) -> Result<Vec<ApiKey>>;
    /// Create multiple API keys in batch with optional clearing
    async fn create_batch(&self, keys: Vec<ApiKey>, clear_existing: bool) -> Result<()>;
}
/// In-memory storage implementation
///
/// Records live only for the process lifetime; the map is keyed by
/// `ApiKey::id`.
pub struct InMemoryStorage {
    keys: Arc<RwLock<HashMap<String, ApiKey>>>,
}
impl Default for InMemoryStorage {
fn default() -> Self {
Self::new()
}
}
impl InMemoryStorage {
    /// Construct an empty in-memory key store.
    pub fn new() -> Self {
        let keys = HashMap::new();
        Self {
            keys: Arc::new(RwLock::new(keys)),
        }
    }
}
#[async_trait]
impl ApiKeyStorage for InMemoryStorage {
    /// Insert the record under its own `id`.
    async fn create(&self, key: ApiKey) -> Result<()> {
        let mut keys = self.keys.write().await;
        keys.insert(key.id.clone(), key);
        Ok(())
    }
    /// O(1) lookup by map key (which is the `ApiKey::id`, despite the
    /// parameter being named `hash`).
    async fn get(&self, hash: &str) -> Result<Option<ApiKey>> {
        let keys = self.keys.read().await;
        Ok(keys.get(hash).cloned())
    }
    /// Upsert the record under `hash` (expected to equal `key.id`).
    async fn update(&self, hash: &str, key: ApiKey) -> Result<()> {
        let mut keys = self.keys.write().await;
        keys.insert(hash.to_string(), key);
        Ok(())
    }
    /// Remove the record; silently succeeds when absent.
    async fn delete(&self, hash: &str) -> Result<()> {
        let mut keys = self.keys.write().await;
        keys.remove(hash);
        Ok(())
    }
    /// Snapshot of all records (cloned; arbitrary map order).
    async fn list(&self) -> Result<Vec<ApiKey>> {
        let keys = self.keys.read().await;
        Ok(keys.values().cloned().collect())
    }
    /// Bulk insert, optionally wiping existing records first (used by import).
    async fn create_batch(&self, keys: Vec<ApiKey>, clear_existing: bool) -> Result<()> {
        let mut storage = self.keys.write().await;
        if clear_existing {
            storage.clear();
        }
        for key in keys {
            storage.insert(key.id.clone(), key);
        }
        Ok(())
    }
}
/// File-based storage implementation
///
/// Mirrors the in-memory map (keyed by `ApiKey::id`) and rewrites the whole
/// JSON file after every mutation.
pub struct FileStorage {
    // Path of the JSON file backing the store.
    file_path: String,
    // In-memory working copy; the file is the persisted snapshot.
    keys: Arc<RwLock<HashMap<String, ApiKey>>>,
}
impl FileStorage {
    /// Open a file-backed store, loading any keys already present on disk.
    ///
    /// A missing file is treated as an empty store; a malformed file is an
    /// error.
    pub async fn new(file_path: &str) -> Result<Self> {
        let keys = if Path::new(file_path).exists() {
            let contents = tokio::fs::read_to_string(file_path).await?;
            let keys: Vec<ApiKey> = serde_json::from_str(&contents)?;
            // Index by id to match the in-memory representation.
            keys.into_iter().map(|k| (k.id.clone(), k)).collect()
        } else {
            HashMap::new()
        };
        Ok(Self {
            file_path: file_path.to_string(),
            keys: Arc::new(RwLock::new(keys)),
        })
    }
    /// Persist the full key map to disk as pretty-printed JSON.
    ///
    /// NOTE(review): the write is not atomic (no temp-file-and-rename), so a
    /// crash mid-write can leave a truncated file; keys are also stored in
    /// plaintext — confirm both are acceptable for this tool's threat model.
    async fn save(&self) -> Result<()> {
        let keys = self.keys.read().await;
        let keys_vec: Vec<&ApiKey> = keys.values().collect();
        let json = serde_json::to_string_pretty(&keys_vec)?;
        tokio::fs::write(&self.file_path, json).await?;
        Ok(())
    }
}
#[async_trait]
impl ApiKeyStorage for FileStorage {
    /// Insert the record under its own `id`, then persist to disk.
    async fn create(&self, key: ApiKey) -> Result<()> {
        {
            // Scope the write lock so it is released before the async save.
            let mut keys = self.keys.write().await;
            keys.insert(key.id.clone(), key);
        }
        self.save().await
    }
    /// Look the record up by map key.
    ///
    /// The map is keyed by `ApiKey::id` (see `new`/`create`/`create_batch`),
    /// so an O(1) `HashMap::get` replaces the previous O(n) linear scan over
    /// `values()` — and matches `InMemoryStorage::get` exactly.
    async fn get(&self, hash: &str) -> Result<Option<ApiKey>> {
        let keys = self.keys.read().await;
        Ok(keys.get(hash).cloned())
    }
    /// Upsert the record under `hash`, then persist to disk.
    async fn update(&self, hash: &str, key: ApiKey) -> Result<()> {
        {
            let mut keys = self.keys.write().await;
            keys.insert(hash.to_string(), key);
        }
        self.save().await
    }
    /// Remove the record (no-op when absent), then persist to disk.
    async fn delete(&self, hash: &str) -> Result<()> {
        {
            let mut keys = self.keys.write().await;
            keys.remove(hash);
        }
        self.save().await
    }
    /// Snapshot of all records (cloned; arbitrary map order).
    async fn list(&self) -> Result<Vec<ApiKey>> {
        let keys = self.keys.read().await;
        Ok(keys.values().cloned().collect())
    }
    /// Bulk insert, optionally wiping existing records first, then persist.
    async fn create_batch(&self, keys: Vec<ApiKey>, clear_existing: bool) -> Result<()> {
        {
            let mut storage = self.keys.write().await;
            if clear_existing {
                storage.clear();
            }
            for key in keys {
                storage.insert(key.id.clone(), key);
            }
        }
        self.save().await
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/auth/service.rs | forc-plugins/forc-mcp/src/auth/service.rs | use super::{
generate_api_key, ApiKey, ApiKeyStorage, AuthConfig, AuthError, CreateApiKeyResponse,
ErrorResponse, FileStorage, ImportRequest, InMemoryStorage, Role,
};
use anyhow::Result;
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::IntoResponse,
Json,
};
use chrono::Utc;
use std::{collections::HashMap, sync::Arc};
/// Auth manager that handles API keys
///
/// Owns the pluggable key store plus the resolved admin key.
/// NOTE(review): `admin_api_key` holds the raw secret in memory (and it is
/// logged at startup) — confirm that fits this tool's threat model.
pub struct AuthManager {
    // Server-wide auth configuration (rate limits, storage path, etc.).
    pub config: AuthConfig,
    // Pluggable key store (file-backed or in-memory).
    pub storage: Arc<dyn ApiKeyStorage>,
    // The raw admin key, either user-provided or generated at startup.
    pub admin_api_key: String,
}
impl AuthManager {
    /// Build an auth manager, choosing file-backed or in-memory key storage
    /// and registering the (provided or generated) admin API key.
    pub async fn new(config: AuthConfig) -> Result<Self> {
        let storage: Arc<dyn ApiKeyStorage> = if let Some(file_path) = &config.api_keys_file {
            Arc::new(FileStorage::new(file_path).await?)
        } else {
            Arc::new(InMemoryStorage::new())
        };
        // Use provided admin API key or generate one
        let admin_api_key = config
            .admin_api_key
            .clone()
            .unwrap_or_else(generate_api_key);
        let admin_key_info = ApiKey {
            id: admin_api_key.clone(),
            role: Role::Admin,
            requests_today: 0,
            requests_this_minute: 0,
            last_request_minute: None,
            last_request_day: None,
        };
        storage.create(admin_key_info).await?;
        tracing::info!("Auth Manager initialized");
        // NOTE(review): this logs the raw admin key; acceptable for a local
        // dev tool, worth revisiting for any shared deployment.
        tracing::info!("Admin API Key: {}", admin_api_key);
        Ok(Self {
            config,
            storage,
            admin_api_key,
        })
    }
    /// Check and update API key usage, enforcing rate limits.
    ///
    /// Returns `Ok(None)` for an unknown key, `Ok(Some(key))` once the
    /// counters have been incremented and persisted, or a rate-limit error
    /// when a `Role::User` key is over its per-minute or per-day budget.
    /// Admin keys are never rate limited.
    pub async fn check_and_track_usage(&self, key: &str) -> Result<Option<ApiKey>, AuthError> {
        let api_key = self
            .storage
            .get(key)
            .await
            .map_err(|e| AuthError::StorageError(e.to_string()))?;
        if let Some(mut key_info) = api_key {
            let now = Utc::now();
            // A minute window is stale 60s after it started; the day window is
            // stale once the calendar date changes.
            let needs_minute_reset = key_info
                .last_request_minute
                .map(|last| (now - last).num_seconds() >= 60)
                .unwrap_or(true);
            let needs_day_reset = key_info
                .last_request_day
                .map(|last| last.date_naive() != now.date_naive())
                .unwrap_or(true);
            // Reset counters and record the new window start when needed.
            // The window-start timestamps are deliberately NOT refreshed on
            // every request: the previous version did so unconditionally,
            // which turned the fixed 60-second window into "requests since
            // the last 60s idle gap" — a steady low-rate client would
            // accumulate into the per-minute limit and be blocked despite
            // staying well under budget.
            if needs_minute_reset {
                key_info.requests_this_minute = 0;
                key_info.last_request_minute = Some(now);
            }
            if needs_day_reset {
                key_info.requests_today = 0;
                key_info.last_request_day = Some(now);
            }
            // Check rate limits based on role, BEFORE counting this request
            // (rejected requests are not persisted and do not consume budget).
            match key_info.role {
                // No rate limits for admin
                Role::Admin => {}
                // Use default rate limits from config
                Role::User => {
                    if key_info.requests_this_minute >= self.config.api_key_rate_limit_per_minute {
                        return Err(AuthError::RateLimitExceededPerMinute);
                    }
                    if key_info.requests_today >= self.config.api_key_rate_limit_per_day {
                        return Err(AuthError::RateLimitExceededPerDay);
                    }
                }
            }
            // Count this request and persist the updated counters.
            key_info.requests_this_minute += 1;
            key_info.requests_today += 1;
            self.storage
                .update(&key_info.id, key_info.clone())
                .await
                .map_err(|e| AuthError::StorageError(e.to_string()))?;
            Ok(Some(key_info))
        } else {
            Ok(None)
        }
    }
    /// Get usage statistics for an API key as `(minute_count, day_count)`,
    /// reporting zero for windows that have already expired. Unknown keys
    /// report `(0, 0)`.
    pub async fn get_usage_stats(&self, key_id: &str) -> Result<(u32, u32)> {
        if let Some(key_info) = self.storage.get(key_id).await? {
            let now = Utc::now();
            // Mirror the expiry rules used by check_and_track_usage.
            let minute_expired = key_info
                .last_request_minute
                .map(|last| (now - last).num_seconds() >= 60)
                .unwrap_or(true);
            let day_expired = key_info
                .last_request_day
                .map(|last| last.date_naive() != now.date_naive())
                .unwrap_or(true);
            let minute_count = if minute_expired {
                0
            } else {
                key_info.requests_this_minute
            };
            let day_count = if day_expired {
                0
            } else {
                key_info.requests_today
            };
            Ok((minute_count, day_count))
        } else {
            Ok((0, 0))
        }
    }
}
/// Create a new API key
///
/// Generates a fresh `User`-role key with zeroed usage counters, stores it,
/// and returns the plaintext key with `201 Created`. Storage failures map to
/// `500 Internal Server Error`.
pub async fn create_api_key(
    State(auth_manager): State<Arc<AuthManager>>,
) -> Result<impl IntoResponse, impl IntoResponse> {
    let new_key = generate_api_key();
    let record = ApiKey {
        id: new_key.clone(),
        role: Role::User,
        requests_today: 0,
        requests_this_minute: 0,
        last_request_minute: None,
        last_request_day: None,
    };
    // Persist first; only report the key back to the caller on success.
    if let Err(e) = auth_manager.storage.create(record).await {
        return Err((
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: format!("Failed to create API key: {e}"),
            }),
        ));
    }
    let response = CreateApiKeyResponse { api_key: new_key };
    Ok((StatusCode::CREATED, Json(response)))
}
/// List API keys with optional admin inclusion and full details
///
/// Admin keys are omitted unless the query string carries
/// `include_admin=true` (value compared case-insensitively).
pub async fn list_api_keys(
    State(auth_manager): State<Arc<AuthManager>>,
    Query(params): Query<HashMap<String, String>>,
) -> Result<impl IntoResponse, impl IntoResponse> {
    let include_admin = matches!(
        params.get("include_admin"),
        Some(v) if v.to_lowercase() == "true"
    );
    match auth_manager.storage.list().await {
        Err(e) => Err((
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: format!("Failed to list API keys: {e}"),
            }),
        )),
        Ok(all_keys) => {
            // Return full ApiKey objects, hiding admin keys unless requested.
            let visible: Vec<ApiKey> = all_keys
                .into_iter()
                .filter(|k| include_admin || k.role != Role::Admin)
                .collect();
            Ok(Json(serde_json::json!({ "api_keys": visible })))
        }
    }
}
/// Get a specific API key
///
/// Admin keys are never exposed through this endpoint: both unknown keys and
/// admin keys answer `404 Not Found` with the same message, so callers cannot
/// probe for the admin key's existence.
pub async fn get_api_key(
    State(auth_manager): State<Arc<AuthManager>>,
    Path(key_hash): Path<String>,
) -> Result<impl IntoResponse, impl IntoResponse> {
    match auth_manager.storage.get(&key_hash).await {
        Ok(Some(key)) if key.role != Role::Admin => Ok(Json(key)),
        // Unknown key, or an admin key (deliberately hidden) — same answer.
        Ok(_) => Err((
            StatusCode::NOT_FOUND,
            Json(ErrorResponse {
                error: "API key not found".to_string(),
            }),
        )),
        Err(e) => Err((
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: format!("Failed to get API key: {e}"),
            }),
        )),
    }
}
/// Delete an API key
///
/// Admin keys are protected (`403 Forbidden`), unknown keys answer
/// `404 Not Found`, and a successful deletion answers `204 No Content`.
pub async fn delete_api_key(
    State(auth_manager): State<Arc<AuthManager>>,
    Path(key_hash): Path<String>,
) -> Result<impl IntoResponse, impl IntoResponse> {
    // Look the key up first so admin and missing keys are rejected before the
    // deletion is attempted.
    let existing = match auth_manager.storage.get(&key_hash).await {
        Ok(found) => found,
        Err(e) => {
            return Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: format!("Failed to check API key: {e}"),
                }),
            ));
        }
    };
    match existing {
        Some(key) if key.role == Role::Admin => {
            return Err((
                StatusCode::FORBIDDEN,
                Json(ErrorResponse {
                    error: "Cannot delete admin key".to_string(),
                }),
            ));
        }
        None => {
            return Err((
                StatusCode::NOT_FOUND,
                Json(ErrorResponse {
                    error: "API key not found".to_string(),
                }),
            ));
        }
        Some(_) => {} // Key exists and is not admin: proceed with deletion.
    }
    match auth_manager.storage.delete(&key_hash).await {
        Ok(_) => Ok(StatusCode::NO_CONTENT),
        Err(e) => Err((
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(ErrorResponse {
                error: format!("Failed to delete API key: {e}"),
            }),
        )),
    }
}
/// Import API keys with optional merge or replace
pub async fn import_api_keys(
State(auth_manager): State<Arc<AuthManager>>,
Json(import_request): Json<ImportRequest>,
) -> Result<impl IntoResponse, impl IntoResponse> {
// Convert import keys to full ApiKey, using provided values or defaults
let full_keys: Vec<ApiKey> = import_request
.api_keys
.into_iter()
.map(|import_key| ApiKey {
id: import_key.id,
role: import_key.role,
requests_today: import_key.requests_today,
requests_this_minute: import_key.requests_this_minute,
last_request_minute: import_key.last_request_minute,
last_request_day: import_key.last_request_day,
})
.collect();
match auth_manager
.storage
.create_batch(full_keys, import_request.clear_existing)
.await
{
Ok(_) => Ok(StatusCode::OK),
Err(e) => Err((
StatusCode::INTERNAL_SERVER_ERROR,
Json(ErrorResponse {
error: format!("Failed to import API keys: {e}"),
}),
)),
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::auth::ImportApiKey;
    /// Helper that turns a handler's `Result<impl IntoResponse, …>` into a
    /// `(StatusCode, Option<T>)` pair, deserializing the JSON body on the
    /// success path. A `204 No Content` success and all error responses yield
    /// `None` for the body.
    async fn extract_response_data<T: serde::de::DeserializeOwned>(
        result: Result<impl IntoResponse, impl IntoResponse>,
    ) -> Result<(StatusCode, Option<T>)> {
        match result {
            Ok(response) => {
                let response = response.into_response();
                let status = response.status();
                if status == StatusCode::NO_CONTENT {
                    // 204 carries no body by definition; nothing to parse.
                    Ok((status, None))
                } else {
                    let body = axum::body::to_bytes(response.into_body(), usize::MAX).await?;
                    let data: T = serde_json::from_slice(&body)?;
                    Ok((status, Some(data)))
                }
            }
            Err(response) => {
                // Error responses: only the status is inspected; the body is dropped.
                let response = response.into_response();
                let status = response.status();
                Ok((status, None))
            }
        }
    }
    /// End-to-end CRUD pass over the API-key handlers using in-memory storage:
    /// create, list (with admin filtering), get, usage-tracking update, delete.
    #[tokio::test]
    async fn test_api_key_crud_operations() -> Result<()> {
        // Create an AuthManager with in-memory storage
        let config = AuthConfig {
            enabled: true,
            api_keys_only: false,
            admin_api_key: Some("test-admin-key".to_string()),
            api_keys_file: None, // Use in-memory storage for testing
            public_rate_limit_per_minute: 100,
            public_rate_limit_per_day: 1000,
            api_key_rate_limit_per_minute: 60,
            api_key_rate_limit_per_day: 500,
        };
        let auth_manager = Arc::new(AuthManager::new(config).await?);
        let state = State(auth_manager.clone());
        // Test CREATE operation
        let create_result = create_api_key(state.clone()).await;
        let (status, create_data) =
            extract_response_data::<CreateApiKeyResponse>(create_result).await?;
        assert_eq!(status, StatusCode::CREATED);
        let api_key = create_data.unwrap().api_key;
        // Test LIST operation
        let list_result = list_api_keys(state.clone(), Query(HashMap::new())).await;
        let (status, list_data) = extract_response_data::<serde_json::Value>(list_result).await?;
        assert_eq!(status, StatusCode::OK);
        let list_response = list_data.unwrap();
        let api_keys = list_response["api_keys"].as_array().unwrap();
        // Should have exactly one key (the one we just created)
        assert!(api_keys.len() == 1);
        // Find our created key in the list
        let found = api_keys.iter().any(|k| {
            // Keys are no longer truncated in list response
            k["id"].as_str().unwrap() == api_key
        });
        assert!(found);
        // Verify admin keys are not in the list
        let admin_found = api_keys
            .iter()
            .any(|k| k["role"].as_str().unwrap() == "Admin");
        assert!(!admin_found);
        // Test GET operation
        let get_result = get_api_key(state.clone(), Path(api_key.clone())).await;
        let (status, get_data) = extract_response_data::<ApiKey>(get_result).await?;
        assert_eq!(status, StatusCode::OK);
        let key_info = get_data.unwrap();
        assert_eq!(key_info.id, api_key);
        assert_eq!(key_info.role, Role::User);
        assert_eq!(key_info.requests_today, 0);
        // Test UPDATE indirectly by tracking usage
        auth_manager.check_and_track_usage(&api_key).await?;
        // Verify usage was tracked by checking again
        let get_result_after_update = get_api_key(state.clone(), Path(api_key.clone())).await;
        let (status, get_data) = extract_response_data::<ApiKey>(get_result_after_update).await?;
        assert_eq!(status, StatusCode::OK);
        let key_info = get_data.unwrap();
        assert_eq!(key_info.requests_today, 1);
        // Test DELETE operation
        let delete_result = delete_api_key(state.clone(), Path(api_key.clone())).await;
        let (status, _) = extract_response_data::<()>(delete_result).await?;
        assert_eq!(status, StatusCode::NO_CONTENT);
        // Verify the key was deleted by trying to GET it
        let get_result_after_delete = get_api_key(state.clone(), Path(api_key)).await;
        let (status, _) = extract_response_data::<ApiKey>(get_result_after_delete).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        Ok(())
    }
    /// Edge cases for the CRUD handlers: unknown/empty/special/very-long key
    /// ids, admin-key hiding and deletion protection, list filtering with and
    /// without a user key present, and double deletion.
    #[tokio::test]
    async fn test_api_key_crud_operations_edge_cases() -> Result<()> {
        // Create an AuthManager with in-memory storage
        let config = AuthConfig {
            enabled: true,
            api_keys_only: false,
            admin_api_key: Some("test-admin-key".to_string()),
            api_keys_file: None, // Use in-memory storage for testing
            public_rate_limit_per_minute: 100,
            public_rate_limit_per_day: 1000,
            api_key_rate_limit_per_minute: 60,
            api_key_rate_limit_per_day: 500,
        };
        let auth_manager = Arc::new(AuthManager::new(config).await?);
        let state = State(auth_manager.clone());
        // Test getting non-existent key
        let get_nonexistent =
            get_api_key(state.clone(), Path("non-existent-key".to_string())).await;
        let (status, _) = extract_response_data::<ApiKey>(get_nonexistent).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test deleting non-existent key
        let delete_nonexistent =
            delete_api_key(state.clone(), Path("non-existent-key".to_string())).await;
        let (status, _) = extract_response_data::<()>(delete_nonexistent).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test getting empty string key
        let get_empty = get_api_key(state.clone(), Path("".to_string())).await;
        let (status, _) = extract_response_data::<ApiKey>(get_empty).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test deleting empty string key
        let delete_empty = delete_api_key(state.clone(), Path("".to_string())).await;
        let (status, _) = extract_response_data::<()>(delete_empty).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test getting key with special characters
        let get_special = get_api_key(state.clone(), Path("special-@#$%^&*()".to_string())).await;
        let (status, _) = extract_response_data::<ApiKey>(get_special).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test deleting key with special characters
        let delete_special =
            delete_api_key(state.clone(), Path("special-@#$%^&*()".to_string())).await;
        let (status, _) = extract_response_data::<()>(delete_special).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test getting very long key
        let long_key = "a".repeat(1000);
        let get_long = get_api_key(state.clone(), Path(long_key.clone())).await;
        let (status, _) = extract_response_data::<ApiKey>(get_long).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test deleting very long key
        let delete_long = delete_api_key(state.clone(), Path(long_key)).await;
        let (status, _) = extract_response_data::<()>(delete_long).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Admin key edge cases
        let admin_key = auth_manager.admin_api_key.clone();
        // Test that admin keys are not returned by get (should return NOT_FOUND, not the actual key)
        let get_admin = get_api_key(state.clone(), Path(admin_key.clone())).await;
        let (status, data) = extract_response_data::<ApiKey>(get_admin).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        assert!(data.is_none());
        // Test that admin keys cannot be deleted (should return FORBIDDEN)
        let delete_admin = delete_api_key(state.clone(), Path(admin_key.clone())).await;
        let (status, _) = extract_response_data::<()>(delete_admin).await?;
        assert_eq!(status, StatusCode::FORBIDDEN);
        // Test that admin key variations don't work
        let admin_key_upper = admin_key.to_uppercase();
        let get_admin_upper = get_api_key(state.clone(), Path(admin_key_upper)).await;
        let (status, _) = extract_response_data::<ApiKey>(get_admin_upper).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test LIST with empty storage (only admin key should exist but be filtered out)
        let list_empty = list_api_keys(state.clone(), Query(HashMap::new())).await;
        let (status, list_data) = extract_response_data::<serde_json::Value>(list_empty).await?;
        assert_eq!(status, StatusCode::OK);
        let list_response = list_data.unwrap();
        let api_keys = list_response["api_keys"].as_array().unwrap();
        // Should be empty since admin keys are filtered out
        assert!(api_keys.is_empty());
        // Create a user key and verify admin filtering works
        let create_result = create_api_key(state.clone()).await;
        let (status, create_data) =
            extract_response_data::<CreateApiKeyResponse>(create_result).await?;
        assert_eq!(status, StatusCode::CREATED);
        let user_key = create_data.unwrap().api_key;
        // Verify LIST contains user key but not admin key
        let list_with_user = list_api_keys(state.clone(), Query(HashMap::new())).await;
        let (status, list_data) =
            extract_response_data::<serde_json::Value>(list_with_user).await?;
        assert_eq!(status, StatusCode::OK);
        let list_response = list_data.unwrap();
        let api_keys = list_response["api_keys"].as_array().unwrap();
        // Should have exactly one key (the user key)
        assert_eq!(api_keys.len(), 1);
        // Should contain the user key
        let found_user = api_keys
            .iter()
            .any(|k| k["id"].as_str().unwrap() == user_key);
        assert!(found_user);
        // Should not contain admin key
        let found_admin = api_keys
            .iter()
            .any(|k| k["role"].as_str().unwrap() == "Admin");
        assert!(!found_admin);
        // Test double deletion
        let delete_first = delete_api_key(state.clone(), Path(user_key.clone())).await;
        let (status, _) = extract_response_data::<()>(delete_first).await?;
        assert_eq!(status, StatusCode::NO_CONTENT);
        // Try to delete the same key again
        let delete_second = delete_api_key(state.clone(), Path(user_key)).await;
        let (status, _) = extract_response_data::<()>(delete_second).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        Ok(())
    }
    /// Exercises the handlers against file-backed storage and verifies that a
    /// deletion survives a manager restart reading the same file.
    #[tokio::test]
    async fn test_api_key_persistent_storage() -> Result<()> {
        let temp_dir = tempfile::tempdir()?;
        let storage_file = temp_dir.path().join("api_keys.json");
        // Create AuthManager with file storage
        let config = AuthConfig {
            enabled: true,
            api_keys_only: false,
            admin_api_key: Some("admin-key".to_string()),
            api_keys_file: Some(storage_file.to_string_lossy().to_string()),
            public_rate_limit_per_minute: 100,
            public_rate_limit_per_day: 1000,
            api_key_rate_limit_per_minute: 60,
            api_key_rate_limit_per_day: 500,
        };
        let auth_manager = Arc::new(AuthManager::new(config).await?);
        let state = State(auth_manager.clone());
        // CREATE: Add a test key via handler
        let create_result = create_api_key(state.clone()).await;
        let (status, create_data) =
            extract_response_data::<CreateApiKeyResponse>(create_result).await?;
        assert_eq!(status, StatusCode::CREATED);
        let test_key = create_data.unwrap().api_key;
        // GET: Retrieve the key via handler
        let get_result = get_api_key(state.clone(), Path(test_key.clone())).await;
        let (status, get_data) = extract_response_data::<ApiKey>(get_result).await?;
        assert_eq!(status, StatusCode::OK);
        let key_info = get_data.unwrap();
        assert_eq!(key_info.id, test_key);
        assert_eq!(key_info.requests_today, 0);
        // LIST: Verify key appears in list via handler
        let list_result = list_api_keys(state.clone(), Query(HashMap::new())).await;
        let (status, list_data) = extract_response_data::<serde_json::Value>(list_result).await?;
        assert_eq!(status, StatusCode::OK);
        let list_response = list_data.unwrap();
        let api_keys = list_response["api_keys"].as_array().unwrap();
        assert!(api_keys
            .iter()
            .any(|k| k["id"].as_str().unwrap() == test_key));
        // UPDATE: Track usage to modify the key
        auth_manager.check_and_track_usage(&test_key).await?;
        // Verify update via handler
        let get_updated = get_api_key(state.clone(), Path(test_key.clone())).await;
        let (status, get_data) = extract_response_data::<ApiKey>(get_updated).await?;
        assert_eq!(status, StatusCode::OK);
        let updated_info = get_data.unwrap();
        assert_eq!(updated_info.requests_today, 1);
        // DELETE: Remove the key via handler
        let delete_result = delete_api_key(state.clone(), Path(test_key.clone())).await;
        let (status, _) = extract_response_data::<()>(delete_result).await?;
        assert_eq!(status, StatusCode::NO_CONTENT);
        // Verify deletion via handler
        let get_deleted = get_api_key(state.clone(), Path(test_key.clone())).await;
        let (status, _) = extract_response_data::<ApiKey>(get_deleted).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Test persistence: Create new manager with same file
        let config2 = AuthConfig {
            enabled: true,
            api_keys_only: false,
            admin_api_key: Some("admin-key".to_string()),
            api_keys_file: Some(storage_file.to_string_lossy().to_string()),
            public_rate_limit_per_minute: 100,
            public_rate_limit_per_day: 1000,
            api_key_rate_limit_per_minute: 60,
            api_key_rate_limit_per_day: 500,
        };
        let auth_manager2 = Arc::new(AuthManager::new(config2).await?);
        let state2 = State(auth_manager2.clone());
        // Verify deleted key is still gone via handler
        let get_result2 = get_api_key(state2.clone(), Path(test_key)).await;
        let (status, _) = extract_response_data::<ApiKey>(get_result2).await?;
        assert_eq!(status, StatusCode::NOT_FOUND);
        // Verify admin key persisted
        let list_result2 = list_api_keys(state2, Query(HashMap::new())).await;
        let (status, list_data2) = extract_response_data::<serde_json::Value>(list_result2).await?;
        assert_eq!(status, StatusCode::OK);
        let list_response2 = list_data2.unwrap();
        let api_keys2 = list_response2["api_keys"].as_array().unwrap();
        // Should be empty since we deleted the only user key and admin keys are filtered
        assert!(api_keys2.is_empty());
        Ok(())
    }
    /// Verifies per-minute limit enforcement, minute-window reset, admin
    /// exemption, and day-counter reset by manipulating stored timestamps
    /// directly instead of sleeping.
    #[tokio::test]
    async fn test_api_key_rate_limiting() -> Result<()> {
        // Create an AuthManager with in-memory storage
        let config = AuthConfig {
            enabled: true,
            api_keys_only: false,
            admin_api_key: Some("test-admin-key".to_string()),
            api_keys_file: None, // Use in-memory storage for testing
            public_rate_limit_per_minute: 100,
            public_rate_limit_per_day: 1000,
            api_key_rate_limit_per_minute: 60,
            api_key_rate_limit_per_day: 500,
        };
        let auth_manager = Arc::new(AuthManager::new(config).await?);
        // Create an API key (all user keys have the same rate limits from config)
        let test_api_key = generate_api_key();
        let test_key_info = ApiKey {
            id: test_api_key.clone(),
            role: Role::User,
            requests_today: 0,
            requests_this_minute: 0,
            last_request_minute: None,
            last_request_day: None,
        };
        auth_manager.storage.create(test_key_info).await?;
        // Make requests and verify that usage is tracked
        // Make requests up to the minute limit (60 by default)
        for _ in 1..=auth_manager.config.api_key_rate_limit_per_minute {
            let result = auth_manager.check_and_track_usage(&test_api_key).await?;
            assert!(result.is_some());
        }
        // Verify that rate limiting kicks in after exceeding the limit
        // Next request should fail (exceeds minute limit)
        let result = auth_manager.check_and_track_usage(&test_api_key).await;
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(matches!(err, AuthError::RateLimitExceededPerMinute));
        // Verify usage stats haven't increased
        let (minute_usage, day_usage) = auth_manager.get_usage_stats(&test_api_key).await?;
        assert_eq!(minute_usage, 60); // Should be at the limit
        assert_eq!(day_usage, 60);
        // Test the reset functionality for time windows
        // Manually update the last_request_minute to simulate time passing
        let mut key_info = auth_manager.storage.get(&test_api_key).await?.unwrap();
        key_info.last_request_minute = Some(Utc::now() - chrono::Duration::seconds(61));
        auth_manager.storage.update(&test_api_key, key_info).await?;
        // Now the request should succeed again (minute counter reset)
        let result = auth_manager.check_and_track_usage(&test_api_key).await?;
        assert!(result.is_some());
        let key_info = result.unwrap();
        assert_eq!(key_info.requests_this_minute, 1); // Reset to 1
        assert_eq!(key_info.requests_today, 61); // Day counter continues
        // Test admin key has no rate limits
        let admin_key = &auth_manager.admin_api_key;
        // Make multiple requests with admin key
        for _ in 1..=5 {
            let result = auth_manager.check_and_track_usage(admin_key).await?;
            assert!(result.is_some());
        }
        // Test day counter reset
        println!("\nTesting day counter reset...");
        let mut key_info = auth_manager.storage.get(&test_api_key).await?.unwrap();
        // Simulate next day by setting last_request_day to yesterday
        key_info.last_request_day = Some(Utc::now() - chrono::Duration::days(1));
        key_info.requests_today = 499; // Close to daily limit
        auth_manager.storage.update(&test_api_key, key_info).await?;
        // Request should succeed with reset day counter
        let result = auth_manager.check_and_track_usage(&test_api_key).await?;
        assert!(result.is_some());
        let key_info = result.unwrap();
        assert_eq!(key_info.requests_today, 1); // Day counter reset
        Ok(())
    }
#[tokio::test]
async fn test_export_import_api_keys() -> Result<()> {
// Create an AuthManager with in-memory storage
let config = AuthConfig {
enabled: true,
api_keys_only: false,
admin_api_key: Some("test-admin-key".to_string()),
api_keys_file: None,
public_rate_limit_per_minute: 100,
public_rate_limit_per_day: 1000,
api_key_rate_limit_per_minute: 60,
api_key_rate_limit_per_day: 500,
};
let auth_manager = Arc::new(AuthManager::new(config).await?);
// Create a user API key
let create_result = create_api_key(State(auth_manager.clone())).await;
let (status, create_data) =
extract_response_data::<CreateApiKeyResponse>(create_result).await?;
assert_eq!(status, StatusCode::CREATED);
let user_key = create_data.unwrap().api_key;
// Export data using list handler with full details - should have 2 keys (1 user + 1 admin)
let mut export_params = HashMap::new();
export_params.insert("include_admin".to_string(), "True".to_string());
let export_result = list_api_keys(State(auth_manager.clone()), Query(export_params)).await;
let (status, export_data) =
extract_response_data::<serde_json::Value>(export_result).await?;
assert_eq!(status, StatusCode::OK);
let export_response = export_data.unwrap();
let exported_keys: Vec<ApiKey> =
serde_json::from_value(export_response["api_keys"].clone()).unwrap();
assert_eq!(exported_keys.len(), 2);
// Add an additional API key to the existing storage
let additional_key = ApiKey {
id: "additional-key".to_string(),
role: Role::User,
requests_today: 5,
requests_this_minute: 1,
last_request_minute: Some(chrono::Utc::now()),
last_request_day: Some(chrono::Utc::now()),
};
auth_manager.storage.create(additional_key.clone()).await?;
// Verify we now have 3 keys total using list handler with full details
let mut params_before = HashMap::new();
params_before.insert("include_admin".to_string(), "true".to_string());
let export_before_import =
list_api_keys(State(auth_manager.clone()), Query(params_before)).await;
let (status, export_data) =
extract_response_data::<serde_json::Value>(export_before_import).await?;
assert_eq!(status, StatusCode::OK);
let export_response = export_data.unwrap();
let keys_before: Vec<ApiKey> =
serde_json::from_value(export_response["api_keys"].clone()).unwrap();
assert_eq!(keys_before.len(), 3);
// Import the exported data (without clear_existing, i.e. merge mode)
let import_data = ImportRequest {
api_keys: exported_keys
.into_iter()
.map(|k| ImportApiKey {
id: k.id,
role: k.role,
requests_today: k.requests_today,
requests_this_minute: k.requests_this_minute,
last_request_minute: k.last_request_minute,
last_request_day: k.last_request_day,
})
.collect(),
clear_existing: false, // Default merge mode
};
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/auth/mod.rs | forc-plugins/forc-mcp/src/auth/mod.rs | pub mod service;
pub mod storage;
pub use service::{
create_api_key, delete_api_key, get_api_key, import_api_keys, list_api_keys, AuthManager,
};
pub use storage::{ApiKeyStorage, FileStorage, InMemoryStorage};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use thiserror::Error;
/// Auth-specific error types
#[derive(Error, Debug)]
pub enum AuthError {
    /// A key exceeded its per-minute request allowance.
    #[error("Rate limit exceeded: too many requests per minute")]
    RateLimitExceededPerMinute,
    /// A key exceeded its per-day request allowance.
    #[error("Rate limit exceeded: too many requests per day")]
    RateLimitExceededPerDay,
    /// The API-key storage backend failed; payload carries the backend message.
    #[error("Storage error: {0}")]
    StorageError(String),
    /// Catch-all wrapper for any other internal failure.
    #[error("Internal error: {0}")]
    InternalError(#[from] anyhow::Error),
}
/// User role for API key
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Role {
    /// Exempt from rate limits; hidden from key listings unless explicitly requested.
    Admin,
    /// Regular key, subject to the configured API-key rate limits.
    User,
}
/// API Key information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiKey {
    /// The key string itself; also used as the storage id.
    pub id: String,
    /// Role that determines rate-limit treatment.
    pub role: Role,
    /// Requests counted in the current day window.
    pub requests_today: u32,
    /// Requests counted in the current minute window.
    pub requests_this_minute: u32,
    /// Timestamp of the last request counted toward the minute window.
    pub last_request_minute: Option<DateTime<Utc>>,
    /// Timestamp of the last request counted toward the day window.
    pub last_request_day: Option<DateTime<Utc>>,
}
/// API Key creation response
#[derive(Debug, Serialize, Deserialize)]
pub struct CreateApiKeyResponse {
    /// The plaintext key handed back to the caller.
    pub api_key: String, // Only returned on creation
}
/// Generic error response
#[derive(Debug, Serialize, Deserialize)]
pub struct ErrorResponse {
    /// Human-readable description of what went wrong.
    pub error: String,
}
/// Auth configuration
#[derive(Clone)]
pub struct AuthConfig {
    /// Master switch for the auth layer.
    pub enabled: bool,
    /// Require API key for all requests
    pub api_keys_only: bool,
    /// Path to persist API keys (default: in-memory only)
    pub api_keys_file: Option<String>,
    /// Pre-configured admin API key
    pub admin_api_key: Option<String>,
    /// Per-minute limit for requests made without an API key.
    pub public_rate_limit_per_minute: u32,
    /// Per-day limit for requests made without an API key.
    pub public_rate_limit_per_day: u32,
    /// Per-minute limit for `User`-role API keys.
    pub api_key_rate_limit_per_minute: u32,
    /// Per-day limit for `User`-role API keys.
    pub api_key_rate_limit_per_day: u32,
}
impl Default for AuthConfig {
fn default() -> Self {
Self {
enabled: false,
api_keys_only: false,
api_keys_file: None,
admin_api_key: None,
public_rate_limit_per_minute: 10,
public_rate_limit_per_day: 1_000,
api_key_rate_limit_per_minute: 120,
api_key_rate_limit_per_day: 10_000,
}
}
}
/// Generate a new API key
///
/// Draws 32 bytes of entropy, runs them through SHA-256, and hex-encodes the
/// first 16 digest bytes behind an `mcp_` prefix.
pub fn generate_api_key() -> String {
    use rand::Rng;
    // Fresh entropy for every key.
    let entropy: [u8; 32] = rand::thread_rng().gen();
    let mut hasher = Sha256::new();
    hasher.update(entropy);
    let digest = hasher.finalize();
    // Use first 16 bytes of the digest for a shorter key.
    format!("mcp_{}", hex::encode(&digest[..16]))
}
/// Extract API key from X-API-Key header
///
/// Returns `None` when the header is absent or its value is not valid UTF-8.
pub fn extract_api_key(headers: &axum::http::HeaderMap) -> Option<String> {
    let value = headers.get("X-API-Key")?;
    let key = value.to_str().ok()?;
    Some(key.to_string())
}
/// API key data for import with optional usage stats
#[derive(Debug, Serialize, Deserialize)]
pub struct ImportApiKey {
    /// The key string (storage id).
    pub id: String,
    /// Role assigned to the imported key.
    pub role: Role,
    /// Day-window usage counter; 0 when omitted from the payload.
    #[serde(default)]
    pub requests_today: u32,
    /// Minute-window usage counter; 0 when omitted from the payload.
    #[serde(default)]
    pub requests_this_minute: u32,
    /// Last request timestamp for the minute window, if known.
    pub last_request_minute: Option<DateTime<Utc>>,
    /// Last request timestamp for the day window, if known.
    pub last_request_day: Option<DateTime<Utc>>,
}
/// Data structure for importing API keys
#[derive(Debug, Serialize, Deserialize)]
pub struct ImportRequest {
    /// Keys to import.
    pub api_keys: Vec<ImportApiKey>,
    /// If true, clear all existing keys before importing. If false (default), merge with existing keys.
    #[serde(default)]
    pub clear_existing: bool,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/forc_call/resources.rs | forc-plugins/forc-mcp/src/forc_call/resources.rs | use rmcp::{model::*, service::RequestContext, Error as McpError, RoleServer};
// Resource URI constants exposed by the forc-call MCP module.
/// URI of the type-encoding reference document.
pub const TYPE_ENCODING_REFERENCE_URI: &str = "forc-call://type-encoding-reference";
/// URI of the common-commands examples document.
pub const COMMON_COMMANDS_URI: &str = "forc-call://examples/common-commands";
/// URI of the contract-samples examples document.
pub const CONTRACT_SAMPLES_URI: &str = "forc-call://examples/contract-samples";
/// Get the type encoding reference content
///
/// The markdown is embedded at compile time via `include_str!`, so no file
/// I/O happens at runtime.
pub fn get_type_encoding_reference() -> &'static str {
    include_str!(
        "../../../../docs/book/src/forc/plugins/forc_mcp/forc_call_tool/type_encoding_reference.md"
    )
}
/// Get the common commands content
///
/// The markdown is embedded at compile time via `include_str!`, so no file
/// I/O happens at runtime.
pub fn get_common_commands() -> &'static str {
    include_str!(
        "../../../../docs/book/src/forc/plugins/forc_mcp/forc_call_tool/common_commands.md"
    )
}
/// Get the contract samples content
///
/// The markdown is embedded at compile time via `include_str!`, so no file
/// I/O happens at runtime.
pub fn get_contract_samples() -> &'static str {
    include_str!(
        "../../../../docs/book/src/forc/plugins/forc_mcp/forc_call_tool/contract_samples.md"
    )
}
/// Handle resource read requests
///
/// Resolves the URI to one of the bundled markdown documents; unknown URIs
/// produce a `resource_not_found` error carrying the offending URI.
pub async fn read_resource(
    uri: &str,
    _: RequestContext<RoleServer>,
) -> Result<ReadResourceResult, McpError> {
    // Map each known URI to its embedded document; bail out for anything else.
    let content = match uri {
        TYPE_ENCODING_REFERENCE_URI => get_type_encoding_reference(),
        COMMON_COMMANDS_URI => get_common_commands(),
        CONTRACT_SAMPLES_URI => get_contract_samples(),
        _ => {
            return Err(McpError::resource_not_found(
                "Resource not found",
                Some(serde_json::json!({
                    "uri": uri
                })),
            ))
        }
    };
    Ok(ReadResourceResult {
        contents: vec![ResourceContents::text(content, uri)],
    })
}
#[cfg(test)]
mod tests {
    use super::{COMMON_COMMANDS_URI, CONTRACT_SAMPLES_URI, TYPE_ENCODING_REFERENCE_URI};
    use crate::tests::ForcMcpClient;
    use anyhow::Result;
    /// Lists all bundled resources over an HTTP-stream MCP client and
    /// spot-checks the content of each document.
    #[tokio::test]
    async fn test_forc_call_resources() -> Result<()> {
        let mut client = ForcMcpClient::http_stream_client().await?;
        // List resources
        let resources = client.list_resources().await?;
        assert_eq!(resources.len(), 3);
        assert!(resources.contains(&TYPE_ENCODING_REFERENCE_URI.to_string()));
        assert!(resources.contains(&COMMON_COMMANDS_URI.to_string()));
        assert!(resources.contains(&CONTRACT_SAMPLES_URI.to_string()));
        // Read type encoding reference
        let type_ref = client.read_resource(TYPE_ENCODING_REFERENCE_URI).await?;
        assert!(type_ref.contains("MCP Tool Type Encoding Reference"));
        assert!(type_ref.contains("bool"));
        assert!(type_ref.contains("Structs are encoded as tuples"));
        assert!(type_ref.contains("call_contract"));
        // Read common commands
        let commands = client.read_resource(COMMON_COMMANDS_URI).await?;
        assert!(commands.contains("Common MCP Tool Usage"));
        assert!(commands.contains("\"mode\": \"dry-run\""));
        assert!(commands.contains("\"mode\": \"simulate\""));
        assert!(commands.contains("\"mode\": \"live\""));
        assert!(commands.contains("\"tool\": \"call_contract\""));
        // Read contract samples
        let samples = client.read_resource(CONTRACT_SAMPLES_URI).await?;
        assert!(samples.contains("Contract Examples with MCP Tool Usage"));
        assert!(samples.contains("Simple Counter Contract"));
        assert!(samples.contains("Token Contract"));
        assert!(samples.contains("Complex Types Contract"));
        assert!(samples.contains("MCP Tool Commands"));
        Ok(())
    }
    /// Unknown URIs must surface as an error from the read endpoint.
    #[tokio::test]
    async fn test_resource_not_found() -> Result<()> {
        let mut client = ForcMcpClient::http_stream_client().await?;
        // Try to read non-existent resource
        let result = client.read_resource("forc-call://non-existent").await;
        assert!(result.is_err());
        Ok(())
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-mcp/src/forc_call/mod.rs | forc-plugins/forc-mcp/src/forc_call/mod.rs | mod resources;
use crate::McpToolModule;
use resources::{COMMON_COMMANDS_URI, CONTRACT_SAMPLES_URI, TYPE_ENCODING_REFERENCE_URI};
use rmcp::{
handler::server::{router::tool::ToolRouter, tool::Parameters},
model::*,
schemars::{self, JsonSchema},
service::RequestContext,
tool, tool_handler, tool_router, Error as McpError, RoleServer, ServerHandler,
};
use serde::Deserialize;
use serde_json::Value;
use std::{collections::HashMap, future::Future, pin::Pin, str::FromStr};
/// Arguments accepted by the `call_contract` MCP tool.
///
/// String fields are decoded into typed `forc_client` values by
/// `build_call_command`; invalid values surface as tool errors, not panics.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct CallContractArgs {
    /// Hex-encoded address of the deployed contract to call.
    pub contract_id: String,
    /// ABI source, e.g. a file path or URL (parsed via `AbiSource::try_from`).
    pub abi: String,
    /// Name of the contract function to invoke.
    pub function: String,
    /// Function arguments, each encoded as a string.
    #[serde(default)]
    pub function_args: Vec<String>,
    /// Execution mode: "dry-run" (default), "simulate", or "live".
    #[serde(default = "default_mode")]
    pub mode: String,
    /// Optional node URL to run the call against.
    pub node_url: Option<String>,
    /// Optional secret key used to sign the transaction (needed for live mode).
    pub signing_key: Option<String>,
    /// Amount of the asset to forward with the call; defaults to 0.
    #[serde(default)]
    pub amount: u64,
    /// Optional asset ID of the forwarded amount.
    pub asset_id: Option<String>,
    /// Optional gas price for the transaction.
    pub gas_price: Option<u64>,
    /// Verbosity level for the call output; defaults to 0.
    #[serde(default)]
    pub verbosity: u8,
}
/// Arguments accepted by the `list_contract_functions` MCP tool.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct ListFunctionsArgs {
    /// Hex-encoded address of the deployed contract.
    pub contract_id: String,
    /// ABI source used to enumerate the contract's callable functions.
    pub abi: String,
}
/// Arguments accepted by the `transfer_assets` MCP tool.
///
/// Transfers always execute in live mode (see `build_transfer_command`),
/// so a signing key is mandatory here.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct TransferAssetsArgs {
    /// Secret key used to sign the transfer.
    pub signing_key: String,
    /// Recipient address of the transfer.
    pub recipient: String,
    /// Amount to transfer.
    pub amount: u64,
    /// Optional asset ID of the asset to transfer.
    pub asset_id: Option<String>,
    /// Optional node URL to submit the transfer to.
    pub node_url: Option<String>,
    /// Verbosity level for the output; defaults to 0.
    #[serde(default)]
    pub verbosity: u8,
}
/// Arguments accepted by the `get_execution_trace` MCP tool.
///
/// The raw JSON maps are parsed into typed `TraceEvent`s / `ContractId`
/// labels inside the tool handler.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct GetExecutionTraceArgs {
    /// JSON objects representing TraceEvent
    pub trace_events: Vec<HashMap<String, Value>>,
    /// Total gas used in the execution trace
    pub total_gas: u64,
    /// JSON string representation of HashMap<ContractId, String>
    pub labels: Option<HashMap<String, String>>,
}
/// Serde default for [`CallContractArgs::mode`]: contract calls run as a
/// dry-run unless the caller explicitly opts into "simulate" or "live".
fn default_mode() -> String {
    String::from("dry-run")
}
/// Forc-call specific MCP tools
#[derive(Clone)]
pub struct ForcCallTools {
    // Router generated by `#[tool_router]`; dispatches incoming MCP tool
    // calls to the `#[tool]`-annotated methods on this type.
    pub tool_router: ToolRouter<ForcCallTools>,
}
#[tool_router]
impl ForcCallTools {
    /// Builds the tool set with the router generated by `#[tool_router]`.
    pub fn new() -> Self {
        Self {
            tool_router: Self::tool_router(),
        }
    }
    /// MCP tool: invoke a function on a deployed contract.
    ///
    /// Every failure path is reported as an error `CallToolResult` (never an
    /// `McpError`), so clients always get a well-formed tool response.
    #[tool(
        description = "Call a function on a deployed Fuel contract. Defaults to dry-run mode with default signer. Provide signing key to execute in live mode."
    )]
    async fn call_contract(
        &self,
        Parameters(args): Parameters<CallContractArgs>,
    ) -> Result<CallToolResult, McpError> {
        // Catch all errors and return them as error CallToolResults instead of McpErrors
        let cmd = match build_call_command(
            &args.contract_id,
            &args.abi,
            &args.function,
            args.function_args,
            &args.mode,
            args.node_url.as_deref(),
            args.signing_key.as_deref(),
            args.amount,
            args.asset_id.as_deref(),
            args.gas_price,
            args.verbosity,
        ) {
            Ok(cmd) => cmd,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Invalid arguments: {e}"
                ))]))
            }
        };
        let operation = match cmd.validate_and_get_operation() {
            Ok(op) => op,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Failed to validate command: {e}"
                ))]))
            }
        };
        let response = match forc_client::op::call(operation, cmd).await {
            Ok(resp) => resp,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Contract call failed: {e}"
                ))]))
            }
        };
        // Successful calls are serialized to JSON for the client.
        let content = match Content::json(response) {
            Ok(content) => content,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Failed to convert response to JSON: {e}"
                ))]))
            }
        };
        Ok(CallToolResult::success(vec![content]))
    }
    /// MCP tool: render the list of callable functions from a contract's ABI
    /// as human-readable text.
    #[tool(description = "List all callable functions in a contract's ABI with example usage.")]
    async fn list_contract_functions(
        &self,
        Parameters(args): Parameters<ListFunctionsArgs>,
    ) -> Result<CallToolResult, McpError> {
        let cmd = match build_list_command(&args.contract_id, &args.abi) {
            Ok(cmd) => cmd,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Invalid arguments: {e}"
                ))]))
            }
        };
        let operation = match cmd.validate_and_get_operation() {
            Ok(op) => op,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Failed to validate command: {e}"
                ))]))
            }
        };
        // `build_list_command` sets `list_functions: true`, so any other
        // operation variant here indicates an internal inconsistency.
        let (contract_id_parsed, abi_map) = match operation {
            forc_client::cmd::call::Operation::ListFunctions { contract_id, abi } => {
                match forc_client::op::call::create_abi_map(contract_id, &abi, cmd.contract_abis)
                    .await
                {
                    Ok(abi_map) => (contract_id, abi_map),
                    Err(e) => {
                        return Ok(CallToolResult::error(vec![Content::text(format!(
                            "Failed to create ABI map: {e}"
                        ))]))
                    }
                }
            }
            _ => {
                return Ok(CallToolResult::error(vec![Content::text(
                    "Expected ListFunctions operation".to_string(),
                )]))
            }
        };
        // The listing helper writes to any `io::Write`; capture it in memory.
        let mut output_buffer = std::io::Cursor::new(Vec::<u8>::new());
        if let Err(e) = forc_client::op::call::list_functions::list_contract_functions(
            &contract_id_parsed,
            &abi_map,
            &mut output_buffer,
        ) {
            return Ok(CallToolResult::error(vec![Content::text(format!(
                "Failed to list contract functions: {e}"
            ))]));
        }
        let output_bytes = output_buffer.into_inner();
        let output_string = match String::from_utf8(output_bytes) {
            Ok(s) => s,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Output was not valid UTF-8: {e}"
                ))]))
            }
        };
        let content = Content::text(output_string);
        Ok(CallToolResult::success(vec![content]))
    }
    /// MCP tool: transfer assets to an address or contract. Always runs in
    /// live mode (see `build_transfer_command`).
    #[tool(
        description = "Transfer assets directly to an address or contract. Uses default signer and live mode."
    )]
    async fn transfer_assets(
        &self,
        Parameters(args): Parameters<TransferAssetsArgs>,
    ) -> Result<CallToolResult, McpError> {
        let cmd = match build_transfer_command(
            &args.signing_key,
            &args.recipient,
            args.amount,
            args.asset_id.as_deref(),
            args.node_url.as_deref(),
            args.verbosity,
        ) {
            Ok(cmd) => cmd,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Invalid arguments: {e}"
                ))]))
            }
        };
        let operation = match cmd.validate_and_get_operation() {
            Ok(op) => op,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Failed to validate command: {e}"
                ))]))
            }
        };
        let response = match forc_client::op::call(operation, cmd).await {
            Ok(resp) => resp,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Transfer failed: {e}"
                ))]))
            }
        };
        let content = match Content::json(response) {
            Ok(content) => content,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Failed to convert response to JSON: {e}"
                ))]))
            }
        };
        Ok(CallToolResult::success(vec![content]))
    }
    /// MCP tool: turn raw trace events (as produced in a `CallResponse`) into
    /// the human-readable trace rendering used by forc-call.
    #[tool(
        description = "Generate a formatted execution trace from trace events. Takes trace events from a CallResponse and returns a human-readable trace visualization."
    )]
    #[allow(clippy::iter_over_hash_type)]
    async fn get_execution_trace(
        &self,
        Parameters(args): Parameters<GetExecutionTraceArgs>,
    ) -> Result<CallToolResult, McpError> {
        // Parse trace events from array of JSON objects
        let mut trace_events: Vec<forc_client::op::call::trace::TraceEvent> = Vec::new();
        for event_obj in &args.trace_events {
            match serde_json::from_value(serde_json::Value::Object(
                event_obj.clone().into_iter().collect(),
            )) {
                Ok(event) => trace_events.push(event),
                Err(e) => {
                    return Ok(CallToolResult::error(vec![Content::text(format!(
                        "Error: Failed to parse trace_event: {e}"
                    ))]))
                }
            }
        }
        // Convert labels from HashMap<String, String> to HashMap<ContractId, String>
        let labels: std::collections::HashMap<fuels_core::types::ContractId, String> =
            if let Some(labels_map) = args.labels {
                let mut converted_labels = std::collections::HashMap::new();
                for (contract_id_str, label) in labels_map {
                    match fuels_core::types::ContractId::from_str(&contract_id_str) {
                        Ok(contract_id) => {
                            converted_labels.insert(contract_id, label);
                        }
                        Err(e) => {
                            // A single bad contract ID aborts the whole request.
                            return Ok(CallToolResult::error(vec![Content::text(format!(
                                "Error: Failed to parse contract ID '{contract_id_str}': {e}"
                            ))]))
                        }
                    }
                }
                converted_labels
            } else {
                std::collections::HashMap::new()
            };
        // Create a buffer to capture the trace output
        let mut trace_buffer = Vec::new();
        // Generate the formatted trace
        if let Err(e) = forc_client::op::call::trace::display_transaction_trace(
            args.total_gas,
            &trace_events,
            &labels,
            &mut trace_buffer,
        ) {
            return Ok(CallToolResult::error(vec![Content::text(format!(
                "Error: Failed to generate trace: {e}"
            ))]));
        }
        let trace_output = match String::from_utf8(trace_buffer) {
            Ok(output) => output,
            Err(e) => {
                return Ok(CallToolResult::error(vec![Content::text(format!(
                    "Error: Failed to convert trace output to string: {e}"
                ))]))
            }
        };
        Ok(CallToolResult::success(vec![Content::text(trace_output)]))
    }
}
impl Default for ForcCallTools {
    /// Delegates to [`ForcCallTools::new`] so the tool set can be built via `Default`.
    fn default() -> Self {
        Self::new()
    }
}
#[tool_handler]
impl ServerHandler for ForcCallTools {
    /// Advertises server identity, protocol version, and capabilities
    /// (tools + resources) to MCP clients.
    fn get_info(&self) -> ServerInfo {
        ServerInfo {
            protocol_version: ProtocolVersion::V_2024_11_05,
            capabilities: ServerCapabilities::builder()
                .enable_tools()
                .enable_resources()
                .build(),
            server_info: Implementation {
                name: self.get_module_name().to_string(),
                version: env!("CARGO_PKG_VERSION").to_string(),
            },
            instructions: Some(
                "Forc-call specific MCP tools for contract interaction. Resources provide type encoding reference and examples.".to_string(),
            ),
        }
    }
    /// Lists the three static markdown resources served by this module:
    /// the type-encoding reference, common-command examples, and sample
    /// contracts.
    async fn list_resources(
        &self,
        _request: Option<PaginatedRequestParam>,
        _: RequestContext<RoleServer>,
    ) -> Result<ListResourcesResult, McpError> {
        Ok(ListResourcesResult {
            resources: vec![
                RawResource {
                    uri: TYPE_ENCODING_REFERENCE_URI.to_string(),
                    name: "MCP Type Encoding Reference".to_string(),
                    description: Some(
                        "Complete reference for encoding Sway types as MCP tool parameters"
                            .to_string(),
                    ),
                    mime_type: Some("text/markdown".to_string()),
                    size: None,
                }
                .no_annotation(),
                RawResource {
                    uri: COMMON_COMMANDS_URI.to_string(),
                    name: "MCP Tool Usage Examples".to_string(),
                    description: Some(
                        "Examples of common MCP tool usage patterns and parameters".to_string(),
                    ),
                    mime_type: Some("text/markdown".to_string()),
                    size: None,
                }
                .no_annotation(),
                RawResource {
                    uri: CONTRACT_SAMPLES_URI.to_string(),
                    name: "Contract Examples with MCP Tools".to_string(),
                    description: Some(
                        "Sample Sway contracts with MCP tool usage examples".to_string(),
                    ),
                    mime_type: Some("text/markdown".to_string()),
                    size: None,
                }
                .no_annotation(),
            ],
            // All resources fit in a single page; no pagination cursor needed.
            next_cursor: None,
        })
    }
    /// Delegates URI resolution and content loading to the `resources` module.
    async fn read_resource(
        &self,
        ReadResourceRequestParam { uri }: ReadResourceRequestParam,
        ctx: RequestContext<RoleServer>,
    ) -> Result<ReadResourceResult, McpError> {
        resources::read_resource(&uri, ctx).await
    }
}
impl McpToolModule for ForcCallTools {
    /// Stable module name; also used as the MCP server name in `get_info`.
    fn get_module_name(&self) -> &'static str {
        "forc-call-tools"
    }
    // Each method below clones `self` into the boxed future so the returned
    // future is `Send + 'static`, as the trait's boxed-future signature
    // requires; the actual work is forwarded to the `ServerHandler` impl.
    fn list_tools(
        &self,
        request: Option<PaginatedRequestParam>,
        ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<ListToolsResult, McpError>> + Send>> {
        let self_clone = self.clone();
        Box::pin(async move { ServerHandler::list_tools(&self_clone, request, ctx).await })
    }
    fn call_tool(
        &self,
        request: CallToolRequestParam,
        ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<CallToolResult, McpError>> + Send>> {
        let self_clone = self.clone();
        Box::pin(async move { ServerHandler::call_tool(&self_clone, request, ctx).await })
    }
    fn list_resources(
        &self,
        request: Option<PaginatedRequestParam>,
        ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<ListResourcesResult, McpError>> + Send>> {
        let self_clone = self.clone();
        Box::pin(async move { ServerHandler::list_resources(&self_clone, request, ctx).await })
    }
    fn read_resource(
        &self,
        request: ReadResourceRequestParam,
        ctx: RequestContext<RoleServer>,
    ) -> Pin<Box<dyn Future<Output = Result<ReadResourceResult, McpError>> + Send>> {
        let self_clone = self.clone();
        Box::pin(async move { ServerHandler::read_resource(&self_clone, request, ctx).await })
    }
    fn get_info(&self) -> ServerInfo {
        ServerHandler::get_info(self)
    }
}
// Helper functions for building forc-client commands
/// Builds a `forc_client::cmd::Call` for invoking a single contract function.
///
/// All string inputs are validated here and turned into typed values;
/// any parse failure is reported via `anyhow` with a descriptive message.
/// `mode` must be one of "dry-run", "simulate", or "live".
#[allow(clippy::too_many_arguments)]
fn build_call_command(
    contract_id: &str,
    abi: &str,
    function: &str,
    function_args: Vec<String>,
    mode: &str,
    node_url: Option<&str>,
    signing_key: Option<&str>,
    amount: u64,
    asset_id: Option<&str>,
    gas_price: Option<u64>,
    verbosity: u8,
) -> anyhow::Result<forc_client::cmd::Call> {
    use forc_client::cmd::call::*;
    use fuels_core::types::{Address, AssetId};
    use std::str::FromStr;

    let address = Address::from_str(contract_id)
        .map_err(|e| anyhow::anyhow!("Invalid contract address: {}", e))?;
    let abi_source =
        AbiSource::try_from(abi.to_string()).map_err(|e| anyhow::anyhow!("Invalid ABI: {}", e))?;

    // Map the stringly-typed MCP `mode` onto the typed execution mode.
    let execution_mode = match mode {
        "dry-run" => ExecutionMode::DryRun,
        "simulate" => ExecutionMode::Simulate,
        "live" => ExecutionMode::Live,
        _ => anyhow::bail!("Invalid mode. Use: dry-run, simulate, or live"),
    };

    // Optional inputs: parse when present, propagate the first parse error.
    let signing_key_parsed = signing_key
        .map(|key| {
            fuel_crypto::SecretKey::from_str(key)
                .map_err(|e| anyhow::anyhow!("Invalid signing key: {}", e))
        })
        .transpose()?;
    let asset_id_parsed = asset_id
        .map(|id| AssetId::from_str(id).map_err(|e| anyhow::anyhow!("Invalid asset ID: {}", e)))
        .transpose()?;

    Ok(forc_client::cmd::Call {
        address,
        abi: Some(abi_source),
        contract_abis: None,
        label: None,
        function: Some(function.to_string()),
        function_args,
        node: forc_client::NodeTarget {
            node_url: node_url.map(String::from),
            target: None,
            testnet: false,
            mainnet: false,
            devnet: false,
        },
        caller: Caller {
            signing_key: signing_key_parsed,
            wallet: false,
        },
        call_parameters: CallParametersOpts {
            amount,
            asset_id: asset_id_parsed,
            gas_forwarded: None,
        },
        mode: execution_mode,
        list_functions: false,
        // Only the gas price is configurable through the MCP tool; other gas
        // knobs stay at their defaults.
        gas: gas_price.map(|price| forc_tx::Gas {
            price: Some(price),
            script_gas_limit: None,
            max_fee: None,
            tip: None,
        }),
        external_contracts: None,
        output: OutputFormat::Json,
        variable_output: None,
        verbosity,
        debug: false,
    })
}
/// Builds a `forc_client::cmd::Call` that lists a contract's callable
/// functions instead of executing one.
///
/// The command is configured for dry-run mode with default output, no signer,
/// and no function selected — only `list_functions` is set.
fn build_list_command(contract_id: &str, abi: &str) -> anyhow::Result<forc_client::cmd::Call> {
    use forc_client::cmd::call::*;
    use fuels_core::types::Address;
    use std::str::FromStr;

    let address = Address::from_str(contract_id)
        .map_err(|e| anyhow::anyhow!("Invalid contract address: {}", e))?;
    let abi_source =
        AbiSource::try_from(abi.to_string()).map_err(|e| anyhow::anyhow!("Invalid ABI: {}", e))?;

    Ok(forc_client::cmd::Call {
        address,
        abi: Some(abi_source),
        contract_abis: None,
        label: None,
        function: None,
        function_args: Vec::new(),
        node: forc_client::NodeTarget {
            node_url: None,
            target: None,
            testnet: false,
            mainnet: false,
            devnet: false,
        },
        caller: Caller {
            signing_key: None,
            wallet: false,
        },
        call_parameters: CallParametersOpts::default(),
        mode: ExecutionMode::DryRun,
        list_functions: true,
        gas: None,
        external_contracts: None,
        output: OutputFormat::Default,
        variable_output: None,
        verbosity: 0,
        debug: false,
    })
}
/// Builds a `forc_client::cmd::Call` that transfers `amount` of an asset to
/// `recipient`, signed with `signing_key`.
///
/// Transfers always execute in `ExecutionMode::Live` and emit JSON output.
fn build_transfer_command(
    signing_key: &str,
    recipient: &str,
    amount: u64,
    asset_id: Option<&str>,
    node_url: Option<&str>,
    verbosity: u8,
) -> anyhow::Result<forc_client::cmd::Call> {
    use forc_client::cmd::call::*;
    use fuels_core::types::{Address, AssetId};
    use std::str::FromStr;

    let signing_key_parsed = fuel_crypto::SecretKey::from_str(signing_key)
        .map_err(|e| anyhow::anyhow!("Invalid signing key: {}", e))?;
    let address = Address::from_str(recipient)
        .map_err(|e| anyhow::anyhow!("Invalid recipient address: {}", e))?;
    // The asset ID is optional; parse only when supplied.
    let asset_id_parsed = asset_id
        .map(|id| AssetId::from_str(id).map_err(|e| anyhow::anyhow!("Invalid asset ID: {}", e)))
        .transpose()?;

    Ok(forc_client::cmd::Call {
        address,
        abi: None,
        contract_abis: None,
        label: None,
        function: None,
        function_args: Vec::new(),
        node: forc_client::NodeTarget {
            node_url: node_url.map(String::from),
            target: None,
            testnet: false,
            mainnet: false,
            devnet: false,
        },
        caller: Caller {
            signing_key: Some(signing_key_parsed),
            wallet: false,
        },
        call_parameters: CallParametersOpts {
            amount,
            asset_id: asset_id_parsed,
            gas_forwarded: None,
        },
        mode: ExecutionMode::Live,
        list_functions: false,
        gas: None,
        external_contracts: None,
        output: OutputFormat::Json,
        variable_output: None,
        verbosity,
        debug: false,
    })
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::ForcMcpClient;
use anyhow::Result;
use forc_client::cmd::call::{ExecutionMode, OutputFormat};
use fuels::crypto::SecretKey;
use fuels::prelude::*;
use fuels_accounts::signers::private_key::PrivateKeySigner;
use serde_json::Value;
use std::{collections::HashMap, str::FromStr};
#[test]
fn test_call_contract_command_construction() {
let cmd = build_call_command(
"0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
"./test.json",
"test_function",
vec!["arg1".to_string(), "arg2".to_string()],
"simulate",
None,
None,
100,
None,
None,
2,
)
.unwrap();
assert_eq!(cmd.function.unwrap(), "test_function");
assert_eq!(cmd.function_args, vec!["arg1", "arg2"]);
assert_eq!(cmd.mode, ExecutionMode::Simulate);
assert_eq!(cmd.call_parameters.amount, 100);
assert_eq!(cmd.verbosity, 2);
assert!(!cmd.list_functions);
assert_eq!(cmd.output, OutputFormat::Json);
assert!(cmd.abi.is_some());
assert!(!cmd.caller.wallet);
}
#[test]
fn test_list_functions_command_construction() {
let cmd = build_list_command(
"0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
"https://example.com/abi.json",
)
.unwrap();
assert!(cmd.list_functions);
assert_eq!(cmd.function, None);
assert_eq!(cmd.function_args, Vec::<String>::new());
assert_eq!(cmd.mode, ExecutionMode::DryRun);
assert_eq!(cmd.output, OutputFormat::Default);
assert_eq!(cmd.verbosity, 0);
assert!(cmd.abi.is_some());
}
#[test]
fn test_transfer_assets_command_construction() {
let cmd = build_transfer_command(
"0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
"0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
500,
None,
None,
1,
)
.unwrap();
assert_eq!(cmd.mode, ExecutionMode::Live);
assert_eq!(cmd.call_parameters.amount, 500);
assert_eq!(cmd.verbosity, 1);
assert_eq!(cmd.abi, None);
assert_eq!(cmd.function, None);
assert_eq!(cmd.function_args, Vec::<String>::new());
assert!(!cmd.list_functions);
assert_eq!(cmd.output, OutputFormat::Json);
}
#[test]
fn test_forc_call_tools_available() {
let tools = ForcCallTools::new();
let tool_list = tools.tool_router.list_all();
let tool_names: Vec<String> = tool_list
.into_iter()
.map(|tool| tool.name.to_string())
.collect();
assert_eq!(tool_names.len(), 4, "Should have exactly 4 forc-call tools");
assert!(tool_names.contains(&"call_contract".to_string()));
assert!(tool_names.contains(&"list_contract_functions".to_string()));
assert!(tool_names.contains(&"transfer_assets".to_string()));
assert!(tool_names.contains(&"get_execution_trace".to_string()));
}
    /// End-to-end test fixture: a local test node with a deployed sample
    /// contract, plus everything needed to call it through the MCP tools.
    struct E2ETestFixture {
        /// Hex-encoded (0x-prefixed) ID of the deployed test contract.
        pub contract_id: String,
        /// Path to the contract's JSON ABI file on disk.
        pub abi_path: String,
        /// GraphQL URL of the spawned local node.
        pub node_url: String,
        /// Hex-encoded (0x-prefixed) secret key of the funded test wallet.
        pub secret_key: String,
        /// Provider connected to the local node; kept alive for the test's duration.
        pub provider: Provider,
    }
    impl E2ETestFixture {
        /// Spins up a local test node with a funded random wallet, deploys the
        /// `contract_with_types` sample contract, and returns everything a
        /// test needs to call it via the MCP tools.
        pub async fn new() -> Result<E2ETestFixture> {
            // Setup local node and deploy contract
            let secret_key = SecretKey::random(&mut rand::thread_rng());
            let signer = PrivateKeySigner::new(secret_key);
            let coins =
                setup_single_asset_coins(signer.address(), AssetId::zeroed(), 10, 1_000_000_000);
            let provider = setup_test_provider(coins, vec![], None, None).await?;
            let wallet = Wallet::new(signer, provider.clone());
            // Deploy the test contract
            let contract_id = Contract::load_from(
                "../../forc-plugins/forc-client/test/data/contract_with_types/contract_with_types.bin",
                LoadConfiguration::default(),
            )?
            .deploy(&wallet, TxPolicies::default())
            .await?
            .contract_id;
            // Use the existing ABI file directly (no temp file needed)
            let abi_path = "../../forc-plugins/forc-client/test/data/contract_with_types/contract_with_types-abi.json";
            Ok(E2ETestFixture {
                contract_id: format!("0x{contract_id}"),
                abi_path: abi_path.to_string(),
                node_url: provider.url().to_string(),
                secret_key: format!("0x{secret_key}"),
                provider,
            })
        }
        /// Helper to extract text from MCP Content - reusable across different MCP tools
        pub fn extract_text_from_content(content: &rmcp::model::Content) -> Option<String> {
            // Since we can't pattern match due to type constraints, we'll use serialization
            // This is a workaround for the complex generic type structure
            if let Ok(json) = serde_json::to_value(content) {
                if let Some(text) = json.get("text") {
                    if let Some(text_str) = text.as_str() {
                        return Some(text_str.to_string());
                    }
                }
            }
            None
        }
        /// Create arguments for contract call tool
        ///
        /// Always includes the fixture's signing key and sets `mode` to
        /// "dry-run" so no live transaction is submitted.
        pub fn create_call_tool_args(
            &self,
            function: &str,
            function_args: Vec<&str>,
        ) -> HashMap<String, Value> {
            let mut args = HashMap::new();
            args.insert(
                "contract_id".to_string(),
                Value::String(self.contract_id.clone()),
            );
            args.insert("abi".to_string(), Value::String(self.abi_path.clone()));
            args.insert("function".to_string(), Value::String(function.to_string()));
            args.insert(
                "function_args".to_string(),
                Value::Array(
                    function_args
                        .into_iter()
                        .map(|s| Value::String(s.to_string()))
                        .collect(),
                ),
            );
            args.insert("node_url".to_string(), Value::String(self.node_url.clone()));
            args.insert(
                "signing_key".to_string(),
                Value::String(self.secret_key.clone()),
            );
            args.insert("mode".to_string(), Value::String("dry-run".to_string()));
            args
        }
        /// Create arguments for list functions tool
        pub fn create_list_tool_args(&self) -> HashMap<String, Value> {
            let mut args = HashMap::new();
            args.insert(
                "contract_id".to_string(),
                Value::String(self.contract_id.clone()),
            );
            args.insert("abi".to_string(), Value::String(self.abi_path.clone()));
            args.insert("node_url".to_string(), Value::String(self.node_url.clone()));
            args
        }
        /// Create arguments for transfer assets tool
        pub fn create_transfer_tool_args(
            &self,
            recipient: &str,
            amount: u64,
        ) -> HashMap<String, Value> {
            let mut args = HashMap::new();
            args.insert(
                "recipient".to_string(),
                Value::String(recipient.to_string()),
            );
            args.insert("amount".to_string(), Value::Number(amount.into()));
            args.insert("node_url".to_string(), Value::String(self.node_url.clone()));
            args.insert(
                "signing_key".to_string(),
                Value::String(self.secret_key.clone()),
            );
            args
        }
        /// Create arguments for get_execution_trace tool
        ///
        /// Serializes typed trace events and contract-ID labels into the
        /// JSON shapes expected by `GetExecutionTraceArgs`.
        #[allow(dead_code)]
        pub fn create_trace_tool_args(
            &self,
            trace_events: &[forc_client::op::call::trace::TraceEvent],
            total_gas: u64,
            labels: Option<&std::collections::HashMap<fuels_core::types::ContractId, String>>,
        ) -> HashMap<String, Value> {
            let mut args = HashMap::new();
            // Convert each trace event to JSON object (HashMap<String, Value>)
            let trace_events_array: Vec<Value> = trace_events
                .iter()
                .map(|event| serde_json::to_value(event).unwrap())
                .collect();
            args.insert("trace_events".to_string(), Value::Array(trace_events_array));
            args.insert("total_gas".to_string(), Value::Number(total_gas.into()));
            if let Some(labels) = labels {
                // Convert HashMap<ContractId, String> to HashMap<String, String>
                let labels_map: HashMap<String, String> = labels
                    .iter()
                    .map(|(contract_id, label)| (format!("0x{contract_id}"), label.clone()))
                    .collect();
                args.insert(
                    "labels".to_string(),
                    serde_json::to_value(labels_map).unwrap(),
                );
            }
            args
        }
    }
    /// The HTTP-stream MCP transport must expose the same four tools that the
    /// in-process router advertises.
    #[tokio::test]
    async fn test_forc_call_mcp_tools_available_via_http_mcp() -> Result<()> {
        // Test that all expected forc-call tools are available via the SSE server
        let mut client = ForcMcpClient::http_stream_client().await?;
        let tool_names = client.list_tools().await?;
        assert_eq!(tool_names.len(), 4, "Should have exactly 4 forc-call tools");
        assert!(
            tool_names.contains(&"call_contract".to_string()),
            "Should have call_contract tool"
        );
        assert!(
            tool_names.contains(&"list_contract_functions".to_string()),
            "Should have list_contract_functions tool"
        );
        assert!(
            tool_names.contains(&"transfer_assets".to_string()),
            "Should have transfer_assets tool"
        );
        assert!(
            tool_names.contains(&"get_execution_trace".to_string()),
            "Should have get_execution_trace tool"
        );
        Ok(())
    }
    /// End-to-end: call `test_u8(255)` on the deployed sample contract via the
    /// MCP HTTP transport and check the JSON response echoes the input.
    #[tokio::test]
    async fn test_call_contract_tool_http_mcp() -> Result<()> {
        let fixture = E2ETestFixture::new().await.unwrap();
        let mut client = ForcMcpClient::http_stream_client().await?;
        // Test calling a simple function through the MCP SSE server
        let args = fixture.create_call_tool_args("test_u8", vec!["255"]);
        let result = client.call_tool("call_contract", args).await?;
        // Full validation for the first e2e test
        assert_eq!(result.is_error, Some(false), "Call should not be an error");
        assert!(!result.content.is_empty(), "Content should not be empty");
        // Extract and parse the response content
        let text = E2ETestFixture::extract_text_from_content(&result.content[0])
            .expect("Response content should be text");
        let call_response: serde_json::Value = serde_json::from_str(&text)?;
        // Verify the function returned the expected value
        assert!(
            call_response.get("tx_hash").is_some(),
            "Response should have tx_hash"
        );
        assert!(
            call_response.get("result").is_some(),
            "Response should have result"
        );
        assert_eq!(
            call_response["result"], "255",
            "Function should return input value"
        );
        Ok(())
    }
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/lib.rs | forc-plugins/forc-debug/src/lib.rs | pub mod cli;
pub mod debugger;
pub mod error;
pub mod names;
pub mod server;
pub mod types;
// Re-exports
pub use fuel_core_client::client::{schema::RunResult, FuelClient};
pub use fuel_vm::prelude::{ContractId, Transaction};
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/names.rs | forc-plugins/forc-debug/src/names.rs | /// A list of predefined register names mapped to their corresponding indices.
/// A list of predefined register names mapped to their corresponding indices.
pub const REGISTERS: [&str; 16] = [
    "zero", "one", "of", "pc", "ssp", "sp", "fp", "hp", "err", "ggas", "cgas", "bal", "is", "ret",
    "retl", "flag",
];

/// Returns the name of a register given its index.
///
/// Indices beyond the predefined set fall back to a synthetic `"reg{index}"`
/// name.
///
/// # Examples
///
/// ```
/// use forc_debug::names::register_name;
/// assert_eq!(register_name(0), "zero".to_string());
/// assert_eq!(register_name(15), "flag".to_string());
/// ```
pub fn register_name(index: usize) -> String {
    REGISTERS
        .get(index)
        .map_or_else(|| format!("reg{index}"), |name| (*name).to_owned())
}

/// Returns the index of a register given its name, or `None` when the name
/// does not match any predefined register.
///
/// # Examples
///
/// ```
/// use forc_debug::names::register_index;
/// assert_eq!(register_index("zero"), Some(0));
/// assert_eq!(register_index("flag"), Some(15));
/// assert_eq!(register_index("unknown"), None);
/// ```
pub fn register_index(name: &str) -> Option<usize> {
    REGISTERS.iter().position(|&candidate| candidate == name)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/error.rs | forc-plugins/forc-debug/src/error.rs | use crate::types::Instruction;
use dap::requests::Command;
/// Crate-wide result alias over the aggregated [`Error`] type.
pub type Result<T> = std::result::Result<T, Error>;
/// Top-level error type for forc-debug, aggregating argument, adapter, VM,
/// client, I/O, serialization, DAP-server, and readline failures.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    // `transparent` forwards the inner error's Display/source unchanged.
    #[error(transparent)]
    ArgumentError(#[from] ArgumentError),
    #[error(transparent)]
    AdapterError(#[from] AdapterError),
    #[error("VM error: {0}")]
    VMError(String),
    #[error("Fuel Client error: {0}")]
    FuelClientError(String),
    #[error("Session error: {0}")]
    SessionError(String),
    #[error("I/O error")]
    IoError(#[from] std::io::Error),
    #[error("ABI error: {0}")]
    AbiError(String),
    #[error("Json error")]
    JsonError(#[from] serde_json::Error),
    #[error("Server error: {0}")]
    DapServerError(#[from] dap::errors::ServerError),
    #[error("Readline error: {0}")]
    Readline(#[from] rustyline::error::ReadlineError),
}
/// Errors produced while parsing and validating debugger command arguments.
#[derive(Debug, thiserror::Error)]
pub enum ArgumentError {
    #[error("Invalid argument: {0}")]
    Invalid(String),
    #[error("Unknown command: {0}")]
    UnknownCommand(String),
    // `expected` is the minimum required count; `got` the actual count.
    #[error("Not enough arguments, expected {expected} but got {got}")]
    NotEnough { expected: usize, got: usize },
    // `expected` is the maximum allowed count; `got` the actual count.
    #[error("Too many arguments, expected {expected} but got {got}")]
    TooMany { expected: usize, got: usize },
    #[error("Invalid number format: {0}")]
    InvalidNumber(String),
}
/// Errors produced by the DAP (Debug Adapter Protocol) server adapter.
// The embedded `dap::requests::Command` makes some variants large; the allow
// acknowledges this rather than boxing the payload.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, thiserror::Error)]
pub enum AdapterError {
    #[error("Unhandled command")]
    UnhandledCommand { command: Command },
    #[error("Missing command")]
    MissingCommand,
    #[error("Missing configuration")]
    MissingConfiguration,
    #[error("Missing source path argument")]
    MissingSourcePathArgument,
    #[error("Missing breakpoint location")]
    MissingBreakpointLocation,
    // `pc` is the program counter whose source mapping was requested.
    #[error("Missing source map")]
    MissingSourceMap { pc: Instruction },
    #[error("Unknown breakpoint")]
    UnknownBreakpoint { pc: Instruction },
    #[error("Build failed")]
    BuildFailed { reason: String },
    #[error("No active test executor")]
    NoActiveTestExecutor,
    #[error("Test execution failed")]
    TestExecutionFailed {
        #[from]
        source: anyhow::Error,
    },
}
impl ArgumentError {
    /// Ensures the argument count falls within the inclusive `[min, max]`
    /// range, returning `NotEnough`/`TooMany` (with the violated bound as
    /// `expected`) otherwise.
    pub fn ensure_arg_count(
        args: &[String],
        min: usize,
        max: usize,
    ) -> std::result::Result<(), ArgumentError> {
        let got = args.len();
        if got < min {
            return Err(ArgumentError::NotEnough { expected: min, got });
        }
        if got > max {
            return Err(ArgumentError::TooMany { expected: max, got });
        }
        Ok(())
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/types.rs | forc-plugins/forc-debug/src/types.rs | use crate::error::{Error, Result};
use dap::types::Breakpoint;
use fuel_types::ContractId;
use std::{
collections::HashMap,
ops::{Deref, DerefMut},
path::PathBuf,
};
use sway_core::asm_generation::ProgramABI;
// Exit code of a debugged program run — NOTE(review): confirm semantics
// against usages outside this file.
pub type ExitCode = i64;
// A VM program-counter / instruction value (used e.g. in
// `AdapterError::MissingSourceMap { pc }`).
pub type Instruction = u64;
// Breakpoints grouped by the source file they belong to.
pub type Breakpoints = HashMap<PathBuf, Vec<Breakpoint>>;
/// A map storing ABIs for contracts, capable of fetching ABIs from the registry for unknown contracts.
#[derive(Debug, Default)]
pub struct AbiMap(HashMap<ContractId, ProgramABI>);
impl AbiMap {
    /// Registers the given ABI for the given contract ID.
    pub fn register_abi(&mut self, contract_id: ContractId, abi: ProgramABI) {
        self.insert(contract_id, abi);
    }

    /// Returns the cached ABI for `contract_id`, fetching and caching it from
    /// the Sway ABI Registry on a cache miss.
    ///
    /// Returns `None` when the ABI is unknown and the registry fetch fails;
    /// a failed fetch leaves the map unchanged.
    pub fn get_or_fetch_abi(&mut self, contract_id: &ContractId) -> Option<&ProgramABI> {
        if !self.contains_key(contract_id) {
            // Cache miss: attempt the registry; bail out with `None` on failure.
            let abi = fetch_abi_from_registry(contract_id).ok()?;
            self.register_abi(*contract_id, abi);
        }
        self.get(contract_id)
    }
}
// Deref to the inner map gives `AbiMap` the full read API of `HashMap`
// (`get`, `contains_key`, ...) without wrapper methods.
impl Deref for AbiMap {
    type Target = HashMap<ContractId, ProgramABI>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// DerefMut likewise exposes the mutable API (`insert`, ...).
impl DerefMut for AbiMap {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Fetches the ABI for the given contract ID from the Sway ABI Registry.
///
/// Currently always returns `Error::AbiError` because the registry does not
/// exist yet (see the TODO below); callers treat this as a cache miss.
fn fetch_abi_from_registry(_contract_id: &ContractId) -> Result<ProgramABI> {
    // TODO: Implement this once the Sway ABI Registry is available
    // See this github issue: https://github.com/FuelLabs/sway/issues/6862
    Err(Error::AbiError("Not implemented yet".to_string()))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/main.rs | forc-plugins/forc-debug/src/main.rs | use clap::Parser;
use forc_tracing::{init_tracing_subscriber, println_error, TracingSubscriberOptions};
// CLI options for the forc-debug binary. NOTE: the `///` doc comments below
// double as clap-generated help text, so they are deliberately left untouched.
#[derive(Parser, Debug)]
#[clap(name = "forc-debug", version)]
/// Forc plugin for the Sway DAP (Debug Adapter Protocol) implementation.
pub struct Opt {
    /// The URL of the Fuel Client GraphQL API
    #[clap(default_value = "http://127.0.0.1:4000/graphql")]
    pub api_url: String,
    /// Start the DAP server
    // When unset, `main` starts the interactive CLI against `api_url` instead.
    #[clap(short, long)]
    pub serve: bool,
}
#[tokio::main]
async fn main() {
init_tracing_subscriber(TracingSubscriberOptions::default());
let config = Opt::parse();
let result = if config.serve {
forc_debug::server::DapServer::default().start()
} else {
forc_debug::cli::start_cli(&config.api_url).await
};
if let Err(err) = result {
println_error(&format!("{err}"));
std::process::exit(1);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/state.rs | forc-plugins/forc-debug/src/server/state.rs | use crate::{
error::AdapterError,
types::{Breakpoints, Instruction},
};
use dap::types::StartDebuggingRequestKind;
use forc_pkg::BuiltPackage;
use forc_test::{execute::TestExecutor, setup::TestSetup, TestResult};
use std::path::PathBuf;
use sway_core::source_map::SourceMap;
#[derive(Default, Debug, Clone)]
/// The state of the DAP server.
pub struct ServerState {
// DAP state
pub program_path: PathBuf,
pub mode: Option<StartDebuggingRequestKind>,
pub initialized_event_sent: bool,
pub started_debugging: bool,
pub configuration_done: bool,
pub breakpoints_need_update: bool,
pub stopped_on_breakpoint_id: Option<i64>,
pub breakpoints: Breakpoints,
// Build state
pub source_map: SourceMap,
pub built_package: Option<BuiltPackage>,
// Test state
pub test_setup: Option<TestSetup>,
pub test_results: Vec<forc_test::TestResult>,
pub executors: Vec<TestExecutor>,
original_executors: Vec<TestExecutor>,
}
impl ServerState {
/// Resets the data for a new run of the tests.
pub fn reset(&mut self) {
self.started_debugging = false;
self.executors.clone_from(&self.original_executors);
self.built_package = None;
self.test_setup = None;
self.test_results = vec![];
self.stopped_on_breakpoint_id = None;
self.breakpoints_need_update = true;
}
/// Initializes the executor stores.
pub fn init_executors(&mut self, executors: Vec<TestExecutor>) {
self.executors.clone_from(&executors);
self.original_executors = executors;
}
/// Returns the active [TestExecutor], if any.
pub fn executor(&mut self) -> Option<&mut TestExecutor> {
self.executors.first_mut()
}
/// Finds the source location matching a VM program counter.
pub fn vm_pc_to_source_location(
&self,
pc: Instruction,
) -> Result<(PathBuf, i64), AdapterError> {
// Convert PC to instruction index (divide by 4 for byte offset)
let instruction_idx = (pc / 4) as usize;
if let Some((path, range)) = self.source_map.addr_to_span(instruction_idx) {
Ok((path, range.start.line as i64))
} else {
Err(AdapterError::MissingSourceMap { pc })
}
}
/// Updates the breakpoints in the VM for all remaining [TestExecutor]s.
pub(crate) fn update_vm_breakpoints(&mut self) {
if !self.breakpoints_need_update {
return;
}
// Convert breakpoints to instruction offsets using the source map
let opcode_indexes = self
.breakpoints
.iter()
.flat_map(|(source_path, breakpoints)| {
breakpoints
.iter()
.filter_map(|bp| {
bp.line.and_then(|line| {
// Find any instruction that maps to this line in the source map
self.source_map.map.iter().find_map(|(pc, _)| {
self.source_map
.addr_to_span(*pc)
.filter(|(path, range)| {
path == source_path && range.start.line as i64 == line
})
.map(|_| pc)
})
})
})
.collect::<Vec<_>>()
});
// Set breakpoints in the VM
self.executors.iter_mut().for_each(|executor| {
let bps: Vec<_> = opcode_indexes
.clone()
.map(|opcode_index| fuel_vm::state::Breakpoint::script(*opcode_index as u64))
.collect();
executor.interpreter.overwrite_breakpoints(&bps);
});
self.breakpoints_need_update = false;
}
/// Finds the breakpoint matching a VM program counter.
pub fn vm_pc_to_breakpoint_id(&self, pc: u64) -> Result<i64, AdapterError> {
let (source_path, source_line) = self.vm_pc_to_source_location(pc)?;
// Find the breakpoint ID matching the source location.
let source_bps = self
.breakpoints
.get(&source_path)
.ok_or(AdapterError::UnknownBreakpoint { pc })?;
let breakpoint_id = source_bps
.iter()
.find_map(|bp| {
if bp.line == Some(source_line) {
bp.id
} else {
None
}
})
.ok_or(AdapterError::UnknownBreakpoint { pc })?;
Ok(breakpoint_id)
}
pub(crate) fn test_complete(&mut self, result: TestResult) {
self.test_results.push(result);
self.executors.remove(0);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/util.rs | forc-plugins/forc-debug/src/server/util.rs | use crate::types::Instruction;
use dap::types::Source;
use fuel_vm::fuel_asm::RegId;
use std::path::Path;
#[derive(Debug, Clone)]
/// Utility for generating unique, incremental IDs.
pub(crate) struct IdGenerator {
next_id: i64,
}
impl Default for IdGenerator {
fn default() -> Self {
Self::new()
}
}
impl IdGenerator {
pub(crate) fn new() -> Self {
Self { next_id: 0 }
}
pub(crate) fn next(&mut self) -> i64 {
let id = self.next_id;
self.next_id += 1;
id
}
}
/// Converts a filesystem path into a DAP Source object, which is used by the debug adapter
/// to identify source locations. Only sets the path field, leaving other Source fields at
/// their default values.
pub(crate) fn path_into_source(path: &Path) -> Source {
Source {
path: Some(path.to_string_lossy().into_owned()),
..Default::default()
}
}
pub(crate) fn current_instruction(registers: &[u64]) -> Instruction {
let pc = registers[RegId::PC];
let is = registers[RegId::IS];
pc - is
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/mod.rs | forc-plugins/forc-debug/src/server/mod.rs | mod handlers;
mod state;
mod util;
use crate::{
error::{self, AdapterError, Error},
server::{state::ServerState, util::IdGenerator},
types::{ExitCode, Instruction},
};
use dap::{
events::{ExitedEventBody, OutputEventBody, StoppedEventBody},
prelude::*,
types::StartDebuggingRequestKind,
};
use forc_pkg::{
manifest::GenericManifestFile,
source::IPFSNode,
{self, BuildProfile, Built, BuiltPackage, PackageManifestFile},
};
use forc_test::{
execute::{DebugResult, TestExecutor},
setup::TestSetup,
BuiltTests, TestGasLimit,
};
use fuel_tx::GasCostsValues;
use serde::{Deserialize, Serialize};
use std::{
io::{BufReader, BufWriter, Read, Write},
process,
sync::Arc,
};
use sway_core::BuildTarget;
pub const THREAD_ID: i64 = 0;
pub const REGISTERS_VARIABLE_REF: i64 = 1;
pub const INSTRUCTIONS_VARIABLE_REF: i64 = 2;
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct AdditionalData {
pub program: String,
}
/// This struct is a stateful representation of a Debug Adapter Protocol (DAP) server. It holds everything
/// needed to implement (DAP)[https://microsoft.github.io/debug-adapter-protocol/].
///
/// It is responsible for handling requests and sending responses and events to the client. It manages
/// the state of the server and the underlying VM instances used for debugging sway tests. It builds sway code
/// and generates source maps for debugging. It also manages the test setup and reports results back to the client.
pub struct DapServer {
/// The DAP server transport.
server: Server<Box<dyn Read>, Box<dyn Write>>,
/// Used to generate unique breakpoint IDs.
breakpoint_id_gen: IdGenerator,
/// The server state.
pub state: ServerState,
}
impl Default for DapServer {
fn default() -> Self {
Self::new(Box::new(std::io::stdin()), Box::new(std::io::stdout()))
}
}
impl DapServer {
/// Creates a new DAP server with custom input and output streams.
///
/// # Arguments
/// * `input` - Source of DAP protocol messages (usually stdin)
/// * `output` - Destination for DAP protocol messages (usually stdout)
pub fn new(input: Box<dyn Read>, output: Box<dyn Write>) -> Self {
let server = Server::new(BufReader::new(input), BufWriter::new(output));
DapServer {
server,
state: ServerState::default(),
breakpoint_id_gen: IdGenerator::default(),
}
}
/// Runs the debug server event loop, handling client requests until completion or error.
pub fn start(&mut self) -> error::Result<()> {
loop {
let req = match self.server.poll_request()? {
Some(req) => req,
None => return Err(Error::AdapterError(AdapterError::MissingCommand)),
};
// Handle the request and send response
let response = self.handle_request(req)?;
self.server.respond(response)?;
// Handle one-time initialization
if !self.state.initialized_event_sent {
let _ = self.server.send_event(Event::Initialized);
self.state.initialized_event_sent = true;
}
// Handle launch after configuration is complete
if self.should_launch() {
self.state.started_debugging = true;
match self.launch() {
Ok(true) => continue,
Ok(false) => self.exit(0), // The tests finished executing
Err(e) => {
self.error(format!("Launch error: {e:?}"));
self.exit(1);
}
}
}
}
}
/// Processes a debug adapter request and generates appropriate response.
fn handle_request(&mut self, req: Request) -> error::Result<Response> {
let (result, exit_code) = self.handle_command(&req.command).into_tuple();
let response = match result {
Ok(rsp) => Ok(req.success(rsp)),
Err(e) => {
self.error(format!("{e:?}"));
Ok(req.error(&format!("{e:?}")))
}
};
if let Some(exit_code) = exit_code {
self.exit(exit_code);
}
response
}
/// Handles a command and returns the result and exit code, if any.
pub fn handle_command(&mut self, command: &Command) -> HandlerResult {
match command {
Command::Attach(_) => self.handle_attach(),
Command::BreakpointLocations(ref args) => {
self.handle_breakpoint_locations_command(args)
}
Command::ConfigurationDone => self.handle_configuration_done(),
Command::Continue(_) => self.handle_continue(),
Command::Disconnect(_) => HandlerResult::ok_with_exit(ResponseBody::Disconnect, 0),
Command::Evaluate(args) => self.handle_evaluate(args),
Command::Initialize(_) => self.handle_initialize(),
Command::Launch(ref args) => self.handle_launch(args),
Command::Next(_) => self.handle_next(),
Command::Pause(_) => self.handle_pause(),
Command::Restart(_) => self.handle_restart(),
Command::Scopes(_) => self.handle_scopes(),
Command::SetBreakpoints(ref args) => self.handle_set_breakpoints_command(args),
Command::StackTrace(_) => self.handle_stack_trace_command(),
Command::StepIn(_) => {
self.error("This feature is not currently supported.".into());
HandlerResult::ok(ResponseBody::StepIn)
}
Command::StepOut(_) => {
self.error("This feature is not currently supported.".into());
HandlerResult::ok(ResponseBody::StepOut)
}
Command::Terminate(_) => HandlerResult::ok_with_exit(ResponseBody::Terminate, 0),
Command::TerminateThreads(_) => {
HandlerResult::ok_with_exit(ResponseBody::TerminateThreads, 0)
}
Command::Threads => self.handle_threads(),
Command::Variables(ref args) => self.handle_variables_command(args),
_ => HandlerResult::err(AdapterError::UnhandledCommand {
command: command.clone(),
}),
}
}
/// Checks whether debug session is ready to begin launching tests.
fn should_launch(&self) -> bool {
self.state.configuration_done
&& !self.state.started_debugging
&& matches!(self.state.mode, Some(StartDebuggingRequestKind::Launch))
}
/// Logs a message to the client's debugger console output.
fn log(&mut self, output: String) {
let _ = self.server.send_event(Event::Output(OutputEventBody {
output,
..Default::default()
}));
}
/// Logs an error message to the client's debugger console output.
fn error(&mut self, output: String) {
let _ = self.server.send_event(Event::Output(OutputEventBody {
output,
category: Some(types::OutputEventCategory::Stderr),
..Default::default()
}));
}
/// Logs test execution results in a cargo-test-like format, showing duration and gas usage for each test.
fn log_test_results(&mut self) {
if !self.state.executors.is_empty() {
return;
}
let test_results = &self.state.test_results;
let test_lines = test_results
.iter()
.map(|r| {
let outcome = if r.passed() { "ok" } else { "failed" };
format!(
"test {} ... {} ({}ms, {} gas)",
r.name,
outcome,
r.duration.as_millis(),
r.gas_used
)
})
.collect::<Vec<_>>()
.join("\n");
let passed = test_results.iter().filter(|r| r.passed()).count();
let final_outcome = if passed == test_results.len() {
"OK"
} else {
"FAILED"
};
self.log(format!(
"{test_lines}\nResult: {final_outcome}. {passed} passed. {} failed.\n",
test_results.len() - passed
));
}
/// Handles a `launch` request. Returns true if the server should continue running.
pub fn launch(&mut self) -> Result<bool, AdapterError> {
// Build tests for the given path.
let (pkg_to_debug, test_setup) = self.build_tests()?;
let entries = pkg_to_debug.bytecode.entries.iter().filter_map(|entry| {
if let Some(test_entry) = entry.kind.test() {
return Some((entry, test_entry));
}
None
});
// Construct a TestExecutor for each test and store it
let executors: Vec<TestExecutor> = entries
.filter_map(|(entry, test_entry)| {
let offset = u32::try_from(entry.finalized.imm)
.expect("test instruction offset out of range");
let name = entry.finalized.fn_name.clone();
if test_entry.file_path.as_path() != self.state.program_path.as_path() {
return None;
}
TestExecutor::build(
&pkg_to_debug.bytecode.bytes,
offset,
test_setup.clone(),
test_entry,
name.clone(),
// TODO: (GAS-COSTS) Provide gas costs values here, similar like in `forc test`.
// See: https://github.com/FuelLabs/sway/issues/7472
GasCostsValues::default(),
TestGasLimit::default(),
)
.ok()
})
.collect();
self.state.init_executors(executors);
// Start debugging
self.start_debugging_tests(false)
}
/// Builds the tests at the given [PathBuf] and stores the source maps.
pub fn build_tests(&mut self) -> Result<(BuiltPackage, TestSetup), AdapterError> {
if let Some(pkg) = &self.state.built_package {
if let Some(setup) = &self.state.test_setup {
return Ok((pkg.clone(), setup.clone()));
}
}
// 1. Build the packages
let manifest_file = forc_pkg::manifest::ManifestFile::from_dir(&self.state.program_path)
.map_err(|err| AdapterError::BuildFailed {
reason: format!("read manifest file: {err:?}"),
})?;
let pkg_manifest: PackageManifestFile =
manifest_file
.clone()
.try_into()
.map_err(|err: anyhow::Error| AdapterError::BuildFailed {
reason: format!("package manifest: {err:?}"),
})?;
let member_manifests =
manifest_file
.member_manifests()
.map_err(|err| AdapterError::BuildFailed {
reason: format!("member manifests: {err:?}"),
})?;
let lock_path = manifest_file
.lock_path()
.map_err(|err| AdapterError::BuildFailed {
reason: format!("lock path: {err:?}"),
})?;
let build_plan = forc_pkg::BuildPlan::from_lock_and_manifests(
&lock_path,
&member_manifests,
false,
false,
&IPFSNode::default(),
)
.map_err(|err| AdapterError::BuildFailed {
reason: format!("build plan: {err:?}"),
})?;
let project_name = pkg_manifest.project_name();
let outputs = std::iter::once(build_plan.find_member_index(project_name).ok_or(
AdapterError::BuildFailed {
reason: format!("find built project: {project_name}"),
},
)?)
.collect();
let built_packages = forc_pkg::build(
&build_plan,
BuildTarget::default(),
&BuildProfile {
optimization_level: sway_core::OptLevel::Opt0,
include_tests: true,
..Default::default()
},
&outputs,
&[],
&[],
None,
)
.map_err(|err| AdapterError::BuildFailed {
reason: format!("build packages: {err:?}"),
})?;
// 2. Store the source maps and find debug package
let pkg_to_debug = built_packages
.iter()
.find(|(_, pkg)| pkg.descriptor.manifest_file == pkg_manifest)
.map(|(_, pkg)| pkg)
.ok_or(AdapterError::BuildFailed {
reason: format!("find package: {project_name}"),
})?;
self.state.source_map = pkg_to_debug.source_map.clone();
// 3. Build the tests
let built = Built::Package(Arc::from(pkg_to_debug.clone()));
let built_tests = BuiltTests::from_built(built, &build_plan).map_err(|err| {
AdapterError::BuildFailed {
reason: format!("build tests: {err:?}"),
}
})?;
let pkg_tests = match built_tests {
BuiltTests::Package(pkg_tests) => pkg_tests,
BuiltTests::Workspace(_) => {
return Err(AdapterError::BuildFailed {
reason: "package tests: workspace tests not supported".into(),
})
}
};
let test_setup = pkg_tests.setup().map_err(|err| AdapterError::BuildFailed {
reason: format!("test setup: {err:?}"),
})?;
self.state.built_package = Some(pkg_to_debug.clone());
self.state.test_setup = Some(test_setup.clone());
Ok((pkg_to_debug.clone(), test_setup))
}
/// Sends the 'exited' event to the client and kills the server process.
fn exit(&mut self, exit_code: i64) {
let _ = self
.server
.send_event(Event::Exited(ExitedEventBody { exit_code }));
process::exit(exit_code as i32);
}
fn stop(&mut self, pc: Instruction) -> Result<bool, AdapterError> {
let (hit_breakpoint_ids, reason) =
if let Ok(breakpoint_id) = self.state.vm_pc_to_breakpoint_id(pc) {
self.state.stopped_on_breakpoint_id = Some(breakpoint_id);
(
Some(vec![breakpoint_id]),
types::StoppedEventReason::Breakpoint,
)
} else {
self.state.stopped_on_breakpoint_id = None;
(None, types::StoppedEventReason::Step)
};
let _ = self.server.send_event(Event::Stopped(StoppedEventBody {
reason,
hit_breakpoint_ids,
description: None,
thread_id: Some(THREAD_ID),
preserve_focus_hint: None,
text: None,
all_threads_stopped: None,
}));
Ok(true)
}
/// Starts debugging all tests.
/// `single_stepping` indicates whether the VM should break after one instruction.
///
/// Returns true if it has stopped on a breakpoint or false if all tests have finished.
fn start_debugging_tests(&mut self, single_stepping: bool) -> Result<bool, AdapterError> {
self.state.update_vm_breakpoints();
while let Some(executor) = self.state.executors.first_mut() {
executor.interpreter.set_single_stepping(single_stepping);
match executor.start_debugging()? {
DebugResult::TestComplete(result) => {
self.state.test_complete(result);
}
DebugResult::Breakpoint(pc) => {
executor.interpreter.set_single_stepping(false);
return self.stop(pc);
}
};
}
self.log_test_results();
Ok(false)
}
/// Continues debugging the current test and starts the next one if no breakpoint is hit.
/// `single_stepping` indicates whether the VM should break after one instruction.
///
/// Returns true if it has stopped on a breakpoint or false if all tests have finished.
fn continue_debugging_tests(&mut self, single_stepping: bool) -> Result<bool, AdapterError> {
self.state.update_vm_breakpoints();
if let Some(executor) = self.state.executors.first_mut() {
executor.interpreter.set_single_stepping(single_stepping);
match executor.continue_debugging()? {
DebugResult::TestComplete(result) => {
self.state.test_complete(result);
// The current test has finished, but there could be more tests to run. Start debugging the
// remaining tests.
return self.start_debugging_tests(single_stepping);
}
DebugResult::Breakpoint(pc) => {
executor.interpreter.set_single_stepping(false);
return self.stop(pc);
}
}
}
self.log_test_results();
Ok(false)
}
}
/// Represents the result of a DAP handler operation, combining the response/error and an optional exit code
#[derive(Debug)]
pub struct HandlerResult {
response: Result<ResponseBody, AdapterError>,
exit_code: Option<ExitCode>,
}
impl HandlerResult {
/// Creates a new successful result with no exit code
pub fn ok(response: ResponseBody) -> Self {
Self {
response: Ok(response),
exit_code: None,
}
}
/// Creates a new successful result with an exit code
pub fn ok_with_exit(response: ResponseBody, code: ExitCode) -> Self {
Self {
response: Ok(response),
exit_code: Some(code),
}
}
/// Creates a new error result with an exit code
pub fn err_with_exit(error: AdapterError, code: ExitCode) -> Self {
Self {
response: Err(error),
exit_code: Some(code),
}
}
/// Creates a new error result with no exit code
pub fn err(error: AdapterError) -> Self {
Self {
response: Err(error),
exit_code: None,
}
}
/// Deconstructs the result into its original tuple form
pub fn into_tuple(self) -> (Result<ResponseBody, AdapterError>, Option<ExitCode>) {
(self.response, self.exit_code)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/handlers/handle_stack_trace.rs | forc-plugins/forc-debug/src/server/handlers/handle_stack_trace.rs | use crate::server::{util, AdapterError, DapServer, HandlerResult};
use dap::{
responses::ResponseBody,
types::{StackFrame, StackFramePresentationhint},
};
impl DapServer {
/// Handles a `stack_trace` request. Returns the list of [StackFrame]s for the current execution state.
pub(crate) fn handle_stack_trace_command(&self) -> HandlerResult {
let result = self.stack_trace().map(|stack_frames| {
ResponseBody::StackTrace(dap::responses::StackTraceResponse {
stack_frames,
total_frames: None,
})
});
match result {
Ok(result) => HandlerResult::ok(result),
Err(e) => HandlerResult::err_with_exit(e, 1),
}
}
fn stack_trace(&self) -> Result<Vec<StackFrame>, AdapterError> {
let executor = self
.state
.executors
.first()
.ok_or(AdapterError::NoActiveTestExecutor)?;
let name = executor.name.clone();
let source_location = match self.state.stopped_on_breakpoint_id {
// If we stopped on a breakpoint, use the breakpoint's source location.
Some(breakpoint_id) => self.state.breakpoints.iter().find_map(|(_, breakpoints)| {
breakpoints.iter().find_map(|bp| {
if Some(breakpoint_id) == bp.id {
if let Some(bp_line) = bp.line {
return Some((bp.source.clone(), bp_line));
}
}
None
})
}),
// Otherwise, use the current instruction's source location.
None => self
.state
.vm_pc_to_source_location(util::current_instruction(
executor.interpreter.registers(),
))
.ok()
.map(|(source_path, line)| (Some(util::path_into_source(&source_path)), line)),
};
// For now, we only return 1 stack frame.
let stack_frames = source_location
.map(|(source, line)| {
vec![StackFrame {
id: 0,
name,
source,
line,
column: 0,
presentation_hint: Some(StackFramePresentationhint::Normal),
..Default::default()
}]
})
.unwrap_or_default();
Ok(stack_frames)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/handlers/mod.rs | forc-plugins/forc-debug/src/server/handlers/mod.rs | use crate::{
error::AdapterError,
server::{
AdditionalData, DapServer, HandlerResult, INSTRUCTIONS_VARIABLE_REF,
REGISTERS_VARIABLE_REF, THREAD_ID,
},
};
use dap::{
prelude::*,
types::{Scope, StartDebuggingRequestKind},
};
use requests::{EvaluateArguments, LaunchRequestArguments};
use std::path::PathBuf;
pub(crate) mod handle_breakpoint_locations;
pub(crate) mod handle_set_breakpoints;
pub(crate) mod handle_stack_trace;
pub(crate) mod handle_variables;
impl DapServer {
pub(crate) fn handle_attach(&mut self) -> HandlerResult {
self.state.mode = Some(StartDebuggingRequestKind::Attach);
self.error("This feature is not currently supported.".into());
HandlerResult::ok_with_exit(ResponseBody::Attach, 0)
}
pub(crate) fn handle_initialize(&mut self) -> HandlerResult {
HandlerResult::ok(ResponseBody::Initialize(types::Capabilities {
supports_breakpoint_locations_request: Some(true),
supports_configuration_done_request: Some(true),
..Default::default()
}))
}
pub(crate) fn handle_configuration_done(&mut self) -> HandlerResult {
self.state.configuration_done = true;
HandlerResult::ok(ResponseBody::ConfigurationDone)
}
pub(crate) fn handle_launch(&mut self, args: &LaunchRequestArguments) -> HandlerResult {
self.state.mode = Some(StartDebuggingRequestKind::Launch);
if let Some(additional_data) = &args.additional_data {
if let Ok(data) = serde_json::from_value::<AdditionalData>(additional_data.clone()) {
self.state.program_path = PathBuf::from(data.program);
return HandlerResult::ok(ResponseBody::Launch);
}
}
HandlerResult::err_with_exit(AdapterError::MissingConfiguration, 1)
}
/// Handles a `next` request. Returns true if the server should continue running.
pub(crate) fn handle_next(&mut self) -> HandlerResult {
match self.continue_debugging_tests(true) {
Ok(true) => HandlerResult::ok(ResponseBody::Next),
Ok(false) => {
// The tests finished executing
HandlerResult::ok_with_exit(ResponseBody::Next, 0)
}
Err(e) => HandlerResult::err_with_exit(e, 1),
}
}
/// Handles a `continue` request. Returns true if the server should continue running.
pub(crate) fn handle_continue(&mut self) -> HandlerResult {
match self.continue_debugging_tests(false) {
Ok(true) => HandlerResult::ok(ResponseBody::Continue(responses::ContinueResponse {
all_threads_continued: Some(true),
})),
Ok(false) => HandlerResult::ok_with_exit(
ResponseBody::Continue(responses::ContinueResponse {
all_threads_continued: Some(true),
}),
0,
),
Err(e) => HandlerResult::err_with_exit(e, 1),
}
}
pub(crate) fn handle_evaluate(&mut self, args: &EvaluateArguments) -> HandlerResult {
let result = match args.context {
Some(types::EvaluateArgumentsContext::Variables) => args.expression.clone(),
_ => "Evaluate expressions not supported in this context".into(),
};
HandlerResult::ok(ResponseBody::Evaluate(responses::EvaluateResponse {
result,
..Default::default()
}))
}
pub(crate) fn handle_pause(&mut self) -> HandlerResult {
// TODO: interpreter pause function
if let Some(executor) = self.state.executor() {
executor.interpreter.set_single_stepping(true);
}
HandlerResult::ok(ResponseBody::Pause)
}
pub(crate) fn handle_restart(&mut self) -> HandlerResult {
self.state.reset();
HandlerResult::ok(ResponseBody::Restart)
}
pub(crate) fn handle_scopes(&mut self) -> HandlerResult {
HandlerResult::ok(ResponseBody::Scopes(responses::ScopesResponse {
scopes: vec![
Scope {
name: "Current VM Instruction".into(),
presentation_hint: Some(types::ScopePresentationhint::Registers),
variables_reference: INSTRUCTIONS_VARIABLE_REF,
..Default::default()
},
Scope {
name: "Registers".into(),
presentation_hint: Some(types::ScopePresentationhint::Registers),
variables_reference: REGISTERS_VARIABLE_REF,
..Default::default()
},
],
}))
}
pub(crate) fn handle_threads(&mut self) -> HandlerResult {
HandlerResult::ok(ResponseBody::Threads(responses::ThreadsResponse {
threads: vec![types::Thread {
id: THREAD_ID,
name: "main".into(),
}],
}))
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/handlers/handle_breakpoint_locations.rs | forc-plugins/forc-debug/src/server/handlers/handle_breakpoint_locations.rs | use crate::server::{AdapterError, DapServer, HandlerResult};
use dap::{
requests::BreakpointLocationsArguments, responses::ResponseBody, types::BreakpointLocation,
};
use std::path::PathBuf;
impl DapServer {
/// Handles a `breakpoint_locations` request. Returns the list of [BreakpointLocation]s.
pub(crate) fn handle_breakpoint_locations_command(
&self,
args: &BreakpointLocationsArguments,
) -> HandlerResult {
let result = self.breakpoint_locations(args).map(|breakpoints| {
ResponseBody::BreakpointLocations(dap::responses::BreakpointLocationsResponse {
breakpoints,
})
});
match result {
Ok(result) => HandlerResult::ok(result),
Err(e) => HandlerResult::err_with_exit(e, 1),
}
}
fn breakpoint_locations(
&self,
args: &BreakpointLocationsArguments,
) -> Result<Vec<BreakpointLocation>, AdapterError> {
let source_path = args
.source
.path
.as_ref()
.ok_or(AdapterError::MissingSourcePathArgument)?;
let existing_breakpoints = self
.state
.breakpoints
.get(&PathBuf::from(source_path))
.ok_or(AdapterError::MissingBreakpointLocation)?;
let breakpoints = existing_breakpoints
.iter()
.filter_map(|bp| {
bp.line.map(|line| BreakpointLocation {
line,
..Default::default()
})
})
.collect();
Ok(breakpoints)
}
}
#[cfg(test)]
mod tests {
use super::*;
const MOCK_SOURCE_PATH: &str = "some/path";
const MOCK_BP_ID: i64 = 1;
const MOCK_LINE: i64 = 1;
#[test]
fn test_handle_breakpoint_locations_success() {
let mut server = DapServer::default();
server.state.breakpoints.insert(
PathBuf::from(MOCK_SOURCE_PATH),
vec![dap::types::Breakpoint {
id: Some(MOCK_BP_ID),
line: Some(MOCK_LINE),
..Default::default()
}],
);
let args = BreakpointLocationsArguments {
source: dap::types::Source {
path: Some(MOCK_SOURCE_PATH.into()),
..Default::default()
},
..Default::default()
};
let result = server.breakpoint_locations(&args).expect("success");
assert_eq!(result.len(), 1);
assert_eq!(result[0].line, MOCK_LINE);
}
#[test]
#[should_panic(expected = "MissingSourcePathArgument")]
fn test_handle_breakpoint_locations_missing_argument() {
let server = DapServer::default();
let args = BreakpointLocationsArguments::default();
server.breakpoint_locations(&args).unwrap();
}
#[test]
#[should_panic(expected = "MissingBreakpointLocation")]
fn test_handle_breakpoint_locations_missing_breakpoint() {
let server = DapServer::default();
let args = BreakpointLocationsArguments {
source: dap::types::Source {
path: Some(MOCK_SOURCE_PATH.into()),
..Default::default()
},
..Default::default()
};
server.breakpoint_locations(&args).unwrap();
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/handlers/handle_variables.rs | forc-plugins/forc-debug/src/server/handlers/handle_variables.rs | use crate::{
names::register_name,
server::{
AdapterError, DapServer, HandlerResult, INSTRUCTIONS_VARIABLE_REF, REGISTERS_VARIABLE_REF,
},
};
use dap::{requests::VariablesArguments, responses::ResponseBody, types::Variable};
use fuel_vm::fuel_asm::{Imm06, Imm12, Imm18, Imm24, Instruction, RawInstruction, RegId};
impl DapServer {
/// Processes a variables request, returning all variables and their current values.
pub(crate) fn handle_variables_command(&self, args: &VariablesArguments) -> HandlerResult {
let result = self.get_variables(args).map(|variables| {
ResponseBody::Variables(dap::responses::VariablesResponse { variables })
});
match result {
Ok(result) => HandlerResult::ok(result),
Err(e) => HandlerResult::err_with_exit(e, 1),
}
}
/// Returns the list of [Variable]s for the current execution state.
pub(crate) fn get_variables(
&self,
args: &VariablesArguments,
) -> Result<Vec<Variable>, AdapterError> {
let executor = self
.state
.executors
.first()
.ok_or(AdapterError::NoActiveTestExecutor)?;
let register_variables = executor
.interpreter
.registers()
.iter()
.enumerate()
.map(|(index, value)| Variable {
name: register_name(index),
value: format!("0x{value:X?}"),
..Default::default()
})
.collect::<Vec<_>>();
// Slice out current opcode pc-4..pc and then parse using fuel-asm
// to return the opcode and its arguments.
let pc = executor.interpreter.registers()[RegId::PC] as usize;
let instruction_variables = match Instruction::try_from(RawInstruction::from_be_bytes(
executor.interpreter.memory()[pc..pc + 4]
.try_into()
.unwrap(),
)) {
Ok(instruction) => vec![
("Opcode", Some(format!("{:?}", instruction.opcode()))),
("rA", ra(instruction)),
("rB", rb(instruction)),
("rC", rc(instruction)),
("rD", rd(instruction)),
("imm", imm(instruction)),
]
.iter()
.filter_map(|(name, value)| {
value.as_ref().map(|value| Variable {
name: (*name).to_string(),
value: value.to_string(),
..Default::default()
})
})
.collect(),
Err(_) => vec![],
};
match args.variables_reference {
REGISTERS_VARIABLE_REF => Ok(register_variables),
INSTRUCTIONS_VARIABLE_REF => Ok(instruction_variables),
_ => Ok(vec![]),
}
}
}
fn reg_id_to_string(reg_id: Option<RegId>) -> Option<String> {
reg_id.map(|reg_id| register_name(reg_id.into()))
}
fn imm06_to_string(value: Imm06) -> Option<String> {
Some(format!("0x{:X?}", value.to_u8()))
}
fn imm12_to_string(value: Imm12) -> Option<String> {
Some(format!("0x{:X?}", value.to_u16()))
}
fn imm18_to_string(value: Imm18) -> Option<String> {
Some(format!("0x{:X?}", value.to_u32()))
}
fn imm24_to_string(value: Imm24) -> Option<String> {
Some(format!("0x{:X?}", value.to_u32()))
}
fn ra(instruction: Instruction) -> Option<String> {
reg_id_to_string(match instruction {
Instruction::ADD(op) => Some(op.ra()),
Instruction::AND(op) => Some(op.ra()),
Instruction::DIV(op) => Some(op.ra()),
Instruction::EQ(op) => Some(op.ra()),
Instruction::EXP(op) => Some(op.ra()),
Instruction::GT(op) => Some(op.ra()),
Instruction::LT(op) => Some(op.ra()),
Instruction::MLOG(op) => Some(op.ra()),
Instruction::MROO(op) => Some(op.ra()),
Instruction::MOD(op) => Some(op.ra()),
Instruction::MOVE(op) => Some(op.ra()),
Instruction::MUL(op) => Some(op.ra()),
Instruction::NOT(op) => Some(op.ra()),
Instruction::OR(op) => Some(op.ra()),
Instruction::SLL(op) => Some(op.ra()),
Instruction::SRL(op) => Some(op.ra()),
Instruction::SUB(op) => Some(op.ra()),
Instruction::XOR(op) => Some(op.ra()),
Instruction::MLDV(op) => Some(op.ra()),
Instruction::RET(op) => Some(op.ra()),
Instruction::RETD(op) => Some(op.ra()),
Instruction::ALOC(op) => Some(op.ra()),
Instruction::MCL(op) => Some(op.ra()),
Instruction::MCP(op) => Some(op.ra()),
Instruction::MEQ(op) => Some(op.ra()),
Instruction::BHSH(op) => Some(op.ra()),
Instruction::BHEI(op) => Some(op.ra()),
Instruction::BURN(op) => Some(op.ra()),
Instruction::CALL(op) => Some(op.ra()),
Instruction::CCP(op) => Some(op.ra()),
Instruction::CROO(op) => Some(op.ra()),
Instruction::CSIZ(op) => Some(op.ra()),
Instruction::CB(op) => Some(op.ra()),
Instruction::LDC(op) => Some(op.ra()),
Instruction::LOG(op) => Some(op.ra()),
Instruction::LOGD(op) => Some(op.ra()),
Instruction::MINT(op) => Some(op.ra()),
Instruction::RVRT(op) => Some(op.ra()),
Instruction::SCWQ(op) => Some(op.ra()),
Instruction::SRW(op) => Some(op.ra()),
Instruction::SRWQ(op) => Some(op.ra()),
Instruction::SWW(op) => Some(op.ra()),
Instruction::SWWQ(op) => Some(op.ra()),
Instruction::TR(op) => Some(op.ra()),
Instruction::TRO(op) => Some(op.ra()),
Instruction::ECK1(op) => Some(op.ra()),
Instruction::ECR1(op) => Some(op.ra()),
Instruction::ED19(op) => Some(op.ra()),
Instruction::K256(op) => Some(op.ra()),
Instruction::S256(op) => Some(op.ra()),
Instruction::TIME(op) => Some(op.ra()),
Instruction::FLAG(op) => Some(op.ra()),
Instruction::BAL(op) => Some(op.ra()),
Instruction::JMP(op) => Some(op.ra()),
Instruction::JNE(op) => Some(op.ra()),
Instruction::SMO(op) => Some(op.ra()),
Instruction::ADDI(op) => Some(op.ra()),
Instruction::ANDI(op) => Some(op.ra()),
Instruction::DIVI(op) => Some(op.ra()),
Instruction::EXPI(op) => Some(op.ra()),
Instruction::MODI(op) => Some(op.ra()),
Instruction::MULI(op) => Some(op.ra()),
Instruction::ORI(op) => Some(op.ra()),
Instruction::SLLI(op) => Some(op.ra()),
Instruction::SRLI(op) => Some(op.ra()),
Instruction::SUBI(op) => Some(op.ra()),
Instruction::XORI(op) => Some(op.ra()),
Instruction::JNEI(op) => Some(op.ra()),
Instruction::LB(op) => Some(op.ra()),
Instruction::LW(op) => Some(op.ra()),
Instruction::SB(op) => Some(op.ra()),
Instruction::SW(op) => Some(op.ra()),
Instruction::MCPI(op) => Some(op.ra()),
Instruction::GTF(op) => Some(op.ra()),
Instruction::MCLI(op) => Some(op.ra()),
Instruction::GM(op) => Some(op.ra()),
Instruction::MOVI(op) => Some(op.ra()),
Instruction::JNZI(op) => Some(op.ra()),
Instruction::JMPF(op) => Some(op.ra()),
Instruction::JMPB(op) => Some(op.ra()),
Instruction::JNZF(op) => Some(op.ra()),
Instruction::JNZB(op) => Some(op.ra()),
Instruction::JNEF(op) => Some(op.ra()),
Instruction::JNEB(op) => Some(op.ra()),
Instruction::CFE(op) => Some(op.ra()),
Instruction::CFS(op) => Some(op.ra()),
Instruction::WDCM(op) => Some(op.ra()),
Instruction::WQCM(op) => Some(op.ra()),
Instruction::WDOP(op) => Some(op.ra()),
Instruction::WQOP(op) => Some(op.ra()),
Instruction::WDML(op) => Some(op.ra()),
Instruction::WQML(op) => Some(op.ra()),
Instruction::WDDV(op) => Some(op.ra()),
Instruction::WQDV(op) => Some(op.ra()),
Instruction::WDMD(op) => Some(op.ra()),
Instruction::WQMD(op) => Some(op.ra()),
Instruction::WDAM(op) => Some(op.ra()),
Instruction::WQAM(op) => Some(op.ra()),
Instruction::WDMM(op) => Some(op.ra()),
Instruction::WQMM(op) => Some(op.ra()),
Instruction::ECAL(op) => Some(op.ra()),
_ => None,
})
}
fn rb(instruction: Instruction) -> Option<String> {
reg_id_to_string(match instruction {
Instruction::ADD(op) => Some(op.rb()),
Instruction::AND(op) => Some(op.rb()),
Instruction::DIV(op) => Some(op.rb()),
Instruction::EQ(op) => Some(op.rb()),
Instruction::EXP(op) => Some(op.rb()),
Instruction::GT(op) => Some(op.rb()),
Instruction::LT(op) => Some(op.rb()),
Instruction::MLOG(op) => Some(op.rb()),
Instruction::MROO(op) => Some(op.rb()),
Instruction::MOD(op) => Some(op.rb()),
Instruction::MOVE(op) => Some(op.rb()),
Instruction::MUL(op) => Some(op.rb()),
Instruction::NOT(op) => Some(op.rb()),
Instruction::OR(op) => Some(op.rb()),
Instruction::SLL(op) => Some(op.rb()),
Instruction::SRL(op) => Some(op.rb()),
Instruction::SUB(op) => Some(op.rb()),
Instruction::XOR(op) => Some(op.rb()),
Instruction::MLDV(op) => Some(op.rb()),
Instruction::RETD(op) => Some(op.rb()),
Instruction::MCL(op) => Some(op.rb()),
Instruction::MCP(op) => Some(op.rb()),
Instruction::MEQ(op) => Some(op.rb()),
Instruction::BHSH(op) => Some(op.rb()),
Instruction::BURN(op) => Some(op.rb()),
Instruction::CALL(op) => Some(op.rb()),
Instruction::CCP(op) => Some(op.rb()),
Instruction::CROO(op) => Some(op.rb()),
Instruction::CSIZ(op) => Some(op.rb()),
Instruction::LDC(op) => Some(op.rb()),
Instruction::LOG(op) => Some(op.rb()),
Instruction::LOGD(op) => Some(op.rb()),
Instruction::MINT(op) => Some(op.rb()),
Instruction::SCWQ(op) => Some(op.rb()),
Instruction::SRW(op) => Some(op.rb()),
Instruction::SRWQ(op) => Some(op.rb()),
Instruction::SWW(op) => Some(op.rb()),
Instruction::SWWQ(op) => Some(op.rb()),
Instruction::TR(op) => Some(op.rb()),
Instruction::TRO(op) => Some(op.rb()),
Instruction::ECK1(op) => Some(op.rb()),
Instruction::ECR1(op) => Some(op.rb()),
Instruction::ED19(op) => Some(op.rb()),
Instruction::K256(op) => Some(op.rb()),
Instruction::S256(op) => Some(op.rb()),
Instruction::TIME(op) => Some(op.rb()),
Instruction::BAL(op) => Some(op.rb()),
Instruction::JNE(op) => Some(op.rb()),
Instruction::SMO(op) => Some(op.rb()),
Instruction::ADDI(op) => Some(op.rb()),
Instruction::ANDI(op) => Some(op.rb()),
Instruction::DIVI(op) => Some(op.rb()),
Instruction::EXPI(op) => Some(op.rb()),
Instruction::MODI(op) => Some(op.rb()),
Instruction::MULI(op) => Some(op.rb()),
Instruction::ORI(op) => Some(op.rb()),
Instruction::SLLI(op) => Some(op.rb()),
Instruction::SRLI(op) => Some(op.rb()),
Instruction::SUBI(op) => Some(op.rb()),
Instruction::XORI(op) => Some(op.rb()),
Instruction::JNEI(op) => Some(op.rb()),
Instruction::LB(op) => Some(op.rb()),
Instruction::LW(op) => Some(op.rb()),
Instruction::SB(op) => Some(op.rb()),
Instruction::SW(op) => Some(op.rb()),
Instruction::MCPI(op) => Some(op.rb()),
Instruction::GTF(op) => Some(op.rb()),
Instruction::JNZF(op) => Some(op.rb()),
Instruction::JNZB(op) => Some(op.rb()),
Instruction::JNEF(op) => Some(op.rb()),
Instruction::JNEB(op) => Some(op.rb()),
Instruction::WDCM(op) => Some(op.rb()),
Instruction::WQCM(op) => Some(op.rb()),
Instruction::WDOP(op) => Some(op.rb()),
Instruction::WQOP(op) => Some(op.rb()),
Instruction::WDML(op) => Some(op.rb()),
Instruction::WQML(op) => Some(op.rb()),
Instruction::WDDV(op) => Some(op.rb()),
Instruction::WQDV(op) => Some(op.rb()),
Instruction::WDMD(op) => Some(op.rb()),
Instruction::WQMD(op) => Some(op.rb()),
Instruction::WDAM(op) => Some(op.rb()),
Instruction::WQAM(op) => Some(op.rb()),
Instruction::WDMM(op) => Some(op.rb()),
Instruction::WQMM(op) => Some(op.rb()),
Instruction::ECAL(op) => Some(op.rb()),
_ => None,
})
}
fn rc(instruction: Instruction) -> Option<String> {
reg_id_to_string(match instruction {
Instruction::ADD(op) => Some(op.rc()),
Instruction::AND(op) => Some(op.rc()),
Instruction::DIV(op) => Some(op.rc()),
Instruction::EQ(op) => Some(op.rc()),
Instruction::EXP(op) => Some(op.rc()),
Instruction::GT(op) => Some(op.rc()),
Instruction::LT(op) => Some(op.rc()),
Instruction::MLOG(op) => Some(op.rc()),
Instruction::MROO(op) => Some(op.rc()),
Instruction::MOD(op) => Some(op.rc()),
Instruction::MUL(op) => Some(op.rc()),
Instruction::OR(op) => Some(op.rc()),
Instruction::SLL(op) => Some(op.rc()),
Instruction::SRL(op) => Some(op.rc()),
Instruction::SUB(op) => Some(op.rc()),
Instruction::XOR(op) => Some(op.rc()),
Instruction::MLDV(op) => Some(op.rc()),
Instruction::MCP(op) => Some(op.rc()),
Instruction::MEQ(op) => Some(op.rc()),
Instruction::CALL(op) => Some(op.rc()),
Instruction::CCP(op) => Some(op.rc()),
Instruction::LDC(op) => Some(op.rc()),
Instruction::LOG(op) => Some(op.rc()),
Instruction::LOGD(op) => Some(op.rc()),
Instruction::SCWQ(op) => Some(op.rc()),
Instruction::SRW(op) => Some(op.rc()),
Instruction::SRWQ(op) => Some(op.rc()),
Instruction::SWW(op) => Some(op.rc()),
Instruction::SWWQ(op) => Some(op.rc()),
Instruction::TR(op) => Some(op.rc()),
Instruction::TRO(op) => Some(op.rc()),
Instruction::ECK1(op) => Some(op.rc()),
Instruction::ECR1(op) => Some(op.rc()),
Instruction::ED19(op) => Some(op.rc()),
Instruction::K256(op) => Some(op.rc()),
Instruction::S256(op) => Some(op.rc()),
Instruction::BAL(op) => Some(op.rc()),
Instruction::JNE(op) => Some(op.rc()),
Instruction::SMO(op) => Some(op.rc()),
Instruction::JNEF(op) => Some(op.rc()),
Instruction::JNEB(op) => Some(op.rc()),
Instruction::WDCM(op) => Some(op.rc()),
Instruction::WQCM(op) => Some(op.rc()),
Instruction::WDOP(op) => Some(op.rc()),
Instruction::WQOP(op) => Some(op.rc()),
Instruction::WDML(op) => Some(op.rc()),
Instruction::WQML(op) => Some(op.rc()),
Instruction::WDDV(op) => Some(op.rc()),
Instruction::WQDV(op) => Some(op.rc()),
Instruction::WDMD(op) => Some(op.rc()),
Instruction::WQMD(op) => Some(op.rc()),
Instruction::WDAM(op) => Some(op.rc()),
Instruction::WQAM(op) => Some(op.rc()),
Instruction::WDMM(op) => Some(op.rc()),
Instruction::WQMM(op) => Some(op.rc()),
Instruction::ECAL(op) => Some(op.rc()),
_ => None,
})
}
/// Returns the formatted `rD` operand name for opcodes that have a fourth
/// register operand, or `None` for all other opcodes.
fn rd(instruction: Instruction) -> Option<String> {
    reg_id_to_string(match instruction {
        Instruction::MLDV(op) => Some(op.rd()),
        Instruction::MEQ(op) => Some(op.rd()),
        Instruction::CALL(op) => Some(op.rd()),
        Instruction::CCP(op) => Some(op.rd()),
        Instruction::LOG(op) => Some(op.rd()),
        Instruction::LOGD(op) => Some(op.rd()),
        Instruction::SRWQ(op) => Some(op.rd()),
        Instruction::SWWQ(op) => Some(op.rd()),
        Instruction::TRO(op) => Some(op.rd()),
        Instruction::WDMD(op) => Some(op.rd()),
        Instruction::WQMD(op) => Some(op.rd()),
        Instruction::WDAM(op) => Some(op.rd()),
        Instruction::WQAM(op) => Some(op.rd()),
        Instruction::WDMM(op) => Some(op.rd()),
        Instruction::WQMM(op) => Some(op.rd()),
        Instruction::ECAL(op) => Some(op.rd()),
        _ => None,
    })
}
/// Returns the formatted immediate operand for opcodes that carry one
/// (6-, 12-, 18-, or 24-bit wide depending on the opcode), or `None` for
/// opcodes without an immediate.
fn imm(instruction: Instruction) -> Option<String> {
    match instruction {
        Instruction::ADDI(op) => imm12_to_string(op.imm12()),
        Instruction::ANDI(op) => imm12_to_string(op.imm12()),
        Instruction::DIVI(op) => imm12_to_string(op.imm12()),
        Instruction::EXPI(op) => imm12_to_string(op.imm12()),
        Instruction::MODI(op) => imm12_to_string(op.imm12()),
        Instruction::MULI(op) => imm12_to_string(op.imm12()),
        Instruction::ORI(op) => imm12_to_string(op.imm12()),
        Instruction::SLLI(op) => imm12_to_string(op.imm12()),
        Instruction::SRLI(op) => imm12_to_string(op.imm12()),
        Instruction::SUBI(op) => imm12_to_string(op.imm12()),
        Instruction::XORI(op) => imm12_to_string(op.imm12()),
        Instruction::JNEI(op) => imm12_to_string(op.imm12()),
        Instruction::LB(op) => imm12_to_string(op.imm12()),
        Instruction::LW(op) => imm12_to_string(op.imm12()),
        Instruction::SB(op) => imm12_to_string(op.imm12()),
        Instruction::SW(op) => imm12_to_string(op.imm12()),
        Instruction::MCPI(op) => imm12_to_string(op.imm12()),
        Instruction::GTF(op) => imm12_to_string(op.imm12()),
        Instruction::MCLI(op) => imm18_to_string(op.imm18()),
        Instruction::GM(op) => imm18_to_string(op.imm18()),
        Instruction::MOVI(op) => imm18_to_string(op.imm18()),
        Instruction::JNZI(op) => imm18_to_string(op.imm18()),
        Instruction::JMPF(op) => imm18_to_string(op.imm18()),
        Instruction::JMPB(op) => imm18_to_string(op.imm18()),
        Instruction::JNZF(op) => imm12_to_string(op.imm12()),
        Instruction::JNZB(op) => imm12_to_string(op.imm12()),
        Instruction::JNEF(op) => imm06_to_string(op.imm06()),
        Instruction::JNEB(op) => imm06_to_string(op.imm06()),
        Instruction::JI(op) => imm24_to_string(op.imm24()),
        Instruction::CFEI(op) => imm24_to_string(op.imm24()),
        Instruction::CFSI(op) => imm24_to_string(op.imm24()),
        Instruction::PSHL(op) => imm24_to_string(op.imm24()),
        Instruction::PSHH(op) => imm24_to_string(op.imm24()),
        Instruction::POPL(op) => imm24_to_string(op.imm24()),
        Instruction::POPH(op) => imm24_to_string(op.imm24()),
        Instruction::WDCM(op) => imm06_to_string(op.imm06()),
        Instruction::WQCM(op) => imm06_to_string(op.imm06()),
        Instruction::WDOP(op) => imm06_to_string(op.imm06()),
        Instruction::WQOP(op) => imm06_to_string(op.imm06()),
        Instruction::WDML(op) => imm06_to_string(op.imm06()),
        Instruction::WQML(op) => imm06_to_string(op.imm06()),
        Instruction::WDDV(op) => imm06_to_string(op.imm06()),
        Instruction::WQDV(op) => imm06_to_string(op.imm06()),
        _ => None,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/server/handlers/handle_set_breakpoints.rs | forc-plugins/forc-debug/src/server/handlers/handle_set_breakpoints.rs | use crate::server::{AdapterError, DapServer, HandlerResult};
use dap::{
requests::SetBreakpointsArguments,
responses::ResponseBody,
types::{Breakpoint, StartDebuggingRequestKind},
};
use std::path::PathBuf;
impl DapServer {
    /// Handles a `set_breakpoints` request. Returns the list of [Breakpoint]s for the path provided in `args`.
    pub(crate) fn handle_set_breakpoints_command(
        &mut self,
        args: &SetBreakpointsArguments,
    ) -> HandlerResult {
        let result = self.set_breakpoints(args).map(|breakpoints| {
            ResponseBody::SetBreakpoints(dap::responses::SetBreakpointsResponse { breakpoints })
        });
        match result {
            Ok(result) => HandlerResult::ok(result),
            // Any adapter error is treated as fatal for the session: report it
            // and request exit code 1.
            Err(e) => HandlerResult::err_with_exit(e, 1),
        }
    }

    /// Records the set of breakpoints requested for one source file and
    /// returns them, marking each as `verified` when some program counter in
    /// the source map resolves to the same file and line.
    ///
    /// Breakpoint ids are stable: a line that already had a breakpoint keeps
    /// its previous id; new lines receive a freshly generated one.
    fn set_breakpoints(
        &mut self,
        args: &SetBreakpointsArguments,
    ) -> Result<Vec<Breakpoint>, AdapterError> {
        // Build the source maps so we can verify breakpoints
        if let Some(StartDebuggingRequestKind::Launch) = self.state.mode {
            let _ = self.build_tests()?;
        }
        let source_path = args
            .source
            .path
            .as_ref()
            .ok_or(AdapterError::MissingSourcePathArgument)?;
        let source_path_buf = PathBuf::from(source_path);
        // Previously registered breakpoints for this file, used for id reuse.
        let existing_breakpoints = self
            .state
            .breakpoints
            .get(&source_path_buf)
            .cloned()
            .unwrap_or_default();
        let breakpoints = args
            .breakpoints
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|source_bp| {
                // Check if there are any instructions mapped to this line in the source
                let verified = self.state.source_map.map.iter().any(|(pc, _)| {
                    if let Some((path, range)) = self.state.source_map.addr_to_span(*pc) {
                        path == source_path_buf && range.start.line as i64 == source_bp.line
                    } else {
                        false
                    }
                });
                // Reuse the id of an existing breakpoint on the same line.
                if let Some(existing_bp) = existing_breakpoints
                    .iter()
                    .find(|bp| bp.line == Some(source_bp.line))
                {
                    Breakpoint {
                        verified,
                        ..existing_bp.clone()
                    }
                } else {
                    let id = Some(self.breakpoint_id_gen.next());
                    Breakpoint {
                        id,
                        verified,
                        line: Some(source_bp.line),
                        source: Some(args.source.clone()),
                        ..Default::default()
                    }
                }
            })
            .collect::<Vec<_>>();
        // The new set fully replaces whatever was registered for this file.
        self.state
            .breakpoints
            .insert(source_path_buf, breakpoints.clone());
        self.state.breakpoints_need_update = true;
        Ok(breakpoints)
    }
}
#[cfg(test)]
mod tests {
use super::*;
use sway_core::source_map::{LocationRange, PathIndex, SourceMap, SourceMapSpan};
use sway_types::LineCol;
const MOCK_SOURCE_PATH: &str = "some/path";
const MOCK_BP_ID: i64 = 1;
const MOCK_LINE: i64 = 1;
const MOCK_INSTRUCTION: u64 = 1;
fn get_test_server(source_map: bool, existing_bp: bool) -> DapServer {
let mut server = DapServer::default();
if source_map {
// Create a source map with our test line
let mut map = SourceMap::new();
map.paths.push(PathBuf::from(MOCK_SOURCE_PATH));
map.map.insert(
MOCK_INSTRUCTION as usize,
SourceMapSpan {
path: PathIndex(0),
range: LocationRange {
start: LineCol {
line: MOCK_LINE as usize,
col: 0,
},
end: LineCol {
line: MOCK_LINE as usize,
col: 10,
},
},
},
);
server.state.source_map = map;
}
if existing_bp {
server.state.breakpoints.insert(
PathBuf::from(MOCK_SOURCE_PATH),
vec![dap::types::Breakpoint {
id: Some(MOCK_BP_ID),
line: Some(MOCK_LINE),
verified: false,
source: Some(dap::types::Source {
path: Some(MOCK_SOURCE_PATH.into()),
..Default::default()
}),
..Default::default()
}],
);
}
server
}
fn get_test_args() -> SetBreakpointsArguments {
SetBreakpointsArguments {
source: dap::types::Source {
path: Some(MOCK_SOURCE_PATH.into()),
..Default::default()
},
breakpoints: Some(vec![dap::types::SourceBreakpoint {
line: MOCK_LINE,
..Default::default()
}]),
..Default::default()
}
}
#[test]
fn test_handle_set_breakpoints_existing_verified() {
let mut server = get_test_server(true, true);
let args = get_test_args();
let result = server.set_breakpoints(&args).expect("success");
assert_eq!(result.len(), 1);
assert_eq!(result[0].line, Some(MOCK_LINE));
assert_eq!(result[0].id, Some(MOCK_BP_ID));
assert_eq!(
result[0].source.clone().expect("source").path,
Some(MOCK_SOURCE_PATH.into())
);
assert!(result[0].verified);
}
#[test]
fn test_handle_set_breakpoints_existing_unverified() {
let mut server = get_test_server(false, true);
let args = get_test_args();
let result = server.set_breakpoints(&args).expect("success");
assert_eq!(result.len(), 1);
assert_eq!(result[0].line, Some(MOCK_LINE));
assert_eq!(result[0].id, Some(MOCK_BP_ID));
assert_eq!(
result[0].source.clone().expect("source").path,
Some(MOCK_SOURCE_PATH.into())
);
assert!(!result[0].verified);
}
#[test]
fn test_handle_set_breakpoints_new() {
let mut server = get_test_server(true, false);
let args = get_test_args();
let result = server.set_breakpoints(&args).expect("success");
assert_eq!(result.len(), 1);
assert_eq!(result[0].line, Some(MOCK_LINE));
assert_eq!(
result[0].source.clone().expect("source").path,
Some(MOCK_SOURCE_PATH.into())
);
assert!(result[0].verified);
}
#[test]
#[should_panic(expected = "MissingSourcePathArgument")]
fn test_handle_breakpoint_locations_missing_argument() {
let mut server = get_test_server(true, true);
let args = SetBreakpointsArguments::default();
server.set_breakpoints(&args).unwrap();
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/cli/state.rs | forc-plugins/forc-debug/src/cli/state.rs | use crate::{cli::commands::Commands, names};
use rustyline::{
completion::Completer,
highlight::{CmdKind, Highlighter},
hint::Hinter,
validate::{ValidationContext, ValidationResult, Validator},
Context, Helper,
};
use serde_json::Value;
use std::{borrow::Cow, collections::HashSet, fs};
pub struct DebuggerHelper {
pub commands: Commands,
}
impl DebuggerHelper {
    /// Creates a helper backed by the full debugger command table.
    pub fn new() -> Self {
        Self {
            commands: Commands::new(),
        }
    }
}

// `new` takes no arguments, so provide the conventional `Default` impl
// (clippy: `new_without_default`); it simply delegates to `new`.
impl Default for DebuggerHelper {
    fn default() -> Self {
        Self::new()
    }
}
impl Completer for DebuggerHelper {
    type Candidate = String;

    /// Tab-completion for the REPL.
    ///
    /// Completion is context-sensitive on the first word of the line:
    /// transaction commands complete transaction/ABI JSON file paths,
    /// register commands complete register names, and everything else
    /// completes against the full command table. Returns the byte offset
    /// where the completed word starts plus the candidate strings.
    fn complete(
        &self,
        line: &str,
        pos: usize,
        _ctx: &Context<'_>,
    ) -> rustyline::Result<(usize, Vec<Self::Candidate>)> {
        let words: Vec<&str> = line[..pos].split_whitespace().collect();
        // Start of the word currently being completed (0 if it is the first).
        let word_start = line[..pos].rfind(char::is_whitespace).map_or(0, |i| i + 1);
        let word_to_complete = &line[word_start..pos];
        // Transaction command context
        if let Some(first_word) = words.first() {
            if self.commands.is_tx_command(first_word) {
                match words.len() {
                    1 => {
                        // First argument is transaction file
                        return Ok((word_start, get_transaction_files(word_to_complete)));
                    }
                    2 => {
                        // Second argument is local ABI file
                        return Ok((word_start, get_abi_files(word_to_complete)));
                    }
                    _ => {
                        // After this, if someone explicitly types --abi, then we can help with contract_id:abi.json
                        if words[words.len() - 2] == "--abi" {
                            let abi_files = get_abi_files(word_to_complete);
                            return Ok((word_start, abi_files));
                        }
                    }
                }
            }
            // Register command context: only applies once a space separates
            // the command word from the argument being completed.
            if self.commands.is_register_command(first_word) && line[..word_start].ends_with(' ') {
                let matches: Vec<String> = names::REGISTERS
                    .into_iter()
                    .filter(|name| name.starts_with(word_to_complete))
                    .map(String::from)
                    .collect();
                return Ok((word_start, matches));
            }
        }
        // Main command completion
        let matches: Vec<String> = self
            .commands
            .get_all_command_strings()
            .into_iter()
            .filter(|cmd| cmd.starts_with(word_to_complete))
            .map(String::from)
            .collect();
        Ok((word_start, matches))
    }
}
impl Hinter for DebuggerHelper {
    type Hint = String;

    /// Produces an inline hint: a command's help text while only the command
    /// word has been typed, plus an argument hint for the `help` command.
    fn hint(&self, line: &str, pos: usize, _ctx: &Context<'_>) -> Option<Self::Hint> {
        let typed = &line[..pos];
        let first_word = typed.split_whitespace().next()?;
        let command = self.commands.find_command(first_word)?;
        if typed.split_whitespace().count() == 1 {
            Some(format!(" - {}", command.help))
        } else if self.commands.is_help_command(first_word) {
            Some(" [command] - show help for a command".into())
        } else {
            None
        }
    }
}
impl Highlighter for DebuggerHelper {
    // Hints, candidates, and input lines are rendered verbatim; only the
    // default prompt gets custom styling below.
    fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
        Cow::Borrowed(hint)
    }

    fn highlight_candidate<'c>(
        &self,
        candidate: &'c str,
        _completion: rustyline::CompletionType,
    ) -> Cow<'c, str> {
        Cow::Borrowed(candidate)
    }

    fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> {
        Cow::Borrowed(line)
    }

    fn highlight_char(&self, _line: &str, _pos: usize, _kind: CmdKind) -> bool {
        true
    }

    /// Renders the default prompt as a green `>>` via an ANSI truecolor
    /// escape sequence; any non-default prompt is shown unchanged.
    fn highlight_prompt<'b, 's: 'b, 'p: 'b>(
        &'s self,
        prompt: &'p str,
        default: bool,
    ) -> Cow<'b, str> {
        if default {
            // Using RGB values: 4, 234, 130 | fuel green :)
            Cow::Owned("\x1b[38;2;4;234;130m>>\x1b[0m ".to_owned())
        } else {
            Cow::Borrowed(prompt)
        }
    }
}
impl Validator for DebuggerHelper {
    /// All input is accepted as-is; command errors are surfaced after
    /// execution rather than at the prompt.
    fn validate(&self, _ctx: &mut ValidationContext) -> rustyline::Result<ValidationResult> {
        Ok(ValidationResult::Valid(None))
    }
}
impl Helper for DebuggerHelper {}
/// Get valid ABI files matching the current word
///
/// Recursively scans the working directory for `.json` files whose relative
/// path starts with `current_word` and that parse as an ABI document.
fn get_abi_files(current_word: &str) -> Vec<String> {
    find_valid_json_files(current_word, is_valid_abi)
}
/// Returns valid transaction JSON files from current directory and subdirectories.
/// Files must contain one of: Script, Create, Mint, Upgrade, Upload, or Blob keys.
fn get_transaction_files(current_word: &str) -> Vec<String> {
    find_valid_json_files(current_word, is_valid_transaction)
}
/// Generic function to find and validate JSON files.
///
/// Walks the current directory (following symlinks), keeps `.json` files
/// whose path relative to the walk root starts with `current_word`, and
/// returns those whose parsed contents satisfy `is_valid`. Unreadable or
/// unparsable files are silently skipped — completion is best-effort.
fn find_valid_json_files<F>(current_word: &str, is_valid: F) -> Vec<String>
where
    F: Fn(&Value) -> bool,
{
    let mut matches = Vec::new();
    let walker = walkdir::WalkDir::new(".").follow_links(true);
    for entry in walker.into_iter().filter_map(|e| e.ok()) {
        if !entry.file_type().is_file() {
            continue;
        }
        // Strip the walk root with Path::strip_prefix rather than matching
        // the literal string "./": the old string match silently skipped
        // every file on Windows, where paths use ".\" separators.
        let relative = match entry.path().strip_prefix(".") {
            Ok(p) => p,
            Err(_) => continue,
        };
        let filename = relative.to_string_lossy().to_string();
        if filename.ends_with(".json") && filename.starts_with(current_word) {
            // Try to read and parse the file
            if let Ok(content) = fs::read_to_string(entry.path()) {
                if let Ok(json) = serde_json::from_str::<Value>(&content) {
                    if is_valid(&json) {
                        matches.push(filename);
                    }
                }
            }
        }
    }
    matches
}
/// Checks if a JSON value represents a valid transaction
fn is_valid_transaction(json: &Value) -> bool {
if let Value::Object(obj) = json {
// Check for transaction type
obj.keys().any(|key| {
matches!(
key.as_str(),
"Script" | "Create" | "Mint" | "Upgrade" | "Upload" | "Blob"
)
})
} else {
false
}
}
/// Checks if a JSON value represents a valid ABI
///
/// A valid ABI is a JSON object carrying all required top-level fields,
/// where every entry of the `functions` array is an object with at least
/// `name` and `inputs` keys. Only key *presence* is checked, not the types
/// of the values.
fn is_valid_abi(json: &Value) -> bool {
    // Required top-level fields for an ABI document. A plain array suffices
    // here; the previous HashSet added an allocation per call without
    // changing behavior.
    const REQUIRED_FIELDS: [&str; 4] = [
        "programType",
        "functions",
        "concreteTypes",
        "encodingVersion",
    ];
    if let Value::Object(obj) = json {
        if !REQUIRED_FIELDS.iter().all(|field| obj.contains_key(*field)) {
            return false;
        }
        // Validate functions array: every function should have a name and inputs field
        if let Some(Value::Array(functions)) = obj.get("functions") {
            functions.iter().all(|f| {
                matches!(f, Value::Object(f_obj) if f_obj.contains_key("name") && f_obj.contains_key("inputs"))
            })
        } else {
            false
        }
    } else {
        false
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/cli/commands.rs | forc-plugins/forc-debug/src/cli/commands.rs | use crate::{cli::state::DebuggerHelper, error::Result};
use std::collections::HashSet;
use strsim::levenshtein;
#[derive(Debug, Clone)]
pub struct Command {
pub name: &'static str,
pub aliases: &'static [&'static str],
pub help: &'static str,
}
pub struct Commands {
pub tx: Command,
pub reset: Command,
pub continue_: Command,
pub step: Command,
pub breakpoint: Command,
pub registers: Command,
pub memory: Command,
pub quit: Command,
pub help: Command,
}
impl Commands {
    pub const fn new() -> Self {
        Self {
            tx: Command {
                name: "start_tx",
                aliases: &["n", "tx", "new_tx"],
                help: "Start a new transaction",
            },
            reset: Command {
                name: "reset",
                aliases: &[],
                help: "Reset debugger state",
            },
            continue_: Command {
                name: "continue",
                aliases: &["c"],
                help: "Continue execution",
            },
            step: Command {
                name: "step",
                aliases: &["s"],
                help: "Step execution",
            },
            breakpoint: Command {
                name: "breakpoint",
                aliases: &["b"],
                help: "Set breakpoint",
            },
            registers: Command {
                name: "register",
                aliases: &["r", "reg", "registers"],
                help: "View registers",
            },
            memory: Command {
                name: "memory",
                aliases: &["m", "mem"],
                help: "View memory",
            },
            quit: Command {
                name: "quit",
                aliases: &["exit"],
                help: "Exit debugger",
            },
            help: Command {
                name: "help",
                aliases: &["h", "?"],
                help: "Show help for commands",
            },
        }
    }

    /// Returns `true` when `input` is `cmd`'s canonical name or one of its
    /// aliases. Shared by all the `is_*_command` predicates and
    /// `find_command`, which previously each duplicated this check.
    fn matches(cmd: &Command, input: &str) -> bool {
        cmd.name == input || cmd.aliases.contains(&input)
    }

    /// All commands, in display order.
    pub fn all_commands(&self) -> Vec<&Command> {
        vec![
            &self.tx,
            &self.reset,
            &self.continue_,
            &self.step,
            &self.breakpoint,
            &self.registers,
            &self.memory,
            &self.quit,
            &self.help,
        ]
    }

    pub fn is_tx_command(&self, cmd: &str) -> bool {
        Self::matches(&self.tx, cmd)
    }

    pub fn is_register_command(&self, cmd: &str) -> bool {
        Self::matches(&self.registers, cmd)
    }

    pub fn is_quit_command(&self, cmd: &str) -> bool {
        Self::matches(&self.quit, cmd)
    }

    pub fn is_help_command(&self, cmd: &str) -> bool {
        Self::matches(&self.help, cmd)
    }

    /// Looks up a command by its name or any alias.
    pub fn find_command(&self, name: &str) -> Option<&Command> {
        self.all_commands()
            .into_iter()
            .find(|cmd| Self::matches(cmd, name))
    }

    /// Returns a set of all valid command strings including aliases
    pub fn get_all_command_strings(&self) -> HashSet<&'static str> {
        let mut commands = HashSet::new();
        for cmd in self.all_commands() {
            commands.insert(cmd.name);
            commands.extend(cmd.aliases);
        }
        commands
    }

    /// Suggests a similar command: the closest name or alias within an edit
    /// distance of 2, if any.
    pub fn find_closest(&self, unknown_cmd: &str) -> Option<&Command> {
        self.all_commands()
            .into_iter()
            .flat_map(|cmd| {
                std::iter::once((cmd, cmd.name))
                    .chain(cmd.aliases.iter().map(move |&alias| (cmd, alias)))
            })
            .map(|(cmd, name)| (cmd, levenshtein(unknown_cmd, name)))
            .filter(|&(_, distance)| distance <= 2)
            .min_by_key(|&(_, distance)| distance)
            .map(|(cmd, _)| cmd)
    }
}
/// Prints help text: details for one named command when an argument is
/// given (falling back to the full listing for unknown names), otherwise
/// the complete command table.
pub async fn cmd_help(helper: &DebuggerHelper, args: &[String]) -> Result<()> {
    if let Some(requested) = args.get(1) {
        // Help for specific command
        if let Some(cmd) = helper.commands.find_command(requested) {
            println!("{} - {}", cmd.name, cmd.help);
            if !cmd.aliases.is_empty() {
                println!("Aliases: {}", cmd.aliases.join(", "));
            }
            return Ok(());
        }
        println!("Unknown command: '{}'", requested);
    }
    println!("Available commands:");
    for cmd in helper.commands.all_commands() {
        println!(" {:<12} - {}", cmd.name, cmd.help);
        if !cmd.aliases.is_empty() {
            println!(" aliases: {}", cmd.aliases.join(", "));
        }
    }
    Ok(())
}
/// Parses a string representing a number and returns it as a `usize`.
///
/// The input string can be in decimal or hexadecimal format:
/// - Decimal numbers are parsed normally (e.g., `"123"`).
/// - Hexadecimal numbers must be prefixed with `"0x"` (e.g., `"0x7B"`).
/// - Underscores can be used as visual separators (e.g., `"1_000"` or `"0x1_F4"`).
///
/// If the input string is not a valid number in the specified format, `None` is returned.
///
/// # Examples
///
/// ```
/// use forc_debug::cli::parse_int;
/// assert_eq!(parse_int("123"), Some(123));
/// assert_eq!(parse_int("1_000"), Some(1000));
/// assert_eq!(parse_int("0x7B"), Some(123));
/// assert_eq!(parse_int("0x1_F4"), Some(500));
/// assert_eq!(parse_int("abc"), None);
/// assert_eq!(parse_int(""), None);
/// ```
///
/// # Errors
///
/// Returns `None` if the input string contains invalid characters,
/// is not properly formatted, or cannot be parsed into a `usize`.
pub fn parse_int(s: &str) -> Option<usize> {
    // Split off the optional hex marker before stripping separators so a
    // leading "0x" is never treated as digits.
    let (digits, radix) = match s.strip_prefix("0x") {
        Some(rest) => (rest, 16),
        None => (s, 10),
    };
    usize::from_str_radix(&digits.replace('_', ""), radix).ok()
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/cli/mod.rs | forc-plugins/forc-debug/src/cli/mod.rs | mod commands;
mod state;
pub use commands::parse_int;
use crate::{
debugger::Debugger,
error::{ArgumentError, Error, Result},
};
use rustyline::{CompletionType, Config, EditMode, Editor};
use state::DebuggerHelper;
use std::path::PathBuf;
/// Start the CLI debug interface
///
/// Connects a [Debugger] to the node at `api_url` and runs the interactive
/// REPL (with no prefilled input) until the user quits.
pub async fn start_cli(api_url: &str) -> Result<()> {
    let mut cli = Cli::new()?;
    let mut debugger = Debugger::new(api_url).await?;
    cli.run(&mut debugger, None).await
}
pub struct Cli {
editor: Editor<DebuggerHelper, rustyline::history::FileHistory>,
history_path: PathBuf,
}
impl Drop for Cli {
    /// Persists the line-editor history when the CLI is torn down.
    fn drop(&mut self) {
        // Save the terminal history; a failed write is non-fatal and
        // deliberately ignored (never panic in Drop).
        let _ = self.editor.save_history(&self.history_path);
    }
}
impl Cli {
pub fn new() -> Result<Self> {
// Initialize editor with config
let config = Config::builder()
.auto_add_history(true)
.history_ignore_space(true)
.completion_type(CompletionType::Circular)
.edit_mode(EditMode::Vi)
.max_history_size(100)?
.build();
let mut editor = Editor::with_config(config)?;
let helper = DebuggerHelper::new();
editor.set_helper(Some(helper));
// Load history from .forc/.debug/history
let history_path = get_history_file_path()?;
let _ = editor.load_history(&history_path);
Ok(Self {
editor,
history_path,
})
}
/// Main CLI entry point with optional initial input
pub async fn run(
&mut self,
debugger: &mut Debugger,
initial_input: Option<String>,
) -> Result<()> {
println!("Welcome to the Sway Debugger! Type \"help\" for a list of commands.");
let mut prefill_next = initial_input;
// Main REPL loop
loop {
let readline = if let Some(prefill) = prefill_next.take() {
self.editor.readline_with_initial(">> ", (&prefill, ""))
} else {
self.editor.readline(">> ")
};
match readline {
Ok(line) => {
let args: Vec<String> = line.split_whitespace().map(String::from).collect();
if args.is_empty() {
continue;
}
if let Some(helper) = self.editor.helper() {
match args[0].as_str() {
cmd if helper.commands.is_help_command(cmd) => {
if let Err(e) = commands::cmd_help(helper, &args).await {
println!("Error: {e}");
}
}
cmd if helper.commands.is_quit_command(cmd) => {
break Ok(());
}
_ => {
// Execute the command using debugger
if let Err(e) = debugger
.execute_from_args(args.clone(), &mut std::io::stdout())
.await
{
if let Error::ArgumentError(ArgumentError::UnknownCommand(
cmd,
)) = &e
{
// Check if this is an unknown command error and provide suggestions
if let Some(suggestion) = helper.commands.find_closest(cmd)
{
println!(
"Unknown command: '{}'. Did you mean '{}'?",
cmd, suggestion.name
);
} else {
println!("Error: {e}");
}
} else {
println!("Error: {e}");
}
}
}
}
}
}
Err(rustyline::error::ReadlineError::Interrupted) => {
println!("CTRL-C");
break Ok(());
}
Err(rustyline::error::ReadlineError::Eof) => {
println!("CTRL-D");
break Ok(());
}
Err(err) => {
println!("Error: {err}");
break Ok(());
}
}
}
}
}
fn get_history_file_path() -> Result<PathBuf> {
let home = dirs::home_dir().ok_or_else(|| {
Error::IoError(std::io::Error::new(
std::io::ErrorKind::NotFound,
"Could not find home directory",
))
})?;
let debug_dir = home.join(".forc").join(".debug");
std::fs::create_dir_all(&debug_dir).map_err(Error::IoError)?;
Ok(debug_dir.join("history"))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/debugger/commands.rs | forc-plugins/forc-debug/src/debugger/commands.rs | use crate::{error::ArgumentError, ContractId};
use fuel_tx::Receipt;
use serde::{Deserialize, Serialize};
/// Commands representing all debug operations.
///
/// Parsed from CLI arguments by [`DebugCommand::from_cli_args`] and
/// executed by the `Debugger`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum DebugCommand {
    /// Start a new transaction with optional ABI information
    StartTransaction {
        /// Path to the transaction JSON file
        tx_path: String,
        /// Optional ABI mappings - either a single ABI path for local dev
        /// or contract_id:abi_path pairs for contract-specific ABIs
        abi_mappings: Vec<AbiMapping>,
    },
    /// Reset the debugger state
    Reset,
    /// Continue execution until next breakpoint or termination
    Continue,
    /// Set single stepping mode
    SetSingleStepping {
        /// Whether to enable single stepping
        enable: bool,
    },
    /// Set a breakpoint at the specified location
    SetBreakpoint {
        /// Contract ID (zeroed for script breakpoints)
        contract_id: ContractId,
        /// Instruction offset (appears to be a byte offset — consecutive
        /// instructions differ by 4 in the integration tests; confirm).
        offset: u64,
    },
    /// Get register value(s)
    GetRegisters {
        /// Optional specific register indices. If empty, returns all registers
        indices: Vec<u32>,
    },
    /// Get memory contents
    GetMemory {
        /// Starting offset in memory
        offset: u32,
        /// Number of bytes to read
        limit: u32,
    },
    /// Exit the debugger
    Quit,
}
/// ABI mapping for contract debugging.
///
/// Produced while parsing `start_tx` arguments: a bare `*.json` path maps
/// to [`AbiMapping::Local`]; a `--abi <contract_id>:<path>` pair maps to
/// [`AbiMapping::Contract`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AbiMapping {
    /// Local development ABI (no specific contract ID; registered under a
    /// zeroed contract ID by the debugger)
    Local { abi_path: String },
    /// Contract-specific ABI mapping
    Contract {
        contract_id: ContractId,
        abi_path: String,
    },
}
/// Response types for debug commands.
///
/// Each [`DebugCommand`] maps to one of these variants when executed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum DebugResponse {
    /// Transaction started or continued with execution result
    RunResult {
        /// Receipts emitted by the VM run so far
        receipts: Vec<Receipt>,
        /// `Some` when execution stopped on a breakpoint, `None` when the
        /// transaction ran to completion
        breakpoint: Option<BreakpointHit>,
    },
    /// Command completed successfully with no data
    Success,
    /// Register values
    Registers(Vec<RegisterValue>),
    /// Memory contents
    Memory(Vec<u8>),
    /// Error occurred
    Error(String),
}
/// Information about a breakpoint hit.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BreakpointHit {
    /// Contract in which the breakpoint was hit (zeroed for scripts)
    pub contract: ContractId,
    /// Program counter value at the stop
    pub pc: u64,
}
/// Register value with metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RegisterValue {
    /// Register index
    pub index: u32,
    /// Current value of the register
    pub value: u64,
    /// Human-readable register name (e.g. "zero", "one")
    pub name: String,
}
impl DebugCommand {
    /// Parse a command from CLI arguments.
    ///
    /// The first element of `args` is the command name (or one of its
    /// aliases); the remaining elements are the command's own arguments.
    ///
    /// # Errors
    /// Returns [`ArgumentError::NotEnough`] when `args` is empty,
    /// [`ArgumentError::UnknownCommand`] for an unrecognized command name,
    /// and [`ArgumentError::Invalid`] when a command's arguments fail to
    /// parse.
    pub fn from_cli_args(args: &[String]) -> Result<Self, ArgumentError> {
        if args.is_empty() {
            return Err(ArgumentError::NotEnough {
                expected: 1,
                got: 0,
            });
        }
        let cmd = &args[0];
        let args = &args[1..];
        match cmd.as_str() {
            "start_tx" | "n" | "tx" | "new_tx" => {
                Self::parse_start_tx(args).map_err(ArgumentError::Invalid)
            }
            "reset" => {
                if !args.is_empty() {
                    return Err(ArgumentError::Invalid(
                        "reset command takes no arguments".to_string(),
                    ));
                }
                Ok(DebugCommand::Reset)
            }
            "continue" | "c" => {
                if !args.is_empty() {
                    return Err(ArgumentError::Invalid(
                        "continue command takes no arguments".to_string(),
                    ));
                }
                Ok(DebugCommand::Continue)
            }
            "step" | "s" => Self::parse_step(args).map_err(ArgumentError::Invalid),
            "breakpoint" | "bp" | "b" => {
                Self::parse_breakpoint(args).map_err(ArgumentError::Invalid)
            }
            "register" | "r" | "reg" | "registers" => {
                Self::parse_registers(args).map_err(ArgumentError::Invalid)
            }
            "memory" | "m" | "mem" => Self::parse_memory(args).map_err(ArgumentError::Invalid),
            "quit" | "q" | "exit" => Ok(DebugCommand::Quit),
            _ => Err(ArgumentError::UnknownCommand(cmd.to_string())),
        }
    }
    /// Parse a start_tx command from CLI arguments
    ///
    /// Handles two distinct modes of operation:
    /// 1. Local Development: `tx transaction.json abi.json`
    /// 2. Contract-specific: `tx transaction.json --abi <contract_id>:<abi_file.json>`
    fn parse_start_tx(args: &[String]) -> Result<Self, String> {
        if args.is_empty() {
            return Err("Transaction file path required".to_string());
        }
        let tx_path = args[0].clone();
        let mut abi_mappings = Vec::new();
        let mut i = 1;
        while i < args.len() {
            if args[i] == "--abi" {
                // `--abi` requires a `<contract_id>:<abi_path>` value.
                if i + 1 >= args.len() {
                    return Err("Missing argument for --abi".to_string());
                }
                let abi_arg = &args[i + 1];
                if let Some((contract_id, abi_path)) = abi_arg.split_once(':') {
                    let contract_id = contract_id
                        .parse::<ContractId>()
                        .map_err(|_| format!("Invalid contract ID: {contract_id}"))?;
                    abi_mappings.push(AbiMapping::Contract {
                        contract_id,
                        abi_path: abi_path.to_string(),
                    });
                } else {
                    return Err(format!("Invalid --abi argument: {abi_arg}"));
                }
                i += 2;
            } else if args[i].ends_with(".json") {
                // Local development ABI
                abi_mappings.push(AbiMapping::Local {
                    abi_path: args[i].clone(),
                });
                i += 1;
            } else {
                return Err(format!("Unexpected argument: {}", args[i]));
            }
        }
        Ok(DebugCommand::StartTransaction {
            tx_path,
            abi_mappings,
        })
    }
    /// Parse a step command: single stepping is enabled unless the first
    /// argument is one of "off"/"no"/"disable".
    fn parse_step(args: &[String]) -> Result<Self, String> {
        let enable = args
            .first()
            .is_none_or(|v| !["off", "no", "disable"].contains(&v.as_str()));
        Ok(DebugCommand::SetSingleStepping { enable })
    }
    /// Parse a breakpoint command: `[<contract_id>] <offset>`.
    /// With a single argument the contract ID defaults to zeroed (script).
    fn parse_breakpoint(args: &[String]) -> Result<Self, String> {
        if args.is_empty() {
            return Err("Breakpoint offset required".to_string());
        }
        let (contract_id, offset_str) = if args.len() == 2 {
            // Contract ID provided
            let contract_id = args[0]
                .parse::<ContractId>()
                .map_err(|_| format!("Invalid contract ID: {}", args[0]))?;
            (contract_id, &args[1])
        } else {
            // No contract ID, use zeroed
            (ContractId::zeroed(), &args[0])
        };
        let offset = crate::cli::parse_int(offset_str)
            .ok_or_else(|| format!("Invalid offset: {offset_str}"))? as u64;
        Ok(DebugCommand::SetBreakpoint {
            contract_id,
            offset,
        })
    }
    /// Parse a register command: each argument is either a numeric index or
    /// a register name. No arguments means "all registers".
    fn parse_registers(args: &[String]) -> Result<Self, String> {
        let mut indices = Vec::new();
        for arg in args {
            if let Some(v) = crate::cli::parse_int(arg) {
                // Reject indices that do not fit in `u32` instead of
                // silently truncating them with an `as` cast.
                let index =
                    u32::try_from(v).map_err(|_| format!("Invalid register index: {arg}"))?;
                indices.push(index);
            } else if let Some(index) = crate::names::register_index(arg) {
                // Named registers come from a fixed, small table, so this
                // cast cannot truncate.
                indices.push(index as u32);
            } else {
                return Err(format!("Unknown register: {arg}"));
            }
        }
        Ok(DebugCommand::GetRegisters { indices })
    }
    /// Parse a memory command: `[<offset> [<limit>]]`. Offset defaults to 0
    /// and limit defaults to the whole addressable VM memory.
    fn parse_memory(args: &[String]) -> Result<Self, String> {
        use fuel_vm::consts::{VM_MAX_RAM, WORD_SIZE};
        // Parse a user-supplied numeric argument into `u32`, rejecting
        // out-of-range values instead of silently truncating them.
        let parse_u32 = |arg: &String, what: &str| -> Result<u32, String> {
            let value =
                crate::cli::parse_int(arg).ok_or_else(|| format!("Invalid {what}: {arg}"))?;
            u32::try_from(value).map_err(|_| format!("Invalid {what}: {arg} (out of range)"))
        };
        let offset = args
            .first()
            .map(|a| parse_u32(a, "offset"))
            .transpose()?
            .unwrap_or(0);
        let limit = args
            .get(1)
            .map(|a| parse_u32(a, "limit"))
            .transpose()?
            // Default: read the entire addressable VM memory.
            .unwrap_or((WORD_SIZE * (VM_MAX_RAM as usize)) as u32);
        Ok(DebugCommand::GetMemory { offset, limit })
    }
}
// Unit tests for `DebugCommand::from_cli_args` covering every command,
// its aliases, default values, and the main error paths.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_start_tx_command() {
        let args = vec!["start_tx".to_string(), "test.json".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::StartTransaction { ref tx_path, ref abi_mappings }
            if tx_path == "test.json" && abi_mappings.is_empty()
        ));
        // Test alias
        let args = vec!["n".to_string(), "test.json".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::StartTransaction { .. }));
    }
    #[test]
    fn test_reset_command() {
        let args = vec!["reset".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::Reset));
    }
    #[test]
    fn test_continue_command() {
        let args = vec!["continue".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::Continue));
        // Test alias
        let args = vec!["c".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::Continue));
    }
    #[test]
    fn test_step_command() {
        // Bare "step" enables single stepping by default.
        let args = vec!["step".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::SetSingleStepping { enable: true }
        ));
        let args = vec!["step".to_string(), "off".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::SetSingleStepping { enable: false }
        ));
        // Test alias
        let args = vec!["s".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::SetSingleStepping { enable: true }
        ));
    }
    #[test]
    fn test_breakpoint_command() {
        // Without a contract ID the breakpoint targets the zeroed (script) ID.
        let args = vec!["breakpoint".to_string(), "100".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::SetBreakpoint { contract_id, offset: 100 }
            if contract_id == ContractId::zeroed()
        ));
        // Test alias
        let args = vec!["bp".to_string(), "50".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::SetBreakpoint { offset: 50, .. }
        ));
    }
    #[test]
    fn test_register_command() {
        // No arguments means "all registers" (empty index list).
        let args = vec!["register".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::GetRegisters { ref indices }
            if indices.is_empty()
        ));
        let args = vec!["reg".to_string(), "0".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::GetRegisters { ref indices }
            if indices == &vec![0]
        ));
    }
    #[test]
    fn test_memory_command() {
        // No arguments: offset defaults to 0, limit to the full VM memory.
        let args = vec!["memory".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::GetMemory {
                offset: 0,
                limit: _
            }
        ));
        let args = vec!["memory".to_string(), "100".to_string(), "200".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(
            result,
            DebugCommand::GetMemory {
                offset: 100,
                limit: 200
            }
        ));
        // Test alias
        let args = vec!["m".to_string(), "50".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::GetMemory { offset: 50, .. }));
    }
    #[test]
    fn test_quit_command() {
        let args = vec!["quit".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::Quit));
        // Test aliases
        let args = vec!["q".to_string()];
        let result = DebugCommand::from_cli_args(&args).unwrap();
        assert!(matches!(result, DebugCommand::Quit));
    }
    #[test]
    fn test_error_cases() {
        // Empty args
        let args = vec![];
        let result = DebugCommand::from_cli_args(&args);
        assert!(matches!(
            result,
            Err(ArgumentError::NotEnough {
                expected: 1,
                got: 0
            })
        ));
        // Unknown command
        let args = vec!["unknown".to_string()];
        let result = DebugCommand::from_cli_args(&args);
        assert!(matches!(result, Err(ArgumentError::UnknownCommand(_))));
        // Missing arguments
        let args = vec!["start_tx".to_string()];
        let result = DebugCommand::from_cli_args(&args);
        assert!(result.is_err());
        let args = vec!["breakpoint".to_string()];
        let result = DebugCommand::from_cli_args(&args);
        assert!(result.is_err());
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/src/debugger/mod.rs | forc-plugins/forc-debug/src/debugger/mod.rs | pub mod commands;
pub use commands::{AbiMapping, BreakpointHit, DebugCommand, DebugResponse, RegisterValue};
use crate::{
error::{Error, Result},
names::register_name,
types::AbiMap,
ContractId, FuelClient, RunResult, Transaction,
};
use fuel_tx::Receipt;
use fuel_vm::consts::{VM_REGISTER_COUNT, WORD_SIZE};
use sway_core::asm_generation::ProgramABI;
/// Stateful debugger backed by a `fuel-core` debug session.
pub struct Debugger {
    // GraphQL client used for all debug-session RPC calls.
    client: FuelClient,
    // Session handle obtained from `client.start_session()`.
    session_id: String,
    // ABIs registered per contract ID, used to decode `LogData` receipts.
    contract_abis: AbiMap,
}
impl Debugger {
    /// Create a debugger instance connected to the given API URL.
    ///
    /// # Errors
    /// Fails if the client cannot be constructed or a debug session cannot
    /// be started.
    pub async fn new(api_url: &str) -> Result<Self> {
        let client = FuelClient::new(api_url).map_err(|e| Error::FuelClientError(e.to_string()))?;
        Self::from_client(client).await
    }
    /// Create a debugger instance from an existing `FuelClient`, starting a
    /// fresh debug session on it.
    pub async fn from_client(client: FuelClient) -> Result<Self> {
        let session_id = client
            .start_session()
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(Self {
            client,
            session_id,
            contract_abis: AbiMap::default(),
        })
    }
    /// Execute a debugger command from CLI arguments and pretty-print the
    /// response to `writer`.
    ///
    /// # Errors
    /// Fails on argument-parsing errors, client errors, or write errors.
    pub async fn execute_from_args<W: std::io::Write>(
        &mut self,
        args: Vec<String>,
        writer: &mut W,
    ) -> Result<()> {
        let command = DebugCommand::from_cli_args(&args)?;
        let response = self.execute(command).await?;
        match response {
            DebugResponse::RunResult {
                receipts,
                breakpoint,
            } => {
                // Process receipts with ABI decoding
                let decoded_receipts = self.process_receipts(&receipts);
                for decoded in decoded_receipts {
                    match decoded {
                        DecodedReceipt::Regular(receipt) => {
                            writeln!(writer, "Receipt: {receipt:?}")?;
                        }
                        DecodedReceipt::LogData {
                            receipt,
                            decoded_value,
                            contract_id,
                        } => {
                            writeln!(writer, "Receipt: {receipt:?}")?;
                            if let Some(value) = decoded_value {
                                writeln!(
                                    writer,
                                    "Decoded log value: {value}, from contract: {contract_id}"
                                )?;
                            }
                        }
                    }
                }
                // Print breakpoint info
                if let Some(bp) = breakpoint {
                    writeln!(
                        writer,
                        "Stopped on breakpoint at address {} of contract 0x{}",
                        bp.pc, bp.contract
                    )?;
                } else {
                    writeln!(writer, "Terminated")?;
                }
            }
            DebugResponse::Success => {
                // Command completed successfully, no output needed
            }
            DebugResponse::Registers(registers) => {
                for reg in registers {
                    writeln!(
                        writer,
                        "reg[{:#02x}] = {:<8} # {}",
                        reg.index, reg.value, reg.name
                    )?;
                }
            }
            DebugResponse::Memory(mem) => {
                // Hex dump, one VM word (8 bytes) per line, prefixed with
                // the byte offset of the line.
                for (i, chunk) in mem.chunks(WORD_SIZE).enumerate() {
                    write!(writer, " {:06x}:", i * WORD_SIZE)?;
                    for byte in chunk {
                        write!(writer, " {byte:02x}")?;
                    }
                    writeln!(writer)?;
                }
            }
            DebugResponse::Error(err) => {
                writeln!(writer, "Error: {err}")?;
            }
        }
        Ok(())
    }
    /// Dispatch a [`DebugCommand`] to the corresponding session operation.
    pub async fn execute(&mut self, command: DebugCommand) -> Result<DebugResponse> {
        match command {
            DebugCommand::StartTransaction {
                tx_path,
                abi_mappings,
            } => self.start_transaction(tx_path, abi_mappings).await,
            DebugCommand::Reset => self.reset().await,
            DebugCommand::Continue => self.continue_execution().await,
            DebugCommand::SetSingleStepping { enable } => self.set_single_stepping(enable).await,
            DebugCommand::SetBreakpoint {
                contract_id,
                offset,
            } => self.set_breakpoint(contract_id, offset).await,
            DebugCommand::GetRegisters { indices } => self.get_registers(indices).await,
            DebugCommand::GetMemory { offset, limit } => self.get_memory(offset, limit).await,
            // Quit is handled by the caller (REPL); nothing to do here.
            DebugCommand::Quit => Ok(DebugResponse::Success),
        }
    }
    /// Start a new transaction with optional ABI support.
    ///
    /// Registers every provided ABI first (local ABIs under the zeroed
    /// contract ID), then loads the transaction JSON and submits it to the
    /// debug session.
    async fn start_transaction(
        &mut self,
        tx_path: String,
        abi_mappings: Vec<AbiMapping>,
    ) -> Result<DebugResponse> {
        // Read an ABI JSON file and wrap it in the compiler's ABI type.
        let load_and_parse_abi = |abi_path: &str| -> Result<ProgramABI> {
            let abi_content = std::fs::read_to_string(abi_path)?;
            let fuel_abi =
                serde_json::from_str::<fuel_abi_types::abi::program::ProgramABI>(&abi_content)
                    .map_err(Error::JsonError)?;
            Ok(ProgramABI::Fuel(fuel_abi))
        };
        // Process ABI mappings
        for mapping in abi_mappings {
            match mapping {
                AbiMapping::Local { abi_path } => {
                    let abi = load_and_parse_abi(&abi_path)?;
                    self.contract_abis.register_abi(ContractId::zeroed(), abi);
                }
                AbiMapping::Contract {
                    contract_id,
                    abi_path,
                } => {
                    let abi = load_and_parse_abi(&abi_path)?;
                    self.contract_abis.register_abi(contract_id, abi);
                }
            }
        }
        // Load and start transaction
        let tx_json = std::fs::read(&tx_path)?;
        let tx: Transaction = serde_json::from_slice(&tx_json).map_err(Error::JsonError)?;
        let status = self
            .client
            .start_tx(&self.session_id, &tx)
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(self.create_run_result_response(&status))
    }
    /// Reset the debug session state on the node.
    async fn reset(&mut self) -> Result<DebugResponse> {
        self.client
            .reset(&self.session_id)
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(DebugResponse::Success)
    }
    /// Continue execution until the next breakpoint or termination.
    async fn continue_execution(&mut self) -> Result<DebugResponse> {
        let status = self
            .client
            .continue_tx(&self.session_id)
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(self.create_run_result_response(&status))
    }
    /// Enable or disable single-stepping mode for the session.
    async fn set_single_stepping(&mut self, enable: bool) -> Result<DebugResponse> {
        self.client
            .set_single_stepping(&self.session_id, enable)
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(DebugResponse::Success)
    }
    /// Set a breakpoint at `offset` inside `contract_id` (zeroed for scripts).
    async fn set_breakpoint(
        &mut self,
        contract_id: ContractId,
        offset: u64,
    ) -> Result<DebugResponse> {
        self.client
            .set_breakpoint(&self.session_id, contract_id, offset)
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(DebugResponse::Success)
    }
    /// Fetch the requested register values; an empty `indices` list means
    /// all VM registers. Rejects out-of-range indices before querying.
    async fn get_registers(&mut self, indices: Vec<u32>) -> Result<DebugResponse> {
        let indices = if indices.is_empty() {
            (0..VM_REGISTER_COUNT as u32).collect()
        } else {
            indices
        };
        let mut values = Vec::new();
        for index in indices {
            if index >= VM_REGISTER_COUNT as u32 {
                return Err(Error::ArgumentError(crate::error::ArgumentError::Invalid(
                    format!("Register index too large: {index}"),
                )));
            }
            let value = self
                .client
                .register(&self.session_id, index)
                .await
                .map_err(|e| Error::FuelClientError(e.to_string()))?;
            values.push(RegisterValue {
                index,
                value,
                name: register_name(index as usize).to_string(),
            });
        }
        Ok(DebugResponse::Registers(values))
    }
    /// Read `limit` bytes of VM memory starting at `offset`.
    async fn get_memory(&mut self, offset: u32, limit: u32) -> Result<DebugResponse> {
        let mem = self
            .client
            .memory(&self.session_id, offset, limit)
            .await
            .map_err(|e| Error::FuelClientError(e.to_string()))?;
        Ok(DebugResponse::Memory(mem))
    }
    /// Convert RunResult to DebugResponse
    fn create_run_result_response(&self, run_result: &RunResult) -> DebugResponse {
        let receipts: Vec<Receipt> = run_result.receipts().collect();
        let breakpoint = run_result.breakpoint.as_ref().map(|bp| BreakpointHit {
            contract: bp.contract.clone().into(),
            pc: bp.pc.0,
        });
        DebugResponse::RunResult {
            receipts,
            breakpoint,
        }
    }
    /// Process receipts with ABI decoding (used for pretty printing in CLI).
    ///
    /// `LogData` receipts whose contract has a registered (or fetchable)
    /// ABI are decoded; everything else passes through unchanged.
    pub fn process_receipts(&mut self, receipts: &[Receipt]) -> Vec<DecodedReceipt> {
        receipts
            .iter()
            .map(|receipt| {
                if let Receipt::LogData {
                    id,
                    rb,
                    data: Some(data),
                    ..
                } = receipt
                {
                    // Decoding is best-effort: any failure falls back to the
                    // undecoded receipt.
                    self.contract_abis
                        .get_or_fetch_abi(id)
                        .and_then(|abi| {
                            forc_util::tx_utils::decode_log_data(&rb.to_string(), data, abi).ok()
                        })
                        .map(|decoded_log| DecodedReceipt::LogData {
                            receipt: receipt.clone(),
                            decoded_value: Some(decoded_log.value),
                            contract_id: *id,
                        })
                        .unwrap_or_else(|| DecodedReceipt::Regular(receipt.clone()))
                } else {
                    DecodedReceipt::Regular(receipt.clone())
                }
            })
            .collect()
    }
    /// Get the current session ID.
    pub fn session_id(&self) -> &str {
        &self.session_id
    }
}
/// Decoded receipt for pretty printing.
#[derive(Debug, Clone)]
pub enum DecodedReceipt {
    /// Any receipt that was not (or could not be) ABI-decoded
    Regular(Receipt),
    /// A `LogData` receipt, optionally with its ABI-decoded value
    LogData {
        receipt: Receipt,
        /// Human-readable decoded log value, when decoding succeeded
        decoded_value: Option<String>,
        /// Contract that emitted the log
        contract_id: ContractId,
    },
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/tests/cli_integration.rs | forc-plugins/forc-debug/tests/cli_integration.rs | #![deny(unused_must_use)]
use escargot::CargoBuild;
use rexpect::session::spawn_command;
use std::process::Command;
#[test]
fn test_cli() {
    // Guard that kills the wrapped child process on drop, so the spawned
    // fuel-core node cannot outlive the test even when one of the
    // `unwrap`/`expect` calls below panics mid-test.
    struct KillOnDrop(std::process::Child);
    impl Drop for KillOnDrop {
        fn drop(&mut self) {
            // Best-effort: the process may already have exited.
            let _ = self.0.kill();
        }
    }

    let port = portpicker::pick_unused_port().expect("No ports free");
    // Spawn an in-memory fuel-core node in debug mode on a free port.
    let _fuel_core = KillOnDrop(
        Command::new("fuel-core")
            .arg("run")
            .arg("--debug")
            .arg("--db-type")
            .arg("in-memory")
            .arg("--port")
            .arg(port.to_string())
            .spawn()
            .expect("Failed to start fuel-core"),
    );
    // Build and run the forc-debug binary pointed at the local node.
    let mut run_cmd = CargoBuild::new()
        .bin("forc-debug")
        .current_release()
        .current_target()
        .run()
        .unwrap()
        .command();
    dbg!(&run_cmd);
    run_cmd.arg(format!("http://127.0.0.1:{port}/graphql"));
    // Increased timeout to account for rustyline initialization
    let mut cmd = spawn_command(run_cmd, Some(5000)).unwrap();
    // Handle rustyline's escape sequences before the prompt
    cmd.exp_string("\u{1b}[?2004h").unwrap();
    // Green >> prompt
    let prompt = "\u{1b}[38;2;4;234;130m>>\u{1b}[0m ";
    cmd.exp_string(prompt).unwrap();
    // Registers 0 and 1 hold the VM constants zero and one.
    cmd.send_line("reg 0").unwrap();
    cmd.exp_regex(r"reg\[0x0\] = 0\s+# zero").unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("reg 1").unwrap();
    cmd.exp_regex(r"reg\[0x1\] = 1\s+# one").unwrap();
    cmd.exp_string(prompt).unwrap();
    // Breakpoint at offset 0, then start the example transaction.
    cmd.send_line("breakpoint 0").unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("start_tx examples/example_tx.json examples/example_abi.json")
        .unwrap();
    cmd.exp_regex(r"Stopped on breakpoint at address 0 of contract 0x0{64}")
        .unwrap();
    cmd.exp_string(prompt).unwrap();
    // Single-step once: execution stops at the next instruction (offset 4).
    cmd.send_line("step on").unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("continue").unwrap();
    cmd.exp_regex(r"Stopped on breakpoint at address 4 of contract 0x0{64}")
        .unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("step off").unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("continue").unwrap();
    cmd.exp_string(prompt).unwrap();
    // Reset and re-run: with the ABI loaded the log value gets decoded.
    cmd.send_line("reset").unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("start_tx examples/example_tx.json examples/example_abi.json")
        .unwrap();
    cmd.exp_regex(r"Decoded log value: 120").unwrap();
    cmd.exp_string(prompt).unwrap();
    cmd.send_line("quit").unwrap();
    // fuel-core is killed by the KillOnDrop guard when it goes out of scope.
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/tests/server_integration.rs | forc-plugins/forc-debug/tests/server_integration.rs | use dap::{
events::{Event, OutputEventBody},
requests::{Command, LaunchRequestArguments, SetBreakpointsArguments, VariablesArguments},
responses::ResponseBody,
types::{
OutputEventCategory, Source, SourceBreakpoint, StartDebuggingRequestKind,
StoppedEventReason, Variable,
},
};
use forc_debug::server::{
AdditionalData, DapServer, INSTRUCTIONS_VARIABLE_REF, REGISTERS_VARIABLE_REF,
};
use std::{
collections::BTreeMap,
env,
io::Write,
path::PathBuf,
sync::{Arc, Mutex},
};
/// Returns the sway workspace root: the parent of the current working
/// directory (tests run from the crate directory inside the workspace).
pub fn sway_workspace_dir() -> PathBuf {
    let crate_dir = env::current_dir().unwrap();
    let workspace = crate_dir.parent().unwrap();
    workspace.to_path_buf()
}
/// Returns the `tests/fixtures` directory relative to the current working
/// directory (the crate root when running `cargo test`).
pub fn test_fixtures_dir() -> PathBuf {
    let mut fixtures = env::current_dir().unwrap();
    fixtures.push("tests/fixtures");
    fixtures
}
#[derive(Debug, Default, Clone)]
/// A simple struct to capture event output from the server for testing purposes.
struct EventCapture {
    // Shared text buffer; `Arc<Mutex<_>>` so clones handed to the server as
    // its output writer and the test can observe the same data.
    pub output: Arc<Mutex<String>>,
}
impl Write for EventCapture {
    /// Appends the written bytes (lossily decoded as UTF-8) to the shared
    /// capture buffer and reports the full length as consumed.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        let text = String::from_utf8_lossy(buf);
        self.output.lock().unwrap().push_str(&text);
        Ok(buf.len())
    }
    /// No-op: writes go straight into the in-memory buffer.
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
impl EventCapture {
    /// Consumes buffered server output line by line until one parses as a
    /// DAP `Event`, returning it; returns `None` once the buffer is empty
    /// or holds no complete (newline-terminated) line.
    pub fn take_event(&self) -> Option<Event> {
        let mut buffered = self.output.lock().unwrap();
        loop {
            if buffered.is_empty() {
                return None;
            }
            // Bail out if no complete line remains in the buffer.
            let newline = buffered.find('\n')?;
            // Remove the line (and its trailing newline) from the buffer.
            let raw: String = buffered.drain(..=newline).collect();
            let line = raw.strip_suffix('\n').unwrap_or(&raw);
            if let Ok(event) = serde_json::from_str::<Event>(line) {
                return Some(event);
            }
        }
    }
}
// Verifies that the DAP server accepts Initialize, then rejects Attach mode
// by exiting with code 0 and emitting a "not supported" output event.
#[test]
fn test_server_attach_mode() {
    let output_capture = EventCapture::default();
    let input = Box::new(std::io::stdin());
    let output = Box::new(output_capture.clone());
    let mut server = DapServer::new(input, output);
    // Initialize request
    let (result, exit_code) = server
        .handle_command(&Command::Initialize(Default::default()))
        .into_tuple();
    assert!(matches!(result, Ok(ResponseBody::Initialize(_))));
    assert!(exit_code.is_none());
    // Attach request
    let (result, exit_code) = server
        .handle_command(&Command::Attach(Default::default()))
        .into_tuple();
    assert!(matches!(result, Ok(ResponseBody::Attach)));
    assert_eq!(exit_code, Some(0));
    assert_not_supported_event(output_capture.take_event());
}
// End-to-end DAP session in launch mode against the "simple" fixture:
// initialize, set three breakpoints, launch, inspect threads/stack/scopes/
// variables at the first stop, step, continue through all breakpoints, and
// finally check the in-language test results logged on clean exit.
#[test]
fn test_server_launch_mode() {
    let output_capture = EventCapture::default();
    let input = Box::new(std::io::stdin());
    let output = Box::new(output_capture.clone());
    let mut server = DapServer::new(input, output);
    let program_path = test_fixtures_dir().join("simple/src/main.sw");
    let source_str = program_path.to_string_lossy().to_string();
    // Initialize request
    let (result, exit_code) = server
        .handle_command(&Command::Initialize(Default::default()))
        .into_tuple();
    assert!(matches!(result, Ok(ResponseBody::Initialize(_))));
    assert!(exit_code.is_none());
    // Launch request
    let additional_data = serde_json::to_value(AdditionalData {
        program: source_str.clone(),
    })
    .unwrap();
    let (result, exit_code) = server
        .handle_command(&Command::Launch(LaunchRequestArguments {
            additional_data: Some(additional_data),
            ..Default::default()
        }))
        .into_tuple();
    assert!(matches!(result, Ok(ResponseBody::Launch)));
    assert!(exit_code.is_none());
    // Set Breakpoints: one inside each of the fixture's three tests.
    let (result, exit_code) = server
        .handle_command(&Command::SetBreakpoints(SetBreakpointsArguments {
            source: Source {
                path: Some(source_str.clone()),
                ..Default::default()
            },
            breakpoints: Some(vec![
                SourceBreakpoint {
                    line: 21,
                    ..Default::default()
                },
                SourceBreakpoint {
                    line: 30,
                    ..Default::default()
                },
                SourceBreakpoint {
                    line: 39,
                    ..Default::default()
                },
            ]),
            ..Default::default()
        }))
        .into_tuple();
    match result.expect("set breakpoints result") {
        ResponseBody::SetBreakpoints(res) => {
            assert!(res.breakpoints.len() == 3);
        }
        other => panic!("Expected SetBreakpoints response, got {other:?}"),
    }
    assert!(exit_code.is_none());
    // Configuration Done request
    let (result, exit_code) = server
        .handle_command(&Command::ConfigurationDone)
        .into_tuple();
    assert!(matches!(result, Ok(ResponseBody::ConfigurationDone)));
    assert!(exit_code.is_none());
    // Launch, should hit first breakpoint
    let keep_running = server.launch().expect("launched without error");
    assert!(keep_running);
    assert_stopped_breakpoint_event(output_capture.take_event(), 0);
    // Threads request
    let (result, exit_code) = server.handle_command(&Command::Threads).into_tuple();
    match result.expect("threads result") {
        ResponseBody::Threads(res) => {
            assert_eq!(res.threads.len(), 1);
        }
        other => panic!("Expected Threads response, got {other:?}"),
    }
    assert!(exit_code.is_none());
    // Stack Trace request
    let (result, exit_code) = server
        .handle_command(&Command::StackTrace(Default::default()))
        .into_tuple();
    match result.expect("stack trace result") {
        ResponseBody::StackTrace(res) => {
            assert_eq!(res.stack_frames.len(), 1);
        }
        other => panic!("Expected StackTrace response, got {other:?}"),
    }
    assert!(exit_code.is_none());
    // Scopes request: expects the registers and instructions scopes.
    let (result, exit_code) = server
        .handle_command(&Command::Scopes(Default::default()))
        .into_tuple();
    match result.expect("scopes result") {
        ResponseBody::Scopes(res) => {
            assert_eq!(res.scopes.len(), 2);
        }
        other => panic!("Expected Scopes response, got {other:?}"),
    }
    assert!(exit_code.is_none());
    // Variables request - registers
    let (result, exit_code) = server
        .handle_command(&Command::Variables(VariablesArguments {
            variables_reference: REGISTERS_VARIABLE_REF,
            ..Default::default()
        }))
        .into_tuple();
    match result.expect("registers variables result") {
        ResponseBody::Variables(res) => {
            assert_eq!(res.variables.len(), 64);
        }
        other => panic!("Expected Variables response, got {other:?}"),
    }
    assert!(exit_code.is_none());
    // Variables request - VM instructions
    let (result, exit_code) = server
        .handle_command(&Command::Variables(VariablesArguments {
            variables_reference: INSTRUCTIONS_VARIABLE_REF,
            ..Default::default()
        }))
        .into_tuple();
    match result.expect("instructions variables result") {
        ResponseBody::Variables(res) => {
            // The instruction at the first breakpoint decodes to these fields.
            let expected = vec![
                ("Opcode", "SW"),
                ("rA", "reg59"),
                ("rB", "one"),
                ("imm", "0x1"),
            ];
            assert_variables_eq(expected, res.variables);
        }
        other => panic!("Expected Variables response, got {other:?}"),
    }
    assert!(exit_code.is_none());
    // Next request
    let (result, exit_code) = server
        .handle_command(&Command::Next(Default::default()))
        .into_tuple();
    assert!(result.is_ok());
    assert!(exit_code.is_none());
    assert_stopped_next_event(output_capture.take_event());
    // Step In request (not supported by this server)
    let (result, exit_code) = server
        .handle_command(&Command::StepIn(Default::default()))
        .into_tuple();
    assert!(result.is_ok());
    assert!(exit_code.is_none());
    assert_not_supported_event(output_capture.take_event());
    // Step Out request (not supported by this server)
    let (result, exit_code) = server
        .handle_command(&Command::StepOut(Default::default()))
        .into_tuple();
    assert!(result.is_ok());
    assert!(exit_code.is_none());
    assert_not_supported_event(output_capture.take_event());
    // Continue request, should hit 2nd breakpoint
    let (result, exit_code) = server
        .handle_command(&Command::Continue(Default::default()))
        .into_tuple();
    assert!(result.is_ok());
    assert!(exit_code.is_none());
    assert_stopped_breakpoint_event(output_capture.take_event(), 1);
    // Continue request, should hit 3rd breakpoint
    let (result, exit_code) = server
        .handle_command(&Command::Continue(Default::default()))
        .into_tuple();
    assert!(result.is_ok());
    assert!(exit_code.is_none());
    assert_stopped_breakpoint_event(output_capture.take_event(), 2);
    // Continue request, should exit cleanly
    let (result, exit_code) = server
        .handle_command(&Command::Continue(Default::default()))
        .into_tuple();
    assert!(result.is_ok());
    assert_eq!(exit_code, Some(0));
    // Test results should be logged
    let body = assert_output_event_body(output_capture.take_event());
    assert!(body.category.is_none());
    assert!(body.output.contains("test test_1 ... ok"));
    assert!(body.output.contains("test test_2 ... ok"));
    assert!(body.output.contains("test test_3 ... ok"));
    assert!(body.output.contains("Result: OK. 3 passed. 0 failed"));
}
// Builds the fixture's tests and checks that the generated source map
// assigns the expected number of VM instructions to key source lines.
#[test]
fn test_sourcemap_build() {
    let mut server = DapServer::new(Box::new(std::io::stdin()), Box::new(std::io::sink()));
    let program_path = test_fixtures_dir().join("simple/src/main.sw");
    // Initialize and set the program path
    server.handle_command(&Command::Initialize(Default::default()));
    server.state.program_path = program_path.clone();
    server.state.mode = Some(StartDebuggingRequestKind::Launch);
    // Explicitly build the tests
    server.build_tests().expect("Failed to build tests");
    // Group instructions by line number
    let mut line_to_instructions: BTreeMap<i64, Vec<usize>> = BTreeMap::new();
    let source_map = &server.state.source_map;
    for pc in source_map.map.keys() {
        if let Some((path, range)) = source_map.addr_to_span(*pc) {
            // Only count instructions that map back to the fixture file.
            if path == program_path {
                line_to_instructions
                    .entry(range.start.line as i64)
                    .or_default()
                    .push(*pc);
            }
        }
    }
    // Verify essential source locations are mapped correctly
    let key_locations = [
        // Main function and its contents
        (3, 3, "main function parameters"), // Should have 3 instructions
        (4, 4, "addition operation"),       // Should have 4 instructions
        // Helper function and its contents
        (11, 3, "helper function parameters"), // Should have 3 instructions
        (12, 4, "helper addition operation"),  // Should have 4 instructions
        // Test functions (identical patterns)
        (21, 1, "test_1 first line"), // Each test line should have
        (22, 1, "test_1 second line"), // exactly one instruction
        (23, 1, "test_1 helper call"),
        (24, 1, "test_1 assertion"),
    ];
    for (line, expected_count, description) in key_locations {
        let instructions = line_to_instructions
            .get(&line)
            .unwrap_or_else(|| panic!("Missing mapping for line {line}: {description}"));
        assert_eq!(
            instructions.len(),
            expected_count,
            "Line {line} ({description}): Expected {expected_count} instructions, found {instructions:?}"
        );
    }
}
/// Asserts that the given event is a Stopped event with a breakpoint reason and the given breakpoint ID.
fn assert_stopped_breakpoint_event(event: Option<Event>, breakpoint_id: i64) {
    let body = match event.expect("received event") {
        Event::Stopped(body) => body,
        other => panic!("Expected Stopped event, got {other:?}"),
    };
    assert!(matches!(body.reason, StoppedEventReason::Breakpoint));
    assert_eq!(body.hit_breakpoint_ids, Some(vec![breakpoint_id]));
}
/// Asserts that the given event is a Stopped event with the right reason and no breakpoint ID.
fn assert_stopped_next_event(event: Option<Event>) {
    let body = match event.expect("received event") {
        Event::Stopped(body) => body,
        other => panic!("Expected Stopped event, got {other:?}"),
    };
    assert!(matches!(body.reason, StoppedEventReason::Step));
    assert_eq!(body.hit_breakpoint_ids, None);
}
/// Unwraps an Output event and returns its body; panics on any other event kind.
fn assert_output_event_body(event: Option<Event>) -> OutputEventBody {
    let received = event.expect("received event");
    match received {
        Event::Output(body) => body,
        other => panic!("Expected Output event, got {other:?}"),
    }
}
/// Asserts that the event is the standard "not supported" notice emitted on stderr.
fn assert_not_supported_event(event: Option<Event>) {
    let output_body = assert_output_event_body(event);
    assert_eq!(
        output_body.output,
        "This feature is not currently supported."
    );
    assert!(matches!(
        output_body.category,
        Some(OutputEventCategory::Stderr)
    ));
}
/// Asserts that the given variables match the expected (name, value) pairs.
fn assert_variables_eq(expected: Vec<(&str, &str)>, actual: Vec<Variable>) {
    // Same length first, so zip below cannot silently drop trailing entries.
    assert_eq!(actual.len(), expected.len());
    for ((name, value), var) in expected.iter().zip(actual.iter()) {
        assert_eq!(var.name, *name);
        assert_eq!(var.value, *value);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-debug/examples/client_usage.rs | forc-plugins/forc-debug/examples/client_usage.rs | use forc_debug::{ContractId, FuelClient, Transaction};
#[tokio::main]
async fn main() {
    // Thin entry point: delegate to `run_example` so its body can use `?`.
    run_example().await.expect("Running example failed");
}
/// Walks through a full debug session against a locally running node:
/// breakpoint, state inspection, single-stepping, and resumption.
async fn run_example() -> Result<(), anyhow::Error> {
    // Connect to the GraphQL endpoint of a local fuel-core node.
    let client = FuelClient::new("http://localhost:4000/graphql")?;
    // All subsequent debugger calls are scoped to this session id.
    let session_id = client.start_session().await?;
    // Set a breakpoint at instruction offset 0 for the zeroed contract id.
    client
        .set_breakpoint(&session_id, ContractId::zeroed(), 0)
        .await?;
    let tx: Transaction =
        serde_json::from_str(include_str!("example_tx.json")).expect("Invalid transaction JSON");
    // Start executing the transaction; it should stop at the breakpoint.
    let status = client.start_tx(&session_id, &tx).await?;
    assert!(status.breakpoint.is_some());
    // While stopped, inspect VM state: one register and a memory slice.
    let value = client.register(&session_id, 12).await?;
    println!("reg[12] = {value}");
    let mem = client.memory(&session_id, 0x10, 0x20).await?;
    println!("mem[0x10..0x30] = {mem:?}");
    // Single-step one instruction (continue_tx stops again immediately)…
    client.set_single_stepping(&session_id, true).await?;
    let status = client.continue_tx(&session_id).await?;
    assert!(status.breakpoint.is_some());
    // …then disable stepping and run the transaction to completion.
    client.set_single_stepping(&session_id, false).await?;
    let status = client.continue_tx(&session_id).await?;
    assert!(status.breakpoint.is_none());
    client.end_session(&session_id).await?;
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/build.rs | forc-plugins/forc-client/build.rs | use std::fs;
use std::path::{Path, PathBuf};
/// Removes insignificant whitespace from a JSON document while leaving the
/// contents of string literals (including whitespace and escape sequences)
/// untouched.
///
/// Escapes are tracked with a dedicated flag instead of peeking at the
/// previous character: the previous-character check misread a quote following
/// an escaped backslash (`…\\"`) as an escaped quote, so the minifier never
/// left the string and stopped stripping whitespace.
fn minify_json(json: &str) -> String {
    let mut result = String::with_capacity(json.len());
    let mut in_string = false;
    // True when the current character is preceded by an unconsumed backslash.
    let mut escaped = false;
    for c in json.chars() {
        if in_string {
            result.push(c);
            if escaped {
                // This character is escaped: it cannot close the string and it
                // cannot start a new escape (covers `\\` and `\"` correctly).
                escaped = false;
            } else if c == '\\' {
                escaped = true;
            } else if c == '"' {
                in_string = false;
            }
        } else {
            match c {
                '"' => {
                    result.push(c);
                    in_string = true;
                }
                ' ' | '\n' | '\r' | '\t' => {} // Skip insignificant whitespace
                _ => result.push(c),
            }
        }
    }
    result
}
/// Rewrites the `abigen!(Contract(name = "ProxyContract", …))` invocation in
/// `source_file_path` so its embedded ABI string matches `minified_json`.
fn update_proxy_abi_decl_with_file(source_file_path: &Path, minified_json: &str) {
    let source_code = fs::read_to_string(source_file_path).expect("Unable to read source file");
    // Escape the JSON so it can be embedded in a Rust string literal.
    let escaped_json = minified_json.replace('\\', "\\\\").replace('"', "\\\"");
    let new_abigen =
        format!("abigen!(Contract(name = \"ProxyContract\", abi = \"{escaped_json}\",));");
    let re = regex::Regex::new(r#"abigen!\(Contract\(name = "ProxyContract", abi = ".*?",\)\);"#)
        .expect("Invalid regex pattern");
    // Refuse to continue rather than silently leaving the ABI stale.
    if !re.is_match(&source_code) {
        panic!("abigen! macro not found in the source file");
    }
    let updated = re.replace(&source_code, new_abigen.as_str());
    fs::write(source_file_path, updated.as_ref()).expect("Unable to write back to the source file");
}
fn main() {
    // The ABI JSON lives in the crate root, next to `src`.
    let json_path = PathBuf::from("proxy_abi/proxy_contract-abi.json");
    let json_content =
        fs::read_to_string(json_path).expect("Unable to read proxy_contract-abi.json");
    let minified_json = minify_json(&json_content);
    // Re-run this build script whenever the ABI JSON changes.
    println!("cargo:rerun-if-changed=proxy_abi/proxy_contract-abi.json");
    // Every file that declares a `ProxyContract` via `abigen!` gets the
    // refreshed ABI embedded.
    for target in ["src/util/tx.rs", "tests/deploy.rs", "src/op/deploy.rs"] {
        update_proxy_abi_decl_with_file(&PathBuf::from(target), &minified_json);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/lib.rs | forc-plugins/forc-client/src/lib.rs | pub mod cmd;
pub mod constants;
pub mod op;
pub mod util;
use clap::Parser;
use forc_pkg::manifest::Network;
use serde::{Deserialize, Serialize};
use util::target::Target;
/// Flags for specifying the node to target.
#[derive(Debug, Default, Clone, Parser, Deserialize, Serialize)]
pub struct NodeTarget {
    /// The URL of the Fuel node to which we're submitting the transaction.
    /// If unspecified, checks the manifest's `network` table, then falls back
    /// to `http://127.0.0.1:4000`
    ///
    /// You can also use `--target`, `--devnet`, `--testnet`, or `--mainnet` to specify the Fuel node.
    #[clap(long, env = "FUEL_NODE_URL")]
    pub node_url: Option<String>,
    /// Preset configurations for using a specific target.
    ///
    /// You can also use `--node-url`, `--devnet`, `--testnet`, or `--mainnet` to specify the Fuel node.
    ///
    /// Possible values are: [local, testnet, devnet, mainnet]
    #[clap(long)]
    pub target: Option<Target>,
    /// Use preset configuration for mainnet.
    ///
    /// You can also use `--node-url`, `--target`, `--devnet`, or `--testnet` to specify the Fuel node.
    #[clap(long)]
    pub mainnet: bool,
    /// Use preset configuration for testnet.
    ///
    /// You can also use `--node-url`, `--target`, `--devnet`, or `--mainnet` to specify the Fuel node.
    #[clap(long)]
    pub testnet: bool,
    /// Use preset configuration for devnet.
    ///
    /// You can also use `--node-url`, `--target`, `--testnet`, or `--mainnet` to specify the Fuel node.
    #[clap(long)]
    pub devnet: bool,
}
impl NodeTarget {
    /// Returns the URL to use for connecting to Fuel Core node.
    ///
    /// Precedence: `--mainnet`/`--testnet`/`--devnet`, then `--target`, then
    /// `--node-url`, then the manifest's `network` table, and finally the
    /// default local node URL.
    ///
    /// # Errors
    /// Fails when more than one of the mutually exclusive options is given.
    pub fn get_node_url(&self, manifest_network: &Option<Network>) -> anyhow::Result<String> {
        let options_count = [
            self.mainnet,
            self.testnet,
            self.devnet,
            self.target.is_some(),
            self.node_url.is_some(),
        ]
        .iter()
        .filter(|&&x| x)
        .count();
        // ensure at most one option is specified
        if options_count > 1 {
            anyhow::bail!("Only one of `--mainnet`, `--testnet`, `--devnet`, `--target`, or `--node-url` should be specified");
        }
        // An if-let chain avoids the `unwrap()`s of the old guard-based match.
        let node_url = if self.mainnet {
            Target::mainnet().target_url()
        } else if self.testnet {
            Target::testnet().target_url()
        } else if self.devnet {
            Target::devnet().target_url()
        } else if let Some(target) = &self.target {
            target.target_url()
        } else if let Some(node_url) = &self.node_url {
            node_url.clone()
        } else {
            manifest_network
                .as_ref()
                .map(|nw| &nw.url[..])
                .unwrap_or(crate::constants::NODE_URL)
                .to_string()
        };
        Ok(node_url)
    }
    /// Returns the URL for explorer
    ///
    /// Only the testnet/mainnet presets (flag or `--target`) have a known
    /// explorer; every other configuration yields `None`.
    pub fn get_explorer_url(&self) -> Option<String> {
        // Match on references to avoid cloning the target/URL just to inspect them.
        match (
            self.testnet,
            self.mainnet,
            self.devnet,
            self.target.as_ref(),
            self.node_url.as_ref(),
        ) {
            (true, false, _, None, None) => Target::testnet().explorer_url(),
            (false, true, _, None, None) => Target::mainnet().explorer_url(),
            (false, false, _, Some(target), None) => target.explorer_url(),
            _ => None,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Convenience constructor: (target, node_url, mainnet, testnet, devnet).
    fn node(
        target: Option<Target>,
        node_url: Option<String>,
        mainnet: bool,
        testnet: bool,
        devnet: bool,
    ) -> NodeTarget {
        NodeTarget {
            target,
            node_url,
            mainnet,
            testnet,
            devnet,
        }
    }
    #[test]
    fn test_get_explorer_url_mainnet() {
        let actual = node(Some(Target::Mainnet), None, false, false, false)
            .get_explorer_url()
            .unwrap();
        assert_eq!("https://app.fuel.network", actual);
    }
    #[test]
    fn test_get_explorer_url_testnet() {
        let actual = node(Some(Target::Testnet), None, false, false, false)
            .get_explorer_url()
            .unwrap();
        assert_eq!("https://app-testnet.fuel.network", actual);
    }
    #[test]
    fn test_get_explorer_url_devnet() {
        let actual = node(Some(Target::Devnet), None, false, false, true).get_explorer_url();
        assert_eq!(None, actual);
    }
    #[test]
    fn test_get_explorer_url_local() {
        let actual = node(Some(Target::Local), None, false, false, false).get_explorer_url();
        assert_eq!(None, actual);
    }
    #[test]
    fn test_get_node_url_testnet() {
        let actual = node(None, None, false, true, false)
            .get_node_url(&None)
            .unwrap();
        assert_eq!("https://testnet.fuel.network", actual);
    }
    #[test]
    fn test_get_node_url_mainnet() {
        let actual = node(None, None, true, false, false)
            .get_node_url(&None)
            .unwrap();
        assert_eq!("https://mainnet.fuel.network", actual);
    }
    #[test]
    fn test_get_node_url_target_mainnet() {
        let actual = node(Some(Target::Mainnet), None, false, false, false)
            .get_node_url(&None)
            .unwrap();
        assert_eq!("https://mainnet.fuel.network", actual);
    }
    #[test]
    fn test_get_node_url_target_testnet() {
        let actual = node(Some(Target::Testnet), None, false, false, false)
            .get_node_url(&None)
            .unwrap();
        assert_eq!("https://testnet.fuel.network", actual);
    }
    #[test]
    fn test_get_node_url_default() {
        let actual = node(None, None, false, false, false)
            .get_node_url(&None)
            .unwrap();
        assert_eq!("http://127.0.0.1:4000", actual);
    }
    #[test]
    fn test_get_node_url_local() {
        let actual = node(Some(Target::Local), None, false, false, false)
            .get_node_url(&None)
            .unwrap();
        assert_eq!("http://127.0.0.1:4000", actual);
    }
    #[test]
    #[should_panic(
        expected = "Only one of `--mainnet`, `--testnet`, `--devnet`, `--target`, or `--node-url` should be specified"
    )]
    fn test_get_node_url_local_testnet() {
        node(Some(Target::Local), None, false, true, false)
            .get_node_url(&None)
            .unwrap();
    }
    #[test]
    #[should_panic(
        expected = "Only one of `--mainnet`, `--testnet`, `--devnet`, `--target`, or `--node-url` should be specified"
    )]
    fn test_get_node_url_same_url() {
        node(
            Some(Target::Testnet),
            Some("testnet.fuel.network".to_string()),
            false,
            false,
            false,
        )
        .get_node_url(&None)
        .unwrap();
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/constants.rs | forc-plugins/forc-client/src/constants.rs | /// Default to localhost to favour the common case of testing.
pub const NODE_URL: &str = sway_utils::constants::DEFAULT_NODE_URL;
/// Node endpoint used by the `mainnet` preset.
pub const MAINNET_ENDPOINT_URL: &str = "https://mainnet.fuel.network";
/// Node endpoint used by the `testnet` preset.
pub const TESTNET_ENDPOINT_URL: &str = "https://testnet.fuel.network";
/// Node endpoint used by the `devnet` preset.
pub const DEVNET_ENDPOINT_URL: &str = "https://devnet.fuel.network";
/// Faucet for obtaining test funds on testnet.
pub const TESTNET_FAUCET_URL: &str = "https://faucet-testnet.fuel.network";
/// Faucet for obtaining test funds on devnet.
pub const DEVNET_FAUCET_URL: &str = "https://faucet-devnet.fuel.network";
/// Block explorer for mainnet.
pub const MAINNET_EXPLORER_URL: &str = "https://app.fuel.network";
/// Block explorer for testnet.
pub const TESTNET_EXPLORER_URL: &str = "https://app-testnet.fuel.network";
/// Default PrivateKey to sign transactions submitted to local node.
pub const DEFAULT_PRIVATE_KEY: &str =
    "0xde97d8624a438121b86a1956544bd72ed68cd69f2c99555b08b1e8c51ffd511c";
/// The maximum time to wait for a transaction to be included in a block by the node
pub const TX_SUBMIT_TIMEOUT_MS: u64 = 30_000u64;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/aws.rs | forc-plugins/forc-client/src/util/aws.rs | use async_trait::async_trait;
use aws_config::{default_provider::credentials::DefaultCredentialsChain, Region, SdkConfig};
use aws_sdk_kms::config::Credentials;
use aws_sdk_kms::operation::get_public_key::GetPublicKeyOutput;
use aws_sdk_kms::primitives::Blob;
use aws_sdk_kms::types::{MessageType, SigningAlgorithmSpec};
use aws_sdk_kms::{config::BehaviorVersion, Client};
use fuel_crypto::Message;
use fuels::prelude::*;
use fuels::types::coin_type_id::CoinTypeId;
use fuels::types::input::Input;
use fuels_accounts::provider::Provider;
use fuels_accounts::{Account, ViewOnlyAccount};
use fuels_core::traits::Signer;
/// AWS configuration for the `AwsSigner` to be created.
/// De-facto way of creating the configuration is to load it from env.
#[derive(Debug, Clone)]
pub struct AwsConfig {
    // Fully resolved AWS SDK configuration (credentials, region, endpoint).
    sdk_config: SdkConfig,
}
impl AwsConfig {
    /// Load configuration from environment variables.
    /// For more details see: https://docs.rs/aws-config/latest/aws_config/
    pub async fn from_env() -> Self {
        let mut loader = aws_config::defaults(BehaviorVersion::latest())
            .credentials_provider(DefaultCredentialsChain::builder().build().await);
        // E2E tests may point the SDK at a local KMS stand-in.
        if let Ok(url) = std::env::var("E2E_TEST_AWS_ENDPOINT") {
            loader = loader.endpoint_url(url);
        }
        Self {
            sdk_config: loader.load().await,
        }
    }
    /// Builds a configuration with static dummy credentials, aimed at `url`.
    pub async fn for_testing(url: String) -> Self {
        let credentials = Credentials::new("test", "test", None, None, "Static Credentials");
        let sdk_config = aws_config::defaults(BehaviorVersion::latest())
            .credentials_provider(credentials)
            .endpoint_url(url)
            .region(Region::new("us-east-1")) // placeholder region for test
            .load()
            .await;
        Self { sdk_config }
    }
    /// Endpoint URL override, when one is configured.
    pub fn url(&self) -> Option<&str> {
        self.sdk_config.endpoint_url()
    }
    /// AWS region, when one is configured.
    pub fn region(&self) -> Option<&Region> {
        self.sdk_config.region()
    }
}
/// A configured `AwsClient` which allows using the AWS KMS SDK.
#[derive(Clone, Debug)]
pub struct AwsClient {
    client: Client,
}
impl AwsClient {
    /// Builds the SDK client from the given configuration.
    pub fn new(config: AwsConfig) -> Self {
        Self {
            client: Client::new(&config.sdk_config),
        }
    }
    /// Borrow the underlying AWS KMS SDK client.
    pub fn inner(&self) -> &Client {
        &self.client
    }
}
/// A signer which is capable of signing `fuel_crypto::Message`s using AWS KMS.
/// This is both a `Signer` and `Account`, which means it is directly usable
/// with most of the fuels-* calls, without any additional operations on the
/// representation.
#[derive(Clone, Debug)]
pub struct AwsSigner {
    // KMS client used for all signing requests.
    kms: AwsClient,
    // Identifier of the KMS key this signer signs with.
    key_id: String,
    // Fuel address derived from the hash of the key's public key.
    address: Address,
    // DER-encoded public key; used to determine the signature recovery id.
    public_key_bytes: Vec<u8>,
    // Provider used for chain queries and transaction submission.
    provider: Provider,
}
/// Fetches the public key material for `key_id` from KMS.
async fn request_get_pubkey(
    kms: &Client,
    key_id: String,
) -> std::result::Result<GetPublicKeyOutput, anyhow::Error> {
    // `?` applies the same `Into<anyhow::Error>` conversion as `map_err`.
    let response = kms.get_public_key().key_id(key_id).send().await?;
    Ok(response)
}
/// Decode an AWS KMS Pubkey response.
fn decode_pubkey(resp: &GetPublicKeyOutput) -> std::result::Result<Vec<u8>, anyhow::Error> {
    match resp.public_key.as_ref() {
        Some(blob) => Ok(blob.clone().into_inner()),
        None => Err(anyhow::anyhow!("public key not found")),
    }
}
/// Signs the pre-hashed `message` with the KMS key `key_id` and returns a
/// Fuel-compatible recoverable signature: 64 bytes with the recovery parity
/// bit folded into the most significant bit of byte 32.
async fn sign_with_kms(
    client: &aws_sdk_kms::Client,
    key_id: &str,
    public_key_bytes: &[u8],
    message: Message,
) -> anyhow::Result<fuel_crypto::Signature> {
    use k256::{
        ecdsa::{RecoveryId, VerifyingKey},
        pkcs8::DecodePublicKey,
    };
    // `MessageType::Digest`: the message is already a hash, so KMS must not
    // hash it again before signing.
    let reply = client
        .sign()
        .key_id(key_id)
        .signing_algorithm(SigningAlgorithmSpec::EcdsaSha256)
        .message_type(MessageType::Digest)
        .message(Blob::new(*message))
        .send()
        .await
        .inspect_err(|err| tracing::error!("Failed to sign with AWS KMS: {err:?}"))?;
    let signature_der = reply
        .signature
        .ok_or_else(|| anyhow::anyhow!("no signature returned from AWS KMS"))?
        .into_inner();
    // https://stackoverflow.com/a/71475108
    let sig = k256::ecdsa::Signature::from_der(&signature_der)
        .map_err(|_| anyhow::anyhow!("invalid DER signature from AWS KMS"))?;
    // Low-S normalization; `normalize_s` returns None when already normalized.
    let sig = sig.normalize_s().unwrap_or(sig);
    // This is a hack to get the recovery id. The signature should be normalized
    // before computing the recovery id, but aws kms doesn't support this, and
    // instead always computes the recovery id from non-normalized signature.
    // So instead the recovery id is determined by checking which variant matches
    // the original public key.
    let recid1 = RecoveryId::new(false, false);
    let recid2 = RecoveryId::new(true, false);
    let rec1 = VerifyingKey::recover_from_prehash(&*message, &sig, recid1);
    let rec2 = VerifyingKey::recover_from_prehash(&*message, &sig, recid2);
    let correct_public_key = k256::PublicKey::from_public_key_der(public_key_bytes)
        .map_err(|_| anyhow::anyhow!("invalid DER public key from AWS KMS"))?
        .into();
    let recovery_id = if rec1.map(|r| r == correct_public_key).unwrap_or(false) {
        recid1
    } else if rec2.map(|r| r == correct_public_key).unwrap_or(false) {
        recid2
    } else {
        anyhow::bail!("Invalid signature generated (reduced-x form coordinate)");
    };
    // Insert the recovery id into the signature
    debug_assert!(
        !recovery_id.is_x_reduced(),
        "reduced-x form coordinates are caught by the if-else chain above"
    );
    let v = recovery_id.is_y_odd() as u8;
    let mut signature = <[u8; 64]>::from(sig.to_bytes());
    // Pack the parity bit into the top bit of the first byte of `s`; low-S
    // normalization above guarantees that bit is zero.
    signature[32] = (v << 7) | (signature[32] & 0x7f);
    Ok(fuel_crypto::Signature::from_bytes(signature))
}
impl AwsSigner {
    /// Creates a signer for `key_id`, deriving the Fuel address from the
    /// public key fetched from KMS.
    pub async fn new(
        kms: AwsClient,
        key_id: String,
        provider: Provider,
    ) -> std::result::Result<Self, anyhow::Error> {
        use k256::pkcs8::DecodePublicKey;
        let resp = request_get_pubkey(kms.inner(), key_id.clone()).await?;
        let public_key_bytes = decode_pubkey(&resp)?;
        // DER -> k256 -> fuel public key; the address is the key's hash.
        let k256_public_key = k256::PublicKey::from_public_key_der(&public_key_bytes)?;
        let public_key = fuel_crypto::PublicKey::from(k256_public_key);
        let address = Address::from(*public_key.hash());
        Ok(Self {
            kms,
            key_id,
            address,
            public_key_bytes,
            provider,
        })
    }
    /// Sign a digest with the key associated with a key ID.
    pub async fn sign_message_with_key(
        &self,
        key_id: String,
        message: Message,
    ) -> std::result::Result<fuel_crypto::Signature, anyhow::Error> {
        sign_with_kms(self.kms.inner(), &key_id, &self.public_key_bytes, message).await
    }
    /// Sign a digest with this signer's key.
    pub async fn sign_message(
        &self,
        message: Message,
    ) -> std::result::Result<fuel_crypto::Signature, anyhow::Error> {
        let key_id = self.key_id.clone();
        self.sign_message_with_key(key_id, message).await
    }
    /// The provider this signer talks to the chain through.
    pub fn provider(&self) -> &Provider {
        &self.provider
    }
}
#[async_trait]
impl Signer for AwsSigner {
    async fn sign(&self, message: Message) -> Result<fuel_crypto::Signature> {
        // Collapse any KMS failure into a generic fuels error.
        self.sign_message(message).await.map_err(|_| {
            fuels_core::types::errors::Error::Other("aws signer failed".to_string())
        })
    }
    fn address(&self) -> Address {
        self.address
    }
}
#[async_trait]
impl ViewOnlyAccount for AwsSigner {
    fn address(&self) -> Address {
        self.address
    }
    fn try_provider(&self) -> Result<&Provider> {
        Ok(&self.provider)
    }
    /// Collects spendable resources covering `amount` of `asset_id` and turns
    /// each one into a signed input.
    async fn get_asset_inputs_for_amount(
        &self,
        asset_id: AssetId,
        amount: u128,
        excluded_coins: Option<Vec<CoinTypeId>>,
    ) -> Result<Vec<Input>> {
        let resources = self
            .get_spendable_resources(asset_id, amount, excluded_coins)
            .await?;
        Ok(resources.into_iter().map(Input::resource_signed).collect())
    }
}
// Marker impl: no extra methods are overridden beyond what the
// `ViewOnlyAccount` and `Signer` impls already provide.
#[async_trait]
impl Account for AwsSigner {}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/encode.rs | forc-plugins/forc-client/src/util/encode.rs | use anyhow::Context;
use fuel_abi_types::abi::full_program::FullTypeApplication;
use std::str::FromStr;
use sway_types::u256::U256;
/// A wrapper around fuels_core::types::Token, which enables serde de/serialization.
///
/// Instances are built from CLI-provided literals via `Token::from_type_and_value`.
#[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize)]
pub(crate) struct Token(pub(crate) fuels_core::types::Token);
// The set of types supported when building a `Token` from a string value;
// parsed from their Sway spelling by the `FromStr` impl below.
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum Type {
    Unit,
    U8,
    U16,
    U32,
    U64,
    U256,
    Bool,
}
impl TryFrom<&FullTypeApplication> for Type {
    type Error = anyhow::Error;
    fn try_from(value: &FullTypeApplication) -> Result<Self, Self::Error> {
        // Delegate to the same string parser used for CLI-provided types.
        Type::from_str(&value.type_decl.type_field)
    }
}
impl Token {
    /// Generate a new token using provided type information and the value for the argument.
    ///
    /// Generates an error if there is a mismatch between the type information and the provided
    /// value for that type.
    #[allow(dead_code)]
    pub(crate) fn from_type_and_value(arg_type: &Type, value: &str) -> anyhow::Result<Self> {
        use fuels_core::types::Token as CoreToken;
        let token = match arg_type {
            Type::Unit => CoreToken::Unit,
            // `parse()?` infers the integer width from the variant it feeds.
            Type::U8 => CoreToken::U8(value.parse()?),
            Type::U16 => CoreToken::U16(value.parse()?),
            Type::U32 => CoreToken::U32(value.parse()?),
            Type::U64 => CoreToken::U64(value.parse()?),
            Type::U256 => {
                // Parse through sway's `U256`, then hand the big-endian bytes
                // to the fuels representation.
                let parsed = value.parse::<U256>().context("u256 literal out of range")?;
                CoreToken::U256(parsed.to_be_bytes().into())
            }
            Type::Bool => CoreToken::Bool(value.parse()?),
        };
        Ok(Token(token))
    }
}
impl FromStr for Type {
    type Err = anyhow::Error;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Accepted spellings mirror the Sway type names.
        let parsed = match s {
            "()" => Type::Unit,
            "u8" => Type::U8,
            "u16" => Type::U16,
            "u32" => Type::U32,
            "u64" => Type::U64,
            "u256" => Type::U256,
            "bool" => Type::Bool,
            other => anyhow::bail!("{other} type is not supported."),
        };
        Ok(parsed)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_token_generation_success() {
        // Each (type, literal) pair should produce the matching fuels token.
        let cases = [
            (Type::U8, "1", fuels_core::types::Token::U8(1)),
            (Type::U16, "1", fuels_core::types::Token::U16(1)),
            (Type::U32, "1", fuels_core::types::Token::U32(1)),
            (Type::U64, "1", fuels_core::types::Token::U64(1)),
            (Type::Bool, "true", fuels_core::types::Token::Bool(true)),
        ];
        for (arg_type, value, expected) in cases {
            let token = Token::from_type_and_value(&arg_type, value).unwrap();
            assert_eq!(token, Token(expected));
        }
    }
    #[test]
    #[should_panic]
    fn test_token_generation_fail_type_mismatch() {
        Token::from_type_and_value(&Type::U8, "false").unwrap();
    }
    #[test]
    fn test_type_generation_success() {
        let cases = [
            ("()", Type::Unit),
            ("u8", Type::U8),
            ("u16", Type::U16),
            ("u32", Type::U32),
            ("u64", Type::U64),
            ("u256", Type::U256),
            ("bool", Type::Bool),
        ];
        for (type_str, expected) in cases {
            assert_eq!(Type::from_str(type_str).unwrap(), expected);
        }
    }
    #[test]
    #[should_panic(expected = "u2 type is not supported.")]
    fn test_type_generation_fail_invalid_type() {
        Type::from_str("u2").unwrap();
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.