repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/tx.rs | forc-plugins/forc-client/src/util/tx.rs | use crate::{
constants::DEFAULT_PRIVATE_KEY,
util::{account::ForcClientAccount, aws::AwsSigner, target::Target},
};
use anyhow::Result;
use dialoguer::{theme::ColorfulTheme, Confirm, Password, Select};
use forc_tracing::{println_action_green, println_warning};
use forc_wallet::{
account::{derive_secret_key, new_at_index_cli},
balance::{collect_accounts_with_verification, AccountBalances, AccountVerification},
import::{import_wallet_cli, Import},
new::{new_wallet_cli, New},
utils::default_wallet_path,
};
use fuel_crypto::SecretKey;
use fuel_tx::{AssetId, ContractId};
use fuels::{
macros::abigen, programs::responses::CallResponse, types::checksum_address::checksum_encode,
};
use fuels_accounts::{
provider::Provider,
signers::private_key::PrivateKeySigner,
wallet::{Unlocked, Wallet},
ViewOnlyAccount,
};
use std::{collections::BTreeMap, path::Path, str::FromStr};
use super::aws::{AwsClient, AwsConfig};
/// Map from a forc-wallet account's derivation index to its address.
type AccountsMap = BTreeMap<usize, fuel_tx::Address>;
/// Selects the signing backend used for `forc-client` operations.
#[derive(PartialEq, Eq)]
pub enum SignerSelectionMode {
    /// Holds the password of forc-wallet instance.
    ForcWallet(String),
    /// Holds ARN of the AWS signer.
    AwsSigner(String),
    /// Sign with a user-supplied private key (or the default signer).
    Manual,
}
fn ask_user_yes_no_question(question: &str) -> Result<bool> {
let answer = Confirm::with_theme(&ColorfulTheme::default())
.with_prompt(question)
.default(false)
.show_default(false)
.interact()?;
Ok(answer)
}
fn ask_user_with_options(question: &str, options: &[&str], default: usize) -> Result<usize> {
let selection = Select::with_theme(&ColorfulTheme::default())
.with_prompt(question)
.items(options)
.default(default)
.interact()?;
Ok(selection)
}
async fn collect_user_accounts(
wallet_path: &Path,
password: &str,
node_url: &str,
) -> Result<AccountsMap> {
let verification = AccountVerification::Yes(password.to_string());
let node_url = reqwest::Url::parse(node_url)
.map_err(|e| anyhow::anyhow!("Failed to parse node URL: {}", e))?;
let accounts = collect_accounts_with_verification(wallet_path, verification, &node_url)
.await
.map_err(|e| {
if e.to_string().contains("Mac Mismatch") {
anyhow::anyhow!("Failed to access forc-wallet vault. Please check your password")
} else {
e
}
})?;
let accounts = accounts
.into_iter()
.map(|(index, address)| {
let bytes: [u8; fuel_tx::Address::LEN] = address.into();
(index, fuel_tx::Address::from(bytes))
})
.collect();
Ok(accounts)
}
pub(crate) fn prompt_forc_wallet_password() -> Result<String> {
let password = Password::with_theme(&ColorfulTheme::default())
.with_prompt("Wallet password")
.allow_empty_password(true)
.interact()?;
Ok(password)
}
/// Ensure a forc-wallet vault exists at `wallet_path`.
///
/// If none is found, interactively offer to create a new wallet or import an
/// existing one, then derive the first (index 0) account so the wallet is
/// immediately usable. Bails if the user declines both options.
pub(crate) async fn check_and_create_wallet_at_default_path(wallet_path: &Path) -> Result<()> {
    if !wallet_path.exists() {
        let question =
            format!("Could not find a wallet at {wallet_path:?}, please select an option: ");
        let wallet_options = ask_user_with_options(
            &question,
            &["Create new wallet", "Import existing wallet"],
            0,
        )?;
        let ctx = forc_wallet::CliContext {
            wallet_path: wallet_path.to_path_buf(),
            node_url: forc_wallet::network::DEFAULT.parse().unwrap(),
        };
        // Option indices correspond to the list passed above.
        match wallet_options {
            0 => {
                new_wallet_cli(&ctx, New { force: false, cache_accounts: None }).await?;
                println!("Wallet created successfully.");
            }
            1 => {
                import_wallet_cli(&ctx, Import { force: false, cache_accounts: None }).await?;
                println!("Wallet imported successfully.");
            },
            _ => anyhow::bail!("Refused to create or import a new wallet. If you don't want to use forc-wallet, you can sign this transaction manually with --manual-signing flag."),
        }
        // Derive first account for the fresh wallet we created.
        new_at_index_cli(&ctx, 0).await?;
        println!("Account derived successfully.");
    }
    Ok(())
}
/// Derive the secret key for `account_index` from the forc-wallet vault at
/// `wallet_path`, unlocking it with `password`.
///
/// Vault "Mac Mismatch" errors are surfaced as a password problem.
pub(crate) fn secret_key_from_forc_wallet(
    wallet_path: &Path,
    account_index: usize,
    password: &str,
) -> Result<SecretKey> {
    let derived = match derive_secret_key(wallet_path, account_index, password) {
        Ok(key) => key,
        Err(e) if e.to_string().contains("Mac Mismatch") => {
            anyhow::bail!("Failed to access forc-wallet vault. Please check your password")
        }
        Err(e) => return Err(e),
    };
    SecretKey::try_from(derived.as_ref())
        .map_err(|e| anyhow::anyhow!("Failed to convert secret key: {e}"))
}
/// Resolve the secret key used for manual signing.
///
/// * An explicit `signing_key` always wins; a warning is printed if the
///   default signer was requested at the same time.
/// * With `default_signer` and no explicit key, fall back to the well-known
///   `DEFAULT_PRIVATE_KEY`.
/// * Returns `None` when neither was requested.
pub(crate) fn select_manual_secret_key(
    default_signer: bool,
    signing_key: Option<SecretKey>,
) -> Option<SecretKey> {
    if let Some(key) = signing_key {
        if default_signer {
            println_warning("Signing key is provided while requesting to sign with a default signer. Using signing key");
        }
        Some(key)
    } else if default_signer {
        // Note: unwrap is safe here as we already know that 'DEFAULT_PRIVATE_KEY' is a valid private key.
        Some(SecretKey::from_str(DEFAULT_PRIVATE_KEY).unwrap())
    } else {
        None
    }
}
/// Collect and return balances of each account in the accounts map.
///
/// Balances are fetched concurrently; the result is ordered like
/// `accounts_map` iteration order.
async fn collect_account_balances(
    accounts_map: &AccountsMap,
    provider: &Provider,
) -> Result<AccountBalances> {
    let locked_wallets: Vec<_> = accounts_map
        .values()
        .map(|addr| Wallet::new_locked(*addr, provider.clone()))
        .collect();
    let balance_futures = locked_wallets.iter().map(|wallet| wallet.get_balances());
    futures::future::try_join_all(balance_futures)
        .await
        .map_err(|e| anyhow::anyhow!("{e}"))
}
/// Format collected account balances for each asset type, including only the
/// balance of the base asset that can be used to pay gas.
///
/// Each entry is rendered as `[index] <checksummed address> - <amount> ETH`,
/// where the amount is the base-asset balance scaled by 10^-9.
pub fn format_base_asset_account_balances(
    accounts_map: &AccountsMap,
    account_balances: &AccountBalances,
    base_asset_id: &AssetId,
) -> Result<Vec<String>> {
    let base_asset_key = base_asset_id.to_string();
    let mut formatted = Vec::with_capacity(accounts_map.len());
    for ((ix, address), balance) in accounts_map.iter().zip(account_balances) {
        // Accounts without any base-asset entry are treated as a 0 balance.
        let base_asset_amount = balance.get(&base_asset_key).copied().unwrap_or(0);
        let checksum_addr = checksum_encode(&format!("0x{address}"))?;
        let eth_amount = base_asset_amount as f64 / 1_000_000_000.0;
        formatted.push(format!("[{ix}] {checksum_addr} - {eth_amount} ETH"));
    }
    Ok(formatted)
}
// TODO: Simplify the function signature once https://github.com/FuelLabs/sway/issues/6071 is closed.
/// Resolve the account to sign with, according to `wallet_mode`:
///
/// * `ForcWallet(password)` - list the forc-wallet accounts, bail with a
///   faucet hint if none holds any funds, ask the user to confirm signing
///   `tx_count` transaction(s), then let them pick an account interactively.
/// * `Manual` - build a wallet from `signing_key` / the default signer.
/// * `AwsSigner(arn)` - sign through the AWS KMS key with the given ARN.
pub(crate) async fn select_account(
    wallet_mode: &SignerSelectionMode,
    default_sign: bool,
    signing_key: Option<SecretKey>,
    provider: &Provider,
    tx_count: usize,
) -> Result<ForcClientAccount> {
    let chain_info = provider.chain_info().await?;
    match wallet_mode {
        SignerSelectionMode::ForcWallet(password) => {
            let wallet_path = default_wallet_path();
            let accounts = collect_user_accounts(&wallet_path, password, provider.url()).await?;
            let account_balances = collect_account_balances(&accounts, provider).await?;
            // Sum every asset across every account; zero means the wallet
            // cannot pay for the transaction at all.
            let total_balance = account_balances
                .iter()
                .flat_map(|account| account.values())
                .sum::<u128>();
            if total_balance == 0 {
                let first_account = accounts
                    .get(&0)
                    .ok_or_else(|| anyhow::anyhow!("No account derived for this wallet"))?;
                // Map the node's chain name to a known target so the error
                // can point at the right faucet (if the network has one).
                let target = Target::from_str(&chain_info.name).unwrap_or_default();
                let message = if let Some(faucet_url) = target.faucet_url() {
                    format!(
                        "Your wallet does not have any funds to pay for the transaction.\
                        \n\nIf you are interacting with a testnet, consider using the faucet.\
                        \n-> {target} network faucet: {faucet_url}/?address={first_account}\
                        \nIf you are interacting with a local node, consider providing a chainConfig which funds your account."
                    )
                } else {
                    "Your wallet does not have any funds to pay for the transaction.".to_string()
                };
                anyhow::bail!(message)
            }
            // TODO: Do this via forc-wallet once the functionality is exposed.
            // TODO: calculate the number of transactions to sign and ask the user to confirm.
            let question = format!(
                "Do you agree to sign {tx_count} transaction{}?",
                if tx_count > 1 { "s" } else { "" }
            );
            let accepted = ask_user_yes_no_question(&question)?;
            if !accepted {
                anyhow::bail!("User refused to sign");
            }
            let wallet = select_local_wallet_account(password, provider).await?;
            Ok(ForcClientAccount::Wallet(wallet))
        }
        SignerSelectionMode::Manual => {
            let secret_key = select_manual_secret_key(default_sign, signing_key)
                .ok_or_else(|| anyhow::anyhow!("missing manual secret key"))?;
            let signer = PrivateKeySigner::new(secret_key);
            let wallet = Wallet::new(signer, provider.clone());
            Ok(ForcClientAccount::Wallet(wallet))
        }
        SignerSelectionMode::AwsSigner(arn) => {
            // AWS credentials/region are resolved from the environment.
            let aws_config = AwsConfig::from_env().await;
            let aws_client = AwsClient::new(aws_config);
            let aws_signer = AwsSigner::new(aws_client, arn.clone(), provider.clone()).await?;
            let account = ForcClientAccount::KmsSigner(aws_signer);
            Ok(account)
        }
    }
}
pub(crate) async fn select_local_wallet_account(
password: &str,
provider: &Provider,
) -> Result<Wallet<Unlocked<PrivateKeySigner>>> {
let wallet_path = default_wallet_path();
let accounts = collect_user_accounts(&wallet_path, password, provider.url()).await?;
let account_balances = collect_account_balances(&accounts, provider).await?;
let consensus_parameters = provider.consensus_parameters().await?;
let base_asset_id = consensus_parameters.base_asset_id();
let selections =
format_base_asset_account_balances(&accounts, &account_balances, base_asset_id)?;
let mut account_index;
loop {
account_index = Select::with_theme(&ColorfulTheme::default())
.with_prompt("Wallet account")
.max_length(5)
.items(&selections[..])
.default(0)
.interact()?;
if accounts.contains_key(&account_index) {
break;
}
let options: Vec<String> = accounts
.keys()
.map(|key| {
let raw_addr = format!("0x{key}");
let checksum_addr = checksum_encode(&raw_addr)?;
Ok(checksum_addr)
})
.collect::<Result<Vec<_>>>()?;
println_warning(&format!(
"\"{}\" is not a valid account.\nPlease choose a valid option from {}",
account_index,
options.join(","),
));
}
let secret_key = secret_key_from_forc_wallet(&wallet_path, account_index, password)?;
let signer = PrivateKeySigner::new(secret_key);
let wallet = Wallet::new(signer, provider.clone());
Ok(wallet)
}
pub async fn update_proxy_contract_target(
account: &ForcClientAccount,
proxy_contract_id: ContractId,
new_target: ContractId,
) -> Result<CallResponse<()>> {
abigen!(Contract(name = "ProxyContract", abi = "{\"programType\":\"contract\",\"specVersion\":\"1.1\",\"encodingVersion\":\"1\",\"concreteTypes\":[{\"type\":\"()\",\"concreteTypeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"type\":\"enum standards::src5::AccessError\",\"concreteTypeId\":\"3f702ea3351c9c1ece2b84048006c8034a24cbc2bad2e740d0412b4172951d3d\",\"metadataTypeId\":1},{\"type\":\"enum standards::src5::State\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"metadataTypeId\":2},{\"type\":\"enum std::option::Option<struct std::contract_id::ContractId>\",\"concreteTypeId\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"metadataTypeId\":4,\"typeArguments\":[\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\"]},{\"type\":\"enum sway_libs::ownership::errors::InitializationError\",\"concreteTypeId\":\"1dfe7feadc1d9667a4351761230f948744068a090fe91b1bc6763a90ed5d3893\",\"metadataTypeId\":5},{\"type\":\"enum sway_libs::upgradability::errors::SetProxyOwnerError\",\"concreteTypeId\":\"3c6e90ae504df6aad8b34a93ba77dc62623e00b777eecacfa034a8ac6e890c74\",\"metadataTypeId\":6},{\"type\":\"str\",\"concreteTypeId\":\"8c25cb3686462e9a86d2883c5688a22fe738b0bbc85f458d2d2b5f3f667c6d5a\"},{\"type\":\"struct std::contract_id::ContractId\",\"concreteTypeId\":\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\",\"metadataTypeId\":9},{\"type\":\"struct sway_libs::upgradability::events::ProxyOwnerSet\",\"concreteTypeId\":\"96dd838b44f99d8ccae2a7948137ab6256c48ca4abc6168abc880de07fba7247\",\"metadataTypeId\":10},{\"type\":\"struct sway_libs::upgradability::events::ProxyTargetSet\",\"concreteTypeId\":\"1ddc0adda1270a016c08ffd614f29f599b4725407c8954c8b960bdf651a9a6c8\",\"metadataTypeId\":11}],\"metadataTypes\":[{\"type\":\"b256\",\"metadataTypeId\":0},{\"type\":\"enum 
standards::src5::AccessError\",\"metadataTypeId\":1,\"components\":[{\"name\":\"NotOwner\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum standards::src5::State\",\"metadataTypeId\":2,\"components\":[{\"name\":\"Uninitialized\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"name\":\"Initialized\",\"typeId\":3},{\"name\":\"Revoked\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum std::identity::Identity\",\"metadataTypeId\":3,\"components\":[{\"name\":\"Address\",\"typeId\":8},{\"name\":\"ContractId\",\"typeId\":9}]},{\"type\":\"enum std::option::Option\",\"metadataTypeId\":4,\"components\":[{\"name\":\"None\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"name\":\"Some\",\"typeId\":7}],\"typeParameters\":[7]},{\"type\":\"enum sway_libs::ownership::errors::InitializationError\",\"metadataTypeId\":5,\"components\":[{\"name\":\"CannotReinitialized\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum sway_libs::upgradability::errors::SetProxyOwnerError\",\"metadataTypeId\":6,\"components\":[{\"name\":\"CannotUninitialize\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"generic T\",\"metadataTypeId\":7},{\"type\":\"struct std::address::Address\",\"metadataTypeId\":8,\"components\":[{\"name\":\"bits\",\"typeId\":0}]},{\"type\":\"struct std::contract_id::ContractId\",\"metadataTypeId\":9,\"components\":[{\"name\":\"bits\",\"typeId\":0}]},{\"type\":\"struct sway_libs::upgradability::events::ProxyOwnerSet\",\"metadataTypeId\":10,\"components\":[{\"name\":\"new_proxy_owner\",\"typeId\":2}]},{\"type\":\"struct 
sway_libs::upgradability::events::ProxyTargetSet\",\"metadataTypeId\":11,\"components\":[{\"name\":\"new_target\",\"typeId\":9}]}],\"functions\":[{\"inputs\":[],\"name\":\"proxy_target\",\"output\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Returns the target contract of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Returns\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * [Option<ContractId>] - The new proxy contract to which all fallback calls will be passed or `None`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\"]}]},{\"inputs\":[{\"name\":\"new_target\",\"concreteTypeId\":\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\"}],\"name\":\"set_proxy_target\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Change the target contract of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method can only be called by the `proxy_owner`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Arguments\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * `new_target`: [ContractId] - The new proxy contract to which all fallback calls will be passed.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # 
Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When not called by `proxy_owner`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Write: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\",\"write\"]}]},{\"inputs\":[],\"name\":\"proxy_owner\",\"output\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Returns the owner of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Returns\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * [State] - Represents the state of ownership for this contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\"]}]},{\"inputs\":[],\"name\":\"initialize_proxy\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Initializes the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method sets the storage values using the values of the configurable constants `INITIAL_TARGET` and `INITIAL_OWNER`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This then allows methods that write to storage to be called.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method 
can only be called once.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When `storage::SRC14.proxy_owner` is not [State::Uninitialized].\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Writes: `2`\"]},{\"name\":\"storage\",\"arguments\":[\"write\"]}]},{\"inputs\":[{\"name\":\"new_proxy_owner\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\"}],\"name\":\"set_proxy_owner\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Changes proxy ownership to the passed State.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method can be used to transfer ownership between Identities or to revoke ownership.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Arguments\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * `new_proxy_owner`: [State] - The new state of the proxy ownership.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When the sender is not the current proxy owner.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When the new state of the proxy ownership is [State::Uninitialized].\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage 
Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Writes: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"write\"]}]}],\"loggedTypes\":[{\"logId\":\"4571204900286667806\",\"concreteTypeId\":\"3f702ea3351c9c1ece2b84048006c8034a24cbc2bad2e740d0412b4172951d3d\"},{\"logId\":\"2151606668983994881\",\"concreteTypeId\":\"1ddc0adda1270a016c08ffd614f29f599b4725407c8954c8b960bdf651a9a6c8\"},{\"logId\":\"2161305517876418151\",\"concreteTypeId\":\"1dfe7feadc1d9667a4351761230f948744068a090fe91b1bc6763a90ed5d3893\"},{\"logId\":\"4354576968059844266\",\"concreteTypeId\":\"3c6e90ae504df6aad8b34a93ba77dc62623e00b777eecacfa034a8ac6e890c74\"},{\"logId\":\"10870989709723147660\",\"concreteTypeId\":\"96dd838b44f99d8ccae2a7948137ab6256c48ca4abc6168abc880de07fba7247\"},{\"logId\":\"10098701174489624218\",\"concreteTypeId\":\"8c25cb3686462e9a86d2883c5688a22fe738b0bbc85f458d2d2b5f3f667c6d5a\"}],\"messagesTypes\":[],\"configurables\":[{\"name\":\"INITIAL_TARGET\",\"concreteTypeId\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"offset\":13368},{\"name\":\"INITIAL_OWNER\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"offset\":13320}]}",));
let proxy_contract = ProxyContract::new(proxy_contract_id, account.clone());
let result = proxy_contract
.methods()
.set_proxy_target(new_target)
.call()
.await?;
println_action_green(
"Updated",
&format!("proxy contract target to 0x{new_target}"),
);
Ok(result)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::{BTreeMap, HashMap};
    /// Only the base asset contributes to the displayed amount (scaled by
    /// 10^-9) and addresses are rendered in checksummed form.
    #[test]
    fn test_format_base_asset_account_balances() {
        let mut accounts_map: AccountsMap = BTreeMap::new();
        let address1 = fuel_tx::Address::from_str(
            "7bbd8a4ea06e94461b959ab18d35802bbac3cf47e2bf29195f7db2ce41630cd7",
        )
        .expect("address1");
        let address2 = fuel_tx::Address::from_str(
            "99bd8a4ea06e94461b959ab18d35802bbac3cf47e2bf29195f7db2ce41630cd7",
        )
        .expect("address2");
        let base_asset_id = AssetId::zeroed();
        accounts_map.insert(0, address1);
        accounts_map.insert(1, address2);
        let mut account_balances: AccountBalances = Vec::new();
        // First account: 1.5 units of the base asset plus an unrelated asset
        // that must not affect the displayed amount.
        let mut balance1 = HashMap::new();
        balance1.insert(base_asset_id.to_string(), 1_500_000_000);
        balance1.insert("other_asset".to_string(), 2_000_000_000);
        account_balances.push(balance1);
        // Second account: no base asset at all, so it should display 0 ETH.
        let mut balance2 = HashMap::new();
        balance2.insert("other_asset".to_string(), 3_000_000_000);
        account_balances.push(balance2);
        let address1_expected =
            "0x7bBD8a4ea06E94461b959aB18d35802BbAC3cf47e2bF29195F7db2CE41630CD7";
        let address2_expected =
            "0x99Bd8a4eA06E94461b959AB18d35802bBaC3Cf47E2Bf29195f7DB2cE41630cD7";
        let expected = vec![
            format!("[0] {address1_expected} - 1.5 ETH"),
            format!("[1] {address2_expected} - 0 ETH"),
        ];
        let result =
            format_base_asset_account_balances(&accounts_map, &account_balances, &base_asset_id)
                .unwrap();
        assert_eq!(result, expected);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/target.rs | forc-plugins/forc-client/src/util/target.rs | use crate::constants::{
DEVNET_ENDPOINT_URL, DEVNET_FAUCET_URL, MAINNET_ENDPOINT_URL, MAINNET_EXPLORER_URL, NODE_URL,
TESTNET_ENDPOINT_URL, TESTNET_EXPLORER_URL, TESTNET_FAUCET_URL,
};
use anyhow::{bail, Result};
use serde::{Deserialize, Serialize};
use std::str::FromStr;
/// Possible target values that forc-client can interact with.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub enum Target {
    /// The "Ignition" mainnet.
    Mainnet,
    /// The "Fuel Sepolia Testnet".
    Testnet,
    /// The devnet.
    Devnet,
    /// A locally running node (the default target).
    Local,
}
impl Default for Target {
fn default() -> Self {
Self::Local
}
}
impl Target {
    /// The node endpoint URL for this target.
    pub fn target_url(&self) -> String {
        let url = match self {
            Target::Mainnet => MAINNET_ENDPOINT_URL,
            Target::Testnet => TESTNET_ENDPOINT_URL,
            Target::Devnet => DEVNET_ENDPOINT_URL,
            Target::Local => NODE_URL,
        };
        url.to_string()
    }
    /// Reverse of [`Target::target_url`]: map a known endpoint URL back to
    /// its `Target`, or `None` for unrecognized URLs.
    pub fn from_target_url(target_url: &str) -> Option<Self> {
        match target_url {
            TESTNET_ENDPOINT_URL => Some(Target::Testnet),
            MAINNET_ENDPOINT_URL => Some(Target::Mainnet),
            DEVNET_ENDPOINT_URL => Some(Target::Devnet),
            NODE_URL => Some(Target::Local),
            _ => None,
        }
    }
    /// Shorthand constructor for [`Target::Local`].
    pub fn local() -> Self {
        Target::Local
    }
    /// Shorthand constructor for [`Target::Devnet`].
    pub fn devnet() -> Self {
        Target::Devnet
    }
    /// Shorthand constructor for [`Target::Testnet`].
    pub fn testnet() -> Self {
        Target::Testnet
    }
    /// Shorthand constructor for [`Target::Mainnet`].
    pub fn mainnet() -> Self {
        Target::Mainnet
    }
    /// Faucet URL for networks that have one; `None` for mainnet.
    pub fn faucet_url(&self) -> Option<String> {
        match self {
            Target::Mainnet => None,
            Target::Testnet => Some(TESTNET_FAUCET_URL.to_string()),
            Target::Devnet => Some(DEVNET_FAUCET_URL.to_string()),
            Target::Local => Some("http://localhost:3000".to_string()),
        }
    }
    /// Block explorer URL, where one exists for the target network.
    pub fn explorer_url(&self) -> Option<String> {
        // Exhaustive match (no `_` catch-all) so adding a new `Target`
        // variant forces this method to be revisited.
        match self {
            Target::Mainnet => Some(MAINNET_EXPLORER_URL.to_string()),
            Target::Testnet => Some(TESTNET_EXPLORER_URL.to_string()),
            Target::Devnet | Target::Local => None,
        }
    }
}
impl FromStr for Target {
    type Err = anyhow::Error;
    /// Parse a network/target name (as reported by the node or given on the
    /// CLI) into a `Target`. Errors with the list of accepted names.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "Fuel Sepolia Testnet" {
            return Ok(Target::Testnet);
        }
        if s == "Ignition" {
            return Ok(Target::Mainnet);
        }
        if s == "local" {
            return Ok(Target::Local);
        }
        if s == "Devnet" || s == "devnet" {
            return Ok(Target::Devnet);
        }
        bail!(
            "'{s}' is not a valid target name. Possible values: '{}', '{}', '{}', '{}'",
            Target::Testnet,
            Target::Mainnet,
            Target::Local,
            Target::Devnet,
        )
    }
}
impl std::fmt::Display for Target {
    /// Human-readable network name; these are the same names `FromStr` accepts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Target::Mainnet => "Ignition",
            Target::Testnet => "Fuel Sepolia Testnet",
            Target::Devnet => "Devnet",
            Target::Local => "local",
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/pkg.rs | forc-plugins/forc-client/src/util/pkg.rs | use anyhow::Result;
use forc_pkg::manifest::GenericManifestFile;
use forc_pkg::{self as pkg, manifest::ManifestFile, BuildOpts, BuildPlan};
use forc_util::user_forc_directory;
use pkg::{build_with_options, BuiltPackage, PackageManifestFile};
use std::fs::File;
use std::io::{Read, Write};
use std::path::PathBuf;
use std::{collections::HashMap, path::Path, sync::Arc};
/// The name of the folder that forc generated proxy contract project will reside at.
pub const GENERATED_CONTRACT_FOLDER_NAME: &str = ".generated_contracts";
/// Prebuilt bytecode of the proxy contract bundled with forc-client.
pub const PROXY_CONTRACT_BIN: &[u8] = include_bytes!("../../proxy_abi/proxy_contract.bin");
/// Storage slot layout shipped alongside `PROXY_CONTRACT_BIN`.
pub const PROXY_CONTRACT_STORAGE_SLOTS: &str =
    include_str!("../../proxy_abi/proxy_contract-storage_slots.json");
/// File name the proxy bytecode is written to inside a generated project.
pub const PROXY_BIN_FILE_NAME: &str = "proxy.bin";
/// File name the proxy storage slots are written to inside a generated project.
pub const PROXY_STORAGE_SLOTS_FILE_NAME: &str = "proxy-storage_slots.json";
/// Updates the given package manifest file such that the address field under the proxy table is
/// updated to the given value. The updated manifest file is written back to the same location,
/// without touching anything else such as comments etc.
/// A safety check is done to ensure the proxy table exists before attempting to update the value;
/// if it does not, the manifest is left unchanged.
pub(crate) fn update_proxy_address_in_manifest(
    address: &str,
    manifest: &PackageManifestFile,
) -> Result<()> {
    // Parse with `toml_edit` so existing formatting and comments survive the
    // round trip. Parsing happens unconditionally so a malformed manifest is
    // reported even when no `[proxy]` table is present.
    let toml = std::fs::read_to_string(manifest.path())?;
    let mut manifest_toml = toml.parse::<toml_edit::DocumentMut>()?;
    if manifest.proxy().is_none() {
        // No `[proxy]` table declared; nothing to update.
        return Ok(());
    }
    manifest_toml["proxy"]["address"] = toml_edit::value(address);
    // `fs::write` creates-or-truncates, matching the previous
    // `OpenOptions::write(true).truncate(true)` behavior.
    std::fs::write(manifest.path(), manifest_toml.to_string())?;
    Ok(())
}
/// Creates the generated proxy contract folder for `pkg_name` under the
/// user's forc directory, writes the prebuilt proxy bytecode and storage
/// slots files into it, and returns the folder path.
pub(crate) fn create_proxy_contract(pkg_name: &str) -> Result<PathBuf> {
    // Generated proxy projects are namespaced by the originating package name.
    let proxy_contract_dir = user_forc_directory()
        .join(GENERATED_CONTRACT_FOLDER_NAME)
        .join(format!("{pkg_name}-proxy"));
    std::fs::create_dir_all(&proxy_contract_dir)?;
    // Write the bundled proxy artifacts into the fresh project folder.
    for (file_name, contents) in [
        (PROXY_BIN_FILE_NAME, PROXY_CONTRACT_BIN),
        (
            PROXY_STORAGE_SLOTS_FILE_NAME,
            PROXY_CONTRACT_STORAGE_SLOTS.as_bytes(),
        ),
    ] {
        std::fs::write(proxy_contract_dir.join(file_name), contents)?;
    }
    Ok(proxy_contract_dir)
}
/// Build the package or workspace at `path` with `build_opts` and return the
/// built member packages, ordered by the build plan's member nodes.
pub(crate) fn built_pkgs(path: &Path, build_opts: &BuildOpts) -> Result<Vec<Arc<BuiltPackage>>> {
    let manifest_file = ManifestFile::from_dir(path)?;
    let lock_path = manifest_file.lock_path()?;
    let build_plan = BuildPlan::from_lock_and_manifests(
        &lock_path,
        &manifest_file.member_manifests()?,
        build_opts.pkg.locked,
        build_opts.pkg.offline,
        &build_opts.pkg.ipfs_node,
    )?;
    let graph = build_plan.graph();
    let built = build_with_options(build_opts, None)?;
    // Map each built member back to its pinned package so results can be
    // emitted in build-plan order rather than build order.
    let mut members: HashMap<&pkg::Pinned, Arc<_>> = built.into_members().collect();
    let mut built_pkgs = Vec::new();
    for member_index in build_plan.member_nodes() {
        let pkg = &graph[member_index];
        // Check if the current member is built.
        //
        // For individual members of the workspace, member nodes would be iterating
        // over all the members but only the relevant member would be built.
        if let Some(built_pkg) = members.remove(pkg) {
            built_pkgs.push(built_pkg);
        }
    }
    Ok(built_pkgs)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/mod.rs | forc-plugins/forc-client/src/util/mod.rs | pub mod account;
pub mod aws;
pub(crate) mod encode;
pub(crate) mod pkg;
pub(crate) mod target;
pub mod tx;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/util/account.rs | forc-plugins/forc-client/src/util/account.rs | use async_trait::async_trait;
use fuel_crypto::{Message, Signature};
use fuels::{
prelude::*,
types::{coin_type_id::CoinTypeId, input::Input},
};
use fuels_accounts::{
signers::private_key::PrivateKeySigner,
wallet::{Unlocked, Wallet},
Account,
};
use super::aws::AwsSigner;
#[derive(Clone, Debug)]
/// Set of different signers available to be used with `forc-client` operations.
pub enum ForcClientAccount {
    /// Local signer where the private key is owned locally. This can be
    /// generated through `forc-wallet` integration or manually by providing
    /// a private key.
    Wallet(Wallet<Unlocked<PrivateKeySigner>>),
    /// A KMS signer, specifically using the AWS KMS service. The signing key
    /// is managed by another entity for KMS signers. Messages are
    /// signed by the KMS entity. Signed transactions are retrieved
    /// and submitted to the node by `forc-client`.
    KmsSigner(AwsSigner),
}
impl Account for ForcClientAccount {
    /// Adds this account as a signer on the transaction builder.
    fn add_witnesses<Tb: TransactionBuilder>(&self, tb: &mut Tb) -> Result<()> {
        tb.add_signer(self.clone()).map(|_| ())
    }
}
#[async_trait]
impl ViewOnlyAccount for ForcClientAccount {
    /// The address of the underlying account.
    fn address(&self) -> Address {
        match self {
            ForcClientAccount::Wallet(wallet) => wallet.address(),
            // Fully qualified call: `AwsSigner` also exposes an `address`
            // via the `Signer` trait, so disambiguate explicitly.
            ForcClientAccount::KmsSigner(account) => {
                fuels_accounts::ViewOnlyAccount::address(account)
            }
        }
    }
    /// The provider this account talks to, or an error if none is attached.
    fn try_provider(&self) -> Result<&Provider> {
        match self {
            ForcClientAccount::Wallet(wallet) => wallet.try_provider(),
            ForcClientAccount::KmsSigner(account) => Ok(account.provider()),
        }
    }
    /// Delegates input (coin) selection for `amount` of `asset_id` to the
    /// underlying account implementation.
    async fn get_asset_inputs_for_amount(
        &self,
        asset_id: AssetId,
        amount: u128,
        excluded_coins: Option<Vec<CoinTypeId>>,
    ) -> Result<Vec<Input>> {
        match self {
            ForcClientAccount::Wallet(wallet) => {
                wallet
                    .get_asset_inputs_for_amount(asset_id, amount, excluded_coins)
                    .await
            }
            ForcClientAccount::KmsSigner(account) => {
                account
                    .get_asset_inputs_for_amount(asset_id, amount, excluded_coins)
                    .await
            }
        }
    }
}
#[async_trait]
impl Signer for ForcClientAccount {
    /// Signs `message` with the underlying key material: the local private
    /// key for `Wallet`, or the remote AWS KMS key for `KmsSigner`.
    async fn sign(&self, message: Message) -> Result<Signature> {
        match self {
            ForcClientAccount::Wallet(wallet) => wallet.signer().sign(message).await,
            ForcClientAccount::KmsSigner(account) => account.sign(message).await,
        }
    }
    /// The signing address.
    fn address(&self) -> Address {
        match self {
            ForcClientAccount::Wallet(wallet) => wallet.address(),
            // Fully qualified call: `AwsSigner` also implements
            // `ViewOnlyAccount::address`, so disambiguate explicitly.
            ForcClientAccount::KmsSigner(account) => fuels_core::traits::Signer::address(account),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/cmd/submit.rs | forc-plugins/forc-client/src/cmd/submit.rs | use crate::NodeTarget;
use devault::Devault;
use std::path::PathBuf;
forc_util::cli_examples! {
super::Command {
[ Submit a transaction from a json file => "forc submit {path}/mint.json" ]
[ Submit a transaction from a json file and wait for confirmation => "forc submit {path}/mint.json --await true" ]
[ Submit a transaction from a json file and get output in json => "forc submit {path}/mint.json --tx-status-json true" ]
[ Submit a transaction from a json file to testnet => "forc submit {path}/mint.json --testnet" ]
[ Submit a transaction from a json file to a local net => "forc submit {path}/mint.json --target local" ]
}
}
/// Submit a transaction to the specified fuel node.
#[derive(Debug, Default, clap::Parser)]
#[clap(about, version, after_help = help())]
pub struct Command {
#[clap(flatten)]
pub network: Network,
#[clap(flatten)]
pub tx_status: TxStatus,
/// Path to the Transaction that is to be submitted to the Fuel node.
///
/// Paths to files ending with `.json` will be deserialized from JSON.
/// Paths to files ending with `.bin` will be deserialized from bytes
/// using the `fuel_tx::Transaction::try_from_bytes` constructor.
pub tx_path: PathBuf,
}
/// Options related to networking.
#[derive(Debug, Devault, clap::Args)]
pub struct Network {
#[clap(flatten)]
pub node: NodeTarget,
/// Whether or not to await confirmation that the transaction has been committed.
///
/// When `true`, await commitment and output the transaction status.
/// When `false`, do not await confirmation and simply output the transaction ID.
#[clap(long = "await", default_value = "true", action(clap::ArgAction::Set))]
#[devault("true")]
pub await_: bool,
}
/// Options related to the transaction status.
#[derive(Debug, Default, clap::Args)]
pub struct TxStatus {
/// Output the resulting transaction status as JSON rather than the default output.
#[clap(
long = "tx-status-json",
default_value = "false",
action(clap::ArgAction::Set)
)]
pub json: bool,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/cmd/call.rs | forc-plugins/forc-client/src/cmd/call.rs | use crate::NodeTarget;
use clap::{Parser, ValueEnum};
use fuel_crypto::SecretKey;
use fuels::programs::calls::CallParameters;
use fuels_core::types::{Address, AssetId, ContractId};
use std::{io::Write, path::PathBuf, str::FromStr};
use url::Url;
pub use forc::cli::shared::{BuildOutput, BuildProfile, Minify, Pkg, Print};
pub use forc_tx::{Gas, Maturity};
#[derive(Debug, Clone)]
pub enum FuncType {
Selector(String),
// TODO: add support for function signatures - without ABI files
// ↳ gh issue: https://github.com/FuelLabs/sway/issues/6886
// Signature(String),
}
impl Default for FuncType {
fn default() -> Self {
FuncType::Selector(String::new())
}
}
impl FromStr for FuncType {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let s = s.trim().replace(' ', "");
if s.is_empty() {
return Err("Function signature cannot be empty".to_string());
}
Ok(FuncType::Selector(s.to_string()))
}
}
/// Execution mode for contract calls
#[derive(Debug, Clone, PartialEq, Default, ValueEnum)]
#[clap(rename_all = "kebab-case")]
pub enum ExecutionMode {
/// Execute a dry run - no state changes, no gas fees, wallet is not used or validated
#[default]
DryRun,
/// Execute in simulation mode - no state changes, estimates gas, wallet is used but not validated
Simulate,
/// Execute live on chain - state changes, gas fees apply, wallet is used and validated
Live,
}
/// Output format for call results
#[derive(Debug, Clone, PartialEq, Default, ValueEnum)]
#[clap(rename_all = "lowercase")]
pub enum OutputFormat {
/// Default formatted output
#[default]
Default,
/// Raw unformatted output
Raw,
/// JSON output with full tracing information (logs, errors, and result)
Json,
}
impl Write for OutputFormat {
fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> {
match self {
OutputFormat::Default => std::io::stdout().write(buf),
OutputFormat::Raw => std::io::stdout().write(buf),
OutputFormat::Json => Ok(buf.len()), // no-op for json
}
}
fn flush(&mut self) -> Result<(), std::io::Error> {
match self {
OutputFormat::Default => std::io::stdout().flush(),
OutputFormat::Raw => std::io::stdout().flush(),
OutputFormat::Json => Ok(()),
}
}
}
impl From<OutputFormat> for forc_tracing::TracingWriter {
fn from(format: OutputFormat) -> Self {
match format {
OutputFormat::Json => forc_tracing::TracingWriter::Json,
_ => forc_tracing::TracingWriter::Stdio,
}
}
}
/// Flags for specifying the caller account
#[derive(Debug, Default, Clone, Parser, serde::Deserialize, serde::Serialize)]
pub struct Caller {
/// Derive an account from a secret key to make the call
#[clap(long, env = "SIGNING_KEY", help_heading = "ACCOUNT OPTIONS")]
pub signing_key: Option<SecretKey>,
/// Use forc-wallet to make the call
#[clap(long, default_value = "false", help_heading = "ACCOUNT OPTIONS")]
pub wallet: bool,
}
/// Options for contract call parameters
#[derive(Debug, Default, Clone, Parser)]
pub struct CallParametersOpts {
/// Amount of native assets to forward with the call
#[clap(
long,
default_value = "0",
alias = "value",
help_heading = "CALL PARAMETERS"
)]
pub amount: u64,
/// Asset ID to forward with the call
#[clap(long, help_heading = "CALL PARAMETERS")]
pub asset_id: Option<AssetId>,
/// Amount of gas to forward with the call
#[clap(long, help_heading = "CALL PARAMETERS")]
pub gas_forwarded: Option<u64>,
}
impl From<CallParametersOpts> for CallParameters {
fn from(opts: CallParametersOpts) -> Self {
let mut params = CallParameters::default();
if opts.amount != 0 {
params = params.with_amount(opts.amount);
}
if let Some(asset_id) = opts.asset_id {
params = params.with_asset_id(asset_id);
}
if let Some(gas) = opts.gas_forwarded {
params = params.with_gas_forwarded(gas);
}
params
}
}
/// Operation for the call command
#[derive(Debug, Clone, PartialEq)]
pub enum AbiSource {
/// ABI from file path
File(PathBuf),
/// ABI from URL
Url(Url),
/// ABI as raw string
String(String),
}
impl std::fmt::Display for AbiSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
AbiSource::File(path) => write!(f, "{}", path.display()),
AbiSource::Url(url) => write!(f, "{url}"),
AbiSource::String(s) => write!(f, "{s}"),
}
}
}
impl TryFrom<String> for AbiSource {
type Error = String;
fn try_from(s: String) -> Result<Self, Self::Error> {
// First try to parse as URL
if let Ok(url) = Url::parse(&s) {
match url.scheme() {
"http" | "https" | "ipfs" => return Ok(AbiSource::Url(url)),
_ => {} // Continue to check other options
}
}
// Check if it looks like a JSON string (starts with '{' or '[')
let trimmed = s.trim();
if (trimmed.starts_with('{') && trimmed.ends_with('}'))
|| (trimmed.starts_with('[') && trimmed.ends_with(']'))
{
return Ok(AbiSource::String(s));
}
// Default to treating as file path
Ok(AbiSource::File(PathBuf::from(s)))
}
}
#[derive(Debug, Clone)]
pub enum Operation {
/// Call a specific contract function
CallFunction {
contract_id: ContractId,
abi: AbiSource,
function: FuncType,
function_args: Vec<String>,
},
/// List all functions in the contract
ListFunctions {
contract_id: ContractId,
abi: AbiSource,
},
/// Direct transfer of assets to a contract
DirectTransfer {
recipient: Address,
amount: u64,
asset_id: Option<AssetId>,
},
}
/// Perform Fuel RPC calls from the comfort of your command line.
#[derive(Debug, Parser, Clone)]
#[clap(bin_name = "forc call", version)]
#[clap(after_help = r#"
## EXAMPLES:
### Call a contract with function parameters
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
get_balance 0x0087675439e10a8351b1d5e4cf9d0ea6da77675623ff6b16470b5e3c58998423
```
### Call a contract with function parameters; additionally print logs, receipts and script json
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
get_balance 0x0087675439e10a8351b1d5e4cf9d0ea6da77675623ff6b16470b5e3c58998423 \
-vv
```
### Call a contract with address labels for better trace readability
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
transfer 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07 \
--label 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97:MainContract \
--label 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07:TokenContract \
-vv
```
### Call a contract without function parameters
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
get_name
```
### Call a contract that makes external contract calls
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
transfer 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07 \
--contracts 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07
```
### Call a contract with additional contract ABIs for better tracing
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
transfer 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07 \
--contract-abi 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07:./external-abi.json \
--contract-abi 0x1234:https://example.com/abi.json
```
### Call a contract in simulation mode
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
add 1 2 \
--mode simulate
```
### Call a contract in dry-run mode on custom node URL using explicit signing-key
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--node-url "http://127.0.0.1:4000/v1/graphql" \
--signing-key 0x... \
--abi ./contract-abi.json \
add 1 2 \
--mode dry-run
```
### Call a contract in live mode which performs state changes on testnet using forc-wallet
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--testnet \
--wallet \
--abi ./contract-abi.json \
add 1 2 \
--mode live
```
### Call a contract payable function which transfers value of native asset on mainnet
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
transfer 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07 \
--mode live \
--amount 100
```
### Call a contract payable function which transfers value of custom asset
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
transfer 0xf8f8b6283d7fa5b672b530cbb84fcccb4ff8dc40f8176ef4544ddb1f1952ad07 \
--amount 100 \
--asset-id 0x0087675439e10a8351b1d5e4cf9d0ea6da77675623ff6b16470b5e3c58998423 \
--live
```
### List all available functions in a contract
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
--list-functions
```
### Call a contract with inline ABI JSON string
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi '{"functions":[{"inputs":[],"name":"get_balance","output":{"name":"","type":"u64","typeArguments":null}}]}' \
get_balance
```
### Direct transfer of asset to a contract or address
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--amount 100 \
--mode live
```
### Call a contract with interactive debugger after transaction
```sh
forc call 0x0dcba78d7b09a1f77353f51367afd8b8ab94b5b2bb6c9437d9ba9eea47dede97 \
--abi ./contract-abi.json \
get_balance 0x0087675439e10a8351b1d5e4cf9d0ea6da77675623ff6b16470b5e3c58998423 \
--debug
```
"#)]
pub struct Command {
/// The contract ID to call
#[clap(help_heading = "CONTRACT")]
pub address: Address,
/// Path, URI, or raw JSON string for the ABI
/// Required when making function calls or listing functions
/// Can be a file path, HTTP/HTTPS URL, or raw JSON string
#[clap(long, value_parser = |s: &str| AbiSource::try_from(s.to_string()))]
pub abi: Option<AbiSource>,
/// Additional contract IDs and their ABI paths for better tracing and debugging.
/// Format: contract_id:abi_path (can be used multiple times)
/// Example: --contract-abi 0x123:./abi1.json --contract-abi 0x456:https://example.com/abi2.json
/// Contract IDs can be provided with or without 0x prefix
#[clap(long = "contract-abi", value_parser = parse_contract_abi, action = clap::ArgAction::Append, help_heading = "CONTRACT")]
pub contract_abis: Option<Vec<(ContractId, AbiSource)>>,
/// Label addresses in the trace output for better readability.
/// Format: address:label (can be used multiple times)
/// Example: --label 0x123:MainContract --label 0x456:TokenContract
/// Addresses can be provided with or without 0x prefix
#[clap(long, value_parser = parse_label, action = clap::ArgAction::Append, help_heading = "OUTPUT")]
pub label: Option<Vec<(ContractId, String)>>,
/// The function selector to call.
/// The function selector is the name of the function to call (e.g. "transfer").
/// Not required when --list-functions is specified or when --amount is provided for direct transfer
#[clap(help_heading = "FUNCTION")]
pub function: Option<String>,
/// Arguments to pass to the function
#[clap(help_heading = "FUNCTION")]
pub function_args: Vec<String>,
/// Network connection options
#[clap(flatten)]
pub node: NodeTarget,
/// Caller account options
#[clap(flatten)]
pub caller: Caller,
/// Call parameters
#[clap(flatten)]
pub call_parameters: CallParametersOpts,
/// Execution mode - determines if state changes are applied
/// - `dry-run`: No state changes, no gas fees, wallet is not used or validated
/// - `simulate`: No state changes, estimates gas, wallet is used but not validated
/// - `live`: State changes, gas fees apply, wallet is used and validated
#[clap(long, default_value = "dry-run", help_heading = "EXECUTION")]
pub mode: ExecutionMode,
/// List all available functions in the contract
#[clap(
long,
alias = "list-functions",
conflicts_with = "function",
help_heading = "OPERATION"
)]
pub list_functions: bool,
/// The gas price to use for the call; defaults to 0
#[clap(flatten)]
pub gas: Option<Gas>,
/// The external contract addresses to use for the call
/// If none are provided, the call will automatically populate external contracts by making dry-run calls
/// to the node, and extract the contract addresses based on the revert reason.
/// Use an empty string '' to explicitly specify no external contracts.
/// Multiple contract IDs can be provided separated by commas.
#[clap(
long,
alias = "contracts",
value_delimiter = ',',
help_heading = "CONTRACT"
)]
pub external_contracts: Option<Vec<String>>,
/// Output format for the call result
#[clap(long, short = 'o', default_value = "default", help_heading = "OUTPUT")]
pub output: OutputFormat,
/// Contract call variable output count
#[clap(long, alias = "variable-output", help_heading = "VARIABLE OUTPUT")]
pub variable_output: Option<usize>,
/// Set verbosity levels; currently only supports max 2 levels
/// - `-v=1`: Print decoded logs
/// - `-v=2`: Additionally print receipts and script json
#[clap(short = 'v', action = clap::ArgAction::Count, help_heading = "OUTPUT")]
pub verbosity: u8,
/// Start interactive debugger after transaction execution
#[clap(long, short = 'd', help_heading = "DEBUG")]
pub debug: bool,
}
impl Command {
/// Validate the command and determine the CLI operation
pub fn validate_and_get_operation(&self) -> Result<Operation, String> {
// Case 1: List functions
if self.list_functions {
let Some(abi) = &self.abi else {
return Err("ABI is required when using --list-functions".to_string());
};
return Ok(Operation::ListFunctions {
contract_id: (*self.address).into(),
abi: abi.to_owned(),
});
}
// Case 2: Direct transfer with amount
if self.function.is_none() && self.call_parameters.amount > 0 {
if self.mode != ExecutionMode::Live {
return Err("Direct transfers are only supported in live mode".to_string());
}
return Ok(Operation::DirectTransfer {
recipient: self.address,
amount: self.call_parameters.amount,
asset_id: self.call_parameters.asset_id,
});
}
// Case 3: Call function
if let Some(function) = &self.function {
let Some(abi) = &self.abi else {
return Err("ABI is required when calling a function".to_string());
};
let func_type = FuncType::from_str(function)?;
return Ok(Operation::CallFunction {
contract_id: (*self.address).into(),
abi: abi.to_owned(),
function: func_type,
function_args: self.function_args.to_owned(),
});
}
// No valid operation matched
Err("Either function selector, --list-functions flag, or non-zero --amount for direct transfers must be provided".to_string())
}
}
fn parse_contract_abi(s: &str) -> Result<(ContractId, AbiSource), String> {
let parts: Vec<&str> = s.trim().split(':').collect();
let [contract_id_str, abi_path_str] = parts.try_into().map_err(|_| {
format!("Invalid contract ABI format: '{s}'. Expected format: contract_id:abi_path")
})?;
let contract_id =
ContractId::from_str(&format!("0x{}", contract_id_str.trim_start_matches("0x")))
.map_err(|e| format!("Invalid contract ID '{contract_id_str}': {e}"))?;
let abi_path = AbiSource::try_from(abi_path_str.to_string())
.map_err(|e| format!("Invalid ABI path '{abi_path_str}': {e}"))?;
Ok((contract_id, abi_path))
}
fn parse_label(s: &str) -> Result<(ContractId, String), String> {
let parts: Vec<&str> = s.trim().split(':').collect();
let [contract_id_str, label] = parts
.try_into()
.map_err(|_| format!("Invalid label format: '{s}'. Expected format: contract_id:label"))?;
let contract_id =
ContractId::from_str(&format!("0x{}", contract_id_str.trim_start_matches("0x")))
.map_err(|e| format!("Invalid contract ID '{contract_id_str}': {e}"))?;
Ok((contract_id, label.to_string()))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_abi_source_try_from() {
let url_result = AbiSource::try_from("https://example.com/abi.json".to_string()).unwrap();
assert!(matches!(url_result, AbiSource::Url(_)));
let json_result = AbiSource::try_from(r#"{"functions": []}"#.to_string()).unwrap();
assert!(matches!(json_result, AbiSource::String(_)));
let array_result = AbiSource::try_from("[]".to_string()).unwrap();
assert!(matches!(array_result, AbiSource::String(_)));
let file_result = AbiSource::try_from("./contract-abi.json".to_string()).unwrap();
assert!(matches!(file_result, AbiSource::File(_)));
let file_url_result = AbiSource::try_from("file:///path/to/abi.json".to_string()).unwrap();
assert!(matches!(file_url_result, AbiSource::File(_)));
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/cmd/deploy.rs | forc-plugins/forc-client/src/cmd/deploy.rs | use crate::NodeTarget;
use clap::Parser;
use forc::cli::shared::IrCliOpt;
pub use forc::cli::shared::{BuildOutput, Minify, Pkg, Print};
use forc_pkg::BuildProfile;
pub use forc_tx::{Gas, Maturity};
pub use forc_util::tx_utils::Salt;
use fuel_crypto::SecretKey;
forc_util::cli_examples! {
super::Command {
[ Deploy a single contract => "forc deploy bc09bfa7a11a04ce42b0a5abf04fd437387ee49bf4561d575177e2946468b408" ]
[ Deploy a single contract from a different path => "forc deploy bc09bfa7a11a04ce42b0a5abf04fd437387ee49bf4561d575177e2946468b408 --path {path}" ]
[ Deploy to a custom network => "forc deploy --node-url https://testnet.fuel.network/graphql" ]
}
}
#[derive(Debug, Default, Parser)]
#[clap(bin_name = "forc deploy", version, after_help = help())]
pub struct Command {
#[clap(flatten)]
pub pkg: Pkg,
#[clap(flatten)]
pub minify: Minify,
#[clap(flatten)]
pub print: Print,
#[arg(long, value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
pub verify_ir: Option<Vec<String>>,
#[clap(flatten)]
pub gas: Gas,
#[clap(flatten)]
pub maturity: Maturity,
#[clap(flatten)]
pub node: NodeTarget,
/// Optional 256-bit hexadecimal literal(s) to redeploy contracts.
///
/// For a single contract, use `--salt <SALT>`, eg.: forc deploy --salt 0x0000000000000000000000000000000000000000000000000000000000000001
///
/// For a workspace with multiple contracts, use `--salt <CONTRACT_NAME>:<SALT>`
/// to specify a salt for each contract, eg.:
///
/// forc deploy --salt contract_a:0x0000000000000000000000000000000000000000000000000000000000000001
/// --salt contract_b:0x0000000000000000000000000000000000000000000000000000000000000002
#[clap(long)]
pub salt: Option<Vec<String>>,
/// Generate a default salt (0x0000000000000000000000000000000000000000000000000000000000000000) for the contract.
/// Useful for CI, to create reproducible deployments.
#[clap(long)]
pub default_salt: bool,
#[clap(flatten)]
pub build_output: BuildOutput,
/// The name of the build profile to use.
#[clap(long, default_value = BuildProfile::RELEASE)]
pub build_profile: String,
/// Sign the transaction with default signer that is pre-funded by fuel-core. Useful for testing against local node.
#[clap(long)]
pub default_signer: bool,
/// Deprecated in favor of `--default-signer`.
#[clap(long)]
pub unsigned: bool,
/// Submit the deployment transaction(s) without waiting for execution to complete.
#[clap(long)]
pub submit_only: bool,
/// Set the key to be used for signing.
pub signing_key: Option<SecretKey>,
/// Sign the deployment transaction manually.
#[clap(long)]
pub manual_signing: bool,
/// Override storage slot initialization.
///
/// By default, storage slots are initialized with the values defined in the storage block in
/// the contract. You can override the initialization by providing the file path to a JSON file
/// containing the overridden values.
///
/// The file format and key values should match the compiler-generated `*-storage_slots.json` file in the output
/// directory of the compiled contract.
///
/// Example: `forc deploy --override-storage-slots my_override.json`
///
/// my_override.json:
/// [
/// {
/// "key": "<key from out/debug/storage_slots.json>",
/// "value": "0000000000000000000000000000000000000000000000000000000000000001"
/// }
/// ]
#[clap(long, verbatim_doc_comment, name = "JSON_FILE_PATH")]
pub override_storage_slots: Option<String>,
#[clap(flatten)]
pub experimental: sway_features::CliFields,
/// AWS KMS signer arn. If present forc-deploy will automatically use AWS KMS signer instead of forc-wallet.
#[clap(long)]
pub aws_kms_signer: Option<String>,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/cmd/mod.rs | forc-plugins/forc-client/src/cmd/mod.rs | pub mod call;
pub mod deploy;
pub mod run;
pub mod submit;
pub use call::Command as Call;
pub use deploy::Command as Deploy;
pub use run::Command as Run;
pub use submit::Command as Submit;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/cmd/run.rs | forc-plugins/forc-client/src/cmd/run.rs | use crate::NodeTarget;
use clap::Parser;
use forc::cli::shared::IrCliOpt;
use fuel_crypto::SecretKey;
pub use super::submit::Network;
pub use forc::cli::shared::{BuildOutput, BuildProfile, Minify, Pkg, Print};
pub use forc_tx::{Gas, Maturity};
/// Run script project.
/// Crafts a script transaction then sends it to a running node.
#[derive(Debug, Default, Parser)]
#[clap(bin_name = "forc run", version)]
pub struct Command {
#[clap(flatten)]
pub pkg: Pkg,
#[clap(flatten)]
pub minify: Minify,
#[arg(long, value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
pub verify_ir: Option<Vec<String>>,
#[clap(flatten)]
pub print: Print,
#[clap(flatten)]
pub gas: Gas,
#[clap(flatten)]
pub maturity: Maturity,
#[clap(flatten)]
pub build_output: BuildOutput,
#[clap(flatten)]
pub build_profile: BuildProfile,
#[clap(flatten)]
pub node: NodeTarget,
/// Hex string of data to input to script.
#[clap(short, long)]
pub data: Option<String>,
/// Only craft transaction and print it out.
#[clap(long)]
pub dry_run: bool,
/// Pretty-print the outputs from the node.
#[clap(long = "pretty-print", short = 'r')]
pub pretty_print: bool,
/// 32-byte contract ID that will be called during the transaction.
#[clap(long = "contract")]
pub contract: Option<Vec<String>>,
/// Execute the transaction and return the final mutated transaction along with receipts
/// (which includes whether the transaction reverted or not). The transaction is not inserted
/// in the node's view of the blockchain, (i.e. it does not affect the chain state).
#[clap(long)]
pub simulate: bool,
/// Sign the transaction with default signer that is pre-funded by fuel-core. Useful for testing against local node.
#[clap(long)]
pub default_signer: bool,
/// Deprecated in favor of `--default-signer`.
#[clap(long)]
pub unsigned: bool,
/// Set the key to be used for signing.
pub signing_key: Option<SecretKey>,
/// Arguments to pass into main function with forc run.
#[clap(long)]
pub args: Option<Vec<String>>,
/// Start interactive debugger after transaction execution
#[clap(long, help_heading = "DEBUG")]
pub debug: bool,
#[clap(flatten)]
pub experimental: sway_features::CliFields,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/bin/submit.rs | forc-plugins/forc-client/src/bin/submit.rs | use clap::Parser;
use forc_tracing::{init_tracing_subscriber, println_error};
#[tokio::main]
async fn main() {
init_tracing_subscriber(Default::default());
let command = forc_client::cmd::Submit::parse();
if let Err(err) = forc_client::op::submit(command).await {
println_error(&format!("{err}"));
std::process::exit(1);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/bin/call.rs | forc-plugins/forc-client/src/bin/call.rs | use clap::Parser;
use forc_tracing::{init_tracing_subscriber, println_error, TracingSubscriberOptions};
#[tokio::main]
async fn main() {
let command = forc_client::cmd::Call::parse();
// Initialize tracing with verbosity from command
init_tracing_subscriber(TracingSubscriberOptions {
verbosity: Some(command.verbosity),
writer_mode: Some(command.output.clone().into()),
regex_filter: Some("forc_tracing".to_string()),
..Default::default()
});
let operation = match command.validate_and_get_operation() {
Ok(operation) => operation,
Err(err) => {
println_error(&err);
std::process::exit(1);
}
};
if let Err(err) = forc_client::op::call(operation, command).await {
println_error(&format!("{err}"));
std::process::exit(1);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/bin/deploy.rs | forc-plugins/forc-client/src/bin/deploy.rs | use clap::Parser;
use forc_tracing::{init_tracing_subscriber, println_error};
#[tokio::main]
async fn main() {
init_tracing_subscriber(Default::default());
let command = forc_client::cmd::Deploy::parse();
if let Err(err) = forc_client::op::deploy(command).await {
println_error(&format!("{err}"));
std::process::exit(1);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/bin/run.rs | forc-plugins/forc-client/src/bin/run.rs | use clap::Parser;
use forc_tracing::{init_tracing_subscriber, println_error};
#[tokio::main]
async fn main() {
init_tracing_subscriber(Default::default());
let command = forc_client::cmd::Run::parse();
if let Err(err) = forc_client::op::run(command).await {
println_error(&format!("{err}"));
std::process::exit(1);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/submit.rs | forc-plugins/forc-client/src/op/submit.rs | use crate::cmd;
use anyhow::Context;
use fuel_core_client::client::{types::TransactionStatus, FuelClient};
use fuel_crypto::fuel_types::canonical::Deserialize;
/// A command for submitting transactions to a Fuel network.
pub async fn submit(cmd: cmd::Submit) -> anyhow::Result<()> {
let tx = read_tx(&cmd.tx_path)?;
let node_url = cmd.network.node.get_node_url(&None)?;
let client = FuelClient::new(node_url)?;
if cmd.network.await_ {
let status = client
.submit_and_await_commit(&tx)
.await
.context("Submission of tx or awaiting commit failed")?;
if cmd.tx_status.json {
print_status_json(&status)?;
} else {
print_status(&status);
}
} else {
let id = client.submit(&tx).await.context("Failed to submit tx")?;
println!("{id}");
}
Ok(())
}
/// Deserialize a `Transaction` from the given file into memory.
pub fn read_tx(path: &std::path::Path) -> anyhow::Result<fuel_tx::Transaction> {
let file = std::fs::File::open(path)?;
let reader = std::io::BufReader::new(file);
fn has_extension(path: &std::path::Path, ext: &str) -> bool {
path.extension().and_then(|ex| ex.to_str()) == Some(ext)
}
let tx: fuel_tx::Transaction = if has_extension(path, "json") {
serde_json::from_reader(reader)?
} else if has_extension(path, "bin") {
let tx_bytes = std::fs::read(path)?;
fuel_tx::Transaction::from_bytes(&tx_bytes).map_err(anyhow::Error::msg)?
} else {
anyhow::bail!(r#"Unsupported transaction file extension, expected ".json" or ".bin""#);
};
Ok(tx)
}
/// Format the transaction status in a more human-friendly manner.
pub fn fmt_status(status: &TransactionStatus, s: &mut String) -> anyhow::Result<()> {
use chrono::TimeZone;
use std::fmt::Write;
match status {
TransactionStatus::Submitted { submitted_at } => {
writeln!(s, "Transaction Submitted at {:?}", submitted_at.0)?;
}
TransactionStatus::Success {
block_height,
time,
program_state,
..
} => {
let utc = chrono::Utc.timestamp_nanos(time.to_unix());
writeln!(s, "Transaction Succeeded")?;
writeln!(s, " Block ID: {block_height}")?;
writeln!(s, " Time: {utc}",)?;
writeln!(s, " Program State: {program_state:?}")?;
}
TransactionStatus::SqueezedOut { reason } => {
writeln!(s, "Transaction Squeezed Out: {reason}")?;
}
TransactionStatus::Failure {
block_height,
time,
reason,
program_state,
..
} => {
let utc = chrono::Utc.timestamp_nanos(time.to_unix());
writeln!(s, "Transaction Failed")?;
writeln!(s, " Reason: {reason}")?;
writeln!(s, " Block ID: {block_height}")?;
writeln!(s, " Time: {utc}")?;
writeln!(s, " Program State: {program_state:?}")?;
}
TransactionStatus::PreconfirmationSuccess {
total_gas,
transaction_id,
receipts,
..
} => {
writeln!(s, "Transaction Preconfirmatino Succeeded")?;
writeln!(s, " Total Gas: {total_gas}")?;
writeln!(s, " Transaction Id: {transaction_id}",)?;
writeln!(s, " Receipts: {receipts:?}")?;
}
TransactionStatus::PreconfirmationFailure {
total_gas,
transaction_id,
receipts,
reason,
..
} => {
writeln!(s, "Transaction Preconfirmation Failed")?;
writeln!(s, " Total Gas: {total_gas}")?;
writeln!(s, " Transaction Id: {transaction_id}",)?;
writeln!(s, " Receipts: {receipts:?}")?;
writeln!(s, " Reason: {reason:?}")?;
}
}
Ok(())
}
/// Print the status to stdout.
pub fn print_status(status: &TransactionStatus) {
    let mut rendered = String::new();
    fmt_status(status, &mut rendered).expect("formatting to `String` is infallible");
    println!("{rendered}");
}
/// Print the status to stdout in its JSON representation.
pub fn print_status_json(status: &TransactionStatus) -> anyhow::Result<()> {
    println!("{}", serde_json::to_string_pretty(status)?);
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/deploy.rs | forc-plugins/forc-client/src/op/deploy.rs | use crate::{
cmd,
constants::TX_SUBMIT_TIMEOUT_MS,
util::{
account::ForcClientAccount,
pkg::{built_pkgs, create_proxy_contract, update_proxy_address_in_manifest},
target::Target,
tx::{
check_and_create_wallet_at_default_path, prompt_forc_wallet_password, select_account,
update_proxy_contract_target, SignerSelectionMode,
},
},
};
use anyhow::{bail, Context, Result};
use forc::cli::shared::IrCliOpt;
use forc_pkg::{self as pkg, DumpOpts, PackageManifestFile};
use forc_pkg::{manifest::GenericManifestFile, MemberFilter};
use forc_tracing::{println_action_green, println_warning};
use forc_util::default_output_directory;
use forc_wallet::utils::default_wallet_path;
use fuel_abi_types::abi::program::Configurable;
use fuel_core_client::client::types::{ChainInfo, TransactionStatus};
use fuel_core_client::client::FuelClient;
use fuel_crypto::{fuel_types::ChainId, Hasher};
use fuel_tx::{Salt, Transaction};
use fuel_vm::prelude::*;
use fuels::{
macros::abigen,
programs::{
contract::{LoadConfiguration, StorageConfiguration},
executable::Executable,
},
types::transaction_builders::Blob,
};
use fuels_accounts::{provider::Provider, Account, ViewOnlyAccount};
use fuels_core::types::{transaction::TxPolicies, transaction_builders::CreateTransactionBuilder};
use futures::FutureExt;
use pkg::{BuildProfile, BuiltPackage};
use serde::{Deserialize, Serialize};
use std::{
collections::BTreeMap,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
time::Duration,
};
use sway_core::{asm_generation::ProgramABI, language::parsed::TreeType, BuildTarget, IrCli};
/// Default maximum contract size (in bytes) allowed for a single contract.
/// If the target contract's bytecode is bigger than this amount, forc-deploy
/// automatically splits it and deploys it in chunks (as blobs behind a loader
/// contract — see `deploy_chunked`).
const MAX_CONTRACT_SIZE: usize = 100_000;
/// Represents a deployed instance of a forc package.
/// Packages other than libraries are deployable through different mechanisms.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum DeployedPackage {
    /// A contract deployed on-chain (standard or chunked).
    Contract(DeployedContract),
    /// A script deployed as a blob with a generated loader.
    Script(DeployedExecutable),
    /// A predicate deployed as a blob with a generated loader.
    Predicate(DeployedExecutable),
}
/// Represents a deployed contract on the Fuel network.
#[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
pub struct DeployedContract {
    /// On-chain id of the deployed contract (the loader contract's id when
    /// the deployment was chunked).
    pub id: fuel_tx::ContractId,
    /// Id of the proxy contract fronting this contract, if one was deployed.
    pub proxy: Option<fuel_tx::ContractId>,
    /// True when the bytecode exceeded `MAX_CONTRACT_SIZE` and was deployed
    /// in chunks behind a loader.
    pub chunked: bool,
}
/// Represents a deployed executable (script or predicate) on the Fuel network.
/// Executables are deployed as blobs with generated loaders for efficiency.
#[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
pub struct DeployedExecutable {
    /// The generated loader bytecode (not the original executable bytecode).
    pub bytecode: Vec<u8>,
}
/// Per-chunk metadata recorded for a chunked deployment.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChunkInfo {
    /// Position of this chunk within the split bytecode.
    index: usize,
    /// Chunk size in bytes.
    size: usize,
    /// Hash of the chunk contents — NOTE(review): the producer of this struct
    /// is not visible in this file; confirm algorithm/encoding at the
    /// construction site.
    hash: String,
}
/// Aggregate metadata describing how an over-sized contract was split and
/// deployed in chunks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChunkedDeploymentInfo {
    /// Size of the original (un-split) contract bytecode, in bytes.
    original_contract_size: usize,
    /// Maximum chunk size used when splitting, in bytes.
    max_chunk_size: usize,
    /// Number of chunks produced.
    total_chunks: usize,
    /// Metadata for each individual chunk.
    chunks: Vec<ChunkInfo>,
    /// Id of the loader contract that loads the deployed chunks.
    loader_contract_id: String,
}
/// Distinguishes, in the recorded artifact, how a contract was deployed.
/// Serialized in lowercase ("standard" / "chunked").
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum DeploymentType {
    /// Single-transaction deployment of the full bytecode.
    Standard,
    /// Blob-chunked deployment behind a loader contract.
    Chunked,
}
/// Summary of a single contract deployment, written to disk as JSON by
/// [`DeploymentArtifact::to_file`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeploymentArtifact {
    /// Id of the deployment transaction, when available (omitted from JSON
    /// when `None`).
    #[serde(skip_serializing_if = "Option::is_none")]
    transaction_id: Option<String>,
    /// Salt used for the deployment.
    salt: String,
    /// URL of the node the deployment was submitted to.
    network_endpoint: String,
    /// Chain id of the target network.
    chain_id: ChainId,
    /// Deployed contract id.
    contract_id: String,
    /// Size of the deployed bytecode, in bytes.
    deployment_size: usize,
    /// Block height at which the deployment landed, when known.
    deployed_block_height: Option<u32>,
    /// Standard vs chunked deployment.
    deployment_type: DeploymentType,
    /// Chunking metadata, present only for chunked deployments (omitted from
    /// JSON when `None`).
    #[serde(skip_serializing_if = "Option::is_none")]
    chunked_deployment_info: Option<ChunkedDeploymentInfo>,
}
impl DeploymentArtifact {
    /// Serialize this artifact as pretty JSON to
    /// `<output_dir>/<pkg_name>-deployment-0x<contract_id>.json`, creating
    /// `output_dir` first if it does not exist.
    pub fn to_file(
        &self,
        output_dir: &Path,
        pkg_name: &str,
        contract_id: ContractId,
    ) -> Result<()> {
        if !output_dir.exists() {
            std::fs::create_dir_all(output_dir)?;
        }
        let file_stem = format!("{pkg_name}-deployment-0x{contract_id}");
        let artifact_path = output_dir.join(file_stem).with_extension("json");
        let artifact_file = std::fs::File::create(artifact_path)?;
        serde_json::to_writer_pretty(&artifact_file, self)?;
        Ok(())
    }
}
type ContractSaltMap = BTreeMap<String, Salt>;
/// Takes the contract member salt inputs passed via the --salt option, validates them against
/// the manifests and returns a ContractSaltMap (BTreeMap of contract names to salts).
///
/// Errors on a malformed salt, a duplicate salt for the same contract, or a
/// salt that clashes with one already declared in a manifest's contract deps.
fn validate_and_parse_salts<'a>(
    salt_args: &[String],
    manifests: impl Iterator<Item = &'a PackageManifestFile>,
) -> Result<ContractSaltMap> {
    let mut contract_salt_map = BTreeMap::default();
    // Parse all the salt arguments first, and exit if there are errors in this step.
    for salt_arg in salt_args {
        if let Some((given_contract_name, salt)) = salt_arg.split_once(':') {
            // Propagate parse failures instead of panicking: the salt string
            // is user-provided CLI input.
            let salt = salt.parse::<Salt>().map_err(|e| anyhow::anyhow!(e))?;
            if let Some(old) = contract_salt_map.insert(given_contract_name.to_string(), salt) {
                bail!("2 salts provided for contract '{given_contract_name}':\n {old}\n {salt}");
            };
        } else {
            bail!("Invalid salt provided - salt must be in the form <CONTRACT_NAME>:<SALT> when deploying a workspace");
        }
    }
    // Reject salts that conflict with ones already pinned in a manifest.
    for manifest in manifests {
        for (dep_name, contract_dep) in manifest.contract_deps() {
            let dep_pkg_name = contract_dep.dependency.package().unwrap_or(dep_name);
            if let Some(declared_salt) = contract_salt_map.get(dep_pkg_name) {
                bail!(
                    "Redeclaration of salt using the option '--salt' while a salt exists for contract '{}' \
                    under the contract dependencies of the Forc.toml manifest for '{}'\n\
                    Existing salt: '0x{}',\nYou declared: '0x{}'\n",
                    dep_pkg_name,
                    manifest.project_name(),
                    contract_dep.salt,
                    declared_salt,
                );
            }
        }
    }
    Ok(contract_salt_map)
}
/// Depending on the cli options user passed, either returns storage slots from
/// compiled package, or the ones user provided as overrides.
fn resolve_storage_slots(
    command: &cmd::Deploy,
    compiled: &BuiltPackage,
) -> Result<Vec<fuel_tx::StorageSlot>> {
    let mut slots = match &command.override_storage_slots {
        // User supplied a JSON override file: read and parse it.
        Some(override_file) => {
            let raw = std::fs::read_to_string(override_file)?;
            serde_json::from_str::<Vec<StorageSlot>>(&raw)?
        }
        // No override: use the slots produced by compilation.
        None => compiled.storage_slots.clone(),
    };
    slots.sort();
    Ok(slots)
}
/// Creates blobs from the contract to deploy contracts that are larger than
/// maximum contract size. Created blobs are deployed, and a loader contract is
/// generated such that it loads all the deployed blobs, and provides the user
/// a single contract (loader contract that loads the blobs) to call into.
async fn deploy_chunked(
    command: &cmd::Deploy,
    compiled: &BuiltPackage,
    salt: Salt,
    account: &ForcClientAccount,
    provider: &Provider,
    pkg_name: &str,
) -> anyhow::Result<ContractId> {
    println_action_green("Deploying", &format!("contract {pkg_name} chunks"));
    let storage_slots = resolve_storage_slots(command, compiled)?;
    let tx_policies = tx_policies_from_cmd(command);
    let node_url = provider.url();
    let chain_info = FuelClient::new(node_url)?.chain_info().await?;
    // Split the bytecode into blobs of at most MAX_CONTRACT_SIZE bytes each.
    let blobs = compiled
        .bytecode
        .bytes
        .chunks(MAX_CONTRACT_SIZE)
        .map(|part| Blob::new(part.to_vec()))
        .collect();
    // Deploy the blobs behind a single loader contract; its id is what the
    // user calls into.
    let loader =
        fuels::programs::contract::Contract::loader_from_blobs(blobs, salt, storage_slots)?;
    let contract_id = loader.deploy(account, tx_policies).await?.contract_id;
    // Record deployment metadata for the chunked deployment.
    create_chunked_deployment_artifact(
        contract_id,
        salt,
        node_url,
        chain_info,
        compiled,
        command,
        &compiled.descriptor.manifest_file,
    )?;
    Ok(contract_id)
}
/// Deploys a new proxy contract for the given package.
///
/// The proxy's ABI is inlined below (its doc-comments mention SRC14 storage);
/// the proxy binary produced by `create_proxy_contract` is deployed with the
/// package's storage slots merged in, then initialized so it targets
/// `impl_contract` and is owned by the deploying account.
async fn deploy_new_proxy(
    command: &cmd::Deploy,
    pkg_name: &str,
    pkg_storage_slots: &[StorageSlot],
    impl_contract: &fuel_tx::ContractId,
    provider: &Provider,
    account: &ForcClientAccount,
) -> Result<ContractId> {
    // Generate Rust bindings (`ProxyContract`, `ProxyContractConfigurables`)
    // from the inlined JSON ABI of the proxy contract.
    abigen!(Contract(name = "ProxyContract", abi = "{\"programType\":\"contract\",\"specVersion\":\"1.1\",\"encodingVersion\":\"1\",\"concreteTypes\":[{\"type\":\"()\",\"concreteTypeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"type\":\"enum standards::src5::AccessError\",\"concreteTypeId\":\"3f702ea3351c9c1ece2b84048006c8034a24cbc2bad2e740d0412b4172951d3d\",\"metadataTypeId\":1},{\"type\":\"enum standards::src5::State\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"metadataTypeId\":2},{\"type\":\"enum std::option::Option<struct std::contract_id::ContractId>\",\"concreteTypeId\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"metadataTypeId\":4,\"typeArguments\":[\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\"]},{\"type\":\"enum sway_libs::ownership::errors::InitializationError\",\"concreteTypeId\":\"1dfe7feadc1d9667a4351761230f948744068a090fe91b1bc6763a90ed5d3893\",\"metadataTypeId\":5},{\"type\":\"enum sway_libs::upgradability::errors::SetProxyOwnerError\",\"concreteTypeId\":\"3c6e90ae504df6aad8b34a93ba77dc62623e00b777eecacfa034a8ac6e890c74\",\"metadataTypeId\":6},{\"type\":\"str\",\"concreteTypeId\":\"8c25cb3686462e9a86d2883c5688a22fe738b0bbc85f458d2d2b5f3f667c6d5a\"},{\"type\":\"struct std::contract_id::ContractId\",\"concreteTypeId\":\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\",\"metadataTypeId\":9},{\"type\":\"struct sway_libs::upgradability::events::ProxyOwnerSet\",\"concreteTypeId\":\"96dd838b44f99d8ccae2a7948137ab6256c48ca4abc6168abc880de07fba7247\",\"metadataTypeId\":10},{\"type\":\"struct sway_libs::upgradability::events::ProxyTargetSet\",\"concreteTypeId\":\"1ddc0adda1270a016c08ffd614f29f599b4725407c8954c8b960bdf651a9a6c8\",\"metadataTypeId\":11}],\"metadataTypes\":[{\"type\":\"b256\",\"metadataTypeId\":0},{\"type\":\"enum 
standards::src5::AccessError\",\"metadataTypeId\":1,\"components\":[{\"name\":\"NotOwner\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum standards::src5::State\",\"metadataTypeId\":2,\"components\":[{\"name\":\"Uninitialized\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"name\":\"Initialized\",\"typeId\":3},{\"name\":\"Revoked\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum std::identity::Identity\",\"metadataTypeId\":3,\"components\":[{\"name\":\"Address\",\"typeId\":8},{\"name\":\"ContractId\",\"typeId\":9}]},{\"type\":\"enum std::option::Option\",\"metadataTypeId\":4,\"components\":[{\"name\":\"None\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"name\":\"Some\",\"typeId\":7}],\"typeParameters\":[7]},{\"type\":\"enum sway_libs::ownership::errors::InitializationError\",\"metadataTypeId\":5,\"components\":[{\"name\":\"CannotReinitialized\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum sway_libs::upgradability::errors::SetProxyOwnerError\",\"metadataTypeId\":6,\"components\":[{\"name\":\"CannotUninitialize\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"generic T\",\"metadataTypeId\":7},{\"type\":\"struct std::address::Address\",\"metadataTypeId\":8,\"components\":[{\"name\":\"bits\",\"typeId\":0}]},{\"type\":\"struct std::contract_id::ContractId\",\"metadataTypeId\":9,\"components\":[{\"name\":\"bits\",\"typeId\":0}]},{\"type\":\"struct sway_libs::upgradability::events::ProxyOwnerSet\",\"metadataTypeId\":10,\"components\":[{\"name\":\"new_proxy_owner\",\"typeId\":2}]},{\"type\":\"struct 
sway_libs::upgradability::events::ProxyTargetSet\",\"metadataTypeId\":11,\"components\":[{\"name\":\"new_target\",\"typeId\":9}]}],\"functions\":[{\"inputs\":[],\"name\":\"proxy_target\",\"output\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Returns the target contract of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Returns\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * [Option<ContractId>] - The new proxy contract to which all fallback calls will be passed or `None`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\"]}]},{\"inputs\":[{\"name\":\"new_target\",\"concreteTypeId\":\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\"}],\"name\":\"set_proxy_target\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Change the target contract of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method can only be called by the `proxy_owner`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Arguments\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * `new_target`: [ContractId] - The new proxy contract to which all fallback calls will be passed.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # 
Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When not called by `proxy_owner`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Write: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\",\"write\"]}]},{\"inputs\":[],\"name\":\"proxy_owner\",\"output\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Returns the owner of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Returns\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * [State] - Represents the state of ownership for this contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\"]}]},{\"inputs\":[],\"name\":\"initialize_proxy\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Initializes the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method sets the storage values using the values of the configurable constants `INITIAL_TARGET` and `INITIAL_OWNER`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This then allows methods that write to storage to be called.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method 
can only be called once.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When `storage::SRC14.proxy_owner` is not [State::Uninitialized].\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Writes: `2`\"]},{\"name\":\"storage\",\"arguments\":[\"write\"]}]},{\"inputs\":[{\"name\":\"new_proxy_owner\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\"}],\"name\":\"set_proxy_owner\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Changes proxy ownership to the passed State.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method can be used to transfer ownership between Identities or to revoke ownership.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Arguments\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * `new_proxy_owner`: [State] - The new state of the proxy ownership.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When the sender is not the current proxy owner.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When the new state of the proxy ownership is [State::Uninitialized].\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage 
Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Writes: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"write\"]}]}],\"loggedTypes\":[{\"logId\":\"4571204900286667806\",\"concreteTypeId\":\"3f702ea3351c9c1ece2b84048006c8034a24cbc2bad2e740d0412b4172951d3d\"},{\"logId\":\"2151606668983994881\",\"concreteTypeId\":\"1ddc0adda1270a016c08ffd614f29f599b4725407c8954c8b960bdf651a9a6c8\"},{\"logId\":\"2161305517876418151\",\"concreteTypeId\":\"1dfe7feadc1d9667a4351761230f948744068a090fe91b1bc6763a90ed5d3893\"},{\"logId\":\"4354576968059844266\",\"concreteTypeId\":\"3c6e90ae504df6aad8b34a93ba77dc62623e00b777eecacfa034a8ac6e890c74\"},{\"logId\":\"10870989709723147660\",\"concreteTypeId\":\"96dd838b44f99d8ccae2a7948137ab6256c48ca4abc6168abc880de07fba7247\"},{\"logId\":\"10098701174489624218\",\"concreteTypeId\":\"8c25cb3686462e9a86d2883c5688a22fe738b0bbc85f458d2d2b5f3f667c6d5a\"}],\"messagesTypes\":[],\"configurables\":[{\"name\":\"INITIAL_TARGET\",\"concreteTypeId\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"offset\":13368},{\"name\":\"INITIAL_OWNER\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"offset\":13320}]}",));
    // Output directory holding the proxy's binary and storage-slot file.
    let proxy_dir_output = create_proxy_contract(pkg_name)?;
    let address = account.address();
    // Add the combined storage slots from the original contract and the proxy contract.
    let proxy_storage_path = proxy_dir_output.join("proxy-storage_slots.json");
    let storage_configuration = StorageConfiguration::default()
        .add_slot_overrides(pkg_storage_slots.iter().cloned())
        .add_slot_overrides_from_file(proxy_storage_path)?;
    // Point the proxy at the implementation contract and make the deploying
    // account its initial owner (applied on `initialize_proxy` below).
    let configurables = ProxyContractConfigurables::default()
        .with_INITIAL_TARGET(Some(*impl_contract))?
        .with_INITIAL_OWNER(State::Initialized(address.into()))?;
    let configuration = LoadConfiguration::default()
        .with_storage_configuration(storage_configuration)
        .with_configurables(configurables);
    let tx_policies = tx_policies_from_cmd(command);
    let proxy_contract_id: ContractId = fuels::programs::contract::Contract::load_from(
        proxy_dir_output.join("proxy.bin"),
        configuration,
    )?
    .deploy(account, tx_policies)
    .await?
    .contract_id;
    // Build an explorer link when the target network has one; otherwise print
    // just the contract id.
    let chain_info = provider.chain_info().await?;
    let target = Target::from_str(&chain_info.name).unwrap_or_default();
    let contract_url = match target.explorer_url() {
        Some(explorer_url) => format!("{explorer_url}/contract/0x"),
        None => "".to_string(),
    };
    println_action_green(
        "Finished",
        &format!("deploying proxy contract for {pkg_name} {contract_url}{proxy_contract_id}"),
    );
    // One-time initialization: per the ABI docs, this writes INITIAL_TARGET
    // and INITIAL_OWNER into the proxy's storage.
    let instance = ProxyContract::new(proxy_contract_id, account.clone());
    instance.methods().initialize_proxy().call().await?;
    println_action_green("Initialized", &format!("proxy contract for {pkg_name}"));
    Ok(proxy_contract_id)
}
/// Builds and deploys contracts, scripts, and predicates from the given path or workspace.
///
/// Contracts are deployed directly, while scripts and predicates are deployed as blobs with generated loaders.
///
/// Returns a vector of `DeployedPackage` representing all successful deployments.
pub async fn deploy(command: cmd::Deploy) -> Result<Vec<DeployedPackage>> {
if command.unsigned {
println_warning("--unsigned flag is deprecated, please prefer using --default-signer. Assuming `--default-signer` is passed. This means your transaction will be signed by an account that is funded by fuel-core by default for testing purposes.");
}
let curr_dir = if let Some(ref path) = command.pkg.path {
PathBuf::from(path)
} else {
std::env::current_dir()?
};
let build_opts = build_opts_from_cmd(&command, MemberFilter::default());
let built_pkgs = built_pkgs(&curr_dir, &build_opts)?;
let mut deployed_packages = Vec::new();
let contracts_to_deploy = built_pkgs
.iter()
.filter(|pkg| {
pkg.descriptor
.manifest_file
.check_program_type(&[TreeType::Contract])
.is_ok()
})
.cloned()
.collect::<Vec<_>>();
let scripts_to_deploy = built_pkgs
.iter()
.filter(|pkg| {
pkg.descriptor
.manifest_file
.check_program_type(&[TreeType::Script])
.is_ok()
})
.cloned()
.collect::<Vec<_>>();
let predicates_to_deploy = built_pkgs
.iter()
.filter(|pkg| {
pkg.descriptor
.manifest_file
.check_program_type(&[TreeType::Predicate])
.is_ok()
})
.cloned()
.collect::<Vec<_>>();
if contracts_to_deploy.is_empty()
&& scripts_to_deploy.is_empty()
&& predicates_to_deploy.is_empty()
{
println_warning("No deployable package was found in the current directory.");
} else {
deployed_packages.extend(
deploy_contracts(&command, &contracts_to_deploy)
.await?
.into_iter()
.map(DeployedPackage::Contract),
);
deployed_packages.extend(
deploy_executables(&command, &scripts_to_deploy)
.await?
.into_iter()
.map(DeployedPackage::Script),
);
deployed_packages.extend(
deploy_executables(&command, &predicates_to_deploy)
.await?
.into_iter()
.map(DeployedPackage::Predicate),
);
}
Ok(deployed_packages)
}
/// Builds and deploys executable (script and predicate) package(s) as blobs,
/// and generates a loader for each of them.
///
/// For each package this uploads the bytecode as a blob, writes the generated
/// loader bytecode (and, when present, an offset-adjusted loader ABI) next to
/// the package's build output, and for predicates also saves the loader root.
pub async fn deploy_executables(
    command: &cmd::Deploy,
    executables_to_deploy: &[Arc<BuiltPackage>],
) -> Result<Vec<DeployedExecutable>> {
    let mut deployed_executable = vec![];
    if executables_to_deploy.is_empty() {
        return Ok(deployed_executable);
    }
    let node_url = validate_and_get_node_url(command, executables_to_deploy).await?;
    // We will have 1 transaction per executable as each deployment uses a single blob.
    let tx_count = executables_to_deploy.len();
    let account = setup_deployment_account(command, &node_url, tx_count).await?;
    for pkg in executables_to_deploy {
        let script = Executable::from_bytes(pkg.bytecode.bytes.clone());
        let loader = script.convert_to_loader()?;
        println_action_green("Uploading", "blob containing executable bytecode.");
        loader.upload_blob(account.clone()).await?;
        println_action_green("Generating", "loader bytecode for the uploaded executable.");
        let loader_bytecode = loader.code();
        let pkg_name = &pkg.descriptor.name;
        let out_dir = pkg.descriptor.manifest_file.dir().join("out");
        let bin_path = out_dir.join(format!("{pkg_name}-loader.bin"));
        std::fs::write(&bin_path, &loader_bytecode)?;
        println_action_green(
            "Saved",
            &format!("loader bytecode at {}", bin_path.display()),
        );
        let loader_configurables_offset = loader.configurables_offset_in_code();
        // Calculate the offset shift to adjust the configurables in the abi.
        if let ProgramABI::Fuel(mut fuel_abi) = pkg.program_abi.clone() {
            println_action_green("Generating", "loader abi for the uploaded executable.");
            let json_abi_path = out_dir.join(format!("{pkg_name}-loader-abi.json"));
            // Propagate a malformed-binary error instead of panicking: the
            // bytecode comes from the user's build output.
            let original_configurables_section =
                extract_configurables_offset(&pkg.bytecode.bytes)?;
            // NOTE(review): assumes the loader's configurables section never
            // starts after the original's; otherwise this subtraction
            // underflows — confirm against `convert_to_loader`.
            let offset_shift = original_configurables_section - loader_configurables_offset;
            // if there are configurables in the abi we need to shift them by `offset_shift`.
            let configurables = fuel_abi.configurables.clone().map(|configs| {
                configs
                    .into_iter()
                    .map(|config| Configurable {
                        offset: config.offset - offset_shift as u64,
                        // Functional update moves the remaining fields; the
                        // previous `config.clone()` here was redundant.
                        ..config
                    })
                    .collect()
            });
            fuel_abi.configurables = configurables;
            let json_string = serde_json::to_string_pretty(&fuel_abi)?;
            std::fs::write(json_abi_path, json_string)?;
        }
        // If the executable is a predicate, we also want to display and save the predicate root.
        if pkg
            .descriptor
            .manifest_file
            .program_type()
            .with_context(|| {
                "error while trying to retrieve program type for executable deployment."
            })?
            == TreeType::Predicate
        {
            // Calculate the root.
            let root = format!("0x{}", fuel_tx::Input::predicate_owner(&loader_bytecode));
            // Root files are named in `pkg-name-root` format, since this is a
            // loader we are also adding an identifier to differentiate it from
            // the root of the "original" predicate.
            let root_file_name = format!("{}-loader-root", &pkg_name);
            let root_path = out_dir.join(root_file_name);
            std::fs::write(&root_path, &root)?;
            println_action_green(
                "Saved",
                &format!("loader root ({}) at {}", root, root_path.display()),
            );
        }
        let deployed = DeployedExecutable {
            bytecode: loader_bytecode,
        };
        deployed_executable.push(deployed);
        println_action_green("Finished", &format!("deploying executable {pkg_name}"));
    }
    Ok(deployed_executable)
}
// This helper is borrowed from `fuels::programs::assembly`
/// Reads the configurables-section offset from a compiled binary: bytes
/// 16..24 of the header, interpreted as a big-endian u64.
fn extract_configurables_offset(binary: &[u8]) -> Result<usize> {
    match binary.get(16..24) {
        Some(raw) => {
            let bytes: [u8; 8] = raw.try_into().expect("slice is exactly 8 bytes");
            Ok(u64::from_be_bytes(bytes) as usize)
        }
        // `get` returns None exactly when the binary is shorter than 24 bytes.
        None => anyhow::bail!(
            "given binary is too short to contain a configurable offset, len: {}",
            binary.len()
        ),
    }
}
/// Builds and deploys contract(s). If the given path corresponds to a workspace, all deployable members
/// will be built and deployed.
///
/// Upon success, returns the ID of each deployed contract in order of deployment.
///
/// When deploying a single contract, only that contract's ID is returned.
pub async fn deploy_contracts(
command: &cmd::Deploy,
contracts_to_deploy: &[Arc<BuiltPackage>],
) -> Result<Vec<DeployedContract>> {
let mut deployed_contracts = Vec::new();
if contracts_to_deploy.is_empty() {
return Ok(deployed_contracts);
}
let contract_salt_map = if let Some(salt_input) = &command.salt {
// If we're building 1 package, we just parse the salt as a string, ie. 0x00...
// If we're building >1 package, we must parse the salt as a pair of strings, ie. contract_name:0x00...
if contracts_to_deploy.len() > 1 {
let map = validate_and_parse_salts(
salt_input,
contracts_to_deploy
.iter()
.map(|b| &b.descriptor.manifest_file),
)?;
Some(map)
} else {
if salt_input.len() > 1 {
bail!("More than 1 salt was specified when deploying a single contract");
}
// OK to index into salt_input and built_pkgs_with_manifest here,
// since both are known to be len 1.
let salt = salt_input[0]
.parse::<Salt>()
.map_err(|e| anyhow::anyhow!(e))
.unwrap();
let mut contract_salt_map = ContractSaltMap::default();
contract_salt_map.insert(
contracts_to_deploy[0]
.descriptor
.manifest_file
.project_name()
.to_string(),
salt,
);
Some(contract_salt_map)
}
} else {
None
};
let node_url = validate_and_get_node_url(command, contracts_to_deploy).await?;
let provider = Provider::connect(node_url.clone()).await?;
// Confirmation step. Summarize the transaction(s) for the deployment.
let account = confirm_transaction_details(
contracts_to_deploy,
command,
node_url.clone(),
MAX_CONTRACT_SIZE,
)
.await?;
for pkg in contracts_to_deploy {
let salt = match (&contract_salt_map, command.default_salt) {
(Some(map), false) => {
if let Some(salt) = map.get(pkg.descriptor.manifest_file.project_name()) {
*salt
} else {
Default::default()
}
}
(None, true) => Default::default(),
(None, false) => rand::random(),
(Some(_), true) => {
bail!("Both `--salt` and `--default-salt` were specified: must choose one")
}
};
let bytecode_size = pkg.bytecode.bytes.len();
let deployed_contract_id = if bytecode_size > MAX_CONTRACT_SIZE {
// Deploy chunked
let node_url = command
.node
.get_node_url(&pkg.descriptor.manifest_file.network)?;
let provider = Provider::connect(node_url).await?;
deploy_chunked(
command,
pkg,
salt,
&account,
&provider,
&pkg.descriptor.name,
)
.await?
} else {
deploy_pkg(command, pkg, salt, &provider, &account).await?
};
let proxy_id = match &pkg.descriptor.manifest_file.proxy {
Some(forc_pkg::manifest::Proxy {
enabled: true,
address: Some(proxy_addr),
}) => {
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/mod.rs | forc-plugins/forc-client/src/op/mod.rs | pub mod call;
mod deploy;
mod run;
mod submit;
pub use call::call;
pub use deploy::{deploy, DeployedContract, DeployedExecutable, DeployedPackage};
pub use run::run;
pub use submit::submit;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/list_functions.rs | forc-plugins/forc-client/src/op/call/list_functions.rs | use crate::op::call::{
parser::{get_default_value, param_to_function_arg, param_type_val_to_token, token_to_string},
Abi,
};
use anyhow::{anyhow, Result};
use fuels_core::types::{param_types::ParamType, ContractId};
use std::collections::HashMap;
use std::io::Write;
/// List all functions in the contracts' ABIs along with examples of how to call them.
/// This function supports listing functions from multiple contracts when additional
/// contract ABIs are provided via the --contract-abi parameter.
pub fn list_contract_functions<W: Write>(
    main_contract_id: &ContractId,
    abi_map: &HashMap<ContractId, Abi>,
    writer: &mut W,
) -> Result<()> {
    // The main contract is listed first; its ABI must be present.
    let main_abi = abi_map
        .get(main_contract_id)
        .ok_or_else(|| anyhow!("Main contract ABI not found in abi_map"))?;
    list_functions_for_single_contract(main_contract_id, main_abi, true, writer)?;
    // Any remaining entries are secondary contracts supplied via --contract-abi.
    let extra_contracts: Vec<_> = abi_map
        .iter()
        .filter(|(id, _)| *id != main_contract_id)
        .collect();
    if !extra_contracts.is_empty() {
        writeln!(writer, "\n{}", "=".repeat(80))?;
        writeln!(writer, "Additional Contracts:\n")?;
        for (id, abi) in extra_contracts {
            list_functions_for_single_contract(id, abi, false, writer)?;
        }
    }
    Ok(())
}
/// List functions for a single contract
///
/// Writes, for every function in `abi`, its signature followed by a
/// ready-to-run `forc call` example (with placeholder argument values) to
/// `writer`.
fn list_functions_for_single_contract<W: Write>(
    contract_id: &ContractId,
    abi: &Abi,
    is_main_contract: bool,
    writer: &mut W,
) -> Result<()> {
    // Header wording distinguishes the main contract from --contract-abi extras.
    let header = if is_main_contract {
        format!("Callable functions for contract: {contract_id}\n")
    } else {
        format!("Functions for additional contract: {contract_id}\n")
    };
    writeln!(writer, "{header}")?;
    if abi.unified.functions.is_empty() {
        writeln!(writer, "No functions found in the contract ABI.\n")?;
        return Ok(());
    }
    for func in &abi.unified.functions {
        // For each input, build: the rendered "name: Type" string, an example
        // literal value for the type, and the resolved ParamType (used below
        // to decide which example values need shell quoting).
        let func_args = func
            .inputs
            .iter()
            .map(|input| {
                let Ok(param_type) = ParamType::try_from_type_application(input, &abi.type_lookup)
                else {
                    return Err(anyhow!("Failed to convert input type application"));
                };
                let func_args = format!("{}: {}", input.name, param_to_function_arg(&param_type));
                let func_args_input = {
                    let token =
                        param_type_val_to_token(&param_type, &get_default_value(&param_type))
                            .map_err(|err| {
                                anyhow!(
                                    "Failed to generate example call for {}: {}",
                                    func.name,
                                    err
                                )
                            })?;
                    token_to_string(&token).map_err(|err| {
                        anyhow!(
                            "Failed to convert token to string for {}: {}",
                            func.name,
                            err
                        )
                    })?
                };
                Ok((func_args, func_args_input, param_type))
            })
            .collect::<Result<Vec<_>>>()?;
        let func_args_types = func_args
            .iter()
            .map(|(func_args, _, _)| func_args.to_owned())
            .collect::<Vec<String>>()
            .join(", ");
        // Compound values are wrapped in quotes so the example command line
        // survives shell word-splitting.
        let func_args_inputs = func_args
            .iter()
            .map(|(_, func_args_input, param_type)| match param_type {
                ParamType::Array(_, _)
                | ParamType::Unit
                | ParamType::Tuple(_)
                | ParamType::Struct { .. }
                | ParamType::Enum { .. }
                | ParamType::RawSlice
                | ParamType::Vector(_) => format!("\"{func_args_input}\""),
                _ => func_args_input.to_owned(),
            })
            .collect::<Vec<String>>()
            .join(" ");
        let return_type = ParamType::try_from_type_application(&func.output, &abi.type_lookup)
            .map(|param_type| param_to_function_arg(&param_type))
            .map_err(|err| {
                anyhow!(
                    "Failed to convert output type application for {}: {}",
                    func.name,
                    err
                )
            })?;
        let painted_name = forc_util::ansiterm::Colour::Blue.paint(func.name.clone());
        writeln!(writer, "{painted_name}({func_args_types}) -> {return_type}")?;
        writeln!(
            writer,
            " forc call \\\n --abi {} \\\n {} \\\n {} {}\n",
            abi.source, contract_id, func.name, func_args_inputs,
        )?;
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::op::call::tests::get_contract_instance;
    use std::{io::Cursor, path::Path, str::FromStr};

    // Regression test: the `--abi` argument printed in the generated example
    // commands must echo the ABI source exactly as the user supplied it,
    // whether that was a file path, a URL, or inline JSON.
    #[tokio::test]
    async fn test_list_contract_functions_preserves_abi_source_format() {
        // Only the contract id is needed here; the deployed instance itself is unused.
        let (_, id, _, _) = get_contract_instance().await;
        // Load a test ABI content
        let abi_path_str = "../../forc-plugins/forc-client/test/data/contract_with_types/contract_with_types-abi.json";
        let abi_path = Path::new(abi_path_str);
        let abi_str = std::fs::read_to_string(abi_path).unwrap();
        // Test different source formats
        // Each case: (label for failure messages, source to attach, substring expected in output).
        let test_cases = vec![
            (
                "file_path",
                crate::cmd::call::AbiSource::File(std::path::PathBuf::from("./test-abi.json")),
                "--abi ./test-abi.json",
            ),
            (
                "url",
                crate::cmd::call::AbiSource::Url(
                    url::Url::parse("https://example.com/abi.json").unwrap(),
                ),
                "--abi https://example.com/abi.json",
            ),
            (
                "inline_json",
                crate::cmd::call::AbiSource::String(r#"{"test":"value"}"#.to_string()),
                r#"--abi {"test":"value"}"#,
            ),
        ];
        for (test_name, source, expected_abi_arg) in test_cases {
            // Same parsed ABI each time; only the attached source differs.
            let abi = Abi::from_str(&abi_str).unwrap().with_source(source);
            let mut abi_map = HashMap::new();
            abi_map.insert(id, abi);
            // Capture the listing in memory rather than writing to stdout.
            let mut output = Cursor::new(Vec::<u8>::new());
            list_contract_functions(&id, &abi_map, &mut output).unwrap();
            let output_bytes = output.into_inner();
            let output_string = String::from_utf8(output_bytes).unwrap();
            // Verify the ABI source is preserved exactly as provided
            assert!(
                output_string.contains(expected_abi_arg),
                "Test '{test_name}' failed: expected '{expected_abi_arg}' in output, but got:\n{output_string}"
            );
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/call_function.rs | forc-plugins/forc-client/src/op/call/call_function.rs | use crate::{
cmd::{self, call::FuncType},
op::call::{
missing_contracts::determine_missing_contracts,
parser::{param_type_val_to_token, token_to_string},
CallResponse,
},
};
use anyhow::{anyhow, bail, Result};
use fuel_abi_types::abi::unified_program::UnifiedProgramABI;
use fuel_core_client::client::types::TransactionStatus;
use fuel_core_types::services::executor::{TransactionExecutionResult, TransactionExecutionStatus};
use fuels::{
accounts::ViewOnlyAccount,
client::FuelClient,
programs::calls::{
receipt_parser::ReceiptParser,
traits::{ContractDependencyConfigurator, TransactionTuner},
ContractCall,
},
types::transaction::Transaction,
};
use fuels_core::{
codec::{
encode_fn_selector, log_formatters_lookup, ABIDecoder, ABIEncoder, DecoderConfig,
EncoderConfig, ErrorDetails, LogDecoder,
},
types::{
param_types::ParamType,
transaction_builders::{BuildableTransaction, ScriptBuildStrategy, VariableOutputPolicy},
ContractId,
},
};
use std::{collections::HashMap, str::FromStr, sync::Arc};
/// Calls a contract function with the given parameters
///
/// High-level flow:
/// 1. Build the ABI map (main contract plus any `--contract-abi` extras) and
///    ABI-encode the selector and arguments.
/// 2. Connect to the node, assemble the `ContractCall`, and resolve external
///    contracts (user-supplied, or auto-detected when omitted).
/// 3. Execute according to `mode` (dry-run / simulate / live submission) and
///    collect storage reads for the execution trace.
/// 4. Decode the receipts into a printable result, print tx info, and
///    optionally open an interactive debug session.
///
/// Returns a `CallResponse` carrying the tx hash, decoded result, total gas,
/// receipts, script, and trace events. Errors on any build/submit/decode
/// failure, or when the call reverted.
pub async fn call_function(
    contract_id: ContractId,
    abi: crate::cmd::call::AbiSource,
    function: FuncType,
    function_args: Vec<String>,
    cmd: cmd::Call,
) -> Result<CallResponse> {
    // Partial move: the remaining fields (`cmd.verbosity`, `cmd.label`,
    // `cmd.debug`) are still accessed through `cmd` further below.
    let cmd::Call {
        node,
        mode,
        caller,
        call_parameters,
        gas,
        mut output,
        external_contracts,
        contract_abis,
        variable_output,
        ..
    } = cmd;
    // Use the reusable function to create ABI map
    let abi_map = super::create_abi_map(contract_id, &abi, contract_abis).await?;
    // Get the main ABI for compatibility with existing code
    let abi = abi_map
        .get(&contract_id)
        .ok_or_else(|| anyhow!("Main contract ABI not found in abi_map"))?;
    let cmd::call::FuncType::Selector(selector) = function;
    // ABI-encode the call data and resolve the function's return type.
    let (encoded_data, output_param) =
        prepare_contract_call_data(&abi.unified, &selector, &function_args)?;
    // Setup connection to node
    let (wallet, tx_policies, base_asset_id) = super::setup_connection(&node, caller, &gas).await?;
    // Default the forwarded asset to the chain's base asset when unspecified.
    let call_parameters = cmd::call::CallParametersOpts {
        asset_id: call_parameters.asset_id.or(Some(base_asset_id)),
        ..call_parameters
    };
    // Create the contract call
    let call = ContractCall {
        contract_id,
        encoded_selector: encode_fn_selector(&selector),
        encoded_args: Ok(encoded_data),
        call_parameters: call_parameters.clone().into(),
        external_contracts: vec![], // set below
        output_param: output_param.clone(),
        is_payable: call_parameters.amount > 0, // forwarding a nonzero amount marks the call payable
        custom_assets: Default::default(),
        inputs: Vec::new(),
        outputs: Vec::new(),
    };
    // Setup variable output policy and log decoder
    let variable_output_policy = variable_output
        .map(VariableOutputPolicy::Exactly)
        .unwrap_or(VariableOutputPolicy::EstimateMinimum);
    // Re-key the ABI's error codes into the SDK's `ErrorDetails` so reverts
    // can be mapped back to source locations and messages.
    let error_codes = abi
        .unified
        .error_codes
        .as_ref()
        .map_or(HashMap::new(), |error_codes| {
            error_codes
                .iter()
                .map(|(revert_code, error_details)| {
                    (
                        *revert_code,
                        ErrorDetails::new(
                            error_details.pos.pkg.clone(),
                            error_details.pos.file.clone(),
                            error_details.pos.line,
                            error_details.pos.column,
                            error_details.log_id.clone(),
                            error_details.msg.clone(),
                        ),
                    )
                })
                .collect()
        });
    let log_decoder = LogDecoder::new(log_formatters_lookup(vec![], contract_id), error_codes);
    // Execute the call based on execution mode
    let client = FuelClient::new(wallet.provider().url())
        .map_err(|e| anyhow!("Failed to create client: {e}"))?;
    let consensus_params = wallet.provider().consensus_parameters().await?;
    let chain_id = consensus_params.chain_id();
    // Get external contracts (either provided or auto-detected)
    let external_contracts = match external_contracts {
        // A single empty entry is the explicit opt-out: call with no externals.
        Some(contracts) if contracts.first().is_some_and(|s| s.is_empty()) => vec![],
        Some(contracts) => {
            // Parse each contract ID
            contracts
                .into_iter()
                .filter(|s| !s.is_empty())
                .map(|s| {
                    ContractId::from_str(s.strip_prefix("0x").unwrap_or(&s))
                        .map_err(|e| anyhow!("Invalid contract ID '{}': {}", s, e))
                })
                .collect::<Result<Vec<_>>>()?
        }
        None => {
            // Automatically retrieve missing contract addresses from the call
            forc_tracing::println_warning(
                "Automatically retrieving missing contract addresses for the call",
            );
            let external_contracts = determine_missing_contracts(
                &call,
                wallet.provider(),
                &tx_policies,
                &variable_output_policy,
                &consensus_params,
                &log_decoder,
                &wallet,
            )
            .await?;
            if !external_contracts.is_empty() {
                forc_tracing::println_warning(
                    "Automatically provided external contract addresses with call (max 10):",
                );
                external_contracts.iter().for_each(|addr| {
                    forc_tracing::println_warning(&format!("- 0x{addr}"));
                });
            }
            external_contracts
        }
    };
    // Build the transaction builder once; each mode below consumes it.
    let tb = call
        .clone()
        .with_external_contracts(external_contracts)
        .transaction_builder(
            tx_policies,
            variable_output_policy,
            &consensus_params,
            call.inputs.clone(),
            &wallet,
        )
        .map_err(|e| anyhow!("Failed to initialize transaction builder: {e}"))?;
    // Each arm yields (built tx, execution status, recorded storage reads).
    #[cfg_attr(test, allow(unused_variables))]
    let (tx, tx_execution, storage_reads) = match mode {
        cmd::call::ExecutionMode::DryRun => {
            let tx = call
                .build_tx(tb, &wallet)
                .await
                .map_err(|e| anyhow!("Failed to build transaction: {e}"))?;
            let (tx_execs, storage_reads) = client
                .dry_run_opt_record_storage_reads(&[tx.clone().into()], None, None, None)
                .await
                .map_err(|e| anyhow!("Failed to dry run transaction: {e}"))?;
            let tx_exec = tx_execs
                .first()
                .ok_or(anyhow!(
                    "Failed to extract transaction from dry run execution"
                ))?
                .to_owned();
            (tx, tx_exec, storage_reads)
        }
        cmd::call::ExecutionMode::Simulate => {
            // State-read-only build strategy: simulate without committing state.
            let tb = tb.with_build_strategy(ScriptBuildStrategy::StateReadOnly);
            let tx = call
                .build_tx(tb, &wallet)
                .await
                .map_err(|e| anyhow!("Failed to build transaction: {e}"))?;
            let gas_price = gas.map(|g| g.price).unwrap_or(Some(0));
            let (tx_execs, storage_reads) = client
                .dry_run_opt_record_storage_reads(&[tx.clone().into()], None, gas_price, None)
                .await
                .map_err(|e| anyhow!("Failed to dry run transaction: {e}"))?;
            let tx_exec = tx_execs
                .first()
                .ok_or(anyhow!(
                    "Failed to extract transaction from dry run execution"
                ))?
                .to_owned();
            (tx, tx_exec, storage_reads)
        }
        cmd::call::ExecutionMode::Live => {
            forc_tracing::println_action_green(
                "Sending transaction with wallet",
                &format!("0x{}", wallet.address()),
            );
            let tx = call
                .build_tx(tb, &wallet)
                .await
                .map_err(|e| anyhow!("Failed to build transaction: {e}"))?;
            let tx_status = client.submit_and_await_commit(&tx.clone().into()).await?;
            // Normalize the node's commit status into a TransactionExecutionStatus.
            #[cfg_attr(test, allow(unused_variables))]
            let (block_height, tx_exec) = match tx_status {
                TransactionStatus::Success {
                    block_height,
                    program_state,
                    receipts,
                    total_gas,
                    total_fee,
                    ..
                } => (
                    block_height,
                    TransactionExecutionStatus {
                        id: tx.id(chain_id),
                        result: TransactionExecutionResult::Success {
                            result: program_state,
                            receipts: Arc::new(receipts),
                            total_gas,
                            total_fee,
                        },
                    },
                ),
                TransactionStatus::Failure {
                    total_gas,
                    total_fee,
                    program_state,
                    receipts,
                    block_height,
                    ..
                } => (
                    block_height,
                    TransactionExecutionStatus {
                        id: tx.id(chain_id),
                        result: TransactionExecutionResult::Failed {
                            result: program_state,
                            receipts: Arc::new(receipts),
                            total_gas,
                            total_fee,
                        },
                    },
                ),
                _ => bail!("Transaction status not found"),
            };
            // Storage-read replay needs a live node; unit tests have none.
            #[cfg(not(test))]
            let storage_reads = client
                .storage_read_replay(&block_height)
                .await
                .map_err(|e| anyhow!("Failed to get storage reads: {e}"))?;
            #[cfg(test)]
            let storage_reads = vec![];
            (tx, tx_exec, storage_reads)
        }
    };
    let tx: fuel_tx::Transaction = tx.into();
    // Contract calls are always script transactions; anything else is a bug.
    let fuel_tx::Transaction::Script(script) = &tx else {
        bail!("Transaction is not a script");
    };
    let receipts = tx_execution.result.receipts();
    // Generate execution trace events by stepping through VM interpreter
    #[cfg(test)]
    let trace_events: Vec<crate::op::call::trace::TraceEvent> = vec![];
    #[cfg(not(test))]
    let trace_events = {
        use crate::op::call::trace::interpret_execution_trace;
        interpret_execution_trace(
            wallet.provider(),
            &mode,
            &consensus_params,
            script,
            receipts,
            storage_reads,
            &abi_map,
        )
        .await
        .map_err(|e| anyhow!("Failed to generate execution trace: {e}"))?
    };
    // display detailed call info if verbosity is set
    if cmd.verbosity > 0 {
        // Convert labels from Vec to HashMap
        let labels: HashMap<ContractId, String> = cmd
            .label
            .as_ref()
            .map(|labels| labels.iter().cloned().collect())
            .unwrap_or_default();
        super::display_detailed_call_info(
            &tx_execution,
            script,
            &abi_map,
            cmd.verbosity,
            &mut output,
            &trace_events,
            &labels,
        )?;
    }
    // If the call reverted, exit early; return an error with the revert details
    if let Some((contract_id, revert_info)) =
        crate::op::call::trace::first_revert_info(&trace_events)
    {
        return Err(anyhow!(
            "Contract 0x{contract_id} reverted with code 0x{:x}",
            revert_info.revert_code
        ));
    }
    // Parse the result based on output format
    let mut receipt_parser = ReceiptParser::new(receipts, DecoderConfig::default());
    let result = match output {
        cmd::call::OutputFormat::Default | cmd::call::OutputFormat::Json => {
            let data = receipt_parser
                .extract_contract_call_data(contract_id)
                .ok_or(anyhow!("Failed to extract contract call data"))?;
            ABIDecoder::default()
                .decode_as_debug_str(&output_param, data.as_slice())
                .map_err(|e| anyhow!("Failed to decode as debug string: {e}"))?
        }
        cmd::call::OutputFormat::Raw => {
            let token = receipt_parser
                .parse_call(contract_id, &output_param)
                .map_err(|e| anyhow!("Failed to parse call data: {e}"))?;
            token_to_string(&token)
                .map_err(|e| anyhow!("Failed to convert token to string: {e}"))?
        }
    };
    // display tx info
    super::display_tx_info(
        tx_execution.id.to_string(),
        Some(result.clone()),
        &mode,
        &node,
    );
    // Start interactive debugger if requested
    if cmd.debug {
        start_debug_session(&client, &tx, abi).await?;
    }
    Ok(CallResponse {
        tx_hash: tx_execution.id.to_string(),
        result: Some(result),
        total_gas: *tx_execution.result.total_gas(),
        receipts: tx_execution.result.receipts().to_vec(),
        script: Some(script.to_owned()),
        trace_events,
    })
}
/// Resolves `selector` against the unified program ABI, parses and ABI-encodes
/// `function_args`, and returns the encoded argument bytes together with the
/// function's output [`ParamType`].
///
/// Errors when the function is unknown, the argument count does not match the
/// ABI, or any argument fails to parse or encode.
fn prepare_contract_call_data(
    unified_program_abi: &UnifiedProgramABI,
    selector: &str,
    function_args: &[String],
) -> Result<(Vec<u8>, ParamType)> {
    // Index type declarations by id for type-application resolution.
    let type_lookup = unified_program_abi
        .types
        .iter()
        .map(|decl| (decl.type_id, decl.clone()))
        .collect::<HashMap<_, _>>();
    // Find the function in the ABI
    let abi_func = unified_program_abi
        .functions
        .iter()
        .find(|f| f.name == selector)
        .cloned()
        .ok_or_else(|| anyhow!("Function '{selector}' not found in ABI"))?;
    // Validate number of arguments
    if abi_func.inputs.len() != function_args.len() {
        bail!(
            "Argument count mismatch for '{selector}': expected {}, got {}",
            abi_func.inputs.len(),
            function_args.len()
        );
    }
    // Parse function arguments to tokens
    let tokens = abi_func
        .inputs
        .iter()
        .zip(function_args)
        .map(|(type_application, arg)| {
            let param_type =
                ParamType::try_from_type_application(type_application, &type_lookup)
                    .map_err(|e| anyhow!("Failed to convert input type application: {e}"))?;
            param_type_val_to_token(&param_type, arg)
        })
        .collect::<Result<Vec<_>>>()?;
    // Get output parameter type
    let output_param = ParamType::try_from_type_application(&abi_func.output, &type_lookup)
        .map_err(|e| anyhow!("Failed to convert output type: {e}"))?;
    // Encode function arguments
    let abi_encoder = ABIEncoder::new(EncoderConfig::default());
    let encoded_data = abi_encoder
        .encode(&tokens)
        .map_err(|e| anyhow!("Failed to encode function arguments: {e}"))?;
    Ok((encoded_data, output_param))
}
/// Starts an interactive debugging session with the given transaction and ABI
///
/// Serializes the transaction and the program ABI into temporary `.json`
/// files (removed automatically when their handles drop at the end of the
/// session) and launches the forc-debug CLI pre-seeded with a `start_tx`
/// command pointing at both files.
async fn start_debug_session(
    fuel_client: &FuelClient,
    tx: &fuel_tx::Transaction,
    abi: &super::Abi,
) -> Result<()> {
    // Reuse the caller's node connection for the debugger.
    let mut debugger = forc_debug::debugger::Debugger::from_client(fuel_client.clone())
        .await
        .map_err(|e| anyhow!("Failed to create debugger: {e}"))?;

    // Dump the transaction into a throwaway JSON file.
    let mut tx_json = tempfile::Builder::new()
        .suffix(".json")
        .tempfile()
        .map_err(|e| anyhow!("Failed to create temp transaction file: {e}"))?;
    serde_json::to_writer_pretty(&mut tx_json, tx)
        .map_err(|e| anyhow!("Failed to write transaction to temp file: {e}"))?;

    // Likewise for the program ABI.
    let mut abi_json = tempfile::Builder::new()
        .suffix(".json")
        .tempfile()
        .map_err(|e| anyhow!("Failed to create temp ABI file: {e}"))?;
    serde_json::to_writer_pretty(&mut abi_json, &abi.program)
        .map_err(|e| anyhow!("Failed to write ABI to temp file: {e}"))?;

    // Command that loads both files as soon as the CLI starts.
    let start_cmd = format!(
        "start_tx {} {}",
        tx_json.path().to_string_lossy(),
        abi_json.path().to_string_lossy()
    );

    // Hand control to the interactive CLI until the user exits.
    let mut cli = forc_debug::cli::Cli::new()
        .map_err(|e| anyhow!("Failed to create debug CLI interface: {e}"))?;
    cli.run(&mut debugger, Some(start_cmd))
        .await
        .map_err(|e| anyhow!("Interactive debugging session failed: {e}"))?;
    Ok(())
}
#[cfg(test)]
pub mod tests {
use super::*;
use crate::{
cmd,
op::call::{call, get_wallet, PrivateKeySigner},
};
use fuel_tx::field::Outputs;
use fuels::{crypto::SecretKey, prelude::*};
use std::path::PathBuf;
    // Builds a `cmd::Call` targeting contract `id` on `node_url`, signed with
    // `secret_key`, invoking `selector` with `args`.
    // Defaults chosen for tests: dry-run mode, raw output, no external
    // contracts / labels, verbosity 0, debugger off.
    fn get_contract_call_cmd(
        id: ContractId,
        node_url: &str,
        secret_key: SecretKey,
        selector: &str,
        args: Vec<&str>,
    ) -> cmd::Call {
        cmd::Call {
            address: (*id).into(),
            // ABI fixture shipped with the `contract_with_types` test contract.
            abi: Some(cmd::call::AbiSource::File(PathBuf::from(
                "../../forc-plugins/forc-client/test/data/contract_with_types/contract_with_types-abi.json",
            ))),
            function: Some(selector.to_string()),
            function_args: args.into_iter().map(String::from).collect(),
            node: crate::NodeTarget { node_url: Some(node_url.to_string()), ..Default::default() },
            caller: cmd::call::Caller { signing_key: Some(secret_key), wallet: false },
            call_parameters: Default::default(),
            mode: cmd::call::ExecutionMode::DryRun,
            gas: None,
            external_contracts: None,
            contract_abis: None,
            label: None,
            output: cmd::call::OutputFormat::Raw,
            list_functions: false,
            variable_output: None,
            verbosity: 0,
            debug: false,
        }
    }
abigen!(Contract(
name = "TestContract",
abi = "forc-plugins/forc-client/test/data/contract_with_types/contract_with_types-abi.json"
));
    // Deploys the `contract_with_types` fixture to a fresh local test provider
    // funded via a single random-keyed wallet, and returns the typed contract
    // instance plus its id, the provider, and the wallet's secret key.
    pub async fn get_contract_instance() -> (TestContract<Wallet>, ContractId, Provider, SecretKey)
    {
        let secret_key = SecretKey::random(&mut rand::thread_rng());
        let signer = PrivateKeySigner::new(secret_key);
        // One 1_000_000-unit coin of the zeroed (base) asset for the deployer.
        let coins = setup_single_asset_coins(signer.address(), AssetId::zeroed(), 1, 1_000_000);
        let provider = setup_test_provider(coins, vec![], None, None)
            .await
            .unwrap();
        let wallet = get_wallet(Some(secret_key), false, provider.clone())
            .await
            .unwrap();
        let id = Contract::load_from(
            "../../forc-plugins/forc-client/test/data/contract_with_types/contract_with_types.bin",
            LoadConfiguration::default(),
        )
        .unwrap()
        .deploy(&wallet, TxPolicies::default())
        .await
        .unwrap()
        .contract_id;
        let instance = TestContract::new(id, wallet.clone());
        (instance, id, provider, secret_key)
    }
#[tokio::test]
async fn contract_call_with_abi() {
let (_, id, provider, secret_key) = get_contract_instance().await;
let node_url = provider.url();
// test_empty_no_return
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_empty_no_return", vec![]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "()");
// test_empty
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_empty", vec![]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "()");
// test_unit
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_unit", vec!["()"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "()");
// test_u8
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_u8", vec!["255"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "255");
// test_u16
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_u16", vec!["65535"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "65535");
// test_u32
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_u32", vec!["4294967295"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"4294967295"
);
// test_u64
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_u64",
vec!["18446744073709551615"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"18446744073709551615"
);
// test_u128
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_u128",
vec!["340282366920938463463374607431768211455"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"340282366920938463463374607431768211455"
);
// test_u256
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_u256",
vec!["115792089237316195423570985008687907853269984665640564039457584007913129639935"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"115792089237316195423570985008687907853269984665640564039457584007913129639935"
);
// test b256
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_b256",
vec!["0000000000000000000000000000000000000000000000000000000000000042"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"0x0000000000000000000000000000000000000000000000000000000000000042"
);
// test_b256 - fails if 0x prefix provided since it extracts input as an external contract; we don't want to do this so explicitly provide the external contract as empty
let mut cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_b256",
vec!["0x0000000000000000000000000000000000000000000000000000000000000042"],
);
let operation = cmd.validate_and_get_operation().unwrap();
cmd.external_contracts = Some(vec![]);
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"0x0000000000000000000000000000000000000000000000000000000000000042"
);
// test_bytes
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_bytes", vec!["0x42"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "0x42");
// test bytes without 0x prefix
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_bytes", vec!["42"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "0x42");
// test_str
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_str", vec!["fuel"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(call(operation, cmd).await.unwrap().result.unwrap(), "fuel");
// test str array
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_str_array",
vec!["fuel rocks"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"fuel rocks"
);
// test str array - fails if length mismatch
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_str_array", vec!["fuel"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"string array length mismatch: expected 10, got 4"
);
// test str slice
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_str_slice",
vec!["fuel rocks 42"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"fuel rocks 42"
);
// test tuple
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_tuple", vec!["(42, true)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"(42, true)"
);
// test array
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_array",
vec!["[42, 42, 42, 42, 42, 42, 42, 42, 42, 42]"],
);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"[42, 42, 42, 42, 42, 42, 42, 42, 42, 42]"
);
// test_array - fails if different types
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_array", vec!["[42, true]"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"failed to parse u64 value: true"
);
// test_array - succeeds if length not matched!?
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_array", vec!["[42, 42]"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert!(call(operation, cmd)
.await
.unwrap()
.result
.unwrap()
.starts_with("[42, 42, 0,"));
// test_vector
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_vector", vec!["[42, 42]"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"[42, 42]"
);
// test_vector - fails if different types
let cmd =
get_contract_call_cmd(id, node_url, secret_key, "test_vector", vec!["[42, true]"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"failed to parse u64 value: true"
);
// test_struct - Identity { name: str[2], id: u64 }
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_struct", vec!["{fu, 42}"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"{fu, 42}"
);
// test_struct - fails if incorrect inner attribute length
let cmd =
get_contract_call_cmd(id, node_url, secret_key, "test_struct", vec!["{fuel, 42}"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"string array length mismatch: expected 2, got 4"
);
// test_struct - succeeds if missing inner final attribute; default value is used
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_struct", vec!["{fu}"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"{fu, 0}"
);
// test_struct - succeeds to use default values for all attributes if missing
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_struct", vec!["{}"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"{\0\0, 0}"
);
// test_enum
let cmd =
get_contract_call_cmd(id, node_url, secret_key, "test_enum", vec!["(Active:true)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"(Active:true)"
);
// test_enum - succeeds if using index
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_enum", vec!["(1:56)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"(Pending:56)"
);
// test_enum - fails if variant not found
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_enum", vec!["(A:true)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"failed to find index of variant: A"
);
// test_enum - fails if variant value incorrect
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_enum", vec!["(Active:3)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"failed to parse `Active` variant enum value: 3"
);
// test_enum - fails if variant value is missing
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_enum", vec!["(Active:)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap_err().to_string(),
"enum must have exactly two parts `(variant:value)`: (Active:)"
);
// test_option - encoded like an enum
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_option", vec!["(0:())"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"(None:())"
);
// test_option - encoded like an enum; none value ignored
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_option", vec!["(0:42)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"(None:())"
);
// test_option - encoded like an enum; some value
let cmd = get_contract_call_cmd(id, node_url, secret_key, "test_option", vec!["(1:42)"]);
let operation = cmd.validate_and_get_operation().unwrap();
assert_eq!(
call(operation, cmd).await.unwrap().result.unwrap(),
"(Some:42)"
);
}
#[tokio::test]
async fn contract_call_with_abi_complex() {
let (_, id, provider, secret_key) = get_contract_instance().await;
let node_url = provider.url();
// test_complex_struct
let cmd = get_contract_call_cmd(
id,
node_url,
secret_key,
"test_struct_with_generic",
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/transfer.rs | forc-plugins/forc-client/src/op/call/transfer.rs | use anyhow::anyhow;
use fuels::accounts::{wallet::Wallet, Account};
use fuels_core::types::{transaction::TxPolicies, Address, AssetId};
/// Transfers `amount` of `asset_id` from `wallet` to `recipient`.
///
/// The recipient address is probed on-chain first: a plain user account is
/// funded through a regular transfer, while a contract address requires
/// `force_transfer_to_contract`. A progress line is written to `writer`, and
/// the transaction info is printed on success.
pub async fn transfer(
    wallet: &Wallet,
    recipient: Address,
    amount: u64,
    asset_id: AssetId,
    tx_policies: TxPolicies,
    node: &crate::NodeTarget,
    writer: &mut impl std::io::Write,
) -> anyhow::Result<super::CallResponse> {
    let provider = wallet.provider();
    // Determine whether the recipient is a user account or a contract.
    let is_user_account = provider.is_user_account(*recipient).await?;

    let response = if is_user_account {
        writeln!(
            writer,
            "\nTransferring {amount} 0x{asset_id} to recipient address 0x{recipient}...\n"
        )?;
        wallet
            .transfer(recipient, amount, asset_id, tx_policies)
            .await
            .map_err(|e| anyhow!("Failed to transfer funds to recipient: {}", e))?
    } else {
        writeln!(
            writer,
            "\nTransferring {amount} 0x{asset_id} to contract address 0x{recipient}...\n"
        )?;
        // Contract balances cannot receive a plain transfer; use the forced variant.
        let contract_id = (*recipient).into();
        wallet
            .force_transfer_to_contract(contract_id, amount, asset_id, tx_policies)
            .await
            .map_err(|e| anyhow!("Failed to transfer funds to contract: {}", e))?
    };

    // display tx info
    super::display_tx_info(
        response.tx_id.to_string(),
        None,
        &crate::cmd::call::ExecutionMode::Live,
        node,
    );

    Ok(super::CallResponse {
        tx_hash: response.tx_id.to_string(),
        total_gas: response.tx_status.total_gas,
        result: None,
        receipts: response.tx_status.receipts.to_vec(),
        script: None,
        trace_events: vec![],
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{op::call::PrivateKeySigner, NodeTarget};
    use fuels::prelude::*;

    // Transfer to a plain user account: the recipient's base-asset balance
    // must grow by exactly the transferred amount.
    #[tokio::test]
    async fn test_transfer_function_to_recipient() {
        // Launch a local network and set up wallets
        let mut wallets = launch_custom_provider_and_get_wallets(
            WalletsConfig::new(
                Some(2), /* Two wallets */
                Some(1), /* Single coin (UTXO) */
                Some(1_000_000_000), /* Amount per coin */
            ),
            None,
            None,
        )
        .await
        .unwrap();
        let wallet_sender = wallets.pop().unwrap();
        let wallet_recipient = wallets.pop().unwrap();
        let recipient_address = wallet_recipient.address();
        let provider = wallet_sender.provider();
        let consensus_parameters = provider.consensus_parameters().await.unwrap();
        let base_asset_id = consensus_parameters.base_asset_id();
        // Test helpers to get balances
        let get_recipient_balance = |addr: Address| async move {
            provider
                .get_asset_balance(&addr, base_asset_id)
                .await
                .unwrap()
        };
        // Get initial balance of recipient
        let initial_balance = get_recipient_balance(wallet_recipient.address()).await;
        // Test parameters
        let tx_policies = TxPolicies::default();
        let amount = 100;
        let node = NodeTarget {
            node_url: Some(provider.url().to_string()),
            ..Default::default()
        };
        // should successfully transfer funds
        let response = transfer(
            &wallet_sender,
            recipient_address,
            amount,
            *base_asset_id,
            tx_policies,
            &node,
            &mut std::io::stdout(),
        )
        .await
        .unwrap();
        // Verify response structure
        assert!(
            !response.tx_hash.is_empty(),
            "Transaction hash should be returned"
        );
        assert!(response.result.is_none(), "Result should be none");
        // Verify balance has increased by the transfer amount
        assert_eq!(
            get_recipient_balance(wallet_recipient.address()).await,
            initial_balance + amount as u128,
            "Balance should increase by transfer amount"
        );
    }

    // Transfer to a contract address: must take the force-transfer path and
    // credit the contract's base-asset balance.
    #[tokio::test]
    async fn test_transfer_function_to_contract() {
        let (_, id, provider, secret_key) = crate::op::call::tests::get_contract_instance().await;
        let wallet = Wallet::new(PrivateKeySigner::new(secret_key), provider.clone());
        let consensus_parameters = provider.clone().consensus_parameters().await.unwrap();
        let base_asset_id = consensus_parameters.base_asset_id();
        // Verify initial contract balance
        let balance = provider
            .get_contract_asset_balance(&id, base_asset_id)
            .await
            .unwrap();
        assert_eq!(balance, 0, "Balance should be 0");
        // Test parameters
        let tx_policies = TxPolicies::default();
        let amount = 100;
        let node = NodeTarget {
            node_url: Some(provider.url().to_string()),
            ..Default::default()
        };
        // should successfully transfer funds
        let response = transfer(
            &wallet,
            Address::new(id.into()),
            amount,
            *base_asset_id,
            tx_policies,
            &node,
            &mut std::io::stdout(),
        )
        .await
        .unwrap();
        // Verify response structure
        assert!(
            !response.tx_hash.is_empty(),
            "Transaction hash should be returned"
        );
        assert!(response.result.is_none(), "Result should be none");
        // Verify balance has increased by the transfer amount
        let balance = provider
            .get_contract_asset_balance(&id, base_asset_id)
            .await
            .unwrap();
        assert_eq!(
            balance, amount,
            "Balance should increase by transfer amount"
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/parser.rs | forc-plugins/forc-client/src/op/call/parser.rs | use anyhow::{anyhow, bail, Result};
use fuels_core::types::{param_types::ParamType, EnumSelector, StaticStringToken, Token, U256};
use std::{fmt::Write, str::FromStr};
/// Converts a ParamType and associated value into a Token
///
/// The `input` string uses a human-friendly syntax: scalar values are plain
/// literals (decimal, or `0x`-prefixed hex where applicable), tuples and
/// enums are wrapped in `(...)`, arrays and vectors in `[...]`, and structs
/// in `{...}`. Enums are written as `(variant_name:value)` or
/// `(variant_index:value)`.
///
/// Returns an error describing the first value that fails to parse.
pub fn param_type_val_to_token(param_type: &ParamType, input: &str) -> Result<Token> {
    // Parses a string value while preserving quotes and escaped characters
    let parse_string_value = |input: &str| {
        if input.starts_with('"') && input.ends_with('"') {
            // Remove outer quotes and unescape internal quotes
            let without_outer_quotes = &input[1..input.len() - 1];
            without_outer_quotes.replace("\\\"", "\"")
        } else {
            // If no quotes, just trim whitespace
            input.trim().to_string()
        }
    };
    match param_type {
        ParamType::Unit => Ok(Token::Unit),
        ParamType::Bool => bool::from_str(input)
            .map(Token::Bool)
            .map_err(|_| anyhow!("failed to parse bool value: {}", input)),
        ParamType::U8 => u8::from_str(input)
            .map(Token::U8)
            .map_err(|_| anyhow!("failed to parse u8 value: {}", input)),
        ParamType::U16 => u16::from_str(input)
            .map(Token::U16)
            .map_err(|_| anyhow!("failed to parse u16 value: {}", input)),
        ParamType::U32 => u32::from_str(input)
            .map(Token::U32)
            .map_err(|_| anyhow!("failed to parse u32 value: {}", input)),
        ParamType::U64 => u64::from_str(input)
            .map(Token::U64)
            .map_err(|_| anyhow!("failed to parse u64 value: {}", input)),
        ParamType::U128 => u128::from_str(input)
            .map(Token::U128)
            .map_err(|_| anyhow!("failed to parse u128 value: {}", input)),
        ParamType::U256 => {
            // if prefix is 0x, it's a hex string
            if input.starts_with("0x") {
                U256::from_str(input)
                    .map(Token::U256)
                    .map_err(|_| anyhow!("failed to parse U256 value: {}", input))
            } else {
                U256::from_dec_str(input)
                    .map(Token::U256)
                    .map_err(|_| anyhow!("failed to parse U256 value: {}", input))
            }
        }
        ParamType::B256 => {
            // remove 0x prefix if provided
            let input = input.trim_start_matches("0x");
            // 64 hex characters decode to exactly 32 bytes, so the
            // `try_into` below cannot fail once this check passes.
            if input.len() != 64 {
                return Err(anyhow!("B256 value must be 64 hex characters: {}", input));
            }
            hex::decode(input)
                .map(|bytes| Token::B256(bytes.try_into().unwrap()))
                .map_err(|_| anyhow!("failed to parse B256 value: {}", input))
        }
        ParamType::String => Ok(Token::String(parse_string_value(input))),
        ParamType::Bytes => {
            // remove 0x prefix if provided
            let input = input.trim_start_matches("0x");
            if !input.len().is_multiple_of(2) {
                return Err(anyhow!("bytes value must be even length: {}", input));
            }
            hex::decode(input)
                .map(Token::Bytes)
                .map_err(|_| anyhow!("failed to parse bytes value: {}", input))
        }
        ParamType::RawSlice => {
            // remove 0x prefix if provided
            let input = input.trim_start_matches("0x");
            if !input.len().is_multiple_of(2) {
                return Err(anyhow!("raw slice value must be even length: {}", input));
            }
            hex::decode(input)
                .map(Token::RawSlice)
                .map_err(|_| anyhow!("failed to parse raw slice value: {}", input))
        }
        ParamType::StringArray(size) => {
            let parsed_str = parse_string_value(input);
            // str[N] is fixed-size: the parsed value's byte length must match.
            if parsed_str.len() != *size {
                return Err(anyhow!(
                    "string array length mismatch: expected {}, got {}",
                    size,
                    parsed_str.len()
                ));
            }
            Ok(Token::StringArray(StaticStringToken::new(
                parsed_str,
                Some(*size),
            )))
        }
        ParamType::StringSlice => Ok(Token::StringSlice(StaticStringToken::new(
            parse_string_value(input),
            None,
        ))),
        ParamType::Tuple(types) => {
            // ensure input starts with '(' and ends with ')'
            let parsed_tuple = parse_delimited_string(param_type, input)?;
            Ok(Token::Tuple(
                types
                    .iter()
                    .zip(parsed_tuple.iter())
                    .map(|(ty, s)| param_type_val_to_token(ty, s))
                    .collect::<Result<Vec<_>>>()?,
            ))
        }
        ParamType::Array(ty, _size) => {
            // ensure input starts with '[' and ends with ']'
            let parsed_array = parse_delimited_string(param_type, input)?;
            Ok(Token::Array(
                parsed_array
                    .iter()
                    .map(|s| param_type_val_to_token(ty, s))
                    .collect::<Result<Vec<_>>>()?,
            ))
        }
        ParamType::Vector(ty) => {
            // ensure input starts with '[' and ends with ']'
            let parsed_vector = parse_delimited_string(param_type, input)?;
            Ok(Token::Vector(
                parsed_vector
                    .iter()
                    .map(|s| param_type_val_to_token(ty, s))
                    .collect::<Result<Vec<_>>>()?,
            ))
        }
        ParamType::Struct { fields, .. } => {
            // ensure input starts with '{' and ends with '}'
            let parsed_vals = parse_delimited_string(param_type, input)?;
            let parsed_struct = fields
                .iter()
                .zip(parsed_vals.iter())
                .map(|((_, ty), val)| param_type_val_to_token(ty, val))
                .collect::<Result<Vec<_>>>()?;
            Ok(Token::Struct(parsed_struct))
        }
        ParamType::Enum { enum_variants, .. } => {
            // enums must start with '(' and end with ')'
            // enums must be in format of (variant_index:variant_value) or (variant_name:variant_value)
            let parsed_enum = parse_delimited_string(param_type, input)?;
            if parsed_enum.len() != 2 {
                bail!(
                    "enum must have exactly two parts `(variant:value)`: {}",
                    input
                );
            }
            let (variant_name_or_index, variant_value) = (&parsed_enum[0], &parsed_enum[1]);
            // if variant can be parsed as u64 it is index; else it is name
            let discriminant = match variant_name_or_index.parse::<u64>() {
                Ok(index) => index,
                Err(_) => {
                    // must be name; find index of variant_name_or_index in enum_variants given
                    let index = enum_variants
                        .variants()
                        .iter()
                        .position(|(name, _)| *name == *variant_name_or_index)
                        .ok_or(anyhow!(
                            "failed to find index of variant: {}",
                            variant_name_or_index
                        ))?;
                    index as u64
                }
            };
            let (_, ty) = enum_variants.select_variant(discriminant).map_err(|_| {
                anyhow!("failed to select enum variant: `{}`", variant_name_or_index)
            })?;
            let token = param_type_val_to_token(ty, variant_value).map_err(|_| {
                anyhow!(
                    "failed to parse `{}` variant enum value: {}",
                    variant_name_or_index,
                    variant_value
                )
            })?;
            let enum_selector: EnumSelector = (discriminant, token, enum_variants.clone());
            Ok(Token::Enum(enum_selector.into()))
        }
    }
}
/// Converts a Token to ParamType - unused unless we want to support input-param validation for enums
#[allow(dead_code)]
pub fn token_to_param_type(token: &Token) -> Result<ParamType> {
match token {
Token::Unit => Ok(ParamType::Unit),
Token::Bool(_) => Ok(ParamType::Bool),
Token::U8(_) => Ok(ParamType::U8),
Token::U16(_) => Ok(ParamType::U16),
Token::U32(_) => Ok(ParamType::U32),
Token::U64(_) => Ok(ParamType::U64),
Token::U128(_) => Ok(ParamType::U128),
Token::U256(_) => Ok(ParamType::U256),
Token::B256(_) => Ok(ParamType::B256),
Token::Bytes(_) => Ok(ParamType::Bytes),
Token::String(_) => Ok(ParamType::String),
Token::RawSlice(_) => Ok(ParamType::RawSlice),
Token::StringArray(str) => Ok(ParamType::StringArray(str.get_encodable_str()?.len())),
Token::StringSlice(_) => Ok(ParamType::StringSlice),
Token::Tuple(tokens) => Ok(ParamType::Tuple(
tokens
.iter()
.map(token_to_param_type)
.collect::<Result<Vec<_>>>()?,
)),
Token::Array(tokens) => Ok(ParamType::Array(
Box::new(token_to_param_type(
&tokens.iter().next().unwrap_or(&Token::default()).clone(),
)?),
tokens.len(),
)),
Token::Vector(tokens) => Ok(ParamType::Vector(Box::new(token_to_param_type(
&tokens.iter().next().unwrap_or(&Token::default()).clone(),
)?))),
Token::Struct(tokens) => Ok(ParamType::Struct {
name: "".to_string(),
fields: tokens
.iter()
.map(|t| {
(
"".to_string(),
token_to_param_type(t).expect("failed to convert token to param type"),
)
})
.collect::<Vec<(String, ParamType)>>(),
generics: vec![],
}),
Token::Enum(boxed_enum) => {
let (discriminant, _, enum_variants) = &**boxed_enum;
let (_name, _ty) = enum_variants
.select_variant(*discriminant)
.expect("failed to select variant");
Ok(ParamType::Enum {
name: "".to_string(),
enum_variants: enum_variants.clone(),
generics: Default::default(),
})
}
}
}
/// Renders a byte slice as a lowercase, `0x`-prefixed hex string.
fn bytes_to_hex(bytes: &[u8]) -> String {
    let mut hex = String::with_capacity(2 + bytes.len() * 2);
    hex.push_str("0x");
    for byte in bytes {
        write!(hex, "{byte:02x}").unwrap();
    }
    hex
}

/// Renders each token with [`token_to_string`] and joins the results with
/// `", "`, short-circuiting on the first failure.
fn tokens_to_string_joined(tokens: &[Token]) -> Result<String> {
    Ok(tokens
        .iter()
        .map(token_to_string)
        .collect::<Result<Vec<String>>>()?
        .join(", "))
}

/// Converts a Token to a string
pub fn token_to_string(token: &Token) -> Result<String> {
    match token {
        Token::Unit => Ok("()".to_string()),
        Token::Bool(b) => Ok(b.to_string()),
        Token::U8(n) => Ok(n.to_string()),
        Token::U16(n) => Ok(n.to_string()),
        Token::U32(n) => Ok(n.to_string()),
        Token::U64(n) => Ok(n.to_string()),
        Token::U128(n) => Ok(n.to_string()),
        Token::U256(n) => Ok(n.to_string()),
        // All binary payloads share the same 0x-hex rendering.
        Token::B256(bytes) => Ok(bytes_to_hex(bytes)),
        Token::Bytes(bytes) => Ok(bytes_to_hex(bytes)),
        Token::String(s) => Ok(s.clone()),
        Token::RawSlice(bytes) => Ok(bytes_to_hex(bytes)),
        Token::StringArray(token) => Ok(token.get_encodable_str()?.to_string()),
        Token::StringSlice(token) => token
            .get_encodable_str()
            .map(|s| s.to_string())
            .map_err(|_| anyhow!("failed to get encodable string from StringSlice token")),
        // Composite tokens reuse the same comma-join, differing only in the
        // surrounding delimiters.
        Token::Tuple(tokens) => Ok(format!("({})", tokens_to_string_joined(tokens)?)),
        Token::Array(tokens) => Ok(format!("[{}]", tokens_to_string_joined(tokens)?)),
        Token::Vector(tokens) => Ok(format!("[{}]", tokens_to_string_joined(tokens)?)),
        Token::Struct(tokens) => Ok(format!("{{{}}}", tokens_to_string_joined(tokens)?)),
        Token::Enum(selector) => {
            let (discriminant, value, enum_variants) = &**selector;
            let (name, _ty) = enum_variants
                .select_variant(*discriminant)
                .expect("failed to select variant");
            // TODO: validate that `value`'s type matches the selected variant.
            // This needs a deep recursive `ParamType` comparison; a sketch of
            // the approach lives in this file's git history.
            Ok(format!("({}:{})", name, token_to_string(value)?))
        }
    }
}
/// Parses a delimited string into a vector of strings, preserving quoted
/// content and nested structures.
///
/// The expected delimiters and separator depend on `param_type`: tuples use
/// `( … , … )`, arrays/vectors `[ … , … ]`, structs `{ … , … }`, and enums
/// `(variant:value)`. A separator inside quotes, after a backslash escape,
/// or inside nested brackets does not split the input. Parts are trimmed
/// and empty parts are skipped.
fn parse_delimited_string(param_type: &ParamType, input: &str) -> Result<Vec<String>> {
    let input = input.trim();
    let (start_delim, end_delim, separator) = match param_type {
        ParamType::Tuple(_) => ('(', ')', ','),
        ParamType::Array(_, _) | ParamType::Vector(_) => ('[', ']', ','),
        ParamType::Struct { .. } => ('{', '}', ','),
        ParamType::Enum { .. } => ('(', ')', ':'),
        _ => bail!("Unsupported param type: {:?}", param_type),
    };
    if !input.starts_with(start_delim) || !input.ends_with(end_delim) {
        bail!(
            "input must start with '{}' and end with '{}': {}",
            start_delim,
            end_delim,
            input
        );
    }
    let inner = &input[1..input.len() - 1];
    let mut parts = Vec::new();
    let mut current = String::new();
    let mut in_quotes = false;
    let mut escaped = false;
    // usize (not u8) so deeply nested inputs cannot overflow the counter
    // (a u8 would panic in debug builds past 255 levels of nesting).
    let mut nesting_level = 0usize;
    for c in inner.chars() {
        match (c, in_quotes, escaped) {
            // Backslash starts an escape sequence; it is kept verbatim.
            ('\\', _, false) => {
                escaped = true;
                current.push(c);
            }
            // Escaped quote: keep it without toggling the quote state.
            ('"', _, true) => {
                escaped = false;
                current.push(c);
            }
            ('"', false, false) => {
                in_quotes = true;
                current.push(c);
            }
            ('"', true, false) => {
                in_quotes = false;
                current.push(c);
            }
            // Brackets outside quotes adjust the nesting depth so that
            // separators inside nested values are not treated as splits.
            ('{', false, false) => {
                nesting_level += 1;
                current.push(c);
            }
            ('}', false, false) => {
                nesting_level = nesting_level.saturating_sub(1);
                current.push(c);
            }
            ('(', false, false) => {
                nesting_level += 1;
                current.push(c);
            }
            (')', false, false) => {
                nesting_level = nesting_level.saturating_sub(1);
                current.push(c);
            }
            ('[', false, false) => {
                nesting_level += 1;
                current.push(c);
            }
            (']', false, false) => {
                nesting_level = nesting_level.saturating_sub(1);
                current.push(c);
            }
            // Top-level separator: emit the accumulated (trimmed) part.
            (c, false, false) if c == separator && nesting_level == 0 => {
                if !current.trim().is_empty() {
                    parts.push(current.trim().to_string());
                    current = String::new();
                }
            }
            (_, _, _) => {
                escaped = false;
                current.push(c);
            }
        }
    }
    if !current.trim().is_empty() {
        parts.push(current.trim().to_string());
    }
    Ok(parts)
}
/// Renders the Sway-facing type name for a parameter, as it would appear in
/// a function signature (e.g. `u64`, `str[4]`, `Vec<bool>`, `(u8, b256)`).
pub fn param_to_function_arg(param_type: &ParamType) -> String {
    // Struct/enum names may be fully qualified; show only the final segment.
    let short_name = |name: &str| name.rsplit("::").next().unwrap_or(name).to_string();
    match param_type {
        ParamType::Unit => "()".to_string(),
        ParamType::Bool => "bool".to_string(),
        ParamType::U8 => "u8".to_string(),
        ParamType::U16 => "u16".to_string(),
        ParamType::U32 => "u32".to_string(),
        ParamType::U64 => "u64".to_string(),
        ParamType::U128 => "U128".to_string(),
        ParamType::U256 => "U256".to_string(),
        ParamType::B256 => "b256".to_string(),
        ParamType::Bytes => "Bytes".to_string(),
        // Both dynamic string forms display as `str`.
        ParamType::String | ParamType::StringSlice => "str".to_string(),
        ParamType::RawSlice => "RawSlice".to_string(),
        ParamType::StringArray(size) => format!("str[{size}]"),
        ParamType::Tuple(types) => {
            let rendered: Vec<String> = types.iter().map(param_to_function_arg).collect();
            format!("({})", rendered.join(", "))
        }
        ParamType::Array(ty, size) => format!("[{}; {size}]", param_to_function_arg(ty)),
        ParamType::Vector(ty) => format!("Vec<{}>", param_to_function_arg(ty)),
        ParamType::Struct { name, .. } | ParamType::Enum { name, .. } => short_name(name),
    }
}
/// Produces a placeholder input value for the given type, written in the
/// textual syntax accepted by `param_type_val_to_token`.
pub fn get_default_value(param_type: &ParamType) -> String {
    match param_type {
        ParamType::Unit => "()".to_string(),
        ParamType::Bool => "false".to_string(),
        // Every numeric type defaults to zero.
        ParamType::U8
        | ParamType::U16
        | ParamType::U32
        | ParamType::U64
        | ParamType::U128
        | ParamType::U256 => "0".to_string(),
        ParamType::B256 => {
            "0x0000000000000000000000000000000000000000000000000000000000000000".to_string()
        }
        ParamType::Bytes | ParamType::RawSlice => "0x".to_string(),
        ParamType::String | ParamType::StringSlice => "hello".to_string(),
        ParamType::StringArray(size) => "a".repeat(*size),
        ParamType::Tuple(types) => {
            let defaults: Vec<String> = types.iter().map(get_default_value).collect();
            format!("({})", defaults.join(", "))
        }
        ParamType::Array(ty, size) => {
            let defaults = vec![get_default_value(ty); *size];
            format!("[{}]", defaults.join(", "))
        }
        ParamType::Vector(ty) => {
            // Two sample elements are enough to demonstrate the syntax.
            let elem = get_default_value(ty);
            format!("[{elem}, {elem}]")
        }
        ParamType::Struct { fields, .. } => {
            let defaults: Vec<String> =
                fields.iter().map(|(_, ty)| get_default_value(ty)).collect();
            format!("{{{}}}", defaults.join(", "))
        }
        ParamType::Enum { enum_variants, .. } => {
            let (variant_key, variant_val_type) = enum_variants
                .variants()
                .first()
                .expect("Enum must have at least one variant");
            format!("({variant_key}: {})", get_default_value(variant_val_type))
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use fuels_core::types::param_types::EnumVariants;
#[test]
fn test_parse_delimited_string() {
// Test with comma separator
let result = parse_delimited_string(&ParamType::Tuple(vec![]), "(a, b, c)").unwrap();
assert_eq!(result, vec!["a", "b", "c"]);
// Test with colon separator
let result = parse_delimited_string(
&ParamType::Enum {
name: "TestEnum".to_string(),
enum_variants: EnumVariants::new(vec![("".to_string(), ParamType::String)])
.unwrap(),
generics: vec![],
},
"(key:value)",
)
.unwrap();
assert_eq!(result, vec!["key", "value"]);
// Test with spaces around separator
let result = parse_delimited_string(
&ParamType::Struct {
name: "TestStruct".to_string(),
fields: vec![
("a".to_string(), ParamType::String),
("b".to_string(), ParamType::String),
("c".to_string(), ParamType::String),
],
generics: vec![],
},
"{a , b , c}",
)
.unwrap();
assert_eq!(result, vec!["a", "b", "c"]);
// Test with quoted strings
let result = parse_delimited_string(
&ParamType::Vector(Box::new(ParamType::String)),
"[\"a,b\", c]",
)
.unwrap();
assert_eq!(result, vec!["\"a,b\"", "c"]);
// Test with escaped quotes
let result =
parse_delimited_string(&ParamType::Tuple(vec![]), "(\"\\\"a:b\\\"\", c)").unwrap();
assert_eq!(result, vec!["\"\\\"a:b\\\"\"", "c"]);
// Test with separator in quotes
let result = parse_delimited_string(&ParamType::Tuple(vec![]), "(\"a:b\",c)").unwrap();
assert_eq!(result, vec!["\"a:b\"", "c"]);
}
#[test]
fn param_type_val_to_token_conversion() {
// unit
let token = param_type_val_to_token(&ParamType::Unit, "").unwrap();
assert_eq!(token, Token::Unit);
// bool
let token = param_type_val_to_token(&ParamType::Bool, "true").unwrap();
assert_eq!(token, Token::Bool(true));
// u8
let token = param_type_val_to_token(&ParamType::U8, "42").unwrap();
assert_eq!(token, Token::U8(42));
// u16
let token = param_type_val_to_token(&ParamType::U16, "42").unwrap();
assert_eq!(token, Token::U16(42));
// u32
let token = param_type_val_to_token(&ParamType::U32, "42").unwrap();
assert_eq!(token, Token::U32(42));
// u64
let token = param_type_val_to_token(&ParamType::U64, "42").unwrap();
assert_eq!(token, Token::U64(42));
// u128
let token = param_type_val_to_token(&ParamType::U128, "42").unwrap();
assert_eq!(token, Token::U128(42));
// u256 - hex string
let token = param_type_val_to_token(&ParamType::U256, "0x42").unwrap();
assert_eq!(token, Token::U256(66.into()));
// u256 - decimal string
let token = param_type_val_to_token(&ParamType::U256, "42").unwrap();
assert_eq!(token, Token::U256(42.into()));
// u256 - decimal string with leading 0
let token = param_type_val_to_token(
&ParamType::U256,
"0000000000000000000000000000000000000000000000000000000000000042",
)
.unwrap();
assert_eq!(token, Token::U256(42.into()));
// b256 - hex string, incorrect length
let token_result = param_type_val_to_token(&ParamType::B256, "0x42");
assert!(token_result.is_err());
assert_eq!(
token_result.unwrap_err().to_string(),
"B256 value must be 64 hex characters: 42"
);
// b256 - hex string, correct length
let token = param_type_val_to_token(
&ParamType::B256,
"0x0000000000000000000000000000000000000000000000000000000000000042",
)
.unwrap();
assert_eq!(
token,
Token::B256([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 66
])
);
// b256 - no 0x prefix
let token = param_type_val_to_token(
&ParamType::B256,
"0000000000000000000000000000000000000000000000000000000000000042",
)
.unwrap();
assert_eq!(
token,
Token::B256([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 66
])
);
// bytes
let token = param_type_val_to_token(&ParamType::Bytes, "0x42").unwrap();
assert_eq!(token, Token::Bytes(vec![66]));
// bytes - no 0x prefix
let token = param_type_val_to_token(&ParamType::Bytes, "42").unwrap();
assert_eq!(token, Token::Bytes(vec![66]));
// string
let token = param_type_val_to_token(&ParamType::String, "fuel").unwrap();
assert_eq!(token, Token::String("fuel".to_string()));
// raw slice
let token = param_type_val_to_token(&ParamType::RawSlice, "0x42").unwrap();
assert_eq!(token, Token::RawSlice(vec![66]));
// raw slice - no 0x prefix
let token = param_type_val_to_token(&ParamType::RawSlice, "42").unwrap();
assert_eq!(token, Token::RawSlice(vec![66]));
// string array - single val
let token = param_type_val_to_token(&ParamType::StringArray(4), "fuel").unwrap();
assert_eq!(
token,
Token::StringArray(StaticStringToken::new("fuel".to_string(), Some(4)))
);
// string array - incorrect length fails
let token_result = param_type_val_to_token(&ParamType::StringArray(2), "fuel");
assert!(token_result.is_err());
assert_eq!(
token_result.unwrap_err().to_string(),
"string array length mismatch: expected 2, got 4"
);
// string slice
let token = param_type_val_to_token(&ParamType::StringSlice, "fuel").unwrap();
assert_eq!(
token,
Token::StringSlice(StaticStringToken::new("fuel".to_string(), None))
);
// tuple - incorrect format
let token_result = param_type_val_to_token(
&ParamType::Tuple(vec![ParamType::String, ParamType::String]),
"fuel, 42",
);
assert!(token_result.is_err());
assert_eq!(
token_result.unwrap_err().to_string(),
"input must start with '(' and end with ')': fuel, 42"
);
// tuple
let token = param_type_val_to_token(
&ParamType::Tuple(vec![ParamType::String, ParamType::String]),
"(fuel, 42)",
)
.unwrap();
assert_eq!(
token,
Token::Tuple(vec![
Token::String("fuel".to_string()),
Token::String("42".to_string())
])
);
// tuple - different param types
let token = param_type_val_to_token(
&ParamType::Tuple(vec![ParamType::String, ParamType::U8]),
"(fuel, 42)",
)
.unwrap();
assert_eq!(
token,
Token::Tuple(vec![Token::String("fuel".to_string()), Token::U8(42)])
);
// array
let token =
param_type_val_to_token(&ParamType::Array(ParamType::String.into(), 3), "[fuel, 42]")
.unwrap();
assert_eq!(
token,
Token::Array(vec![
Token::String("fuel".to_string()),
Token::String("42".to_string())
])
);
// array - incorrect format
let token_result =
param_type_val_to_token(&ParamType::Array(ParamType::String.into(), 3), "fuel 42");
assert!(token_result.is_err());
assert_eq!(
token_result.unwrap_err().to_string(),
"input must start with '[' and end with ']': fuel 42"
);
// vector - correct format
let token =
param_type_val_to_token(&ParamType::Vector(ParamType::String.into()), "[fuel, 42]")
.unwrap();
assert_eq!(
token,
Token::Vector(vec![
Token::String("fuel".to_string()),
Token::String("42".to_string())
])
);
// vector - incorrect format
let token_result =
param_type_val_to_token(&ParamType::Vector(ParamType::String.into()), "fuel 42");
assert!(token_result.is_err());
assert_eq!(
token_result.unwrap_err().to_string(),
"input must start with '[' and end with ']': fuel 42"
);
// struct - correct format; single value
let token = param_type_val_to_token(
&ParamType::Struct {
name: "".to_string(),
fields: vec![("".to_string(), ParamType::String)],
generics: vec![],
},
"{fuel, 42}",
)
.unwrap();
assert_eq!(
token,
Token::Struct(vec![Token::String("fuel".to_string())])
);
// struct - correct format; multiple values
let token = param_type_val_to_token(
&ParamType::Struct {
name: "".to_string(),
fields: vec![
("".to_string(), ParamType::String),
("".to_string(), ParamType::String),
],
generics: vec![],
},
"{fuel, 42}",
)
.unwrap();
assert_eq!(
token,
Token::Struct(vec![
Token::String("fuel".to_string()),
Token::String("42".to_string())
])
);
// struct - correct format; multiple values; different param types
let token = param_type_val_to_token(
&ParamType::Struct {
name: "".to_string(),
fields: vec![
("".to_string(), ParamType::String),
("".to_string(), ParamType::U8),
],
generics: vec![],
},
"{fuel, 42}",
)
.unwrap();
assert_eq!(
token,
Token::Struct(vec![Token::String("fuel".to_string()), Token::U8(42)])
);
// struct - incorrect format (same as tuple)
let token_result = param_type_val_to_token(
&ParamType::Struct {
name: "".to_string(),
fields: vec![("a".to_string(), ParamType::String)],
generics: vec![],
},
"fuel, 42",
);
assert!(token_result.is_err());
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/mod.rs | forc-plugins/forc-client/src/op/call/mod.rs | mod call_function;
pub mod list_functions;
mod missing_contracts;
mod parser;
pub mod trace;
mod transfer;
use crate::cmd::call::AbiSource;
use crate::{
cmd,
constants::DEFAULT_PRIVATE_KEY,
op::call::{
call_function::call_function, list_functions::list_contract_functions, transfer::transfer,
},
util::tx::{prompt_forc_wallet_password, select_local_wallet_account},
};
use anyhow::{anyhow, Result};
use fuel_abi_types::abi::{
program::ProgramABI,
unified_program::{UnifiedProgramABI, UnifiedTypeDeclaration},
};
use fuel_core_types::services::executor::TransactionExecutionStatus;
use fuel_tx::Receipt;
use fuels::{
accounts::{
provider::Provider, signers::private_key::PrivateKeySigner, wallet::Wallet, ViewOnlyAccount,
},
crypto::SecretKey,
};
use fuels_core::types::{transaction::TxPolicies, AssetId, ContractId};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, str::FromStr};
/// Response returned from a contract call operation
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CallResponse {
    // Identifier of the submitted transaction, rendered as a string.
    pub tx_hash: String,
    // Total gas consumed by the transaction.
    pub total_gas: u64,
    // Decoded return value of the called function, if any; omitted from
    // JSON output when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<String>,
    // Raw receipts produced while executing the transaction; omitted from
    // JSON output when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub receipts: Vec<Receipt>,
    // Trace events collected for verbose output; omitted from JSON output
    // when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub trace_events: Vec<trace::TraceEvent>,
    // The executed script transaction, when available.
    #[serde(rename = "Script", skip_serializing_if = "Option::is_none")]
    pub script: Option<fuel_tx::Script>,
}
/// A command for calling a contract function.
///
/// Dispatches on `operation`:
/// - `ListFunctions`: prints the callable functions of a contract
///   (human-readable output only; JSON mode is rejected).
/// - `DirectTransfer`: sends assets to a recipient address or contract.
/// - `CallFunction`: invokes a contract function with the given arguments.
///
/// When JSON output is selected, the resulting [`CallResponse`] is also
/// pretty-printed to stdout so it can be piped or parsed.
pub async fn call(operation: cmd::call::Operation, cmd: cmd::Call) -> anyhow::Result<CallResponse> {
    let is_json_mode = matches!(cmd.output, cmd::call::OutputFormat::Json);
    let response = match operation {
        cmd::call::Operation::ListFunctions { contract_id, abi } => {
            if let cmd::call::OutputFormat::Json = cmd.output {
                return Err(anyhow!("JSON output is not supported for list functions"));
            }
            let abi_map = create_abi_map(contract_id, &abi, cmd.contract_abis).await?;
            // Use the simplified list_contract_functions function
            list_contract_functions(&contract_id, &abi_map, &mut std::io::stdout())?;
            CallResponse::default()
        }
        cmd::call::Operation::DirectTransfer {
            recipient,
            amount,
            asset_id,
        } => {
            let cmd::Call {
                node,
                caller,
                gas,
                mut output,
                ..
            } = cmd;
            // Already validated that mode is ExecutionMode::Live
            let (wallet, tx_policies, base_asset_id) =
                setup_connection(&node, caller, &gas).await?;
            // Default to the network's base asset when none is specified.
            let asset_id = asset_id.unwrap_or(base_asset_id);
            transfer(
                &wallet,
                recipient,
                amount,
                asset_id,
                tx_policies,
                &node,
                &mut output,
            )
            .await?
        }
        cmd::call::Operation::CallFunction {
            contract_id,
            abi,
            function,
            function_args,
        } => {
            // Call the function with required parameters
            call_function(contract_id, abi, function, function_args, cmd).await?
        }
    };
    // If using JSON output mode, explicitly print the response for potential parsing/piping
    if is_json_mode {
        println!("{}", serde_json::to_string_pretty(&response).unwrap());
    }
    Ok(response)
}
/// Connects to the target node and prepares the wallet, transaction
/// policies, and base asset id shared by call operations.
async fn setup_connection(
    node: &crate::NodeTarget,
    caller: cmd::call::Caller,
    gas: &Option<forc_tx::Gas>,
) -> anyhow::Result<(Wallet, TxPolicies, AssetId)> {
    let provider = Provider::connect(node.get_node_url(&None)?).await?;
    let wallet = get_wallet(caller.signing_key, caller.wallet, provider).await?;
    // Gas settings translate directly into transaction policies.
    let tx_policies = gas.as_ref().map(Into::into).unwrap_or_default();
    let base_asset_id = *wallet
        .provider()
        .consensus_parameters()
        .await?
        .base_asset_id();
    Ok((wallet, tx_policies, base_asset_id))
}
/// Resolves ABI JSON text from one of three sources: a local file, a remote
/// URL, or an inline string (validated as JSON before being returned).
async fn load_abi(abi: &AbiSource) -> anyhow::Result<String> {
    match abi {
        AbiSource::File(path) => std::fs::read_to_string(path)
            .map_err(|err| anyhow!("Failed to read ABI file at {:?}: {}", path, err)),
        AbiSource::Url(url) => {
            let resp = reqwest::get(url.clone())
                .await
                .map_err(|err| anyhow!("Failed to fetch ABI from URL {}: {}", url, err))?;
            let body = resp
                .bytes()
                .await
                .map_err(|err| anyhow!("Failed to read response body from URL {}: {}", url, err))?;
            String::from_utf8(body.to_vec()).map_err(|err| {
                anyhow!("Failed to parse response as UTF-8 from URL {}: {}", url, err)
            })
        }
        AbiSource::String(raw_json) => {
            // Reject malformed JSON early rather than passing it downstream.
            serde_json::from_str::<serde_json::Value>(raw_json)
                .map_err(|err| anyhow!("Invalid JSON in ABI string: {}", err))?;
            Ok(raw_json.to_owned())
        }
    }
}
/// Get the wallet to use for the call - based on optionally provided signing key and wallet flag.
async fn get_wallet(
    signing_key: Option<SecretKey>,
    use_wallet: bool,
    provider: Provider,
) -> Result<Wallet> {
    // Wraps a raw secret key in a provider-backed wallet.
    let wallet_from_key =
        |key: SecretKey, provider: Provider| Wallet::new(PrivateKeySigner::new(key), provider);
    match (signing_key, use_wallet) {
        (None, false) => {
            // Neither a key nor the wallet flag: fall back to the
            // well-known default development key.
            let default_key = SecretKey::from_str(DEFAULT_PRIVATE_KEY).unwrap();
            let wallet = wallet_from_key(default_key, provider);
            forc_tracing::println_warning(&format!(
                "No signing key or wallet flag provided. Using default signer: 0x{}",
                wallet.address()
            ));
            Ok(wallet)
        }
        (Some(secret_key), false) => {
            let wallet = wallet_from_key(secret_key, provider);
            forc_tracing::println_warning(&format!(
                "Using account {} derived from signing key...",
                wallet.address()
            ));
            Ok(wallet)
        }
        (None, true) => {
            // Interactive path: unlock the local forc-wallet vault and let
            // the user pick an account.
            let password = prompt_forc_wallet_password()?;
            let wallet = select_local_wallet_account(&password, &provider).await?;
            Ok(wallet)
        }
        (Some(secret_key), true) => {
            // An explicit key takes precedence over the wallet flag.
            forc_tracing::println_warning(
                "Signing key is provided while requesting to use forc-wallet. Using signing key...",
            );
            Ok(wallet_from_key(secret_key, provider))
        }
    }
}
// A parsed contract ABI together with a record of where it was loaded from.
#[derive(Debug, Clone)]
pub struct Abi {
    // Where the ABI came from (file, URL, or inline string).
    source: AbiSource,
    // The ABI in its original program form.
    program: ProgramABI,
    // The same ABI converted to the unified representation.
    unified: UnifiedProgramABI,
    // Unified type declarations keyed by type id.
    // TODO: required for vm interpreter step through
    // ↳ gh issue: https://github.com/FuelLabs/sway/issues/7197
    #[allow(dead_code)]
    type_lookup: HashMap<usize, UnifiedTypeDeclaration>,
}
impl Abi {
    /// Set the source of the ABI after creation, returning the updated value.
    pub fn with_source(self, source: AbiSource) -> Self {
        Self { source, ..self }
    }
}
impl FromStr for Abi {
    type Err = String;

    /// Parses ABI JSON, eagerly building the unified representation and a
    /// type-id lookup table alongside the original program form.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let program: ProgramABI =
            serde_json::from_str(s).map_err(|err| format!("failed to parse ABI: {err}"))?;
        let unified = UnifiedProgramABI::from_counterpart(&program)
            .map_err(|err| format!("conversion to unified ABI format failed: {err}"))?;
        // Index every unified type declaration by its id for fast lookup.
        let type_lookup: HashMap<_, _> = unified
            .types
            .iter()
            .map(|ty_decl| (ty_decl.type_id, ty_decl.clone()))
            .collect();
        Ok(Self {
            source: AbiSource::String(s.to_string()),
            program,
            unified,
            type_lookup,
        })
    }
}
/// Displays transaction information
pub(crate) fn display_tx_info(
tx_hash: String,
result: Option<String>,
mode: &cmd::call::ExecutionMode,
node: &crate::NodeTarget,
) {
// print tx hash and result
forc_tracing::println_label_green("tx hash:", &tx_hash);
if let Some(ref result) = result {
forc_tracing::println_label_green("result:", result);
}
// display transaction url if live mode
if *mode == cmd::call::ExecutionMode::Live {
if let Some(explorer_url) = node.get_explorer_url() {
forc_tracing::println_label_green(
"\nView transaction:",
&format!("{explorer_url}/tx/0x{tx_hash}\n"),
);
}
}
}
/// Prints receipts and trace to the writer based on verbosity level
pub(crate) fn display_detailed_call_info(
tx: &TransactionExecutionStatus,
script: &fuel_tx::Script,
abis: &HashMap<ContractId, Abi>,
verbosity: u8,
writer: &mut impl std::io::Write,
trace_events: &[trace::TraceEvent],
labels: &HashMap<ContractId, String>,
) -> Result<()> {
if verbosity >= 4 {
forc_tracing::println_label_green(
"transaction script:\n",
&serde_json::to_string_pretty(script).unwrap(),
);
}
if verbosity >= 3 {
let formatted_receipts =
forc_util::tx_utils::format_log_receipts(tx.result.receipts(), true)
.map_err(|e| anyhow!("Failed to format receipts: {}", e))?;
forc_tracing::println_label_green("receipts:", &formatted_receipts);
}
if verbosity >= 2 {
trace::display_transaction_trace(*tx.result.total_gas(), trace_events, labels, writer)
.map_err(|e| anyhow!("Failed to display transaction trace: {e}"))?;
}
if verbosity >= 1 {
let logs = tx
.result
.receipts()
.iter()
.filter_map(|receipt| match receipt {
Receipt::LogData {
id,
rb,
data: Some(data),
..
} => {
let default_program_abi = ProgramABI::default();
let program_abi = abis
.get(id)
.map(|abi| &abi.program)
.unwrap_or(&default_program_abi);
forc_util::tx_utils::decode_fuel_vm_log_data(&rb.to_string(), data, program_abi)
.ok()
.map(|decoded| decoded.value)
}
_ => None,
})
.collect::<Vec<_>>();
// print logs if there are any
if !logs.is_empty() {
forc_tracing::println_green_bold("logs:");
for log in logs.iter() {
writeln!(writer, " {log:#}")?;
}
}
}
Ok(())
}
/// Create a HashMap of contract ABIs from a main ABI and optional additional contract ABIs
/// This is a reusable function for both call_function and list_functions operations
pub async fn create_abi_map(
main_contract_id: ContractId,
main_abi: &AbiSource,
additional_contract_abis: Option<Vec<(ContractId, AbiSource)>>,
) -> anyhow::Result<HashMap<ContractId, Abi>> {
// Load main ABI
let main_abi_str = load_abi(main_abi).await?;
let main_abi = Abi::from_str(&main_abi_str)
.map_err(|e| anyhow!("Failed to parse main ABI: {}", e))?
.with_source(main_abi.clone());
// Start with main contract ABI
let mut abi_map = HashMap::from([(main_contract_id, main_abi)]);
// Load additional contract ABIs if provided
if let Some(contract_abis) = additional_contract_abis {
for (contract_id, abi_path) in contract_abis {
match load_abi(&abi_path).await {
Ok(abi_str) => match Abi::from_str(&abi_str) {
Ok(additional_abi) => {
abi_map.insert(contract_id, additional_abi.with_source(abi_path.clone()));
forc_tracing::println_action_green(
"Loaded additional ABI for contract",
&format!("0x{contract_id}"),
);
}
Err(e) => {
forc_tracing::println_warning(&format!(
"Failed to parse ABI for contract 0x{contract_id}: {e}"
));
}
},
Err(e) => {
forc_tracing::println_warning(&format!(
"Failed to load ABI for contract 0x{contract_id}: {e}"
));
}
}
}
}
Ok(abi_map)
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use fuels::prelude::*;
abigen!(Contract(
name = "TestContract",
abi = "forc-plugins/forc-client/test/data/contract_with_types/contract_with_types-abi.json"
));
pub async fn get_contract_instance() -> (TestContract<Wallet>, ContractId, Provider, SecretKey)
{
let secret_key = SecretKey::from_str(DEFAULT_PRIVATE_KEY).unwrap();
let signer = PrivateKeySigner::new(secret_key);
let coins = setup_single_asset_coins(signer.address(), AssetId::zeroed(), 1, 1_000_000);
let provider = setup_test_provider(coins, vec![], None, None)
.await
.unwrap();
let wallet = get_wallet(Some(secret_key), false, provider.clone())
.await
.unwrap();
let id = Contract::load_from(
"../../forc-plugins/forc-client/test/data/contract_with_types/contract_with_types.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = TestContract::new(id, wallet.clone());
(instance, id, provider, secret_key)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/missing_contracts.rs | forc-plugins/forc-client/src/op/call/missing_contracts.rs | use anyhow::{bail, Result};
use fuel_tx::{ConsensusParameters, ContractId};
use fuels::programs::calls::{
traits::TransactionTuner, utils::find_ids_of_missing_contracts, ContractCall,
};
use fuels_accounts::{
provider::Provider,
signers::private_key::PrivateKeySigner,
wallet::{Unlocked, Wallet},
};
use fuels_core::types::{transaction::TxPolicies, transaction_builders::VariableOutputPolicy};
/// Get the missing contracts from a contract call by dry-running the transaction
/// to find contracts that are not explicitly listed in the call's `external_contracts` field.
/// Note: This function is derived from `determine_missing_contracts` in `fuels-rs`
pub async fn determine_missing_contracts(
call: &ContractCall,
provider: &Provider,
tx_policies: &TxPolicies,
variable_output_policy: &VariableOutputPolicy,
consensus_params: &ConsensusParameters,
log_decoder: &fuels_core::codec::LogDecoder,
account: &Wallet<Unlocked<PrivateKeySigner>>,
) -> Result<Vec<ContractId>> {
let tb = call
.transaction_builder(
*tx_policies,
*variable_output_policy,
consensus_params,
call.inputs.clone(),
account,
)
.expect("Failed to initialize transaction builder");
let tx = call
.build_tx(tb, account)
.await
.expect("Failed to build transaction");
match provider
.dry_run(tx)
.await?
.take_receipts_checked(Some(log_decoder))
{
Ok(_) => Ok(vec![]),
Err(fuels_core::types::errors::Error::Transaction(
fuels::types::errors::transaction::Reason::Failure { receipts, .. },
)) => {
let missing_contracts = find_ids_of_missing_contracts(&receipts);
Ok(missing_contracts)
}
Err(err) => bail!(err),
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/trace/storage.rs | forc-plugins/forc-client/src/op/call/trace/storage.rs | use fuel_core_storage::column::Column;
use fuel_core_types::{services::executor::StorageReadReplayEvent, tai64::Tai64};
use fuel_vm::{
error::{InterpreterError, RuntimeError},
fuel_storage::{StorageRead, StorageSize, StorageWrite},
fuel_types::BlockHeight,
prelude::*,
storage::{
BlobData, ContractsAssetKey, ContractsAssets, ContractsAssetsStorage, ContractsRawCode,
ContractsState, ContractsStateData, ContractsStateKey, InterpreterStorage,
UploadedBytecodes,
},
};
use fuels_core::types::U256;
use std::{cell::RefCell, collections::HashMap};
type InnerStorage = HashMap<Column, HashMap<Vec<u8>, Option<Vec<u8>>>>;
#[derive(Clone)]
pub struct ShallowStorage {
pub block_height: BlockHeight,
pub timestamp: Tai64,
pub consensus_parameters_version: u32,
pub state_transition_version: u32,
pub coinbase: fuel_vm::prelude::ContractId,
pub storage: RefCell<InnerStorage>,
}
impl ShallowStorage {
pub fn initial_storage(reads: Vec<StorageReadReplayEvent>) -> InnerStorage {
let mut storage: InnerStorage = HashMap::new();
for read in reads {
let column = Column::try_from(read.column).expect("Invalid column id in read event");
storage
.entry(column)
.or_default()
.insert(read.key, read.value);
}
storage
}
fn value_of_column(&self, column: Column, key: Vec<u8>) -> Option<Vec<u8>> {
self.storage.borrow().get(&column)?.get(&key)?.clone()
}
fn replace_column(
&self,
column: Column,
key: Vec<u8>,
value: Option<Vec<u8>>,
) -> Option<Vec<u8>> {
self.storage
.borrow_mut()
.entry(column)
.or_default()
.insert(key.clone(), value)?
}
}
macro_rules! storage_rw {
($vm_type:ident, $convert_key:expr, $convert_value:expr, $convert_value_back:expr $(,)?) => {
storage_rw!(
$vm_type = $vm_type,
$convert_key,
$convert_value,
$convert_value_back
);
};
($vm_type:ident = $core_column:ident, $convert_key:expr, $convert_value:expr, $convert_value_back:expr $(,)?) => {
impl StorageSize<$vm_type> for ShallowStorage {
fn size_of_value(
&self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
) -> Result<Option<usize>, Self::Error> {
tracing::debug!(
"{:?} size_of_value {}",
stringify!($core_column),
hex::encode(&$convert_key(key))
);
let head = self.value_of_column(Column::$core_column, $convert_key(key));
Ok(head.map(|v| v.len()))
}
}
impl StorageInspect<$vm_type> for ShallowStorage {
type Error = Error;
fn get(
&self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
) -> Result<
Option<std::borrow::Cow<<$vm_type as fuel_vm::fuel_storage::Mappable>::OwnedValue>>,
Self::Error,
> {
tracing::debug!(
"{} get {}",
stringify!($core_column),
hex::encode(&$convert_key(key))
);
let head = self.value_of_column(Column::$core_column, $convert_key(key));
Ok(head.map($convert_value).map(std::borrow::Cow::Owned))
}
fn contains_key(
&self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
) -> Result<bool, Self::Error> {
tracing::debug!(
"{} contains_key {}",
stringify!($core_column),
hex::encode(&$convert_key(key))
);
let head = self.value_of_column(Column::$core_column, $convert_key(key));
Ok(head.is_some())
}
}
impl StorageRead<$vm_type> for ShallowStorage {
fn read(
&self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
offset: usize,
buf: &mut [u8],
) -> Result<bool, Self::Error> {
tracing::debug!(
"{} read {}",
stringify!($core_column),
hex::encode(&$convert_key(key)),
);
let head = self.value_of_column(Column::$core_column, $convert_key(key));
let Some(value) = head else {
return Ok(false);
};
if offset > value.len() || offset.saturating_add(buf.len()) > value.len() {
return Err(Error::CannotRead);
}
buf.copy_from_slice(&value[offset..][..buf.len()]);
Ok(true)
}
fn read_alloc(
&self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
) -> Result<Option<Vec<u8>>, Self::Error> {
todo!(
"{} read_alloc {}",
stringify!($core_column),
hex::encode(&$convert_key(key))
)
}
}
impl StorageMutate<$vm_type> for ShallowStorage {
fn replace(
&mut self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
value: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Value,
) -> Result<
Option<<$vm_type as fuel_vm::fuel_storage::Mappable>::OwnedValue>,
Self::Error,
> {
tracing::debug!(
"{} replace {} (value={value:?})",
stringify!($core_column),
hex::encode(&$convert_key(key))
);
Ok(self
.replace_column(
Column::$core_column,
$convert_key(key),
Some($convert_value_back(value)),
)
.map($convert_value))
}
fn take(
&mut self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
) -> Result<
Option<<$vm_type as fuel_vm::fuel_storage::Mappable>::OwnedValue>,
Self::Error,
> {
tracing::debug!(
"{} take {}",
stringify!($core_column),
hex::encode(&$convert_key(key))
);
Ok(self
.replace_column(Column::$core_column, $convert_key(key), None)
.map($convert_value))
}
}
impl StorageWrite<$vm_type> for ShallowStorage {
fn write_bytes(
&mut self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
_buf: &[u8],
) -> Result<(), Self::Error> {
todo!("write_bytes {key:?}")
}
fn replace_bytes(
&mut self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
_buf: &[u8],
) -> Result<Option<Vec<u8>>, Self::Error> {
tracing::debug!("{} replace_bytes {key:?}", stringify!($core_column));
Ok(self.replace_column(Column::$core_column, $convert_key(key), None))
}
fn take_bytes(
&mut self,
key: &<$vm_type as fuel_vm::fuel_storage::Mappable>::Key,
) -> Result<Option<Vec<u8>>, Self::Error> {
todo!("take_bytes {key:?}")
}
}
};
}
storage_rw!(
ContractsRawCode,
|key: &ContractId| -> Vec<u8> { (**key).to_vec() },
|data| todo!("ContractsRawCode from bytes {data:?}"),
|data| -> Vec<u8> { todo!("ContractsRawCode to bytes {data:?}") },
);
storage_rw!(
ContractsState,
|key: &ContractsStateKey| -> Vec<u8> { key.as_ref().into() },
|data| { ContractsStateData(data.into()) },
|data: &[u8]| -> Vec<u8> { data.to_vec() },
);
storage_rw!(
ContractsAssets,
|key: &ContractsAssetKey| -> Vec<u8> { key.as_ref().into() },
|data| {
assert_eq!(data.len(), 8);
let mut buffer = [0u8; 8];
buffer.copy_from_slice(&data);
u64::from_be_bytes(buffer)
},
|data: &u64| -> Vec<u8> { data.to_be_bytes().to_vec() },
);
storage_rw!(
UploadedBytecodes,
|key: &Bytes32| -> Vec<u8> { key.as_ref().into() },
|data| todo!("UploadedBytecodes from bytes {data:?}"),
|data| -> Vec<u8> { todo!("UploadedBytecodes to bytes {data:?}") },
);
storage_rw!(
BlobData = Blobs,
|key: &BlobId| -> Vec<u8> { key.as_ref().into() },
|data| todo!("BlobData from bytes {data:?}"),
|data| -> Vec<u8> { todo!("BlobData to bytes {data:?}") },
);
impl ContractsAssetsStorage for ShallowStorage {}
#[derive(Debug)]
pub enum Error {
/// This block couldn't have been included
InvalidBlock,
/// The requested key is out of the available keyspace
KeyspaceOverflow,
/// Read offset too large, or buffer too small
CannotRead,
}
impl From<Error> for RuntimeError<Error> {
fn from(e: Error) -> Self {
RuntimeError::Storage(e)
}
}
impl From<Error> for InterpreterError<Error> {
fn from(e: Error) -> Self {
InterpreterError::Storage(e)
}
}
impl InterpreterStorage for ShallowStorage {
type DataError = Error;
fn block_height(&self) -> Result<BlockHeight, Self::DataError> {
Ok(self.block_height)
}
fn consensus_parameters_version(&self) -> Result<u32, Self::DataError> {
Ok(self.consensus_parameters_version)
}
fn state_transition_version(&self) -> Result<u32, Self::DataError> {
Ok(self.state_transition_version)
}
fn timestamp(
&self,
height: fuel_vm::fuel_types::BlockHeight,
) -> Result<fuel_vm::prelude::Word, Self::DataError> {
match height {
height if height > self.block_height => Err(Error::InvalidBlock),
height if height == self.block_height => Ok(self.timestamp.0),
height => {
todo!("timestamp {height:?}");
}
}
}
fn block_hash(
&self,
block_height: fuel_vm::fuel_types::BlockHeight,
) -> Result<fuel_vm::prelude::Bytes32, Self::DataError> {
// Block header hashes for blocks with height greater than or equal to current block height are zero (0x00**32).
// https://docs.fuel.network/docs/specs/fuel-vm/instruction-set/#bhsh-block-hash
if block_height >= self.block_height || block_height == Default::default() {
Ok(Bytes32::zeroed())
} else {
todo!("block_hash {block_height:?}");
}
}
fn coinbase(&self) -> Result<fuel_vm::prelude::ContractId, Self::DataError> {
Ok(self.coinbase)
}
fn set_consensus_parameters(
&mut self,
_version: u32,
_consensus_parameters: &fuel_vm::prelude::ConsensusParameters,
) -> Result<Option<fuel_vm::prelude::ConsensusParameters>, Self::DataError> {
unreachable!("Cannot be called by a script");
}
fn set_state_transition_bytecode(
&mut self,
_version: u32,
_hash: &fuel_vm::prelude::Bytes32,
) -> Result<Option<fuel_vm::prelude::Bytes32>, Self::DataError> {
unreachable!("Cannot be called by a script");
}
fn contract_state_range(
&self,
id: &fuel_vm::prelude::ContractId,
start_key: &fuel_vm::prelude::Bytes32,
range: usize,
) -> Result<Vec<Option<std::borrow::Cow<fuel_vm::storage::ContractsStateData>>>, Self::DataError>
{
tracing::debug!("contract_state_range {id:?} {start_key:?} {range:?}");
let mut results = Vec::new();
let mut key = U256::from_big_endian(start_key.as_ref());
let mut key_buffer = Bytes32::zeroed();
for offset in 0..(range as u64) {
if offset != 0 {
key = key.checked_add(1.into()).ok_or(Error::KeyspaceOverflow)?;
}
key.to_big_endian(key_buffer.as_mut());
let state_key = ContractsStateKey::new(id, &key_buffer);
let value = self
.storage::<fuel_vm::storage::ContractsState>()
.get(&state_key)?;
results.push(value);
}
Ok(results)
}
fn contract_state_insert_range<'a, I>(
&mut self,
contract: &fuel_vm::prelude::ContractId,
start_key: &fuel_vm::prelude::Bytes32,
values: I,
) -> Result<usize, Self::DataError>
where
I: Iterator<Item = &'a [u8]>,
{
tracing::debug!("contract_state_insert_range {contract:?} {start_key:?}");
let values: Vec<_> = values.collect();
let mut key = U256::from_big_endian(start_key.as_ref());
let mut key_buffer = Bytes32::zeroed();
let mut found_unset = 0u32;
for (idx, value) in values.iter().enumerate() {
if idx != 0 {
key = key.checked_add(1.into()).ok_or(Error::KeyspaceOverflow)?;
}
key.to_big_endian(key_buffer.as_mut());
let option = self.storage::<ContractsState>().replace(
&(contract, Bytes32::from_bytes_ref(&key_buffer)).into(),
value,
)?;
if option.is_none() {
found_unset += 1;
}
}
Ok(found_unset as usize)
}
fn contract_state_remove_range(
&mut self,
contract: &fuel_vm::prelude::ContractId,
start_key: &fuel_vm::prelude::Bytes32,
range: usize,
) -> Result<Option<()>, Self::DataError> {
tracing::debug!("contract_state_remove_range {contract:?} {start_key:?}");
let mut key = U256::from_big_endian(start_key.as_ref());
let mut key_buffer = Bytes32::zeroed();
let mut found_unset = false;
for idx in 0..range {
if idx != 0 {
key = key.checked_add(1.into()).ok_or(Error::KeyspaceOverflow)?;
}
key.to_big_endian(key_buffer.as_mut());
let option = self
.storage::<ContractsState>()
.take(&(contract, Bytes32::from_bytes_ref(&key_buffer)).into())?;
if option.is_none() {
found_unset = true;
}
}
Ok(if found_unset { None } else { Some(()) })
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/call/trace/mod.rs | forc-plugins/forc-client/src/op/call/trace/mod.rs | pub mod storage;
use crate::{cmd, op::call::Abi};
use ansiterm::Color;
use anyhow::{anyhow, Result};
use fuel_abi_types::{
abi::program::PanickingCall,
revert_info::{RevertInfo, RevertKind},
};
use fuel_core_types::tai64::Tai64;
use fuel_tx::Receipt;
use fuel_vm::{
fuel_asm::Word,
fuel_types::BlockHeight,
interpreter::{Interpreter, InterpreterParams, MemoryInstance},
prelude::*,
state::ProgramState,
};
use fuels::types::Token;
use fuels_core::{
codec::{ABIDecoder, DecoderConfig},
types::{param_types::ParamType, ContractId},
};
use std::{collections::HashMap, io::Read};
use storage::ShallowStorage;
/// A reader for VM memory that implements the necessary traits for ABI decoding
#[derive(Clone)]
pub struct MemoryReader<'a> {
mem: &'a MemoryInstance,
at: Word,
}
impl<'a> MemoryReader<'a> {
pub fn new(mem: &'a MemoryInstance, at: Word) -> Self {
Self { mem, at }
}
}
impl Read for MemoryReader<'_> {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
let at = self.at;
self.at += buf.len() as Word;
buf.copy_from_slice(
self.mem
.read(at, buf.len())
.map_err(|_err| std::io::Error::other("Inaccessible memory"))?,
);
Ok(buf.len())
}
}
/// Interprets execution trace by stepping through VM execution until call receipts are encountered
pub async fn interpret_execution_trace(
provider: &fuels::accounts::provider::Provider,
mode: &cmd::call::ExecutionMode,
consensus_params: &ConsensusParameters,
script: &fuel_tx::Script,
receipts: &[Receipt],
storage_reads: Vec<fuel_core_types::services::executor::StorageReadReplayEvent>,
abis: &HashMap<ContractId, Abi>,
) -> Result<Vec<TraceEvent>> {
let mut tracer = CallRetTracer::new(abis);
let block_height: BlockHeight = (provider.latest_block_height().await?).into();
let gas_price = provider.latest_gas_price().await?;
let block = provider
.block_by_height(block_height)
.await?
.ok_or(anyhow!("Block not found"))?;
// Create shallow storage with empty initial storage reads
let storage = ShallowStorage {
block_height,
timestamp: Tai64::from_unix(
block
.header
.time
.ok_or(anyhow!("Block time not found"))?
.timestamp(),
),
consensus_parameters_version: block.header.consensus_parameters_version,
state_transition_version: block.header.state_transition_bytecode_version,
coinbase: Default::default(), // TODO: get from tx
storage: std::cell::RefCell::new(ShallowStorage::initial_storage(storage_reads)),
};
let script_tx = script
.clone()
.into_checked_basic(block_height, consensus_params)
.map_err(|err| anyhow!("Failed to check transaction: {err:?}"))?
.into_ready(
gas_price.gas_price,
consensus_params.gas_costs(),
consensus_params.fee_params(),
None,
)
.map_err(|err| anyhow!("Failed to check transaction: {err:?}"))?;
let mut vm = Interpreter::<_, _, Script>::with_storage(
MemoryInstance::new(),
storage.clone(),
InterpreterParams::new(gas_price.gas_price, consensus_params),
);
vm.set_single_stepping(true);
let mut t = *vm
.transact(script_tx)
.map_err(|e| anyhow!("Failed to transact in trace interpreter: {e:?}"))?
.state();
loop {
tracer.process_vm_state(&vm)?;
match t {
ProgramState::Return(_) | ProgramState::ReturnData(_) | ProgramState::Revert(_) => {
break
}
ProgramState::RunProgram(_) | ProgramState::VerifyPredicate(_) => {
t = vm
.resume()
.map_err(|e| anyhow!("Failed to resume VM in trace interpreter: {e:?}"))?;
}
}
}
if vm.receipts() != receipts {
match mode {
cmd::call::ExecutionMode::Live => return Err(anyhow!("Receipts mismatch")),
_ => forc_tracing::println_warning(
"Receipts mismatch; this is expected for non-live mode",
),
}
}
Ok(tracer.into_events())
}
#[allow(dead_code)]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub enum TraceEvent {
Call {
/// Which receipt this call corresponds to.
index: usize,
/// Method being called.
method: String,
/// Arguments being passed to the method.
arguments: Option<Vec<String>>,
/// Contract being called
to: ContractId,
/// Amount being transferred
amount: u64,
/// Gas for the call
gas: u64,
},
Return {
index: usize,
/// Contract that returned
id: ContractId,
/// Return value (raw)
val: u64,
},
ReturnData {
index: usize,
/// Contract that returned data
id: ContractId,
/// Return data; decoded if ABI provided, otherwise hex encoded
data: String,
},
Panic {
index: usize,
/// Contract that panicked
id: ContractId,
/// Panic reason
reason: String,
/// Contract ID associated with the panic, if any
contract_id: Option<ContractId>,
},
Revert {
index: usize,
/// Contract that reverted
id: ContractId,
/// Revert value
ra: u64,
revert_info: Option<RevertInfoSummary>,
},
Log {
index: usize,
/// Contract that logged
id: ContractId,
/// Log values
ra: u64,
rb: u64,
rc: u64,
rd: u64,
},
LogData {
index: usize,
/// Contract that logged data
id: ContractId,
/// Decoded log data value
value: Option<String>,
/// Data length
len: u64,
},
Transfer {
index: usize,
/// Source contract
id: ContractId,
/// Destination (either contract or address)
to: String,
/// Amount transferred
amount: u64,
/// Asset ID
asset_id: String,
},
ScriptResult {
index: usize,
/// Script execution result
result: ScriptExecutionResult,
/// Gas used
gas_used: u64,
},
MessageOut {
index: usize,
/// Sender address
sender: String,
/// Recipient address
recipient: String,
/// Nonce
nonce: u64,
/// Digest
digest: String,
/// Amount
amount: u64,
/// Message data (hex encoded)
data: Option<String>,
},
Mint {
index: usize,
/// Contract that minted
contract_id: ContractId,
/// Sub asset ID
asset_id: String,
/// Amount minted
val: u64,
},
Burn {
index: usize,
/// Contract that burned
contract_id: ContractId,
/// Sub asset ID
asset_id: String,
/// Amount burned
val: u64,
},
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PanicLocation {
pub function: String,
pub pkg: String,
pub file: String,
pub line: u64,
pub column: u64,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct RevertInfoSummary {
pub revert_code: u64,
pub message: Option<String>,
pub value: Option<String>,
pub location: Option<PanicLocation>,
#[serde(skip_serializing_if = "Vec::is_empty")]
pub backtrace: Vec<PanickingCall>,
pub is_known_error: bool,
pub is_raw: bool,
}
impl From<RevertInfo> for RevertInfoSummary {
fn from(info: RevertInfo) -> Self {
match info.kind {
RevertKind::RawRevert => Self {
revert_code: info.revert_code,
message: None,
value: None,
location: None,
backtrace: vec![],
is_known_error: false,
is_raw: true,
},
RevertKind::KnownErrorSignal { err_msg } => Self {
revert_code: info.revert_code,
message: Some(err_msg),
value: None,
location: None,
backtrace: vec![],
is_known_error: true,
is_raw: false,
},
RevertKind::Panic {
err_msg,
err_val,
pos,
backtrace,
} => Self {
revert_code: info.revert_code,
message: err_msg,
value: err_val,
location: Some(PanicLocation {
function: pos.function,
pkg: pos.pkg,
file: pos.file,
line: pos.line,
column: pos.column,
}),
backtrace,
is_known_error: false,
is_raw: false,
},
}
}
}
pub fn first_revert_info(events: &[TraceEvent]) -> Option<(ContractId, RevertInfoSummary)> {
events.iter().find_map(|e| {
if let TraceEvent::Revert {
id,
revert_info: Some(info),
..
} = e
{
Some((*id, info.clone()))
} else {
None
}
})
}
fn decode_revert_info(
receipts: &[Receipt],
abis: &HashMap<ContractId, Abi>,
contract_id: ContractId,
revert_code: u64,
) -> Option<RevertInfoSummary> {
let program_abi = abis.get(&contract_id).map(|abi| &abi.program);
let info =
forc_util::tx_utils::revert_info_from_receipts(receipts, program_abi, Some(revert_code))?;
if info.revert_code != revert_code {
return None;
}
Some(RevertInfoSummary::from(info))
}
/// Format transaction trace events into a hierarchical trace visualization.
/// This function processes trace events sequentially and displays them with proper indentation
/// based on call depth, similar to the original format_transaction_trace function.
pub fn display_transaction_trace<W: std::io::Write>(
total_gas: u64,
trace_events: &[TraceEvent],
labels: &HashMap<ContractId, String>,
writer: &mut W,
) -> Result<()> {
let format_contract_with_label =
|contract_id: ContractId, labels: &HashMap<ContractId, String>| -> String {
if let Some(label) = labels.get(&contract_id) {
label.to_string()
} else {
format!("0x{contract_id}")
}
};
writeln!(writer, "Traces:")?;
writeln!(writer, " [Script]")?;
let mut depth = 0;
for event in trace_events {
let indent = if depth > 0 {
" │".repeat(depth)
} else {
"".to_string()
};
match event {
TraceEvent::Call {
to,
gas,
method,
arguments,
..
} => {
writeln!(
writer,
"{} ├─ [{}] {}{}{}({})",
indent,
gas,
Color::Green.paint(format_contract_with_label(*to, labels)),
Color::DarkGray.paint("::"),
method,
Color::DarkGray.paint(arguments.as_ref().unwrap_or(&vec![]).join(", "))
)?;
depth += 1;
}
TraceEvent::ReturnData { data, .. } => {
writeln!(
writer,
"{} └─ ← {}",
indent,
Color::BrightCyan.paint(data),
)?;
depth = depth.saturating_sub(1);
}
TraceEvent::Return { val, .. } => {
writeln!(writer, "{indent} └─ ← [Return] val: {val}")?;
depth = depth.saturating_sub(1);
}
TraceEvent::LogData { value, .. } => {
if let Some(log_value) = value {
writeln!(
writer,
"{} ├─ emit {}",
indent,
Color::BrightCyan.paint(log_value)
)?;
} else {
writeln!(writer, "{indent} ├─ emit ()")?;
}
}
TraceEvent::Revert { revert_info, .. } => {
writeln!(
writer,
"{} └─ ← {}",
indent,
Color::Red.paint("[Revert]")
)?;
depth = depth.saturating_sub(1);
if let Some(details) = revert_info {
write_revert_trace_details(writer, &indent, details)?;
}
}
TraceEvent::Panic { reason, .. } => {
writeln!(
writer,
"{} └─ ← {} {}",
indent,
Color::Red.paint("[Panic]"),
Color::Red.paint(reason)
)?;
depth = depth.saturating_sub(1);
}
TraceEvent::Transfer {
amount,
asset_id,
to,
..
} => {
writeln!(
writer,
"{indent} ├─ [Transfer] to:{to} asset_id:{asset_id} amount:{amount}"
)?;
}
TraceEvent::Mint { asset_id, val, .. } => {
writeln!(
writer,
"{indent} ├─ [Mint] asset_id:{asset_id} val:{val}"
)?;
}
TraceEvent::Burn { asset_id, val, .. } => {
writeln!(
writer,
"{indent} ├─ [Burn] asset_id:{asset_id} val:{val}"
)?;
}
TraceEvent::Log { rb, .. } => {
writeln!(writer, "{indent} ├─ [Log] rb: 0x{rb:x}")?;
}
TraceEvent::MessageOut {
amount,
recipient,
nonce,
digest,
data,
..
} => {
writeln!(
writer,
"{} ├─ [MessageOut] recipient:{} amount:{} nonce:{} digest:{} data:{}",
indent,
recipient,
amount,
nonce,
digest,
data.clone().unwrap_or("()".to_string())
)?;
}
TraceEvent::ScriptResult {
result, gas_used, ..
} => {
writeln!(
writer,
" [ScriptResult] result: {result:?}, gas_used: {gas_used}"
)?;
writeln!(writer)?;
match result {
ScriptExecutionResult::Success => writeln!(
writer,
"{}",
Color::Green.paint("Transaction successfully executed.")
)?,
_ => writeln!(writer, "{}", Color::Red.paint("Transaction failed."))?,
}
}
}
}
writeln!(writer, "Gas used: {total_gas}")?;
Ok(())
}
fn write_revert_trace_details<W: std::io::Write>(
writer: &mut W,
indent: &str,
details: &RevertInfoSummary,
) -> Result<()> {
let detail_prefix = format!("{indent} ");
let write_detail_line = |writer: &mut W, symbol: &str, text: String| -> Result<()> {
writeln!(
writer,
"{}{}",
detail_prefix,
Color::Red.paint(format!("{symbol} {text}"))
)
.map_err(Into::into)
};
let has_more_details = !details.is_raw;
write_detail_line(
writer,
if has_more_details { "├─" } else { "└─" },
format!("revert code: {:x}", details.revert_code),
)?;
if details.is_known_error {
if let Some(msg) = &details.message {
write_detail_line(writer, "└─", format!("error message: {msg}"))?;
}
} else if !details.is_raw {
if let Some(err_msg) = &details.message {
write_detail_line(writer, "├─", format!("panic message: {err_msg}"))?;
}
if let Some(err_val) = &details.value {
write_detail_line(writer, "├─", format!("panic value: {err_val}"))?;
}
if let Some(location) = &details.location {
let filtered_backtrace: Vec<_> = details
.backtrace
.iter()
.filter(|call| {
!call.pos.function.ends_with("::__entry") && call.pos.function != "__entry"
})
.collect();
let branch_symbol = if filtered_backtrace.is_empty() {
"└─"
} else {
"├─"
};
let location_prefix = if filtered_backtrace.is_empty() {
format!("{indent} ")
} else {
format!("{indent} {} ", Color::Red.paint("│"))
};
write_detail_line(
writer,
branch_symbol,
format!("panicked: in {}", location.function),
)?;
let loc_line = Color::Red.paint(format!(
" └─ at {}, {}:{}:{}",
location.pkg, location.file, location.line, location.column
));
writeln!(writer, "{}{}", location_prefix, loc_line)?;
if let Some((first, rest)) = filtered_backtrace.split_first() {
let line_prefix = format!("{indent} ");
write_backtrace_call(writer, &line_prefix, first, true)?;
for call in rest {
write_backtrace_call(writer, &line_prefix, call, false)?;
}
}
}
}
Ok(())
}
fn write_backtrace_call<W: std::io::Write>(
writer: &mut W,
indent_detail: &str,
call: &PanickingCall,
is_first: bool,
) -> Result<()> {
// Backtrace lines share the same indent; only the first includes the header.
let header_prefix = format!("{indent_detail}{}", Color::Red.paint("└─ backtrace: "));
if is_first {
writeln!(
writer,
"{header_prefix}{}",
Color::Red.paint(format!("called in {}", call.pos.function))
)?;
writeln!(
writer,
"{indent_detail} {}",
Color::Red.paint(format!(
"└─ at {}, {}:{}:{}",
call.pos.pkg, call.pos.file, call.pos.line, call.pos.column
))
)?;
} else {
writeln!(
writer,
"{indent_detail} {}",
Color::Red.paint(format!("called in {}", call.pos.function))
)?;
writeln!(
writer,
"{indent_detail} {}",
Color::Red.paint(format!(
"└─ at {}, {}:{}:{}",
call.pos.pkg, call.pos.file, call.pos.line, call.pos.column
))
)?;
};
Ok(())
}
pub type Vm =
Interpreter<MemoryInstance, ShallowStorage, Script, fuel_vm::interpreter::NotSupportedEcal>;
pub struct CallRetTracer<'a> {
abis: &'a HashMap<ContractId, Abi>,
return_type_callstack: Vec<StackFrame>,
events: Vec<TraceEvent>,
}
enum StackFrame {
KnownAbi(ParamType),
UnknownAbi,
}
impl<'a> CallRetTracer<'a> {
pub fn new(abis: &'a HashMap<ContractId, Abi>) -> Self {
Self {
abis,
return_type_callstack: Vec::new(),
events: Vec::new(),
}
}
pub fn process_vm_state(&mut self, vm: &Vm) -> Result<()> {
let start_index = self.events.len();
let decoder = ABIDecoder::new(DecoderConfig::default());
for (i, receipt) in vm.receipts().iter().enumerate().skip(start_index) {
let index = i + start_index;
let event = match receipt {
Receipt::Call {
to,
param1,
param2,
amount,
gas,
..
} => {
let method = match decoder
.decode(&ParamType::String, MemoryReader::new(vm.memory(), *param1))
{
Ok(Token::String(method)) => Some(method),
_ => None,
};
let arguments = if let Some((parameters, returns)) = method
.as_ref()
.and_then(|m| get_function_signature(self.abis.get(to)?, m.as_str()))
{
self.return_type_callstack
.push(StackFrame::KnownAbi(returns));
let args_reader = MemoryReader::new(vm.memory(), *param2);
decoder
.decode_multiple_as_debug_str(parameters.as_slice(), args_reader)
.ok()
} else {
self.return_type_callstack.push(StackFrame::UnknownAbi);
None
};
TraceEvent::Call {
index,
method: method.unwrap_or("unknown".to_string()),
arguments,
to: *to,
amount: *amount,
gas: *gas,
}
}
Receipt::Return { id, val, .. } => {
if !self.return_type_callstack.is_empty() {
let _ = self.return_type_callstack.pop().unwrap();
}
TraceEvent::Return {
index,
id: *id,
val: *val,
}
}
Receipt::ReturnData { id, ptr, data, .. } => {
let return_value = match self.return_type_callstack.pop() {
Some(StackFrame::KnownAbi(return_type)) => {
let reader = MemoryReader::new(vm.memory(), *ptr);
decoder
.decode_as_debug_str(&return_type, reader)
.unwrap_or_else(|_| match data {
Some(data) if !data.is_empty() => {
format!("0x{}", hex::encode(data))
}
_ => "()".to_string(),
})
}
Some(StackFrame::UnknownAbi) | None => match data {
// hex encode the data if available
Some(data) if !data.is_empty() => format!("0x{}", hex::encode(data)),
_ => "()".to_string(),
},
};
TraceEvent::ReturnData {
index,
data: return_value,
id: *id,
}
}
Receipt::Panic {
id,
reason,
contract_id,
..
} => TraceEvent::Panic {
index,
id: *id,
reason: format!("{:?}", reason.reason()),
contract_id: *contract_id,
},
Receipt::Revert { id, ra, .. } => TraceEvent::Revert {
index,
id: *id,
ra: *ra,
revert_info: decode_revert_info(vm.receipts(), self.abis, *id, *ra),
},
Receipt::Log {
id, ra, rb, rc, rd, ..
} => TraceEvent::Log {
index,
id: *id,
ra: *ra,
rb: *rb,
rc: *rc,
rd: *rd,
},
Receipt::LogData {
id, rb, len, data, ..
} => {
let data_str = match data {
Some(data) => {
let hex_str = format!("0x{}", hex::encode(data));
match self.abis.get(id) {
Some(abi) => {
let program_abi = sway_core::asm_generation::ProgramABI::Fuel(
abi.program.clone(),
);
forc_util::tx_utils::decode_log_data(
&rb.to_string(),
data,
&program_abi,
)
.ok()
.map(|decoded| decoded.value)
}
None => Some(hex_str),
}
}
None => None,
};
TraceEvent::LogData {
index,
value: data_str,
id: *id,
len: *len,
}
}
Receipt::Transfer {
id,
to,
amount,
asset_id,
..
} => TraceEvent::Transfer {
index,
id: *id,
to: format!("0x{to}"),
amount: *amount,
asset_id: format!("0x{asset_id}"),
},
Receipt::TransferOut {
id,
to,
amount,
asset_id,
..
} => TraceEvent::Transfer {
index,
id: *id,
to: format!("0x{to}"),
amount: *amount,
asset_id: format!("0x{asset_id}"),
},
Receipt::ScriptResult { result, gas_used } => TraceEvent::ScriptResult {
index,
result: *result,
gas_used: *gas_used,
},
Receipt::MessageOut {
sender,
recipient,
amount,
data,
..
} => {
let data_hex = data.as_ref().map(|d| format!("0x{}", hex::encode(d)));
TraceEvent::MessageOut {
index,
sender: format!("0x{sender}"),
recipient: format!("0x{recipient}"),
amount: *amount,
data: data_hex,
nonce: 0,
digest:
"0x0000000000000000000000000000000000000000000000000000000000000000"
.to_string(),
}
}
Receipt::Mint {
contract_id,
sub_id,
val,
..
} => TraceEvent::Mint {
index,
contract_id: *contract_id,
asset_id: format!("0x{sub_id}"),
val: *val,
},
Receipt::Burn {
contract_id,
sub_id,
val,
..
} => TraceEvent::Burn {
index,
contract_id: *contract_id,
asset_id: format!("0x{sub_id}"),
val: *val,
},
};
self.events.push(event);
}
Ok(())
}
    /// Consumes the tracer, yielding every event recorded so far.
    pub fn into_events(self) -> Vec<TraceEvent> {
        self.events
    }
}
/// Looks up `method` in the ABI and resolves its parameter and return types.
///
/// Returns `None` when the method is absent or any type fails to resolve.
fn get_function_signature(abi: &Abi, method: &str) -> Option<(Vec<ParamType>, ParamType)> {
    let func = abi.unified.functions.iter().find(|f| f.name == *method)?;
    let parameters = func
        .inputs
        .iter()
        .map(|input| ParamType::try_from_type_application(input, &abi.type_lookup).ok())
        .collect::<Option<Vec<_>>>()?;
    let returns = ParamType::try_from_type_application(&func.output, &abi.type_lookup).ok()?;
    Some((parameters, returns))
}
#[cfg(test)]
mod tests {
use super::*;
use fuel_tx::ScriptExecutionResult;
use fuels_core::types::ContractId;
use std::str::FromStr;
// Compare the results, ignoring whitespace differences and colors
fn normalize(s: &str) -> String {
// Remove ANSI color codes
let re = regex::Regex::new(r"\x1b\[[0-9;]*m").unwrap();
let s = re.replace_all(s, "");
s.split_whitespace().collect::<Vec<_>>().join(" ")
}
#[test]
fn writes_revert_details_without_backtrace() {
let summary = RevertInfoSummary {
revert_code: 0xbeef,
message: Some("boom".to_string()),
value: Some("Value".to_string()),
location: Some(PanicLocation {
function: "ctx::fn".to_string(),
pkg: "pkg".to_string(),
file: "file.sw".to_string(),
line: 1,
column: 1,
}),
backtrace: vec![],
is_known_error: false,
is_raw: false,
};
let mut buf = Vec::new();
write_revert_trace_details(&mut buf, "", &summary).unwrap();
let out = normalize(&String::from_utf8(buf).unwrap());
let expected = normalize(
r#"
├─ revert code: beef
├─ panic message: boom
├─ panic value: Value
└─ panicked: in ctx::fn
└─ at pkg, file.sw:1:1
"#,
);
assert_eq!(out, expected);
}
#[test]
fn writes_revert_details_with_backtrace() {
let summary = RevertInfoSummary {
revert_code: 0xdead,
message: None,
value: None,
location: Some(PanicLocation {
function: "ctx::fn".to_string(),
pkg: "pkg".to_string(),
file: "file.sw".to_string(),
line: 1,
column: 1,
}),
backtrace: vec![
PanickingCall {
pos: fuel_abi_types::abi::program::ErrorPosition {
pkg: "pkg".to_string(),
file: "file.sw".to_string(),
line: 2,
column: 3,
function: "caller_fn".to_string(),
},
function: "caller_fn".to_string(),
},
PanickingCall {
pos: fuel_abi_types::abi::program::ErrorPosition {
pkg: "pkg".to_string(),
file: "file.sw".to_string(),
line: 4,
column: 5,
function: "root_fn".to_string(),
},
function: "root_fn".to_string(),
},
],
is_known_error: false,
is_raw: false,
};
let mut buf = Vec::new();
write_revert_trace_details(&mut buf, "│ ", &summary).unwrap();
let out = normalize(&String::from_utf8(buf).unwrap());
let expected = normalize(
r#"
│ ├─ revert code: dead
│ ├─ panicked: in ctx::fn
│ │ └─ at pkg, file.sw:1:1
│ └─ backtrace: called in caller_fn
│ └─ at pkg, file.sw:2:3
│ called in root_fn
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/run/encode.rs | forc-plugins/forc-client/src/op/run/encode.rs | use crate::util::encode::{Token, Type};
use fuel_abi_types::abi::full_program::FullProgramABI;
use fuels_core::codec::{ABIEncoder, EncoderConfig};
/// Encoder for calls to a script's `main` entry point.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ScriptCallHandler {
    // Argument types of the script's `main` function, in declaration order.
    main_arg_types: Vec<Type>,
}
impl ScriptCallHandler {
const MAIN_KEYWORD: &'static str = "main";
/// Generate a new call handler for calling script main function from the json abi.
///
/// Provide json abi is used for determining the argument types, this is required as the data
/// encoding is requiring the type of the data.
pub(crate) fn from_json_abi_str(json_abi_str: &str) -> anyhow::Result<Self> {
let full_abi = FullProgramABI::from_json_abi(json_abi_str)?;
// Note: using .expect() here is safe since a script without a main function is a compile
// error and the fact that we have the json abi of the built script suggests that this is a
// valid script.
let main_function = full_abi
.functions
.iter()
.find(|abi_func| abi_func.name() == Self::MAIN_KEYWORD)
.expect("every valid script needs to have a main function");
let main_arg_types = main_function
.inputs()
.iter()
.map(Type::try_from)
.collect::<anyhow::Result<Vec<_>>>()?;
Ok(Self { main_arg_types })
}
/// Encode the provided values with script's main argument types.
///
/// Returns an error if the provided value count does not match the number of arguments.
pub(crate) fn encode_arguments(&self, values: &[&str]) -> anyhow::Result<Vec<u8>> {
let main_arg_types = &self.main_arg_types;
let expected_arg_count = main_arg_types.len();
let provided_arg_count = values.len();
if expected_arg_count != provided_arg_count {
anyhow::bail!(
"main function takes {expected_arg_count} arguments, {provided_arg_count} provided"
);
}
let tokens = main_arg_types
.iter()
.zip(values.iter())
.map(|(ty, val)| Token::from_type_and_value(ty, val).map(|token| token.0))
.collect::<anyhow::Result<Vec<_>>>()?;
let abi_encoder = ABIEncoder::new(EncoderConfig::default());
Ok(abi_encoder.encode(tokens.as_slice())?)
}
}
#[cfg(test)]
mod tests {
    use super::{ScriptCallHandler, Type};
    // Spec-v1 ABI describing `fn main(test_u8: u8, test_bool: bool)`.
    const TEST_JSON_ABI: &str = r#"{"programType": "contract","specVersion": "1.1","encodingVersion": "1","metadataTypes":[],
    "concreteTypes":[{"concreteTypeId":"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d",
    "type":"()"},{"concreteTypeId":"b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903","type":"bool"},
    {"concreteTypeId":"c89951a24c6ca28c13fd1cfdc646b2b656d69e61a92b91023be7eb58eb914b6b","type":"u8"}],
    "functions":[{"inputs":[{"name":"test_u8","concreteTypeId":"c89951a24c6ca28c13fd1cfdc646b2b656d69e61a92b91023be7eb58eb914b6b"},
    {"name":"test_bool","concreteTypeId":"b760f44fa5965c2474a3b471467a22c43185152129295af588b022ae50b50903"}],"name":"main",
    "output":"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d"}],"loggedTypes":[],
    "messagesTypes":[],"configurables":[]}"#;
    #[test]
    fn test_script_call_handler_generation_success() {
        let generated_call_handler = ScriptCallHandler::from_json_abi_str(TEST_JSON_ABI).unwrap();
        let expected_call_handler = ScriptCallHandler {
            main_arg_types: vec![Type::U8, Type::Bool],
        };
        assert_eq!(generated_call_handler, expected_call_handler);
    }
    #[test]
    #[should_panic]
    fn test_script_call_handler_generation_fail_missing_main() {
        // NOTE(review): this ABI uses the legacy `types` layout, so the panic
        // may come from ABI parsing rather than the missing-main `expect` —
        // worth confirming the failure mode being tested.
        let test_json_abi =
            r#"{"types":[],"functions":[],"loggedTypes":[],"messagesTypes":[],"configurables":[]}"#;
        ScriptCallHandler::from_json_abi_str(test_json_abi).unwrap();
    }
    #[test]
    fn test_main_encoding_success() {
        let call_handler = ScriptCallHandler::from_json_abi_str(TEST_JSON_ABI).unwrap();
        let values = ["2", "true"];
        let encoded_bytes = call_handler.encode_arguments(&values).unwrap();
        // Under v1 encoding a u8 and a bool each occupy one byte.
        let expected_bytes = vec![2u8, 1u8];
        assert_eq!(encoded_bytes, expected_bytes);
    }
    #[test]
    #[should_panic]
    fn test_main_encoding_fail_arg_type_mismatch() {
        let call_handler = ScriptCallHandler::from_json_abi_str(TEST_JSON_ABI).unwrap();
        // The abi describes the following main function:
        // - fn main(test_u8: u8, test_bool: bool)
        // Providing a bool to u8 field should return an error.
        let values = ["true", "2"];
        call_handler.encode_arguments(&values).unwrap();
    }
    #[test]
    #[should_panic(expected = "main function takes 2 arguments, 1 provided")]
    fn test_main_encoding_fail_arg_count_mismatch() {
        let call_handler = ScriptCallHandler::from_json_abi_str(TEST_JSON_ABI).unwrap();
        // The abi describes the following main function:
        // - fn main(test_u8: u8, test_bool: bool)
        // Providing only 1 value should return an error as function requires 2 args.
        let values = ["true"];
        call_handler.encode_arguments(&values).unwrap();
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/src/op/run/mod.rs | forc-plugins/forc-client/src/op/run/mod.rs | mod encode;
use crate::{
cmd,
constants::TX_SUBMIT_TIMEOUT_MS,
util::{
pkg::built_pkgs,
tx::{prompt_forc_wallet_password, select_account, SignerSelectionMode},
},
};
use anyhow::{anyhow, bail, Context, Result};
use forc::cli::shared::IrCliOpt;
use forc_pkg::{self as pkg, fuel_core_not_running, DumpOpts, PackageManifestFile};
use forc_tracing::println_warning;
use forc_util::tx_utils::format_log_receipts;
use fuel_abi_types::abi::program::ProgramABI;
use fuel_core_client::client::FuelClient;
use fuel_tx::{ContractId, Transaction};
use fuels::{
programs::calls::{traits::TransactionTuner, ScriptCall},
types::{
transaction::TxPolicies,
transaction_builders::{BuildableTransaction, VariableOutputPolicy},
},
};
use fuels_accounts::{provider::Provider, Account, ViewOnlyAccount};
use pkg::BuiltPackage;
use std::time::Duration;
use std::{path::PathBuf, str::FromStr};
use sway_core::BuildTarget;
use sway_core::{language::parsed::TreeType, IrCli};
use tokio::time::timeout;
use tracing::info;
use self::encode::ScriptCallHandler;
/// Result of executing one script: the transaction receipts it produced.
pub struct RanScript {
    pub receipts: Vec<fuel_tx::Receipt>,
}
/// Builds and runs script(s). If given path corresponds to a workspace, all runnable members will
/// be built and deployed.
///
/// Upon success, returns the receipts of each script in the order they are executed.
///
/// When running a single script, only that script's receipts are returned.
pub async fn run(command: cmd::Run) -> Result<Vec<RanScript>> {
    let mut command = command;
    // `--unsigned` is deprecated; treat it as `--default-signer`.
    if command.unsigned {
        println_warning("--unsigned flag is deprecated, please prefer using --default-signer. Assuming `--default-signer` is passed. This means your transaction will be signed by an account that is funded by fuel-core by default for testing purposes.");
        command.default_signer = true;
    }
    let mut receipts = Vec::new();
    let curr_dir = if let Some(path) = &command.pkg.path {
        PathBuf::from(path)
    } else {
        std::env::current_dir().map_err(|e| anyhow!("{:?}", e))?
    };
    let build_opts = build_opts_from_cmd(&command);
    let built_pkgs_with_manifest = built_pkgs(&curr_dir, &build_opts)?;
    // Prompt for the wallet password only when neither an explicit signing
    // key nor the default test signer was requested.
    let wallet_mode = if command.default_signer || command.signing_key.is_some() {
        SignerSelectionMode::Manual
    } else {
        let password = prompt_forc_wallet_password()?;
        SignerSelectionMode::ForcWallet(password)
    };
    // Only script members are runnable; other program types are skipped.
    for built in built_pkgs_with_manifest {
        if built
            .descriptor
            .manifest_file
            .check_program_type(&[TreeType::Script])
            .is_ok()
        {
            let pkg_receipts = run_pkg(
                &command,
                &built.descriptor.manifest_file,
                &built,
                &wallet_mode,
            )
            .await?;
            receipts.push(pkg_receipts);
        }
    }
    Ok(receipts)
}
/// Translates the gas-related CLI options into fuels `TxPolicies`,
/// leaving unset options at their defaults.
fn tx_policies_from_cmd(cmd: &cmd::Run) -> TxPolicies {
    let mut policies = TxPolicies::default();
    if let Some(max_fee) = cmd.gas.max_fee {
        policies = policies.with_max_fee(max_fee);
    }
    if let Some(limit) = cmd.gas.script_gas_limit {
        policies = policies.with_script_gas_limit(limit);
    }
    policies
}
/// Builds, funds, signs, and executes a single script package against the
/// configured node.
///
/// Returns the resulting receipts; with `--dry-run` the transaction is only
/// printed and the receipt list is empty.
pub async fn run_pkg(
    command: &cmd::Run,
    manifest: &PackageManifestFile,
    compiled: &BuiltPackage,
    signer_mode: &SignerSelectionMode,
) -> Result<RanScript> {
    let node_url = command.node.get_node_url(&manifest.network)?;
    let provider = Provider::connect(node_url.clone()).await?;
    let consensus_params = provider.consensus_parameters().await?;
    // A single script run only ever submits one transaction.
    let tx_count = 1;
    let account = select_account(
        signer_mode,
        command.default_signer || command.unsigned,
        command.signing_key,
        &provider,
        tx_count,
    )
    .await?;
    // Script data comes either from typed `--args` (encoded via the ABI) or
    // from a raw `--data` hex payload; passing both is ambiguous.
    let script_data = match (&command.data, &command.args) {
        (None, Some(args)) => {
            let minify_json_abi = true;
            let package_json_abi = compiled
                .json_abi_string(minify_json_abi)?
                .ok_or_else(|| anyhow::anyhow!("Missing json abi string"))?;
            let main_arg_handler = ScriptCallHandler::from_json_abi_str(&package_json_abi)?;
            let args = args.iter().map(|arg| arg.as_str()).collect::<Vec<_>>();
            main_arg_handler.encode_arguments(args.as_slice())?
        }
        (Some(_), Some(_)) => {
            bail!("Both --args and --data provided, must choose one.")
        }
        _ => {
            let input_data = command.data.as_deref().unwrap_or("");
            let data = input_data.strip_prefix("0x").unwrap_or(input_data);
            // Propagate a proper error instead of panicking on malformed
            // user-supplied hex (previously `.expect("Invalid hex")`).
            hex::decode(data).context("Invalid hex")?
        }
    };
    let external_contracts = command
        .contract
        .as_ref()
        .into_iter()
        .flat_map(|contracts| contracts.iter())
        .map(|contract| {
            ContractId::from_str(contract)
                .map_err(|e| anyhow!("Failed to parse contract id: {}", e))
        })
        .collect::<Result<Vec<ContractId>>>()?;
    let script_binary = compiled.bytecode.bytes.clone();
    let call = ScriptCall {
        script_binary,
        encoded_args: Ok(script_data),
        inputs: vec![],
        outputs: vec![],
        external_contracts,
    };
    let tx_policies = tx_policies_from_cmd(command);
    let mut tb = call.transaction_builder(
        tx_policies,
        VariableOutputPolicy::EstimateMinimum,
        &consensus_params,
        call.inputs.clone(),
        &account,
    )?;
    // Sign and top up the fee before building the final transaction.
    account.add_witnesses(&mut tb)?;
    account.adjust_for_fee(&mut tb, 0).await?;
    let tx = tb.build(provider).await?;
    if command.dry_run {
        info!("{:?}", tx);
        Ok(RanScript { receipts: vec![] })
    } else {
        let program_abi = match &compiled.program_abi {
            sway_core::asm_generation::ProgramABI::Fuel(abi) => Some(abi),
            _ => None,
        };
        let receipts = try_send_tx(
            node_url.as_str(),
            &tx.into(),
            command.pretty_print,
            command.simulate,
            command.debug,
            program_abi,
        )
        .await?;
        Ok(RanScript { receipts })
    }
}
/// Verifies the node is healthy, then submits the transaction with a timeout
/// so a hung node cannot stall the CLI indefinitely.
async fn try_send_tx(
    node_url: &str,
    tx: &Transaction,
    pretty_print: bool,
    simulate: bool,
    debug: bool,
    abi: Option<&ProgramABI>,
) -> Result<Vec<fuel_tx::Receipt>> {
    let client = FuelClient::new(node_url)?;
    // Fail fast with a helpful message when the node is unreachable.
    if client.health().await.is_err() {
        return Err(fuel_core_not_running(node_url));
    }
    timeout(
        Duration::from_millis(TX_SUBMIT_TIMEOUT_MS),
        send_tx(&client, tx, pretty_print, simulate, debug, abi),
    )
    .await
    .with_context(|| format!("timeout waiting for {tx:?} to be included in a block"))?
}
/// Submits (or dry-runs) the transaction, logs any receipts, and optionally
/// drops into an interactive debugging session.
async fn send_tx(
    client: &FuelClient,
    tx: &Transaction,
    pretty_print: bool,
    simulate: bool,
    debug: bool,
    abi: Option<&ProgramABI>,
) -> Result<Vec<fuel_tx::Receipt>> {
    use fuel_core_client::client::types::TransactionStatus;
    let outputs = if simulate {
        // Dry-run: receipts come back per-transaction; we submitted one.
        let txs = vec![tx.clone()];
        let dry_run_results = client.dry_run(txs.as_slice()).await?;
        dry_run_results
            .first()
            .map(|res| res.result.receipts().to_vec())
            .unwrap_or_default()
    } else {
        // Both success and failure carry receipts worth reporting.
        match client.submit_and_await_commit(tx).await? {
            TransactionStatus::Success { receipts, .. }
            | TransactionStatus::Failure { receipts, .. } => receipts,
            _ => vec![],
        }
    };
    if !outputs.is_empty() {
        info!("{}", format_log_receipts(&outputs, pretty_print)?);
    }
    if debug {
        start_debug_session(client, tx, abi).await?;
    }
    Ok(outputs)
}
/// Starts an interactive debugging session with the given transaction
async fn start_debug_session(
    fuel_client: &FuelClient,
    tx: &fuel_tx::Transaction,
    program_abi: Option<&ProgramABI>,
) -> Result<()> {
    // Create debugger instance from the existing fuel client
    let mut debugger = forc_debug::debugger::Debugger::from_client(fuel_client.clone())
        .await
        .map_err(|e| anyhow!("Failed to create debugger: {e}"))?;
    // Create temporary files for transaction and ABI (auto-cleaned when dropped)
    // Note: both temp files must stay alive until `cli.run` finishes, since
    // the debugger reads them by path.
    let mut tx_file = tempfile::Builder::new()
        .suffix(".json")
        .tempfile()
        .map_err(|e| anyhow!("Failed to create temp transaction file: {e}"))?;
    serde_json::to_writer_pretty(&mut tx_file, tx)
        .map_err(|e| anyhow!("Failed to write transaction to temp file: {e}"))?;
    let mut abi_file = tempfile::Builder::new()
        .suffix(".json")
        .tempfile()
        .map_err(|e| anyhow!("Failed to create temp ABI file: {e}"))?;
    // The ABI file is only populated (and passed on) when an ABI is available.
    let tx_cmd = if let Some(abi) = program_abi {
        serde_json::to_writer_pretty(&mut abi_file, &abi)
            .map_err(|e| anyhow!("Failed to write ABI to temp file: {e}"))?;
        // Prepare the start_tx command string for the CLI
        format!(
            "start_tx {} {}",
            tx_file.path().to_string_lossy(),
            abi_file.path().to_string_lossy()
        )
    } else {
        // Prepare the start_tx command string for the CLI
        format!("start_tx {}", tx_file.path().to_string_lossy())
    };
    // Start the interactive CLI session with the prepared command
    let mut cli = forc_debug::cli::Cli::new()
        .map_err(|e| anyhow!("Failed to create debug CLI interface: {e}"))?;
    cli.run(&mut debugger, Some(tx_cmd))
        .await
        .map_err(|e| anyhow!("Interactive debugging session failed: {e}"))?;
    Ok(())
}
/// Maps `forc run` CLI options onto the package build options.
/// Tests are never built and only script members are considered.
fn build_opts_from_cmd(cmd: &cmd::Run) -> pkg::BuildOpts {
    pkg::BuildOpts {
        pkg: pkg::PkgOpts {
            path: cmd.pkg.path.clone(),
            offline: cmd.pkg.offline,
            terse: cmd.pkg.terse,
            locked: cmd.pkg.locked,
            output_directory: cmd.pkg.output_directory.clone(),
            ipfs_node: cmd.pkg.ipfs_node.clone().unwrap_or_default(),
        },
        print: pkg::PrintOpts {
            ast: cmd.print.ast,
            dca_graph: cmd.print.dca_graph.clone(),
            dca_graph_url_format: cmd.print.dca_graph_url_format.clone(),
            asm: cmd.print.asm(),
            bytecode: cmd.print.bytecode,
            bytecode_spans: false,
            ir: cmd.print.ir(),
            reverse_order: cmd.print.reverse_order,
        },
        verify_ir: cmd
            .verify_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        minify: pkg::MinifyOpts {
            json_abi: cmd.minify.json_abi,
            json_storage_slots: cmd.minify.json_storage_slots,
        },
        dump: DumpOpts::default(),
        build_target: BuildTarget::default(),
        build_profile: cmd.build_profile.build_profile.clone(),
        release: cmd.build_profile.release,
        error_on_warnings: cmd.build_profile.error_on_warnings,
        time_phases: cmd.print.time_phases,
        profile: cmd.print.profile,
        metrics_outfile: cmd.print.metrics_outfile.clone(),
        binary_outfile: cmd.build_output.bin_file.clone(),
        debug_outfile: cmd.build_output.debug_file.clone(),
        hex_outfile: cmd.build_output.hex_file.clone(),
        tests: false,
        // Running is only meaningful for scripts.
        member_filter: pkg::MemberFilter::only_scripts(),
        experimental: cmd.experimental.experimental.clone(),
        no_experimental: cmd.experimental.no_experimental.clone(),
        no_output: false,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-plugins/forc-client/tests/deploy.rs | forc-plugins/forc-client/tests/deploy.rs | use forc::cli::shared::Pkg;
use forc_client::{
cmd,
op::{deploy, DeployedContract, DeployedExecutable, DeployedPackage},
util::{account::ForcClientAccount, tx::update_proxy_contract_target},
NodeTarget,
};
use forc_pkg::manifest::Proxy;
use fuel_crypto::SecretKey;
use fuel_tx::{ContractId, Salt};
use fuels::{
macros::abigen,
types::{transaction::TxPolicies, AsciiString, Bits256, SizedAsciiString},
};
use fuels_accounts::{
provider::Provider, signers::private_key::PrivateKeySigner, wallet::Wallet, Account,
ViewOnlyAccount,
};
use portpicker::Port;
use rand::thread_rng;
use rexpect::spawn;
use std::{
fs,
path::{Path, PathBuf},
process::{Child, Command},
str::FromStr,
};
use tempfile::tempdir;
use toml_edit::{value, DocumentMut, InlineTable, Item, Table, Value};
/// Absolute, canonicalized path of the sway workspace root — two levels
/// above this crate's manifest directory.
fn get_workspace_root() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("../..").canonicalize().unwrap()
}
/// Absolute, canonicalized path of this crate's `test/data` fixture directory.
fn test_data_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir.join("test").join("data").canonicalize().unwrap()
}
/// Spawns a local `fuel-core` node (debug mode, in-memory database) on a
/// randomly chosen free port. Returns the child process handle — the caller
/// is responsible for killing it — and the port it listens on.
fn run_node() -> (Child, Port) {
    let port = portpicker::pick_unused_port().expect("No ports free");
    let child = Command::new("fuel-core")
        .arg("run")
        .arg("--debug")
        .arg("--db-type")
        .arg("in-memory")
        .arg("--port")
        .arg(port.to_string())
        .spawn()
        .expect("Failed to start fuel-core");
    (child, port)
}
/// Copy a directory recursively from `source` to `dest`, creating `dest`
/// (and any missing parents) first.
fn copy_dir(source: &Path, dest: &Path) -> anyhow::Result<()> {
    fs::create_dir_all(dest)?;
    for entry in fs::read_dir(source)? {
        let entry = entry?;
        let target = dest.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir(&entry.path(), &target)?;
        } else {
            fs::copy(entry.path(), &target)?;
        }
    }
    Ok(())
}
/// Tries to get a `DeployedContract` out of the given `DeployedPackage`.
///
/// # Panics
/// Panics — including the package's debug representation in the message —
/// when the package is not a contract. (Previously the debug dump went to
/// stdout via `println!`, which test capture can separate from the panic;
/// folding it into the panic message keeps the diagnostic with the failure
/// and matches the style of the sibling `expect_deployed_*` helpers.)
fn expect_deployed_contract(deployed_package: DeployedPackage) -> DeployedContract {
    if let DeployedPackage::Contract(contract) = deployed_package {
        contract
    } else {
        panic!("expected deployed package to be a contract, got: {deployed_package:?}")
    }
}
/// Tries to get a script (`DeployedExecutable`) out of given deployed package.
/// Panics otherwise.
fn expect_deployed_script(deployed_package: DeployedPackage) -> DeployedExecutable {
    let DeployedPackage::Script(script) = deployed_package else {
        panic!("expected deployed package to be a script")
    };
    script
}
/// Tries to get a predicate (`DeployedExecutable`) out of given deployed package.
/// Panics otherwise.
fn expect_deployed_predicate(deployed_package: DeployedPackage) -> DeployedExecutable {
    let DeployedPackage::Predicate(predicate) = deployed_package else {
        panic!("expected deployed package to be a predicate")
    };
    predicate
}
/// Rewrites the project manifest so its `std` dependency points at the local
/// `sway-lib-std` checkout inside this workspace.
///
/// Previously every fallible step was `unwrap()`ed even though the function
/// returns `anyhow::Result`; errors are now propagated with `?`, consistent
/// with `patch_manifest_file_with_proxy_table`.
fn patch_manifest_file_with_path_std(manifest_dir: &Path) -> anyhow::Result<()> {
    let toml_path = manifest_dir.join(sway_utils::constants::MANIFEST_FILE_NAME);
    let toml_content = fs::read_to_string(&toml_path)?;
    let mut doc = toml_content.parse::<DocumentMut>()?;
    let new_std_path = get_workspace_root().join("sway-lib-std");
    let mut std_dependency = InlineTable::new();
    std_dependency.insert("path", Value::from(new_std_path.display().to_string()));
    doc["dependencies"]["std"] = Item::Value(Value::InlineTable(std_dependency));
    fs::write(&toml_path, doc.to_string())?;
    Ok(())
}
/// Writes the given `Proxy` config into the project manifest's `[proxy]`
/// table, creating the table if absent and removing the `address` key when
/// the config carries no address.
fn patch_manifest_file_with_proxy_table(manifest_dir: &Path, proxy: Proxy) -> anyhow::Result<()> {
    let toml_path = manifest_dir.join(sway_utils::constants::MANIFEST_FILE_NAME);
    let toml_content = fs::read_to_string(&toml_path)?;
    let mut doc = toml_content.parse::<DocumentMut>()?;
    let proxy_table = doc.entry("proxy").or_insert(Item::Table(Table::new()));
    let proxy_table = proxy_table.as_table_mut().unwrap();
    proxy_table.insert("enabled", value(proxy.enabled));
    if let Some(address) = proxy.address {
        proxy_table.insert("address", value(address));
    } else {
        // Stale addresses from a previous run must not linger.
        proxy_table.remove("address");
    }
    fs::write(&toml_path, doc.to_string())?;
    Ok(())
}
/// Flips every `true` in the project's `src/main.sw` to `false`, producing a
/// behaviorally different build for contract-update tests.
fn update_main_sw(tmp_dir: &Path) -> anyhow::Result<()> {
    let main_sw_path = tmp_dir.join("src").join("main.sw");
    let updated_content = fs::read_to_string(&main_sw_path)?.replace("true", "false");
    fs::write(main_sw_path, updated_content)?;
    Ok(())
}
/// Exercises the deployed `big_contract` end-to-end: blob loading, enum and
/// struct round-trips, storage vectors of several element types, and the
/// configurables check. Panics (via `unwrap`/`assert`) on any mismatch.
async fn assert_big_contract_calls(wallet: Wallet, contract_id: ContractId) {
    abigen!(Contract(
        name = "BigContract",
        abi = "forc-plugins/forc-client/test/data/big_contract/big_contract-abi.json"
    ));
    let instance = BigContract::new(contract_id, wallet);
    // The contract is chunked; `large_blob` proves the loaded blob executes.
    let result = instance.methods().large_blob().call().await.unwrap().value;
    assert!(result);
    // Unit enum variant round-trip.
    let result = instance
        .methods()
        .enum_input_output(Location::Mars)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Location::Mars);
    // Test enum with "tuple like struct" with simple value.
    let result = instance
        .methods()
        .enum_input_output(Location::Earth(u64::MAX))
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Location::Earth(u64::MAX));
    // Test enum with "tuple like struct" with enum value.
    let result = instance
        .methods()
        .enum_input_output(Location::SimpleJupiter(Color::Red))
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Location::SimpleJupiter(Color::Red));
    // Test enum with "tuple like struct" with enum value.
    let result = instance
        .methods()
        .enum_input_output(Location::SimpleJupiter(Color::Blue(u64::MAX)))
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Location::SimpleJupiter(Color::Blue(u64::MAX)));
    // Test enum with "tuple like struct" with enum array value.
    let result = instance
        .methods()
        .enum_input_output(Location::Jupiter([Color::Red, Color::Blue(u64::MAX)]))
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(
        result,
        Location::Jupiter([Color::Red, Color::Blue(u64::MAX)])
    );
    // Test enum with "tuple like struct" with struct array value.
    let result = instance
        .methods()
        .enum_input_output(Location::SimplePluto(SimpleStruct {
            a: true,
            b: u64::MAX,
        }))
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(
        result,
        Location::SimplePluto(SimpleStruct {
            a: true,
            b: u64::MAX,
        })
    );
    // Struct round-trip with nested enum, tuple, array, and b256 fields.
    let input = Person {
        name: AsciiString::new("Alice".into()).unwrap(),
        age: 42,
        alive: true,
        location: Location::Earth(1),
        some_tuple: (false, 42),
        some_array: [4, 2],
        some_b_256: Bits256::zeroed(),
    };
    let result = instance
        .methods()
        .struct_input_output(input.clone())
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, input);
    // Storage vector of u16: push then read back index 0.
    let _ = instance
        .methods()
        .push_storage_u16(42)
        .call()
        .await
        .unwrap();
    let result = instance
        .methods()
        .get_storage_u16(0)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, 42);
    // Storage vector of structs.
    let _ = instance
        .methods()
        .push_storage_simple(SimpleStruct {
            a: true,
            b: u64::MAX,
        })
        .call()
        .await
        .unwrap();
    let result = instance
        .methods()
        .get_storage_simple(0)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(
        result,
        SimpleStruct {
            a: true,
            b: u64::MAX,
        }
    );
    // Storage vector of enums: three pushes read back in insertion order.
    let _ = instance
        .methods()
        .push_storage_location(Location::Mars)
        .call()
        .await
        .unwrap();
    let result = instance
        .methods()
        .get_storage_location(0)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Location::Mars);
    let _ = instance
        .methods()
        .push_storage_location(Location::Earth(u64::MAX))
        .call()
        .await
        .unwrap();
    let result = instance
        .methods()
        .get_storage_location(1)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Location::Earth(u64::MAX));
    let _ = instance
        .methods()
        .push_storage_location(Location::Jupiter([Color::Red, Color::Blue(u64::MAX)]))
        .call()
        .await
        .unwrap();
    let result = instance
        .methods()
        .get_storage_location(2)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(
        result,
        Location::Jupiter([Color::Red, Color::Blue(u64::MAX)])
    );
    // The contract itself verifies its configurables and returns the verdict.
    let result = instance
        .methods()
        .assert_configurables()
        .call()
        .await
        .unwrap()
        .value;
    assert!(result);
}
#[tokio::test]
async fn test_simple_deploy() {
    // Spin up a throwaway in-memory node and a temp copy of the test project.
    let (mut node, port) = run_node();
    let tmp_dir = tempdir().unwrap();
    let project_dir = test_data_path().join("standalone_contract");
    copy_dir(&project_dir, tmp_dir.path()).unwrap();
    patch_manifest_file_with_path_std(tmp_dir.path()).unwrap();
    let pkg = Pkg {
        path: Some(tmp_dir.path().display().to_string()),
        ..Default::default()
    };
    let node_url = format!("http://127.0.0.1:{port}/v1/graphql");
    let target = NodeTarget {
        node_url: Some(node_url),
        target: None,
        testnet: false,
        mainnet: false,
        devnet: false,
    };
    // Fixed (default) salt keeps the resulting contract id deterministic.
    let cmd = cmd::Deploy {
        pkg,
        salt: Some(vec![format!("{}", Salt::default())]),
        node: target,
        default_signer: true,
        ..Default::default()
    };
    let contract_ids = deploy(cmd).await.unwrap();
    // NOTE(review): `kill()` without `wait()` may leave a zombie process for
    // the rest of the test run; also the node leaks if `deploy` panics.
    node.kill().unwrap();
    let expected = vec![DeployedPackage::Contract(DeployedContract {
        id: ContractId::from_str(
            "1d1875bbd5a1b458b36df59b81998b613ea2b0e5afc0601bf9ab350ad35f69d3",
        )
        .unwrap(),
        proxy: None,
        chunked: false,
    })];
    assert_eq!(contract_ids, expected)
}
#[tokio::test]
async fn test_deploy_submit_only() {
    // Same flow as `test_simple_deploy`, but with `submit_only` the command
    // returns right after submission without awaiting commitment; the
    // deterministic contract id must be identical.
    let (mut node, port) = run_node();
    let tmp_dir = tempdir().unwrap();
    let project_dir = test_data_path().join("standalone_contract");
    copy_dir(&project_dir, tmp_dir.path()).unwrap();
    patch_manifest_file_with_path_std(tmp_dir.path()).unwrap();
    let pkg = Pkg {
        path: Some(tmp_dir.path().display().to_string()),
        ..Default::default()
    };
    let node_url = format!("http://127.0.0.1:{port}/v1/graphql");
    let target = NodeTarget {
        node_url: Some(node_url),
        target: None,
        testnet: false,
        mainnet: false,
        devnet: false,
    };
    let cmd = cmd::Deploy {
        pkg,
        salt: Some(vec![format!("{}", Salt::default())]),
        node: target,
        default_signer: true,
        submit_only: true,
        ..Default::default()
    };
    let contract_ids = deploy(cmd).await.unwrap();
    node.kill().unwrap();
    let expected = vec![DeployedPackage::Contract(DeployedContract {
        id: ContractId::from_str(
            "1d1875bbd5a1b458b36df59b81998b613ea2b0e5afc0601bf9ab350ad35f69d3",
        )
        .unwrap(),
        proxy: None,
        chunked: false,
    })];
    assert_eq!(contract_ids, expected)
}
#[tokio::test]
async fn test_deploy_fresh_proxy() {
    // Enabling `[proxy]` with no address should deploy a brand-new proxy
    // contract alongside the implementation and report both ids.
    let (mut node, port) = run_node();
    let tmp_dir = tempdir().unwrap();
    let project_dir = test_data_path().join("standalone_contract");
    copy_dir(&project_dir, tmp_dir.path()).unwrap();
    patch_manifest_file_with_path_std(tmp_dir.path()).unwrap();
    let proxy = Proxy {
        enabled: true,
        address: None,
    };
    patch_manifest_file_with_proxy_table(tmp_dir.path(), proxy).unwrap();
    let pkg = Pkg {
        path: Some(tmp_dir.path().display().to_string()),
        ..Default::default()
    };
    let node_url = format!("http://127.0.0.1:{port}/v1/graphql");
    let target = NodeTarget {
        node_url: Some(node_url),
        target: None,
        testnet: false,
        mainnet: false,
        devnet: false,
    };
    let cmd = cmd::Deploy {
        pkg,
        salt: Some(vec![format!("{}", Salt::default())]),
        node: target,
        default_signer: true,
        ..Default::default()
    };
    let contract_ids = deploy(cmd).await.unwrap();
    node.kill().unwrap();
    // Both ids are deterministic thanks to the fixed salt and default signer.
    let impl_contract = DeployedPackage::Contract(DeployedContract {
        id: ContractId::from_str(
            "1d1875bbd5a1b458b36df59b81998b613ea2b0e5afc0601bf9ab350ad35f69d3",
        )
        .unwrap(),
        proxy: Some(
            ContractId::from_str(
                "2be32d0fd53d8c6fc8dc15e46fb1dec9a782a486dd1540e2b9031629a6d9e26b",
            )
            .unwrap(),
        ),
        chunked: false,
    });
    let expected = vec![impl_contract];
    assert_eq!(contract_ids, expected)
}
#[tokio::test]
async fn test_proxy_contract_re_routes_call() {
let (mut node, port) = run_node();
let tmp_dir = tempdir().unwrap();
let project_dir = test_data_path().join("standalone_contract");
copy_dir(&project_dir, tmp_dir.path()).unwrap();
patch_manifest_file_with_path_std(tmp_dir.path()).unwrap();
let proxy = Proxy {
enabled: true,
address: None,
};
patch_manifest_file_with_proxy_table(tmp_dir.path(), proxy).unwrap();
let pkg = Pkg {
path: Some(tmp_dir.path().display().to_string()),
..Default::default()
};
let node_url = format!("http://127.0.0.1:{port}/v1/graphql");
let target = NodeTarget {
node_url: Some(node_url.clone()),
target: None,
testnet: false,
mainnet: false,
devnet: false,
};
let cmd = cmd::Deploy {
pkg,
salt: Some(vec![format!("{}", Salt::default())]),
node: target,
default_signer: true,
..Default::default()
};
let deployed_contract = expect_deployed_contract(deploy(cmd).await.unwrap().remove(0));
// At this point we deployed a contract with proxy.
let proxy_contract_id = deployed_contract.proxy.unwrap();
let impl_contract_id = deployed_contract.id;
// Make a contract call into proxy contract, and check if the initial
// contract returns a true.
let provider = Provider::connect(&node_url).await.unwrap();
let secret_key = SecretKey::from_str(forc_client::constants::DEFAULT_PRIVATE_KEY).unwrap();
let signer = PrivateKeySigner::new(secret_key);
let wallet_unlocked = Wallet::new(signer, provider);
abigen!(Contract(
name = "ImplementationContract",
abi = "forc-plugins/forc-client/test/data/standalone_contract/standalone_contract-abi.json"
));
let impl_contract_a = ImplementationContract::new(proxy_contract_id, wallet_unlocked.clone());
// Test storage functions
let res = impl_contract_a
.methods()
.test_function_read()
.with_contract_ids(&[impl_contract_id])
.call()
.await
.unwrap();
assert_eq!(res.value, 5);
let res = impl_contract_a
.methods()
.test_function_write(8)
.with_contract_ids(&[impl_contract_id])
.call()
.await
.unwrap();
assert_eq!(res.value, 8);
let res = impl_contract_a
.methods()
.test_function()
.with_contract_ids(&[impl_contract_id])
.call()
.await
.unwrap();
assert!(res.value);
update_main_sw(tmp_dir.path()).unwrap();
let target = NodeTarget {
node_url: Some(node_url.clone()),
target: None,
testnet: false,
mainnet: false,
devnet: false,
};
let pkg = Pkg {
path: Some(tmp_dir.path().display().to_string()),
..Default::default()
};
let cmd = cmd::Deploy {
pkg,
salt: Some(vec![format!("{}", Salt::default())]),
node: target,
default_signer: true,
..Default::default()
};
let deployed_contract = expect_deployed_contract(deploy(cmd).await.unwrap().remove(0));
// proxy contract id should be the same.
let proxy_contract_after_update = deployed_contract.proxy.unwrap();
assert_eq!(proxy_contract_id, proxy_contract_after_update);
let impl_contract_id_after_update = deployed_contract.id;
assert!(impl_contract_id != impl_contract_id_after_update);
let impl_contract_a = ImplementationContract::new(proxy_contract_after_update, wallet_unlocked);
// Test storage functions
let res = impl_contract_a
.methods()
.test_function_read()
.with_contract_ids(&[impl_contract_id_after_update])
.call()
.await
.unwrap();
// Storage should be preserved from the previous target contract.
assert_eq!(res.value, 8);
let res = impl_contract_a
.methods()
.test_function_write(9)
.with_contract_ids(&[impl_contract_id_after_update])
.call()
.await
.unwrap();
assert_eq!(res.value, 9);
let res = impl_contract_a
.methods()
.test_function()
.with_contract_ids(&[impl_contract_id_after_update])
.call()
.await
.unwrap();
assert!(!res.value);
node.kill().unwrap();
}
#[tokio::test]
async fn test_non_owner_fails_to_set_target() {
let (mut node, port) = run_node();
let tmp_dir = tempdir().unwrap();
let project_dir = test_data_path().join("standalone_contract");
copy_dir(&project_dir, tmp_dir.path()).unwrap();
patch_manifest_file_with_path_std(tmp_dir.path()).unwrap();
let proxy = Proxy {
enabled: true,
address: None,
};
patch_manifest_file_with_proxy_table(tmp_dir.path(), proxy).unwrap();
let pkg = Pkg {
path: Some(tmp_dir.path().display().to_string()),
..Default::default()
};
let node_url = format!("http://127.0.0.1:{port}/v1/graphql");
let target = NodeTarget {
node_url: Some(node_url.clone()),
target: None,
testnet: false,
mainnet: false,
devnet: false,
};
let cmd = cmd::Deploy {
pkg,
salt: Some(vec![format!("{}", Salt::default())]),
node: target,
default_signer: true,
..Default::default()
};
let contract_id = expect_deployed_contract(deploy(cmd).await.unwrap().remove(0));
// Proxy contract's id.
let proxy_id = contract_id.proxy.unwrap();
// Create and fund an owner account and an attacker account.
let provider = Provider::connect(&node_url).await.unwrap();
let attacker_secret_key = SecretKey::random(&mut thread_rng());
let attacker_signer = PrivateKeySigner::new(attacker_secret_key);
let attacker_wallet = Wallet::new(attacker_signer.clone(), provider.clone());
let owner_secret_key =
SecretKey::from_str(forc_client::constants::DEFAULT_PRIVATE_KEY).unwrap();
let owner_signer = PrivateKeySigner::new(owner_secret_key);
let owner_wallet = Wallet::new(owner_signer, provider.clone());
let consensus_parameters = provider.consensus_parameters().await.unwrap();
let base_asset_id = consensus_parameters.base_asset_id();
// Fund attacker wallet so that it can try to make a set proxy target call.
owner_wallet
.transfer(
attacker_wallet.address(),
100000,
*base_asset_id,
TxPolicies::default(),
)
.await
.unwrap();
let dummy_contract_id_target = ContractId::default();
abigen!(Contract(name = "ProxyContract", abi = "{\"programType\":\"contract\",\"specVersion\":\"1.1\",\"encodingVersion\":\"1\",\"concreteTypes\":[{\"type\":\"()\",\"concreteTypeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"type\":\"enum standards::src5::AccessError\",\"concreteTypeId\":\"3f702ea3351c9c1ece2b84048006c8034a24cbc2bad2e740d0412b4172951d3d\",\"metadataTypeId\":1},{\"type\":\"enum standards::src5::State\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"metadataTypeId\":2},{\"type\":\"enum std::option::Option<struct std::contract_id::ContractId>\",\"concreteTypeId\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"metadataTypeId\":4,\"typeArguments\":[\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\"]},{\"type\":\"enum sway_libs::ownership::errors::InitializationError\",\"concreteTypeId\":\"1dfe7feadc1d9667a4351761230f948744068a090fe91b1bc6763a90ed5d3893\",\"metadataTypeId\":5},{\"type\":\"enum sway_libs::upgradability::errors::SetProxyOwnerError\",\"concreteTypeId\":\"3c6e90ae504df6aad8b34a93ba77dc62623e00b777eecacfa034a8ac6e890c74\",\"metadataTypeId\":6},{\"type\":\"str\",\"concreteTypeId\":\"8c25cb3686462e9a86d2883c5688a22fe738b0bbc85f458d2d2b5f3f667c6d5a\"},{\"type\":\"struct std::contract_id::ContractId\",\"concreteTypeId\":\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\",\"metadataTypeId\":9},{\"type\":\"struct sway_libs::upgradability::events::ProxyOwnerSet\",\"concreteTypeId\":\"96dd838b44f99d8ccae2a7948137ab6256c48ca4abc6168abc880de07fba7247\",\"metadataTypeId\":10},{\"type\":\"struct sway_libs::upgradability::events::ProxyTargetSet\",\"concreteTypeId\":\"1ddc0adda1270a016c08ffd614f29f599b4725407c8954c8b960bdf651a9a6c8\",\"metadataTypeId\":11}],\"metadataTypes\":[{\"type\":\"b256\",\"metadataTypeId\":0},{\"type\":\"enum 
standards::src5::AccessError\",\"metadataTypeId\":1,\"components\":[{\"name\":\"NotOwner\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum standards::src5::State\",\"metadataTypeId\":2,\"components\":[{\"name\":\"Uninitialized\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"name\":\"Initialized\",\"typeId\":3},{\"name\":\"Revoked\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum std::identity::Identity\",\"metadataTypeId\":3,\"components\":[{\"name\":\"Address\",\"typeId\":8},{\"name\":\"ContractId\",\"typeId\":9}]},{\"type\":\"enum std::option::Option\",\"metadataTypeId\":4,\"components\":[{\"name\":\"None\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"},{\"name\":\"Some\",\"typeId\":7}],\"typeParameters\":[7]},{\"type\":\"enum sway_libs::ownership::errors::InitializationError\",\"metadataTypeId\":5,\"components\":[{\"name\":\"CannotReinitialized\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"enum sway_libs::upgradability::errors::SetProxyOwnerError\",\"metadataTypeId\":6,\"components\":[{\"name\":\"CannotUninitialize\",\"typeId\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\"}]},{\"type\":\"generic T\",\"metadataTypeId\":7},{\"type\":\"struct std::address::Address\",\"metadataTypeId\":8,\"components\":[{\"name\":\"bits\",\"typeId\":0}]},{\"type\":\"struct std::contract_id::ContractId\",\"metadataTypeId\":9,\"components\":[{\"name\":\"bits\",\"typeId\":0}]},{\"type\":\"struct sway_libs::upgradability::events::ProxyOwnerSet\",\"metadataTypeId\":10,\"components\":[{\"name\":\"new_proxy_owner\",\"typeId\":2}]},{\"type\":\"struct 
sway_libs::upgradability::events::ProxyTargetSet\",\"metadataTypeId\":11,\"components\":[{\"name\":\"new_target\",\"typeId\":9}]}],\"functions\":[{\"inputs\":[],\"name\":\"proxy_target\",\"output\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Returns the target contract of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Returns\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * [Option<ContractId>] - The new proxy contract to which all fallback calls will be passed or `None`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\"]}]},{\"inputs\":[{\"name\":\"new_target\",\"concreteTypeId\":\"29c10735d33b5159f0c71ee1dbd17b36a3e69e41f00fab0d42e1bd9f428d8a54\"}],\"name\":\"set_proxy_target\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Change the target contract of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method can only be called by the `proxy_owner`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Arguments\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * `new_target`: [ContractId] - The new proxy contract to which all fallback calls will be passed.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # 
Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When not called by `proxy_owner`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Write: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\",\"write\"]}]},{\"inputs\":[],\"name\":\"proxy_owner\",\"output\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Returns the owner of the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Returns\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * [State] - Represents the state of ownership for this contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"read\"]}]},{\"inputs\":[],\"name\":\"initialize_proxy\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Initializes the proxy contract.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method sets the storage values using the values of the configurable constants `INITIAL_TARGET` and `INITIAL_OWNER`.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This then allows methods that write to storage to be called.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method 
can only be called once.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When `storage::SRC14.proxy_owner` is not [State::Uninitialized].\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Writes: `2`\"]},{\"name\":\"storage\",\"arguments\":[\"write\"]}]},{\"inputs\":[{\"name\":\"new_proxy_owner\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\"}],\"name\":\"set_proxy_owner\",\"output\":\"2e38e77b22c314a449e91fafed92a43826ac6aa403ae6a8acb6cf58239fbaf5d\",\"attributes\":[{\"name\":\"doc-comment\",\"arguments\":[\" Changes proxy ownership to the passed State.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Additional Information\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" This method can be used to transfer ownership between Identities or to revoke ownership.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Arguments\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * `new_proxy_owner`: [State] - The new state of the proxy ownership.\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Reverts\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When the sender is not the current proxy owner.\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * When the new state of the proxy ownership is [State::Uninitialized].\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" # Number of Storage 
Accesses\"]},{\"name\":\"doc-comment\",\"arguments\":[\"\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Reads: `1`\"]},{\"name\":\"doc-comment\",\"arguments\":[\" * Writes: `1`\"]},{\"name\":\"storage\",\"arguments\":[\"write\"]}]}],\"loggedTypes\":[{\"logId\":\"4571204900286667806\",\"concreteTypeId\":\"3f702ea3351c9c1ece2b84048006c8034a24cbc2bad2e740d0412b4172951d3d\"},{\"logId\":\"2151606668983994881\",\"concreteTypeId\":\"1ddc0adda1270a016c08ffd614f29f599b4725407c8954c8b960bdf651a9a6c8\"},{\"logId\":\"2161305517876418151\",\"concreteTypeId\":\"1dfe7feadc1d9667a4351761230f948744068a090fe91b1bc6763a90ed5d3893\"},{\"logId\":\"4354576968059844266\",\"concreteTypeId\":\"3c6e90ae504df6aad8b34a93ba77dc62623e00b777eecacfa034a8ac6e890c74\"},{\"logId\":\"10870989709723147660\",\"concreteTypeId\":\"96dd838b44f99d8ccae2a7948137ab6256c48ca4abc6168abc880de07fba7247\"},{\"logId\":\"10098701174489624218\",\"concreteTypeId\":\"8c25cb3686462e9a86d2883c5688a22fe738b0bbc85f458d2d2b5f3f667c6d5a\"}],\"messagesTypes\":[],\"configurables\":[{\"name\":\"INITIAL_TARGET\",\"concreteTypeId\":\"0d79387ad3bacdc3b7aad9da3a96f4ce60d9a1b6002df254069ad95a3931d5c8\",\"offset\":13368},{\"name\":\"INITIAL_OWNER\",\"concreteTypeId\":\"192bc7098e2fe60635a9918afb563e4e5419d386da2bdbf0d716b4bc8549802c\",\"offset\":13320}]}",));
let wallet = Wallet::new(attacker_signer, provider.clone());
let attacker_account = ForcClientAccount::Wallet(wallet);
// Try to change target of the proxy with a random wallet which is not the owner of the proxy.
let res = update_proxy_contract_target(&attacker_account, proxy_id, dummy_contract_id_target)
.await
.err()
.unwrap();
node.kill().unwrap();
assert!(res
.to_string()
.starts_with("transaction reverted: NotOwner"));
}
// TODO: https://github.com/FuelLabs/sway/issues/6283
// Add interactive tests for the happy path cases. This requires starting the node with funded accounts and setting up
// the wallet with the correct password. The tests should be run in a separate test suite that is not run by default.
// It would also require overriding `default_wallet_path` function for tests, so as not to interfere with the user's wallet.
#[test]
fn test_deploy_interactive_missing_wallet() -> Result<(), rexpect::error::Error> {
let (mut node, port) = run_node();
let node_url = format!("http://127.0.0.1:{port}/v1/graphql");
// Spawn the forc-deploy binary using cargo run
let project_dir = test_data_path().join("standalone_contract");
let mut process = spawn(
&format!(
"cargo run --bin forc-deploy -- --node-url {node_url} -p {}",
project_dir.display()
),
Some(300000),
)?;
// Confirmation prompts
process
.exp_string("\u{1b}[1;32mConfirming\u{1b}[0m transactions [deploy standalone_contract]")?;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-test/src/setup.rs | forc-test/src/setup.rs | use fuel_tx as tx;
use fuel_vm::{self as vm};
/// Result of preparing a deployment transaction setup for a contract.
pub type ContractDeploymentSetup = (tx::ContractId, vm::checked_transaction::Checked<tx::Create>);
/// Required test setup for package types that requires a deployment.
#[derive(Debug, Clone)]
pub enum DeploymentSetup {
Script(ScriptTestSetup),
Contract(ContractTestSetup),
}
impl DeploymentSetup {
/// Returns the storage for this test setup
fn storage(&self) -> &vm::storage::MemoryStorage {
match self {
DeploymentSetup::Script(script_setup) => &script_setup.storage,
DeploymentSetup::Contract(contract_setup) => &contract_setup.storage,
}
}
/// Return the root contract id if this is a contract setup.
fn root_contract_id(&self) -> Option<tx::ContractId> {
match self {
DeploymentSetup::Script(_) => None,
DeploymentSetup::Contract(contract_setup) => Some(contract_setup.root_contract_id),
}
}
}
/// The storage and the contract id (if a contract is being tested) for a test.
#[derive(Debug, Clone)]
pub enum TestSetup {
WithDeployment(DeploymentSetup),
WithoutDeployment(vm::storage::MemoryStorage),
}
impl TestSetup {
/// Returns the storage for this test setup
pub fn storage(&self) -> &vm::storage::MemoryStorage {
match self {
TestSetup::WithDeployment(deployment_setup) => deployment_setup.storage(),
TestSetup::WithoutDeployment(storage) => storage,
}
}
/// Produces an iterator yielding contract ids of contract dependencies for this test setup.
pub fn contract_dependency_ids(&self) -> impl Iterator<Item = &tx::ContractId> + '_ {
match self {
TestSetup::WithDeployment(deployment_setup) => match deployment_setup {
DeploymentSetup::Script(script_setup) => {
script_setup.contract_dependency_ids.iter()
}
DeploymentSetup::Contract(contract_setup) => {
contract_setup.contract_dependency_ids.iter()
}
},
TestSetup::WithoutDeployment(_) => [].iter(),
}
}
/// Return the root contract id if this is a contract setup.
pub fn root_contract_id(&self) -> Option<tx::ContractId> {
match self {
TestSetup::WithDeployment(deployment_setup) => deployment_setup.root_contract_id(),
TestSetup::WithoutDeployment(_) => None,
}
}
/// Produces an iterator yielding all contract ids required to be included in the transaction
/// for this test setup.
pub fn contract_ids(&self) -> impl Iterator<Item = tx::ContractId> + '_ {
self.contract_dependency_ids()
.cloned()
.chain(self.root_contract_id())
}
}
/// The data collected to test a contract.
#[derive(Debug, Clone)]
pub struct ContractTestSetup {
pub storage: vm::storage::MemoryStorage,
pub contract_dependency_ids: Vec<tx::ContractId>,
pub root_contract_id: tx::ContractId,
}
/// The data collected to test a script.
#[derive(Debug, Clone)]
pub struct ScriptTestSetup {
pub storage: vm::storage::MemoryStorage,
pub contract_dependency_ids: Vec<tx::ContractId>,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-test/src/ecal.rs | forc-test/src/ecal.rs | use fuel_vm::{
interpreter::EcalHandler,
prelude::{Interpreter, RegId},
};
// ssize_t write(int fd, const void buf[.count], size_t count);
pub const WRITE_SYSCALL: u64 = 1000;
pub const FFLUSH_SYSCALL: u64 = 1001;
#[derive(Debug, Clone)]
pub enum Syscall {
Write { fd: u64, bytes: Vec<u8> },
Fflush { fd: u64 },
Unknown { ra: u64, rb: u64, rc: u64, rd: u64 },
}
impl Syscall {
pub fn apply(&self) {
use std::io::Write;
use std::os::fd::FromRawFd;
match self {
Syscall::Write { fd, bytes } => {
let s = std::str::from_utf8(bytes.as_slice()).unwrap();
let mut f = unsafe { std::fs::File::from_raw_fd(*fd as i32) };
write!(&mut f, "{s}").unwrap();
// Don't close the fd
std::mem::forget(f);
}
Syscall::Fflush { fd } => {
let mut f = unsafe { std::fs::File::from_raw_fd(*fd as i32) };
let _ = f.flush();
// Don't close the fd
std::mem::forget(f);
}
Syscall::Unknown { ra, rb, rc, rd } => {
println!("Unknown ecal: {ra} {rb} {rc} {rd}");
}
}
}
}
/// Handle VM `ecal` as syscalls.
///
/// The application of the syscalls can be turned off,
/// guaranteeing total isolation from the outside world.
///
/// Capture of the syscalls can be turned on, allowing
/// its application even after the VM is not running anymore.
///
/// Supported syscalls:
/// 1000 - write(fd: u64, buf: raw_ptr, count: u64) -> u64
#[derive(Debug, Clone)]
pub struct EcalSyscallHandler {
pub apply: bool,
pub capture: bool,
pub captured: Vec<Syscall>,
}
impl Default for EcalSyscallHandler {
fn default() -> Self {
Self::only_capturing()
}
}
impl EcalSyscallHandler {
pub fn only_capturing() -> Self {
Self {
apply: false,
capture: true,
captured: vec![],
}
}
pub fn only_applying() -> Self {
Self {
apply: true,
capture: false,
captured: vec![],
}
}
pub fn clear(&mut self) {
self.captured.clear();
}
}
impl EcalHandler for EcalSyscallHandler {
fn ecal<M, S, Tx, V>(
vm: &mut Interpreter<M, S, Tx, Self, V>,
a: RegId,
b: RegId,
c: RegId,
d: RegId,
) -> fuel_vm::error::SimpleResult<()>
where
M: fuel_vm::prelude::Memory,
{
let regs = vm.registers();
let syscall = match regs[a.to_u8() as usize] {
WRITE_SYSCALL => {
let fd = regs[b.to_u8() as usize];
let addr = regs[c.to_u8() as usize];
let count = regs[d.to_u8() as usize];
let bytes = vm.memory().read(addr, count).unwrap().to_vec();
Syscall::Write { fd, bytes }
}
FFLUSH_SYSCALL => {
let fd = regs[b.to_u8() as usize];
Syscall::Fflush { fd }
}
_ => {
let ra = regs[a.to_u8() as usize];
let rb = regs[b.to_u8() as usize];
let rc = regs[c.to_u8() as usize];
let rd = regs[d.to_u8() as usize];
Syscall::Unknown { ra, rb, rc, rd }
}
};
let s = vm.ecal_state_mut();
if s.apply {
syscall.apply();
}
if s.capture {
s.captured.push(syscall);
}
Ok(())
}
}
#[test]
fn ok_capture_ecals() {
use fuel_vm::fuel_asm::op::*;
use fuel_vm::prelude::*;
let vm: Interpreter<MemoryInstance, MemoryStorage, Script, EcalSyscallHandler> = <_>::default();
let test_input = "Hello, WriteSyscall!";
let script_data: Vec<u8> = test_input.bytes().collect();
let script = vec![
movi(0x20, WRITE_SYSCALL as u32),
gtf_args(0x10, 0x00, GTFArgs::ScriptData),
movi(0x21, script_data.len().try_into().unwrap()),
ecal(0x20, 0x1, 0x10, 0x21),
ret(RegId::ONE),
]
.into_iter()
.collect();
// Execute transaction
let mut client = MemoryClient::from_txtor(vm.into());
let tx = TransactionBuilder::script(script, script_data)
.script_gas_limit(1_000_000)
.add_fee_input()
.finalize()
.into_checked(Default::default(), &ConsensusParameters::standard())
.expect("failed to generate a checked tx");
let _ = client.transact(tx);
// Verify
let t: Transactor<MemoryInstance, MemoryStorage, Script, EcalSyscallHandler> = client.into();
let syscalls = t.interpreter().ecal_state().captured.clone();
assert_eq!(syscalls.len(), 1);
assert!(
matches!(&syscalls[0], Syscall::Write { fd: 1, bytes } if std::str::from_utf8(bytes).unwrap() == test_input)
);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-test/src/lib.rs | forc-test/src/lib.rs | pub mod ecal;
pub mod execute;
pub mod setup;
use crate::execute::TestExecutor;
use crate::setup::{
ContractDeploymentSetup, ContractTestSetup, DeploymentSetup, ScriptTestSetup, TestSetup,
};
use ecal::EcalSyscallHandler;
use forc_pkg::{self as pkg, BuildOpts, DumpOpts};
use fuel_abi_types::revert_info::RevertInfo;
use fuel_tx::{self as tx, GasCostsValues};
use fuel_vm::checked_transaction::builder::TransactionBuilderExt;
use fuel_vm::{self as vm};
use pkg::TestPassCondition;
use pkg::{Built, BuiltPackage};
use rand::{Rng, SeedableRng};
use rayon::prelude::*;
use std::str::FromStr;
use std::{collections::HashMap, fs, path::PathBuf, sync::Arc};
use sway_core::{BuildTarget, IrCli};
use sway_types::Span;
use tx::consensus_parameters::ConsensusParametersV1;
use tx::{ConsensusParameters, ContractParameters, ScriptParameters, TxParameters};
use vm::interpreter::{InterpreterParams, MemoryInstance};
use vm::prelude::SecretKey;
/// The result of a `forc test` invocation.
#[derive(Debug)]
pub enum Tested {
Package(Box<TestedPackage>),
Workspace(Vec<TestedPackage>),
}
/// The result of testing a specific package.
#[derive(Debug)]
pub struct TestedPackage {
pub built: Box<pkg::BuiltPackage>,
/// The resulting `ProgramState` after executing the test.
pub tests: Vec<TestResult>,
}
#[derive(Debug)]
pub struct TestDetails {
/// The file that contains the test function.
pub file_path: Arc<PathBuf>,
/// The line number for the test declaration.
pub line_number: usize,
}
/// The filter to be used to only run matching tests.
#[derive(Debug, Clone)]
pub struct TestFilter<'a> {
/// The phrase used for filtering, a `&str` searched/matched with test name.
pub filter_phrase: &'a str,
/// If set `true`, a complete "match" is required with test name for the test to be executed,
/// otherwise a test_name should "contain" the `filter_phrase`.
pub exact_match: bool,
}
/// The result of executing a single test within a single package.
#[derive(Debug, Clone)]
pub struct TestResult {
/// The name of the function.
pub name: String,
/// The time taken for the test to execute.
pub duration: std::time::Duration,
/// The span for the function declaring this test.
pub span: Span,
/// The file path for the function declaring this test.
pub file_path: Arc<PathBuf>,
/// The resulting state after executing the test function.
pub state: vm::state::ProgramState,
/// The required state of the VM for this test to pass.
pub condition: pkg::TestPassCondition,
/// Emitted `Receipt`s during the execution of the test.
pub logs: Vec<fuel_tx::Receipt>,
/// Gas used while executing this test.
pub gas_used: u64,
/// EcalState of the execution
pub ecal: Box<EcalSyscallHandler>,
}
#[derive(Default, Debug, Clone, Copy)]
pub enum TestGasLimit {
#[default]
Default,
Unlimited,
Limited(u64),
}
const TEST_METADATA_SEED: u64 = 0x7E57u64;
/// A mapping from each member package of a build plan to its compiled contract dependencies.
type ContractDependencyMap = HashMap<pkg::Pinned, Vec<Arc<pkg::BuiltPackage>>>;
/// A package or a workspace that has been built, ready for test execution.
pub enum BuiltTests {
Package(PackageTests),
Workspace(Vec<PackageTests>),
}
/// A built package ready for test execution.
///
/// If the built package is a contract, a second built package for the same contract without the
/// tests are also populated.
///
/// For packages containing contracts or scripts, their [contract-dependencies] are needed for deployment.
#[derive(Debug)]
pub enum PackageTests {
Contract(PackageWithDeploymentToTest),
Script(PackageWithDeploymentToTest),
Predicate(Arc<pkg::BuiltPackage>),
Library(Arc<pkg::BuiltPackage>),
}
/// A built contract ready for test execution.
#[derive(Debug)]
pub struct ContractToTest {
/// Tests included contract.
pkg: Arc<pkg::BuiltPackage>,
/// Bytecode of the contract without tests.
without_tests_bytecode: pkg::BuiltPackageBytecode,
contract_dependencies: Vec<Arc<pkg::BuiltPackage>>,
}
/// A built script ready for test execution.
#[derive(Debug)]
pub struct ScriptToTest {
/// Tests included contract.
pkg: Arc<pkg::BuiltPackage>,
contract_dependencies: Vec<Arc<pkg::BuiltPackage>>,
}
/// A built package that requires deployment before test execution.
#[derive(Debug)]
pub enum PackageWithDeploymentToTest {
Script(ScriptToTest),
Contract(ContractToTest),
}
/// The set of options provided to the `test` function.
#[derive(Default, Clone)]
pub struct TestOpts {
pub pkg: pkg::PkgOpts,
pub print: pkg::PrintOpts,
pub verify_ir: IrCli,
pub minify: pkg::MinifyOpts,
/// If set, outputs a binary file representing the script bytes.
pub binary_outfile: Option<String>,
/// If set, outputs debug info to the provided file.
/// If the argument provided ends with .json, a JSON is emitted,
/// otherwise, an ELF file containing DWARF is emitted.
pub debug_outfile: Option<String>,
/// If set, generates a JSON file containing the hex-encoded script binary.
pub hex_outfile: Option<String>,
/// Build target to use.
pub build_target: BuildTarget,
/// Name of the build profile to use.
pub build_profile: String,
/// Use the release build profile.
/// The release profile can be customized in the manifest file.
pub release: bool,
/// Should warnings be treated as errors?
pub error_on_warnings: bool,
/// Output the time elapsed over each part of the compilation process.
pub time_phases: bool,
/// Profile the compilation process.
pub profile: bool,
/// Output compilation metrics into file.
pub metrics_outfile: Option<String>,
/// Set of enabled experimental flags
pub experimental: Vec<sway_features::Feature>,
/// Set of disabled experimental flags
pub no_experimental: Vec<sway_features::Feature>,
/// Do not output any build artifacts, e.g., bytecode, ABI JSON, etc.
pub no_output: bool,
}
/// The set of options provided for controlling logs printed for each test.
#[derive(Default, Clone)]
pub struct TestPrintOpts {
    /// Pretty-print the output for each test.
    pub pretty_print: bool,
    /// Print the logs emitted during test execution.
    pub print_logs: bool,
}
/// The source the gas cost values used during test execution are taken from.
#[derive(Default, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum GasCostsSource {
    /// Use the gas cost values bundled with forc-test (`gas_costs_values.json`).
    #[default]
    BuiltIn,
    /// Fetch gas cost values from mainnet (currently not implemented).
    Mainnet,
    /// Fetch gas cost values from testnet (currently not implemented).
    Testnet,
    /// Load gas cost values from a JSON file at the given path (currently not implemented).
    File(String),
}
impl GasCostsSource {
pub fn provide_gas_costs(&self) -> Result<GasCostsValues, anyhow::Error> {
match self {
// Values in the `gas_costs_values.json` are taken from the `chain-configuration` repository:
// chain-configuration/upgradelog/ignition/consensus_parameters/6.json
// Update these values when there are changes to the gas costs on-chain.
Self::BuiltIn => Ok(serde_json::from_str(include_str!(
"../gas_costs_values.json"
))?),
// TODO: (GAS-COSTS) Fetch actual gas costs from mainnet/testnet and JSON file.
// See: https://github.com/FuelLabs/sway/issues/7472
Self::Mainnet => Err(anyhow::anyhow!(
"Fetching gas costs from mainnet is currently not implemented."
)),
Self::Testnet => Err(anyhow::anyhow!(
"Fetching gas costs from testnet is currently not implemented."
)),
Self::File(_file_path) => Err(anyhow::anyhow!(
"Loading gas costs from a JSON file is currently not implemented."
)),
}
}
}
impl FromStr for GasCostsSource {
    type Err = anyhow::Error;

    /// Parses a gas costs source; any string that is not one of the known
    /// keywords is treated as a file path. This parse is therefore infallible.
    fn from_str(value: &str) -> Result<Self, Self::Err> {
        let source = match value {
            "built-in" => Self::BuiltIn,
            "mainnet" => Self::Mainnet,
            "testnet" => Self::Testnet,
            other => Self::File(other.to_string()),
        };
        Ok(source)
    }
}
/// A `LogData` decoded into a human readable format with its type information.
pub struct DecodedLog {
    /// The decoded, human-readable representation of the logged value.
    pub value: String,
}
impl TestedPackage {
    /// Returns `true` iff every test in this package passed.
    pub fn tests_passed(&self) -> bool {
        !self.tests.iter().any(|test| !test.passed())
    }
}
impl PackageWithDeploymentToTest {
    /// Returns a reference to the underlying `BuiltPackage`.
    ///
    /// If this is a contract built package with tests included is returned.
    fn pkg(&self) -> &BuiltPackage {
        match self {
            PackageWithDeploymentToTest::Script(script) => &script.pkg,
            PackageWithDeploymentToTest::Contract(contract) => &contract.pkg,
        }
    }

    /// Returns an iterator over contract dependencies of the package represented by this struct.
    fn contract_dependencies(&self) -> impl Iterator<Item = &Arc<BuiltPackage>> + '_ {
        match self {
            PackageWithDeploymentToTest::Script(script_to_test) => {
                script_to_test.contract_dependencies.iter()
            }
            PackageWithDeploymentToTest::Contract(contract_to_test) => {
                contract_to_test.contract_dependencies.iter()
            }
        }
    }

    /// Deploy the contract dependencies for packages that require deployment.
    ///
    /// For scripts deploys all contract dependencies.
    /// For contract deploys all contract dependencies and the root contract itself.
    ///
    /// Returns a `TestSetup` carrying a snapshot of the interpreter storage taken
    /// after all deployments, together with the deployed contract ids.
    fn deploy(&self) -> anyhow::Result<TestSetup> {
        // Setup the interpreter for deployment.
        let gas_price = 0;
        // We are not concerned about gas costs of contract deployments for tests,
        // only the gas costs of test executions. So, we can simply provide the
        // default, built-in, gas costs values here.
        let params = maxed_consensus_params(GasCostsValues::default(), TestGasLimit::default());
        let storage = vm::storage::MemoryStorage::default();
        let interpreter_params = InterpreterParams::new(gas_price, params.clone());
        let mut interpreter: vm::prelude::Interpreter<_, _, _, vm::interpreter::NotSupportedEcal> =
            vm::interpreter::Interpreter::with_storage(
                MemoryInstance::new(),
                storage,
                interpreter_params,
            );
        // Iterate and create deployment transactions for contract dependencies of the root
        // contract. Dependencies are deployed from their full (tests-included) bytecode.
        let contract_dependency_setups = self
            .contract_dependencies()
            .map(|built_pkg| deployment_transaction(built_pkg, &built_pkg.bytecode, &params));
        // Deploy contract dependencies of the root contract and collect their ids.
        let contract_dependency_ids = contract_dependency_setups
            .map(|(contract_id, tx)| {
                // Transact the deployment transaction constructed for this contract dependency.
                let tx = tx
                    .into_ready(gas_price, params.gas_costs(), params.fee_params(), None)
                    .unwrap();
                interpreter.transact(tx).map_err(anyhow::Error::msg)?;
                Ok(contract_id)
            })
            .collect::<anyhow::Result<Vec<_>>>()?;
        let deployment_setup = if let PackageWithDeploymentToTest::Contract(contract_to_test) = self
        {
            // Root contract is the contract that we are going to be running the tests of, after this
            // deployment. Note it is deployed from the *without tests* bytecode.
            let (root_contract_id, root_contract_tx) = deployment_transaction(
                &contract_to_test.pkg,
                &contract_to_test.without_tests_bytecode,
                &params,
            );
            let root_contract_tx = root_contract_tx
                .into_ready(gas_price, params.gas_costs(), params.fee_params(), None)
                .unwrap();
            // Deploy the root contract.
            interpreter
                .transact(root_contract_tx)
                .map_err(anyhow::Error::msg)?;
            // Snapshot the interpreter storage so every test starts from the deployed state.
            let storage = interpreter.as_ref().clone();
            DeploymentSetup::Contract(ContractTestSetup {
                storage,
                contract_dependency_ids,
                root_contract_id,
            })
        } else {
            let storage = interpreter.as_ref().clone();
            DeploymentSetup::Script(ScriptTestSetup {
                storage,
                contract_dependency_ids,
            })
        };
        Ok(TestSetup::WithDeployment(deployment_setup))
    }
}
/// Returns a mapping of each member package of a build plan to its compiled contract dependencies,
/// ordered by deployment order.
///
/// Each dependency package needs to be deployed before executing the test for that package.
fn get_contract_dependency_map(
    built: &Built,
    build_plan: &pkg::BuildPlan,
) -> ContractDependencyMap {
    let built_members: HashMap<&pkg::Pinned, Arc<pkg::BuiltPackage>> =
        built.into_members().collect();
    // For each member node, collect their contract dependencies.
    build_plan
        .member_nodes()
        .map(|member_node| {
            let graph = build_plan.graph();
            let pinned_member = graph[member_node].clone();
            let contract_dependencies = build_plan
                .contract_dependencies(member_node)
                .map(|contract_dependency_node_ix| graph[contract_dependency_node_ix].clone())
                // Dependencies with no corresponding built package are silently skipped.
                .filter_map(|pinned| built_members.get(&pinned))
                .cloned()
                .collect::<Vec<_>>();
            (pinned_member, contract_dependencies)
        })
        .collect()
}
impl BuiltTests {
    /// Constructs a `PackageTests` from `Built`.
    ///
    /// A single-package build yields `BuiltTests::Package`; a workspace build
    /// yields `BuiltTests::Workspace` with one `PackageTests` per member.
    pub fn from_built(built: Built, build_plan: &pkg::BuildPlan) -> anyhow::Result<BuiltTests> {
        let contract_dependencies = get_contract_dependency_map(&built, build_plan);
        match built {
            Built::Package(built_pkg) => {
                let pkg_tests = PackageTests::from_built_pkg(built_pkg, &contract_dependencies);
                Ok(BuiltTests::Package(pkg_tests))
            }
            Built::Workspace(built_workspace) => {
                let pkg_tests: Vec<_> = built_workspace
                    .into_iter()
                    .map(|member| PackageTests::from_built_pkg(member, &contract_dependencies))
                    .collect();
                Ok(BuiltTests::Workspace(pkg_tests))
            }
        }
    }
}
impl<'a> PackageTests {
    /// Return a reference to the underlying `BuiltPackage`.
    ///
    /// If this `PackageTests` is `PackageTests::Contract`, built package with tests included is
    /// returned.
    pub(crate) fn built_pkg_with_tests(&'a self) -> &'a BuiltPackage {
        match self {
            PackageTests::Contract(contract) => contract.pkg(),
            PackageTests::Script(script) => script.pkg(),
            PackageTests::Predicate(predicate) => predicate,
            PackageTests::Library(library) => library,
        }
    }

    /// Construct a `PackageTests` from `BuiltPackage`.
    ///
    /// A package with a "without tests" bytecode is treated as a contract; the
    /// remaining packages are classified by their tree type.
    fn from_built_pkg(
        built_pkg: Arc<BuiltPackage>,
        contract_dependencies: &ContractDependencyMap,
    ) -> PackageTests {
        let built_without_tests_bytecode = built_pkg.bytecode_without_tests.clone();
        // Contract dependencies recorded for this member, empty if none were found.
        let contract_dependencies: Vec<Arc<pkg::BuiltPackage>> = contract_dependencies
            .get(&built_pkg.descriptor.pinned)
            .cloned()
            .unwrap_or_default();
        match built_without_tests_bytecode {
            Some(contract_without_tests) => {
                let contract_to_test = ContractToTest {
                    pkg: built_pkg,
                    without_tests_bytecode: contract_without_tests,
                    contract_dependencies,
                };
                PackageTests::Contract(PackageWithDeploymentToTest::Contract(contract_to_test))
            }
            None => match built_pkg.tree_type {
                sway_core::language::parsed::TreeType::Predicate => {
                    PackageTests::Predicate(built_pkg)
                }
                sway_core::language::parsed::TreeType::Library => PackageTests::Library(built_pkg),
                sway_core::language::parsed::TreeType::Script => {
                    let script_to_test = ScriptToTest {
                        pkg: built_pkg,
                        contract_dependencies,
                    };
                    PackageTests::Script(PackageWithDeploymentToTest::Script(script_to_test))
                }
                _ => unreachable!("contracts are already handled"),
            },
        }
    }

    /// Run all tests after applying the provided filter and collect their results.
    ///
    /// Tests are executed in parallel on the given thread pool; each test gets its
    /// own `TestSetup` (fresh storage) via `self.setup()`.
    pub(crate) fn run_tests(
        &self,
        test_runners: &rayon::ThreadPool,
        test_filter: Option<&TestFilter>,
        gas_costs_values: GasCostsValues,
        gas_limit: TestGasLimit,
    ) -> anyhow::Result<TestedPackage> {
        let pkg_with_tests = self.built_pkg_with_tests();
        let tests = test_runners.install(|| {
            pkg_with_tests
                .bytecode
                .entries
                .par_iter()
                .filter_map(|entry| {
                    if let Some(test_entry) = entry.kind.test() {
                        // If a test filter is specified, only the tests containing the filter phrase in
                        // their name are going to be executed.
                        let name = entry.finalized.fn_name.clone();
                        if let Some(filter) = test_filter {
                            if !filter.filter(&name) {
                                return None;
                            }
                        }
                        return Some((entry, test_entry));
                    }
                    None
                })
                .map(|(entry, test_entry)| {
                    // Execute the test and return the result.
                    let offset = u32::try_from(entry.finalized.imm)
                        .expect("test instruction offset out of range");
                    let name = entry.finalized.fn_name.clone();
                    let test_setup = self.setup()?;
                    TestExecutor::build(
                        &pkg_with_tests.bytecode.bytes,
                        offset,
                        test_setup,
                        test_entry,
                        name,
                        gas_costs_values.clone(),
                        gas_limit,
                    )?
                    .execute()
                })
                // Short-circuits on the first executor build/run error.
                .collect::<anyhow::Result<_>>()
        })?;
        Ok(TestedPackage {
            built: Box::new(pkg_with_tests.clone()),
            tests,
        })
    }

    /// Setup the storage for a test and returns a contract id for testing contracts.
    ///
    /// For testing contracts, storage returned from this function contains the deployed contract.
    /// For other types, default storage is returned.
    pub fn setup(&self) -> anyhow::Result<TestSetup> {
        match self {
            PackageTests::Contract(contract_to_test) => {
                let test_setup = contract_to_test.deploy()?;
                Ok(test_setup)
            }
            PackageTests::Script(script_to_test) => {
                let test_setup = script_to_test.deploy()?;
                Ok(test_setup)
            }
            PackageTests::Predicate(_) | PackageTests::Library(_) => Ok(
                TestSetup::WithoutDeployment(vm::storage::MemoryStorage::default()),
            ),
        }
    }
}
impl From<TestOpts> for pkg::BuildOpts {
    /// Canonical mapping from test options to build options.
    ///
    /// All user-settable fields are forwarded; `dump` and `member_filter` take
    /// their defaults and `tests` is forced to `true`.
    fn from(val: TestOpts) -> Self {
        pkg::BuildOpts {
            pkg: val.pkg,
            print: val.print,
            verify_ir: val.verify_ir,
            minify: val.minify,
            dump: DumpOpts::default(),
            binary_outfile: val.binary_outfile,
            debug_outfile: val.debug_outfile,
            hex_outfile: val.hex_outfile,
            build_target: val.build_target,
            build_profile: val.build_profile,
            release: val.release,
            error_on_warnings: val.error_on_warnings,
            time_phases: val.time_phases,
            profile: val.profile,
            metrics_outfile: val.metrics_outfile,
            tests: true,
            member_filter: Default::default(),
            experimental: val.experimental,
            no_experimental: val.no_experimental,
            no_output: val.no_output,
        }
    }
}
impl TestOpts {
    /// Convert this set of test options into a set of build options.
    ///
    /// Delegates to the `From<TestOpts> for pkg::BuildOpts` implementation so the
    /// field mapping lives in exactly one place and the two conversion paths
    /// cannot drift apart.
    pub fn into_build_opts(self) -> pkg::BuildOpts {
        self.into()
    }
}
impl TestResult {
    /// Whether or not the test passed.
    pub fn passed(&self) -> bool {
        match &self.condition {
            // A should-revert test passes only if it actually reverted — and with
            // the exact code when one was specified.
            TestPassCondition::ShouldRevert(revert_code) => match revert_code {
                Some(revert_code) => self.state == vm::state::ProgramState::Revert(*revert_code),
                None => matches!(self.state, vm::state::ProgramState::Revert(_)),
            },
            TestPassCondition::ShouldNotRevert => {
                !matches!(self.state, vm::state::ProgramState::Revert(_))
            }
        }
    }

    /// Return the revert info for this [TestResult] if the test is reverted.
    pub fn revert_info(
        &self,
        program_abi: Option<&fuel_abi_types::abi::program::ProgramABI>,
        logs: &[fuel_tx::Receipt],
    ) -> Option<RevertInfo> {
        if let vm::state::ProgramState::Revert(revert_code) = self.state {
            // Only keep the decoded info if it agrees with the observed revert code.
            return forc_util::tx_utils::revert_info_from_receipts(
                logs,
                program_abi,
                Some(revert_code),
            )
            .filter(|info| info.revert_code == revert_code);
        }
        None
    }

    /// Return [TestDetails] from the span of the function declaring this test.
    ///
    /// Note: `line_number` is the count of newlines before the span start, i.e.
    /// it is zero-based.
    pub fn details(&self) -> anyhow::Result<TestDetails> {
        let span_start = self.span.start();
        let file_str = fs::read_to_string(&*self.file_path)?;
        let line_number = file_str[..span_start]
            .chars()
            .filter(|&c| c == '\n')
            .count();
        Ok(TestDetails {
            file_path: self.file_path.clone(),
            line_number,
        })
    }
}
/// Used to control test runner count for forc-test. Number of runners to use can be specified using
/// `Manual` or can be left forc-test to decide by using `Auto`.
pub enum TestRunnerCount {
    /// Use exactly this many runner threads.
    Manual(usize),
    /// Let the thread pool choose its default thread count.
    Auto,
}
/// Aggregate counts of the tests discovered in a build.
#[derive(Clone, Debug, Default)]
pub struct TestCount {
    /// Total number of test entries found.
    pub total: usize,
    /// Number of tests excluded by the active test filter.
    pub ignored: usize,
}
impl TestFilter<'_> {
    /// Returns `true` if the given test function name matches this filter —
    /// either exactly, or as a substring, depending on `exact_match`.
    fn filter(&self, fn_name: &str) -> bool {
        match self.exact_match {
            true => fn_name == self.filter_phrase,
            false => fn_name.contains(self.filter_phrase),
        }
    }
}
impl BuiltTests {
    /// The total number of tests.
    ///
    /// `total` counts every test entry; `ignored` counts those excluded by the
    /// filter (always zero when no filter is provided).
    pub fn test_count(&self, test_filter: Option<&TestFilter>) -> TestCount {
        let pkgs: Vec<&PackageTests> = match self {
            BuiltTests::Package(pkg) => vec![pkg],
            BuiltTests::Workspace(workspace) => workspace.iter().collect(),
        };
        pkgs.iter()
            .flat_map(|pkg| {
                pkg.built_pkg_with_tests()
                    .bytecode
                    .entries
                    .iter()
                    .filter_map(|entry| entry.kind.test().map(|test| (entry, test)))
            })
            .fold(TestCount::default(), |acc, (pkg_entry, _)| {
                let num_ignored = match &test_filter {
                    Some(filter) => {
                        if filter.filter(&pkg_entry.finalized.fn_name) {
                            acc.ignored
                        } else {
                            acc.ignored + 1
                        }
                    }
                    None => acc.ignored,
                };
                TestCount {
                    total: acc.total + 1,
                    ignored: num_ignored,
                }
            })
    }

    /// Run all built tests, return the result.
    pub fn run(
        self,
        test_runner_count: TestRunnerCount,
        test_filter: Option<TestFilter>,
        gas_costs_values: GasCostsValues,
        gas_limit: TestGasLimit,
    ) -> anyhow::Result<Tested> {
        // Build the rayon pool: fixed-size when `Manual`, rayon's default sizing otherwise.
        let test_runners = match test_runner_count {
            TestRunnerCount::Manual(runner_count) => rayon::ThreadPoolBuilder::new()
                .num_threads(runner_count)
                .build(),
            TestRunnerCount::Auto => rayon::ThreadPoolBuilder::new().build(),
        }?;
        run_tests(
            self,
            &test_runners,
            test_filter,
            gas_costs_values,
            gas_limit,
        )
    }
}
/// First builds the package or workspace, ready for execution.
pub fn build(opts: TestOpts) -> anyhow::Result<BuiltTests> {
    let build_opts = pkg::BuildOpts::from(opts);
    let build_plan = pkg::BuildPlan::from_pkg_opts(&build_opts.pkg)?;
    let built = pkg::build_with_options(&build_opts, None)?;
    BuiltTests::from_built(built, &build_plan)
}
/// Returns a `ConsensusParameters` which has maximum length/size allowance for scripts, contracts,
/// and transactions.
///
/// The per-transaction gas allowance is derived from `gas_limit`; `gas_costs_values`
/// supplies the gas cost table embedded in the returned parameters.
pub(crate) fn maxed_consensus_params(
    gas_costs_values: GasCostsValues,
    gas_limit: TestGasLimit,
) -> ConsensusParameters {
    let script_params = ScriptParameters::DEFAULT
        .with_max_script_length(u64::MAX)
        .with_max_script_data_length(u64::MAX);
    let tx_params = match gas_limit {
        TestGasLimit::Default => TxParameters::DEFAULT,
        TestGasLimit::Unlimited => TxParameters::DEFAULT.with_max_gas_per_tx(u64::MAX),
        TestGasLimit::Limited(limit) => TxParameters::DEFAULT.with_max_gas_per_tx(limit),
    }
    .with_max_size(u64::MAX);
    let contract_params = ContractParameters::DEFAULT
        .with_contract_max_size(u64::MAX)
        .with_max_storage_slots(u64::MAX);
    ConsensusParameters::V1(ConsensusParametersV1 {
        script_params,
        tx_params,
        contract_params,
        gas_costs: gas_costs_values.into(),
        block_gas_limit: u64::MAX,
        ..Default::default()
    })
}
/// Builds a deployment transaction for the given built package and returns it together with the
/// deterministic id of the contract it deploys.
///
/// Nothing is executed here: the caller is expected to transact the returned
/// (checked) transaction on its own interpreter instance.
fn deployment_transaction(
    built_pkg: &pkg::BuiltPackage,
    without_tests_bytecode: &pkg::BuiltPackageBytecode,
    params: &tx::ConsensusParameters,
) -> ContractDeploymentSetup {
    // Obtain the contract id for deployment.
    let mut storage_slots = built_pkg.storage_slots.clone();
    storage_slots.sort();
    let bytecode = &without_tests_bytecode.bytes;
    let contract = tx::Contract::from(bytecode.clone());
    let root = contract.root();
    let state_root = tx::Contract::initial_state_root(storage_slots.iter());
    let salt = tx::Salt::zeroed();
    let contract_id = tx::Contract::id(&salt, &root, &state_root);
    // Create the deployment transaction.
    // A seeded RNG keeps the generated metadata (and thus the tx) deterministic.
    let rng = &mut rand::rngs::StdRng::seed_from_u64(TEST_METADATA_SEED);
    // Prepare the transaction metadata.
    let secret_key = SecretKey::random(rng);
    let utxo_id = rng.r#gen();
    let amount = 1;
    let maturity = 1u32.into();
    // NOTE: fuel-core is using dynamic asset id and interacting with the fuel-core, using static
    // asset id is not correct. But since forc-test maintains its own interpreter instance, correct
    // base asset id is indeed the static `tx::AssetId::BASE`.
    let asset_id = tx::AssetId::BASE;
    let tx_pointer = rng.r#gen();
    let block_height = (u32::MAX >> 1).into();
    let tx = tx::TransactionBuilder::create(bytecode.as_slice().into(), salt, storage_slots)
        .with_params(params.clone())
        .add_unsigned_coin_input(secret_key, utxo_id, amount, asset_id, tx_pointer)
        .add_output(tx::Output::contract_created(contract_id, state_root))
        .maturity(maturity)
        .finalize_checked(block_height);
    (contract_id, tx)
}
/// Build the given package and run its tests after applying the filter provided.
///
/// Returns the result of test execution.
fn run_tests(
    built: BuiltTests,
    test_runners: &rayon::ThreadPool,
    test_filter: Option<TestFilter>,
    gas_costs_values: GasCostsValues,
    gas_limit: TestGasLimit,
) -> anyhow::Result<Tested> {
    match built {
        BuiltTests::Package(pkg) => {
            let tested_pkg = pkg.run_tests(
                test_runners,
                test_filter.as_ref(),
                gas_costs_values.clone(),
                gas_limit,
            )?;
            Ok(Tested::Package(Box::new(tested_pkg)))
        }
        BuiltTests::Workspace(workspace) => {
            // Workspace members are tested one after another; parallelism happens
            // inside each member's `run_tests` via the shared thread pool.
            let tested_pkgs = workspace
                .into_iter()
                .map(|pkg| {
                    pkg.run_tests(
                        test_runners,
                        test_filter.as_ref(),
                        gas_costs_values.clone(),
                        gas_limit,
                    )
                })
                .collect::<anyhow::Result<Vec<TestedPackage>>>()?;
            Ok(Tested::Workspace(tested_pkgs))
        }
    }
}
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use fuel_tx::GasCostsValues;
use crate::{build, BuiltTests, TestFilter, TestGasLimit, TestOpts, TestResult};
/// Name of the folder containing required data for tests to run, such as an example forc
/// project.
const TEST_DATA_FOLDER_NAME: &str = "test_data";
/// Name of the library package in the "CARGO_MANIFEST_DIR/TEST_DATA_FOLDER_NAME".
const TEST_LIBRARY_PACKAGE_NAME: &str = "test_library";
/// Name of the contract package in the "CARGO_MANIFEST_DIR/TEST_DATA_FOLDER_NAME".
const TEST_CONTRACT_PACKAGE_NAME: &str = "test_contract";
/// Name of the predicate package in the "CARGO_MANIFEST_DIR/TEST_DATA_FOLDER_NAME".
const TEST_PREDICATE_PACKAGE_NAME: &str = "test_predicate";
/// Name of the script package in the "CARGO_MANIFEST_DIR/TEST_DATA_FOLDER_NAME".
const TEST_SCRIPT_PACKAGE_NAME: &str = "test_script";
/// Build the tests in the test package with the given name located at
/// "CARGO_MANIFEST_DIR/TEST_DATA_FOLDER_NAME/TEST_LIBRARY_PACKAGE_NAME".
fn test_package_built_tests(package_name: &str) -> anyhow::Result<BuiltTests> {
let cargo_manifest_dir = env!("CARGO_MANIFEST_DIR");
let library_package_dir = PathBuf::from(cargo_manifest_dir)
.join(TEST_DATA_FOLDER_NAME)
.join(package_name);
let library_package_dir_string = library_package_dir.to_string_lossy().to_string();
let build_options = TestOpts {
pkg: forc_pkg::PkgOpts {
path: Some(library_package_dir_string),
..Default::default()
},
..Default::default()
};
build(build_options)
}
fn test_package_test_results(
package_name: &str,
test_filter: Option<TestFilter>,
) -> anyhow::Result<Vec<TestResult>> {
let built_tests = test_package_built_tests(package_name)?;
let test_runner_count = crate::TestRunnerCount::Auto;
let tested = built_tests.run(
test_runner_count,
test_filter,
GasCostsValues::default(),
TestGasLimit::default(),
)?;
match tested {
crate::Tested::Package(tested_pkg) => Ok(tested_pkg.tests),
crate::Tested::Workspace(_) => {
unreachable!("test_library is a package, not a workspace.")
}
}
}
#[test]
fn test_filter_exact_match() {
let filter_phrase = "test_bam";
let test_filter = TestFilter {
filter_phrase,
exact_match: true,
};
let test_library_results =
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-test/src/execute.rs | forc-test/src/execute.rs | use crate::ecal::EcalSyscallHandler;
use crate::maxed_consensus_params;
use crate::setup::TestSetup;
use crate::TestGasLimit;
use crate::TestResult;
use crate::TEST_METADATA_SEED;
use forc_pkg::PkgTestEntry;
use fuel_tx::GasCostsValues;
use fuel_tx::{self as tx, output::contract::Contract, Chargeable, Finalizable};
use fuel_vm::error::InterpreterError;
use fuel_vm::fuel_asm;
use fuel_vm::prelude::Instruction;
use fuel_vm::prelude::RegId;
use fuel_vm::{
self as vm, checked_transaction::builder::TransactionBuilderExt, interpreter::Interpreter,
prelude::SecretKey, storage::MemoryStorage,
};
use rand::{Rng, SeedableRng};
use tx::Receipt;
use vm::interpreter::{InterpreterParams, MemoryInstance};
use vm::state::DebugEval;
use vm::state::ProgramState;
/// An interface for executing a test within a VM [Interpreter] instance.
#[derive(Debug, Clone)]
pub struct TestExecutor {
    /// The VM instance the test runs in, loaded with the test setup's storage.
    pub interpreter: Interpreter<MemoryInstance, MemoryStorage, tx::Script, EcalSyscallHandler>,
    /// The fully-checked script transaction carrying the test bytecode.
    pub tx: vm::checked_transaction::Ready<tx::Script>,
    /// Metadata of the test entry being executed.
    pub test_entry: PkgTestEntry,
    /// Name of the test function.
    pub name: String,
    /// Index of the entry instruction we single-step to before jumping into the
    /// test (see `find_jump_instruction_index`).
    pub jump_instruction_index: usize,
    /// Byte distance from the entry instruction to the test's first instruction.
    pub relative_jump_in_bytes: u32,
}
/// The result of executing a test with breakpoints enabled.
#[derive(Debug)]
pub enum DebugResult {
    /// Holds the test result.
    TestComplete(TestResult),
    /// Holds the program counter of where the program stopped due to a breakpoint.
    Breakpoint(u64),
}
impl TestExecutor {
    /// Builds a `TestExecutor` for a single test entry.
    ///
    /// Constructs a script transaction around the full (tests-included) bytecode,
    /// maximizes its gas allowance against the given consensus parameters, and
    /// prepares an interpreter over the storage taken from `test_setup`.
    pub fn build(
        bytecode: &[u8],
        test_instruction_index: u32,
        test_setup: TestSetup,
        test_entry: &PkgTestEntry,
        name: String,
        gas_costs_values: GasCostsValues,
        gas_limit: TestGasLimit,
    ) -> anyhow::Result<Self> {
        let storage = test_setup.storage().clone();
        // Find the instruction which we will jump into the
        // specified test
        let jump_instruction_index = find_jump_instruction_index(bytecode);
        // Create a transaction to execute the test function.
        let script_input_data = vec![];
        let rng = &mut rand::rngs::StdRng::seed_from_u64(TEST_METADATA_SEED);
        // Prepare the transaction metadata.
        let secret_key = SecretKey::random(rng);
        let utxo_id = rng.r#gen();
        let amount = 1;
        let maturity = 1.into();
        // NOTE: fuel-core is using dynamic asset id and interacting with the fuel-core, using static
        // asset id is not correct. But since forc-test maintains its own interpreter instance, correct
        // base asset id is indeed the static `tx::AssetId::BASE`.
        let asset_id = tx::AssetId::BASE;
        let tx_pointer = rng.r#gen();
        let block_height = (u32::MAX >> 1).into();
        let gas_price = 0;
        let mut tx_builder = tx::TransactionBuilder::script(bytecode.to_vec(), script_input_data);
        let params = maxed_consensus_params(gas_costs_values, gas_limit);
        tx_builder
            .with_params(params)
            .add_unsigned_coin_input(secret_key, utxo_id, amount, asset_id, tx_pointer)
            .maturity(maturity);
        let mut output_index = 1;
        // Insert contract ids into tx input
        for contract_id in test_setup.contract_ids() {
            tx_builder
                .add_input(tx::Input::contract(
                    tx::UtxoId::new(tx::Bytes32::zeroed(), 0),
                    tx::Bytes32::zeroed(),
                    tx::Bytes32::zeroed(),
                    tx::TxPointer::new(0u32.into(), 0),
                    contract_id,
                ))
                .add_output(tx::Output::Contract(Contract {
                    input_index: output_index,
                    balance_root: fuel_tx::Bytes32::zeroed(),
                    state_root: tx::Bytes32::zeroed(),
                }));
            output_index += 1;
        }
        let consensus_params = tx_builder.get_params().clone();
        // Temporarily finalize to calculate `script_gas_limit`
        let tmp_tx = tx_builder.clone().finalize();
        // Get `max_gas` used by everything except the script execution. Add `1` because of rounding.
        let max_gas =
            tmp_tx.max_gas(consensus_params.gas_costs(), consensus_params.fee_params()) + 1;
        // Increase `script_gas_limit` to the maximum allowed value.
        tx_builder.script_gas_limit(consensus_params.tx_params().max_gas_per_tx() - max_gas);
        // We need to increase the tx size limit as the default is 110 * 1024 and for big tests
        // such as std this is not enough.
        let tx = tx_builder
            .finalize_checked(block_height)
            .into_ready(
                gas_price,
                consensus_params.gas_costs(),
                consensus_params.fee_params(),
                None,
            )
            .map_err(|e| anyhow::anyhow!("{e:?}"))?;
        let interpreter_params = InterpreterParams::new(gas_price, &consensus_params);
        let memory_instance = MemoryInstance::new();
        let interpreter = Interpreter::with_storage(memory_instance, storage, interpreter_params);
        Ok(TestExecutor {
            interpreter,
            tx,
            test_entry: test_entry.clone(),
            name,
            jump_instruction_index,
            // Byte distance from the entry instruction to the test's first instruction.
            relative_jump_in_bytes: (test_instruction_index - jump_instruction_index as u32)
                * Instruction::SIZE as u32,
        })
    }

    // single-step until the jump-to-test instruction, then
    // jump into the first instruction of the test
    fn single_step_until_test(&mut self) -> ProgramState {
        let jump_pc = (self.jump_instruction_index * Instruction::SIZE) as u64;
        let old_single_stepping = self.interpreter.single_stepping();
        self.interpreter.set_single_stepping(true);
        let mut state = {
            let transition = self.interpreter.transact(self.tx.clone());
            Ok(*transition.unwrap().state())
        };
        loop {
            match state {
                // if the VM fails, we interpret as a revert
                Err(_) => {
                    break ProgramState::Revert(0);
                }
                // Terminal states: the program finished before reaching the test.
                Ok(
                    state @ ProgramState::Return(_)
                    | state @ ProgramState::ReturnData(_)
                    | state @ ProgramState::Revert(_),
                ) => break state,
                Ok(
                    s @ ProgramState::RunProgram(eval) | s @ ProgramState::VerifyPredicate(eval),
                ) => {
                    // time to jump into the specified test
                    if let Some(b) = eval.breakpoint() {
                        if b.pc() == jump_pc {
                            self.interpreter.registers_mut()[RegId::PC] +=
                                self.relative_jump_in_bytes as u64;
                            self.interpreter.set_single_stepping(old_single_stepping);
                            break s;
                        }
                    }
                    state = self.interpreter.resume();
                }
            }
        }
    }

    /// Execute the test with breakpoints enabled.
    pub fn start_debugging(&mut self) -> anyhow::Result<DebugResult> {
        let start = std::time::Instant::now();
        let _ = self.single_step_until_test();
        let state = self
            .interpreter
            .resume()
            .map_err(|err: InterpreterError<_>| {
                anyhow::anyhow!("VM failed to resume. {:?}", err)
            })?;
        if let ProgramState::RunProgram(DebugEval::Breakpoint(breakpoint)) = state {
            // A breakpoint was hit, so we tell the client to stop.
            return Ok(DebugResult::Breakpoint(breakpoint.pc()));
        }
        let duration = start.elapsed();
        let (gas_used, logs) = Self::get_gas_and_receipts(self.interpreter.receipts().to_vec())?;
        let span = self.test_entry.span.clone();
        let file_path = self.test_entry.file_path.clone();
        let condition = self.test_entry.pass_condition.clone();
        let name = self.name.clone();
        Ok(DebugResult::TestComplete(TestResult {
            name,
            file_path,
            duration,
            span,
            state,
            condition,
            logs,
            gas_used,
            ecal: Box::new(self.interpreter.ecal_state().clone()),
        }))
    }

    /// Continue executing the test with breakpoints enabled.
    pub fn continue_debugging(&mut self) -> anyhow::Result<DebugResult> {
        let start = std::time::Instant::now();
        let state = self
            .interpreter
            .resume()
            .map_err(|err: InterpreterError<_>| {
                anyhow::anyhow!("VM failed to resume. {:?}", err)
            })?;
        if let ProgramState::RunProgram(DebugEval::Breakpoint(breakpoint)) = state {
            // A breakpoint was hit, so we tell the client to stop.
            return Ok(DebugResult::Breakpoint(breakpoint.pc()));
        }
        let duration = start.elapsed();
        let (gas_used, logs) = Self::get_gas_and_receipts(self.interpreter.receipts().to_vec())?; // TODO: calculate cumulative
        let span = self.test_entry.span.clone();
        let file_path = self.test_entry.file_path.clone();
        let condition = self.test_entry.pass_condition.clone();
        let name = self.name.clone();
        Ok(DebugResult::TestComplete(TestResult {
            name,
            file_path,
            duration,
            span,
            state,
            condition,
            logs,
            gas_used,
            ecal: Box::new(self.interpreter.ecal_state().clone()),
        }))
    }

    /// Executes the test to completion (no breakpoints) and returns its result.
    pub fn execute(&mut self) -> anyhow::Result<TestResult> {
        self.interpreter.ecal_state_mut().clear();
        let start = std::time::Instant::now();
        let mut state = Ok(self.single_step_until_test());
        // Run test until its end
        loop {
            match state {
                // A VM failure is interpreted as a revert with code 0.
                Err(_) => {
                    state = Ok(ProgramState::Revert(0));
                    break;
                }
                Ok(
                    ProgramState::Return(_) | ProgramState::ReturnData(_) | ProgramState::Revert(_),
                ) => break,
                Ok(ProgramState::RunProgram(_) | ProgramState::VerifyPredicate(_)) => {
                    state = self.interpreter.resume();
                }
            }
        }
        let duration = start.elapsed();
        let (gas_used, logs) = Self::get_gas_and_receipts(self.interpreter.receipts().to_vec())?;
        let span = self.test_entry.span.clone();
        let file_path = self.test_entry.file_path.clone();
        let condition = self.test_entry.pass_condition.clone();
        let name = self.name.clone();
        Ok(TestResult {
            name,
            file_path,
            duration,
            span,
            state: state.unwrap(),
            condition,
            logs,
            gas_used,
            ecal: Box::new(self.interpreter.ecal_state().clone()),
        })
    }

    /// Extracts the gas used and the log receipts from a test's receipts.
    ///
    /// Errors if no `ScriptResult` receipt is present.
    fn get_gas_and_receipts(receipts: Vec<Receipt>) -> anyhow::Result<(u64, Vec<Receipt>)> {
        let gas_used = *receipts
            .iter()
            .find_map(|receipt| match receipt {
                tx::Receipt::ScriptResult { gas_used, .. } => Some(gas_used),
                _ => None,
            })
            .ok_or_else(|| anyhow::anyhow!("missing used gas information from test execution"))?;
        // Only retain `Log` and `LogData` receipts.
        let logs = receipts
            .into_iter()
            .filter(|receipt| {
                matches!(receipt, tx::Receipt::Log { .. })
                    || matches!(receipt, tx::Receipt::LogData { .. })
            })
            .collect();
        Ok((gas_used, logs))
    }
}
/// Returns the index (in instructions) of the program entry instruction that the
/// executor places its pre-test breakpoint on.
///
/// Panics with a descriptive message if the bytecode contains no recognizable
/// entry instruction — previously this was a bare `unwrap()` with no context.
fn find_jump_instruction_index(bytecode: &[u8]) -> usize {
    // Search first `move $$locbase $sp`
    // This will be `__entry` for script/predicate/contract using encoding v1;
    // `main` for script/predicate using encoding v0;
    // or the first function for libraries
    // MOVE R59 $sp ;; [26, 236, 80, 0]
    let a = vm::fuel_asm::op::move_(59, fuel_asm::RegId::SP).to_bytes();
    // for contracts using encoding v0
    // search the first `lw $r0 $fp i73`
    // which is the start of the fn selector
    // LW $writable $fp 0x49 ;; [93, 64, 96, 73]
    let b = vm::fuel_asm::op::lw(fuel_asm::RegId::WRITABLE, fuel_asm::RegId::FP, 73).to_bytes();
    bytecode
        .chunks(Instruction::SIZE)
        .position(|instruction| {
            let instruction: [u8; 4] = instruction
                .try_into()
                .expect("bytecode length is not a multiple of the instruction size");
            instruction == a || instruction == b
        })
        .expect("no entry-point instruction found in test bytecode")
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/lib.rs | forc/src/lib.rs | pub mod cli;
pub mod ops;
pub mod utils;
/// Re-exports for testing; only compiled when the `test` feature is enabled.
#[cfg(feature = "test")]
pub mod test {
    pub use crate::cli::BuildCommand;
    pub use crate::ops::{forc_build, forc_check};
}
/// Utility re-exports; only compiled when the `util` feature is enabled.
#[cfg(feature = "util")]
pub mod util {
    pub use sway_utils::constants;
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/main.rs | forc/src/main.rs | use forc_util::ForcCliResult;
/// Forc CLI entry point: runs the CLI and converts its result into a
/// `ForcCliResult`, which serves as the process exit value.
#[tokio::main]
async fn main() -> ForcCliResult<()> {
    forc::cli::run_cli().await.into()
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/utils/program_type.rs | forc/src/utils/program_type.rs | #[derive(Debug)]
/// The kind of program a Sway package builds into.
pub enum ProgramType {
    Contract,
    Script,
    Predicate,
    Library,
}

impl std::fmt::Display for ProgramType {
    /// Writes the lowercase name of the program type.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            ProgramType::Contract => "contract",
            ProgramType::Script => "script",
            ProgramType::Predicate => "predicate",
            ProgramType::Library => "library",
        };
        f.write_str(name)
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/utils/mod.rs | forc/src/utils/mod.rs | pub mod defaults;
pub mod program_type;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/utils/defaults.rs | forc/src/utils/defaults.rs | /// We intentionally don't construct this using [serde]'s default deserialization so we get
/// the chance to insert some helpful comments and nicer formatting.
pub(crate) fn default_pkg_manifest(project_name: &str, entry_type: &str) -> String {
let author = get_author();
format!(
r#"[project]
authors = ["{author}"]
entry = "{entry_type}"
license = "Apache-2.0"
name = "{project_name}"
[dependencies]
"#
)
}
pub(crate) fn default_workspace_manifest() -> String {
r#"[workspace]
members = []"#
.to_string()
}
pub(crate) fn default_contract() -> String {
r#"contract;
abi MyContract {
fn test_function() -> bool;
}
impl MyContract for Contract {
fn test_function() -> bool {
true
}
}
"#
.into()
}
pub(crate) fn default_script() -> String {
r#"script;
use std::logging::log;
configurable {
SECRET_NUMBER: u64 = 0
}
fn main() -> u64 {
log(SECRET_NUMBER);
return SECRET_NUMBER;
}
"#
.into()
}
pub(crate) fn default_library() -> String {
"library;
// anything `pub` here will be exported as a part of this library's API
"
.into()
}
pub(crate) fn default_predicate() -> String {
r#"predicate;
fn main() -> bool {
true
}
"#
.into()
}
pub(crate) fn default_gitignore() -> String {
r#"out
target
"#
.into()
}
fn get_author() -> String {
std::env::var(sway_utils::FORC_INIT_MANIFEST_AUTHOR).unwrap_or_else(|_| whoami::realname())
}
#[test]
fn parse_default_pkg_manifest() {
use sway_utils::constants::MAIN_ENTRY;
tracing::info!(
"{:#?}",
toml::from_str::<forc_pkg::PackageManifest>(&default_pkg_manifest("test_proj", MAIN_ENTRY))
.unwrap()
)
}
#[test]
fn parse_default_workspace_manifest() {
tracing::info!(
"{:#?}",
toml::from_str::<forc_pkg::PackageManifest>(&default_workspace_manifest()).unwrap()
)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/mod.rs | forc/src/cli/mod.rs | use self::commands::{
add, addr2line, build, check, clean, completions, contract_id, init, new, parse_bytecode,
plugins, predicate_root, remove, template, test, update,
};
pub use add::Command as AddCommand;
use addr2line::Command as Addr2LineCommand;
use anyhow::anyhow;
pub use build::Command as BuildCommand;
pub use check::Command as CheckCommand;
use clap::{Parser, Subcommand};
pub use clean::Command as CleanCommand;
pub use completions::Command as CompletionsCommand;
pub(crate) use contract_id::Command as ContractIdCommand;
use forc_tracing::{init_tracing_subscriber, TracingSubscriberOptions};
use forc_util::ForcResult;
pub use init::Command as InitCommand;
pub use new::Command as NewCommand;
use parse_bytecode::Command as ParseBytecodeCommand;
pub use plugins::Command as PluginsCommand;
pub(crate) use predicate_root::Command as PredicateRootCommand;
pub use remove::Command as RemoveCommand;
use std::str::FromStr;
pub use template::Command as TemplateCommand;
pub use test::Command as TestCommand;
use tracing::metadata::LevelFilter;
pub use update::Command as UpdateCommand;
mod commands;
mod plugin;
pub mod shared;
fn help() -> &'static str {
Box::leak(
format!(
"Examples:\n{}{}{}{}",
plugins::examples(),
test::examples(),
build::examples(),
check::examples(),
)
.trim_end()
.to_string()
.into_boxed_str(),
)
}
#[derive(Debug, Parser)]
#[clap(name = "forc", about = "Fuel Orchestrator", version, after_long_help = help())]
struct Opt {
/// The command to run
#[clap(subcommand)]
command: Forc,
/// Use verbose output
#[clap(short, long, action = clap::ArgAction::Count, global = true)]
verbose: u8,
/// Silence all output
#[clap(short, long, global = true)]
silent: bool,
/// Set the log level
#[clap(short='L', long, global = true, value_parser = LevelFilter::from_str)]
log_level: Option<LevelFilter>,
}
#[derive(Subcommand, Debug)]
enum Forc {
Add(AddCommand),
#[clap(name = "addr2line")]
Addr2Line(Addr2LineCommand),
#[clap(visible_alias = "b")]
Build(BuildCommand),
Check(CheckCommand),
Clean(CleanCommand),
Completions(CompletionsCommand),
New(NewCommand),
Init(InitCommand),
ParseBytecode(ParseBytecodeCommand),
#[clap(visible_alias = "t")]
Test(TestCommand),
Remove(RemoveCommand),
Update(UpdateCommand),
Plugins(PluginsCommand),
Template(TemplateCommand),
ContractId(ContractIdCommand),
PredicateRoot(PredicateRootCommand),
/// This is a catch-all for unknown subcommands and their arguments.
///
/// When we receive an unknown subcommand, we check for a plugin exe named
/// `forc-<unknown-subcommand>` and try to execute it:
///
/// ```ignore
/// forc-<unknown-subcommand> <args>
/// ```
#[clap(external_subcommand)]
Plugin(Vec<String>),
}
impl Forc {
#[allow(dead_code)]
pub fn possible_values() -> Vec<&'static str> {
vec![
"add",
"addr2line",
"build",
"check",
"clean",
"completions",
"init",
"new",
"parse-bytecode",
"plugins",
"test",
"update",
"template",
"remove",
"contract-id",
"predicate-root",
]
}
}
pub async fn run_cli() -> ForcResult<()> {
let opt = Opt::parse();
let tracing_options = TracingSubscriberOptions {
verbosity: Some(opt.verbose),
silent: Some(opt.silent),
log_level: opt.log_level,
..Default::default()
};
init_tracing_subscriber(tracing_options);
match opt.command {
Forc::Add(command) => add::exec(command),
Forc::Addr2Line(command) => addr2line::exec(command),
Forc::Build(command) => build::exec(command),
Forc::Check(command) => check::exec(command),
Forc::Clean(command) => clean::exec(command),
Forc::Completions(command) => completions::exec(command),
Forc::Init(command) => init::exec(command),
Forc::New(command) => new::exec(command),
Forc::ParseBytecode(command) => parse_bytecode::exec(command),
Forc::Plugins(command) => plugins::exec(command),
Forc::Test(command) => test::exec(command),
Forc::Update(command) => update::exec(command),
Forc::Remove(command) => remove::exec(command),
Forc::Template(command) => template::exec(command),
Forc::ContractId(command) => contract_id::exec(command),
Forc::PredicateRoot(command) => predicate_root::exec(command),
Forc::Plugin(args) => {
let output = plugin::execute_external_subcommand(&args)?;
let code = output
.status
.code()
.ok_or_else(|| anyhow!("plugin exit status unknown"))?;
std::process::exit(code);
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/shared.rs | forc/src/cli/shared.rs | //! Sets of arguments that are shared between commands.
use clap::{ArgGroup, Args, Parser};
use forc_pkg::source::IPFSNode;
use sway_core::{BuildTarget, IrCli, PrintAsm};
use sway_ir::PassManager;
#[derive(Debug, Args)]
#[command(group(
ArgGroup::new("source")
.required(false)
.args(["path", "git", "ipfs"]),
))]
pub struct SourceArgs {
/// Local path to the package.
#[arg(long)]
pub path: Option<String>,
/// Git URI for the package.
#[arg(long, value_name = "URI")]
pub git: Option<String>,
/// Git reference options like `branch`, `rev`, etc.
#[clap(flatten)]
pub git_ref: GitRef,
/// IPFS CID for the package.
#[arg(long, value_name = "CID")]
pub ipfs: Option<String>,
}
#[derive(Args, Debug, Default)]
#[command(group(
ArgGroup::new("git_ref")
.args(["branch", "tag", "rev"])
.multiple(false)
.requires("git")
))]
pub struct GitRef {
/// The branch to use.
#[arg(long)]
pub branch: Option<String>,
/// The tag to use.
#[arg(long)]
pub tag: Option<String>,
/// The specific revision to use.
#[arg(long)]
pub rev: Option<String>,
}
#[derive(Args, Debug, Default)]
pub struct SectionArgs {
/// Treats dependency as contract dependencies.
#[arg(long = "contract-dep")]
pub contract_deps: bool,
/// Salt value for contract deployment.
#[arg(long = "salt")]
pub salt: Option<String>,
}
#[derive(Args, Debug, Default)]
pub struct ManifestArgs {
/// Path to the manifest file.
#[arg(long, value_name = "PATH")]
pub manisfest_path: Option<String>,
}
#[derive(Args, Debug, Default)]
pub struct PackagesSelectionArgs {
/// Package to perform action on.
#[arg(long, short = 'p', value_name = "SPEC")]
pub package: Option<String>,
}
/// Args that can be shared between all commands that `build` a package. E.g. `build`, `test`,
/// `deploy`.
#[derive(Debug, Default, Parser)]
pub struct Build {
#[clap(flatten)]
pub pkg: Pkg,
#[clap(flatten)]
pub print: Print,
/// Verify the generated Sway IR (Intermediate Representation).
///
/// Values that can be combined:
/// - initial: initial IR prior to any optimization passes.
/// - final: final IR after applying all optimization passes.
/// - <pass name>: the name of an optimization pass. Verifies the IR state after that pass.
/// - all: short for initial, final, and all the optimization passes.
/// - modified: verify a requested optimization pass only if it has modified the IR.
#[arg(long, verbatim_doc_comment, num_args(1..=IrCliOpt::max_num_args()), value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
pub verify_ir: Option<Vec<String>>,
#[clap(flatten)]
pub minify: Minify,
#[clap(flatten)]
pub output: BuildOutput,
#[clap(flatten)]
pub profile: BuildProfile,
/// Build target to use for code generation.
#[clap(long, value_enum, default_value_t = BuildTarget::default(), alias="target")]
pub build_target: BuildTarget,
#[clap(flatten)]
pub dump: Dump,
}
/// Build output file options.
#[derive(Args, Debug, Default)]
pub struct BuildOutput {
/// Create a binary file at the provided path representing the final bytecode.
#[clap(long = "output-bin", short = 'o')]
pub bin_file: Option<String>,
/// Create a file at the provided path containing debug information.
///
/// If the file extension is .json, JSON format is used. Otherwise, an .elf file containing DWARF format is emitted.
#[clap(long = "output-debug", short = 'g')]
pub debug_file: Option<String>,
/// Generates a JSON file containing the hex-encoded script binary.
#[clap(long = "output-hexfile")]
pub hex_file: Option<String>,
}
/// Build profile options.
#[derive(Args, Debug, Default)]
pub struct BuildProfile {
/// The name of the build profile to use.
#[clap(long, conflicts_with = "release", default_value = forc_pkg::BuildProfile::DEBUG)]
pub build_profile: String,
/// Use the release build profile.
///
/// The release profile can be customized in the manifest file.
#[clap(long)]
pub release: bool,
/// Treat warnings as errors.
#[clap(long)]
pub error_on_warnings: bool,
}
/// Dump options.
#[derive(Args, Debug, Default)]
pub struct Dump {
/// Dump all trait implementations for the given type name.
#[clap(long = "dump-impls", value_name = "TYPE")]
pub dump_impls: Option<String>,
}
/// Options related to printing stages of compiler output.
#[derive(Args, Debug, Default)]
pub struct Print {
/// Print the generated Sway AST (Abstract Syntax Tree).
#[clap(long)]
pub ast: bool,
/// Print the computed Sway DCA (Dead Code Analysis) graph.
///
/// DCA graph is printed to the specified path.
/// If specified '' graph is printed to the stdout.
#[clap(long)]
pub dca_graph: Option<String>,
/// URL format to be used in the generated DCA graph .dot file.
///
/// Variables {path}, {line}, and {col} can be used in the provided format.
/// An example for vscode would be:
/// "vscode://file/{path}:{line}:{col}"
#[clap(long, verbatim_doc_comment)]
pub dca_graph_url_format: Option<String>,
/// Print the generated ASM (assembler).
///
/// Values that can be combined:
/// - virtual: initial ASM with virtual registers and abstract control flow.
/// - allocated: ASM with registers allocated, but still with abstract control flow.
/// - abstract: short for both virtual and allocated ASM.
/// - final: final ASM that gets serialized to the target VM bytecode.
/// - all: short for virtual, allocated, and final ASM.
#[arg(long, verbatim_doc_comment, num_args(1..=PrintAsmCliOpt::CLI_OPTIONS.len()), value_parser = clap::builder::PossibleValuesParser::new(&PrintAsmCliOpt::CLI_OPTIONS))]
pub asm: Option<Vec<String>>,
/// Print the bytecode.
///
/// This is the final output of the compiler.
#[clap(long)]
pub bytecode: bool,
/// Print the generated Sway IR (Intermediate Representation).
///
/// Values that can be combined:
/// - initial: initial IR prior to any optimization passes.
/// - final: final IR after applying all optimization passes.
/// - <pass name>: the name of an optimization pass. Prints the IR state after that pass.
/// - all: short for initial, final, and all the optimization passes.
/// - modified: print a requested optimization pass only if it has modified the IR.
#[arg(long, verbatim_doc_comment, num_args(1..=IrCliOpt::max_num_args()), value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
pub ir: Option<Vec<String>>,
/// Output the time elapsed over each part of the compilation process.
#[clap(long)]
pub time_phases: bool,
/// Profile the compilation process.
#[clap(long)]
pub profile: bool,
/// Output build errors and warnings in reverse order.
#[clap(long)]
pub reverse_order: bool,
/// Output compilation metrics into the specified file.
#[clap(long)]
pub metrics_outfile: Option<String>,
}
impl Print {
pub fn asm(&self) -> PrintAsm {
self.asm
.as_ref()
.map_or(PrintAsm::default(), |opts| PrintAsmCliOpt::from(opts).0)
}
pub fn ir(&self) -> IrCli {
self.ir
.as_ref()
.map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0)
}
}
/// Package-related options.
#[derive(Args, Debug, Default)]
pub struct Pkg {
/// Path to the project.
///
/// If not specified, current working directory will be used.
#[clap(short, long)]
pub path: Option<String>,
/// Offline mode.
///
/// Prevents Forc from using the network when managing dependencies.
/// Meaning it will only try to use previously downloaded dependencies.
#[clap(long)]
pub offline: bool,
/// Terse mode.
///
/// Limited warning and error output.
#[clap(long, short = 't')]
pub terse: bool,
/// The directory in which Forc output artifacts are placed.
///
/// By default, this is `<project-root>/out`.
#[clap(long)]
pub output_directory: Option<String>,
/// Requires that the Forc.lock file is up-to-date.
///
/// If the lock file is missing, or it needs to be updated, Forc will exit with an error.
#[clap(long)]
pub locked: bool,
/// The IPFS node to use for fetching IPFS sources.
///
/// [possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>]
#[clap(long)]
pub ipfs_node: Option<IPFSNode>,
}
/// Options related to minifying output.
#[derive(Args, Debug, Default)]
pub struct Minify {
/// Minify JSON ABI files.
///
/// By default the JSON for ABIs is formatted for human readability. By using this option JSON
/// output will be "minified", i.e. all on one line without whitespace.
#[clap(long)]
pub json_abi: bool,
/// Minify JSON storage slot files.
///
/// By default the JSON for initial storage slots is formatted for human readability. By using
/// this option JSON output will be "minified", i.e. all on one line without whitespace.
#[clap(long)]
pub json_storage_slots: bool,
}
pub struct PrintAsmCliOpt(pub PrintAsm);
impl PrintAsmCliOpt {
const VIRTUAL: &'static str = "virtual";
const ALLOCATED: &'static str = "allocated";
const ABSTRACT: &'static str = "abstract";
const FINAL: &'static str = "final";
const ALL: &'static str = "all";
pub const CLI_OPTIONS: [&'static str; 5] = [
Self::VIRTUAL,
Self::ALLOCATED,
Self::ABSTRACT,
Self::FINAL,
Self::ALL,
];
}
impl From<&Vec<String>> for PrintAsmCliOpt {
fn from(value: &Vec<String>) -> Self {
let contains_opt = |opt: &str| value.iter().any(|val| *val == opt);
let print_asm = if contains_opt(Self::ALL) {
PrintAsm::all()
} else {
PrintAsm {
virtual_abstract: contains_opt(Self::ABSTRACT) || contains_opt(Self::VIRTUAL),
allocated_abstract: contains_opt(Self::ABSTRACT) || contains_opt(Self::ALLOCATED),
r#final: contains_opt(Self::FINAL),
}
};
Self(print_asm)
}
}
pub struct IrCliOpt(pub IrCli);
impl IrCliOpt {
const INITIAL: &'static str = "initial";
const FINAL: &'static str = "final";
const ALL: &'static str = "all";
const MODIFIED: &'static str = "modified";
pub const CLI_OPTIONS: [&'static str; 4] =
[Self::INITIAL, Self::FINAL, Self::ALL, Self::MODIFIED];
pub fn cli_options() -> Vec<&'static str> {
Self::CLI_OPTIONS
.iter()
.chain(PassManager::OPTIMIZATION_PASSES.iter())
.cloned()
.collect()
}
pub fn max_num_args() -> usize {
Self::CLI_OPTIONS.len() + PassManager::OPTIMIZATION_PASSES.len()
}
}
impl From<&Vec<String>> for IrCliOpt {
fn from(value: &Vec<String>) -> Self {
let contains_opt = |opt: &str| value.iter().any(|val| *val == opt);
let ir_cli = if contains_opt(Self::ALL) {
IrCli::all(contains_opt(Self::MODIFIED))
} else {
IrCli {
initial: contains_opt(Self::INITIAL),
r#final: contains_opt(Self::FINAL),
modified_only: contains_opt(Self::MODIFIED),
passes: value
.iter()
.filter(|val| !Self::CLI_OPTIONS.contains(&val.as_str()))
.cloned()
.collect(),
}
};
Self(ir_cli)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/plugin.rs | forc/src/cli/plugin.rs | //! Items related to plugin support for `forc`.
use anyhow::{bail, Result};
use forc_tracing::println_warning_verbose;
use std::{
env, fs,
path::{Path, PathBuf},
process,
};
/// Attempt to execute the unknown subcommand as an external plugin.
///
/// The subcommand is assumed to be the first element, with the following elements representing
/// following arguments to the external subcommand.
///
/// E.g. given `foo bar baz` where `foo` is an unrecognized subcommand to `forc`, tries to execute
/// `forc-foo bar baz`.
pub(crate) fn execute_external_subcommand(args: &[String]) -> Result<process::Output> {
let cmd = args.first().expect("`args` must not be empty");
let args = &args[1..];
let path = find_external_subcommand(cmd);
let command = match path {
Some(command) => command,
None => bail!("no such subcommand: `{}`", cmd),
};
if let Ok(forc_path) = std::env::current_exe() {
if command.parent() != forc_path.parent() {
println_warning_verbose(&format!(
"The {} ({}) plugin is in a different directory than forc ({})\n",
cmd,
command.display(),
forc_path.display(),
));
}
}
let output = process::Command::new(command)
.stdin(process::Stdio::inherit())
.stdout(process::Stdio::inherit())
.stderr(process::Stdio::inherit())
.args(args)
.output()?;
Ok(output)
}
/// Find an exe called `forc-<cmd>` and return its path.
fn find_external_subcommand(cmd: &str) -> Option<PathBuf> {
let command_exe = format!("forc-{}{}", cmd, env::consts::EXE_SUFFIX);
search_directories()
.iter()
.map(|dir| dir.join(&command_exe))
.find(|file| is_executable(file))
}
/// Search the user's `PATH` for `forc-*` exes.
fn search_directories() -> Vec<PathBuf> {
if let Some(val) = env::var_os("PATH") {
return env::split_paths(&val).collect();
}
vec![]
}
#[cfg(unix)]
fn is_executable(path: &Path) -> bool {
use std::os::unix::prelude::*;
fs::metadata(path)
.map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)
.unwrap_or(false)
}
#[cfg(windows)]
fn is_executable(path: &Path) -> bool {
path.is_file()
}
/// Whether or not the given path points to a valid forc plugin.
fn is_plugin(path: &Path) -> bool {
if let Some(stem) = path.file_name().and_then(|os_str| os_str.to_str()) {
if stem.starts_with("forc-") && is_executable(path) {
return true;
}
}
false
}
/// Find all forc plugins available via `PATH`.
pub(crate) fn find_all() -> impl Iterator<Item = PathBuf> {
search_directories()
.into_iter()
.flat_map(walkdir::WalkDir::new)
.filter_map(Result::ok)
.filter_map(|entry| {
let path = entry.path().to_path_buf();
is_plugin(&path).then_some(path)
})
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/contract_id.rs | forc/src/cli/commands/contract_id.rs | use crate::{
cli::shared::{BuildOutput, BuildProfile, IrCliOpt, Minify, Pkg, Print},
ops::forc_contract_id,
};
use clap::Parser;
use forc_util::{tx_utils::Salt, ForcResult};
forc_util::cli_examples! {
crate::cli::Opt {
[Get contract id => "forc contract-id"]
[Get contract id from a different path => "forc contract-id --path <PATH>"]
}
}
/// Determine contract-id for a contract. For workspaces outputs all contract ids in the workspace.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc contract-id", version, after_help = help())]
pub struct Command {
#[clap(flatten)]
pub pkg: Pkg,
#[clap(flatten)]
pub minify: Minify,
#[clap(flatten)]
pub print: Print,
#[arg(long, value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
pub verify_ir: Option<Vec<String>>,
#[clap(flatten)]
pub build_output: BuildOutput,
#[clap(flatten)]
pub build_profile: BuildProfile,
#[clap(flatten)]
pub salt: Salt,
#[clap(flatten)]
pub experimental: sway_features::CliFields,
}
pub(crate) fn exec(cmd: Command) -> ForcResult<()> {
forc_contract_id::contract_id(cmd).map_err(|e| e.into())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/predicate_root.rs | forc/src/cli/commands/predicate_root.rs | use clap::Parser;
use forc_util::ForcResult;
pub use crate::cli::shared::{BuildOutput, BuildProfile, Minify, Pkg, Print};
use crate::{cli::shared::IrCliOpt, ops::forc_predicate_root};
forc_util::cli_examples! {
crate::cli::Opt {
[Get predicate root => "forc predicate-root"]
}
}
/// Determine predicate-root for a predicate. For workspaces outputs all predicate roots in the
/// workspace.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc predicate-root", version, after_help = help())]
pub struct Command {
#[clap(flatten)]
pub pkg: Pkg,
#[clap(flatten)]
pub minify: Minify,
#[clap(flatten)]
pub print: Print,
#[arg(long, value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
pub verify_ir: Option<Vec<String>>,
#[clap(flatten)]
pub build_output: BuildOutput,
#[clap(flatten)]
pub build_profile: BuildProfile,
#[clap(flatten)]
pub experimental: sway_features::CliFields,
}
pub(crate) fn exec(cmd: Command) -> ForcResult<()> {
forc_predicate_root::predicate_root(cmd).map_err(|e| e.into())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/test.rs | forc/src/cli/commands/test.rs | use crate::cli::{self, shared::IrCliOpt};
use ansiterm::Colour;
use clap::Parser;
use forc_pkg as pkg;
use forc_test::{
GasCostsSource, TestFilter, TestGasLimit, TestResult, TestRunnerCount, TestedPackage,
};
use forc_tracing::println_action_green;
use forc_util::{
tx_utils::{decode_fuel_vm_log_data, format_log_receipts},
ForcError, ForcResult,
};
use fuel_abi_types::{abi::program::PanickingCall, revert_info::RevertKind};
use sway_core::{asm_generation::ProgramABI, fuel_prelude::fuel_tx::Receipt, IrCli};
use tracing::info;
forc_util::cli_examples! {
crate::cli::Opt {
[ Run test => "forc test" ]
[ Run test with a filter => "forc test $filter" ]
[ Run test without any output => "forc test --silent" ]
[ Run test without creating or update the lock file => "forc test --locked" ]
}
}
/// Run the Sway unit tests for the current project.
///
/// NOTE: Previously this command was used to support Rust integration testing, however the
/// provided behavior served no benefit over running `cargo test` directly. The proposal to change
/// the behavior to support unit testing can be found at the following link:
/// https://github.com/FuelLabs/sway/issues/1833
///
/// Sway unit tests are functions decorated with the `#[test]` attribute. Each test is compiled as
/// a unique entry point for a single program and has access to the namespace of the module in
/// which it is declared.
///
/// Upon successful compilation, test scripts are executed to their completion. A test is
/// considered a failure in the case that a revert (`rvrt`) instruction is encountered during
/// execution, unless it is specified as `#[test(should_revert)]`. Otherwise, it is considered a success.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc test", version, after_help = help())]
pub struct Command {
#[clap(flatten)]
pub build: cli::shared::Build,
#[clap(flatten)]
pub test_print: TestPrintOpts,
/// When specified, only tests containing the given string will be executed.
pub filter: Option<String>,
#[clap(long)]
/// When specified, only the test exactly matching the given string will be executed.
pub filter_exact: bool,
#[clap(long)]
/// Number of threads to utilize when running the tests. By default, this is the number of
/// threads available in your system.
pub test_threads: Option<usize>,
#[clap(flatten)]
pub experimental: sway_features::CliFields,
/// Source of the gas costs values used to calculate gas costs of test executions.
///
/// If not provided, a built-in set of gas costs values will be used.
/// These are the gas costs values of the Fuel mainnet as of time of
/// the release of the `forc` version being used.
///
/// The mainnet and testnet options will fetch the current gas costs values from
/// their respective networks.
///
/// Alternatively, the gas costs values can be specified as a file path
/// to a local JSON file containing the gas costs values.
///
/// [possible values: built-in, mainnet, testnet, <FILE_PATH>]
#[clap(long)]
pub gas_costs: Option<GasCostsSource>,
/// Remove gas limit for test executions.
#[clap(long)]
pub no_gas_limit: bool,
}
/// The set of options provided for controlling output of a test.
#[derive(Parser, Debug, Clone)]
pub struct TestPrintOpts {
#[clap(long = "pretty")]
/// Pretty-print the logs emitted from tests.
pub pretty_print: bool,
/// Print decoded `Log` and `LogData` receipts for tests.
#[clap(long, short = 'l')]
pub logs: bool,
/// Print the raw logs for tests.
#[clap(long)]
pub raw_logs: bool,
/// Print the revert information for tests.
#[clap(long)]
pub reverts: bool,
/// Print the output of debug ecals for tests.
#[clap(long)]
pub dbgs: bool,
}
pub(crate) fn exec(cmd: Command) -> ForcResult<()> {
let test_runner_count = match cmd.test_threads {
Some(runner_count) => TestRunnerCount::Manual(runner_count),
None => TestRunnerCount::Auto,
};
let test_print_opts = cmd.test_print.clone();
let test_filter_phrase = cmd.filter.clone();
let test_filter = test_filter_phrase.as_ref().map(|filter_phrase| TestFilter {
filter_phrase,
exact_match: cmd.filter_exact,
});
let gas_costs_values = cmd
.gas_costs
.as_ref()
.unwrap_or(&GasCostsSource::BuiltIn)
.provide_gas_costs()?;
let gas_limit = if cmd.no_gas_limit {
TestGasLimit::Unlimited
} else {
TestGasLimit::Default
};
let opts = opts_from_cmd(cmd);
let built_tests = forc_test::build(opts)?;
let start = std::time::Instant::now();
let test_count = built_tests.test_count(test_filter.as_ref());
let num_tests_running = test_count.total - test_count.ignored;
let num_tests_ignored = test_count.ignored;
println_action_green(
"Running",
&format!(
"{} {}, filtered {} {}",
num_tests_running,
formatted_test_count_string(num_tests_running),
num_tests_ignored,
formatted_test_count_string(num_tests_ignored)
),
);
let tested = built_tests.run(test_runner_count, test_filter, gas_costs_values, gas_limit)?;
let duration = start.elapsed();
// Eventually we'll print this in a fancy manner, but this will do for testing.
let all_tests_passed = match tested {
forc_test::Tested::Workspace(pkgs) => {
for pkg in &pkgs {
let built = &pkg.built.descriptor.name;
info!("\ntested -- {built}\n");
print_tested_pkg(pkg, &test_print_opts)?;
}
info!("");
println_action_green("Finished", &format!("in {duration:?}"));
pkgs.iter().all(|pkg| pkg.tests_passed())
}
forc_test::Tested::Package(pkg) => {
print_tested_pkg(&pkg, &test_print_opts)?;
pkg.tests_passed()
}
};
if all_tests_passed {
Ok(())
} else {
let forc_error: ForcError = "Some tests failed.".into();
const FAILING_UNIT_TESTS_EXIT_CODE: u8 = 101;
Err(forc_error.exit_code(FAILING_UNIT_TESTS_EXIT_CODE))
}
}
fn print_tested_pkg(pkg: &TestedPackage, test_print_opts: &TestPrintOpts) -> ForcResult<()> {
let succeeded = pkg.tests.iter().filter(|t| t.passed()).count();
let failed = pkg.tests.len() - succeeded;
let mut failed_tests = Vec::new();
let program_abi = match &pkg.built.program_abi {
ProgramABI::Fuel(fuel_abi) => Some(fuel_abi),
_ => None,
};
for test in &pkg.tests {
let test_passed = test.passed();
let (state, color) = match test_passed {
true => ("ok", Colour::Green),
false => ("FAILED", Colour::Red),
};
info!(
" test {} ... {} ({:?}, {} gas)",
test.name,
color.paint(state),
test.duration,
test.gas_used
);
print_test_output(test, program_abi, Some(test_print_opts))?;
// If the test is failing, save the test result for printing the details later on.
if !test_passed {
failed_tests.push(test);
}
}
let (state, color) = match succeeded == pkg.tests.len() {
true => ("OK", Colour::Green),
false => ("FAILED", Colour::Red),
};
if failed != 0 {
info!("\n {}", Colour::Red.paint("failures:"));
for failed_test in failed_tests {
let failed_test_name = &failed_test.name;
let failed_test_details = failed_test.details()?;
let path = &*failed_test_details.file_path;
let line_number = failed_test_details.line_number;
info!(" test {failed_test_name}, {path:?}:{line_number}");
print_test_output(failed_test, program_abi, None)?;
info!("\n");
}
}
let pkg_test_durations: std::time::Duration = pkg
.tests
.iter()
.map(|test_result| test_result.duration)
.sum();
info!(
"\ntest result: {}. {} passed; {} failed; finished in {:?}",
color.paint(state),
succeeded,
failed,
pkg_test_durations
);
Ok(())
}
/// Prints the output of a test result, including debug output, logs, and revert information.
/// If the `test_print_opts` is `None`, it defaults to printing all the output.
fn print_test_output(
test: &TestResult,
program_abi: Option<&fuel_abi_types::abi::program::ProgramABI>,
test_print_opts: Option<&TestPrintOpts>,
) -> Result<(), ForcError> {
const TEST_NAME_INDENT: &str = " ";
let print_reverts = test_print_opts.map(|opts| opts.reverts).unwrap_or(true);
let print_dbgs = test_print_opts.map(|opts| opts.dbgs).unwrap_or(true);
let print_logs = test_print_opts.map(|opts| opts.logs).unwrap_or(true);
let print_raw_logs = test_print_opts.map(|opts| opts.raw_logs).unwrap_or(true);
let pretty_print = test_print_opts
.map(|opts| opts.pretty_print)
.unwrap_or(true);
if print_reverts {
if let Some(revert_info) = test.revert_info(program_abi, &test.logs) {
info!(
"{TEST_NAME_INDENT}revert code: {:x}",
revert_info.revert_code
);
match revert_info.kind {
RevertKind::RawRevert => {}
RevertKind::KnownErrorSignal { err_msg } => {
info!("{TEST_NAME_INDENT} └─ error message: {err_msg}");
}
RevertKind::Panic {
err_msg,
err_val,
pos,
backtrace,
} => {
if let Some(err_msg) = err_msg {
info!("{TEST_NAME_INDENT} ├─ panic message: {err_msg}");
}
if let Some(err_val) = err_val {
info!("{TEST_NAME_INDENT} ├─ panic value: {err_val}");
}
info!(
"{TEST_NAME_INDENT} {} panicked: in {}",
if backtrace.is_empty() {
"└─"
} else {
"├─"
},
pos.function
);
info!(
"{TEST_NAME_INDENT} {} └─ at {}, {}:{}:{}",
if backtrace.is_empty() { " " } else { "│ " },
pos.pkg,
pos.file,
pos.line,
pos.column
);
fn print_backtrace_call(call: &PanickingCall, is_first: bool) {
// The `__entry` function is a part of a backtrace,
// but we don't want to show it, since it is an internal implementation detail.
if call.pos.function.ends_with("::__entry")
|| call.pos.function.eq("__entry")
{
return;
}
let prefix = if is_first {
"└─ backtrace:"
} else {
" "
};
info!(
"{TEST_NAME_INDENT} {prefix} called in {}",
call.pos.function
);
info!(
"{TEST_NAME_INDENT} └─ at {}, {}:{}:{}",
call.pos.pkg, call.pos.file, call.pos.line, call.pos.column
);
}
if let Some((first, others)) = backtrace.split_first() {
print_backtrace_call(first, true);
for call in others {
print_backtrace_call(call, false);
}
}
}
}
}
}
if print_dbgs && !test.ecal.captured.is_empty() {
info!("{TEST_NAME_INDENT}debug output:");
for captured in test.ecal.captured.iter() {
captured.apply();
}
}
if print_logs && !test.logs.is_empty() {
if let Some(program_abi) = program_abi {
info!("{TEST_NAME_INDENT}decoded log values:");
for log in &test.logs {
if let Receipt::LogData {
rb,
data: Some(data),
..
} = log
{
let decoded_log_data =
decode_fuel_vm_log_data(&rb.to_string(), data, program_abi)?;
let var_value = decoded_log_data.value;
info!("{var_value}, log rb: {rb}");
}
}
}
}
if print_raw_logs && !test.logs.is_empty() {
let formatted_logs = format_log_receipts(&test.logs, pretty_print)?;
info!("{TEST_NAME_INDENT}raw logs:\n{}", formatted_logs);
}
Ok(())
}
/// Translates the CLI `Command` for `forc test` into the library-level
/// `forc_test::TestOpts`, copying each flag/field one-to-one.
fn opts_from_cmd(cmd: Command) -> forc_test::TestOpts {
    forc_test::TestOpts {
        pkg: pkg::PkgOpts {
            path: cmd.build.pkg.path.clone(),
            offline: cmd.build.pkg.offline,
            terse: cmd.build.pkg.terse,
            locked: cmd.build.pkg.locked,
            output_directory: cmd.build.pkg.output_directory.clone(),
            ipfs_node: cmd.build.pkg.ipfs_node.clone().unwrap_or_default(),
        },
        print: pkg::PrintOpts {
            ast: cmd.build.print.ast,
            dca_graph: cmd.build.print.dca_graph.clone(),
            dca_graph_url_format: cmd.build.print.dca_graph_url_format.clone(),
            asm: cmd.build.print.asm(),
            bytecode: cmd.build.print.bytecode,
            // Bytecode spans are never requested from the test runner.
            bytecode_spans: false,
            ir: cmd.build.print.ir(),
            reverse_order: cmd.build.print.reverse_order,
        },
        // Fall back to the default IR verification config when none is given.
        verify_ir: cmd
            .build
            .verify_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        time_phases: cmd.build.print.time_phases,
        profile: cmd.build.print.profile,
        metrics_outfile: cmd.build.print.metrics_outfile,
        minify: pkg::MinifyOpts {
            json_abi: cmd.build.minify.json_abi,
            json_storage_slots: cmd.build.minify.json_storage_slots,
        },
        build_profile: cmd.build.profile.build_profile,
        release: cmd.build.profile.release,
        error_on_warnings: cmd.build.profile.error_on_warnings,
        binary_outfile: cmd.build.output.bin_file,
        debug_outfile: cmd.build.output.debug_file,
        hex_outfile: cmd.build.output.hex_file,
        build_target: cmd.build.build_target,
        experimental: cmd.experimental.experimental,
        no_experimental: cmd.experimental.no_experimental,
        // Test builds always produce output; suppression is not exposed here.
        no_output: false,
    }
}
/// Picks the singular or plural noun for a count of tests.
fn formatted_test_count_string(count: usize) -> &'static str {
    match count {
        1 => "test",
        _ => "tests",
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/update.rs | forc/src/cli/commands/update.rs | use crate::ops::forc_update;
use clap::Parser;
use forc_pkg::source::IPFSNode;
use forc_util::ForcResult;
// Example invocations rendered into `forc update --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [Update dependencies => "forc update"]
        [Update a specific dependency => "forc update -d std"]
        [Check if dependencies have newer versions => "forc update --check"]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
/// Update dependencies in the Forc dependencies directory.
#[derive(Debug, Default, Parser)]
#[clap(bin_name = "forc update", version, after_help = help())]
pub struct Command {
    /// Path to the project, if not specified, current working directory will be used.
    #[clap(short, long)]
    pub path: Option<String>,
    /// Dependency to be updated.
    /// If not set, all dependencies will be updated.
    #[clap(short = 'd')]
    pub target_dependency: Option<String>,
    /// Checks if the dependencies have newer versions.
    /// Won't actually perform the update, will output which
    /// ones are up-to-date and outdated.
    #[clap(short, long)]
    pub check: bool,
    /// The IPFS Node to use for fetching IPFS sources.
    ///
    /// Possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>
    #[clap(long)]
    pub ipfs_node: Option<IPFSNode>,
}
/// Entry point for `forc update`: runs the dependency update and adapts any
/// failure into a `ForcResult` error with a user-facing message.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    forc_update::update(command)
        .map(|_| ())
        .map_err(|e| format!("couldn't update dependencies: {e}").as_str().into())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/template.rs | forc/src/cli/commands/template.rs | use crate::ops::forc_template;
use clap::Parser;
use forc_util::ForcResult;
// Example invocation rendered into `forc template --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [Create a new Forc project from an option template => "forc template new-path --template-name option"]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
/// Create a new Forc project from a git template.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc template", version, after_help = help())]
pub struct Command {
    /// The template url, should be a git repo.
    #[clap(long, short, default_value = "https://github.com/fuellabs/sway")]
    pub url: String,
    /// The name of the template that needs to be fetched and used from git repo provided.
    #[clap(long, short)]
    pub template_name: Option<String>,
    /// The name of the project that will be created
    pub project_name: String,
}
/// Entry point for `forc template`: scaffolds the project, converting any
/// error through the standard `ForcResult` conversion.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    forc_template::init(command)
        .map(|_| ())
        .map_err(Into::into)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/check.rs | forc/src/cli/commands/check.rs | use crate::ops::forc_check;
use clap::Parser;
use forc_pkg::source::IPFSNode;
use forc_util::{forc_result_bail, ForcResult};
use sway_core::{BuildTarget, Engines};
// Example invocations rendered into `forc check --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [ Check the current project => "forc check" ]
        [ Check the current project with a different path => "forc check --path <PATH>" ]
        [ Check the current project without updating dependencies => "forc check --locked" ]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
/// Check the current or target project and all of its dependencies for errors.
///
/// This will essentially compile the packages without performing the final step of code generation,
/// which is faster than running forc build.
#[derive(Debug, Default, Parser)]
#[clap(bin_name = "forc check", version, after_help = help())]
pub struct Command {
    /// Build target to use for code generation.
    #[clap(value_enum, default_value_t=BuildTarget::default(), alias="target")]
    pub build_target: BuildTarget,
    /// Path to the project, if not specified, current working directory will be used.
    #[clap(short, long)]
    pub path: Option<String>,
    /// Offline mode, prevents Forc from using the network when managing dependencies.
    /// Meaning it will only try to use previously downloaded dependencies.
    #[clap(long = "offline")]
    pub offline_mode: bool,
    /// Requires that the Forc.lock file is up-to-date. If the lock file is missing, or it
    /// needs to be updated, Forc will exit with an error
    #[clap(long)]
    pub locked: bool,
    /// Terse mode. Limited warning and error output.
    #[clap(long = "terse", short = 't')]
    pub terse_mode: bool,
    /// Disable checking unit tests.
    #[clap(long = "disable-tests")]
    pub disable_tests: bool,
    /// The IPFS Node to use for fetching IPFS sources.
    ///
    /// Possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>
    #[clap(long)]
    pub ipfs_node: Option<IPFSNode>,
    /// Dump all trait implementations for the given type name.
    #[clap(long = "dump-impls", value_name = "TYPE")]
    pub dump_impls: Option<String>,
    // Shared experimental-feature flags (e.g. --experimental/--no-experimental).
    #[clap(flatten)]
    pub experimental: sway_features::CliFields,
}
/// Entry point for `forc check`: type-checks the project and fails when no
/// typed program could be produced.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    let engines = Engines::default();
    let check_result = forc_check::check(command, &engines)?;
    // The first tuple element is the (optional) successfully typed program.
    match check_result.0 {
        Some(_) => Ok(()),
        None => forc_result_bail!("unable to type check"),
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/build.rs | forc/src/cli/commands/build.rs | use crate::{cli, ops::forc_build};
use clap::Parser;
use forc_util::ForcResult;
// Example invocations rendered into `forc build --help`.
// Fixed typo in user-facing help text: "projectx" -> "project".
forc_util::cli_examples! {
    crate::cli::Opt {
        [ Compile the current project => "forc build" ]
        [ Compile the current project from a different path => "forc build --path <PATH>" ]
        [ Compile the current project without updating dependencies => "forc build --path <PATH> --locked" ]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
// (Fixed doc typo: `fuel_cypto` -> `fuel_crypto`.)
/// Compile the current or target project.
///
/// The output produced will depend on the project's program type.
///
/// - `script`, `predicate` and `contract` projects will produce their bytecode in binary format `<project-name>.bin`.
///
/// - `script` projects will also produce a file containing the hash of the bytecode binary
/// `<project-name>-bin-hash` (using `fuel_crypto::Hasher`).
///
/// - `predicate` projects will also produce a file containing the **root** hash of the bytecode binary
/// `<project-name>-bin-root` (using `fuel_tx::Contract::root_from_code`).
///
/// - `contract` and `library` projects will also produce the public ABI in JSON format
/// `<project-name>-abi.json`.
#[derive(Debug, Default, Parser)]
#[clap(bin_name = "forc build", version, after_help = help())]
pub struct Command {
    // Shared build options (package selection, print options, profile, ...).
    #[clap(flatten)]
    pub build: cli::shared::Build,
    /// Also build all tests within the project.
    #[clap(long)]
    pub tests: bool,
    // Shared experimental-feature flags.
    #[clap(flatten)]
    pub experimental: sway_features::CliFields,
}
/// Entry point for `forc build`: compiles the package and discards the
/// returned build artifact description (outputs are already written to disk).
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    forc_build::build(command)
        .map(|_built| ())
        .map_err(Into::into)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/mod.rs | forc/src/cli/commands/mod.rs | pub mod add;
// One module per `forc` subcommand implementation.
pub mod addr2line;
pub mod build;
pub mod check;
pub mod clean;
pub mod completions;
pub mod contract_id;
pub mod init;
pub mod new;
pub mod parse_bytecode;
pub mod plugins;
pub mod predicate_root;
pub mod remove;
pub mod template;
pub mod test;
pub mod update;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/init.rs | forc/src/cli/commands/init.rs | use crate::ops::forc_init;
use clap::Parser;
use forc_util::ForcResult;
// Example invocations rendered into `forc init --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [Initialize a new Forc project => "forc init --path <PATH>"]
        [Initialize a new Forc project as workspace => "forc init --path <PATH> --workspace"]
        [Initialize a new Forc project with a predicate => "forc init --path <PATH> --predicate"]
        [Initialize a new Forc library project => "forc init --path <PATH> --library"]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
// (Fixed help text: predicate packages are created with a `src/main.sw`
// target — the previous "(src/predicate.rs)" was a typo.)
/// Create a new Forc project in an existing directory.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc init", version, after_help = help())]
pub struct Command {
    /// The directory in which the forc project will be initialized.
    #[clap(long)]
    pub path: Option<String>,
    /// The default program type, excluding all flags or adding this flag creates a basic contract program.
    #[clap(long)]
    pub contract: bool,
    /// Create a package with a script target (src/main.sw).
    #[clap(long)]
    pub script: bool,
    /// Create a package with a predicate target (src/main.sw).
    #[clap(long)]
    pub predicate: bool,
    /// Create a package with a library target (src/lib.sw).
    #[clap(long)]
    pub library: bool,
    /// Adding this flag creates an empty workspace.
    #[clap(long)]
    pub workspace: bool,
    /// Set the package name. Defaults to the directory name
    #[clap(long)]
    pub name: Option<String>,
}
/// Entry point for `forc init`: delegates to the project initialization op.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    forc_init::init(command)
        .map(|_| ())
        .map_err(Into::into)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/completions.rs | forc/src/cli/commands/completions.rs | use std::fmt::Display;
use clap::{Command as ClapCommand, CommandFactory, Parser};
use clap_complete::{generate, Generator, Shell};
use forc_util::ForcResult;
// The set of shells (plus Fig) for which completion scripts can be generated.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, clap::ValueEnum)]
enum Target {
    /// Bourne Again Shell (bash)
    Bash,
    /// Elvish shell
    Elvish,
    /// Friendly Interactive Shell (fish)
    Fish,
    /// PowerShell
    PowerShell,
    /// Z Shell (zsh)
    Zsh,
    /// Fig
    Fig,
}
impl Display for Target {
    /// Writes the lowercase shell name, matching the `clap::ValueEnum` spelling.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Borrow static strings instead of allocating a `String` per call.
        let name = match self {
            Target::Bash => "bash",
            Target::Elvish => "elvish",
            Target::Fish => "fish",
            Target::PowerShell => "powershell",
            Target::Zsh => "zsh",
            Target::Fig => "fig",
        };
        f.write_str(name)
    }
}
// Doc comments on this struct and its field double as the clap `--help` text.
/// Generate tab-completion scripts for your shell
#[derive(Debug, Parser)]
pub struct Command {
    /// Specify shell to enable tab-completion for
    ///
    /// [possible values: zsh, bash, fish, powershell, elvish]
    ///
    /// For more info: https://fuellabs.github.io/sway/latest/forc/commands/forc_completions.html
    #[clap(short = 'T', long, value_enum)]
    target: Target,
}
/// Entry point for `forc completions`: emits a completion script for the
/// requested target to stdout.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    let mut cmd = super::super::Opt::command();
    // Fig has its own generator type; every other target maps onto `Shell`.
    match command.target {
        Target::Bash => print_completions(Shell::Bash, &mut cmd),
        Target::Elvish => print_completions(Shell::Elvish, &mut cmd),
        Target::Fish => print_completions(Shell::Fish, &mut cmd),
        Target::PowerShell => print_completions(Shell::PowerShell, &mut cmd),
        Target::Zsh => print_completions(Shell::Zsh, &mut cmd),
        Target::Fig => print_completions(clap_complete_fig::Fig, &mut cmd),
    }
    Ok(())
}
/// Writes the generated completion script for `cmd` to stdout.
fn print_completions<G: Generator>(gen: G, cmd: &mut ClapCommand) {
    let bin_name = cmd.get_name().to_string();
    generate(gen, cmd, bin_name, &mut std::io::stdout());
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::cli::{Forc, Opt};
    // Each shell test drives a real interactive shell in a PTY (via
    // `completest_pty`) and checks that tab-completion offers the expected
    // subcommands.
    #[test]
    fn bash() {
        testsuite::<completest_pty::BashRuntimeBuilder>(Shell::Bash);
    }
    #[test]
    fn zsh() {
        testsuite::<completest_pty::ZshRuntimeBuilder>(Shell::Zsh);
    }
    #[test]
    fn fish() {
        testsuite::<completest_pty::FishRuntimeBuilder>(Shell::Fish);
    }
    // Builds a shell runtime rooted in /tmp and runs the completion check
    // against all known `forc` subcommand names.
    fn testsuite<R>(shell: Shell)
    where
        R: completest_pty::RuntimeBuilder,
    {
        let bin_root = "/tmp/bin".into();
        let home = "/tmp/home".into();
        let runtime = R::new(bin_root, home).expect("runtime");
        build_script_and_test(runtime, shell, "forc", &Forc::possible_values());
    }
    // Generates the completion script for `shell`, registers it with the PTY
    // runtime, triggers completion for `command_to_complete`, and asserts
    // every expected candidate appears in the output.
    fn build_script_and_test<R>(
        mut runtime: R,
        shell: Shell,
        command_to_complete: &str,
        expectations: &[&str],
    ) where
        R: completest_pty::Runtime,
    {
        let term = completest_pty::Term::new();
        let mut cmd = Opt::command();
        let mut completion_script = Vec::<u8>::new();
        generate(shell, &mut cmd, "forc".to_owned(), &mut completion_script);
        runtime
            .register("forc", &String::from_utf8_lossy(&completion_script))
            .expect("register completion script");
        // If the shell is unavailable in this environment, skip instead of failing.
        let output =
            if let Ok(output) = runtime.complete(&format!("{command_to_complete} \t\t"), &term) {
                output
            } else {
                println!("Skipping {shell}");
                return;
            };
        for expectation in expectations {
            assert!(
                output.contains(expectation),
                "Failed find {expectation} in {output}"
            );
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/plugins.rs | forc/src/cli/commands/plugins.rs | use crate::cli::PluginsCommand;
use anyhow::anyhow;
use clap::Parser;
use forc_tracing::println_warning;
use forc_util::ForcResult;
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use tracing::info;
// Example invocations rendered into `forc plugins --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [ List all plugins => "forc plugins" ]
        [ List all plugins with their paths => "forc plugins --paths" ]
        [ List all plugins with their descriptions => "forc plugins --describe" ]
        [ List all plugins with their paths and descriptions => "forc plugins --paths --describe" ]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
/// Find all forc plugins available via `PATH`.
///
/// Prints information about each discovered plugin.
#[derive(Debug, Parser)]
#[clap(name = "forc plugins", about = "List all forc plugins", version, after_help = help())]
pub struct Command {
    /// Prints the absolute path to each discovered plugin.
    #[clap(long = "paths", short = 'p')]
    print_full_path: bool,
    /// Prints the long description associated with each listed plugin
    #[clap(long = "describe", short = 'd')]
    describe: bool,
}
/// Returns the final path component as an owned `String`, falling back to the
/// full rendered path when the component is missing or not valid UTF-8.
fn get_file_name(path: &Path) -> String {
    match path.file_name().and_then(|name| name.to_str()) {
        Some(name) => name.to_owned(),
        None => path.display().to_string(),
    }
}
/// Entry point for `forc plugins`: discovers all `forc-*` binaries on `PATH`,
/// de-duplicates them by binary name, and prints one info line per plugin
/// (warning when the same plugin name resolves to multiple paths).
pub(crate) fn exec(command: PluginsCommand) -> ForcResult<()> {
    let PluginsCommand {
        print_full_path,
        describe,
    } = command;
    // Gather (path, formatted info) for every discovered plugin, then group
    // by binary name so duplicates on PATH collapse into a single entry that
    // remembers every path it was found at.
    let mut plugins = crate::cli::plugin::find_all()
        .map(|path| {
            get_plugin_info(path.clone(), print_full_path, describe).map(|info| (path, info))
        })
        .collect::<Result<Vec<(_, _)>, _>>()?
        .into_iter()
        .fold(HashMap::new(), |mut acc, (path, content)| {
            let bin_name = get_file_name(&path);
            // First occurrence wins for the displayed info line; every
            // occurrence's path is recorded for the duplicate warning below.
            acc.entry(bin_name.clone())
                .or_insert_with(|| (bin_name, vec![], content.clone()))
                .1
                .push(path);
            acc
        })
        .into_values()
        .map(|(bin_name, mut paths, content)| {
            paths.sort();
            paths.dedup();
            (bin_name, paths, content)
        })
        .collect::<Vec<_>>();
    // Stable, alphabetical listing by binary name.
    plugins.sort_by(|a, b| a.0.cmp(&b.0));
    info!("Installed Plugins:");
    for plugin in plugins {
        info!("{}", plugin.2);
        // More than one distinct path for the same name: shadowing hazard.
        if plugin.1.len() > 1 {
            println_warning(&format!("Multiple paths found for {}", plugin.0));
            for path in plugin.1 {
                println_warning(&format!("  {}", path.display()));
            }
        }
    }
    Ok(())
}
/// Find a plugin's description
///
/// Given a canonical plugin path, returns the description included in the `-h` opt.
/// Returns a generic description if a description cannot be found or if the
/// plugin cannot be executed at all.
fn parse_description_for_plugin(plugin: &Path) -> String {
    use std::process::Command;
    const DEFAULT_DESCRIPTION: &str = "No description found for this plugin.";
    // Running the plugin may fail (not executable, removed since discovery,
    // ...). Fall back to the generic description instead of panicking, as the
    // doc above promises — the previous `.expect` aborted the whole listing.
    let output = match Command::new(plugin).arg("-h").output() {
        Ok(output) => output,
        Err(_) => return DEFAULT_DESCRIPTION.to_owned(),
    };
    let stdout = String::from_utf8_lossy(&output.stdout);
    // By convention the second line of `-h` output holds the one-line
    // description; an absent or empty line means the plugin doesn't provide one.
    match stdout.lines().nth(1) {
        Some(line) if !line.is_empty() => line.to_owned(),
        _ => DEFAULT_DESCRIPTION.to_owned(),
    }
}
/// Format a given plugin's line to stdout
///
/// Formatting is based on a combination of `print_full_path` and `describe`.
/// The plugin is only executed (to query its `-h` description) when
/// `describe` is set.
fn format_print_description(
    path: PathBuf,
    print_full_path: bool,
    describe: bool,
) -> ForcResult<String> {
    let display = if print_full_path {
        path.display().to_string()
    } else {
        get_file_name(&path)
    };
    if describe {
        // Only spawn the plugin process when its description is actually
        // needed; previously every plugin on PATH was executed even for a
        // plain `forc plugins` listing.
        let description = parse_description_for_plugin(&path);
        Ok(format!(" {display} \t\t{description}"))
    } else {
        Ok(display)
    }
}
/// # Panics
///
/// This function assumes that file names will never be empty since it is only used with
/// paths yielded from plugin::find_all(), as well as that the file names are in valid
/// unicode format since file names should be prefixed with `forc-`. Should one of these 2
/// assumptions fail, this function panics.
fn get_plugin_info(path: PathBuf, print_full_path: bool, describe: bool) -> ForcResult<String> {
    let info = format_print_description(path, print_full_path, describe);
    info.map_err(|e| anyhow!("Could not get plugin info: {}", e.as_ref()).into())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/addr2line.rs | forc/src/cli/commands/addr2line.rs | use anyhow::anyhow;
use clap::Parser;
use forc_util::{create_diagnostics_renderer, ForcResult};
use std::collections::VecDeque;
use std::fs::{self, File};
use std::io::{self, prelude::*, BufReader};
use std::path::{Path, PathBuf};
use sway_types::LineCol;
use tracing::info;
use annotate_snippets::{AnnotationType, Slice, Snippet, SourceAnnotation};
use sway_core::source_map::{LocationRange, SourceMap};
// Doc comments on this struct and its fields double as the clap `--help` text.
/// Show location and context of an opcode address in its source file
#[derive(Debug, Parser)]
pub(crate) struct Command {
    /// Where to search for the project root
    #[clap(short = 'S', long, default_value = ".")]
    pub search_dir: PathBuf,
    /// Source file mapping in JSON format
    #[clap(short = 'g', long)]
    pub sourcemap_path: PathBuf,
    /// How many lines of context to show
    #[clap(short, long, default_value = "2")]
    pub context: usize,
    /// Opcode index
    #[clap(short = 'i', long)]
    pub opcode_index: usize,
}
/// Entry point for `forc addr2line`: loads the JSON source map, resolves the
/// given opcode index to a source span, and renders that span (with context
/// lines) as an annotated snippet.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    let contents = fs::read(&command.sourcemap_path)
        .map_err(|err| anyhow!("{:?}: could not read: {:?}", command.sourcemap_path, err))?;
    let sm: SourceMap = serde_json::from_slice(&contents).map_err(|err| {
        anyhow!(
            "{:?}: invalid source map json: {}",
            command.sourcemap_path,
            err
        )
    })?;
    if let Some((mut path, range)) = sm.addr_to_span(command.opcode_index) {
        // Source-map paths may be relative to the project root.
        if path.is_relative() {
            path = command.search_dir.join(path);
        }
        let rr = read_range(&path, range, command.context)
            .map_err(|err| anyhow!("{:?}: could not read: {:?}", path, err))?;
        let path_str = format!("{path:?}");
        // Build an annotate-snippets snippet pointing "here" at the mapped span.
        let snippet = Snippet {
            title: None,
            footer: vec![],
            slices: vec![Slice {
                source: &rr.source,
                line_start: rr.source_start_line,
                origin: Some(&path_str),
                fold: false,
                annotations: vec![SourceAnnotation {
                    label: "here",
                    annotation_type: AnnotationType::Note,
                    range: (rr.offset, rr.offset + rr.length),
                }],
            }],
        };
        let renderer = create_diagnostics_renderer();
        info!("{}", renderer.render(snippet));
        Ok(())
    } else {
        Err("Address did not map to any source code location".into())
    }
}
/// A slice of a source file around a mapped location, plus the span's
/// position within that slice.
struct ReadRange {
    // The extracted source text: the mapped lines plus surrounding context.
    source: String,
    // Byte offset of the line containing the range start (currently unused).
    _source_start_byte: usize,
    // Line counter of the line containing the range start, as produced by
    // `read_range`'s 0-based loop. NOTE(review): `Slice::line_start` consumers
    // often expect 1-based numbering — confirm.
    source_start_line: usize,
    // Byte offset of the range start within `source`.
    offset: usize,
    // Byte length of the mapped range.
    length: usize,
}
/// Converts a 1-indexed line/column location into a byte offset within the
/// file at `path` by skipping `line - 1` full lines and adding the column.
fn line_col_to_pos<P: AsRef<Path>>(&LineCol { line, col }: &LineCol, path: P) -> io::Result<usize> {
    let mut reader = BufReader::new(File::open(&path)?);
    let mut bytes_skipped = 0usize;
    let mut line_buf = String::new();
    for _ in 1..line {
        line_buf.clear();
        bytes_skipped += reader.read_line(&mut line_buf)?;
    }
    Ok(bytes_skipped + col)
}
/// Reads the text of `range` from `path` together with up to `context_lines`
/// lines of surrounding context, returning the extracted text and the span's
/// byte offset/length within it.
fn read_range<P: AsRef<Path>>(
    path: P,
    range: LocationRange,
    context_lines: usize,
) -> io::Result<ReadRange> {
    // Converting LineCol to Pos, twice, is inefficient.
    // TODO: Rewrite the algorithm in terms of LineCol.
    let range_start = line_col_to_pos(&range.start, &path)?;
    let range_end = line_col_to_pos(&range.end, &path)?;
    let file = File::open(&path)?;
    let mut reader = BufReader::new(file);
    let mut context_buffer = VecDeque::new();
    let mut start_pos = None;
    let mut position = 0;
    for line_num in 0.. {
        let mut buffer = String::new();
        let n = reader.read_line(&mut buffer)?;
        if n == 0 {
            // EOF. Without this check the loop would spin forever whenever the
            // mapped range lies beyond the end of the (possibly modified)
            // file, since `read_line` keeps returning Ok(0).
            break;
        }
        context_buffer.push_back(buffer);
        if start_pos.is_none() {
            if position + n > range_start {
                // This line contains the range start: record its line number,
                // its byte position, and the start's offset into the buffer.
                let cbl: usize = context_buffer.iter().map(|c| c.len()).sum();
                start_pos = Some((line_num, position, range_start - (position + n - cbl)));
            } else if context_buffer.len() > context_lines {
                // Keep at most `context_lines` lines of leading context.
                let _ = context_buffer.pop_front();
            }
        } else if context_buffer.len() > context_lines * 2 {
            // Enough trailing context collected.
            break;
        }
        position += n;
    }
    let source = context_buffer.make_contiguous().join("");
    let length = range_end - range_start;
    let (source_start_line, _source_start_byte, offset) = start_pos.ok_or_else(|| {
        io::Error::new(
            io::ErrorKind::UnexpectedEof,
            "Source file was modified, and the mapping is now out of range",
        )
    })?;
    if offset + length > source.len() {
        return Err(io::Error::new(
            io::ErrorKind::UnexpectedEof,
            "Source file was modified, and the mapping is now out of range",
        ));
    }
    Ok(ReadRange {
        source,
        _source_start_byte,
        source_start_line,
        offset,
        length,
    })
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/add.rs | forc/src/cli/commands/add.rs | use crate::cli::shared::{ManifestArgs, PackagesSelectionArgs, SectionArgs, SourceArgs};
use clap::Parser;
use forc_pkg::{
manifest::dep_modifier::{self, Action, ModifyOpts},
source::IPFSNode,
};
use forc_util::ForcResult;
// Example invocations rendered into `forc add --help`.
// Fixed grammar ("Add a dependencies" -> "Add a dependency") and removed a
// stray trailing space from the first example command.
forc_util::cli_examples! {
    crate::cli::Opt {
        [Add a dependency => "forc add <DEP>[@<VERSION>]"]
        [Add a contract dependency => "forc add <DEP>[@<VERSION>] --contract-dep"]
        [Dry run => "forc add <DEP>[@<VERSION>] --dry-run"]
    }
}
// CLI options for `forc add`: add dependencies to the project's Forc.toml.
// Doc comments on the fields double as the clap `--help` text.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc add", version, after_help = help())]
pub struct Command {
    /// List of dependencies to add in the format "name[@version]"
    #[clap(value_enum, value_name = "DEP_SPEC", required = true, num_args = 1..,)]
    pub dependencies: Vec<String>,
    /// Print the changes that would be made without actually making them
    #[arg(long)]
    pub dry_run: bool,
    #[clap(flatten, next_help_heading = "Manifest Options")]
    pub manifest: ManifestArgs,
    #[clap(flatten, next_help_heading = "Package Selection")]
    pub package: PackagesSelectionArgs,
    #[clap(flatten, next_help_heading = "Source")]
    pub source: SourceArgs,
    #[clap(flatten, next_help_heading = "Section")]
    pub section: SectionArgs,
    /// Offline mode.
    ///
    /// Prevents Forc from using the network when managing dependencies.
    #[clap(long)]
    pub offline: bool,
    /// The IPFS Node to use for fetching IPFS sources.
    ///
    /// Possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>
    #[clap(long)]
    pub ipfs_node: Option<IPFSNode>,
}
/// Entry point for `forc add`: applies the requested dependency additions to
/// the manifest, wrapping any failure in a user-facing message.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    match dep_modifier::modify_dependencies(command.into()) {
        Ok(_) => Ok(()),
        Err(e) => Err(format!("failed to add dependencies: {e}").as_str().into()),
    }
}
// Maps the `forc add` CLI options onto the library-level `ModifyOpts` with
// `Action::Add`, forwarding every source/section flag as-is.
impl From<Command> for ModifyOpts {
    fn from(cmd: Command) -> Self {
        ModifyOpts {
            action: Action::Add,
            // NOTE(review): `manisfest_path` is a (misspelled) field name
            // declared on `ManifestArgs` elsewhere; renaming must happen there.
            manifest_path: cmd.manifest.manisfest_path,
            package: cmd.package.package,
            source_path: cmd.source.path,
            git: cmd.source.git,
            branch: cmd.source.git_ref.branch,
            tag: cmd.source.git_ref.tag,
            rev: cmd.source.git_ref.rev,
            ipfs: cmd.source.ipfs,
            contract_deps: cmd.section.contract_deps,
            salt: cmd.section.salt,
            ipfs_node: cmd.ipfs_node,
            dependencies: cmd.dependencies,
            dry_run: cmd.dry_run,
            offline: cmd.offline,
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/clean.rs | forc/src/cli/commands/clean.rs | use crate::ops::forc_clean;
use clap::Parser;
use forc_util::ForcResult;
// Example invocations rendered into `forc clean --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [Clean project => "forc clean"]
        [Clean project with a custom path => "forc clean --path <PATH>"]
    }
}
// Doc comments on this struct and its field double as the clap `--help` text.
/// Removes the default forc compiler output artifact directory, i.e. `<project-name>/out`.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc clean", version, after_help = help())]
pub struct Command {
    /// Path to the project, if not specified, current working directory will be used.
    #[clap(short, long)]
    pub path: Option<String>,
}
/// Entry point for `forc clean`: removes the project's build output directory.
pub fn exec(command: Command) -> ForcResult<()> {
    forc_clean::clean(command)
        .map(|_| ())
        .map_err(Into::into)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/remove.rs | forc/src/cli/commands/remove.rs | use crate::cli::shared::{ManifestArgs, PackagesSelectionArgs, SectionArgs};
use clap::Parser;
use forc_pkg::{
manifest::dep_modifier::{self, Action, ModifyOpts},
source::IPFSNode,
};
use forc_util::ForcResult;
// Example invocations rendered into `forc remove --help`.
// Fixed copy-pasted labels from `forc add`: these examples remove
// dependencies, they do not add them.
forc_util::cli_examples! {
    crate::cli::Opt {
        [Remove a dependency => "forc remove <DEP>"]
        [Remove a contract dependency => "forc remove <DEP> --contract-dep"]
        [Dry run => "forc remove <DEP> --dry-run"]
    }
}
// CLI options for `forc remove`: remove dependencies from the project's
// Forc.toml (the previous comment was copy-pasted from `forc add`).
// Doc comments on the fields double as the clap `--help` text.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc remove", version, after_help = help())]
pub struct Command {
    /// List of dependencies to remove in the format "name[@version]"
    #[clap(value_enum, value_name = "DEP_SPEC", required = true)]
    pub dependencies: Vec<String>,
    /// Print the changes that would be made without actually making them
    #[arg(long)]
    pub dry_run: bool,
    #[clap(flatten, next_help_heading = "Manifest Options")]
    pub manifest: ManifestArgs,
    #[clap(flatten, next_help_heading = "Package Selection")]
    pub package: PackagesSelectionArgs,
    #[clap(flatten, next_help_heading = "Section")]
    pub section: SectionArgs,
    /// Offline mode.
    ///
    /// Prevents Forc from using the network when managing dependencies.
    #[clap(long)]
    pub offline: bool,
    /// The IPFS Node to use for fetching IPFS sources.
    ///
    /// Possible values: FUEL, PUBLIC, LOCAL, <GATEWAY_URL>
    #[clap(long)]
    pub ipfs_node: Option<IPFSNode>,
}
/// Entry point for `forc remove`: applies the requested dependency removals
/// to the manifest, wrapping any failure in a user-facing message.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    match dep_modifier::modify_dependencies(command.into()) {
        Ok(_) => Ok(()),
        Err(e) => Err(format!("failed to remove dependencies: {e}").as_str().into()),
    }
}
// Maps the `forc remove` CLI options onto the library-level `ModifyOpts` with
// `Action::Remove`; all source-location fields are irrelevant for removal and
// are therefore `None`.
impl From<Command> for ModifyOpts {
    fn from(cmd: Command) -> Self {
        ModifyOpts {
            action: Action::Remove,
            // NOTE(review): `manisfest_path` is a (misspelled) field name
            // declared on `ManifestArgs` elsewhere; renaming must happen there.
            manifest_path: cmd.manifest.manisfest_path,
            package: cmd.package.package,
            source_path: None,
            git: None,
            branch: None,
            tag: None,
            rev: None,
            ipfs: None,
            contract_deps: cmd.section.contract_deps,
            salt: cmd.section.salt,
            ipfs_node: cmd.ipfs_node,
            dependencies: cmd.dependencies,
            dry_run: cmd.dry_run,
            offline: cmd.offline,
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/parse_bytecode.rs | forc/src/cli/commands/parse_bytecode.rs | use clap::Parser;
use forc_util::bytecode::parse_bytecode_to_instructions;
use forc_util::ForcResult;
use term_table::row::Row;
use term_table::table_cell::{Alignment, TableCell};
use tracing::info;
// Example invocation rendered into `forc parse-bytecode --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [Parse bytecode => "forc parse-bytecode <PATH>"]
    }
}
/// Parse bytecode file into a debug format.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc parse-bytecode", version, after_help = help())]
pub(crate) struct Command {
    // Path to the compiled bytecode file to disassemble.
    file_path: String,
}
/// Entry point for `forc parse-bytecode`: disassembles the bytecode file and
/// prints one table row per word with its index, byte offset, decoded opcode,
/// raw bytes, and an explanatory note for jumps and header words.
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    let instructions = parse_bytecode_to_instructions(&command.file_path)?;
    let mut table = term_table::Table::new();
    table.separate_rows = false;
    table.add_row(Row::new(vec![
        TableCell::new("half-word"),
        TableCell::new("byte"),
        TableCell::new("op"),
        TableCell::new("raw"),
        TableCell::new("notes"),
    ]));
    table.style = term_table::TableStyle::empty();
    for (word_ix, (result, raw)) in instructions.enumerate() {
        use fuel_asm::Instruction;
        // Human-readable annotation: jump targets are converted from word
        // immediates to byte offsets (x4); words 2-5 of the header encode the
        // data-section and configurables offsets and decode as invalid opcodes.
        let notes = match result {
            Ok(Instruction::JI(ji)) => format!("jump to byte {}", u32::from(ji.imm24()) * 4),
            Ok(Instruction::JNEI(jnei)) => {
                format!("conditionally jump to byte {}", u32::from(jnei.imm12()) * 4)
            }
            Ok(Instruction::JNZI(jnzi)) => {
                format!("conditionally jump to byte {}", u32::from(jnzi.imm18()) * 4)
            }
            Err(fuel_asm::InvalidOpcode) if word_ix == 2 || word_ix == 3 => {
                let parsed_raw = u32::from_be_bytes([raw[0], raw[1], raw[2], raw[3]]);
                format!(
                    "data section offset {} ({})",
                    if word_ix == 2 { "lo" } else { "hi" },
                    parsed_raw
                )
            }
            Err(fuel_asm::InvalidOpcode) if word_ix == 4 || word_ix == 5 => {
                let parsed_raw = u32::from_be_bytes([raw[0], raw[1], raw[2], raw[3]]);
                format!(
                    "configurables offset {} ({})",
                    if word_ix == 4 { "lo" } else { "hi" },
                    parsed_raw
                )
            }
            Ok(_) | Err(fuel_asm::InvalidOpcode) => "".into(),
        };
        table.add_row(Row::new(vec![
            TableCell::builder(word_ix)
                .col_span(1)
                .alignment(Alignment::Right)
                .build(),
            TableCell::new(word_ix * 4),
            TableCell::new(match result {
                Ok(inst) => format!("{inst:?}"),
                Err(err) => format!("{err:?}"),
            }),
            TableCell::new(format!(
                "{:02x} {:02x} {:02x} {:02x}",
                raw[0], raw[1], raw[2], raw[3],
            )),
            TableCell::new(notes),
        ]));
    }
    info!("{}", table.render());
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/cli/commands/new.rs | forc/src/cli/commands/new.rs | use crate::{cli::init::Command as InitCommand, ops::forc_init::init};
use anyhow::anyhow;
use clap::Parser;
use forc_util::{forc_result_bail, validate_project_name, ForcResult};
use std::path::{Path, PathBuf};
// Example invocations rendered into `forc new --help` (see `cli_examples!` in forc-util).
forc_util::cli_examples! {
    crate::cli::Opt {
        [Create a new project => "forc new --contract --name my_project <PATH>"]
        [Create a new workspace => "forc new --workspace --name my_workspace <PATH>"]
        [Create a new Forc project with a predicate => "forc new <PATH> --predicate"]
        [Create a new Forc library project => "forc new <PATH> --library"]
    }
}
// Doc comments on this struct and its fields double as the clap `--help` text.
/// Create a new Forc project at `<path>`.
#[derive(Debug, Parser)]
#[clap(bin_name = "forc new", version, after_help = help())]
pub struct Command {
    /// The default program type. Excluding all flags or adding this flag creates a basic contract
    /// program.
    #[clap(long)]
    pub contract: bool,
    /// Adding this flag creates an empty script program.
    #[clap(long)]
    pub script: bool,
    /// Adding this flag creates an empty predicate program.
    #[clap(long)]
    pub predicate: bool,
    /// Adding this flag creates an empty library program.
    #[clap(long)]
    pub library: bool,
    /// Adding this flag creates an empty workspace.
    #[clap(long)]
    pub workspace: bool,
    /// Set the package name. Defaults to the directory name
    #[clap(long)]
    pub name: Option<String>,
    /// The path at which the project directory will be created.
    pub path: String,
}
pub(crate) fn exec(command: Command) -> ForcResult<()> {
    // `forc new` is essentially `forc init` preceded by creating the target
    // directory: validate the name, create the directory, then delegate to
    // the existing `forc init` logic.
    let Command {
        contract,
        script,
        predicate,
        library,
        workspace,
        name,
        path,
    } = command;

    // Validate either the explicit `--name`, or — when absent — the final
    // path component, which implicitly becomes the project name.
    if let Some(ref project_name) = name {
        validate_project_name(project_name)?;
    } else {
        let path_buf = PathBuf::from(&path);
        let dir_name = path_buf
            .file_name()
            .ok_or_else(|| anyhow!("missing path for new command"))?
            .to_string_lossy();
        validate_project_name(&dir_name)?;
    }

    let target_dir = Path::new(&path);
    if target_dir.exists() {
        forc_result_bail!(
            "Directory \"{}\" already exists.\nIf you wish to initialise a forc project inside \
            this directory, consider using `forc init --path {}`",
            target_dir.canonicalize()?.display(),
            target_dir.display(),
        );
    }
    std::fs::create_dir_all(target_dir)?;

    // Hand off to `forc init` on the freshly created directory.
    init(InitCommand {
        path: Some(path),
        contract,
        script,
        predicate,
        library,
        workspace,
        name,
    })?;
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_check.rs | forc/src/ops/forc_check.rs | use crate::cli::CheckCommand;
use anyhow::Result;
use forc_pkg as pkg;
use forc_pkg::manifest::GenericManifestFile;
use pkg::manifest::ManifestFile;
use std::{path::PathBuf, sync::Arc};
use sway_core::{language::ty, Engines};
use sway_error::handler::Handler;
/// Type-checks the package or workspace at `command.path` (or the current
/// directory when no path is given).
///
/// Returns the typed program of the last build-plan member (if type-checking
/// produced one) together with the diagnostics `Handler` collected while
/// checking.
pub fn check(
    command: CheckCommand,
    engines: &Engines,
) -> Result<(Option<Arc<ty::TyProgram>>, Handler)> {
    let CheckCommand {
        build_target,
        path,
        offline_mode: offline,
        terse_mode,
        locked,
        disable_tests,
        ipfs_node,
        experimental,
        ..
    } = command;

    // Resolve the directory to check: explicit `--path` or the cwd.
    let this_dir = if let Some(ref path) = path {
        PathBuf::from(path)
    } else {
        std::env::current_dir()?
    };

    let manifest_file = ManifestFile::from_dir(this_dir)?;
    let member_manifests = manifest_file.member_manifests()?;
    let lock_path = manifest_file.lock_path()?;
    // The build plan honours `--locked` / `--offline` and the configured IPFS
    // node when resolving dependencies.
    let plan = pkg::BuildPlan::from_lock_and_manifests(
        &lock_path,
        &member_manifests,
        locked,
        offline,
        &ipfs_node.unwrap_or_default(),
    )?;

    // Tests are included in the check unless explicitly disabled.
    let tests_enabled = !disable_tests;

    let mut v = pkg::check(
        &plan,
        build_target,
        terse_mode,
        None,
        tests_enabled,
        engines,
        None,
        &experimental.experimental,
        &experimental.no_experimental,
        sway_core::DbgGeneration::None,
    )?;
    // `pkg::check` yields one entry per build-plan member; report the last one.
    let (res, handler) = v
        .pop()
        .expect("there is guaranteed to be at least one elem in the vector");
    // Surface the typed program only if type-checking actually produced one.
    let res = res.and_then(|programs| programs.typed.ok());
    Ok((res, handler))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_template.rs | forc/src/ops/forc_template.rs | use crate::cli::TemplateCommand;
use anyhow::{anyhow, Context, Result};
use forc_pkg::{
manifest::{self, PackageManifest},
source::{self, git::Url},
};
use forc_tracing::println_action_green;
use forc_util::validate_project_name;
use fs_extra::dir::{copy, CopyOptions};
use std::fs::File;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::{env, str::FromStr};
use sway_utils::constants;
/// Creates a new project from a remote git template.
///
/// Fetches the repo at `command.url` (HEAD of the default branch), locates the
/// template directory (either `command.template_name` within the repo, or the
/// repo root if it is itself a Forc package), copies it into
/// `./<project_name>`, and patches the copied `Forc.toml` (and `Cargo.toml`
/// for the `test` harness, when present) with the new project name and author.
pub fn init(command: TemplateCommand) -> Result<()> {
    validate_project_name(&command.project_name)?;
    // The name used for the temporary local repo directory used for fetching the template.
    let local_repo_name = command
        .template_name
        .clone()
        .unwrap_or_else(|| format!("{}-template-source", command.project_name));
    let source = source::git::Source {
        repo: Url::from_str(&command.url)?,
        reference: source::git::Reference::DefaultBranch,
    };
    let current_dir = &env::current_dir()?;
    // A fetch id ties this fetch to a unique on-disk checkout location.
    let fetch_ts = std::time::Instant::now();
    let fetch_id = source::fetch_id(current_dir, fetch_ts);
    println_action_green("Resolving", &format!("the HEAD of {}", source.repo));
    // Pin the source to the resolved commit so the checkout path is stable.
    let git_source = source::git::pin(fetch_id, &local_repo_name, source)?;
    let repo_path = source::git::commit_path(
        &local_repo_name,
        &git_source.source.repo,
        &git_source.commit_hash,
    );
    // Only fetch if this commit has not already been checked out locally.
    if !repo_path.exists() {
        println_action_green("Fetching", git_source.to_string().as_str());
        source::git::fetch(fetch_id, &local_repo_name, &git_source)?;
    }
    // Locate the template: a named subdirectory, or the repo root itself.
    let from_path = match command.template_name {
        Some(ref template_name) => manifest::find_dir_within(&repo_path, template_name)
            .ok_or_else(|| {
                anyhow!(
                    "failed to find a template `{}` in {}",
                    template_name,
                    command.url
                )
            })?,
        None => {
            let manifest_path = repo_path.join(constants::MANIFEST_FILE_NAME);
            if PackageManifest::from_file(manifest_path).is_err() {
                anyhow::bail!("failed to find a template in {}", command.url);
            }
            repo_path
        }
    };
    // Create the target dir
    let target_dir = current_dir.join(&command.project_name);
    println_action_green(
        "Creating",
        &format!("{} from template", &command.project_name),
    );
    // Copy contents from template to target dir
    copy_template_to_target(&from_path, &target_dir)?;
    // Edit forc.toml
    edit_forc_toml(&target_dir, &command.project_name, &whoami::realname())?;
    // Templates that ship a `test` harness also carry a Cargo manifest.
    if target_dir.join("test").exists() {
        edit_cargo_toml(&target_dir, &command.project_name, &whoami::realname())?;
    }
    Ok(())
}
/// Rewrites the copied template's `Forc.toml` in `out_dir` for the new
/// project: sets `[project] name`, appends `real_name` to the authors list
/// (only if not already present), and drops any explicit `std` dependency.
fn edit_forc_toml(out_dir: &Path, project_name: &str, real_name: &str) -> Result<()> {
    let mut file = File::open(out_dir.join(constants::MANIFEST_FILE_NAME))?;
    let mut toml = String::new();
    file.read_to_string(&mut toml)?;
    let mut manifest_toml = toml.parse::<toml_edit::DocumentMut>()?;

    // Collect any authors already present in `[project] authors`.
    let mut authors = Vec::new();
    let forc_toml: toml::Value = toml::de::from_str(&toml)?;
    if let Some(table) = forc_toml.as_table() {
        if let Some(package) = table.get("project") {
            // If authors Vec is currently populated use that
            if let Some(toml::Value::Array(authors_vec)) = package.get("authors") {
                for author in authors_vec {
                    if let toml::value::Value::String(name) = &author {
                        authors.push(name.clone());
                    }
                }
            }
        }
    }
    // Only append the user's name to the authors field if it isn't already in
    // the list. (The previous condition, `any(|e| e != real_name)`, was
    // inverted: it appended whenever any *other* author existed and never
    // appended to an empty list.)
    if !authors.iter().any(|e| e == real_name) {
        authors.push(real_name.to_string());
    }
    let authors: toml_edit::Array = authors.iter().collect();
    manifest_toml["project"]["authors"] = toml_edit::value(authors);
    manifest_toml["project"]["name"] = toml_edit::value(project_name);
    // Remove explicit std entry from copied template
    if let Some(project) = manifest_toml.get_mut("dependencies") {
        let _ = project
            .as_table_mut()
            .context("Unable to get forc manifest as table")?
            .remove("std");
    }
    let mut file = File::create(out_dir.join(constants::MANIFEST_FILE_NAME))?;
    file.write_all(manifest_toml.to_string().as_bytes())?;
    Ok(())
}
/// Rewrites the template's Cargo test-harness manifest in `out_dir`:
/// sets `[package] name` to `project_name` and appends `real_name` to the
/// existing authors list.
fn edit_cargo_toml(out_dir: &Path, project_name: &str, real_name: &str) -> Result<()> {
    let mut file = File::open(out_dir.join(constants::TEST_MANIFEST_FILE_NAME))?;
    let mut toml = String::new();
    file.read_to_string(&mut toml)?;
    // Preserve any authors already listed in `[package] authors`.
    let mut updated_authors = toml_edit::Array::default();
    let cargo_toml: toml::Value = toml::de::from_str(&toml)?;
    if let Some(table) = cargo_toml.as_table() {
        if let Some(package) = table.get("package") {
            if let Some(toml::Value::Array(authors_vec)) = package.get("authors") {
                for author in authors_vec {
                    if let toml::value::Value::String(name) = &author {
                        updated_authors.push(name);
                    }
                }
            }
        }
    }
    // NOTE(review): unlike `edit_forc_toml`, this appends unconditionally, so
    // a name already present would be duplicated — confirm this is intended.
    updated_authors.push(real_name);
    let mut manifest_toml = toml.parse::<toml_edit::DocumentMut>()?;
    manifest_toml["package"]["authors"] = toml_edit::value(updated_authors);
    manifest_toml["package"]["name"] = toml_edit::value(project_name);
    let mut file = File::create(out_dir.join(constants::TEST_MANIFEST_FILE_NAME))?;
    file.write_all(manifest_toml.to_string().as_bytes())?;
    Ok(())
}
/// Recursively copies the template directory at `from` into `to`.
///
/// `copy_inside` places the *contents* of `from` directly inside `to` rather
/// than nesting the source directory itself.
fn copy_template_to_target(from: &PathBuf, to: &PathBuf) -> Result<()> {
    let mut copy_options = CopyOptions::new();
    copy_options.copy_inside = true;
    // Fixed: the options argument must be borrowed (`&copy_options`); the
    // previous token (`©_options`) was a mangled `&copy_options` and
    // does not compile.
    copy(from, to, &copy_options)?;
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_clean.rs | forc/src/ops/forc_clean.rs | use crate::cli::CleanCommand;
use anyhow::{anyhow, bail, Result};
use forc_pkg::manifest::GenericManifestFile;
use forc_pkg::manifest::ManifestFile;
use forc_util::default_output_directory;
use std::path::PathBuf;
use sway_utils::{find_parent_manifest_dir, MANIFEST_FILE_NAME};
/// Removes build artifacts (`<project>/out`) for the package or workspace
/// containing `command.path` (or the current directory).
pub fn clean(command: CleanCommand) -> Result<()> {
    let CleanCommand { path } = command;

    // Directory to start the manifest search from: `--path` or the cwd.
    let start_dir = match path {
        Some(ref p) => PathBuf::from(p),
        None => std::env::current_dir().map_err(|e| anyhow!("{:?}", e))?,
    };

    // Walk up from `start_dir` until a `Forc.toml` is found.
    let manifest_dir = match find_parent_manifest_dir(&start_dir) {
        None => {
            bail!(
                "could not find `{}` in `{}` or any parent directory",
                MANIFEST_FILE_NAME,
                start_dir.display(),
            )
        }
        Some(dir) => dir,
    };
    let manifest = ManifestFile::from_dir(manifest_dir)?;

    // A lone package cleans itself; a workspace cleans every member.
    let member_paths: Vec<PathBuf> = match manifest {
        ManifestFile::Package(_) => vec![start_dir],
        ManifestFile::Workspace(ws) => ws.member_paths()?.collect(),
    };

    for dir in member_paths {
        // Best-effort removal of `<project>/out`; a missing directory is fine.
        let _ = std::fs::remove_dir_all(default_output_directory(&dir));
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_predicate_root.rs | forc/src/ops/forc_predicate_root.rs | use crate::cli::{shared::IrCliOpt, PredicateRootCommand};
use anyhow::Result;
use forc_pkg::{self as pkg, build_with_options, DumpOpts};
use sway_core::{BuildTarget, IrCli};
/// Prints the predicate root for every predicate member of the current
/// package or workspace.
///
/// Building a predicate already outputs its root, so it suffices to run a
/// build whose member filter keeps only predicate projects.
pub fn predicate_root(command: PredicateRootCommand) -> Result<()> {
    let opts = build_opts_from_cmd(command);
    build_with_options(&opts, None)?;
    Ok(())
}
/// Maps `forc predicate-root` CLI flags onto `pkg::BuildOpts`.
///
/// Notably restricts the build to predicate members via
/// `MemberFilter::only_predicates`, and disables tests and byte-span printing.
fn build_opts_from_cmd(cmd: PredicateRootCommand) -> pkg::BuildOpts {
    pkg::BuildOpts {
        pkg: pkg::PkgOpts {
            path: cmd.pkg.path.clone(),
            offline: cmd.pkg.offline,
            terse: cmd.pkg.terse,
            locked: cmd.pkg.locked,
            output_directory: cmd.pkg.output_directory.clone(),
            ipfs_node: cmd.pkg.ipfs_node.unwrap_or_default(),
        },
        print: pkg::PrintOpts {
            ast: cmd.print.ast,
            dca_graph: cmd.print.dca_graph.clone(),
            dca_graph_url_format: cmd.print.dca_graph_url_format.clone(),
            asm: cmd.print.asm(),
            bytecode: cmd.print.bytecode,
            bytecode_spans: false,
            ir: cmd.print.ir(),
            reverse_order: cmd.print.reverse_order,
        },
        // Absent `--verify-ir` flags fall back to the default IR verification.
        verify_ir: cmd
            .verify_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        dump: DumpOpts::default(),
        time_phases: cmd.print.time_phases,
        profile: cmd.print.profile,
        metrics_outfile: cmd.print.metrics_outfile,
        minify: pkg::MinifyOpts {
            json_abi: cmd.minify.json_abi,
            json_storage_slots: cmd.minify.json_storage_slots,
        },
        build_profile: cmd.build_profile.build_profile.clone(),
        release: cmd.build_profile.release,
        error_on_warnings: cmd.build_profile.error_on_warnings,
        binary_outfile: cmd.build_output.bin_file.clone(),
        debug_outfile: cmd.build_output.debug_file,
        hex_outfile: cmd.build_output.hex_file.clone(),
        build_target: BuildTarget::default(),
        tests: false,
        // Only predicate members are relevant for predicate roots.
        member_filter: pkg::MemberFilter::only_predicates(),
        experimental: cmd.experimental.experimental,
        no_experimental: cmd.experimental.no_experimental,
        no_output: false,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/mod.rs | forc/src/ops/mod.rs | pub mod forc_build;
pub mod forc_check;
pub mod forc_clean;
pub mod forc_contract_id;
pub mod forc_init;
pub mod forc_predicate_root;
pub mod forc_template;
pub mod forc_update;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_contract_id.rs | forc/src/ops/forc_contract_id.rs | use crate::cli::{shared::IrCliOpt, ContractIdCommand};
use anyhow::{bail, Result};
use forc_pkg::{self as pkg, build_with_options, DumpOpts};
use forc_tracing::println_green;
use sway_core::{fuel_prelude::fuel_tx, BuildTarget, IrCli};
use tracing::info;
/// Builds the contract members of the current package or workspace and prints
/// each resulting contract id (derived from bytecode, storage slots, and
/// salt).
pub fn contract_id(command: ContractIdCommand) -> Result<()> {
    let build_options = build_opts_from_cmd(&command);
    let build_plan = pkg::BuildPlan::from_pkg_opts(&build_options.pkg)?;
    // If a salt was specified but we have more than one member to build, there
    // may be ambiguity in how the salt should be applied, especially if the
    // workspace contains multiple contracts, and especially if one contract
    // member is the dependency of another (in which case salt should be
    // specified under `[contract-dependencies]`). Considering this, we have a
    // simple check to ensure that we only accept salt when working on a single
    // package. In the future, we can consider relaxing this to allow for
    // specifying a salt for workspaces, as long as there is only one
    // root contract member in the package graph.
    if command.salt.salt.is_some() && build_plan.member_nodes().count() > 1 {
        bail!(
            "A salt was specified when attempting to detect the contract id \
            for a workspace with more than one member.
         If you wish to find out contract id for a contract member with\
            salt, run this command for the member individually.
         If you wish to specify the salt for a contract dependency, \
            please do so within the `[contract-dependencies]` table."
        )
    }
    let built = build_with_options(&build_options, None)?;
    for (pinned_contract, built_contract) in built.into_members() {
        // Salt precedence: explicit CLI salt, then the build plan's salt for
        // this member, then the zero salt.
        let salt = command
            .salt
            .salt
            .or_else(|| build_plan.salt(pinned_contract))
            .unwrap_or_else(fuel_tx::Salt::zeroed);
        let name = &pinned_contract.name;
        let storage_slots = built_contract.storage_slots.clone();
        let contract_id = pkg::contract_id(&built_contract.bytecode.bytes, storage_slots, &salt);
        println_green(&format!("      {name}"));
        info!("      Contract id: 0x{contract_id}");
    }
    Ok(())
}
/// Maps `forc contract-id` CLI flags onto `pkg::BuildOpts`.
///
/// Restricts the build to contract members via `MemberFilter::only_contracts`
/// and disables tests and byte-span printing.
fn build_opts_from_cmd(cmd: &ContractIdCommand) -> pkg::BuildOpts {
    pkg::BuildOpts {
        pkg: pkg::PkgOpts {
            path: cmd.pkg.path.clone(),
            offline: cmd.pkg.offline,
            terse: cmd.pkg.terse,
            locked: cmd.pkg.locked,
            output_directory: cmd.pkg.output_directory.clone(),
            ipfs_node: cmd.pkg.ipfs_node.clone().unwrap_or_default(),
        },
        print: pkg::PrintOpts {
            ast: cmd.print.ast,
            dca_graph: cmd.print.dca_graph.clone(),
            dca_graph_url_format: cmd.print.dca_graph_url_format.clone(),
            asm: cmd.print.asm(),
            bytecode: cmd.print.bytecode,
            bytecode_spans: false,
            ir: cmd.print.ir(),
            reverse_order: cmd.print.reverse_order,
        },
        // Absent `--verify-ir` flags fall back to the default IR verification.
        verify_ir: cmd
            .verify_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        dump: DumpOpts::default(),
        time_phases: cmd.print.time_phases,
        profile: cmd.print.profile,
        metrics_outfile: cmd.print.metrics_outfile.clone(),
        minify: pkg::MinifyOpts {
            json_abi: cmd.minify.json_abi,
            json_storage_slots: cmd.minify.json_storage_slots,
        },
        build_profile: cmd.build_profile.build_profile.clone(),
        release: cmd.build_profile.release,
        error_on_warnings: cmd.build_profile.error_on_warnings,
        binary_outfile: cmd.build_output.bin_file.clone(),
        debug_outfile: cmd.build_output.debug_file.clone(),
        hex_outfile: cmd.build_output.hex_file.clone(),
        build_target: BuildTarget::default(),
        tests: false,
        // Only contract members can have a contract id.
        member_filter: pkg::MemberFilter::only_contracts(),
        experimental: cmd.experimental.experimental.clone(),
        no_experimental: cmd.experimental.no_experimental.clone(),
        no_output: false,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_build.rs | forc/src/ops/forc_build.rs | use crate::cli::{shared::IrCliOpt, BuildCommand};
use forc_pkg as pkg;
use forc_util::ForcResult;
use pkg::MemberFilter;
use sway_core::IrCli;
/// Runs a `forc build` with options derived from the CLI command and returns
/// the built artifacts.
pub fn build(cmd: BuildCommand) -> ForcResult<pkg::Built> {
    // Translate CLI flags into package build options, then run the build.
    let options = opts_from_cmd(cmd);
    let built = pkg::build_with_options(&options, None)?;
    Ok(built)
}
/// Maps `forc build` CLI flags onto `pkg::BuildOpts`.
///
/// Unlike the `contract-id` / `predicate-root` variants, this uses the
/// default (unfiltered) member filter and honours the `--tests` flag.
fn opts_from_cmd(cmd: BuildCommand) -> pkg::BuildOpts {
    pkg::BuildOpts {
        pkg: pkg::PkgOpts {
            path: cmd.build.pkg.path.clone(),
            offline: cmd.build.pkg.offline,
            terse: cmd.build.pkg.terse,
            locked: cmd.build.pkg.locked,
            output_directory: cmd.build.pkg.output_directory.clone(),
            ipfs_node: cmd.build.pkg.ipfs_node.clone().unwrap_or_default(),
        },
        print: pkg::PrintOpts {
            ast: cmd.build.print.ast,
            dca_graph: cmd.build.print.dca_graph.clone(),
            dca_graph_url_format: cmd.build.print.dca_graph_url_format.clone(),
            asm: cmd.build.print.asm(),
            bytecode: cmd.build.print.bytecode,
            bytecode_spans: false,
            ir: cmd.build.print.ir(),
            reverse_order: cmd.build.print.reverse_order,
        },
        // Absent `--verify-ir` flags fall back to the default IR verification.
        verify_ir: cmd
            .build
            .verify_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        dump: pkg::DumpOpts {
            dump_impls: cmd.build.dump.dump_impls,
        },
        time_phases: cmd.build.print.time_phases,
        profile: cmd.build.print.profile,
        metrics_outfile: cmd.build.print.metrics_outfile,
        minify: pkg::MinifyOpts {
            json_abi: cmd.build.minify.json_abi,
            json_storage_slots: cmd.build.minify.json_storage_slots,
        },
        build_profile: cmd.build.profile.build_profile,
        release: cmd.build.profile.release,
        error_on_warnings: cmd.build.profile.error_on_warnings,
        binary_outfile: cmd.build.output.bin_file,
        debug_outfile: cmd.build.output.debug_file,
        hex_outfile: cmd.build.output.hex_file,
        build_target: cmd.build.build_target,
        tests: cmd.tests,
        member_filter: MemberFilter::default(),
        experimental: cmd.experimental.experimental,
        no_experimental: cmd.experimental.no_experimental,
        no_output: false,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_init.rs | forc/src/ops/forc_init.rs | use crate::cli::InitCommand;
use crate::utils::{defaults, program_type::ProgramType};
use anyhow::Context;
use forc_util::{forc_result_bail, validate_project_name, ForcResult};
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use sway_utils::constants;
use tracing::{debug, info};
#[derive(Debug)]
enum InitType {
Package(ProgramType),
Workspace,
}
/// Prints the post-init "getting started" message: build/test hints followed
/// by documentation, community, and bug-report links.
fn print_welcome_message() {
    let read_the_docs = format!(
        "Read the Docs:\n- {}\n- {}\n- {}\n- {}",
        "Sway Book: https://docs.fuel.network/docs/sway",
        "Forc Book: https://docs.fuel.network/docs/forc",
        "Rust SDK Book: https://docs.fuel.network/docs/fuels-rs",
        "TypeScript SDK: https://docs.fuel.network/docs/fuels-ts"
    );
    // The embedded newline and indentation below are intentional; they shape
    // the rendered output.
    let join_the_community = format!(
        "Join the Community:\n- Follow us {}
    - Ask questions on {}",
        "@SwayLang: https://twitter.com/SwayLang", "Discourse: https://forum.fuel.network/"
    );
    let report_bugs = format!(
        "Report Bugs:\n- {}",
        "Sway Issues: https://github.com/FuelLabs/sway/issues/new"
    );
    let try_forc = "To compile, use `forc build`, and to run tests use `forc test`";
    info!(
        "\n{}\n\n----\n\n{}\n\n{}\n\n{}\n\n",
        try_forc, read_the_docs, join_the_community, report_bugs
    );
}
/// Initializes a new Forc package or workspace inside an existing directory.
///
/// Infers the project name from the directory when `--name` is not given,
/// validates it, writes the default `Forc.toml` and entry-point source file
/// for the chosen program type, and appends forc/cargo defaults to
/// `.gitignore`.
pub fn init(command: InitCommand) -> ForcResult<()> {
    // Target directory: explicit `--path` or the current working directory.
    let project_dir = match &command.path {
        Some(p) => PathBuf::from(p),
        None => {
            std::env::current_dir().context("Failed to get current directory for forc init.")?
        }
    };
    if !project_dir.is_dir() {
        forc_result_bail!(format!(
            "'{}' is not a valid directory.",
            project_dir.display()
        ),);
    }
    // Refuse to clobber an existing package manifest.
    if project_dir.join(constants::MANIFEST_FILE_NAME).exists() {
        forc_result_bail!(
            "'{}' already includes a Forc.toml file.",
            project_dir.display()
        );
    }
    debug!(
        "\nUsing project directory at {}",
        project_dir.canonicalize()?.display()
    );
    // Project name: `--name` wins, otherwise the directory's stem.
    let project_name = match command.name {
        Some(name) => name,
        None => project_dir
            .file_stem()
            .context("Failed to infer project name from directory name.")?
            .to_string_lossy()
            .into_owned(),
    };
    validate_project_name(&project_name)?;
    // At most one program-type flag may be set; no flags defaults to contract.
    let init_type = match (
        command.contract,
        command.script,
        command.predicate,
        command.library,
        command.workspace,
    ) {
        (_, false, false, false, false) => InitType::Package(ProgramType::Contract),
        (false, true, false, false, false) => InitType::Package(ProgramType::Script),
        (false, false, true, false, false) => InitType::Package(ProgramType::Predicate),
        (false, false, false, true, false) => InitType::Package(ProgramType::Library),
        (false, false, false, false, true) => InitType::Workspace,
        _ => {
            forc_result_bail!(
                "Multiple types detected, please specify only one initialization type: \
                \n Possible Types:\n - contract\n - script\n - predicate\n - library\n - workspace"
            )
        }
    };
    // Make a new directory for the project
    let dir_to_create = match init_type {
        InitType::Package(_) => project_dir.join("src"),
        InitType::Workspace => project_dir.clone(),
    };
    fs::create_dir_all(dir_to_create)?;
    // Insert default manifest file
    match init_type {
        InitType::Workspace => fs::write(
            Path::new(&project_dir).join(constants::MANIFEST_FILE_NAME),
            defaults::default_workspace_manifest(),
        )?,
        InitType::Package(ProgramType::Library) => fs::write(
            Path::new(&project_dir).join(constants::MANIFEST_FILE_NAME),
            // Library names cannot have `-` in them because the Sway compiler does not allow that.
            // Even though this is technically not a problem in the toml file, we replace `-` with
            // `_` here as well so that the library name in the Sway file matches the one in
            // `Forc.toml`
            defaults::default_pkg_manifest(&project_name.replace('-', "_"), constants::LIB_ENTRY),
        )?,
        _ => fs::write(
            Path::new(&project_dir).join(constants::MANIFEST_FILE_NAME),
            defaults::default_pkg_manifest(&project_name, constants::MAIN_ENTRY),
        )?,
    }
    // Write the default entry-point source for the chosen program type.
    match init_type {
        InitType::Package(ProgramType::Contract) => fs::write(
            Path::new(&project_dir)
                .join("src")
                .join(constants::MAIN_ENTRY),
            defaults::default_contract(),
        )?,
        InitType::Package(ProgramType::Script) => fs::write(
            Path::new(&project_dir)
                .join("src")
                .join(constants::MAIN_ENTRY),
            defaults::default_script(),
        )?,
        InitType::Package(ProgramType::Library) => fs::write(
            Path::new(&project_dir)
                .join("src")
                .join(constants::LIB_ENTRY),
            // Library names cannot have `-` in them because the Sway compiler does not allow that
            defaults::default_library(),
        )?,
        InitType::Package(ProgramType::Predicate) => fs::write(
            Path::new(&project_dir)
                .join("src")
                .join(constants::MAIN_ENTRY),
            defaults::default_predicate(),
        )?,
        _ => {}
    }
    // Ignore default `out` and `target` directories created by forc and cargo.
    let gitignore_path = Path::new(&project_dir).join(".gitignore");
    // Append to existing gitignore if it exists otherwise create a new one.
    let mut gitignore_file = fs::OpenOptions::new()
        .append(true)
        .create(true)
        .open(&gitignore_path)?;
    gitignore_file.write_all(defaults::default_gitignore().as_bytes())?;
    debug!(
        "\nCreated .gitignore at {}",
        gitignore_path.canonicalize()?.display()
    );
    debug!("\nSuccessfully created {init_type:?}: {project_name}",);
    print_welcome_message();
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/src/ops/forc_update.rs | forc/src/ops/forc_update.rs | use crate::cli::UpdateCommand;
use anyhow::{anyhow, Result};
use forc_pkg::manifest::GenericManifestFile;
use forc_pkg::{self as pkg, lock, Lock};
use forc_util::lock_path;
use pkg::manifest::ManifestFile;
use std::{fs, path::PathBuf};
use tracing::info;
/// Running `forc update` will check for updates for the entire dependency graph and commit new
/// semver-compatible versions to the `Forc.lock` file. For git dependencies, the commit is updated
/// to the HEAD of the specified branch, or remains unchanged in the case a tag is specified. Path
/// dependencies remain unchanged as they are always sourced directly.
///
/// This is called during `forc build` in the case that there is no existing `Forc.lock` file for
/// the project.
///
/// Run `forc update --check` to perform a dry-run and produce a list of updates that will be
/// performed across all dependencies without actually committing them to the lock file.
///
/// Use the `--package <package-name>` flag to update only a specific package throughout the
/// dependency graph.
pub fn update(command: UpdateCommand) -> Result<()> {
    let UpdateCommand {
        path,
        check,
        // TODO: Use `package` here rather than `target_dependency`
        target_dependency: _,
        ..
    } = command;

    // Resolve the project directory: explicit `--path` or the cwd.
    let this_dir = match path {
        Some(path) => PathBuf::from(path),
        None => std::env::current_dir()?,
    };
    let manifest = ManifestFile::from_dir(this_dir)?;
    let lock_path = lock_path(manifest.dir());
    // A missing or unreadable lock file is treated as empty so the diff below
    // reports everything as newly added.
    let old_lock = Lock::from_path(&lock_path).ok().unwrap_or_default();
    let offline = false;
    let member_manifests = manifest.member_manifests()?;
    let ipfs_node = command.ipfs_node.unwrap_or_default();
    // Re-resolve the full dependency graph from the manifests alone (ignoring
    // the old lock), then diff against what was previously locked.
    let new_plan = pkg::BuildPlan::from_manifests(&member_manifests, offline, &ipfs_node)?;
    let new_lock = Lock::from_graph(new_plan.graph());
    let diff = new_lock.diff(&old_lock);
    let member_names = member_manifests
        .values()
        .map(|manifest| manifest.project.name.clone())
        .collect();
    lock::print_diff(&member_names, &diff);
    // If we're not only `check`ing, write the updated lock file.
    if !check {
        let string = toml::ser::to_string_pretty(&new_lock)
            .map_err(|e| anyhow!("failed to serialize lock file: {}", e))?;
        fs::write(&lock_path, string).map_err(|e| anyhow!("failed to write lock file: {}", e))?;
        info!("   Created new lock file at {}", lock_path.display());
    } else {
        info!(" `--check` enabled: `Forc.lock` was not changed");
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc/tests/cli_integration.rs | forc/tests/cli_integration.rs | use std::path::PathBuf;
use rexpect::spawn;
const TIMEOUT_MS: u64 = 300000;
/// Absolute, canonical path to this crate's `tests/fixtures` directory.
fn test_fixtures_path() -> PathBuf {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    manifest_dir
        .join("tests")
        .join("fixtures")
        .canonicalize()
        .unwrap()
}
/// Runs `forc test --logs` against the fixture contract and asserts that
/// decoded log values appear in the output.
#[test]
fn test_forc_test_decoded_logs() -> Result<(), rexpect::error::Error> {
    // Spawn the forc binary using cargo run
    let project_dir = test_fixtures_path().join("test_contract");
    let mut process = spawn(
        &format!(
            "cargo run --bin forc -- test --logs --path {}",
            project_dir.to_string_lossy()
        ),
        Some(TIMEOUT_MS),
    )?;

    // Assert that the output is correct
    process.exp_string("      test test_log_4")?;
    process.exp_string("decoded log values:")?;
    process.exp_string("4, log rb: 1515152261580153489")?;
    process.exp_string("      test test_log_2")?;
    process.exp_string("decoded log values:")?;
    process.exp_string("2, log rb: 1515152261580153489")?;
    process.process.exit()?;
    Ok(())
}

/// Runs `forc test --raw-logs` and asserts that the raw receipt JSON
/// (exact bytes, including digests and offsets) appears in the output.
#[test]
fn test_forc_test_raw_logs() -> Result<(), rexpect::error::Error> {
    // Spawn the forc binary using cargo run
    let project_dir = test_fixtures_path().join("test_contract");
    let mut process = spawn(
        &format!(
            "cargo run --bin forc -- test --raw-logs --path {}",
            project_dir.to_string_lossy()
        ),
        Some(TIMEOUT_MS),
    )?;

    // Assert that the output is correct
    process.exp_string("      test test_log_4")?;
    process.exp_string("raw logs:")?;
    process.exp_string(r#"[{"LogData":{"data":"0000000000000004","digest":"8005f02d43fa06e7d0585fb64c961d57e318b27a145c857bcd3a6bdb413ff7fc","id":"0000000000000000000000000000000000000000000000000000000000000000","is":10368,"len":8,"pc":11432,"ptr":67108856,"ra":0,"rb":1515152261580153489}}]"#)?;
    process.exp_string("      test test_log_2")?;
    process.exp_string("raw logs:")?;
    process.exp_string(r#"[{"LogData":{"data":"0000000000000002","digest":"cd04a4754498e06db5a13c5f371f1f04ff6d2470f24aa9bd886540e5dce77f70","id":"0000000000000000000000000000000000000000000000000000000000000000","is":10368,"len":8,"pc":11432,"ptr":67108856,"ra":0,"rb":1515152261580153489}}]"#)?;
    process.process.exit()?;
    Ok(())
}

/// Runs `forc test --logs --raw-logs` and asserts that both decoded values
/// and raw receipt JSON appear together.
#[test]
fn test_forc_test_both_logs() -> Result<(), rexpect::error::Error> {
    // Spawn the forc binary using cargo run
    let project_dir = test_fixtures_path().join("test_contract");
    let mut process = spawn(
        &format!(
            "cargo run --bin forc -- test --logs --raw-logs --path {}",
            project_dir.to_string_lossy()
        ),
        Some(TIMEOUT_MS),
    )?;

    // Assert that the output is correct
    process.exp_string("      test test_log_4")?;
    process.exp_string("decoded log values:")?;
    process.exp_string("4, log rb: 1515152261580153489")?;
    process.exp_string("raw logs:")?;
    process.exp_string(r#"[{"LogData":{"data":"0000000000000004","digest":"8005f02d43fa06e7d0585fb64c961d57e318b27a145c857bcd3a6bdb413ff7fc","id":"0000000000000000000000000000000000000000000000000000000000000000","is":10368,"len":8,"pc":11432,"ptr":67108856,"ra":0,"rb":1515152261580153489}}]"#)?;
    process.exp_string("      test test_log_2")?;
    process.exp_string("decoded log values:")?;
    process.exp_string("2, log rb: 1515152261580153489")?;
    process.exp_string("raw logs:")?;
    process.exp_string(r#"[{"LogData":{"data":"0000000000000002","digest":"cd04a4754498e06db5a13c5f371f1f04ff6d2470f24aa9bd886540e5dce77f70","id":"0000000000000000000000000000000000000000000000000000000000000000","is":10368,"len":8,"pc":11432,"ptr":67108856,"ra":0,"rb":1515152261580153489}}]"#)?;
    process.process.exit()?;
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-util/src/lib.rs | forc-util/src/lib.rs | //! Utility items shared between forc crates.
use annotate_snippets::{
renderer::{AnsiColor, Style},
Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation,
};
use anyhow::{bail, Context, Result};
use forc_tracing::{println_action_green, println_error, println_red_err, println_yellow_err};
use std::{
collections::{hash_map, HashSet},
fmt::Display,
fs::File,
hash::{Hash, Hasher},
path::{Path, PathBuf},
process::Termination,
str,
};
use sway_core::language::parsed::TreeType;
use sway_error::{
diagnostic::{Diagnostic, Issue, Label, LabelType, Level, ToDiagnostic},
error::CompileError,
warning::{CompileInfo, CompileWarning},
};
use sway_types::{LineCol, LineColRange, SourceEngine, Span};
use sway_utils::constants;
pub mod bytecode;
pub mod fs_locking;
pub mod restricted;
#[cfg(feature = "tx")]
pub mod tx_utils;
#[macro_use]
pub mod cli;
pub use ansiterm;
pub use paste;
pub use regex::Regex;
pub const DEFAULT_OUTPUT_DIRECTORY: &str = "out";
pub const DEFAULT_ERROR_EXIT_CODE: u8 = 1;
pub const DEFAULT_SUCCESS_EXIT_CODE: u8 = 0;
/// A result type for forc operations. This shouldn't be returned from entry points, instead return
/// `ForcCliResult` to exit with correct exit code.
pub type ForcResult<T, E = ForcError> = Result<T, E>;
/// A wrapper around `ForcResult`. Designed to be returned from entry points as it handles
/// error reporting and exits with correct exit code.
#[derive(Debug)]
pub struct ForcCliResult<T> {
result: ForcResult<T>,
}
/// A forc error type which is a wrapper around `anyhow::Error`. It enables propagation of custom
/// exit code alongside the original error.
#[derive(Debug)]
pub struct ForcError {
    // The underlying error being reported.
    error: anyhow::Error,
    // Process exit code to terminate with; defaults to `DEFAULT_ERROR_EXIT_CODE`.
    exit_code: u8,
}
impl ForcError {
    /// Creates a `ForcError` from an error and an explicit process exit code.
    pub fn new(error: anyhow::Error, exit_code: u8) -> Self {
        Self { error, exit_code }
    }
    /// Returns a `ForcError` with provided exit_code.
    pub fn exit_code(self, exit_code: u8) -> Self {
        Self {
            error: self.error,
            exit_code,
        }
    }
}
impl AsRef<anyhow::Error> for ForcError {
    fn as_ref(&self) -> &anyhow::Error {
        &self.error
    }
}
// A bare message becomes an error carrying the default (failure) exit code.
impl From<&str> for ForcError {
    fn from(value: &str) -> Self {
        Self {
            error: anyhow::anyhow!("{value}"),
            exit_code: DEFAULT_ERROR_EXIT_CODE,
        }
    }
}
// Allows `?` on `anyhow::Result` inside functions returning `ForcResult`.
impl From<anyhow::Error> for ForcError {
    fn from(value: anyhow::Error) -> Self {
        Self {
            error: value,
            exit_code: DEFAULT_ERROR_EXIT_CODE,
        }
    }
}
// Allows `?` on `std::io::Result` inside functions returning `ForcResult`.
impl From<std::io::Error> for ForcError {
    fn from(value: std::io::Error) -> Self {
        Self {
            error: value.into(),
            exit_code: DEFAULT_ERROR_EXIT_CODE,
        }
    }
}
// Delegates to the wrapped error so user-facing text matches `anyhow`'s output.
impl Display for ForcError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.error.fmt(f)
    }
}
// Lets `main` return `ForcCliResult`: the error (if any) is printed and the
// process exits with the error's stored exit code.
impl<T> Termination for ForcCliResult<T> {
    fn report(self) -> std::process::ExitCode {
        match self.result {
            Ok(_) => DEFAULT_SUCCESS_EXIT_CODE.into(),
            Err(e) => {
                println_error(&format!("{e}"));
                e.exit_code.into()
            }
        }
    }
}
impl<T> From<ForcResult<T>> for ForcCliResult<T> {
    fn from(value: ForcResult<T>) -> Self {
        Self { result: value }
    }
}
/// `bail!`-style early return for functions returning `ForcResult`.
///
/// Accepts the same argument forms as `anyhow::anyhow!`: a literal, an
/// expression implementing `Display`/`Error`, or a format string with args.
#[macro_export]
macro_rules! forc_result_bail {
    ($msg:literal $(,)?) => {
        return $crate::ForcResult::Err(anyhow::anyhow!($msg).into())
    };
    ($err:expr $(,)?) => {
        return $crate::ForcResult::Err(anyhow::anyhow!($err).into())
    };
    ($fmt:expr, $($arg:tt)*) => {
        return $crate::ForcResult::Err(anyhow::anyhow!($fmt, $($arg)*).into())
    };
}
/// Returns `entry_path` made relative to the *parent* of `manifest_dir`, so the
/// result keeps the project directory name as its first component.
///
/// # Errors
/// Fails when `entry_path` does not live under the parent of `manifest_dir`.
pub fn find_file_name<'sc>(manifest_dir: &Path, entry_path: &'sc Path) -> Result<&'sc Path> {
    // Drop the last component: strip everything *above* the project directory.
    let mut prefix = manifest_dir.to_path_buf();
    prefix.pop();
    // `strip_prefix` only borrows the prefix — no need to clone the path.
    match entry_path.strip_prefix(&prefix) {
        Ok(file_name) => Ok(file_name),
        Err(err) => bail!(err),
    }
}
/// Returns the path of the `Forc.lock` file within `manifest_dir`.
pub fn lock_path(manifest_dir: &Path) -> PathBuf {
    manifest_dir.join(constants::LOCK_FILE_NAME)
}
/// Validates a project name: checks the project-specific format first
/// (must be empty or start with a letter), then the general name rules.
pub fn validate_project_name(name: &str) -> Result<()> {
    restricted::is_valid_project_name_format(name)?;
    validate_name(name, "project name")
}
// Using (https://github.com/rust-lang/cargo/blob/489b66f2e458404a10d7824194d3ded94bc1f4e4/src/cargo/util/toml/mod.rs +
// https://github.com/rust-lang/cargo/blob/489b66f2e458404a10d7824194d3ded94bc1f4e4/src/cargo/ops/cargo_new.rs) for reference
/// Validates `name` for use as a package/project identifier: rejects invalid
/// characters, Sway keywords, names conflicting with Forc build directories or
/// std-library suffixes, the reserved `test` name, Windows-reserved file names
/// and non-ASCII names.
///
/// `use_case` is interpolated into the error messages (e.g. "project name").
pub fn validate_name(name: &str, use_case: &str) -> Result<()> {
    // if true returns formatted error
    restricted::contains_invalid_char(name, use_case)?;
    if restricted::is_keyword(name) {
        bail!("the name `{name}` cannot be used as a {use_case}, it is a Sway keyword");
    }
    if restricted::is_conflicting_artifact_name(name) {
        bail!(
            "the name `{name}` cannot be used as a {use_case}, \
            it conflicts with Forc's build directory names"
        );
    }
    if name.to_lowercase() == "test" {
        bail!(
            "the name `test` cannot be used as a {use_case}, \
            it conflicts with Sway's built-in test library"
        );
    }
    if restricted::is_conflicting_suffix(name) {
        bail!(
            "the name `{name}` is part of Sway's standard library\n\
            It is recommended to use a different name to avoid problems."
        );
    }
    if restricted::is_windows_reserved(name) {
        // Hard error on Windows; elsewhere only warn that the package won't be portable.
        if cfg!(windows) {
            bail!("cannot use name `{name}`, it is a reserved Windows filename");
        } else {
            bail!(
                "the name `{name}` is a reserved Windows filename\n\
                This package will not work on Windows platforms."
            );
        }
    }
    if restricted::is_non_ascii_name(name) {
        bail!("the name `{name}` contains non-ASCII characters which are unsupported");
    }
    Ok(())
}
/// Converts a kebab-case identifier to snake_case by turning every `-` into `_`.
pub fn kebab_to_snake_case(s: &str) -> String {
    s.chars()
        .map(|c| if c == '-' { '_' } else { c })
        .collect()
}
/// Returns the default build output directory (`out/`) inside `manifest_dir`.
pub fn default_output_directory(manifest_dir: &Path) -> PathBuf {
    manifest_dir.join(DEFAULT_OUTPUT_DIRECTORY)
}
/// Returns the user's `.forc` directory, `$HOME/.forc` by default.
pub fn user_forc_directory() -> PathBuf {
    dirs::home_dir()
        .expect("unable to find the user home directory")
        .join(constants::USER_FORC_DIRECTORY)
}
/// The location at which `forc` will checkout git repositories.
pub fn git_checkouts_directory() -> PathBuf {
    user_forc_directory().join("git").join("checkouts")
}
/// Given a path to a directory we wish to lock, produce a path for an associated lock file.
///
/// Note that the lock file itself is simply a placeholder for co-ordinating access. As a result,
/// we want to create the lock file if it doesn't exist, but we can never reliably remove it
/// without risking invalidation of an existing lock. As a result, we use a dedicated, hidden
/// directory with a lock file named after the checkout path.
///
/// Note: This has nothing to do with `Forc.lock` files, rather this is about fd locks for
/// coordinating access to particular paths (e.g. git checkout directories).
fn fd_lock_path<X: AsRef<Path>>(path: X) -> PathBuf {
    const LOCKS_DIR_NAME: &str = ".locks";
    const LOCK_EXT: &str = "forc-lock";
    // Lock files live at `<user forc dir>/.locks/<hashed-name>.forc-lock`.
    let mut lock_path = user_forc_directory().join(LOCKS_DIR_NAME);
    lock_path.push(hash_path(path));
    lock_path.set_extension(LOCK_EXT);
    lock_path
}
/// Hash the path to produce a file-system friendly file name.
/// Append the file stem for improved readability.
fn hash_path<X: AsRef<Path>>(path: X) -> String {
    let path = path.as_ref();
    // `DefaultHasher::default()` uses fixed keys, so the produced name is
    // stable across runs for the same path.
    let mut hasher = hash_map::DefaultHasher::default();
    path.hash(&mut hasher);
    let hash = hasher.finish();
    // Return the match directly instead of binding it first (clippy::let_and_return).
    match path.file_stem().and_then(|s| s.to_str()) {
        None => format!("{hash:X}"),
        Some(stem) => format!("{hash:X}-{stem}"),
    }
}
/// Create an advisory lock over the given path.
///
/// See [fd_lock_path] for details.
///
/// # Errors
/// Fails when the lock directory cannot be created or the lock file cannot be
/// opened for writing.
pub fn path_lock<X: AsRef<Path>>(path: X) -> Result<fd_lock::RwLock<File>> {
    let lock_path = fd_lock_path(path);
    let lock_dir = lock_path
        .parent()
        .expect("lock path has no parent directory");
    std::fs::create_dir_all(lock_dir).context("failed to create forc advisory lock directory")?;
    // `File::create` truncates, which is fine here: the file content is
    // irrelevant, only its descriptor is used for locking.
    let lock_file = File::create(&lock_path).context("failed to create advisory lock file")?;
    Ok(fd_lock::RwLock::new(lock_file))
}
/// Returns the lowercase human-readable name of the given program type.
pub fn program_type_str(ty: &TreeType) -> &'static str {
    match ty {
        TreeType::Script => "script",
        TreeType::Contract => "contract",
        TreeType::Predicate => "predicate",
        TreeType::Library => "library",
    }
}
/// Prints the green "Compiling" action line for the given program.
pub fn print_compiling(ty: Option<&TreeType>, name: &str, src: &dyn std::fmt::Display) {
    // NOTE: We can only print the program type if we can parse the program, so
    // program type must be optional.
    let ty = match ty {
        Some(ty) => format!("{} ", program_type_str(ty)),
        None => "".to_string(),
    };
    println_action_green(
        "Compiling",
        &format!("{ty}{} ({src})", ansiterm::Style::new().bold().paint(name)),
    );
}
/// Renders each informational diagnostic; suppressed entirely in terse mode.
pub fn print_infos(source_engine: &SourceEngine, terse_mode: bool, infos: &[CompileInfo]) {
    if infos.is_empty() {
        return;
    }
    if !terse_mode {
        infos
            .iter()
            .for_each(|n| format_diagnostic(&n.to_diagnostic(source_engine)));
    }
}
/// Renders all `warnings` (unless `terse_mode`) followed by a yellow summary
/// line naming the project and the warning count. No-op when there are none.
pub fn print_warnings(
    source_engine: &SourceEngine,
    terse_mode: bool,
    proj_name: &str,
    warnings: &[CompileWarning],
    tree_type: &TreeType,
) {
    if warnings.is_empty() {
        return;
    }
    let type_str = program_type_str(tree_type);
    if !terse_mode {
        warnings
            .iter()
            .for_each(|w| format_diagnostic(&w.to_diagnostic(source_engine)));
    }
    println_yellow_err(&format!(
        " Compiled {} {:?} with {} {}.",
        type_str,
        proj_name,
        warnings.len(),
        // Pluralize; the early return above guarantees len() >= 1 here.
        if warnings.len() > 1 {
            "warnings"
        } else {
            "warning"
        }
    ));
}
/// Renders infos, warnings and errors for a failed compilation, then prints a
/// red "Aborting" summary. With `reverse_results`, diagnostics are emitted in
/// reverse order.
pub fn print_on_failure(
    source_engine: &SourceEngine,
    terse_mode: bool,
    infos: &[CompileInfo],
    warnings: &[CompileWarning],
    errors: &[CompileError],
    reverse_results: bool,
) {
    print_infos(source_engine, terse_mode, infos);
    let e_len = errors.len();
    let w_len = warnings.len();
    if !terse_mode {
        if reverse_results {
            warnings
                .iter()
                .rev()
                .for_each(|w| format_diagnostic(&w.to_diagnostic(source_engine)));
            errors
                .iter()
                .rev()
                .for_each(|e| format_diagnostic(&e.to_diagnostic(source_engine)));
        } else {
            warnings
                .iter()
                .for_each(|w| format_diagnostic(&w.to_diagnostic(source_engine)));
            errors
                .iter()
                .for_each(|e| format_diagnostic(&e.to_diagnostic(source_engine)));
        }
    }
    // No errors but warnings present: the build failed because warnings were
    // treated as errors.
    if e_len == 0 && w_len > 0 {
        println_red_err(&format!(
            " Aborting. {} warning(s) treated as error(s).",
            warnings.len()
        ));
    } else {
        println_red_err(&format!(
            " Aborting due to {} {}.",
            e_len,
            if e_len > 1 { "errors" } else { "error" }
        ));
    }
}
/// Creates [Renderer] for printing warnings and errors.
///
/// To ensure the same styling of printed warnings and errors across all the tools,
/// always use this function to create [Renderer]s,
pub fn create_diagnostics_renderer() -> Renderer {
    // For the diagnostic messages we use bold and bright colors.
    // Note that for the summaries of warnings and errors we use
    // their regular equivalents which are defined in `forc-tracing` package.
    Renderer::styled()
        .warning(
            // Warnings: bold bright yellow.
            Style::new()
                .bold()
                .fg_color(Some(AnsiColor::BrightYellow.into())),
        )
        .error(
            // Errors: bold bright red.
            Style::new()
                .bold()
                .fg_color(Some(AnsiColor::BrightRed.into())),
        )
}
/// Renders `diagnostic` (title, annotated source slices, help footer) and
/// emits it via `tracing` at the severity matching the diagnostic's level.
pub fn format_diagnostic(diagnostic: &Diagnostic) {
    /// Temporary switch for testing the feature.
    /// Keep it false until we decide to fully support the diagnostic codes.
    const SHOW_DIAGNOSTIC_CODE: bool = false;
    // Old-style diagnostics carry only a single issue; render them and bail out.
    if diagnostic.is_old_style() {
        format_old_style_diagnostic(diagnostic.issue());
        return;
    }
    let mut label = String::new();
    get_title_label(diagnostic, &mut label);
    let snippet_title = Some(Annotation {
        label: Some(label.as_str()),
        id: if SHOW_DIAGNOSTIC_CODE {
            diagnostic.reason().map(|reason| reason.code())
        } else {
            None
        },
        annotation_type: diagnostic_level_to_annotation_type(diagnostic.level()),
    });
    let mut snippet_slices = Vec::<Slice<'_>>::new();
    // We first display labels from the issue file...
    if diagnostic.issue().is_in_source() {
        snippet_slices.push(construct_slice(diagnostic.labels_in_issue_source()))
    }
    // ...and then all the remaining labels from the other files.
    for source_path in diagnostic.related_sources(false) {
        snippet_slices.push(construct_slice(diagnostic.labels_in_source(source_path)))
    }
    // Each help entry becomes a footer annotation.
    let mut snippet_footer = Vec::<Annotation<'_>>::new();
    for help in diagnostic.help() {
        snippet_footer.push(Annotation {
            id: None,
            label: Some(help),
            annotation_type: AnnotationType::Help,
        });
    }
    let snippet = Snippet {
        title: snippet_title,
        slices: snippet_slices,
        footer: snippet_footer,
    };
    let renderer = create_diagnostics_renderer();
    match diagnostic.level() {
        Level::Info => tracing::info!("{}\n____\n", renderer.render(snippet)),
        Level::Warning => tracing::warn!("{}\n____\n", renderer.render(snippet)),
        Level::Error => tracing::error!("{}\n____\n", renderer.render(snippet)),
    }
    // Renders a legacy single-issue diagnostic (no reason/labels/help structure).
    fn format_old_style_diagnostic(issue: &Issue) {
        let annotation_type = label_type_to_annotation_type(issue.label_type());
        let snippet_title = Some(Annotation {
            label: if issue.is_in_source() {
                None
            } else {
                Some(issue.text())
            },
            id: None,
            annotation_type,
        });
        let mut snippet_slices = vec![];
        if issue.is_in_source() {
            let span = issue.span();
            let input = span.input();
            let mut start_pos = span.start();
            let mut end_pos = span.end();
            let LineColRange { mut start, end } = span.line_col_one_index();
            // Trim the shown source to a window around the issue and rebase
            // the byte positions into that window.
            let input = construct_window(&mut start, end, &mut start_pos, &mut end_pos, input);
            let slice = Slice {
                source: input,
                line_start: start.line,
                // Safe unwrap because the issue is in source, so the source path surely exists.
                origin: Some(issue.source_path().unwrap().as_str()),
                fold: false,
                annotations: vec![SourceAnnotation {
                    label: issue.text(),
                    annotation_type,
                    range: (start_pos, end_pos),
                }],
            };
            snippet_slices.push(slice);
        }
        let snippet = Snippet {
            title: snippet_title,
            footer: vec![],
            slices: snippet_slices,
        };
        let renderer = create_diagnostics_renderer();
        tracing::error!("{}\n____\n", renderer.render(snippet));
    }
    // Writes the diagnostic's reason description (if any) into `label`.
    fn get_title_label(diagnostics: &Diagnostic, label: &mut String) {
        label.clear();
        if let Some(reason) = diagnostics.reason() {
            label.push_str(reason.description());
        }
    }
    fn diagnostic_level_to_annotation_type(level: Level) -> AnnotationType {
        match level {
            Level::Info => AnnotationType::Info,
            Level::Warning => AnnotationType::Warning,
            Level::Error => AnnotationType::Error,
        }
    }
}
/// Builds an annotated source `Slice` from labels that all refer to places in
/// the same source file.
fn construct_slice(labels: Vec<&Label>) -> Slice {
    debug_assert!(
        !labels.is_empty(),
        "To construct slices, at least one label must be provided."
    );
    debug_assert!(
        labels.iter().all(|label| label.is_in_source()),
        "Slices can be constructed only for labels that are related to a place in source code."
    );
    debug_assert!(
        HashSet::<&str>::from_iter(labels.iter().map(|label| label.source_path().unwrap().as_str())).len() == 1,
        "Slices can be constructed only for labels that are related to places in the same source code."
    );
    let source_file = labels[0].source_path().map(|path| path.as_str());
    let source_code = labels[0].span().input();
    // Joint span of the code snippet that covers all the labels.
    let span = Span::join_all(labels.iter().map(|label| label.span().clone()));
    let (source, line_start, shift_in_bytes) = construct_code_snippet(&span, source_code);
    let mut annotations = vec![];
    for message in labels {
        annotations.push(SourceAnnotation {
            label: message.text(),
            annotation_type: label_type_to_annotation_type(message.label_type()),
            range: get_annotation_range(message.span(), source_code, shift_in_bytes),
        });
    }
    return Slice {
        source,
        line_start,
        origin: source_file,
        fold: true,
        annotations,
    };
    // Converts a label's absolute byte span into a char-based range relative
    // to the start of the shown snippet.
    fn get_annotation_range(
        span: &Span,
        source_code: &str,
        shift_in_bytes: usize,
    ) -> (usize, usize) {
        let mut start_pos = span.start();
        let mut end_pos = span.end();
        // Clamp at 0 so positions before the snippet don't underflow.
        let start_ix_bytes = start_pos - std::cmp::min(shift_in_bytes, start_pos);
        let end_ix_bytes = end_pos - std::cmp::min(shift_in_bytes, end_pos);
        // We want the start_pos and end_pos in terms of chars and not bytes, so translate.
        start_pos = source_code[shift_in_bytes..(shift_in_bytes + start_ix_bytes)]
            .chars()
            .count();
        end_pos = source_code[shift_in_bytes..(shift_in_bytes + end_ix_bytes)]
            .chars()
            .count();
        (start_pos, end_pos)
    }
}
/// Maps a diagnostic label type to the corresponding renderer annotation type.
fn label_type_to_annotation_type(label_type: LabelType) -> AnnotationType {
    match label_type {
        LabelType::Info => AnnotationType::Info,
        LabelType::Help => AnnotationType::Help,
        LabelType::Warning => AnnotationType::Warning,
        LabelType::Error => AnnotationType::Error,
    }
}
/// Given the overall span to be shown in the code snippet, determines how much of the input source
/// to show in the snippet.
///
/// Returns the source to be shown, the line start, and the offset of the snippet in bytes relative
/// to the beginning of the input code.
///
/// The library we use doesn't handle auto-windowing and line numbers, so we must manually
/// calculate the line numbers and match them up with the input window. It is a bit fiddly.
fn construct_code_snippet<'a>(span: &Span, input: &'a str) -> (&'a str, usize, usize) {
    // how many lines to prepend or append to the highlighted region in the window
    const NUM_LINES_BUFFER: usize = 2;
    let LineColRange { start, end } = span.line_col_one_index();
    let total_lines_in_input = input.chars().filter(|x| *x == '\n').count();
    debug_assert!(end.line >= start.line);
    let total_lines_of_highlight = end.line - start.line;
    debug_assert!(total_lines_in_input >= total_lines_of_highlight);
    let mut current_line = 0;
    let mut lines_to_start_of_snippet = 0;
    let mut calculated_start_ix = None;
    let mut calculated_end_ix = None;
    let mut pos = 0;
    // Single pass over the input: find the byte offset where the window opens
    // (a few buffer lines before the span) and where it closes (a few after).
    for character in input.chars() {
        if character == '\n' {
            current_line += 1
        }
        if current_line + NUM_LINES_BUFFER >= start.line && calculated_start_ix.is_none() {
            calculated_start_ix = Some(pos);
            lines_to_start_of_snippet = current_line;
        }
        if current_line >= end.line + NUM_LINES_BUFFER && calculated_end_ix.is_none() {
            calculated_end_ix = Some(pos);
        }
        if calculated_start_ix.is_some() && calculated_end_ix.is_some() {
            break;
        }
        pos += character.len_utf8();
    }
    // Fall back to the input's boundaries when the span (plus buffer) runs off
    // either end of the source.
    let calculated_start_ix = calculated_start_ix.unwrap_or(0);
    let calculated_end_ix = calculated_end_ix.unwrap_or(input.len());
    (
        &input[calculated_start_ix..calculated_end_ix],
        lines_to_start_of_snippet,
        calculated_start_ix,
    )
}
// TODO: Remove once "old-style" diagnostic is fully replaced with new one and the backward
// compatibility is no longer needed.
/// Given a start and an end position and an input, determine how much of a window to show in the
/// error.
/// Mutates the start and end indexes to be in line with the new slice length.
///
/// The library we use doesn't handle auto-windowing and line numbers, so we must manually
/// calculate the line numbers and match them up with the input window. It is a bit fiddly.
fn construct_window<'a>(
    start: &mut LineCol,
    end: LineCol,
    start_ix: &mut usize,
    end_ix: &mut usize,
    input: &'a str,
) -> &'a str {
    // how many lines to prepend or append to the highlighted region in the window
    const NUM_LINES_BUFFER: usize = 2;
    let total_lines_in_input = input.chars().filter(|x| *x == '\n').count();
    debug_assert!(end.line >= start.line);
    let total_lines_of_highlight = end.line - start.line;
    debug_assert!(total_lines_in_input >= total_lines_of_highlight);
    let mut current_line = 1usize;
    // Pair every byte offset with the (1-indexed) line its character sits on.
    let mut chars = input.char_indices().map(|(char_offset, character)| {
        let r = (current_line, char_offset);
        if character == '\n' {
            current_line += 1;
        }
        r
    });
    // Find the first char of the first line
    let first_char = chars
        .by_ref()
        .find(|(current_line, _)| current_line + NUM_LINES_BUFFER >= start.line);
    // Find the last char of the last line
    let last_char = chars
        .by_ref()
        .find(|(current_line, _)| *current_line > end.line + NUM_LINES_BUFFER)
        .map(|x| x.1);
    // this releases the borrow of `current_line`
    drop(chars);
    let (first_char_line, first_char_offset, last_char_offset) = match (first_char, last_char) {
        // has first and last
        (Some((first_char_line, first_char_offset)), Some(last_char_offset)) => {
            (first_char_line, first_char_offset, last_char_offset)
        }
        // has first and no last
        (Some((first_char_line, first_char_offset)), None) => {
            (first_char_line, first_char_offset, input.len())
        }
        // others
        _ => (current_line, input.len(), input.len()),
    };
    // adjust indices to be inside the returned window
    start.line = first_char_line;
    *start_ix = start_ix.saturating_sub(first_char_offset);
    *end_ix = end_ix.saturating_sub(first_char_offset);
    &input[first_char_offset..last_char_offset]
}
// End-to-end checks of `construct_window`: windowing around the span, index
// rebasing, and graceful handling of out-of-range lines/positions.
#[test]
fn ok_construct_window() {
    // Calls `construct_window` with a (line, col) range plus byte positions and
    // returns the rebased positions together with the windowed source.
    fn t(
        start_line: usize,
        start_col: usize,
        end_line: usize,
        end_col: usize,
        start_char: usize,
        end_char: usize,
        input: &str,
    ) -> (usize, usize, &str) {
        let mut s = LineCol {
            line: start_line,
            col: start_col,
        };
        let mut start = start_char;
        let mut end = end_char;
        let r = construct_window(
            &mut s,
            LineCol {
                line: end_line,
                col: end_col,
            },
            &mut start,
            &mut end,
            input,
        );
        (start, end, r)
    }
    // Invalid Empty file
    assert_eq!(t(0, 0, 0, 0, 0, 0, ""), (0, 0, ""));
    // Valid Empty File
    assert_eq!(t(1, 1, 1, 1, 0, 0, ""), (0, 0, ""));
    // One line, error after the last char
    assert_eq!(t(1, 7, 1, 7, 6, 6, "script"), (6, 6, "script"));
    // 01 23 45 67 89 AB CD E
    let eight_lines = "1\n2\n3\n4\n5\n6\n7\n8";
    assert_eq!(t(1, 1, 1, 1, 0, 1, eight_lines), (0, 1, "1\n2\n3\n"));
    assert_eq!(t(2, 1, 2, 1, 2, 3, eight_lines), (2, 3, "1\n2\n3\n4\n"));
    assert_eq!(t(3, 1, 3, 1, 4, 5, eight_lines), (4, 5, "1\n2\n3\n4\n5\n"));
    assert_eq!(t(4, 1, 4, 1, 6, 7, eight_lines), (4, 5, "2\n3\n4\n5\n6\n"));
    assert_eq!(t(5, 1, 5, 1, 8, 9, eight_lines), (4, 5, "3\n4\n5\n6\n7\n"));
    assert_eq!(t(6, 1, 6, 1, 10, 11, eight_lines), (4, 5, "4\n5\n6\n7\n8"));
    assert_eq!(t(7, 1, 7, 1, 12, 13, eight_lines), (4, 5, "5\n6\n7\n8"));
    assert_eq!(t(8, 1, 8, 1, 14, 15, eight_lines), (4, 5, "6\n7\n8"));
    // Invalid lines
    assert_eq!(t(9, 1, 9, 1, 14, 15, eight_lines), (2, 3, "7\n8"));
    assert_eq!(t(10, 1, 10, 1, 14, 15, eight_lines), (0, 1, "8"));
    assert_eq!(t(11, 1, 11, 1, 14, 15, eight_lines), (0, 0, ""));
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-util/src/restricted.rs | forc-util/src/restricted.rs | //! Helpers for validating and checking names like package and organization names.
// This is based on https://github.com/rust-lang/cargo/blob/489b66f2e458404a10d7824194d3ded94bc1f4e4/src/cargo/util/restricted_names.rs
use anyhow::{bail, Result};
use regex::Regex;
use std::path::Path;
/// Returns `true` if the name contains non-ASCII characters.
pub fn is_non_ascii_name(name: &str) -> bool {
    // `str::is_ascii` is the byte-level equivalent of scanning chars for
    // anything above `\x7f` — simpler and cheaper than a char-wise loop.
    !name.is_ascii()
}
/// Rust keywords, further bikeshedding necessary to determine a complete set of Sway keywords
pub fn is_keyword(name: &str) -> bool {
    // See https://doc.rust-lang.org/reference/keywords.html
    const KEYWORDS: &[&str] = &[
        "Self", "abstract", "as", "await", "become", "box", "break", "const", "continue", "dep",
        "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in",
        "let", "loop", "macro", "match", "move", "mut", "override", "priv", "pub", "ref", "return",
        "self", "static", "struct", "super", "trait", "true", "try", "type", "typeof", "unsafe",
        "unsized", "use", "virtual", "where", "while", "yield",
    ];
    KEYWORDS.iter().any(|keyword| *keyword == name)
}
/// These names cannot be used on Windows, even with an extension.
///
/// The check is case-insensitive (`CON` is as reserved as `con`) and, unlike
/// lowercasing the input first, performs no allocation.
pub fn is_windows_reserved(name: &str) -> bool {
    [
        "con", "prn", "aux", "nul", "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8",
        "com9", "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7", "lpt8", "lpt9",
    ]
    .iter()
    .any(|reserved| name.eq_ignore_ascii_case(reserved))
}
/// These names conflict with library, macro or heap allocation suffixes, or keywords.
pub fn is_conflicting_suffix(name: &str) -> bool {
    matches!(name, "alloc" | "proc_macro" | "proc-macro")
}
// Bikeshedding necessary to determine if relevant
/// An artifact with this name will conflict with one of forc's build directories.
pub fn is_conflicting_artifact_name(name: &str) -> bool {
    matches!(name, "deps" | "examples" | "build" | "incremental")
}
/// Check the package name for invalid characters.
///
/// Rules: the first character must be a Unicode XID start character or `_`
/// (and not a digit); every following character must be an XID continue
/// character or `-`; the name must not be empty.
pub fn contains_invalid_char(name: &str, use_case: &str) -> Result<()> {
    let mut chars = name.chars();
    let ch = match chars.next() {
        Some(ch) => ch,
        // No first character at all: reject empty names.
        None => bail!(
            "{use_case} cannot be left empty, \
            please use a valid name"
        ),
    };
    // A specific error for a potentially common case.
    if ch.is_ascii_digit() {
        bail!(
            "the name `{name}` cannot be used as a {use_case}, \
            the name cannot start with a digit"
        );
    }
    if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') {
        bail!(
            "invalid character `{ch}` in {use_case}: `{name}`, \
            the first character must be a Unicode XID start character \
            (most letters or `_`)"
        );
    }
    for ch in chars {
        if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-') {
            bail!(
                "invalid character `{ch}` in {use_case}: `{name}`, \
                characters must be Unicode XID characters \
                (numbers, `-`, `_`, or most letters)"
            );
        }
    }
    Ok(())
}
/// Check the entire path for names reserved in Windows.
pub fn is_windows_reserved_path(path: &Path) -> bool {
    path.iter().any(|component| {
        component.to_str().map_or(false, |component| {
            // Only the part before the first `.` matters: `con.txt` is as
            // reserved as `con`. `split` always yields at least one item.
            is_windows_reserved(component.split('.').next().unwrap())
        })
    })
}
/// Returns `true` if the name contains any glob pattern wildcards.
pub fn is_glob_pattern<T: AsRef<str>>(name: T) -> bool {
    name.as_ref()
        .chars()
        .any(|ch| matches!(ch, '*' | '?' | '[' | ']'))
}
/// Check the project name format.
///
/// A valid name is either empty or an ASCII letter followed by any number of
/// letters, digits, hyphens, or underscores.
pub fn is_valid_project_name_format(name: &str) -> Result<()> {
    // `*` (not `+`) after the leading letter: a single-letter name such as `a`
    // satisfies the documented rule and must be accepted.
    let re = Regex::new(r"^([a-zA-Z]([a-zA-Z0-9-_]*)|)$").unwrap();
    if !re.is_match(name) {
        bail!(
            "'{name}' is not a valid name for a project. \n\
            The name may use letters, numbers, hyphens, and underscores, and must start with a letter."
        );
    }
    Ok(())
}
// Verifies both the per-character rejection messages and the empty-name error.
#[test]
fn test_invalid_char() {
    assert_eq!(
        contains_invalid_char("test#proj", "package name").map_err(|e| e.to_string()),
        std::result::Result::Err(
            "invalid character `#` in package name: `test#proj`, \
            characters must be Unicode XID characters \
            (numbers, `-`, `_`, or most letters)"
                .into()
        )
    );
    assert_eq!(
        contains_invalid_char("test proj", "package name").map_err(|e| e.to_string()),
        std::result::Result::Err(
            "invalid character ` ` in package name: `test proj`, \
            characters must be Unicode XID characters \
            (numbers, `-`, `_`, or most letters)"
                .into()
        )
    );
    assert_eq!(
        contains_invalid_char("", "package name").map_err(|e| e.to_string()),
        std::result::Result::Err(
            "package name cannot be left empty, \
            please use a valid name"
                .into()
        )
    );
    assert!(matches!(
        contains_invalid_char("test_proj", "package name"),
        std::result::Result::Ok(())
    ));
}
// Exercises the project-name regex with accepted and rejected shapes.
#[test]
fn test_is_valid_project_name_format() {
    let assert_valid = |name: &str| {
        is_valid_project_name_format(name).expect("this should pass");
    };
    let assert_invalid = |name: &str, expected_error: &str| {
        assert_eq!(
            is_valid_project_name_format(name).map_err(|e| e.to_string()),
            Err(expected_error.into())
        );
    };
    let format_error_message = |name: &str| -> String {
        format!(
            "'{name}' is not a valid name for a project. \n\
            The name may use letters, numbers, hyphens, and underscores, and must start with a letter."
        )
    };
    // Test valid project names
    assert_valid("mock_project_name");
    assert_valid("mock_project_name123");
    assert_valid("mock_project_name-123-_");
    // Test invalid project names
    assert_invalid("1mock_project", &format_error_message("1mock_project"));
    assert_invalid("mock_.project", &format_error_message("mock_.project"));
    assert_invalid("mock_/project", &format_error_message("mock_/project"));
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-util/src/cli.rs | forc-util/src/cli.rs | #[macro_export]
// Let the user format the help and parse it from that string into arguments to create the unit test
/// Generates, from `[description => command]` pairs:
/// * one unit test per example verifying the command parses with the given CLI
///   (retrying without the leading `forc` for plugin invocations), and
/// * `help()`/`examples()` helpers rendering the EXAMPLES section for `--help`.
macro_rules! cli_examples {
    ($st:path { $( [ $($description:ident)* => $command:stmt ] )* }) => {
        forc_util::cli_examples! {
            {
                $crate::paste::paste! {
                    use clap::Parser;
                    $st::try_parse_from
                }
            } {
                $( [ $($description)* => $command ] )*
            }
        }
    };
    ( $code:block { $( [ $($description:ident)* => $command:stmt ] )* }) => {
        $crate::paste::paste! {
            #[cfg(test)]
            mod cli_parsing {
                $(
                    #[test]
                    fn [<$($description:lower _)*:snake example>] () {
                        let cli_parser = $code;
                        let mut args = parse_args($command);
                        if cli_parser(args.clone()).is_err() {
                            // Failed to parse, it maybe a plugin. To execute a plugin the first argument needs to be removed, `forc`.
                            args.remove(0);
                            cli_parser(args).expect("valid subcommand");
                        }
                    }
                )*
                // Minimal shell-like tokenizer for the example commands:
                // handles whitespace runs, `=` separators, quoting and `\` escapes.
                #[cfg(test)]
                fn parse_args(input: &str) -> Vec<String> {
                    let mut chars = input.chars().peekable().into_iter();
                    let mut args = vec![];
                    loop {
                        let character = if let Some(c) = chars.next() { c } else { break };
                        match character {
                            // Separators (incl. line continuations): skip the whole run.
                            ' ' | '\\' | '\t' | '\n' => loop {
                                match chars.peek() {
                                    Some(' ') | Some('\t') | Some('\n') => chars.next(),
                                    _ => break,
                                };
                            },
                            '=' => {
                                args.push("=".to_string());
                            }
                            // Quoted word: consume until the matching quote, honoring `\` escapes.
                            '"' | '\'' => {
                                let end_character = character;
                                let mut current_word = String::new();
                                loop {
                                    match chars.peek() {
                                        Some(character) => {
                                            if *character == end_character {
                                                let _ = chars.next();
                                                args.push(current_word);
                                                break;
                                            } else if *character == '\\' {
                                                let _ = chars.next();
                                                if let Some(character) = chars.next() {
                                                    current_word.push(character);
                                                }
                                            } else {
                                                current_word.push(*character);
                                                chars.next();
                                            }
                                        }
                                        None => {
                                            break;
                                        }
                                    }
                                }
                            }
                            // Bare word: read until the next separator or quote.
                            character => {
                                let mut current_word = character.to_string();
                                loop {
                                    match chars.peek() {
                                        Some(' ') | Some('\t') | Some('\n') | Some('=') | Some('\'')
                                        | Some('"') | None => {
                                            args.push(current_word);
                                            break;
                                        }
                                        Some(character) => {
                                            current_word.push(*character);
                                            chars.next();
                                        }
                                    }
                                }
                            }
                        }
                    }
                    args
                }
            }
        }
        // `--help` epilogue: yellow "EXAMPLES:" header followed by the examples.
        fn help() -> &'static str {
            Box::leak(format!("{}\n{}", forc_util::ansiterm::Colour::Yellow.paint("EXAMPLES:"), examples()).into_boxed_str())
        }
        pub fn examples() -> &'static str {
            Box::leak( [
                $(
                    $crate::paste::paste! {
                        format!(" # {}\n {}\n\n", stringify!($($description)*), $command)
                    },
                )*
            ].concat().into_boxed_str())
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-util/src/tx_utils.rs | forc-util/src/tx_utils.rs | use anyhow::Result;
use clap::Args;
use fuel_abi_types::revert_info::RevertInfo;
use fuels_core::{codec::ABIDecoder, types::param_types::ParamType};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use sway_core::{asm_generation::ProgramABI, fuel_prelude::fuel_tx};
/// Added salt used to derive the contract ID.
#[derive(Debug, Args, Default, Deserialize, Serialize)]
pub struct Salt {
    /// Added salt used to derive the contract ID.
    ///
    /// By default, this is
    /// `0x0000000000000000000000000000000000000000000000000000000000000000`.
    // `None` means "use the all-zero default salt".
    #[clap(long = "salt")]
    pub salt: Option<fuel_tx::Salt>,
}
/// Format `Log` and `LogData` receipts.
///
/// Serializes `receipts` to JSON, then replaces the raw byte arrays of
/// `LogData`/`ReturnData` payloads with their hex encoding so the output is
/// human-readable. Pretty-prints the JSON when `pretty_print` is set.
pub fn format_log_receipts(receipts: &[fuel_tx::Receipt], pretty_print: bool) -> Result<String> {
    let mut receipt_to_json_array = serde_json::to_value(receipts)?;
    for (rec_index, receipt) in receipts.iter().enumerate() {
        // The serialized JSON array is index-aligned with `receipts`.
        let rec_value = receipt_to_json_array.get_mut(rec_index).ok_or_else(|| {
            anyhow::anyhow!(
                "Serialized receipts does not contain {} th index",
                rec_index
            )
        })?;
        match receipt {
            fuel_tx::Receipt::LogData {
                data: Some(data), ..
            } => {
                if let Some(v) = rec_value.pointer_mut("/LogData/data") {
                    *v = hex::encode(data).into();
                }
            }
            fuel_tx::Receipt::ReturnData {
                data: Some(data), ..
            } => {
                if let Some(v) = rec_value.pointer_mut("/ReturnData/data") {
                    *v = hex::encode(data).into();
                }
            }
            _ => {}
        }
    }
    if pretty_print {
        Ok(serde_json::to_string_pretty(&receipt_to_json_array)?)
    } else {
        Ok(serde_json::to_string(&receipt_to_json_array)?)
    }
}
/// A `LogData` decoded into a human readable format with its type information.
pub struct DecodedLog {
    // Debug-formatted representation of the logged value.
    pub value: String,
}
/// Decodes `log_data` using the logged-type metadata in `program_abi`.
///
/// # Errors
/// Returns an error for non-Fuel-VM ABIs, or when the log id/data cannot be
/// decoded against the ABI.
pub fn decode_log_data(
    log_id: &str,
    log_data: &[u8],
    program_abi: &ProgramABI,
) -> anyhow::Result<DecodedLog> {
    match program_abi {
        ProgramABI::Fuel(program_abi) => decode_fuel_vm_log_data(log_id, log_data, program_abi),
        _ => Err(anyhow::anyhow!(
            "only Fuel VM is supported for log decoding"
        )),
    }
}
/// Decodes a Fuel VM `LogData` payload into its debug-string representation
/// using the logged-type metadata of the (unified) program ABI.
///
/// # Errors
/// Fails when `log_id` has no entry in the ABI's logged types, or when the
/// type cannot be resolved or the bytes cannot be decoded.
pub fn decode_fuel_vm_log_data(
    log_id: &str,
    log_data: &[u8],
    program_abi: &fuel_abi_types::abi::program::ProgramABI,
) -> anyhow::Result<DecodedLog> {
    let program_abi =
        fuel_abi_types::abi::unified_program::UnifiedProgramABI::from_counterpart(program_abi)?;
    // Create type lookup (id, TypeDeclaration)
    let type_lookup = program_abi
        .types
        .iter()
        .map(|decl| (decl.type_id, decl.clone()))
        .collect::<HashMap<_, _>>();
    // Map each log id to the type application it logs.
    let logged_type_lookup: HashMap<_, _> = program_abi
        .logged_types
        .iter()
        .flatten()
        .map(|logged_type| (logged_type.log_id.as_str(), logged_type.application.clone()))
        .collect();
    let type_application = logged_type_lookup
        .get(&log_id)
        .ok_or_else(|| anyhow::anyhow!("log id is missing"))?;
    let abi_decoder = ABIDecoder::default();
    let param_type = ParamType::try_from_type_application(type_application, &type_lookup)?;
    // Render the raw bytes as a human-readable debug string.
    let decoded_str = abi_decoder.decode_as_debug_str(&param_type, log_data)?;
    let decoded_log = DecodedLog { value: decoded_str };
    Ok(decoded_log)
}
/// Build [`RevertInfo`] from VM receipts and an optional program ABI.
/// This extracts the latest revert code from receipts (or a provided hint) and
/// decodes panic metadata (message/value/backtrace) using the ABI metadata if available.
pub fn revert_info_from_receipts(
    receipts: &[fuel_tx::Receipt],
    program_abi: Option<&fuel_abi_types::abi::program::ProgramABI>,
    revert_code_hint: Option<u64>,
) -> Option<RevertInfo> {
    // Prefer the most recent `Revert` receipt; fall back to the caller's hint.
    // Returns `None` when neither is available.
    let revert_code = receipts
        .iter()
        .rev()
        .find_map(|receipt| match receipt {
            fuel_tx::Receipt::Revert { ra, .. } => Some(*ra),
            _ => None,
        })
        .or(revert_code_hint)?;
    // Decoder handed to `RevertInfo`: yields the latest `LogData` receipt that
    // successfully decodes for the given log id.
    let decode_last_log_data =
        |log_id: &str, program_abi: &fuel_abi_types::abi::program::ProgramABI| {
            receipts.iter().rev().find_map(|receipt| match receipt {
                fuel_tx::Receipt::LogData {
                    data: Some(data), ..
                } => decode_fuel_vm_log_data(log_id, data, program_abi)
                    .ok()
                    .map(|decoded| decoded.value),
                _ => None,
            })
        };
    Some(RevertInfo::new(
        revert_code,
        program_abi,
        decode_last_log_data,
    ))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-util/src/bytecode.rs | forc-util/src/bytecode.rs | use anyhow::anyhow;
use sha2::{Digest, Sha256};
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;
// The index of the beginning of the half-word (4 bytes) that contains the configurables section offset.
const CONFIGURABLES_OFFSET_INSTR_LO: usize = 4;
// The index of the end of the half-word (4 bytes) that contains the configurables section offset.
const CONFIGURABLES_OFFSET_INSTR_HI: usize = 5;
// The count of the beginning half-words that contain the configurables section offset.
const CONFIGURABLES_OFFSET_PREAMBLE: usize = CONFIGURABLES_OFFSET_INSTR_HI + 1;
/// A tuple of an instruction and its corresponding bytes. Useful when needing to access the raw bytes
/// of an instruction that is parsed as [fuel_asm::InvalidOpcode], such as metadata in the preamble.
pub type InstructionWithBytes = (
Result<fuel_asm::Instruction, fuel_asm::InvalidOpcode>,
Vec<u8>,
);
/// An iterator over each [fuel_asm::Instruction] or [fuel_asm::InvalidOpcode] with its corresponding bytes.
pub struct InstructionWithBytesIterator {
buf_reader: BufReader<File>,
}
impl InstructionWithBytesIterator {
/// Return a new iterator for each instruction parsed from raw bytes.
pub fn new(buf_reader: BufReader<File>) -> Self {
InstructionWithBytesIterator { buf_reader }
}
}
impl Iterator for InstructionWithBytesIterator {
type Item = InstructionWithBytes;
fn next(&mut self) -> Option<InstructionWithBytes> {
let mut buffer = [0; fuel_asm::Instruction::SIZE];
// Read the next instruction into the buffer
match self.buf_reader.read_exact(&mut buffer) {
Ok(_) => fuel_asm::from_bytes(buffer)
.next()
.map(|inst| (inst, buffer.to_vec())),
Err(_) => None,
}
}
}
/// Parses a bytecode file into an iterator of instructions and their corresponding bytes.
pub fn parse_bytecode_to_instructions<P>(path: P) -> anyhow::Result<InstructionWithBytesIterator>
where
P: AsRef<Path> + Clone,
{
let f = File::open(path.clone())
.map_err(|_| anyhow!("{}: file not found", path.as_ref().to_string_lossy()))?;
let buf_reader = BufReader::new(f);
Ok(InstructionWithBytesIterator::new(buf_reader))
}
/// Gets the bytecode ID from a bytecode file. The bytecode ID is the hash of the bytecode after removing the
/// condigurables section, if any.
pub fn get_bytecode_id<P>(path: P) -> anyhow::Result<String>
where
P: AsRef<Path> + Clone,
{
let mut instructions = parse_bytecode_to_instructions(path.clone())?;
// Collect the first six instructions into a temporary vector
let mut first_six_instructions = Vec::with_capacity(CONFIGURABLES_OFFSET_PREAMBLE);
for _ in 0..CONFIGURABLES_OFFSET_PREAMBLE {
if let Some(instruction) = instructions.next() {
first_six_instructions.push(instruction);
} else {
return Err(anyhow!("Incomplete bytecode"));
}
}
let (lo_instr, low_raw) = &first_six_instructions[CONFIGURABLES_OFFSET_INSTR_LO];
let (hi_instr, hi_raw) = &first_six_instructions[CONFIGURABLES_OFFSET_INSTR_HI];
if let Err(fuel_asm::InvalidOpcode) = lo_instr {
if let Err(fuel_asm::InvalidOpcode) = hi_instr {
// Now assemble the configurables offset.
let configurables_offset = usize::from_be_bytes([
low_raw[0], low_raw[1], low_raw[2], low_raw[3], hi_raw[0], hi_raw[1], hi_raw[2],
hi_raw[3],
]);
// Hash the first six instructions
let mut hasher = Sha256::new();
for (_, raw) in first_six_instructions {
hasher.update(raw);
}
// Continue hashing the remaining instructions up to the configurables section offset.
instructions
.take(
configurables_offset / fuel_asm::Instruction::SIZE
- CONFIGURABLES_OFFSET_PREAMBLE,
) // Minus 6 because we already hashed the first six
.for_each(|(_, raw)| {
hasher.update(raw);
});
let hash_result = hasher.finalize();
let bytecode_id = format!("{hash_result:x}");
return Ok(bytecode_id);
}
}
Err(anyhow!("Configurables section offset not found"))
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_get_bytecode_id_happy() {
// These binary files were generated from `examples/configurable_constants` and `examples/counter`
// using `forc build` and `forc build --release` respectively.
let bytecode_id: String =
get_bytecode_id("tests/fixtures/bytecode/debug-counter.bin").expect("bytecode id");
assert_eq!(
bytecode_id,
"e65aa988cae1041b64dc2d85e496eed0e8a1d8105133bd313c17645a1859d53b".to_string()
);
let bytecode_id =
get_bytecode_id("tests/fixtures/bytecode/release-counter.bin").expect("bytecode id");
assert_eq!(
bytecode_id,
"42ae8352cbc892d7c7621f1d6fb42b072a08ba5968508d49f54991668d4ea141".to_string()
);
let bytecode_id =
get_bytecode_id("tests/fixtures/bytecode/debug-configurable_constants.bin")
.expect("bytecode id");
assert_eq!(
bytecode_id,
"babc3d9dcac8d48dee1e5aeb3340ff098d3c1ab8b0a28341d9291d8ff757199e".to_string()
);
let bytecode_id =
get_bytecode_id("tests/fixtures/bytecode/release-configurable_constants.bin")
.expect("bytecode id");
assert_eq!(
bytecode_id,
"2adfb515b66763fd29391bdba012921d045a0be83d89be5492bcaacc429695e9".to_string()
);
}
#[test]
fn test_get_bytecode_id_missing_configurable_offset() {
// This bytecode file was generated from `examples/configurable_constants` using an older version of the
// compiler that did not include the configurables section offset in the preamble.
let result = get_bytecode_id(
"tests/fixtures/bytecode/debug-configurable_constants-missing-offset.bin",
);
assert_eq!(
result.unwrap_err().to_string().as_str(),
"Configurables section offset not found"
);
}
#[test]
fn test_get_bytecode_id_bad_path() {
let result = get_bytecode_id("tests/fixtures/bytecode/blahblahblahblah.bin");
assert_eq!(
result.unwrap_err().to_string().as_str(),
"tests/fixtures/bytecode/blahblahblahblah.bin: file not found"
);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-util/src/fs_locking.rs | forc-util/src/fs_locking.rs | use crate::{hash_path, user_forc_directory};
use std::{
fs::{create_dir_all, read_dir, remove_file, File},
io::{self, Read, Write},
path::{Path, PathBuf},
};
/// Very simple AdvisoryPathMutex class
///
/// The goal of this struct is to signal other processes that a path is being used by another
/// process exclusively.
///
/// This struct will self-heal if the process that locked the file is no longer running.
pub struct PidFileLocking(PathBuf);
impl PidFileLocking {
pub fn new<X: AsRef<Path>, Y: AsRef<Path>>(
filename: X,
dir: Y,
extension: &str,
) -> PidFileLocking {
// Try to cleanup stale files, ignore any errors as this is best-effort
let _ = Self::cleanup_stale_files();
let file_name = hash_path(filename);
Self(
user_forc_directory()
.join(dir)
.join(file_name)
.with_extension(extension),
)
}
/// Create a new PidFileLocking instance that is shared between the LSP and any other process
/// that may want to update the file and needs to wait for the LSP to finish (like forc-fmt)
pub fn lsp<X: AsRef<Path>>(filename: X) -> PidFileLocking {
Self::new(filename, ".lsp-locks", "lock")
}
/// Checks if the given pid is active
#[cfg(not(target_os = "windows"))]
fn is_pid_active(pid: usize) -> bool {
// Not using sysinfo here because it has compatibility issues with fuel.nix
// https://github.com/FuelLabs/fuel.nix/issues/64
use std::process::Command;
let output = Command::new("ps")
.arg("-p")
.arg(pid.to_string())
.output()
.expect("Failed to execute ps command");
let output_str = String::from_utf8_lossy(&output.stdout);
output_str.contains(&format!("{pid} "))
}
#[cfg(target_os = "windows")]
fn is_pid_active(pid: usize) -> bool {
// Not using sysinfo here because it has compatibility issues with fuel.nix
// https://github.com/FuelLabs/fuel.nix/issues/64
use std::process::Command;
let output = Command::new("tasklist")
.arg("/FI")
.arg(format!("PID eq {}", pid))
.output()
.expect("Failed to execute tasklist command");
let output_str = String::from_utf8_lossy(&output.stdout);
// Check if the output contains the PID, indicating the process is active
output_str.contains(&format!("{}", pid))
}
/// Removes the lock file if it is not locked or the process that locked it is no longer active
pub fn release(&self) -> io::Result<()> {
if self.is_locked() {
Err(io::Error::other(format!(
"Cannot remove a dirty lock file, it is locked by another process (PID: {:#?})",
self.get_locker_pid()
)))
} else {
self.remove_file()?;
Ok(())
}
}
fn remove_file(&self) -> io::Result<()> {
match remove_file(&self.0) {
Err(e) => {
if e.kind() != std::io::ErrorKind::NotFound {
return Err(e);
}
Ok(())
}
_ => Ok(()),
}
}
/// Returns the PID of the owner of the current lock. If the PID is not longer active the lock
/// file will be removed
pub fn get_locker_pid(&self) -> Option<usize> {
let fs = File::open(&self.0);
if let Ok(mut file) = fs {
let mut contents = String::new();
file.read_to_string(&mut contents).ok();
drop(file);
if let Ok(pid) = contents.trim().parse::<usize>() {
return if Self::is_pid_active(pid) {
Some(pid)
} else {
let _ = self.remove_file();
None
};
}
}
None
}
/// Checks if the current path is owned by any other process. This will return false if there is
/// no lock file or the current process is the owner of the lock file
pub fn is_locked(&self) -> bool {
self.get_locker_pid()
.map(|pid| pid != (std::process::id() as usize))
.unwrap_or_default()
}
/// Locks the given filepath if it is not already locked
pub fn lock(&self) -> io::Result<()> {
self.release()?;
if let Some(dir) = self.0.parent() {
// Ensure the directory exists
create_dir_all(dir)?;
}
let mut fs = File::create(&self.0)?;
fs.write_all(std::process::id().to_string().as_bytes())?;
fs.sync_all()?;
fs.flush()?;
Ok(())
}
/// Cleans up all stale lock files in the .lsp-locks directory
/// Returns a vector of paths that were cleaned up
pub fn cleanup_stale_files() -> io::Result<Vec<PathBuf>> {
let lock_dir = user_forc_directory().join(".lsp-locks");
let entries = read_dir(&lock_dir)?;
let mut cleaned_paths = Vec::new();
for entry in entries {
let entry = entry?;
let path = entry.path();
if let Some(ext) = path.extension().and_then(|ext| ext.to_str()) {
if ext == "lock" {
if let Ok(mut file) = File::open(&path) {
let mut contents = String::new();
if file.read_to_string(&mut contents).is_ok() {
if let Ok(pid) = contents.trim().parse::<usize>() {
if !Self::is_pid_active(pid) {
remove_file(&path)?;
cleaned_paths.push(path);
}
} else {
remove_file(&path)?;
cleaned_paths.push(path);
}
}
}
}
}
}
Ok(cleaned_paths)
}
}
/// Checks if the specified file is marked as "dirty".
/// This is used to prevent changing files that are currently open in an editor
/// with unsaved changes.
///
/// Returns `true` if a corresponding "dirty" flag file exists, `false` otherwise.
pub fn is_file_dirty<X: AsRef<Path>>(path: X) -> bool {
PidFileLocking::lsp(path.as_ref()).is_locked()
}
#[cfg(test)]
mod test {
use super::{user_forc_directory, PidFileLocking};
use mark_flaky_tests::flaky;
use std::{
fs::{metadata, File},
io::{ErrorKind, Write},
os::unix::fs::MetadataExt,
};
#[test]
fn test_fs_locking_same_process() {
let x = PidFileLocking::lsp("test");
assert!(!x.is_locked()); // checks the non-existence of the lock (therefore it is not locked)
assert!(x.lock().is_ok());
// The current process is locking "test"
let x = PidFileLocking::lsp("test");
assert!(!x.is_locked());
}
#[test]
fn test_legacy() {
// tests against an empty file (as legacy were creating this files)
let x = PidFileLocking::lsp("legacy");
assert!(x.lock().is_ok());
// lock file exists,
assert!(metadata(&x.0).is_ok());
// simulate a stale lock file from legacy (which should be empty)
let _ = File::create(&x.0).unwrap();
assert_eq!(metadata(&x.0).unwrap().size(), 0);
let x = PidFileLocking::lsp("legacy");
assert!(!x.is_locked());
}
#[test]
fn test_remove() {
let x = PidFileLocking::lsp("lock");
assert!(x.lock().is_ok());
assert!(x.release().is_ok());
assert!(x.release().is_ok());
}
#[test]
fn test_fs_locking_stale() {
let x = PidFileLocking::lsp("stale");
assert!(x.lock().is_ok());
// lock file exists,
assert!(metadata(&x.0).is_ok());
// simulate a stale lock file
let mut x = File::create(&x.0).unwrap();
x.write_all(b"191919191919").unwrap();
x.flush().unwrap();
drop(x);
// PID=191919191919 does not exists, hopefully, and this should remove the lock file
let x = PidFileLocking::lsp("stale");
assert!(!x.is_locked());
let e = metadata(&x.0).unwrap_err().kind();
assert_eq!(e, ErrorKind::NotFound);
}
#[flaky]
#[test]
fn test_cleanup_stale_files() {
// First create some test files
let test_lock = PidFileLocking::lsp("test_cleanup");
test_lock.lock().expect("Failed to create test lock file");
// Create a test lock file with invalid PID
let lock_path = user_forc_directory()
.join(".lsp-locks")
.join("test_cleanup_invalid.lock");
// Write invalid content to ensure parsing fails
{
let mut file = File::create(&lock_path).expect("Failed to create test lock file");
file.write_all(b"not-a-pid")
.expect("Failed to write invalid content");
file.flush().expect("Failed to flush file");
}
// Verify both files exist before cleanup
assert!(
test_lock.0.exists(),
"Valid lock file should exist before cleanup"
);
assert!(
lock_path.exists(),
"Invalid lock file should exist before cleanup"
);
// Run cleanup and check returned paths
let cleaned_paths =
PidFileLocking::cleanup_stale_files().expect("Failed to cleanup stale files");
// Verify that only the invalid lock file was cleaned up
assert_eq!(cleaned_paths.len(), 1, "Expected one file to be cleaned up");
assert_eq!(
cleaned_paths[0], lock_path,
"Expected invalid file to be cleaned up"
);
// Verify file system state
assert!(test_lock.0.exists(), "Active lock file should still exist");
assert!(!lock_path.exists(), "Lock file should be removed");
// Cleanup after test
test_lock.release().expect("Failed to release test lock");
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/config.rs | sway-lsp/src/config.rs | use serde::{Deserialize, Serialize};
use tracing::metadata::LevelFilter;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct Config {
#[serde(default)]
pub client: LspClient,
#[serde(default)]
pub debug: DebugConfig,
#[serde(default)]
pub logging: LoggingConfig,
#[serde(default)]
pub inlay_hints: InlayHintsConfig,
#[serde(default)]
pub diagnostic: DiagnosticConfig,
#[serde(default)]
pub on_enter: OnEnterConfig,
#[serde(default, skip_serializing)]
trace: TraceConfig,
#[serde(default)]
pub garbage_collection: GarbageCollectionConfig,
}
#[derive(Clone, Default, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LspClient {
VsCode,
#[serde(other)]
#[default]
Other,
}
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Default)]
struct TraceConfig {}
// Options for debugging various parts of the server.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DebugConfig {
pub show_collected_tokens_as_warnings: Warnings,
}
impl Default for DebugConfig {
fn default() -> Self {
Self {
show_collected_tokens_as_warnings: Warnings::Default,
}
}
}
// Options for displaying compiler diagnostics.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DiagnosticConfig {
pub show_warnings: bool,
pub show_errors: bool,
}
impl Default for DiagnosticConfig {
fn default() -> Self {
Self {
show_warnings: true,
show_errors: true,
}
}
}
// Options for configuring garbage collection.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GarbageCollectionConfig {
pub gc_enabled: bool,
}
impl Default for GarbageCollectionConfig {
fn default() -> Self {
Self { gc_enabled: true }
}
}
// Options for configuring server logging.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct LoggingConfig {
#[serde(with = "LevelFilterDef")]
pub level: LevelFilter,
}
impl Default for LoggingConfig {
fn default() -> Self {
Self {
level: LevelFilter::OFF,
}
}
}
// This allows us to deserialize the enum that is defined in another crate.
#[derive(Deserialize, Serialize, Clone)]
#[serde(rename_all = "lowercase")]
#[serde(remote = "LevelFilter")]
#[allow(clippy::upper_case_acronyms)]
enum LevelFilterDef {
OFF,
ERROR,
WARN,
INFO,
DEBUG,
TRACE,
}
/// Instructs the client to draw squiggly lines
/// under all of the tokens that our server managed to parse.
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub enum Warnings {
Default,
Parsed,
Typed,
}
// Options for configuring inlay hints.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsConfig {
/// Whether to render leading colons for type hints, and trailing colons for parameter hints.
pub render_colons: bool,
/// Whether to show inlay type hints for variables.
pub type_hints: bool,
/// Maximum length for inlay hints. Set to null to have an unlimited length.
pub max_length: Option<usize>,
}
impl Default for InlayHintsConfig {
fn default() -> Self {
Self {
render_colons: true,
type_hints: true,
max_length: Some(25),
}
}
}
// Options for additional behavior when the user presses enter.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OnEnterConfig {
pub continue_doc_comments: Option<bool>,
pub continue_comments: Option<bool>,
}
impl Default for OnEnterConfig {
fn default() -> Self {
Self {
continue_doc_comments: Some(true),
continue_comments: Some(false),
}
}
}
impl<'de> serde::Deserialize<'de> for Warnings {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct WarningsVisitor;
impl serde::de::Visitor<'_> for WarningsVisitor {
type Value = Warnings;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "a string representing a Warnings")
}
fn visit_str<E: serde::de::Error>(self, s: &str) -> Result<Warnings, E> {
Ok(match s {
"off" => Warnings::Default,
"parsed" => Warnings::Parsed,
"typed" => Warnings::Typed,
_ => return Err(E::invalid_value(serde::de::Unexpected::Str(s), &self)),
})
}
}
deserializer.deserialize_any(WarningsVisitor)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/lib.rs | sway-lsp/src/lib.rs | #![recursion_limit = "256"]
pub mod capabilities;
pub mod config;
pub mod core;
pub mod error;
pub mod server_state;
pub mod handlers {
pub mod notification;
pub mod request;
}
pub mod lsp_ext;
pub mod server;
mod traverse;
pub mod utils;
use lsp_types::{
CodeActionProviderCapability, CodeLensOptions, CompletionOptions, ExecuteCommandOptions,
HoverProviderCapability, OneOf, RenameOptions, SemanticTokensLegend, SemanticTokensOptions,
ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, WorkDoneProgressOptions,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use server_state::ServerState;
use tower_lsp::{LspService, Server};
pub async fn start() {
let (service, socket) = LspService::build(ServerState::new)
.custom_method("sway/show_ast", ServerState::show_ast)
.custom_method("sway/visualize", ServerState::visualize)
.custom_method("sway/on_enter", ServerState::on_enter)
.custom_method("sway/metrics", ServerState::metrics)
.finish();
Server::new(tokio::io::stdin(), tokio::io::stdout(), socket)
.serve(service)
.await;
}
/// Returns the capabilities of the server to the client,
/// indicating its support for various language server protocol features.
pub fn server_capabilities() -> ServerCapabilities {
ServerCapabilities {
code_action_provider: Some(CodeActionProviderCapability::Simple(true)),
code_lens_provider: Some(CodeLensOptions {
resolve_provider: Some(false),
}),
completion_provider: Some(CompletionOptions {
trigger_characters: Some(vec![".".to_string()]),
..Default::default()
}),
definition_provider: Some(OneOf::Left(true)),
document_formatting_provider: Some(OneOf::Left(true)),
document_highlight_provider: Some(OneOf::Left(true)),
document_symbol_provider: Some(OneOf::Left(true)),
execute_command_provider: Some(ExecuteCommandOptions {
commands: vec![],
..Default::default()
}),
hover_provider: Some(HoverProviderCapability::Simple(true)),
inlay_hint_provider: Some(OneOf::Left(true)),
rename_provider: Some(OneOf::Right(RenameOptions {
prepare_provider: Some(true),
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: Some(true),
},
})),
references_provider: Some(OneOf::Left(true)),
semantic_tokens_provider: Some(
SemanticTokensOptions {
legend: SemanticTokensLegend {
token_types: capabilities::semantic_tokens::SUPPORTED_TYPES.to_vec(),
token_modifiers: capabilities::semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
},
range: Some(true),
..Default::default()
}
.into(),
),
text_document_sync: Some(TextDocumentSyncCapability::Kind(
TextDocumentSyncKind::INCREMENTAL,
)),
workspace: Some(WorkspaceServerCapabilities {
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
supported: Some(true),
change_notifications: Some(OneOf::Left(true)),
}),
..Default::default()
}),
..ServerCapabilities::default()
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/error.rs | sway-lsp/src/error.rs | use lsp_types::Range;
use swayfmt::FormatterError;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum LanguageServerError {
// Inherited errors
#[error(transparent)]
DocumentError(#[from] DocumentError),
#[error(transparent)]
DirectoryError(#[from] DirectoryError),
#[error(transparent)]
RenameError(#[from] RenameError),
// Top level errors
#[error("Failed to create build plan. {0}")]
BuildPlanFailed(anyhow::Error),
#[error("Build Plan Cache is empty")]
BuildPlanCacheIsEmpty,
#[error("Failed to compile. {0}")]
FailedToCompile(anyhow::Error),
#[error("Failed to parse document")]
FailedToParse,
#[error("Error formatting document: {0}")]
FormatError(FormatterError),
#[error("No Programs were returned from the compiler")]
ProgramsIsNone,
#[error("Member program not found in the compiler results")]
MemberProgramNotFound,
#[error("Unable to acquire a semaphore permit for parsing")]
UnableToAcquirePermit,
#[error("Client is not initialized")]
ClientNotInitialized,
#[error("Client request error: {0}")]
ClientRequestError(String),
#[error("Global workspace not initialized")]
GlobalWorkspaceNotInitialized,
#[error("SyncWorkspace already initialized")]
SyncWorkspaceAlreadyInitialized,
}
#[derive(Debug, Error, PartialEq, Eq)]
pub enum DocumentError {
#[error("No document found at {:?}", path)]
DocumentNotFound { path: String },
#[error("Workspace manifest not found. {:?}", err)]
WorkspaceManifestNotFound { err: String },
#[error("Missing Forc.toml in {:?}", dir)]
ManifestFileNotFound { dir: String },
#[error("Cannot get member manifest files for the manifest at {:?}", dir)]
MemberManifestsFailed { dir: String },
#[error("Cannot get lock file path for the manifest at {:?}", dir)]
ManifestsLockPathFailed { dir: String },
#[error("Document is already stored at {:?}", path)]
DocumentAlreadyStored { path: String },
#[error("File wasn't able to be created at path {:?} : {:?}", path, err)]
UnableToCreateFile { path: String, err: String },
#[error("Unable to write string to file at {:?} : {:?}", path, err)]
UnableToWriteFile { path: String, err: String },
#[error("File wasn't able to be removed at path {:?} : {:?}", path, err)]
UnableToRemoveFile { path: String, err: String },
#[error("Permission denied for path {:?}", path)]
PermissionDenied { path: String },
#[error("IO error for path {:?} : {:?}", path, error)]
IOError { path: String, error: String },
#[error("Invalid range {:?}", range)]
InvalidRange { range: Range },
}
#[derive(Debug, Error, PartialEq, Eq)]
pub enum DirectoryError {
#[error("Can't find temporary directory")]
TempDirNotFound,
#[error("Can't find manifest directory")]
ManifestDirNotFound,
#[error("Can't find temporary member directory")]
TempMemberDirNotFound,
#[error("Can't extract project name from {:?}", dir)]
CantExtractProjectName { dir: String },
#[error("Failed to create hidden .lsp_locks directory: {0}")]
LspLocksDirFailed(String),
#[error("Failed to create temp directory")]
TempDirFailed,
#[error("Failed to canonicalize path")]
CanonicalizeFailed,
#[error("Failed to copy workspace contents to temp directory")]
CopyContentsFailed,
#[error("Failed to create build plan. {0}")]
StripPrefixError(std::path::StripPrefixError),
#[error("Unable to create Url from path {:?}", path)]
UrlFromPathFailed { path: String },
#[error("Unable to create Url from span {:?}", span)]
UrlFromSpanFailed { span: String },
#[error("Unable to create path from Url {:?}", url)]
PathFromUrlFailed { url: String },
#[error("Unable to create span from path {:?}", path)]
SpanFromPathFailed { path: String },
#[error("No program ID found for path {:?}", path)]
ProgramIdNotFound { path: String },
}
#[derive(Debug, Error, PartialEq, Eq)]
pub enum RenameError {
#[error("No token was found in the token map at that position")]
TokenNotFound,
#[error("Token is not part of the user's workspace")]
TokenNotPartOfWorkspace,
#[error("Keywords and intrinsics are unable to be renamed")]
SymbolKindNotAllowed,
#[error("Invalid name {:?}: not an identifier", name)]
InvalidName { name: String },
#[error("Identifiers cannot begin with a double underscore, as that naming convention is reserved for compiler intrinsics.")]
InvalidDoubleUnderscore,
#[error("The file {:?}: already exists", path)]
FileAlreadyExists { path: String },
#[error("The module {:?}: cannot be renamed", path)]
UnableToRenameModule { path: String },
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/server.rs | sway-lsp/src/server.rs | //! This module implements the [LanguageServer] trait for [ServerState].
//! It provides an interface between the LSP protocol and the sway-lsp internals.
use crate::{
handlers::{notification, request},
lsp_ext::{OnEnterParams, ShowAstParams, VisualizeParams},
server_state::ServerState,
};
use lsp_types::{
CodeActionParams, CodeActionResponse, CodeLens, CodeLensParams, CompletionParams,
CompletionResponse, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams,
DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams,
DocumentSymbolResponse, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams,
InitializeParams, InitializeResult, InitializedParams, InlayHint, InlayHintParams, Location,
PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokensParams,
SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult,
TextDocumentIdentifier, TextDocumentPositionParams, TextEdit, WorkspaceEdit,
};
use sway_utils::PerformanceData;
use tower_lsp::{jsonrpc::Result, LanguageServer};
#[tower_lsp::async_trait]
impl LanguageServer for ServerState {
async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
request::handle_initialize(self, ¶ms)
}
async fn initialized(&self, _: InitializedParams) {
// Register a file system watcher for Forc.toml files with the client.
if let Err(err) = self.register_forc_toml_watcher().await {
tracing::error!("Failed to register Forc.toml file watcher: {}", err);
}
tracing::info!("Sway Language Server Initialized");
}
async fn shutdown(&self) -> Result<()> {
self.shutdown_server()
}
async fn did_open(&self, params: DidOpenTextDocumentParams) {
if let Err(err) = notification::handle_did_open_text_document(self, params).await {
tracing::error!("{}", err.to_string());
}
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
if let Err(err) = self
.pid_locked_files
.remove_dirty_flag(¶ms.text_document.uri)
{
tracing::error!("{}", err.to_string());
}
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
if let Err(err) = notification::handle_did_change_text_document(self, params).await {
tracing::error!("{}", err.to_string());
}
}
async fn did_save(&self, params: DidSaveTextDocumentParams) {
if let Err(err) = notification::handle_did_save_text_document(self, params).await {
tracing::error!("{}", err.to_string());
}
}
async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
if let Err(err) = notification::handle_did_change_watched_files(self, params) {
tracing::error!("{}", err.to_string());
}
}
async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
request::handle_hover(self, params)
}
async fn code_action(&self, params: CodeActionParams) -> Result<Option<CodeActionResponse>> {
request::handle_code_action(self, params).await
}
async fn code_lens(&self, params: CodeLensParams) -> Result<Option<Vec<CodeLens>>> {
request::handle_code_lens(self, params).await
}
async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
request::handle_completion(self, params)
}
async fn document_symbol(
&self,
params: DocumentSymbolParams,
) -> Result<Option<DocumentSymbolResponse>> {
request::handle_document_symbol(self, params).await
}
async fn semantic_tokens_full(
&self,
params: SemanticTokensParams,
) -> Result<Option<SemanticTokensResult>> {
request::handle_semantic_tokens_full(self, params).await
}
async fn semantic_tokens_range(
&self,
params: SemanticTokensRangeParams,
) -> Result<Option<SemanticTokensRangeResult>> {
request::handle_semantic_tokens_range(self, params).await
}
async fn document_highlight(
&self,
params: DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> {
request::handle_document_highlight(self, params).await
}
async fn goto_definition(
&self,
params: GotoDefinitionParams,
) -> Result<Option<GotoDefinitionResponse>> {
request::handle_goto_definition(self, params)
}
async fn formatting(&self, params: DocumentFormattingParams) -> Result<Option<Vec<TextEdit>>> {
request::handle_formatting(self, params).await
}
async fn rename(&self, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
request::handle_rename(self, params)
}
async fn prepare_rename(
&self,
params: TextDocumentPositionParams,
) -> Result<Option<PrepareRenameResponse>> {
request::handle_prepare_rename(self, params)
}
async fn inlay_hint(&self, params: InlayHintParams) -> Result<Option<Vec<InlayHint>>> {
request::handle_inlay_hints(self, params).await
}
async fn references(&self, params: ReferenceParams) -> Result<Option<Vec<Location>>> {
request::handle_references(self, params).await
}
}
// Custom LSP-Server Methods
impl ServerState {
pub async fn show_ast(&self, params: ShowAstParams) -> Result<Option<TextDocumentIdentifier>> {
request::handle_show_ast(self, ¶ms)
}
pub async fn on_enter(&self, params: OnEnterParams) -> Result<Option<WorkspaceEdit>> {
request::handle_on_enter(self, ¶ms)
}
pub async fn visualize(&self, params: VisualizeParams) -> Result<Option<String>> {
request::handle_visualize(self, ¶ms)
}
pub async fn metrics(&self) -> Result<Option<Vec<(String, PerformanceData)>>> {
request::metrics(self)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/lsp_ext.rs | sway-lsp/src/lsp_ext.rs | //! sway-lsp extensions to the LSP.
use lsp_types::{TextDocumentContentChangeEvent, TextDocumentIdentifier, Url};
use serde::{Deserialize, Serialize};
/// Parameters for the custom `show_ast` request.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ShowAstParams {
    /// The document to render the AST for.
    pub text_document: TextDocumentIdentifier,
    /// Which AST variant to render; interpreted by the request handler.
    pub ast_kind: String,
    /// Destination the rendered AST should be written to.
    pub save_path: Url,
}
/// Parameters for the custom `on_enter` request, sent when the user presses
/// Enter in the editor.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct OnEnterParams {
    /// The document being edited.
    pub text_document: TextDocumentIdentifier,
    /// The actual content changes, including the newline.
    pub content_changes: Vec<TextDocumentContentChangeEvent>,
}
/// Parameters for the custom `visualize` request.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VisualizeParams {
    /// The document to visualize.
    pub text_document: TextDocumentIdentifier,
    /// Which graph to produce; interpreted by the request handler.
    pub graph_kind: String,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/server_state.rs | sway-lsp/src/server_state.rs | //! The context or environment in which the language server functions.
use crate::{
capabilities::runnable::Runnable,
config::{Config, GarbageCollectionConfig, Warnings},
core::{
document::{Documents, PidLockedFiles},
session::{self, program_id_from_path, Session},
sync::SyncWorkspace,
token_map::TokenMap,
},
error::{DirectoryError, DocumentError, LanguageServerError},
utils::{debug, keyword_docs::KeywordDocs},
};
use crossbeam_channel::{Receiver, Sender};
use dashmap::{
mapref::{multiple::RefMulti, one::Ref},
DashMap,
};
use forc_pkg::{
manifest::{GenericManifestFile, ManifestFile},
PackageManifestFile,
};
use lsp_types::{
Diagnostic, DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern,
Registration, Url, WatchKind,
};
use parking_lot::{Mutex, RwLock};
use std::{
collections::{BTreeMap, VecDeque},
mem,
ops::Deref,
path::PathBuf,
process::Command,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
use sway_core::{language::Programs, Engines, LspConfig};
use sway_types::ProgramId;
use tokio::sync::Notify;
use tower_lsp::{jsonrpc, Client};
/// Maximum number of `Session`s kept alive at once in the LRU cache
/// (see `LruSessionCache`).
const DEFAULT_SESSION_CACHE_CAPACITY: usize = 4;

/// Concurrent map from a file path to the runnables discovered for it.
pub type RunnableMap = DashMap<PathBuf, Vec<Box<dyn Runnable>>>;
/// `ServerState` is the primary mutable state of the language server
pub struct ServerState {
    /// Handle used to talk back to the LSP client; `None` when constructed
    /// via `Default` (e.g. in tests).
    pub(crate) client: Option<Client>,
    /// User/editor configuration.
    pub config: Arc<RwLock<Config>>,
    /// One [`SyncWorkspace`] per workspace root, keyed by that root path.
    pub sync_workspaces: DashMap<PathBuf, Arc<SyncWorkspace>>,
    /// Tokens collected from compiled files.
    pub token_map: Arc<TokenMap>,
    /// Compiler engines shared across compilations; swapped wholesale after a
    /// successful compile (see `spawn_compilation_thread`).
    pub engines: Arc<RwLock<Engines>>,
    /// Results of recent compilations, keyed by program id.
    pub compiled_programs: Arc<CompiledPrograms>,
    /// Runnables discovered per file path.
    pub runnables: Arc<RunnableMap>,
    /// Documentation shown for language keywords.
    pub(crate) keyword_docs: Arc<KeywordDocs>,
    /// A Least Recently Used (LRU) cache of [Session]s, each representing a project opened in the user's workspace.
    /// This cache limits memory usage by maintaining a fixed number of active sessions, automatically
    /// evicting the least recently used sessions when the capacity is reached.
    pub sessions: LruSessionCache,
    /// Store of tracked documents.
    pub documents: Documents,
    // Compilation thread related fields
    /// When set, asks an in-flight compilation to abort early.
    pub(crate) retrigger_compilation: Arc<AtomicBool>,
    /// True while the dedicated compilation thread is processing a request.
    pub is_compiling: Arc<AtomicBool>,
    /// Sends [`TaskMessage`]s to the compilation thread.
    pub(crate) cb_tx: Sender<TaskMessage>,
    /// Receiving end of the compilation channel (shared with the worker thread).
    pub(crate) cb_rx: Arc<Receiver<TaskMessage>>,
    /// Notified when the compilation thread finishes and no work is pending.
    pub(crate) finished_compilation: Arc<Notify>,
    // NOTE(review): managed by `PidLockedFiles`; exact locking semantics live
    // in `core::document`.
    pub(crate) pid_locked_files: PidLockedFiles,
    /// Caches the manifest directory found for a given URI so it is not
    /// re-discovered on every request.
    manifest_cache: DashMap<Url, Arc<PathBuf>>,
    /// Outcome of the most recent compilation; read by `wait_for_parsing`.
    last_compilation_state: Arc<RwLock<LastCompilationState>>,
}
impl Default for ServerState {
    /// Builds a client-less server state and starts the dedicated
    /// compilation worker thread before returning.
    fn default() -> Self {
        // Single-slot channel: at most one compilation request is queued at a time.
        let (cb_tx, cb_rx) = crossbeam_channel::bounded(1);
        let server = Self {
            client: None,
            config: Arc::new(RwLock::new(Config::default())),
            sync_workspaces: DashMap::new(),
            token_map: Arc::new(TokenMap::new()),
            engines: Arc::new(RwLock::new(Engines::default())),
            compiled_programs: Arc::new(CompiledPrograms(DashMap::new())),
            runnables: Arc::new(RunnableMap::new()),
            keyword_docs: Arc::new(KeywordDocs::new()),
            sessions: LruSessionCache::new(DEFAULT_SESSION_CACHE_CAPACITY),
            documents: Documents::new(),
            retrigger_compilation: Arc::new(AtomicBool::new(false)),
            is_compiling: Arc::new(AtomicBool::new(false)),
            cb_tx,
            cb_rx: Arc::new(cb_rx),
            finished_compilation: Arc::new(Notify::new()),
            pid_locked_files: PidLockedFiles::new(),
            manifest_cache: DashMap::new(),
            last_compilation_state: Arc::new(RwLock::new(LastCompilationState::Uninitialized)),
        };
        // Spawn the worker that services `cb_rx`; it outlives this constructor.
        server.spawn_compilation_thread();
        server
    }
}
/// `LastCompilationState` represents the state of the last compilation process.
/// It is primarily used for debugging purposes.
#[derive(Debug, PartialEq)]
enum LastCompilationState {
    /// The last compilation finished and produced a program.
    Success,
    /// The last compilation errored or produced no program.
    Failed,
    /// No compilation has completed yet since server start.
    Uninitialized,
}
/// `TaskMessage` represents the set of messages or commands that can be sent to and processed by a worker thread in the compilation environment.
#[derive(Debug)]
pub enum TaskMessage {
    /// Everything the worker needs to run a single compilation.
    CompilationContext(CompilationContext),
    // A signal to the receiving thread to gracefully terminate its operation.
    Terminate,
}
/// `CompilationContext` encapsulates all the necessary details required by the compilation thread to execute a compilation process.
/// It acts as a container for shared resources and state information relevant to a specific compilation task.
#[derive(Debug)]
pub struct CompilationContext {
    /// The session of the project being compiled.
    pub session: Arc<Session>,
    /// The sync workspace the compiled file belongs to.
    pub sync: Arc<SyncWorkspace>,
    /// Shared token map updated as part of compilation.
    pub token_map: Arc<TokenMap>,
    /// Shared engines; a clone is compiled against and swapped back on success.
    pub engines: Arc<RwLock<Engines>>,
    /// Destination for the compilation results.
    pub compiled_programs: Arc<CompiledPrograms>,
    /// Destination for discovered runnables.
    pub runnables: Arc<RunnableMap>,
    /// The document that triggered this compilation.
    pub uri: Url,
    /// LSP document version of `uri`, if known.
    pub version: Option<i32>,
    /// Whether to compile with the optimized (reduced) build mode.
    pub optimized_build: bool,
    /// Garbage-collection settings applied before compiling.
    pub gc_options: GarbageCollectionConfig,
    /// Editor-side version per file; `Some` marks a modified file.
    pub file_versions: BTreeMap<PathBuf, Option<u64>>,
}
impl ServerState {
    /// Creates a server state bound to `client`; all other fields take their
    /// `Default` values (which also spawns the compilation thread).
    pub fn new(client: Client) -> ServerState {
        ServerState {
            client: Some(client),
            ..Default::default()
        }
    }

    /// Registers a file system watcher for Forc.toml files with the client.
    pub async fn register_forc_toml_watcher(&self) -> Result<(), LanguageServerError> {
        let client = self
            .client
            .as_ref()
            .ok_or(LanguageServerError::ClientNotInitialized)?;
        // Watch creation and modification of any Forc.toml under the workspace.
        let watchers = vec![FileSystemWatcher {
            glob_pattern: GlobPattern::String("**/Forc.toml".to_string()),
            kind: Some(WatchKind::Create | WatchKind::Change),
        }];
        let registration_options = DidChangeWatchedFilesRegistrationOptions { watchers };
        let registration = Registration {
            id: "forc-toml-watcher".to_string(),
            method: "workspace/didChangeWatchedFiles".to_string(),
            register_options: Some(
                serde_json::to_value(registration_options)
                    .expect("Failed to serialize registration options"),
            ),
        };
        client
            .register_capability(vec![registration])
            .await
            .map_err(|err| LanguageServerError::ClientRequestError(err.to_string()))?;
        Ok(())
    }

    /// Spawns a new thread dedicated to handling compilation tasks. This thread listens for
    /// `TaskMessage` instances sent over a channel and processes them accordingly.
    ///
    /// This approach allows for asynchronous compilation tasks to be handled in parallel to
    /// the main application flow, improving efficiency and responsiveness.
    pub fn spawn_compilation_thread(&self) {
        let is_compiling = self.is_compiling.clone();
        let retrigger_compilation = self.retrigger_compilation.clone();
        let finished_compilation = self.finished_compilation.clone();
        let rx = self.cb_rx.clone();
        let last_compilation_state = self.last_compilation_state.clone();
        std::thread::spawn(move || {
            // Loop until the channel closes or a Terminate message arrives.
            while let Ok(msg) = rx.recv() {
                match msg {
                    TaskMessage::CompilationContext(ctx) => {
                        let uri = &ctx.uri;
                        let path = uri.to_file_path().unwrap();
                        // Compile against a clone of the engines; the shared
                        // engines are only replaced after a successful compile.
                        let mut engines_clone = ctx.engines.read().clone();
                        let lsp_mode = Some(LspConfig {
                            optimized_build: ctx.optimized_build,
                            file_versions: ctx.file_versions.clone(),
                        });
                        let (needs_reprocessing, _) =
                            needs_reprocessing(&ctx.token_map, &path, lsp_mode.as_ref());
                        // Perform garbage collection if enabled and if the file has been modified to manage memory usage.
                        if ctx.gc_options.gc_enabled && needs_reprocessing {
                            // Call this on the engines clone so we don't clear types that are still in use
                            // and might be needed in the case cancel compilation was triggered.
                            if let Err(err) =
                                session::garbage_collect_module(&mut engines_clone, uri)
                            {
                                tracing::error!(
                                    "Unable to perform garbage collection: {}",
                                    err.to_string()
                                );
                            }
                        }
                        // Set the is_compiling flag to true so that the wait_for_parsing function knows that we are compiling
                        is_compiling.store(true, Ordering::SeqCst);
                        match session::parse_project(
                            uri,
                            &engines_clone,
                            Some(retrigger_compilation.clone()),
                            &ctx,
                            lsp_mode.as_ref(),
                        ) {
                            Ok(()) => {
                                // Use the uri to get the metrics for the program
                                match ctx.compiled_programs.program_from_uri(uri, &engines_clone) {
                                    Some(program) => {
                                        // It's very important to check if the workspace AST was reused to determine if we need to overwrite the engines.
                                        // Because the engines_clone has garbage collection applied. If the workspace AST was reused, we need to keep the old engines
                                        // as the engines_clone might have cleared some types that are still in use.
                                        if program.value().metrics.reused_programs == 0 {
                                            // Commit local changes in the programs, module, and function caches to the shared state.
                                            // This ensures that any modifications made during compilation are preserved
                                            // before we swap the engines.
                                            engines_clone.qe().commit();
                                            // The compiler did not reuse the workspace AST.
                                            // We need to overwrite the old engines with the engines clone.
                                            mem::swap(
                                                &mut *ctx.engines.write(),
                                                &mut engines_clone,
                                            );
                                        }
                                        *last_compilation_state.write() =
                                            LastCompilationState::Success;
                                    }
                                    None => {
                                        *last_compilation_state.write() =
                                            LastCompilationState::Failed;
                                    }
                                }
                            }
                            Err(err) => {
                                tracing::error!("{}", err.to_string());
                                *last_compilation_state.write() = LastCompilationState::Failed;
                            }
                        }
                        // Reset the flags to false
                        is_compiling.store(false, Ordering::SeqCst);
                        retrigger_compilation.store(false, Ordering::SeqCst);
                        // Make sure there isn't any pending compilation work
                        if rx.is_empty() {
                            // finished compilation, notify waiters
                            finished_compilation.notify_waiters();
                        }
                    }
                    TaskMessage::Terminate => {
                        // If we receive a terminate message, we need to exit the thread
                        return;
                    }
                }
            }
        });
    }

    /// Spawns a new thread dedicated to checking if the client process is still active,
    /// and if not, shutting down the server.
    pub fn spawn_client_heartbeat(&self, client_pid: usize) {
        tokio::spawn(async move {
            loop {
                // Not using sysinfo here because it has compatibility issues with fuel.nix
                // https://github.com/FuelLabs/fuel.nix/issues/64
                let output = Command::new("ps")
                    .arg("-p")
                    .arg(client_pid.to_string())
                    .output()
                    .expect("Failed to execute ps command");
                if String::from_utf8_lossy(&output.stdout).contains(&format!("{client_pid} ")) {
                    tracing::trace!("Client Heartbeat: still running ({client_pid})");
                } else {
                    // The client process is gone: exit so we don't linger as an orphan.
                    std::process::exit(0);
                }
                tokio::time::sleep(std::time::Duration::from_secs(60)).await;
            }
        });
    }

    /// Waits asynchronously for the `is_compiling` flag to become false.
    ///
    /// This function checks the state of `is_compiling`, and if it's true,
    /// it awaits on a notification. Once notified, it checks again, repeating
    /// this process until `is_compiling` becomes false.
    pub async fn wait_for_parsing(&self) {
        loop {
            // Check both the is_compiling flag and the last_compilation_state.
            // Wait if is_compiling is true or if the last_compilation_state is Uninitialized.
            if !self.is_compiling.load(Ordering::SeqCst)
                && *self.last_compilation_state.read() != LastCompilationState::Uninitialized
            {
                // compilation is finished, lets check if there are pending compilation requests.
                if self.cb_rx.is_empty() {
                    // no pending compilation work, safe to break.
                    break;
                }
            }
            // We are still compiling, lets wait to be notified.
            self.finished_compilation.notified().await;
        }
    }

    /// Shuts the server down: drains pending work, cancels any in-flight
    /// compilation, terminates the worker thread, and deletes temp dirs.
    pub fn shutdown_server(&self) -> jsonrpc::Result<()> {
        let _p = tracing::trace_span!("shutdown_server").entered();
        tracing::info!("Shutting Down the Sway Language Server");
        // Drain pending compilation requests
        while self.cb_rx.try_recv().is_ok() {}
        // Set the retrigger_compilation flag to true so that the compilation exits early
        self.retrigger_compilation.store(true, Ordering::SeqCst);
        // Send a terminate message to the compilation thread
        self.cb_tx
            .send(TaskMessage::Terminate)
            .expect("failed to send terminate message");
        // Delete all temporary directories.
        for entry in self.sync_workspaces.iter() {
            entry.value().remove_temp_dir();
        }
        Ok(())
    }

    /// Computes diagnostics for `uri` and pushes them to the client,
    /// attributed to `workspace_uri`.
    pub(crate) async fn publish_diagnostics(
        &self,
        uri: Url,
        workspace_uri: Url,
        session: Arc<Session>,
    ) {
        let diagnostics = self.diagnostics(&uri, session.clone());
        // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec
        // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics.
        if let Some(client) = self.client.as_ref() {
            client
                .publish_diagnostics(workspace_uri.clone(), diagnostics, None)
                .await;
        }
    }

    /// Collects the diagnostics to publish for `uri`, honoring the configured
    /// warning/error filters and the token-debugging modes.
    fn diagnostics(&self, uri: &Url, session: Arc<Session>) -> Vec<Diagnostic> {
        let mut diagnostics_to_publish = vec![];
        let config = &self.config.read();
        let tokens = self.token_map.tokens_for_file(uri);
        match config.debug.show_collected_tokens_as_warnings {
            // If collected_tokens_as_warnings is Parsed or Typed,
            // take over the normal error and warning display behavior
            // and instead show the either the parsed or typed tokens as warnings.
            // This is useful for debugging the lsp parser.
            Warnings::Parsed => {
                diagnostics_to_publish = debug::generate_warnings_for_parsed_tokens(tokens);
            }
            Warnings::Typed => {
                diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens);
            }
            Warnings::Default => {
                if let Some(diagnostics) =
                    session.diagnostics.read().get(&PathBuf::from(uri.path()))
                {
                    if config.diagnostic.show_warnings {
                        diagnostics_to_publish.extend(diagnostics.warnings.clone());
                    }
                    if config.diagnostic.show_errors {
                        diagnostics_to_publish.extend(diagnostics.errors.clone());
                    }
                }
            }
        }
        diagnostics_to_publish
    }

    /// Constructs and returns a tuple of `(Arc<SyncWorkspace>, Url)` from a given workspace URI.
    /// The returned URL represents the temp directory workspace.
    pub fn sync_and_uri_from_workspace(
        &self,
        workspace_uri: &Url,
    ) -> Result<(Arc<SyncWorkspace>, Url), LanguageServerError> {
        let sync = self.get_sync_workspace_for_uri(workspace_uri)?;
        let uri = sync.workspace_to_temp_url(workspace_uri)?;
        Ok((sync, uri))
    }

    /// Constructs and returns a tuple of `(Arc<SyncWorkspace>, Url, Arc<Session>)` from a given workspace URI.
    /// The returned URL represents the temp directory workspace.
    pub fn sync_uri_and_session_from_workspace(
        &self,
        workspace_uri: &Url,
    ) -> Result<(Arc<SyncWorkspace>, Url, Arc<Session>), LanguageServerError> {
        let sync = self.get_sync_workspace_for_uri(workspace_uri)?;
        let uri = sync.workspace_to_temp_url(workspace_uri)?;
        let session = self.url_to_session(workspace_uri)?;
        Ok((sync, uri, session))
    }

    /// Constructs and returns a tuple of `(Url, Arc<Session>)` from a given workspace URI.
    /// The returned URL represents the temp directory workspace.
    pub fn uri_and_session_from_workspace(
        &self,
        workspace_uri: &Url,
    ) -> Result<(Url, Arc<Session>), LanguageServerError> {
        let temp_uri = self.uri_from_workspace(workspace_uri)?;
        let session = self.url_to_session(workspace_uri)?;
        Ok((temp_uri, session))
    }

    /// Constructs and returns the temp directory URL from a given workspace URI.
    pub fn uri_from_workspace(&self, workspace_uri: &Url) -> Result<Url, LanguageServerError> {
        let sync = self.get_sync_workspace_for_uri(workspace_uri)?;
        sync.workspace_to_temp_url(workspace_uri)
            .map_err(LanguageServerError::from)
    }

    /// Returns the cached [`Session`] for the manifest directory that contains
    /// `uri`, creating and caching a new session if none exists yet.
    fn url_to_session(&self, uri: &Url) -> Result<Arc<Session>, LanguageServerError> {
        // Try to get the manifest directory from the cache
        let manifest_dir = if let Some(cached_dir) = self.manifest_cache.get(uri) {
            cached_dir.clone()
        } else {
            // Otherwise, find the manifest directory from the uri and cache it
            let path = PathBuf::from(uri.path());
            let manifest = PackageManifestFile::from_dir(&path).map_err(|_| {
                DocumentError::ManifestFileNotFound {
                    dir: path.to_string_lossy().to_string(),
                }
            })?;
            let dir = Arc::new(
                manifest
                    .path()
                    .parent()
                    .ok_or(DirectoryError::ManifestDirNotFound)?
                    .to_path_buf(),
            );
            self.manifest_cache.insert(uri.clone(), dir.clone());
            dir
        };
        // If the session is already in the cache, return it
        if let Some(session) = self.sessions.get(&manifest_dir) {
            return Ok(session);
        }
        // If no session can be found, then we need to call init and insert a new session into the map
        let session = Arc::new(Session::new());
        self.sessions
            .insert((*manifest_dir).clone(), session.clone());
        Ok(session)
    }

    /// Determines the workspace root for the given file URI.
    fn find_workspace_root_for_uri(&self, file_uri: &Url) -> Result<PathBuf, LanguageServerError> {
        let path = PathBuf::from(file_uri.path());
        let search_dir = path.parent().unwrap_or(&path);
        // Find the initial manifest (could be package or workspace)
        let initial_manifest_file = ManifestFile::from_dir(search_dir).map_err(|_| {
            DocumentError::ManifestFileNotFound {
                dir: search_dir.to_string_lossy().into(),
            }
        })?;
        // Determine the true workspace root.
        // If the initial manifest is a package that's part of a workspace, get that workspace root.
        // Otherwise, the initial manifest's directory is the root.
        let actual_sync_root = match &initial_manifest_file {
            ManifestFile::Package(pkg_mf) => {
                // Check if this package is part of a workspace
                match pkg_mf
                    .workspace()
                    .map_err(|e| DocumentError::WorkspaceManifestNotFound { err: e.to_string() })?
                {
                    Some(ws_mf) => {
                        // It's part of a workspace, use the workspace's directory
                        tracing::trace!(
                            "Package {:?} is part of workspace {:?}. Using workspace root.",
                            pkg_mf.path(),
                            ws_mf.path()
                        );
                        ws_mf.dir().to_path_buf()
                    }
                    None => {
                        // It's a standalone package, use its directory
                        tracing::trace!(
                            "Package {:?} is standalone. Using package root.",
                            pkg_mf.path()
                        );
                        initial_manifest_file.dir().to_path_buf()
                    }
                }
            }
            ManifestFile::Workspace(ws_mf) => {
                // It's already a workspace manifest, use its directory
                tracing::trace!(
                    "Initial manifest is a workspace: {:?}. Using its root.",
                    ws_mf.path()
                );
                initial_manifest_file.dir().to_path_buf()
            }
        };
        Ok(actual_sync_root)
    }

    /// Gets the SyncWorkspace for the given URI.
    pub fn get_sync_workspace_for_uri(
        &self,
        uri: &Url,
    ) -> Result<Arc<SyncWorkspace>, LanguageServerError> {
        let workspace_root = self.find_workspace_root_for_uri(uri)?;
        self.sync_workspaces
            .get(&workspace_root)
            .map(|entry| entry.value().clone())
            .ok_or(LanguageServerError::GlobalWorkspaceNotInitialized)
    }

    /// Gets the existing SyncWorkspace for a file URI or initializes it if it doesn't exist.
    pub async fn get_or_init_sync_workspace(
        &self,
        uri: &Url,
    ) -> Result<Arc<SyncWorkspace>, LanguageServerError> {
        // First try to find the workspace root for this URI
        let workspace_root = self.find_workspace_root_for_uri(uri)?;
        let canonical_root = workspace_root.canonicalize().unwrap_or(workspace_root);
        // Check if we already have a SyncWorkspace for this root
        if let Some(sw_arc) = self.sync_workspaces.get(&canonical_root) {
            return Ok(sw_arc.value().clone());
        }
        // Otherwise, initialize a new workspace sync for this root
        let initialized_sw = self.initialize_workspace_sync(uri).await?;
        self.sync_workspaces
            .insert(canonical_root.clone(), initialized_sw.clone());
        tracing::info!(
            "SyncWorkspace successfully initialized for {:?}",
            canonical_root
        );
        Ok(initialized_sw)
    }

    /// Initializes a new SyncWorkspace by creating a temporary directory structure and syncing manifest files.
    /// This sets up the workspace environment needed for language server operations.
    pub async fn initialize_workspace_sync(
        &self,
        file_uri_triggering_init: &Url,
    ) -> Result<Arc<SyncWorkspace>, LanguageServerError> {
        let actual_sync_root = self.find_workspace_root_for_uri(file_uri_triggering_init)?;
        tracing::debug!(
            "Initializing SyncWorkspace for root: {:?}",
            actual_sync_root
        );
        let sw = Arc::new(SyncWorkspace::new());
        sw.create_temp_dir_from_workspace(&actual_sync_root)?;
        sw.clone_manifest_dir_to_temp()?;
        sw.sync_manifest()?;
        let temp_dir_for_docs = sw.temp_dir()?;
        self.documents
            .store_sway_files_from_temp(temp_dir_for_docs)
            .await?;
        Ok(sw)
    }

    /// Checks if a workspace is already initialized for the given URI
    pub fn is_workspace_initialized(&self, uri: &Url) -> bool {
        if let Ok(workspace_root) = self.find_workspace_root_for_uri(uri) {
            self.sync_workspaces.contains_key(&workspace_root)
        } else {
            false
        }
    }
}
/// Decides whether expensive work (traversal, garbage collection, …) is
/// required for `path`: it is whenever the token map has no tokens for the
/// file yet, or the LSP config reports a modified file. The modified file,
/// if any, is returned alongside the decision.
pub fn needs_reprocessing<'a>(
    token_map: &TokenMap,
    path: &'a PathBuf,
    lsp_mode: Option<&'a LspConfig>,
) -> (bool, Option<&'a PathBuf>) {
    let modified = modified_file(lsp_mode);
    let file_has_tokens = token_map
        .iter()
        .any(|entry| entry.key().path.as_ref() == Some(path));
    (!file_has_tokens || modified.is_some(), modified)
}
/// Returns the first file in the LSP config whose editor-side version is
/// known (i.e. the file was modified), or `None` if there is none.
pub fn modified_file(lsp_mode: Option<&LspConfig>) -> Option<&PathBuf> {
    let mode = lsp_mode?;
    mode.file_versions
        .iter()
        .filter(|(_, version)| version.is_some())
        .map(|(path, _)| path)
        .next()
}
/// Newtype over a concurrent map from program id to its compiled `Programs`;
/// derefs to the inner map (see the `Deref` impl below in the original file).
#[derive(Debug)]
pub struct CompiledPrograms(DashMap<ProgramId, Programs>);
impl Default for CompiledPrograms {
fn default() -> Self {
CompiledPrograms(DashMap::new())
}
}
impl CompiledPrograms {
    /// Creates an empty program cache.
    pub fn new() -> Self {
        Self::default()
    }

    /// Looks up the compiled [`Programs`] for the program that owns `uri`.
    /// Returns `None` when the URI has no file path or no program id can be
    /// resolved for that path.
    pub fn program_from_uri(
        &self,
        uri: &Url,
        engines: &Engines,
    ) -> Option<Ref<'_, ProgramId, Programs>> {
        let path = uri.to_file_path().ok()?;
        program_id_from_path(&path, engines)
            .ok()
            .and_then(|program_id| self.get(&program_id))
    }
}
impl Deref for CompiledPrograms {
    type Target = DashMap<ProgramId, Programs>;
    // Expose the inner map's full API directly on `CompiledPrograms`.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// A Least Recently Used (LRU) cache for storing and managing `Session` objects.
/// This cache helps limit memory usage by maintaining a fixed number of active sessions.
///
/// Thread-safety: the map and the usage list are each behind their own lock
/// (`DashMap` shards and a `Mutex`, respectively).
pub struct LruSessionCache {
    /// Stores the actual `Session` objects, keyed by their file paths.
    sessions: Arc<DashMap<PathBuf, Arc<Session>>>,
    /// Keeps track of the order in which sessions were accessed, with most recent at the front.
    usage_order: Arc<Mutex<VecDeque<PathBuf>>>,
    /// The maximum number of sessions that can be stored in the cache.
    capacity: usize,
}
impl LruSessionCache {
    /// Creates a new `LruSessionCache` with the specified capacity.
    pub fn new(capacity: usize) -> Self {
        LruSessionCache {
            sessions: Arc::new(DashMap::new()),
            usage_order: Arc::new(Mutex::new(VecDeque::with_capacity(capacity))),
            capacity,
        }
    }

    /// Iterates over all cached sessions in arbitrary order.
    pub fn iter(&self) -> impl Iterator<Item = RefMulti<'_, PathBuf, Arc<Session>>> {
        self.sessions.iter()
    }

    /// Retrieves a session from the cache and marks it as most recently used.
    ///
    /// Returns `None` if the path is not cached or its map shard is currently
    /// locked (`try_get` is used so this never blocks).
    pub fn get(&self, path: &PathBuf) -> Option<Arc<Session>> {
        // Clone the Arc and drop the DashMap guard *before* locking
        // `usage_order`, so we never hold both locks at once. This avoids a
        // lock-order inversion with `evict_least_used`, which takes
        // `usage_order` and then touches `sessions`.
        let session = self
            .sessions
            .try_get(path)
            .try_unwrap()
            .map(|s| s.clone())?;
        // Fix: always record the access. Previously the reorder was skipped
        // while the cache was below capacity, so the first eviction could
        // remove a recently-used session instead of the least-recently-used.
        self.move_to_front(path);
        Some(session)
    }

    /// Inserts or updates a session in the cache.
    /// If at capacity and inserting a new session, evicts the least recently used one.
    /// Existing sessions are updated in place and marked as most recently used.
    pub fn insert(&self, path: PathBuf, session: Arc<Session>) {
        if let Some(mut entry) = self.sessions.get_mut(&path) {
            // Session already exists, update it in place.
            *entry = session;
            // Release the shard lock before taking the usage-order lock
            // (same lock-ordering rule as in `get`).
            drop(entry);
            self.move_to_front(&path);
        } else {
            // New session: make room first if we are already at capacity.
            if self.sessions.len() >= self.capacity {
                self.evict_least_used();
            }
            self.sessions.insert(path.clone(), session);
            self.usage_order.lock().push_front(path);
        }
    }

    /// Moves the specified path to the front of the usage order, marking it as most recently used.
    fn move_to_front(&self, path: &PathBuf) {
        tracing::trace!("Moving path to front of usage order: {:?}", path);
        let mut order = self.usage_order.lock();
        if let Some(index) = order.iter().position(|p| p == path) {
            order.remove(index);
        }
        order.push_front(path.clone());
    }

    /// Removes the least recently used session from the cache when the capacity is reached.
    fn evict_least_used(&self) {
        // Pop under the usage-order lock, but remove from the map only after
        // the lock is released, so the two locks are never held together.
        let evicted = self.usage_order.lock().pop_back();
        if let Some(old_path) = evicted {
            tracing::trace!(
                "Cache at capacity. Evicting least used session: {:?}",
                old_path
            );
            self.sessions.remove(&old_path);
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::PathBuf;
use std::sync::Arc;
#[test]
fn test_lru_session_cache_insertion_and_retrieval() {
let cache = LruSessionCache::new(2);
let path1 = PathBuf::from("/path/1");
let path2 = PathBuf::from("/path/2");
let session1 = Arc::new(Session::new());
let session2 = Arc::new(Session::new());
cache.insert(path1.clone(), session1.clone());
cache.insert(path2.clone(), session2.clone());
assert!(Arc::ptr_eq(&cache.get(&path1).unwrap(), &session1));
assert!(Arc::ptr_eq(&cache.get(&path2).unwrap(), &session2));
}
#[test]
fn test_lru_session_cache_capacity() {
let cache = LruSessionCache::new(2);
let path1 = PathBuf::from("/path/1");
let path2 = PathBuf::from("/path/2");
let path3 = PathBuf::from("/path/3");
let session1 = Arc::new(Session::new());
let session2 = Arc::new(Session::new());
let session3 = Arc::new(Session::new());
cache.insert(path1.clone(), session1);
cache.insert(path2.clone(), session2);
cache.insert(path3.clone(), session3);
assert!(cache.get(&path1).is_none());
assert!(cache.get(&path2).is_some());
assert!(cache.get(&path3).is_some());
}
#[test]
fn test_lru_session_cache_update_order() {
let cache = LruSessionCache::new(2);
let path1 = PathBuf::from("/path/1");
let path2 = PathBuf::from("/path/2");
let path3 = PathBuf::from("/path/3");
let session1 = Arc::new(Session::new());
let session2 = Arc::new(Session::new());
let session3 = Arc::new(Session::new());
cache.insert(path1.clone(), session1.clone());
cache.insert(path2.clone(), session2.clone());
// Access path1 to move it to the front
cache.get(&path1);
// Insert path3, which should evict path2
cache.insert(path3.clone(), session3);
assert!(cache.get(&path1).is_some());
assert!(cache.get(&path2).is_none());
assert!(cache.get(&path3).is_some());
}
#[test]
fn test_lru_session_cache_overwrite() {
let cache = LruSessionCache::new(2);
let path1 = PathBuf::from("/path/1");
let session1 = Arc::new(Session::new());
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/document_symbol.rs | sway-lsp/src/capabilities/document_symbol.rs | use crate::core::{token::get_range_from_span, token_map::TokenMap};
use lsp_types::{self, DocumentSymbol, Url};
use std::path::PathBuf;
use sway_core::{
language::ty::{
TyAbiDecl, TyAstNodeContent, TyConstantDecl, TyDecl, TyEnumDecl, TyFunctionDecl,
TyFunctionParameter, TyIncludeStatement, TyProgram, TySideEffectVariant, TyStorageDecl,
TyStructDecl, TyTraitInterfaceItem, TyTraitItem, TyTraitType,
},
Engines, GenericTypeArgument,
};
use sway_types::{Span, Spanned};
/// Generates a hierarchical document symbol tree for LSP code outline/navigation.
/// Processes declarations (functions, structs, enums, etc.) into nested symbols,
/// preserving parent-child relationships like functions with their variables,
/// structs with their fields, and traits with their methods.
pub fn to_document_symbols(
    uri: &Url,
    path: &PathBuf,
    ty_program: &TyProgram,
    engines: &Engines,
    token_map: &TokenMap,
) -> Vec<DocumentSymbol> {
    let source_id = engines.se().get_source_id(path);
    // Find if there is a configurable symbol in the token map that belongs to the current file
    // We will add children symbols to this when we encounter configurable declarations below.
    let mut configurable_symbol = token_map
        .tokens_for_file(uri)
        .find(|item| item.key().name == "configurable")
        .map(|item| {
            DocumentSymbolBuilder::new()
                .name(item.key().name.clone())
                .kind(lsp_types::SymbolKind::STRUCT)
                .range(item.key().range)
                .selection_range(item.key().range)
                .children(vec![])
                .build()
        });
    // Only include nodes that originate from the file.
    let mut nodes: Vec<_> = (if ty_program.root_module.span.source_id() == Some(&source_id) {
        Some(ty_program.root_module.all_nodes.iter())
    } else {
        // The file is not the root module; search the submodules for it.
        ty_program
            .root_module
            .submodules_recursive()
            .find(|(_, submodule)| submodule.module.span.source_id() == Some(&source_id))
            .map(|(_, submodule)| submodule.module.all_nodes.iter())
    })
    .into_iter()
    .flatten()
    .filter_map(|node| {
        match &node.content {
            // `mod`/include statements become MODULE symbols.
            TyAstNodeContent::SideEffect(side_effect) => {
                if let TySideEffectVariant::IncludeStatement(include_statement) =
                    &side_effect.side_effect
                {
                    Some(build_include_symbol(include_statement))
                } else {
                    None
                }
            }
            TyAstNodeContent::Declaration(decl) => match decl {
                TyDecl::TypeAliasDecl(decl) => {
                    let type_alias_decl = engines.de().get_type_alias(&decl.decl_id);
                    let span = type_alias_decl.call_path.suffix.span();
                    let range = get_range_from_span(&span);
                    // Show the aliased type as the symbol detail.
                    let detail = Some(type_alias_decl.ty.span().as_str().to_string());
                    let type_alias_symbol = DocumentSymbolBuilder::new()
                        .name(span.str().to_string())
                        .kind(lsp_types::SymbolKind::TYPE_PARAMETER)
                        .range(range)
                        .selection_range(range)
                        .detail(detail)
                        .build();
                    Some(type_alias_symbol)
                }
                TyDecl::FunctionDecl(decl) => {
                    let fn_decl = engines.de().get_function(&decl.decl_id);
                    let range = get_range_from_span(&fn_decl.name.span());
                    let detail = Some(fn_decl_detail(&fn_decl.parameters, &fn_decl.return_type));
                    // Local variables become the function symbol's children.
                    let children = collect_variables_from_func_decl(engines, &fn_decl);
                    let func_symbol = DocumentSymbolBuilder::new()
                        .name(fn_decl.name.span().str().to_string())
                        .kind(lsp_types::SymbolKind::FUNCTION)
                        .range(range)
                        .selection_range(range)
                        .detail(detail)
                        .children(children)
                        .build();
                    Some(func_symbol)
                }
                TyDecl::EnumDecl(decl) => {
                    let enum_decl = engines.de().get_enum(&decl.decl_id);
                    let span = enum_decl.call_path.suffix.span();
                    let range = get_range_from_span(&span);
                    let children = collect_enum_variants(&enum_decl);
                    let enum_symbol = DocumentSymbolBuilder::new()
                        .name(span.str().to_string())
                        .kind(lsp_types::SymbolKind::ENUM)
                        .range(range)
                        .selection_range(range)
                        .children(children)
                        .build();
                    Some(enum_symbol)
                }
                TyDecl::StructDecl(decl) => {
                    let struct_decl = engines.de().get_struct(&decl.decl_id);
                    let span = struct_decl.call_path.suffix.span();
                    let range = get_range_from_span(&span);
                    let children = collect_struct_fields(&struct_decl);
                    let struct_symbol = DocumentSymbolBuilder::new()
                        .name(span.str().to_string())
                        .kind(lsp_types::SymbolKind::STRUCT)
                        .range(range)
                        .selection_range(range)
                        .children(children)
                        .build();
                    Some(struct_symbol)
                }
                TyDecl::AbiDecl(decl) => {
                    let abi_decl = engines.de().get_abi(&decl.decl_id);
                    // Use the declaration header (first line) as the display name.
                    let decl_str = abi_decl.span().str();
                    let name = extract_header(&decl_str);
                    let range = get_range_from_span(&abi_decl.name.span());
                    let children = collect_fns_from_abi_decl(engines, &abi_decl);
                    let abi_symbol = DocumentSymbolBuilder::new()
                        .name(name)
                        .kind(lsp_types::SymbolKind::NAMESPACE)
                        .range(range)
                        .selection_range(range)
                        .children(children)
                        .build();
                    Some(abi_symbol)
                }
                TyDecl::TraitDecl(decl) => {
                    let trait_decl = engines.de().get_trait(&decl.decl_id);
                    let decl_str = trait_decl.span().str().to_string();
                    let name = extract_header(&decl_str);
                    let range = get_range_from_span(&trait_decl.name.span());
                    let children =
                        collect_interface_surface(engines, &trait_decl.interface_surface);
                    let trait_symbol = DocumentSymbolBuilder::new()
                        .name(name)
                        .kind(lsp_types::SymbolKind::INTERFACE)
                        .range(range)
                        .selection_range(range)
                        .children(children)
                        .build();
                    Some(trait_symbol)
                }
                TyDecl::TraitTypeDecl(decl) => {
                    let trait_type_decl = engines.de().get_type(&decl.decl_id);
                    Some(build_trait_symbol(&trait_type_decl))
                }
                TyDecl::ImplSelfOrTrait(decl) => {
                    let impl_trait_decl = engines.de().get_impl_self_or_trait(&decl.decl_id);
                    let decl_str = impl_trait_decl.span().str().to_string();
                    let name = extract_header(&decl_str);
                    let range = get_range_from_span(&impl_trait_decl.trait_name.suffix.span());
                    let children = collect_ty_trait_items(engines, &impl_trait_decl.items);
                    let symbol = DocumentSymbolBuilder::new()
                        .name(name)
                        .kind(lsp_types::SymbolKind::NAMESPACE)
                        .range(range)
                        .selection_range(range)
                        .children(children)
                        .build();
                    Some(symbol)
                }
                TyDecl::ConstantDecl(decl) => {
                    let const_decl = engines.de().get_constant(&decl.decl_id);
                    Some(build_constant_symbol(&const_decl))
                }
                TyDecl::StorageDecl(decl) => {
                    let storage_decl = engines.de().get_storage(&decl.decl_id);
                    let span = storage_decl.storage_keyword.span();
                    let range = get_range_from_span(&span);
                    let children = collect_fields_from_storage(&storage_decl);
                    let storage_symbol = DocumentSymbolBuilder::new()
                        .name(span.str().to_string())
                        .kind(lsp_types::SymbolKind::STRUCT)
                        .range(range)
                        .selection_range(range)
                        .children(children)
                        .build();
                    Some(storage_symbol)
                }
                TyDecl::ConfigurableDecl(decl) => {
                    let configurable_decl = engines.de().get_configurable(&decl.decl_id);
                    let span = configurable_decl.call_path.suffix.span();
                    let range = get_range_from_span(&span);
                    let symbol = DocumentSymbolBuilder::new()
                        .name(span.str().to_string())
                        .kind(lsp_types::SymbolKind::FIELD)
                        .detail(Some(
                            configurable_decl
                                .type_ascription
                                .span()
                                .as_str()
                                .to_string(),
                        ))
                        .range(range)
                        .selection_range(range)
                        .build();
                    // Add symbol to the end of configurable_symbol's children field
                    // (the `?` returns None for this node if no `configurable`
                    // block exists in the token map, dropping the symbol).
                    configurable_symbol
                        .as_mut()?
                        .children
                        .as_mut()?
                        .push(symbol);
                    None
                }
                _ => None,
            },
            _ => None,
        }
    })
    .collect();
    // Add configurable symbol to the end after all children symbols have been added
    if let Some(symbol) = configurable_symbol {
        nodes.push(symbol);
    }
    // Sort by range start position
    nodes.sort_by_key(|node| node.range.start);
    nodes
}
/// Builds a module symbol for an `include` (module) statement, using the
/// whole statement text as the symbol name.
fn build_include_symbol(include_statement: &TyIncludeStatement) -> DocumentSymbol {
    let stmt_span = include_statement.span();
    let stmt_range = get_range_from_span(&stmt_span);
    DocumentSymbolBuilder::new()
        .kind(lsp_types::SymbolKind::MODULE)
        .name(stmt_span.str().to_string())
        .range(stmt_range)
        .selection_range(stmt_range)
        .build()
}
/// Builds a constant symbol named after the constant, with its type
/// ascription text as the detail.
fn build_constant_symbol(const_decl: &TyConstantDecl) -> DocumentSymbol {
    let name_span = const_decl.call_path.suffix.span();
    let name_range = get_range_from_span(&name_span);
    let type_text = const_decl.type_ascription.span().as_str().to_string();
    DocumentSymbolBuilder::new()
        .kind(lsp_types::SymbolKind::CONSTANT)
        .name(name_span.str().to_string())
        .detail(Some(type_text))
        .range(name_range)
        .selection_range(name_range)
        .build()
}
/// Builds a symbol for an associated type declared in a trait.
fn build_trait_symbol(trait_type_decl: &TyTraitType) -> DocumentSymbol {
    let name_span = trait_type_decl.name.span();
    let name_range = get_range_from_span(&name_span);
    DocumentSymbolBuilder::new()
        .kind(lsp_types::SymbolKind::TYPE_PARAMETER)
        .name(name_span.str().to_string())
        .range(name_range)
        .selection_range(name_range)
        .build()
}
/// Builds one symbol per item (trait fn, constant, or associated type) in a
/// trait's interface surface.
fn collect_interface_surface(
    engines: &Engines,
    items: &[TyTraitInterfaceItem],
) -> Vec<DocumentSymbol> {
    let decl_engine = engines.de();
    items
        .iter()
        .map(|iface_item| match iface_item {
            TyTraitInterfaceItem::TraitFn(decl_ref) => {
                let fn_decl = decl_engine.get_trait_fn(decl_ref);
                build_function_symbol(
                    &fn_decl.name.span(),
                    &fn_decl.parameters,
                    &fn_decl.return_type,
                )
            }
            TyTraitInterfaceItem::Constant(decl_ref) => {
                build_constant_symbol(&decl_engine.get_constant(decl_ref))
            }
            TyTraitInterfaceItem::Type(decl_ref) => {
                build_trait_symbol(&decl_engine.get_type(decl_ref))
            }
        })
        .collect()
}
/// Builds function symbols (with nested local-variable children) for every
/// `Fn` item of an impl/trait item list; non-function items are ignored.
fn collect_ty_trait_items(engines: &Engines, items: &[TyTraitItem]) -> Vec<DocumentSymbol> {
    let mut symbols = Vec::new();
    for item in items {
        if let TyTraitItem::Fn(decl_ref) = item {
            let fn_decl = engines.de().get_function(decl_ref);
            let mut symbol = build_function_symbol(
                &fn_decl.name.span(),
                &fn_decl.parameters,
                &fn_decl.return_type,
            );
            // Variables declared in the body become child symbols.
            symbol.children = Some(collect_variables_from_func_decl(engines, &fn_decl));
            symbols.push(symbol);
        }
    }
    symbols
}
/// Builds a field symbol for every field of a storage declaration.
fn collect_fields_from_storage(decl: &TyStorageDecl) -> Vec<DocumentSymbol> {
    let mut fields = Vec::with_capacity(decl.fields.len());
    for field in &decl.fields {
        fields.push(build_field_symbol(&field.name.span(), &field.type_argument));
    }
    fields
}
/// Builds a field symbol named after `span`, with the field's type text as
/// the detail.
fn build_field_symbol(span: &Span, type_argument: &GenericTypeArgument) -> DocumentSymbol {
    let field_range = get_range_from_span(span);
    let type_text = type_argument.span().as_str().to_string();
    DocumentSymbolBuilder::new()
        .kind(lsp_types::SymbolKind::FIELD)
        .name(span.clone().str().to_string())
        .detail(Some(type_text))
        .range(field_range)
        .selection_range(field_range)
        .build()
}
/// Builds a function symbol whose detail text is the shortened signature
/// produced by `fn_decl_detail`.
fn build_function_symbol(
    span: &Span,
    parameters: &[TyFunctionParameter],
    return_type: &GenericTypeArgument,
) -> DocumentSymbol {
    let fn_range = get_range_from_span(span);
    let signature = fn_decl_detail(parameters, return_type);
    DocumentSymbolBuilder::new()
        .kind(lsp_types::SymbolKind::FUNCTION)
        .name(span.clone().str().to_string())
        .detail(Some(signature))
        .range(fn_range)
        .selection_range(fn_range)
        .build()
}
/// Builds a function symbol for every `TraitFn` in an ABI declaration's
/// interface surface; other item kinds are skipped.
fn collect_fns_from_abi_decl(engines: &Engines, decl: &TyAbiDecl) -> Vec<DocumentSymbol> {
    let mut symbols = Vec::new();
    for item in &decl.interface_surface {
        if let TyTraitInterfaceItem::TraitFn(decl_ref) = item {
            let trait_fn = engines.de().get_trait_fn(decl_ref);
            symbols.push(build_function_symbol(
                &trait_fn.name.span(),
                &trait_fn.parameters,
                &trait_fn.return_type,
            ));
        }
    }
    symbols
}
/// Builds a field symbol for every field of a struct declaration.
fn collect_struct_fields(decl: &TyStructDecl) -> Vec<DocumentSymbol> {
    let mut fields = Vec::with_capacity(decl.fields.len());
    for field in &decl.fields {
        fields.push(build_field_symbol(&field.name.span(), &field.type_argument));
    }
    fields
}
/// Builds an `ENUM_MEMBER` symbol for each variant of the enum declaration.
fn collect_enum_variants(decl: &TyEnumDecl) -> Vec<DocumentSymbol> {
    decl.variants
        .iter()
        .map(|variant| {
            let range = get_range_from_span(&variant.name.span());
            // Only variants with a CallPathTree carry type information to show
            // as the detail text; `map(|_| …).unwrap_or(None)` reproduces the
            // original Option-flattening exactly.
            let detail = variant
                .type_argument
                .call_path_tree
                .as_ref()
                .map(|_| Some(variant.type_argument.span().as_str().to_string()))
                .unwrap_or(None);
            DocumentSymbolBuilder::new()
                .kind(lsp_types::SymbolKind::ENUM_MEMBER)
                .name(variant.name.span().str().to_string())
                .detail(detail)
                .range(range)
                .selection_range(range)
                .build()
        })
        .collect()
}
// Collect all variables declared within the function body
fn collect_variables_from_func_decl(
    engines: &Engines,
    decl: &TyFunctionDecl,
) -> Vec<DocumentSymbol> {
    decl.body
        .contents
        .iter()
        .filter_map(|node| match &node.content {
            TyAstNodeContent::Declaration(TyDecl::VariableDecl(var_decl)) => {
                let var_range = get_range_from_span(&var_decl.name.span());
                let type_name = format!("{}", engines.help_out(var_decl.type_ascription.type_id));
                // An empty rendered type produces no detail text.
                let detail = (!type_name.is_empty()).then_some(type_name);
                Some(
                    DocumentSymbolBuilder::new()
                        .name(var_decl.name.span().str().to_string())
                        .kind(lsp_types::SymbolKind::VARIABLE)
                        .range(var_range)
                        .selection_range(var_range)
                        .detail(detail)
                        .build(),
                )
            }
            _ => None,
        })
        .collect()
}
// Generate the signature for functions
fn fn_decl_detail(parameters: &[TyFunctionParameter], return_type: &GenericTypeArgument) -> String {
    let mut params: Vec<String> = Vec::with_capacity(parameters.len());
    for p in parameters {
        params.push(format!("{}: {}", p.name, p.type_argument.span().as_str()));
    }
    // Only render a return type when the declaration carries a CallPathTree.
    let ret_suffix = if return_type.call_path_tree.is_some() {
        format!(" -> {}", return_type.span().as_str())
    } else {
        String::new()
    };
    format!("fn({}){}", params.join(", "), ret_suffix)
}
/// Extracts the header of a sway construct such as an `impl` block or `abi` declaration,
/// including any generic parameters, traits, or super traits, up to (but not including)
/// the opening `{` character, with trailing whitespace removed.
///
/// When no `{` is present, the whole input minus trailing whitespace is returned.
///
/// # Examples
///
/// ```ignore
/// let impl_example = "impl<T> Setter<T> for FooBarData<T> {\n fn set(self, new_value: T) -> Self {\n FooBarData {\n value: new_value,\n }\n }\n}";
/// assert_eq!(extract_header(impl_example), "impl<T> Setter<T> for FooBarData<T>");
///
/// let abi_example = "abi MyAbi : MySuperAbi {\n fn bar();\n}";
/// assert_eq!(extract_header(abi_example), "abi MyAbi : MySuperAbi");
/// ```
fn extract_header(s: &str) -> &str {
    let header = match s.find('{') {
        Some(brace_pos) => &s[..brace_pos],
        None => s,
    };
    header.trim_end()
}
/// Builder for creating [`DocumentSymbol`] instances with method chaining.
/// Initializes with empty name, NULL kind, and zero position ranges.
pub struct DocumentSymbolBuilder {
    /// Display name of the symbol.
    name: String,
    /// Optional extra text (e.g. a type or signature) shown next to the name.
    detail: Option<String>,
    /// LSP symbol kind (STRUCT, FUNCTION, …).
    kind: lsp_types::SymbolKind,
    /// Optional symbol tags (e.g. deprecated).
    tags: Option<Vec<lsp_types::SymbolTag>>,
    /// Full range the symbol covers in the document.
    range: lsp_types::Range,
    /// Range highlighted when the symbol is selected (typically the name).
    selection_range: lsp_types::Range,
    /// Nested child symbols, if any.
    children: Option<Vec<DocumentSymbol>>,
    /// Legacy deprecated flag carried through to the built symbol.
    deprecated: Option<bool>,
}
// `Default` simply delegates to `new()` so the builder can participate in
// `Default`-based construction patterns.
impl Default for DocumentSymbolBuilder {
    fn default() -> Self {
        Self::new()
    }
}
impl DocumentSymbolBuilder {
    /// Creates a builder with an empty name, `NULL` kind, zeroed ranges, and
    /// every optional field unset.
    pub fn new() -> Self {
        let zero = lsp_types::Position::new(0, 0);
        Self {
            name: String::new(),
            kind: lsp_types::SymbolKind::NULL,
            range: lsp_types::Range::new(zero, zero),
            selection_range: lsp_types::Range::new(zero, zero),
            detail: None,
            tags: None,
            children: None,
            deprecated: None,
        }
    }

    /// Sets the symbol's display name.
    pub fn name(mut self, name: impl Into<String>) -> Self {
        self.name = name.into();
        self
    }

    /// Sets the LSP symbol kind.
    pub fn kind(mut self, kind: lsp_types::SymbolKind) -> Self {
        self.kind = kind;
        self
    }

    /// Sets the full range the symbol covers.
    pub fn range(mut self, range: lsp_types::Range) -> Self {
        self.range = range;
        self
    }

    /// Sets the range highlighted on selection.
    pub fn selection_range(mut self, range: lsp_types::Range) -> Self {
        self.selection_range = range;
        self
    }

    /// Sets (or clears) the detail text shown next to the name.
    pub fn detail(mut self, detail: Option<String>) -> Self {
        self.detail = detail;
        self
    }

    /// Sets the symbol tags.
    pub fn tags(mut self, tags: Vec<lsp_types::SymbolTag>) -> Self {
        self.tags = Some(tags);
        self
    }

    /// Sets the nested child symbols.
    pub fn children(mut self, children: Vec<DocumentSymbol>) -> Self {
        self.children = Some(children);
        self
    }

    /// Consumes the builder and produces the final [`DocumentSymbol`].
    pub fn build(self) -> DocumentSymbol {
        // NOTE(review): the allow presumably silences the deprecation warning
        // for the `deprecated` field — confirm against the lsp_types version.
        #[allow(warnings)]
        DocumentSymbol {
            name: self.name,
            detail: self.detail,
            kind: self.kind,
            tags: self.tags,
            range: self.range,
            selection_range: self.selection_range,
            children: self.children,
            deprecated: self.deprecated,
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/highlight.rs | sway-lsp/src/capabilities/highlight.rs | use crate::core::{session, token_map::TokenMap};
use lsp_types::{DocumentHighlight, Position, Url};
use sway_core::Engines;
/// Returns a `DocumentHighlight` (with no highlight kind) for every range of
/// the token at `position`, or `None` when no token is found there.
pub fn get_highlights(
    engines: &Engines,
    token_map: &TokenMap,
    url: &Url,
    position: Position,
) -> Option<Vec<DocumentHighlight>> {
    let _p = tracing::trace_span!("get_highlights").entered();
    let ranges = session::token_ranges(engines, token_map, url, position)?;
    Some(
        ranges
            .into_iter()
            .map(|range| DocumentHighlight { range, kind: None })
            .collect(),
    )
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_lens.rs | sway-lsp/src/capabilities/code_lens.rs | use crate::server_state::RunnableMap;
use lsp_types::{CodeLens, Url};
use std::path::PathBuf;
/// Builds the code lens items for every runnable registered for the given
/// document, sorted by their starting line so lenses appear in document order.
pub fn code_lens(runnables: &RunnableMap, url: &Url) -> Vec<CodeLens> {
    let _p = tracing::trace_span!("code_lens").entered();
    let url_path = PathBuf::from(url.path());
    // Construct code lenses for runnable functions; no runnables for this
    // file yields an empty vec.
    let mut result: Vec<CodeLens> = runnables
        .get(&url_path)
        .map(|runnables| {
            runnables
                .iter()
                .map(|runnable| CodeLens {
                    range: *runnable.range(),
                    command: Some(runnable.command()),
                    data: None,
                })
                .collect()
        })
        .unwrap_or_default();
    // `sort_by_key` replaces the hand-written comparator; the stable sort
    // preserves insertion order for lenses on the same line.
    result.sort_by_key(|lens| lens.range.start.line);
    result
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/completion.rs | sway-lsp/src/capabilities/completion.rs | use crate::core::token::TokenIdent;
use lsp_types::{
CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionTextEdit, Position,
Range, TextEdit,
};
use sway_core::{
language::ty::{TyAstNodeContent, TyDecl, TyFunctionDecl, TyFunctionParameter},
Engines, Namespace, TypeId, TypeInfo,
};
use sway_types::Spanned;
/// Resolves the type of the (possibly dotted, possibly incomplete) ident
/// being completed and returns completion items for that type, or an empty
/// vec when the type cannot be resolved.
pub(crate) fn to_completion_items(
    namespace: &Namespace,
    engines: &Engines,
    ident_to_complete: &TokenIdent,
    fn_decl: &TyFunctionDecl,
    position: Position,
) -> Vec<CompletionItem> {
    match type_id_of_raw_ident(engines, namespace, &ident_to_complete.name, fn_decl) {
        Some(type_id) => completion_items_for_type_id(engines, namespace, type_id, position),
        None => Vec::new(),
    }
}
/// Gathers the given [`TypeId`] struct's fields and methods and builds completion items.
fn completion_items_for_type_id(
    engines: &Engines,
    namespace: &Namespace,
    type_id: TypeId,
    position: Position,
) -> Vec<CompletionItem> {
    let mut completion_items = vec![];
    let type_info = engines.te().get(type_id);

    // Field completions, for struct types only.
    if let TypeInfo::Struct(decl_id) = &*type_info {
        // `decl_id` is already a reference — no clone needed.
        let struct_decl = engines.de().get_struct(decl_id);
        for field in &struct_decl.fields {
            let item = CompletionItem {
                kind: Some(CompletionItemKind::FIELD),
                label: field.name.as_str().to_string(),
                label_details: Some(CompletionItemLabelDetails {
                    // `span()` already returns an owned `Span`; `.str()` consumes it.
                    description: Some(field.type_argument.span().str()),
                    detail: None,
                }),
                ..Default::default()
            };
            completion_items.push(item);
        }
    }

    // Method completions for all methods known for this type.
    for method in namespace
        .current_module()
        .get_methods_for_type(engines, type_id)
    {
        let method = method.expect_typed();
        let fn_decl = engines.de().get_function(method.id());
        let params = &fn_decl.parameters;
        // Only show methods that take `self` as the first parameter.
        if params.first().is_some_and(TyFunctionParameter::is_self) {
            let params_short = if params.is_empty() {
                "()".to_string()
            } else {
                "(…)".to_string()
            };
            // The inserted text lists the non-`self` parameter names.
            let params_edit_str = params
                .iter()
                .filter(|p| !p.is_self())
                .map(|p| p.name.as_str())
                .collect::<Vec<&str>>()
                .join(", ");
            let item = CompletionItem {
                kind: Some(CompletionItemKind::METHOD),
                label: format!("{}{}", method.name().as_str(), params_short),
                text_edit: Some(CompletionTextEdit::Edit(TextEdit {
                    range: Range {
                        start: position,
                        end: position,
                    },
                    new_text: format!("{}({})", method.name().as_str(), params_edit_str),
                })),
                label_details: Some(CompletionItemLabelDetails {
                    description: Some(fn_signature_string(engines, &fn_decl, &type_id)),
                    detail: None,
                }),
                ..Default::default()
            };
            completion_items.push(item);
        }
    }
    completion_items
}
/// Returns the [String] of the shortened function signature to display in the completion item's label details.
fn fn_signature_string(
    engines: &Engines,
    fn_decl: &TyFunctionDecl,
    parent_type_id: &TypeId,
) -> String {
    // `span()` borrows, so there is no need to clone the whole type argument
    // (the rest of this module calls `.span()` on references directly).
    let params_str = fn_decl
        .parameters
        .iter()
        .map(|p| replace_self_with_type_str(engines, p.type_argument.span().str(), parent_type_id))
        .collect::<Vec<String>>()
        .join(", ");
    format!(
        "fn({}) -> {}",
        params_str,
        replace_self_with_type_str(engines, fn_decl.return_type.span().str(), parent_type_id)
    )
}
/// Given a [String] representing a type, replaces `Self` with the display name of the type.
fn replace_self_with_type_str(
    engines: &Engines,
    type_str: String,
    parent_type_id: &TypeId,
) -> String {
    match type_str.as_str() {
        "Self" => engines.help_out(parent_type_id).to_string(),
        _ => type_str,
    }
}
/// Returns the [TypeId] of an ident that may include field accesses and may be incomplete.
/// For the first part of the ident, it looks for instantiation in the scope of the given
/// [`TyFunctionDecl`]. For example, given `a.b.c`, it will return the type ID of `c`
/// if it can resolve `a` in the given function.
fn type_id_of_raw_ident(
    engines: &Engines,
    namespace: &Namespace,
    ident_name: &str,
    fn_decl: &TyFunctionDecl,
) -> Option<TypeId> {
    // If this ident has no field accesses or chained methods, look for it in the local function scope.
    if !ident_name.contains('.') {
        return type_id_of_local_ident(ident_name, fn_decl);
    }
    // Otherwise, start with the first part of the ident and follow the subsequent types.
    let parts = ident_name.split('.').collect::<Vec<&str>>();
    let mut curr_type_id = type_id_of_local_ident(parts[0], fn_decl);
    let mut i = 1;
    // Walk each `.`-separated part, resolving either a method return type or
    // a struct field type; stop as soon as a part cannot be resolved.
    while (i < parts.len()) && curr_type_id.is_some() {
        if parts[i].ends_with(')') {
            // This part looks like a method call, e.g. `foo()`. Split at the
            // opening paren to recover the method name; `unwrap_or(0)` yields
            // an empty name when no `(` is present.
            let method_name = parts[i].split_at(parts[i].find('(').unwrap_or(0)).0;
            // Follow the matching method's declared return type.
            curr_type_id = namespace
                .current_module()
                .get_methods_for_type(engines, curr_type_id?)
                .into_iter()
                .find_map(|method| {
                    let method = method.expect_typed();
                    if method.name().clone().as_str() == method_name {
                        return Some(
                            engines
                                .de()
                                .get_function(&method.id().clone())
                                .return_type
                                .type_id,
                        );
                    }
                    None
                });
        } else if let TypeInfo::Struct(decl_id) = &*engines.te().get(curr_type_id.unwrap()) {
            // This part is a plain field access on a struct: follow the field's type.
            let struct_decl = engines.de().get_struct(&decl_id.clone());
            curr_type_id = struct_decl
                .fields
                .iter()
                .find(|field| field.name.as_str() == parts[i])
                .map(|field| field.type_argument.type_id);
        }
        i += 1;
    }
    curr_type_id
}
/// Returns the [TypeId] of an ident by looking for its instantiation within the scope of the
/// given [TyFunctionDecl]: first among the function's parameters, then among
/// variable declarations in its body.
fn type_id_of_local_ident(ident_name: &str, fn_decl: &TyFunctionDecl) -> Option<TypeId> {
    fn_decl
        .parameters
        .iter()
        .find_map(|param| {
            // Check if this ident is a function parameter.
            (param.name.as_str() == ident_name).then_some(param.type_argument.type_id)
        })
        .or_else(|| {
            // Check if there is a variable declaration for this ident.
            // Pattern-match by reference instead of cloning the AST node content.
            fn_decl.body.contents.iter().find_map(|node| {
                if let TyAstNodeContent::Declaration(TyDecl::VariableDecl(variable_decl)) =
                    &node.content
                {
                    if variable_decl.name.as_str() == ident_name {
                        return Some(variable_decl.return_type);
                    }
                }
                None
            })
        })
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/rename.rs | sway-lsp/src/capabilities/rename.rs | use crate::{
core::{
sync::SyncWorkspace,
token::{SymbolKind, Token, TokenIdent, TypedAstToken},
token_map::{TokenMap, TokenMapExt},
},
error::{LanguageServerError, RenameError},
utils::document::get_url_from_path,
};
use lsp_types::{Position, PrepareRenameResponse, TextEdit, Url, WorkspaceEdit};
use std::collections::HashMap;
use sway_core::{language::ty, Engines};
use sway_types::SourceEngine;
const RAW_IDENTIFIER: &str = "r#";
/// Renames all occurrences of the symbol at `position`, producing a
/// [`WorkspaceEdit`] that maps each affected file URL to its [`TextEdit`]s.
///
/// Fails when the new name is a reserved keyword or int-suffix literal,
/// starts with `__` (reserved for compiler intrinsics), when no token exists
/// at the cursor, or when the token is a module name (unsupported).
pub fn rename(
    engines: &Engines,
    token_map: &TokenMap,
    new_name: String,
    url: &Url,
    position: Position,
    sync: &SyncWorkspace,
) -> Result<WorkspaceEdit, LanguageServerError> {
    let _p = tracing::trace_span!("rename").entered();
    // Make sure the new name is not a keyword or a literal int type
    if sway_parse::RESERVED_KEYWORDS.contains(&new_name)
        || sway_parse::parse_int_suffix(&new_name).is_some()
    {
        return Err(LanguageServerError::RenameError(RenameError::InvalidName {
            name: new_name,
        }));
    }
    // Identifiers cannot begin with a double underscore, this is reserved for compiler intrinsics.
    if new_name.starts_with("__") {
        return Err(LanguageServerError::RenameError(
            RenameError::InvalidDoubleUnderscore,
        ));
    }
    // Get the token at the current cursor position
    let t = token_map
        .token_at_position(url, position)
        .ok_or(RenameError::TokenNotFound)?;
    let token = t.value();
    // We don't currently allow renaming of module names.
    if token.kind == SymbolKind::Module {
        return Err(LanguageServerError::RenameError(
            RenameError::UnableToRenameModule { path: new_name },
        ));
    }
    // If the token is a function, find the parent declaration
    // and collect idents for all methods of ABI Decl, Trait Decl, and Impl Trait
    let map_of_changes: HashMap<Url, Vec<TextEdit>> = (if token.kind == SymbolKind::Function {
        find_all_methods_for_decl(token_map, engines, url, position)?
    } else {
        // otherwise, just find all references of the token in the token map
        token_map
            .iter()
            .all_references_of_token(token, engines)
            .map(|item| item.key().clone())
            .collect::<Vec<TokenIdent>>()
    })
    .into_iter()
    .filter_map(|ident| {
        // `self` is never renameable.
        if ident.name == "self" {
            return None;
        }
        let mut range = ident.range;
        if ident.is_raw_ident() {
            // Make sure the start char starts at the beginning,
            // taking the r# tokens into account.
            range.start.character -= RAW_IDENTIFIER.len() as u32;
        }
        // Only emit an edit when the ident's file can be mapped back into the
        // user's workspace.
        if let Some(path) = &ident.path {
            let url = get_url_from_path(path).ok()?;
            if let Some(url) = sync.to_workspace_url(url) {
                let edit = TextEdit::new(range, new_name.clone());
                return Some((url, vec![edit]));
            };
        }
        None
    })
    // Group edits by file URL.
    .fold(HashMap::new(), |mut map, (k, mut v)| {
        map.entry(k)
            .and_modify(|existing| {
                existing.append(&mut v);
                // Sort the TextEdits by their range in reverse order so the client applies edits
                // from the end of the document to the beginning, preventing issues with offset changes.
                existing.sort_unstable_by(|a, b| b.range.start.cmp(&a.range.start));
            })
            .or_insert(v);
        map
    });
    Ok(WorkspaceEdit::new(map_of_changes))
}
/// Validates that the token at `position` can be renamed and returns the
/// range plus placeholder text for the client's rename UI.
pub fn prepare_rename(
    engines: &Engines,
    token_map: &TokenMap,
    url: &Url,
    position: Position,
    sync: &SyncWorkspace,
) -> Result<PrepareRenameResponse, LanguageServerError> {
    let entry = token_map
        .token_at_position(url, position)
        .ok_or(RenameError::TokenNotFound)?;
    let (ident, token) = entry.pair();
    // Only let through tokens that are in the user's workspace; external
    // tokens cannot be renamed.
    is_token_in_workspace(engines, token, sync)?;
    // Keywords, intrinsics, and literals are not renameable symbols.
    let disallowed_kind = matches!(
        token.kind,
        SymbolKind::Keyword
            | SymbolKind::SelfKeyword
            | SymbolKind::SelfTypeKeyword
            | SymbolKind::ProgramTypeKeyword
            | SymbolKind::BoolLiteral
            | SymbolKind::Intrinsic
    );
    if disallowed_kind {
        return Err(LanguageServerError::RenameError(
            RenameError::SymbolKindNotAllowed,
        ));
    }
    Ok(PrepareRenameResponse::RangeWithPlaceholder {
        range: ident.range,
        placeholder: formatted_name(ident),
    })
}
/// Returns the name of the identifier, prefixed with r# if the identifier is raw.
fn formatted_name(ident: &TokenIdent) -> String {
    if ident.is_raw_ident() {
        format!("{RAW_IDENTIFIER}{}", ident.name)
    } else {
        ident.name.to_string()
    }
}
/// Checks if the token's declaration lives inside the user's workspace
/// (i.e. under the sync temp directory); errors otherwise.
fn is_token_in_workspace(
    engines: &Engines,
    token: &Token,
    sync: &SyncWorkspace,
) -> Result<bool, LanguageServerError> {
    let decl_ident = token
        .declared_token_ident(engines)
        .ok_or(RenameError::TokenNotFound)?;
    let temp_path = sync.temp_dir()?;
    // Check the span of the token's definition to determine if it's in the
    // user's workspace.
    match &decl_ident.path {
        Some(path) if !path.starts_with(&temp_path) => Err(LanguageServerError::RenameError(
            RenameError::TokenNotPartOfWorkspace,
        )),
        _ => Ok(true),
    }
}
/// Returns a `Vec<Ident>` containing the identifiers of all trait functions found.
fn trait_interface_idents<'a>(
    interface_surface: &'a [ty::TyTraitInterfaceItem],
    se: &'a SourceEngine,
) -> Vec<TokenIdent> {
    let mut idents = Vec::new();
    for item in interface_surface {
        // Only function items contribute identifiers.
        if let ty::TyTraitInterfaceItem::TraitFn(fn_decl) = item {
            idents.push(TokenIdent::new(fn_decl.name(), se));
        }
    }
    idents
}
/// Returns the `Ident`s of all methods found for an `AbiDecl`, `TraitDecl`, or `ImplTrait`.
///
/// Looks up the parent declaration containing `position`, then gathers method
/// identifiers from every reference of that declaration, so renaming a
/// trait/ABI method also covers its implementations.
fn find_all_methods_for_decl<'a>(
    token_map: &'a TokenMap,
    engines: &'a Engines,
    url: &'a Url,
    position: Position,
) -> Result<Vec<TokenIdent>, LanguageServerError> {
    // Find the parent declaration
    let t = token_map
        .parent_decl_at_position(engines, url, position)
        .ok_or(RenameError::TokenNotFound)?;
    let decl_token = t.value();
    // For each reference of the parent declaration, collect the method idents
    // from its interface surface (ABI/trait) or its impl items.
    let idents = token_map
        .iter()
        .all_references_of_token(decl_token, engines)
        .filter_map(|item| {
            let token = item.value();
            token.as_typed().as_ref().and_then(|typed| match typed {
                TypedAstToken::TypedDeclaration(decl) => match decl {
                    ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
                        let abi_decl = engines.de().get_abi(decl_id);
                        Some(trait_interface_idents(
                            &abi_decl.interface_surface,
                            engines.se(),
                        ))
                    }
                    ty::TyDecl::TraitDecl(ty::TraitDecl { decl_id, .. }) => {
                        let trait_decl = engines.de().get_trait(decl_id);
                        Some(trait_interface_idents(
                            &trait_decl.interface_surface,
                            engines.se(),
                        ))
                    }
                    ty::TyDecl::ImplSelfOrTrait(ty::ImplSelfOrTrait { decl_id, .. }) => {
                        let impl_trait = engines.de().get_impl_self_or_trait(decl_id);
                        // Impl blocks list concrete functions rather than an
                        // interface surface.
                        Some(
                            impl_trait
                                .items
                                .iter()
                                .filter_map(|item| match item {
                                    ty::TyTraitItem::Fn(fn_decl) => {
                                        Some(TokenIdent::new(fn_decl.name(), engines.se()))
                                    }
                                    _ => None,
                                })
                                .collect::<Vec<TokenIdent>>(),
                        )
                    }
                    _ => None,
                },
                _ => None,
            })
        })
        .flatten()
        .collect();
    Ok(idents)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/semantic_tokens.rs | sway-lsp/src/capabilities/semantic_tokens.rs | use crate::core::{
token::{SymbolKind, Token, TokenIdent},
token_map::TokenMap,
};
use dashmap::mapref::multiple::RefMulti;
use lsp_types::{
Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
SemanticTokensRangeResult, SemanticTokensResult, Url,
};
use std::sync::atomic::{AtomicU32, Ordering};
// https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71
/// Get the semantic tokens for the entire file.
pub fn semantic_tokens_full(token_map: &TokenMap, url: &Url) -> Option<SemanticTokensResult> {
    let file_tokens: Vec<_> = token_map.tokens_for_file(url).collect();
    let sorted = sort_tokens(&file_tokens);
    Some(semantic_tokens(&sorted).into())
}
/// Get the semantic tokens within a range.
pub fn semantic_tokens_range(
    token_map: &TokenMap,
    url: &Url,
    range: &Range,
) -> Option<SemanticTokensRangeResult> {
    let _p = tracing::trace_span!("semantic_tokens_range").entered();
    // Keep only tokens whose identifier range lies entirely inside `range`.
    let in_range: Vec<_> = token_map
        .tokens_for_file(url)
        .filter(|item| {
            let token_range = item.key().range;
            range.start <= token_range.start && token_range.end <= range.end
        })
        .collect();
    let sorted = sort_tokens(&in_range);
    Some(semantic_tokens(&sorted).into())
}
pub fn semantic_tokens(tokens_sorted: &[&RefMulti<TokenIdent, Token>]) -> SemanticTokens {
static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
let id = TOKEN_RESULT_COUNTER
.fetch_add(1, Ordering::SeqCst)
.to_string();
let mut builder = SemanticTokensBuilder::new(id);
for entry in tokens_sorted {
let (ident, token) = entry.pair();
let ty = semantic_token_type(&token.kind);
if let Some(token_index) = type_index(&ty) {
// TODO - improve with modifiers
let modifier_bitset = 0;
builder.push(ident.range, token_index, modifier_bitset);
} else {
tracing::error!("Unsupported token type: {:?} for token: {:#?}", ty, token);
}
}
builder.build()
}
/// Sort tokens by their span so each token is sequential.
///
/// If this step isn't done, then the bit offsets used for the `lsp_types::SemanticToken` are incorrect.
fn sort_tokens<'a>(
    tokens: &'a [RefMulti<'a, TokenIdent, Token>],
) -> Vec<&'a RefMulti<'a, TokenIdent, Token>> {
    let mut refs: Vec<_> = tokens.iter().collect();
    // Order by (start, end) of each token's range. `sort_by_key` replaces the
    // hand-written comparator (`lsp_types::Position` implements `Ord`); the
    // stable sort preserves relative order of identical spans.
    refs.sort_by_key(|token_ref| {
        let span = token_ref.key().range;
        (span.start, span.end)
    });
    refs
}
//-------------------------------
/// Tokens are encoded relative to each other.
///
/// This is taken from rust-analyzer which is also a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
struct SemanticTokensBuilder {
    /// Result id reported back to the client in the final payload.
    id: String,
    /// Line of the previously pushed token; deltas are relative to this.
    prev_line: u32,
    /// Start character of the previously pushed token (used for same-line deltas).
    prev_char: u32,
    /// The accumulated, delta-encoded tokens.
    data: Vec<SemanticToken>,
}
impl SemanticTokensBuilder {
    /// Creates an empty builder; `id` becomes the `result_id` of the built token set.
    pub fn new(id: String) -> Self {
        SemanticTokensBuilder {
            id,
            prev_line: 0,
            prev_char: 0,
            data: Vec::default(),
        }
    }
    /// Push a new token onto the builder
    ///
    /// Tokens must be pushed in document order: `delta_line`/`delta_start`
    /// are computed relative to the previously pushed token.
    pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
        let mut push_line = range.start.line;
        let mut push_char = range.start.character;
        // The first token is encoded absolutely; subsequent tokens are
        // relative to the previous one. The character offset is only relative
        // when both tokens are on the same line (delta_line == 0).
        if !self.data.is_empty() {
            push_line -= self.prev_line;
            if push_line == 0 {
                push_char -= self.prev_char;
            }
        }
        // A token cannot be multiline
        let token_len = range.end.character - range.start.character;
        let token = SemanticToken {
            delta_line: push_line,
            delta_start: push_char,
            length: token_len,
            token_type: token_index,
            token_modifiers_bitset: modifier_bitset,
        };
        self.data.push(token);
        // Remember the absolute start position for the next delta computation.
        self.prev_line = range.start.line;
        self.prev_char = range.start.character;
    }
    /// Consumes the builder and produces the final [`SemanticTokens`] payload.
    pub fn build(self) -> SemanticTokens {
        SemanticTokens {
            result_id: Some(self.id),
            data: self.data,
        }
    }
}
/// The semantic token types this server supports.
///
/// NOTE: the index of each entry is the `token_type` value sent over the wire
/// (see `type_index`), so the order of this list is protocol-significant —
/// do not reorder it without updating the registered legend.
pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
    SemanticTokenType::STRING,
    SemanticTokenType::NUMBER,
    SemanticTokenType::NAMESPACE,
    SemanticTokenType::STRUCT,
    SemanticTokenType::CLASS,
    SemanticTokenType::INTERFACE,
    SemanticTokenType::ENUM,
    SemanticTokenType::ENUM_MEMBER,
    SemanticTokenType::TYPE_PARAMETER,
    SemanticTokenType::FUNCTION,
    SemanticTokenType::METHOD,
    SemanticTokenType::PROPERTY,
    SemanticTokenType::VARIABLE,
    SemanticTokenType::PARAMETER,
    // Custom (non-standard) token types, mirrored from rust-analyzer's set.
    SemanticTokenType::new("generic"),
    SemanticTokenType::new("boolean"),
    SemanticTokenType::new("keyword"),
    SemanticTokenType::new("builtinType"),
    SemanticTokenType::new("deriveHelper"),
    SemanticTokenType::new("selfKeyword"),
    SemanticTokenType::new("selfTypeKeyword"),
    SemanticTokenType::new("typeAlias"),
    SemanticTokenType::new("traitType"),
];
/// The semantic token modifiers this server supports; each entry's index is
/// its bit position in a token's `token_modifiers_bitset`.
pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
    // declaration of symbols
    SemanticTokenModifier::DECLARATION,
    // definition of symbols as in header files
    SemanticTokenModifier::DEFINITION,
    SemanticTokenModifier::READONLY,
    SemanticTokenModifier::STATIC,
    // for variable references where the variable is assigned to
    SemanticTokenModifier::MODIFICATION,
    SemanticTokenModifier::DOCUMENTATION,
    // for symbols that are part of stdlib
    SemanticTokenModifier::DEFAULT_LIBRARY,
];
/// Get the semantic token type from the symbol kind.
fn semantic_token_type(kind: &SymbolKind) -> SemanticTokenType {
    match kind {
        // Kinds with a standard LSP token type.
        SymbolKind::Struct => SemanticTokenType::STRUCT,
        SymbolKind::Enum => SemanticTokenType::ENUM,
        SymbolKind::Variant => SemanticTokenType::ENUM_MEMBER,
        SymbolKind::Trait => SemanticTokenType::INTERFACE,
        SymbolKind::TypeParameter => SemanticTokenType::TYPE_PARAMETER,
        SymbolKind::Module => SemanticTokenType::NAMESPACE,
        SymbolKind::Field => SemanticTokenType::PROPERTY,
        SymbolKind::ValueParam => SemanticTokenType::PARAMETER,
        SymbolKind::Variable | SymbolKind::Const => SemanticTokenType::VARIABLE,
        SymbolKind::Function | SymbolKind::Intrinsic => SemanticTokenType::FUNCTION,
        SymbolKind::StringLiteral => SemanticTokenType::STRING,
        SymbolKind::ByteLiteral | SymbolKind::NumericLiteral => SemanticTokenType::NUMBER,
        // Kinds mapped to custom token types.
        SymbolKind::BoolLiteral => SemanticTokenType::new("boolean"),
        SymbolKind::TypeAlias => SemanticTokenType::new("typeAlias"),
        SymbolKind::TraitType => SemanticTokenType::new("traitType"),
        SymbolKind::Keyword | SymbolKind::ProgramTypeKeyword => SemanticTokenType::new("keyword"),
        SymbolKind::BuiltinType => SemanticTokenType::new("builtinType"),
        SymbolKind::DeriveHelper => SemanticTokenType::new("deriveHelper"),
        SymbolKind::SelfKeyword => SemanticTokenType::new("selfKeyword"),
        SymbolKind::SelfTypeKeyword => SemanticTokenType::new("selfTypeKeyword"),
        SymbolKind::Unknown => SemanticTokenType::new("generic"),
    }
}
/// Returns the wire index of `ty` within `SUPPORTED_TYPES`, or `None` for an
/// unsupported token type.
fn type_index(ty: &SemanticTokenType) -> Option<u32> {
    SUPPORTED_TYPES
        .iter()
        .enumerate()
        .find_map(|(idx, supported)| (supported == ty).then_some(idx as u32))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/inlay_hints.rs | sway-lsp/src/capabilities/inlay_hints.rs | use crate::{
config::InlayHintsConfig,
core::{
token::{get_range_from_span, TypedAstToken},
token_map::TokenMap,
},
};
use lsp_types::{self, Range, Url};
use sway_core::{
language::ty::{TyDecl, TyExpression, TyExpressionVariant},
type_system::TypeInfo,
Engines,
};
use sway_types::{Ident, Spanned};
/// The category of inlay hint produced by this module.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InlayKind {
    /// A hint showing the inferred type of a variable declaration.
    TypeHint,
    /// A hint showing a parameter name at a function-call argument.
    Parameter,
}
/// An intermediate, LSP-agnostic inlay hint produced before conversion into
/// `lsp_types::InlayHint`.
#[derive(Debug)]
pub struct InlayHint {
    /// Document range the hint is anchored to.
    pub range: Range,
    /// Whether this is a type hint or a parameter-name hint.
    pub kind: InlayKind,
    /// The text displayed for the hint.
    pub label: String,
}
/// Generates inlay hints for the provided range.
///
/// Returns `None` when type hints are disabled in `config`; otherwise returns
/// the (possibly empty) list of hints for variable declarations whose name
/// falls inside `range`.
pub fn inlay_hints(
    engines: &Engines,
    token_map: &TokenMap,
    uri: &Url,
    range: &Range,
    config: &InlayHintsConfig,
) -> Option<Vec<lsp_types::InlayHint>> {
    let _span = tracing::trace_span!("inlay_hints").entered();
    if !config.type_hints {
        return None;
    }
    // 1. Iterate through all tokens in the file
    // 2. Filter for TypedVariableDeclaration tokens within the provided range
    // 3. For each variable declaration:
    //    a. If it's a function application, generate parameter hints
    //    b. If it doesn't have a type ascription and its type is known:
    //       - Look up the type information
    //       - Generate a type hint
    // 4. Collect all generated hints into a single vector
    let hints: Vec<lsp_types::InlayHint> = token_map
        .tokens_for_file(uri)
        .filter_map(|item| {
            let token = item.value();
            token.as_typed().as_ref().and_then(|t| match t {
                TypedAstToken::TypedDeclaration(TyDecl::VariableDecl(var_decl)) => {
                    // Only consider variables whose name lies fully within the requested range.
                    let var_range = get_range_from_span(&var_decl.name.span());
                    if var_range.start >= range.start && var_range.end <= range.end {
                        Some(var_decl.clone())
                    } else {
                        None
                    }
                }
                _ => None,
            })
        })
        .flat_map(|var| {
            let mut hints = Vec::new();
            // Function parameter hints
            if let TyExpressionVariant::FunctionApplication { arguments, .. } = &var.body.expression
            {
                hints.extend(handle_function_parameters(arguments, config));
            }
            // Variable declaration hints: only when no explicit type ascription
            // was written and the inferred type is known.
            if var.type_ascription.call_path_tree.is_none() {
                let type_info = engines.te().get(var.type_ascription.type_id);
                if !matches!(
                    *type_info,
                    TypeInfo::Unknown | TypeInfo::UnknownGeneric { .. }
                ) {
                    let range = get_range_from_span(&var.name.span());
                    let kind = InlayKind::TypeHint;
                    let label = format!("{}", engines.help_out(var.type_ascription));
                    let inlay_hint = InlayHint { range, kind, label };
                    hints.push(self::inlay_hint(config, inlay_hint));
                }
            }
            hints
        })
        .collect();
    Some(hints)
}
/// Builds parameter-name hints for the arguments of a function application,
/// recursing into nested function applications.
///
/// A hint is created only for the argument expression kinds listed in the
/// match below; all other expression kinds produce no hint.
fn handle_function_parameters(
    arguments: &[(Ident, TyExpression)],
    config: &InlayHintsConfig,
) -> Vec<lsp_types::InlayHint> {
    arguments
        .iter()
        .flat_map(|(name, exp)| {
            let mut hints = Vec::new();
            let (should_create_hint, span) = match &exp.expression {
                TyExpressionVariant::Literal(_)
                | TyExpressionVariant::ConstantExpression { .. }
                | TyExpressionVariant::Tuple { .. }
                | TyExpressionVariant::ArrayExplicit { .. }
                | TyExpressionVariant::ArrayIndex { .. }
                | TyExpressionVariant::FunctionApplication { .. }
                | TyExpressionVariant::StructFieldAccess { .. }
                | TyExpressionVariant::TupleElemAccess { .. } => (true, &exp.span),
                // Enum instantiations anchor the hint to the call path rather
                // than the whole expression.
                TyExpressionVariant::EnumInstantiation {
                    call_path_binding, ..
                } => (true, &call_path_binding.span),
                _ => (false, &exp.span),
            };
            if should_create_hint {
                let range = get_range_from_span(span);
                let kind = InlayKind::Parameter;
                let label = name.as_str().to_string();
                let inlay_hint = InlayHint { range, kind, label };
                hints.push(self::inlay_hint(config, inlay_hint));
            }
            // Handle nested function applications
            if let TyExpressionVariant::FunctionApplication {
                arguments: nested_args,
                ..
            } = &exp.expression
            {
                hints.extend(handle_function_parameters(nested_args, config));
            }
            hints
        })
        .collect::<Vec<_>>()
}
/// Converts an internal [InlayHint] into an [`lsp_types::InlayHint`],
/// applying the user's configuration: colon rendering, padding, and label
/// truncation.
fn inlay_hint(config: &InlayHintsConfig, inlay_hint: InlayHint) -> lsp_types::InlayHint {
    // Truncate over-long labels to `max_length` characters, appending "...".
    // Works on char boundaries (the previous byte-index slice `&label[..n]`
    // could panic on multi-byte labels) and compares by char count instead of
    // byte length.
    let truncate_label = |label: String| -> String {
        match config.max_length {
            Some(max_length) if label.chars().count() > max_length => {
                let visible: String = label.chars().take(max_length.saturating_sub(3)).collect();
                format!("{visible}...")
            }
            _ => label,
        }
    };
    let label = match inlay_hint.kind {
        InlayKind::TypeHint if config.render_colons => format!(": {}", inlay_hint.label),
        InlayKind::Parameter if config.render_colons => format!("{}: ", inlay_hint.label),
        _ => inlay_hint.label,
    };
    lsp_types::InlayHint {
        position: match inlay_hint.kind {
            // after annotated thing
            InlayKind::TypeHint => inlay_hint.range.end,
            InlayKind::Parameter => inlay_hint.range.start,
        },
        label: lsp_types::InlayHintLabel::String(truncate_label(label)),
        kind: match inlay_hint.kind {
            InlayKind::TypeHint => Some(lsp_types::InlayHintKind::TYPE),
            InlayKind::Parameter => Some(lsp_types::InlayHintKind::PARAMETER),
        },
        tooltip: None,
        // When colons are not rendered, pad the opposite side so the hint does
        // not visually collide with the annotated code.
        padding_left: Some(match inlay_hint.kind {
            InlayKind::TypeHint => !config.render_colons,
            InlayKind::Parameter => false,
        }),
        padding_right: Some(match inlay_hint.kind {
            InlayKind::TypeHint => false,
            InlayKind::Parameter => !config.render_colons,
        }),
        text_edits: None,
        data: None,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/on_enter.rs | sway-lsp/src/capabilities/on_enter.rs | use crate::{
config::OnEnterConfig,
core::document::{Documents, TextDocument},
lsp_ext::OnEnterParams,
};
use tower_lsp::lsp_types::{
DocumentChanges, OneOf, OptionalVersionedTextDocumentIdentifier, Position, Range,
TextDocumentEdit, TextEdit, Url, WorkspaceEdit,
};
// Patterns used when continuing comments across a newline.
const NEWLINE: &str = "\n";
const COMMENT_START: &str = "//";
const DOC_COMMENT_START: &str = "///";
/// If the change was an enter keypress or pasting multiple lines in a comment, it prefixes the line(s)
/// with the appropriate comment start pattern (// or ///).
///
/// Returns `None` when there is nothing to do: the change set is empty, no
/// newline was inserted, or the matching continuation option is disabled.
pub fn on_enter(
    config: &OnEnterConfig,
    documents: &Documents,
    temp_uri: &Url,
    params: &OnEnterParams,
) -> Option<WorkspaceEdit> {
    // Guard against an empty change set before indexing into it (the previous
    // `content_changes[0]` would panic), and only react to changes that
    // actually insert a newline.
    if !params.content_changes.first()?.text.contains(NEWLINE) {
        return None;
    }
    let mut workspace_edit = None;
    let text_document = documents
        .get_text_document(temp_uri)
        .expect("could not get text document");
    // Try doc comments (///) first; `//` would also match a `///` line.
    if config.continue_doc_comments.unwrap_or(false) {
        workspace_edit = get_comment_workspace_edit(DOC_COMMENT_START, params, &text_document);
    }
    if config.continue_comments.unwrap_or(false) && workspace_edit.is_none() {
        workspace_edit = get_comment_workspace_edit(COMMENT_START, params, &text_document);
    }
    workspace_edit
}
/// Builds a [WorkspaceEdit] that re-inserts `start_pattern` (e.g. `//` or
/// `///`) after each newline introduced by the change, keeping the original
/// line's indentation. Returns `None` when the line the change started on is
/// not a comment of the requested kind.
fn get_comment_workspace_edit(
    start_pattern: &str,
    change_params: &OnEnterParams,
    text_document: &TextDocument,
) -> Option<WorkspaceEdit> {
    let range = change_params.content_changes[0]
        .range
        .expect("change is missing range");
    let prev_line = text_document.get_line(range.start.line as usize);
    // If the previous line doesn't start with a comment, return early.
    if !prev_line.trim().starts_with(start_pattern) {
        return None;
    }
    let uri = change_params.text_document.uri.clone();
    let inserted_text = change_params.content_changes[0].text.clone();
    let indentation = &prev_line[..prev_line.find(start_pattern).unwrap_or(0)];
    // To support pasting multiple lines in a comment, the comment start pattern
    // is added after every newline in the inserted text except the last one.
    let newline_count = inserted_text.split(NEWLINE).count().saturating_sub(1);
    let edits = (0..newline_count)
        .map(|i| {
            let position =
                Position::new(range.start.line + (i as u32) + 1, indentation.len() as u32);
            OneOf::Left(TextEdit {
                new_text: format!("{start_pattern} "),
                range: Range::new(position, position),
            })
        })
        .collect();
    let document_edit = TextDocumentEdit {
        text_document: OptionalVersionedTextDocumentIdentifier {
            // Use the original uri to make updates, not the temporary one from the session.
            uri,
            version: None,
        },
        edits,
    };
    Some(WorkspaceEdit {
        document_changes: Some(DocumentChanges::Edits(vec![document_edit])),
        ..Default::default()
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{AnnotatedTextEdit, TextDocumentContentChangeEvent, TextDocumentIdentifier};
    use sway_lsp_test_utils::get_absolute_path;
    /// Asserts that `actual` is a plain (non-annotated) edit inserting
    /// `new_text` at the zero-width range located at `line`:`character`.
    fn assert_text_edit(
        actual: &OneOf<TextEdit, AnnotatedTextEdit>,
        new_text: String,
        line: u32,
        character: u32,
    ) {
        match actual {
            OneOf::Left(edit) => {
                let position = Position { line, character };
                let expected = TextEdit {
                    new_text,
                    range: Range {
                        start: position,
                        end: position,
                    },
                };
                assert_eq!(*edit, expected);
            }
            OneOf::Right(_) => panic!("expected left"),
        }
    }
    // Pressing enter inside an indented `//` comment continues the comment on
    // the next line at the same indentation.
    #[tokio::test]
    async fn get_comment_workspace_edit_double_slash_indented() {
        let path = get_absolute_path("sway-lsp/tests/fixtures/diagnostics/dead_code/src/main.sw");
        let uri = Url::from_file_path(path.clone()).unwrap();
        let text_document = TextDocument::build_from_path(path.as_str())
            .await
            .expect("failed to build document");
        let params = OnEnterParams {
            text_document: TextDocumentIdentifier { uri },
            content_changes: vec![TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 47,
                        character: 34,
                    },
                    end: Position {
                        line: 47,
                        character: 34,
                    },
                }),
                range_length: Some(0),
                text: "\n ".to_string(),
            }],
        };
        // NOTE: fixed HTML-entity mojibake here — `&para;ms` had been decoded
        // to `¶ms`, which does not compile; restored `&params`.
        let result = get_comment_workspace_edit(COMMENT_START, &params, &text_document)
            .expect("workspace edit");
        let changes = result.document_changes.expect("document changes");
        let edits = match changes {
            DocumentChanges::Edits(edits) => edits,
            DocumentChanges::Operations(_) => panic!("expected edits"),
        };
        assert_eq!(edits.len(), 1);
        assert_eq!(edits[0].edits.len(), 1);
        assert_text_edit(&edits[0].edits[0], "// ".to_string(), 48, 4);
    }
    // Pasting a multi-line snippet into a `///` doc comment prefixes every
    // newly created line with `/// `.
    #[tokio::test]
    async fn get_comment_workspace_edit_triple_slash_paste() {
        let path = get_absolute_path("sway-lsp/tests/fixtures/diagnostics/dead_code/src/main.sw");
        let uri = Url::from_file_path(path.clone()).unwrap();
        let text_document = TextDocument::build_from_path(path.as_str())
            .await
            .expect("failed to build document");
        let params = OnEnterParams {
            text_document: TextDocumentIdentifier { uri },
            content_changes: vec![TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 41,
                        character: 4,
                    },
                    end: Position {
                        line: 41,
                        character: 34,
                    },
                }),
                range_length: Some(30),
                text: "fn not_used2(input: u64) -> u64 {\n return input + 1;\n}".to_string(),
            }],
        };
        // NOTE: fixed the same `¶ms` mojibake as above; restored `&params`.
        let result = get_comment_workspace_edit(DOC_COMMENT_START, &params, &text_document)
            .expect("workspace edit");
        let changes = result.document_changes.expect("document changes");
        let edits = match changes {
            DocumentChanges::Edits(edits) => edits,
            DocumentChanges::Operations(_) => panic!("expected edits"),
        };
        assert_eq!(edits.len(), 1);
        assert_eq!(edits[0].edits.len(), 2);
        assert_text_edit(&edits[0].edits[0], "/// ".to_string(), 42, 0);
        assert_text_edit(&edits[0].edits[1], "/// ".to_string(), 43, 0);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/mod.rs | sway-lsp/src/capabilities/mod.rs | pub mod code_actions;
pub mod code_lens;
pub mod completion;
pub mod diagnostic;
pub mod document_symbol;
pub mod formatting;
pub mod highlight;
pub mod hover;
pub mod inlay_hints;
pub mod on_enter;
pub mod rename;
pub mod runnable;
pub mod semantic_tokens;
pub(crate) use code_actions::code_actions;
pub(crate) use on_enter::on_enter;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/runnable.rs | sway-lsp/src/capabilities/runnable.rs | use lsp_types::{Command, Range};
use serde_json::{json, Value};
use sway_core::language::parsed::TreeType;
/// Runnable for a program's `main` function, executed through the client's
/// `sway.runScript` command.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct RunnableMainFn {
    /// The location in the file where the runnable button should be displayed
    pub range: Range,
    /// The program kind of the current file
    pub tree_type: TreeType,
}
/// Runnable for a test function, executed through the client's
/// `sway.runTests` command.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct RunnableTestFn {
    /// The location in the file where the runnable button should be displayed
    pub range: Range,
    /// Additional arguments to use with the runnable command.
    pub test_name: Option<String>,
}
/// A runnable is a sway function that can be executed in the editor.
pub trait Runnable: core::fmt::Debug + Send + Sync + 'static {
    /// The command to execute.
    ///
    /// The default implementation assembles the LSP [Command] from the other
    /// trait methods; implementors normally only provide those.
    fn command(&self) -> Command {
        Command {
            command: self.cmd_string(),
            title: self.label_string(),
            arguments: self.arguments(),
        }
    }
    /// The command name defined in the client.
    fn cmd_string(&self) -> String;
    /// The label to display in the editor.
    fn label_string(&self) -> String;
    /// The arguments to pass to the command.
    fn arguments(&self) -> Option<Vec<Value>>;
    /// The range in the file where the runnable button should be displayed.
    fn range(&self) -> &Range;
}
impl Runnable for RunnableMainFn {
    fn cmd_string(&self) -> String {
        // Command id registered by the editor client for running scripts.
        String::from("sway.runScript")
    }
    fn label_string(&self) -> String {
        // U+FE0E selects the text (non-emoji) rendering of the triangle.
        String::from("▶\u{fe0e} Run")
    }
    fn arguments(&self) -> Option<Vec<Value>> {
        // Running `main` takes no extra arguments.
        None
    }
    fn range(&self) -> &Range {
        &self.range
    }
}
impl Runnable for RunnableTestFn {
    fn cmd_string(&self) -> String {
        String::from("sway.runTests")
    }
    fn label_string(&self) -> String {
        String::from("▶\u{fe0e} Run Test")
    }
    fn arguments(&self) -> Option<Vec<Value>> {
        // Pass the test name through so the client can run a single test.
        match &self.test_name {
            Some(test_name) => Some(vec![json!({ "name": test_name })]),
            None => None,
        }
    }
    fn range(&self) -> &Range {
        &self.range
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/formatting.rs | sway-lsp/src/capabilities/formatting.rs | use crate::{
core::document::Documents,
error::{DocumentError, LanguageServerError},
};
use lsp_types::{Position, Range, TextEdit, Url};
use sway_types::span::Source;
use swayfmt::Formatter;
/// Formats the whole document identified by `url` with a default [Formatter],
/// returning a single whole-document [TextEdit].
///
/// # Errors
/// Returns `DocumentNotFound` when the document is not tracked, or a
/// `FormatError` from the formatter (which only formats code that parses).
pub fn format_text(documents: &Documents, url: &Url) -> Result<Vec<TextEdit>, LanguageServerError> {
    let _p = tracing::trace_span!("format_text").entered();
    let document = documents.try_get(url.path()).try_unwrap().ok_or_else(|| {
        DocumentError::DocumentNotFound {
            path: url.path().to_string(),
        }
    })?;
    get_page_text_edit(document.get_text().into(), &mut <_>::default())
        .map(|page_text_edit| vec![page_text_edit])
}
/// Runs `src` through `formatter` and wraps the result in a [TextEdit] that
/// replaces the entire document.
pub fn get_page_text_edit(
    src: Source,
    formatter: &mut Formatter,
) -> Result<TextEdit, LanguageServerError> {
    // we only format if code is correct
    let formatted_code = formatter
        .format(src.clone())
        .map_err(LanguageServerError::FormatError)?;
    // Span the edit over whichever version (original or formatted) has more
    // lines, so the replacement always covers the full document.
    let original_line_count = src.text.split('\n').count();
    let formatted_line_count = formatted_code.split('\n').count();
    let line_end = original_line_count.max(formatted_line_count) as u32;
    let full_range = Range::new(Position::new(0, 0), Position::new(line_end, 0));
    Ok(TextEdit {
        range: full_range,
        new_text: formatted_code,
    })
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/diagnostic.rs | sway-lsp/src/capabilities/diagnostic.rs | use std::collections::HashMap;
use std::path::PathBuf;
use lsp_types::{Diagnostic, DiagnosticSeverity, DiagnosticTag, Position, Range};
use serde::{Deserialize, Serialize};
use sway_error::warning::{CompileInfo, CompileWarning, Info};
use sway_error::{error::CompileError, warning::Warning};
use sway_types::{LineCol, LineColRange, SourceEngine, Spanned};
/// Diagnostics grouped by the source file path they originate from.
pub(crate) type DiagnosticMap = HashMap<PathBuf, Diagnostics>;
/// The diagnostics of a single file, bucketed by severity.
#[derive(Debug, Default, Clone)]
pub struct Diagnostics {
    pub infos: Vec<Diagnostic>,
    pub warnings: Vec<Diagnostic>,
    pub errors: Vec<Diagnostic>,
}
/// Converts a compiler [CompileError] into an LSP error [Diagnostic].
fn get_error_diagnostic(error: &CompileError) -> Diagnostic {
    // Attach structured data for code actions when the error converts to
    // DiagnosticData (e.g. unknown-symbol errors); otherwise `data` is the
    // serialized `None`.
    let data = serde_json::to_value(DiagnosticData::try_from(error.clone()).ok()).ok();
    Diagnostic {
        range: get_range(error.span().line_col_one_index()),
        severity: Some(DiagnosticSeverity::ERROR),
        message: format!("{error}"),
        data,
        ..Default::default()
    }
}
/// Converts a compiler [CompileWarning] into an LSP warning [Diagnostic].
fn get_warning_diagnostic(warning: &CompileWarning) -> Diagnostic {
    Diagnostic {
        range: get_range(warning.span().line_col_one_index()),
        severity: Some(DiagnosticSeverity::WARNING),
        message: warning.to_friendly_warning_string(),
        // Dead-code style warnings get the UNNECESSARY tag.
        tags: get_warning_diagnostic_tags(&warning.warning_content),
        ..Default::default()
    }
}
/// Converts a compiler [CompileInfo] into an LSP information [Diagnostic].
fn get_info_diagnostic(info: &CompileInfo) -> Diagnostic {
    Diagnostic {
        range: get_range(info.span().line_col_one_index()),
        severity: Some(DiagnosticSeverity::INFORMATION),
        message: info.to_friendly_string(),
        tags: get_info_diagnostic_tags(&info.content),
        ..Default::default()
    }
}
pub fn get_diagnostics(
infos: &[CompileInfo],
warnings: &[CompileWarning],
errors: &[CompileError],
source_engine: &SourceEngine,
) -> DiagnosticMap {
let mut diagnostics = DiagnosticMap::new();
for info in infos {
let diagnostic = get_info_diagnostic(info);
if let Some(source_id) = info.span().source_id() {
let path = source_engine.get_path(source_id);
diagnostics.entry(path).or_default().infos.push(diagnostic);
}
}
for warning in warnings {
let diagnostic = get_warning_diagnostic(warning);
if let Some(source_id) = warning.span().source_id() {
let path = source_engine.get_path(source_id);
diagnostics
.entry(path)
.or_default()
.warnings
.push(diagnostic);
}
}
for error in errors {
let diagnostic = get_error_diagnostic(error);
if let Some(source_id) = error.span().source_id() {
let path = source_engine.get_path(source_id);
diagnostics.entry(path).or_default().errors.push(diagnostic);
}
}
diagnostics
}
/// Converts a one-indexed compiler [LineColRange] into a zero-indexed LSP
/// [Range] by shifting both coordinates down by one.
fn get_range(LineColRange { start, end }: LineColRange) -> Range {
    let to_position = |lc: LineCol| Position::new(lc.line as u32 - 1, lc.col as u32 - 1);
    Range {
        start: to_position(start),
        end: to_position(end),
    }
}
/// Tags warnings about dead or unused code as UNNECESSARY so editors can
/// render the offending source as faded out; all other warnings are untagged.
fn get_warning_diagnostic_tags(warning: &Warning) -> Option<Vec<DiagnosticTag>> {
    let is_unnecessary = matches!(
        warning,
        Warning::StructFieldNeverRead
            | Warning::DeadDeclaration
            | Warning::DeadEnumDeclaration
            | Warning::DeadEnumVariant { .. }
            | Warning::DeadFunctionDeclaration
            | Warning::DeadMethod
            | Warning::DeadStorageDeclaration
            | Warning::DeadStorageDeclarationForFunction { .. }
            | Warning::DeadStructDeclaration
            | Warning::DeadTrait
            | Warning::MatchExpressionUnreachableArm { .. }
            | Warning::UnreachableCode
            | Warning::UnusedReturnValue { .. }
    );
    is_unnecessary.then(|| vec![DiagnosticTag::UNNECESSARY])
}
/// Tags informational diagnostics; currently every [Info] variant is tagged
/// UNNECESSARY.
fn get_info_diagnostic_tags(info: &Info) -> Option<Vec<DiagnosticTag>> {
    match info {
        Info::ImplTraitsForType { .. } => Some(vec![DiagnosticTag::UNNECESSARY]),
    }
}
/// Extra data to be sent with a diagnostic and provided in CodeAction context.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct DiagnosticData {
    /// Name of the unresolved symbol, for unknown-symbol style errors.
    pub unknown_symbol_name: Option<String>,
}
impl TryFrom<CompileWarning> for DiagnosticData {
    type Error = anyhow::Error;
    /// No warning currently carries extra diagnostic data; always fails.
    fn try_from(_value: CompileWarning) -> Result<Self, Self::Error> {
        anyhow::bail!("Not implemented");
    }
}
impl TryFrom<CompileError> for DiagnosticData {
    type Error = anyhow::Error;
    /// Extracts the unresolved symbol name from the errors that carry one;
    /// any other error kind fails the conversion.
    fn try_from(value: CompileError) -> Result<Self, Self::Error> {
        match value {
            CompileError::SymbolNotFound { name, .. } => Ok(DiagnosticData {
                unknown_symbol_name: Some(name.to_string()),
            }),
            CompileError::TraitNotFound { name, .. } => Ok(DiagnosticData {
                unknown_symbol_name: Some(name),
            }),
            CompileError::UnknownVariable { var_name, .. } => Ok(DiagnosticData {
                unknown_symbol_name: Some(var_name.to_string()),
            }),
            _ => anyhow::bail!("Not implemented"),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/hover/mod.rs | sway-lsp/src/capabilities/hover/mod.rs | pub(crate) mod hover_link_contents;
use self::hover_link_contents::HoverLinkContents;
use crate::{
config::LspClient,
core::{
sync::SyncWorkspace,
token::{SymbolKind, Token, TypedAstToken},
},
server_state::ServerState,
utils::{attributes::doc_comment_attributes, markdown, markup::Markup},
};
use lsp_types::{self, Position, Url};
use std::sync::Arc;
use sway_core::Namespace;
use sway_core::{
language::{ty, Visibility},
Engines, TypeId,
};
use sway_types::{Span, Spanned};
/// Extracts the hover information for a token at the current position.
///
/// Keywords are answered from the static keyword docs; all other tokens are
/// resolved through the token map and formatted from their typed declaration.
/// Returns `None` when no token exists at `position`, the typed program for
/// `url` is unavailable, or (for keywords) no documentation entry exists.
pub fn hover_data(
    state: &ServerState,
    sync: Arc<SyncWorkspace>,
    engines: &Engines,
    url: &Url,
    position: Position,
) -> Option<lsp_types::Hover> {
    let t = state.token_map.token_at_position(url, position)?;
    let (ident, token) = t.pair();
    let range = ident.range;
    // check if our token is a keyword
    if matches!(
        token.kind,
        SymbolKind::BoolLiteral
            | SymbolKind::Keyword
            | SymbolKind::SelfKeyword
            | SymbolKind::ProgramTypeKeyword
    ) {
        let name = &ident.name;
        // A missing keyword-docs entry now yields no hover instead of
        // panicking the server (previously `.unwrap()`).
        let documentation = state.keyword_docs.get(name)?;
        let prefix = format!("\n```sway\n{name}\n```\n\n---\n\n");
        let formatted_doc = format!("{prefix}{documentation}");
        let content = Markup::new().text(&formatted_doc);
        let contents = lsp_types::HoverContents::Markup(markup_content(&content));
        return Some(lsp_types::Hover {
            contents,
            range: Some(range),
        });
    }
    let program = state.compiled_programs.program_from_uri(url, engines)?;
    let namespace = &program.value().typed.as_ref().ok()?.namespace;
    let client_config = state.config.read().client.clone();
    let contents = match &token.declared_token_ident(engines) {
        Some(decl_ident) => {
            let t = state.token_map.try_get(decl_ident).try_unwrap()?;
            let decl_token = t.value();
            hover_format(
                engines,
                decl_token,
                &decl_ident.name,
                client_config,
                &sync,
                namespace,
            )
        }
        // The `TypeInfo` of the token does not contain an `Ident`. In this case,
        // we use the `Ident` of the token itself.
        None => hover_format(
            &state.engines.read(),
            token,
            &ident.name,
            client_config,
            &sync,
            namespace,
        ),
    };
    Some(lsp_types::Hover {
        contents,
        range: Some(range),
    })
}
/// Renders a visibility modifier as it appears in source; note the trailing
/// space after `pub` so the result can be prepended directly to a declaration.
fn visibility_as_str(visibility: Visibility) -> &'static str {
    match visibility {
        Visibility::Private => "",
        Visibility::Public => "pub ",
    }
}
/// Expects a span from either a `FunctionDeclaration` or a `TypedFunctionDeclaration`.
/// Returns the trimmed text before the body's opening brace (the whole span's
/// trimmed text if no brace is present).
fn extract_fn_signature(span: &Span) -> String {
    let text = span.as_str();
    match text.split_once('{') {
        Some((signature, _)) => signature.trim().to_string(),
        None => text.trim().to_string(),
    }
}
/// Collects a token's doc-comment attributes into a single newline-separated
/// string; empty when the token carries no doc attributes.
fn format_doc_attributes(engines: &Engines, token: &Token) -> String {
    let mut doc_comment = String::new();
    doc_comment_attributes(engines, token, |attributes| {
        doc_comment = attributes.iter().fold(String::new(), |output, attribute| {
            // TODO: Change this logic once https://github.com/FuelLabs/sway/issues/6938 gets implemented.
            let comment = attribute.args.first().unwrap().name.as_str();
            format!("{output}{comment}\n")
        });
    });
    doc_comment
}
/// Formats a declaration header such as `pub struct Foo` for the hover block.
fn format_visibility_hover(visibility: Visibility, decl_name: &str, token_name: &str) -> String {
    let visibility_prefix = visibility_as_str(visibility);
    format!("{visibility_prefix}{decl_name} {token_name}")
}
/// Formats a `let` binding such as `let mut x: u64` for the hover block.
fn format_variable_hover(is_mutable: bool, type_name: &str, token_name: &str) -> String {
    if is_mutable {
        format!("let mut {token_name}: {type_name}")
    } else {
        format!("let {token_name}: {type_name}")
    }
}
/// Wraps markup text into a Markdown [`lsp_types::MarkupContent`], running it
/// through the markdown doc formatter first.
fn markup_content(markup: &Markup) -> lsp_types::MarkupContent {
    let kind = lsp_types::MarkupKind::Markdown;
    let value = markdown::format_docs(markup.as_str());
    lsp_types::MarkupContent { kind, value }
}
/// Builds the hover markup for a token: a Sway signature block (when the token
/// maps to one of the declaration kinds handled below), followed by its doc
/// comment and links for related types and implementations.
///
/// NOTE: fixed HTML-entity mojibake — `&para;m` had been decoded to `¶m`
/// in two places, which does not compile; restored `&param`.
fn hover_format(
    engines: &Engines,
    token: &Token,
    ident_name: &str,
    client_config: LspClient,
    sync: &SyncWorkspace,
    namespace: &Namespace,
) -> lsp_types::HoverContents {
    let decl_engine = engines.de();
    let doc_comment = format_doc_attributes(engines, token);
    let format_name_with_type = |name: &str, type_id: &TypeId| -> String {
        let type_name = format!("{}", engines.help_out(type_id));
        format!("{name}: {type_name}")
    };
    // Used to collect all the information we need to generate links for the hover component.
    let mut hover_link_contents = HoverLinkContents::new(engines, sync, namespace);
    let sway_block = token
        .as_typed()
        .as_ref()
        .and_then(|typed_token| match typed_token {
            TypedAstToken::TypedDeclaration(decl) => match decl {
                ty::TyDecl::VariableDecl(var_decl) => {
                    let type_name =
                        format!("{}", engines.help_out(var_decl.type_ascription.type_id));
                    hover_link_contents.add_related_types(&var_decl.type_ascription.type_id);
                    Some(format_variable_hover(
                        var_decl.mutability.is_mutable(),
                        &type_name,
                        ident_name,
                    ))
                }
                ty::TyDecl::StructDecl(ty::StructDecl { decl_id, .. }) => {
                    let struct_decl = decl_engine.get_struct(decl_id);
                    hover_link_contents.add_implementations_for_decl(decl);
                    Some(format_visibility_hover(
                        struct_decl.visibility,
                        decl.friendly_type_name(),
                        ident_name,
                    ))
                }
                ty::TyDecl::TraitDecl(ty::TraitDecl { decl_id, .. }) => {
                    let trait_decl = decl_engine.get_trait(decl_id);
                    hover_link_contents.add_implementations_for_trait(&trait_decl);
                    Some(format_visibility_hover(
                        trait_decl.visibility,
                        decl.friendly_type_name(),
                        ident_name,
                    ))
                }
                ty::TyDecl::EnumDecl(ty::EnumDecl { decl_id, .. }) => {
                    let enum_decl = decl_engine.get_enum(decl_id);
                    hover_link_contents.add_implementations_for_decl(decl);
                    Some(format_visibility_hover(
                        enum_decl.visibility,
                        decl.friendly_type_name(),
                        ident_name,
                    ))
                }
                ty::TyDecl::AbiDecl(ty::AbiDecl { .. }) => {
                    hover_link_contents.add_implementations_for_decl(decl);
                    Some(format!("{} {}", decl.friendly_type_name(), &ident_name))
                }
                _ => None,
            },
            TypedAstToken::TypedFunctionDeclaration(func) => {
                hover_link_contents.add_related_types(&func.return_type.type_id);
                Some(extract_fn_signature(&func.span()))
            }
            TypedAstToken::TypedFunctionParameter(param) => {
                hover_link_contents.add_related_types(&param.type_argument.type_id);
                Some(format_name_with_type(
                    param.name.as_str(),
                    &param.type_argument.type_id,
                ))
            }
            TypedAstToken::TypedStructField(field) => {
                hover_link_contents.add_implementations_for_type(
                    &field.type_argument.span(),
                    field.type_argument.type_id,
                );
                Some(format_name_with_type(
                    field.name.as_str(),
                    &field.type_argument.type_id,
                ))
            }
            TypedAstToken::TypedExpression(expr) => match expr.expression {
                // Literals hover as their inferred type.
                ty::TyExpressionVariant::Literal { .. } => {
                    Some(format!("{}", engines.help_out(expr.return_type)))
                }
                _ => None,
            },
            _ => None,
        });
    let content = Markup::new()
        .maybe_add_sway_block(sway_block)
        .text(&doc_comment)
        .maybe_add_links(
            engines.se(),
            &hover_link_contents.related_types,
            &hover_link_contents.implementations,
            &client_config,
        );
    lsp_types::HoverContents::Markup(markup_content(&content))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/hover/hover_link_contents.rs | sway-lsp/src/capabilities/hover/hover_link_contents.rs | use crate::{
core::{sync::SyncWorkspace, token::get_range_from_span},
utils::document::get_url_from_span,
};
use sway_core::{
engine_threading::SpannedWithEngines,
language::{
ty::{TyDecl, TyTraitDecl},
CallPath,
},
namespace::TraitMap,
Engines, Namespace, TypeId, TypeInfo,
};
use lsp_types::{Range, Url};
use sway_types::{Named, Span, Spanned};
/// A type referenced by the hovered item, with enough location data to render
/// a link in the hover.
#[derive(Debug, Clone)]
pub struct RelatedType {
    pub name: String,
    /// URL of the declaring file, mapped back from the temp compilation copy
    /// to the workspace when that conversion succeeds.
    pub uri: Url,
    pub range: Range,
    pub callpath: CallPath,
}
/// Accumulator for the link data shown in a hover: related types and the
/// spans of trait/type implementations.
#[derive(Debug, Clone)]
pub struct HoverLinkContents<'a> {
    pub related_types: Vec<RelatedType>,
    pub implementations: Vec<Span>,
    engines: &'a Engines,
    sync: &'a SyncWorkspace,
    namespace: &'a Namespace,
}
impl<'a> HoverLinkContents<'a> {
    /// Creates an empty accumulator bound to the given engines, sync
    /// workspace, and namespace.
    pub fn new(engines: &'a Engines, sync: &'a SyncWorkspace, namespace: &'a Namespace) -> Self {
        Self {
            related_types: Vec::new(),
            implementations: Vec::new(),
            engines,
            sync,
            namespace,
        }
    }
    /// Adds the given type and any related type parameters to the list of related types.
    /// Only enum and struct types produce entries; other type kinds are ignored.
    pub fn add_related_types(&mut self, type_id: &TypeId) {
        let type_info = self.engines.te().get(*type_id);
        match &*type_info {
            TypeInfo::Enum(decl_id) => {
                let decl = self.engines.de().get_enum(decl_id);
                self.add_related_type(
                    decl.name().to_string(),
                    &decl.span(),
                    decl.call_path.clone(),
                );
                // Recurse so the type arguments of a generic enum are linked too.
                decl.generic_parameters
                    .iter()
                    .filter_map(|x| x.as_type_parameter())
                    .for_each(|type_param| self.add_related_types(&type_param.type_id));
            }
            TypeInfo::Struct(decl_id) => {
                let decl = self.engines.de().get_struct(decl_id);
                self.add_related_type(
                    decl.name().to_string(),
                    &decl.span(),
                    decl.call_path.clone(),
                );
                // Recurse so the type arguments of a generic struct are linked too.
                decl.generic_parameters
                    .iter()
                    .filter_map(|x| x.as_type_parameter())
                    .for_each(|type_param| self.add_related_types(&type_param.type_id));
            }
            _ => {}
        }
    }
    /// Adds a single type to the list of related types.
    /// Prefers the workspace URL over the temp compilation copy, falling back
    /// to the original URL when the conversion fails; types whose span has no
    /// resolvable URL are skipped entirely.
    fn add_related_type(&mut self, name: String, span: &Span, callpath: CallPath) {
        if let Ok(mut uri) = get_url_from_span(self.engines.se(), span) {
            let converted_url = self.sync.temp_to_workspace_url(&uri);
            if let Ok(url) = converted_url {
                uri = url;
            }
            let range = get_range_from_span(span);
            self.related_types.push(RelatedType {
                name,
                uri,
                range,
                callpath,
            });
        };
    }
    /// Adds all implementations of the given [`TyTraitDecl`] to the list of implementations.
    pub fn add_implementations_for_trait(&mut self, trait_decl: &TyTraitDecl) {
        let call_path =
            CallPath::from(trait_decl.name.clone()).to_fullpath(self.engines, self.namespace);
        let impl_spans =
            TraitMap::get_impl_spans_for_trait_name(self.namespace.current_module(), &call_path);
        self.add_implementations(&trait_decl.span(), impl_spans);
    }
    /// Adds implementations of the given type to the list of implementations using the [`TyDecl`].
    pub fn add_implementations_for_decl(&mut self, ty_decl: &TyDecl) {
        let impl_spans = TraitMap::get_impl_spans_for_decl(
            self.namespace.current_module(),
            self.engines,
            ty_decl,
        );
        self.add_implementations(&ty_decl.span(self.engines), impl_spans);
    }
    /// Adds implementations of the given type to the list of implementations using the [`TypeId`].
    pub fn add_implementations_for_type(&mut self, decl_span: &Span, type_id: TypeId) {
        let impl_spans = TraitMap::get_impl_spans_for_type(
            self.namespace.current_module(),
            self.engines,
            &type_id,
        );
        self.add_implementations(decl_span, impl_spans);
    }
    /// Adds implementations to the list of implementation spans, with the declaration span first.
    /// Ensure that all paths are converted to workspace paths before adding them.
    fn add_implementations(&mut self, decl_span: &Span, impl_spans: Vec<Span>) {
        let mut seen = std::collections::HashSet::new();
        let mut all_spans = Vec::new();
        // Always add declaration span first
        if seen.insert(decl_span.clone()) {
            all_spans.push(decl_span.clone());
        }
        // Add implementation spans, skipping duplicates
        for span in impl_spans {
            if seen.insert(span.clone()) {
                all_spans.push(span);
            }
        }
        // Spans that cannot be mapped back to a workspace path are dropped.
        for span in &all_spans {
            let span_result = self.sync.temp_to_workspace_span(self.engines.se(), span);
            if let Ok(span) = span_result {
                self.implementations.push(span);
            }
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/mod.rs | sway-lsp/src/capabilities/code_actions/mod.rs | pub mod abi_decl;
pub mod common;
pub mod constant_decl;
pub mod diagnostic;
pub mod enum_decl;
pub mod enum_variant;
pub mod function_decl;
pub mod storage_field;
pub mod struct_decl;
pub mod struct_field;
pub mod trait_fn;
pub use crate::error::DocumentError;
use crate::{
core::{
token::{Token, TypedAstToken},
token_map::TokenMap,
},
server_state::CompiledPrograms,
};
use lsp_types::{
CodeAction as LspCodeAction, CodeActionDisabled, CodeActionKind, CodeActionOrCommand,
CodeActionResponse, Diagnostic, Position, Range, TextEdit, Url, WorkspaceEdit,
};
use serde_json::Value;
use std::collections::HashMap;
use sway_core::{language::ty, Engines, Namespace};
use sway_types::{LineCol, Spanned};
// Human-readable title prefixes for the code actions produced in this module tree.
pub(crate) const CODE_ACTION_IMPL_TITLE: &str = "Generate impl for";
pub(crate) const CODE_ACTION_NEW_TITLE: &str = "Generate `new`";
pub(crate) const CODE_ACTION_DOC_TITLE: &str = "Generate a documentation template";
pub(crate) const CODE_ACTION_IMPORT_TITLE: &str = "Import";
pub(crate) const CODE_ACTION_QUALIFY_TITLE: &str = "Qualify as";
/// Shared context threaded through every code-action generator.
#[derive(Clone)]
pub(crate) struct CodeActionContext<'a> {
    engines: &'a Engines,
    tokens: &'a TokenMap,
    /// The token under the cursor that triggered the request.
    token: &'a Token,
    /// The document's original URL.
    uri: &'a Url,
    /// The temporary URL under which the document was compiled; token lookups
    /// use this one.
    temp_uri: &'a Url,
    diagnostics: &'a Vec<Diagnostic>,
    namespace: &'a Namespace,
}
/// Produces the code actions available at `range.start` in `temp_uri`.
///
/// The typed token under the cursor selects which declaration-specific
/// generator runs (abi/struct/enum/function/storage/constant/variant/field/
/// trait-fn); diagnostic-driven actions are appended afterwards. Returns
/// `None` when no token is found at the position or the typed program for the
/// file is unavailable.
pub fn code_actions(
    engines: &Engines,
    token_map: &TokenMap,
    range: &Range,
    uri: &Url,
    temp_uri: &Url,
    diagnostics: &Vec<Diagnostic>,
    compiled_programs: &CompiledPrograms,
) -> Option<CodeActionResponse> {
    let t = token_map.token_at_position(temp_uri, range.start)?;
    let token = t.value();
    let program = compiled_programs.program_from_uri(temp_uri, engines)?;
    let namespace = &program.value().typed.as_ref().ok()?.namespace;
    let ctx = CodeActionContext {
        engines,
        tokens: token_map,
        token,
        uri,
        temp_uri,
        diagnostics,
        namespace,
    };
    let actions_by_type = token
        .as_typed()
        .as_ref()
        .map(|typed_token| match typed_token {
            TypedAstToken::TypedDeclaration(decl) => match decl {
                ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
                    abi_decl::code_actions(decl_id, &ctx)
                }
                ty::TyDecl::StructDecl(ty::StructDecl { decl_id, .. }) => {
                    struct_decl::code_actions(decl_id, &ctx)
                }
                ty::TyDecl::EnumDecl(ty::EnumDecl { decl_id, .. }) => {
                    enum_decl::code_actions(decl_id, &ctx)
                }
                _ => Vec::new(),
            },
            TypedAstToken::TypedFunctionDeclaration(decl) => {
                function_decl::code_actions(decl, &ctx)
            }
            TypedAstToken::TypedStorageField(decl) => storage_field::code_actions(decl, &ctx),
            TypedAstToken::TypedConstantDeclaration(decl) => {
                constant_decl::code_actions(decl, &ctx)
            }
            TypedAstToken::TypedEnumVariant(decl) => enum_variant::code_actions(decl, &ctx),
            TypedAstToken::TypedStructField(decl) => struct_field::code_actions(decl, &ctx),
            TypedAstToken::TypedTraitFn(decl) => trait_fn::code_actions(decl, &ctx),
            _ => Vec::new(),
        })
        .unwrap_or_default();
    let actions_by_diagnostic = diagnostic::code_actions(&ctx).unwrap_or_default();
    Some([actions_by_type, actions_by_diagnostic].concat())
}
pub(crate) trait CodeAction<'a, T: Spanned> {
/// Creates a new [`CodeAction`] with the given [Engines], declaration type, and [Url].
fn new(ctx: &CodeActionContext<'a>, decl: &'a T) -> Self;
/// Returns a [String] of text to insert into the document.
fn new_text(&self) -> String;
/// Returns a [String] of text to use as the title of the code action.
fn title(&self) -> String;
fn indentation(&self) -> String {
let LineCol { col, .. } = self.decl().span().start_line_col_one_index();
" ".repeat(col - 1)
}
/// Returns the declaration.
fn decl(&self) -> &T;
/// Returns the declaration's [Url].
fn uri(&self) -> &Url;
/// Returns an optional [`CodeActionDisabled`] indicating whether this code action should be disabled.
fn disabled(&self) -> Option<CodeActionDisabled> {
None
}
/// Returns a [`CodeActionOrCommand`] for the given code action.
fn code_action(&self) -> CodeActionOrCommand {
let text_edit = TextEdit {
range: self.range(),
new_text: self.new_text(),
};
let changes = HashMap::from([(self.uri().clone(), vec![text_edit])]);
CodeActionOrCommand::CodeAction(LspCodeAction {
title: self.title(),
kind: Some(CodeActionKind::REFACTOR),
edit: Some(WorkspaceEdit {
changes: Some(changes),
..Default::default()
}),
data: Some(Value::String(self.uri().to_string())),
disabled: self.disabled(),
..Default::default()
})
}
/// Returns the [Range] to insert text. This will usually be implemented as `range_before` or `range_after`.
fn range(&self) -> Range;
/// Returns the [Range] to insert text after the last line of the span, with an empty line in between.
fn range_after(&self) -> Range {
let LineCol {
line: last_line, ..
} = self.decl().span().end_line_col_one_index();
let insertion_position = Position {
line: last_line as u32,
character: 0,
};
Range {
start: insertion_position,
end: insertion_position,
}
}
/// Returns the [Range] to insert text before the first line of the span, with an empty line in between.
fn range_before(&self) -> Range {
let LineCol {
line: first_line, ..
} = self.decl().span().start_line_col_one_index();
let insertion_position = Position {
line: first_line as u32 - 1,
character: 0,
};
Range {
start: insertion_position,
end: insertion_position,
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/function_decl/mod.rs | sway-lsp/src/capabilities/code_actions/function_decl/mod.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::language::ty;
use super::common::fn_doc_comment::FnDocCommentCodeAction;
pub(crate) fn code_actions(
decl: &ty::TyFunctionDecl,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
vec![FnDocCommentCodeAction::new(ctx, decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/struct_decl/mod.rs | sway-lsp/src/capabilities/code_actions/struct_decl/mod.rs | pub(crate) mod struct_impl;
pub(crate) mod struct_new;
use self::{struct_impl::StructImplCodeAction, struct_new::StructNewCodeAction};
use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::{decl_engine::id::DeclId, language::ty};
use super::common::basic_doc_comment::BasicDocCommentCodeAction;
pub(crate) fn code_actions(
decl_id: &DeclId<ty::TyStructDecl>,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
let decl = (*ctx.engines.de().get_struct(decl_id)).clone();
vec![
StructImplCodeAction::new(ctx, &decl).code_action(),
StructNewCodeAction::new(ctx, &decl).code_action(),
BasicDocCommentCodeAction::new(ctx, &decl).code_action(),
]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/struct_decl/struct_new.rs | sway-lsp/src/capabilities/code_actions/struct_decl/struct_new.rs | use crate::{
capabilities::code_actions::{
common::generate_impl::GenerateImplCodeAction, CodeAction, CodeActionContext,
CODE_ACTION_NEW_TITLE,
},
core::{token::TypedAstToken, token_map::TokenMapExt},
};
use lsp_types::{CodeActionDisabled, Position, Range, Url};
use sway_core::language::ty::{self, TyImplSelfOrTrait, TyStructDecl, TyStructField};
use sway_types::{LineCol, Spanned};
pub(crate) struct StructNewCodeAction<'a> {
decl: &'a TyStructDecl,
uri: &'a Url,
existing_impl_decl: Option<TyImplSelfOrTrait>,
}
impl<'a> GenerateImplCodeAction<'a, TyStructDecl> for StructNewCodeAction<'a> {
fn decl_name(&self) -> String {
self.decl.call_path.suffix.to_string()
}
}
impl<'a> CodeAction<'a, TyStructDecl> for StructNewCodeAction<'a> {
fn new(ctx: &CodeActionContext<'a>, decl: &'a TyStructDecl) -> Self {
// Before the other functions are called, we need to determine if the new function
// should be generated in a new impl block, an existing impl block, or not at all.
// Find the first impl block for this struct if it exists.
let existing_impl_decl = ctx
.tokens
.iter()
.all_references_of_token(ctx.token, ctx.engines)
.find_map(|item| {
if let Some(TypedAstToken::TypedDeclaration(ty::TyDecl::ImplSelfOrTrait(
ty::ImplSelfOrTrait { decl_id, .. },
))) = item.value().as_typed()
{
Some((*ctx.engines.de().get_impl_self_or_trait(decl_id)).clone())
} else {
None
}
});
Self {
decl,
uri: ctx.uri,
existing_impl_decl,
}
}
fn new_text(&self) -> String {
let params = StructNewCodeAction::params_string(&self.decl.fields);
let new_fn = self.fn_signature_string(
"new".to_string(),
params,
&self.decl.attributes,
self.return_type_string(),
Some(self.fn_body()),
);
// If there is already an impl block for this struct, add only the function to it.
if self.existing_impl_decl.is_some() {
format!("{new_fn}\n")
} else {
// Otherwise, generate the impl block with the `new` function inside.
self.impl_string(
self.type_param_string(&self.decl.generic_parameters),
format!("\n{new_fn}\n"),
None,
)
}
}
fn range(&self) -> Range {
// If there is already an impl block for this struct, insert the new function at the top of it.
let insertion_position = match self.existing_impl_decl.clone() {
Some(decl) => {
let LineCol {
line: first_line, ..
} = decl.span.start_line_col_one_index();
Position {
line: first_line as u32,
character: 0,
}
}
None => {
// If we're inserting a whole new impl block, default to the line after the struct declaration.
let LineCol {
line: last_line, ..
} = self.decl().span().end_line_col_one_index();
Position {
line: last_line as u32,
character: 0,
}
}
};
Range {
start: insertion_position,
end: insertion_position,
}
}
fn title(&self) -> String {
CODE_ACTION_NEW_TITLE.to_string()
}
fn decl(&self) -> &TyStructDecl {
self.decl
}
fn uri(&self) -> &Url {
self.uri
}
fn disabled(&self) -> Option<CodeActionDisabled> {
// If there is already a `new` function in the impl block, don't generate a new one.
if self
.existing_impl_decl
.clone()?
.items
.iter()
.any(|item| match item {
sway_core::language::ty::TyTraitItem::Fn(fn_decl) => {
fn_decl.span().as_str().contains("fn new")
}
sway_core::language::ty::TyTraitItem::Constant(_) => false,
sway_core::language::ty::TyTraitItem::Type(_) => false,
})
{
Some(CodeActionDisabled {
reason: format!("Struct {} already has a `new` function", self.decl_name()),
})
} else {
None
}
}
}
impl StructNewCodeAction<'_> {
fn return_type_string(&self) -> String {
" -> Self".to_string()
}
fn params_string(params: &[TyStructField]) -> String {
params
.iter()
.map(|field| format!("{}: {}", field.name, field.type_argument.span().as_str()))
.collect::<Vec<String>>()
.join(", ")
}
fn fn_body(&self) -> String {
if self.decl.fields.is_empty() {
return "Self {{}}".to_string();
}
format!(
"Self {{ {} }}",
self.decl
.fields
.iter()
.map(|field| format!("{}", field.name))
.collect::<Vec<String>>()
.join(", ")
)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/struct_decl/struct_impl.rs | sway-lsp/src/capabilities/code_actions/struct_decl/struct_impl.rs | use crate::capabilities::code_actions::{
common::generate_impl::{GenerateImplCodeAction, TAB},
CodeAction, CodeActionContext, CODE_ACTION_IMPL_TITLE,
};
use lsp_types::{Range, Url};
use sway_core::language::ty::TyStructDecl;
pub(crate) struct StructImplCodeAction<'a> {
decl: &'a TyStructDecl,
uri: &'a Url,
}
impl<'a> GenerateImplCodeAction<'a, TyStructDecl> for StructImplCodeAction<'a> {
fn decl_name(&self) -> String {
self.decl.call_path.suffix.to_string()
}
}
impl<'a> CodeAction<'a, TyStructDecl> for StructImplCodeAction<'a> {
fn new(ctx: &CodeActionContext<'a>, decl: &'a TyStructDecl) -> Self {
Self { decl, uri: ctx.uri }
}
fn new_text(&self) -> String {
self.impl_string(
self.type_param_string(&self.decl.generic_parameters),
format!("\n{TAB}\n"),
None,
)
}
fn title(&self) -> String {
format!("{} `{}`", CODE_ACTION_IMPL_TITLE, self.decl_name())
}
fn range(&self) -> Range {
self.range_after()
}
fn decl(&self) -> &TyStructDecl {
self.decl
}
fn uri(&self) -> &Url {
self.uri
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/enum_variant/mod.rs | sway-lsp/src/capabilities/code_actions/enum_variant/mod.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::language::ty;
use super::common::basic_doc_comment::BasicDocCommentCodeAction;
pub(crate) fn code_actions(
decl: &ty::TyEnumVariant,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
vec![BasicDocCommentCodeAction::new(ctx, decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/storage_field/mod.rs | sway-lsp/src/capabilities/code_actions/storage_field/mod.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::language::ty;
use super::common::basic_doc_comment::BasicDocCommentCodeAction;
pub(crate) fn code_actions(
decl: &ty::TyStorageField,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
vec![BasicDocCommentCodeAction::new(ctx, decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.