repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/abi_impl_methods_callable/mod.rs | test/src/sdk-harness/test_projects/abi_impl_methods_callable/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "AbiImplMethodsCallable",
abi = "test_projects/abi_impl_methods_callable/out/release/abi_impl_methods_callable-abi.json"
));
async fn get_abi_impl_methods_callable_instance() -> AbiImplMethodsCallable<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/abi_impl_methods_callable/out/release/abi_impl_methods_callable.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap();
AbiImplMethodsCallable::new(id.contract_id, wallet)
}
#[tokio::test]
async fn impl_method_test() -> Result<()> {
let instance = get_abi_impl_methods_callable_instance().await;
let contract_methods = instance.methods();
let response = contract_methods.impl_method().call().await?;
assert_eq!(42, response.value);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/option_field_order/mod.rs | test/src/sdk-harness/test_projects/option_field_order/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "MyContract",
abi = "test_projects/option_field_order/out/release/option_field_order-abi.json"
));
#[tokio::test]
async fn default_is_none() {
let instance = setup().await;
assert!(instance.methods().is_none().call().await.unwrap().value);
}
async fn setup() -> MyContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/option_field_order/out/release/option_field_order.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
MyContract::new(id.clone(), wallet)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/asset_id/mod.rs | test/src/sdk-harness/test_projects/asset_id/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestAssetId",
abi = "test_projects/asset_id/out/release/asset_id-abi.json"
));
#[tokio::test]
async fn can_get_base_asset_id() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (fuelcontract_instance, _fuelcontract_id) = get_instance(wallet.clone()).await;
let asset_id = fuelcontract_instance
.methods()
.get_base_asset_id()
.call()
.await
.unwrap()
.value;
let consensus_params = wallet.provider().consensus_parameters().await.unwrap();
let base_asset_id = consensus_params.base_asset_id();
assert_eq!(asset_id, *base_asset_id);
}
async fn get_instance(wallet: Wallet) -> (TestAssetId<Wallet>, ContractId) {
let fuelcontract_id = Contract::load_from(
"test_projects/asset_id/out/release/asset_id.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
wallet
.force_transfer_to_contract(fuelcontract_id, 1000, AssetId::BASE, TxPolicies::default())
.await
.unwrap();
let fuelcontract_instance = TestAssetId::new(fuelcontract_id.clone(), wallet);
(fuelcontract_instance, fuelcontract_id.into())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/predicate_panic_expression/mod.rs | test/src/sdk-harness/test_projects/predicate_panic_expression/mod.rs | use fuel_vm::fuel_asm::{op, RegId};
use fuel_vm::fuel_tx;
use fuel_vm::fuel_tx::{Address, AssetId, Output};
use fuels::{
core::codec::{ABIEncoder, EncoderConfig},
prelude::*,
types::{input::Input, transaction_builders::ScriptTransactionBuilder, Token},
};
use std::str::FromStr;
async fn setup() -> (Vec<u8>, Address, Wallet, u64, AssetId) {
let predicate_code = std::fs::read(
"test_projects/predicate_panic_expression/out/release/predicate_panic_expression.bin",
)
.unwrap();
let predicate_address = fuel_tx::Input::predicate_owner(&predicate_code);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let mut wallets = launch_custom_provider_and_get_wallets(
WalletsConfig::new(Some(1), None, None),
Some(node_config),
None,
)
.await
.unwrap();
let wallet = wallets.pop().unwrap();
(
predicate_code,
predicate_address,
wallet,
1000,
AssetId::default(),
)
}
async fn create_predicate(
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
) {
let provider = wallet.provider();
let wallet_coins = wallet
.get_asset_inputs_for_amount(
asset_id,
wallet.get_asset_balance(&asset_id).await.unwrap().into(),
None,
)
.await
.unwrap();
let output_coin = Output::coin(predicate_address, amount_to_predicate, asset_id);
let output_change = Output::change(wallet.clone().address().into(), 0, asset_id);
let mut tx = ScriptTransactionBuilder::prepare_transfer(
wallet_coins,
vec![output_coin, output_change],
Default::default(),
)
.with_script(op::ret(RegId::ONE).to_bytes().to_vec());
tx.add_signer(wallet.signer().clone()).unwrap();
let tx = tx.build(provider).await.unwrap();
provider
.send_transaction_and_await_commit(tx)
.await
.unwrap();
}
async fn submit_to_predicate(
predicate_code: Vec<u8>,
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
receiver_address: Address,
predicate_data: Vec<u8>,
) -> Result<()> {
let filter = ResourceFilter {
from: predicate_address.into(),
asset_id: Some(asset_id),
amount: amount_to_predicate.into(),
..Default::default()
};
let utxo_predicate_hash = wallet
.provider()
.get_spendable_resources(filter)
.await
.unwrap();
let mut inputs = vec![];
let mut total_amount_in_predicate = 0;
for coin in utxo_predicate_hash {
inputs.push(Input::resource_predicate(
coin.clone(),
predicate_code.to_vec(),
predicate_data.clone(),
));
total_amount_in_predicate += coin.amount();
}
let output_coin = Output::coin(receiver_address, total_amount_in_predicate - 1, asset_id);
let output_change = Output::change(predicate_address, 0, asset_id);
let provider = wallet.provider();
let new_tx = ScriptTransactionBuilder::prepare_transfer(
inputs,
vec![output_coin, output_change],
Default::default(),
)
.with_tx_policies(TxPolicies::default().with_tip(1))
.build(provider)
.await
.unwrap();
wallet
.provider()
.send_transaction_and_await_commit(new_tx)
.await
.map(|_| ())
}
async fn get_balance(wallet: &Wallet, address: Address, asset_id: AssetId) -> u128 {
wallet
.provider()
.get_asset_balance(&address.into(), &asset_id)
.await
.unwrap()
}
#[tokio::test]
async fn valid_predicate() {
// Predicate must revert for these inputs.
for val in 0..=3u32 {
let arg = Token::U32(val);
let args: Vec<Token> = vec![arg];
let predicate_data = ABIEncoder::new(EncoderConfig::default())
.encode(&args)
.unwrap();
let receiver_address =
Address::from_str("0xd926978a28a565531a06cbf5fab5402d6ee2021e5a5dce2d2f7c61e5521be109")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) =
setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await
.expect_err("Predicate must revert for these inputs");
// The receiver balance stays the same.
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, receiver_balance_after);
// The predicate balance stays the same.
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, 1000);
}
// Predicate returns true for this input.
let arg = Token::U32(4u32);
let args: Vec<Token> = vec![arg];
let predicate_data = ABIEncoder::new(EncoderConfig::default())
.encode(&args)
.unwrap();
let receiver_address =
Address::from_str("0xd926978a28a565531a06cbf5fab5402d6ee2021e5a5dce2d2f7c61e5521be109")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await
.expect("Failed to submit to predicate");
// The receiver balance gets increased.
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(
receiver_balance_before + u128::from(amount_to_predicate) - 1,
receiver_balance_after
);
// The predicate balance drops to zero.
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, 0);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/vec_in_abi/mod.rs | test/src/sdk-harness/test_projects/vec_in_abi/mod.rs | use fuels::{prelude::*, programs::calls::ContractCall, types::Bits256};
use std::str::FromStr;
abigen!(Contract(
name = "VecInAbiTestContract",
abi = "test_projects/vec_in_abi/out/release/vec_in_abi-abi.json"
));
async fn get_vec_in_abi_instance() -> (VecInAbiTestContract<Wallet>, ContractId) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/vec_in_abi/out/release/vec_in_abi.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = VecInAbiTestContract::new(id.clone(), wallet);
(instance, id.into())
}
#[tokio::test]
async fn test_bool() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![true, false, true];
let response = contract_methods.bool_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u8() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![42, 43, 44];
let response = contract_methods.u8_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u16() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![42, 43, 44];
let response = contract_methods.u16_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u32() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![42, 43, 44];
let response = contract_methods.u32_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u64() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![42, 43, 44];
let response = contract_methods.u64_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_b256() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![Bits256([1u8; 32]), Bits256([2u8; 32]), Bits256([3u8; 32])];
let response = contract_methods.b256_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_struct() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![
MyStruct {
first_field: Some(
Address::from_str(
"0x4242424242424242424242424242424242424242424242424242424242424242",
)
.unwrap(),
),
second_field: 42,
},
MyStruct {
first_field: None,
second_field: 43,
},
MyStruct {
first_field: Some(
Address::from_str(
"0x4444444444444444444444444444444444444444444444444444444444444444",
)
.unwrap(),
),
second_field: 44,
},
];
let response = contract_methods.struct_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_enum() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![
MyEnum::FirstVariant(Some(
Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
.unwrap(),
)),
MyEnum::FirstVariant(None),
MyEnum::SecondVariant(42),
];
let response = contract_methods.enum_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_array() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![
[
Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
.unwrap(),
Address::from_str("0x6969696969696969696969696969696969696969696969696969696969696969")
.unwrap(),
],
[
Address::from_str("0x4343434343434343434343434343434343434343434343434343434343434343")
.unwrap(),
Address::from_str("0x7070707070707070707070707070707070707070707070707070707070707070")
.unwrap(),
],
[
Address::from_str("0x9999999999999999999999999999999999999999999999999999999999999999")
.unwrap(),
Address::from_str("0x0000000000000000000000000000000000000000000000000000000000000000")
.unwrap(),
],
];
let response = contract_methods.array_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_string() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![
"fuel".try_into().unwrap(),
"labs".try_into().unwrap(),
"rock".try_into().unwrap(),
];
let response = contract_methods.string_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_vec_in_vec() -> Result<()> {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
let input = vec![vec![42, 43, 44], vec![69, 70, 71], vec![99, 100, 101]];
let response = contract_methods
.vec_in_vec_test(input.clone())
.call()
.await?;
assert_eq!(
input.into_iter().flatten().collect::<Vec<_>>(),
response.value
);
Ok(())
}
async fn test_echo<T>(
f: impl Fn(T) -> CallHandler<fuels::accounts::wallet::Wallet, ContractCall, T>,
input: T,
) where
T: Eq
+ Clone
+ fuels::core::traits::Tokenizable
+ fuels::core::traits::Parameterize
+ std::fmt::Debug,
{
let response = (f)(input.clone()).call().await.unwrap();
assert_eq!(input, response.value);
}
#[tokio::test]
async fn test_echos() {
let (instance, _id) = get_vec_in_abi_instance().await;
let contract_methods = instance.methods();
test_echo(|v| contract_methods.echo_u8(v), vec![0u8, 1u8, 2u8]).await;
test_echo(|v| contract_methods.echo_u16(v), vec![0u16, 1u16, 2u16]).await;
test_echo(|v| contract_methods.echo_u32(v), vec![0u32, 1u32, 2u32]).await;
test_echo(
|v| contract_methods.echo_u32_vec_in_vec(v),
vec![vec![0u32], vec![1u32], vec![2u32]],
)
.await;
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/events/mod.rs | test/src/sdk-harness/test_projects/events/mod.rs | use fuels::prelude::*;
#[tokio::test]
async fn emits_indexed_events() -> Result<()> {
abigen!(Script(
name = "Events",
abi = "test_projects/events/out/release/events-abi.json",
));
let wallet = launch_provider_and_get_wallet().await.unwrap();
let bin_path = "test_projects/events/out/release/events.bin";
let instance = Events::new(wallet.clone(), bin_path);
let response = instance.main().call().await?;
// TODO: Uncomment once fuels-rs is updated with indexed events support (https://github.com/FuelLabs/fuels-rs/pull/1695).
// let events = response.decode_logs_with_type::<TestIndexedEventStruct>()?;
// assert_eq!(events.len(), 3);
// let flags: Vec<bool> = events.iter().map(|event| event.field_1).collect();
// assert_eq!(flags, vec![true, false, true]);
// let expected =
// hex::decode("ef86afa9696cf0dc6385e2c407a6e159a1103cefb7e2ae0636fb33d3cb2a9e4a").unwrap();
// assert_eq!(expected, response.tx_status.receipts[0].data().unwrap());
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/predicate_data_struct/mod.rs | test/src/sdk-harness/test_projects/predicate_data_struct/mod.rs | use fuel_vm::fuel_asm::{op, RegId};
use fuel_vm::fuel_tx;
use fuel_vm::fuel_tx::{Address, AssetId, Output};
use fuels::{
core::codec::{ABIEncoder, EncoderConfig},
prelude::*,
types::{input::Input, transaction_builders::ScriptTransactionBuilder, Token},
};
use std::str::FromStr;
async fn setup() -> (Vec<u8>, Address, Wallet, u64, AssetId) {
let predicate_code =
std::fs::read("test_projects/predicate_data_struct/out/release/predicate_data_struct.bin")
.unwrap();
let predicate_address = fuel_tx::Input::predicate_owner(&predicate_code);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let mut wallets = launch_custom_provider_and_get_wallets(
WalletsConfig::new(Some(1), None, None),
Some(node_config),
None,
)
.await
.unwrap();
let wallet = wallets.pop().unwrap();
(
predicate_code,
predicate_address,
wallet,
1000,
AssetId::default(),
)
}
async fn create_predicate(
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
) {
let wallet_coins = wallet
.get_asset_inputs_for_amount(
asset_id,
wallet.get_asset_balance(&asset_id).await.unwrap().into(),
None,
)
.await
.unwrap();
let provider = wallet.provider();
let output_coin = Output::coin(predicate_address, amount_to_predicate, asset_id);
let output_change = Output::change(wallet.address().into(), 0, asset_id);
let mut tx = ScriptTransactionBuilder::prepare_transfer(
wallet_coins,
vec![output_coin, output_change],
Default::default(),
)
.with_script(op::ret(RegId::ONE).to_bytes().to_vec());
tx.add_signer(wallet.signer().clone()).unwrap();
let tx = tx.build(provider).await.unwrap();
provider.send_transaction_and_await_commit(tx).await.unwrap();
}
async fn submit_to_predicate(
predicate_code: Vec<u8>,
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
receiver_address: Address,
predicate_data: Vec<u8>,
) {
let filter = ResourceFilter {
from: predicate_address.into(),
asset_id: Some(asset_id),
amount: amount_to_predicate.into(),
..Default::default()
};
let provider = wallet.provider();
let utxo_predicate_hash = provider.get_spendable_resources(filter).await.unwrap();
let mut inputs = vec![];
let mut total_amount_in_predicate = 0;
for coin in utxo_predicate_hash {
inputs.push(Input::resource_predicate(
coin.clone(),
predicate_code.to_vec(),
predicate_data.clone(),
));
total_amount_in_predicate += coin.amount();
}
let output_coin = Output::coin(receiver_address, total_amount_in_predicate, asset_id);
let output_change = Output::change(predicate_address, 0, asset_id);
let new_tx = ScriptTransactionBuilder::prepare_transfer(
inputs,
vec![output_coin, output_change],
Default::default(),
)
.build(provider)
.await
.unwrap();
let _call_result = provider.send_transaction_and_await_commit(new_tx).await;
}
async fn get_balance(wallet: &Wallet, address: Address, asset_id: AssetId) -> u128 {
wallet
.provider()
.get_asset_balance(&address.into(), &asset_id)
.await
.unwrap()
}
struct Validation {
has_account: bool,
total_complete: u64,
}
fn encode_struct(predicate_struct: Validation) -> Vec<u8> {
let has_account = Token::Bool(predicate_struct.has_account);
let total_complete = Token::U64(predicate_struct.total_complete);
let token_struct: Vec<Token> = vec![has_account, total_complete];
ABIEncoder::new(EncoderConfig::default())
.encode(&token_struct)
.unwrap()
}
#[tokio::test]
async fn should_pass_with_valid_struct() {
let predicate_data = encode_struct(Validation {
has_account: true,
total_complete: 100,
});
let receiver_address =
Address::from_str("0xde97d8624a438121b86a1956544bd72ed68cd69f2c99555b08b1e8c51ffd511c")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await;
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(
receiver_balance_before + amount_to_predicate as u128,
receiver_balance_after
);
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, 0);
}
#[tokio::test]
async fn should_fail_with_invalid_struct_u64() {
let predicate_data = encode_struct(Validation {
has_account: true,
total_complete: 200,
});
let receiver_address =
Address::from_str("0xde97d8624a438121b86a1956544bd72ed68cd69f2c99555b08b1e8c51ffd511c")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await;
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, receiver_balance_after);
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, amount_to_predicate as u128);
}
#[tokio::test]
async fn should_fail_with_invalid_struct_bool() {
let predicate_data = encode_struct(Validation {
has_account: false,
total_complete: 100,
});
let receiver_address =
Address::from_str("0xde97d8624a438121b86a1956544bd72ed68cd69f2c99555b08b1e8c51ffd511c")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await;
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, receiver_balance_after);
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, amount_to_predicate as u128);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/superabi/mod.rs | test/src/sdk-harness/test_projects/superabi/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "SuperAbiTestContract",
abi = "test_projects/superabi/out/release/superabi-abi.json"
));
async fn get_superabi_instance() -> SuperAbiTestContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/superabi/out/release/superabi.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
SuperAbiTestContract::new(id.clone(), wallet)
}
#[tokio::test]
async fn abi_test() -> Result<()> {
let instance = get_superabi_instance().await;
let contract_methods = instance.methods();
let response = contract_methods.abi_test().call().await?;
assert_eq!(42, response.value);
Ok(())
}
#[tokio::test]
async fn superabi_test() -> Result<()> {
let instance = get_superabi_instance().await;
let contract_methods = instance.methods();
let response = contract_methods.superabi_test().call().await?;
assert_eq!(41, response.value);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/registers/mod.rs | test/src/sdk-harness/test_projects/registers/mod.rs | use fuel_vm::consts::VM_MAX_RAM;
use fuels::prelude::*;
abigen!(Contract(
name = "TestRegistersContract",
abi = "test_projects/registers/out/release/registers-abi.json",
));
// Compile contract, create node and deploy contract, returning TestRegistersContract contract instance
// TO DO :
// - Ability to return any type of Contract.
// - Return a result
async fn deploy_test_registers_instance() -> TestRegistersContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/registers/out/release/registers.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
TestRegistersContract::new(id.clone(), wallet)
}
#[tokio::test]
async fn can_get_overflow() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_overflow().call().await.unwrap();
assert_eq!(result.value, 0);
}
#[tokio::test]
async fn can_get_program_counter() {
let instance = deploy_test_registers_instance().await;
let result = instance
.methods()
.get_program_counter()
.call()
.await
.unwrap();
assert!(is_within_range(result.value));
}
#[tokio::test]
async fn can_get_stack_start_ptr() {
let instance = deploy_test_registers_instance().await;
let result = instance
.methods()
.get_stack_start_ptr()
.call()
.await
.unwrap();
assert!(is_within_range(result.value));
}
#[tokio::test]
async fn can_get_stack_ptr() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_stack_ptr().call().await.unwrap();
assert!(is_within_range(result.value));
}
#[tokio::test]
async fn can_get_frame_ptr() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_frame_ptr().call().await.unwrap();
assert!(is_within_range(result.value));
}
#[tokio::test]
async fn can_get_heap_ptr() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_heap_ptr().call().await.unwrap();
assert!(is_within_range(result.value));
}
#[tokio::test]
async fn can_get_error() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_error().call().await.unwrap();
assert_eq!(result.value, 0);
}
#[tokio::test]
async fn can_get_global_gas() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_global_gas().call().await.unwrap();
assert_ne!(result.value, 0);
}
#[tokio::test]
async fn can_get_context_gas() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_context_gas().call().await.unwrap();
assert_ne!(result.value, 0);
}
#[tokio::test]
async fn can_get_balance() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_balance().call().await.unwrap();
assert_eq!(result.value, 0);
}
#[tokio::test]
async fn can_get_instrs_start() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_instrs_start().call().await.unwrap();
assert!(is_within_range(result.value));
}
#[tokio::test]
async fn can_get_return_value() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_return_value().call().await.unwrap();
assert_eq!(result.value, 0);
}
#[tokio::test]
async fn can_get_return_length() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_return_length().call().await.unwrap();
assert_eq!(result.value, 0);
}
#[tokio::test]
async fn can_get_flags() {
let instance = deploy_test_registers_instance().await;
let result = instance.methods().get_flags().call().await.unwrap();
assert_eq!(result.value, 0);
}
fn is_within_range(n: u64) -> bool {
n > 0 && n <= VM_MAX_RAM
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/result_option_expect/mod.rs | test/src/sdk-harness/test_projects/result_option_expect/mod.rs | use fuels::{accounts::wallet::Wallet, prelude::*};
abigen!(Contract(
name = "ExpectTestingContract",
abi = "test_projects/result_option_expect/out/release/result_option_expect-abi.json"
));
async fn setup() -> (ExpectTestingContract<Wallet>, ContractId) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/result_option_expect/out/release/result_option_expect.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = ExpectTestingContract::new(id.clone(), wallet);
(instance, id.into())
}
#[tokio::test]
async fn test_expect_option() {
let (instance, _id) = setup().await;
instance
.methods()
.option_test_should_not_revert()
.call()
.await
.unwrap();
}
#[tokio::test]
async fn test_expect_result() {
let (instance, _id) = setup().await;
instance
.methods()
.result_test_should_not_revert()
.call()
.await
.unwrap();
}
#[tokio::test]
#[should_panic]
async fn test_expect_option_panic() {
let (instance, _id) = setup().await;
instance
.methods()
.option_test_should_revert()
.call()
.await
.unwrap();
}
#[tokio::test]
#[should_panic]
async fn test_expect_result_panic() {
let (instance, _id) = setup().await;
instance
.methods()
.result_test_should_revert()
.call()
.await
.unwrap();
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_string/mod.rs | test/src/sdk-harness/test_projects/storage_string/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageStringContract",
abi = "test_projects/storage_string/out/release/storage_string-abi.json",
));
async fn setup() -> TestStorageStringContract<Wallet> {
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let mut wallets = launch_custom_provider_and_get_wallets(
WalletsConfig::new(Some(1), None, None),
Some(node_config),
None,
)
.await
.unwrap();
let wallet = wallets.pop().unwrap();
let id = Contract::load_from(
"test_projects/storage_string/out/release/storage_string.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
TestStorageStringContract::new(id, wallet)
}
// Storing a short string records its exact byte length and contents.
#[tokio::test]
async fn stores_string() {
    let instance = setup().await;
    let input = "Fuel is blazingly fast!";
    // Storage starts out empty.
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        0
    );
    instance
        .methods()
        .store_string(input.into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
    assert_eq!(
        instance.methods().get_string().call().await.unwrap().value,
        Bytes(input.as_bytes().to_vec())
    );
}
// A multi-kilobyte string round-trips through storage; uses a raised script
// gas limit since the default is not enough for a write of this size.
#[tokio::test]
async fn stores_long_string() {
    let instance = setup().await;
    // 2060 bytes, max length of URI
    let input = "Nam quis nulla. Integer malesuada. In in enim a arcu imperdiet malesuada. Sed vel lectus. Donec odio urna, tempus molestie, porttitor ut, iaculis quis, sem. Phasellus rhoncus. Aenean id metus id velit ullamcorper pulvinar. Vestibulum fermentum tortor id mi. Pellentesque ipsum. Nulla non arcu lacinia neque faucibus fringilla. Nulla non lectus sed nisl molestie malesuada. Proin in tellus sit amet nibh dignissim sagittis. Vivamus luctus egestas leo. Maecenas sollicitudin. Nullam rhoncus aliquam metus. Etiam egestas wisi a erat.
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Nullam feugiat, turpis at pulvinar vulputate, erat libero tristique tellus, nec bibendum odio risus sit amet ante. Aliquam erat volutpat. Nunc auctor. Mauris pretium quam et urna. Fusce nibh. Duis risus. Curabitur sagittis hendrerit ante. Aliquam erat volutpat. Vestibulum erat nulla, ullamcorper nec, rutrum non, nonummy ac, erat. Duis condimentum augue id magna semper rutrum. Nullam justo enim, consectetuer nec, ullamcorper ac, vestibulum in, elit. Proin pede metus, vulputate nec, fermentum fringilla, vehicula vitae, justo. Fusce consectetuer risus a nunc. Aliquam ornare wisi eu metus. Integer pellentesque quam vel velit. Duis pulvinar.
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Morbi gravida libero nec velit. Morbi scelerisque luctus velit. Etiam dui sem, fermentum vitae, sagittis id, malesuada in, quam. Proin mattis lacinia justo. Vestibulum facilisis auctor urna. Aliquam in lorem sit amet leo accumsan lacinia. Integer rutrum, orci vestibulum ullamcorper ultricies, lacus quam ultricies odio, vitae placerat pede sem sit amet enim. Phasellus et lorem id felis nonummy placerat. Fusce dui leo, imperdiet in, aliquam sit amet, feugiat eu, orci. Aenean vel massa quis mauris vehicula lacinia. Quisque tincidunt scelerisque libero. Maecenas libero. Etiam dictum tincidunt diam. Donec ipsum massa, ullamcorper in, auctor et, scelerisque sed, est. Suspendisse nisl. Sed convallis magna eu sem. Cras pede libero, dapibus nec, pretium";
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        0
    );
    let tx_policies = TxPolicies::default().with_script_gas_limit(12_000_000);
    instance
        .methods()
        .store_string(input.into())
        .with_tx_policies(tx_policies)
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
    assert_eq!(
        instance.methods().get_string().call().await.unwrap().value,
        Bytes(input.as_bytes().to_vec())
    );
}
// Storing a second string fully replaces the first: both the reported length
// and the contents reflect only the latest write.
#[tokio::test]
async fn stores_string_twice() {
    let instance = setup().await;
    let input1 = "Fuel is the fastest modular execution layer";
    let input2 = "Fuel is blazingly fast!";
    instance
        .methods()
        .store_string(input1.into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        input1.as_bytes().len() as u64
    );
    assert_eq!(
        instance.methods().get_string().call().await.unwrap().value,
        Bytes(input1.as_bytes().to_vec())
    );
    instance
        .methods()
        .store_string(input2.into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        input2.as_bytes().len() as u64
    );
    assert_eq!(
        instance.methods().get_string().call().await.unwrap().value,
        Bytes(input2.as_bytes().to_vec())
    );
}
// `clear_string` reports success and resets the stored length to zero.
#[tokio::test]
async fn clears_bytes() {
    let instance = setup().await;
    let input = "Fuel is blazingly fast!";
    instance
        .methods()
        .store_string(input.into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
    assert!(
        instance
            .methods()
            .clear_string()
            .call()
            .await
            .unwrap()
            .value
    );
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        0
    );
}
// `stored_len` reports 0 before any write and the byte length afterwards.
#[tokio::test]
async fn get_string_length() {
    let instance = setup().await;
    let input = "Fuel is blazingly fast!";
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        0
    );
    instance
        .methods()
        .store_string(input.into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().stored_len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/superabi_supertrait/mod.rs | test/src/sdk-harness/test_projects/superabi_supertrait/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "SuperAbiSuperTraitTestContract",
abi = "test_projects/superabi_supertrait/out/release/superabi_supertrait-abi.json"
));
/// Deploys the `superabi_supertrait` test contract and returns a handle to it.
async fn get_superabi_supertrait_instance() -> SuperAbiSuperTraitTestContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/superabi_supertrait/out/release/superabi_supertrait.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    // `id` is not used again, so move it instead of cloning
    // (clippy::redundant_clone).
    SuperAbiSuperTraitTestContract::new(id, wallet)
}
// `method1`, declared on the superABI, is callable externally and returns 42.
#[tokio::test]
async fn method1_test() -> Result<()> {
    let instance = get_superabi_supertrait_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods.method1().call().await?;
    assert_eq!(42, response.value);
    Ok(())
}
// contract supertrait methods are not callable externally
// #[tokio::test]
// async fn method_test() -> Result<()> {
// let instance = get_superabi_supertrait_instance().await;
// let contract_methods = instance.methods();
// let response = contract_methods.method().call().await?;
// assert_eq!(0xBAD, response.value);
// Ok(())
// }
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/ec_recover/mod.rs | test/src/sdk-harness/test_projects/ec_recover/mod.rs | use fuel_vm::{
fuel_crypto::{Message, PublicKey, SecretKey, Signature},
fuel_tx::Bytes64,
fuel_types::Bytes32,
};
use fuels::{accounts::signers::private_key::PrivateKeySigner, prelude::*, types::Bits256};
use rand::{rngs::StdRng, Rng, SeedableRng};
abigen!(Contract(
name = "EcRecoverContract",
abi = "test_projects/ec_recover/out/release/ec_recover-abi.json"
));
/// Prepares everything the ec_recover tests need: a deterministic keypair
/// (seeded RNG so runs are reproducible), a signed random message, a funded
/// wallet for that key, and the deployed `ec_recover` contract.
async fn setup_env() -> Result<(
    EcRecoverContract<Wallet>,
    SecretKey,
    PublicKey,
    Wallet,
    Message,
    Bytes64,
)> {
    let mut rng = StdRng::seed_from_u64(1000);
    let msg_bytes: Bytes32 = rng.r#gen();
    let private_key = SecretKey::random(&mut rng);
    let public_key = PublicKey::from(&private_key);
    let msg = Message::from_bytes(*msg_bytes);
    let sig = Signature::sign(&private_key, &msg);
    let sig_bytes: Bytes64 = Bytes64::from(sig);
    let signer = PrivateKeySigner::new(private_key);
    // Fund the signer's address so it can pay for the contract deployment.
    let num_assets = 1;
    let coins_per_asset = 10;
    let amount_per_coin = 15;
    let (coins, _asset_ids) = setup_multiple_assets_coins(
        signer.address(),
        num_assets,
        coins_per_asset,
        amount_per_coin,
    );
    let provider = setup_test_provider(coins.clone(), vec![], None, None)
        .await
        .unwrap();
    let wallet = Wallet::new(signer, provider);
    let contract_id = Contract::load_from(
        "test_projects/ec_recover/out/release/ec_recover.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let contract_instance = EcRecoverContract::new(contract_id, wallet.clone());
    Ok((
        contract_instance,
        private_key,
        public_key,
        wallet,
        msg,
        sig_bytes,
    ))
}
// Recovering from a (signature, message) pair on-chain yields the original
// public key. The 64-byte compact signature is passed as two b256 halves
// (r, then v|s) and the returned key halves are rejoined before comparing.
#[tokio::test]
async fn can_recover_public_key() {
    let (contract, _secret, public_key, _wallet, msg, sig_bytes) = setup_env().await.unwrap();
    let sig_r = &sig_bytes[..32];
    let sig_v_s = &sig_bytes[32..];
    let response = contract
        .methods()
        .recover_pub_key(
            Bits256(sig_r.try_into().unwrap()),
            Bits256(sig_v_s.try_into().unwrap()),
            Bits256(msg.into()),
        )
        .call()
        .await
        .unwrap();
    // Reassemble the two returned 32-byte halves into one 64-byte key.
    let first = response.value.0;
    let second = response.value.1;
    let arrays: [[u8; 32]; 2] = [first.0, second.0];
    let joined: Vec<u8> = arrays.into_iter().flat_map(|s| s.into_iter()).collect();
    let joined_array: [u8; 64] = joined.try_into().unwrap();
    let pubkey = Bytes64::new(joined_array);
    assert_eq!(pubkey, Bytes64::new(*public_key));
}
// Recovering an address from a (signature, message) pair on-chain yields the
// address of the wallet that signed the message.
#[tokio::test]
async fn can_recover_address() {
    let (contract, _secret, _public_key, wallet, msg, sig_bytes) = setup_env().await.unwrap();
    let sig_r = &sig_bytes[..32];
    let sig_v_s = &sig_bytes[32..];
    let response = contract
        .methods()
        .recover_address(
            Bits256(sig_r.try_into().unwrap()),
            Bits256(sig_v_s.try_into().unwrap()),
            Bits256(*msg),
        )
        .call()
        .await
        .unwrap();
    assert_eq!(Address::from(response.value), wallet.address());
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_namespace/mod.rs | test/src/sdk-harness/test_projects/storage_namespace/mod.rs | use fuels::{
prelude::*,
types::{Bits256, SizedAsciiString},
};
abigen!(Contract(
name = "TestStorageContract",
abi = "test_projects/storage_namespace/out/release/storage-namespace-abi.json",
));
/// Deploys the `storage_namespace` test contract and returns a handle to it.
async fn get_test_storage_instance() -> TestStorageContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/storage_namespace/out/release/storage_namespace.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap();
    // `id` is not used again, so move it instead of cloning
    // (clippy::redundant_clone).
    TestStorageContract::new(id, wallet)
}
// Round-trips a `bool` through namespaced storage; the getter returns `Some`.
#[tokio::test]
async fn can_store_and_get_bool() {
    let instance = get_test_storage_instance().await;
    let b = true;
    // Test store
    instance.methods().store_bool(b).call().await.unwrap();
    let result = instance.methods().get_bool().call().await.unwrap();
    assert_eq!(result.value, Some(b));
}
// Round-trips a `u8` through namespaced storage.
#[tokio::test]
async fn can_store_and_get_u8() {
    let instance = get_test_storage_instance().await;
    let n = 8;
    // Test store
    instance.methods().store_u8(n).call().await.unwrap();
    let result = instance.methods().get_u8().call().await.unwrap();
    assert_eq!(result.value, Some(n));
}
// Round-trips a `u16` through namespaced storage.
#[tokio::test]
async fn can_store_and_get_u16() {
    let instance = get_test_storage_instance().await;
    let n = 16;
    // Test store
    instance.methods().store_u16(n).call().await.unwrap();
    let result = instance.methods().get_u16().call().await.unwrap();
    assert_eq!(result.value, Some(n));
}
// Round-trips a `u32` through namespaced storage.
#[tokio::test]
async fn can_store_and_get_u32() {
    let instance = get_test_storage_instance().await;
    let n = 32;
    // Test store
    instance.methods().store_u32(n).call().await.unwrap();
    let result = instance.methods().get_u32().call().await.unwrap();
    assert_eq!(result.value, Some(n));
}
// Round-trips a `u64` through namespaced storage.
#[tokio::test]
async fn can_store_and_get_u64() {
    let instance = get_test_storage_instance().await;
    let n = 64;
    // Test store
    instance.methods().store_u64(n).call().await.unwrap();
    let result = instance.methods().get_u64().call().await.unwrap();
    assert_eq!(result.value, Some(n));
}
// Round-trips a `b256` value through namespaced storage.
#[tokio::test]
async fn can_store_b256() {
    let instance = get_test_storage_instance().await;
    let n: Bits256 = Bits256([2; 32]);
    // Test store
    instance.methods().store_b256(n).call().await.unwrap();
    let result = instance.methods().get_b256().call().await.unwrap();
    assert_eq!(result.value, Some(n));
}
// Round-trips a single-field struct through namespaced storage.
#[tokio::test]
async fn can_store_small_struct() {
    let instance = get_test_storage_instance().await;
    let s = SmallStruct { x: 42 };
    // Test store
    instance
        .methods()
        .store_small_struct(s.clone())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_small_struct().call().await.unwrap();
    assert_eq!(result.value, Some(s));
}
// Round-trips a two-field struct through namespaced storage.
#[tokio::test]
async fn can_store_medium_struct() {
    let instance = get_test_storage_instance().await;
    let s = MediumStruct { x: 42, y: 66 };
    // Test store
    instance
        .methods()
        .store_medium_struct(s.clone())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_medium_struct().call().await.unwrap();
    assert_eq!(result.value, Some(s));
}
// Round-trips a struct mixing word-sized and b256 fields through storage.
#[tokio::test]
async fn can_store_large_struct() {
    let instance = get_test_storage_instance().await;
    let s = LargeStruct {
        x: 13,
        y: Bits256([6; 32]),
        z: 77,
    };
    // Test store
    instance
        .methods()
        .store_large_struct(s.clone())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_large_struct().call().await.unwrap();
    assert_eq!(result.value, Some(s));
}
// Round-trips a struct spanning multiple storage slots (two b256 fields).
#[tokio::test]
async fn can_store_very_large_struct() {
    let instance = get_test_storage_instance().await;
    let s = VeryLargeStruct {
        x: 42,
        y: Bits256([9; 32]),
        z: Bits256([7; 32]),
    };
    instance
        .methods()
        .store_very_large_struct(s.clone())
        .call()
        .await
        .unwrap();
    let result = instance
        .methods()
        .get_very_large_struct()
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, Some(s));
}
// Round-trips every variant of `StorageEnum`; each later store overwrites
// the previous variant in the same slot(s).
#[tokio::test]
async fn can_store_enum() {
    let instance = get_test_storage_instance().await;
    let e1 = StorageEnum::V1(Bits256([3; 32]));
    // Test store
    instance
        .methods()
        .store_enum(e1.clone())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_enum().call().await.unwrap();
    assert_eq!(result.value, Some(e1));
    let e2 = StorageEnum::V2(99);
    instance
        .methods()
        .store_enum(e2.clone())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_enum().call().await.unwrap();
    assert_eq!(result.value, Some(e2));
    let e3 = StorageEnum::V3(Bits256([4; 32]));
    instance
        .methods()
        .store_enum(e3.clone())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_enum().call().await.unwrap();
    assert_eq!(result.value, Some(e3));
}
// Round-trips a (b256, u*, b256) tuple through namespaced storage.
#[tokio::test]
async fn can_store_tuple() {
    let instance = get_test_storage_instance().await;
    let t = (Bits256([7; 32]), 8, Bits256([6; 32]));
    // Test store
    instance.methods().store_tuple(t).call().await.unwrap();
    let result = instance.methods().get_tuple().call().await.unwrap();
    assert_eq!(result.value, Some(t));
}
// Round-trips a fixed-length string (str[31]) through namespaced storage.
#[tokio::test]
async fn can_store_string() {
    let instance = get_test_storage_instance().await;
    let s = "fastest_modular_execution_layer".to_string();
    // Test store
    instance
        .methods()
        .store_string(SizedAsciiString::try_from(s.clone()).unwrap())
        .call()
        .await
        .unwrap();
    let result = instance.methods().get_string().call().await.unwrap();
    assert_eq!(result.value, Some(SizedAsciiString::try_from(s).unwrap()));
}
// `store_array` writes a hard-coded array inside the contract; `a` here is
// the expected value the getter should return.
#[tokio::test]
async fn can_store_array() {
    let instance = get_test_storage_instance().await;
    let a = [Bits256([153; 32]), Bits256([136; 32]), Bits256([119; 32])];
    // Test store
    instance.methods().store_array().call().await.unwrap();
    let result = instance.methods().get_array().call().await.unwrap();
    assert_eq!(result.value, Some(a));
}
// Storage access performed inside a non-inlined contract-internal call still
// works and returns the expected value.
#[tokio::test]
async fn can_store_non_inlined() {
    let instance = get_test_storage_instance().await;
    let result = instance.methods().storage_in_call().call().await.unwrap();
    assert_eq!(result.value, 333);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_bytes/mod.rs | test/src/sdk-harness/test_projects/storage_bytes/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageBytesContract",
abi = "test_projects/storage_bytes/out/release/storage_bytes-abi.json",
));
/// Deploys the `storage_bytes` test contract and returns a handle to it.
async fn setup() -> TestStorageBytesContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/storage_bytes/out/release/storage_bytes.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    TestStorageBytesContract::new(id, wallet)
}
// A single byte round-trips through `StorageBytes`; the contract-side
// `assert_stored_bytes` reverts on mismatch, failing the final `unwrap`.
#[tokio::test]
async fn stores_byte() {
    let instance = setup().await;
    let input = vec![1u8];
    assert_eq!(instance.methods().len().call().await.unwrap().value, 0);
    instance
        .methods()
        .store_bytes(input.clone())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input.len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input)
        .call()
        .await
        .unwrap();
}
// Exactly one storage word (8 bytes) round-trips through `StorageBytes`.
#[tokio::test]
async fn stores_8_bytes() {
    let instance = setup().await;
    let input = vec![1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8];
    assert_eq!(instance.methods().len().call().await.unwrap().value, 0);
    instance
        .methods()
        .store_bytes(input.clone())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input.len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input)
        .call()
        .await
        .unwrap();
}
// A full storage slot (32 bytes) round-trips through `StorageBytes`.
#[tokio::test]
async fn stores_32_bytes() {
    let instance = setup().await;
    let input = vec![
        1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 1u8, 2u8,
        3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8,
    ];
    assert_eq!(instance.methods().len().call().await.unwrap().value, 0);
    instance
        .methods()
        .store_bytes(input.clone())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input.len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input)
        .call()
        .await
        .unwrap();
}
// A short UTF-8 string stored as raw bytes round-trips with the correct
// length and contents.
#[tokio::test]
async fn stores_string_as_bytes() {
    let instance = setup().await;
    let input = String::from("Fuel is blazingly fast!");
    assert_eq!(instance.methods().len().call().await.unwrap().value, 0);
    instance
        .methods()
        // `as_bytes` only borrows — the previous `input.clone()` was a
        // redundant allocation (clippy::redundant_clone).
        .store_bytes(input.as_bytes().into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input.as_bytes().into())
        .call()
        .await
        .unwrap();
}
// A multi-kilobyte string stored as raw bytes round-trips with the correct
// length and contents.
#[tokio::test]
async fn stores_long_string_as_bytes() {
    let instance = setup().await;
    // 2060 bytes
    let input = String::from("Nam quis nulla. Integer malesuada. In in enim a arcu imperdiet malesuada. Sed vel lectus. Donec odio urna, tempus molestie, porttitor ut, iaculis quis, sem. Phasellus rhoncus. Aenean id metus id velit ullamcorper pulvinar. Vestibulum fermentum tortor id mi. Pellentesque ipsum. Nulla non arcu lacinia neque faucibus fringilla. Nulla non lectus sed nisl molestie malesuada. Proin in tellus sit amet nibh dignissim sagittis. Vivamus luctus egestas leo. Maecenas sollicitudin. Nullam rhoncus aliquam metus. Etiam egestas wisi a erat.
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Nullam feugiat, turpis at pulvinar vulputate, erat libero tristique tellus, nec bibendum odio risus sit amet ante. Aliquam erat volutpat. Nunc auctor. Mauris pretium quam et urna. Fusce nibh. Duis risus. Curabitur sagittis hendrerit ante. Aliquam erat volutpat. Vestibulum erat nulla, ullamcorper nec, rutrum non, nonummy ac, erat. Duis condimentum augue id magna semper rutrum. Nullam justo enim, consectetuer nec, ullamcorper ac, vestibulum in, elit. Proin pede metus, vulputate nec, fermentum fringilla, vehicula vitae, justo. Fusce consectetuer risus a nunc. Aliquam ornare wisi eu metus. Integer pellentesque quam vel velit. Duis pulvinar.
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Morbi gravida libero nec velit. Morbi scelerisque luctus velit. Etiam dui sem, fermentum vitae, sagittis id, malesuada in, quam. Proin mattis lacinia justo. Vestibulum facilisis auctor urna. Aliquam in lorem sit amet leo accumsan lacinia. Integer rutrum, orci vestibulum ullamcorper ultricies, lacus quam ultricies odio, vitae placerat pede sem sit amet enim. Phasellus et lorem id felis nonummy placerat. Fusce dui leo, imperdiet in, aliquam sit amet, feugiat eu, orci. Aenean vel massa quis mauris vehicula lacinia. Quisque tincidunt scelerisque libero. Maecenas libero. Etiam dictum tincidunt diam. Donec ipsum massa, ullamcorper in, auctor et, scelerisque sed, est. Suspendisse nisl. Sed convallis magna eu sem. Cras pede libero, dapibus nec, pretium");
    assert_eq!(instance.methods().len().call().await.unwrap().value, 0);
    instance
        .methods()
        // `as_bytes` only borrows — the previous `input.clone()` was a
        // redundant 2 KB allocation (clippy::redundant_clone).
        .store_bytes(input.as_bytes().into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input.as_bytes().into())
        .call()
        .await
        .unwrap();
}
// Storing a second byte string fully replaces the first: both the reported
// length and the contents reflect only the latest write.
#[tokio::test]
async fn stores_string_twice() {
    let instance = setup().await;
    let input1 = String::from("Fuel is the fastest modular execution layer");
    let input2 = String::from("Fuel is blazingly fast!");
    instance
        .methods()
        // `as_bytes` only borrows — the previous `inputN.clone()` calls were
        // redundant allocations (clippy::redundant_clone).
        .store_bytes(input1.as_bytes().into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input1.as_bytes().len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input1.as_bytes().into())
        .call()
        .await
        .unwrap();
    instance
        .methods()
        .store_bytes(input2.as_bytes().into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input2.as_bytes().len() as u64
    );
    instance
        .methods()
        .assert_stored_bytes(input2.as_bytes().into())
        .call()
        .await
        .unwrap();
}
// `clear_stored_bytes` reports success and resets the stored length to zero.
#[tokio::test]
async fn clears_bytes() {
    let instance = setup().await;
    let input = String::from("Fuel is blazingly fast!");
    instance
        .methods()
        // `as_bytes` only borrows — the previous `input.clone()` was a
        // redundant allocation (clippy::redundant_clone).
        .store_bytes(input.as_bytes().into())
        .call()
        .await
        .unwrap();
    assert_eq!(
        instance.methods().len().call().await.unwrap().value,
        input.as_bytes().len() as u64
    );
    assert!(
        instance
            .methods()
            .clear_stored_bytes()
            .call()
            .await
            .unwrap()
            .value
    );
    assert_eq!(instance.methods().len().call().await.unwrap().value, 0);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/generics_in_abi/mod.rs | test/src/sdk-harness/test_projects/generics_in_abi/mod.rs | use fuels::{prelude::*, types::Bits256};
abigen!(Contract(
name = "GenericsInAbiTestContract",
abi = "test_projects/generics_in_abi/out/release/generics_in_abi-abi.json"
));
/// Deploys the `generics_in_abi` test contract, returning both the handle and
/// the contract id. (`id` is cloned because it is used again for the tuple.)
async fn get_generics_in_abi_instance() -> (GenericsInAbiTestContract<Wallet>, ContractId) {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/generics_in_abi/out/release/generics_in_abi.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let instance = GenericsInAbiTestContract::new(id.clone(), wallet);
    (instance, id.into())
}
// Exercises generic types in the ABI: each case sends a generic value to the
// contract and asserts it echoes back unchanged.
#[tokio::test]
async fn generics_bool() -> Result<()> {
    let (instance, _id) = get_generics_in_abi_instance().await;
    let contract_methods = instance.methods();
    {
        // simple struct with a single generic param
        let arg1 = SimpleGeneric {
            single_generic_param: 123u64,
        };
        let result = contract_methods
            .struct_w_generic(arg1.clone())
            .call()
            .await?
            .value;
        assert_eq!(result, arg1);
    }
    {
        // struct that delegates the generic param internally
        let arg1 = PassTheGenericOn {
            one: SimpleGeneric {
                single_generic_param: "abc".try_into()?,
            },
        };
        let result = contract_methods
            .struct_delegating_generic(arg1.clone())
            .call()
            .await?
            .value;
        assert_eq!(result, arg1);
    }
    {
        // struct that has the generic in an array
        let arg1 = StructWArrayGeneric { a: [1u32, 2u32] };
        let result = contract_methods
            .struct_w_generic_in_array(arg1.clone())
            .call()
            .await?
            .value;
        assert_eq!(result, arg1);
    }
    {
        // struct that has the generic in a tuple
        let arg1 = StructWTupleGeneric { a: (1, 2) };
        let result = contract_methods
            .struct_w_generic_in_tuple(arg1.clone())
            .call()
            .await?
            .value;
        assert_eq!(result, arg1);
    }
    {
        // struct that has two *different* generics in a tuple
        let arg1 = StructWDiffTupleGeneric { a: (1, false) };
        let result = contract_methods
            .struct_w_diff_generic_in_tuple(arg1.clone())
            .call()
            .await?
            .value;
        assert_eq!(result, arg1);
    }
    {
        // struct with generic in variant
        let arg1 = EnumWGeneric::b(10);
        let result = contract_methods
            .enum_w_generic(arg1.clone())
            .call()
            .await?
            .value;
        assert_eq!(result, arg1);
    }
    {
        // complex case: deeply nested generics combined in one argument
        let pass_through = PassTheGenericOn {
            one: SimpleGeneric {
                single_generic_param: "ab".try_into()?,
            },
        };
        let w_arr_generic = StructWArrayGeneric {
            a: [pass_through.clone(), pass_through],
        };
        let arg1 = MegaExample {
            a: ([Bits256([0; 32]), Bits256([0; 32])], "ab".try_into()?),
            b: vec![(
                [EnumWGeneric::b(StructWTupleGeneric {
                    a: (w_arr_generic.clone(), w_arr_generic),
                })],
                10u32,
            )],
        };
        contract_methods.complex_test(arg1.clone()).call().await?;
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/ec_recover_and_match_predicate/mod.rs | test/src/sdk-harness/test_projects/ec_recover_and_match_predicate/mod.rs | use fuels::{
accounts::{predicate::Predicate, signers::private_key::PrivateKeySigner},
crypto::Message,
prelude::*,
types::B512,
};
abigen!(
Predicate(
name = "TestPredicate",
abi = "test_projects/ec_recover_and_match_predicate/out/release/ec_recover_and_match_predicate-abi.json"
)
);
#[tokio::test]
async fn ec_recover_and_match_predicate_test() -> Result<()> {
use fuel_vm::fuel_crypto::SecretKey;
let secret_key1: SecretKey =
"0x862512a2363db2b3a375c0d4bbbd27172180d89f23f2e259bac850ab02619301"
.parse()
.unwrap();
let secret_key2: SecretKey =
"0x37fa81c84ccd547c30c176b118d5cb892bdb113e8e80141f266519422ef9eefd"
.parse()
.unwrap();
let secret_key3: SecretKey =
"0x976e5c3fa620092c718d852ca703b6da9e3075b9f2ecb8ed42d9f746bf26aafb"
.parse()
.unwrap();
let signer_1 = PrivateKeySigner::new(secret_key1);
let signer_2 = PrivateKeySigner::new(secret_key2);
let signer_3 = PrivateKeySigner::new(secret_key3);
let all_coins = [signer_1.address(), signer_2.address(), signer_3.address()]
.iter()
.flat_map(|wallet| setup_single_asset_coins(*wallet, AssetId::default(), 10, 1_000_000))
.collect::<Vec<_>>();
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let provider = setup_test_provider(all_coins, vec![], Some(node_config), None)
.await
.unwrap();
let wallet_1 = Wallet::new(signer_1, provider.clone());
let wallet_2 = Wallet::new(signer_2, provider.clone());
let wallet_3 = Wallet::new(signer_3, provider.clone());
let random_secret_key = SecretKey::random(&mut rand::thread_rng());
let receiver = Wallet::new(PrivateKeySigner::new(random_secret_key), provider.clone());
let data_to_sign = Message::new([0; 32]);
let signature1: B512 = wallet_1
.signer()
.sign(data_to_sign)
.await?
.as_ref()
.try_into()?;
let signature2: B512 = wallet_2
.signer()
.sign(data_to_sign)
.await?
.as_ref()
.try_into()?;
let signature3: B512 = wallet_3
.signer()
.sign(data_to_sign)
.await?
.as_ref()
.try_into()?;
let signatures = [signature1, signature2, signature3];
let predicate_data = TestPredicateEncoder::default().encode_data(signatures)?;
let code_path =
"test_projects/ec_recover_and_match_predicate/out/release/ec_recover_and_match_predicate.bin";
let predicate = Predicate::load_from(code_path)?
.with_data(predicate_data)
.with_provider(provider.clone());
let amount_to_predicate = 1000;
let asset_id = AssetId::default();
wallet_1
.transfer(
predicate.address(),
amount_to_predicate,
asset_id,
TxPolicies::default(),
)
.await?;
let predicate_balance = provider
.get_asset_balance(&predicate.address(), &asset_id)
.await?;
assert_eq!(predicate_balance, amount_to_predicate as u128);
predicate
.transfer(
receiver.address(),
amount_to_predicate,
asset_id,
TxPolicies::default(),
)
.await?;
let receiver_balance_after = receiver.get_asset_balance(&asset_id).await?;
assert_eq!(amount_to_predicate as u128, receiver_balance_after);
let predicate_balance = provider
.get_asset_balance(&predicate.address(), &asset_id)
.await?;
assert_eq!(predicate_balance, 0);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/evm_ec_recover/mod.rs | test/src/sdk-harness/test_projects/evm_ec_recover/mod.rs | use fuel_vm::{
fuel_crypto::{Message, PublicKey, SecretKey, Signature},
fuel_types::Bytes64,
};
use fuels::{
accounts::signers::private_key::PrivateKeySigner,
prelude::*,
types::{Bits256, Bytes32, EvmAddress},
};
use rand::{rngs::StdRng, Rng, SeedableRng};
use sha3::{Digest, Keccak256};
abigen!(Contract(
name = "EvmEcRecoverContract",
abi = "test_projects/evm_ec_recover/out/release/evm_ec_recover-abi.json"
));
/// Computes the Keccak-256 digest of `data` and returns it as a `Bytes32`.
fn keccak_hash<B>(data: B) -> Bytes32
where
    B: AsRef<[u8]>,
{
    // Scope the hasher so only the finished digest escapes.
    let digest = {
        let mut state = Keccak256::new();
        state.update(data.as_ref());
        state.finalize()
    };
    <[u8; Bytes32::LEN]>::from(digest).into()
}
/// Returns a copy of `bytes` with the leading 12 bytes zeroed, keeping the
/// trailing 20 bytes (the EVM-address portion) untouched.
fn clear_12_bytes(bytes: [u8; 32]) -> [u8; 32] {
    let mut out = bytes;
    out[..12].fill(0);
    out
}
/// Prepares the evm_ec_recover tests: a deterministic keypair (seeded RNG for
/// reproducibility), its derived "EVM address" (keccak of the public key with
/// the first 12 bytes zeroed), a signed random message, a funded wallet, and
/// the deployed contract.
async fn setup_env() -> Result<(
    EvmEcRecoverContract<Wallet>,
    PublicKey,
    Message,
    Bytes64,
    [u8; 32],
)> {
    let mut rng = StdRng::seed_from_u64(1000);
    let msg_bytes: Bytes32 = rng.r#gen();
    let private_key = SecretKey::random(&mut rng);
    let public_key = PublicKey::from(&private_key);
    let signer = PrivateKeySigner::new(private_key);
    // generate an "evm address" from the public key
    let pub_key_hash = keccak_hash(*public_key);
    let evm_address = clear_12_bytes(*pub_key_hash);
    let msg = Message::from_bytes(*msg_bytes);
    let sig = Signature::sign(&private_key, &msg);
    let sig_bytes: Bytes64 = Bytes64::from(sig);
    // Fund the signer's address so it can pay for the contract deployment.
    let num_assets = 1;
    let coins_per_asset = 10;
    let amount_per_coin = 15;
    let (coins, _asset_ids) = setup_multiple_assets_coins(
        signer.address(),
        num_assets,
        coins_per_asset,
        amount_per_coin,
    );
    let provider = setup_test_provider(coins.clone(), vec![], None, None)
        .await
        .unwrap();
    let wallet = Wallet::new(signer, provider);
    let contract_id = Contract::load_from(
        "test_projects/evm_ec_recover/out/release/evm_ec_recover.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let contract_instance = EvmEcRecoverContract::new(contract_id, wallet.clone());
    Ok((contract_instance, public_key, msg, sig_bytes, evm_address))
}
// Recovering from a (signature, message) pair on-chain yields the original
// public key. The 64-byte compact signature is passed as two b256 halves
// (r, then v|s) and the returned key halves are rejoined before comparing.
#[tokio::test]
async fn can_recover_public_key() {
    let (contract, public_key, msg, sig_bytes, _) = setup_env().await.unwrap();
    let sig_r = &sig_bytes[..32];
    let sig_v_s = &sig_bytes[32..];
    let response = contract
        .methods()
        .recover_pub_key(
            Bits256(sig_r.try_into().unwrap()),
            Bits256(sig_v_s.try_into().unwrap()),
            Bits256(msg.into()),
        )
        .call()
        .await
        .unwrap();
    // Reassemble the two returned 32-byte halves into one 64-byte key.
    let first = response.value.0;
    let second = response.value.1;
    let arrays: [[u8; 32]; 2] = [first.0, second.0];
    let joined: Vec<u8> = arrays.into_iter().flat_map(|s| s.into_iter()).collect();
    let joined_array: [u8; 64] = joined.try_into().unwrap();
    let pubkey = Bytes64::new(joined_array);
    assert_eq!(pubkey, Bytes64::new(*public_key));
}
// Recovering an EVM address from a (signature, message) pair on-chain matches
// the address derived locally in `setup_env` (keccak of the public key with
// the first 12 bytes zeroed).
#[tokio::test]
async fn can_recover_evm_address() {
    let (contract, _, msg, sig_bytes, evm_address) = setup_env().await.unwrap();
    let sig_r = &sig_bytes[..32];
    let sig_v_s = &sig_bytes[32..];
    let response = contract
        .methods()
        .recover_evm_address(
            Bits256(sig_r.try_into().unwrap()),
            Bits256(sig_v_s.try_into().unwrap()),
            Bits256(*msg),
        )
        .call()
        .await
        .unwrap();
    assert_eq!(response.value, EvmAddress::from(Bits256(evm_address)));
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec_of_storage_string/mod.rs | test/src/sdk-harness/test_projects/storage_vec_of_storage_string/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageVecOfStorageStringContract",
abi = "test_projects/storage_vec_of_storage_string/out/release/storage_vec_of_storage_string-abi.json",
));
/// Spins up a local provider, deploys the `storage_vec_of_storage_string`
/// test contract, and returns a typed handle bound to the deploying wallet.
async fn test_storage_vec_of_storage_string_instance(
) -> TestStorageVecOfStorageStringContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let loaded = Contract::load_from(
        "test_projects/storage_vec_of_storage_string/out/release/storage_vec_of_storage_string.bin",
        LoadConfiguration::default(),
    )
    .unwrap();
    let deployed = loaded.deploy(&wallet, TxPolicies::default()).await.unwrap();
    let id = deployed.contract_id;
    TestStorageVecOfStorageStringContract::new(id.clone(), wallet)
}
// This test proves that https://github.com/FuelLabs/sway/issues/6036 is fixed.
// Pushes `NUM_OF_STRINGS` strings into a StorageVec<StorageString> and reads
// them all back, checking count, order, and contents.
#[tokio::test]
async fn test_push_and_get() {
    let instance = test_storage_vec_of_storage_string_instance().await;

    // Keep it larger than 8, to stress the internal implementation that does % 8.
    const NUM_OF_STRINGS: u64 = 10;

    let strings = (0..NUM_OF_STRINGS)
        .map(|i| i.to_string())
        .collect::<Vec<_>>();

    // Unwrap each push: a failed write must fail the test here instead of
    // surfacing later as a confusing count/content mismatch.
    for string in &strings {
        instance
            .methods()
            .push(string.to_owned())
            .call()
            .await
            .unwrap();
    }

    let returned_count = instance.methods().count().call().await.unwrap().value;
    assert_eq!(returned_count, NUM_OF_STRINGS);

    let mut returned_strings = vec![];
    for i in 0..NUM_OF_STRINGS {
        let returned_string = instance.methods().get(i).call().await.unwrap().value;
        returned_strings.push(returned_string);
    }
    assert_eq!(returned_strings, strings);
}
// TODO: Uncomment this test once https://github.com/FuelLabs/sway/issues/6040 is fixed.
// #[tokio::test]
// async fn test_push_and_insert() {
// let instance = test_storage_vec_of_storage_string_instance().await;
// const NUM_OF_STRINGS: u64 = 10; // Keep it larger than 8, to stress the internal implementation that does % 8.
// let mut strings = (0..NUM_OF_STRINGS).map(|i| i.to_string()).collect::<Vec<_>>();
// for string in &strings {
// let _ = instance
// .methods()
// .insert(string.to_owned())
// .call()
// .await;
// }
// let returned_count = instance
// .methods()
// .count()
// .call()
// .await
// .unwrap()
// .value;
// assert_eq!(returned_count, NUM_OF_STRINGS);
// let mut returned_strings = vec![];
// for i in 0..NUM_OF_STRINGS {
// let returned_string = instance
// .methods()
// .get(i)
// .call()
// .await
// .unwrap()
// .value;
// returned_strings.push(returned_string);
// }
// strings.reverse();
// assert_eq!(returned_strings, strings);
// }
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/private_struct_fields_in_storage_and_abi/mod.rs | test/src/sdk-harness/test_projects/private_struct_fields_in_storage_and_abi/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestPrivateStructFieldsInStorageAndAbi",
abi = "test_projects/private_struct_fields_in_storage_and_abi/out/release/private_struct_fields_in_storage_and_abi-abi.json",
));
/// Launches a local provider, deploys the `private_struct_fields_in_storage_and_abi`
/// test contract, and returns a typed handle owned by the deploying wallet.
async fn test_storage_private_struct_fields_instance(
) -> TestPrivateStructFieldsInStorageAndAbi<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/private_struct_fields_in_storage_and_abi/out/release/private_struct_fields_in_storage_and_abi.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    TestPrivateStructFieldsInStorageAndAbi::new(id.clone(), wallet)
}
// Reads the storage-initialized struct and checks it matches the values the
// contract's storage block declares (x: 11, y: 12).
#[tokio::test]
async fn read_initial_can_init_via_storage() {
    let instance = test_storage_private_struct_fields_instance().await;
    let actual = instance
        .methods()
        .read_initial_can_init_via_storage()
        .call()
        .await
        .unwrap()
        .value;
    let expected = CanInitStruct { x: 11, y: 12 };
    assert_eq!(actual, expected);
}
// Round-trips a `CanInitStruct` (all fields private in Sway) through contract
// storage: writes the value and checks the read-back equals the input.
#[tokio::test]
async fn write_and_read_can_init_via_storage() {
    let methods = test_storage_private_struct_fields_instance()
        .await
        .methods();
    let input = CanInitStruct { x: 1111, y: 2222 };
    assert_eq!(
        methods
            .write_and_read_can_init_via_storage(input.clone())
            .call()
            .await
            .unwrap()
            .value,
        input
    );
}
// Same round-trip as above, but for `CannotInitStruct`, exercising a struct
// that the contract cannot initialize in its storage declaration.
#[tokio::test]
async fn write_and_read_cannot_init_via_api() {
    let methods = test_storage_private_struct_fields_instance()
        .await
        .methods();
    let input = CannotInitStruct { x: 1111, y: 2222 };
    assert_eq!(
        methods
            .write_and_read_cannot_init_via_api(input.clone())
            .call()
            .await
            .unwrap()
            .value,
        input
    );
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/low_level_call/mod.rs | test/src/sdk-harness/test_projects/low_level_call/mod.rs | use fuel_vm::fuel_tx::{
output::contract::Contract as OutputContract, Bytes32, ContractId, Output, TxPointer, UtxoId,
};
use fuels::{
core::codec::*,
prelude::*,
types::{input::Input, Bits256, SizedAsciiString},
};
// Builds the ABI function selector bytes for a Sway function. Only the
// function *name* feeds into the selector; the argument types in the pattern
// are matched for readability at the call site but deliberately unused.
macro_rules! fn_selector {
    ( $fn_name: ident ( $($fn_arg: ty),* ) ) => {
        encode_fn_selector(stringify!($fn_name)).to_vec()
    };
}
// ABI-encodes the given argument values into raw calldata bytes using the
// default encoder configuration; panics if encoding fails.
macro_rules! calldata {
    ( $($arg: expr),* ) => {
        ABIEncoder::new(EncoderConfig::default()).encode(&[$(::fuels::core::traits::Tokenizable::into_token($arg)),*]).unwrap()
    }
}
// Load abi from json
abigen!(
Contract(
name = "TestContract",
abi =
"test_artifacts/low_level_callee_contract/out/release/low_level_callee_contract-abi.json"
),
Script(
name = "TestScript",
abi = "test_projects/low_level_call/out/release/low_level_call-abi.json"
)
);
/// Runs the `low_level_call` script, which performs a raw call into contract
/// `id` using the pre-encoded `function_selector` and `calldata`.
///
/// `single_value_type_arg` is forwarded to the script; presumably it signals
/// whether the callee takes a single value-type argument — confirm against
/// the script source.
async fn low_level_call(
    id: ContractId,
    wallet: Wallet,
    function_selector: Vec<u8>,
    calldata: Vec<u8>,
    single_value_type_arg: bool,
) {
    // Build the script instance
    let script_instance = TestScript::new(
        wallet,
        "test_projects/low_level_call/out/release/low_level_call.bin",
    );
    // Add the contract being called to the inputs and outputs; the zeroed
    // utxo/roots are placeholders for a contract input/output pair.
    let contract_input = Input::Contract {
        utxo_id: UtxoId::new(Bytes32::zeroed(), 0),
        balance_root: Bytes32::zeroed(),
        state_root: Bytes32::zeroed(),
        tx_pointer: TxPointer::default(),
        contract_id: id,
    };
    let contract_output = Output::Contract(OutputContract {
        // Points back at the contract input declared above (index 0).
        input_index: 0u16,
        balance_root: Bytes32::zeroed(),
        state_root: Bytes32::zeroed(),
    });
    // Run the script which will call the contract
    let tx = script_instance
        .main(
            id,
            fuels::types::Bytes(function_selector),
            fuels::types::Bytes(calldata),
            single_value_type_arg,
        )
        .with_inputs(vec![contract_input])
        .with_outputs(vec![contract_output])
        .with_tx_policies(TxPolicies::default());
    tx.call().await.unwrap();
}
/// Launches a single-wallet local network, deploys the low-level callee
/// contract, and returns (typed instance, contract id, deploying wallet).
async fn get_contract_instance() -> (TestContract<Wallet>, ContractId, Wallet) {
    // Launch a local network and deploy the contract
    let mut wallets = launch_custom_provider_and_get_wallets(
        WalletsConfig::new(
            Some(1), /* Single wallet */
            Some(1), /* Single coin (UTXO) */
            Some(1_000_000_000), /* Amount per coin */
        ),
        None,
        None,
    )
    .await
    .unwrap();
    let wallet = wallets.pop().unwrap();
    let id = Contract::load_from(
        "test_artifacts/low_level_callee_contract/out/release/low_level_callee_contract.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let instance = TestContract::new(id.clone(), wallet.clone());
    (instance, id.into(), wallet)
}
// Low-level call with a single one-word (u64) argument.
#[tokio::test]
async fn can_call_with_one_word_arg() {
    let (instance, id, wallet) = get_contract_instance().await;
    // Calling "set_value(u64)" with argument "42" should set the value to 42
    let selector = fn_selector!(set_value(u64));
    let args = calldata!(42u64);
    low_level_call(id, wallet, selector, args, true).await;
    let stored = instance.methods().get_value().call().await.unwrap().value;
    assert_eq!(stored, 42);
}
// Low-level call with a multi-word (b256) argument; note that
// `single_value_type_arg` is `false` here, unlike the u64 case above.
#[tokio::test]
async fn can_call_with_multi_word_arg() {
    let (instance, id, wallet) = get_contract_instance().await;
    let function_selector = fn_selector!(set_b256_value(Bits256));
    let calldata = calldata!(Bits256([1u8; 32]));
    low_level_call(id, wallet, function_selector, calldata, false).await;
    let result = instance
        .methods()
        .get_b256_value()
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(result, Bits256([1u8; 32]));
}
// Low-level call with two u64 arguments; the stored value is expected to be
// their sum.
#[tokio::test]
async fn can_call_with_multiple_args() {
    let (instance, id, wallet) = get_contract_instance().await;
    let selector = fn_selector!(set_value_multiple(u64, u64));
    let args = calldata!(23u64, 42u64);
    low_level_call(id, wallet, selector, args, false).await;
    let stored = instance.methods().get_value().call().await.unwrap().value;
    assert_eq!(stored, 23 + 42);
}
// Low-level call mixing a struct and a fixed-length string argument; checks
// all three values the callee exposes afterwards.
#[tokio::test]
async fn can_call_with_multiple_args_complex() {
    let (instance, id, wallet) = get_contract_instance().await;
    let function_selector =
        fn_selector!(set_value_multiple_complex(MyStruct, SizedAsciiString::<4>));
    let calldata = calldata!(
        MyStruct {
            a: true,
            b: [1, 2, 3],
        },
        SizedAsciiString::<4>::try_from("fuel").unwrap()
    );
    low_level_call(id, wallet, function_selector, calldata, false).await;
    let result_uint = instance.methods().get_value().call().await.unwrap().value;
    let result_bool = instance
        .methods()
        .get_bool_value()
        .call()
        .await
        .unwrap()
        .value;
    let result_str = instance
        .methods()
        .get_str_value()
        .call()
        .await
        .unwrap()
        .value;
    // Expected values mirror what the callee stores for this input — the
    // uint presumably derives from `b`; confirm against the callee source.
    assert_eq!(result_uint, 2);
    assert!(result_bool);
    assert_eq!(result_str, "fuel");
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec_nested/mod.rs | test/src/sdk-harness/test_projects/storage_vec_nested/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageVecNestedContract",
abi = "test_projects/storage_vec_nested/out/release/storage_vec_nested-abi.json",
));
/// Launches a local provider, deploys the `storage_vec_nested` test contract,
/// and returns a typed handle owned by the deploying wallet.
async fn test_storage_vec_nested_instance() -> TestStorageVecNestedContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/storage_vec_nested/out/release/storage_vec_nested.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    TestStorageVecNestedContract::new(id.clone(), wallet)
}
// Passes iff the contract's nested-vec push scenario does not revert.
#[tokio::test]
async fn nested_vec_access_push() {
    let instance = test_storage_vec_nested_instance().await;
    instance
        .methods()
        .nested_vec_access_push()
        .call()
        .await
        .unwrap();
}
// Passes iff the contract's nested-vec insert scenario does not revert.
#[tokio::test]
async fn nested_vec_access_insert() {
    let instance = test_storage_vec_nested_instance().await;
    instance
        .methods()
        .nested_vec_access_insert()
        .call()
        .await
        .unwrap();
}
// The contract call is expected to revert; the final `unwrap` then panics and
// `#[should_panic]` turns that panic into a pass.
#[tokio::test]
#[should_panic]
async fn revert_on_load_storage_vec() {
    let methods = test_storage_vec_nested_instance().await.methods();
    methods.revert_on_load_storage_vec().call().await.unwrap();
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_init/mod.rs | test/src/sdk-harness/test_projects/storage_init/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageInitContract",
abi = "test_projects/storage_init/out/release/storage_init-abi.json",
));
/// Deploys the `storage_init` contract with its storage slots pre-populated
/// from the compiler-emitted `*-storage_slots.json` file, so the contract
/// starts with its declared initializer values already in place.
async fn test_storage_init_instance() -> TestStorageInitContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/storage_init/out/release/storage_init.bin",
        LoadConfiguration::default().with_storage_configuration(
            StorageConfiguration::default()
                .add_slot_overrides_from_file(
                    "test_projects/storage_init/out/release/storage_init-storage_slots.json",
                )
                .unwrap(),
        ),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    TestStorageInitContract::new(id.clone(), wallet)
}
// Checks that all storage initializers produced the expected initial values;
// the contract-side check reports success as a single bool.
// (Removed a stale commented-out receipt-debugging variant of this test.)
#[tokio::test]
async fn test_initializers() {
    let methods = test_storage_init_instance().await.methods();
    assert!(methods.test_initializers().call().await.unwrap().value);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_access/mod.rs | test/src/sdk-harness/test_projects/storage_access/mod.rs | use fuels::{prelude::*, types::Bits256};
abigen!(Contract(
name = "TestStorageAccessContract",
abi = "test_projects/storage_access/out/release/storage_access-abi.json",
));
/// Launches a local provider, deploys the `storage_access` test contract,
/// and returns a typed handle owned by the deploying wallet.
async fn test_storage_access_instance() -> TestStorageAccessContract<Wallet> {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/storage_access/out/release/storage_access.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    TestStorageAccessContract::new(id.clone(), wallet)
}
// Round-trips primitive values (u64 and b256) through contract storage.
#[tokio::test]
async fn simple_access() {
    let methods = test_storage_access_instance().await.methods();

    let number = 42;
    let echoed_number = methods
        .write_and_read_u64(number)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(echoed_number, number);

    let word = Bits256([1; 32]);
    let echoed_word = methods
        .write_and_read_b256(word)
        .call()
        .await
        .unwrap()
        .value;
    assert_eq!(echoed_word, word);
}
// Round-trips a flat struct (`Simple`) through contract storage.
#[tokio::test]
async fn struct_access_simple() {
    let methods = test_storage_access_instance().await.methods();
    let input = Simple {
        x: 0,
        y: 0,
        b: Bits256([1; 32]),
        z: 69,
        w: 0,
    };
    assert_eq!(
        methods
            .write_and_read_struct_simple(input.clone())
            .call()
            .await
            .unwrap()
            .value,
        input
    );
}
// Round-trips a deeply nested struct (`S` containing `T` containing `M`)
// through contract storage via two different write/read entry points.
#[tokio::test]
async fn struct_access() {
    let methods = test_storage_access_instance().await.methods();
    let input = S {
        a: 1,
        b: Bits256([2; 32]),
        c: T {
            x: 3,
            y: Bits256([4; 32]),
            z: M {
                u: Bits256([5; 32]),
                v: 8,
            },
        },
        d: Bits256([6; 32]),
    };
    assert_eq!(
        methods
            .write_and_read_struct_1(input.clone())
            .call()
            .await
            .unwrap()
            .value,
        input
    );
    assert_eq!(
        methods
            .write_and_read_struct_2(input.clone())
            .call()
            .await
            .unwrap()
            .value,
        input
    );
}
// Writes three key/value pairs into a StorageMap and checks that each key
// reads back its value and that an absent key reads back `None`.
#[tokio::test]
async fn map_access() {
    let methods = test_storage_access_instance().await.methods();

    let (key1, key2, key3) = (42, 69, 99);
    let (value1, value2, value3) = (1, 2, 3);

    // Unwrap each write: a failed write must fail the test here instead of
    // surfacing later as a confusing read mismatch.
    methods.map_write(key1, value1).call().await.unwrap();
    methods.map_write(key2, value2).call().await.unwrap();
    methods.map_write(key3, value3).call().await.unwrap();

    assert_eq!(
        methods.map_read(key1).call().await.unwrap().value,
        Some(value1)
    );
    assert_eq!(
        methods.map_read(key2).call().await.unwrap().value,
        Some(value2)
    );
    assert_eq!(
        methods.map_read(key3).call().await.unwrap().value,
        Some(value3)
    );
    // A key that was never written must read back as `None`.
    assert_eq!(methods.map_read(0).call().await.unwrap().value, None);
}
// Writes pairs into two StorageMaps held inside one struct and checks that
// each map reads back its own value; an absent key must read back `None`.
#[tokio::test]
async fn maps_in_struct_access() {
    let methods = test_storage_access_instance().await.methods();

    let (key1, key2, key3) = ((42, 24), (69, 96), (99, 88));
    let (value1, value2, value3) = ((1, 4), (2, 5), (3, 6));

    // Unwrap each write so failures surface immediately rather than as
    // later read mismatches.
    methods.map_in_struct_write(key1, value1).call().await.unwrap();
    methods.map_in_struct_write(key2, value2).call().await.unwrap();
    methods.map_in_struct_write(key3, value3).call().await.unwrap();

    assert_eq!(
        methods.map_in_struct_read(key1).call().await.unwrap().value,
        (Some(value1.0), Some(value1.1))
    );
    assert_eq!(
        methods.map_in_struct_read(key2).call().await.unwrap().value,
        (Some(value2.0), Some(value2.1))
    );
    assert_eq!(
        methods.map_in_struct_read(key3).call().await.unwrap().value,
        (Some(value3.0), Some(value3.1))
    );
    // Unwritten keys in both maps must read back as `None`.
    assert_eq!(
        methods
            .map_in_struct_read((0, 0))
            .call()
            .await
            .unwrap()
            .value,
        (None, None)
    );
}
// The contract method clears a storage key and reports success as a bool.
#[tokio::test]
async fn clears_storage_key() {
    let methods = test_storage_access_instance().await.methods();
    let cleared = methods.clears_storage_key().call().await.unwrap().value;
    assert!(cleared);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/result_in_abi/mod.rs | test/src/sdk-harness/test_projects/result_in_abi/mod.rs | use fuels::{prelude::*, types::Bits256};
use std::str::FromStr;
abigen!(Contract(
name = "ResultInAbiTestContract",
abi = "test_projects/result_in_abi/out/release/result_in_abi-abi.json"
));
/// Deploys the `result_in_abi` echo contract on a fresh local provider and
/// returns the typed instance together with its contract id.
async fn get_result_in_abi_instance() -> (ResultInAbiTestContract<Wallet>, ContractId) {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/result_in_abi/out/release/result_in_abi.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let instance = ResultInAbiTestContract::new(id.clone(), wallet);
    (instance, id.into())
}
// Echo test: the contract returns its `Result<bool, SomeError>` input
// unchanged, for both Ok variants and the Err case.
#[tokio::test]
async fn test_bool() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(true);
    let response = contract_methods.bool_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok(false);
    let response = contract_methods.bool_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.bool_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for `Result<u8, SomeError>`.
#[tokio::test]
async fn test_u8() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(42);
    let response = contract_methods.u8_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.u8_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for `Result<u16, SomeError>`.
#[tokio::test]
async fn test_u16() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(42);
    let response = contract_methods.u16_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.u16_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for `Result<u32, SomeError>`.
#[tokio::test]
async fn test_u32() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(42);
    let response = contract_methods.u32_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.u32_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for `Result<u64, SomeError>`.
#[tokio::test]
async fn test_u64() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(42);
    let response = contract_methods.u64_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.u64_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for `Result<b256, SomeError>`.
#[tokio::test]
async fn test_b256() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(Bits256([1u8; 32]));
    let response = contract_methods.b256_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.b256_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for a struct whose field is itself a `Result`; covers nested Ok,
// nested Err, and an outer Err.
#[tokio::test]
async fn test_struct() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(MyStruct {
        first_field: Ok(Address::from_str(
            "0x4242424242424242424242424242424242424242424242424242424242424242",
        )
        .unwrap()),
        second_field: 42,
    });
    let response = contract_methods.struct_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok(MyStruct {
        first_field: Err(SomeError::SomeErrorString("error".try_into().unwrap())),
        second_field: 42,
    });
    let response = contract_methods.struct_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.struct_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for a tuple containing a nested `Result`.
#[tokio::test]
async fn test_tuple() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok((
        Ok(
            Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
                .unwrap(),
        ),
        42,
    ));
    let response = contract_methods.tuple_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok((
        Err(SomeError::SomeErrorString("error".try_into().unwrap())),
        42,
    ));
    let response = contract_methods.tuple_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.tuple_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for an enum with a `Result` payload; covers both variants and
// the outer Err.
#[tokio::test]
async fn test_enum() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(MyEnum::FirstVariant(Ok(Address::from_str(
        "0x4242424242424242424242424242424242424242424242424242424242424242",
    )
    .unwrap())));
    let response = contract_methods.enum_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok(MyEnum::FirstVariant(Err(SomeError::SomeErrorString(
        "error".try_into().unwrap(),
    ))));
    let response = contract_methods.enum_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok(MyEnum::SecondVariant(42));
    let response = contract_methods.enum_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.enum_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
// Echo test for an array of `Result`s: all-Ok, mixed Ok/Err, all-Err, and an
// outer Err are all round-tripped unchanged.
#[tokio::test]
async fn test_array() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok([
        Ok(
            Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
                .unwrap(),
        ),
        Ok(
            Address::from_str("0x6969696969696969696969696969696969696969696969696969696969696969")
                .unwrap(),
        ),
        Ok(
            Address::from_str("0x9999999999999999999999999999999999999999999999999999999999999999")
                .unwrap(),
        ),
    ]);
    let response = contract_methods.array_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok([
        Err(SomeError::SomeErrorString("error".try_into().unwrap())),
        Ok(
            Address::from_str("0x6969696969696969696969696969696969696969696969696969696969696969")
                .unwrap(),
        ),
        Err(SomeError::SomeErrorString("error".try_into().unwrap())),
    ]);
    let response = contract_methods.array_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Ok([
        Err(SomeError::SomeErrorString("error".try_into().unwrap())),
        Err(SomeError::SomeErrorString("error".try_into().unwrap())),
        Err(SomeError::SomeErrorString("error".try_into().unwrap())),
    ]);
    let response = contract_methods.array_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods.array_test(input.clone()).call().await?;
    assert_eq!(input, response.value);
    Ok(())
}
#[tokio::test]
async fn test_string() -> Result<()> {
let (instance, _id) = get_result_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Ok("fuel".try_into().unwrap());
let response = contract_methods.string_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
let response = contract_methods.string_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
// Echo test for an `Option` nested inside a `Result`: Ok(Some), Ok(None),
// and Err all round-trip unchanged.
#[tokio::test]
async fn test_option_in_result() -> Result<()> {
    let (instance, _id) = get_result_in_abi_instance().await;
    let contract_methods = instance.methods();
    let input = Ok(Some("fuel".try_into().unwrap()));
    let response = contract_methods
        .option_in_result_test(input.clone())
        .call()
        .await?;
    assert_eq!(input, response.value);
    let input = Ok(None);
    let response = contract_methods
        .option_in_result_test(input.clone())
        .call()
        .await?;
    assert_eq!(input, response.value);
    let input = Err(SomeError::SomeErrorString("error".try_into().unwrap()));
    let response = contract_methods
        .option_in_result_test(input.clone())
        .call()
        .await?;
    assert_eq!(input, response.value);
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/asset_ops/mod.rs | test/src/sdk-harness/test_projects/asset_ops/mod.rs | use fuels::{
prelude::*,
types::{Bits256, Bytes32, Identity},
};
use sha2::{Digest, Sha256};
use std::str::FromStr;
abigen!(Contract(
name = "TestFuelCoinContract",
abi = "test_projects/asset_ops/out/release/asset_ops-abi.json"
));
// Mints 11 coins of the zero sub-id asset and checks the contract's balance
// goes from 0 to 11.
#[tokio::test]
async fn can_mint() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet).await;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    // Balance must start at zero before any mint.
    let mut balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 0);
    fuelcontract_instance
        .methods()
        .mint_coins(11, Bits256(*sub_id))
        .call()
        .await
        .unwrap();
    balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 11);
}
// Mints two distinct assets (different sub-ids) from the same contract and
// checks each asset's balance is tracked independently.
#[tokio::test]
async fn can_mint_multiple() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet).await;
    let sub_id_1 = Bytes32::zeroed();
    let sub_id_2 = Bytes32::from([1u8; 32]);
    let asset_id_1 = get_asset_id(sub_id_1, fuelcontract_id).await;
    let asset_id_2 = get_asset_id(sub_id_2, fuelcontract_id).await;
    // Both assets start with a zero balance.
    let mut balance_result_1 = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id_1), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result_1.value, 0);
    let mut balance_result_2 = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id_2), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result_2.value, 0);
    fuelcontract_instance
        .methods()
        .mint_coins(11, Bits256(*sub_id_1))
        .call()
        .await
        .unwrap();
    fuelcontract_instance
        .methods()
        .mint_coins(12, Bits256(*sub_id_2))
        .call()
        .await
        .unwrap();
    balance_result_1 = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id_1), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result_1.value, 11);
    balance_result_2 = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id_2), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result_2.value, 12);
}
// Mints 11 coins, burns 7, and checks the remaining balance is 4.
#[tokio::test]
async fn can_burn() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet).await;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    let mut balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 0);
    fuelcontract_instance
        .methods()
        .mint_coins(11, Bits256(*sub_id))
        .call()
        .await
        .unwrap();
    fuelcontract_instance
        .methods()
        .burn_coins(7, Bits256(*sub_id))
        .call()
        .await
        .unwrap();
    balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 4);
}
// Mints 100 coins, force-transfers 42 of them to a second contract, and
// checks both contracts' resulting balances (58 and 42).
#[tokio::test]
async fn can_force_transfer() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet.clone()).await;
    let balance_id = get_balance_contract_id(wallet).await;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    let mut balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 0);
    fuelcontract_instance
        .methods()
        .mint_coins(100, Bits256(*sub_id))
        .call()
        .await
        .unwrap();
    balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 100);
    // confirm initial balance on balance contract (recipient)
    balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), balance_id)
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 0);
    let coins = 42u64;
    fuelcontract_instance
        .methods()
        .force_transfer_coins(coins, Bits256(*asset_id), balance_id)
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    // confirm remaining balance on fuelcoin contract
    balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), fuelcontract_id)
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 58);
    // confirm new balance on balance contract (recipient)
    balance_result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), balance_id)
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    assert_eq!(balance_result.value, 42);
}
// Mints coins and sends them to another contract in a single call; the
// recipient contract's balance must equal the minted amount.
#[tokio::test]
async fn can_mint_and_send_to_contract() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet.clone()).await;
    let balance_id = get_balance_contract_id(wallet).await;
    let amount = 55u64;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    fuelcontract_instance
        .methods()
        .mint_and_send_to_contract(amount, balance_id, Bits256(*sub_id))
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    let result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), balance_id)
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, amount)
}
// Mints coins directly to a wallet address; a variable output is reserved
// for the transfer and the wallet's spendable coins are checked afterwards.
#[tokio::test]
async fn can_mint_and_send_to_address() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet.clone()).await;
    let amount = 55u64;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    let asset_id_array: [u8; 32] = *asset_id;
    let address = wallet.address();
    let recipient = address.clone();
    fuelcontract_instance
        .methods()
        .mint_and_send_to_address(amount, recipient, Bits256(*sub_id))
        // One variable output is needed to carry the minted coins out.
        .with_variable_output_policy(VariableOutputPolicy::Exactly(1))
        .call()
        .await
        .unwrap();
    assert_eq!(
        wallet
            .get_spendable_resources(AssetId::from(asset_id_array), 1, None)
            .await
            .unwrap()[0]
            .amount(),
        amount
    );
}
// Same as above but through the Identity-based `generic_mint_to` entry point
// with an Address identity.
#[tokio::test]
async fn can_perform_generic_mint_to_with_address() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet.clone()).await;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    let amount = 55u64;
    let asset_id_array: [u8; 32] = *asset_id;
    let address = wallet.address();
    fuelcontract_instance
        .methods()
        .generic_mint_to(amount, Identity::Address(address.into()), Bits256(*sub_id))
        .with_variable_output_policy(VariableOutputPolicy::Exactly(1))
        .call()
        .await
        .unwrap();
    assert_eq!(
        wallet
            .get_spendable_resources(AssetId::from(asset_id_array), 1, None)
            .await
            .unwrap()[0]
            .amount(),
        amount
    );
}
/// Mints via `generic_mint_to` with an `Identity::ContractId` recipient and
/// verifies the receiving contract's balance via `get_balance`.
#[tokio::test]
async fn can_perform_generic_mint_to_with_contract_id() {
    let num_wallets = 1;
    let coins_per_wallet = 1;
    let amount_per_coin = 1_000_000;
    let config = WalletsConfig::new(
        Some(num_wallets),
        Some(coins_per_wallet),
        Some(amount_per_coin),
    );
    let wallets = launch_custom_provider_and_get_wallets(config, None, None)
        .await
        .unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallets[0].clone()).await;
    // Helper contract that will receive the minted coins.
    let balance_id = get_balance_contract_id(wallets[0].clone()).await;
    let amount = 55u64;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    fuelcontract_instance
        .methods()
        .generic_mint_to(amount, Identity::ContractId(balance_id), Bits256(*sub_id))
        // The receiving contract must be declared as a callable dependency.
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    let result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), balance_id)
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, amount)
}
/// Mints coins into the contract, then transfers them to a wallet address via
/// `generic_transfer` and checks the wallet can spend them.
#[tokio::test]
async fn can_perform_generic_transfer_to_address() {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallet.clone()).await;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    let amount = 33u64;
    let asset_id_array: [u8; 32] = *asset_id;
    let address = wallet.address();
    // First mint the coins so the contract has something to transfer.
    fuelcontract_instance
        .methods()
        .mint_coins(amount, Bits256(*sub_id))
        .call()
        .await
        .unwrap();
    fuelcontract_instance
        .methods()
        .generic_transfer(
            amount,
            Bits256(*asset_id),
            Identity::Address(address.into()),
        )
        // One variable output is required for the transfer to an address.
        .with_variable_output_policy(VariableOutputPolicy::Exactly(1))
        .call()
        .await
        .unwrap();
    // The transferred amount must now be spendable by the wallet.
    assert_eq!(
        wallet
            .get_spendable_resources(AssetId::from(asset_id_array), 1, None)
            .await
            .unwrap()[0]
            .amount(),
        amount
    );
}
/// Mints coins into the contract, then transfers them to another contract via
/// `generic_transfer` and verifies the receiving contract's balance.
#[tokio::test]
async fn can_perform_generic_transfer_to_contract() {
    let num_wallets = 1;
    let coins_per_wallet = 1;
    let amount_per_coin = 1_000_000;
    let config = WalletsConfig::new(
        Some(num_wallets),
        Some(coins_per_wallet),
        Some(amount_per_coin),
    );
    let wallets = launch_custom_provider_and_get_wallets(config, None, None)
        .await
        .unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallets[0].clone()).await;
    // Helper contract that will receive the transferred coins.
    let balance_id = get_balance_contract_id(wallets[0].clone()).await;
    let sub_id = Bytes32::zeroed();
    let asset_id = get_asset_id(sub_id, fuelcontract_id).await;
    let amount = 44u64;
    // First mint the coins so the contract has something to transfer.
    fuelcontract_instance
        .methods()
        .mint_coins(amount, Bits256(*sub_id))
        .call()
        .await
        .unwrap();
    fuelcontract_instance
        .methods()
        .generic_transfer(amount, Bits256(*asset_id), Identity::ContractId(balance_id))
        // The receiving contract must be declared as a callable dependency.
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    let result = fuelcontract_instance
        .methods()
        .get_balance(Bits256(*asset_id), balance_id)
        .with_contract_ids(&[balance_id.into()])
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, amount)
}
/// Sends a message with a non-empty data payload and checks every field of the
/// resulting `MessageOut` receipt (sender, recipient, amount, length, data).
#[tokio::test]
async fn can_send_message_output_with_data() {
    let num_wallets = 1;
    let coins_per_wallet = 1;
    let amount_per_coin = 1_000_000;
    let config = WalletsConfig::new(
        Some(num_wallets),
        Some(coins_per_wallet),
        Some(amount_per_coin),
    );
    let wallets = launch_custom_provider_and_get_wallets(config, None, None)
        .await
        .unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallets[0].clone()).await;
    let amount = 33u64;
    let recipient_address: Address = wallets[0].address().into();
    let call_response = fuelcontract_instance
        .methods()
        .send_message(Bits256(*recipient_address), vec![100, 75, 50], amount)
        .call()
        .await
        .unwrap();
    // Locate the MessageOut receipt among the transaction receipts.
    let message_receipt = call_response
        .tx_status
        .receipts
        .iter()
        .find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
        .unwrap();
    assert_eq!(*fuelcontract_id, **message_receipt.sender().unwrap());
    assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
    assert_eq!(amount, message_receipt.amount().unwrap());
    assert_eq!(3, message_receipt.len().unwrap());
    assert_eq!(vec![100, 75, 50], message_receipt.data().unwrap());
}
/// Sends a message with an empty data payload and checks the resulting
/// `MessageOut` receipt reports zero length and empty data.
#[tokio::test]
async fn can_send_message_output_without_data() {
    let num_wallets = 1;
    let coins_per_wallet = 1;
    let amount_per_coin = 1_000_000;
    let config = WalletsConfig::new(
        Some(num_wallets),
        Some(coins_per_wallet),
        Some(amount_per_coin),
    );
    let wallets = launch_custom_provider_and_get_wallets(config, None, None)
        .await
        .unwrap();
    let (fuelcontract_instance, fuelcontract_id) = get_fuelcoin_instance(wallets[0].clone()).await;
    let amount = 33u64;
    // Arbitrary fixed recipient; no wallet needs to control this address.
    let recipient_hex = "0x000000000000000000000000b46a7a1a23f3897cc83a94521a96da5c23bc58db";
    let recipient_address = Address::from_str(recipient_hex).unwrap();
    let call_response = fuelcontract_instance
        .methods()
        .send_message(Bits256(*recipient_address), Vec::<u64>::new(), amount)
        .call()
        .await
        .unwrap();
    // Locate the MessageOut receipt among the transaction receipts.
    let message_receipt = call_response
        .tx_status
        .receipts
        .iter()
        .find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
        .unwrap();
    assert_eq!(*fuelcontract_id, **message_receipt.sender().unwrap());
    assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
    assert_eq!(amount, message_receipt.amount().unwrap());
    assert_eq!(0, message_receipt.len().unwrap());
    assert_eq!(Vec::<u8>::new(), message_receipt.data().unwrap());
}
/// Deploys the `asset_ops` test contract, funds it with 1000 base-asset coins
/// (needed for message sending), and returns the bound instance and its id.
async fn get_fuelcoin_instance(wallet: Wallet) -> (TestFuelCoinContract<Wallet>, ContractId) {
    let fuelcontract_id = Contract::load_from(
        "test_projects/asset_ops/out/release/asset_ops.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    // Fund the contract with base asset so it can cover message amounts.
    wallet
        .force_transfer_to_contract(fuelcontract_id, 1000, AssetId::BASE, TxPolicies::default())
        .await
        .unwrap();
    let fuelcontract_instance = TestFuelCoinContract::new(fuelcontract_id.clone(), wallet);
    (fuelcontract_instance, fuelcontract_id.into())
}
/// Deploys the helper `balance_contract` used as a call/transfer target in the
/// balance-related tests above, and returns its contract id.
async fn get_balance_contract_id(wallet: Wallet) -> ContractId {
    let loaded = Contract::load_from(
        "test_artifacts/balance_contract/out/release/balance_contract.bin",
        LoadConfiguration::default(),
    )
    .unwrap();
    let deployed = loaded.deploy(&wallet, TxPolicies::default()).await.unwrap();
    deployed.contract_id.into()
}
/// Derives the asset id minted by `contract` for `sub_id`:
/// `sha256(contract_id ++ sub_id)`, mirroring the VM's asset id derivation.
async fn get_asset_id(sub_id: Bytes32, contract: ContractId) -> Bytes32 {
    let mut digest_state = Sha256::new();
    digest_state.update(*contract);
    digest_state.update(*sub_id);
    let digest: [u8; 32] = digest_state.finalize().into();
    Bytes32::from(digest)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/context/mod.rs | test/src/sdk-harness/test_projects/context/mod.rs | use fuel_vm::consts::VM_MAX_RAM;
use fuels::{
prelude::*,
tx::ContractIdExt,
types::{Bits256, SubAssetId, ContractId},
};
// Generate Rust bindings for the contracts under test from their JSON ABIs.
abigen!(
    Contract(
        name = "TestContextContract",
        abi = "test_projects/context/out/release/context-abi.json",
    ),
    Contract(
        name = "TestContextCallerContract",
        abi = "test_artifacts/context_caller_contract/out/release/context_caller_contract-abi.json",
    ),
    Contract(
        name = "FuelCoin",
        abi = "test_projects/asset_ops/out/release/asset_ops-abi.json"
    )
);
/// Deploys the `context` contract and the `context_caller` contract with a
/// fresh wallet and returns both bound instances together with their ids.
async fn get_contracts() -> (
    TestContextContract<Wallet>,
    ContractId,
    TestContextCallerContract<Wallet>,
    ContractId,
) {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id_1 = Contract::load_from(
        "test_projects/context/out/release/context.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let id_2 = Contract::load_from(
        "test_artifacts/context_caller_contract/out/release/context_caller_contract.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let instance_2 = TestContextCallerContract::new(id_2.clone(), wallet.clone());
    let instance_1 = TestContextContract::new(id_1.clone(), wallet.clone());
    (instance_1, id_1.into(), instance_2, id_2.into())
}
/// Mints coins in the caller contract, forwards them to the context contract,
/// and checks `this_balance` inside the context contract sees the amount.
#[tokio::test]
async fn can_get_this_balance() {
    let (context_instance, context_id, caller_instance, caller_id) = get_contracts().await;
    let send_amount = 42;
    caller_instance
        .methods()
        .mint_coins(send_amount)
        .call()
        .await
        .unwrap();
    // Forward the minted coins to the context contract.
    caller_instance
        .methods()
        .call_receive_coins(send_amount, context_id)
        .with_contracts(&[&context_instance])
        .call()
        .await
        .unwrap();
    // Asset id is the caller contract's default (zero sub-id) asset.
    let result = context_instance
        .methods()
        .get_this_balance(Bits256(*caller_id.asset_id(&SubAssetId::zeroed())))
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, send_amount);
}
/// Mints coins in the caller contract and checks that the context contract can
/// read another contract's balance via `balance_of`.
#[tokio::test]
async fn can_get_balance_of_contract() {
    let (context_instance, _, caller_instance, caller_id) = get_contracts().await;
    let send_amount = 42;
    caller_instance
        .methods()
        .mint_coins(send_amount)
        .call()
        .await
        .unwrap();
    // Query the caller contract's balance of its own default asset.
    let result = context_instance
        .methods()
        .get_balance_of_contract(Bits256(*caller_id.asset_id(&SubAssetId::zeroed())), caller_id)
        .with_contracts(&[&caller_instance])
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, send_amount);
}
/// Checks that `msg_amount` inside a called contract matches the amount of
/// coins forwarded with the call.
#[tokio::test]
async fn can_get_msg_value() {
    let (context_instance, context_id, caller_instance, _) = get_contracts().await;
    let send_amount = 11;
    caller_instance
        .methods()
        .mint_coins(send_amount)
        .call()
        .await
        .unwrap();
    let result = caller_instance
        .methods()
        .call_get_amount_with_coins(send_amount, context_id)
        .with_contracts(&[&context_instance])
        .call()
        .await
        .unwrap();
    assert_eq!(result.value, send_amount);
}
/// Checks that `msg_asset_id` inside a called contract matches the asset id of
/// the coins forwarded with the call (the caller's default asset).
#[tokio::test]
async fn can_get_msg_id() {
    let (context_instance, context_id, caller_instance, caller_id) = get_contracts().await;
    let send_amount = 42;
    caller_instance
        .methods()
        .mint_coins(send_amount)
        .call()
        .await
        .unwrap();
    let result = caller_instance
        .methods()
        .call_get_asset_id_with_coins(send_amount, context_id)
        .with_contracts(&[&context_instance])
        .call()
        .await
        .unwrap();
    assert_eq!(
        result.value,
        Bits256(*caller_id.asset_id(&SubAssetId::zeroed()))
    );
}
/// Checks that the remaining call gas reported inside a called contract is a
/// plausible value (non-zero and within the VM address space).
#[tokio::test]
async fn can_get_msg_gas() {
    let (context_instance, context_id, caller_instance, _) = get_contracts().await;
    let send_amount = 11;
    caller_instance
        .methods()
        .mint_coins(send_amount)
        .call()
        .await
        .unwrap();
    let result = caller_instance
        .methods()
        .call_get_gas_with_coins(send_amount, context_id)
        .with_contracts(&[&context_instance])
        .call()
        .await
        .unwrap();
    // Previously the boolean returned by `is_within_range` was silently
    // discarded, so the test could never fail on an implausible gas value.
    assert!(is_within_range(result.value));
}
/// Checks that the global gas reported inside a called contract is a plausible
/// value (non-zero and within the VM address space).
#[tokio::test]
async fn can_get_global_gas() {
    let (context_instance, context_id, caller_instance, _) = get_contracts().await;
    let send_amount = 11;
    caller_instance
        .methods()
        .mint_coins(send_amount)
        .call()
        .await
        .unwrap();
    let result = caller_instance
        .methods()
        .call_get_global_gas_with_coins(send_amount, context_id)
        .with_contracts(&[&context_instance])
        .call()
        .await
        .unwrap();
    // Previously the boolean returned by `is_within_range` was silently
    // discarded, so the test could never fail on an implausible gas value.
    assert!(is_within_range(result.value));
}
/// Returns `true` iff `n` is a plausible gas value: strictly positive and no
/// larger than the VM's addressable memory (`VM_MAX_RAM`).
///
/// Marked `#[must_use]` so callers cannot accidentally discard the result,
/// which would make a test that calls this vacuous.
#[must_use]
fn is_within_range(n: u64) -> bool {
    n > 0 && n <= VM_MAX_RAM
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/parsing_logs/mod.rs | test/src/sdk-harness/test_projects/parsing_logs/mod.rs | use fuels::{
prelude::*,
types::{Bits256, SizedAsciiString},
};
// Generate Rust bindings for the parsing_logs contract from its JSON ABI.
abigen!(Contract(
    name = "ParsingLogsTestContract",
    abi = "test_projects/parsing_logs/out/release/parsing_logs-abi.json"
));
/// Deploys the `parsing_logs` test contract with a fresh wallet and returns the
/// bound instance together with its contract id.
async fn get_parsing_logs_instance() -> (ParsingLogsTestContract<Wallet>, ContractId) {
    let wallet = launch_provider_and_get_wallet().await.unwrap();
    let id = Contract::load_from(
        "test_projects/parsing_logs/out/release/parsing_logs.bin",
        LoadConfiguration::default(),
    )
    .unwrap()
    .deploy(&wallet, TxPolicies::default())
    .await
    .unwrap()
    .contract_id;
    let instance = ParsingLogsTestContract::new(id.clone(), wallet);
    (instance, id.into())
}
/// Decodes logs of several distinct types (u64, b256, string, array) from a
/// single contract call and checks each decoded value.
// NOTE(review): "varibles" is a typo for "variables"; the name is kept as-is
// to avoid breaking any test filters that reference it.
#[tokio::test]
async fn test_parse_logged_varibles() -> Result<()> {
    let (instance, _id) = get_parsing_logs_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods.produce_logs_variables().call().await?;
    // Each `decode_logs_with_type` returns only the logs of that exact type.
    let log_u64 = response.decode_logs_with_type::<u64>()?;
    let log_bits256 = response.decode_logs_with_type::<Bits256>()?;
    let log_string = response.decode_logs_with_type::<SizedAsciiString<4>>()?;
    let log_array = response.decode_logs_with_type::<[u8; 3]>()?;
    let expected_bits256 = Bits256([
        239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60,
        239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74,
    ]);
    assert_eq!(log_u64, vec![64]);
    assert_eq!(log_bits256, vec![expected_bits256]);
    assert_eq!(log_string, vec!["Fuel"]);
    assert_eq!(log_array, vec![[1, 2, 3]]);
    Ok(())
}
/// Decodes logs whose types are std-library structs (`Address`, `ContractId`,
/// `AssetId`) rather than types declared in the test contract's ABI.
#[tokio::test]
async fn test_parse_logged_private_structs() -> Result<()> {
    let (instance, _id) = get_parsing_logs_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods
        .produce_logs_private_structs()
        .call()
        .await?;
    let log_address = response
        .decode_logs_with_type::<Address>()
        .unwrap()
        .pop()
        .unwrap();
    let log_contract_id = response
        .decode_logs_with_type::<ContractId>()
        .unwrap()
        .pop()
        .unwrap();
    let log_asset_id = response
        .decode_logs_with_type::<AssetId>()
        .unwrap()
        .pop()
        .unwrap();
    // All three logged values wrap the same 32-byte payload.
    let expected_bits256 = [
        239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60,
        239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74,
    ];
    assert_eq!(log_address, Address::new(expected_bits256));
    assert_eq!(log_contract_id, ContractId::new(expected_bits256));
    assert_eq!(log_asset_id, AssetId::new(expected_bits256));
    Ok(())
}
/// Decodes logs of each primitive integer width and checks that querying a
/// type that was never logged yields an empty list rather than an error.
#[tokio::test]
async fn test_parse_logs_values() -> Result<()> {
    let (instance, _id) = get_parsing_logs_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods.produce_logs_values().call().await?;
    let log_u64 = response.decode_logs_with_type::<u64>()?;
    let log_u32 = response.decode_logs_with_type::<u32>()?;
    let log_u16 = response.decode_logs_with_type::<u16>()?;
    let log_u8 = response.decode_logs_with_type::<u8>()?;
    // try to retrieve non existent log
    let log_nonexistent = response.decode_logs_with_type::<bool>()?;
    assert_eq!(log_u64, vec![64]);
    assert_eq!(log_u32, vec![32]);
    assert_eq!(log_u16, vec![16]);
    assert_eq!(log_u8, vec![8]);
    assert!(log_nonexistent.is_empty());
    Ok(())
}
/// Decodes logs whose types are user-defined in the contract ABI (a struct and
/// an enum generated by `abigen!`).
#[tokio::test]
async fn test_parse_logs_custom_types() -> Result<()> {
    let (instance, _id) = get_parsing_logs_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods.produce_logs_custom_types().call().await?;
    let log_test_struct = response.decode_logs_with_type::<TestStruct>()?;
    let log_test_enum = response.decode_logs_with_type::<TestEnum>()?;
    let expected_bits256 = Bits256([
        239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60,
        239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74,
    ]);
    let expected_struct = TestStruct {
        field_1: true,
        field_2: expected_bits256,
        field_3: 64,
    };
    let expected_enum = TestEnum::VariantTwo;
    assert_eq!(log_test_struct, vec![expected_struct]);
    assert_eq!(log_test_enum, vec![expected_enum]);
    Ok(())
}
/// Decodes logs whose types are generic structs/enums, including nested and
/// deeply nested instantiations, all parameterized over `[u8; 3]`.
#[tokio::test]
async fn test_parse_logs_generic_types() -> Result<()> {
    let (instance, _id) = get_parsing_logs_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods.produce_logs_generic_types().call().await?;
    let log_struct = response.decode_logs_with_type::<StructWithGeneric<[_; 3]>>()?;
    let log_enum = response.decode_logs_with_type::<EnumWithGeneric<[_; 3]>>()?;
    let log_struct_nested =
        response.decode_logs_with_type::<StructWithNestedGeneric<StructWithGeneric<[_; 3]>>>()?;
    let log_struct_deeply_nested = response.decode_logs_with_type::<StructDeeplyNestedGeneric<
        StructWithNestedGeneric<StructWithGeneric<[_; 3]>>,
    >>()?;
    // The same 3-byte array is the generic payload at every nesting level.
    let l = [1u8, 2u8, 3u8];
    let expected_struct = StructWithGeneric {
        field_1: l,
        field_2: 64,
    };
    let expected_enum = EnumWithGeneric::VariantOne(l);
    let expected_nested_struct = StructWithNestedGeneric {
        field_1: expected_struct.clone(),
        field_2: 64,
    };
    let expected_deeply_nested_struct = StructDeeplyNestedGeneric {
        field_1: expected_nested_struct.clone(),
        field_2: 64,
    };
    assert_eq!(log_struct, vec![expected_struct]);
    assert_eq!(log_enum, vec![expected_enum]);
    assert_eq!(log_struct_nested, vec![expected_nested_struct]);
    assert_eq!(
        log_struct_deeply_nested,
        vec![expected_deeply_nested_struct]
    );
    Ok(())
}
/// Decodes all logs of a call without specifying types (`decode_logs`) and
/// compares their string renderings, in order, against the expected values.
#[tokio::test]
async fn test_get_logs() -> Result<()> {
    let (instance, _id) = get_parsing_logs_instance().await;
    let contract_methods = instance.methods();
    let response = contract_methods.produce_multiple_logs().call().await?;
    let logs = response.decode_logs();
    let expected_bits256 = Bits256([
        239, 134, 175, 169, 105, 108, 240, 220, 99, 133, 226, 196, 7, 166, 225, 89, 161, 16, 60,
        239, 183, 226, 174, 6, 54, 251, 51, 211, 203, 42, 158, 74,
    ]);
    let expected_struct = TestStruct {
        field_1: true,
        field_2: expected_bits256,
        field_3: 64,
    };
    let expected_enum = TestEnum::VariantTwo;
    let expected_generic_struct = StructWithGeneric {
        field_1: expected_struct.clone(),
        field_2: 64,
    };
    // `decode_logs` renders each log via `Debug`, so the expectations are the
    // `Debug` strings of the logged values, in emission order.
    let expected_logs: Vec<String> = vec![
        format!("{:?}", 64u64),
        format!("{:?}", 32u32),
        format!("{:?}", 16u16),
        format!("{:?}", 8u8),
        format!("{:?}", 64u64),
        format!("{:?}", expected_bits256),
        format!("{:?}", SizedAsciiString::<4>::new("Fuel".to_string())?),
        format!("{:?}", [1, 2, 3]),
        format!("{:?}", expected_struct),
        format!("{:?}", expected_enum),
        format!("{:?}", expected_generic_struct),
    ];
    pretty_assertions::assert_eq!(expected_logs, logs.filter_succeeded());
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/e2e_vm_tests/util.rs | test/src/e2e_vm_tests/util.rs | pub trait VecExt<T> {
/// Retains the elements specified by the predicate `f`,
/// and returns the elements that were removed.
fn retain_and_get_removed<F>(&mut self, f: F) -> Vec<T>
where
F: FnMut(&T) -> bool;
}
impl<T> VecExt<T> for Vec<T> {
    /// Retains the elements for which `f` returns `true` and returns the
    /// removed elements, preserving the relative order of both groups.
    ///
    /// Implemented as a single pass over the vector. The previous
    /// implementation called `Vec::remove` inside a loop, which shifts the
    /// whole tail on every removal and is O(n²) in the worst case.
    fn retain_and_get_removed<F>(&mut self, mut f: F) -> Vec<T>
    where
        F: FnMut(&T) -> bool,
    {
        let mut removed = Vec::new();
        let mut kept = Vec::with_capacity(self.len());
        for val in self.drain(..) {
            if f(&val) {
                kept.push(val);
            } else {
                removed.push(val);
            }
        }
        *self = kept;
        removed
    }
}
/// Formats a `Duration` as `[HH:]MM:SS:mmm`, zero-padding every component.
///
/// The hours component is omitted when it is zero. This fixes two issues in
/// the previous implementation: the `#` (alternate) flag in `{part:#02}` is a
/// no-op for integer `Display`, and milliseconds were padded to only two
/// digits instead of three (so 50 ms rendered as "50", with the same width as
/// a seconds field).
pub(crate) fn duration_to_str(duration: &std::time::Duration) -> String {
    let total_secs = duration.as_secs();
    let hours = total_secs / 3600;
    let minutes = (total_secs / 60) % 60;
    let seconds = total_secs % 60;
    let millis = duration.subsec_millis();
    // Hopefully we will never need to deal with hours :-)
    if hours == 0 {
        format!("{minutes:02}:{seconds:02}:{millis:03}")
    } else {
        format!("{hours:02}:{minutes:02}:{seconds:02}:{millis:03}")
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/e2e_vm_tests/mod.rs | test/src/e2e_vm_tests/mod.rs | // Please take a look in test_programs/README.md for details on how these tests work.
mod harness;
mod harness_callback_handler;
mod util;
use crate::e2e_vm_tests::harness::run_and_capture_output;
use crate::{FilterConfig, RunConfig};
use anyhow::{anyhow, bail, Result};
use chrono::Local;
use colored::*;
use core::fmt;
use forc_pkg::manifest::{GenericManifestFile, ManifestFile};
use forc_pkg::BuildProfile;
use forc_test::ecal::Syscall;
use forc_util::tx_utils::decode_log_data;
use fuel_vm::fuel_tx;
use fuel_vm::prelude::*;
use git2::Repository;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use regex::Regex;
use std::borrow::Cow;
use std::collections::{BTreeMap, HashSet};
use std::fs::File;
use std::io::stdout;
use std::io::Write;
use std::process::{Command, Stdio};
use std::str::FromStr;
use std::time::{Duration, Instant};
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
};
use sway_core::BuildTarget;
use sway_features::{CliFields, ExperimentalFeatures};
use tokio::sync::Mutex;
use tracing::Instrument;
use self::util::VecExt;
use serde::{Deserialize, Serialize};
/// How a test is expected to be exercised by the harness.
#[derive(Clone, PartialEq, Debug)]
enum TestCategory {
    /// The test only needs to compile successfully.
    Compiles,
    /// The test is expected to fail compilation.
    FailsToCompile,
    /// The test is compiled and executed in the VM, and its result checked.
    Runs,
    /// The test runs against one or more deployed contracts.
    RunsWithContract,
    /// The test's in-language unit tests are expected to pass.
    UnitTestsPass,
    /// The test is skipped entirely.
    Disabled,
}
/// The observed (or expected) outcome of executing a test program.
#[derive(Clone, PartialEq)]
enum TestResult {
    /// A plain result word (used for the EVM execution path).
    Result(Word),
    /// The VM terminated by returning this value (`ProgramState::Return`).
    Return(u64),
    /// The VM terminated by returning this data (`ProgramState::ReturnData`).
    ReturnData(Vec<u8>),
    /// The VM reverted with this code (`ProgramState::Revert`).
    Revert(u64),
}
impl fmt::Debug for TestResult {
    /// Renders the variant with its payload; `ReturnData` bytes are shown as
    /// a `0x`-prefixed hex string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let rendered = match self {
            TestResult::Result(word) => format!("Result({word})"),
            TestResult::Return(value) => format!("Return({value})"),
            TestResult::ReturnData(bytes) => format!("ReturnData(0x{})", hex::encode(bytes)),
            TestResult::Revert(code) => format!("Revert({code})"),
        };
        f.write_str(&rendered)
    }
}
/// Raw text containing FileCheck directives; see [`FileCheck::build`] for how
/// it is parsed into a `filecheck::Checker`.
#[derive(Clone)]
pub struct FileCheck(String);
impl FileCheck {
    /// Builds a `filecheck::Checker` from the stored directive text.
    ///
    /// Fails if the text contains a line that looks like a directive
    /// (`# NAME: args`) but is not a directive `filecheck` recognizes.
    pub fn build(&self) -> Result<filecheck::Checker, anyhow::Error> {
        // Matches lines of the form `# DIRECTIVE: args` (multi-line mode).
        const DIRECTIVE_RX: &str = r"(?m)^\s*#\s*(\w+):\s+(.*)$";
        let mut checker = filecheck::CheckerBuilder::new();
        // Parse the file and check for unknown FileCheck directives.
        let re = Regex::new(DIRECTIVE_RX).unwrap();
        for cap in re.captures_iter(&self.0) {
            // `directive` returns Ok(false) when the text is not a known directive.
            if let Ok(false) = checker.directive(&cap[0]) {
                bail!("Unknown FileCheck directive: {}", &cap[1]);
            }
        }
        Ok(checker.finish())
    }
}
/// Everything the harness needs to run a single test, parsed from its
/// `test.toml`.
#[derive(Clone)]
struct TestDescription {
    /// Path to the `test.toml` this description was parsed from.
    test_toml_path: String,
    name: String,
    /// Optional discriminator appended to the name for display; see
    /// [`TestDescription::display_name`].
    suffix: Option<String>,
    category: TestCategory,
    /// Script input data passed to the VM when the test runs.
    script_data: Option<Vec<u8>>,
    /// Script input data used instead of `script_data` when the new encoding
    /// is enabled (and no explicit `experimental` field is set).
    script_data_new_encoding: Option<Vec<u8>>,
    witness_data: Option<Vec<Vec<u8>>>,
    expected_result: Option<TestResult>,
    /// Expected result used when the new encoding is enabled (and no explicit
    /// `experimental` field is set).
    expected_result_new_encoding: Option<TestResult>,
    /// Maximum number of compiler warnings tolerated before the test fails.
    expected_warnings: u32,
    contract_paths: Vec<String>,
    /// Signing key to be used if the test is of [TestCategory::RunsWithContract].
    /// `None` if the test has any other [TestCategory].
    signing_key: Option<SecretKey>,
    /// Whether to check the produced JSON ABI against the oracle file.
    validate_abi: bool,
    validate_storage_slots: bool,
    supported_targets: HashSet<BuildTarget>,
    expected_decoded_test_logs: Option<Vec<String>>,
    unsupported_profiles: Vec<&'static str>,
    /// FileCheck directives run against the compiler output.
    checker: FileCheck,
    run_config: RunConfig,
    experimental: ExperimentalFeatures,
    /// `true` if the `test.toml` declared an explicit `experimental` field;
    /// controls whether the `*_new_encoding` variants are consulted.
    has_experimental_field: bool,
    logs: Option<String>,
}
impl TestDescription {
pub fn display_name(&self) -> Cow<str> {
if let Some(suffix) = self.suffix.as_ref() {
format!("{} ({})", self.name, suffix).into()
} else {
self.name.as_str().into()
}
}
pub fn expect_signing_key(&self) -> &SecretKey {
self.signing_key
.as_ref()
.expect("`RunsWithContract` test must have a signing key defined")
}
}
/// Cache key for deployed contracts: the same contract source deployed with
/// and without the new encoding counts as two distinct deployments.
#[derive(PartialEq, Eq, Hash)]
struct DeployedContractKey {
    pub contract_path: String,
    pub new_encoding: bool,
}
/// Gas consumed by a single script run or by a single unit test.
#[derive(Serialize, Deserialize)]
struct GasUsage {
    /// The name of the unit test, or `None` if it is the gas usage of a script run.
    pub unit_test_name: Option<String>,
    pub gas_used: usize,
}
impl GasUsage {
pub fn new(gas_used: usize) -> Self {
Self {
unit_test_name: None,
gas_used,
}
}
pub fn with_unit_test_name(unit_test_name: String, gas_used: usize) -> Self {
Self {
unit_test_name: Some(unit_test_name),
gas_used,
}
}
}
/// Size in bytes of one compiled package's bytecode.
#[derive(Serialize, Deserialize)]
struct BytecodeSize {
    /// The name of the compiled package if more than one package is compiled
    /// within a test, or `None` if it is a single package whose name is the
    /// same as the test name.
    pub package_name: Option<String>,
    pub bytecode_size: usize,
}
impl BytecodeSize {
pub fn new(bytecode_size: usize) -> Self {
Self {
package_name: None,
bytecode_size,
}
}
pub fn with_package_name(package_name: String, bytecode_size: usize) -> Self {
Self {
package_name: Some(package_name),
bytecode_size,
}
}
}
/// Performance data, bytecode sizes and gas usages,
/// collected during the run of a single test.
///
/// Performance data can be collected for tests of
/// these categories: "compile", "run", "unit_tests_pass".
///
/// A single test can have several bytecode sizes, if a
/// workspace is "compiled", and several gas usages, if
/// "unit_tests_pass" is run.
#[derive(Serialize, Deserialize)]
struct TestPerfData {
    /// See [`TestDescription::display_name`].
    pub test_display_name: String,
    /// One entry per compiled package.
    pub bytecode_sizes: Vec<BytecodeSize>,
    /// One entry per script run or unit test.
    pub gas_usages: Vec<GasUsage>,
}
impl TestPerfData {
fn new(test_display_name: String) -> Self {
Self {
test_display_name,
bytecode_sizes: vec![],
gas_usages: vec![],
}
}
}
/// Shared state across test runs: caches contracts that have already been
/// deployed so multiple tests can reuse the same deployment.
#[derive(Clone)]
struct TestContext {
    deployed_contracts: Arc<Mutex<HashMap<DeployedContractKey, ContractId>>>,
}
/// Appends a human-readable rendering of `receipts` to `output`, one entry per
/// receipt, and reassembles any test-emitted text logs (see the `ra == u64::MAX`
/// convention below) into a trailing "Text Logs" section.
fn print_receipts(output: &mut String, receipts: &[Receipt]) {
    let mut text_log = String::new();
    use std::fmt::Write;
    let _ = writeln!(output, " {}", "Receipts".green().bold());
    for (i, receipt) in receipts.iter().enumerate() {
        let _ = write!(output, " {}", format!("#{i}").bold());
        match receipt {
            Receipt::LogData {
                id,
                ra,
                rb,
                ptr,
                len,
                digest,
                pc,
                is,
                data,
            } => {
                // Small hack to allow log from tests.
                // `ra == u64::MAX` marks a test text-log record; `rb` selects
                // the payload kind.
                if *ra == u64::MAX {
                    match rb {
                        // rb == 0: UTF-8 text after an 8-byte header.
                        0 => {
                            let data = data.as_ref().unwrap();
                            let payload = data
                                .as_ref()
                                .get(8..)
                                .expect("log data shorter than 8 byte header");
                            let s = std::str::from_utf8(payload).unwrap();
                            text_log.push_str(s);
                        }
                        // rb == 1: a big-endian u64 rendered as decimal.
                        1 => {
                            let data = data.as_ref().unwrap();
                            let s = u64::from_be_bytes(data.as_ref().try_into().unwrap());
                            text_log.push_str(&format!("{s}"));
                        }
                        // rb == 2: newline marker.
                        2 => {
                            text_log.push('\n');
                        }
                        _ => {}
                    }
                }
                let _ = write!(output, " LogData\n ID: {id:?}\n RA: {ra:?}\n RB: {rb:?}\n Ptr: {ptr:?}\n Len: {len:?}\n Digest: {digest:?}\n PC: {pc:?}\n IS: {is:?}\n Data: {data:?}\n");
            }
            Receipt::ReturnData {
                id,
                ptr,
                len,
                digest,
                pc,
                is,
                data,
            } => {
                let _ = write!(output, " ReturnData\n ID: {id:?}\n Ptr: {ptr:?}\n Len: {len:?}\n Digest: {digest:?}\n PC: {pc:?}\n IS: {is:?}\n Data: {data:?}\n");
            }
            Receipt::Call {
                id,
                to,
                amount,
                asset_id,
                gas,
                param1,
                param2,
                pc,
                is,
            } => {
                let _ = write!(output, " Call\n ID: {id:?}\n To: {to:?}\n Amount: {amount:?}\n Asset ID: {asset_id:?}\n Gas: {gas:?}\n Param #1: {param1:?}\n Param #2: {param2:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::Return { id, val, pc, is } => {
                let _ = write!(output, " Return\n ID: {id:?}\n Value: {val:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::Panic {
                id,
                reason,
                pc,
                is,
                contract_id,
            } => {
                let _ = write!(output, " Panic\n ID: {id:?}\n Reason: {reason:?}\n PC: {pc:?}\n IS: {is:?}\n Contract ID: {contract_id:?}\n");
            }
            Receipt::Revert { id, ra, pc, is } => {
                let _ = write!(output, " Revert\n ID: {id:?}\n RA: {ra:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::Log {
                id,
                ra,
                rb,
                rc,
                rd,
                pc,
                is,
            } => {
                let _ = write!(output, " Log\n ID: {id:?}\n RA: {ra:?}\n RB: {rb:?}\n RC: {rc:?}\n RD: {rd:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::Transfer {
                id,
                to,
                amount,
                asset_id,
                pc,
                is,
            } => {
                let _ = write!(output, " Transfer\n ID: {id:?}\n To: {to:?}\n Amount: {amount:?}\n Asset ID: {asset_id:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::TransferOut {
                id,
                to,
                amount,
                asset_id,
                pc,
                is,
            } => {
                let _ = write!(output, " TransferOut\n ID: {id:?}\n To: {to:?}\n Amount: {amount:?}\n Asset ID: {asset_id:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::ScriptResult { result, gas_used } => {
                let _ = write!(
                    output,
                    " ScriptResult\n Result: {result:?}\n Gas Used: {gas_used:?}\n"
                );
            }
            Receipt::MessageOut {
                sender,
                recipient,
                amount,
                nonce,
                len,
                digest,
                data,
            } => {
                let _ = write!(output, " MessageOut\n Sender: {sender:?}\n Recipient: {recipient:?}\n Amount: {amount:?}\n Nonce: {nonce:?}\n Len: {len:?}\n Digest: {digest:?}\n Data: {data:?}\n");
            }
            Receipt::Mint {
                sub_id,
                contract_id,
                val,
                pc,
                is,
            } => {
                let _ = write!(output, " Mint\n Sub ID: {sub_id:?}\n Contract ID: {contract_id:?}\n Val: {val:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
            Receipt::Burn {
                sub_id,
                contract_id,
                val,
                pc,
                is,
            } => {
                let _ = write!(output, " Burn\n Sub ID: {sub_id:?}\n Contract ID: {contract_id:?}\n Val: {val:?}\n PC: {pc:?}\n IS: {is:?}\n");
            }
        }
    }
    // Emit the reassembled test text logs, if any were collected above.
    if !text_log.is_empty() {
        let _ = writeln!(output, " {}", "Text Logs".green().bold());
        for l in text_log.lines() {
            let _ = writeln!(output, "{l}");
        }
    }
}
impl TestContext {
    /// Deploys the contract at `contract_path`, caching the deployment so that
    /// subsequent calls for the same path (and encoding) reuse the existing
    /// contract instead of deploying again.
    async fn deploy_contract(
        &self,
        run_config: &RunConfig,
        contract_path: String,
        signing_key: &SecretKey,
    ) -> Result<ContractId> {
        // The cache key includes the new-encoding flag, since the same source
        // compiled with and without it yields different bytecode.
        let experimental = ExperimentalFeatures::new(
            &HashMap::default(),
            &run_config.experimental.experimental,
            &run_config.experimental.no_experimental,
        )
        .unwrap();
        let key = DeployedContractKey {
            contract_path: contract_path.clone(),
            new_encoding: experimental.new_encoding,
        };
        let mut deployed_contracts = self.deployed_contracts.lock().await;
        Ok(if let Some(contract_id) = deployed_contracts.get(&key) {
            *contract_id
        } else {
            let contract_id =
                harness::deploy_contract(contract_path.as_str(), run_config, signing_key).await?;
            deployed_contracts.insert(key, contract_id);
            contract_id
        })
    }
async fn run(
&self,
test: &TestDescription,
output: &mut String,
verbose: bool,
) -> Result<TestPerfData> {
let TestDescription {
name,
suffix,
category,
script_data,
script_data_new_encoding,
witness_data,
expected_result,
expected_result_new_encoding,
expected_warnings,
contract_paths,
validate_abi,
validate_storage_slots,
checker,
run_config,
expected_decoded_test_logs,
experimental,
has_experimental_field,
logs,
..
} = test;
let checker = checker.build().unwrap();
let script_data = if !has_experimental_field && experimental.new_encoding {
script_data_new_encoding
} else {
script_data
};
let expected_result = if !has_experimental_field && experimental.new_encoding {
expected_result_new_encoding
} else {
expected_result
};
let mut perf_data = TestPerfData::new(test.display_name().into());
match category {
TestCategory::Runs => {
let expected_result = expected_result
.as_ref()
.expect("No expected result found. This is likely because the `test.toml` is missing either an \"expected_result_new_encoding\" or \"expected_result\" entry.");
let (result, out) =
run_and_capture_output(|| harness::compile_to_bytes(name, run_config, logs))
.await;
*output = out;
if let Ok(result) = result.as_ref() {
let packages = match result {
forc_pkg::Built::Package(p) => [p.clone()].to_vec(),
forc_pkg::Built::Workspace(p) => p.clone(),
};
for p in packages {
let bytecode_len = p.bytecode.bytes.len();
let configurables = match &p.program_abi {
sway_core::asm_generation::ProgramABI::Fuel(abi) => {
abi.configurables.as_ref().cloned().unwrap_or_default()
}
sway_core::asm_generation::ProgramABI::Evm(_)
| sway_core::asm_generation::ProgramABI::MidenVM(_) => vec![],
}
.into_iter()
.map(|x| (x.offset, x.name))
.collect::<BTreeMap<u64, String>>();
let mut items = configurables.iter().peekable();
while let Some(current) = items.next() {
let next_offset = match items.peek() {
Some(next) => *next.0,
None => bytecode_len as u64,
};
let size = next_offset - current.0;
output.push_str(&format!(
"Configurable Encoded Bytes Buffer Size: {} {}\n",
current.1, size
));
}
}
}
check_file_checker(checker, name, output)?;
let compiled = result?;
let compiled = match compiled {
forc_pkg::Built::Package(built_pkg) => built_pkg.as_ref().clone(),
forc_pkg::Built::Workspace(_) => {
panic!("workspaces are not supported in the test suite yet")
}
};
perf_data
.bytecode_sizes
.push(BytecodeSize::new(compiled.bytecode.bytes.len()));
if compiled.warnings.len() > *expected_warnings as usize {
return Err(anyhow::Error::msg(format!(
"Expected warnings: {expected_warnings}\nActual number of warnings: {}",
compiled.warnings.len()
)));
}
let result = harness::runs_in_vm(
compiled.clone(),
script_data.clone(),
witness_data.clone(),
)?;
let actual_result = match result {
harness::VMExecutionResult::Fuel(state, receipts, ecal) => {
print_receipts(output, &receipts);
let gas_used = receipts.iter().find_map(|r| {
if let Receipt::ScriptResult { gas_used, .. } = r {
Some(*gas_used)
} else {
None
}
});
if let Some(gas_used) = gas_used {
perf_data.gas_usages.push(GasUsage::new(gas_used as usize));
}
use std::fmt::Write;
let _ = writeln!(output, " {}", "Captured Output".green().bold());
for captured in ecal.captured.iter() {
match captured {
Syscall::Write { bytes, .. } => {
let s = std::str::from_utf8(bytes.as_slice()).unwrap();
output.push_str(s);
}
Syscall::Fflush { .. } => {}
Syscall::Unknown { ra, rb, rc, rd } => {
let _ = writeln!(output, "Unknown ecal: {ra} {rb} {rc} {rd}");
}
}
}
match state {
ProgramState::Return(v) => TestResult::Return(v),
ProgramState::ReturnData(digest) => {
// Find the ReturnData receipt matching the digest
let receipt = receipts
.iter()
.find(|r| r.digest() == Some(&digest))
.unwrap();
// Get the data from the receipt
let data = receipt.data().unwrap().to_vec();
TestResult::ReturnData(data)
}
ProgramState::Revert(v) => TestResult::Revert(v),
ProgramState::RunProgram(_) => {
panic!("Execution is in a suspended state: RunProgram");
}
ProgramState::VerifyPredicate(_) => {
panic!("Execution is in a suspended state: VerifyPredicate");
}
}
}
harness::VMExecutionResult::Evm(state) => match state {
revm::primitives::ExecutionResult::Success { reason, .. } => match reason {
revm::primitives::SuccessReason::Stop => TestResult::Result(0),
revm::primitives::SuccessReason::Return => todo!(),
revm::primitives::SuccessReason::SelfDestruct => todo!(),
revm::primitives::SuccessReason::EofReturnContract => todo!(),
},
revm::primitives::ExecutionResult::Revert { .. } => TestResult::Result(0),
revm::primitives::ExecutionResult::Halt { reason, .. } => {
panic!("EVM exited with unhandled reason: {reason:?}");
}
},
};
if &actual_result != expected_result {
return Err(anyhow::Error::msg(format!(
"expected: {expected_result:?}\nactual: {actual_result:?}"
)));
} else if *validate_abi {
let (result, out) = run_and_capture_output(|| async {
harness::test_json_abi(
name,
&compiled,
experimental.new_encoding,
run_config.update_output_files,
suffix,
*has_experimental_field,
run_config.release,
)
})
.await;
output.push_str(&out);
result?;
}
}
TestCategory::Compiles => {
let (result, out) =
run_and_capture_output(|| harness::compile_to_bytes(name, run_config, logs))
.await;
*output = out;
let (is_single_package, compiled_pkgs) = match result? {
forc_pkg::Built::Package(built_pkg) => {
if built_pkg.warnings.len() > *expected_warnings as usize {
return Err(anyhow::Error::msg(format!(
"Expected warnings: {expected_warnings}\nActual number of warnings: {}",
built_pkg.warnings.len()
)));
}
(true, vec![(name.clone(), built_pkg.as_ref().clone())])
}
forc_pkg::Built::Workspace(built_workspace) => (
false,
built_workspace
.iter()
.map(|built_pkg| {
(
built_pkg.descriptor.pinned.name.clone(),
built_pkg.as_ref().clone(),
)
})
.collect(),
),
};
for (name, built_pkg) in &compiled_pkgs {
if is_single_package {
perf_data
.bytecode_sizes
.push(BytecodeSize::new(built_pkg.bytecode.bytes.len()));
} else {
perf_data
.bytecode_sizes
.push(BytecodeSize::with_package_name(
name.clone(),
built_pkg.bytecode.bytes.len(),
));
}
}
check_file_checker(checker, name, output)?;
if *validate_abi {
for (name, built_pkg) in &compiled_pkgs {
let (result, out) = run_and_capture_output(|| async {
harness::test_json_abi(
name,
built_pkg,
experimental.new_encoding,
run_config.update_output_files,
suffix,
*has_experimental_field,
run_config.release,
)
})
.await;
output.push_str(&out);
result?;
}
}
if *validate_storage_slots {
for (name, built_pkg) in &compiled_pkgs {
let (result, out) = run_and_capture_output(|| async {
harness::test_json_storage_slots(name, built_pkg, suffix)
})
.await;
result?;
output.push_str(&out);
}
}
}
TestCategory::FailsToCompile => {
let (result, out) =
run_and_capture_output(|| harness::compile_to_bytes(name, run_config, logs))
.await;
*output = out;
if result.is_ok() {
if verbose {
eprintln!("[{output}]");
}
return Err(anyhow::Error::msg("Test compiles but is expected to fail"));
} else {
check_file_checker(checker, name, output)?;
}
}
TestCategory::RunsWithContract => {
if contract_paths.is_empty() {
panic!(
"For {name}\n\
One or more contract paths are required for 'run_on_node' tests."
);
}
let signing_key = test.expect_signing_key();
let mut contract_ids = Vec::new();
for contract_path in contract_paths.clone() {
let (result, out) = run_and_capture_output(|| async {
self.deploy_contract(run_config, contract_path, signing_key)
.await
})
.await;
output.push_str(&out);
contract_ids.push(result);
}
let contract_ids = contract_ids.into_iter().collect::<Result<Vec<_>, _>>()?;
let (result, out) =
harness::runs_on_node(name, run_config, &contract_ids, signing_key).await;
output.push_str(&out);
let receipts = result?;
if verbose {
print_receipts(output, &receipts);
}
if !receipts.iter().all(|res| {
!matches!(
res,
fuel_tx::Receipt::Revert { .. } | fuel_tx::Receipt::Panic { .. }
)
}) {
println!();
for cid in contract_ids {
println!("Deployed contract: {}", format!("{:#x}", cid).bold(),);
}
return Err(anyhow::Error::msg("Receipts contain reverts or panics"));
}
if receipts.len() < 2 {
return Err(anyhow::Error::msg(format!(
"less than 2 receipts: {:?} receipts",
receipts.len()
)));
}
match &receipts[receipts.len() - 2] {
Receipt::Return { val, .. } => match expected_result.as_ref().unwrap() {
TestResult::Result(v) => {
if *v != *val {
return Err(anyhow::Error::msg(format!(
"return value does not match expected: {v:?}, {val:?}"
)));
}
}
TestResult::ReturnData(_) => {
todo!("Test result `ReturnData` is currently not implemented.")
}
TestResult::Return(_) => {
todo!("Test result `Return` is currently not implemented.")
}
TestResult::Revert(_) => {
todo!("Test result `Revert` is currently not implemented.")
}
},
Receipt::ReturnData { data, .. } => match expected_result.as_ref().unwrap() {
TestResult::ReturnData(v) => {
let actual = data.as_ref().map(|bytes| bytes.as_ref()).unwrap();
if v.as_slice() != actual {
return Err(anyhow::Error::msg(format!(
"return value does not match expected: {v:?}, {data:?}"
)));
}
}
TestResult::Result(_) => {
todo!("Test result `Result` is currently not implemented.")
}
TestResult::Return(_) => {
todo!("Test result `Return` is currently not implemented.")
}
TestResult::Revert(_) => {
todo!("Test result `Revert` is currently not implemented.")
}
},
_ => {}
};
}
TestCategory::UnitTestsPass => {
let (result, out) =
harness::compile_and_run_unit_tests(name, run_config, true).await;
*output = out;
let mut decoded_logs = vec![];
result.map(|tested_pkgs| {
let mut failed = vec![];
for pkg in tested_pkgs {
if !pkg.tests.is_empty() {
println!();
}
if let Some(bytecode_size_without_tests) = pkg.built.bytecode_without_tests.as_ref().map(|bc| bc.bytes.len()) {
perf_data.bytecode_sizes.push(BytecodeSize::new(bytecode_size_without_tests));
}
for test in pkg.tests.into_iter() {
perf_data.gas_usages.push(GasUsage::with_unit_test_name(
test.name.clone(),
test.gas_used as usize,
));
if verbose {
// "test incorrect_def_modeling ... ok (17.673µs, 59 gas)"
println!(" test {} ... {} ({:?}, {} gas)",
test.name,
if test.passed() { "ok" } else { "nok" },
test.duration,
test.gas_used,
);
for log in test.logs.iter() {
println!("{log:?}");
}
}
if expected_decoded_test_logs.is_some() {
for log in test.logs.iter() {
if let Receipt::LogData {
rb,
data: Some(data),
..
} = log
{
let decoded_log_data = decode_log_data(
&rb.to_string(),
data,
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/e2e_vm_tests/harness_callback_handler.rs | test/src/e2e_vm_tests/harness_callback_handler.rs | use normalize_path::NormalizePath as _;
use std::{
collections::HashMap,
path::PathBuf,
str::FromStr as _,
sync::{Arc, Mutex},
};
use sway_core::{
ir_generation::{get_encoding_representation, get_runtime_representation},
Engines, Observer, TypeInfo,
};
use sway_ir::Type;
/// Writes `snapshot` to an insta snapshot named "logs" under `root`.
///
/// Snapshot-mismatch panics from `assert_snapshot!` are caught so that a
/// failing snapshot does not abort the surrounding test harness.
fn stdout_logs(root: &str, snapshot: &str) {
    let snapshot_dir = PathBuf::from_str(root).unwrap().normalize();

    let mut settings = insta::Settings::new();
    settings.set_snapshot_path(snapshot_dir);
    settings.set_prepend_module_to_snapshot(false);
    settings.set_omit_expression(true);

    let bound = settings.bind_to_scope();
    let _ = std::panic::catch_unwind(|| insta::assert_snapshot!("logs", snapshot));
    drop(bound);
}
/// Mutable state behind [`HarnessCallbackHandler`]: the rhai script, the
/// engine that evaluates it, and the buffers the script writes into.
struct Inner {
    // Rhai engine used to evaluate the snapshot script on each event.
    eng: rhai::Engine,
    // Pre-compiled, self-contained AST of the snapshot script.
    ast: rhai::AST,
    // Caches `Forc.toml` path -> package name lookups (see `get_package_name`).
    pkg_name_cache: HashMap<PathBuf, String>,
    // Accumulated script output; flushed to an insta "logs" snapshot on drop.
    snapshot: Arc<Mutex<String>>,
    // Pending trace on/off request set by the script's `trace(bool)` function;
    // consumed by `apply`.
    trace: Arc<Mutex<Option<bool>>>,
    // Directory the "logs" snapshot is written under.
    root: String,
}
impl Inner {
    /// Applies a pending trace toggle (set by the script's `trace(bool)`)
    /// to the compiler's observer, consuming the request.
    fn apply(&mut self, engines: &Engines) {
        let mut trace = self.trace.lock().unwrap();
        if let Some(enable) = trace.take() {
            engines.obs().enable_trace(enable);
        }
    }
    /// Resolves the package name for the `Forc.toml` governing `span`'s
    /// source file, caching by manifest path. Returns `None` when the span
    /// has no source id or no program id can be derived from its path.
    fn get_package_name(
        &mut self,
        span: &sway_types::Span,
        engines: &sway_core::Engines,
    ) -> Option<String> {
        if let Some(sid) = span.source_id() {
            let filename = engines.se().get_path(sid);
            if let Some(pid) = engines.se().get_program_id_from_manifest_path(&filename) {
                let path = engines
                    .se()
                    .get_manifest_path_from_program_id(&pid)
                    .unwrap()
                    .join("Forc.toml");
                Some(
                    if let Some(pkg_name) = self.pkg_name_cache.get(&path).cloned() {
                        pkg_name
                    } else {
                        // Cache miss: read `[project].name` out of the manifest
                        // and remember it for subsequent events.
                        let toml = std::fs::read_to_string(&path).unwrap();
                        let forc_toml: toml::Table = toml::from_str(&toml).unwrap();
                        let pkg_name = forc_toml["project"]["name"].as_str().unwrap().to_string();
                        self.pkg_name_cache.insert(path.clone(), pkg_name.clone());
                        pkg_name
                    },
                )
            } else {
                None
            }
        } else {
            None
        }
    }
    /// Evaluates the snapshot script for a pre-method-resolution event.
    /// Events originating from `std` (or from spans with no resolvable
    /// package) are skipped to keep snapshots focused on the test package.
    fn on_before_method_resolution(
        &mut self,
        ctx: &sway_core::semantic_analysis::TypeCheckContext<'_>,
        method_name: &sway_core::type_system::ast_elements::binding::TypeBinding<
            sway_core::language::parsed::MethodName,
        >,
        args_types: &[sway_core::TypeId],
    ) {
        let pkg_name = self
            .get_package_name(&method_name.span, ctx.engines)
            .unwrap_or_default();
        if pkg_name.is_empty() || pkg_name == "std" {
            return;
        }
        let args = format!(
            "on_before_method_resolution: {:?}; {:?}; {:?}",
            method_name.inner,
            method_name.type_arguments,
            ctx.engines.help_out(args_types.to_vec())
        );
        // Expose event data to the script as read-only constants.
        let mut scope = rhai::Scope::new();
        scope
            .push_constant("args", args)
            .push_constant("pkg", pkg_name.clone())
            .push_constant("event", "on_before_method_resolution")
            .push_constant("method", method_name.inner.easy_name().as_str().to_string());
        self.eng
            .eval_ast_with_scope::<()>(&mut scope, &self.ast)
            .unwrap();
        // Honor any `trace(bool)` request the script just made.
        self.apply(ctx.engines);
    }
    /// Evaluates the snapshot script for a post-method-resolution event,
    /// including the resolved declaration and type. Same `std`/unknown
    /// package filtering as `on_before_method_resolution`.
    fn on_after_method_resolution(
        &mut self,
        ctx: &sway_core::semantic_analysis::TypeCheckContext<'_>,
        method_name: &sway_core::type_system::ast_elements::binding::TypeBinding<
            sway_core::language::parsed::MethodName,
        >,
        args_types: &[sway_core::TypeId],
        new_ref: sway_core::decl_engine::DeclRefFunction,
        new_type_id: sway_core::TypeId,
    ) {
        let pkg_name = self
            .get_package_name(&method_name.span, ctx.engines)
            .unwrap_or_default();
        if pkg_name.is_empty() || pkg_name == "std" {
            return;
        }
        let args = format!(
            "on_after_method_resolution: {:?}; {:?}; {:?}; {:?}; {:?}",
            method_name.inner,
            method_name.type_arguments,
            ctx.engines.help_out(args_types.to_vec()),
            ctx.engines.help_out(new_ref.id()),
            ctx.engines.help_out(new_type_id),
        );
        let mut scope = rhai::Scope::new();
        scope
            .push_constant("pkg", pkg_name.clone())
            .push_constant("event", "on_after_method_resolution")
            .push_constant("method", method_name.inner.easy_name().as_str().to_string())
            .push_constant("args", args);
        self.eng
            .eval_ast_with_scope::<()>(&mut scope, &self.ast)
            .unwrap();
        self.apply(ctx.engines);
    }
    /// Evaluates the snapshot script after a type is lowered to IR, exposing
    /// both the runtime and the ABI-encoding memory representation plus
    /// whether they match (`is_trivial`). Note: unlike the method-resolution
    /// events, this one is not filtered by package (`pkg` is always `""`).
    fn on_after_ir_type_resolution(
        &mut self,
        engines: &Engines,
        ctx: &sway_ir::Context,
        type_info: &TypeInfo,
        ir_type: &Type,
    ) {
        let mut scope = rhai::Scope::new();
        let runtime_mem_repr = get_runtime_representation(ctx, *ir_type);
        let encoding_mem_repr = get_encoding_representation(engines, type_info);
        // "Trivial" here means the in-memory runtime layout equals the
        // encoded layout, i.e. encoding could be a plain copy.
        let is_trivial = if let Some(encoding_mem_repr) = encoding_mem_repr.as_ref() {
            runtime_mem_repr == *encoding_mem_repr
        } else {
            false
        };
        let type_size = ir_type.size(ctx);
        scope
            .push_constant("pkg", "")
            .push_constant("event", "on_after_ir_type_resolution")
            .push_constant("type_info", engines.help_out(type_info).to_string())
            .push_constant("ir_type", ir_type.as_string(ctx))
            .push_constant("runtime_mem_repr", format!("{runtime_mem_repr:?}"))
            .push_constant("encoding_mem_repr", format!("{encoding_mem_repr:?}"))
            .push_constant("is_trivial", is_trivial)
            .push_constant("type_size", type_size.in_bytes());
        self.eng
            .eval_ast_with_scope::<()>(&mut scope, &self.ast)
            .unwrap();
        self.apply(engines);
    }
}
/// Flushes whatever the script printed to the insta "logs" snapshot when the
/// handler is torn down. An empty buffer produces no snapshot at all.
impl Drop for Inner {
    fn drop(&mut self) {
        let buffered = self.snapshot.lock().unwrap();
        if buffered.is_empty() {
            return;
        }
        stdout_logs(&self.root, &buffered);
    }
}
/// An [`Observer`] that forwards compiler events to a user-supplied rhai
/// script and records the script's printed output as an insta snapshot.
pub struct HarnessCallbackHandler {
    // All mutable state sits behind one lock so the `&self` observer
    // callbacks can mutate it.
    inner: Mutex<Inner>,
}
impl HarnessCallbackHandler {
    /// Compiles `script` with a rhai engine wired up with the harness's
    /// output helpers, rooting the resulting "logs" snapshot at `root`.
    ///
    /// Script API provided here:
    /// - `print(s)`       appends `s` to the snapshot buffer;
    /// - `unique_print(s)` appends `s` only if not already present;
    /// - `println(s)`     appends `s` plus a newline;
    /// - `trace(bool)`    requests enabling/disabling compiler tracing
    ///   (applied lazily by `Inner::apply` on the next event).
    ///
    /// Panics if the script fails to compile.
    pub fn new(root: &str, script: &str) -> Self {
        let snapshot = Arc::new(Mutex::new(String::new()));
        let trace = Arc::new(Mutex::new(None));
        let mut eng = rhai::Engine::new();
        eng.on_print({
            let snapshot = snapshot.clone();
            move |s| {
                let mut snapshot = snapshot.lock().unwrap();
                snapshot.push_str(s);
            }
        });
        eng.register_fn("unique_print", {
            let snapshot = snapshot.clone();
            move |s: &str| {
                let mut snapshot = snapshot.lock().unwrap();
                if !snapshot.contains(s) {
                    snapshot.push_str(s);
                }
            }
        });
        eng.register_fn("println", {
            let snapshot = snapshot.clone();
            move |s: &str| {
                let mut snapshot = snapshot.lock().unwrap();
                snapshot.push_str(s);
                snapshot.push('\n');
            }
        });
        eng.register_fn("trace", {
            let trace = trace.clone();
            move |enable: bool| {
                *trace.lock().unwrap() = Some(enable);
            }
        });
        // Self-contained AST: no further references to `scope` are needed
        // when evaluating per-event later.
        let scope = rhai::Scope::new();
        let ast = eng.compile_into_self_contained(&scope, script).unwrap();
        Self {
            inner: Mutex::new(Inner {
                eng,
                ast,
                pkg_name_cache: HashMap::default(),
                snapshot,
                trace,
                root: root.to_string(),
            }),
        }
    }
}
impl Observer for HarnessCallbackHandler {
    /// Appends a compiler trace message (plus newline) to the snapshot buffer.
    fn on_trace(&self, msg: &str) {
        let inner = self.inner.lock().unwrap();
        let mut snapshot = inner.snapshot.lock().unwrap();
        snapshot.push_str(msg);
        snapshot.push('\n');
    }
    /// Forwards the event to the rhai script via [`Inner`].
    fn on_before_method_resolution(
        &self,
        ctx: &sway_core::semantic_analysis::TypeCheckContext<'_>,
        method_name: &sway_core::type_system::ast_elements::binding::TypeBinding<
            sway_core::language::parsed::MethodName,
        >,
        args_types: &[sway_core::TypeId],
    ) {
        let mut inner = self.inner.lock().unwrap();
        inner.on_before_method_resolution(ctx, method_name, args_types);
        // NOTE(review): `Inner::on_before_method_resolution` already ends with
        // `self.apply(...)` (except on its early return for std/unknown
        // packages), so this extra call is redundant in the common case and
        // inconsistent with the sibling callbacks below — confirm whether
        // applying pending trace toggles on the early-return path is intended.
        inner.apply(ctx.engines);
    }
    /// Forwards the event to the rhai script via [`Inner`].
    fn on_after_method_resolution(
        &self,
        ctx: &sway_core::semantic_analysis::TypeCheckContext<'_>,
        method_name: &sway_core::type_system::ast_elements::binding::TypeBinding<
            sway_core::language::parsed::MethodName,
        >,
        args_types: &[sway_core::TypeId],
        new_ref: sway_core::decl_engine::DeclRefFunction,
        new_type_id: sway_core::TypeId,
    ) {
        let mut inner = self.inner.lock().unwrap();
        inner.on_after_method_resolution(ctx, method_name, args_types, new_ref, new_type_id);
    }
    /// Forwards the event to the rhai script via [`Inner`].
    fn on_after_ir_type_resolution(
        &self,
        engines: &Engines,
        ctx: &sway_ir::Context,
        type_info: &TypeInfo,
        ir_type: &Type,
    ) {
        let mut inner = self.inner.lock().unwrap();
        inner.on_after_ir_type_resolution(engines, ctx, type_info, ir_type);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/e2e_vm_tests/harness.rs | test/src/e2e_vm_tests/harness.rs | use crate::e2e_vm_tests::harness_callback_handler::HarnessCallbackHandler;
use super::RunConfig;
use anyhow::{anyhow, bail, Result};
use colored::Colorize;
use forc_client::{
cmd::{Deploy as DeployCommand, Run as RunCommand},
op::{deploy, run, DeployedPackage},
NodeTarget,
};
use forc_pkg::{BuildProfile, Built, BuiltPackage, PrintOpts};
use forc_test::{ecal::EcalSyscallHandler, TestGasLimit};
use fuel_tx::TransactionBuilder;
use fuel_vm::checked_transaction::builder::TransactionBuilderExt;
use fuel_vm::fuel_tx::{self, consensus_parameters::ConsensusParametersV1};
use fuel_vm::interpreter::Interpreter;
use fuel_vm::prelude::*;
use futures::Future;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use regex::{Captures, Regex};
use std::{fs, io::Read, path::PathBuf};
use sway_core::{asm_generation::ProgramABI, BuildTarget, Observer};
pub const NODE_URL: &str = "http://127.0.0.1:4000";
/// Runs the async closure `func` while redirecting stdout and stderr into
/// in-memory buffers, returning the closure's result together with the
/// captured output (stdout first, then stderr).
///
/// On Windows the `\\?\` verbatim path prefix is stripped and backslashes are
/// normalized to `/` so file checkers can match unix-style paths.
pub(crate) async fn run_and_capture_output<F, Fut, T>(func: F) -> (T, String)
where
    F: FnOnce() -> Fut,
    Fut: Future<Output = T>,
{
    // Install both redirects before running so nothing escapes to the console.
    let stdout_gag = gag::BufferRedirect::stdout();
    let stderr_gag = gag::BufferRedirect::stderr();

    let result = func().await;

    // Drain stdout, then stderr, into one string; dropping each redirect
    // restores the original stream.
    let mut captured = String::new();
    for gagged in [stdout_gag, stderr_gag] {
        if let Ok(mut redirect) = gagged {
            redirect.read_to_string(&mut captured).unwrap();
            drop(redirect);
        }
    }

    if cfg!(windows) {
        let verbatim = Regex::new(r"\\\\?\\(.*)").unwrap();
        captured = verbatim
            .replace_all(&captured, |caps: &Captures| caps[1].replace('\\', "/"))
            .to_string();
    }

    (result, captured)
}
/// Deploys the contract at `test_programs/{file_name}` via `forc-client`,
/// returning the id of the first *contract* among the deployed packages.
///
/// # Errors
/// Fails when deployment fails or when none of the deployed packages is a
/// contract.
pub(crate) async fn deploy_contract(
    file_name: &str,
    run_config: &RunConfig,
    signing_key: &SecretKey,
) -> Result<ContractId> {
    println!(" Deploying {} ...", file_name.bold());
    println!(" Signing key used: {}", signing_key.to_string().bold());
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    let deployed_packages = deploy(DeployCommand {
        pkg: forc_client::cmd::deploy::Pkg {
            path: Some(format!(
                "{manifest_dir}/src/e2e_vm_tests/test_programs/{file_name}"
            )),
            terse: !run_config.verbose,
            locked: run_config.locked,
            ..Default::default()
        },
        signing_key: Some(*signing_key),
        // Presumably makes the deployment salt deterministic across runs —
        // see forc-client for the exact semantics.
        default_salt: true,
        build_profile: match run_config.release {
            true => BuildProfile::RELEASE.to_string(),
            false => BuildProfile::DEBUG.to_string(),
        },
        experimental: run_config.experimental.clone(),
        ..Default::default()
    })
    .await?;
    // Scan ALL deployed packages for a contract (not just the first one),
    // matching the promise of the error message below.
    deployed_packages
        .into_iter()
        .find_map(|deployed_pkg| {
            if let DeployedPackage::Contract(deployed_contract) = deployed_pkg {
                Some(deployed_contract.id)
            } else {
                None
            }
        })
        .ok_or_else(|| anyhow!("expected to find at least one deployed contract."))
}
/// Run a given project against a node. Assumes the node is running at localhost:4000
/// (see [`NODE_URL`]). Returns the receipts of all ran scripts along with the
/// captured stdout/stderr of the run.
pub(crate) async fn runs_on_node(
    file_name: &str,
    run_config: &RunConfig,
    contract_ids: &[fuel_tx::ContractId],
    signing_key: &SecretKey,
) -> (Result<Vec<fuel_tx::Receipt>>, String) {
    run_and_capture_output(|| async {
        println!(" Running on node {} ...", file_name.bold());
        let manifest_dir = env!("CARGO_MANIFEST_DIR");
        // The CLI expects contract ids as 0x-prefixed hex strings.
        let contracts = contract_ids
            .iter()
            .map(|contract_id| format!("0x{contract_id:x}"))
            .collect::<Vec<_>>();
        let command = RunCommand {
            pkg: forc_client::cmd::run::Pkg {
                path: Some(format!(
                    "{manifest_dir}/src/e2e_vm_tests/test_programs/{file_name}"
                )),
                locked: run_config.locked,
                terse: !run_config.verbose,
                ..Default::default()
            },
            node: NodeTarget {
                node_url: Some(NODE_URL.into()),
                ..Default::default()
            },
            contract: Some(contracts),
            signing_key: Some(*signing_key),
            experimental: run_config.experimental.clone(),
            ..Default::default()
        };
        // Flatten the receipts of every script the run produced.
        run(command).await.map(|ran_scripts| {
            ran_scripts
                .into_iter()
                .flat_map(|ran_script| ran_script.receipts)
                .collect::<Vec<_>>()
        })
    })
    .await
}
/// Outcome of executing compiled bytecode in a VM (see [`runs_in_vm`]).
pub(crate) enum VMExecutionResult {
    /// FuelVM result: final program state, produced receipts, and the ecal
    /// handler holding captured syscalls (e.g. `Write` output).
    Fuel(ProgramState, Vec<Receipt>, Box<EcalSyscallHandler>),
    /// EVM (revm) execution result.
    Evm(revm::primitives::result::ExecutionResult),
}
/// Very basic check that code does indeed run in the VM.
///
/// For the Fuel target, builds a script transaction from the compiled
/// bytecode (with a deterministic RNG for the coin input), maximizes the
/// script gas limit, and runs it on an in-memory interpreter. For the EVM
/// target, deploys and then calls the bytecode on an in-memory revm instance.
pub(crate) fn runs_in_vm(
    script: BuiltPackage,
    script_data: Option<Vec<u8>>,
    witness_data: Option<Vec<Vec<u8>>>,
) -> Result<VMExecutionResult> {
    match script.descriptor.target {
        BuildTarget::Fuel => {
            let storage = MemoryStorage::default();
            // Fixed seed keeps the generated coin input deterministic.
            let rng = &mut StdRng::seed_from_u64(2322u64);
            let maturity = 1.into();
            let script_data = script_data.unwrap_or_default();
            let block_height = (u32::MAX >> 1).into();
            // The default max length is 1MB which isn't enough for the bigger tests.
            let max_size = 64 * 1024 * 1024;
            let script_params = ScriptParameters::DEFAULT
                .with_max_script_length(max_size)
                .with_max_script_data_length(max_size);
            let tx_params = TxParameters::DEFAULT.with_max_size(max_size);
            let params = ConsensusParameters::V1(ConsensusParametersV1 {
                script_params,
                tx_params,
                ..Default::default()
            });
            let mut tb = TransactionBuilder::script(script.bytecode.bytes, script_data);
            tb.with_params(params)
                .add_unsigned_coin_input(
                    SecretKey::random(rng),
                    rng.r#gen(),
                    1,
                    Default::default(),
                    rng.r#gen(),
                )
                .maturity(maturity);
            if let Some(witnesses) = witness_data {
                for witness in witnesses {
                    tb.add_witness(witness.into());
                }
            }
            let gas_price = 0;
            let consensus_params = tb.get_params().clone();
            // NOTE(review): this shadows the custom `params` above with the
            // *defaults*, and `into_ready` below uses these default gas costs
            // and fee params rather than the builder's — confirm intended.
            let params = ConsensusParameters::default();
            // Temporarily finalize to calculate `script_gas_limit`
            let tmp_tx = tb.clone().finalize();
            // Get `max_gas` used by everything except the script execution. Add `1` because of rounding.
            let max_gas =
                tmp_tx.max_gas(consensus_params.gas_costs(), consensus_params.fee_params()) + 1;
            // Increase `script_gas_limit` to the maximum allowed value.
            tb.script_gas_limit(consensus_params.tx_params().max_gas_per_tx() - max_gas);
            let tx = tb
                .finalize_checked(block_height)
                .into_ready(gas_price, params.gas_costs(), params.fee_params(), None)
                .map_err(|e| anyhow::anyhow!("{e:?}"))?;
            let mem_instance = MemoryInstance::new();
            // Interpreter parameterized with the ecal handler so syscalls
            // made by the script are captured for inspection.
            let mut i: Interpreter<_, _, _, EcalSyscallHandler> =
                Interpreter::with_storage(mem_instance, storage, Default::default());
            let transition = i.transact(tx).map_err(anyhow::Error::msg)?;
            Ok(VMExecutionResult::Fuel(
                *transition.state(),
                transition.receipts().to_vec(),
                Box::new(i.ecal_state().clone()),
            ))
        }
        BuildTarget::EVM => {
            let mut evm = revm::EvmBuilder::default()
                .with_db(revm::InMemoryDB::default())
                .with_clear_env()
                .build();

            // Transaction to create the smart contract
            let result = evm
                .transact_commit()
                .map_err(|e| anyhow::anyhow!("Could not create smart contract on EVM: {e:?}"))?;
            match result {
                revm::primitives::ExecutionResult::Revert { .. }
                | revm::primitives::ExecutionResult::Halt { .. } => todo!(),
                revm::primitives::ExecutionResult::Success { ref output, .. } => match output {
                    revm::primitives::result::Output::Call(_) => todo!(),
                    revm::primitives::result::Output::Create(_bytes, address_opt) => {
                        match address_opt {
                            None => todo!(),
                            Some(address) => {
                                // Call the freshly created contract with the
                                // compiled bytecode as calldata.
                                evm.tx_mut().data = script.bytecode.bytes.into();
                                evm.tx_mut().transact_to =
                                    revm::interpreter::primitives::TransactTo::Call(*address);

                                let result = evm
                                    .transact_commit()
                                    .map_err(|e| anyhow::anyhow!("Failed call on EVM: {e:?}"))?;
                                Ok(VMExecutionResult::Evm(result))
                            }
                        }
                    }
                },
            }
        }
    }
}
/// Compiles the package at `test_programs/{file_name}` with the options
/// derived from `run_config`. When a `logs` rhai script is provided, a
/// [`HarnessCallbackHandler`] is installed as a compiler observer so the
/// script can record snapshot output during compilation.
///
/// Returns the build result; compiler panics are caught and turned into an
/// error, and compilation errors are echoed to stdout.
pub(crate) async fn compile_to_bytes(
    file_name: &str,
    run_config: &RunConfig,
    logs: &Option<String>,
) -> Result<Built> {
    println!("Compiling {} ...", file_name.bold());
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    let root = format!("{manifest_dir}/src/e2e_vm_tests/test_programs/{file_name}");
    let build_opts = forc_pkg::BuildOpts {
        build_target: run_config.build_target,
        build_profile: BuildProfile::DEBUG.into(),
        release: run_config.release,
        print: PrintOpts {
            ast: false,
            dca_graph: None,
            dca_graph_url_format: None,
            asm: run_config.print_asm,
            bytecode: run_config.print_bytecode,
            bytecode_spans: run_config.print_bytecode,
            ir: run_config.print_ir.clone(),
            reverse_order: false,
        },
        verify_ir: run_config.verify_ir.clone(),
        pkg: forc_pkg::PkgOpts {
            path: Some(root.clone()),
            locked: run_config.locked,
            terse: false,
            ..Default::default()
        },
        experimental: run_config.experimental.experimental.clone(),
        no_experimental: run_config.experimental.no_experimental.clone(),
        no_output: !run_config.write_output,
        ..Default::default()
    };
    // The compiler may panic on malformed inputs; catch it so a single test
    // does not take down the whole harness.
    match std::panic::catch_unwind(|| {
        let callback_handler: Option<Box<dyn Observer>> = if let Some(script) = logs {
            Some(Box::new(HarnessCallbackHandler::new(&root, script)))
        } else {
            None
        };
        forc_pkg::build_with_options(&build_opts, callback_handler)
    }) {
        Ok(result) => {
            // Print the result of the compilation (i.e., any errors Forc produces).
            if let Err(ref e) = result {
                println!("\n{e}");
            }
            result
        }
        Err(_) => Err(anyhow!("Compiler panic")),
    }
}
/// Compiles the project's unit tests, then runs all unit tests.
/// Returns the tested package result(s) (one per workspace member) together
/// with the captured stdout/stderr of the whole operation.
pub(crate) async fn compile_and_run_unit_tests(
    file_name: &str,
    run_config: &RunConfig,
    capture_output: bool,
) -> (Result<Vec<forc_test::TestedPackage>>, String) {
    run_and_capture_output(|| async {
        tracing::info!("Compiling {} ...", file_name.bold());
        let manifest_dir = env!("CARGO_MANIFEST_DIR");
        let path: PathBuf = [
            manifest_dir,
            "src",
            "e2e_vm_tests",
            "test_programs",
            file_name,
        ]
        .iter()
        .collect();
        // Catch compiler panics so one failing test cannot abort the harness.
        match std::panic::catch_unwind(|| {
            forc_test::build(forc_test::TestOpts {
                pkg: forc_pkg::PkgOpts {
                    path: Some(path.to_string_lossy().into_owned()),
                    locked: run_config.locked,
                    terse: !(capture_output || run_config.verbose),
                    ..Default::default()
                },
                experimental: run_config.experimental.experimental.clone(),
                no_experimental: run_config.experimental.no_experimental.clone(),
                release: run_config.release,
                print: PrintOpts {
                    asm: run_config.print_asm,
                    bytecode: run_config.print_bytecode,
                    ir: run_config.print_ir.clone(),
                    ..Default::default()
                },
                build_target: run_config.build_target,
                no_output: !run_config.write_output,
                ..Default::default()
            })
        }) {
            Ok(Ok(built_tests)) => {
                // Run every built test (no name filter) with an
                // auto-detected number of runners.
                let test_filter = None;
                let tested = built_tests.run(
                    forc_test::TestRunnerCount::Auto,
                    test_filter,
                    run_config.gas_costs_values.clone(),
                    TestGasLimit::default(),
                )?;
                // Normalize single-package and workspace results to a Vec.
                match tested {
                    forc_test::Tested::Package(tested_pkg) => Ok(vec![*tested_pkg]),
                    forc_test::Tested::Workspace(tested_pkgs) => Ok(tested_pkgs),
                }
            }
            Ok(Err(e)) => Err(e),
            Err(_) => Err(anyhow!("Compiler panic")),
        }
    })
    .await
}
/// Compares the freshly generated JSON ABI of `built_package` against the
/// checked-in oracle file for the test at `file_name`.
///
/// The oracle/output file names encode the experimental-feature suffix
/// (derived from the `test.*.toml` file name, or the legacy encoding flag)
/// and the build profile (`debug`/`release`). When `update_output_files` is
/// set, the oracle is overwritten with the fresh output (failing silently).
///
/// # Errors
/// Fails when either file is missing or when oracle and output differ (the
/// diff is included in the error message).
pub(crate) fn test_json_abi(
    file_name: &str,
    built_package: &BuiltPackage,
    experimental_new_encoding: bool,
    update_output_files: bool,
    suffix: &Option<String>,
    has_experimental_field: bool,
    is_release: bool,
) -> Result<()> {
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    // `test.foo.toml` -> `.foo`; legacy tests fall back to the encoding flag.
    let experimental_suffix = match (has_experimental_field, experimental_new_encoding) {
        (true, _) => suffix
            .as_ref()
            .unwrap()
            .strip_prefix("test")
            .unwrap()
            .strip_suffix("toml")
            .unwrap()
            .trim_end_matches('.'),
        (false, true) => "_new_encoding",
        (false, false) => "",
    };
    let profile = if is_release { "release" } else { "debug" };
    let oracle_path = format!(
        "{manifest_dir}/src/e2e_vm_tests/test_programs/{file_name}/json_abi_oracle{experimental_suffix}.{profile}.json",
    );
    let output_path = format!(
        "{manifest_dir}/src/e2e_vm_tests/test_programs/{file_name}/json_abi_output{experimental_suffix}.{profile}.json",
    );

    emit_json_abi(file_name, &output_path, built_package)?;

    // Update the oracle failing silently
    if update_output_files {
        let _ = std::fs::copy(&output_path, &oracle_path);
    }

    // `fs::metadata` takes `impl AsRef<Path>`, so no clones are needed here.
    if fs::metadata(&oracle_path).is_err() {
        bail!(
            "JSON ABI oracle file does not exist for this test\nExpected oracle path: {}",
            &oracle_path
        );
    }
    if fs::metadata(&output_path).is_err() {
        bail!(
            "JSON ABI output file does not exist for this test\nExpected output path: {}",
            &output_path
        );
    }

    let oracle_contents = fs::read_to_string(&oracle_path)
        .expect("Something went wrong reading the JSON ABI oracle file.");
    let output_contents = fs::read_to_string(&output_path)
        .expect("Something went wrong reading the JSON ABI output file.");
    if oracle_contents != output_contents {
        bail!(
            "Mismatched ABI JSON output.\nOracle path: {}\nOutput path: {}\n{}",
            oracle_path,
            output_path,
            prettydiff::diff_lines(&oracle_contents, &output_contents)
        );
    }
    Ok(())
}
/// Serializes `built_package`'s program ABI as pretty-printed JSON into the
/// file at `json_abi_output_path`. MidenVM ABIs are not supported yet.
fn emit_json_abi(
    file_name: &str,
    json_abi_output_path: &str,
    built_package: &BuiltPackage,
) -> Result<()> {
    tracing::info!("ABI JSON gen {} ...", file_name.bold());
    let json_abi = match &built_package.program_abi {
        ProgramABI::Fuel(abi) => serde_json::json!(abi),
        ProgramABI::Evm(abi) => serde_json::json!(abi),
        ProgramABI::MidenVM(_) => todo!(),
    };
    let file = std::fs::File::create(json_abi_output_path)?;
    serde_json::to_writer_pretty(&file, &json_abi)?;
    Ok(())
}
/// Compares the freshly generated JSON storage slots of `built_package`
/// against the checked-in oracle file for the test at `file_name`.
///
/// File names carry the experimental suffix derived from the `test.*.toml`
/// name (e.g. `test.foo.toml` -> `.foo`).
///
/// # Errors
/// Fails when either file is missing or when oracle and output differ (the
/// diff is included in the error message).
pub(crate) fn test_json_storage_slots(
    file_name: &str,
    built_package: &BuiltPackage,
    suffix: &Option<String>,
) -> Result<()> {
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    let experimental_suffix = suffix
        .as_ref()
        .unwrap()
        .strip_prefix("test")
        .unwrap()
        .strip_suffix("toml")
        .unwrap()
        .trim_end_matches('.');
    let oracle_path = format!(
        "{}/src/e2e_vm_tests/test_programs/{}/json_storage_slots_oracle{}.json",
        manifest_dir, file_name, experimental_suffix,
    );
    let output_path = format!(
        "{}/src/e2e_vm_tests/test_programs/{}/json_storage_slots_output{}.json",
        manifest_dir, file_name, experimental_suffix,
    );

    emit_json_storage_slots(file_name, &output_path, built_package)?;

    // `fs::metadata` / `read_to_string` take `impl AsRef<Path>`; borrow the
    // paths instead of cloning them.
    if fs::metadata(&oracle_path).is_err() {
        bail!("JSON storage slots oracle file does not exist for this test.\nExpected oracle path: {}", &oracle_path);
    }
    if fs::metadata(&output_path).is_err() {
        bail!("JSON storage slots output file does not exist for this test.\nExpected output path: {}", &output_path);
    }

    let oracle_contents = fs::read_to_string(&oracle_path)
        .expect("Something went wrong reading the JSON storage slots oracle file.");
    let output_contents = fs::read_to_string(&output_path)
        .expect("Something went wrong reading the JSON storage slots output file.");
    if oracle_contents != output_contents {
        bail!(
            "Mismatched storage slots JSON output.\nOracle path: {}\nOutput path: {}\n{}",
            oracle_path,
            output_path,
            prettydiff::diff_lines(&oracle_contents, &output_contents)
        );
    }
    Ok(())
}
/// Serializes `built_package`'s storage slots as pretty-printed JSON into
/// the file at `json_storage_slots_output_path`.
fn emit_json_storage_slots(
    file_name: &str,
    json_storage_slots_output_path: &str,
    built_package: &BuiltPackage,
) -> Result<()> {
    tracing::info!("Storage slots JSON gen {} ...", file_name.bold());
    serde_json::to_writer_pretty(
        std::fs::File::create(json_storage_slots_output_path)?,
        &serde_json::json!(built_package.storage_slots),
    )?;
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/snapshot/mod.rs | test/src/snapshot/mod.rs | use anyhow::Result;
use libtest_mimic::{Arguments, Trial};
use normalize_path::NormalizePath;
use regex::{Captures, Regex};
use std::{
collections::{BTreeSet, HashMap, VecDeque},
path::{Path, PathBuf},
str::FromStr,
sync::Once,
};
use sway_core::Engines;
use sway_features::ExperimentalFeatures;
use sway_ir::{function_print, Backtrace};
static FORC_COMPILATION: Once = Once::new();
static FORC_DOC_COMPILATION: Once = Once::new();
/// Builds the `forc` binary in release mode so snapshot tests can invoke it.
///
/// # Panics
/// Panics with cargo's captured stderr when the build fails, so the failure
/// cause shows up in the test log instead of a bare assertion message.
fn compile_forc() {
    let o = std::process::Command::new("cargo")
        .args(["b", "--release", "-p", "forc"])
        .output()
        .unwrap();
    assert!(
        o.status.success(),
        "failed to build `forc`:\n{}",
        String::from_utf8_lossy(&o.stderr)
    );
}
/// Builds the `forc-doc` binary in release mode so snapshot tests can invoke it.
///
/// # Panics
/// Panics with cargo's captured stderr when the build fails, so the failure
/// cause shows up in the test log instead of a bare assertion message.
fn compile_forc_doc() {
    let o = std::process::Command::new("cargo")
        .args(["b", "--release", "-p", "forc-doc"])
        .output()
        .unwrap();
    assert!(
        o.status.success(),
        "failed to build `forc-doc`:\n{}",
        String::from_utf8_lossy(&o.stderr)
    );
}
/// RAII guard that remembers the original bytes of files so they can be
/// restored when the guard is dropped.
#[derive(Default)]
struct UndoFiles {
    // Map from file path to the bytes to write back on drop.
    contents: HashMap<PathBuf, Vec<u8>>,
}
/// Restores every recorded file to its original contents when the guard
/// goes out of scope. Write errors are deliberately ignored: restoration is
/// best-effort cleanup.
impl Drop for UndoFiles {
    fn drop(&mut self) {
        // Hash-map iteration order is irrelevant here; each entry is an
        // independent file restore.
        #[allow(clippy::iter_over_hash_type)]
        for (file, original) in self.contents.drain() {
            let _ = std::fs::write(file, original);
        }
    }
}
/// Discovers and runs all snapshot tests via `libtest_mimic`, optionally
/// filtered by `filter_regex`. Each test reads its `snapshot.toml`, executes
/// the listed commands, and asserts the combined output against an insta
/// "stdout" snapshot stored next to the test. Never returns normally:
/// `libtest_mimic::run(..).exit()` terminates the process.
pub(super) async fn run(filter_regex: Option<&regex::Regex>) -> Result<()> {
    // Repository root is the parent of this crate's manifest dir (`test/`).
    let repo_root = PathBuf::from_str(&std::env::var("CARGO_MANIFEST_DIR")?)?
        .parent()
        .unwrap()
        .to_path_buf();
    let test_root = repo_root.join("test");
    let test_programs_dir = test_root.join("src/e2e_vm_tests/test_programs/");
    let args = Arguments {
        filter: filter_regex.as_ref().map(|filter| filter.to_string()),
        nocapture: true,
        ..Default::default()
    };
    let tests = discover_tests(&test_root)
        .into_iter()
        .map(|dir| {
            // Test name is the directory path relative to `test_programs/`.
            let name = dir
                .strip_prefix(&test_programs_dir)
                .unwrap()
                .display()
                .to_string();
            let repo_root = repo_root.clone();
            Trial::test(name.clone(), move || {
                let snapshot_toml =
                    std::fs::read_to_string(format!("{}/snapshot.toml", dir.display()))?;
                // An empty `snapshot.toml` defaults to a plain `forc build`.
                let snapshot_toml = if snapshot_toml.trim().is_empty() {
                    "cmds = [ \"forc build --path {root}\" ]".to_string()
                } else {
                    snapshot_toml
                };
                let snapshot_toml = toml::from_str::<toml::Value>(&snapshot_toml)?;
                let root = dir.strip_prefix(&repo_root).unwrap().display().to_string();
                let cmds = snapshot_toml.get("cmds").unwrap().as_array().unwrap();
                let mut snapshot = String::new();
                let _ = run_cmds(&name, &repo_root, &root, cmds, &mut snapshot);
                // Asserts `snapshot` against the "stdout" insta snapshot
                // rooted at the test's own directory.
                fn stdout(root: &str, snapshot: &str) {
                    let root = PathBuf::from_str(root).unwrap();
                    let root = root.normalize();
                    let mut insta = insta::Settings::new();
                    insta.set_snapshot_path(root);
                    insta.set_prepend_module_to_snapshot(false);
                    insta.set_omit_expression(true);
                    let scope = insta.bind_to_scope();
                    insta::assert_snapshot!("stdout", snapshot);
                    drop(scope);
                }
                stdout(&format!("{}/{root}", repo_root.display()), &snapshot);
                Ok(())
            })
        })
        .collect();
    libtest_mimic::run(&args, tests).exit();
}
fn run_cmds(
test_name: &String,
repo_root: &PathBuf,
root: &String,
cmds: &Vec<toml::Value>,
snapshot: &mut String,
) -> std::result::Result<(), libtest_mimic::Failed> {
use std::fmt::Write;
let name = PathBuf::from_str(test_name).unwrap();
let name = name.file_stem().unwrap();
let find_blocks_regex = Regex::new(r#"START ([0-9a-zA-Z_]*)"#).unwrap();
for cmd in cmds {
match cmd {
toml::Value::String(cmd) => {
let cmd = cmd
.replace("{root}", root)
.replace("{name}", name.to_str().unwrap());
if !cmd.starts_with("echo") {
let _ = writeln!(snapshot, "> {cmd}");
}
let mut last_output: Option<String> = None;
// We intentionally split the command by " | " to allow for
// `regex` command to support `|` operator, although without
// surrounding spaces.
for cmd in cmd.split(" | ") {
let cmd = cmd.trim();
let cmd = if let Some(cmd) = cmd.strip_prefix("forc doc ") {
FORC_DOC_COMPILATION.call_once(|| {
compile_forc_doc();
});
format!("target/release/forc-doc {cmd} 1>&2")
} else if let Some(cmd) = cmd.strip_prefix("forc ") {
FORC_COMPILATION.call_once(|| {
compile_forc();
});
format!("target/release/forc {cmd} 1>&2")
} else if let Some(cmd) = cmd.strip_prefix("sub ") {
let arg = cmd.trim();
if let Some(l) = last_output.take() {
let mut new_output = String::new();
for line in l.lines() {
if line.contains(arg) {
new_output.push_str(line);
new_output.push('\n');
}
}
last_output = Some(new_output);
}
continue;
} else if let Some(cmd) = cmd.strip_prefix("regex ") {
let arg = cmd.trim();
let arg = arg.trim_matches('\'');
let regex = Regex::new(arg).expect("regex provided to the snapshot `regex` filter is not a valid Rust regex");
if let Some(l) = last_output.take() {
let mut new_output = String::new();
for line in l.lines() {
if regex.is_match(line) {
new_output.push_str(line);
new_output.push('\n');
}
}
last_output = Some(new_output);
}
continue;
} else if let Some(args) = cmd.strip_prefix("replace-file ") {
let Some((path, args)) = args.trim().split_once(" ") else {
panic!("replace needs three arguments: file from to");
};
let Some(from) = args.trim().strip_prefix("\"") else {
panic!("replace arguments must be quoted");
};
let Some((from, args)) = from.split_once("\"") else {
panic!("replace arguments must be quoted");
};
let Some(to) = args.trim().strip_prefix("\"") else {
panic!("replace arguments must be quoted");
};
let Some((to, _)) = to.split_once("\"") else {
panic!("replace arguments must be quoted");
};
let proj_root = repo_root.join(root);
let path = proj_root.join(path);
let path = path.canonicalize().unwrap();
if !path
.display()
.to_string()
.starts_with(&proj_root.display().to_string())
{
panic!("not allowed to edit files outside project folder");
}
let contents = std::fs::read_to_string(&path).unwrap();
let contents = contents.replace(from, to);
std::fs::write(path, contents).unwrap();
continue;
} else if let Some(args) = cmd.strip_prefix("filter-fn ") {
if let Some(output) = last_output.take() {
let (name, fns) = args.trim().split_once(" ").unwrap();
let fns = fns
.split(",")
.map(|x| x.trim().to_string())
.collect::<BTreeSet<String>>();
let mut captured = String::new();
let mut inside_ir = false;
let mut inside_asm = false;
let mut last_asm_lines = VecDeque::new();
let mut capture_line = false;
let compiling_project_line = format!("Compiling script {name}");
for line in output.lines() {
if line.contains(&compiling_project_line) {
inside_ir = true;
}
if line.contains(";; ASM: Final program") {
inside_asm = true;
}
if inside_ir {
if line.starts_with("// IR:") {
capture_line = true;
}
if line.starts_with("!0 =") {
let engines = Engines::default();
let ir = sway_ir::parse(
&captured,
engines.se(),
ExperimentalFeatures::default(),
Backtrace::None,
)
.unwrap();
for m in ir.module_iter() {
for f in m.function_iter(&ir) {
if fns.contains(f.get_name(&ir)) {
snapshot.push('\n');
function_print(snapshot, &ir, f, false)
.unwrap();
snapshot.push('\n');
}
}
}
capture_line = false;
inside_ir = false;
captured.clear();
}
}
if inside_asm {
if line.contains("save locals base register for function") {
for f in fns.iter() {
if line.contains(f.as_str()) {
capture_line = true;
snapshot.push('\n');
for l in last_asm_lines.drain(..) {
snapshot.push_str(l);
snapshot.push('\n');
}
}
}
}
// keep the last two lines
if last_asm_lines.len() >= 2 {
last_asm_lines.pop_front();
}
last_asm_lines.push_back(line);
if line.is_empty() {
inside_asm = false;
}
if line.contains("; return from call") {
if capture_line {
captured.push_str(line);
captured.push('\n');
write!(snapshot, "{captured}").unwrap();
captured.clear();
}
capture_line = false;
}
}
if capture_line {
captured.push_str(line);
captured.push('\n');
}
}
last_output = Some(String::new());
}
continue;
} else if let Some(args) = cmd.strip_prefix("echo ") {
let mut chars = args.chars();
'nextline: loop {
for _ in 0..80 {
if let Some(c) = chars.next() {
snapshot.push(c);
} else {
break 'nextline;
}
}
for c in chars.by_ref() {
if c == ' ' || c == '\n' {
snapshot.push('\n');
continue 'nextline;
} else {
snapshot.push(c);
}
}
break 'nextline;
}
snapshot.push('\n');
continue;
} else {
panic!("`{cmd}` is not a supported snapshot command.\nPossible tool commands: forc doc, forc\nPossible filtering commands: sub, regex, filter-fn");
};
let o = duct::cmd!("bash", "-c", cmd.clone())
.dir(repo_root.clone())
.stderr_to_stdout()
.stdout_capture();
let o = if let Some(last_output) = last_output.as_ref() {
o.stdin_bytes(last_output.as_bytes())
} else {
o
};
let o = o.env("COLUMNS", "10").unchecked().start().unwrap();
let o = o.wait().unwrap();
last_output = Some(clean_output(&format!(
"exit status: {}\noutput:\n{}",
o.status.code().unwrap(),
std::str::from_utf8(&o.stdout).unwrap(),
)));
}
let _ = writeln!(snapshot, "{}", last_output.unwrap_or_default());
}
toml::Value::Table(map) => {
let repeat_type = map["repeat"].as_str().unwrap();
let cmds = map["cmds"].as_array().unwrap();
match repeat_type {
"for-each-block" => {
fn remove_block_from_file(contents: &str, block_name: &str) -> String {
let block_regex = Regex::new(&format!("\\/\\* START {block_name} \\*\\/[.\\s\\S]+?END {block_name} \\*\\/")).unwrap();
block_regex
.replace_all(contents, |_: &Captures| -> String { String::new() })
.to_string()
}
let path = PathBuf::from_str(root).unwrap().join("src/main.sw");
let byte_contents = std::fs::read(&path).unwrap();
let contents = String::from_utf8(byte_contents.clone()).unwrap();
let mut blocks = BTreeSet::new();
for capture in find_blocks_regex.captures_iter(&contents) {
let name = capture.get(1).unwrap().as_str().to_string();
blocks.insert(name);
}
for block in blocks.iter() {
let _ = writeln!(snapshot, "> Block: {block}");
let mut undo = UndoFiles::default();
undo.contents.insert(path.clone(), byte_contents.clone());
let mut new_contents = contents.clone();
for remove_block in blocks.iter() {
if remove_block != block {
new_contents =
remove_block_from_file(&new_contents, remove_block)
.to_string();
}
}
let _ = std::fs::write(&path, new_contents);
let _ = run_cmds(test_name, repo_root, root, cmds, snapshot);
}
}
_ => {
panic!("`{cmd}` is not a supported repeat type.\nPossible types are: for-each-block.");
}
}
}
_ => {
panic!("`cmds` items can only be strings or inline tables.");
}
}
}
Ok(())
}
pub fn discover_tests(test_root: &Path) -> Vec<PathBuf> {
use glob::glob;
let mut entries = vec![];
let pattern = format!("{}/**/snapshot.toml", test_root.display());
for entry in glob(&pattern)
.expect("Failed to read glob pattern")
.flatten()
{
entries.push(entry.parent().unwrap().to_owned())
}
entries
}
fn clean_output(output: &str) -> String {
#[derive(Default)]
struct RawText(String);
impl vte::Perform for RawText {
fn print(&mut self, c: char) {
self.0.push(c);
}
fn execute(&mut self, _: u8) {}
fn hook(&mut self, _: &vte::Params, _: &[u8], _: bool, _: char) {}
fn put(&mut self, b: u8) {
self.0.push(b as char);
}
fn unhook(&mut self) {}
fn osc_dispatch(&mut self, _: &[&[u8]], _: bool) {}
fn csi_dispatch(&mut self, _: &vte::Params, _: &[u8], _: bool, _: char) {}
fn esc_dispatch(&mut self, _: &[u8], _: bool, _: u8) {}
}
let mut raw = String::new();
for line in output.lines() {
let mut performer = RawText::default();
let mut p = vte::Parser::new();
for b in line.as_bytes() {
p.advance(&mut performer, *b);
}
raw.push_str(&performer.0);
raw.push('\n');
}
// Remove absolute paths from snapshot tests
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let manifest_dir: PathBuf = PathBuf::from(manifest_dir);
let parent = manifest_dir.parent().unwrap();
let result = raw.replace(&format!("{}/", parent.display()), "");
// Remove compilation time
let r = Regex::new("(Finished (debug|release) \\[.*?\\] target\\(s\\) \\[.*?\\] in )(.*?s)")
.unwrap();
let result = r.replace_all(&result, "$1???");
// Remove forc test time
let r = Regex::new("((F|f)inished in )(.*?s)").unwrap();
let result = r.replace_all(&result, "$1???");
// Remove individual test duration time
let r = Regex::new("(test .+ \\()(.*?s)(, .+ gas\\))").unwrap();
let result = r.replace_all(&result, "$1???$3");
// Remove test result "finished in" time
let r = Regex::new("(test result: .+ finished in )(.*?s)").unwrap();
let result = r.replace(&result, "$1???");
// Remove test duration time
let r = Regex::new("(Finished in )(.*?s)").unwrap();
let result = r.replace(&result, "$1???");
result.to_string()
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/attribute.rs | sway-parse/src/attribute.rs | use crate::priv_prelude::{Peek, Peeker};
use crate::{Parse, ParseBracket, ParseResult, ParseToEnd, Parser, ParserConsumed};
use sway_ast::attribute::{Annotated, Attribute, AttributeArg, AttributeDecl, AttributeHashKind};
use sway_ast::brackets::Parens;
use sway_ast::keywords::{EqToken, HashBangToken, HashToken, StorageToken};
use sway_ast::literal::LitBool;
use sway_ast::token::{DocComment, DocStyle};
use sway_ast::Literal;
use sway_error::parser_error::ParseErrorKind;
use sway_types::{Ident, Span, Spanned};
impl Peek for DocComment {
fn peek(peeker: Peeker<'_>) -> Option<DocComment> {
peeker.peek_doc_comment().ok().cloned()
}
}
impl Parse for DocComment {
fn parse(parser: &mut Parser) -> ParseResult<DocComment> {
match parser.take::<DocComment>() {
Some(doc_comment) => Ok(doc_comment),
None => Err(parser.emit_error(ParseErrorKind::ExpectedDocComment)),
}
}
}
impl Parse for Vec<AttributeDecl> {
fn parse(parser: &mut Parser) -> ParseResult<Self> {
let mut attributes = Vec::new();
loop {
if let Some(DocComment { .. }) = parser.peek() {
let doc_comment = parser.parse::<DocComment>()?;
let doc_comment_attr_decl = match doc_comment.doc_style {
DocStyle::Outer => AttributeDecl::new_outer_doc_comment(
doc_comment.span,
doc_comment.content_span,
),
DocStyle::Inner => AttributeDecl::new_inner_doc_comment(
doc_comment.span,
doc_comment.content_span,
),
};
attributes.push(doc_comment_attr_decl);
continue;
}
// This will parse both `#` and `#!` attributes.
if let Some(attr_decl) = parser.guarded_parse::<HashToken, _>()? {
attributes.push(attr_decl);
continue;
}
break;
}
Ok(attributes)
}
}
impl<T: Parse> Parse for Annotated<T> {
fn parse(parser: &mut Parser) -> ParseResult<Self> {
let attributes = parser.parse::<Vec<AttributeDecl>>()?;
if parser.check_empty().is_some() {
// Provide a dedicated error message for the case when we have
// inner doc comments (`//!`) at the end of the module (because
// there are no items after the comments).
let error = if attributes
.iter()
.all(|attr| attr.is_inner() && attr.is_doc_comment())
{
// Show the error on the complete doc comment.
let first_doc_line = attributes.first().expect(
"parsing `Annotated` guarantees that `attributes` have at least one element",
);
let last_doc_line = attributes.last().expect(
"parsing `Annotated` guarantees that `attributes` have at least one element",
);
let span = Span::join(first_doc_line.span(), &last_doc_line.span().start_span());
parser.emit_error_with_span(
ParseErrorKind::ExpectedInnerDocCommentAtTheTopOfFile,
span,
)
} else {
let is_only_documented = attributes.iter().all(|attr| attr.is_doc_comment());
parser.emit_error(ParseErrorKind::ExpectedAnAnnotatedElement { is_only_documented })
};
Err(error)
} else {
// Parse the `T` value.
let value = match parser.parse_with_recovery() {
Ok(value) => value,
Err(r) => {
let (spans, error) =
r.recover_at_next_line_with_fallback_error(ParseErrorKind::InvalidItem);
if let Some(error) = T::error(spans, error) {
error
} else {
Err(error)?
}
}
};
Ok(Annotated { attributes, value })
}
}
fn error(
spans: Box<[sway_types::Span]>,
error: sway_error::handler::ErrorEmitted,
) -> Option<Self>
where
Self: Sized,
{
T::error(spans, error).map(|value| Annotated {
attributes: vec![],
value,
})
}
}
impl Parse for AttributeDecl {
fn parse(parser: &mut Parser) -> ParseResult<Self> {
Ok(AttributeDecl {
hash_kind: parser.parse()?,
attribute: parser.parse()?,
})
}
}
impl Parse for AttributeHashKind {
fn parse(parser: &mut Parser) -> ParseResult<Self> {
match parser.take::<HashBangToken>() {
Some(hash_bang_token) => Ok(AttributeHashKind::Inner(hash_bang_token)),
None => match parser.take::<HashToken>() {
Some(hash_token) => Ok(AttributeHashKind::Outer(hash_token)),
None => Err(parser.emit_error(ParseErrorKind::ExpectedAnAttribute)),
},
}
}
}
impl Parse for AttributeArg {
fn parse(parser: &mut Parser) -> ParseResult<Self> {
let name = parser.parse()?;
match parser.take::<EqToken>() {
Some(_) => {
let value = match parser.take::<Ident>() {
Some(ident) if ident.as_str() == "true" => Literal::Bool(LitBool {
span: ident.span(),
kind: sway_ast::literal::LitBoolType::True,
}),
Some(ident) if ident.as_str() == "false" => Literal::Bool(LitBool {
span: ident.span(),
kind: sway_ast::literal::LitBoolType::False,
}),
_ => parser.parse()?,
};
Ok(AttributeArg {
name,
value: Some(value),
})
}
None => Ok(AttributeArg { name, value: None }),
}
}
}
impl Parse for Attribute {
fn parse(parser: &mut Parser) -> ParseResult<Self> {
let name = if let Some(storage) = parser.take::<StorageToken>() {
Ident::from(storage)
} else {
parser.parse()?
};
let args = Parens::try_parse(parser)?;
Ok(Attribute { name, args })
}
}
impl ParseToEnd for Attribute {
fn parse_to_end<'a, 'e>(mut parser: Parser<'a, '_>) -> ParseResult<(Self, ParserConsumed<'a>)> {
let attrib = parser.parse()?;
match parser.check_empty() {
Some(consumed) => Ok((attrib, consumed)),
None => Err(parser.emit_error(ParseErrorKind::UnexpectedTokenAfterAttribute)),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::parse;
use insta::*;
use sway_ast::ItemFn;
#[test]
fn parse_annotated_fn() {
assert_ron_snapshot!(parse::<Annotated<ItemFn>>(r#"
// I will be ignored.
//! This is a misplaced inner doc comment.
/// This is an outer doc comment.
#[storage(read)]
fn main() {
()
}
"#,), @r#"
Annotated(
attributes: [
AttributeDecl(
hash_kind: Inner(HashBangToken(
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 47,
end: 89,
source_id: None,
),
)),
attribute: SquareBrackets(
inner: Punctuated(
value_separator_pairs: [],
final_value_opt: Some(Attribute(
name: BaseIdent(
name_override_opt: Some("doc-comment"),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 47,
end: 89,
source_id: None,
),
is_raw_ident: false,
),
args: Some(Parens(
inner: Punctuated(
value_separator_pairs: [],
final_value_opt: Some(AttributeArg(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 50,
end: 89,
source_id: None,
),
is_raw_ident: false,
),
value: None,
)),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 50,
end: 89,
source_id: None,
),
)),
)),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 47,
end: 89,
source_id: None,
),
),
),
AttributeDecl(
hash_kind: Outer(HashToken(
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 102,
end: 135,
source_id: None,
),
)),
attribute: SquareBrackets(
inner: Punctuated(
value_separator_pairs: [],
final_value_opt: Some(Attribute(
name: BaseIdent(
name_override_opt: Some("doc-comment"),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 102,
end: 135,
source_id: None,
),
is_raw_ident: false,
),
args: Some(Parens(
inner: Punctuated(
value_separator_pairs: [],
final_value_opt: Some(AttributeArg(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 105,
end: 135,
source_id: None,
),
is_raw_ident: false,
),
value: None,
)),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 105,
end: 135,
source_id: None,
),
)),
)),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 102,
end: 135,
source_id: None,
),
),
),
AttributeDecl(
hash_kind: Outer(HashToken(
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 148,
end: 149,
source_id: None,
),
)),
attribute: SquareBrackets(
inner: Punctuated(
value_separator_pairs: [],
final_value_opt: Some(Attribute(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 150,
end: 157,
source_id: None,
),
is_raw_ident: false,
),
args: Some(Parens(
inner: Punctuated(
value_separator_pairs: [],
final_value_opt: Some(AttributeArg(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 158,
end: 162,
source_id: None,
),
is_raw_ident: false,
),
value: None,
)),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 157,
end: 163,
source_id: None,
),
)),
)),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 149,
end: 164,
source_id: None,
),
),
),
],
value: ItemFn(
fn_signature: FnSignature(
visibility: None,
fn_token: FnToken(
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 177,
end: 179,
source_id: None,
),
),
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 180,
end: 184,
source_id: None,
),
is_raw_ident: false,
),
generics: None,
arguments: Parens(
inner: Static(Punctuated(
value_separator_pairs: [],
final_value_opt: None,
)),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 184,
end: 186,
source_id: None,
),
),
return_type_opt: None,
where_clause_opt: None,
),
body: Braces(
inner: CodeBlockContents(
statements: [],
final_expr_opt: Some(Tuple(Parens(
inner: Nil,
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 205,
end: 207,
source_id: None,
),
))),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 188,
end: 220,
source_id: None,
),
),
span: Span(
src: "\n // I will be ignored.\n //! This is a misplaced inner doc comment.\n /// This is an outer doc comment.\n #[storage(read)]\n fn main() {\n ()\n }\n ",
start: 187,
end: 221,
source_id: None,
),
),
),
)
"#);
}
#[test]
fn parse_attribute() {
assert_ron_snapshot!(parse::<Attribute>(r#"
name(arg1, arg2 = "value", arg3)
"#,), @r#"
Attribute(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 13,
end: 17,
source_id: None,
),
is_raw_ident: false,
),
args: Some(Parens(
inner: Punctuated(
value_separator_pairs: [
(AttributeArg(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 18,
end: 22,
source_id: None,
),
is_raw_ident: false,
),
value: None,
), CommaToken(
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 22,
end: 23,
source_id: None,
),
)),
(AttributeArg(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 24,
end: 28,
source_id: None,
),
is_raw_ident: false,
),
value: Some(String(LitString(
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 31,
end: 38,
source_id: None,
),
parsed: "value",
))),
), CommaToken(
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 38,
end: 39,
source_id: None,
),
)),
],
final_value_opt: Some(AttributeArg(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 40,
end: 44,
source_id: None,
),
is_raw_ident: false,
),
value: None,
)),
),
span: Span(
src: "\n name(arg1, arg2 = \"value\", arg3)\n ",
start: 17,
end: 45,
source_id: None,
),
)),
)
"#);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/path.rs | sway-parse/src/path.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::keywords::{DoubleColonToken, OpenAngleBracketToken, SelfToken, StorageToken};
use sway_ast::{
AngleBrackets, PathExpr, PathExprSegment, PathType, PathTypeSegment, QualifiedPathRoot,
};
use sway_types::{Ident, Spanned};
impl Parse for PathExpr {
fn parse(parser: &mut Parser) -> ParseResult<PathExpr> {
let root_opt = match parser.take() {
Some(open_angle_bracket_token) => {
let qualified_path_root = parser.parse()?;
let close_angle_bracket_token = parser.parse()?;
let angle_brackets = AngleBrackets {
open_angle_bracket_token,
inner: qualified_path_root,
close_angle_bracket_token,
};
let double_colon_token = parser.parse()?;
Some((Some(angle_brackets), double_colon_token))
}
None => parser
.take()
.map(|double_colon_token| (None, double_colon_token)),
};
let prefix = parser.parse()?;
let mut suffix: Vec<(DoubleColonToken, PathExprSegment)> = Vec::new();
let mut incomplete_suffix = false;
while let Some(double_colon_token) = parser.take() {
if let Ok(segment) = parser.parse() {
suffix.push((double_colon_token, segment));
} else {
incomplete_suffix = true;
// this is to make the span be `foo::` instead of just `foo`
let dummy_path_expr_segment = PathExprSegment {
name: Ident::new(double_colon_token.span()),
generics_opt: None,
};
suffix.push((double_colon_token, dummy_path_expr_segment));
break;
}
}
Ok(PathExpr {
root_opt,
prefix,
suffix,
incomplete_suffix,
})
}
}
fn parse_ident(parser: &mut Parser) -> ParseResult<Ident> {
if let Some(token) = parser.take::<StorageToken>() {
Ok(Ident::from(token))
} else if let Some(token) = parser.take::<SelfToken>() {
Ok(Ident::from(token))
} else {
parser.parse::<Ident>()
}
}
impl Parse for PathExprSegment {
fn parse(parser: &mut Parser) -> ParseResult<PathExprSegment> {
Ok(PathExprSegment {
name: parse_ident(parser)?,
generics_opt: parser.guarded_parse::<(DoubleColonToken, OpenAngleBracketToken), _>()?,
})
}
}
impl Parse for PathType {
fn parse(parser: &mut Parser) -> ParseResult<PathType> {
let root_opt = match parser.take() {
Some(open_angle_bracket_token) => {
let qualified_path_root = parser.parse()?;
let close_angle_bracket_token = parser.parse()?;
let angle_brackets = AngleBrackets {
open_angle_bracket_token,
inner: qualified_path_root,
close_angle_bracket_token,
};
let double_colon_token = parser.parse()?;
Some((Some(angle_brackets), double_colon_token))
}
None => parser
.take()
.map(|double_colon_token| (None, double_colon_token)),
};
let prefix = parser.parse()?;
let mut suffix = Vec::new();
while let Some(double_colon_token) = parser.take() {
let segment = parser.parse()?;
suffix.push((double_colon_token, segment));
}
Ok(PathType {
root_opt,
prefix,
suffix,
})
}
}
impl Parse for PathTypeSegment {
fn parse(parser: &mut Parser) -> ParseResult<PathTypeSegment> {
let name = parse_ident(parser)?;
let generics_opt =
if let Some(generics) = parser.guarded_parse::<OpenAngleBracketToken, _>()? {
Some((None, generics))
} else if let Some((double_colon_token, generics)) =
parser.guarded_parse::<(DoubleColonToken, OpenAngleBracketToken), _>()?
{
Some((Some(double_colon_token), generics))
} else {
None
};
Ok(PathTypeSegment { name, generics_opt })
}
}
impl Parse for QualifiedPathRoot {
fn parse(parser: &mut Parser) -> ParseResult<QualifiedPathRoot> {
let ty = parser.parse()?;
let as_trait = (parser.parse()?, parser.parse()?);
Ok(QualifiedPathRoot { ty, as_trait })
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::parse;
use insta::*;
#[test]
fn parse_nested_path() {
assert_ron_snapshot!(parse::<PathExpr>(r#"
std::vec::Vec
"#,), @r#"
PathExpr(
root_opt: None,
prefix: PathExprSegment(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n std::vec::Vec\n ",
start: 13,
end: 16,
source_id: None,
),
is_raw_ident: false,
),
generics_opt: None,
),
suffix: [
(DoubleColonToken(
span: Span(
src: "\n std::vec::Vec\n ",
start: 16,
end: 18,
source_id: None,
),
), PathExprSegment(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n std::vec::Vec\n ",
start: 18,
end: 21,
source_id: None,
),
is_raw_ident: false,
),
generics_opt: None,
)),
(DoubleColonToken(
span: Span(
src: "\n std::vec::Vec\n ",
start: 21,
end: 23,
source_id: None,
),
), PathExprSegment(
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n std::vec::Vec\n ",
start: 23,
end: 26,
source_id: None,
),
is_raw_ident: false,
),
generics_opt: None,
)),
],
)
"#);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/lib.rs | sway-parse/src/lib.rs | mod attribute;
mod brackets;
mod expr;
mod generics;
mod item;
mod keywords;
mod literal;
mod module;
mod parse;
mod parser;
mod path;
mod pattern;
mod priv_prelude;
mod punctuated;
mod submodule;
#[cfg(test)]
mod test_utils;
mod token;
mod ty;
mod where_clause;
use crate::priv_prelude::*;
pub use crate::{
keywords::RESERVED_KEYWORDS,
parse::Parse,
parser::Parser,
token::{is_valid_identifier_or_path, lex, lex_commented, parse_int_suffix},
};
use sway_ast::{
attribute::Annotated,
token::{DocComment, DocStyle},
Module, ModuleKind,
};
use sway_error::handler::{ErrorEmitted, Handler};
use sway_features::ExperimentalFeatures;
use sway_types::{span::Source, SourceId};
pub fn parse_file(
handler: &Handler,
src: Source,
source_id: Option<SourceId>,
experimental: ExperimentalFeatures,
) -> Result<Annotated<Module>, ErrorEmitted> {
let end = src.text.len();
let ts = lex(handler, src, 0, end, source_id)?;
let (m, _) = Parser::new(handler, &ts, experimental).parse_to_end()?;
Ok(m)
}
pub fn parse_module_kind(
handler: &Handler,
src: Source,
source_id: Option<SourceId>,
experimental: ExperimentalFeatures,
) -> Result<ModuleKind, ErrorEmitted> {
let end = src.text.len();
let ts = lex(handler, src, 0, end, source_id)?;
let mut parser = Parser::new(handler, &ts, experimental);
while let Some(DocComment {
doc_style: DocStyle::Inner,
..
}) = parser.peek()
{
parser.parse::<DocComment>()?;
}
parser.parse()
}
#[cfg(test)]
mod tests {
use crate::*;
#[test]
fn parse_invalid() {
// just make sure these do not panic
let _res = parse_file(
&Handler::default(),
"script; fn main(256߄".into(),
None,
ExperimentalFeatures::default(),
);
let _res = parse_file(
&Handler::default(),
"script;
fn karr() {
let c: f828 = 0x00000000000000000000000vncifxp;
abi Zezybt {
#[mfzbezc, storage(r#
true }
}
cug"
.into(),
None,
ExperimentalFeatures::default(),
);
let _res = parse_file(
&Handler::default(),
"script;
stdfn main() {
let a: b256 = 0x000>0000000scri s = \"flibrary I24;
use std::primitives::*;
use std::assert::assert;
///\u{7eb}"
.into(),
None,
ExperimentalFeatures::default(),
);
let _res = parse_file(
&Handler::default(),
"script; \"\u{7eb}\u{7eb}".into(),
None,
ExperimentalFeatures::default(),
);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/parse.rs | sway-parse/src/parse.rs | use crate::keywords::RESERVED_KEYWORDS;
use crate::{ParseResult, Parser, ParserConsumed, Peeker};
use sway_ast::Intrinsic;
use sway_error::parser_error::ParseErrorKind;
use sway_types::{ast::Delimiter, Ident, Spanned};
pub trait Parse {
const FALLBACK_ERROR: ParseErrorKind = ParseErrorKind::InvalidItem;
fn parse(parser: &mut Parser) -> ParseResult<Self>
where
Self: Sized;
fn error(
#[allow(clippy::boxed_local)] _spans: Box<[sway_types::Span]>,
_error: sway_error::handler::ErrorEmitted,
) -> Option<Self>
where
Self: Sized,
{
None
}
}
pub trait Peek {
fn peek(peeker: Peeker<'_>) -> Option<Self>
where
Self: Sized;
}
pub trait ParseToEnd {
fn parse_to_end<'a>(parser: Parser<'a, '_>) -> ParseResult<(Self, ParserConsumed<'a>)>
where
Self: Sized;
}
impl<T> Parse for Box<T>
where
T: Parse,
{
fn parse(parser: &mut Parser) -> ParseResult<Box<T>> {
let value = parser.parse()?;
Ok(Box::new(value))
}
}
macro_rules! impl_tuple (
($($name:ident,)*) => {
impl<$($name,)*> Parse for ($($name,)*)
where
$($name: Parse,)*
{
#[allow(unused)]
fn parse(parser: &mut Parser) -> ParseResult<($($name,)*)> {
$(
#[allow(non_snake_case)]
let $name = parser.parse()?;
)*
Ok(($($name,)*))
}
}
impl<$($name,)*> Peek for ($($name,)*)
where
$($name: Peek,)*
{
fn peek(peeker: Peeker<'_>) -> Option<Self> {
#![allow(unused_assignments, unused, non_snake_case)]
let mut tokens = peeker.token_trees;
$(
let ($name, fewer_tokens) = Peeker::with::<$name>(tokens)?;
tokens = fewer_tokens;
)*
Some(($($name,)*))
}
}
};
);
impl_tuple!();
impl_tuple!(T0,);
impl_tuple!(T0, T1,);
impl_tuple!(T0, T1, T2,);
impl_tuple!(T0, T1, T2, T3,);
impl_tuple!(T0, T1, T2, T3, T4,);
impl_tuple!(T0, T1, T2, T3, T4, T5,);
impl_tuple!(T0, T1, T2, T3, T4, T5, T6,);
impl_tuple!(T0, T1, T2, T3, T4, T5, T6, T7,);
impl_tuple!(T0, T1, T2, T3, T4, T5, T6, T7, T8,);
impl_tuple!(T0, T1, T2, T3, T4, T5, T6, T7, T8, T9,);
impl_tuple!(T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10,);
impl_tuple!(T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11,);
impl<T> ParseToEnd for Vec<T>
where
    T: Parse,
{
    /// Parses `T`s until the token stream is exhausted, recovering from a
    /// failed item by skipping to the next line and either pushing an
    /// error-representation of `T` (if it has one) or bailing out.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(Vec<T>, ParserConsumed<'a>)> {
        let mut ret = Vec::new();
        loop {
            if let Some(consumed) = parser.check_empty() {
                return Ok((ret, consumed));
            }
            match parser.parse_with_recovery() {
                Ok(value) => ret.push(value),
                Err(r) => {
                    // Skip the rest of the offending line; `spans` covers the
                    // tokens that were discarded during recovery.
                    let (spans, error) =
                        r.recover_at_next_line_with_fallback_error(T::FALLBACK_ERROR);
                    if let Some(error) = T::error(spans, error) {
                        ret.push(error);
                    } else {
                        Err(error)?
                    }
                }
            }
        }
    }
}
impl Peek for Ident {
    /// Succeeds when the next token tree is an identifier, yielding a clone of it.
    fn peek(peeker: Peeker<'_>) -> Option<Ident> {
        match peeker.peek_ident() {
            Ok(ident) => Some(ident.clone()),
            Err(_) => None,
        }
    }
}
impl Parse for Ident {
    /// Parses an identifier, rejecting:
    /// - `__`-prefixed names that are not known intrinsics (when
    ///   `check_double_underscore` is enabled on the parser), and
    /// - reserved keywords, unless spelled as a raw identifier.
    fn parse(parser: &mut Parser) -> ParseResult<Ident> {
        match parser.take::<Ident>() {
            Some(ident) => {
                let ident_str = ident.as_str();
                if parser.check_double_underscore
                    && (ident_str.starts_with("__") && Intrinsic::try_from_str(ident_str).is_none())
                {
                    return Err(parser.emit_error_with_span(
                        ParseErrorKind::InvalidDoubleUnderscore,
                        ident.span(),
                    ));
                }
                // Raw identifiers (`r#...`) may shadow reserved keywords.
                if !ident.is_raw_ident() && RESERVED_KEYWORDS.contains(ident_str) {
                    return Err(parser.emit_error_with_span(
                        ParseErrorKind::ReservedKeywordIdentifier,
                        ident.span(),
                    ));
                }
                Ok(ident)
            }
            None => Err(parser.emit_error(ParseErrorKind::ExpectedIdent)),
        }
    }
}
impl Peek for Delimiter {
    /// Succeeds when the next token tree is a delimited group, yielding its delimiter.
    fn peek(peeker: Peeker<'_>) -> Option<Delimiter> {
        match peeker.peek_delimiter() {
            Ok(delimiter) => Some(delimiter),
            Err(_) => None,
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/pattern.rs | sway-parse/src/pattern.rs | use crate::{Parse, ParseBracket, ParseResult, Parser, Peek};
use sway_ast::brackets::{Braces, Parens};
use sway_ast::keywords::{DoubleDotToken, FalseToken, TrueToken};
use sway_ast::literal::{LitBool, LitBoolType};
use sway_ast::punctuated::Punctuated;
use sway_ast::{Literal, PathExpr, Pattern, PatternStructField};
use sway_error::parser_error::ParseErrorKind;
use sway_types::Spanned;
impl Parse for Pattern {
    /// Parses a pattern: one or more atomic patterns joined by `|` into
    /// left-associative `Pattern::Or` nodes.
    fn parse(parser: &mut Parser) -> ParseResult<Pattern> {
        parse_binary(parser, parse_atomic_pattern, |lhs, rhs, pipe_token| {
            Pattern::Or {
                lhs,
                pipe_token,
                rhs,
            }
        })
    }
}
/// Parses a left-associative binary chain: `sub (OP sub)*`, folding each
/// operator/operand pair into the accumulated pattern via `combine`.
fn parse_binary<O: Peek>(
    parser: &mut Parser,
    sub: impl Fn(&mut Parser) -> ParseResult<Pattern>,
    combine: impl Fn(Box<Pattern>, Box<Pattern>, O) -> Pattern,
) -> ParseResult<Pattern> {
    let mut acc = sub(parser)?;
    loop {
        match parse_op_rhs(parser, &sub)? {
            Some((op_token, rhs)) => acc = combine(Box::new(acc), rhs, op_token),
            None => break,
        }
    }
    Ok(acc)
}
/// If the next token is operator `O`, consumes it and parses the right-hand
/// operand; otherwise leaves the parser untouched and returns `None`.
fn parse_op_rhs<O: Peek>(
    parser: &mut Parser,
    sub: impl Fn(&mut Parser) -> ParseResult<Pattern>,
) -> ParseResult<Option<(O, Box<Pattern>)>> {
    match parser.take() {
        Some(op_token) => {
            let rhs = Box::new(sub(parser)?);
            Ok(Some((op_token, rhs)))
        }
        None => Ok(None),
    }
}
/// Parses a single (non-`|`) pattern. Alternatives are tried in order:
/// `ref`/`mut` variable binding, bool/other literals, tuple, wildcard `_`,
/// then path-based forms (constructor, struct, constant, or plain ident).
fn parse_atomic_pattern(parser: &mut Parser) -> ParseResult<Pattern> {
    {
        // A leading `ref` and/or `mut` forces a variable-binding pattern.
        let ref_token = parser.take();
        let mut_token = parser.take();
        if ref_token.is_some() || mut_token.is_some() {
            let name = parser.parse()?;
            return Ok(Pattern::Var {
                reference: ref_token,
                mutable: mut_token,
                name,
            });
        }
        // `true`/`false` lex as identifiers, so they are matched explicitly here.
        let lit_bool = |span, kind| Ok(Pattern::Literal(Literal::Bool(LitBool { span, kind })));
        if let Some(ident) = parser.take::<TrueToken>() {
            return lit_bool(ident.span(), LitBoolType::True);
        }
        if let Some(ident) = parser.take::<FalseToken>() {
            return lit_bool(ident.span(), LitBoolType::False);
        }
        if let Some(literal) = parser.take() {
            return Ok(Pattern::Literal(literal));
        }
        if let Some(tuple) = Parens::try_parse(parser)? {
            return Ok(Pattern::Tuple(tuple));
        }
        if let Some(underscore_token) = parser.take() {
            return Ok(Pattern::Wildcard { underscore_token });
        }
        // Everything else starts with a path.
        let path = parser.parse::<PathExpr>()?;
        if path.incomplete_suffix {
            // E.g. a trailing `::`; produce an error pattern so parsing can continue.
            return Ok(Pattern::Error(
                Box::new([path.span()]),
                parser.emit_error(ParseErrorKind::ExpectedPathType),
            ));
        }
        if let Some(args) = Parens::try_parse(parser)? {
            return Ok(Pattern::Constructor { path, args });
        }
        if let Some(fields) = Braces::try_parse(parser)? {
            // `..` is only valid as the final field, i.e. in `final_value_opt`;
            // reject it anywhere in the separator-terminated pairs.
            let inner_fields: &Punctuated<_, _> = fields.get();
            let rest_pattern = inner_fields
                .value_separator_pairs
                .iter()
                .find(|(p, _)| matches!(p, PatternStructField::Rest { token: _ }));
            if let Some((rest_pattern, _)) = rest_pattern {
                return Err(parser.emit_error_with_span(
                    ParseErrorKind::UnexpectedRestPattern,
                    rest_pattern.span(),
                ));
            }
            return Ok(Pattern::Struct { path, fields });
        }
        // A bare single-segment path is ambiguous between a binding and a
        // constant; multi-segment paths are constants.
        match path.try_into_ident() {
            Ok(name) => Ok(Pattern::AmbiguousSingleIdent(name)),
            Err(path) => Ok(Pattern::Constant(path)),
        }
    }
}
impl Parse for PatternStructField {
    /// Parses one field of a struct pattern: either the rest pattern `..`,
    /// or `name` optionally followed by `: sub_pattern`.
    fn parse(parser: &mut Parser) -> ParseResult<PatternStructField> {
        match parser.take::<DoubleDotToken>() {
            Some(token) => Ok(PatternStructField::Rest { token }),
            None => {
                let field_name = parser.parse()?;
                let pattern_opt = parser
                    .take()
                    .map(|colon_token| parser.parse().map(|pattern| (colon_token, pattern)))
                    .transpose()?;
                Ok(PatternStructField::Field {
                    field_name,
                    pattern_opt,
                })
            }
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/parser.rs | sway-parse/src/parser.rs | use crate::{Parse, ParseToEnd, Peek};
use core::marker::PhantomData;
use std::cell::RefCell;
use sway_ast::keywords::Keyword;
use sway_ast::literal::Literal;
use sway_ast::token::{
DocComment, GenericTokenTree, Group, Punct, Spacing, TokenStream, TokenTree,
};
use sway_ast::PubToken;
use sway_error::error::CompileError;
use sway_error::handler::{ErrorEmitted, Handler};
use sway_error::parser_error::{ParseError, ParseErrorKind};
use sway_features::ExperimentalFeatures;
use sway_types::{
ast::{Delimiter, PunctKind},
Ident, Span, Spanned,
};
/// Recursive-descent parser over a slice of lexed token trees.
pub struct Parser<'a, 'e> {
    /// Tokens not yet consumed; parsing advances by shrinking this slice.
    token_trees: &'a [TokenTree],
    /// Span of the entire underlying token stream; used to point errors just
    /// past the end of input when the token slice is exhausted.
    full_span: Span,
    /// Sink for all diagnostics emitted during parsing.
    handler: &'e Handler,
    /// When `true`, `__`-prefixed identifiers that are not intrinsics are rejected.
    pub check_double_underscore: bool,
    /// Feature flags gating experimental syntax.
    pub experimental: ExperimentalFeatures,
}
impl<'a, 'e> Parser<'a, 'e> {
    /// Creates a parser over `token_stream`, reporting diagnostics to `handler`.
    pub fn new(
        handler: &'e Handler,
        token_stream: &'a TokenStream,
        experimental: ExperimentalFeatures,
    ) -> Parser<'a, 'e> {
        Parser {
            token_trees: token_stream.token_trees(),
            full_span: token_stream.span(),
            handler,
            check_double_underscore: true,
            experimental,
        }
    }
    /// Emits `kind` at the current token, or just past the end of the input
    /// when the token stream is exhausted.
    pub fn emit_error(&mut self, kind: ParseErrorKind) -> ErrorEmitted {
        let span = match self.token_trees {
            [token_tree, ..] => token_tree.span(),
            _ => {
                // Create a new span that points to _just_ after the last parsed item or 1
                // character before that if the last parsed item is the last item in the full span.
                let num_trailing_spaces =
                    self.full_span.as_str().len() - self.full_span.as_str().trim_end().len();
                let trim_offset = if num_trailing_spaces == 0 {
                    1
                } else {
                    num_trailing_spaces
                };
                Span::new(
                    self.full_span.src().clone(),
                    self.full_span.end().saturating_sub(trim_offset),
                    (self.full_span.end() + 1).saturating_sub(trim_offset),
                    self.full_span.source_id().cloned(),
                )
                .unwrap_or(Span::dummy())
            }
        };
        self.emit_error_with_span(kind, span)
    }
    /// Emits a parse error of `kind` covering `span`.
    pub fn emit_error_with_span(&mut self, kind: ParseErrorKind, span: Span) -> ErrorEmitted {
        let error = ParseError { span, kind };
        self.handler.emit_err(CompileError::Parse { error })
    }
    /// Eats a `P` in its canonical way by peeking.
    ///
    /// Unlike [`Parser::peek`], this method advances the parser on success, but not on failure.
    pub fn take<P: Peek>(&mut self) -> Option<P> {
        let (value, tokens) = Peeker::with(self.token_trees)?;
        self.token_trees = tokens;
        Some(value)
    }
    /// Tries to peek a `P` in its canonical way.
    ///
    /// Either way, on success or failure, the parser is not advanced.
    pub fn peek<P: Peek>(&self) -> Option<P> {
        Peeker::with(self.token_trees).map(|(v, _)| v)
    }
    /// Tries to peek a `P` as the second token in its canonical way.
    ///
    /// Either way, on success or failure, the parser is not advanced.
    pub fn peek_next<P: Peek>(&self) -> Option<P> {
        Peeker::with(&self.token_trees[1..]).map(|(v, _)| v)
    }
    /// This function will fork the current parse, and call the parsing function.
    /// If it succeeds it will sync the original parser with the forked one;
    ///
    /// If it fails it will return a `Recoverer` together with the `ErrorEmitted`.
    ///
    /// This recoverer can be used to put the forked parsed back in track and then
    /// sync the original parser to allow the parsing to continue.
    pub fn call_parsing_function_with_recovery<
        'original,
        T,
        F: FnOnce(&mut Parser<'a, '_>) -> ParseResult<T>,
    >(
        &'original mut self,
        parsing_function: F,
    ) -> Result<T, ParseRecoveryStrategies<'original, 'a, 'e>> {
        // The fork gets its own handler so its diagnostics can be either
        // merged (on success) or carried by the recoverer (on failure).
        let handler = Handler::default();
        let mut fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };
        match parsing_function(&mut fork) {
            Ok(result) => {
                self.token_trees = fork.token_trees;
                self.handler.append(handler);
                Ok(result)
            }
            Err(error) => {
                let Parser {
                    token_trees,
                    full_span,
                    ..
                } = fork;
                Err(ParseRecoveryStrategies {
                    original: RefCell::new(self),
                    handler,
                    fork_token_trees: token_trees,
                    fork_full_span: full_span,
                    error,
                })
            }
        }
    }
    /// This function will fork the current parse, and try to parse
    /// T using the fork. If it succeeds it will sync the original parser with the forked one;
    ///
    /// If it fails it will return a `Recoverer` together with the `ErrorEmitted`.
    ///
    /// This recoverer can be used to put the forked parsed back in track and then
    /// sync the original parser to allow the parsing to continue.
    pub fn parse_with_recovery<'original, T: Parse>(
        &'original mut self,
    ) -> Result<T, ParseRecoveryStrategies<'original, 'a, 'e>> {
        self.call_parsing_function_with_recovery(|p| p.parse())
    }
    /// This function does three things
    /// 1 - it peeks P;
    /// 2 - it forks the current parser and tries to parse
    /// T using this fork. If it succeeds it syncs the original
    /// parser with the forked one;
    /// 3 - if it fails it will return a `Recoverer` together with the `ErrorEmitted`.
    ///
    /// This recoverer can be used to put the forked parsed back in track and then
    /// sync the original parser to allow the parsing to continue.
    pub fn guarded_parse_with_recovery<'original, P: Peek, T: Parse>(
        &'original mut self,
    ) -> Result<Option<T>, ParseRecoveryStrategies<'original, 'a, 'e>> {
        if self.peek::<P>().is_none() {
            return Ok(None);
        }
        let handler = Handler::default();
        let mut fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };
        match fork.parse() {
            Ok(result) => {
                self.token_trees = fork.token_trees;
                self.handler.append(handler);
                Ok(Some(result))
            }
            Err(error) => {
                let Parser {
                    token_trees,
                    full_span,
                    ..
                } = fork;
                Err(ParseRecoveryStrategies {
                    original: RefCell::new(self),
                    handler,
                    fork_token_trees: token_trees,
                    fork_full_span: full_span,
                    error,
                })
            }
        }
    }
    /// Parses a `T` in its canonical way.
    /// Do not advance the parser on failure
    pub fn try_parse<T: Parse>(&mut self, append_diagnostics: bool) -> ParseResult<T> {
        let handler = Handler::default();
        let mut fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };
        let r = match T::parse(&mut fork) {
            Ok(result) => {
                self.token_trees = fork.token_trees;
                Ok(result)
            }
            Err(err) => Err(err),
        };
        if append_diagnostics {
            self.handler.append(handler);
        }
        r
    }
    /// This method is useful if `T` does not impl `ParseToEnd`
    pub fn try_parse_and_check_empty<T: Parse>(
        mut self,
        append_diagnostics: bool,
    ) -> ParseResult<Option<(T, ParserConsumed<'a>)>> {
        let value = self.try_parse(append_diagnostics)?;
        match self.check_empty() {
            Some(consumed) => Ok(Some((value, consumed))),
            None => Ok(None),
        }
    }
    /// Parses a `T` in its canonical way.
    pub fn parse<T: Parse>(&mut self) -> ParseResult<T> {
        T::parse(self)
    }
    /// Parses `T` given that the guard `G` was successfully peeked.
    ///
    /// Useful to parse e.g., `$keyword $stuff` as a unit where `$keyword` is your guard.
    pub fn guarded_parse<G: Peek, T: Parse>(&mut self) -> ParseResult<Option<T>> {
        self.peek::<G>().map(|_| self.parse()).transpose()
    }
    /// Consumes the parser, parsing a `T` that must use up all remaining tokens.
    pub fn parse_to_end<T: ParseToEnd>(self) -> ParseResult<(T, ParserConsumed<'a>)> {
        T::parse_to_end(self)
    }
    /// Do not advance the parser on failure
    pub fn try_parse_to_end<T: ParseToEnd>(
        &mut self,
        append_diagnostics: bool,
    ) -> ParseResult<(T, ParserConsumed<'a>)> {
        let handler = Handler::default();
        let fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };
        let r = T::parse_to_end(fork);
        if append_diagnostics {
            self.handler.append(handler);
        }
        r
    }
    /// If the next token tree is a group with `expected_delimiter`, consumes it
    /// and returns a sub-parser over the group's contents plus the group's span.
    pub fn enter_delimited(
        &mut self,
        expected_delimiter: Delimiter,
    ) -> Option<(Parser<'_, '_>, Span)> {
        match self.token_trees {
            [TokenTree::Group(Group {
                delimiter,
                token_stream,
                span,
            }), rest @ ..]
                if *delimiter == expected_delimiter =>
            {
                self.token_trees = rest;
                let parser = Parser {
                    token_trees: token_stream.token_trees(),
                    full_span: token_stream.span(),
                    handler: self.handler,
                    check_double_underscore: self.check_double_underscore,
                    experimental: self.experimental,
                };
                Some((parser, span.clone()))
            }
            _ => None,
        }
    }
    /// Returns `true` when no tokens remain.
    pub fn is_empty(&self) -> bool {
        self.token_trees.is_empty()
    }
    /// Returns proof of full consumption if no tokens remain.
    pub fn check_empty(&self) -> Option<ParserConsumed<'a>> {
        self.is_empty()
            .then_some(ParserConsumed { _priv: PhantomData })
    }
    /// Returns up to the next five token trees; intended for debugging.
    pub fn debug_tokens(&self) -> &[TokenTree] {
        let len = std::cmp::min(5, self.token_trees.len());
        &self.token_trees[..len]
    }
    /// Errors given `Some(PubToken)`.
    pub fn ban_visibility_qualifier(&mut self, vis: &Option<PubToken>) -> ParseResult<()> {
        if let Some(token) = vis {
            return Err(self.emit_error_with_span(
                ParseErrorKind::UnnecessaryVisibilityQualifier {
                    visibility: token.ident(),
                },
                token.span(),
            ));
        }
        Ok(())
    }
    /// Span of the whole underlying token stream.
    pub fn full_span(&self) -> &Span {
        &self.full_span
    }
    /// Consume tokens while its line equals to `line`.
    ///
    /// # Warning
    ///
    /// To calculate lines the original source code needs to be transversed.
    pub fn consume_while_line_equals(&mut self, line: usize) {
        loop {
            let Some(current_token) = self.token_trees.first() else {
                break;
            };
            let current_span = current_token.span();
            let current_span_line = current_span.start_line_col_one_index().line;
            if current_span_line != line {
                break;
            } else {
                self.token_trees = &self.token_trees[1..];
            }
        }
    }
    /// Returns `true` if any error has been emitted to this parser's handler.
    pub fn has_errors(&self) -> bool {
        self.handler.has_errors()
    }
    /// Returns `true` if any warning has been emitted to this parser's handler.
    pub fn has_warnings(&self) -> bool {
        self.handler.has_warnings()
    }
}
/// Lookahead cursor over a token-tree slice. A successful `peek_*` call
/// records, via `num_tokens`, how many leading token trees the peeked value
/// occupies, so the caller can advance past them.
pub struct Peeker<'a> {
    pub token_trees: &'a [TokenTree],
    // Out-parameter: number of leading token trees matched by the peek.
    num_tokens: &'a mut usize,
}
impl<'a> Peeker<'a> {
    /// Peek a `P` in `token_trees`, if any, and return the `P` + the remainder of the token trees.
    pub fn with<P: Peek>(token_trees: &'a [TokenTree]) -> Option<(P, &'a [TokenTree])> {
        let mut num_tokens = 0;
        let peeker = Peeker {
            token_trees,
            num_tokens: &mut num_tokens,
        };
        let value = P::peek(peeker)?;
        Some((value, &token_trees[num_tokens..]))
    }
    /// Peeks a single identifier token; on failure returns `self` unchanged.
    pub fn peek_ident(self) -> Result<&'a Ident, Self> {
        match self.token_trees {
            [TokenTree::Ident(ident), ..] => {
                *self.num_tokens = 1;
                Ok(ident)
            }
            _ => Err(self),
        }
    }
    /// Peeks a single literal token; on failure returns `self` unchanged.
    pub fn peek_literal(self) -> Result<&'a Literal, Self> {
        match self.token_trees {
            [TokenTree::Literal(literal), ..] => {
                *self.num_tokens = 1;
                Ok(literal)
            }
            _ => Err(self),
        }
    }
    /// Peeks the multi-character punctuation sequence `punct_kinds` (all but
    /// the last must be joint-spaced), failing if it is immediately followed
    /// by any punct in `not_followed_by`. Returns the matched span.
    ///
    /// # Panics
    ///
    /// Panics if `punct_kinds` is empty.
    pub fn peek_punct_kinds(
        self,
        punct_kinds: &[PunctKind],
        not_followed_by: &[PunctKind],
    ) -> Result<Span, Self> {
        let (last_punct_kind, first_punct_kinds) = punct_kinds
            .split_last()
            .unwrap_or_else(|| panic!("peek_punct_kinds called with empty slice"));
        if self.token_trees.len() < punct_kinds.len() {
            return Err(self);
        }
        // All leading puncts must match and be joined to the next character.
        for (punct_kind, tt) in first_punct_kinds.iter().zip(self.token_trees.iter()) {
            match tt {
                TokenTree::Punct(Punct {
                    kind,
                    spacing: Spacing::Joint,
                    ..
                }) if *kind == *punct_kind => {}
                _ => return Err(self),
            }
        }
        let span_end = match &self.token_trees[punct_kinds.len() - 1] {
            TokenTree::Punct(Punct {
                kind,
                spacing,
                span,
            }) if *kind == *last_punct_kind => match spacing {
                Spacing::Alone => span,
                // The last punct is joined to the following one: reject when that
                // would form a different, longer operator (e.g. `>` vs `>=`).
                Spacing::Joint => match &self.token_trees.get(punct_kinds.len()) {
                    Some(TokenTree::Punct(Punct { kind, .. })) => {
                        if not_followed_by.contains(kind) {
                            return Err(self);
                        }
                        span
                    }
                    _ => span,
                },
            },
            _ => return Err(self),
        };
        let span_start = match &self.token_trees[0] {
            TokenTree::Punct(Punct { span, .. }) => span,
            _ => unreachable!(),
        };
        let span = Span::join(span_start.clone(), span_end);
        *self.num_tokens = punct_kinds.len();
        Ok(span)
    }
    /// Peeks a delimited group, returning its delimiter; on failure returns `self`.
    pub fn peek_delimiter(self) -> Result<Delimiter, Self> {
        match self.token_trees {
            [TokenTree::Group(Group { delimiter, .. }), ..] => {
                *self.num_tokens = 1;
                Ok(*delimiter)
            }
            _ => Err(self),
        }
    }
    /// Peeks a doc-comment token; on failure returns `self` unchanged.
    pub fn peek_doc_comment(self) -> Result<&'a DocComment, Self> {
        match self.token_trees {
            [TokenTree::DocComment(doc_comment), ..] => {
                *self.num_tokens = 1;
                Ok(doc_comment)
            }
            _ => Err(self),
        }
    }
}
/// This struct is returned by some parser methods that allow
/// parser recovery.
///
/// It implements some standardized recovery strategies or it allows
/// custom strategies using the `start` method.
pub struct ParseRecoveryStrategies<'original, 'a, 'e> {
    // The parser that spawned the failed fork; mutated when recovery finishes.
    original: RefCell<&'original mut Parser<'a, 'e>>,
    // Diagnostics collected by the failed fork, appended to the original on `finish`.
    handler: Handler,
    // Token position where the fork stopped.
    fork_token_trees: &'a [TokenTree],
    fork_full_span: Span,
    // The error that aborted the forked parse.
    error: ErrorEmitted,
}
impl<'a> ParseRecoveryStrategies<'_, 'a, '_> {
    /// This strategy consumes everything at the current line and emits the fallback error
    /// if the forked parser does not contain any error.
    pub fn recover_at_next_line_with_fallback_error(
        &self,
        kind: ParseErrorKind,
    ) -> (Box<[Span]>, ErrorEmitted) {
        // Line of the last token the fork consumed (or, failing that, of the
        // token it stopped at); everything on that line will be skipped.
        let line = if self.fork_token_trees.is_empty() {
            None
        } else {
            self.last_consumed_token()
                .map(|x| x.span())
                .or_else(|| self.fork_token_trees.first().map(|x| x.span()))
                .map(|x| x.start_line_col_one_index().line)
        };
        self.start(|p| {
            if let Some(line) = line {
                p.consume_while_line_equals(line);
            }
            if !p.has_errors() {
                p.emit_error_with_span(kind, self.diff_span(p));
            }
        })
    }
    /// Starts the parser recovery process calling the callback with the forked parser.
    /// All the changes to this forked parser will be imposed into the original parser,
    /// including diagnostics.
    pub fn start<'this>(
        &'this self,
        f: impl FnOnce(&mut Parser<'a, 'this>),
    ) -> (Box<[Span]>, ErrorEmitted) {
        let mut p = {
            let original = self.original.borrow();
            Parser {
                token_trees: self.fork_token_trees,
                full_span: self.fork_full_span.clone(),
                handler: &self.handler,
                check_double_underscore: original.check_double_underscore,
                experimental: original.experimental,
            }
        };
        f(&mut p);
        self.finish(p)
    }
    /// This is the token before the whole tentative parser started.
    pub fn starting_token(&self) -> &GenericTokenTree<TokenStream> {
        let original = self.original.borrow();
        &original.token_trees[0]
    }
    /// This is the last consumed token of the forked parser. This the token
    /// immediately before the forked parser head.
    pub fn last_consumed_token(&self) -> Option<&GenericTokenTree<TokenStream>> {
        let fork_head_span = self.fork_token_trees.first()?.span();
        // find the last token consumed by the fork
        let original = self.original.borrow();
        let fork_pos = original
            .token_trees
            .iter()
            .position(|x| x.span() == fork_head_span)?;
        let before_fork_pos = fork_pos.checked_sub(1)?;
        original.token_trees.get(before_fork_pos)
    }
    /// This return a span encopassing all tokens that were consumed by the `p` since the start
    /// of the tentative parsing
    ///
    /// This is useful to show one single error for all the consumed tokens.
    pub fn diff_span<'this>(&self, p: &Parser<'a, 'this>) -> Span {
        let original = self.original.borrow_mut();
        // collect all tokens trees that were consumed by the fork
        let qty = if let Some(first_fork_tt) = p.token_trees.first() {
            original
                .token_trees
                .iter()
                .position(|tt| tt.span() == first_fork_tt.span())
                .expect("not finding fork head")
        } else {
            original.token_trees.len()
        };
        let garbage: Vec<_> = original
            .token_trees
            .iter()
            .take(qty)
            .map(|x| x.span())
            .collect();
        Span::join_all(garbage)
    }
    /// Syncs the original parser to the fork's position and diagnostics,
    /// returning the spans of the skipped tokens and the original error.
    fn finish(&self, p: Parser<'a, '_>) -> (Box<[Span]>, ErrorEmitted) {
        let mut original = self.original.borrow_mut();
        // collect all tokens trees that were consumed by the fork
        let qty = if let Some(first_fork_tt) = p.token_trees.first() {
            original
                .token_trees
                .iter()
                .position(|tt| tt.span() == first_fork_tt.span())
                .expect("not finding fork head")
        } else {
            original.token_trees.len()
        };
        let garbage: Vec<_> = original
            .token_trees
            .iter()
            .take(qty)
            .map(|x| x.span())
            .collect();
        original.token_trees = p.token_trees;
        original.handler.append(self.handler.clone());
        (garbage.into_boxed_slice(), self.error)
    }
}
/// Zero-sized proof that a `Parser` consumed its entire token stream
/// (obtained via `Parser::check_empty`).
pub struct ParserConsumed<'a> {
    _priv: PhantomData<fn(&'a ()) -> &'a ()>,
}
/// Result of a parse step; the error has already been emitted to the handler.
pub type ParseResult<T> = Result<T, ErrorEmitted>;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/where_clause.rs | sway-parse/src/where_clause.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::punctuated::Punctuated;
use sway_ast::{WhereBound, WhereClause};
impl Parse for WhereClause {
    /// Parses `where` followed by zero or more comma-separated bounds of the
    /// form `TyName: Bounds`. The list ends at the first token that is not an
    /// identifier; a bound without a trailing comma terminates the list.
    fn parse(parser: &mut Parser) -> ParseResult<WhereClause> {
        let where_token = parser.parse()?;
        let mut value_separator_pairs = Vec::new();
        let final_value_opt = loop {
            // No identifier here means the bound list is (possibly empty and) done.
            let ty_name = match parser.take() {
                Some(ty_name) => ty_name,
                None => break None,
            };
            let colon_token = parser.parse()?;
            let bounds = parser.parse()?;
            let where_bound = WhereBound {
                ty_name,
                colon_token,
                bounds,
            };
            match parser.take() {
                Some(comma_token) => value_separator_pairs.push((where_bound, comma_token)),
                None => break Some(Box::new(where_bound)),
            }
        };
        let bounds = Punctuated {
            value_separator_pairs,
            final_value_opt,
        };
        Ok(WhereClause {
            where_token,
            bounds,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/module.rs | sway-parse/src/module.rs | use crate::{Parse, ParseResult, ParseToEnd, Parser, ParserConsumed};
use sway_ast::{attribute::Annotated, Module, ModuleKind};
use sway_error::parser_error::ParseErrorKind;
impl Parse for ModuleKind {
    /// Parses the module-kind keyword: `script`, `contract`, `predicate`,
    /// or `library` — tried in that order.
    fn parse(parser: &mut Parser) -> ParseResult<Self> {
        if let Some(script_token) = parser.take() {
            return Ok(Self::Script { script_token });
        }
        if let Some(contract_token) = parser.take() {
            return Ok(Self::Contract { contract_token });
        }
        if let Some(predicate_token) = parser.take() {
            return Ok(Self::Predicate { predicate_token });
        }
        if let Some(library_token) = parser.take() {
            return Ok(Self::Library { library_token });
        }
        Err(parser.emit_error(ParseErrorKind::ExpectedModuleKind))
    }
}
impl ParseToEnd for Annotated<Module> {
    /// Parses a whole module: leading attributes, the module-kind header
    /// (e.g. `script;`), then items until the end of input.
    fn parse_to_end<'a, 'e>(mut parser: Parser<'a, '_>) -> ParseResult<(Self, ParserConsumed<'a>)> {
        let attributes = parser.parse()?;
        let (kind, semicolon_token) = parser.parse()?;
        let (items, consumed) = parser.parse_to_end()?;
        let module = Annotated {
            attributes,
            value: Module {
                kind,
                semicolon_token,
                items,
            },
        };
        Ok((module, consumed))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_utils::parse_to_end;
    use insta::*;

    // Snapshot-tests the full AST of a minimal script module (`fn main() { () }`),
    // pinning token spans and structure via an insta inline RON snapshot.
    #[test]
    fn parse_noop_script_module() {
        assert_ron_snapshot!(parse_to_end::<Annotated<Module>>(r#"
            script;
        
            fn main() {
                ()
            }
        "#,), @r#"
        Annotated(
          attributes: [],
          value: Module(
            kind: Script(
              script_token: ScriptToken(
                span: Span(
                  src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                  start: 13,
                  end: 19,
                  source_id: None,
                ),
              ),
            ),
            semicolon_token: SemicolonToken(
              span: Span(
                src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                start: 19,
                end: 20,
                source_id: None,
              ),
            ),
            items: [
              Annotated(
                attributes: [],
                value: Fn(ItemFn(
                  fn_signature: FnSignature(
                    visibility: None,
                    fn_token: FnToken(
                      span: Span(
                        src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                        start: 42,
                        end: 44,
                        source_id: None,
                      ),
                    ),
                    name: BaseIdent(
                      name_override_opt: None,
                      span: Span(
                        src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                        start: 45,
                        end: 49,
                        source_id: None,
                      ),
                      is_raw_ident: false,
                    ),
                    generics: None,
                    arguments: Parens(
                      inner: Static(Punctuated(
                        value_separator_pairs: [],
                        final_value_opt: None,
                      )),
                      span: Span(
                        src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                        start: 49,
                        end: 51,
                        source_id: None,
                      ),
                    ),
                    return_type_opt: None,
                    where_clause_opt: None,
                  ),
                  body: Braces(
                    inner: CodeBlockContents(
                      statements: [],
                      final_expr_opt: Some(Tuple(Parens(
                        inner: Nil,
                        span: Span(
                          src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                          start: 70,
                          end: 72,
                          source_id: None,
                        ),
                      ))),
                      span: Span(
                        src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                        start: 53,
                        end: 85,
                        source_id: None,
                      ),
                    ),
                    span: Span(
                      src: "\n            script;\n        \n            fn main() {\n                ()\n            }\n        ",
                      start: 52,
                      end: 86,
                      source_id: None,
                    ),
                  ),
                )),
              ),
            ],
          ),
        )
        "#);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/priv_prelude.rs | sway-parse/src/priv_prelude.rs | pub use crate::{
brackets::ParseBracket,
parse::{ParseToEnd, Peek},
parser::{ParseResult, ParserConsumed, Peeker},
};
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/keywords.rs | sway-parse/src/keywords.rs | use crate::{Parse, ParseResult, Parser, Peek, Peeker};
use sway_ast::keywords::*;
use sway_error::parser_error::ParseErrorKind;
use sway_types::Spanned;
/// Peeks the keyword `T`: succeeds only when the next token is a non-raw
/// identifier whose text equals `T::AS_STR`.
fn peek_keyword<T: Keyword>(peeker: Peeker<'_>) -> Option<T> {
    let ident = peeker.peek_ident().ok()?;
    if ident.is_raw_ident() || ident.as_str() != T::AS_STR {
        None
    } else {
        Some(T::new(ident.span()))
    }
}
/// Parses the keyword `T`, emitting `ExpectedKeyword` when it is absent.
fn parse_keyword<T: Keyword + Peek>(parser: &mut Parser) -> ParseResult<T> {
    parser
        .take()
        .ok_or_else(|| parser.emit_error(ParseErrorKind::ExpectedKeyword { word: T::AS_STR }))
}
// Wires each keyword token type up to the generic keyword peek/parse helpers.
macro_rules! keyword_impls {
    ($($ty:ty),*) => {
        $(
            impl Peek for $ty {
                fn peek(peeker: Peeker<'_>) -> Option<Self> {
                    peek_keyword(peeker)
                }
            }
            impl Parse for $ty {
                fn parse(parser: &mut Parser) -> ParseResult<Self> {
                    parse_keyword(parser)
                }
            }
        )*
    };
}
// All keyword token types that parse via `peek_keyword`/`parse_keyword`.
keyword_impls! {
    ScriptToken,
    ContractToken,
    PredicateToken,
    LibraryToken,
    ModToken,
    PubToken,
    UseToken,
    AsToken,
    StructToken,
    ClassToken,
    EnumToken,
    SelfToken,
    FnToken,
    TraitToken,
    ImplToken,
    ForToken,
    InToken,
    AbiToken,
    ConstToken,
    StorageToken,
    StrToken,
    AsmToken,
    ReturnToken,
    IfToken,
    ElseToken,
    MatchToken,
    MutToken,
    LetToken,
    WhileToken,
    WhereToken,
    RefToken,
    TrueToken,
    FalseToken,
    BreakToken,
    ContinueToken,
    ConfigurableToken,
    TypeToken,
    PtrToken,
    SliceToken,
    PanicToken
}
/// Peeks the punctuation token `T`, matching its punct sequence while
/// respecting `T::NOT_FOLLOWED_BY` (so e.g. `>` is not taken out of `>=`).
fn peek_token<T: Token>(peeker: Peeker<'_>) -> Option<T> {
    peeker
        .peek_punct_kinds(T::PUNCT_KINDS, T::NOT_FOLLOWED_BY)
        .map(T::new)
        .ok()
}
/// Parses the punctuation token `T`, emitting `ExpectedPunct` when it is absent.
fn parse_token<T: Token + Peek>(parser: &mut Parser) -> ParseResult<T> {
    if let Some(value) = parser.take() {
        return Ok(value);
    }
    let kinds = T::PUNCT_KINDS.to_owned();
    Err(parser.emit_error(ParseErrorKind::ExpectedPunct { kinds }))
}
// Wires each punctuation token type up to the generic token peek/parse helpers.
macro_rules! token_impls {
    ($($ty:ty),*) => {
        $(
            impl Peek for $ty {
                fn peek(peeker: Peeker<'_>) -> Option<Self> {
                    peek_token(peeker)
                }
            }
            impl Parse for $ty {
                fn parse(parser: &mut Parser) -> ParseResult<Self> {
                    parse_token(parser)
                }
            }
        )*
    };
}
// All punctuation token types that parse via `peek_token`/`parse_token`.
token_impls! {
    SemicolonToken,
    ForwardSlashToken,
    DoubleColonToken,
    StarToken,
    DoubleStarToken,
    CommaToken,
    ColonToken,
    RightArrowToken,
    LessThanToken,
    GreaterThanToken,
    OpenAngleBracketToken,
    CloseAngleBracketToken,
    EqToken,
    AddEqToken,
    SubEqToken,
    StarEqToken,
    DivEqToken,
    ShlEqToken,
    ShrEqToken,
    FatRightArrowToken,
    DotToken,
    DoubleDotToken,
    BangToken,
    PercentToken,
    AddToken,
    SubToken,
    ShrToken,
    ShlToken,
    AmpersandToken,
    CaretToken,
    PipeToken,
    DoubleEqToken,
    BangEqToken,
    GreaterThanEqToken,
    LessThanEqToken,
    DoubleAmpersandToken,
    DoublePipeToken,
    UnderscoreToken,
    HashToken,
    HashBangToken
}
// Keep this in sync with the list in `sway-ast/keywords.rs` defined by define_keyword!
// Keep this in sync with the list in `sway-ast/keywords.rs` defined by define_keyword!
//
// Identifiers in this set are rejected by `Ident::parse` unless written as raw
// identifiers. NOTE(review): some words with keyword tokens above (`class`,
// `in`, `ptr`, `slice`) are absent here — presumably intentional (they remain
// usable as identifiers), but verify against the define_keyword! list.
pub const RESERVED_KEYWORDS: phf::Set<&'static str> = phf::phf_set! {
    "script",
    "contract",
    "predicate",
    "library",
    "mod",
    "pub",
    "use",
    "as",
    "struct",
    "enum",
    "self",
    "fn",
    "trait",
    "impl",
    "for",
    "abi",
    "const",
    "storage",
    "str",
    "asm",
    "return",
    "if",
    "else",
    "match",
    "mut",
    "let",
    "while",
    "where",
    "ref",
    "true",
    "false",
    "break",
    "continue",
    "configurable",
    "type",
    "panic",
};
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/submodule.rs | sway-parse/src/submodule.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::submodule::Submodule;
impl Parse for Submodule {
    /// Parses a submodule declaration: `[pub] mod name;`.
    /// Field initializers run top-to-bottom, so the optional `pub` is
    /// consumed first, matching the source order of the declaration.
    fn parse(parser: &mut Parser) -> ParseResult<Submodule> {
        Ok(Submodule {
            visibility: parser.take(),
            mod_token: parser.parse()?,
            name: parser.parse()?,
            semicolon_token: parser.parse()?,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/test_utils.rs | sway-parse/src/test_utils.rs | use crate::{priv_prelude::ParseToEnd, Parse, Parser};
use sway_error::handler::Handler;
use sway_features::ExperimentalFeatures;
/// Test helper: lexes `input` and parses a `T` from it, panicking on any
/// lex/parse error or warning.
pub fn parse<T>(input: &str) -> T
where
    T: Parse,
{
    let handler = Handler::default();
    let ts = crate::token::lex(&handler, input.into(), 0, input.len(), None).unwrap();
    let r = Parser::new(&handler, &ts, ExperimentalFeatures::default()).parse();
    if handler.has_errors() || handler.has_warnings() {
        panic!("{:?}", handler.consume());
    }
    r.unwrap_or_else(|_| panic!("Parse error: {:?}", handler.consume().0))
}
pub fn parse_to_end<T>(input: &str) -> T
where
T: ParseToEnd,
{
let handler = <_>::default();
let ts = crate::token::lex(&handler, input.into(), 0, input.len(), None).unwrap();
let r = Parser::new(&handler, &ts, ExperimentalFeatures::default())
.parse_to_end()
.map(|(m, _)| m);
if handler.has_errors() || handler.has_warnings() {
panic!("{:?}", handler.consume());
}
r.unwrap_or_else(|_| panic!("Parse error: {:?}", handler.consume().0))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/punctuated.rs | sway-parse/src/punctuated.rs | use crate::{Parse, ParseResult, ParseToEnd, Parser, ParserConsumed};
use sway_ast::punctuated::Punctuated;
impl<T, P> ParseToEnd for Punctuated<T, P>
where
    T: Parse,
    P: Parse,
{
    /// Parses `T (P T)*` with an optional trailing `P`, consuming the whole
    /// token stream. A value with no following separator becomes
    /// `final_value_opt`; a trailing separator leaves it `None`.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(Punctuated<T, P>, ParserConsumed<'a>)> {
        let mut value_separator_pairs = Vec::new();
        loop {
            // Empty input (or input ending right after a separator).
            if let Some(consumed) = parser.check_empty() {
                let punctuated = Punctuated {
                    value_separator_pairs,
                    final_value_opt: None,
                };
                return Ok((punctuated, consumed));
            }
            let value = parser.parse()?;
            // Input ends after a value: that value is the unterminated final one.
            if let Some(consumed) = parser.check_empty() {
                let punctuated = Punctuated {
                    value_separator_pairs,
                    final_value_opt: Some(Box::new(value)),
                };
                return Ok((punctuated, consumed));
            }
            let separator = parser.parse()?;
            value_separator_pairs.push((value, separator));
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/generics.rs | sway-parse/src/generics.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::generics::GenericParam;
use sway_ast::keywords::{ColonToken, CommaToken, ConstToken};
use sway_ast::punctuated::Punctuated;
use sway_ast::{AngleBrackets, GenericArgs, GenericParams};
use sway_types::Ident;
impl Parse for GenericParam {
    /// Parses one generic parameter: `const NAME: TYPE` for a const parameter,
    /// otherwise a bare identifier for a type parameter.
    fn parse(parser: &mut Parser) -> ParseResult<Self>
    where
        Self: Sized,
    {
        match parser.take::<ConstToken>() {
            Some(_const_token) => {
                let ident = parser.parse::<Ident>()?;
                let _ = parser.parse::<ColonToken>()?;
                let ty = parser.parse::<Ident>()?;
                Ok(GenericParam::Const { ident, ty })
            }
            None => Ok(GenericParam::Trait {
                ident: parser.parse::<Ident>()?,
            }),
        }
    }
}
impl Parse for GenericParams {
    /// Parses `<...>`-delimited, comma-separated generic parameters.
    fn parse(parser: &mut Parser) -> ParseResult<GenericParams> {
        let parameters = parse_angle_comma(parser)?;
        Ok(GenericParams { parameters })
    }
}
impl Parse for GenericArgs {
    /// Parses `<...>`-delimited, comma-separated generic arguments.
    fn parse(parser: &mut Parser) -> ParseResult<GenericArgs> {
        let parameters = parse_angle_comma(parser)?;
        Ok(GenericArgs { parameters })
    }
}
/// Parse a list of `T`s delimited by `<` and `>` and separated by `,`.
///
/// Trailing commas are accepted: each iteration first looks for the closing
/// `>`, then parses a value, then either the closing `>` (making that value
/// the final, unseparated element) or a comma.
fn parse_angle_comma<T: Parse>(
    parser: &mut Parser,
) -> ParseResult<AngleBrackets<Punctuated<T, CommaToken>>> {
    let open_angle_bracket_token = parser.parse()?;
    let mut value_separator_pairs = Vec::new();
    let (final_value_opt, close_angle_bracket_token) = loop {
        // `>` immediately: list ended after a separator, or is empty.
        if let Some(close_angle_bracket_token) = parser.take() {
            break (None, close_angle_bracket_token);
        };
        let ident = parser.parse()?;
        // `>` after a value: that value is the final, unseparated element.
        if let Some(close_angle_bracket_token) = parser.take() {
            break (Some(Box::new(ident)), close_angle_bracket_token);
        };
        let comma_token = parser.parse()?;
        value_separator_pairs.push((ident, comma_token));
    };
    let punctuated = Punctuated {
        value_separator_pairs,
        final_value_opt,
    };
    Ok(AngleBrackets {
        open_angle_bracket_token,
        inner: punctuated,
        close_angle_bracket_token,
    })
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/literal.rs | sway-parse/src/literal.rs | use crate::{Parse, ParseResult, Parser, Peek, Peeker};
use sway_ast::Literal;
use sway_error::parser_error::ParseErrorKind;
impl Peek for Literal {
    /// Returns a clone of the upcoming literal token, or `None` when the
    /// next token is not a literal; the stream itself is left untouched.
    fn peek(peeker: Peeker<'_>) -> Option<Literal> {
        match peeker.peek_literal() {
            Ok(literal) => Some(literal.clone()),
            Err(_) => None,
        }
    }
}
impl Parse for Literal {
    /// Consumes the next token as a literal, emitting `ExpectedLiteral`
    /// when the next token is not one.
    fn parse(parser: &mut Parser) -> ParseResult<Literal> {
        match parser.take() {
            Some(literal) => Ok(literal),
            None => Err(parser.emit_error(ParseErrorKind::ExpectedLiteral)),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/brackets.rs | sway-parse/src/brackets.rs | use crate::{Parse, ParseResult, ParseToEnd, Parser};
use sway_ast::brackets::{Braces, Parens, SquareBrackets};
use sway_error::handler::ErrorEmitted;
use sway_error::parser_error::ParseErrorKind;
use sway_types::ast::Delimiter;
/// Common parsing interface for the three bracketed-group types (`Braces`,
/// `Parens`, `SquareBrackets`); implemented below by the `impl_brackets!`
/// macro.
pub trait ParseBracket<T>: Sized {
    /// Parses a bracketed group if the next token opens this bracket kind,
    /// consuming the group's contents to the end; `Ok(None)` otherwise.
    fn try_parse(parser: &mut Parser) -> ParseResult<Option<Self>>
    where
        T: ParseToEnd;
    /// Parses a bracketed group, requiring that `T` consumes the entire
    /// contents; leftover tokens trigger `on_error`. Errors if the opening
    /// delimiter is absent.
    fn parse_all_inner(
        parser: &mut Parser,
        on_error: impl FnOnce(Parser) -> ErrorEmitted,
    ) -> ParseResult<Self>
    where
        T: Parse;
    /// Like [`ParseBracket::parse_all_inner`], but returns `Ok(None)` when
    /// the opening delimiter is absent instead of erroring.
    fn try_parse_all_inner(
        parser: &mut Parser,
        on_error: impl FnOnce(Parser) -> ErrorEmitted,
    ) -> ParseResult<Option<Self>>
    where
        T: Parse;
}
/// Generates `ParseBracket<T>` and `Parse` impls for a bracketed-group type.
///
/// `$ty_name` is the wrapper type (e.g. `Braces`), `$delimiter` the
/// `Delimiter` variant it corresponds to, and `$error` the
/// `ParseErrorKind` variant emitted when the opening delimiter is missing.
macro_rules! impl_brackets (
    ($ty_name:ident, $delimiter:ident, $error:ident) => {
        impl<T> ParseBracket<T> for $ty_name<T> {
            fn try_parse(parser: &mut Parser) -> ParseResult<Option<$ty_name<T>>>
            where
                T: ParseToEnd
            {
                match parser.enter_delimited(Delimiter::$delimiter) {
                    Some((parser, span)) => {
                        // `parse_to_end` consumes everything inside the group.
                        let (inner, _consumed) = parser.parse_to_end()?;
                        Ok(Some($ty_name { inner, span }))
                    },
                    None => Ok(None),
                }
            }
            fn parse_all_inner(
                parser: &mut Parser,
                on_error: impl FnOnce(Parser) -> ErrorEmitted,
            ) -> ParseResult<$ty_name<T>>
            where
                T: Parse
            {
                match parser.enter_delimited(Delimiter::$delimiter) {
                    Some((mut parser, span)) => {
                        let inner = parser.parse()?;
                        // `T` must consume the whole group; leftovers are an error.
                        if !parser.is_empty() {
                            return Err(on_error(parser))
                        }
                        Ok($ty_name { inner, span })
                    },
                    None => Err(parser.emit_error(ParseErrorKind::$error)),
                }
            }
            fn try_parse_all_inner(
                parser: &mut Parser,
                on_error: impl FnOnce(Parser) -> ErrorEmitted,
            ) -> ParseResult<Option<$ty_name<T>>>
            where
                T: Parse
            {
                match parser.enter_delimited(Delimiter::$delimiter) {
                    Some((mut parser, span)) => {
                        let inner = parser.parse()?;
                        // `T` must consume the whole group; leftovers are an error.
                        if !parser.is_empty() {
                            return Err(on_error(parser))
                        }
                        Ok(Some($ty_name { inner, span }))
                    },
                    None => Ok(None),
                }
            }
        }
        impl<T> Parse for $ty_name<T>
        where
            T: ParseToEnd,
        {
            fn parse(parser: &mut Parser) -> ParseResult<$ty_name<T>> {
                match parser.enter_delimited(Delimiter::$delimiter) {
                    Some((parser, span)) => {
                        let (inner, _consumed) = parser.parse_to_end()?;
                        Ok($ty_name { inner, span })
                    },
                    None => Err(parser.emit_error(ParseErrorKind::$error)),
                }
            }
        }
    };
);
impl_brackets!(Braces, Brace, ExpectedOpenBrace);
impl_brackets!(Parens, Parenthesis, ExpectedOpenParen);
impl_brackets!(SquareBrackets, Bracket, ExpectedOpenBracket);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/token.rs | sway-parse/src/token.rs | use core::mem;
use extension_trait::extension_trait;
use num_bigint::BigUint;
use sway_ast::literal::{LitChar, LitInt, LitIntType, LitString, Literal};
use sway_ast::token::{
Comment, CommentKind, CommentedGroup, CommentedTokenStream, CommentedTokenTree, DocComment,
DocStyle, Punct, Spacing, TokenStream,
};
use sway_error::error::CompileError;
use sway_error::handler::{ErrorEmitted, Handler};
use sway_error::lex_error::{LexError, LexErrorKind};
use sway_types::span::Source;
use sway_types::{
ast::{Delimiter, PunctKind},
Ident, SourceId, Span, Spanned,
};
use unicode_bidi::format_chars::{ALM, FSI, LRE, LRI, LRM, LRO, PDF, PDI, RLE, RLI, RLM, RLO};
use unicode_xid::UnicodeXID;
/// Character-classification helpers used by the lexer below.
#[extension_trait]
impl CharExt for char {
    /// Converts the character into an opening delimiter, if any.
    fn as_open_delimiter(self) -> Option<Delimiter> {
        match self {
            '(' => Some(Delimiter::Parenthesis),
            '{' => Some(Delimiter::Brace),
            '[' => Some(Delimiter::Bracket),
            _ => None,
        }
    }
    /// Converts the character into a closing delimiter, if any.
    fn as_close_delimiter(self) -> Option<Delimiter> {
        match self {
            ')' => Some(Delimiter::Parenthesis),
            '}' => Some(Delimiter::Brace),
            ']' => Some(Delimiter::Bracket),
            _ => None,
        }
    }
    /// Determines what sort of punctuation this character is, if any.
    ///
    /// Note: `/` is listed here even though it may also start a comment;
    /// comments are recognized before punctuation in `lex_commented`.
    fn as_punct_kind(self) -> Option<PunctKind> {
        match self {
            ';' => Some(PunctKind::Semicolon),
            ':' => Some(PunctKind::Colon),
            '/' => Some(PunctKind::ForwardSlash),
            ',' => Some(PunctKind::Comma),
            '*' => Some(PunctKind::Star),
            '+' => Some(PunctKind::Add),
            '-' => Some(PunctKind::Sub),
            '<' => Some(PunctKind::LessThan),
            '>' => Some(PunctKind::GreaterThan),
            '=' => Some(PunctKind::Equals),
            '.' => Some(PunctKind::Dot),
            '!' => Some(PunctKind::Bang),
            '%' => Some(PunctKind::Percent),
            '&' => Some(PunctKind::Ampersand),
            '^' => Some(PunctKind::Caret),
            '|' => Some(PunctKind::Pipe),
            '_' => Some(PunctKind::Underscore),
            '#' => Some(PunctKind::Sharp),
            _ => None,
        }
    }
}
/// A `char_indices`-style iterator over `src` that yields absolute byte
/// offsets and can start at an arbitrary position within the string.
struct CharIndicesInner<'a> {
    src: &'a str,
    // Byte offset of the next character to yield; always on a char boundary.
    position: usize,
}
impl Iterator for CharIndicesInner<'_> {
    type Item = (usize, char);
    fn next(&mut self) -> Option<(usize, char)> {
        let mut char_indices = self.src[self.position..].char_indices();
        let (_, c) = char_indices.next()?;
        let ret = (self.position, c);
        // The offset of the *second* char in the sub-slice equals the byte
        // width of the first, so this advances by exactly one character.
        match char_indices.next() {
            Some((char_width, _)) => self.position += char_width,
            None => self.position = self.src.len(),
        };
        Some(ret)
    }
}
/// Peekable character stream consumed by the lexing functions below.
type CharIndices<'a> = std::iter::Peekable<CharIndicesInner<'a>>;
type Result<T> = core::result::Result<T, ErrorEmitted>;
/// Bundles everything the lexing helpers need: the error sink, the source
/// text and its id, and the character stream being consumed.
struct Lexer<'l> {
    handler: &'l Handler,
    src: &'l Source,
    source_id: &'l Option<SourceId>,
    stream: &'l mut CharIndices<'l>,
}
/// Lexes `src[start..end]` into a plain token stream, discarding comments.
///
/// Errors are reported through `handler`; see [`lex_commented`] for the
/// comment-preserving variant this delegates to.
pub fn lex(
    handler: &Handler,
    src: Source,
    start: usize,
    end: usize,
    source_id: Option<SourceId>,
) -> Result<TokenStream> {
    let commented = lex_commented(handler, src, start, end, &source_id)?;
    Ok(commented.strip_comments())
}
/// Identifier & path validation.
///
/// A *path* may optionally start with `::` and is otherwise a `::`‑separated
/// list of identifiers. Identifiers follow these rules:
///
/// * Must not be empty.
/// * Must not be just `_`.
/// * Must not start with two underscores (`__`).
/// * First char: Unicode XID_Start or `_`.
/// * Remaining chars: Unicode XID_Continue.
///
/// Any colon that is *not* part of a `::` token is rejected, and empty path
/// segments such as `foo::` or `foo:::bar` are invalid.
pub fn is_valid_identifier_or_path(s: &str) -> bool {
// Reject empty string early.
if s.is_empty() {
return false;
}
// Handle an optional leading `::`.
let mut input = s;
if let Some(rest) = input.strip_prefix("::") {
input = rest;
// Bare `::` is invalid.
if input.is_empty() {
return false;
}
}
// Split on *exactly* two consecutive colons. Any single `:` or triple
// `:::` will leave stray `:` characters inside a segment and fail below.
for segment in input.split("::") {
if !is_valid_identifier(segment) {
return false;
}
}
true
}
/// Check a single identifier segment.
fn is_valid_identifier(ident: &str) -> bool {
    // `_` alone and `__`-prefixed names are reserved.
    if ident == "_" || ident.starts_with("__") {
        return false;
    }
    let mut chars = ident.chars();
    match chars.next() {
        // Empty segment.
        None => false,
        // Leading char must be XID_Start or `_`; the rest XID_Continue.
        Some(first) => {
            (first.is_xid_start() || first == '_') && chars.all(|c| c.is_xid_continue())
        }
    }
}
/// Lexes `src[start..end]` into a token stream that retains comments and
/// doc comments as trees.
///
/// Lexing is error-tolerant: malformed input emits errors through `handler`
/// and recovery continues where possible (mismatched, unclosed, or stray
/// delimiters; invalid characters).
pub fn lex_commented(
    handler: &Handler,
    src: Source,
    start: usize,
    end: usize,
    source_id: &Option<SourceId>,
) -> Result<CommentedTokenStream> {
    let stream = &mut CharIndicesInner {
        src: &src.text[..end],
        position: start,
    }
    .peekable();
    let mut l = Lexer {
        handler,
        src: &src,
        source_id,
        stream,
    };
    let mut file_start_offset: usize = 0;
    // Stack of (tokens-so-far, open index, delimiter) for enclosing groups.
    let mut parent_token_trees = Vec::new();
    let mut token_trees = Vec::new();
    while let Some((mut index, mut character)) = l.stream.next() {
        if character.is_whitespace() {
            // if the beginning of a file starts with whitespace
            // we must keep track to ensure that the module level docs
            // will get inserted into the tree correctly
            if index - file_start_offset == 0 {
                file_start_offset += character.len_utf8();
            }
            continue;
        }
        if character == '/' {
            match l.stream.peek() {
                Some((_, '/')) => {
                    // search_end is the index at which we stop looking backwards for
                    // a newline
                    let search_end = token_trees
                        .last()
                        .map(|tt| {
                            if let CommentedTokenTree::Tree(t) = tt {
                                t.span().end()
                            } else {
                                0
                            }
                        })
                        .unwrap_or_default();
                    let has_newline = src.text[search_end..index]
                        .chars()
                        .rev()
                        .take_while(|c| c.is_whitespace())
                        .filter(|&c| c == '\n')
                        .count()
                        > 0;
                    // We found a comment at the start of file, which should be accounted for as a Newlined comment.
                    let start_of_file_found = search_end == 0 && index == 0;
                    let comment_kind = if has_newline || start_of_file_found {
                        CommentKind::Newlined
                    } else {
                        CommentKind::Trailing
                    };
                    let ctt = lex_line_comment(&mut l, end, index, comment_kind);
                    token_trees.push(ctt);
                    continue;
                }
                Some((_, '*')) => {
                    if let Some(token) = lex_block_comment(&mut l, index) {
                        token_trees.push(token);
                    }
                    continue;
                }
                // Lone `/` falls through to be lexed as punctuation below.
                Some(_) | None => {}
            }
        }
        if character.is_xid_start() || character == '_' {
            // Raw identifier, e.g., `r#foo`? Then mark as such, stripping the prefix `r#`.
            let is_raw_ident = character == 'r' && matches!(l.stream.peek(), Some((_, '#')));
            if is_raw_ident {
                l.stream.next();
                if let Some((next_index, next_character)) = l.stream.next() {
                    character = next_character;
                    index = next_index;
                }
                if !(character.is_xid_start() || character == '_') {
                    let kind = LexErrorKind::InvalidCharacter {
                        position: index,
                        character,
                    };
                    let span = span_one(&l, index, character);
                    error(l.handler, LexError { kind, span });
                    continue;
                }
            }
            // Don't accept just `_` as an identifier.
            let not_is_single_underscore = character != '_'
                || l.stream
                    .peek()
                    .is_some_and(|(_, next)| next.is_xid_continue());
            if not_is_single_underscore {
                // Consume until we hit other than `XID_CONTINUE`.
                while l.stream.next_if(|(_, c)| c.is_xid_continue()).is_some() {}
                let ident = Ident::new_with_raw(span_until(&mut l, index), is_raw_ident);
                token_trees.push(CommentedTokenTree::Tree(ident.into()));
                continue;
            }
            // A lone `_` falls through to be lexed as punctuation below.
        }
        if let Some(delimiter) = character.as_open_delimiter() {
            // Push current tokens; lexing continues inside the new group.
            let token_trees = mem::take(&mut token_trees);
            parent_token_trees.push((token_trees, index, delimiter));
            continue;
        }
        if let Some(close_delimiter) = character.as_close_delimiter() {
            match parent_token_trees.pop() {
                None => {
                    // Recover by ignoring the unexpected closing delim,
                    // giving the parser opportunities to realize the need for an opening delim
                    // in e.g., this example:
                    //
                    // fn foo() // <-- Parser expects grouped tokens in `{ ... }` here.
                    //     let x = 0;
                    // } // <- This recovery.
                    let kind = LexErrorKind::UnexpectedCloseDelimiter {
                        position: index,
                        close_delimiter,
                    };
                    let span = span_one(&l, index, character);
                    error(l.handler, LexError { kind, span });
                }
                Some((parent, open_index, open_delimiter)) => {
                    if open_delimiter != close_delimiter {
                        // Recover on e.g., a `{ )` mismatch by having `)` interpreted as `}`.
                        let kind = LexErrorKind::MismatchedDelimiters {
                            open_position: open_index,
                            close_position: index,
                            open_delimiter,
                            close_delimiter,
                        };
                        let span = span_one(&l, index, character);
                        error(l.handler, LexError { kind, span });
                    }
                    token_trees = lex_close_delimiter(
                        &mut l,
                        index,
                        parent,
                        token_trees,
                        open_index,
                        open_delimiter,
                    );
                }
            }
            continue;
        }
        if let Some(token) = lex_string(&mut l, index, character)? {
            token_trees.push(token);
            continue;
        }
        if let Some(token) = lex_char(&mut l, index, character)? {
            token_trees.push(token);
            continue;
        }
        if let Some(token) = lex_int_lit(&mut l, index, character)? {
            token_trees.push(token);
            continue;
        }
        if let Some(token) = lex_punctuation(&mut l, index, character) {
            token_trees.push(token);
            continue;
        }
        // Recover by simply ignoring the character.
        // NOTE(Centril): I'm not sure how good of an idea this is... time will tell.
        let kind = LexErrorKind::InvalidCharacter {
            position: index,
            character,
        };
        let span = span_one(&l, index, character);
        error(l.handler, LexError { kind, span });
        continue;
    }
    // Recover all unclosed delimiters.
    while let Some((parent, open_index, open_delimiter)) = parent_token_trees.pop() {
        let kind = LexErrorKind::UnclosedDelimiter {
            open_position: open_index,
            open_delimiter,
        };
        let span = span_one(&l, open_index, open_delimiter.as_open_char());
        error(l.handler, LexError { kind, span });
        token_trees = lex_close_delimiter(
            &mut l,
            src.text.len(),
            parent,
            token_trees,
            open_index,
            open_delimiter,
        );
    }
    Ok(CommentedTokenStream {
        token_trees,
        full_span: span(&l, start, end),
    })
}
/// Closes the group opened at `open_index`, wrapping the tokens collected
/// inside it (`token_trees`) into a `CommentedGroup` that is appended to the
/// enclosing level's tokens (`parent`), which is then returned.
fn lex_close_delimiter(
    l: &mut Lexer<'_>,
    index: usize,
    mut parent: Vec<CommentedTokenTree>,
    token_trees: Vec<CommentedTokenTree>,
    open_index: usize,
    delimiter: Delimiter,
) -> Vec<CommentedTokenTree> {
    // The inner span starts just past the opening delimiter character and
    // ends at the closing one.
    let inner_start = open_index + delimiter.as_open_char().len_utf8();
    let token_stream = CommentedTokenStream {
        token_trees,
        full_span: span(l, inner_start, index),
    };
    let group = CommentedGroup {
        token_stream,
        delimiter,
        span: span_until(l, open_index),
    };
    parent.push(CommentedTokenTree::Tree(group.into()));
    parent
}
/// Lexes a `//...` comment starting at `index` (the stream is positioned on
/// the second `/`), classifying it as a plain comment or a line doc comment
/// (`///` outer, `//!` inner; `////` and beyond are plain comments).
fn lex_line_comment(
    l: &mut Lexer<'_>,
    end: usize,
    index: usize,
    comment_kind: CommentKind,
) -> CommentedTokenTree {
    let _ = l.stream.next();
    // Find end; either at EOF or at `\n`.
    let end = l
        .stream
        .find(|(_, character)| *character == '\n')
        .map_or(end, |(end, _)| end);
    let sp = span(l, index, end);
    // Classify by the characters right after the leading `//`.
    let doc_style = match (sp.as_str().chars().nth(2), sp.as_str().chars().nth(3)) {
        // `//!` is an inner line doc comment.
        (Some('!'), _) => Some(DocStyle::Inner),
        // `////` (more than 3 slashes) is not considered a doc comment.
        (Some('/'), Some('/')) => None,
        // `///` is an outer line doc comment.
        (Some('/'), _) => Some(DocStyle::Outer),
        _ => None,
    };
    if let Some(doc_style) = doc_style {
        let doc_comment = DocComment {
            span: sp,
            doc_style,
            // Content starts after the three marker chars (`///` or `//!`).
            content_span: span(l, index + 3, end),
        };
        CommentedTokenTree::Tree(doc_comment.into())
    } else {
        Comment {
            span: sp,
            comment_kind,
        }
        .into()
    }
}
/// Lexes a `/* ... */` comment starting at `index` (the stream is positioned
/// on the `*`). Nested block comments are supported via a stack of open
/// positions; an unterminated comment emits an error and returns `None`.
fn lex_block_comment(l: &mut Lexer<'_>, index: usize) -> Option<CommentedTokenTree> {
    // Lexing a multi-line comment.
    let _ = l.stream.next();
    let mut unclosed_indices = vec![index];
    let unclosed_multiline_comment = |l: &Lexer<'_>, unclosed_indices: Vec<_>| {
        let span = span(l, *unclosed_indices.last().unwrap(), l.src.text.len() - 1);
        let kind = LexErrorKind::UnclosedMultilineComment { unclosed_indices };
        error(l.handler, LexError { kind, span });
        None
    };
    // We first start by assuming that block comments are inlined.
    let mut comment_kind = CommentKind::Inlined;
    loop {
        match l.stream.next() {
            None => return unclosed_multiline_comment(l, unclosed_indices),
            Some((_, '*')) => match l.stream.next() {
                None => return unclosed_multiline_comment(l, unclosed_indices),
                // Matched `*/`, so we're closing some multi-line comment. It could be nested.
                Some((slash_ix, '/')) => {
                    let start = unclosed_indices.pop().unwrap();
                    if unclosed_indices.is_empty() {
                        // For the purposes of lexing,
                        // nested multi-line comments constitute a single multi-line comment.
                        // We could represent them as several ones, but that's unnecessary.
                        let end = slash_ix + '/'.len_utf8();
                        let span = span(l, start, end);
                        return Some(Comment { span, comment_kind }.into());
                    }
                }
                Some(_) => {}
            },
            // Found nested multi-line comment.
            Some((next_index, '/')) => match l.stream.next() {
                None => return unclosed_multiline_comment(l, unclosed_indices),
                Some((_, '*')) => unclosed_indices.push(next_index),
                Some(_) => {}
            },
            Some((_, '\n')) => {
                // If we find a newline character while lexing, this means that the block comment is multiline.
                // Example:
                // /* this is a
                //    multilined block comment */
                comment_kind = CommentKind::Multilined;
            }
            Some(_) => {}
        }
    }
}
/// Lexes a `"..."` string literal when `character` is `"`, resolving escape
/// sequences and rejecting Unicode text-direction codepoints inside the
/// literal (those emit an error but lexing continues).
fn lex_string(
    l: &mut Lexer<'_>,
    index: usize,
    character: char,
) -> Result<Option<CommentedTokenTree>> {
    if character != '"' {
        return Ok(None);
    }
    let mut parsed = String::new();
    loop {
        let unclosed_string_lit = |l: &Lexer<'_>, end| {
            error(
                l.handler,
                LexError {
                    kind: LexErrorKind::UnclosedStringLiteral { position: index },
                    span: span(l, index, end),
                },
            )
        };
        let (next_index, next_character) = l.stream.next().ok_or_else(|| {
            // last character may not be a unicode boundary
            let mut end = l.src.text.len() - 1;
            while !l.src.text.is_char_boundary(end) {
                end -= 1;
            }
            unclosed_string_lit(l, end)
        })?;
        parsed.push(match next_character {
            '\\' => parse_escape_code(l)
                .map_err(|e| e.unwrap_or_else(|| unclosed_string_lit(l, l.src.text.len())))?,
            '"' => break,
            // do not allow text direction codepoints
            ALM | FSI | LRE | LRI | LRM | LRO | PDF | PDI | RLE | RLI | RLM | RLO => {
                let kind = LexErrorKind::UnicodeTextDirInLiteral {
                    position: next_index,
                    character: next_character,
                };
                let span = span_one(l, next_index, next_character);
                error(l.handler, LexError { span, kind });
                continue;
            }
            _ => next_character,
        });
    }
    let span = span_until(l, index);
    let literal = Literal::String(LitString { span, parsed });
    Ok(Some(CommentedTokenTree::Tree(literal.into())))
}
/// Lexes a `'...'` char literal when `character` is `'`.
///
/// If more than one character appears before the closing quote (e.g. `'ab'`),
/// the user most likely meant a string literal, so this recovers by emitting
/// an `ExpectedCloseQuote` error and producing a string literal instead.
fn lex_char(
    l: &mut Lexer<'_>,
    index: usize,
    character: char,
) -> Result<Option<CommentedTokenTree>> {
    let is_quote = |c| c == '\'';
    if !is_quote(character) {
        return Ok(None);
    }
    let unclosed_char_lit = |l: &Lexer<'_>| {
        let err = LexError {
            kind: LexErrorKind::UnclosedCharLiteral { position: index },
            span: span(l, index, l.src.text.len()),
        };
        error(l.handler, err)
    };
    let next = |l: &mut Lexer<'_>| l.stream.next().ok_or_else(|| unclosed_char_lit(l));
    let escape = |l: &mut Lexer<'_>, next_char| {
        if next_char == '\\' {
            parse_escape_code(l).map_err(|e| e.unwrap_or_else(|| unclosed_char_lit(l)))
        } else {
            Ok(next_char)
        }
    };
    let (next_index, next_char) = next(l)?;
    // do not allow text direction codepoints
    if let ALM | FSI | LRE | LRI | LRM | LRO | PDF | PDI | RLE | RLI | RLM | RLO = next_char {
        let kind = LexErrorKind::UnicodeTextDirInLiteral {
            position: next_index,
            character: next_char,
        };
        let span = span_one(l, next_index, next_char);
        error(l.handler, LexError { span, kind });
    }
    let parsed = escape(l, next_char)?;
    // Consume the closing `'`.
    let (next_index, next_char) = next(l)?;
    let sp = span_until(l, index);
    // Not a closing quote? Then this is e.g., 'ab'.
    // Most likely the user meant a string literal, so recover as that instead.
    let literal = if !is_quote(next_char) {
        let mut string = String::new();
        string.push(parsed);
        string.push(escape(l, next_char)?);
        loop {
            let (_, next_char) = next(l)?;
            if is_quote(next_char) {
                break;
            }
            string.push(next_char);
        }
        // Emit the expected closing quote error.
        error(
            l.handler,
            LexError {
                kind: LexErrorKind::ExpectedCloseQuote {
                    position: next_index,
                },
                span: span(l, next_index, next_index + string.len()),
            },
        );
        Literal::String(LitString {
            span: sp,
            parsed: string,
        })
    } else {
        Literal::Char(LitChar { span: sp, parsed })
    };
    Ok(Some(CommentedTokenTree::Tree(literal.into())))
}
/// Parses the remainder of an escape sequence (the stream is positioned just
/// past the `\`), returning the character it denotes.
///
/// Supports the simple escapes (`\"`, `\'`, `\n`, `\r`, `\t`, `\\`, `\0`),
/// hex escapes (`\xHH`), and Unicode escapes (`\u{...}`). Returns
/// `Err(None)` when the input ends mid-escape (the caller reports the
/// unclosed literal) and `Err(Some(_))` for malformed escapes already
/// reported here.
fn parse_escape_code(l: &mut Lexer<'_>) -> core::result::Result<char, Option<ErrorEmitted>> {
    let error = |kind, span| Err(Some(error(l.handler, LexError { kind, span })));
    match l.stream.next() {
        None => Err(None),
        Some((_, '"')) => Ok('"'),
        Some((_, '\'')) => Ok('\''),
        Some((_, 'n')) => Ok('\n'),
        Some((_, 'r')) => Ok('\r'),
        Some((_, 't')) => Ok('\t'),
        Some((_, '\\')) => Ok('\\'),
        Some((_, '0')) => Ok('\0'),
        Some((index, 'x')) => {
            // Exactly two hex digits follow `\x`.
            let (high, low) = match (l.stream.next(), l.stream.next()) {
                (Some((_, high)), Some((_, low))) => (high, low),
                _ => return Err(None),
            };
            let (high, low) = match (high.to_digit(16), low.to_digit(16)) {
                (Some(high), Some(low)) => (high, low),
                _ => return error(LexErrorKind::InvalidHexEscape, span_until(l, index)),
            };
            // Max value is 0xFF, always a valid `char`, hence the unwrap.
            let parsed_character = char::from_u32((high << 4) | low).unwrap();
            Ok(parsed_character)
        }
        Some((index, 'u')) => {
            // `\u` must be followed by `{`, hex digits, and `}`.
            match l.stream.next() {
                None => return Err(None),
                Some((_, '{')) => (),
                Some((_, unexpected_char)) => {
                    let span = span_one(l, index, unexpected_char);
                    let kind = LexErrorKind::UnicodeEscapeMissingBrace { position: index };
                    return error(kind, span);
                }
            }
            let mut digits_start_position_opt = None;
            // Accumulate in a BigUint so overlong digit runs are detected
            // (rather than silently wrapping) when converting to u32 below.
            let mut char_value = BigUint::from(0u32);
            let digits_end_position = loop {
                let (position, digit) = match l.stream.next() {
                    None => return Err(None),
                    Some((position, '}')) => break position,
                    Some((position, digit)) => (position, digit),
                };
                if digits_start_position_opt.is_none() {
                    digits_start_position_opt = Some(position);
                };
                let digit = match digit.to_digit(16) {
                    None => {
                        let span = span_one(l, position, digit);
                        let kind = LexErrorKind::InvalidUnicodeEscapeDigit { position };
                        return error(kind, span);
                    }
                    Some(digit) => digit,
                };
                char_value *= 16u32;
                char_value += digit;
            };
            let digits_start_position = digits_start_position_opt.unwrap_or(digits_end_position);
            let char_value = match u32::try_from(char_value) {
                Err(..) => {
                    let span = span(l, digits_start_position, digits_end_position);
                    let kind = LexErrorKind::UnicodeEscapeOutOfRange { position: index };
                    return error(kind, span);
                }
                Ok(char_value) => char_value,
            };
            let parsed_character = match char::from_u32(char_value) {
                None => {
                    let span_all = span_until(l, index);
                    let kind = LexErrorKind::UnicodeEscapeInvalidCharValue { span: span_all };
                    let span = span(l, digits_start_position, digits_end_position);
                    return error(kind, span);
                }
                Some(parsed_character) => parsed_character,
            };
            Ok(parsed_character)
        }
        Some((index, unexpected_char)) => error(
            LexErrorKind::InvalidEscapeCode { position: index },
            span_one(l, index, unexpected_char),
        ),
    }
}
/// Lexes an integer literal when `character` is a decimal digit.
///
/// A leading `0` may introduce a radix prefix (`0x` hex, `0o` octal, `0b`
/// binary); otherwise the literal is decimal. Underscore separators are
/// accepted between digits, and an optional type suffix (e.g. `u64`) is
/// handled by `lex_int_ty_opt`.
fn lex_int_lit(
    l: &mut Lexer<'_>,
    index: usize,
    character: char,
) -> Result<Option<CommentedTokenTree>> {
    let digit = match character.to_digit(10) {
        None => return Ok(None),
        Some(d) => d,
    };
    let decimal_int_lit = |l, digit: u32| {
        let mut big_uint = BigUint::from(digit);
        let end_opt = parse_digits(&mut big_uint, l, 10);
        (big_uint, end_opt)
    };
    let (big_uint, end_opt) = if digit == 0 {
        let prefixed_int_lit = |l: &mut Lexer<'_>, radix| {
            // Consume the radix letter, then require at least one digit.
            let _ = l.stream.next();
            let d = l.stream.next();
            let incomplete_int_lit = |end| {
                let kind = match radix {
                    16 => LexErrorKind::IncompleteHexIntLiteral { position: index },
                    8 => LexErrorKind::IncompleteOctalIntLiteral { position: index },
                    2 => LexErrorKind::IncompleteBinaryIntLiteral { position: index },
                    _ => unreachable!(),
                };
                let span = span(l, index, end);
                error(l.handler, LexError { kind, span })
            };
            let (digit_pos, digit) = d.ok_or_else(|| incomplete_int_lit(l.src.text.len()))?;
            let radix_digit = digit
                .to_digit(radix)
                .ok_or_else(|| incomplete_int_lit(digit_pos))?;
            let mut big_uint = BigUint::from(radix_digit);
            let end_opt = parse_digits(&mut big_uint, l, radix);
            Ok((big_uint, end_opt))
        };
        match l.stream.peek() {
            Some((_, 'x')) => prefixed_int_lit(l, 16)?,
            Some((_, 'o')) => prefixed_int_lit(l, 8)?,
            Some((_, 'b')) => prefixed_int_lit(l, 2)?,
            // Plain decimal with a leading zero, e.g. `0_1` or `007`.
            Some((_, '_' | '0'..='9')) => decimal_int_lit(l, 0),
            // A bare `0` followed by some other character.
            Some(&(next_index, _)) => (BigUint::from(0u32), Some(next_index)),
            None => (BigUint::from(0u32), None),
        }
    } else {
        decimal_int_lit(l, digit)
    };
    let ty_opt = lex_int_ty_opt(l)?;
    let literal = Literal::Int(LitInt {
        span: span(l, index, end_opt.unwrap_or(l.src.text.len())),
        parsed: big_uint,
        ty_opt,
        is_generated_b256: false,
    });
    Ok(Some(CommentedTokenTree::Tree(literal.into())))
}
/// Lexes an optional integer type suffix (e.g. `u8`, `i64`) immediately
/// following the digits of an integer literal.
///
/// An unknown suffix emits `InvalidIntSuffix` and is discarded (returns
/// `Ok(None)`), so lexing can continue.
fn lex_int_ty_opt(l: &mut Lexer<'_>) -> Result<Option<(LitIntType, Span)>> {
    let (suffix_start_position, c) = match l.stream.next_if(|(_, c)| c.is_xid_continue()) {
        None => return Ok(None),
        Some(x) => x,
    };
    // Consume the whole identifier-like run as the candidate suffix.
    let mut suffix = String::from(c);
    let suffix_end_position = loop {
        match l.stream.peek() {
            Some((_, c)) if c.is_xid_continue() => {
                suffix.push(*c);
                let _ = l.stream.next();
            }
            Some((pos, _)) => break *pos,
            None => break l.src.text.len(),
        }
    };
    // Parse the suffix to a known one, or if unknown, recover by throwing it away.
    let ty = match parse_int_suffix(&suffix) {
        Some(s) => s,
        None => {
            let span = span(l, suffix_start_position, suffix_end_position);
            let kind = LexErrorKind::InvalidIntSuffix {
                suffix: Ident::new(span.clone()),
            };
            error(l.handler, LexError { kind, span });
            return Ok(None);
        }
    };
    let span = span_until(l, suffix_start_position);
    Ok(Some((ty, span)))
}
/// Interpret the given `suffix` string as a `LitIntType`.
///
/// Returns `None` for any string that is not a known integer type name.
pub fn parse_int_suffix(suffix: &str) -> Option<LitIntType> {
    match suffix {
        "u8" => Some(LitIntType::U8),
        "u16" => Some(LitIntType::U16),
        "u32" => Some(LitIntType::U32),
        "u64" => Some(LitIntType::U64),
        "u256" => Some(LitIntType::U256),
        "i8" => Some(LitIntType::I8),
        "i16" => Some(LitIntType::I16),
        "i32" => Some(LitIntType::I32),
        "i64" => Some(LitIntType::I64),
        _ => None,
    }
}
/// Accumulates digits of the given `radix` into `big_uint`, skipping `_`
/// separators, until a non-digit is reached.
///
/// Returns the byte position of the first unconsumed character, or `None`
/// when the stream was exhausted.
fn parse_digits(big_uint: &mut BigUint, l: &mut Lexer<'_>, radix: u32) -> Option<usize> {
    while let Some(&(index, character)) = l.stream.peek() {
        if character == '_' {
            // Underscore separators carry no value.
            let _ = l.stream.next();
        } else if let Some(digit) = character.to_digit(radix) {
            let _ = l.stream.next();
            *big_uint *= radix;
            *big_uint += digit;
        } else {
            // First character that is not part of the literal.
            return Some(index);
        }
    }
    None
}
/// Lexes a single punctuation character, recording whether the following
/// character is also punctuation (`Joint`) so multi-char operators can be
/// reassembled later.
fn lex_punctuation(l: &mut Lexer<'_>, index: usize, character: char) -> Option<CommentedTokenTree> {
    let kind = character.as_punct_kind()?;
    let spacing = if l
        .stream
        .peek()
        .is_some_and(|(_, next)| next.as_punct_kind().is_some())
    {
        Spacing::Joint
    } else {
        Spacing::Alone
    };
    let punct = Punct {
        kind,
        spacing,
        span: span_until(l, index),
    };
    Some(CommentedTokenTree::Tree(punct.into()))
}
/// Span from `start` up to (but excluding) the next unconsumed character,
/// or to the end of the source if the stream is exhausted.
fn span_until(l: &mut Lexer<'_>, start: usize) -> Span {
    let end = l.stream.peek().map_or(l.src.text.len(), |(end, _)| *end);
    span(l, start, end)
}
/// Span covering the single character `c` starting at byte offset `start`.
fn span_one(l: &Lexer<'_>, start: usize, c: char) -> Span {
    span(l, start, start + c.len_utf8())
}
/// Builds a span over `start..end` in the lexer's source.
fn span(l: &Lexer<'_>, start: usize, end: usize) -> Span {
    Span::new(l.src.clone(), start, end, *l.source_id).unwrap()
}
/// Emit a lexer error.
fn error(handler: &Handler, error: LexError) -> ErrorEmitted {
    handler.emit_err(CompileError::Lex { error })
}
#[cfg(test)]
mod tests {
use super::*;
use assert_matches::assert_matches;
use sway_ast::{
literal::{LitChar, Literal},
token::{
Comment, CommentKind, CommentedTokenTree, CommentedTree, DocComment, DocStyle,
TokenTree,
},
};
use sway_error::{
error::CompileError,
handler::Handler,
lex_error::{LexError, LexErrorKind},
};
#[test]
fn lex_bidi() {
let input = "
script;
use std::string::String;
fn main() {
let a = String::from_ascii_str(\"fuel\");
let b = String::from_ascii_str(\"fuel\u{202E}\u{2066}// Same string again\u{2069}\u{2066}\");
if a.as_bytes() == b.as_bytes() {
log(\"same\");
} else {
log(\"different\");
}
let lrm = '\u{202E}';
log(lrm);
}
";
let start = 0;
let end = input.len();
let path = None;
let handler = Handler::default();
let _stream = lex_commented(&handler, input.into(), start, end, &path).unwrap();
let (errors, warnings, infos) = handler.consume();
assert_eq!(infos.len(), 0);
assert_eq!(warnings.len(), 0);
assert_eq!(errors.len(), 5);
for err in errors {
assert_matches!(
err,
CompileError::Lex {
error: LexError {
span: _,
kind: LexErrorKind::UnicodeTextDirInLiteral {
position: _,
character: _
}
}
}
);
}
}
#[test]
fn lex_commented_token_stream() {
let input = r#"
//
// Single-line comment.
struct Foo {
/* multi-
* line-
* comment */
bar: i32, // trailing comment
}
"#;
let start = 0;
let end = input.len();
let path = None;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/expr/asm.rs | sway-parse/src/expr/asm.rs | use crate::expr::op_code::parse_instruction;
use crate::{Parse, ParseResult, ParseToEnd, Parser, ParserConsumed};
use core::str::FromStr;
use num_bigint::BigUint;
use sway_ast::expr::asm::{
AsmBlock, AsmBlockContents, AsmFinalExpr, AsmImmediate, AsmRegisterDeclaration,
};
use sway_error::parser_error::ParseErrorKind;
use sway_types::{Ident, Spanned};
impl Parse for AsmBlock {
    /// Parses `asm`, then the register list, then the block contents;
    /// struct-literal fields evaluate top to bottom, preserving that order.
    fn parse(parser: &mut Parser) -> ParseResult<AsmBlock> {
        Ok(AsmBlock {
            asm_token: parser.parse()?,
            registers: parser.parse()?,
            contents: parser.parse()?,
        })
    }
}
impl Parse for AsmRegisterDeclaration {
    /// Parses a register name with an optional `: <initializer>` suffix.
    fn parse(parser: &mut Parser) -> ParseResult<AsmRegisterDeclaration> {
        let register = parser.parse()?;
        let mut value_opt = None;
        // A colon introduces the optional initial-value expression.
        if let Some(colon_token) = parser.take() {
            let value = parser.parse()?;
            value_opt = Some((colon_token, value));
        }
        Ok(AsmRegisterDeclaration {
            register,
            value_opt,
        })
    }
}
impl ParseToEnd for AsmBlockContents {
    /// Parses the body of an `asm` block: zero or more `<op> <args> ;`
    /// instructions, optionally followed by a final return expression
    /// (`register` or `register: ty`) with no trailing semicolon.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(AsmBlockContents, ParserConsumed<'a>)> {
        let mut instructions = Vec::new();
        let (final_expr_opt, consumed) = loop {
            if let Some(consumed) = parser.check_empty() {
                break (None, consumed);
            }
            // Parse the opcode directly instead of calling `parser.parse()` to avoid checking for
            // illegal identifiers such as keywords. opcode names should not be subject to those
            // checks because some opcodes, such as `mod`, are also Sway keywords.
            let ident = match parser.take::<Ident>() {
                Some(ident) => ident,
                None => return Err(parser.emit_error(ParseErrorKind::ExpectedIdent)),
            };
            // An identifier at the very end is the final return register.
            if let Some(consumed) = parser.check_empty() {
                let final_expr = AsmFinalExpr {
                    register: ident,
                    ty_opt: None,
                };
                break (Some(final_expr), consumed);
            }
            // `register: ty` form of the final expression; must end the block.
            if let Some(colon_token) = parser.take() {
                let ty = parser.parse()?;
                let consumed = match parser.check_empty() {
                    Some(consumed) => consumed,
                    None => {
                        return Err(
                            parser.emit_error(ParseErrorKind::UnexpectedTokenAfterAsmReturnType)
                        );
                    }
                };
                let final_expr = AsmFinalExpr {
                    register: ident,
                    ty_opt: Some((colon_token, ty)),
                };
                break (Some(final_expr), consumed);
            }
            // Otherwise the identifier is an opcode mnemonic.
            let instruction = parse_instruction(ident, &mut parser)?;
            let semicolon_token = parser.parse()?;
            instructions.push((instruction, semicolon_token));
        };
        let contents = AsmBlockContents {
            instructions,
            final_expr_opt,
        };
        Ok((contents, consumed))
    }
}
impl Parse for AsmImmediate {
fn parse(parser: &mut Parser) -> ParseResult<AsmImmediate> {
let ident = parser.parse::<Ident>()?;
let digits = ident
.as_str()
.strip_prefix('i')
.ok_or_else(|| parser.emit_error(ParseErrorKind::MalformedAsmImmediate))?;
let parsed = BigUint::from_str(digits)
.ok()
.ok_or_else(|| parser.emit_error(ParseErrorKind::MalformedAsmImmediate))?;
Ok(AsmImmediate {
span: ident.span(),
parsed,
})
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/expr/op_code.rs | sway-parse/src/expr/op_code.rs | use crate::{ParseResult, Parser};
use sway_ast::expr::op_code::*;
use sway_error::parser_error::ParseErrorKind;
use sway_types::{Ident, Spanned};
// `define_op_codes!` takes a table of
// `(Variant, OpcodeType, "mnemonic", (operand names...))` entries and
// generates from it:
// * `OP_CODES`: the list of all known mnemonics (used in error reporting);
// * `parse_instruction`: the operand-parsing dispatcher for an opcode whose
//   mnemonic identifier has already been consumed by the caller.
macro_rules! define_op_codes (
    ( $(($op_name:ident, $ty_name:ident, $s:literal, ($($arg_name:ident),*)),)* ) => {
        pub const OP_CODES: &'static [&'static str] = &[
            $($s),*
        ];
        pub fn parse_instruction(ident: Ident, parser: &mut Parser) -> ParseResult<Instruction> {
            match ident.as_str() {
                $($s => {
                    // Parse each declared operand, in declaration order.
                    $(
                        let $arg_name = parser.parse()?;
                    )*
                    Ok($ty_name::instruction(ident, ($($arg_name,)*)))
                },)*
                _ => {
                    // Unknown mnemonic: report it at the identifier's span,
                    // listing every known opcode.
                    let span = ident.span().clone();
                    Err(parser.emit_error_with_span(ParseErrorKind::UnrecognizedOpCode {
                        known_op_codes: OP_CODES,
                    }, span))
                },
            }
        }
    };
);
define_op_codes!(
    /* Arithmetic/Logic (ALU) Instructions */
    (Add, AddOpcode, "add", (ret, lhs, rhs)),
    (Addi, AddiOpcode, "addi", (ret, lhs, rhs)),
    (And, AndOpcode, "and", (ret, lhs, rhs)),
    (Andi, AndiOpcode, "andi", (ret, lhs, rhs)),
    (Div, DivOpcode, "div", (ret, lhs, rhs)),
    (Divi, DiviOpcode, "divi", (ret, lhs, rhs)),
    (Eq, EqOpcode, "eq", (ret, lhs, rhs)),
    (Exp, ExpOpcode, "exp", (ret, base, power)),
    (Expi, ExpiOpcode, "expi", (ret, base, power)),
    (Gt, GtOpcode, "gt", (ret, lhs, rhs)),
    (Lt, LtOpcode, "lt", (ret, lhs, rhs)),
    (Mlog, MlogOpcode, "mlog", (ret, arg, base)),
    (Mod, ModOpcode, "mod", (ret, lhs, rhs)),
    (Modi, ModiOpcode, "modi", (ret, lhs, rhs)),
    (Move, MoveOpcode, "move", (ret, from)),
    (Movi, MoviOpcode, "movi", (ret, arg)),
    (Mroo, MrooOpcode, "mroo", (ret, arg, root)),
    (Mul, MulOpcode, "mul", (ret, lhs, rhs)),
    (Muli, MuliOpcode, "muli", (ret, lhs, rhs)),
    (Noop, NoopOpcode, "noop", ()),
    (Not, NotOpcode, "not", (ret, arg)),
    (Or, OrOpcode, "or", (ret, lhs, rhs)),
    (Ori, OriOpcode, "ori", (ret, lhs, rhs)),
    (Sll, SllOpcode, "sll", (ret, lhs, rhs)),
    (Slli, SlliOpcode, "slli", (ret, lhs, rhs)),
    (Srl, SrlOpcode, "srl", (ret, lhs, rhs)),
    (Srli, SrliOpcode, "srli", (ret, lhs, rhs)),
    (Sub, SubOpcode, "sub", (ret, lhs, rhs)),
    (Subi, SubiOpcode, "subi", (ret, lhs, rhs)),
    (Wqcm, WqcmOpcode, "wqcm", (ret, lhs, rhs, op_mode)),
    (Wqop, WqopOpcode, "wqop", (ret, lhs, rhs, op_mode)),
    (Wqml, WqmlOpcode, "wqml", (ret, lhs, rhs, indirect)),
    (Wqdv, WqdvOpcode, "wqdv", (ret, lhs, rhs, indirect)),
    (Wqmd, WqmdOpcode, "wqmd", (ret, lhs_a, lhs_b, rhs)),
    (Wqam, WqamOpcode, "wqam", (ret, lhs_a, lhs_b, rhs)),
    (Wqmm, WqmmOpcode, "wqmm", (ret, lhs_a, lhs_b, rhs)),
    (Xor, XorOpcode, "xor", (ret, lhs, rhs)),
    (Xori, XoriOpcode, "xori", (ret, lhs, rhs)),
    /* Control Flow Instructions */
    (Jmp, JmpOpcode, "jmp", (offset)),
    (Ji, JiOpcode, "ji", (offset)),
    (Jne, JneOpcode, "jne", (lhs, rhs, offset)),
    (Jnei, JneiOpcode, "jnei", (lhs, rhs, offset)),
    (Jnzi, JnziOpcode, "jnzi", (arg, offset)),
    (Jmpb, JmpbOpcode, "jmpb", (offset_reg, offset)),
    (Jmpf, JmpfOpcode, "jmpf", (offset_reg, offset)),
    (Jnzb, JnzbOpcode, "jnzb", (arg, offset_reg, offset)),
    (Jnzf, JnzfOpcode, "jnzf", (arg, offset_reg, offset)),
    (Jneb, JnebOpcode, "jneb", (lhs, rhs, offset_reg, offset)),
    (Jnef, JnefOpcode, "jnef", (lhs, rhs, offset_reg, offset)),
    (Jal, JalOpcode, "jal", (addr, offset_reg, offset)),
    (Ret, RetOpcode, "ret", (value)),
    /* Memory Instructions */
    (Aloc, AlocOpcode, "aloc", (size)),
    (Cfei, CfeiOpcode, "cfei", (size)),
    (Cfsi, CfsiOpcode, "cfsi", (size)),
    (Cfe, CfeOpcode, "cfe", (size)),
    (Cfs, CfsOpcode, "cfs", (size)),
    (Lb, LbOpcode, "lb", (ret, addr, offset)),
    (Lw, LwOpcode, "lw", (ret, addr, offset)),
    (Mcl, MclOpcode, "mcl", (addr, size)),
    (Mcli, McliOpcode, "mcli", (addr, size)),
    (Mcp, McpOpcode, "mcp", (dst_addr, src_addr, size)),
    (Mcpi, McpiOpcode, "mcpi", (dst_addr, src_addr, size)),
    (Meq, MeqOpcode, "meq", (ret, lhs_addr, rhs_addr, size)),
    (Sb, SbOpcode, "sb", (addr, value, offset)),
    (Sw, SwOpcode, "sw", (addr, value, offset)),
    /* Contract Instructions */
    (Bal, BalOpcode, "bal", (ret, asset, contract)),
    (Bhei, BheiOpcode, "bhei", (ret)),
    (Bhsh, BhshOpcode, "bhsh", (addr, height)),
    (Burn, BurnOpcode, "burn", (coins, sub_id)),
    (Call, CallOpcode, "call", (args_addr, coins, asset, gas)),
    (Cb, CbOpcode, "cb", (addr)),
    (Ccp, CcpOpcode, "ccp", (dst_addr, contract, src_addr, size)),
    (Croo, CrooOpcode, "croo", (addr, contract)),
    (Csiz, CsizOpcode, "csiz", (ret, contract)),
    (Bsiz, BsizOpcode, "bsiz", (ret, contract)),
    (Ldc, LdcOpcode, "ldc", (contract, addr, size, imm)),
    (Bldd, BlddOpcode, "bldd", (dst_ptr, addr, offset, len)),
    (Log, LogOpcode, "log", (reg_a, reg_b, reg_c, reg_d)),
    (Logd, LogdOpcode, "logd", (reg_a, reg_b, addr, size)),
    (Mint, MintOpcode, "mint", (coins, sub_id)),
    (Retd, RetdOpcode, "retd", (addr, size)),
    (Rvrt, RvrtOpcode, "rvrt", (value)),
    (Smo, SmoOpcode, "smo", (addr, len, output, coins)),
    (Scwq, ScwqOpcode, "scwq", (addr, is_set, len)),
    (Srw, SrwOpcode, "srw", (ret, is_set, state_addr)),
    (Srwq, SrwqOpcode, "srwq", (addr, is_set, state_addr, count)),
    (Sww, SwwOpcode, "sww", (state_addr, is_set, value)),
    (Swwq, SwwqOpcode, "swwq", (state_addr, is_set, addr, count)),
    (Time, TimeOpcode, "time", (ret, height)),
    (Tr, TrOpcode, "tr", (contract, coins, asset)),
    (Tro, TroOpcode, "tro", (addr, output, coins, asset)),
    /* Cryptographic Instructions */
    (Eck1, Eck1Opcode, "eck1", (addr, sig, hash)),
    (Ecr1, Ecr1Opcode, "ecr1", (addr, sig, hash)),
    (Ed19, Ed19Opcode, "ed19", (addr, sig, hash, len)),
    (K256, K256Opcode, "k256", (addr, data, size)),
    (S256, S256Opcode, "s256", (addr, data, size)),
    (
        ECOP,
        ECOPOpcode,
        "ecop",
        (dst_addr, curve, operation, src_addr)
    ),
    (
        EPAR,
        EPAROpcode,
        "epar",
        (ret, curve, groups_of_points, addr)
    ),
    /* Other Instructions */
    (Ecal, EcalOpcode, "ecal", (reg_a, reg_b, reg_c, reg_d)),
    (Flag, FlagOpcode, "flag", (value)),
    (Gm, GmOpcode, "gm", (ret, op)),
    (Gtf, GtfOpcode, "gtf", (ret, index, tx_field_id)),
    /* Non-VM Instructions */
    (Blob, BlobOpcode, "blob", (size)),
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/expr/mod.rs | sway-parse/src/expr/mod.rs | use crate::{Parse, ParseBracket, ParseResult, ParseToEnd, Parser, ParserConsumed, Peek};
use sway_ast::brackets::{Braces, Parens, SquareBrackets};
use sway_ast::expr::{LoopControlFlow, ReassignmentOp, ReassignmentOpVariant};
use sway_ast::keywords::{
AbiToken, AddEqToken, AmpersandToken, AsmToken, CommaToken, ConfigurableToken, ConstToken,
DivEqToken, DoubleColonToken, EnumToken, EqToken, FalseToken, FnToken, HashToken, IfToken,
ImplToken, LetToken, MutToken, OpenAngleBracketToken, PubToken, SemicolonToken, ShlEqToken,
ShrEqToken, StarEqToken, StorageToken, StructToken, SubEqToken, TraitToken, TrueToken,
TypeToken, UseToken,
};
use sway_ast::literal::{LitBool, LitBoolType};
use sway_ast::punctuated::Punctuated;
use sway_ast::token::DocComment;
use sway_ast::{
AbiCastArgs, CodeBlockContents, Expr, ExprArrayDescriptor, ExprStructField,
ExprTupleDescriptor, GenericArgs, IfCondition, IfExpr, LitInt, Literal, MatchBranch,
MatchBranchKind, PathExpr, PathExprSegment, Statement, StatementLet,
};
use sway_error::parser_error::ParseErrorKind;
use sway_types::{ast::Delimiter, Ident, Span, Spanned};
mod asm;
pub mod op_code;
impl ParseToEnd for AbiCastArgs {
    /// Parses the arguments of an `abi(...)` cast to the end: `Name, address`.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(AbiCastArgs, ParserConsumed<'a>)> {
        let name = parser.parse()?;
        let comma_token = parser.parse()?;
        let address = parser.parse()?;
        // Nothing may follow the address inside the parentheses.
        if let Some(consumed) = parser.check_empty() {
            let abi_cast_args = AbiCastArgs {
                name,
                comma_token,
                address,
            };
            Ok((abi_cast_args, consumed))
        } else {
            Err(parser.emit_error(ParseErrorKind::UnexpectedTokenAfterAbiAddress))
        }
    }
}
impl Parse for IfExpr {
    /// Parses `if <condition> <block>` with an optional `else` arm, where the
    /// `else` arm is either a chained `if` or a plain block.
    fn parse(parser: &mut Parser) -> ParseResult<IfExpr> {
        let if_token = parser.parse()?;
        let condition = parser.parse()?;
        let then_block = parser.parse()?;
        let mut else_opt = None;
        if let Some(else_token) = parser.take() {
            // `else if ...` continues the chain; `else { ... }` ends it.
            let else_body = if let Some(chained_if) = parser.guarded_parse::<IfToken, _>()? {
                LoopControlFlow::Continue(Box::new(chained_if))
            } else {
                LoopControlFlow::Break(parser.parse()?)
            };
            else_opt = Some((else_token, else_body));
        }
        Ok(IfExpr {
            if_token,
            condition,
            then_block,
            else_opt,
        })
    }
}
impl Parse for IfCondition {
    /// Parses either a `let`-condition (`let <pat> = <expr>`) or a plain
    /// condition expression.
    fn parse(parser: &mut Parser) -> ParseResult<IfCondition> {
        match parser.take() {
            Some(let_token) => {
                let lhs = parser.parse()?;
                let eq_token = parser.parse()?;
                let rhs = Box::new(parse_condition(parser)?);
                Ok(IfCondition::Let {
                    let_token,
                    lhs,
                    eq_token,
                    rhs,
                })
            }
            None => Ok(IfCondition::Expr(Box::new(parse_condition(parser)?))),
        }
    }
}
impl Parse for Expr {
    fn parse(parser: &mut Parser) -> ParseResult<Expr> {
        // Expression parsing starts at the lowest-precedence level:
        // reassignment (`=`, `+=`, ...).
        parse_reassignment(parser, ParseExprCtx::default())
    }
}
impl Parse for StatementLet {
    /// Parses a `let` statement: `let <pattern>[: <ty>] = <expr>;`.
    fn parse(parser: &mut Parser) -> ParseResult<Self> {
        let let_token: LetToken = parser.parse()?;
        // `let = ...` is missing its pattern; report the error right after
        // the `let` keyword rather than at the `=`.
        if parser.peek::<EqToken>().is_some() {
            return Err(parser.emit_error_with_span(
                ParseErrorKind::ExpectedPattern,
                let_token
                    .span()
                    .next_char_utf8()
                    .unwrap_or_else(|| let_token.span()),
            ));
        }
        // NOTE(review): the `true` flag passed to `try_parse` appears to opt
        // into error recovery (cf. the semicolon comment below) — confirm
        // against `Parser::try_parse`.
        let pattern = parser.try_parse(true)?;
        // Optional type ascription: `: <ty>`.
        let ty_opt = match parser.take() {
            Some(colon_token) => Some((colon_token, parser.parse()?)),
            None => None,
        };
        let eq_token: EqToken = parser.try_parse(true)?;
        let expr = parser.try_parse(true)?;
        // Recover on missing semicolon.
        let semicolon_token = parser.try_parse(true)?;
        Ok(StatementLet {
            let_token,
            pattern,
            ty_opt,
            eq_token,
            expr,
            semicolon_token,
        })
    }
}
impl ParseToEnd for CodeBlockContents {
    /// Parses the statements of a `{ ... }` block, plus an optional tail
    /// expression, until the block's tokens are exhausted.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(CodeBlockContents, ParserConsumed<'a>)> {
        let mut statements = Vec::new();
        let (final_expr_opt, consumed) = loop {
            if let Some(consumed) = parser.check_empty() {
                break (None, consumed);
            }
            match parser.call_parsing_function_with_recovery(parse_stmt) {
                Ok(StmtOrTail::Stmt(s)) => statements.push(s),
                // A tail expression is necessarily the last thing in the block.
                Ok(StmtOrTail::Tail(e, c)) => break (Some(e), c),
                Err(r) => {
                    // On a parse error, recover at the next line and record an
                    // error statement so parsing of the block can continue.
                    let (spans, error) = r
                        .recover_at_next_line_with_fallback_error(ParseErrorKind::InvalidStatement);
                    statements.push(Statement::Error(spans, error));
                }
            }
        };
        let code_block_contents = CodeBlockContents {
            statements,
            final_expr_opt,
            span: parser.full_span().clone(),
        };
        Ok((code_block_contents, consumed))
    }
}
/// A statement or a tail expression in a block.
// NOTE(review): one variant is presumably much larger than the other (hence
// the allow); these values are short-lived, so boxing is avoided here.
#[allow(clippy::large_enum_variant)]
enum StmtOrTail<'a> {
    /// A statement.
    Stmt(Statement),
    /// Tail expression in a block.
    Tail(Box<Expr>, ParserConsumed<'a>),
}
/// Parses either a statement or a tail expression.
fn parse_stmt<'a>(parser: &mut Parser<'a, '_>) -> ParseResult<StmtOrTail<'a>> {
    let stmt = |s| Ok(StmtOrTail::Stmt(s));
    // Try parsing an item as a statement.
    // A leading item keyword — or, for `storage`/`configurable`, the keyword
    // immediately followed by `{` — marks the statement as an item.
    if parser.peek::<UseToken>().is_some()
        || parser.peek::<StructToken>().is_some()
        || parser.peek::<EnumToken>().is_some()
        || parser.peek::<FnToken>().is_some()
        || parser.peek::<PubToken>().is_some()
        || parser.peek::<TraitToken>().is_some()
        || parser.peek::<ImplToken>().is_some()
        || parser.peek::<(AbiToken, Ident)>().is_some()
        || parser.peek::<ConstToken>().is_some()
        || parser.peek::<TypeToken>().is_some()
        || parser.peek::<DocComment>().is_some()
        || parser.peek::<HashToken>().is_some()
        || matches!(
            parser.peek::<(StorageToken, Delimiter)>(),
            Some((_, Delimiter::Brace))
        )
        || matches!(
            parser.peek::<(ConfigurableToken, Delimiter)>(),
            Some((_, Delimiter::Brace))
        )
    {
        return stmt(Statement::Item(parser.parse()?));
    }
    // Try a `let` statement.
    if let Some(item) = parser.guarded_parse::<LetToken, StatementLet>()? {
        return stmt(Statement::Let(item));
    }
    // Try an `expr;` statement.
    let expr = parse_statement_expr(parser)?;
    if let Some(semicolon_token) = parser.take() {
        return stmt(Statement::Expr {
            expr,
            semicolon_token_opt: Some(semicolon_token),
        });
    }
    // Reached EOF? Then an expression is a statement.
    if let Some(consumed) = parser.check_empty() {
        return Ok(StmtOrTail::Tail(Box::new(expr), consumed));
    }
    // For statements like `if`,
    // they don't need to be terminated by `;` to be statements.
    if expr.is_control_flow() {
        return stmt(Statement::Expr {
            expr,
            semicolon_token_opt: None,
        });
    }
    Err(parser.emit_error(ParseErrorKind::UnexpectedTokenInStatement))
}
/// Context threaded through the expression parsers.
#[derive(Clone, Copy, Debug, Default)]
struct ParseExprCtx {
    // True while parsing a condition (`if`/`while`/`match`/`for`); in that
    // position struct literals and contract-call braces are not parsed
    // (see `parse_atom` and `parse_projection`).
    pub parsing_conditional: bool,
    // True only for the outermost expression of a statement; used to let
    // control-flow expressions stand alone without continuing into an
    // operator chain.
    pub at_start_of_statement: bool,
}
impl ParseExprCtx {
    // Returns a copy of the context marked as no longer being at the start
    // of a statement (used when descending into subexpressions).
    pub fn not_statement(self) -> ParseExprCtx {
        ParseExprCtx {
            at_start_of_statement: false,
            ..self
        }
    }
}
/// Parses an expression in conditional position (after `if`, `while`,
/// `match`, `for ... in`), where struct literals are not allowed.
fn parse_condition(parser: &mut Parser) -> ParseResult<Expr> {
    parse_reassignment(
        parser,
        ParseExprCtx {
            parsing_conditional: true,
            at_start_of_statement: false,
        },
    )
}
/// Parses the outermost expression of a statement.
fn parse_statement_expr(parser: &mut Parser) -> ParseResult<Expr> {
    parse_reassignment(
        parser,
        ParseExprCtx {
            parsing_conditional: false,
            at_start_of_statement: true,
        },
    )
}
/// Eats a `ReassignmentOp`, if any, from `parser`.
fn take_reassignment_op(parser: &mut Parser) -> Option<ReassignmentOp> {
let (variant, span) = if let Some(add_eq_token) = parser.take::<AddEqToken>() {
(ReassignmentOpVariant::AddEquals, add_eq_token.span())
} else if let Some(sub_eq_token) = parser.take::<SubEqToken>() {
(ReassignmentOpVariant::SubEquals, sub_eq_token.span())
} else if let Some(mul_eq_token) = parser.take::<StarEqToken>() {
(ReassignmentOpVariant::MulEquals, mul_eq_token.span())
} else if let Some(div_eq_token) = parser.take::<DivEqToken>() {
(ReassignmentOpVariant::DivEquals, div_eq_token.span())
} else if let Some(shl_eq_token) = parser.take::<ShlEqToken>() {
(ReassignmentOpVariant::ShlEquals, shl_eq_token.span())
} else if let Some(shr_eq_token) = parser.take::<ShrEqToken>() {
(ReassignmentOpVariant::ShrEquals, shr_eq_token.span())
} else if let Some(eq_token) = parser.take::<EqToken>() {
(ReassignmentOpVariant::Equals, eq_token.span())
} else {
return None;
};
Some(ReassignmentOp { variant, span })
}
/// Parses the reassignment level: `<lhs> <op> <rhs>` where `<op>` is `=`,
/// `+=`, `-=`, etc. The right-hand side recurses into this same level, so
/// reassignment is right-associative.
fn parse_reassignment(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let expr = parse_logical_or(parser, ctx)?;
    let expr_span = expr.span();
    if let Some(reassignment_op) = take_reassignment_op(parser) {
        // The left-hand side must convert to an assignable place
        // expression; otherwise report what kind of expression it was.
        let assignable = match expr.try_into_assignable() {
            Ok(assignable) => assignable,
            Err(expr) => {
                let span = expr.span();
                return Err(parser.emit_error_with_span(
                    ParseErrorKind::UnassignableExpression {
                        erroneous_expression_kind: expr.friendly_name(),
                        erroneous_expression_span: span,
                    },
                    expr_span,
                ));
            }
        };
        let expr = Box::new(parse_reassignment(parser, ctx.not_statement())?);
        return Ok(Expr::Reassignment {
            assignable,
            reassignment_op,
            expr,
        });
    }
    Ok(expr)
}
/// If the operator token `O` comes next, consumes it, parses the right-hand
/// side with `sub`, and returns the pair; otherwise consumes nothing.
fn parse_op_rhs<O: Peek>(
    parser: &mut Parser,
    ctx: ParseExprCtx,
    sub: impl Fn(&mut Parser, ParseExprCtx) -> ParseResult<Expr>,
) -> ParseResult<Option<(O, Box<Expr>)>> {
    match parser.take() {
        Some(op_token) => {
            let rhs = Box::new(sub(parser, ctx.not_statement())?);
            Ok(Some((op_token, rhs)))
        }
        None => Ok(None),
    }
}
/// Parses one left-associative binary-operator precedence level: one operand
/// via `sub`, then any number of `<op> <operand>` pairs folded via `combine`.
fn parse_binary<O: Peek>(
    parser: &mut Parser,
    ctx: ParseExprCtx,
    sub: impl Fn(&mut Parser, ParseExprCtx) -> ParseResult<Expr>,
    combine: impl Fn(Box<Expr>, Box<Expr>, O) -> Expr,
) -> ParseResult<Expr> {
    let mut expr = sub(parser, ctx)?;
    // A control-flow expression at statement start stands alone; do not
    // continue it with an operator.
    if ctx.at_start_of_statement && expr.is_control_flow() {
        return Ok(expr);
    }
    loop {
        match parse_op_rhs(parser, ctx, &sub)? {
            Some((op_token, rhs)) => expr = combine(Box::new(expr), rhs, op_token),
            None => break Ok(expr),
        }
    }
}
/// Parses `a || b`, the lowest-precedence binary operator level.
fn parse_logical_or(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    parse_binary(parser, ctx, parse_logical_and, |lhs, rhs, double_pipe_token| {
        Expr::LogicalOr {
            lhs,
            double_pipe_token,
            rhs,
        }
    })
}
/// Parses `a && b`.
fn parse_logical_and(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    parse_binary(
        parser,
        ctx,
        parse_comparison,
        |lhs, rhs, double_ampersand_token| Expr::LogicalAnd {
            lhs,
            double_ampersand_token,
            rhs,
        },
    )
}
/// Parses the comparison level. At most one comparison operator is consumed
/// (there is no loop), so comparisons do not chain.
fn parse_comparison(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let expr = parse_bit_or(parser, ctx)?;
    // Each `parse_op_rhs` call below infers a *different* operator token type
    // from the `Expr` variant field it initializes, so the chain tries
    // `==`, `!=`, `<`, `>`, `<=`, `>=` in turn.
    let expr = if expr.is_control_flow() && ctx.at_start_of_statement {
        expr
    } else if let Some((double_eq_token, rhs)) = parse_op_rhs(parser, ctx, parse_bit_or)? {
        Expr::Equal {
            lhs: Box::new(expr),
            double_eq_token,
            rhs,
        }
    } else if let Some((bang_eq_token, rhs)) = parse_op_rhs(parser, ctx, parse_bit_or)? {
        Expr::NotEqual {
            lhs: Box::new(expr),
            bang_eq_token,
            rhs,
        }
    } else if let Some((less_than_token, rhs)) = parse_op_rhs(parser, ctx, parse_bit_or)? {
        Expr::LessThan {
            lhs: Box::new(expr),
            less_than_token,
            rhs,
        }
    } else if let Some((greater_than_token, rhs)) = parse_op_rhs(parser, ctx, parse_bit_or)? {
        Expr::GreaterThan {
            lhs: Box::new(expr),
            greater_than_token,
            rhs,
        }
    } else if let Some((less_than_eq_token, rhs)) = parse_op_rhs(parser, ctx, parse_bit_or)? {
        Expr::LessThanEq {
            lhs: Box::new(expr),
            less_than_eq_token,
            rhs,
        }
    } else if let Some((greater_than_eq_token, rhs)) = parse_op_rhs(parser, ctx, parse_bit_or)? {
        Expr::GreaterThanEq {
            lhs: Box::new(expr),
            greater_than_eq_token,
            rhs,
        }
    } else {
        expr
    };
    Ok(expr)
}
/// Parses `a | b`.
fn parse_bit_or(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    parse_binary(parser, ctx, parse_bit_xor, |lhs, rhs, pipe_token| {
        Expr::BitOr {
            lhs,
            pipe_token,
            rhs,
        }
    })
}
/// Parses `a ^ b`.
fn parse_bit_xor(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    parse_binary(parser, ctx, parse_bit_and, |lhs, rhs, caret_token| {
        Expr::BitXor {
            lhs,
            caret_token,
            rhs,
        }
    })
}
/// Parses `a & b`.
fn parse_bit_and(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    parse_binary(parser, ctx, parse_shift, |lhs, rhs, ampersand_token| {
        Expr::BitAnd {
            lhs,
            ampersand_token,
            rhs,
        }
    })
}
/// Parses the left-associative shift level (`<<`, `>>`).
fn parse_shift(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let mut expr = parse_add(parser, ctx)?;
    if expr.is_control_flow() && ctx.at_start_of_statement {
        return Ok(expr);
    }
    loop {
        // Each operator token type (`<<` vs `>>`) is inferred from the
        // `Expr` variant field initialized in its branch.
        expr = if let Some((shl_token, rhs)) = parse_op_rhs(parser, ctx, parse_add)? {
            Expr::Shl {
                lhs: Box::new(expr),
                shl_token,
                rhs,
            }
        } else if let Some((shr_token, rhs)) = parse_op_rhs(parser, ctx, parse_add)? {
            Expr::Shr {
                lhs: Box::new(expr),
                shr_token,
                rhs,
            }
        } else {
            return Ok(expr);
        };
    }
}
/// Parses the left-associative additive level (`+`, `-`).
fn parse_add(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let mut expr = parse_mul(parser, ctx)?;
    if expr.is_control_flow() && ctx.at_start_of_statement {
        return Ok(expr);
    }
    loop {
        // Each operator token type (`+` vs `-`) is inferred from the `Expr`
        // variant field initialized in its branch.
        expr = if let Some((add_token, rhs)) = parse_op_rhs(parser, ctx, parse_mul)? {
            Expr::Add {
                lhs: Box::new(expr),
                add_token,
                rhs,
            }
        } else if let Some((sub_token, rhs)) = parse_op_rhs(parser, ctx, parse_mul)? {
            Expr::Sub {
                lhs: Box::new(expr),
                sub_token,
                rhs,
            }
        } else {
            return Ok(expr);
        };
    }
}
/// Parses the left-associative multiplicative level. Note that `**` (pow),
/// `*`, `/` and `%` all share this single precedence level here.
fn parse_mul(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let mut expr = parse_unary_op(parser, ctx)?;
    if expr.is_control_flow() && ctx.at_start_of_statement {
        return Ok(expr);
    }
    loop {
        // Each operator token type is inferred from the `Expr` variant field
        // initialized in its branch.
        expr = if let Some((double_star_token, rhs)) = parse_op_rhs(parser, ctx, parse_unary_op)? {
            Expr::Pow {
                lhs: Box::new(expr),
                double_star_token,
                rhs,
            }
        } else if let Some((star_token, rhs)) = parse_op_rhs(parser, ctx, parse_unary_op)? {
            Expr::Mul {
                lhs: Box::new(expr),
                star_token,
                rhs,
            }
        } else if let Some((forward_slash_token, rhs)) = parse_op_rhs(parser, ctx, parse_unary_op)?
        {
            Expr::Div {
                lhs: Box::new(expr),
                forward_slash_token,
                rhs,
            }
        } else if let Some((percent_token, rhs)) = parse_op_rhs(parser, ctx, parse_unary_op)? {
            Expr::Modulo {
                lhs: Box::new(expr),
                percent_token,
                rhs,
            }
        } else {
            return Ok(expr);
        };
    }
}
/// Parses prefix unary operators — referencing (`&`, `&mut`), dereference
/// (`*`) and negation (`!`) — each recursing into this level; otherwise
/// falls through to projection parsing.
fn parse_unary_op(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    if let Some((ampersand_token, mut_token, expr)) = parse_referencing(parser, ctx)? {
        return Ok(Expr::Ref {
            ampersand_token,
            mut_token,
            expr,
        });
    }
    // The operator token types (`*`, `!`) below are inferred from the `Expr`
    // variant fields they initialize.
    if let Some((star_token, expr)) = parse_op_rhs(parser, ctx, parse_unary_op)? {
        return Ok(Expr::Deref { star_token, expr });
    }
    if let Some((bang_token, expr)) = parse_op_rhs(parser, ctx, parse_unary_op)? {
        return Ok(Expr::Not { bang_token, expr });
    }
    return parse_projection(parser, ctx);
    #[allow(clippy::type_complexity)] // Used just here for getting the three parsed elements.
    fn parse_referencing(
        parser: &mut Parser,
        ctx: ParseExprCtx,
    ) -> ParseResult<Option<(AmpersandToken, Option<MutToken>, Box<Expr>)>> {
        // `&`, optionally followed by `mut`, then the referenced expression.
        if let Some(ampersand_token) = parser.take() {
            let mut_token = parser.take::<MutToken>();
            let expr = Box::new(parse_unary_op(parser, ctx.not_statement())?);
            return Ok(Some((ampersand_token, mut_token, expr)));
        }
        Ok(None)
    }
}
/// Parses postfix projections on an atom, repeatedly: indexing `expr[i]`,
/// method calls `expr.name(...)` (optionally with contract-call braces),
/// field access `expr.name`, and tuple-field access `expr.0`.
fn parse_projection(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let mut expr = parse_func_app(parser, ctx)?;
    loop {
        // `expr[index]`
        if let Some(arg) = SquareBrackets::try_parse_all_inner(parser, |mut parser| {
            parser.emit_error(ParseErrorKind::UnexpectedTokenAfterArrayIndex)
        })? {
            let target = Box::new(expr);
            expr = Expr::Index { target, arg };
            continue;
        }
        if let Some(dot_token) = parser.take() {
            let target = Box::new(expr);
            // Try parsing a field access or a method call.
            if let Some(path_seg) = parser.guarded_parse::<Ident, PathExprSegment>()? {
                // Contract-call braces (`x.f { ... }(...)`) are not parsed
                // in conditional position.
                if !ctx.parsing_conditional {
                    if let Some(contract_args) = Braces::try_parse(parser)? {
                        expr = Expr::MethodCall {
                            target,
                            dot_token,
                            path_seg,
                            contract_args_opt: Some(contract_args),
                            args: Parens::parse(parser)?,
                        };
                        continue;
                    }
                }
                if let Some(args) = Parens::try_parse(parser)? {
                    expr = Expr::MethodCall {
                        target,
                        dot_token,
                        path_seg,
                        contract_args_opt: None,
                        args,
                    };
                    continue;
                }
                // No arguments, so this is a field projection.
                ensure_field_projection_no_generics(parser, &path_seg.generics_opt);
                expr = Expr::FieldProjection {
                    target,
                    dot_token,
                    name: path_seg.name,
                };
                continue;
            }
            // Try parsing a tuple field projection, e.g. `expr.0`.
            if let Some(lit) = parser.take() {
                // Only plain integer literals are valid tuple indices.
                let lit_int = match lit {
                    Literal::Int(lit_int) => lit_int,
                    _ => {
                        let span = lit.span();
                        return Err(parser
                            .emit_error_with_span(ParseErrorKind::InvalidLiteralFieldName, span));
                    }
                };
                let LitInt {
                    span,
                    parsed,
                    ty_opt,
                    is_generated_b256: _,
                } = lit_int;
                // A type suffix (e.g. `expr.0u64`) is rejected.
                if ty_opt.is_some() {
                    return Err(
                        parser.emit_error_with_span(ParseErrorKind::IntFieldWithTypeSuffix, span)
                    );
                }
                let field = parsed;
                let field_span = span;
                expr = Expr::TupleFieldProjection {
                    target,
                    dot_token,
                    field,
                    field_span,
                };
                continue;
            }
            // Nothing expected followed. Now we have parsed `expr .`.
            // Try to recover as an unknown sort of expression.
            let err = parser.emit_error(ParseErrorKind::ExpectedFieldName);
            return Ok(Expr::Error([target.span(), dot_token.span()].into(), err));
        }
        return Ok(expr);
    }
}
/// Ensure we don't have `foo.bar::<...>` where `bar` isn't a method call.
///
/// Emits `FieldProjectionWithGenericArgs` spanning the `::<...>` part.
fn ensure_field_projection_no_generics(
    parser: &mut Parser,
    generic_args: &Option<(DoubleColonToken, GenericArgs)>,
) {
    if let Some((double_colon_token, args)) = generic_args {
        let error_span = Span::join(double_colon_token.span(), &args.span());
        parser.emit_error_with_span(ParseErrorKind::FieldProjectionWithGenericArgs, error_span);
    }
}
/// Parses an atom followed by any number of call-argument lists,
/// e.g. `f(a)(b)`.
fn parse_func_app(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    let mut expr = parse_atom(parser, ctx)?;
    if ctx.at_start_of_statement && expr.is_control_flow() {
        return Ok(expr);
    }
    loop {
        match Parens::try_parse(parser)? {
            Some(args) => {
                expr = Expr::FuncApp {
                    func: Box::new(expr),
                    args,
                };
            }
            None => break Ok(expr),
        }
    }
}
/// Parses an atomic expression: block, array, tuple/parens, literal, `asm`
/// block, control flow (`if`/`match`/`while`/`for`/`break`/`continue`/
/// `return`/`panic`), abi cast, path, or struct literal. The order of the
/// attempts below defines the grammar and must not change.
fn parse_atom(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult<Expr> {
    // `{ ... }`: a block expression.
    if let Some(code_block_inner) = Braces::try_parse(parser)? {
        return Ok(Expr::Block(code_block_inner));
    }
    // `[ ... ]`: an array expression.
    if let Some(array_inner) = SquareBrackets::try_parse(parser)? {
        return Ok(Expr::Array(array_inner));
    }
    // `( ... )`: unit `()`, a parenthesized expression, or a tuple.
    if let Some((mut parser, span)) = parser.enter_delimited(Delimiter::Parenthesis) {
        if let Some(_consumed) = parser.check_empty() {
            return Ok(Expr::Tuple(Parens::new(ExprTupleDescriptor::Nil, span)));
        }
        let head = parser.parse()?;
        // A comma after the first element makes this a tuple.
        if let Some(comma_token) = parser.take() {
            let (tail, _consumed) = parser.parse_to_end()?;
            let tuple = ExprTupleDescriptor::Cons {
                head,
                comma_token,
                tail,
            };
            return Ok(Expr::Tuple(Parens::new(tuple, span)));
        }
        if let Some(_consumed) = parser.check_empty() {
            return Ok(Expr::Parens(Parens::new(head, span)));
        }
        return Err(
            parser.emit_error(ParseErrorKind::ExpectedCommaOrCloseParenInTupleOrParenExpression)
        );
    }
    // Boolean literals.
    let lit_bool = |span, kind| Ok(Expr::Literal(Literal::Bool(LitBool { span, kind })));
    if let Some(ident) = parser.take::<TrueToken>() {
        return lit_bool(ident.span(), LitBoolType::True);
    }
    if let Some(ident) = parser.take::<FalseToken>() {
        return lit_bool(ident.span(), LitBoolType::False);
    }
    if let Some(asm_block) = parser.guarded_parse::<AsmToken, _>()? {
        return Ok(Expr::Asm(asm_block));
    }
    if let Some(break_token) = parser.take() {
        return Ok(Expr::Break { break_token });
    }
    if let Some(continue_token) = parser.take() {
        return Ok(Expr::Continue { continue_token });
    }
    if let Some(abi_token) = parser.take() {
        let args = parser.parse()?;
        return Ok(Expr::AbiCast { abi_token, args });
    }
    // `return` with an optional value: a bare `return` is recognized when
    // followed by end-of-input, `,`, or `;`.
    if let Some(return_token) = parser.take() {
        if parser.is_empty()
            || parser.peek::<CommaToken>().is_some()
            || parser.peek::<SemicolonToken>().is_some()
        {
            return Ok(Expr::Return {
                return_token,
                expr_opt: None,
            });
        }
        let expr = parser.parse()?;
        return Ok(Expr::Return {
            return_token,
            expr_opt: Some(expr),
        });
    }
    // `panic` with an optional value, mirroring `return`.
    if let Some(panic_token) = parser.take() {
        if parser.is_empty()
            || parser.peek::<CommaToken>().is_some()
            || parser.peek::<SemicolonToken>().is_some()
        {
            return Ok(Expr::Panic {
                panic_token,
                expr_opt: None,
            });
        }
        let expr = parser.parse()?;
        return Ok(Expr::Panic {
            panic_token,
            expr_opt: Some(expr),
        });
    }
    if let Some(if_expr) = parser.guarded_parse::<IfToken, _>()? {
        return Ok(Expr::If(if_expr));
    }
    // `match`/`while`/`for` parse their scrutinee/condition/iterator in
    // conditional position (struct literals disallowed there).
    if let Some(match_token) = parser.take() {
        let condition = Box::new(parse_condition(parser)?);
        let branches = parser.parse()?;
        return Ok(Expr::Match {
            match_token,
            value: condition,
            branches,
        });
    }
    if let Some(while_token) = parser.take() {
        let condition = Box::new(parse_condition(parser)?);
        let block = parser.parse()?;
        return Ok(Expr::While {
            while_token,
            condition,
            block,
        });
    }
    if let Some(for_token) = parser.take() {
        let value_pattern = parser.parse()?;
        let in_token = parser.parse()?;
        let iterator = Box::new(parse_condition(parser)?);
        let block = parser.parse()?;
        return Ok(Expr::For {
            for_token,
            value_pattern,
            in_token,
            iterator,
            block,
        });
    }
    // A path expression, optionally followed by a struct literal's fields.
    if parser.peek::<OpenAngleBracketToken>().is_some()
        || parser.peek::<DoubleColonToken>().is_some()
        || parser.peek::<Ident>().is_some()
    {
        let path: PathExpr = parser.parse()?;
        if path.incomplete_suffix {
            // We tried parsing it as a path but we didn't succeed so we try to recover this
            // as an unknown sort of expression. This happens, for instance, when the user
            // types `foo::`
            return Ok(Expr::Error(
                [path.span()].into(),
                parser.emit_error(ParseErrorKind::ExpectedPathType),
            ));
        }
        if !ctx.parsing_conditional {
            if let Some(fields) = Braces::try_parse(parser)? {
                return Ok(Expr::Struct { path, fields });
            }
        };
        return Ok(Expr::Path(path));
    }
    if let Some(literal) = parser.take() {
        return Ok(Expr::Literal(literal));
    }
    Err(parser.emit_error(ParseErrorKind::ExpectedExpression))
}
impl Parse for ExprStructField {
    /// Parses a struct-literal field: `name` or `name: expr` (the `: expr`
    /// part is optional shorthand).
    fn parse(parser: &mut Parser) -> ParseResult<ExprStructField> {
        let field_name = parser.parse()?;
        let expr_opt = if let Some(colon_token) = parser.take() {
            Some((colon_token, parser.parse()?))
        } else {
            None
        };
        Ok(ExprStructField {
            field_name,
            expr_opt,
        })
    }
}
impl ParseToEnd for ExprArrayDescriptor {
    /// Parses the interior of an array expression to the end: the empty
    /// array `[]`, the repeat form `[value; length]`, or a comma-separated
    /// sequence `[a, b, ...]`.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(ExprArrayDescriptor, ParserConsumed<'a>)> {
        // Empty `[]`.
        if let Some(consumed) = parser.check_empty() {
            let punctuated = Punctuated::empty();
            let descriptor = ExprArrayDescriptor::Sequence(punctuated);
            return Ok((descriptor, consumed));
        }
        let value = parser.parse()?;
        // `[value; length]`
        if let Some(semicolon_token) = parser.take() {
            let length = parser.parse()?;
            let consumed = match parser.check_empty() {
                Some(consumed) => consumed,
                None => {
                    return Err(parser.emit_error(ParseErrorKind::UnexpectedTokenAfterArrayLength));
                }
            };
            let descriptor = ExprArrayDescriptor::Repeat {
                value: Box::new(value),
                semicolon_token,
                length,
            };
            return Ok((descriptor, consumed));
        }
        // `[a, b, ...]`: parse the remainder, then re-attach the first
        // element at the front of the punctuated list.
        if let Some(comma_token) = parser.take() {
            let (mut punctuated, consumed): (Punctuated<_, _>, _) = parser.parse_to_end()?;
            punctuated
                .value_separator_pairs
                .insert(0, (value, comma_token));
            let descriptor = ExprArrayDescriptor::Sequence(punctuated);
            return Ok((descriptor, consumed));
        }
        // `[value]`: a one-element sequence.
        if let Some(consumed) = parser.check_empty() {
            let punctuated = Punctuated::single(value);
            let descriptor = ExprArrayDescriptor::Sequence(punctuated);
            return Ok((descriptor, consumed));
        }
        Err(parser.emit_error(ParseErrorKind::ExpectedCommaSemicolonOrCloseBracketInArray))
    }
}
impl Parse for MatchBranch {
    /// Parses a single `match` arm: `<pattern> => <body>`.
    fn parse(parser: &mut Parser) -> ParseResult<MatchBranch> {
        // Struct-expression fields are evaluated in source order, so this
        // preserves the pattern -> arrow -> body parse order.
        Ok(MatchBranch {
            pattern: parser.parse()?,
            fat_right_arrow_token: parser.parse()?,
            kind: parser.parse()?,
        })
    }
}
impl Parse for MatchBranchKind {
    /// Parses a `match` arm body: either a block (with an optional trailing
    /// comma) or an expression followed by a comma.
    fn parse(parser: &mut Parser) -> ParseResult<MatchBranchKind> {
        match Braces::try_parse(parser)? {
            Some(block) => {
                let comma_token_opt = parser.take();
                Ok(MatchBranchKind::Block {
                    block,
                    comma_token_opt,
                })
            }
            None => {
                let expr = parser.parse()?;
                let comma_token = parser.parse()?;
                Ok(MatchBranchKind::Expr { expr, comma_token })
            }
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/ty/mod.rs | sway-parse/src/ty/mod.rs | use crate::{Parse, ParseBracket, ParseResult, ParseToEnd, Parser, ParserConsumed};
use sway_ast::brackets::{Parens, SquareBrackets};
use sway_ast::keywords::{DoubleColonToken, OpenAngleBracketToken, PtrToken, SliceToken, StrToken};
use sway_ast::ty::{Ty, TyArrayDescriptor, TyTupleDescriptor};
use sway_ast::{Expr, Literal};
use sway_error::parser_error::ParseErrorKind;
use sway_types::{ast::Delimiter, Ident};
impl Parse for Ty {
    /// Parses a type. The order of the attempts below defines the grammar:
    /// parenthesized/tuple types, array/slice brackets, `str`, `_`, `__ptr`,
    /// `__slice`, references, `!`, paths, and finally literal (const) types.
    fn parse(parser: &mut Parser) -> ParseResult<Ty> {
        // parse parens carefully, such that only patterns of (ty) are parsed as ty,
        // and patterns of (ty,) are parsed as one-arity tuples with one element ty
        if let Some((mut parser, span)) = parser.enter_delimited(Delimiter::Parenthesis) {
            if let Some(_consumed) = parser.check_empty() {
                return Ok(Ty::Tuple(Parens::new(TyTupleDescriptor::Nil, span)));
            }
            let head = parser.parse()?;
            if let Some(comma_token) = parser.take() {
                let (tail, _consumed) = parser.parse_to_end()?;
                let tuple = TyTupleDescriptor::Cons {
                    head,
                    comma_token,
                    tail,
                };
                return Ok(Ty::Tuple(Parens::new(tuple, span)));
            }
            if parser.check_empty().is_some() {
                return Ok(*head);
            }
            return Err(parser
                .emit_error(ParseErrorKind::ExpectedCommaOrCloseParenInTupleOrParenExpression));
        }
        if let Some((mut inner_parser, span)) = parser.enter_delimited(Delimiter::Bracket) {
            // array like [type; len]
            if let Ok((array, _)) = inner_parser.try_parse_to_end::<TyArrayDescriptor>(false) {
                return Ok(Ty::Array(SquareBrackets { inner: array, span }));
            }
            // slice like [type]
            if let Ok(Some((ty, _))) = inner_parser.try_parse_and_check_empty::<Ty>(false) {
                return Ok(Ty::Slice {
                    slice_token: None,
                    ty: SquareBrackets {
                        inner: Box::new(ty),
                        span,
                    },
                });
            }
        }
        // string array like str[1] or str[N]
        // or string slice like str
        if let Some(str_token) = parser.take::<StrToken>() {
            let length = SquareBrackets::try_parse_all_inner(parser, |mut parser| {
                parser.emit_error(ParseErrorKind::UnexpectedTokenAfterStrLength)
            })?;
            let t = match length {
                Some(length) => Ty::StringArray { str_token, length },
                None => Ty::StringSlice(str_token),
            };
            return Ok(t);
        }
        // `_`: a type to be inferred.
        if let Some(underscore_token) = parser.take() {
            return Ok(Ty::Infer { underscore_token });
        }
        // pointer like __ptr[type]
        if let Some(ptr_token) = parser.take::<PtrToken>() {
            let ty = SquareBrackets::parse_all_inner(parser, |mut parser| {
                parser.emit_error(ParseErrorKind::UnexpectedTokenAfterPtrType)
            })?;
            return Ok(Ty::Ptr { ptr_token, ty });
        }
        // slice like __slice[type]
        // TODO: deprecate this syntax (see https://github.com/FuelLabs/sway/issues/5110)
        if let Some(slice_token) = parser.take::<SliceToken>() {
            let ty = SquareBrackets::<Box<Ty>>::parse_all_inner(parser, |mut parser| {
                parser.emit_error(ParseErrorKind::UnexpectedTokenAfterSliceType)
            })?;
            return Ok(Ty::Slice {
                slice_token: Some(slice_token),
                ty,
            });
        }
        // reference like `&T` or `&mut T`
        if let Some(ampersand_token) = parser.take() {
            let mut_token = parser.take();
            let ty = Box::new(parser.parse()?);
            return Ok(Ty::Ref {
                ampersand_token,
                mut_token,
                ty,
            });
        }
        // `!`: the never type.
        if let Some(bang_token) = parser.take() {
            return Ok(Ty::Never { bang_token });
        }
        if parser.peek::<OpenAngleBracketToken>().is_some()
            || parser.peek::<DoubleColonToken>().is_some()
            || parser.peek::<Ident>().is_some()
        {
            let path_type = parser.parse()?;
            return Ok(Ty::Path(path_type));
        }
        // A literal used in type position (e.g. a const-generic argument).
        if let Ok(literal) = parser.parse::<Literal>() {
            return Ok(Ty::Expr(Box::new(Expr::Literal(literal))));
        }
        Err(parser.emit_error(ParseErrorKind::ExpectedType))
    }
}
impl ParseToEnd for TyArrayDescriptor {
    /// Parses the interior of an array type annotation, `Ty; Length`, and
    /// requires that nothing follows the length expression.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(TyArrayDescriptor, ParserConsumed<'a>)> {
        let ty = parser.parse()?;
        let semicolon_token = parser.parse()?;
        let length = parser.parse()?;
        // Anything left over after the length is an error.
        match parser.check_empty() {
            Some(consumed) => Ok((
                TyArrayDescriptor {
                    ty,
                    semicolon_token,
                    length,
                },
                consumed,
            )),
            None => Err(parser.emit_error(ParseErrorKind::UnexpectedTokenAfterArrayTypeLength)),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_utils::parse;
    use assert_matches::*;

    // `__ptr[T]` parses to `Ty::Ptr`.
    #[test]
    fn parse_ptr() {
        let item = parse::<Ty>(
            r#"
            __ptr[T]
            "#,
        );
        assert_matches!(item, Ty::Ptr { .. });
    }

    // `[T; 1]` parses to `Ty::Array`.
    #[test]
    fn parse_array() {
        let item = parse::<Ty>("[T; 1]");
        assert_matches!(item, Ty::Array { .. });
    }

    // Both slice spellings parse to `Ty::Slice`; `&[T]` is a `Ty::Ref`
    // wrapping a slice.
    #[test]
    fn parse_slice() {
        // deprecated syntax
        let item = parse::<Ty>("__slice[T]");
        assert_matches!(item, Ty::Slice { .. });
        // "new" syntax
        let item = parse::<Ty>("[T]");
        assert_matches!(item, Ty::Slice { .. });
        let item = parse::<Ty>("&[T]");
        assert_matches!(item, Ty::Ref { ty, .. } if matches!(&*ty, Ty::Slice { .. }));
    }

    // `&T` parses to `Ty::Ref` with no `mut` token.
    #[test]
    fn parse_ref() {
        let item = parse::<Ty>(
            r#"
            &T
            "#,
        );
        assert_matches!(
            item,
            Ty::Ref {
                mut_token: None,
                ..
            }
        );
    }

    // `&mut T` parses to `Ty::Ref` carrying the `mut` token.
    #[test]
    fn parse_mut_ref() {
        let item = parse::<Ty>(
            r#"
            &mut T
            "#,
        );
        assert_matches!(
            item,
            Ty::Ref {
                mut_token: Some(_),
                ..
            }
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_abi.rs | sway-parse/src/item/item_abi.rs | use crate::{Parse, ParseBracket, ParseResult, Parser};
use sway_ast::attribute::Annotated;
use sway_ast::{Braces, ItemAbi, ItemFn, ItemTraitItem};
impl Parse for ItemAbi {
    /// Parses an `abi` declaration: its name, an optional super-trait list,
    /// the braced interface surface, and an optional second braces block of
    /// provided method definitions.
    fn parse(parser: &mut Parser) -> ParseResult<ItemAbi> {
        let abi_token = parser.parse()?;
        let name = parser.parse()?;
        // Optional `: SuperTrait + ...` list.
        let super_traits = if let Some(colon_token) = parser.take() {
            Some((colon_token, parser.parse()?))
        } else {
            None
        };
        // Interface surface: the declared items (function signatures etc.).
        let abi_items: Braces<Vec<Annotated<ItemTraitItem>>> = parser.parse()?;
        for annotated in abi_items.get().iter() {
            #[allow(irrefutable_let_patterns)]
            if let ItemTraitItem::Fn(fn_signature, _) = &annotated.value {
                // Reject `pub` on ABI method signatures.
                parser.ban_visibility_qualifier(&fn_signature.visibility)?;
            }
        }
        // Optional block of provided (default) method definitions.
        let abi_defs_opt: Option<Braces<Vec<Annotated<ItemFn>>>> = Braces::try_parse(parser)?;
        for annotated_fn in abi_defs_opt.iter().flat_map(|defs| defs.get().iter()) {
            // Reject `pub` on provided method definitions as well.
            parser.ban_visibility_qualifier(&annotated_fn.value.fn_signature.visibility)?;
        }
        Ok(ItemAbi {
            abi_token,
            name,
            super_traits,
            abi_items,
            abi_defs_opt,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_storage.rs | sway-parse/src/item/item_storage.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::{
attribute::Annotated,
keywords::{ColonToken, InToken},
Braces, CommaToken, Expr, ItemStorage, Punctuated, StorageEntry, StorageField,
};
use sway_types::BaseIdent;
impl Parse for StorageEntry {
    /// Parses a single storage entry, which is either a concrete field
    /// (`name: Ty = init` or `name in key: Ty = init`) or a nested namespace
    /// (`name { ... }`).
    fn parse(parser: &mut Parser) -> ParseResult<StorageEntry> {
        let name: BaseIdent = parser.parse()?;
        // A following `:` or `in` marks a field; otherwise expect a
        // braced namespace of nested entries.
        let is_field = parser.peek::<ColonToken>().is_some() || parser.peek::<InToken>().is_some();
        if is_field {
            let mut storage_field: StorageField = parser.parse()?;
            // `StorageField::parse` leaves a dummy name; fill in the real one.
            storage_field.name = name.clone();
            Ok(StorageEntry {
                name,
                namespace: None,
                field: Some(storage_field),
            })
        } else {
            let namespace: Braces<Punctuated<Annotated<Box<StorageEntry>>, CommaToken>> =
                parser.parse()?;
            Ok(StorageEntry {
                name,
                namespace: Some(namespace),
                field: None,
            })
        }
    }
}
impl Parse for StorageField {
    /// Parses the tail of a storage field: `[in key] : Ty = initializer`.
    /// The field's name is parsed by `StorageEntry` and patched in there.
    fn parse(parser: &mut Parser) -> ParseResult<StorageField> {
        // Placeholder; overridden by the enclosing `StorageEntry` parser.
        let name = BaseIdent::dummy();
        let in_token: Option<InToken> = parser.take();
        // A key expression is only present after an `in` keyword.
        let key_expr: Option<Expr> = match &in_token {
            Some(_) => Some(parser.parse()?),
            None => None,
        };
        let colon_token = parser.parse()?;
        let ty = parser.parse()?;
        let eq_token = parser.parse()?;
        let initializer = parser.parse()?;
        Ok(StorageField {
            name,
            in_token,
            key_expr,
            colon_token,
            ty,
            eq_token,
            initializer,
        })
    }
}
impl Parse for ItemStorage {
    /// Parses a `storage { ... }` declaration.
    fn parse(parser: &mut Parser) -> ParseResult<ItemStorage> {
        // Struct-literal fields are evaluated in written order: the
        // `storage` keyword first, then the braced entries.
        Ok(ItemStorage {
            storage_token: parser.parse()?,
            entries: parser.parse()?,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_enum.rs | sway-parse/src/item/item_enum.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::keywords::{OpenAngleBracketToken, WhereToken};
use sway_ast::ItemEnum;
impl Parse for ItemEnum {
    /// Parses an enum declaration:
    /// `[pub] enum Name[<G>] [where ...] { variants }`.
    fn parse(parser: &mut Parser) -> ParseResult<ItemEnum> {
        let visibility = parser.take();
        let enum_token = parser.parse()?;
        let name = parser.parse()?;
        // Generics and the where clause are only attempted when their
        // leading token is present.
        let generics = parser.guarded_parse::<OpenAngleBracketToken, _>()?;
        let where_clause_opt = parser.guarded_parse::<WhereToken, _>()?;
        let fields = parser.parse()?;
        Ok(ItemEnum {
            visibility,
            enum_token,
            name,
            generics,
            where_clause_opt,
            fields,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_type_alias.rs | sway-parse/src/item/item_type_alias.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::ItemTypeAlias;
impl Parse for ItemTypeAlias {
    /// Parses a type alias: `[pub] type Name = Ty;`.
    fn parse(parser: &mut Parser) -> ParseResult<ItemTypeAlias> {
        // Struct-literal fields evaluate in written order, which here
        // matches the required parse order of the tokens.
        Ok(ItemTypeAlias {
            visibility: parser.take(),
            type_token: parser.parse()?,
            name: parser.parse()?,
            eq_token: parser.parse()?,
            ty: parser.parse()?,
            semicolon_token: parser.parse()?,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_const.rs | sway-parse/src/item/item_const.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::ItemConst;
impl Parse for ItemConst {
    /// Parses a constant declaration: `[pub] const NAME [: Ty] [= expr]`.
    fn parse(parser: &mut Parser) -> ParseResult<ItemConst> {
        let pub_token = parser.take();
        let const_token = parser.parse()?;
        let name = parser.parse()?;
        // Optional type ascription `: Ty`.
        let ty_opt = if let Some(colon_token) = parser.take() {
            Some((colon_token, parser.parse()?))
        } else {
            None
        };
        // Optional initializer `= expr`.
        let eq_token_opt = parser.take();
        let expr_opt = if eq_token_opt.is_some() {
            Some(parser.parse()?)
        } else {
            None
        };
        // Use the default here since the braces parsing is expecting
        // a semicolon, that allows us to re-use the same parsing code
        // between associated consts and module-level consts. Note that
        // `peek` does not consume the token.
        let semicolon_token = parser.peek().unwrap_or_default();
        Ok(ItemConst {
            pub_token,
            const_token,
            name,
            ty_opt,
            eq_token_opt,
            expr_opt,
            semicolon_token,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_configurable.rs | sway-parse/src/item/item_configurable.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::{ConfigurableField, ItemConfigurable};
impl Parse for ConfigurableField {
    /// Parses one configurable field: `name: Ty = initializer`.
    fn parse(parser: &mut Parser) -> ParseResult<ConfigurableField> {
        // Struct-literal fields evaluate in written order, matching the
        // token order in the source.
        Ok(ConfigurableField {
            name: parser.parse()?,
            colon_token: parser.parse()?,
            ty: parser.parse()?,
            eq_token: parser.parse()?,
            initializer: parser.parse()?,
        })
    }
}
impl Parse for ItemConfigurable {
    /// Parses a `configurable { ... }` declaration.
    fn parse(parser: &mut Parser) -> ParseResult<ItemConfigurable> {
        Ok(ItemConfigurable {
            configurable_token: parser.parse()?,
            fields: parser.parse()?,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_struct.rs | sway-parse/src/item/item_struct.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::keywords::{ClassToken, Keyword, OpenAngleBracketToken, StructToken, WhereToken};
use sway_ast::ItemStruct;
use sway_error::parser_error::ParseErrorKind;
use sway_types::Spanned;
impl Parse for ItemStruct {
    /// Parses a struct declaration, recovering when `class` was written in
    /// place of `struct`.
    fn parse(parser: &mut Parser) -> ParseResult<ItemStruct> {
        let visibility = parser.take();
        // Error recovery: accept `class`, report it, and continue parsing
        // as if `struct` had been written (reusing the keyword's span).
        let struct_token = match parser.take::<ClassToken>() {
            Some(class_token) => {
                parser.emit_error(ParseErrorKind::UnexpectedClass);
                StructToken::new(class_token.span())
            }
            None => parser.parse()?,
        };
        let name = parser.parse()?;
        let generics = parser.guarded_parse::<OpenAngleBracketToken, _>()?;
        let where_clause_opt = parser.guarded_parse::<WhereToken, _>()?;
        let fields = parser.parse()?;
        Ok(ItemStruct {
            visibility,
            struct_token,
            name,
            generics,
            where_clause_opt,
            fields,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/mod.rs | sway-parse/src/item/mod.rs | use crate::{Parse, ParseResult, ParseToEnd, Parser, ParserConsumed};
use sway_ast::keywords::{
AbiToken, ClassToken, ColonToken, ConfigurableToken, ConstToken, EnumToken, FnToken, ImplToken,
ModToken, MutToken, OpenAngleBracketToken, RefToken, SelfToken, SemicolonToken, StorageToken,
StructToken, TraitToken, TypeToken, UseToken, WhereToken,
};
use sway_ast::{
FnArg, FnArgs, FnSignature, ItemConst, ItemEnum, ItemFn, ItemKind, ItemStruct, ItemTrait,
ItemTypeAlias, ItemUse, Submodule, TraitType, TypeField,
};
use sway_error::parser_error::ParseErrorKind;
use sway_types::ast::Delimiter;
use sway_types::{Ident, Span, Spanned};
mod item_abi;
mod item_configurable;
mod item_const;
mod item_enum;
mod item_fn;
mod item_impl;
mod item_storage;
mod item_struct;
mod item_trait;
mod item_type_alias;
mod item_use;
impl Parse for ItemKind {
    /// Parses a single top-level item, dispatching on its leading keyword.
    ///
    /// An optional leading `pub` is taken up front; each branch whose item
    /// kind supports visibility moves it into the parsed item via
    /// `visibility.take()`. If the qualifier is still present after dispatch,
    /// it was attached to a kind that does not allow `pub` and is reported
    /// at the end (with recovery).
    fn parse(parser: &mut Parser) -> ParseResult<ItemKind> {
        // FIXME(Centril): Visibility should be moved out of `ItemKind` variants,
        // introducing a struct `Item` that holds the visibility and the kind,
        // and then validate in an "AST validation" step which kinds that should have `pub`s.
        let mut visibility = parser.take();
        let kind = if let Some(mut item) = parser.guarded_parse::<ModToken, Submodule>()? {
            item.visibility = visibility.take();
            ItemKind::Submodule(item)
        } else if let Some(mut item) = parser.guarded_parse::<UseToken, ItemUse>()? {
            item.visibility = visibility.take();
            ItemKind::Use(item)
        } else if let Some(mut item) = parser.guarded_parse::<ClassToken, ItemStruct>()? {
            // Error recovery: `class` is not a Sway keyword; the declaration
            // is parsed as a struct and `ItemStruct::parse` emits the error.
            item.visibility = visibility.take();
            ItemKind::Struct(item)
        } else if let Some(mut item) = parser.guarded_parse::<StructToken, ItemStruct>()? {
            item.visibility = visibility.take();
            ItemKind::Struct(item)
        } else if let Some(mut item) = parser.guarded_parse::<EnumToken, ItemEnum>()? {
            item.visibility = visibility.take();
            ItemKind::Enum(item)
        } else if let Some(mut item) = parser.guarded_parse::<FnToken, ItemFn>()? {
            item.fn_signature.visibility = visibility.take();
            ItemKind::Fn(item)
        } else if let Some(mut item) = parser.guarded_parse::<TraitToken, ItemTrait>()? {
            item.visibility = visibility.take();
            ItemKind::Trait(item)
        } else if let Some(item) = parser.guarded_parse::<ImplToken, _>()? {
            ItemKind::Impl(item)
        } else if let Some(item) = parser.guarded_parse::<AbiToken, _>()? {
            ItemKind::Abi(item)
        } else if let Some(mut item) = parser.guarded_parse::<ConstToken, ItemConst>()? {
            item.pub_token = visibility.take();
            // `ItemConst::parse` does not consume the trailing semicolon (the
            // same code also parses associated consts); require it here.
            parser.take::<SemicolonToken>().ok_or_else(|| {
                parser.emit_error(ParseErrorKind::ExpectedPunct {
                    kinds: vec![sway_types::ast::PunctKind::Semicolon],
                })
            })?;
            ItemKind::Const(item)
        } else if let Some(item) = parser.guarded_parse::<StorageToken, _>()? {
            ItemKind::Storage(item)
        } else if let Some(item) = parser.guarded_parse::<ConfigurableToken, _>()? {
            ItemKind::Configurable(item)
        } else if let Some(mut item) = parser.guarded_parse::<TypeToken, ItemTypeAlias>()? {
            item.visibility = visibility.take();
            ItemKind::TypeAlias(item)
        } else {
            return Err(parser.emit_error(ParseErrorKind::ExpectedAnItem));
        };
        // Ban visibility qualifiers that haven't been consumed, but do so with recovery.
        let _ = parser.ban_visibility_qualifier(&visibility);
        Ok(kind)
    }

    /// Error recovery hook: represent a failed item as `ItemKind::Error` so
    /// parsing can continue past it.
    fn error(
        spans: Box<[sway_types::Span]>,
        error: sway_error::handler::ErrorEmitted,
    ) -> Option<Self>
    where
        Self: Sized,
    {
        Some(ItemKind::Error(spans, error))
    }
}
impl Parse for TypeField {
    /// Parses a `name: Ty` field (e.g. a struct field or enum variant).
    ///
    /// When the `:` is missing, several malformed shapes are recognized in
    /// turn so that a targeted `MissingColonInEnumTypeField` diagnostic can
    /// be produced instead of a generic parse error.
    fn parse(parser: &mut Parser) -> ParseResult<TypeField> {
        let visibility = parser.take();
        let name: Ident = parser.parse()?;
        let name_span = name.span();
        // Check for the common, valid case `Variant: Type` first.
        if parser.peek::<ColonToken>().is_some() {
            let colon_token = parser.parse()?;
            let ty = parser.parse()?;
            return Ok(TypeField {
                visibility,
                name,
                colon_token,
                ty,
            });
        }
        // --- Colon was not found after name, proceed with error handling ---
        // Case 1: Check for invalid struct-like variant `Variant { ... }`
        if let Some((_inner_parser, brace_span)) = parser.enter_delimited(Delimiter::Brace) {
            let error_span = Span::join(name_span, &brace_span);
            return Err(parser.emit_error_with_span(
                ParseErrorKind::MissingColonInEnumTypeField {
                    variant_name: name,
                    tuple_contents: None, // Not a tuple issue
                },
                error_span,
            ));
        }
        // Case 2: Check for invalid tuple-like variant `Variant ( ... )` (missing colon)
        if let Some((inner_parser, paren_span)) = parser.enter_delimited(Delimiter::Parenthesis) {
            let tuple_contents_span = inner_parser.full_span().clone();
            let error_span = Span::join(name_span.clone(), &paren_span);
            return Err(parser.emit_error_with_span(
                ParseErrorKind::MissingColonInEnumTypeField {
                    variant_name: name,
                    tuple_contents: Some(tuple_contents_span),
                },
                error_span,
            ));
        }
        // Case 3: Check for unit-like variant `Variant,` or `Variant` (at end)
        if parser.is_empty() || parser.peek::<sway_ast::CommaToken>().is_some() {
            return Err(parser.emit_error_with_span(
                ParseErrorKind::MissingColonInEnumTypeField {
                    variant_name: name,
                    // A dummy span signals the unit-like case to the
                    // diagnostic rendering.
                    tuple_contents: Some(Span::dummy()), // Indicate unit-like
                },
                name_span,
            ));
        }
        // Case 4: Something else follows where a colon was expected
        Err(parser.emit_error_with_span(
            ParseErrorKind::MissingColonInEnumTypeField {
                variant_name: name,
                tuple_contents: None,
            },
            name_span,
        ))
    }
}
impl ParseToEnd for FnArgs {
    /// Parses a complete function-argument list (the contents of the parens).
    ///
    /// Distinguishes a method-style list — an optionally `ref mut`/`mut`
    /// qualified `self`, possibly followed by `, args...` — from a plain
    /// static argument list with no `self`.
    fn parse_to_end<'a, 'e>(
        mut parser: Parser<'a, '_>,
    ) -> ParseResult<(FnArgs, ParserConsumed<'a>)> {
        let mut ref_self: Option<RefToken> = None;
        let mut mutable_self: Option<MutToken> = None;
        // Only consume `mut` / `ref mut` when they are directly followed by
        // `self`; otherwise they belong to an ordinary argument pattern.
        if parser.peek::<(MutToken, SelfToken)>().is_some()
            || parser.peek::<(RefToken, MutToken, SelfToken)>().is_some()
        {
            ref_self = parser.take();
            mutable_self = parser.take();
        }
        match parser.take() {
            // `self` present: non-static (method) argument list.
            Some(self_token) => {
                match parser.take() {
                    // `self, arg: Ty, ...` — parse the remaining args.
                    Some(comma_token) => {
                        let (args, consumed) = parser.parse_to_end()?;
                        let fn_args = FnArgs::NonStatic {
                            self_token,
                            ref_self,
                            mutable_self,
                            args_opt: Some((comma_token, args)),
                        };
                        Ok((fn_args, consumed))
                    }
                    // Bare `self` — it must be the end of the list.
                    None => {
                        let fn_args = FnArgs::NonStatic {
                            self_token,
                            ref_self,
                            mutable_self,
                            args_opt: None,
                        };
                        match parser.check_empty() {
                            Some(consumed) => Ok((fn_args, consumed)),
                            None => Err(parser
                                .emit_error(ParseErrorKind::ExpectedCommaOrCloseParenInFnArgs)),
                        }
                    }
                }
            }
            // No `self`: plain static argument list.
            None => {
                let (args, consumed) = parser.parse_to_end()?;
                let fn_args = FnArgs::Static(args);
                Ok((fn_args, consumed))
            }
        }
    }
}
impl Parse for FnArg {
    /// Parses a single function argument: `pattern: Ty`.
    fn parse(parser: &mut Parser) -> ParseResult<FnArg> {
        let pattern = parser.parse()?;
        let colon_token = parser.parse()?;
        let ty = parser.parse()?;
        Ok(FnArg {
            pattern,
            colon_token,
            ty,
        })
    }
}
impl Parse for FnSignature {
    /// Parses a function signature:
    /// `[pub] fn name[<G>](args) [-> Ty] [where ...]`.
    fn parse(parser: &mut Parser) -> ParseResult<FnSignature> {
        let visibility = parser.take();
        let fn_token = parser.parse()?;
        let name = parser.parse()?;
        let generics = parser.guarded_parse::<OpenAngleBracketToken, _>()?;
        let arguments = parser.parse()?;
        // Optional `-> Ty` return type annotation.
        let return_type_opt = if let Some(right_arrow_token) = parser.take() {
            Some((right_arrow_token, parser.parse()?))
        } else {
            None
        };
        let where_clause_opt = parser.guarded_parse::<WhereToken, _>()?;
        Ok(FnSignature {
            visibility,
            fn_token,
            name,
            generics,
            arguments,
            return_type_opt,
            where_clause_opt,
        })
    }
}
impl Parse for TraitType {
    /// Parses an associated type declaration: `type Name [= Ty]`.
    fn parse(parser: &mut Parser) -> ParseResult<TraitType> {
        let type_token = parser.parse()?;
        let name = parser.parse()?;
        // Optional `= Ty` initializer.
        let eq_token_opt = parser.take();
        let ty_opt = if eq_token_opt.is_some() {
            Some(parser.parse()?)
        } else {
            None
        };
        // As with `ItemConst`: peek (without consuming) the semicolon and
        // fall back to a default one, so the shared brace-parsing code that
        // expects a semicolon can be reused.
        let semicolon_token = parser.peek().unwrap_or_default();
        Ok(TraitType {
            type_token,
            name,
            eq_token_opt,
            ty_opt,
            semicolon_token,
        })
    }
}
// -------------------------------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_utils::parse;
    use sway_ast::{AttributeDecl, Item, ItemTraitItem, Statement};

    // Attribute name and its list of parameters
    type ParameterizedAttr<'a> = (&'a str, Option<Vec<&'a str>>);

    // Flattens parsed attribute declarations into `(name, Some(arg names))`
    // pairs — one inner `Vec` per `#[...]` declaration — for easy comparison
    // in the assertions below. Doc comments surface as the built-in
    // `doc-comment` attribute.
    fn attributes(attributes: &[AttributeDecl]) -> Vec<Vec<ParameterizedAttr>> {
        attributes
            .iter()
            .map(|attr_decl| {
                attr_decl
                    .attribute
                    .get()
                    .into_iter()
                    .map(|att| {
                        (
                            att.name.as_str(),
                            att.args.as_ref().map(|arg| {
                                arg.get().into_iter().map(|a| a.name.as_str()).collect()
                            }),
                        )
                    })
                    .collect()
            })
            .collect()
    }

    // Ordinary `//` comments are dropped; `///` and (misplaced) `//!` doc
    // comments are collected as `doc-comment` attributes in source order.
    #[test]
    fn parse_doc_comment() {
        let item = parse::<Item>(
            r#"
            // I will be ignored.
            //! This is a misplaced inner doc comment.
            /// This is an outer doc comment.
            //! This is a misplaced inner doc comment.
            // I will be ignored.
            /// This is an outer doc comment.
            // I will be ignored.
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![
                [(
                    "doc-comment",
                    Some(vec![" This is a misplaced inner doc comment."])
                )],
                [("doc-comment", Some(vec![" This is an outer doc comment."]))],
                [(
                    "doc-comment",
                    Some(vec![" This is a misplaced inner doc comment."])
                )],
                [("doc-comment", Some(vec![" This is an outer doc comment."]))],
            ]
        );
    }

    // Doc comments attach both to the struct itself and to its fields.
    #[test]
    fn parse_doc_comment_struct() {
        let item = parse::<Item>(
            r#"
            // I will be ignored.
            //! This is a misplaced inner doc comment.
            /// This is an outer doc comment.
            //! This is a misplaced inner doc comment.
            // I will be ignored.
            /// This is an outer doc comment.
            // I will be ignored.
            struct MyStruct {
                // I will be ignored.
                //! This is a misplaced inner doc comment.
                /// This is an outer doc comment.
                //! This is a misplaced inner doc comment.
                // I will be ignored.
                /// This is an outer doc comment.
                // I will be ignored.
                a: bool,
            }
            "#,
        );
        /* struct annotations */
        assert!(matches!(item.value, ItemKind::Struct(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![
                [(
                    "doc-comment",
                    Some(vec![" This is a misplaced inner doc comment."])
                )],
                [("doc-comment", Some(vec![" This is an outer doc comment."]))],
                [(
                    "doc-comment",
                    Some(vec![" This is a misplaced inner doc comment."])
                )],
                [("doc-comment", Some(vec![" This is an outer doc comment."]))],
            ]
        );
        /* struct field annotations */
        let item = match item.value {
            ItemKind::Struct(item) => item.fields.inner.into_iter().next().unwrap(),
            _ => unreachable!(),
        };
        assert_eq!(
            attributes(&item.attributes),
            vec![
                [(
                    "doc-comment",
                    Some(vec![" This is a misplaced inner doc comment."])
                )],
                [("doc-comment", Some(vec![" This is an outer doc comment."]))],
                [(
                    "doc-comment",
                    Some(vec![" This is a misplaced inner doc comment."])
                )],
                [("doc-comment", Some(vec![" This is an outer doc comment."]))],
            ]
        );
    }

    #[test]
    fn parse_attributes_none() {
        let item = parse::<Item>(
            r#"
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert!(item.attributes.is_empty());
    }

    #[test]
    fn parse_attributes_fn_basic() {
        let item = parse::<Item>(
            r#"
            #[foo]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(attributes(&item.attributes), vec![[("foo", None)]]);
    }

    // Only the argument *names* are collected; assigned values are ignored
    // by the `attributes` helper.
    #[test]
    fn parse_attributes_fn_one_arg_value() {
        let item = parse::<Item>(
            r#"
            #[cfg(target = "evm")]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("cfg", Some(vec!["target"]))]]
        );
    }

    #[test]
    fn parse_attributes_fn_two_arg_values() {
        let item = parse::<Item>(
            r#"
            #[cfg(target = "evm", feature = "test")]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("cfg", Some(vec!["target", "feature"]))]]
        );
    }

    #[test]
    fn parse_attributes_fn_two_basic() {
        let item = parse::<Item>(
            r#"
            #[foo]
            #[bar]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("foo", None)], [("bar", None)]]
        );
    }

    #[test]
    fn parse_attributes_fn_one_arg() {
        let item = parse::<Item>(
            r#"
            #[foo(one)]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("foo", Some(vec!["one"]))]]
        );
    }

    // `#[foo()]` yields `Some(vec![])` — present parens, no args — which is
    // distinct from `#[foo]`'s `None`.
    #[test]
    fn parse_attributes_fn_empty_parens() {
        let item = parse::<Item>(
            r#"
            #[foo()]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(attributes(&item.attributes), vec![[("foo", Some(vec![]))]]);
    }

    #[test]
    fn parse_attributes_fn_zero_and_one_arg() {
        let item = parse::<Item>(
            r#"
            #[bar]
            #[foo(one)]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("bar", None)], [("foo", Some(vec!["one"]))]]
        );
    }

    #[test]
    fn parse_attributes_fn_one_and_zero_arg() {
        let item = parse::<Item>(
            r#"
            #[foo(one)]
            #[bar]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("foo", Some(vec!["one"]))], [("bar", None)]]
        );
    }

    #[test]
    fn parse_attributes_fn_two_args() {
        let item = parse::<Item>(
            r#"
            #[foo(one, two)]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[("foo", Some(vec!["one", "two"]))]]
        );
    }

    #[test]
    fn parse_attributes_fn_zero_one_and_three_args() {
        let item = parse::<Item>(
            r#"
            #[bar]
            #[foo(one)]
            #[baz(two,three,four)]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![
                [("bar", None)],
                [("foo", Some(vec!["one"]))],
                [("baz", Some(vec!["two", "three", "four"]))]
            ]
        );
    }

    // Several attributes in a single `#[...]` declaration land in one inner
    // `Vec`, as opposed to one `Vec` per declaration above.
    #[test]
    fn parse_attributes_fn_zero_one_and_three_args_in_one_attribute_decl() {
        let item = parse::<Item>(
            r#"
            #[bar, foo(one), baz(two,three,four)]
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![[
                ("bar", None),
                ("foo", Some(vec!["one"])),
                ("baz", Some(vec!["two", "three", "four"]))
            ]]
        );
    }

    // Attributes on trait interface items and on provided method
    // definitions are preserved; the trait itself has none here.
    #[test]
    fn parse_attributes_trait() {
        let item = parse::<Item>(
            r#"
            trait T {
                #[foo(one)]
                #[bar]
                fn f() -> bool;
            } {
                #[bar(one, two, three)]
                fn g() -> bool {
                    f()
                }
            }
            "#,
        );
        // The trait itself has no attributes.
        assert!(matches!(item.value, ItemKind::Trait(_)));
        assert_eq!(item.attributes.len(), 0);
        if let ItemKind::Trait(item_trait) = item.value {
            let mut decls = item_trait.trait_items.get().iter();
            let trait_item = decls.next();
            assert!(trait_item.is_some());
            let annotated = trait_item.unwrap();
            if let ItemTraitItem::Fn(_fn_sig, _) = &annotated.value {
                assert_eq!(
                    attributes(&annotated.attributes),
                    vec![[("foo", Some(vec!["one"]))], [("bar", None)]]
                );
            }
            assert!(decls.next().is_none());
            assert!(item_trait.trait_defs_opt.is_some());
            let mut defs = item_trait.trait_defs_opt.as_ref().unwrap().get().iter();
            let g_sig = defs.next();
            assert!(g_sig.is_some());
            assert_eq!(
                attributes(&g_sig.unwrap().attributes),
                vec![[("bar", Some(vec!["one", "two", "three"]))],]
            );
            assert!(defs.next().is_none());
        } else {
            panic!("Parsed trait is not a trait.");
        }
    }

    // Same as above, but for ABI declarations and their provided methods.
    #[test]
    fn parse_attributes_abi() {
        let item = parse::<Item>(
            r#"
            abi A {
                #[bar(one, two, three)]
                fn f() -> bool;
                #[foo]
                fn g() -> u64;
            } {
                #[baz(one)]
                fn h() -> bool {
                    f()
                }
            }
            "#,
        );
        // The ABI itself has no attributes.
        assert!(matches!(item.value, ItemKind::Abi(_)));
        assert_eq!(item.attributes.len(), 0);
        if let ItemKind::Abi(item_abi) = item.value {
            let mut decls = item_abi.abi_items.get().iter();
            let f_sig = decls.next();
            assert!(f_sig.is_some());
            assert_eq!(
                attributes(&f_sig.unwrap().attributes),
                vec![[("bar", Some(vec!["one", "two", "three"]))],]
            );
            let g_sig = decls.next();
            assert!(g_sig.is_some());
            assert_eq!(
                attributes(&g_sig.unwrap().attributes),
                vec![[("foo", None)],]
            );
            assert!(decls.next().is_none());
            assert!(item_abi.abi_defs_opt.is_some());
            let mut defs = item_abi.abi_defs_opt.as_ref().unwrap().get().iter();
            let h_sig = defs.next();
            assert!(h_sig.is_some());
            assert_eq!(
                attributes(&h_sig.unwrap().attributes),
                vec![[("baz", Some(vec!["one"]))],]
            );
            assert!(defs.next().is_none());
        } else {
            panic!("Parsed ABI is not an ABI.");
        }
    }

    // Each `///` line becomes its own `doc-comment` attribute, leading
    // space preserved.
    #[test]
    fn parse_attributes_doc_comment() {
        let item = parse::<Item>(
            r#"
            /// This is a doc comment.
            /// This is another doc comment.
            fn f() -> bool {
                false
            }
            "#,
        );
        assert!(matches!(item.value, ItemKind::Fn(_)));
        assert_eq!(
            attributes(&item.attributes),
            vec![
                [("doc-comment", Some(vec![" This is a doc comment."]))],
                [("doc-comment", Some(vec![" This is another doc comment."]))]
            ]
        );
    }

    // Annotations on items nested inside a function body (and on fields of
    // a nested struct) are collected in source order.
    #[test]
    fn parse_nested_annotations() {
        let item = parse::<ItemFn>(
            r#"
            fn fun() {
                /// Struct Comment.
                #[allow(dead_code)]
                struct S {
                    /// Field Comment.
                    #[allow(dead_code)]
                    field: u8,
                }
                /// Const Comment.
                #[allow(dead_code)]
                const CONST: u8 = 0;
            }
            "#,
        );
        let item_attributes =
            item.body
                .inner
                .statements
                .iter()
                .fold(vec![], |mut acc, statement| match statement {
                    Statement::Item(item) => {
                        acc.push(attributes(&item.attributes));
                        if let ItemKind::Struct(item_struct) = &item.value {
                            let mut struct_attributes = item_struct
                                .fields
                                .inner
                                .value_separator_pairs
                                .iter()
                                .map(|(field, _)| attributes(&field.attributes))
                                .collect::<Vec<_>>();
                            acc.append(&mut struct_attributes);
                        }
                        acc
                    }
                    _ => acc,
                });
        assert_eq!(
            item_attributes,
            vec![
                vec![
                    [("doc-comment", Some(vec![" Struct Comment."]))],
                    [("allow", Some(vec!["dead_code"]))],
                ],
                vec![
                    [("doc-comment", Some(vec![" Field Comment."]))],
                    [("allow", Some(vec!["dead_code"]))],
                ],
                vec![
                    [("doc-comment", Some(vec![" Const Comment."]))],
                    [("allow", Some(vec!["dead_code"]))],
                ]
            ],
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_trait.rs | sway-parse/src/item/item_trait.rs | use crate::{Parse, ParseBracket, ParseResult, Parser};
use sway_ast::attribute::Annotated;
use sway_ast::keywords::{ConstToken, FnToken, OpenAngleBracketToken, TypeToken, WhereToken};
use sway_ast::{Braces, ItemFn, ItemTrait, ItemTraitItem, PubToken, Traits};
use sway_error::parser_error::ParseErrorKind;
impl Parse for ItemTraitItem {
    /// Parses one item of a trait's interface surface: a function signature,
    /// an associated const, or an associated type. The trailing semicolon is
    /// parsed with `.ok()`, i.e. its absence is tolerated at this level.
    fn parse(parser: &mut Parser) -> ParseResult<ItemTraitItem> {
        if parser.peek::<PubToken>().is_some() || parser.peek::<FnToken>().is_some() {
            let fn_signature = parser.parse()?;
            let semicolon_opt = parser.parse().ok();
            Ok(ItemTraitItem::Fn(fn_signature, semicolon_opt))
        } else if parser.peek::<ConstToken>().is_some() {
            let const_decl = parser.parse()?;
            let semicolon_opt = parser.parse().ok();
            Ok(ItemTraitItem::Const(const_decl, semicolon_opt))
        } else if parser.peek::<TypeToken>().is_some() {
            let type_decl = parser.parse()?;
            let semicolon_opt = parser.parse().ok();
            Ok(ItemTraitItem::Type(type_decl, semicolon_opt))
        } else {
            Err(parser.emit_error(ParseErrorKind::ExpectedAnItem))
        }
    }

    /// Error recovery hook: represent a failed trait item as the dedicated
    /// `Error` variant so parsing can continue.
    fn error(
        spans: Box<[sway_types::Span]>,
        error: sway_error::handler::ErrorEmitted,
    ) -> Option<Self>
    where
        Self: Sized,
    {
        Some(ItemTraitItem::Error(spans, error))
    }
}
impl Parse for ItemTrait {
    /// Parses a trait declaration: name, optional generics, optional
    /// super-trait list, optional where clause, the braced interface
    /// surface, and an optional second braces block of provided methods.
    fn parse(parser: &mut Parser) -> ParseResult<ItemTrait> {
        let visibility = parser.take();
        let trait_token = parser.parse()?;
        let name = parser.parse()?;
        let generics = parser.guarded_parse::<OpenAngleBracketToken, _>()?;
        // Optional `: SuperTrait + ...` list.
        let super_traits = if let Some(colon_token) = parser.take() {
            Some((colon_token, parser.parse()?))
        } else {
            None
        };
        let where_clause_opt = parser.guarded_parse::<WhereToken, _>()?;
        // Interface surface: declared items.
        let trait_items: Braces<Vec<Annotated<ItemTraitItem>>> = parser.parse()?;
        for annotated in trait_items.get().iter() {
            if let ItemTraitItem::Fn(fn_sig, _) = &annotated.value {
                // Reject `pub` on trait method signatures.
                parser.ban_visibility_qualifier(&fn_sig.visibility)?;
            }
        }
        // Optional block of provided (default) method definitions.
        let trait_defs_opt: Option<Braces<Vec<Annotated<ItemFn>>>> = Braces::try_parse(parser)?;
        for annotated_fn in trait_defs_opt.iter().flat_map(|defs| defs.get().iter()) {
            // Reject `pub` on provided method definitions as well.
            parser.ban_visibility_qualifier(&annotated_fn.value.fn_signature.visibility)?;
        }
        Ok(ItemTrait {
            visibility,
            trait_token,
            name,
            generics,
            where_clause_opt,
            super_traits,
            trait_items,
            trait_defs_opt,
        })
    }
}
impl Parse for Traits {
    /// Parses a `+`-separated trait list: `Trait1 + Trait2 + ...`.
    fn parse(parser: &mut Parser) -> ParseResult<Traits> {
        let prefix = parser.parse()?;
        let mut suffixes = Vec::new();
        // Keep consuming `+ Trait` pairs for as long as a `+` is present.
        loop {
            match parser.take() {
                Some(add_token) => suffixes.push((add_token, parser.parse()?)),
                None => break,
            }
        }
        Ok(Traits { prefix, suffixes })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_use.rs | sway-parse/src/item/item_use.rs | use crate::{Parse, ParseBracket, ParseResult, Parser};
use sway_ast::{Braces, ItemUse, UseTree};
use sway_error::parser_error::ParseErrorKind;
use sway_types::Spanned;
impl Parse for UseTree {
    /// Parses one node of a `use` tree: a `{...}` group, a `*` glob, a
    /// renamed name (`name as alias`), a path segment (`prefix::suffix`),
    /// or a plain name.
    fn parse(parser: &mut Parser) -> ParseResult<UseTree> {
        if let Some(imports) = Braces::try_parse(parser)? {
            return Ok(UseTree::Group { imports });
        }
        if let Some(star_token) = parser.take() {
            return Ok(UseTree::Glob { star_token });
        }
        let name = parser
            .take()
            .ok_or_else(|| parser.emit_error(ParseErrorKind::ExpectedImportNameGroupOrGlob))?;
        if let Some(as_token) = parser.take() {
            let alias = parser.parse()?;
            return Ok(UseTree::Rename {
                name,
                as_token,
                alias,
            });
        }
        if let Some(double_colon_token) = parser.take() {
            // Recurse to parse the rest of the path after `::`.
            if let Ok(suffix) = parser.parse() {
                return Ok(UseTree::Path {
                    prefix: name,
                    double_colon_token,
                    suffix,
                });
            } else {
                // parser recovery for foo::
                return Ok(UseTree::Error {
                    spans: Box::new([name.span(), double_colon_token.span()]),
                });
            }
        }
        Ok(UseTree::Name { name })
    }
}
impl Parse for ItemUse {
    /// Parses a `use` declaration: `use [::] tree;`.
    fn parse(parser: &mut Parser) -> ParseResult<ItemUse> {
        // Struct-literal fields evaluate in written order, matching the
        // required token order.
        Ok(ItemUse {
            // Visibility is attached later (by the `ItemKind` parser).
            visibility: None,
            use_token: parser.parse()?,
            // Optional leading `::` marking an absolute import.
            root_import: parser.take(),
            tree: parser.parse()?,
            semicolon_token: parser.parse()?,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-parse/src/item/item_impl.rs | sway-parse/src/item/item_impl.rs | use crate::{Parse, ParseResult, Parser};
use sway_ast::attribute::Annotated;
use sway_ast::keywords::{
ConstToken, FnToken, OpenAngleBracketToken, SemicolonToken, TypeToken, WhereToken,
};
use sway_ast::{Braces, GenericParams, ItemImpl, ItemImplItem, PubToken, Ty};
use sway_error::parser_error::ParseErrorKind;
impl Parse for ItemImplItem {
    /// Parses one item inside an `impl` block: a function, an associated
    /// const, or an associated type. A leading `pub` before `fn`/`const` is
    /// accepted for dispatch; whether it is actually legal is decided by the
    /// enclosing `ItemImpl` parser.
    fn parse(parser: &mut Parser) -> ParseResult<ItemImplItem> {
        if parser.peek::<FnToken>().is_some()
            || (parser.peek::<PubToken>().is_some() && parser.peek_next::<FnToken>().is_some())
        {
            let fn_decl = parser.parse()?;
            Ok(ItemImplItem::Fn(fn_decl))
        } else if parser.peek::<ConstToken>().is_some()
            || (parser.peek::<PubToken>().is_some() && parser.peek_next::<ConstToken>().is_some())
        {
            // Associated consts require a terminating semicolon here.
            let const_decl = parser.parse()?;
            parser.parse::<SemicolonToken>()?;
            Ok(ItemImplItem::Const(const_decl))
        } else if let Some(_type_keyword) = parser.peek::<TypeToken>() {
            // Associated types also require a terminating semicolon.
            let type_decl = parser.parse()?;
            parser.parse::<SemicolonToken>()?;
            Ok(ItemImplItem::Type(type_decl))
        } else {
            Err(parser.emit_error(ParseErrorKind::ExpectedAnItem))
        }
    }
}
impl Parse for ItemImpl {
    /// Parses `impl <generics?> <ty> (for <self_ty>)? <where?> { <items> }`.
    fn parse(parser: &mut Parser) -> ParseResult<ItemImpl> {
        let impl_token = parser.parse()?;
        let generic_params_opt = parser.guarded_parse::<OpenAngleBracketToken, GenericParams>()?;
        let ty = parser.parse::<Ty>()?;
        // If a `for` token follows, the type parsed above was actually the
        // trait path and the real self type comes after `for`. A trait
        // position must be a path type.
        let (trait_opt, ty) = match parser.take() {
            Some(for_token) => match ty {
                Ty::Path(path_type) => (Some((path_type, for_token)), parser.parse()?),
                _ => {
                    return Err(parser.emit_error(ParseErrorKind::ExpectedPathType));
                }
            },
            None => (None, ty),
        };
        let where_clause_opt = parser.guarded_parse::<WhereToken, _>()?;
        let contents: Braces<Vec<Annotated<ItemImplItem>>> = parser.parse()?;
        // For trait impls, a visibility qualifier (`pub`) on a function is
        // rejected via `ban_visibility_qualifier`.
        if trait_opt.is_some() {
            for annotated in contents.get().iter() {
                if let ItemImplItem::Fn(item_fn) = &annotated.value {
                    parser.ban_visibility_qualifier(&item_fn.fn_signature.visibility)?;
                }
            }
        }
        Ok(ItemImpl {
            impl_token,
            generic_params_opt,
            trait_opt,
            ty,
            where_clause_opt,
            contents,
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_utils::parse;
    use assert_matches::*;

    // Round-trip tests asserting which `Ty` variant the parser produces for
    // the self-type of inherent (`impl T`) and trait (`impl Foo for T`) impls,
    // covering pointers, slices (old and new syntax) and (mutable) references.

    #[test]
    fn parse_impl_ptr() {
        let item = parse::<ItemImpl>(
            r#"
impl __ptr[T] {}
"#,
        );
        assert_matches!(item.ty, Ty::Ptr { .. });
    }

    #[test]
    fn parse_impl_for_ptr() {
        let item = parse::<ItemImpl>(
            r#"
impl Foo for __ptr[T] {}
"#,
        );
        assert_matches!(item.ty, Ty::Ptr { .. });
    }

    #[test]
    fn parse_impl_slice() {
        // deprecated syntax
        let item = parse::<ItemImpl>("impl __slice[T] {}");
        assert_matches!(
            item.ty,
            Ty::Slice {
                slice_token: Some(..),
                ty: _
            }
        );
        // "new" syntax
        let item = parse::<ItemImpl>("impl [T] {}");
        assert_matches!(
            item.ty,
            Ty::Slice {
                slice_token: None,
                ty: _
            }
        );
        let item = parse::<ItemImpl>("impl &[T] {}");
        assert_matches!(item.ty, Ty::Ref { ty, .. } if matches!(&*ty, Ty::Slice { .. }));
    }

    #[test]
    fn parse_impl_for_slice() {
        let item = parse::<ItemImpl>(
            r#"
impl Foo for __slice[T] {}
"#,
        );
        assert_matches!(item.ty, Ty::Slice { .. });
    }

    #[test]
    fn parse_impl_ref() {
        let item = parse::<ItemImpl>(
            r#"
impl &T {}
"#,
        );
        assert_matches!(
            item.ty,
            Ty::Ref {
                mut_token: None,
                ..
            }
        );
    }

    #[test]
    fn parse_impl_for_ref() {
        let item = parse::<ItemImpl>(
            r#"
impl Foo for &T {}
"#,
        );
        assert_matches!(
            item.ty,
            Ty::Ref {
                mut_token: None,
                ..
            }
        );
    }

    #[test]
    fn parse_impl_mut_ref() {
        let item = parse::<ItemImpl>(
            r#"
impl &mut T {}
"#,
        );
        assert_matches!(
            item.ty,
            Ty::Ref {
                mut_token: Some(_),
                ..
            }
        );
    }

    #[test]
    fn parse_impl_for_mut_ref() {
        let item = parse::<ItemImpl>(
            r#"
impl Foo for &mut T {}
"#,
        );
        assert_matches!(
            item.ty,
            Ty::Ref {
                mut_token: Some(_),
                ..
            }
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
use crate::{Parse, ParseResult, Parser};
use sway_ast::ItemFn;
impl Parse for ItemFn {
    /// An `ItemFn` is a function signature immediately followed by its body.
    fn parse(parser: &mut Parser) -> ParseResult<ItemFn> {
        // Struct literal fields are evaluated in source order, so the
        // signature is parsed before the body, as before.
        Ok(ItemFn {
            fn_signature: parser.parse()?,
            body: parser.parse()?,
        })
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
use insta::*;
use crate::common::parse_file;
mod common;
#[test]
fn noop_script_file() {
    // Parses a minimal no-op script and pins the entire resulting AST —
    // including every computed span — as an inline RON snapshot. Any change to
    // the parser's output shape or span computation surfaces here as a
    // snapshot diff (review with `cargo insta review`). The snapshot text
    // below must not be edited by hand.
    assert_ron_snapshot!(parse_file(r#"
script;
fn main() {
()
}
"#,), @r#"
Some(Annotated(
attributes: [],
value: Module(
kind: Script(
script_token: ScriptToken(
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 7,
end: 13,
source_id: None,
),
),
),
semicolon_token: SemicolonToken(
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 13,
end: 14,
source_id: None,
),
),
items: [
Annotated(
attributes: [],
value: Fn(ItemFn(
fn_signature: FnSignature(
visibility: None,
fn_token: FnToken(
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 28,
end: 30,
source_id: None,
),
),
name: BaseIdent(
name_override_opt: None,
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 31,
end: 35,
source_id: None,
),
is_raw_ident: false,
),
generics: None,
arguments: Parens(
inner: Static(Punctuated(
value_separator_pairs: [],
final_value_opt: None,
)),
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 35,
end: 37,
source_id: None,
),
),
return_type_opt: None,
where_clause_opt: None,
),
body: Braces(
inner: CodeBlockContents(
statements: [],
final_expr_opt: Some(Tuple(Parens(
inner: Nil,
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 48,
end: 50,
source_id: None,
),
))),
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 39,
end: 57,
source_id: None,
),
),
span: Span(
src: "\n script;\n \n fn main() {\n ()\n }\n ",
start: 38,
end: 58,
source_id: None,
),
),
)),
),
],
),
))
"#);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
use sway_ast::{attribute::Annotated, Module};
use sway_features::ExperimentalFeatures;
use sway_parse::parse_file as sway_parse_parse_file;
/// Parse `src` as a complete Sway source file, returning `None` on any parse
/// error.
///
/// Diagnostics are collected into a throw-away handler and discarded; this
/// helper is only for tests that care about the successfully parsed tree.
pub fn parse_file(src: &str) -> Option<Annotated<Module>> {
    let handler = <_>::default();
    // No on-disk path is associated with the test source.
    sway_parse_parse_file(&handler, src.into(), None, ExperimentalFeatures::default()).ok()
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
//! Building, locking, fetching and updating sway projects as Forc packages.
//!
//! A forc package represents a Sway project with a `Forc.toml` manifest file declared at its root.
//! The project should consist of one or more Sway modules under a `src` directory. It may also
//! declare a set of forc package dependencies within its manifest.
pub mod lock;
pub mod manifest;
mod pkg;
pub mod source;
pub use lock::Lock;
pub use manifest::{
build_profile::BuildProfile, PackageManifest, PackageManifestFile, WorkspaceManifest,
WorkspaceManifestFile,
};
#[doc(inline)]
pub use pkg::*;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
use crate::{pkg, source, DepKind, Edge};
use anyhow::{anyhow, Result};
use forc_tracing::{println_action_green, println_action_red};
use petgraph::{visit::EdgeRef, Direction};
use serde::{Deserialize, Serialize};
use std::{
borrow::Cow,
collections::{BTreeSet, HashMap, HashSet},
fs,
path::Path,
str::FromStr,
};
use sway_core::fuel_prelude::fuel_tx;
/// The graph of pinned packages represented as a toml-serialization-friendly structure.
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct Lock {
    // Named `package` so that each entry serializes to lock file under `[[package]]` like cargo.
    pub(crate) package: BTreeSet<PkgLock>,
}

/// Packages that have been removed and added between two `Lock` instances.
///
/// The result of `new_lock.diff(&old_lock)`.
pub struct Diff<'a> {
    /// Entries present in the old lock but not in the new one.
    pub removed: BTreeSet<&'a PkgLock>,
    /// Entries present in the new lock but not in the old one.
    pub added: BTreeSet<&'a PkgLock>,
}

/// A single `[[package]]` entry within the `Forc.lock` file.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct PkgLock {
    pub(crate) name: String,
    // TODO: Cargo *always* includes version, whereas we don't even parse it when reading a
    // project's `Manifest` yet. If we decide to enforce versions, we'll want to remove the
    // `Option`.
    version: Option<semver::Version>,
    // Short-hand string describing where this package is sourced from.
    source: String,
    // Library dependency lines; `None` when the package has none (see `from_node`).
    dependencies: Option<Vec<PkgDepLine>>,
    // Contract dependency lines, same format; `None` when the package has none.
    contract_dependencies: Option<Vec<PkgDepLine>>,
}

/// `PkgDepLine` is a terse, single-line, git-diff-friendly description of a package's
/// dependency. It is formatted like so:
///
/// ```ignore
/// (<dep_name>) <pkg_name> <source_string> (<salt>)
/// ```
///
/// The `(<dep_name>)` segment is only included in the uncommon case that the dependency name does
/// not match the package name, i.e. if the `package` field was specified for the dependency.
///
/// The source string is included in order to be able to uniquely distinguish between multiple
/// different versions of the same package.
pub type PkgDepLine = String;
impl PkgLock {
    /// Construct a package lock given a package's entry in the package graph.
    pub fn from_node(graph: &pkg::Graph, node: pkg::NodeIx, disambiguate: &HashSet<&str>) -> Self {
        let pinned = &graph[node];
        let name = pinned.name.clone();
        let version = pinned.source.semver();
        let source = pinned.source.to_string();
        // Collection of all dependencies, so this includes both contract-dependencies and
        // lib-dependencies
        let all_dependencies: Vec<(String, DepKind)> = graph
            .edges_directed(node, Direction::Outgoing)
            .map(|edge| {
                let dep_edge = edge.weight();
                let dep_node = edge.target();
                let dep_pkg = &graph[dep_node];
                // Record the dependency name only when it differs from the
                // package name, i.e. when the `package` field was specified.
                let dep_name = if *dep_edge.name != dep_pkg.name {
                    Some(&dep_edge.name[..])
                } else {
                    None
                };
                let dep_kind = &dep_edge.kind;
                let disambiguate = disambiguate.contains(&dep_pkg.name[..]);
                (
                    pkg_dep_line(
                        dep_name,
                        &dep_pkg.name,
                        &dep_pkg.source,
                        dep_kind,
                        disambiguate,
                    ),
                    dep_kind.clone(),
                )
            })
            .collect();
        // Split the combined list back into library and contract dependency lines.
        let mut dependencies: Vec<String> = all_dependencies
            .iter()
            .filter_map(|(dep_pkg, dep_kind)| {
                (*dep_kind == DepKind::Library).then_some(dep_pkg.clone())
            })
            .collect();
        let mut contract_dependencies: Vec<String> = all_dependencies
            .iter()
            .filter_map(|(dep_pkg, dep_kind)| {
                matches!(*dep_kind, DepKind::Contract { .. }).then_some(dep_pkg.clone())
            })
            .collect();
        // Sort for deterministic, diff-friendly lock file output.
        dependencies.sort();
        contract_dependencies.sort();
        // Store `None` rather than an empty list when there are no entries.
        let dependencies = if !dependencies.is_empty() {
            Some(dependencies)
        } else {
            None
        };
        let contract_dependencies = if !contract_dependencies.is_empty() {
            Some(contract_dependencies)
        } else {
            None
        };
        Self {
            name,
            version,
            source,
            dependencies,
            contract_dependencies,
        }
    }

    /// A string that uniquely identifies a package and its source.
    ///
    /// Formatted as `<name> <source>`.
    pub fn unique_string(&self) -> String {
        pkg_unique_string(&self.name, &self.source)
    }

    /// The string representation used for specifying this package as a dependency.
    ///
    /// If this package's name is not enough to disambiguate it from other packages within the
    /// graph, this returns `<name> <source>`. If it is, it simply returns the name.
    pub fn name_disambiguated(&self, disambiguate: &HashSet<&str>) -> Cow<str> {
        let disambiguate = disambiguate.contains(&self.name[..]);
        pkg_name_disambiguated(&self.name, &self.source, disambiguate)
    }
}
/// Represents a `DepKind` before getting parsed.
///
/// Carries the kind of a dependency line until `parse_pkg_dep_line` runs;
/// afterwards it is converted into a full `DepKind` (including any parsed
/// salt for contract dependencies).
enum UnparsedDepKind {
    Library,
    Contract,
}
impl Lock {
    /// Load the `Lock` structure from the TOML `Forc.lock` file at the specified path.
    pub fn from_path(path: &Path) -> Result<Self> {
        let string = fs::read_to_string(path)
            .map_err(|e| anyhow!("failed to read {}: {}", path.display(), e))?;
        toml::de::from_str(&string).map_err(|e| anyhow!("failed to parse lock file: {}", e))
    }

    /// Given a graph of pinned packages, create a `Lock` representing the `Forc.lock` file
    /// structure.
    pub fn from_graph(graph: &pkg::Graph) -> Self {
        let names = graph.node_indices().map(|n| &graph[n].name[..]);
        let disambiguate: HashSet<_> = names_requiring_disambiguation(names).collect();
        // Collect the packages.
        let package: BTreeSet<_> = graph
            .node_indices()
            .map(|node| PkgLock::from_node(graph, node, &disambiguate))
            .collect();
        Self { package }
    }

    /// Given a `Lock` loaded from a `Forc.lock` file, produce the graph of pinned dependencies.
    pub fn to_graph(&self) -> Result<pkg::Graph> {
        let mut graph = pkg::Graph::new();
        // Track the names which need to be disambiguated in the dependency list.
        let names = self.package.iter().map(|pkg| &pkg.name[..]);
        let disambiguate: HashSet<_> = names_requiring_disambiguation(names).collect();
        // Add all nodes to the graph.
        // Keep track of "<name> <source>" to node-index mappings for the edge collection pass.
        let mut pkg_to_node: HashMap<String, pkg::NodeIx> = HashMap::new();
        for pkg in &self.package {
            // Note: `key` may be either `<name> <source>` or just `<name>` if disambiguation not
            // required.
            let key = pkg.name_disambiguated(&disambiguate).into_owned();
            let name = pkg.name.clone();
            let source: source::Pinned = pkg.source.parse().map_err(|e| {
                anyhow!("invalid 'source' entry for package {} lock: {:?}", name, e)
            })?;
            let pkg = pkg::Pinned { name, source };
            let node = graph.add_node(pkg);
            pkg_to_node.insert(key, node);
        }
        // On the second pass, add all edges.
        for pkg in &self.package {
            let key = pkg.name_disambiguated(&disambiguate);
            let node = pkg_to_node[&key[..]];
            // If `pkg.contract_dependencies` is None, we will be collecting an empty list of
            // contract_deps so that we will omit them during edge adding phase
            let contract_deps = pkg
                .contract_dependencies
                .as_ref()
                .into_iter()
                .flatten()
                .map(|contract_dep| (contract_dep, UnparsedDepKind::Contract));
            // If `pkg.dependencies` is None, we will be collecting an empty list of
            // lib_deps so that we will omit them during edge adding phase
            let lib_deps = pkg
                .dependencies
                .as_ref()
                .into_iter()
                .flatten()
                .map(|lib_dep| (lib_dep, UnparsedDepKind::Library));
            for (dep_line, dep_kind) in lib_deps.chain(contract_deps) {
                let (dep_name, dep_key, dep_salt) = parse_pkg_dep_line(dep_line)
                    .map_err(|e| anyhow!("failed to parse dependency \"{}\": {}", dep_line, e))?;
                let dep_node = pkg_to_node
                    .get(dep_key)
                    .copied()
                    .ok_or_else(|| anyhow!("found dep {} without node entry in graph", dep_key))?;
                // A missing dep name means the dependency name matches the package name.
                let dep_name = dep_name.unwrap_or(&graph[dep_node].name).to_string();
                let dep_kind = match dep_kind {
                    UnparsedDepKind::Library => DepKind::Library,
                    UnparsedDepKind::Contract => {
                        // No salt segment implies the default salt — `pkg_dep_line`
                        // omits zeroed salts when writing the lock file.
                        let dep_salt = dep_salt.unwrap_or_default();
                        DepKind::Contract { salt: dep_salt }
                    }
                };
                let dep_edge = Edge::new(dep_name, dep_kind);
                graph.update_edge(node, dep_node, dep_edge);
            }
        }
        Ok(graph)
    }

    /// Create a diff between `self` and the `old` `Lock`.
    ///
    /// Useful for showing the user which dependencies are out of date, or which have been updated.
    pub fn diff<'a>(&'a self, old: &'a Self) -> Diff<'a> {
        let added = self.package.difference(&old.package).collect();
        let removed = old.package.difference(&self.package).collect();
        Diff { added, removed }
    }
}
/// Collect the set of package names that require disambiguation.
///
/// A name requires disambiguation if it occurs more than once in `names`;
/// every occurrence after the first is yielded.
fn names_requiring_disambiguation<'a, I>(names: I) -> impl Iterator<Item = &'a str>
where
    I: IntoIterator<Item = &'a str>,
{
    let mut seen = BTreeSet::new();
    names.into_iter().filter(move |&name| {
        // `insert` returns `false` when the name was already present.
        !seen.insert(name)
    })
}
/// Render `name`, appending ` <source>` only when the bare name would be
/// ambiguous. Borrows the name unchanged in the common unambiguous case.
fn pkg_name_disambiguated<'a>(name: &'a str, source: &'a str, disambiguate: bool) -> Cow<'a, str> {
    if disambiguate {
        Cow::Owned(pkg_unique_string(name, source))
    } else {
        Cow::Borrowed(name)
    }
}

/// A string that uniquely identifies a package: `<name> <source>`.
fn pkg_unique_string(name: &str, source: &str) -> String {
    let mut unique = String::with_capacity(name.len() + 1 + source.len());
    unique.push_str(name);
    unique.push(' ');
    unique.push_str(source);
    unique
}
/// Format a single lock-file dependency line:
/// `(<dep_name>) <pkg_name> <source> (<salt>)`, where both parenthesized
/// segments are emitted only when needed.
fn pkg_dep_line(
    dep_name: Option<&str>,
    name: &str,
    source: &source::Pinned,
    dep_kind: &DepKind,
    disambiguate: bool,
) -> PkgDepLine {
    let source_string = source.to_string();
    // `<name>` or `<name> <source>`, depending on whether the bare name is
    // ambiguous within the graph.
    let disambiguated = pkg_name_disambiguated(name, &source_string, disambiguate);
    // Prefix the dependency name only when it differs from the package name.
    let mut line = match dep_name {
        Some(dep_name) => format!("({dep_name}) {disambiguated}"),
        None => disambiguated.into_owned(),
    };
    // Contract dependencies record their salt, unless it is the zero salt.
    if let DepKind::Contract { salt } = dep_kind {
        if *salt != fuel_tx::Salt::zeroed() {
            line = format!("{line} ({salt})");
        }
    }
    line
}
type ParsedPkgLine<'a> = (Option<&'a str>, &'a str, Option<fuel_tx::Salt>);

/// Parse the given `PkgDepLine` into its dependency name, unique string and salt segments.
///
/// I.e. given "(<dep_name>) <name> <source> (<salt>)", returns
/// ("<dep_name>", "<name> <source>", "<salt>").
///
/// Note that <source> may not appear in the case it is not required for
/// disambiguation, and both parenthesized segments are optional.
///
/// # Errors
///
/// Returns an error (rather than panicking, as the previous manual slicing
/// did) when a parenthesized segment is unterminated or the salt is invalid.
fn parse_pkg_dep_line(pkg_dep_line: &str) -> anyhow::Result<ParsedPkgLine> {
    let s = pkg_dep_line.trim();
    // Optional leading "(<dep_name>)" segment.
    let (dep_name, s) = match s.strip_prefix('(') {
        None => (None, s),
        Some(rest) => {
            let (dep_name, rest) = rest
                .split_once(')')
                .ok_or_else(|| anyhow!("missing closing parenthesis"))?;
            (Some(dep_name), rest)
        }
    };
    // Optional trailing "(<salt>)" segment; everything before it is the
    // (possibly disambiguated) package string.
    let (pkg_str, salt_str) = match s.split_once('(') {
        None => (s.trim(), None),
        Some((pkg_str, rest)) => {
            let salt_str = rest
                .trim()
                .strip_suffix(')')
                .ok_or_else(|| anyhow!("missing closing parenthesis"))?;
            (pkg_str.trim(), Some(salt_str))
        }
    };
    let salt = salt_str
        .map(|salt_str| {
            fuel_tx::Salt::from_str(salt_str).map_err(|e| anyhow!("invalid salt in lock file: {e}"))
        })
        .transpose()?;
    Ok((dep_name, pkg_str, salt))
}
/// Print the given lock diff: removed packages first, then added ones.
///
/// Workspace members (named in `member_names`) are skipped by both helpers.
pub fn print_diff(member_names: &HashSet<String>, diff: &Diff) {
    print_removed_pkgs(member_names, diff.removed.iter().copied());
    print_added_pkgs(member_names, diff.added.iter().copied());
}
/// Print a red "Removing" action line for every removed package that is not a
/// workspace member.
pub fn print_removed_pkgs<'a, I>(member_names: &HashSet<String>, removed: I)
where
    I: IntoIterator<Item = &'a PkgLock>,
{
    for pkg in removed {
        if member_names.contains(&pkg.name) {
            continue;
        }
        // Only git sources have their source string echoed back to the user.
        let src = if pkg.source.starts_with(source::git::Pinned::PREFIX) {
            format!(" {}", pkg.source)
        } else {
            String::new()
        };
        println_action_red(
            "Removing",
            &format!("{}{src}", ansiterm::Style::new().bold().paint(&pkg.name)),
        );
    }
}
/// Print a green "Adding" action line for every added package that is not a
/// workspace member.
pub fn print_added_pkgs<'a, I>(member_names: &HashSet<String>, added: I)
where
    I: IntoIterator<Item = &'a PkgLock>,
{
    // NOTE: the parameter was previously misnamed `removed` (copy-paste from
    // `print_removed_pkgs`); renaming is call-compatible in Rust.
    for pkg in added {
        if !member_names.contains(&pkg.name) {
            // Match `print_removed_pkgs`: only git sources include the source string.
            let src = match pkg.source.starts_with(source::git::Pinned::PREFIX) {
                true => format!(" {}", pkg.source),
                false => String::new(),
            };
            println_action_green(
                "Adding",
                &format!("{}{src}", ansiterm::Style::new().bold().paint(&pkg.name)),
            );
        }
    }
}
#[cfg(test)]
mod tests {
    use sway_core::fuel_prelude::fuel_tx;

    use super::parse_pkg_dep_line;

    // The four positive tests below cover every combination of the two
    // optional dep-line segments: the leading `(<dep_name>)` and the trailing
    // `(<salt>)`. The last test asserts that a malformed salt is rejected.

    #[test]
    fn test_parse_pkg_line_with_salt_with_dep_name() {
        let pkg_dep_line = "(std2) std path+from-root (0000000000000000000000000000000000000000000000000000000000000000)";
        let (dep_name, pkg_string, salt) = parse_pkg_dep_line(pkg_dep_line).unwrap();
        assert_eq!(salt, Some(fuel_tx::Salt::zeroed()));
        assert_eq!(dep_name, Some("std2"));
        assert_eq!(pkg_string, "std path+from-root");
    }

    #[test]
    fn test_parse_pkg_line_with_salt_without_dep_name() {
        let pkg_dep_line =
            "std path+from-root (0000000000000000000000000000000000000000000000000000000000000000)";
        let (dep_name, pkg_string, salt) = parse_pkg_dep_line(pkg_dep_line).unwrap();
        assert_eq!(salt, Some(fuel_tx::Salt::zeroed()));
        assert_eq!(dep_name, None);
        assert_eq!(pkg_string, "std path+from-root");
    }

    #[test]
    fn test_parse_pkg_line_without_salt_with_dep_name() {
        let pkg_dep_line = "(std2) std path+from-root";
        let (dep_name, pkg_string, salt) = parse_pkg_dep_line(pkg_dep_line).unwrap();
        assert_eq!(salt, None);
        assert_eq!(dep_name, Some("std2"));
        assert_eq!(pkg_string, "std path+from-root");
    }

    #[test]
    fn test_parse_pkg_line_without_salt_without_dep_name() {
        let pkg_dep_line = "std path+from-root";
        let (dep_name, pkg_string, salt) = parse_pkg_dep_line(pkg_dep_line).unwrap();
        assert_eq!(salt, None);
        assert_eq!(dep_name, None);
        assert_eq!(pkg_string, "std path+from-root");
    }

    #[test]
    #[should_panic]
    fn test_parse_pkg_line_invalid_salt() {
        let pkg_dep_line = "std path+from-root (1)";
        parse_pkg_dep_line(pkg_dep_line).unwrap();
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
use crate::manifest::GenericManifestFile;
use crate::{
lock::Lock,
manifest::{Dependency, ManifestFile, MemberManifestFiles, PackageManifestFile},
source::{self, IPFSNode, Source},
BuildProfile,
};
use anyhow::{anyhow, bail, Context, Error, Result};
use byte_unit::{Byte, UnitType};
use forc_tracing::{println_action_green, println_warning};
use forc_util::{
default_output_directory, find_file_name, kebab_to_snake_case, print_compiling, print_infos,
print_on_failure, print_warnings,
};
use petgraph::{
self, dot,
visit::{Bfs, Dfs, EdgeRef, Walker},
Directed, Direction,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{hash_map, BTreeSet, HashMap, HashSet},
fmt,
fs::{self, File},
hash::{Hash, Hasher},
io::Write,
path::{Path, PathBuf},
str::FromStr,
sync::{atomic::AtomicBool, Arc},
};
use sway_core::transform::AttributeArg;
pub use sway_core::Programs;
use sway_core::{
abi_generation::{
evm_abi,
fuel_abi::{self, AbiContext},
},
asm_generation::ProgramABI,
decl_engine::DeclRefFunction,
fuel_prelude::{
fuel_crypto,
fuel_tx::{self, Contract, ContractId, StorageSlot},
},
language::parsed::TreeType,
semantic_analysis::namespace,
source_map::SourceMap,
write_dwarf, BuildTarget, Engines, FinalizedEntry, LspConfig,
};
use sway_core::{namespace::Package, Observer};
use sway_core::{set_bytecode_configurables_offset, DbgGeneration, IrCli, PrintAsm};
use sway_error::{error::CompileError, handler::Handler, warning::CompileWarning};
use sway_features::ExperimentalFeatures;
use sway_types::{Ident, ProgramId, Span, Spanned};
use sway_utils::{constants, time_expr, PerformanceData, PerformanceMetric};
use tracing::{debug, info};
// Index width used for nodes/edges of the petgraph package graph.
type GraphIx = u32;
// Each graph node is a pinned package.
type Node = Pinned;

/// An edge in the package graph: how one package depends on another.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct Edge {
    /// The name specified on the left hand side of the `=` in a dependency declaration under
    /// `[dependencies]` or `[contract-dependencies]` within a forc manifest.
    ///
    /// The name of a dependency may differ from the package name in the case that the dependency's
    /// `package` field is specified.
    ///
    /// For example, in the following, `foo` is assumed to be both the package name and the dependency
    /// name:
    ///
    /// ```toml
    /// foo = { git = "https://github.com/owner/repo", branch = "master" }
    /// ```
    ///
    /// In the following case however, `foo` is the package name, but the dependency name is `foo-alt`:
    ///
    /// ```toml
    /// foo-alt = { git = "https://github.com/owner/repo", branch = "master", package = "foo" }
    /// ```
    pub name: String,
    /// Whether this is a library or a contract dependency.
    pub kind: DepKind,
}

/// The kind of a dependency edge in the package graph.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum DepKind {
    /// The dependency is a library and declared under `[dependencies]`.
    Library,
    /// The dependency is a contract and declared under `[contract-dependencies]`,
    /// along with the salt declared for it.
    Contract { salt: fuel_tx::Salt },
}

pub type Graph = petgraph::stable_graph::StableGraph<Node, Edge, Directed, GraphIx>;
pub type EdgeIx = petgraph::graph::EdgeIndex<GraphIx>;
pub type NodeIx = petgraph::graph::NodeIndex<GraphIx>;
/// Maps each pinned package's unique ID to its manifest file.
pub type ManifestMap = HashMap<PinnedId, PackageManifestFile>;
/// A unique ID for a pinned package.
///
/// The internal value is produced by hashing the package's name and `source::Pinned`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct PinnedId(u64);

/// The result of successfully compiling a package.
#[derive(Debug, Clone)]
pub struct BuiltPackage {
    pub descriptor: PackageDescriptor,
    pub program_abi: ProgramABI,
    pub storage_slots: Vec<StorageSlot>,
    pub warnings: Vec<CompileWarning>,
    pub source_map: SourceMap,
    pub tree_type: TreeType,
    pub bytecode: BuiltPackageBytecode,
    /// `Some` for contract member builds where tests were included. This is
    /// required so that we can deploy one instance of the contract (without
    /// tests) with a valid contract ID before executing the tests as scripts.
    ///
    /// For non-contract members, this is always `None`.
    pub bytecode_without_tests: Option<BuiltPackageBytecode>,
}

/// The package descriptors that a `BuiltPackage` holds so that the source used for building the
/// package can be retrieved later on.
#[derive(Debug, Clone)]
pub struct PackageDescriptor {
    pub name: String,
    pub target: BuildTarget,
    pub manifest_file: PackageManifestFile,
    pub pinned: Pinned,
}

/// The bytecode associated with a built package along with its entry points.
#[derive(Debug, Clone)]
pub struct BuiltPackageBytecode {
    pub bytes: Vec<u8>,
    pub entries: Vec<PkgEntry>,
}

/// Represents a package entry point.
#[derive(Debug, Clone)]
pub struct PkgEntry {
    pub finalized: FinalizedEntry,
    pub kind: PkgEntryKind,
}

/// Data specific to each kind of package entry point.
#[derive(Debug, Clone)]
pub enum PkgEntryKind {
    /// The program's main entry point.
    Main,
    /// A test entry point, with its test-specific metadata.
    Test(PkgTestEntry),
}

/// The possible conditions for a test result to be considered "passing".
#[derive(Debug, Clone)]
pub enum TestPassCondition {
    // NOTE(review): the `Option<u64>` presumably carries an expected revert
    // code to match against — confirm at the test-execution site.
    ShouldRevert(Option<u64>),
    ShouldNotRevert,
}

/// Data specific to the test entry point.
#[derive(Debug, Clone)]
pub struct PkgTestEntry {
    /// The condition under which this test counts as passing.
    pub pass_condition: TestPassCondition,
    /// Source span of the test.
    pub span: Span,
    /// Path of the source file declaring the test.
    pub file_path: Arc<PathBuf>,
}
/// The result of successfully compiling a workspace.
pub type BuiltWorkspace = Vec<Arc<BuiltPackage>>;

/// The result of a build: either a single package or a whole workspace.
#[derive(Debug, Clone)]
pub enum Built {
    /// Represents a standalone package build.
    Package(Arc<BuiltPackage>),
    /// Represents a workspace build.
    Workspace(BuiltWorkspace),
}

/// The result of the `compile` function, i.e. compiling a single package.
pub struct CompiledPackage {
    pub source_map: SourceMap,
    pub tree_type: TreeType,
    pub program_abi: ProgramABI,
    pub storage_slots: Vec<StorageSlot>,
    pub bytecode: BuiltPackageBytecode,
    pub namespace: namespace::Package,
    pub warnings: Vec<CompileWarning>,
    pub metrics: PerformanceData,
}

/// Compiled contract dependency parts relevant to calculating a contract's ID.
pub struct CompiledContractDependency {
    pub bytecode: Vec<u8>,
    pub storage_slots: Vec<StorageSlot>,
}

/// The set of compiled contract dependencies, provided to dependency namespace construction.
pub type CompiledContractDeps = HashMap<NodeIx, CompiledContractDependency>;

/// A package uniquely identified by name along with its source.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Pkg {
    /// The unique name of the package as declared in its manifest.
    pub name: String,
    /// Where the package is sourced from.
    pub source: Source,
}

/// A package uniquely identified by name along with its pinned source.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned {
    pub name: String,
    pub source: source::Pinned,
}

/// Represents the full build plan for a project.
#[derive(Clone, Debug)]
pub struct BuildPlan {
    // The dependency graph of all pinned packages.
    graph: Graph,
    // Maps each pinned package's ID to its manifest file.
    manifest_map: ManifestMap,
    // The order in which the graph's nodes should be compiled.
    compilation_order: Vec<NodeIx>,
}

/// Error returned upon failed parsing of `PinnedId::from_str`.
#[derive(Clone, Debug)]
pub struct PinnedIdParseError;
/// Options describing where the package lives and how its sources are fetched.
#[derive(Default, Clone)]
pub struct PkgOpts {
    /// Path to the project, if not specified, current working directory will be used.
    pub path: Option<String>,
    /// Offline mode, prevents Forc from using the network when managing dependencies.
    /// Meaning it will only try to use previously downloaded dependencies.
    pub offline: bool,
    /// Terse mode. Limited warning and error output.
    pub terse: bool,
    /// Requires that the Forc.lock file is up-to-date. If the lock file is missing, or it
    /// needs to be updated, Forc will exit with an error
    pub locked: bool,
    /// The directory in which the sway compiler output artifacts are placed.
    ///
    /// By default, this is `<project-root>/out`.
    pub output_directory: Option<String>,
    /// The IPFS node to be used for fetching IPFS sources.
    pub ipfs_node: IPFSNode,
}

/// Options selecting which intermediate compiler outputs to print.
#[derive(Default, Clone)]
pub struct PrintOpts {
    /// Print the generated Sway AST (Abstract Syntax Tree).
    pub ast: bool,
    /// Print the computed Sway DCA (Dead Code Analysis) graph to the specified path.
    /// If not specified prints to stdout.
    pub dca_graph: Option<String>,
    /// Specifies the url format to be used in the generated dot file.
    /// Variables {path}, {line} {col} can be used in the provided format.
    /// An example for vscode would be: "vscode://file/{path}:{line}:{col}"
    pub dca_graph_url_format: Option<String>,
    /// Print the generated ASM.
    pub asm: PrintAsm,
    /// Print the bytecode. This is the final output of the compiler.
    pub bytecode: bool,
    /// Print the original source code together with bytecode.
    pub bytecode_spans: bool,
    /// Print the generated Sway IR (Intermediate Representation).
    pub ir: IrCli,
    /// Output build errors and warnings in reverse order.
    pub reverse_order: bool,
}

/// Options controlling minification of JSON outputs.
#[derive(Default, Clone)]
pub struct MinifyOpts {
    /// By default the JSON for ABIs is formatted for human readability. By using this option JSON
    /// output will be "minified", i.e. all on one line without whitespace.
    pub json_abi: bool,
    /// By default the JSON for initial storage slots is formatted for human readability. By using
    /// this option JSON output will be "minified", i.e. all on one line without whitespace.
    pub json_storage_slots: bool,
}

/// Represents a compiled contract ID as a pub const in a contract.
type ContractIdConst = String;

/// Options selecting extra compiler information to dump during the build.
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
pub struct DumpOpts {
    /// Dump all trait implementations for the given type name.
    pub dump_impls: Option<String>,
}
/// The set of options provided to the `build` functions.
#[derive(Default, Clone)]
pub struct BuildOpts {
    /// Options describing the package(s) to build and how they are fetched.
    pub pkg: PkgOpts,
    /// Options selecting which intermediate compiler outputs to print.
    pub print: PrintOpts,
    // NOTE(review): shares the `IrCli` type with `PrintOpts::ir`; presumably
    // selects which IR stages to verify — confirm at the use site.
    pub verify_ir: IrCli,
    /// Options controlling minification of JSON outputs.
    pub minify: MinifyOpts,
    /// Options selecting extra compiler information to dump.
    pub dump: DumpOpts,
    /// If set, generates a JSON file containing the hex-encoded script binary.
    pub hex_outfile: Option<String>,
    /// If set, outputs a binary file representing the script bytes.
    pub binary_outfile: Option<String>,
    /// If set, outputs debug info to the provided file.
    /// If the argument provided ends with .json, a JSON is emitted,
    /// otherwise, an ELF file containing DWARF is emitted.
    pub debug_outfile: Option<String>,
    /// Build target to use.
    pub build_target: BuildTarget,
    /// Name of the build profile to use.
    pub build_profile: String,
    /// Use the release build profile.
    /// The release profile can be customized in the manifest file.
    pub release: bool,
    /// Output the time elapsed over each part of the compilation process.
    pub time_phases: bool,
    /// Profile the build process.
    pub profile: bool,
    /// If set, outputs compilation metrics info in JSON format.
    pub metrics_outfile: Option<String>,
    /// Warnings must be treated as compiler errors.
    pub error_on_warnings: bool,
    /// Include all test functions within the build.
    pub tests: bool,
    /// The set of options to filter by member project kind.
    pub member_filter: MemberFilter,
    /// Set of enabled experimental flags
    pub experimental: Vec<sway_features::Feature>,
    /// Set of disabled experimental flags
    pub no_experimental: Vec<sway_features::Feature>,
    /// Do not output any build artifacts, e.g., bytecode, ABI JSON, etc.
    pub no_output: bool,
}

/// The set of options to filter type of projects to build in a workspace.
#[derive(Clone)]
pub struct MemberFilter {
    pub build_contracts: bool,
    pub build_scripts: bool,
    pub build_predicates: bool,
    pub build_libraries: bool,
}
impl Default for MemberFilter {
    /// By default, no filtering: every member kind is built.
    fn default() -> Self {
        Self {
            build_contracts: true,
            build_scripts: true,
            build_predicates: true,
            build_libraries: true,
        }
    }
}
impl MemberFilter {
    /// Returns a new `MemberFilter` that only builds scripts.
    pub fn only_scripts() -> Self {
        Self {
            build_contracts: false,
            build_scripts: true,
            build_predicates: false,
            build_libraries: false,
        }
    }

    /// Returns a new `MemberFilter` that only builds contracts.
    pub fn only_contracts() -> Self {
        Self {
            build_contracts: true,
            build_scripts: false,
            build_predicates: false,
            build_libraries: false,
        }
    }

    /// Returns a new `MemberFilter`, that only builds predicates.
    pub fn only_predicates() -> Self {
        Self {
            build_contracts: false,
            build_scripts: false,
            build_predicates: true,
            build_libraries: false,
        }
    }

    /// Filter given target of output nodes according to this `MemberFilter`.
    pub fn filter_outputs(
        &self,
        build_plan: &BuildPlan,
        outputs: HashSet<NodeIx>,
    ) -> HashSet<NodeIx> {
        let graph = build_plan.graph();
        let manifest_map = build_plan.manifest_map();
        outputs
            .into_iter()
            .filter(|&node_ix| {
                let pkg = &graph[node_ix];
                let pkg_manifest = &manifest_map[&pkg.id()];
                let program_type = pkg_manifest.program_type();
                // Since parser cannot recover for program type detection, for the scenarios that
                // parser fails to parse the code, program type detection is not possible. So in
                // failing to parse cases we should try to build at least until
                // https://github.com/FuelLabs/sway/issues/3017 is fixed. Until then we should
                // build those members because of two reasons:
                //
                // 1. The member could already be from the desired member type
                // 2. If we do not try to build there is no way users can know there is a code
                //    piece failing to be parsed in their workspace.
                match program_type {
                    Ok(program_type) => match program_type {
                        TreeType::Predicate => self.build_predicates,
                        TreeType::Script => self.build_scripts,
                        TreeType::Contract => self.build_contracts,
                        TreeType::Library => self.build_libraries,
                    },
                    // Parse failed, so the program type is unknown: keep the
                    // member and let the build surface the error (see above).
                    Err(_) => true,
                }
            })
            .collect()
    }
}
impl BuildOpts {
    /// Return a copy of these options with the `tests` flag set to `include_tests`.
    pub fn include_tests(self, include_tests: bool) -> Self {
        let mut opts = self;
        opts.tests = include_tests;
        opts
    }
}
impl Edge {
    /// Construct a dependency-graph edge for the dependency called `name`, of the given `kind`.
    pub fn new(name: String, kind: DepKind) -> Edge {
        Edge { name, kind }
    }
}
impl BuiltPackage {
/// Writes bytecode of the BuiltPackage to the given `path`.
pub fn write_bytecode(&self, path: &Path) -> Result<()> {
fs::write(path, &self.bytecode.bytes)?;
Ok(())
}
pub fn write_hexcode(&self, path: &Path) -> Result<()> {
let hex_file = serde_json::json!({
"hex": format!("0x{}", hex::encode(&self.bytecode.bytes)),
});
fs::write(path, hex_file.to_string())?;
Ok(())
}
/// Writes debug_info (source_map) of the BuiltPackage to the given `out_file`.
pub fn write_debug_info(&self, out_file: &Path) -> Result<()> {
if matches!(out_file.extension(), Some(ext) if ext == "json") {
let source_map_json =
serde_json::to_vec(&self.source_map).expect("JSON serialization failed");
fs::write(out_file, source_map_json)?;
} else {
let primary_dir = self.descriptor.manifest_file.dir();
let primary_src = self.descriptor.manifest_file.entry_path();
write_dwarf(&self.source_map, primary_dir, &primary_src, out_file)?;
}
Ok(())
}
pub fn json_abi_string(&self, minify_json_abi: bool) -> Result<Option<String>> {
match &self.program_abi {
ProgramABI::Fuel(program_abi) => {
if !program_abi.functions.is_empty() {
let json_string = if minify_json_abi {
serde_json::to_string(&program_abi)
} else {
serde_json::to_string_pretty(&program_abi)
}?;
Ok(Some(json_string))
} else {
Ok(None)
}
}
ProgramABI::Evm(program_abi) => {
if !program_abi.is_empty() {
let json_string = if minify_json_abi {
serde_json::to_string(&program_abi)
} else {
serde_json::to_string_pretty(&program_abi)
}?;
Ok(Some(json_string))
} else {
Ok(None)
}
}
// TODO?
ProgramABI::MidenVM(()) => Ok(None),
}
}
/// Writes the ABI in JSON format to the given `path`.
pub fn write_json_abi(&self, path: &Path, minify: &MinifyOpts) -> Result<()> {
if let Some(json_abi_string) = self.json_abi_string(minify.json_abi)? {
let mut file = File::create(path)?;
file.write_all(json_abi_string.as_bytes())?;
}
Ok(())
}
/// Writes BuiltPackage to `output_dir`.
pub fn write_output(
&self,
minify: &MinifyOpts,
pkg_name: &str,
output_dir: &Path,
) -> Result<()> {
if !output_dir.exists() {
fs::create_dir_all(output_dir)?;
}
// Place build artifacts into the output directory.
let bin_path = output_dir.join(pkg_name).with_extension("bin");
self.write_bytecode(&bin_path)?;
let program_abi_stem = format!("{pkg_name}-abi");
let json_abi_path = output_dir.join(program_abi_stem).with_extension("json");
self.write_json_abi(&json_abi_path, minify)?;
debug!(
" Bytecode size: {} bytes ({})",
self.bytecode.bytes.len(),
format_bytecode_size(self.bytecode.bytes.len())
);
// Additional ops required depending on the program type
match self.tree_type {
TreeType::Contract => {
// For contracts, emit a JSON file with all the initialized storage slots.
let storage_slots_stem = format!("{pkg_name}-storage_slots");
let storage_slots_path = output_dir.join(storage_slots_stem).with_extension("json");
let storage_slots_file = File::create(storage_slots_path)?;
let res = if minify.json_storage_slots {
serde_json::to_writer(&storage_slots_file, &self.storage_slots)
} else {
serde_json::to_writer_pretty(&storage_slots_file, &self.storage_slots)
};
res?;
}
TreeType::Predicate => {
// Get the root hash of the bytecode for predicates and store the result in a file in the output directory
let root = format!(
"0x{}",
fuel_tx::Input::predicate_owner(&self.bytecode.bytes)
);
let root_file_name = format!("{}{}", &pkg_name, SWAY_BIN_ROOT_SUFFIX);
let root_path = output_dir.join(root_file_name);
fs::write(root_path, &root)?;
info!(" Predicate root: {}", root);
}
TreeType::Script => {
// hash the bytecode for scripts and store the result in a file in the output directory
let bytecode_hash =
format!("0x{}", fuel_crypto::Hasher::hash(&self.bytecode.bytes));
let hash_file_name = format!("{}{}", &pkg_name, SWAY_BIN_HASH_SUFFIX);
let hash_path = output_dir.join(hash_file_name);
fs::write(hash_path, &bytecode_hash)?;
debug!(" Bytecode hash: {}", bytecode_hash);
}
_ => (),
}
Ok(())
}
}
impl Built {
    /// Returns an iterator yielding all member built packages.
    ///
    /// Cloning an `Arc<BuiltPackage>` handle only bumps a reference count; the
    /// underlying `BuiltPackage` data is never copied.
    pub fn into_members<'a>(
        &'a self,
    ) -> Box<dyn Iterator<Item = (&'a Pinned, Arc<BuiltPackage>)> + 'a> {
        match self {
            Built::Package(pkg) => {
                let entry = (&pkg.descriptor.pinned, Arc::clone(pkg));
                Box::new(std::iter::once(entry))
            }
            Built::Workspace(workspace) => {
                let members = workspace
                    .iter()
                    .map(|pkg| (&pkg.descriptor.pinned, Arc::clone(pkg)));
                Box::new(members)
            }
        }
    }
    /// Tries to retrieve the `Built` as a `BuiltPackage`.
    pub fn expect_pkg(self) -> Result<Arc<BuiltPackage>> {
        match self {
            Built::Package(built_pkg) => Ok(built_pkg),
            Built::Workspace(_) => bail!("expected `Built` to be `Built::Package`"),
        }
    }
}
impl BuildPlan {
/// Create a new build plan for the project from the build options provided.
///
/// To do so, it tries to read the manifet file at the target path and creates the plan with
/// `BuildPlan::from_lock_and_manifest`.
pub fn from_pkg_opts(pkg_options: &PkgOpts) -> Result<Self> {
let path = &pkg_options.path;
let manifest_dir = if let Some(ref path) = path {
PathBuf::from(path)
} else {
std::env::current_dir()?
};
let manifest_file = ManifestFile::from_dir(manifest_dir)?;
let member_manifests = manifest_file.member_manifests()?;
// Check if we have members to build so that we are not trying to build an empty workspace.
if member_manifests.is_empty() {
bail!("No member found to build")
}
let lock_path = manifest_file.lock_path()?;
Self::from_lock_and_manifests(
&lock_path,
&member_manifests,
pkg_options.locked,
pkg_options.offline,
&pkg_options.ipfs_node,
)
}
/// Create a new build plan for the project by fetching and pinning all dependencies.
///
/// To account for an existing lock file, use `from_lock_and_manifest` instead.
pub fn from_manifests(
manifests: &MemberManifestFiles,
offline: bool,
ipfs_node: &IPFSNode,
) -> Result<Self> {
// Check toolchain version
validate_version(manifests)?;
let mut graph = Graph::default();
let mut manifest_map = ManifestMap::default();
fetch_graph(manifests, offline, ipfs_node, &mut graph, &mut manifest_map)?;
// Validate the graph, since we constructed the graph from scratch the paths will not be a
// problem but the version check is still needed
validate_graph(&graph, manifests)?;
let compilation_order = compilation_order(&graph)?;
Ok(Self {
graph,
manifest_map,
compilation_order,
})
}
    /// Create a new build plan taking into account the state of both the PackageManifest and the existing
    /// lock file if there is one.
    ///
    /// This will first attempt to load a build plan from the lock file and validate the resulting
    /// graph using the current state of the PackageManifest.
    ///
    /// This includes checking if the [dependencies] or [patch] tables have changed and checking
    /// the validity of the local path dependencies. If any changes are detected, the graph is
    /// updated and any new packages that require fetching are fetched.
    ///
    /// The resulting build plan should always be in a valid state that is ready for building or
    /// checking.
    // TODO: Currently (if `--locked` isn't specified) this writes the updated lock directly. This
    // probably should not be the role of the `BuildPlan` constructor - instead, we should return
    // the manifest alongside some lock diff type that can be used to optionally write the updated
    // lock file and print the diff.
    pub fn from_lock_and_manifests(
        lock_path: &Path,
        manifests: &MemberManifestFiles,
        locked: bool,
        offline: bool,
        ipfs_node: &IPFSNode,
    ) -> Result<Self> {
        // Check toolchain version
        validate_version(manifests)?;
        // Keep track of the cause for the new lock file if it turns out we need one.
        let mut new_lock_cause = None;
        // First, attempt to load the lock.
        let lock = Lock::from_path(lock_path).unwrap_or_else(|e| {
            // A missing lock file is expected for fresh projects; any other load
            // failure is recorded verbatim as the regeneration cause.
            new_lock_cause = if e.to_string().contains("No such file or directory") {
                Some(anyhow!("lock file did not exist"))
            } else {
                Some(e)
            };
            Lock::default()
        });
        // Next, construct the package graph from the lock.
        let mut graph = lock.to_graph().unwrap_or_else(|e| {
            new_lock_cause = Some(anyhow!("Invalid lock: {}", e));
            Graph::default()
        });
        // Since the lock file was last created there are many ways in which it might have been
        // invalidated. E.g. a package's manifest `[dependencies]` table might have changed, a user
        // might have edited the `Forc.lock` file when they shouldn't have, a path dependency no
        // longer exists at its specified location, etc. We must first remove all invalid nodes
        // before we can determine what we need to fetch.
        let invalid_deps = validate_graph(&graph, manifests)?;
        let members: HashSet<String> = manifests.keys().cloned().collect();
        remove_deps(&mut graph, &members, &invalid_deps);
        // We know that the remaining nodes have valid paths, otherwise they would have been
        // removed. We can safely produce an initial `manifest_map`.
        let mut manifest_map = graph_to_manifest_map(manifests, &graph)?;
        // Attempt to fetch the remainder of the graph.
        let _added = fetch_graph(manifests, offline, ipfs_node, &mut graph, &mut manifest_map)?;
        // Determine the compilation order.
        let compilation_order = compilation_order(&graph)?;
        let plan = Self {
            graph,
            manifest_map,
            compilation_order,
        };
        // Construct the new lock and check the diff.
        let new_lock = Lock::from_graph(plan.graph());
        let lock_diff = new_lock.diff(&lock);
        if !lock_diff.removed.is_empty() || !lock_diff.added.is_empty() {
            // Only record this cause if an earlier, more specific one was not set.
            new_lock_cause.get_or_insert(anyhow!("lock file did not match manifest"));
        }
        // If there was some change in the lock file, write the new one and print the cause.
        if let Some(cause) = new_lock_cause {
            // Under `--locked`, any required lock change is an error rather than a rewrite.
            if locked {
                bail!(
                    "The lock file {} needs to be updated (Cause: {}) \
                    but --locked was passed to prevent this.",
                    lock_path.to_string_lossy(),
                    cause,
                );
            }
            println_action_green(
                "Creating",
                &format!("a new `Forc.lock` file. (Cause: {cause})"),
            );
            let member_names = manifests
                .values()
                .map(|manifest| manifest.project.name.to_string())
                .collect();
            crate::lock::print_diff(&member_names, &lock_diff);
            let string = toml::ser::to_string_pretty(&new_lock)
                .map_err(|e| anyhow!("failed to serialize lock file: {}", e))?;
            fs::write(lock_path, string)
                .map_err(|e| anyhow!("failed to write lock file: {}", e))?;
            debug!("   Created new lock file at {}", lock_path.display());
        }
        Ok(plan)
    }
/// Produce an iterator yielding all contract dependencies of given node in the order of
/// compilation.
pub fn contract_dependencies(&self, node: NodeIx) -> impl Iterator<Item = NodeIx> + '_ {
let graph = self.graph();
let connected: HashSet<_> = Dfs::new(graph, node).iter(graph).collect();
self.compilation_order()
.iter()
.cloned()
.filter(move |&n| n != node)
.filter(|&n| {
graph
.edges_directed(n, Direction::Incoming)
.any(|edge| matches!(edge.weight().kind, DepKind::Contract { .. }))
})
.filter(move |&n| connected.contains(&n))
}
/// Produce an iterator yielding all workspace member nodes in order of compilation.
///
/// In the case that this [BuildPlan] was constructed for a single package,
/// only that package's node will be yielded.
pub fn member_nodes(&self) -> impl Iterator<Item = NodeIx> + '_ {
self.compilation_order()
.iter()
.copied()
.filter(|&n| self.graph[n].source == source::Pinned::MEMBER)
}
/// Produce an iterator yielding all workspace member pinned pkgs in order of compilation.
///
/// In the case that this `BuildPlan` was constructed for a single package,
/// only that package's pinned pkg will be yielded.
pub fn member_pinned_pkgs(&self) -> impl Iterator<Item = Pinned> + '_ {
let graph = self.graph();
self.member_nodes().map(|node| &graph[node]).cloned()
}
    /// View the build plan's compilation graph.
    pub fn graph(&self) -> &Graph {
        &self.graph
    }
    /// View the build plan's map of pinned package IDs to their associated manifest.
    pub fn manifest_map(&self) -> &ManifestMap {
        &self.manifest_map
    }
    /// The order in which nodes are compiled, determined via a toposort of the package graph.
    ///
    /// Iterating this slice visits each package only after its dependencies.
    pub fn compilation_order(&self) -> &[NodeIx] {
        &self.compilation_order
    }
/// Produce the node index of the member with the given name.
pub fn find_member_index(&self, member_name: &str) -> Option<NodeIx> {
self.member_nodes()
.find(|node_ix| self.graph[*node_ix].name == member_name)
}
/// Produce an iterator yielding indices for the given node and its dependencies in BFS order.
pub fn node_deps(&self, n: NodeIx) -> impl '_ + Iterator<Item = NodeIx> {
let bfs = Bfs::new(&self.graph, n);
// Return an iterator yielding visitable nodes from the given node.
bfs.iter(&self.graph)
}
/// Produce an iterator yielding build profiles from the member nodes of this BuildPlan.
pub fn build_profiles(&self) -> impl '_ + Iterator<Item = (String, BuildProfile)> {
let manifest_map = &self.manifest_map;
let graph = &self.graph;
self.member_nodes().flat_map(|member_node| {
manifest_map[&graph[member_node].id()]
.build_profiles()
.map(|(n, p)| (n.clone(), p.clone()))
})
}
/// Returns a salt for the given pinned package if it is a contract and `None` for libraries.
pub fn salt(&self, pinned: &Pinned) -> Option<fuel_tx::Salt> {
let graph = self.graph();
let node_ix = graph
.node_indices()
.find(|node_ix| graph[*node_ix] == *pinned);
node_ix.and_then(|node| {
graph
.edges_directed(node, Direction::Incoming)
.map(|e| match e.weight().kind {
DepKind::Library => None,
DepKind::Contract { salt } => Some(salt),
})
.next()
.flatten()
})
}
/// Returns a [String] representing the build dependency graph in GraphViz DOT format.
pub fn visualize(&self, url_file_prefix: Option<String>) -> String {
format!(
"{:?}",
dot::Dot::with_attr_getters(
&self.graph,
&[dot::Config::NodeNoLabel, dot::Config::EdgeNoLabel],
&|_, _| String::new(),
&|_, nr| {
let url = url_file_prefix.clone().map_or(String::new(), |prefix| {
self.manifest_map
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/path.rs | forc-pkg/src/source/path.rs | use crate::manifest::GenericManifestFile;
use crate::{manifest::PackageManifestFile, pkg::PinnedId, source};
use serde::{Deserialize, Serialize};
use std::{
fmt,
path::{Path, PathBuf},
str::FromStr,
};
/// A path to a directory with a `Forc.toml` manifest at its root.
pub type Source = PathBuf;
/// A pinned instance of a path source.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned {
    /// The ID of the package that is the root of the subgraph of path dependencies that this
    /// package is a part of.
    ///
    /// In other words, when traversing the parents of this package, this is the ID of the first
    /// non-path ancestor package.
    ///
    /// As a result, this will always be either a git package or the root package.
    ///
    /// This allows for disambiguating path dependencies of the same name that have different path
    /// roots.
    pub path_root: PinnedId,
}
/// Error returned upon failed parsing of `SourcePathPinned::from_str`.
///
/// A unit type: the input either matches the expected `path+from-root-<id>`
/// shape or it does not, so no further detail is carried.
#[derive(Clone, Debug)]
pub struct SourcePathPinnedParseError;
impl Pinned {
    /// Prefix used when a pinned path source is rendered to / parsed from a string.
    pub const PREFIX: &'static str = "path";
}
impl fmt::Display for Pinned {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// path+from-root-<id>
write!(f, "{}+from-root-{}", Self::PREFIX, self.path_root)
}
}
impl FromStr for Pinned {
    type Err = SourcePathPinnedParseError;
    /// Parse a pinned path source of the form `path+from-root-<id>`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // path+from-root-<id>
        let s = s.trim();
        // The string must begin with the `path+` prefix (idiomatic `strip_prefix`
        // replaces the previous manual `find(..) != Some(0)` check).
        let prefix_plus = format!("{}+", Self::PREFIX);
        let rest = s
            .strip_prefix(&prefix_plus)
            .ok_or(SourcePathPinnedParseError)?;
        // Parse the `from-root-*` section: the path root ID follows the first
        // `from-root-` marker.
        let path_root = rest
            .split("from-root-")
            .nth(1)
            .ok_or(SourcePathPinnedParseError)?
            .parse()
            .map_err(|_| SourcePathPinnedParseError)?;
        Ok(Self { path_root })
    }
}
impl source::Pin for Source {
    type Pinned = Pinned;
    /// Pinning a path source records the current path root; the path itself is
    /// returned unchanged as the fetch location.
    fn pin(&self, ctx: source::PinCtx) -> anyhow::Result<(Self::Pinned, PathBuf)> {
        let pinned = Pinned {
            path_root: ctx.path_root(),
        };
        Ok((pinned, self.clone()))
    }
}
impl source::Fetch for Pinned {
    /// "Fetching" a path dependency is just loading its manifest from the local directory.
    fn fetch(&self, _ctx: source::PinCtx, local: &Path) -> anyhow::Result<PackageManifestFile> {
        Ok(PackageManifestFile::from_dir(local)?)
    }
}
impl source::DepPath for Pinned {
    // A path dependency is located relative to its path root package.
    fn dep_path(&self, _name: &str) -> anyhow::Result<source::DependencyPath> {
        Ok(source::DependencyPath::Root(self.path_root))
    }
}
impl From<Pinned> for source::Pinned {
    /// Wrap a pinned path source in the source-kind enum.
    fn from(p: Pinned) -> Self {
        source::Pinned::Path(p)
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/mod.rs | forc-pkg/src/source/mod.rs | //! Related to pinning, fetching, validating and caching the source for packages.
//!
//! To add a new source kind:
//!
//! 1. Add a new module.
//! 2. Create types providing implementations for each of the traits in this module.
//! 3. Add a variant to the `Source` and `Pinned` types in this module.
//! 4. Add variant support to the `from_manifest_dep` and `FromStr` implementations.
pub mod git;
pub(crate) mod ipfs;
mod member;
pub mod path;
pub mod reg;
use self::git::Url;
use crate::manifest::GenericManifestFile;
use crate::{
manifest::{self, MemberManifestFiles, PackageManifestFile},
pkg::{ManifestMap, PinnedId},
};
use anyhow::{anyhow, bail, Result};
use serde::{Deserialize, Serialize};
use std::{
collections::hash_map,
fmt,
hash::{Hash, Hasher},
path::{Path, PathBuf},
str::FromStr,
};
use sway_utils::{DEFAULT_IPFS_GATEWAY_URL, DEFAULT_REGISTRY_IPFS_GATEWAY_URL};
/// Pin this source at a specific "version", return the local directory to fetch into.
trait Pin {
    /// The pinned counterpart of this source kind.
    type Pinned: Fetch + Hash;
    fn pin(&self, ctx: PinCtx) -> Result<(Self::Pinned, PathBuf)>;
}
/// Fetch (and optionally cache) a pinned instance of this source to the given path.
trait Fetch {
    fn fetch(&self, ctx: PinCtx, local: &Path) -> Result<PackageManifestFile>;
}
/// Given a parent manifest, return the canonical, local path for this source as a dependency.
trait DepPath {
    fn dep_path(&self, name: &str) -> Result<DependencyPath>;
}
/// Hash identifying a single fetch pass; produced by [`fetch_id`].
type FetchId = u64;
/// Selects which IPFS node or gateway is used when fetching IPFS-hosted sources.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum IPFSNode {
    /// Use a local IPFS node.
    Local,
    /// Use the gateway at the given URL.
    WithUrl(String),
}
impl Default for IPFSNode {
fn default() -> Self {
Self::WithUrl(DEFAULT_IPFS_GATEWAY_URL.to_string())
}
}
impl IPFSNode {
/// Returns an IPFSNode configured to use the Fuel-operated IPFS gateway.
pub fn fuel() -> Self {
Self::WithUrl(DEFAULT_REGISTRY_IPFS_GATEWAY_URL.to_string())
}
/// Returns an IPFSNode configured to use the public IPFS gateway.
pub fn public() -> Self {
Self::WithUrl(DEFAULT_IPFS_GATEWAY_URL.to_string())
}
}
impl FromStr for IPFSNode {
    type Err = anyhow::Error;
    /// Parse an IPFS node selection.
    ///
    /// Accepts the sentinels `"PUBLIC"`, `"FUEL"` and `"LOCAL"`; any other value
    /// is treated as a gateway URL. Parsing is infallible in practice.
    fn from_str(value: &str) -> Result<Self, Self::Err> {
        match value {
            // Delegate to the canonical constructors rather than repeating the
            // gateway-URL construction (and the constant paths) here.
            "PUBLIC" => Ok(IPFSNode::public()),
            "FUEL" => Ok(IPFSNode::fuel()),
            "LOCAL" => Ok(IPFSNode::Local),
            url => Ok(IPFSNode::WithUrl(url.to_string())),
        }
    }
}
/// Specifies a base source for a package.
///
/// - For registry packages, this includes a base version.
/// - For git packages, this includes a base git reference like a branch or tag.
///
/// Note that a `Source` does not specify a specific, pinned version. Rather, it specifies a source
/// at which the current latest version may be located.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum Source {
    /// Used to refer to a workspace member project.
    Member(member::Source),
    /// A git repo with a `Forc.toml` manifest at its root.
    Git(git::Source),
    /// A path to a directory with a `Forc.toml` manifest at its root.
    Path(path::Source),
    /// A package described by its IPFS CID.
    Ipfs(ipfs::Source),
    /// A forc project hosted on the official registry.
    Registry(reg::Source),
}
/// A pinned instance of the package source.
///
/// Specifies an exact version to use, or an exact commit in the case of git dependencies. The
/// pinned version or commit is updated upon creation of the lock file and on `forc update`.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub enum Pinned {
    /// A workspace member package.
    Member(member::Pinned),
    /// A git source pinned to an exact commit.
    Git(git::Pinned),
    /// A path source pinned via its path root package.
    Path(path::Pinned),
    /// An IPFS source pinned to its CID.
    Ipfs(ipfs::Pinned),
    /// A registry source pinned to an exact version.
    Registry(reg::Pinned),
}
/// Shared context threaded through the pinning of each source.
#[derive(Clone)]
pub(crate) struct PinCtx<'a> {
    /// A unique hash associated with the process' current fetch pass.
    /// NOTE: Only to be used for creating temporary directories. Should not
    /// interact with anything that appears in the pinned output.
    pub(crate) fetch_id: FetchId,
    /// Within the context of a package graph fetch traversal, represents the current path root.
    pub(crate) path_root: PinnedId,
    /// Whether or not the fetch is occurring offline.
    pub(crate) offline: bool,
    /// The name of the package associated with this source.
    pub(crate) name: &'a str,
    /// The IPFS node to use for fetching IPFS sources.
    pub(crate) ipfs_node: &'a IPFSNode,
}
/// Describes where a dependency's source can be found locally.
pub(crate) enum DependencyPath {
    /// The dependency is another member of the workspace.
    Member,
    /// The dependency is located at this specific path.
    ManifestPath(PathBuf),
    /// Path is pinned via manifest, relative to the given root node.
    Root(PinnedId),
}
/// A wrapper type for providing `Display` implementations for compiling msgs.
pub struct DisplayCompiling<'a, T> {
    /// The pinned source being compiled.
    source: &'a T,
    /// The manifest directory, displayed for member/path sources.
    manifest_dir: &'a Path,
}
/// Error returned upon failed parsing of `SourcePinned::from_str`.
#[derive(Clone, Debug)]
pub struct PinnedParseError;
impl Source {
    /// Construct a source from path information collected from manifest file.
    ///
    /// Returns `Source::Member` when the canonicalized path points at a workspace
    /// member's directory, otherwise `Source::Path`.
    fn with_path_dependency(
        relative_path: &Path,
        manifest_dir: &Path,
        member_manifests: &MemberManifestFiles,
    ) -> Result<Self> {
        let path = manifest_dir.join(relative_path);
        let canonical_path = path
            .canonicalize()
            .map_err(|e| anyhow!("Failed to canonicalize dependency path {:?}: {}", path, e))?;
        // Check if path is a member of a workspace.
        if member_manifests
            .values()
            .any(|pkg_manifest| pkg_manifest.dir() == canonical_path)
        {
            Ok(Source::Member(member::Source(canonical_path)))
        } else {
            Ok(Source::Path(canonical_path))
        }
    }
    /// Construct a source from version information collected from manifest file.
    ///
    /// The version string must currently be an exact `x.y.z` semver.
    fn with_version_dependency(
        pkg_name: &str,
        version: &str,
        namespace: &reg::file_location::Namespace,
    ) -> Result<Self> {
        // TODO: update here once we are supporting non-exact versions (non `x.y.z` versions)
        // see: https://github.com/FuelLabs/sway/issues/7060
        let semver = semver::Version::parse(version)?;
        let source = reg::Source {
            version: semver,
            namespace: namespace.clone(),
            name: pkg_name.to_string(),
        };
        Ok(Source::Registry(source))
    }
    /// Convert the given manifest `Dependency` declaration to a `Source`.
    pub fn from_manifest_dep(
        manifest_dir: &Path,
        dep_name: &str,
        dep: &manifest::Dependency,
        member_manifests: &MemberManifestFiles,
    ) -> Result<Self> {
        let source = match dep {
            // A bare version string is a registry dependency in the flat namespace.
            manifest::Dependency::Simple(ref ver_str) => Source::with_version_dependency(
                dep_name,
                ver_str,
                &reg::file_location::Namespace::Flat,
            )?,
            manifest::Dependency::Detailed(ref det) => {
                // The set fields determine the source kind; `path` takes priority,
                // then `git`, then `ipfs`, then `version`.
                match (&det.path, &det.version, &det.git, &det.ipfs) {
                    (Some(relative_path), _, _, _) => {
                        let relative_path = PathBuf::from_str(relative_path)?;
                        Source::with_path_dependency(
                            &relative_path,
                            manifest_dir,
                            member_manifests,
                        )?
                    }
                    (_, _, Some(repo), _) => {
                        // At most one of `branch`/`tag`/`rev` may be given.
                        let reference = match (&det.branch, &det.tag, &det.rev) {
                            (Some(branch), None, None) => git::Reference::Branch(branch.clone()),
                            (None, Some(tag), None) => git::Reference::Tag(tag.clone()),
                            (None, None, Some(rev)) => git::Reference::Rev(rev.clone()),
                            (None, None, None) => git::Reference::DefaultBranch,
                            _ => bail!(
                                "git dependencies support at most one reference: \
                                either `branch`, `tag` or `rev`"
                            ),
                        };
                        let repo = Url::from_str(repo)?;
                        let source = git::Source { repo, reference };
                        Source::Git(source)
                    }
                    (_, _, _, Some(ipfs)) => {
                        let cid = ipfs.parse()?;
                        let source = ipfs::Source(cid);
                        Source::Ipfs(source)
                    }
                    (None, Some(version), _, _) => {
                        let namespace = det.namespace.as_ref().map_or_else(
                            || reg::file_location::Namespace::Flat,
                            |ns| reg::file_location::Namespace::Domain(ns.to_string()),
                        );
                        Source::with_version_dependency(dep_name, version, &namespace)?
                    }
                    _ => {
                        bail!("unsupported set of fields for dependency: {:?}", dep);
                    }
                }
            }
        };
        Ok(source)
    }
    /// Convert the given manifest `Dependency` declaration to a source,
    /// applying any relevant patches from within the given `manifest` as
    /// necessary.
    pub fn from_manifest_dep_patched(
        manifest: &PackageManifestFile,
        dep_name: &str,
        dep: &manifest::Dependency,
        members: &MemberManifestFiles,
    ) -> Result<Self> {
        let unpatched = Self::from_manifest_dep(manifest.dir(), dep_name, dep, members)?;
        unpatched.apply_patch(dep_name, manifest, members)
    }
    /// If a patch exists for this dependency source within the given project
    /// manifest, this returns the patch.
    ///
    /// Supports patching both Git and Registry dependencies:
    /// - Git: [patch.'https://github.com/org/repo']
    /// - Registry: [patch.'forc.pub']
    ///
    /// Note: Quotes are required around patch keys containing dots to follow TOML spec.
    /// Without quotes, `[patch.forc.pub]` creates nested tables instead of a single key.
    fn dep_patch(
        &self,
        dep_name: &str,
        manifest: &PackageManifestFile,
    ) -> Result<Option<manifest::Dependency>> {
        // Helper to check if a patch exists for the given key
        let check_patches = |patch_key: &str| -> Result<Option<manifest::Dependency>> {
            let patches = manifest.resolve_patch(patch_key)?;
            Ok(patches.and_then(|p| p.get(dep_name).cloned()))
        };
        match self {
            Source::Git(git) => {
                let git_url = git.repo.to_string();
                check_patches(&git_url)
            }
            Source::Registry(reg_source) => {
                // Try namespace-specific patch first (more specific takes priority)
                if let reg::file_location::Namespace::Domain(ns) = &reg_source.namespace {
                    let namespaced_key = format!("{}/{}", reg::REGISTRY_PATCH_KEY, ns);
                    if let Some(patch) = check_patches(&namespaced_key)? {
                        return Ok(Some(patch));
                    }
                }
                // Fall back to generic registry patch
                check_patches(reg::REGISTRY_PATCH_KEY)
            }
            // Member, path and IPFS sources are never patched.
            _ => Ok(None),
        }
    }
    /// If a patch exists for the dependency associated with this source within
    /// the given manifest, this returns a new `Source` with the patch applied.
    ///
    /// If no patch exists, this returns the original `Source`.
    pub fn apply_patch(
        &self,
        dep_name: &str,
        manifest: &PackageManifestFile,
        members: &MemberManifestFiles,
    ) -> Result<Self> {
        match self.dep_patch(dep_name, manifest)? {
            Some(patch) => Self::from_manifest_dep(manifest.dir(), dep_name, &patch, members),
            None => Ok(self.clone()),
        }
    }
    /// Attempt to determine the pinned version or commit for the source.
    ///
    /// Also updates the manifest map with a path to the local copy of the pkg.
    ///
    /// The `path_root` is required for `Path` dependencies and must specify the package that is the
    /// root of the current subgraph of path dependencies.
    pub(crate) fn pin(&self, ctx: PinCtx, manifests: &mut ManifestMap) -> Result<Pinned> {
        // Generic driver: pin the source, then fetch its manifest only when the
        // resulting pinned package has not been recorded in `manifests` yet.
        fn f<T>(source: &T, ctx: PinCtx, manifests: &mut ManifestMap) -> Result<T::Pinned>
        where
            T: Pin,
            T::Pinned: Clone,
            Pinned: From<T::Pinned>,
        {
            let (pinned, fetch_path) = source.pin(ctx.clone())?;
            let id = PinnedId::new(ctx.name(), &Pinned::from(pinned.clone()));
            if let hash_map::Entry::Vacant(entry) = manifests.entry(id) {
                entry.insert(pinned.fetch(ctx, &fetch_path)?);
            }
            Ok(pinned)
        }
        match self {
            Source::Member(source) => Ok(Pinned::Member(f(source, ctx, manifests)?)),
            Source::Path(source) => Ok(Pinned::Path(f(source, ctx, manifests)?)),
            Source::Git(source) => Ok(Pinned::Git(f(source, ctx, manifests)?)),
            Source::Ipfs(source) => Ok(Pinned::Ipfs(f(source, ctx, manifests)?)),
            Source::Registry(source) => Ok(Pinned::Registry(f(source, ctx, manifests)?)),
        }
    }
}
impl Pinned {
    /// The pinned form of a workspace member package.
    pub(crate) const MEMBER: Self = Self::Member(member::Pinned);
    /// Return how the pinned source for a dependency can be found on the local file system.
    pub(crate) fn dep_path(&self, name: &str) -> Result<DependencyPath> {
        match self {
            Self::Member(pinned) => pinned.dep_path(name),
            Self::Path(pinned) => pinned.dep_path(name),
            Self::Git(pinned) => pinned.dep_path(name),
            Self::Ipfs(pinned) => pinned.dep_path(name),
            Self::Registry(pinned) => pinned.dep_path(name),
        }
    }
    /// If the source is associated with a specific semver version, emit it.
    ///
    /// Used solely for the package lock file.
    pub fn semver(&self) -> Option<semver::Version> {
        match self {
            // Only registry sources carry a semver version.
            Self::Registry(reg) => Some(reg.source.version.clone()),
            _ => None,
        }
    }
    /// Wrap `self` in some type able to be formatted for the compiling output.
    ///
    /// This refers to `<source>` in the following:
    /// ```ignore
    /// Compiling <kind> <name> (<source>)
    /// ```
    pub fn display_compiling<'a>(&'a self, manifest_dir: &'a Path) -> DisplayCompiling<'a, Self> {
        DisplayCompiling {
            source: self,
            manifest_dir,
        }
    }
    /// Retrieve the unpinned instance of this source.
    ///
    /// Member and path sources are re-rooted at the given `path`.
    pub fn unpinned(&self, path: &Path) -> Source {
        match self {
            Self::Member(_) => Source::Member(member::Source(path.to_owned())),
            Self::Git(git) => Source::Git(git.source.clone()),
            Self::Path(_) => Source::Path(path.to_owned()),
            Self::Ipfs(ipfs) => Source::Ipfs(ipfs::Source(ipfs.0.clone())),
            Self::Registry(reg) => Source::Registry(reg.source.clone()),
        }
    }
}
impl<'a> PinCtx<'a> {
    /// The unique hash associated with the current fetch pass.
    fn fetch_id(&self) -> FetchId {
        self.fetch_id
    }
    /// The current path root within the package graph traversal.
    fn path_root(&self) -> PinnedId {
        self.path_root
    }
    /// Whether the fetch is occurring offline.
    fn offline(&self) -> bool {
        self.offline
    }
    /// The name of the package associated with this source.
    fn name(&self) -> &str {
        self.name
    }
    /// The IPFS node to use for fetching IPFS sources.
    fn ipfs_node(&self) -> &'a IPFSNode {
        self.ipfs_node
    }
}
impl fmt::Display for Pinned {
    /// Delegates to the display of the underlying pinned source kind.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Member(src) => write!(f, "{src}"),
            Self::Path(src) => write!(f, "{src}"),
            Self::Git(src) => write!(f, "{src}"),
            Self::Ipfs(src) => write!(f, "{src}"),
            Self::Registry(src) => write!(f, "{src}"),
        }
    }
}
impl fmt::Display for DisplayCompiling<'_, Pinned> {
    /// Member and path sources print their manifest directory; remote sources
    /// print their pinned source description.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.source {
            Pinned::Member(_) | Pinned::Path(_) => write!(f, "{}", self.manifest_dir.display()),
            Pinned::Git(src) => write!(f, "{src}"),
            Pinned::Ipfs(src) => write!(f, "{src}"),
            Pinned::Registry(src) => write!(f, "{src}"),
        }
    }
}
impl FromStr for Pinned {
    type Err = PinnedParseError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // `"root"` is accepted alongside `"member"` to keep reading the legacy
        // `Forc.lock` format working and avoid breaking old projects.
        if s == "root" || s == "member" {
            return Ok(Self::Member(member::Pinned));
        }
        // Try each pinned source kind in turn; the first successful parse wins.
        path::Pinned::from_str(s)
            .map(Self::Path)
            .or_else(|_| git::Pinned::from_str(s).map(Self::Git))
            .or_else(|_| ipfs::Pinned::from_str(s).map(Self::Ipfs))
            .or_else(|_| reg::Pinned::from_str(s).map(Self::Registry))
            .map_err(|_| PinnedParseError)
    }
}
/// Produce a unique ID for a particular fetch pass.
///
/// This is used in the temporary git directory and allows for avoiding contention over the git
/// repo directory.
pub fn fetch_id(path: &Path, timestamp: std::time::Instant) -> u64 {
    // Hashing the `(path, timestamp)` tuple feeds both values into the hasher
    // in order, identical to hashing them one after the other.
    let mut state = hash_map::DefaultHasher::new();
    (path, timestamp).hash(&mut state);
    state.finish()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::manifest::{Dependency, DependencyDetails};
    use std::collections::BTreeMap;
    /// Helper to create a minimal test manifest file with patch table
    fn create_test_manifest_file_with_patches(
        patches: BTreeMap<String, BTreeMap<String, Dependency>>,
    ) -> (tempfile::TempDir, PackageManifestFile) {
        // Create a minimal TOML string
        let mut toml_str = r#"[project]
name = "test_pkg"
license = "Apache-2.0"
entry = "main.sw"
implicit-std = false
"#
        .to_string();
        // Add patches if any
        if !patches.is_empty() {
            toml_str.push('\n');
            for (patch_key, patch_deps) in patches {
                toml_str.push_str(&format!("[patch.'{}']\n", patch_key));
                for (dep_name, dep) in patch_deps {
                    // Manually construct the dependency string
                    let dep_toml = match dep {
                        Dependency::Simple(ver) => format!(r#""{ver}""#),
                        Dependency::Detailed(det) => {
                            // Emit only the fields that are set, as inline TOML.
                            let mut parts = Vec::new();
                            if let Some(path) = &det.path {
                                parts.push(format!(r#"path = "{path}""#));
                            }
                            if let Some(git) = &det.git {
                                parts.push(format!(r#"git = "{git}""#));
                            }
                            if let Some(branch) = &det.branch {
                                parts.push(format!(r#"branch = "{branch}""#));
                            }
                            if let Some(tag) = &det.tag {
                                parts.push(format!(r#"tag = "{tag}""#));
                            }
                            if let Some(version) = &det.version {
                                parts.push(format!(r#"version = "{version}""#));
                            }
                            format!("{{ {} }}", parts.join(", "))
                        }
                    };
                    toml_str.push_str(&format!("{} = {}\n", dep_name, dep_toml));
                }
            }
        }
        // Create necessary directory structure
        let temp_dir = tempfile::tempdir().unwrap();
        let src_dir = temp_dir.path().join("src");
        std::fs::create_dir(&src_dir).unwrap();
        // Create a minimal main.sw file
        let main_sw_path = src_dir.join("main.sw");
        std::fs::write(&main_sw_path, "contract;").unwrap();
        // Write manifest file
        let manifest_path = temp_dir.path().join("Forc.toml");
        std::fs::write(&manifest_path, toml_str).unwrap();
        // Read back as PackageManifestFile
        let manifest_file = PackageManifestFile::from_file(&manifest_path).unwrap();
        (temp_dir, manifest_file)
    }
    /// Helper to create a path dependency
    fn path_dep(path: &str) -> Dependency {
        Dependency::Detailed(DependencyDetails {
            path: Some(path.to_string()),
            ..Default::default()
        })
    }
    /// Helper to create a git dependency
    fn git_dep(repo: &str, branch: &str) -> Dependency {
        Dependency::Detailed(DependencyDetails {
            git: Some(repo.to_string()),
            branch: Some(branch.to_string()),
            ..Default::default()
        })
    }
    // A `[patch.'forc.pub']` entry should apply to a registry dependency in
    // the flat (un-namespaced) registry namespace.
    #[test]
    fn test_registry_patch_flat_namespace() {
        // Create a registry source with flat namespace
        let source = Source::Registry(reg::Source {
            name: "std".to_string(),
            version: semver::Version::new(0, 63, 0),
            namespace: reg::file_location::Namespace::Flat,
        });
        // Create a manifest with a forc.pub patch
        let mut patches = BTreeMap::new();
        let mut forc_pub_patches = BTreeMap::new();
        forc_pub_patches.insert("std".to_string(), path_dep("../local-std"));
        patches.insert("forc.pub".to_string(), forc_pub_patches);
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(patches);
        // Test that the patch is found
        let patch = source.dep_patch("std", &manifest_file).unwrap();
        assert!(
            patch.is_some(),
            "Should find patch for flat namespace registry dependency"
        );
        let patch = patch.unwrap();
        match patch {
            Dependency::Detailed(det) => {
                assert_eq!(det.path, Some("../local-std".to_string()));
            }
            _ => panic!("Expected detailed dependency"),
        }
    }
    // A `[patch.'forc.pub/<domain>']` entry should apply to a registry
    // dependency in that domain namespace.
    #[test]
    fn test_registry_patch_domain_namespace() {
        // Create a registry source with domain namespace
        let source = Source::Registry(reg::Source {
            name: "fuel-core".to_string(),
            version: semver::Version::new(1, 0, 0),
            namespace: reg::file_location::Namespace::Domain("com/fuel".to_string()),
        });
        // Create a manifest with a namespaced patch
        let mut patches = BTreeMap::new();
        let mut namespaced_patches = BTreeMap::new();
        namespaced_patches.insert("fuel-core".to_string(), path_dep("../local-fuel-core"));
        patches.insert("forc.pub/com/fuel".to_string(), namespaced_patches);
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(patches);
        // Test that the patch is found
        let patch = source.dep_patch("fuel-core", &manifest_file).unwrap();
        assert!(
            patch.is_some(),
            "Should find patch for domain namespace registry dependency"
        );
        let patch = patch.unwrap();
        match patch {
            Dependency::Detailed(det) => {
                assert_eq!(det.path, Some("../local-fuel-core".to_string()));
            }
            _ => panic!("Expected detailed dependency"),
        }
    }
    // When both a namespace-specific and a generic patch exist, the
    // namespace-specific one must win.
    #[test]
    fn test_registry_patch_namespace_priority() {
        // Create a registry source with domain namespace
        let source = Source::Registry(reg::Source {
            name: "my-lib".to_string(),
            version: semver::Version::new(2, 0, 0),
            namespace: reg::file_location::Namespace::Domain("com/myorg".to_string()),
        });
        // Create a manifest with BOTH namespaced and generic patches
        let mut patches = BTreeMap::new();
        // Namespace-specific patch
        let mut namespaced_patches = BTreeMap::new();
        namespaced_patches.insert("my-lib".to_string(), path_dep("../namespaced-lib"));
        patches.insert("forc.pub/com/myorg".to_string(), namespaced_patches);
        // Generic patch
        let mut generic_patches = BTreeMap::new();
        generic_patches.insert("my-lib".to_string(), path_dep("../generic-lib"));
        patches.insert("forc.pub".to_string(), generic_patches);
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(patches);
        // Test that namespace-specific patch takes priority
        let patch = source.dep_patch("my-lib", &manifest_file).unwrap();
        assert!(patch.is_some(), "Should find patch");
        let patch = patch.unwrap();
        match patch {
            Dependency::Detailed(det) => {
                assert_eq!(
                    det.path,
                    Some("../namespaced-lib".to_string()),
                    "Should use namespace-specific patch, not generic patch"
                );
            }
            _ => panic!("Expected detailed dependency"),
        }
    }
    // With no namespace-specific patch present, the generic `forc.pub` patch
    // should still apply to a namespaced dependency.
    #[test]
    fn test_registry_patch_fallback_to_generic() {
        // Create a registry source with domain namespace
        let source = Source::Registry(reg::Source {
            name: "common-lib".to_string(),
            version: semver::Version::new(1, 0, 0),
            namespace: reg::file_location::Namespace::Domain("com/myorg".to_string()),
        });
        // Create a manifest with ONLY generic patch (no namespace-specific)
        let mut patches = BTreeMap::new();
        let mut generic_patches = BTreeMap::new();
        generic_patches.insert("common-lib".to_string(), path_dep("../common-lib"));
        patches.insert("forc.pub".to_string(), generic_patches);
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(patches);
        // Test that it falls back to generic patch
        let patch = source.dep_patch("common-lib", &manifest_file).unwrap();
        assert!(patch.is_some(), "Should find generic patch as fallback");
        let patch = patch.unwrap();
        match patch {
            Dependency::Detailed(det) => {
                assert_eq!(det.path, Some("../common-lib".to_string()));
            }
            _ => panic!("Expected detailed dependency"),
        }
    }
    // Git sources keyed by repo URL must keep working alongside the registry
    // patch keys.
    #[test]
    fn test_git_patch_still_works() {
        // Create a git source
        let repo_url = "https://github.com/fuellabs/sway";
        let source = Source::Git(git::Source {
            repo: git::Url::from_str(repo_url).unwrap(),
            reference: git::Reference::Tag("v0.63.0".to_string()),
        });
        // Create a manifest with a git patch
        let mut patches = BTreeMap::new();
        let mut git_patches = BTreeMap::new();
        git_patches.insert(
            "std".to_string(),
            git_dep("https://github.com/fuellabs/sway", "feature-branch"),
        );
        patches.insert(repo_url.to_string(), git_patches);
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(patches);
        // Test that git patch still works
        let patch = source.dep_patch("std", &manifest_file).unwrap();
        assert!(
            patch.is_some(),
            "Should find git patch (backward compatibility)"
        );
        let patch = patch.unwrap();
        match patch {
            Dependency::Detailed(det) => {
                assert_eq!(
                    det.git,
                    Some("https://github.com/fuellabs/sway".to_string())
                );
                assert_eq!(det.branch, Some("feature-branch".to_string()));
            }
            _ => panic!("Expected detailed dependency"),
        }
    }
    // A patch for a different package name must not be returned.
    #[test]
    fn test_no_patch_found() {
        // Create a registry source
        let source = Source::Registry(reg::Source {
            name: "no-patch-lib".to_string(),
            version: semver::Version::new(1, 0, 0),
            namespace: reg::file_location::Namespace::Flat,
        });
        // Create a manifest with patches for different packages
        let mut patches = BTreeMap::new();
        let mut forc_pub_patches = BTreeMap::new();
        forc_pub_patches.insert("other-lib".to_string(), path_dep("../other-lib"));
        patches.insert("forc.pub".to_string(), forc_pub_patches);
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(patches);
        // Test that no patch is found
        let patch = source.dep_patch("no-patch-lib", &manifest_file).unwrap();
        assert!(
            patch.is_none(),
            "Should not find patch for different package"
        );
    }
    // Path sources are local and never patched.
    #[test]
    fn test_path_source_no_patch() {
        // Path sources should not have patches
        let source = Source::Path(PathBuf::from("/some/path"));
        let (_temp_dir, manifest_file) = create_test_manifest_file_with_patches(BTreeMap::new());
        // Test that no patch is found for path sources
        let patch = source.dep_patch("anything", &manifest_file).unwrap();
        assert!(patch.is_none(), "Path sources should not support patches");
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/ipfs.rs | forc-pkg/src/source/ipfs.rs | use crate::manifest::GenericManifestFile;
use crate::{
manifest::{self, PackageManifestFile},
source,
};
use anyhow::Result;
use flate2::read::GzDecoder;
use forc_tracing::println_action_green;
use futures::TryStreamExt;
use ipfs_api::IpfsApi;
use ipfs_api_backend_hyper as ipfs_api;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::{
fmt,
path::{Path, PathBuf},
str::FromStr,
};
use tar::Archive;
/// A content identifier addressing a package's source on the IPFS network.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Cid(pub(crate) cid::Cid);
/// A client that can interact with local ipfs daemon.
pub type IpfsClient = ipfs_api::IpfsClient;
/// Package source at a specific content address.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
pub struct Source(pub Cid);
/// A pinned instance of an ipfs source
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
pub struct Pinned(pub Cid);
impl Pinned {
    /// Prefix used in the string form of a pinned IPFS source (`ipfs+<cid>`).
    pub const PREFIX: &'static str = "ipfs";
}
// Directory names used to lay out the IPFS cache under the user's forc dir.
const IPFS_DIR_NAME: &str = "ipfs";
const IPFS_CACHE_DIR_NAME: &str = "cache";
impl FromStr for Cid {
    type Err = <cid::Cid as FromStr>::Err;
    /// Parse a `Cid` from its canonical string representation.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        s.parse().map(Self)
    }
}
impl source::Pin for Source {
type Pinned = Pinned;
fn pin(&self, _ctx: source::PinCtx) -> Result<(Self::Pinned, PathBuf)> {
let cid = &self.0;
let pinned = Pinned(cid.clone());
let path = pkg_cache_dir(cid);
Ok((pinned, path))
}
}
impl source::Fetch for Pinned {
    /// Fetch the package source for this CID into the local cache (unless it
    /// is already present) and return its manifest file.
    fn fetch(&self, ctx: source::PinCtx, repo_path: &Path) -> Result<PackageManifestFile> {
        // TODO: implement local cache search for ipfs sources.
        if ctx.offline {
            anyhow::bail!("offline fetching for IPFS sources is not supported")
        }
        // Advisory file lock co-ordinating access to the cache directory with
        // other forc processes.
        let mut lock = forc_util::path_lock(repo_path)?;
        // TODO: Here we assume that if the local path already exists, that it contains the
        // full and correct source for that registry entry and hasn't been tampered with. This is
        // probably fine for most cases as users should never be touching these
        // directories, however we should add some code to validate this. E.g. can we
        // recreate the ipfs cid by hashing the directory or something along these lines?
        // https://github.com/FuelLabs/sway/issues/7075
        {
            let _guard = lock.write()?;
            if !repo_path.exists() {
                println_action_green(
                    "Fetching",
                    &format!("{} {}", ansiterm::Style::new().bold().paint(ctx.name), self),
                );
                let cid = self.0.clone();
                let ipfs_node = ctx.ipfs_node().clone();
                let ipfs_client = ipfs_client();
                let dest = cache_dir();
                // The actual fetch is async; block on it from this sync context.
                crate::source::reg::block_on_any_runtime(async move {
                    match ipfs_node {
                        source::IPFSNode::Local => {
                            println_action_green("Fetching", "with local IPFS node");
                            cid.fetch_with_client(&ipfs_client, &dest).await
                        }
                        source::IPFSNode::WithUrl(ipfs_node_gateway_url) => {
                            println_action_green(
                                "Fetching",
                                &format!(
                                    "from {ipfs_node_gateway_url}. Note: This can take several minutes."
                                ),
                            );
                            cid.fetch_with_gateway_url(&ipfs_node_gateway_url, &dest)
                                .await
                        }
                    }
                })?;
            }
        }
        // Downgrade to a read lock while locating the package manifest within
        // the fetched source tree.
        let path = {
            let _guard = lock.read()?;
            manifest::find_within(repo_path, ctx.name()).ok_or_else(|| {
                anyhow::anyhow!("failed to find package `{}` in {}", ctx.name(), self)
            })?
        };
        PackageManifestFile::from_file(path)
    }
}
impl source::DepPath for Pinned {
    /// Resolve the on-disk manifest path for the named package within this
    /// source's cache checkout.
    fn dep_path(&self, name: &str) -> anyhow::Result<source::DependencyPath> {
        let repo_path = pkg_cache_dir(&self.0);
        // Co-ordinate access to the ipfs checkout directory using an advisory file lock.
        let lock = forc_util::path_lock(&repo_path)?;
        let _guard = lock.read()?;
        let path = manifest::find_within(&repo_path, name)
            .ok_or_else(|| anyhow::anyhow!("failed to find package `{}` in {}", name, self))?;
        Ok(source::DependencyPath::ManifestPath(path))
    }
}
/// Allow a pinned IPFS source to be used wherever a general pinned source is
/// expected.
impl From<Pinned> for source::Pinned {
    fn from(p: Pinned) -> Self {
        Self::Ipfs(p)
    }
}
impl fmt::Display for Pinned {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}+{}", Self::PREFIX, self.0 .0)
}
}
impl Cid {
    /// Unpack the tar archive read from `reader` into `dst/<cid>`.
    ///
    /// Entries are unpacked with `unpack_in`, which refuses to write entries
    /// whose paths would escape the destination directory, guarding against
    /// path traversal from malicious archives.
    fn extract_archive<R: std::io::Read>(&self, reader: R, dst: &Path) -> Result<()> {
        let dst_dir = dst.join(self.0.to_string());
        std::fs::create_dir_all(&dst_dir)?;
        let mut archive = Archive::new(reader);
        for entry in archive.entries()? {
            let mut entry = entry?;
            entry.unpack_in(&dst_dir)?;
        }
        Ok(())
    }
    /// Using local node, fetches the content described by this cid.
    pub(crate) async fn fetch_with_client(
        &self,
        ipfs_client: &IpfsClient,
        dst: &Path,
    ) -> Result<()> {
        let cid_path = format!("/ipfs/{}", self.0);
        // Since we are fetching packages as a folder, they are returned as a tar archive.
        let bytes = ipfs_client
            .get(&cid_path)
            .map_ok(|chunk| chunk.to_vec())
            .try_concat()
            .await?;
        // After collecting bytes of the archive, we unpack it to the dst.
        self.extract_archive(bytes.as_slice(), dst)?;
        Ok(())
    }
    /// Using the provided gateway url, fetches the content described by this cid.
    pub(crate) async fn fetch_with_gateway_url(&self, gateway_url: &str, dst: &Path) -> Result<()> {
        let client = reqwest::Client::new();
        // We request the content to be served to us in tar format by the public gateway.
        let fetch_url = format!(
            "{}/ipfs/{}?download=true&filename={}.tar.gz",
            gateway_url, self.0, self.0
        );
        let req = client.get(&fetch_url);
        let res = req.send().await?;
        if !res.status().is_success() {
            anyhow::bail!("Failed to fetch from {fetch_url:?}");
        }
        // Gateway responses are gzip-compressed tar archives: decode, then unpack.
        let bytes: Vec<_> = res.bytes().await?.into_iter().collect();
        let tar = GzDecoder::new(bytes.as_slice());
        // After collecting and decoding bytes of the archive, we unpack it to the dst.
        self.extract_archive(tar, dst)?;
        Ok(())
    }
}
/// Error produced when parsing a pinned IPFS source from a string.
#[derive(Debug)]
pub enum PinnedParseError {
    /// The `ipfs+` prefix was missing.
    Prefix,
    /// The CID portion failed to parse.
    Cid(<cid::Cid as FromStr>::Err),
}
impl FromStr for Pinned {
    type Err = PinnedParseError;
    /// Parse a pinned IPFS source from its `ipfs+<cid>` string form.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // ipfs+<cid>
        let s = s.trim();
        // Parse the prefix. `strip_prefix` anchors the match at the start of
        // the string; the previous `s.find(&prefix_plus) != Some(0)` check
        // scanned the entire input and allocated a temporary `String` just to
        // test for the prefix.
        let s = s
            .strip_prefix(Self::PREFIX)
            .and_then(|rest| rest.strip_prefix('+'))
            .ok_or(PinnedParseError::Prefix)?;
        // Then the CID.
        let cid: cid::Cid = s.parse().map_err(PinnedParseError::Cid)?;
        Ok(Self(Cid(cid)))
    }
}
impl Serialize for Cid {
    /// Serialize the CID via its canonical string representation.
    fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        self.0.to_string().serialize(s)
    }
}
impl<'de> Deserialize<'de> for Cid {
    /// Deserialize a CID from its string representation, mirroring the
    /// `Serialize` impl above.
    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        use serde::de::Error;
        let cid_string = String::deserialize(d)?;
        let cid: cid::Cid = cid_string.parse().map_err(|e| {
            let msg = format!("failed to parse CID from {cid_string:?}: {e}");
            D::Error::custom(msg)
        })?;
        Ok(Self(cid))
    }
}
/// Path to the `ipfs` subdirectory of the user's forc directory.
fn ipfs_dir() -> PathBuf {
    forc_util::user_forc_directory().join(IPFS_DIR_NAME)
}
/// Directory under which fetched IPFS packages are cached.
fn cache_dir() -> PathBuf {
    ipfs_dir().join(IPFS_CACHE_DIR_NAME)
}
/// Cache directory dedicated to the package with the given CID.
fn pkg_cache_dir(cid: &Cid) -> PathBuf {
    cache_dir().join(format!("{}", cid.0))
}
/// Returns a `IpfsClient` instance ready to be used to make requests to local ipfs node.
pub(crate) fn ipfs_client() -> IpfsClient {
    IpfsClient::default()
}
#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use std::io::Cursor;
    use tar::Header;
    use tempfile::TempDir;
    /// Build a GNU tar header for `path` with the given payload `size`.
    fn create_header(path: &str, size: u64) -> Header {
        let mut header = Header::new_gnu();
        header.set_path(path).unwrap();
        header.set_size(size);
        header.set_mode(0o755);
        header.set_cksum();
        header
    }
    /// Build an in-memory tar archive containing a `test-project/` root plus
    /// the given `(path, content)` files.
    fn create_test_tar(files: &[(&str, &str)]) -> Vec<u8> {
        let mut ar = tar::Builder::new(Vec::new());
        // Add root project directory
        let header = create_header("test-project/", 0);
        ar.append(&header, &mut std::io::empty()).unwrap();
        // Add files
        for (path, content) in files {
            let full_path = format!("test-project/{path}");
            let header = create_header(&full_path, content.len() as u64);
            ar.append(&header, content.as_bytes()).unwrap();
        }
        ar.into_inner().unwrap()
    }
    /// A fixed, well-formed CID for use across the tests below.
    fn create_test_cid() -> Cid {
        let cid = cid::Cid::from_str("QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG").unwrap();
        Cid(cid)
    }
    // A single file unpacks into `<dst>/<cid>/test-project/`.
    #[test]
    fn test_basic_extraction() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let cid = create_test_cid();
        let tar_content = create_test_tar(&[("test.txt", "hello world")]);
        cid.extract_archive(Cursor::new(tar_content), temp_dir.path())?;
        let extracted_path = temp_dir
            .path()
            .join(cid.0.to_string())
            .join("test-project")
            .join("test.txt");
        assert!(extracted_path.exists());
        assert_eq!(std::fs::read_to_string(extracted_path)?, "hello world");
        Ok(())
    }
    // Nested directories within the archive are recreated on disk.
    #[test]
    fn test_nested_files() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let cid = create_test_cid();
        let tar_content =
            create_test_tar(&[("src/main.sw", "contract {};"), ("README.md", "# Test")]);
        cid.extract_archive(Cursor::new(tar_content), temp_dir.path())?;
        let base = temp_dir.path().join(cid.0.to_string()).join("test-project");
        assert_eq!(
            std::fs::read_to_string(base.join("src/main.sw"))?,
            "contract {};"
        );
        assert_eq!(std::fs::read_to_string(base.join("README.md"))?, "# Test");
        Ok(())
    }
    // Garbage input must surface as an error, not a panic.
    #[test]
    fn test_invalid_tar() {
        let temp_dir = TempDir::new().unwrap();
        let cid = create_test_cid();
        let result = cid.extract_archive(Cursor::new(b"not a tar file"), temp_dir.path());
        assert!(result.is_err());
    }
    // `FromStr` and `Display` for `Pinned` must round-trip.
    #[test]
    fn test_source_ipfs_pinned_parsing() {
        let string = "ipfs+QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG";
        let expected = Pinned(Cid(cid::Cid::from_str(
            "QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG",
        )
        .unwrap()));
        let parsed = Pinned::from_str(string).unwrap();
        assert_eq!(parsed, expected);
        let serialized = expected.to_string();
        assert_eq!(&serialized, string);
    }
    // An archive entry with a `../../` path must not be able to overwrite
    // files outside the extraction directory.
    #[test]
    fn test_path_traversal_prevention() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let cid = create_test_cid();
        // Create a known directory structure
        let target_dir = temp_dir.path().join("target");
        std::fs::create_dir(&target_dir)?;
        // Create our canary file in a known location
        let canary_content = "sensitive content";
        let canary_path = target_dir.join("canary.txt");
        std::fs::write(&canary_path, canary_content)?;
        // Create tar with malicious path targeting our specific canary file
        let mut header = tar::Header::new_gnu();
        let malicious_path = b"../../target/canary.txt";
        // Write the raw name bytes directly: `set_path` would reject the
        // traversal components, and we want them present in the archive.
        header.as_gnu_mut().unwrap().name[..malicious_path.len()].copy_from_slice(malicious_path);
        header.set_size(17);
        header.set_mode(0o644);
        header.set_cksum();
        let mut ar = tar::Builder::new(Vec::new());
        ar.append(&header, b"malicious content".as_slice())?;
        // Add safe file
        let mut safe_header = tar::Header::new_gnu();
        safe_header.set_path("safe.txt")?;
        safe_header.set_size(12);
        safe_header.set_mode(0o644);
        safe_header.set_cksum();
        ar.append(&safe_header, b"safe content".as_slice())?;
        // Extract to a subdirectory of temp_dir
        let tar_content = ar.into_inner()?;
        let extract_dir = temp_dir.path().join("extract");
        std::fs::create_dir(&extract_dir)?;
        cid.extract_archive(Cursor::new(tar_content), &extract_dir)?;
        // Verify canary file was not modified
        assert_eq!(
            std::fs::read_to_string(&canary_path)?,
            canary_content,
            "Canary file was modified - path traversal protection failed!"
        );
        Ok(())
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/member.rs | forc-pkg/src/source/member.rs | use crate::manifest::GenericManifestFile;
use crate::{manifest::PackageManifestFile, source};
use serde::{Deserialize, Serialize};
use std::{
fmt,
path::{Path, PathBuf},
};
/// Member source representation as a canonical path.
// `pub(super)` so sibling source modules (e.g. `unpinned` in the parent) can
// construct it, while keeping it opaque to the rest of the crate.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Source(pub(super) PathBuf);
/// A pinned instance of a member source requires no information as it's a part
/// of the workspace.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned;
impl fmt::Display for Pinned {
    /// Workspace members are always rendered as the literal `member`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("member")
    }
}
impl source::Pin for Source {
type Pinned = Pinned;
fn pin(&self, _ctx: source::PinCtx) -> anyhow::Result<(Self::Pinned, PathBuf)> {
Ok((Pinned, self.0.clone()))
}
}
impl source::Fetch for Pinned {
    fn fetch(&self, _ctx: source::PinCtx, local: &Path) -> anyhow::Result<PackageManifestFile> {
        // A member package is already on disk; just load its manifest.
        Ok(PackageManifestFile::from_dir(local)?)
    }
}
impl source::DepPath for Pinned {
    /// Member dependencies resolve within the workspace itself; there is no
    /// separate on-disk fetch location.
    fn dep_path(&self, _name: &str) -> anyhow::Result<source::DependencyPath> {
        Ok(source::DependencyPath::Member)
    }
}
/// Allow a pinned member source to be used wherever a general pinned source
/// is expected.
impl From<Pinned> for source::Pinned {
    fn from(p: Pinned) -> Self {
        Self::Member(p)
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/reg/index_file.rs | forc-pkg/src/source/reg/index_file.rs | //! This module handles everything to do with index files.
//!
//! Index files are for creating set of information for identifying a published
//! package. They are used by forc while fetching to actually convert a registry
//! index into a IPFS CID. We also add some metadata to this index files to
//! enable forc to do "more clever" fetching during build process. By moving
//! dependency resolution from the time a package is fetched to the point we
//! start fetching we are actively enabling forc to fetch packages and their
//! dependencies in parallel.
//!
//! There are two main things forc needs to be able to do for index files:
//! 1: Creation of index files from published packages
//! 2: Calculating correct path for given package index.
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// The on-registry index file for a single package: all of its published
/// versions keyed by semver version.
#[derive(Serialize, Deserialize, Default)]
pub struct IndexFile {
    /// Each published instance for this specific package, keyed by their
    /// versions. The reason we are doing this type of mapping is for ease of
    /// use and deterministic ordering; we are effectively duplicating the
    /// version of the package but keeping `PackageEntry` self contained.
    #[serde(flatten)]
    versions: BTreeMap<semver::Version, PackageEntry>,
}
/// A unique representation of each published package to `forc.pub`. Contains:
///
/// 1. The name of the package.
/// 2. The version of the package.
/// 3. CID of the package's source code. This is how forc actually resolves a
///    package name, version information into actual information on how to get
///    the package.
/// 4. CID of the package's abi if the package is a contract.
/// 5. Dependencies of this package. If there are other packages this package
///    depends on, some information can be directly found in the root package
///    to enable parallel fetching.
#[derive(Serialize, Deserialize, Clone)]
pub struct PackageEntry {
    /// Name of the package.
    /// This is the actual package name needed in forc.toml file to fetch this
    /// package.
    // `package_name` is accepted as an alias to remain compatible with index
    // entries serialized under the older field name.
    #[serde(alias = "package_name")]
    name: String,
    /// Version of the package.
    /// This is the actual package version needed in forc.toml file to fetch
    /// this package.
    version: semver::Version,
    /// IPFS CID of this specific package's source code. This is pinned by
    /// forc.pub at the time of package publishing and thus will be
    /// available all the time.
    source_cid: String,
    /// IPFS CID of this specific package's abi. This is pinned by
    /// forc.pub at the time of package publishing and thus will be
    /// available all the time if this exists in the first place, i.e the
    /// package is a contract.
    abi_cid: Option<String>,
    /// Dependencies of the current package entry. Can be consumed to enable
    /// parallel fetching by the consumers of this index, mainly forc.
    dependencies: Vec<PackageDependencyIdentifier>,
    /// Determines if the package should be skipped while building. Marked as
    /// voided by the publisher for various reasons.
    yanked: bool,
}
/// Identifies a dependency of a published package by name and version
/// requirement string.
#[derive(Serialize, Deserialize, Clone)]
pub struct PackageDependencyIdentifier {
    /// Name of the dependency.
    /// Name and version information can be used by consumer of this index
    /// to resolve dependencies.
    package_name: String,
    /// Version of the dependency.
    /// Name and version information can be used by consumer of this index
    /// to resolve dependencies.
    version: String,
}
impl PackageEntry {
    /// Construct a new `PackageEntry` from its constituent parts.
    pub fn new(
        name: String,
        version: semver::Version,
        source_cid: String,
        abi_cid: Option<String>,
        dependencies: Vec<PackageDependencyIdentifier>,
        yanked: bool,
    ) -> Self {
        Self {
            name,
            version,
            source_cid,
            abi_cid,
            dependencies,
            yanked,
        }
    }
    /// Returns the name of this `PackageEntry`.
    pub fn name(&self) -> &str {
        &self.name
    }
    /// Returns the version of this `PackageEntry`.
    pub fn version(&self) -> &semver::Version {
        &self.version
    }
    /// Returns the source cid of this `PackageEntry`.
    pub fn source_cid(&self) -> &str {
        &self.source_cid
    }
    /// Returns the abi cid of this `PackageEntry`.
    pub fn abi_cid(&self) -> Option<&str> {
        self.abi_cid.as_deref()
    }
    /// Returns an iterator over dependencies of this package.
    pub fn dependencies(&self) -> impl Iterator<Item = &PackageDependencyIdentifier> {
        self.dependencies.iter()
    }
    /// Returns the `yanked` status of this package.
    pub fn yanked(&self) -> bool {
        self.yanked
    }
}
impl PackageDependencyIdentifier {
    /// Construct a dependency identifier from a package name and a version
    /// requirement string.
    pub fn new(package_name: String, version: String) -> Self {
        Self {
            package_name,
            version,
        }
    }
}
impl IndexFile {
    /// Returns the package entry if the specified version exists.
    /// Otherwise returns `None`.
    pub fn get(&self, version: &semver::Version) -> Option<&PackageEntry> {
        self.versions.get(version)
    }
    /// Inserts a package into this `IndexFile`
    /// NOTE: if there is a package with the same version in the index file
    /// it will get overridden.
    pub fn insert(&mut self, package: PackageEntry) {
        // Key by version; `BTreeMap::insert` replaces any existing entry for
        // the same version.
        let pkg_version = package.version().clone();
        self.versions.insert(pkg_version, package);
    }
    /// Returns an iterator over the versions in the index file.
    pub fn versions(&self) -> impl Iterator<Item = &semver::Version> {
        self.versions.keys()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // An empty index must round-trip through JSON as an empty object.
    #[test]
    fn test_serialize_deserialize_empty_index() {
        let index = IndexFile {
            versions: BTreeMap::new(),
        };
        let serialized = serde_json::to_string(&index).unwrap();
        assert_eq!(serialized, "{}");
        let deserialized: IndexFile = serde_json::from_str(&serialized).unwrap();
        assert_eq!(deserialized.versions.len(), 0);
    }
    #[test]
    fn test_json_format() {
        // Test parsing from a JSON
        let json = r#"{
            "0.0.1":{
                "package_name":"tester",
                "version":"0.0.1",
                "source_cid":"QmOlderHash",
                "abi_cid":"QmOlderAbiHash",
                "dependencies":[],
                "yanked": false
            },
            "0.0.2":{
                "package_name":"tester",
                "version":"0.0.2",
                "source_cid":"QmExampleHash",
                "abi_cid":"QmExampleAbiHash",
                "dependencies":[],
                "yanked": false
            }
        }"#;
        let deserialized: IndexFile = serde_json::from_str(json).unwrap();
        assert_eq!(deserialized.versions.len(), 2);
        assert!(deserialized
            .versions
            .contains_key(&semver::Version::new(0, 0, 1)));
        assert!(deserialized
            .versions
            .contains_key(&semver::Version::new(0, 0, 2)));
        let v011 = &deserialized.versions[&semver::Version::new(0, 0, 1)];
        assert_eq!(v011.source_cid, "QmOlderHash");
        assert_eq!(v011.abi_cid, Some("QmOlderAbiHash".to_string()));
        assert_eq!(v011.dependencies.len(), 0);
        let v012 = &deserialized.versions[&semver::Version::new(0, 0, 2)];
        assert_eq!(v012.source_cid, "QmExampleHash");
        assert_eq!(v012.abi_cid, Some("QmExampleAbiHash".to_string()));
        assert_eq!(v012.dependencies.len(), 0);
    }
    // Inserting a new entry into a parsed index must survive a
    // serialize/deserialize round trip alongside the existing entry.
    #[test]
    fn test_add_new_package_entry_and_parse_back() {
        let json = r#"{
            "1.0.0": {
                "name": "existing-package",
                "version": "1.0.0",
                "source_cid": "QmExistingHash",
                "abi_cid": "QmExistingAbiHash",
                "dependencies": [
                    {
                        "package_name": "dep1",
                        "version": "^0.5.0"
                    }
                ],
                "yanked": false
            }
        }"#;
        let mut index_file: IndexFile = serde_json::from_str(json).unwrap();
        assert_eq!(index_file.versions.len(), 1);
        assert!(index_file
            .versions
            .contains_key(&semver::Version::new(1, 0, 0)));
        let dependencies = vec![
            PackageDependencyIdentifier::new("new-dep1".to_string(), "^1.0.0".to_string()),
            PackageDependencyIdentifier::new("new-dep2".to_string(), "=0.9.0".to_string()),
        ];
        let yanked = false;
        let new_package = PackageEntry::new(
            "new-package".to_string(),
            semver::Version::new(2, 1, 0),
            "QmNewPackageHash".to_string(),
            Some("QmNewPackageAbiHash".to_string()),
            dependencies,
            yanked,
        );
        index_file.insert(new_package);
        assert_eq!(index_file.versions.len(), 2);
        assert!(index_file
            .versions
            .contains_key(&semver::Version::new(1, 0, 0)));
        assert!(index_file
            .versions
            .contains_key(&semver::Version::new(2, 1, 0)));
        let updated_json = serde_json::to_string_pretty(&index_file).unwrap();
        let reparsed_index: IndexFile = serde_json::from_str(&updated_json).unwrap();
        assert_eq!(reparsed_index.versions.len(), 2);
        assert!(reparsed_index
            .versions
            .contains_key(&semver::Version::new(1, 0, 0)));
        assert!(reparsed_index
            .versions
            .contains_key(&semver::Version::new(2, 1, 0)));
        let new_pkg = reparsed_index.get(&semver::Version::new(2, 1, 0)).unwrap();
        assert_eq!(new_pkg.name(), "new-package");
        assert_eq!(new_pkg.version(), &semver::Version::new(2, 1, 0));
        assert_eq!(new_pkg.source_cid(), "QmNewPackageHash");
        assert_eq!(new_pkg.abi_cid(), Some("QmNewPackageAbiHash"));
        let deps: Vec<_> = new_pkg.dependencies().collect();
        assert_eq!(deps.len(), 2);
        assert_eq!(deps[0].package_name, "new-dep1");
        assert_eq!(deps[0].version, "^1.0.0");
        assert_eq!(deps[1].package_name, "new-dep2");
        assert_eq!(deps[1].version, "=0.9.0");
        let orig_pkg = reparsed_index.get(&semver::Version::new(1, 0, 0)).unwrap();
        assert_eq!(orig_pkg.name(), "existing-package");
        assert_eq!(orig_pkg.source_cid(), "QmExistingHash");
    }
    #[test]
    fn test_json_with_dependencies() {
        // Test parsing a JSON with dependencies
        let json = r#"{
            "1.0.0": {
                "package_name": "main-package",
                "version": "1.0.0",
                "source_cid": "QmMainHash",
                "abi_cid": null,
                "dependencies": [
                    {
                        "package_name": "dep-package",
                        "version": "^0.5.0"
                    },
                    {
                        "package_name": "another-dep",
                        "version": "=0.9.1"
                    },
                    {
                        "package_name": "third-dep",
                        "version": "0.2.0"
                    }
                ],
                "yanked": false
            }
        }"#;
        let deserialized: IndexFile = serde_json::from_str(json).unwrap();
        // Verify main package
        assert_eq!(deserialized.versions.len(), 1);
        assert!(deserialized
            .versions
            .contains_key(&semver::Version::new(1, 0, 0)));
        let main_pkg = &deserialized.versions[&semver::Version::new(1, 0, 0)];
        assert_eq!(main_pkg.name, "main-package");
        assert_eq!(main_pkg.source_cid, "QmMainHash");
        assert_eq!(main_pkg.abi_cid, None);
        assert!(!main_pkg.yanked);
        // Verify dependencies
        assert_eq!(main_pkg.dependencies.len(), 3);
        // Check first dependency
        let dep1 = &main_pkg.dependencies[0];
        assert_eq!(dep1.package_name, "dep-package");
        assert_eq!(dep1.version, "^0.5.0");
        // Check second dependency
        let dep2 = &main_pkg.dependencies[1];
        assert_eq!(dep2.package_name, "another-dep");
        assert_eq!(dep2.version, "=0.9.1");
        // Check third dependency
        let dep3 = &main_pkg.dependencies[2];
        assert_eq!(dep3.package_name, "third-dep");
        assert_eq!(dep3.version, "0.2.0");
        // Test round-trip serialization
        let serialized = serde_json::to_string_pretty(&deserialized).unwrap();
        println!("Re-serialized JSON: {serialized}");
        // Deserialize again to ensure it's valid
        let re_deserialized: IndexFile = serde_json::from_str(&serialized).unwrap();
        assert_eq!(re_deserialized.versions.len(), 1);
        // Verify the structure is preserved
        let main_pkg2 = &re_deserialized.versions[&semver::Version::new(1, 0, 0)];
        assert_eq!(main_pkg2.dependencies.len(), 3);
    }
    #[test]
    fn test_json_with_missing_optional_fields() {
        // Test parsing a JSON where some optional fields are missing
        let json = r#"{
            "0.5.0": {
                "package_name": "minimal-package",
                "version": "0.5.0",
                "source_cid": "QmMinimalHash",
                "dependencies": [],
                "yanked": false
            }
        }"#;
        let deserialized: IndexFile = serde_json::from_str(json).unwrap();
        assert_eq!(deserialized.versions.len(), 1);
        let pkg = &deserialized.versions[&semver::Version::new(0, 5, 0)];
        assert_eq!(pkg.name, "minimal-package");
        assert_eq!(pkg.source_cid, "QmMinimalHash");
        // `abi_cid` is optional and defaults to `None` when absent.
        assert_eq!(pkg.abi_cid, None);
        assert_eq!(pkg.dependencies.len(), 0);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/reg/mod.rs | forc-pkg/src/source/reg/mod.rs | pub mod file_location;
pub mod index_file;
use super::IPFSNode;
use crate::{
manifest::{self, GenericManifestFile, PackageManifestFile},
source::{
self,
ipfs::{ipfs_client, Cid},
},
};
use anyhow::{anyhow, bail, Context};
use file_location::{location_from_root, Namespace};
use flate2::read::GzDecoder;
use forc_tracing::println_action_green;
use index_file::IndexFile;
use serde::{Deserialize, Serialize};
use std::{
fmt::Display,
fs,
path::{Path, PathBuf},
str::FromStr,
thread,
time::Duration,
};
use tar::Archive;
/// Name of the folder containing fetched registry sources.
/// Joined onto the user's forc directory by `registry_dir` below.
pub const REG_DIR_NAME: &str = "registry";
/// Registry identifier for use in patch tables.
/// This allows users to patch registry dependencies like: [patch.forc.pub]
pub const REGISTRY_PATCH_KEY: &str = "forc.pub";
/// A package from the official registry.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Source {
    /// The name of the specified package.
    pub name: String,
    /// The base version specified for the package.
    pub version: semver::Version,
    /// The namespace this package resides in. If the registry setup has no
    /// namespaces, this is `Namespace::Flat`.
    pub namespace: Namespace,
}
/// A pinned instance of the registry source.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned {
    /// The registry package with base version.
    pub source: Source,
    /// The corresponding CID for this registry entry, resolved from the
    /// registry index at pinning time (see `source::Pin for Source`).
    pub cid: Cid,
}
/// A resolver for registry index hosted as a github repo.
///
/// Given a package name and a version, a `GithubRegistryResolver` will be able
/// to resolve, fetch, pin a package through using the index hosted on a github
/// repository.
pub struct GithubRegistryResolver {
    /// Name of the github organization holding the registry index repository.
    repo_org: String,
    /// Name of git repository holding the registry index.
    repo_name: String,
    /// The number of letters used to chunk package name.
    ///
    /// Example:
    /// If set to 2, and package name is "foobar", the index file location
    /// will be ".../fo/ob/ar/foobar".
    chunk_size: usize,
    /// Type of the namespacing is needed to determine whether to add domain at
    /// the beginning of the file location (see `file_location::location_from_root`).
    namespace: Namespace,
    /// Branch name of the registry repo, the resolver is going to be using.
    branch_name: String,
}
/// Error returned upon failed parsing of `Pinned::from_str`.
/// Each variant names the segment of the pinned string that failed to parse.
#[derive(Clone, Debug)]
pub enum PinnedParseError {
    /// The `registry+` prefix was missing or misplaced.
    Prefix,
    /// The package-name segment could not be extracted.
    PackageName,
    /// The version segment was missing or not valid semver.
    PackageVersion,
    /// The CID segment was missing or not a valid v0 CID.
    Cid,
    /// The namespace segment was malformed.
    Namespace,
}
impl GithubRegistryResolver {
    /// Default github organization name that holds the registry git repo.
    pub const DEFAULT_GITHUB_ORG: &str = "FuelLabs";
    /// Default name of the repository that holds the registry git repo.
    pub const DEFAULT_REPO_NAME: &str = "forc.pub-index";
    /// Default chunking size of the repository that holds registry git repo.
    pub const DEFAULT_CHUNKING_SIZE: usize = 2;
    /// Default branch name for the repository repo.
    const DEFAULT_BRANCH_NAME: &str = "master";
    /// Default timeout for each github look-up request. If exceeded request is
    /// dropped.
    const DEFAULT_TIMEOUT_MS: u64 = 10000;
    /// Constructs a resolver from explicit settings.
    pub fn new(
        repo_org: String,
        repo_name: String,
        chunk_size: usize,
        namespace: Namespace,
        branch_name: String,
    ) -> Self {
        Self {
            repo_org,
            repo_name,
            chunk_size,
            namespace,
            branch_name,
        }
    }
    /// Returns a `GithubRegistryResolver` that automatically uses
    /// `Self::DEFAULT_GITHUB_ORG` and `Self::DEFAULT_REPO_NAME`.
    pub fn with_default_github(namespace: Namespace) -> Self {
        // Delegate to `new`, filling in every default.
        Self::new(
            Self::DEFAULT_GITHUB_ORG.to_string(),
            Self::DEFAULT_REPO_NAME.to_string(),
            Self::DEFAULT_CHUNKING_SIZE,
            namespace,
            Self::DEFAULT_BRANCH_NAME.to_string(),
        )
    }
    /// Returns the namespace associated with this `GithubRegistryResolver`.
    ///
    /// See `[GithubRegistryResolver::namespace]` for details.
    pub fn namespace(&self) -> &Namespace {
        &self.namespace
    }
    /// Returns the branch name used by this `GithubRegistryResolver`.
    ///
    /// See `[GithubRegistryResolver::branch_name]` for details.
    pub fn branch_name(&self) -> &str {
        &self.branch_name
    }
    /// Returns the chunk size used by this `GithubRegistryResolver`.
    ///
    /// See `[GithubRegistryResolver::chunk_size]` for details.
    pub fn chunk_size(&self) -> usize {
        self.chunk_size
    }
    /// Returns the owner of the repo this `GithubRegistryResolver` is
    /// configured to fetch from.
    ///
    /// See `[GithubRegistryResolver::repo_org]` for details.
    pub fn repo_org(&self) -> &str {
        &self.repo_org
    }
    /// Returns the name of the repo this `GithubRegistryResolver` is
    /// configured to fetch from.
    ///
    /// See `[GithubRegistryResolver::repo_name]` for details.
    pub fn repo_name(&self) -> &str {
        &self.repo_name
    }
}
impl Pinned {
    /// Prefix marking a serialized pin as a registry source; used by the
    /// `Display` and `FromStr` implementations below.
    pub const PREFIX: &str = "registry";
}
impl Display for Pinned {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// registry+<package_name>?v<version>#<cid>!namespace
write!(
f,
"{}+{}?{}#{}!{}",
Self::PREFIX,
self.source.name,
self.source.version,
self.cid.0,
self.source.namespace
)
}
}
impl FromStr for Pinned {
    type Err = PinnedParseError;
    /// Parses the string form produced by `Display`:
    /// `registry+<package_name>?<version>#<cid>!<namespace>`
    /// (an empty namespace segment yields `Namespace::Flat`).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim();
        // The string must begin with the `registry+` prefix. Using
        // `strip_prefix` both rejects inputs that lack the prefix entirely
        // (the previous `find(..).is_some_and(|loc| loc != 0)` check let
        // prefix-less strings through) and avoids the out-of-bounds slice
        // panic on inputs shorter than the prefix.
        let prefix_plus = format!("{}+", Self::PREFIX);
        let without_prefix = s
            .strip_prefix(&prefix_plus)
            .ok_or(PinnedParseError::Prefix)?;
        // Parse the package name: everything before the first '?'.
        // `split_once` also guards against a missing '?' separator, which
        // previously caused an out-of-bounds slice panic.
        let (pkg_name, rest) = without_prefix
            .split_once('?')
            .ok_or(PinnedParseError::PackageName)?;
        let mut s_iter = rest.split('#');
        // Parse the package version.
        let pkg_version = s_iter.next().ok_or(PinnedParseError::PackageVersion)?;
        let pkg_version =
            semver::Version::from_str(pkg_version).map_err(|_| PinnedParseError::PackageVersion)?;
        // Parse the CID and namespace.
        let cid_and_namespace = s_iter.next().ok_or(PinnedParseError::Cid)?;
        let mut s_iter = cid_and_namespace.split('!');
        let cid = s_iter.next().ok_or(PinnedParseError::Cid)?;
        if !validate_cid(cid) {
            return Err(PinnedParseError::Cid);
        }
        let cid = Cid::from_str(cid).map_err(|_| PinnedParseError::Cid)?;
        // If there is a namespace string after ! and if it is not empty
        // get a `Namespace::Domain` otherwise return a `Namespace::Flat`.
        let namespace = s_iter
            .next()
            .filter(|ns| !ns.is_empty())
            .map_or_else(|| Namespace::Flat, |ns| Namespace::Domain(ns.to_string()));
        let source = Source {
            name: pkg_name.to_string(),
            version: pkg_version,
            namespace,
        };
        Ok(Self { source, cid })
    }
}
impl Display for Source {
    /// Renders as `<name>+<version>`, e.g. `core+0.0.1`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { name, version, .. } = self;
        write!(f, "{name}+{version}")
    }
}
/// Root of the on-disk registry cache: `<user forc directory>/registry`.
#[cfg(not(test))]
fn registry_dir() -> PathBuf {
    forc_util::user_forc_directory().join(REG_DIR_NAME)
}
/// Test-only registry root: a process-wide temporary directory, created
/// lazily on first use and deliberately leaked so it outlives all tests.
#[cfg(test)]
fn registry_dir() -> PathBuf {
    use once_cell::sync::Lazy;
    use std::sync::Mutex;
    static TEST_REGISTRY_DIR: Lazy<Mutex<Option<PathBuf>>> = Lazy::new(|| Mutex::new(None));
    let mut slot = TEST_REGISTRY_DIR.lock().unwrap();
    slot.get_or_insert_with(|| {
        let temp_dir = tempfile::tempdir().expect("Failed to create temp dir for tests");
        let path = temp_dir.path().join(REG_DIR_NAME);
        std::fs::create_dir_all(&path).expect("Failed to create test registry dir");
        // Keep the temp dir alive by leaking it (only for tests).
        temp_dir.keep().join(REG_DIR_NAME)
    })
    .clone()
}
/// Registry cache root for the given namespace: the bare registry dir for
/// `Flat`, or a domain-named subdirectory for `Domain`.
fn registry_with_namespace_dir(namespace: &Namespace) -> PathBuf {
    match namespace {
        Namespace::Domain(ns) => registry_dir().join(ns),
        Namespace::Flat => registry_dir(),
    }
}
/// Directory a fetched package is checked out into:
/// `<namespace dir>/<name>-<version>`.
fn registry_package_dir(
    namespace: &Namespace,
    pkg_name: &str,
    pkg_version: &semver::Version,
) -> PathBuf {
    let dir_name = format!("{pkg_name}-{pkg_version}");
    registry_with_namespace_dir(namespace).join(dir_name)
}
/// The name to use for a package's identifier entry under the user's forc directory.
fn registry_package_dir_name(name: &str, pkg_version: &semver::Version) -> String {
use std::hash::{Hash, Hasher};
fn hash_version(pkg_version: &semver::Version) -> u64 {
let mut hasher = std::collections::hash_map::DefaultHasher::new();
pkg_version.hash(&mut hasher);
hasher.finish()
}
let package_ver_hash = hash_version(pkg_version);
format!("{name}-{package_ver_hash:x}")
}
/// Validates if the cid string is valid by checking the initial 2 letters and
/// length.
///
/// For CIDs to be marked as valid:
/// 1. Must start with `Qm`.
/// 2. Must be 46 chars long.
///
/// For more details see: https://docs.ipfs.tech/concepts/content-addressing/#version-0-v0
fn validate_cid(cid: &str) -> bool {
    let candidate = cid.trim();
    candidate.len() == 46 && candidate.starts_with("Qm")
}
/// A temporary directory that we can use for cloning a registry-sourced package's index file and discovering
/// the corresponding CID for that package.
///
/// The resulting directory is:
///
/// ```ignore
/// $HOME/.forc/registry/cache/tmp/<fetch_id>-name-<version_hash>
/// ```
///
/// A unique `fetch_id` may be specified to avoid contention over the registry directory in the
/// case that multiple processes or threads may be building different projects that may require
/// fetching the same dependency.
fn tmp_registry_package_dir(
    fetch_id: u64,
    name: &str,
    version: &semver::Version,
    namespace: &Namespace,
) -> PathBuf {
    let unique_name = registry_package_dir_name(name, version);
    let repo_dir_name = format!("{fetch_id:x}-{unique_name}");
    registry_with_namespace_dir(namespace)
        .join("tmp")
        .join(repo_dir_name)
}
impl source::Pin for Source {
    type Pinned = Pinned;
    /// Pins this registry source by resolving its CID from the registry index.
    ///
    /// Downloads the package's index file (via `with_tmp_fetch_index`), looks
    /// up the entry for `self.version`, and returns the pinned source together
    /// with the local directory the package will later be fetched into.
    fn pin(&self, ctx: source::PinCtx) -> anyhow::Result<(Self::Pinned, PathBuf)> {
        let pkg_name = ctx.name.to_string();
        let fetch_id = ctx.fetch_id();
        let source = self.clone();
        // The previous version re-cloned `pkg_name` here even though it is
        // already an owned `String`; the redundant clone has been removed.
        // Index lookups are async, so bridge from this synchronous trait method.
        let cid = block_on_any_runtime(async move {
            with_tmp_fetch_index(fetch_id, &pkg_name, &source, |index_file| {
                let version = source.version.clone();
                // Clone so the owned name can move into the async block while
                // `&pkg_name` above remains borrowed.
                let pkg_name = pkg_name.clone();
                async move {
                    let pkg_entry = index_file
                        .get(&version)
                        .ok_or_else(|| anyhow!("No {} found for {}", version, pkg_name))?;
                    Cid::from_str(pkg_entry.source_cid()).map_err(anyhow::Error::from)
                }
            })
            .await
        })?;
        let path = registry_package_dir(&self.namespace, ctx.name, &self.version);
        let pinned = Pinned {
            source: self.clone(),
            cid,
        };
        Ok((pinned, path))
    }
}
impl source::Fetch for Pinned {
    /// Ensures the pinned package's source is present at `path` and returns
    /// its package manifest.
    ///
    /// If `path` does not yet exist, the source is downloaded (see the free
    /// `fetch` function, which tries IPFS and falls back to the CDN) under an
    /// exclusive advisory file lock; the subsequent manifest lookup runs under
    /// a shared (read) lock.
    fn fetch(&self, ctx: source::PinCtx, path: &Path) -> anyhow::Result<PackageManifestFile> {
        // Co-ordinate access to the registry checkout directory using an advisory file lock.
        let mut lock = forc_util::path_lock(path)?;
        // TODO: Here we assume that if the local path already exists, that it contains the
        // full and correct source for that registry entry and hasn't been tampered with. This is
        // probably fine for most cases as users should never be touching these
        // directories, however we should add some code to validate this. E.g. can we
        // recreate the ipfs cid by hashing the directory or something along these lines?
        // https://github.com/FuelLabs/sway/issues/7075
        {
            let _guard = lock.write()?;
            if !path.exists() {
                println_action_green(
                    "Fetching",
                    &format!(
                        "{} {}",
                        ansiterm::Style::new().bold().paint(ctx.name),
                        self.source.version
                    ),
                );
                let pinned = self.clone();
                let fetch_id = ctx.fetch_id();
                let ipfs_node = ctx.ipfs_node().clone();
                // The download itself is async; bridge from this sync method.
                block_on_any_runtime(async move {
                    // If the user is trying to use public IPFS node with
                    // registry sources. Use fuel operated ipfs node
                    // instead.
                    let node = match ipfs_node {
                        node if node == IPFSNode::public() => IPFSNode::fuel(),
                        node => node,
                    };
                    fetch(fetch_id, &pinned, &node).await
                })?;
            }
        }
        let path = {
            let _guard = lock.read()?;
            manifest::find_within(path, ctx.name())
                .ok_or_else(|| anyhow!("failed to find package `{}` in {}", ctx.name(), self))?
        };
        PackageManifestFile::from_file(path)
    }
}
impl source::DepPath for Pinned {
    /// Registry dependencies have no in-member dependency path yet; this
    /// always returns an error.
    fn dep_path(&self, _name: &str) -> anyhow::Result<source::DependencyPath> {
        bail!("dep_path: registry dependencies are not yet supported");
    }
}
impl From<Pinned> for source::Pinned {
    /// Wraps a registry-pinned package into the general pinned-source enum.
    fn from(p: Pinned) -> Self {
        source::Pinned::Registry(p)
    }
}
/// Resolve a CID from index file and pinned package. Basically goes through
/// the index file to find corresponding entry described by the pinned instance.
fn resolve_to_cid(index_file: &IndexFile, pinned: &Pinned) -> anyhow::Result<Cid> {
let other_versions = index_file
.versions()
.filter(|ver| **ver != pinned.source.version)
.map(|ver| format!("{}.{}.{}", ver.major, ver.minor, ver.patch))
.collect::<Vec<_>>()
.join(",");
let package_entry = index_file.get(&pinned.source.version).ok_or_else(|| {
anyhow!(
"Version {} not found for {}. Other available versions: [{}]",
pinned.source.version,
pinned.source.name,
other_versions
)
})?;
let cid = Cid::from_str(package_entry.source_cid()).with_context(|| {
format!(
"Invalid CID {}v{}: `{}`",
package_entry.name(),
package_entry.version(),
package_entry.source_cid()
)
})?;
if package_entry.yanked() {
bail!(
"Version {} of {} is yanked. Other available versions: [{}]",
pinned.source.version,
pinned.source.name,
other_versions
);
}
Ok(cid)
}
/// Downloads the pinned package's source into its registry package directory
/// and returns that directory's path.
///
/// The package's CID is resolved against the freshly-downloaded registry
/// index (`resolve_to_cid`), then the source is fetched via IPFS with a CDN
/// fallback (`fetch_from_s3`). On any failure the partially-populated
/// destination directory is removed again.
async fn fetch(fetch_id: u64, pinned: &Pinned, ipfs_node: &IPFSNode) -> anyhow::Result<PathBuf> {
    let path = with_tmp_fetch_index(
        fetch_id,
        &pinned.source.name,
        &pinned.source,
        |index_file| async move {
            let path = registry_package_dir(
                &pinned.source.namespace,
                &pinned.source.name,
                &pinned.source.version,
            );
            // Start from a clean slate if a previous (possibly partial) copy exists.
            if path.exists() {
                let _ = fs::remove_dir_all(&path);
            }
            let cid = resolve_to_cid(&index_file, pinned)?;
            // Create directory only after we've validated the package exists in the index
            fs::create_dir_all(&path)?;
            // Use a cleanup guard to ensure directory is removed if fetch fails
            let cleanup_guard = scopeguard::guard(&path, |path| {
                if path.exists() {
                    let _ = fs::remove_dir_all(path);
                }
            });
            // Try IPFS first, fallback to CDN if it fails
            let ipfs_result = match ipfs_node {
                IPFSNode::Local => {
                    println_action_green("Fetching", "with local IPFS node");
                    cid.fetch_with_client(&ipfs_client(), &path).await
                }
                IPFSNode::WithUrl(gateway_url) => {
                    println_action_green(
                        "Fetching",
                        &format!("from {gateway_url}. Note: This can take several minutes."),
                    );
                    cid.fetch_with_gateway_url(gateway_url, &path).await
                }
            };
            // If IPFS fails, try CDN fallback
            let fetch_result = if let Err(ipfs_error) = ipfs_result {
                println_action_green("Warning", &format!("IPFS fetch failed: {ipfs_error}"));
                fetch_from_s3(pinned, &path).await.with_context(|| {
                    format!("Both IPFS and CDN fallback failed. IPFS error: {ipfs_error}")
                })
            } else {
                Ok(())
            };
            match fetch_result {
                Ok(()) => {
                    // Fetch successful, defuse the cleanup guard so directory is preserved
                    scopeguard::ScopeGuard::into_inner(cleanup_guard);
                }
                Err(e) => {
                    // Fetch failed, cleanup guard will automatically remove the directory
                    return Err(e);
                }
            }
            Ok(path)
        },
    )
    .await?;
    Ok(path)
}
/// Fetches package from CDN as a fallback when IPFS fails.
/// The CDN mirrors package archives keyed directly by their IPFS CID.
async fn fetch_from_s3(pinned: &Pinned, path: &Path) -> anyhow::Result<()> {
    // Archives can be large; allow up to three minutes per download.
    let http = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(180))
        .build()
        .context("Failed to create HTTP client")?;
    // Construct CDN URL directly from IPFS hash
    let cdn_url = format!("https://cdn.forc.pub/{}", pinned.cid.0);
    println_action_green(
        "Fetching",
        &format!("from {cdn_url}. Note: This can take several minutes."),
    );
    // Download directly from CDN
    let response = http
        .get(&cdn_url)
        .send()
        .await
        .context("Failed to download source code from CDN")?;
    let status = response.status();
    if !status.is_success() {
        bail!("Failed to download source from CDN: HTTP {}", status);
    }
    let bytes = response
        .bytes()
        .await
        .context("Failed to read source code bytes")?;
    // Extract the tarball to the destination path
    extract_s3_archive(&bytes, path, &pinned.cid)
}
/// Extracts CDN archive (a gzipped tarball) to the destination path.
fn extract_s3_archive(bytes: &[u8], dst: &Path, cid: &Cid) -> anyhow::Result<()> {
    // Mirror the IPFS layout: contents go under a directory named after the CID.
    let dst_dir = dst.join(cid.0.to_string());
    fs::create_dir_all(&dst_dir)?;
    // Decompress and unpack every entry of the tar.gz archive.
    let mut archive = Archive::new(GzDecoder::new(bytes));
    for entry in archive.entries()? {
        entry?.unpack_in(&dst_dir)?;
    }
    Ok(())
}
/// Downloads and parses the registry index file for `pkg_name`, then runs `f`
/// on the parsed [`IndexFile`] and returns its result.
///
/// The index file is fetched from the default github-hosted index repository
/// over raw.githubusercontent.com, at the chunked path computed by
/// `location_from_root`. A per-fetch temporary directory path is reserved for
/// the duration of the call: any stale copy is removed up-front, and a scope
/// guard removes it again on exit (whether by return or panic).
async fn with_tmp_fetch_index<F, O, Fut>(
    fetch_id: u64,
    pkg_name: &str,
    source: &Source,
    f: F,
) -> anyhow::Result<O>
where
    F: FnOnce(IndexFile) -> Fut,
    Fut: std::future::Future<Output = anyhow::Result<O>>,
{
    let tmp_dir = tmp_registry_package_dir(fetch_id, pkg_name, &source.version, &source.namespace);
    if tmp_dir.exists() {
        let _ = std::fs::remove_dir_all(&tmp_dir);
    }
    // Add a guard to ensure cleanup happens if we got out of scope whether by
    // returning or panicking.
    let _cleanup_guard = scopeguard::guard(&tmp_dir, |dir| {
        let _ = std::fs::remove_dir_all(dir);
    });
    let github_resolver = GithubRegistryResolver::with_default_github(source.namespace.clone());
    // Chunked location of the index file inside the index repo, e.g. "fo/ob/foobar".
    let path = location_from_root(github_resolver.chunk_size, &source.namespace, pkg_name)
        .display()
        .to_string();
    let index_repo_owner = github_resolver.repo_org();
    let index_repo_name = github_resolver.repo_name();
    let reference = format!("refs/heads/{}", github_resolver.branch_name());
    let github_endpoint = format!(
        "https://raw.githubusercontent.com/{index_repo_owner}/{index_repo_name}/{reference}/{path}"
    );
    let client = reqwest::Client::new();
    let timeout_duration = Duration::from_millis(GithubRegistryResolver::DEFAULT_TIMEOUT_MS);
    let index_response = client
        .get(github_endpoint)
        .timeout(timeout_duration)
        .send()
        .await
        .map_err(|e| {
            anyhow!(
                "Failed to send request to github to obtain package index file from registry {e}"
            )
        })?
        .error_for_status()
        .map_err(|_| anyhow!("Failed to fetch {pkg_name}"))?;
    let contents = index_response.text().await?;
    let index_file: IndexFile = serde_json::from_str(&contents).with_context(|| {
        format!("Unable to deserialize a github registry lookup response. Body was: \"{contents}\"")
    })?;
    let res = f(index_file).await?;
    Ok(res)
}
/// Execute an async block on a Tokio runtime.
///
/// If we are already in a runtime, this will spawn a new OS thread to create a new runtime.
///
/// If we are not in a runtime, a new runtime is created and the future is blocked on.
pub(crate) fn block_on_any_runtime<F>(future: F) -> F::Output
where
    F: std::future::Future + Send + 'static,
    F::Output: Send + 'static,
{
    /// Build a fresh single-threaded runtime and drive `future` to completion.
    fn drive<Fut: std::future::Future>(future: Fut) -> Fut::Output {
        tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap()
            .block_on(future)
    }
    match tokio::runtime::Handle::try_current() {
        // Already inside a runtime: blocking this thread could stall the
        // executor, so drive the future on a dedicated OS thread instead.
        Ok(_) => thread::spawn(move || drive(future)).join().unwrap(),
        // No ambient runtime: safe to block the current thread directly.
        Err(_) => drive(future),
    }
}
#[cfg(test)]
mod tests {
    use super::{
        block_on_any_runtime, fetch, file_location::Namespace, registry_package_dir,
        resolve_to_cid, Pinned, Source,
    };
    use crate::source::{
        ipfs::Cid,
        reg::index_file::{IndexFile, PackageEntry},
        IPFSNode,
    };
    use std::{fs, str::FromStr};
    // A pinned string with an empty trailing `!` segment parses to `Namespace::Flat`.
    #[test]
    fn parse_pinned_entry_without_namespace() {
        let pinned_str = "registry+core?0.0.1#QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKS!";
        let pinned = Pinned::from_str(pinned_str).unwrap();
        let expected_source = Source {
            name: "core".to_string(),
            version: semver::Version::new(0, 0, 1),
            namespace: Namespace::Flat,
        };
        let cid = Cid::from_str("QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKS").unwrap();
        let expected_pinned = Pinned {
            source: expected_source,
            cid,
        };
        assert_eq!(pinned, expected_pinned)
    }
    // A non-empty segment after `!` parses to `Namespace::Domain`.
    #[test]
    fn parse_pinned_entry_with_namespace() {
        let pinned_str =
            "registry+core?0.0.1#QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKS!fuelnamespace";
        let pinned = Pinned::from_str(pinned_str).unwrap();
        let expected_source = Source {
            name: "core".to_string(),
            version: semver::Version::new(0, 0, 1),
            namespace: Namespace::Domain("fuelnamespace".to_string()),
        };
        let cid = Cid::from_str("QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKS").unwrap();
        let expected_pinned = Pinned {
            source: expected_source,
            cid,
        };
        assert_eq!(pinned, expected_pinned)
    }
    // Covers successful resolution, the missing-version error (including the
    // "other available versions" listing), and the yanked-version error.
    #[test]
    fn test_resolve_to_cid() {
        let mut index_file = IndexFile::default();
        // Add a regular version with a valid CID
        let valid_cid = "QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKS";
        let valid_version = semver::Version::new(1, 0, 0);
        let valid_entry = PackageEntry::new(
            "test_package".to_string(),
            valid_version.clone(),
            valid_cid.to_string(),
            None,   // no abi_cid
            vec![], // no dependencies
            false,  // not yanked
        );
        index_file.insert(valid_entry);
        // Add a yanked version
        let yanked_cid = "QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKR";
        let yanked_version = semver::Version::new(0, 9, 0);
        let yanked_entry = PackageEntry::new(
            "test_package".to_string(),
            yanked_version.clone(),
            yanked_cid.to_string(),
            None,   // no abi_cid
            vec![], // no dependencies
            true,   // yanked
        );
        index_file.insert(yanked_entry);
        // Add another version just to have multiple available
        let other_cid = "QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKT";
        let other_version = semver::Version::new(1, 1, 0);
        let other_entry = PackageEntry::new(
            "test_package".to_string(),
            other_version.clone(),
            other_cid.to_string(),
            None,   // no abi_cid
            vec![], // no dependencies
            false,  // not yanked
        );
        index_file.insert(other_entry);
        // Test Case 1: Successful resolution
        let valid_source = Source {
            name: "test_package".to_string(),
            version: valid_version.clone(),
            namespace: Namespace::Flat,
        };
        let valid_pinned = Pinned {
            source: valid_source,
            cid: Cid::from_str(valid_cid).unwrap(),
        };
        let result = resolve_to_cid(&index_file, &valid_pinned);
        assert!(result.is_ok());
        let valid_cid = Cid::from_str(valid_cid).unwrap();
        assert_eq!(result.unwrap(), valid_cid);
        // Test Case 2: Error when version doesn't exist
        let nonexistent_version = semver::Version::new(2, 0, 0);
        let nonexistent_source = Source {
            name: "test_package".to_string(),
            version: nonexistent_version,
            namespace: Namespace::Flat,
        };
        let nonexistent_pinned = Pinned {
            source: nonexistent_source,
            // this cid just a placeholder, as this version does not exists
            cid: valid_cid,
        };
        let result = resolve_to_cid(&index_file, &nonexistent_pinned);
        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Version 2.0.0 not found"));
        // The version listing order is not guaranteed, so accept any permutation.
        assert!(
            error_msg.contains("Other available versions: [1.1.0,0.9.0,1.0.0]")
                || error_msg.contains("Other available versions: [0.9.0,1.0.0,1.1.0]")
                || error_msg.contains("Other available versions: [1.0.0,0.9.0,1.1.0]")
                || error_msg.contains("Other available versions: [0.9.0,1.1.0,1.0.0]")
                || error_msg.contains("Other available versions: [1.0.0,1.1.0,0.9.0]")
                || error_msg.contains("Other available versions: [1.1.0,1.0.0,0.9.0]")
        );
        // Test Case 3: Error when version is yanked
        let yanked_source = Source {
            name: "test_package".to_string(),
            version: yanked_version.clone(),
            namespace: Namespace::Flat,
        };
        let yanked_pinned = Pinned {
            source: yanked_source,
            cid: Cid::from_str(yanked_cid).unwrap(),
        };
        let result = resolve_to_cid(&index_file, &yanked_pinned);
        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Version 0.9.0 of test_package is yanked"));
        assert!(
            error_msg.contains("Other available versions: [1.1.0,1.0.0]")
                || error_msg.contains("Other available versions: [1.0.0,1.1.0]")
        );
    }
    // A failed fetch must not leave a partially-created package directory behind.
    #[test]
    fn test_fetch_directory_cleanup_on_failure() {
        // The test itself doesn't need to assert anything about the result,
        // the assertions inside the async block are what matter
        block_on_any_runtime(async {
            let pinned = Pinned {
                source: Source {
                    name: "nonexistent_test_package".to_string(),
                    version: semver::Version::new(1, 0, 0),
                    namespace: Namespace::Flat,
                },
                // Valid CID format but this will fail because the package doesn't exist in the index
                cid: Cid::from_str("QmdMVqLqpba2mMB5AUjYCxubC6tLGevQFunpBkbC2UbrKS").unwrap(),
            };
            // Get the expected package directory path
            let expected_path = registry_package_dir(
                &pinned.source.namespace,
                &pinned.source.name,
                &pinned.source.version,
            );
            // Ensure the directory doesn't exist initially
            if expected_path.exists() {
                let _ = fs::remove_dir_all(&expected_path);
            }
            assert!(!expected_path.exists());
            // Call the actual fetch function with an IPFS node that will fail
            // This will fail during index lookup (the package doesn't exist in registry)
            let fetch_id = 12345;
            let ipfs_node = IPFSNode::WithUrl("https://invalid-url.com".to_string());
            let result = fetch(fetch_id, &pinned, &ipfs_node).await;
            // Verify that fetch failed (package not found in index)
            assert!(result.is_err());
            let error_msg = result.unwrap_err().to_string();
            assert!(error_msg.contains("Failed to fetch nonexistent_test_package"));
            // Most importantly, verify that no directory was created or if it was created, it got cleaned up
            assert!(
                !expected_path.exists(),
                "Directory should not exist after fetch failure, but it exists at: {}",
                expected_path.display()
            );
        });
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/reg/file_location.rs | forc-pkg/src/source/reg/file_location.rs | use serde::{Deserialize, Serialize};
use std::{fmt::Display, path::PathBuf};
/// Number of levels of nesting to use for file locations.
/// With a chunk size of 2 this yields paths like `fo/ob/foobar`.
const NESTING_LEVELS: usize = 2;
/// Whether the registry index is partitioned by a domain prefix.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum Namespace {
    /// Flat namespace means no sub-namespace with different domains.
    /// Location calculator won't be adding anything specific for this to the
    /// file location.
    Flat,
    /// Domain namespace means we have custom namespaces and first component of
    /// the file location of the index file will be the domain of the namespace.
    /// Which means in the index repository all namespaced packages will first
    /// have the namespace in their paths.
    Domain(String),
}
impl Display for Namespace {
    /// `Flat` renders as the empty string; `Domain` renders as the bare domain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let text = match self {
            Namespace::Flat => "",
            Namespace::Domain(s) => s.as_str(),
        };
        write!(f, "{text}")
    }
}
/// Calculates the exact file location from the root of the namespace repo.
/// If the configuration includes a namespace, it will be the first part of
/// the path followed by chunks.
///
/// With `chunk_size == 2`, `"foobar"` maps to `fo/ob/foobar` (and a `Domain`
/// namespace prepends the domain). A `chunk_size` of `0` disables chunking
/// entirely, yielding just the package name.
pub fn location_from_root(chunk_size: usize, namespace: &Namespace, package_name: &str) -> PathBuf {
    let mut path = PathBuf::new();
    // Add domain to path if namespace is 'Domain' and it is not empty,
    // otherwise skip.
    match namespace {
        Namespace::Domain(domain) if !domain.is_empty() => {
            path.push(domain);
        }
        _ => {}
    }
    // If chunking is disabled we do not have any folder in the index.
    if chunk_size == 0 {
        path.push(package_name);
        return path;
    }
    // Take the first `chunk_size * NESTING_LEVELS` characters of the name and
    // split them into `chunk_size`-sized directory components. Operating on
    // `char`s (not bytes) keeps multi-byte names safe. The previous
    // `enumerate().take_while(..)` was just a long-hand `take(char_count)`.
    let char_count = chunk_size * NESTING_LEVELS;
    let chars: Vec<char> = package_name.chars().take(char_count).collect();
    for chunk in chars.chunks(chunk_size) {
        let chunk_str: String = chunk.iter().collect();
        path.push(chunk_str);
    }
    path.push(package_name);
    path
}
#[cfg(test)]
mod tests {
use super::*;
use crate::source::reg::index_file::PackageEntry;
use semver::Version;
use std::path::Path;
fn create_package_entry(name: &str) -> PackageEntry {
let name = name.to_string();
let version = Version::new(1, 0, 0);
let source_cid = "QmHash".to_string();
let abi_cid = None;
let dependencies = vec![];
let yanked = false;
PackageEntry::new(name, version, source_cid, abi_cid, dependencies, yanked)
}
#[test]
fn test_flat_namespace_with_small_package() {
let chunk_size = 2;
let namespace = Namespace::Flat;
let entry = create_package_entry("ab");
let path = location_from_root(chunk_size, &namespace, entry.name());
assert_eq!(path, Path::new("ab").join("ab"));
}
#[test]
fn test_flat_namespace_with_regular_package() {
let chunk_size = 2;
let namespace = Namespace::Flat;
let entry = create_package_entry("foobar");
let path = location_from_root(chunk_size, &namespace, entry.name());
// Should produce: fo/ob/foobar
assert_eq!(path, Path::new("fo").join("ob").join("foobar"));
}
#[test]
fn test_domain_namespace() {
let chunk_size = 2;
let namespace = Namespace::Domain("example".to_string());
let entry = create_package_entry("foobar");
let path = location_from_root(chunk_size, &namespace, entry.name());
// Should produce: example/fo/ob/foobar
assert_eq!(
path,
Path::new("example").join("fo").join("ob").join("foobar")
);
}
#[test]
fn test_odd_length_package_name() {
let chunk_size = 2;
let namespace = Namespace::Flat;
let entry = create_package_entry("hello");
let path = location_from_root(chunk_size, &namespace, entry.name());
// Should produce: he/ll/hello
assert_eq!(path, Path::new("he").join("ll").join("hello"));
}
#[test]
fn test_larger_chunking_size() {
let chunk_size = 3;
let namespace = Namespace::Flat;
let entry = create_package_entry("fibonacci");
let path = location_from_root(chunk_size, &namespace, entry.name());
// Should produce: fib/ona/fibonacci
assert_eq!(path, Path::new("fib").join("ona").join("fibonacci"));
}
#[test]
fn test_chunking_size_larger_than_name() {
let chunk_size = 10;
let namespace = Namespace::Flat;
let entry = create_package_entry("small");
let path = location_from_root(chunk_size, &namespace, entry.name());
// Should produce: small/small
assert_eq!(path, Path::new("small").join("small"));
}
#[test]
fn test_unicode_package_name() {
let chunk_size = 2;
let namespace = Namespace::Flat;
let entry = create_package_entry("héllo");
let path = location_from_root(chunk_size, &namespace, entry.name());
// Should produce: hé/ll/héllo
assert_eq!(path, Path::new("hé").join("ll").join("héllo"));
}
#[test]
fn test_empty_package_name() {
    // An empty name produces an empty path.
    let entry = create_package_entry("");
    let path = location_from_root(0, &Namespace::Flat, entry.name());
    assert_eq!(path, Path::new(""));
}
#[test]
fn test_chunking_size_zero() {
    // A chunk size of zero disables prefix directories entirely.
    let entry = create_package_entry("package");
    let path = location_from_root(0, &Namespace::Flat, entry.name());
    assert_eq!(path, Path::new("package"));
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/git/auth.rs | forc-pkg/src/source/git/auth.rs | /// Handler holds all required information for handling authentication callbacks from `git2`.
pub(crate) struct AuthHandler {
config: git2::Config,
/// Shows if the `AuthHandler` tried to make `SSH` authentication so far.
ssh_authentication_attempt: bool,
/// Shows if the `AuthHandler` tried to make `USER_PASS_PLAINTEXT` authentication so far.
plain_user_pass_attempt: bool,
}
impl AuthHandler {
    /// Creates a new `AuthHandler` from all fields of the struct. If there are no specific reasons
    /// not to, `default_with_config` should be preferred.
    fn new(
        config: git2::Config,
        ssh_authentication_attempt: bool,
        plain_user_pass_attempt: bool,
    ) -> Self {
        Self {
            config,
            ssh_authentication_attempt,
            plain_user_pass_attempt,
        }
    }
    /// Creates a handler for the provided `git2::Config` with no authentication
    /// attempts recorded yet.
    pub(crate) fn default_with_config(config: git2::Config) -> Self {
        Self::new(config, false, false)
    }
    /// Credential callback invoked by `git2` during a fetch.
    ///
    /// Tries each allowed mechanism at most once, in order: SSH agent,
    /// credential-helper username/password, then `git2`'s default credentials.
    /// Errors once every allowed mechanism has been exhausted.
    pub fn handle_callback(
        &mut self,
        url: &str,
        username: Option<&str>,
        allowed: git2::CredentialType,
    ) -> Result<git2::Cred, git2::Error> {
        let ssh_allowed = allowed.contains(git2::CredentialType::SSH_KEY);
        if ssh_allowed && !self.ssh_authentication_attempt {
            self.ssh_authentication_attempt = true;
            // When SSH_KEY authentication is allowed, the callback is expected
            // to carry a username; fail the callback if it does not.
            return username
                .ok_or_else(|| {
                    git2::Error::from_str("username must be provided with SSH_KEY callback")
                })
                .and_then(git2::Cred::ssh_key_from_agent);
        }
        let user_pass_allowed = allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT);
        if user_pass_allowed && !self.plain_user_pass_attempt {
            self.plain_user_pass_attempt = true;
            return git2::Cred::credential_helper(&self.config, url, username);
        }
        if allowed.contains(git2::CredentialType::DEFAULT) {
            return git2::Cred::default();
        }
        Err(git2::Error::from_str(
            "Tried all possible credential types for authentication",
        ))
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/source/git/mod.rs | forc-pkg/src/source/git/mod.rs | mod auth;
use crate::manifest::GenericManifestFile;
use crate::{
manifest::{self, PackageManifestFile},
source,
};
use anyhow::{anyhow, bail, Context, Result};
use forc_tracing::println_action_green;
use forc_util::git_checkouts_directory;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::{
collections::hash_map,
fmt, fs,
path::{Path, PathBuf},
str::FromStr,
};
/// A git URL, wrapping `gix_url::Url` so it can be hashed, ordered and
/// (de)serialized alongside the other source types.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Url {
    url: gix_url::Url,
}
/// A git repo with a `Forc.toml` manifest at its root.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Source {
    /// The URL at which the repository is located.
    pub repo: Url,
    /// A git reference, e.g. a branch or tag.
    pub reference: Reference,
}
impl Display for Source {
    /// Renders as `<repo> <reference>`, e.g. `https://host/repo branch=main`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let Self { repo, reference } = self;
        write!(f, "{repo} {reference}")
    }
}
/// Used to distinguish between types of git references.
///
/// For the most part, `Reference` is useful to refine the `refspecs` used to fetch remote
/// repositories.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum Reference {
    /// A named branch, e.g. `master`.
    Branch(String),
    /// A tag name, e.g. `v0.1.0`.
    Tag(String),
    /// An arbitrary revision string handed to `git rev-parse`, e.g. a commit hash.
    Rev(String),
    /// Whatever the remote's `HEAD` points to.
    DefaultBranch,
}
/// A pinned instance of a git source.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned {
    /// The git source that is being pinned.
    pub source: Source,
    /// The hash to which we have pinned the source.
    // When parsed via `Pinned::from_str`, this is validated by
    // `validate_git_commit_hash` to be a full 40-character hash.
    pub commit_hash: String,
}
/// Error returned upon failed parsing of `Pinned::from_str`.
#[derive(Clone, Debug)]
pub enum PinnedParseError {
    /// The string did not begin with the `git+` prefix.
    Prefix,
    /// The repository URL component was missing or invalid.
    Url,
    /// The git reference component was missing or unrecognized.
    Reference,
    /// The commit hash component was missing or failed validation.
    CommitHash,
}
/// Represents the Head's commit hash and time (in seconds) from epoch
type HeadWithTime = (String, i64);
// Name under which remote refs are tracked in the temporary fetch repos,
// e.g. `refs/remotes/origin/<branch>`.
const DEFAULT_REMOTE_NAME: &str = "origin";
/// Everything needed to recognize a checkout in offline mode
///
/// Since we are omitting `.git` folder to save disk space, we need an indexing file
/// to recognize a checkout while searching local checkouts in offline mode
#[derive(Serialize, Deserialize)]
pub struct SourceIndex {
    /// Type of the git reference
    pub git_reference: Reference,
    /// The checked-out HEAD commit hash paired with its commit time
    /// (seconds since the epoch).
    pub head_with_time: HeadWithTime,
}
impl SourceIndex {
    /// Builds an index entry recording the checked-out commit and its time.
    pub fn new(time: i64, git_reference: Reference, commit_hash: String) -> SourceIndex {
        let head_with_time = (commit_hash, time);
        SourceIndex {
            git_reference,
            head_with_time,
        }
    }
}
impl Reference {
    /// Resolves the parsed forc git reference to the associated git ID.
    ///
    /// Assumes `repo` was fetched with the refspecs produced by
    /// `git_ref_to_refspecs` for this reference, since tags and branches are
    /// looked up under `refs/remotes/<DEFAULT_REMOTE_NAME>/`.
    pub fn resolve(&self, repo: &git2::Repository) -> Result<git2::Oid> {
        // Find the commit associated with this tag.
        fn resolve_tag(repo: &git2::Repository, tag: &str) -> Result<git2::Oid> {
            let refname = format!("refs/remotes/{DEFAULT_REMOTE_NAME}/tags/{tag}");
            let id = repo.refname_to_id(&refname)?;
            let obj = repo.find_object(id, None)?;
            // Peel annotated tags down to the commit they ultimately point at.
            let obj = obj.peel(git2::ObjectType::Commit)?;
            Ok(obj.id())
        }
        // Resolve to the target for the given branch.
        fn resolve_branch(repo: &git2::Repository, branch: &str) -> Result<git2::Oid> {
            let name = format!("{DEFAULT_REMOTE_NAME}/{branch}");
            let b = repo
                .find_branch(&name, git2::BranchType::Remote)
                .with_context(|| format!("failed to find branch `{branch}`"))?;
            b.get()
                .target()
                .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", branch))
        }
        // Use the HEAD commit when default branch is specified.
        fn resolve_default_branch(repo: &git2::Repository) -> Result<git2::Oid> {
            let head_id =
                repo.refname_to_id(&format!("refs/remotes/{DEFAULT_REMOTE_NAME}/HEAD"))?;
            let head = repo.find_object(head_id, None)?;
            Ok(head.peel(git2::ObjectType::Commit)?.id())
        }
        // Find the commit for the given revision.
        fn resolve_rev(repo: &git2::Repository, rev: &str) -> Result<git2::Oid> {
            let obj = repo.revparse_single(rev)?;
            // If the rev named an annotated tag, unwrap it to its target commit.
            match obj.as_tag() {
                Some(tag) => Ok(tag.target_id()),
                None => Ok(obj.id()),
            }
        }
        match self {
            Reference::Tag(s) => {
                resolve_tag(repo, s).with_context(|| format!("failed to find tag `{s}`"))
            }
            Reference::Branch(s) => resolve_branch(repo, s),
            Reference::DefaultBranch => resolve_default_branch(repo),
            Reference::Rev(s) => resolve_rev(repo, s),
        }
    }
}
impl Pinned {
    /// Prefix used in the string form of a pinned git source,
    /// i.e. the `git` in `git+<url>?<reference>#<commit>`.
    pub const PREFIX: &'static str = "git";
}
impl source::Pin for Source {
    type Pinned = Pinned;
    /// Pin this git source to a concrete commit hash, returning the pinned
    /// source together with the local checkout path for that commit.
    fn pin(&self, ctx: source::PinCtx) -> Result<(Self::Pinned, PathBuf)> {
        // If the git source directly specifies a full commit hash, we should check
        // to see if we have a local copy. Otherwise we cannot know what commit we should pin
        // to without fetching the repo into a temporary directory.
        let pinned = if ctx.offline() {
            // Offline: only a previously fetched local checkout can satisfy us.
            let (_local_path, commit_hash) =
                search_source_locally(ctx.name(), self)?.ok_or_else(|| {
                    anyhow!(
                        "Unable to fetch pkg {:?} from {:?} in offline mode",
                        ctx.name(),
                        self.repo
                    )
                })?;
            Pinned {
                source: self.clone(),
                commit_hash,
            }
        } else if let Reference::DefaultBranch | Reference::Branch(_) = self.reference {
            // If the reference is to a branch or to the default branch we need to fetch
            // from remote even though we may have it locally. Because remote may contain a
            // newer commit.
            pin(ctx.fetch_id(), ctx.name(), self.clone())?
        } else {
            // If we are in online mode and the reference is to a specific commit (tag or
            // rev) we can first search it locally and re-use it.
            match search_source_locally(ctx.name(), self) {
                Ok(Some((_local_path, commit_hash))) => Pinned {
                    source: self.clone(),
                    commit_hash,
                },
                _ => {
                    // If the checkout we are looking for does not exists locally or an
                    // error happened during the search fetch it
                    pin(ctx.fetch_id(), ctx.name(), self.clone())?
                }
            }
        };
        let repo_path = commit_path(ctx.name(), &pinned.source.repo, &pinned.commit_hash);
        Ok((pinned, repo_path))
    }
}
impl source::Fetch for Pinned {
    /// Ensure the pinned commit is checked out at `repo_path`, then locate and
    /// load the requested package's manifest within it.
    fn fetch(&self, ctx: source::PinCtx, repo_path: &Path) -> Result<PackageManifestFile> {
        // Co-ordinate access to the git checkout directory using an advisory file lock.
        let mut lock = forc_util::path_lock(repo_path)?;
        // TODO: Here we assume that if the local path already exists, that it contains the
        // full and correct source for that commit and hasn't been tampered with. This is
        // probably fine for most cases as users should never be touching these
        // directories, however we should add some code to validate this. E.g. can we
        // recreate the git hash by hashing the directory or something along these lines
        // using git?
        // https://github.com/FuelLabs/sway/issues/7075
        {
            // Exclusive lock while (possibly) creating the checkout.
            let _guard = lock.write()?;
            if !repo_path.exists() {
                println_action_green(
                    "Fetching",
                    &format!("{} {}", ansiterm::Style::new().bold().paint(ctx.name), self),
                );
                fetch(ctx.fetch_id(), ctx.name(), self)?;
            }
        }
        // Shared lock is sufficient for the read-only manifest lookup.
        let path = {
            let _guard = lock.read()?;
            manifest::find_within(repo_path, ctx.name())
                .ok_or_else(|| anyhow!("failed to find package `{}` in {}", ctx.name(), self))?
        };
        PackageManifestFile::from_file(path)
    }
}
impl source::DepPath for Pinned {
    /// Locate the package's manifest inside the local checkout of the pinned
    /// commit, without fetching anything.
    fn dep_path(&self, name: &str) -> anyhow::Result<source::DependencyPath> {
        let checkout = commit_path(name, &self.source.repo, &self.commit_hash);
        // Take a shared advisory lock so we never observe a half-written checkout.
        let lock = forc_util::path_lock(&checkout)?;
        let _guard = lock.read()?;
        match manifest::find_within(&checkout, name) {
            Some(path) => Ok(source::DependencyPath::ManifestPath(path)),
            None => Err(anyhow!("failed to find package `{}` in {}", name, self)),
        }
    }
}
impl fmt::Display for Url {
    /// Writes the URL exactly as `gix_url` serializes it.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.url.to_bstring().to_string())
    }
}
impl fmt::Display for Pinned {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// git+<url/to/repo>?<ref_kind>=<ref_string>#<commit>
write!(
f,
"{}+{}?{}#{}",
Self::PREFIX,
self.source.repo,
self.source.reference,
self.commit_hash
)
}
}
impl fmt::Display for Reference {
    /// Writes the `<ref_kind>[=<ref_string>]` component of a pinned source.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Reference::Branch(name) => write!(f, "branch={name}"),
            Reference::Tag(name) => write!(f, "tag={name}"),
            // The revision itself is carried by the commit hash component, so
            // only the kind is written here.
            Reference::Rev(_) => f.write_str("rev"),
            Reference::DefaultBranch => f.write_str("default-branch"),
        }
    }
}
impl FromStr for Url {
    type Err = anyhow::Error;
    /// Parses a git URL via `gix_url`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        gix_url::Url::from_bytes(s.as_bytes().into())
            .map(|url| Self { url })
            .map_err(|e| anyhow!("{}", e))
    }
}
impl FromStr for Pinned {
    type Err = PinnedParseError;
    /// Parses the `git+<url/to/repo>?<reference>#<commit>` form produced by
    /// this type's `Display` implementation.
    ///
    /// # Errors
    ///
    /// Returns the `PinnedParseError` variant naming the first component that
    /// is missing or malformed.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim();
        // Strip the leading "git+".
        let prefix_plus = format!("{}+", Self::PREFIX);
        let s = s.strip_prefix(&prefix_plus).ok_or(PinnedParseError::Prefix)?;
        // Split off the `repo` URL at the first `?`. Using `split_once` (rather
        // than slicing past the first `split` component) means input without a
        // `?` yields `Err(Url)` instead of an out-of-bounds slice panic.
        let (repo_str, rest) = s.split_once('?').ok_or(PinnedParseError::Url)?;
        let repo = Url::from_str(repo_str).map_err(|_| PinnedParseError::Url)?;
        // Parse the git reference and commit hash. This can be any of either:
        // - `branch=<branch-name>#<commit-hash>`
        // - `tag=<tag-name>#<commit-hash>`
        // - `rev#<commit-hash>`
        // - `default-branch#<commit-hash>`
        let (reference, commit_hash) = rest.split_once('#').ok_or(PinnedParseError::CommitHash)?;
        let commit_hash = commit_hash.to_string();
        validate_git_commit_hash(&commit_hash).map_err(|_| PinnedParseError::CommitHash)?;
        let reference = if let Some(branch) = reference.strip_prefix("branch=") {
            Reference::Branch(branch.to_string())
        } else if let Some(tag) = reference.strip_prefix("tag=") {
            Reference::Tag(tag.to_string())
        } else if reference == "rev" {
            // A `rev` reference carries its revision in the commit hash component.
            Reference::Rev(commit_hash.clone())
        } else if reference == "default-branch" {
            Reference::DefaultBranch
        } else {
            return Err(PinnedParseError::Reference);
        };
        let source = Source { repo, reference };
        Ok(Self {
            source,
            commit_hash,
        })
    }
}
impl Default for Reference {
    /// Defaults to the remote's default branch (i.e. whatever its HEAD points to).
    fn default() -> Self {
        Self::DefaultBranch
    }
}
impl From<Pinned> for source::Pinned {
    /// Wraps a pinned git source in the source-kind enum.
    fn from(p: Pinned) -> Self {
        source::Pinned::Git(p)
    }
}
/// The name to use for a package's git repository under the user's forc directory.
fn git_repo_dir_name(name: &str, repo: &Url) -> String {
use std::hash::{Hash, Hasher};
fn hash_url(url: &Url) -> u64 {
let mut hasher = hash_map::DefaultHasher::new();
url.hash(&mut hasher);
hasher.finish()
}
let repo_url_hash = hash_url(repo);
format!("{name}-{repo_url_hash:x}")
}
/// Validates that `commit_hash` looks like a full git commit hash: exactly 40
/// ASCII hexadecimal characters.
///
/// # Errors
///
/// Returns an error if the length is not 40, or if any character is not a hex
/// digit.
fn validate_git_commit_hash(commit_hash: &str) -> Result<()> {
    const LEN: usize = 40;
    if commit_hash.len() != LEN {
        bail!(
            "invalid hash length: expected {}, found {}",
            LEN,
            commit_hash.len()
        );
    }
    // Git object IDs are hexadecimal; `is_ascii_alphanumeric` would wrongly
    // accept characters such as `g` or `z`.
    if !commit_hash.chars().all(|c| c.is_ascii_hexdigit()) {
        bail!("hash contains one or more non-hexadecimal characters");
    }
    Ok(())
}
/// A temporary directory that we can use for cloning a git-sourced package's repo and discovering
/// the current HEAD for the given git reference.
///
/// The resulting directory is:
///
/// ```ignore
/// $HOME/.forc/git/checkouts/tmp/<fetch_id>-name-<repo_url_hash>
/// ```
///
/// A unique `fetch_id` may be specified to avoid contention over the git repo directory in the
/// case that multiple processes or threads may be building different projects that may require
/// fetching the same dependency.
fn tmp_git_repo_dir(fetch_id: u64, name: &str, repo: &Url) -> PathBuf {
    let tmp_root = git_checkouts_directory().join("tmp");
    tmp_root.join(format!("{fetch_id:x}-{}", git_repo_dir_name(name, repo)))
}
/// Given a git reference, build a list of `refspecs` required for the fetch operation.
///
/// Also returns whether or not our reference implies we require fetching tags.
fn git_ref_to_refspecs(reference: &Reference) -> (Vec<String>, bool) {
    match reference {
        Reference::Branch(s) => (
            vec![format!(
                "+refs/heads/{s}:refs/remotes/{DEFAULT_REMOTE_NAME}/{s}"
            )],
            false,
        ),
        Reference::Tag(s) => (
            vec![format!(
                "+refs/tags/{s}:refs/remotes/{DEFAULT_REMOTE_NAME}/tags/{s}"
            )],
            false,
        ),
        // A fully-qualified ref can be fetched directly.
        Reference::Rev(s) if s.starts_with("refs/") => (vec![format!("+{s}:{s}")], false),
        // A bare revision can't be fetched directly, so fetch all branches
        // (plus HEAD) and all tags in order to find it.
        Reference::Rev(_) => (
            vec![
                format!("+refs/heads/*:refs/remotes/{DEFAULT_REMOTE_NAME}/*"),
                format!("+HEAD:refs/remotes/{DEFAULT_REMOTE_NAME}/HEAD"),
            ],
            true,
        ),
        Reference::DefaultBranch => (
            vec![format!("+HEAD:refs/remotes/{DEFAULT_REMOTE_NAME}/HEAD")],
            false,
        ),
    }
}
/// Initializes a temporary git repo for the package and fetches only the reference associated with
/// the given source.
///
/// The temporary directory is removed again when this function returns (or
/// panics), whatever the outcome.
fn with_tmp_git_repo<F, O>(fetch_id: u64, name: &str, source: &Source, f: F) -> Result<O>
where
    F: FnOnce(git2::Repository) -> Result<O>,
{
    // Clear existing temporary directory if it exists.
    let repo_dir = tmp_git_repo_dir(fetch_id, name, &source.repo);
    if repo_dir.exists() {
        let _ = std::fs::remove_dir_all(&repo_dir);
    }
    // Add a guard to ensure cleanup happens if we got out of scope whether by
    // returning or panicking.
    let _cleanup_guard = scopeguard::guard(&repo_dir, |dir| {
        let _ = std::fs::remove_dir_all(dir);
    });
    // Open the user's default git config. Propagate failure instead of
    // panicking: a missing or unreadable config is an environment problem the
    // caller should be able to report.
    let config = git2::Config::open_default()
        .context("failed to open the default git configuration")?;
    // Init auth manager
    let mut auth_handler = auth::AuthHandler::default_with_config(config);
    // Setup remote callbacks
    let mut callback = git2::RemoteCallbacks::new();
    callback.credentials(move |url, username, allowed| {
        auth_handler.handle_callback(url, username, allowed)
    });
    // Initialise the repository.
    let repo = git2::Repository::init(&repo_dir)
        .map_err(|e| anyhow!("failed to init repo at \"{}\": {}", repo_dir.display(), e))?;
    // Fetch the necessary references.
    let (refspecs, tags) = git_ref_to_refspecs(&source.reference);
    // Fetch the refspecs.
    let mut fetch_opts = git2::FetchOptions::new();
    fetch_opts.remote_callbacks(callback);
    if tags {
        fetch_opts.download_tags(git2::AutotagOption::All);
    }
    let repo_url_string = source.repo.to_string();
    repo.remote_anonymous(&repo_url_string)?
        .fetch(&refspecs, Some(&mut fetch_opts), None)
        .with_context(|| {
            format!(
                "failed to fetch `{}`. Check your connection or run in `--offline` mode",
                &repo_url_string
            )
        })?;
    // Call the user function.
    let output = f(repo)?;
    Ok(output)
}
/// Pin the given git-sourced package.
///
/// This clones the repository to a temporary directory in order to determine the commit at the
/// HEAD of the given git reference.
pub fn pin(fetch_id: u64, name: &str, source: Source) -> Result<Pinned> {
    let commit_hash = with_tmp_git_repo(fetch_id, name, &source, |repo| {
        // Resolve the reference to its commit ID and render it as a hash string.
        source
            .reference
            .resolve(&repo)
            .map(|commit_id| commit_id.to_string())
            .with_context(|| format!("Failed to resolve manifest reference: {source}"))
    })?;
    Ok(Pinned {
        source,
        commit_hash,
    })
}
/// The path to which a git package commit should be checked out.
///
/// The resulting directory is:
///
/// ```ignore
/// $HOME/.forc/git/checkouts/name-<repo_url_hash>/<commit_hash>
/// ```
///
/// where `<repo_url_hash>` is a hash of the source repository URL.
pub fn commit_path(name: &str, repo: &Url, commit_hash: &str) -> PathBuf {
    let base = git_checkouts_directory();
    base.join(git_repo_dir_name(name, repo)).join(commit_hash)
}
/// Fetch the repo at the given git package's URL and checkout the pinned commit.
///
/// Returns the location of the checked out commit.
///
/// NOTE: This function assumes that the caller has acquired an advisory lock to co-ordinate access
/// to the git repository checkout path.
pub fn fetch(fetch_id: u64, name: &str, pinned: &Pinned) -> Result<PathBuf> {
    let path = commit_path(name, &pinned.source.repo, &pinned.commit_hash);
    // Checkout the pinned hash to the path.
    with_tmp_git_repo(fetch_id, name, pinned, |repo| {
        // Change HEAD to point to the pinned commit.
        let id = git2::Oid::from_str(&pinned.commit_hash)?;
        repo.set_head_detached(id)?;
        // If the directory exists, remove it. Note that we already check for an existing,
        // cached checkout directory for re-use prior to reaching the `fetch` function.
        if path.exists() {
            let _ = fs::remove_dir_all(&path);
        }
        fs::create_dir_all(&path)?;
        // Checkout HEAD to the target directory.
        // Note: only the working tree is written to `path`; the `.git` folder
        // itself is not kept (see `SourceIndex`), which is why an index file is
        // written below.
        let mut checkout = git2::build::CheckoutBuilder::new();
        checkout.force().target_dir(&path);
        repo.checkout_head(Some(&mut checkout))?;
        // Fetch HEAD time and create an index
        let current_head = repo.revparse_single("HEAD")?;
        let head_commit = current_head
            .as_commit()
            .ok_or_else(|| anyhow!("Cannot get commit from {}", current_head.id()))?;
        let head_time = head_commit.time().seconds();
        let source_index = SourceIndex::new(
            head_time,
            pinned.source.reference.clone(),
            pinned.commit_hash.clone(),
        );
        // Write the index file
        fs::write(
            path.join(".forc_index"),
            serde_json::to_string(&source_index)?,
        )?;
        Ok(())
    })?;
    Ok(path)
}
/// Search local checkout dir for git sources, for non-branch git references tries to find the
/// exact match. For branch references, tries to find the most recent repo present locally with the given repo
pub(crate) fn search_source_locally(
    name: &str,
    git_source: &Source,
) -> Result<Option<(PathBuf, String)>> {
    // In the checkouts dir iterate over dirs whose name starts with `name`
    let checkouts_dir = git_checkouts_directory();
    if let Reference::Branch(branch) = &git_source.reference {
        // Branches move over time, so of the local checkouts from this branch
        // pick the one whose HEAD commit is the most recent.
        let repos_from_branch = collect_local_repos_with_branch(checkouts_dir, name, branch)?;
        let newest = repos_from_branch
            .into_iter()
            .max_by_key(|(_, (_, time))| *time)
            .map(|(repo_path, (hash, _))| (repo_path, hash));
        Ok(newest)
    } else {
        find_exact_local_repo_with_reference(checkouts_dir, name, &git_source.reference)
    }
}
/// Search and collect repos from checkouts_dir that are from given branch and for the given package
fn collect_local_repos_with_branch(
    checkouts_dir: PathBuf,
    package_name: &str,
    branch_name: &str,
) -> Result<Vec<(PathBuf, HeadWithTime)>> {
    let mut matches = Vec::new();
    with_search_checkouts(checkouts_dir, package_name, |repo_index, repo_dir_path| {
        // Only keep checkouts that were created from the requested branch.
        match repo_index.git_reference {
            Reference::Branch(ref b) if b.as_str() == branch_name => {
                matches.push((repo_dir_path, repo_index.head_with_time));
            }
            _ => {}
        }
        Ok(())
    })?;
    Ok(matches)
}
/// Search an exact reference in locally available repos
fn find_exact_local_repo_with_reference(
    checkouts_dir: PathBuf,
    package_name: &str,
    git_reference: &Reference,
) -> Result<Option<(PathBuf, String)>> {
    // Only tags and revs identify an exact commit; branch-like references are
    // handled by the caller and anything else yields no match.
    match git_reference {
        Reference::Tag(tag) => find_repo_with_tag(tag, package_name, checkouts_dir),
        Reference::Rev(rev) => find_repo_with_rev(rev, package_name, checkouts_dir),
        _ => Ok(None),
    }
}
/// Search and find the match repo between the given tag and locally available options
fn find_repo_with_tag(
tag: &str,
package_name: &str,
checkouts_dir: PathBuf,
) -> Result<Option<(PathBuf, String)>> {
let mut found_local_repo = None;
with_search_checkouts(checkouts_dir, package_name, |repo_index, repo_dir_path| {
// Get current head of the repo
let current_head = repo_index.head_with_time.0;
if let Reference::Tag(curr_repo_tag) = repo_index.git_reference {
if curr_repo_tag == tag {
found_local_repo = Some((repo_dir_path, current_head));
}
}
Ok(())
})?;
Ok(found_local_repo)
}
/// Search and find the match repo between the given rev and locally available options
fn find_repo_with_rev(
    rev: &str,
    package_name: &str,
    checkouts_dir: PathBuf,
) -> Result<Option<(PathBuf, String)>> {
    let mut found_local_repo = None;
    with_search_checkouts(checkouts_dir, package_name, |repo_index, repo_dir_path| {
        if let Reference::Rev(checked_out_rev) = &repo_index.git_reference {
            if checked_out_rev.as_str() == rev {
                // Record the checked-out commit hash for this matching rev.
                let (commit_hash, _) = repo_index.head_with_time;
                found_local_repo = Some((repo_dir_path, commit_hash));
            }
        }
        Ok(())
    })?;
    Ok(found_local_repo)
}
/// Search local checkouts directory and apply the given function. This is used for iterating over
/// possible options of a given package.
///
/// Note: any error returned by `f`, and any failure to deserialize a found
/// `.forc_index` file, aborts the whole search; checkouts without a readable
/// index file are silently skipped.
fn with_search_checkouts<F>(checkouts_dir: PathBuf, package_name: &str, mut f: F) -> Result<()>
where
    F: FnMut(SourceIndex, PathBuf) -> Result<()>,
{
    for entry in fs::read_dir(checkouts_dir)? {
        let entry = entry?;
        let folder_name = entry
            .file_name()
            .into_string()
            .map_err(|_| anyhow!("invalid folder name"))?;
        // Checkout dirs are named `<package_name>-<repo_url_hash>` (see
        // `git_repo_dir_name`), hence the prefix match.
        if folder_name.starts_with(package_name) {
            // Search if the dir we are looking starts with the name of our package
            for repo_dir in fs::read_dir(entry.path())? {
                // Iterate over all dirs inside the `name-***` directory and try to open repo from
                // each dirs inside this one
                let repo_dir = repo_dir
                    .map_err(|e| anyhow!("Cannot find local repo at checkouts dir {}", e))?;
                if repo_dir.file_type()?.is_dir() {
                    // Get the path of the current repo
                    let repo_dir_path = repo_dir.path();
                    // Get the index file from the found path
                    if let Ok(index_file) = fs::read_to_string(repo_dir_path.join(".forc_index")) {
                        let index = serde_json::from_str(&index_file)?;
                        f(index, repo_dir_path)?;
                    }
                }
            }
        }
    }
    Ok(())
}
#[test]
fn test_source_git_pinned_parsing() {
    // Each case checks that parsing the string yields the expected `Pinned`
    // and that `Display` round-trips back to the exact same string.
    let cases = [
        (
            "git+https://github.com/foo/bar?branch=baz#64092602dd6158f3e41d775ed889389440a2cd86",
            Pinned {
                source: Source {
                    repo: Url::from_str("https://github.com/foo/bar").unwrap(),
                    reference: Reference::Branch("baz".to_string()),
                },
                commit_hash: "64092602dd6158f3e41d775ed889389440a2cd86".to_string(),
            },
        ),
        (
            "git+https://github.com/fuellabs/sway-lib-std?tag=v0.1.0#0000000000000000000000000000000000000000",
            Pinned {
                source: Source {
                    repo: Url::from_str("https://github.com/fuellabs/sway-lib-std").unwrap(),
                    reference: Reference::Tag("v0.1.0".to_string()),
                },
                commit_hash: "0000000000000000000000000000000000000000".to_string(),
            },
        ),
        (
            "git+https://some-git-host.com/owner/repo?rev#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
            Pinned {
                source: Source {
                    repo: Url::from_str("https://some-git-host.com/owner/repo").unwrap(),
                    reference: Reference::Rev(
                        "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".to_string(),
                    ),
                },
                commit_hash: "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".to_string(),
            },
        ),
        (
            "git+https://some-git-host.com/owner/repo?default-branch#AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
            Pinned {
                source: Source {
                    repo: Url::from_str("https://some-git-host.com/owner/repo").unwrap(),
                    reference: Reference::DefaultBranch,
                },
                commit_hash: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA".to_string(),
            },
        ),
    ];
    for (string, expected) in &cases {
        let parsed = Pinned::from_str(string).unwrap();
        assert_eq!(&parsed, expected);
        assert_eq!(expected.to_string(), *string);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/manifest/dep_modifier.rs | forc-pkg/src/manifest/dep_modifier.rs | use crate::manifest::{
ContractDependency, Dependency, DependencyDetails, GenericManifestFile, HexSalt,
};
use crate::source::IPFSNode;
use crate::{self as pkg, Lock, PackageManifestFile};
use anyhow::{anyhow, bail, Result};
use pkg::manifest::ManifestFile;
use std::collections::BTreeMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use sway_core::fuel_prelude::fuel_tx;
use toml_edit::{DocumentMut, InlineTable, Item, Table, Value};
use tracing::info;
/// Whether `modify_dependencies` should add or remove the listed dependencies.
#[derive(Clone, Debug, Default)]
pub enum Action {
    /// Add dependencies to the manifest (the default).
    #[default]
    Add,
    /// Remove dependencies from the manifest.
    Remove,
}
/// Options controlling how `modify_dependencies` edits a package's `Forc.toml`.
#[derive(Clone, Debug, Default)]
pub struct ModifyOpts {
    // === Manifest Options ===
    /// Explicit path to the manifest; falls back to the current directory.
    pub manifest_path: Option<String>,
    // === Package Selection ===
    /// Which workspace member to modify (required when editing a workspace).
    pub package: Option<String>,
    // === Source (Add only) ===
    pub source_path: Option<String>,
    pub git: Option<String>,
    pub branch: Option<String>,
    pub tag: Option<String>,
    pub rev: Option<String>,
    pub ipfs: Option<String>,
    // === Section ===
    /// Target `[contract-dependencies]` instead of `[dependencies]`.
    pub contract_deps: bool,
    /// Hex-encoded salt used for contract dependencies.
    pub salt: Option<String>,
    // === IPFS Node ===
    pub ipfs_node: Option<IPFSNode>,
    // === Dependencies & Flags ===
    /// Dependency specs, each `name` or `name@<version-req>`.
    pub dependencies: Vec<String>,
    /// Validate the edit but restore the original files afterwards.
    pub dry_run: bool,
    pub offline: bool,
    pub action: Action,
}
/// Add or remove dependencies in a package's `Forc.toml`.
///
/// The edited manifest is validated by re-computing the build plan; if that
/// fails (or `dry_run` is set) the original manifest — and, for dry runs, the
/// lock file — is restored from a backup taken before editing.
pub fn modify_dependencies(opts: ModifyOpts) -> Result<()> {
    let manifest_file = if let Some(p) = &opts.manifest_path {
        let path = &PathBuf::from(p);
        ManifestFile::from_file(path)?
    } else {
        let cwd = std::env::current_dir()?;
        ManifestFile::from_dir(cwd)?
    };
    let root_dir = manifest_file.root_dir();
    let member_manifests = manifest_file.member_manifests()?;
    let package_manifest_dir =
        resolve_package_path(&manifest_file, &opts.package, &root_dir, &member_manifests)?;
    let content = std::fs::read_to_string(&package_manifest_dir)?;
    let mut toml_doc = content.parse::<DocumentMut>()?;
    // Pristine copy used to roll back the file if validation fails below.
    let backup_doc = toml_doc.clone();
    let old_package_manifest = PackageManifestFile::from_file(&package_manifest_dir)?;
    let lock_path = old_package_manifest.lock_path()?;
    let old_lock = Lock::from_path(&lock_path).ok().unwrap_or_default();
    let section = if opts.contract_deps {
        Section::ContractDeps
    } else {
        Section::Deps
    };
    match opts.action {
        Action::Add => {
            for dependency in &opts.dependencies {
                let (dep_name, dependency_data) = resolve_dependency(
                    dependency,
                    &opts,
                    &member_manifests,
                    &old_package_manifest.dir().to_path_buf(),
                )?;
                section.add_deps_manifest_table(
                    &mut toml_doc,
                    dep_name,
                    dependency_data,
                    opts.salt.clone(),
                )?;
            }
        }
        Action::Remove => {
            let dep_refs: Vec<&str> = opts.dependencies.iter().map(String::as_str).collect();
            section.remove_deps_manifest_table(&mut toml_doc, &dep_refs)?;
        }
    }
    // write updates to toml doc
    std::fs::write(&package_manifest_dir, toml_doc.to_string())?;
    // Validate the edited manifest by attempting to build a plan from it.
    let updated_package_manifest = PackageManifestFile::from_file(&package_manifest_dir)?;
    let member_manifests = updated_package_manifest.member_manifests()?;
    let new_plan = pkg::BuildPlan::from_lock_and_manifests(
        &lock_path,
        &member_manifests,
        false,
        opts.offline,
        &opts.ipfs_node.clone().unwrap_or_default(),
    );
    // On failure, restore the original manifest before propagating the error.
    new_plan.or_else(|e| {
        std::fs::write(&package_manifest_dir, backup_doc.to_string())
            .map_err(|write_err| anyhow!("failed to write toml file: {}", write_err))?;
        Err(e)
    })?;
    if opts.dry_run {
        info!("Dry run enabled. toml file not modified.");
        // Restore both the manifest and the lock file to their prior state.
        std::fs::write(&package_manifest_dir, backup_doc.to_string())?;
        let string = toml::ser::to_string_pretty(&old_lock)?;
        std::fs::write(&lock_path, string)?;
        return Ok(());
    }
    Ok(())
}
/// Determine which `Forc.toml` should be edited.
///
/// A workspace requires an explicit `--package` selection; a standalone
/// package is used directly unless a package name is given.
fn resolve_package_path(
    manifest_file: &ManifestFile,
    package: &Option<String>,
    root_dir: &Path,
    member_manifests: &BTreeMap<String, PackageManifestFile>,
) -> Result<PathBuf> {
    match (package, manifest_file.is_workspace()) {
        (Some(package_name), _) => {
            resolve_workspace_path_inner(member_manifests, package_name, root_dir)
        }
        (None, false) => Ok(manifest_file.path().to_path_buf()),
        (None, true) => {
            let packages = member_manifests
                .keys()
                .cloned()
                .collect::<Vec<_>>()
                .join(", ");
            bail!("`forc add` could not determine which package to modify. Use --package.\nAvailable: {}", packages);
        }
    }
}
fn resolve_workspace_path_inner(
member_manifests: &BTreeMap<String, PackageManifestFile>,
package_name: &str,
root_dir: &Path,
) -> Result<PathBuf> {
if let Some(dir) = member_manifests.get(package_name) {
Ok(dir.path().to_path_buf())
} else {
bail!(
"package(s) {} not found in workspace {}",
package_name,
root_dir.to_string_lossy()
)
}
}
fn resolve_dependency(
raw: &str,
opts: &ModifyOpts,
member_manifests: &BTreeMap<String, PackageManifestFile>,
package_dir: &PathBuf,
) -> Result<(String, Dependency)> {
let dep_spec: DepSpec = raw.parse()?;
let dep_name = dep_spec.name;
let mut details = DependencyDetails {
version: dep_spec.version_req.clone(),
namespace: None,
path: opts.source_path.clone(),
git: opts.git.clone(),
branch: opts.branch.clone(),
tag: opts.tag.clone(),
package: None,
rev: opts.rev.clone(),
ipfs: opts.ipfs.clone(),
};
details.validate()?;
let dependency_data = if let Some(version) = dep_spec.version_req {
Dependency::Simple(version)
} else if details.is_source_empty() {
if let Some(member) = member_manifests.get(&dep_name) {
if member.dir() == package_dir {
bail!("cannot add `{}` as a dependency to itself", dep_name);
}
let sibling_parent = package_dir.parent().unwrap();
let rel_path = member
.dir()
.strip_prefix(sibling_parent)
.map(|p| PathBuf::from("..").join(p))
.unwrap_or_else(|_| member.dir().to_path_buf());
details.path = Some(rel_path.to_string_lossy().to_string());
Dependency::Detailed(details)
} else {
// Fallback: no explicit source & not a sibling package.
// TODO: Integrate registry support (e.g., forc.pub) here.
bail!(
"dependency `{}` source not specified. Please specify a source (e.g., git, path) or version.",
dep_name
);
}
} else {
Dependency::Detailed(details)
};
Ok((dep_name, dependency_data))
}
/// Reference to a package to be added as a dependency.
///
/// See `forc add` help for more info.
#[derive(Clone, Debug, Default)]
pub struct DepSpec {
    /// The dependency's package name.
    pub name: String,
    /// Optional semver requirement from the `name@<req>` form.
    pub version_req: Option<String>,
}
impl FromStr for DepSpec {
    type Err = anyhow::Error;
    /// Parses a dependency spec of the form `name` or `name@version_req`.
    ///
    /// Errors when the spec is empty, the name portion is empty (e.g. `@1.0`),
    /// or the version requirement is not valid semver. Splitting on the first
    /// `@` only means a stray second `@` ends up inside the version
    /// requirement and is rejected, instead of being silently discarded as
    /// before.
    fn from_str(s: &str) -> anyhow::Result<Self> {
        let s = s.trim();
        if s.is_empty() {
            bail!("Dependency spec cannot be empty");
        }
        let (name, version_req) = match s.split_once('@') {
            Some((name, version)) => (name, Some(version.to_string())),
            None => (s, None),
        };
        if name.is_empty() {
            bail!("missing dependency name");
        }
        if let Some(ref v) = version_req {
            semver::VersionReq::parse(v)
                .map_err(|_| anyhow::anyhow!("invalid version requirement `{v}`"))?;
        }
        Ok(Self {
            name: name.to_string(),
            version_req,
        })
    }
}
impl fmt::Display for DepSpec {
    /// Renders the spec back to its `name` / `name@version` string form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.name)?;
        if let Some(version) = &self.version_req {
            write!(f, "@{version}")?;
        }
        Ok(())
    }
}
/// The `Forc.toml` table targeted by a dependency modification.
#[derive(Clone)]
pub enum Section {
    /// The `[dependencies]` table.
    Deps,
    /// The `[contract-dependencies]` table.
    ContractDeps,
}
impl fmt::Display for Section {
    /// Writes the TOML table name corresponding to this section.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Section::Deps => "dependencies",
            Section::ContractDeps => "contract-dependencies",
        })
    }
}
impl Section {
pub fn add_deps_manifest_table(
&self,
doc: &mut DocumentMut,
dep_name: String,
dep_data: Dependency,
salt: Option<String>,
) -> Result<()> {
let section_name = self.to_string();
if !doc.as_table().contains_key(§ion_name) {
doc[§ion_name] = Item::Table(Table::new());
}
let table = doc[section_name.as_str()].as_table_mut().unwrap();
match self {
Section::Deps => {
let item = match dep_data {
Dependency::Simple(ver) => ver.to_string().into(),
Dependency::Detailed(details) => {
Item::Value(toml_edit::Value::InlineTable(generate_table(&details)))
}
};
table.insert(&dep_name, item);
}
Section::ContractDeps => {
let resolved_salt = match salt.as_ref().or(salt.as_ref()) {
Some(s) => {
HexSalt::from_str(s).map_err(|e| anyhow!("Invalid salt format: {}", e))?
}
None => HexSalt(fuel_tx::Salt::default()),
};
let contract_dep = ContractDependency {
dependency: dep_data,
salt: resolved_salt.clone(),
};
let dep = &contract_dep.dependency;
let salt: &HexSalt = &contract_dep.salt;
let item = match dep {
Dependency::Simple(ver) => {
let mut inline = InlineTable::default();
inline.insert("version", Value::from(ver.to_string()));
inline.insert("salt", Value::from(format!("0x{salt}")));
Item::Value(toml_edit::Value::InlineTable(inline))
}
Dependency::Detailed(details) => {
let mut inline = generate_table(details);
inline.insert("salt", Value::from(format!("0x{salt}")));
Item::Value(toml_edit::Value::InlineTable(inline))
}
};
table.insert(&dep_name, item);
}
};
Ok(())
}
pub fn remove_deps_manifest_table(self, doc: &mut DocumentMut, deps: &[&str]) -> Result<()> {
let section_name = self.to_string();
let section_table = doc[section_name.as_str()].as_table_mut().ok_or_else(|| {
anyhow!(
"the dependency `{}` could not be found in `{}`",
deps.join(", "),
section_name,
)
})?;
match self {
Section::Deps => {
for dep in deps {
if !section_table.contains_key(dep) {
bail!(
"the dependency `{}` could not be found in `{}`",
dep,
section_name
);
}
section_table.remove(dep);
}
}
Section::ContractDeps => {
for dep in deps {
if !section_table.contains_key(dep) {
bail!(
"the dependency `{}` could not be found in `{}`",
dep,
section_name
);
}
section_table.remove(dep);
}
}
}
Ok(())
}
}
/// Builds an inline TOML table from the populated fields of `details`.
///
/// Only fields that are `Some` are emitted, in a fixed order; note that the
/// IPFS source is written under the `cid` key.
fn generate_table(details: &DependencyDetails) -> InlineTable {
    let mut inline = InlineTable::default();
    let fields = [
        ("version", &details.version),
        ("git", &details.git),
        ("branch", &details.branch),
        ("tag", &details.tag),
        ("rev", &details.rev),
        ("path", &details.path),
        ("cid", &details.ipfs),
    ];
    for (key, value) in fields {
        if let Some(value) = value {
            inline.insert(key, Value::from(value.to_string()));
        }
    }
    inline
}
#[cfg(test)]
mod tests {
use super::*;
use crate::WorkspaceManifestFile;
use std::fs;
use std::str::FromStr;
use tempfile::{tempdir, TempDir};
    /// Creates a throw-away package on disk (a `Forc.toml` plus the given
    /// `src/` source files) and loads its manifest.
    ///
    /// Returns the `TempDir` guard alongside the manifest so the directory
    /// outlives the test using it.
    fn create_test_package(
        name: &str,
        source_files: Vec<(&str, &str)>,
    ) -> Result<(TempDir, PackageManifestFile)> {
        let temp_dir = tempdir()?;
        let base_path = temp_dir.path();
        // Create package structure
        fs::create_dir_all(base_path.join("src"))?;
        // Create Forc.toml
        let forc_toml = format!(
            r#"
            [project]
            authors = ["Test"]
            entry = "main.sw"
            license = "MIT"
            name = "{name}"
            [dependencies]
            "#
        );
        fs::write(base_path.join("Forc.toml"), forc_toml)?;
        // Create source files
        for (file_name, content) in source_files {
            // Handle nested directories in the file path
            let file_path = base_path.join("src").join(file_name);
            if let Some(parent) = file_path.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::write(file_path, content)?;
        }
        // Create the manifest file
        let manifest_file = PackageManifestFile::from_file(base_path.join("Forc.toml"))?;
        Ok((temp_dir, manifest_file))
    }
    /// Creates a throw-away workspace on disk with the given members (each a
    /// `(name, source_files)` pair) and loads the workspace manifest.
    ///
    /// Returns the `TempDir` guard alongside the manifest so the directory
    /// outlives the test using it.
    fn create_test_workspace(
        members: Vec<(&str, Vec<(&str, &str)>)>,
    ) -> Result<(TempDir, WorkspaceManifestFile)> {
        let temp_dir = tempdir()?;
        let base_path = temp_dir.path();
        // Create workspace Forc.toml
        let mut workspace_toml = "[workspace]\nmembers = [".to_string();
        for (i, (name, _)) in members.iter().enumerate() {
            if i > 0 {
                workspace_toml.push_str(", ");
            }
            workspace_toml.push_str(&format!("\"{name}\""));
        }
        workspace_toml.push_str("]\n");
        fs::write(base_path.join("Forc.toml"), workspace_toml)?;
        // Create each member
        for (name, source_files) in members {
            let member_path = base_path.join(name);
            fs::create_dir_all(member_path.join("src"))?;
            // Create member Forc.toml
            let forc_toml = format!(
                r#"
                [project]
                authors = ["Test"]
                entry = "main.sw"
                license = "MIT"
                name = "{name}"
                [dependencies]
                "#
            );
            fs::write(member_path.join("Forc.toml"), forc_toml)?;
            // Create source files
            for (file_name, content) in source_files {
                // Handle nested directories in the file path
                let file_path = member_path.join("src").join(file_name);
                if let Some(parent) = file_path.parent() {
                    fs::create_dir_all(parent)?;
                }
                fs::write(file_path, content)?;
            }
        }
        // Create the workspace manifest file
        let manifest_file = WorkspaceManifestFile::from_file(base_path.join("Forc.toml"))?;
        Ok((temp_dir, manifest_file))
    }
#[test]
fn test_dep_from_str_name_only() {
let dep: DepSpec = "abc".parse().expect("parsing dep spec failed");
assert_eq!(dep.name, "abc".to_string());
assert_eq!(dep.version_req, None);
}
#[test]
fn test_dep_from_str_name_and_version() {
let dep: DepSpec = "abc@1".parse().expect("parsing dep spec failed");
assert_eq!(dep.name, "abc".to_string());
assert_eq!(dep.version_req, Some("1".to_string()));
}
#[test]
fn test_dep_spec_invalid_version_req() {
let input = "foo@not-a-version";
let result = DepSpec::from_str(input);
assert!(result.is_err());
assert!(
result
.unwrap_err()
.to_string()
.contains("invalid version requirement"),
"Expected version requirement parse failure"
);
}
#[test]
fn test_dep_from_str_invalid() {
assert!(DepSpec::from_str("").is_err());
}
#[test]
fn test_resolve_package_path_single_package_mode() {
let (temp_dir, pkg_manifest) =
create_test_package("test_pkg", vec![("main.sw", "fn main() -> u64 { 42 }")]).unwrap();
let package_spec_dir = temp_dir.path().to_path_buf();
let expected_path = pkg_manifest.path;
let manifest_file = ManifestFile::from_dir(&package_spec_dir).unwrap();
let members = manifest_file.member_manifests().unwrap();
let root_dir = manifest_file.root_dir();
let result = resolve_package_path(&manifest_file, &None, &root_dir, &members).unwrap();
assert_eq!(result, expected_path);
}
#[test]
fn test_resolve_package_path_workspace_with_package_found() {
let (temp_dir, _) = create_test_workspace(vec![
("pkg1", vec![("main.sw", "fn main() -> u64 { 1 }")]),
("pkg2", vec![("main.sw", "fn main() -> u64 { 2 }")]),
])
.unwrap();
let base_path = temp_dir.path();
let expected_path = base_path.join("pkg1/Forc.toml");
let manifest_file = ManifestFile::from_dir(base_path).unwrap();
let members = manifest_file.member_manifests().unwrap();
let root_dir = manifest_file.root_dir();
let package = "pkg1".to_string();
let result =
resolve_package_path(&manifest_file, &Some(package), &root_dir, &members).unwrap();
assert_eq!(result, expected_path);
}
#[test]
fn test_resolve_package_path_workspace_package_not_found() {
let (temp_dir, _) = create_test_workspace(vec![
("pkg1", vec![("main.sw", "fn main() -> u64 { 1 }")]),
("pkg2", vec![("main.sw", "fn main() -> u64 { 2 }")]),
])
.unwrap();
let base_path = temp_dir.path();
let manifest_file = ManifestFile::from_dir(base_path).unwrap();
let members = manifest_file.member_manifests().unwrap();
let root_dir = manifest_file.root_dir();
let err = resolve_package_path(
&manifest_file,
&Some("missing_pkg".into()),
&root_dir,
&members,
)
.unwrap_err();
assert!(
err.to_string().contains("package(s) missing_pkg not found"),
"unexpected error: {err}"
);
}
#[test]
fn test_resolve_package_path_workspace_package_not_set() {
let (temp_dir, _) = create_test_workspace(vec![
("pkg1", vec![("main.sw", "fn main() -> u64 { 1 }")]),
("pkg2", vec![("main.sw", "fn main() -> u64 { 2 }")]),
])
.unwrap();
let base_path = temp_dir.path();
let manifest_file = ManifestFile::from_dir(base_path).unwrap();
let members = manifest_file.member_manifests().unwrap();
let root_dir = manifest_file.root_dir();
let err = resolve_package_path(&manifest_file, &None, &root_dir, &members).unwrap_err();
let resp = "`forc add` could not determine which package to modify. Use --package.\nAvailable: pkg1, pkg2".to_string();
assert!(err.to_string().contains(&resp), "unexpected error: {err}");
}
#[test]
fn test_resolve_dependency_simple_version() {
let opts = ModifyOpts {
dependencies: vec!["dep@1.0.0".to_string()],
..Default::default()
};
let (temp_dir, _) =
create_test_package("test_pkg", vec![("main.sw", "fn main() -> u64 { 42 }")]).unwrap();
let package_spec_dir = temp_dir.path().to_path_buf();
let manifest_file = ManifestFile::from_dir(&package_spec_dir).unwrap();
let members = manifest_file.member_manifests().unwrap();
let (name, data) =
resolve_dependency("dep@1.0.0", &opts, &members, &package_spec_dir).unwrap();
assert_eq!(name, "dep");
match data {
Dependency::Simple(v) => assert_eq!(v, "1.0.0"),
_ => panic!("Expected simple dependency"),
}
}
#[test]
fn test_resolve_dependency_detailed_variants() {
let base_opts = ModifyOpts {
..Default::default()
};
let (temp_dir, _) =
create_test_package("test_pkg", vec![("main.sw", "fn main() -> u64 { 42 }")]).unwrap();
let package_spec_dir = temp_dir.path().to_path_buf();
let manifest_file = ManifestFile::from_dir(&package_spec_dir).unwrap();
let members = manifest_file.member_manifests().unwrap();
let dep = "dummy_dep";
let git = "https://github.com/example/repo.git";
// Git alone
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
let (name, data) = resolve_dependency(dep, &opts, &members, &package_spec_dir).unwrap();
assert_eq!(name, dep);
match data {
Dependency::Detailed(details) => {
assert_eq!(details.git.as_deref(), Some(git));
}
_ => panic!("Expected detailed dependency with git"),
}
}
// Git + branch
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.branch = Some("main".to_string());
let (name, data) = resolve_dependency(dep, &opts, &members, &package_spec_dir).unwrap();
assert_eq!(name, dep);
match data {
Dependency::Detailed(details) => {
assert_eq!(details.git.as_deref(), Some(git));
assert_eq!(details.branch.as_deref(), Some("main"));
}
_ => panic!("Expected detailed dependency with git+branch"),
}
}
// Git + rev
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.rev = Some("deadbeef".to_string());
let (name, data) = resolve_dependency(dep, &opts, &members, &package_spec_dir).unwrap();
assert_eq!(name, dep);
match data {
Dependency::Detailed(details) => {
assert_eq!(details.git.as_deref(), Some(git));
assert_eq!(details.rev.as_deref(), Some("deadbeef"));
}
_ => panic!("Expected detailed dependency with git+rev"),
}
}
// Git + tag
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.tag = Some("v1.2.3".to_string());
let (name, data) = resolve_dependency(dep, &opts, &members, &package_spec_dir).unwrap();
assert_eq!(name, dep);
match data {
Dependency::Detailed(details) => {
assert_eq!(details.git.as_deref(), Some(git));
assert_eq!(details.tag.as_deref(), Some("v1.2.3"));
}
_ => panic!("Expected detailed dependency with git+tag"),
}
}
// dep + ipfs
{
let mut opts = base_opts.clone();
opts.ipfs = Some("QmYwAPJzv5CZsnA".to_string());
let (name, data) = resolve_dependency(dep, &opts, &members, &package_spec_dir).unwrap();
assert_eq!(name, dep);
match data {
Dependency::Detailed(details) => {
assert_eq!(details.ipfs.as_deref(), Some("QmYwAPJzv5CZsnA"));
}
_ => panic!("Expected detailed dependency with git+tag"),
}
}
}
#[test]
fn test_resolve_dependency_detailed_variant_failure() {
let base_opts = ModifyOpts {
..Default::default()
};
let (temp_dir, _) =
create_test_package("test_pkg", vec![("main.sw", "fn main() -> u64 { 42 }")]).unwrap();
let package_spec_dir = temp_dir.path().to_path_buf();
let manifest_file = ManifestFile::from_dir(&package_spec_dir).unwrap();
let members = manifest_file.member_manifests().unwrap();
let dep = "dummy_dep";
let git = "https://github.com/example/repo.git";
// no Git + branch
{
let mut opts = base_opts.clone();
opts.branch = Some("main".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Details reserved for git sources used without a git field"));
}
// no Git + rev
{
let mut opts = base_opts.clone();
opts.rev = Some("deadbeef".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Details reserved for git sources used without a git field"));
}
// no Git + tag
{
let mut opts = base_opts.clone();
opts.tag = Some("v1.2.3".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Details reserved for git sources used without a git field"));
}
// git + tag + rev + branch
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.tag = Some("v1.2.3".to_string());
opts.rev = Some("deadbeef".to_string());
opts.branch = Some("main".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Cannot specify `branch`, `tag`, and `rev` together for dependency with a Git source"));
}
// git + branch + tag
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.tag = Some("v1.2.3".to_string());
opts.branch = Some("main".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains(
"Cannot specify both `branch` and `tag` for dependency with a Git source"
));
}
// git + tag + rev
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.tag = Some("v1.2.3".to_string());
opts.rev = Some("deadbeef".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Cannot specify both `rev` and `tag` for dependency with a Git source"));
}
// git + branch + rev
{
let mut opts = base_opts.clone();
opts.git = Some(git.to_string());
opts.rev = Some("deadbeef".to_string());
opts.branch = Some("main".to_string());
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains(
"Cannot specify both `branch` and `rev` for dependency with a Git source"
));
}
// no source provided
{
let opts = base_opts.clone();
let result = resolve_dependency(dep, &opts, &members, &package_spec_dir);
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains(
"dependency `dummy_dep` source not specified. Please specify a source (e.g., git, path) or version"
));
}
}
#[test]
fn test_resolve_dependency_from_workspace_sibling() {
let (temp_dir, _) = create_test_workspace(vec![
("pkg1", vec![("main.sw", "fn main() -> u64 { 1 }")]),
("pkg2", vec![("main.sw", "fn main() -> u64 { 2 }")]),
])
.unwrap();
let base_path = temp_dir.path();
let package_dir = base_path.join("pkg2");
let dep = "pkg1";
let manifest_file = ManifestFile::from_dir(base_path).unwrap();
let members = manifest_file.member_manifests().unwrap();
let opts = ModifyOpts {
source_path: None,
dependencies: vec![dep.to_string()],
package: Some("pkg2".to_string()),
..Default::default()
};
let (name, data) =
resolve_dependency(dep, &opts, &members, &package_dir).expect("should resolve");
assert_eq!(name, dep);
match data {
Dependency::Detailed(details) => {
assert!(details.path.is_some());
let actual_path = details.path.as_ref().unwrap();
assert_eq!(actual_path, "../pkg1");
}
_ => panic!("Expected detailed dependency with fallback path"),
}
}
#[test]
fn test_resolve_dependency_self_dependency_error() {
let (temp_dir, _) = create_test_workspace(vec![
("pkg1", vec![("main.sw", "fn main() -> u64 { 1 }")]),
("pkg2", vec![("main.sw", "fn main() -> u64 { 2 }")]),
])
.unwrap();
let base_path = temp_dir.path();
let package_dir = base_path.join("pkg1");
let dep = "pkg1";
let resp = format!("cannot add `{dep}` as a dependency to itself");
let manifest_file = ManifestFile::from_dir(base_path).unwrap();
let members = manifest_file.member_manifests().unwrap();
let opts = ModifyOpts {
dependencies: vec![dep.to_string()],
package: Some("package-1".to_string()),
..Default::default()
};
let error = resolve_dependency(dep, &opts, &members, &package_dir).unwrap_err();
assert!(error.to_string().contains(&resp));
}
#[test]
fn test_resolve_dependency_invalid_string() {
let opts = ModifyOpts {
dependencies: vec!["".to_string()],
..Default::default()
};
let result = resolve_dependency("", &opts, &BTreeMap::new(), &PathBuf::new());
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Dependency spec cannot be empty"));
}
#[test]
fn test_dep_section_add_to_toml_regular_dependency_success() {
let toml_str = r#"
[project]
name = "package"
entry = "main.sw"
license = "Apache-2.0"
authors = ["Fuel Labs"]
"#;
let mut doc: DocumentMut = toml_str.parse().unwrap();
let dep_data = Dependency::Simple("1.0.0".into());
let section = Section::Deps;
section
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/manifest/build_profile.rs | forc-pkg/src/manifest/build_profile.rs | use serde::{Deserialize, Serialize};
use sway_core::{Backtrace, IrCli, OptLevel, PrintAsm};
use crate::DumpOpts;
/// Parameters to pass through to the `sway_core::BuildConfig` during compilation.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct BuildProfile {
    /// Profile name, e.g. `"debug"` or `"release"`.
    #[serde(default)]
    pub name: String,
    #[serde(default)]
    pub print_ast: bool,
    pub print_dca_graph: Option<String>,
    pub print_dca_graph_url_format: Option<String>,
    pub dump: DumpOpts,
    #[serde(default)]
    pub print_ir: IrCli,
    #[serde(default)]
    pub verify_ir: IrCli,
    #[serde(default)]
    pub print_asm: PrintAsm,
    #[serde(default)]
    pub print_bytecode: bool,
    #[serde(default)]
    pub print_bytecode_spans: bool,
    #[serde(default)]
    pub terse: bool,
    #[serde(default)]
    pub time_phases: bool,
    #[serde(default)]
    pub profile: bool,
    /// When set, compilation metrics are written to this file path.
    #[serde(default)]
    pub metrics_outfile: Option<String>,
    #[serde(default)]
    pub include_tests: bool,
    /// Treat warnings as errors.
    #[serde(default)]
    pub error_on_warnings: bool,
    #[serde(default)]
    pub reverse_results: bool,
    /// Optimization level: `Opt0` for debug, `Opt1` for release (see the
    /// constructors below).
    #[serde(default)]
    pub optimization_level: OptLevel,
    #[serde(default)]
    pub backtrace: Backtrace,
}
impl BuildProfile {
    pub const DEBUG: &'static str = "debug";
    pub const RELEASE: &'static str = "release";
    pub const DEFAULT: &'static str = Self::DEBUG;
    /// The standard `debug` profile: no optimization, `AllExceptNever`
    /// backtraces, and every printing/diagnostic option disabled.
    pub fn debug() -> Self {
        Self {
            name: Self::DEBUG.into(),
            dump: DumpOpts::default(),
            print_ast: false,
            print_dca_graph: None,
            print_dca_graph_url_format: None,
            print_ir: IrCli::default(),
            verify_ir: IrCli::default(),
            print_asm: PrintAsm::default(),
            print_bytecode: false,
            print_bytecode_spans: false,
            terse: false,
            time_phases: false,
            profile: false,
            metrics_outfile: None,
            include_tests: false,
            error_on_warnings: false,
            reverse_results: false,
            optimization_level: OptLevel::Opt0,
            backtrace: Backtrace::AllExceptNever,
        }
    }
    /// The standard `release` profile.
    ///
    /// Identical to [`Self::debug`] except for the name, the optimization
    /// level (`Opt1`) and the backtrace policy (`OnlyAlways`); expressed via
    /// struct-update syntax so the two constructors cannot silently drift
    /// apart when fields are added.
    pub fn release() -> Self {
        Self {
            name: Self::RELEASE.to_string(),
            optimization_level: OptLevel::Opt1,
            backtrace: Backtrace::OnlyAlways,
            ..Self::debug()
        }
    }
    /// Whether this profile is the `release` profile (matched by name).
    pub fn is_release(&self) -> bool {
        self.name == Self::RELEASE
    }
}
impl Default for BuildProfile {
fn default() -> Self {
Self::debug()
}
}
#[cfg(test)]
mod tests {
    use crate::{BuildProfile, DumpOpts, PackageManifest};
    use sway_core::{Backtrace, IrCli, OptLevel, PrintAsm};
    // Loads the `tests/sections` fixture manifest and checks that each of its
    // five `[profile.*]` entries deserializes to the expected BuildProfile.
    #[test]
    fn test_build_profiles() {
        let manifest = PackageManifest::from_dir("./tests/sections").expect("manifest");
        let build_profiles = manifest.build_profile.expect("build profile");
        assert_eq!(build_profiles.len(), 5);
        // Standard debug profile without adaptations.
        let expected = BuildProfile::debug();
        let profile = build_profiles.get("debug").expect("debug profile");
        assert_eq!(*profile, expected);
        // Profile based on debug profile with adjusted ASM printing options.
        let expected = BuildProfile {
            name: "".into(),
            print_asm: PrintAsm::r#final(),
            ..BuildProfile::debug()
        };
        let profile = build_profiles.get("custom_asm").expect("custom profile");
        assert_eq!(*profile, expected);
        // Profile based on debug profile with adjusted IR printing options.
        let expected = BuildProfile {
            name: "".into(),
            print_ir: IrCli {
                initial: true,
                r#final: false,
                modified_only: true,
                passes: vec!["dce".to_string(), "sroa".to_string()],
            },
            ..BuildProfile::debug()
        };
        let profile = build_profiles
            .get("custom_ir")
            .expect("custom profile for IR");
        assert_eq!(*profile, expected);
        // Profile based on debug profile with adjusted backtrace option.
        let expected = BuildProfile {
            name: "".into(),
            backtrace: Backtrace::OnlyAlways,
            ..BuildProfile::debug()
        };
        let profile = build_profiles
            .get("custom_backtrace")
            .expect("custom profile for backtrace");
        assert_eq!(*profile, expected);
        // Adapted release profile.
        let expected = BuildProfile {
            name: "".into(),
            dump: DumpOpts::default(),
            print_ast: true,
            print_dca_graph: Some("dca_graph".into()),
            print_dca_graph_url_format: Some("print_dca_graph_url_format".into()),
            print_ir: IrCli::r#final(),
            verify_ir: IrCli::none(),
            print_asm: PrintAsm::all(),
            print_bytecode: true,
            print_bytecode_spans: false,
            terse: true,
            time_phases: true,
            profile: false,
            metrics_outfile: Some("metrics_outfile".into()),
            include_tests: true,
            error_on_warnings: true,
            reverse_results: true,
            optimization_level: OptLevel::Opt0,
            backtrace: Backtrace::None,
        };
        let profile = build_profiles.get("release").expect("release profile");
        assert_eq!(*profile, expected);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/forc-pkg/src/manifest/mod.rs | forc-pkg/src/manifest/mod.rs | pub mod build_profile;
pub mod dep_modifier;
use crate::pkg::{manifest_file_missing, parsing_failed, wrong_program_type};
use anyhow::{anyhow, bail, Context, Result};
use forc_tracing::println_warning;
use forc_util::{validate_name, validate_project_name};
use semver::Version;
use serde::{de, Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
use std::{
collections::{BTreeMap, HashMap},
fmt::Display,
path::{Path, PathBuf},
str::FromStr,
};
use sway_core::{fuel_prelude::fuel_tx, language::parsed::TreeType, parse_tree_type, BuildTarget};
use sway_error::handler::Handler;
use sway_types::span::Source;
use sway_utils::{
constants, find_nested_manifest_dir, find_parent_manifest_dir,
find_parent_manifest_dir_with_check,
};
use url::Url;
use self::build_profile::BuildProfile;
/// The name of a workspace member package.
pub type MemberName = String;
/// A manifest for each workspace member, or just one manifest if working with a single package
pub type MemberManifestFiles = BTreeMap<MemberName, PackageManifestFile>;
/// Common interface over package and workspace manifest files.
pub trait GenericManifestFile {
    /// Loads and parses a manifest from the given `Forc.toml` file path.
    fn from_file<P: AsRef<Path>>(path: P) -> Result<Self>
    where
        Self: Sized;
    /// Loads the manifest associated with the given directory (lookup details
    /// depend on the implementation).
    fn from_dir<P: AsRef<Path>>(dir: P) -> Result<Self>
    where
        Self: Sized;
    /// The path to the `Forc.toml` from which this manifest was loaded.
    ///
    /// This will always be a canonical path.
    fn path(&self) -> &Path;
    /// The path to the directory containing the `Forc.toml` from which this manifest was loaded.
    ///
    /// This will always be a canonical path.
    fn dir(&self) -> &Path {
        self.path()
            .parent()
            .expect("failed to retrieve manifest directory")
    }
    /// Returns the path of the `Forc.lock` file.
    fn lock_path(&self) -> Result<PathBuf>;
    /// Returns a mapping of member member names to package manifest files.
    fn member_manifests(&self) -> Result<MemberManifestFiles>;
}
/// A loaded `Forc.toml`: either a single package's manifest or a workspace
/// manifest.
#[derive(Clone, Debug)]
pub enum ManifestFile {
    /// A single package's manifest (boxed to keep the enum small).
    Package(Box<PackageManifestFile>),
    /// A workspace manifest listing member packages.
    Workspace(WorkspaceManifestFile),
}
impl ManifestFile {
    /// Whether this manifest describes a workspace rather than a package.
    pub fn is_workspace(&self) -> bool {
        matches!(self, ManifestFile::Workspace(_))
    }
    /// The root directory: the workspace dir for workspace manifests, the
    /// enclosing workspace's dir for member packages, and the package's own
    /// dir when it belongs to no workspace.
    pub fn root_dir(&self) -> PathBuf {
        match self {
            ManifestFile::Workspace(workspace_manifest_file) => {
                workspace_manifest_file.dir().to_path_buf()
            }
            ManifestFile::Package(pkg_manifest_file) => {
                match pkg_manifest_file.workspace().ok().flatten() {
                    Some(ws) => ws.dir().to_path_buf(),
                    None => pkg_manifest_file.dir().to_path_buf(),
                }
            }
        }
    }
}
impl GenericManifestFile for ManifestFile {
    /// Returns a `PackageManifestFile` if the path is within a package directory, otherwise
    /// returns a `WorkspaceManifestFile` if within a workspace directory.
    fn from_dir<P: AsRef<Path>>(path: P) -> Result<Self> {
        // The previous `if let Err … else if let Ok … else { bail! }` shape
        // left the final `bail!` unreachable; a `match` covers both cases
        // exhaustively with identical behavior.
        match PackageManifestFile::from_dir(path.as_ref()) {
            Ok(pkg_manifest) => Ok(ManifestFile::Package(Box::new(pkg_manifest))),
            // A missing `project` table suggests this is a workspace manifest.
            Err(e) if e.to_string().contains("missing field `project`") => {
                let workspace_manifest_file = WorkspaceManifestFile::from_dir(path.as_ref())?;
                Ok(ManifestFile::Workspace(workspace_manifest_file))
            }
            Err(e) => bail!("{}", e),
        }
    }
    /// Returns a `PackageManifestFile` if the path is pointing to package manifest, otherwise
    /// returns a `WorkspaceManifestFile` if it is pointing to a workspace manifest.
    fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        match PackageManifestFile::from_file(path.as_ref()) {
            Ok(pkg_manifest) => Ok(ManifestFile::Package(Box::new(pkg_manifest))),
            // A missing `project` table suggests this is a workspace manifest.
            Err(e) if e.to_string().contains("missing field `project`") => {
                let workspace_manifest_file = WorkspaceManifestFile::from_file(path.as_ref())?;
                Ok(ManifestFile::Workspace(workspace_manifest_file))
            }
            Err(e) => bail!("{}", e),
        }
    }
    /// The path to the `Forc.toml` from which this manifest was loaded.
    ///
    /// This will always be a canonical path.
    fn path(&self) -> &Path {
        match self {
            ManifestFile::Package(pkg_manifest_file) => pkg_manifest_file.path(),
            ManifestFile::Workspace(workspace_manifest_file) => workspace_manifest_file.path(),
        }
    }
    /// Delegates to the underlying package or workspace manifest.
    fn member_manifests(&self) -> Result<MemberManifestFiles> {
        match self {
            ManifestFile::Package(pkg_manifest_file) => pkg_manifest_file.member_manifests(),
            ManifestFile::Workspace(workspace_manifest_file) => {
                workspace_manifest_file.member_manifests()
            }
        }
    }
    /// Returns the path of the lock file for the given ManifestFile
    fn lock_path(&self) -> Result<PathBuf> {
        match self {
            ManifestFile::Package(pkg_manifest) => pkg_manifest.lock_path(),
            ManifestFile::Workspace(workspace_manifest) => workspace_manifest.lock_path(),
        }
    }
}
impl TryInto<PackageManifestFile> for ManifestFile {
    type Error = anyhow::Error;
    /// Unwraps the package manifest; errors if this is a workspace manifest.
    // NOTE(review): implementing `TryFrom<ManifestFile>` instead would provide
    // this impl for free via the std blanket impl — worth considering.
    fn try_into(self) -> Result<PackageManifestFile> {
        match self {
            ManifestFile::Package(pkg_manifest_file) => Ok(*pkg_manifest_file),
            ManifestFile::Workspace(_) => {
                bail!("Cannot convert workspace manifest to package manifest")
            }
        }
    }
}
impl TryInto<WorkspaceManifestFile> for ManifestFile {
    type Error = anyhow::Error;
    /// Unwraps the workspace manifest; errors if this is a package manifest.
    fn try_into(self) -> Result<WorkspaceManifestFile> {
        match self {
            ManifestFile::Package(_) => {
                bail!("Cannot convert package manifest to workspace manifest")
            }
            ManifestFile::Workspace(workspace_manifest_file) => Ok(workspace_manifest_file),
        }
    }
}
/// Maps a patched dependency's name to its replacement source.
type PatchMap = BTreeMap<String, Dependency>;
/// A [PackageManifest] that was deserialized from a file at a particular path.
#[derive(Clone, Debug, PartialEq)]
pub struct PackageManifestFile {
    /// The deserialized `Forc.toml`.
    manifest: PackageManifest,
    /// The path from which the `Forc.toml` file was read.
    path: PathBuf,
}
/// A direct mapping to a `Forc.toml`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub struct PackageManifest {
    pub project: Project,
    pub network: Option<Network>,
    /// Regular `[dependencies]` of the package.
    pub dependencies: Option<BTreeMap<String, Dependency>>,
    /// `[patch.<name>]` tables overriding dependency sources.
    pub patch: Option<BTreeMap<String, PatchMap>>,
    /// A list of [configuration-time constants](https://github.com/FuelLabs/sway/issues/1498).
    // NOTE(review): the doc comment above does not obviously describe
    // `build-target` — it may be a leftover from a removed field; confirm.
    pub build_target: Option<BTreeMap<String, BuildTarget>>,
    // Private: accessed through the profile accessors rather than directly.
    build_profile: Option<BTreeMap<String, BuildProfile>>,
    /// `[contract-dependencies]`: dependencies that are deployed contracts,
    /// each carrying a salt.
    pub contract_dependencies: Option<BTreeMap<String, ContractDependency>>,
    pub proxy: Option<Proxy>,
}
/// The `[project]` table of a package's `Forc.toml`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub struct Project {
    pub authors: Option<Vec<String>>,
    /// Package name; rejected at deserialization time if it is not a valid
    /// project name.
    #[serde(deserialize_with = "validate_package_name")]
    pub name: String,
    pub version: Option<Version>,
    pub description: Option<String>,
    pub organization: Option<String>,
    pub license: String,
    pub homepage: Option<Url>,
    pub repository: Option<Url>,
    pub documentation: Option<Url>,
    pub categories: Option<Vec<String>>,
    pub keywords: Option<Vec<String>>,
    /// Entry-point source file; defaults via `default_entry` when omitted.
    #[serde(default = "default_entry")]
    pub entry: String,
    pub implicit_std: Option<bool>,
    /// Minimum `forc` version required to build this package, if declared.
    pub forc_version: Option<semver::Version>,
    /// Experimental feature flags keyed by feature name.
    #[serde(default)]
    pub experimental: HashMap<String, bool>,
    /// Free-form user metadata, passed through untyped.
    pub metadata: Option<toml::Value>,
    pub force_dbg_in_release: Option<bool>,
}
// Validation function for the `name` field
fn validate_package_name<'de, D>(deserializer: D) -> Result<String, D::Error>
where
D: de::Deserializer<'de>,
{
let name: String = Deserialize::deserialize(deserializer)?;
match validate_project_name(&name) {
Ok(_) => Ok(name),
Err(e) => Err(de::Error::custom(e.to_string())),
}
}
/// The `[network]` table of a `Forc.toml`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Network {
    /// Node URL; falls back to `default_url()` when omitted.
    #[serde(default = "default_url")]
    pub url: String,
}
/// A contract salt represented as a `0x`-prefixed hex string in `Forc.toml`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct HexSalt(pub fuel_tx::Salt);
impl FromStr for HexSalt {
    type Err = anyhow::Error;
    /// Parses a mandatory-`0x`-prefixed hex string into a salt.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The `0x` prefix is required; strip it before hex-decoding.
        let normalized = match s.strip_prefix("0x") {
            Some(rest) => rest,
            None => {
                return Err(anyhow::anyhow!(
                    "hex salt declaration needs to start with 0x"
                ))
            }
        };
        fuel_tx::Salt::from_str(normalized)
            .map(Self)
            .map_err(|e| anyhow::anyhow!("{e}"))
    }
}
impl Display for HexSalt {
    /// Delegates to the inner `fuel_tx::Salt`'s `Display` implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
// The salt used when a contract dependency does not declare one: the default
// (all-zero) `fuel_tx::Salt`.
fn default_hex_salt() -> HexSalt {
    HexSalt(fuel_tx::Salt::default())
}
/// An entry of the `[contract-dependencies]` table: a regular dependency
/// declaration plus its associated `salt`.
#[serde_as]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct ContractDependency {
    #[serde(flatten)]
    pub dependency: Dependency,
    /// Serialized via its `Display`/`FromStr` form; defaults to the all-zero salt when omitted.
    #[serde_as(as = "DisplayFromStr")]
    #[serde(default = "default_hex_salt")]
    pub salt: HexSalt,
}
/// A dependency as declared in the `[dependencies]` table, in either the
/// simple (string) or detailed (table) form.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
#[serde(untagged)]
pub enum Dependency {
    /// In the simple format, only a version is specified, eg.
    /// `package = "<version>"`
    Simple(String),
    /// The simple format is equivalent to a detailed dependency
    /// specifying only a version, eg.
    /// `package = { version = "<version>" }`
    Detailed(DependencyDetails),
}
/// The detailed (table) form of a dependency declaration.
///
/// Which fields may be combined is enforced by [`DependencyDetails::validate`]:
/// `branch`/`tag`/`rev` require `git`, and `version` is mutually exclusive
/// with `git`, `ipfs` and `path`.
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct DependencyDetails {
    pub(crate) version: Option<String>,
    /// Only valid together with `version` (see `validate`).
    pub(crate) namespace: Option<String>,
    pub path: Option<String>,
    pub(crate) git: Option<String>,
    pub(crate) branch: Option<String>,
    pub(crate) tag: Option<String>,
    /// Alias: the real package name when the dependency key differs from it.
    pub(crate) package: Option<String>,
    pub(crate) rev: Option<String>,
    pub(crate) ipfs: Option<String>,
}
/// Describes the details around proxy contract.
#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Proxy {
    /// Whether proxy handling is enabled for this package.
    pub enabled: bool,
    /// Points to the proxy contract to be updated with the new contract id.
    /// If there is a value for this field, forc will try to update the proxy contract's storage
    /// field such that it points to current contract's deployed instance.
    pub address: Option<String>,
}
impl DependencyDetails {
    /// Checks that details reserved for a specific dependency type are not used
    /// without the main detail for that type, and that mutually exclusive
    /// details are not combined.
    ///
    /// For example, a set of details declaring `branch`, `tag` or `rev`
    /// without `git` is invalid.
    pub fn validate(&self) -> anyhow::Result<()> {
        let has_git_details = self.branch.is_some() || self.tag.is_some() || self.rev.is_some();
        if self.git.is_none() && has_git_details {
            bail!("Details reserved for git sources used without a git field");
        }
        if self.git.is_some() {
            // The triple case is reported first so the combined message wins
            // over the pairwise ones.
            match (self.branch.is_some(), self.tag.is_some(), self.rev.is_some()) {
                (true, true, true) => bail!("Cannot specify `branch`, `tag`, and `rev` together for dependency with a Git source"),
                (true, true, false) => bail!("Cannot specify both `branch` and `tag` for dependency with a Git source"),
                (false, true, true) => bail!("Cannot specify both `rev` and `tag` for dependency with a Git source"),
                (true, false, true) => bail!("Cannot specify both `branch` and `rev` for dependency with a Git source"),
                _ => {}
            }
        }
        if self.version.is_some() {
            if self.git.is_some() {
                bail!("Both version and git details provided for same dependency");
            }
            if self.ipfs.is_some() {
                bail!("Both version and ipfs details provided for same dependency");
            }
            if self.path.is_some() {
                bail!("Both version and path details provided for same dependency");
            }
        }
        if self.namespace.is_some() && self.version.is_none() {
            bail!("Namespace can only be specified for sources with version");
        }
        Ok(())
    }
    /// True when no source (git, path or ipfs) is declared at all.
    pub fn is_source_empty(&self) -> bool {
        matches!((&self.git, &self.path, &self.ipfs), (None, None, None))
    }
}
impl Dependency {
    /// The `package` alias, if one was given in the detailed form.
    pub fn package(&self) -> Option<&str> {
        match self {
            Self::Simple(_) => None,
            Self::Detailed(det) => det.package.as_deref(),
        }
    }
    /// The declared version string, if any.
    pub fn version(&self) -> Option<&str> {
        match self {
            Self::Simple(version) => Some(version.as_str()),
            Self::Detailed(det) => det.version.as_deref(),
        }
    }
}
impl PackageManifestFile {
    /// Returns an iterator over patches defined in underlying `PackageManifest` if this is a
    /// standalone package.
    ///
    /// If this package is a member of a workspace, patches are fetched from
    /// the workspace manifest file, ignoring any patch defined in the package
    /// manifest file, even if a patch section is not defined in the namespace.
    fn resolve_patches(&self) -> Result<impl Iterator<Item = (String, PatchMap)>> {
        if let Some(workspace) = self.workspace().ok().flatten() {
            // If workspace is defined, passing a local patch is a warning, but the global patch is used
            if self.patch.is_some() {
                println_warning("Patch for the non root package will be ignored.");
                println_warning(&format!(
                    "Specify patch at the workspace root: {}",
                    workspace.path().to_str().unwrap_or_default()
                ));
            }
            Ok(workspace
                .patch
                .as_ref()
                .cloned()
                .unwrap_or_default()
                .into_iter())
        } else {
            Ok(self.patch.as_ref().cloned().unwrap_or_default().into_iter())
        }
    }
    /// Retrieve the listed patches for the given name from underlying `PackageManifest` if this is
    /// a standalone package.
    ///
    /// If this package is a member of a workspace, patch is fetched from
    /// the workspace manifest file.
    pub fn resolve_patch(&self, patch_name: &str) -> Result<Option<PatchMap>> {
        Ok(self
            .resolve_patches()?
            .find(|(p_name, _)| patch_name == p_name.as_str())
            .map(|(_, patch)| patch))
    }
    /// Given the directory in which the file associated with this `PackageManifest` resides, produce the
    /// path to the entry file as specified in the manifest.
    ///
    /// This will always be a canonical path.
    pub fn entry_path(&self) -> PathBuf {
        self.dir()
            .join(constants::SRC_DIR)
            .join(&self.project.entry)
    }
    /// Produces the string of the entry point file.
    pub fn entry_string(&self) -> Result<Source> {
        let entry_path = self.entry_path();
        let entry_string = std::fs::read_to_string(entry_path)?;
        Ok(entry_string.as_str().into())
    }
    /// Parse and return the associated project's program type.
    pub fn program_type(&self) -> Result<TreeType> {
        let entry_string = self.entry_string()?;
        let handler = Handler::default();
        let parse_res = parse_tree_type(&handler, entry_string);
        // On failure, collect the handler's errors into a package-level parse failure.
        parse_res.map_err(|_| {
            let (errors, _warnings, _infos) = handler.consume();
            parsing_failed(&self.project.name, &errors)
        })
    }
    /// Given the current directory and expected program type,
    /// determines whether the correct program type is present.
    pub fn check_program_type(&self, expected_types: &[TreeType]) -> Result<()> {
        let parsed_type = self.program_type()?;
        if !expected_types.contains(&parsed_type) {
            bail!(wrong_program_type(
                &self.project.name,
                expected_types,
                parsed_type
            ));
        } else {
            Ok(())
        }
    }
    /// Access the build profile associated with the given profile name.
    pub fn build_profile(&self, profile_name: &str) -> Option<&BuildProfile> {
        self.build_profile
            .as_ref()
            .and_then(|profiles| profiles.get(profile_name))
    }
    /// Given the name of a `path` dependency, returns the full canonical `Path` to the dependency.
    pub fn dep_path(&self, dep_name: &str) -> Option<PathBuf> {
        let dir = self.dir();
        let details = self.dep_detailed(dep_name)?;
        details.path.as_ref().and_then(|path_str| {
            let path = Path::new(path_str);
            // Absolute paths are taken as-is; relative paths are resolved
            // (and canonicalized) against the manifest's directory.
            match path.is_absolute() {
                true => Some(path.to_owned()),
                false => dir.join(path).canonicalize().ok(),
            }
        })
    }
    /// Returns the workspace manifest file if this `PackageManifestFile` is one of the members.
    pub fn workspace(&self) -> Result<Option<WorkspaceManifestFile>> {
        let parent_dir = match self.dir().parent() {
            None => return Ok(None),
            Some(dir) => dir,
        };
        let ws_manifest = match WorkspaceManifestFile::from_dir(parent_dir) {
            Ok(manifest) => manifest,
            Err(e) => {
                // Check if the error is missing workspace manifest file. Do not return that error if that
                // is the case as we do not want to return error if this is a single project
                // without a workspace.
                // NOTE(review): matching on the error message text is fragile —
                // this assumes the "could not find" wording stays stable.
                if e.to_string().contains("could not find") {
                    return Ok(None);
                } else {
                    return Err(e);
                }
            }
        };
        if ws_manifest.is_member_path(self.dir())? {
            Ok(Some(ws_manifest))
        } else {
            Ok(None)
        }
    }
    /// Returns an immutable reference to the project name that this manifest file describes.
    pub fn project_name(&self) -> &str {
        &self.project.name
    }
    /// Validate the `PackageManifestFile`.
    ///
    /// This checks:
    /// 1. Validity of the underlying `PackageManifest`.
    /// 2. Existence of the entry file.
    pub fn validate(&self) -> Result<()> {
        self.manifest.validate()?;
        let mut entry_path = self.path.clone();
        entry_path.pop();
        let entry_path = entry_path
            .join(constants::SRC_DIR)
            .join(&self.project.entry);
        if !entry_path.exists() {
            bail!(
                "failed to validate path from entry field {:?} in Forc manifest file.",
                self.project.entry
            )
        }
        // Check for nested packages.
        //
        // `path` is the path to manifest file. To start nested package search we need to start
        // from manifest's directory. So, last part of the path (the filename, "/forc.toml") needs
        // to be removed.
        let mut pkg_dir = self.path.to_path_buf();
        pkg_dir.pop();
        if let Some(nested_package) = find_nested_manifest_dir(&pkg_dir) {
            // remove file name from nested_package_manifest
            bail!("Nested packages are not supported, please consider separating the nested package at {} from the package at {}, or if it makes sense consider creating a workspace.", nested_package.display(), pkg_dir.display())
        }
        Ok(())
    }
}
impl GenericManifestFile for PackageManifestFile {
    /// Given a path to a `Forc.toml`, read it and construct a `PackageManifest`.
    ///
    /// This also `validate`s the manifest, returning an `Err` in the case that invalid names,
    /// fields were used.
    ///
    /// If `std` is unspecified, `std` will be added to the `dependencies` table
    /// implicitly. In this case, the git tag associated with the version of this crate is used to
    /// specify the pinned commit at which we fetch `std`.
    fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path = path.as_ref().canonicalize()?;
        let manifest = PackageManifest::from_file(&path)?;
        let manifest_file = Self { manifest, path };
        // Validated a second time at the file level: this also checks the
        // entry file's existence and rejects nested packages.
        manifest_file.validate()?;
        Ok(manifest_file)
    }
    /// Read the manifest from the `Forc.toml` in the directory specified by the given `path` or
    /// any of its parent directories.
    ///
    /// This is short for `PackageManifest::from_file`, but takes care of constructing the path to the
    /// file.
    fn from_dir<P: AsRef<Path>>(manifest_dir: P) -> Result<Self> {
        let manifest_dir = manifest_dir.as_ref();
        let dir = find_parent_manifest_dir(manifest_dir)
            .ok_or_else(|| manifest_file_missing(manifest_dir))?;
        let path = dir.join(constants::MANIFEST_FILE_NAME);
        Self::from_file(path)
    }
    /// The canonical path to the `Forc.toml` this manifest was loaded from.
    fn path(&self) -> &Path {
        &self.path
    }
    /// Returns the location of the lock file for `PackageManifestFile`.
    /// Checks if this PackageManifestFile corresponds to a workspace member and if that is the case
    /// returns the workspace level lock file's location.
    ///
    /// This will always be a canonical path.
    fn lock_path(&self) -> Result<PathBuf> {
        // Check if this package is in a workspace
        let workspace_manifest = self.workspace()?;
        if let Some(workspace_manifest) = workspace_manifest {
            workspace_manifest.lock_path()
        } else {
            Ok(self.dir().to_path_buf().join(constants::LOCK_FILE_NAME))
        }
    }
    /// Map of member package name to its manifest: all workspace members when
    /// part of a workspace, otherwise just this package.
    fn member_manifests(&self) -> Result<MemberManifestFiles> {
        let mut member_manifest_files = BTreeMap::new();
        // Check if this package is in a workspace, in that case insert all member manifests
        if let Some(workspace_manifest_file) = self.workspace()? {
            for member_manifest in workspace_manifest_file.member_pkg_manifests()? {
                let member_manifest = member_manifest.with_context(|| "Invalid member manifest")?;
                member_manifest_files.insert(member_manifest.project.name.clone(), member_manifest);
            }
        } else {
            let member_name = &self.project.name;
            member_manifest_files.insert(member_name.clone(), self.clone());
        }
        Ok(member_manifest_files)
    }
}
impl PackageManifest {
pub const DEFAULT_ENTRY_FILE_NAME: &'static str = "main.sw";
/// Given a path to a `Forc.toml`, read it and construct a `PackageManifest`.
///
/// This also `validate`s the manifest, returning an `Err` in the case that invalid names,
/// fields were used.
///
/// If `std` is unspecified, `std` will be added to the `dependencies` table
/// implicitly. In this case, the git tag associated with the version of this crate is used to
/// specify the pinned commit at which we fetch `std`.
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
// While creating a `ManifestFile` we need to check if the given path corresponds to a
// package or a workspace. While doing so, we should be printing the warnings if the given
// file parses so that we only see warnings for the correct type of manifest.
let path = path.as_ref();
let contents = std::fs::read_to_string(path)
.map_err(|e| anyhow!("failed to read manifest at {:?}: {}", path, e))?;
Self::from_string(contents)
}
/// Given a path to a `Forc.toml`, read it and construct a `PackageManifest`.
///
/// This also `validate`s the manifest, returning an `Err` in the case that invalid names,
/// fields were used.
///
/// If `std` is unspecified, `std` will be added to the `dependencies` table
/// implicitly. In this case, the git tag associated with the version of this crate is used to
/// specify the pinned commit at which we fetch `std`.
pub fn from_string(contents: String) -> Result<Self> {
// While creating a `ManifestFile` we need to check if the given path corresponds to a
// package or a workspace. While doing so, we should be printing the warnings if the given
// file parses so that we only see warnings for the correct type of manifest.
let mut warnings = vec![];
let toml_de = toml::de::Deserializer::new(&contents);
let mut manifest: Self = serde_ignored::deserialize(toml_de, |path| {
let warning = format!("unused manifest key: {path}");
warnings.push(warning);
})
.map_err(|e| anyhow!("failed to parse manifest: {}.", e))?;
for warning in warnings {
println_warning(&warning);
}
manifest.implicitly_include_std_if_missing();
manifest.implicitly_include_default_build_profiles_if_missing();
manifest.validate()?;
Ok(manifest)
}
/// Validate the `PackageManifest`.
///
/// This checks:
/// 1. The project and organization names against a set of reserved/restricted keywords and patterns.
/// 2. The validity of the details provided. Makes sure that there are no mismatching detail
/// declarations (to prevent mixing details specific to certain types).
/// 3. The dependencies listed does not have an alias ("package" field) that is the same as package name.
pub fn validate(&self) -> Result<()> {
validate_project_name(&self.project.name)?;
if let Some(ref org) = self.project.organization {
validate_name(org, "organization name")?;
}
for (dep_name, dependency_details) in self.deps_detailed() {
dependency_details.validate()?;
if dependency_details
.package
.as_ref()
.is_some_and(|package_alias| package_alias == &self.project.name)
{
bail!(format!("Dependency \"{dep_name}\" declares an alias (\"package\" field) that is the same as project name"))
}
if dep_name == &self.project.name {
bail!(format!(
"Dependency \"{dep_name}\" collides with project name."
))
}
}
Ok(())
}
/// Given a directory to a forc project containing a `Forc.toml`, read the manifest.
///
/// This is short for `PackageManifest::from_file`, but takes care of constructing the path to the
/// file.
pub fn from_dir<P: AsRef<Path>>(dir: P) -> Result<Self> {
let dir = dir.as_ref();
let manifest_dir =
find_parent_manifest_dir(dir).ok_or_else(|| manifest_file_missing(dir))?;
let file_path = manifest_dir.join(constants::MANIFEST_FILE_NAME);
Self::from_file(file_path)
}
/// Produce an iterator yielding all listed dependencies.
pub fn deps(&self) -> impl Iterator<Item = (&String, &Dependency)> {
self.dependencies
.as_ref()
.into_iter()
.flat_map(|deps| deps.iter())
}
/// Produce an iterator yielding all listed build profiles.
pub fn build_profiles(&self) -> impl Iterator<Item = (&String, &BuildProfile)> {
self.build_profile
.as_ref()
.into_iter()
.flat_map(|deps| deps.iter())
}
/// Produce an iterator yielding all listed contract dependencies
pub fn contract_deps(&self) -> impl Iterator<Item = (&String, &ContractDependency)> {
self.contract_dependencies
.as_ref()
.into_iter()
.flat_map(|deps| deps.iter())
}
/// Produce an iterator yielding all `Detailed` dependencies.
pub fn deps_detailed(&self) -> impl Iterator<Item = (&String, &DependencyDetails)> {
self.deps().filter_map(|(name, dep)| match dep {
Dependency::Detailed(ref det) => Some((name, det)),
Dependency::Simple(_) => None,
})
}
/// Produce an iterator yielding all listed patches.
pub fn patches(&self) -> impl Iterator<Item = (&String, &PatchMap)> {
self.patch
.as_ref()
.into_iter()
.flat_map(|patches| patches.iter())
}
/// Retrieve the listed patches for the given name.
pub fn patch(&self, patch_name: &str) -> Option<&PatchMap> {
self.patch
.as_ref()
.and_then(|patches| patches.get(patch_name))
}
    /// Retrieve the proxy table for the package.
    ///
    /// Returns `None` when no `[proxy]` table was declared in the manifest.
    pub fn proxy(&self) -> Option<&Proxy> {
        self.proxy.as_ref()
    }
    /// Check for the `std` package under `[dependencies]`. If it is missing, add
    /// `std` implicitly.
    ///
    /// This makes the common case of depending on `std` a lot smoother for most users, while still
    /// allowing for the uncommon case of custom `std` deps.
    fn implicitly_include_std_if_missing(&mut self) {
        use sway_types::constants::STD;
        // Don't include `std` if:
        // - this *is* `std`.
        // - `std` package is already specified.
        // - a dependency already exists with the name "std".
        // - the user opted out via `implicit-std = false` (defaults to `true`).
        if self.project.name == STD
            || self.pkg_dep(STD).is_some()
            || self.dep(STD).is_some()
            || !self.project.implicit_std.unwrap_or(true)
        {
            return;
        }
        // Add a `[dependencies]` table if there isn't one.
        let deps = self.dependencies.get_or_insert_with(Default::default);
        // Add the missing dependency.
        let std_dep = implicit_std_dep();
        deps.insert(STD.to_string(), std_dep);
    }
/// Check for the `debug` and `release` packages under `[build-profile]`. If they are missing add them.
/// If they are provided, use the provided `debug` or `release` so that they override the default `debug`
/// and `release`.
fn implicitly_include_default_build_profiles_if_missing(&mut self) {
let build_profiles = self.build_profile.get_or_insert_with(Default::default);
if build_profiles.get(BuildProfile::DEBUG).is_none() {
build_profiles.insert(BuildProfile::DEBUG.into(), BuildProfile::debug());
}
if build_profiles.get(BuildProfile::RELEASE).is_none() {
build_profiles.insert(BuildProfile::RELEASE.into(), BuildProfile::release());
}
}
/// Retrieve a reference to the dependency with the given name.
pub fn dep(&self, dep_name: &str) -> Option<&Dependency> {
self.dependencies
.as_ref()
.and_then(|deps| deps.get(dep_name))
}
/// Retrieve a reference to the dependency with the given name.
pub fn dep_detailed(&self, dep_name: &str) -> Option<&DependencyDetails> {
self.dep(dep_name).and_then(|dep| match dep {
Dependency::Simple(_) => None,
Dependency::Detailed(detailed) => Some(detailed),
})
}
/// Retrieve a reference to the contract dependency with the given name.
pub fn contract_dep(&self, contract_dep_name: &str) -> Option<&ContractDependency> {
self.contract_dependencies
.as_ref()
.and_then(|contract_dependencies| contract_dependencies.get(contract_dep_name))
}
/// Retrieve a reference to the contract dependency with the given name.
pub fn contract_dependency_detailed(
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/convert_parse_tree_error.rs | sway-error/src/convert_parse_tree_error.rs | use sway_types::{Ident, IdentUnique, Span, Spanned};
use thiserror::Error;
use crate::formatting::{
a_or_an, num_to_str, num_to_str_or_none, plural_s, sequence_to_str, Enclosing,
};
/// Errors that may arise while lowering the parsed tree into the AST.
///
/// Each variant carries a [`Span`] (or an `Ident`/`IdentUnique` whose span is
/// used) pointing at the offending source; see the `Spanned` impl below.
#[derive(Error, Debug, Clone, PartialEq, Eq, Hash)]
pub enum ConvertParseTreeError {
    #[error("Imports without items are not supported")]
    ImportsWithoutItemsNotSupported { span: Span },
    #[error("functions used in applications may not be arbitrary expressions")]
    FunctionArbitraryExpression { span: Span },
    #[error("generics are not supported here")]
    GenericsNotSupportedHere { span: Span },
    #[error("multiple generics are not supported")]
    MultipleGenericsNotSupported { span: Span },
    #[error("tuple index out of range")]
    TupleIndexOutOfRange { span: Span },
    #[error("shift-left expressions are not implemented")]
    ShlNotImplemented { span: Span },
    #[error("shift-right expressions are not implemented")]
    ShrNotImplemented { span: Span },
    #[error("bitwise xor expressions are not implemented")]
    BitXorNotImplemented { span: Span },
    #[error("integer literals in this position cannot have a type suffix")]
    IntTySuffixNotSupported { span: Span },
    #[error("int literal out of range")]
    IntLiteralOutOfRange { span: Span },
    #[error("expected an integer literal")]
    IntLiteralExpected { span: Span },
    #[error("qualified path roots are not implemented")]
    QualifiedPathRootsNotImplemented { span: Span },
    #[error("char literals are not implemented")]
    CharLiteralsNotImplemented { span: Span },
    #[error("hex literals must have 1..16 or 64 digits")]
    HexLiteralLength { span: Span },
    #[error("binary literals must have either 1..64 or 256 digits")]
    BinaryLiteralLength { span: Span },
    #[error("u8 literal out of range")]
    U8LiteralOutOfRange { span: Span },
    #[error("u16 literal out of range")]
    U16LiteralOutOfRange { span: Span },
    #[error("u32 literal out of range")]
    U32LiteralOutOfRange { span: Span },
    #[error("u64 literal out of range")]
    U64LiteralOutOfRange { span: Span },
    #[error("signed integers are not supported")]
    SignedIntegersNotSupported { span: Span },
    #[error("ref variables are not supported")]
    RefVariablesNotSupported { span: Span },
    #[error("literal patterns not supported in this position")]
    LiteralPatternsNotSupportedHere { span: Span },
    #[error("constant patterns not supported in this position")]
    ConstantPatternsNotSupportedHere { span: Span },
    #[error("constructor patterns not supported in this position")]
    ConstructorPatternsNotSupportedHere { span: Span },
    #[error("struct patterns not supported in this position")]
    StructPatternsNotSupportedHere { span: Span },
    #[error("wildcard patterns not supported in this position")]
    WildcardPatternsNotSupportedHere { span: Span },
    #[error("or patterns not supported in this position")]
    OrPatternsNotSupportedHere { span: Span },
    #[error("tuple patterns not supported in this position")]
    TuplePatternsNotSupportedHere { span: Span },
    #[error("ref patterns not supported in this position")]
    RefPatternsNotSupportedHere { span: Span },
    #[error("constructor patterns require a single argument")]
    ConstructorPatternOneArg { span: Span },
    #[error("constructor patterns cannot contain sub-patterns")]
    ConstructorPatternSubPatterns { span: Span },
    #[error("paths are not supported in this position")]
    PathsNotSupportedHere { span: Span },
    #[error("Fully specified types are not supported in this position. Try importing the type and referring to it here.")]
    FullySpecifiedTypesNotSupported { span: Span },
    #[error("ContractCaller requires exactly one generic argument")]
    ContractCallerOneGenericArg { span: Span },
    #[error("ContractCaller requires a named type for its generic argument")]
    ContractCallerNamedTypeGenericArg { span: Span },
    #[error("cannot find type \"{ty_name}\" in this scope")]
    ConstrainedNonExistentType { ty_name: Ident, span: Span },
    #[error("__get_storage_key does not take arguments")]
    GetStorageKeyTooManyArgs { span: Span },
    #[error("recursive types are not supported")]
    RecursiveType { span: Span },
    #[error("enum variant \"{name}\" already declared")]
    DuplicateEnumVariant { name: Ident, span: Span },
    #[error("storage field \"{name}\" already declared")]
    DuplicateStorageField { name: Ident, span: Span },
    #[error("configurable \"{name}\" already declared")]
    DuplicateConfigurable { name: Ident, span: Span },
    #[error("Multiple configurable blocks detected in this module")]
    MultipleConfigurableBlocksInModule { span: Span },
    #[error("struct field \"{name}\" already declared")]
    DuplicateStructField { name: Ident, span: Span },
    #[error("identifier \"{name}\" bound more than once in this parameter list")]
    DuplicateParameterIdentifier { name: Ident, span: Span },
    #[error("self parameter is not allowed for {fn_kind}")]
    SelfParameterNotAllowedForFn { fn_kind: String, span: Span },
    #[error("Expected module at the beginning before any other items.")]
    ExpectedModuleAtBeginning { span: Span },
    #[error("Constant requires expression.")]
    ConstantRequiresExpression { span: Span },
    #[error("Constant requires type ascription.")]
    ConstantRequiresTypeAscription { span: Span },
    #[error("Unknown type name \"self\". A self type with a similar name exists (notice the capitalization): `Self`")]
    UnknownTypeNameSelf { span: Span },
    // The message depends on whether the "attribute" is really an attribute
    // or one of the doc-comment forms (see `get_attribute_type`).
    #[error("{}", match get_attribute_type(attribute) {
        AttributeType::InnerDocComment => format!("Inner doc comment (`//!`) cannot document {}{target_friendly_name}.", a_or_an(&target_friendly_name)),
        AttributeType::OuterDocComment => format!("Outer doc comment (`///`) cannot document {}{target_friendly_name}.", a_or_an(&target_friendly_name)),
        AttributeType::Attribute => format!("\"{attribute}\" attribute cannot annotate {}{target_friendly_name}.", a_or_an(&target_friendly_name)),
    })]
    InvalidAttributeTarget {
        span: Span,
        attribute: Ident,
        target_friendly_name: &'static str,
        can_only_annotate_help: Vec<&'static str>,
    },
    #[error("\"{last_occurrence}\" attribute can be applied only once, but is applied {} times.", num_to_str(previous_occurrences.len() + 1))]
    InvalidAttributeMultiplicity {
        last_occurrence: IdentUnique,
        previous_occurrences: Vec<IdentUnique>,
    },
    #[error("\"{attribute}\" attribute must {}, but has {}.", get_expected_attributes_args_multiplicity_msg(args_multiplicity), num_to_str_or_none(*num_of_args))]
    InvalidAttributeArgsMultiplicity {
        span: Span,
        attribute: Ident,
        args_multiplicity: (usize, usize),
        num_of_args: usize,
    },
    #[error(
        "\"{arg}\" is an invalid argument for attribute \"{attribute}\". Valid arguments are: {}.",
        sequence_to_str(expected_args, Enclosing::DoubleQuote, usize::MAX)
    )]
    InvalidAttributeArg {
        attribute: Ident,
        arg: IdentUnique,
        expected_args: Vec<&'static str>,
    },
    #[error("\"{arg}\" argument of the attribute \"{attribute}\" must {}have a value.",
        match value_span {
            Some(_) => "not ",
            None => "",
        }
    )]
    InvalidAttributeArgExpectsValue {
        attribute: Ident,
        arg: IdentUnique,
        value_span: Option<Span>,
    },
    #[error("\"{arg}\" argument must have a value of type \"{expected_type}\".")]
    InvalidAttributeArgValueType {
        span: Span,
        arg: Ident,
        expected_type: &'static str,
        received_type: &'static str,
    },
    #[error("{} is an invalid value for argument \"{arg}\". Valid values are: {}.", span.as_str(), sequence_to_str(expected_values, Enclosing::DoubleQuote, usize::MAX))]
    InvalidAttributeArgValue {
        span: Span,
        arg: Ident,
        expected_values: Vec<&'static str>,
    },
}
/// The syntactic kind of an "attribute": a doc comment (inner or outer) or a
/// real attribute. Determined from the raw source text by `get_attribute_type`.
pub(crate) enum AttributeType {
    /// `//!`.
    InnerDocComment,
    /// `///`.
    OuterDocComment,
    /// `#[attribute]` or `#![attribute]`.
    Attribute,
}
/// Classifies `attribute` as an inner/outer doc comment or a real attribute.
///
/// For doc comments, the attribute name's span covers the actual comment line,
/// so inspecting the raw source text tells the two comment forms apart; every
/// other attribute's span points at the attribute name itself.
pub(crate) fn get_attribute_type(attribute: &Ident) -> AttributeType {
    let span = attribute.span();
    let source_text = span.as_str();
    match (source_text.starts_with("//!"), source_text.starts_with("///")) {
        (true, _) => AttributeType::InnerDocComment,
        (_, true) => AttributeType::OuterDocComment,
        _ => AttributeType::Attribute,
    }
}
/// Renders the expected argument multiplicity `(min, max)` of an attribute as
/// an English phrase, e.g. "have exactly two arguments" or "have at most one
/// argument". `usize::MAX` as `max` means "unbounded above".
pub(crate) fn get_expected_attributes_args_multiplicity_msg(
    args_multiplicity: &(usize, usize),
) -> String {
    let (min, max) = *args_multiplicity;
    if (min, max) == (0, 0) {
        "not have any arguments".to_string()
    } else if min == max {
        format!("have exactly {} argument{}", num_to_str(min), plural_s(min))
    } else if min == max - 1 {
        format!("have {} or {} argument{}", num_to_str_or_none(min), num_to_str(max), plural_s(max))
    } else if min == 0 && max != usize::MAX {
        format!("have at most {} argument{}", num_to_str(max), plural_s(max))
    } else if max == usize::MAX && min != usize::MIN {
        format!("have at least {} argument{}", num_to_str(min), plural_s(min))
    } else if max != usize::MAX {
        format!("have between {} and {} arguments", num_to_str(min), num_to_str(max))
    } else {
        // Only reachable for (0, usize::MAX): any number of args is accepted.
        unreachable!("if any number of arguments are accepted the `InvalidAttributeArgsMultiplicity` error cannot occur")
    }
}
impl Spanned for ConvertParseTreeError {
fn span(&self) -> Span {
match self {
ConvertParseTreeError::ImportsWithoutItemsNotSupported { span } => span.clone(),
ConvertParseTreeError::FunctionArbitraryExpression { span } => span.clone(),
ConvertParseTreeError::GenericsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::MultipleGenericsNotSupported { span } => span.clone(),
ConvertParseTreeError::TupleIndexOutOfRange { span } => span.clone(),
ConvertParseTreeError::ShlNotImplemented { span } => span.clone(),
ConvertParseTreeError::ShrNotImplemented { span } => span.clone(),
ConvertParseTreeError::BitXorNotImplemented { span } => span.clone(),
ConvertParseTreeError::IntTySuffixNotSupported { span } => span.clone(),
ConvertParseTreeError::IntLiteralOutOfRange { span } => span.clone(),
ConvertParseTreeError::IntLiteralExpected { span } => span.clone(),
ConvertParseTreeError::QualifiedPathRootsNotImplemented { span } => span.clone(),
ConvertParseTreeError::CharLiteralsNotImplemented { span } => span.clone(),
ConvertParseTreeError::HexLiteralLength { span } => span.clone(),
ConvertParseTreeError::BinaryLiteralLength { span } => span.clone(),
ConvertParseTreeError::U8LiteralOutOfRange { span } => span.clone(),
ConvertParseTreeError::U16LiteralOutOfRange { span } => span.clone(),
ConvertParseTreeError::U32LiteralOutOfRange { span } => span.clone(),
ConvertParseTreeError::U64LiteralOutOfRange { span } => span.clone(),
ConvertParseTreeError::SignedIntegersNotSupported { span } => span.clone(),
ConvertParseTreeError::RefVariablesNotSupported { span } => span.clone(),
ConvertParseTreeError::LiteralPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::ConstantPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::ConstructorPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::StructPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::WildcardPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::OrPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::TuplePatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::RefPatternsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::ConstructorPatternOneArg { span } => span.clone(),
ConvertParseTreeError::ConstructorPatternSubPatterns { span } => span.clone(),
ConvertParseTreeError::PathsNotSupportedHere { span } => span.clone(),
ConvertParseTreeError::FullySpecifiedTypesNotSupported { span } => span.clone(),
ConvertParseTreeError::ContractCallerOneGenericArg { span } => span.clone(),
ConvertParseTreeError::ContractCallerNamedTypeGenericArg { span } => span.clone(),
ConvertParseTreeError::ConstrainedNonExistentType { span, .. } => span.clone(),
ConvertParseTreeError::GetStorageKeyTooManyArgs { span, .. } => span.clone(),
ConvertParseTreeError::RecursiveType { span } => span.clone(),
ConvertParseTreeError::DuplicateEnumVariant { span, .. } => span.clone(),
ConvertParseTreeError::DuplicateStorageField { span, .. } => span.clone(),
ConvertParseTreeError::DuplicateConfigurable { span, .. } => span.clone(),
ConvertParseTreeError::MultipleConfigurableBlocksInModule { span } => span.clone(),
ConvertParseTreeError::DuplicateStructField { span, .. } => span.clone(),
ConvertParseTreeError::DuplicateParameterIdentifier { span, .. } => span.clone(),
ConvertParseTreeError::SelfParameterNotAllowedForFn { span, .. } => span.clone(),
ConvertParseTreeError::ExpectedModuleAtBeginning { span } => span.clone(),
ConvertParseTreeError::ConstantRequiresExpression { span } => span.clone(),
ConvertParseTreeError::ConstantRequiresTypeAscription { span } => span.clone(),
ConvertParseTreeError::UnknownTypeNameSelf { span } => span.clone(),
ConvertParseTreeError::InvalidAttributeTarget { span, .. } => span.clone(),
ConvertParseTreeError::InvalidAttributeMultiplicity {
last_occurrence: last_attribute,
..
} => last_attribute.span(),
ConvertParseTreeError::InvalidAttributeArgsMultiplicity { span, .. } => span.clone(),
ConvertParseTreeError::InvalidAttributeArg { arg, .. } => arg.span(),
ConvertParseTreeError::InvalidAttributeArgExpectsValue { arg, .. } => arg.span(),
ConvertParseTreeError::InvalidAttributeArgValueType { span, .. } => span.clone(),
ConvertParseTreeError::InvalidAttributeArgValue { span, .. } => span.clone(),
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/lib.rs | sway-error/src/lib.rs | pub mod convert_parse_tree_error;
pub mod diagnostic;
pub mod error;
pub mod formatting;
pub mod handler;
pub mod lex_error;
pub mod parser_error;
pub mod type_error;
pub mod warning;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/type_error.rs | sway-error/src/type_error.rs | use sway_types::{Span, Spanned};
use thiserror::Error;
/// Type errors produced during type checking.
///
/// Each variant carries the [Span] of the offending code; spans are exposed
/// uniformly through the `Spanned` impl below. The user-facing message text
/// comes from the `thiserror` `#[error(...)]` attributes.
#[derive(Error, Debug, Clone, PartialEq, Eq, Hash)]
pub enum TypeError {
    #[error(
        "Mismatched types.\n\
        expected: {expected}\n\
        found: {received}.\n\
        {help}",
        // The trailing help line is omitted entirely when no help text is provided.
        help = if !help_text.is_empty() { format!("help: {help_text}") } else { String::new() }
    )]
    MismatchedType {
        expected: String,
        received: String,
        help_text: String,
        span: Span,
    },
    #[error("This type is not known. Try annotating it with a type annotation.")]
    UnknownType { span: Span },
    #[error(
        "The pattern for this match expression arm has a mismatched type.\n\
        expected: {expected}\n\
        found: {received}.\n\
        "
    )]
    MatchArmScrutineeWrongType {
        expected: String,
        received: String,
        span: Span,
    },
    #[error("Literal would overflow because its value does not fit into \"{expected}\"")]
    LiteralOverflow { expected: String, span: Span },
}
impl Spanned for TypeError {
fn span(&self) -> Span {
use TypeError::*;
match self {
MismatchedType { span, .. } => span.clone(),
UnknownType { span } => span.clone(),
MatchArmScrutineeWrongType { span, .. } => span.clone(),
LiteralOverflow { span, .. } => span.clone(),
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/warning.rs | sway-error/src/warning.rs | use crate::{
diagnostic::{Code, Diagnostic, Hint, Issue, Reason, ToDiagnostic},
formatting::{
did_you_mean_help, first_line, num_to_str, sequence_to_list, sequence_to_str, Enclosing,
Indent,
},
};
use core::fmt;
use either::Either;
use sway_types::{Ident, IdentUnique, SourceId, Span, Spanned};
// TODO: since moving to using Idents instead of strings,
// the warning_content will usually contain a duplicate of the span.
/// A compiler warning together with the source location it refers to.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CompileWarning {
    /// The source location the warning points at.
    pub span: Span,
    /// The concrete warning variant with its payload.
    pub warning_content: Warning,
}
impl Spanned for CompileWarning {
fn span(&self) -> Span {
self.span.clone()
}
}
impl CompileWarning {
    /// Renders the plain-text warning message (delegates to `Warning`'s `Display`).
    pub fn to_friendly_warning_string(&self) -> String {
        self.warning_content.to_string()
    }
    /// The id of the source file this warning originates from, if any.
    pub fn source_id(&self) -> Option<SourceId> {
        self.span.source_id().cloned()
    }
}
/// All non-fatal diagnostics the compiler can emit.
///
/// Plain-text messages come from the `Display` impl below; selected variants
/// additionally get a rich, span-annotated rendering in the `ToDiagnostic`
/// impl on [CompileWarning].
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Warning {
    // Naming-convention lints: each carries the offending identifier.
    NonClassCaseStructName {
        struct_name: Ident,
    },
    NonClassCaseTypeParameter {
        name: Ident,
    },
    NonClassCaseTraitName {
        name: Ident,
    },
    NonClassCaseEnumName {
        enum_name: Ident,
    },
    NonClassCaseEnumVariantName {
        variant_name: Ident,
    },
    NonSnakeCaseStructFieldName {
        field_name: Ident,
    },
    NonSnakeCaseFunctionName {
        name: Ident,
    },
    NonScreamingSnakeCaseConstName {
        name: Ident,
    },
    /// A value-returning expression whose result is not assigned to anything.
    UnusedReturnValue {
        r#type: String,
    },
    /// A method with a matching name exists in a dependency, but its trait is
    /// not in scope.
    SimilarMethodFound {
        lib: Ident,
        module: Ident,
        name: Ident,
    },
    ShadowsOtherSymbol {
        name: Ident,
    },
    AsmBlockIsEmpty,
    UninitializedAsmRegShadowsItem {
        /// Text "Constant" or "Configurable" or "Variable".
        /// Denotes the type of the `item` that shadows the uninitialized ASM register.
        constant_or_configurable_or_variable: &'static str,
        /// The name of the item that shadows the register, that points to the name in
        /// the item declaration.
        item: IdentUnique,
    },
    OverridingTraitImplementation,
    // Dead-code lints; the warning span identifies the unused item.
    DeadDeclaration,
    DeadEnumDeclaration,
    DeadFunctionDeclaration,
    DeadStructDeclaration,
    DeadTrait,
    UnreachableCode,
    DeadEnumVariant {
        variant_name: Ident,
    },
    DeadMethod,
    StructFieldNeverRead,
    ShadowingReservedRegister {
        reg_name: Ident,
    },
    DeadStorageDeclaration,
    DeadStorageDeclarationForFunction {
        unneeded_attrib: String,
    },
    MatchExpressionUnreachableArm {
        match_value: Span,
        match_type: String,
        // Either preceding non catch-all arms or a single interior catch-all arm.
        preceding_arms: Either<Vec<Span>, Span>,
        unreachable_arm: Span,
        is_last_arm: bool,
        is_catch_all_arm: bool,
    },
    UnknownAttribute {
        attribute: IdentUnique,
        known_attributes: &'static [&'static str],
    },
    UnknownAttributeArg {
        attribute: Ident,
        arg: IdentUnique,
        expected_args: Vec<&'static str>,
    },
    EffectAfterInteraction {
        effect: String,
        effect_in_suggestion: String,
        block_name: Ident,
    },
    UsingDeprecated {
        deprecated_element: DeprecatedElement,
        deprecated_element_name: String,
        help: Option<String>,
    },
    InherentImplForExternalType {
        type_name: String,
        type_definition_span: Option<Span>,
    },
    /// Two storage fields resolve to the same storage key.
    DuplicatedStorageKey {
        first_field: IdentUnique,
        first_field_full_name: String,
        first_field_key_is_compiler_generated: bool,
        second_field: IdentUnique,
        second_field_full_name: String,
        second_field_key_is_compiler_generated: bool,
        key: String,
    },
    ErrorTypeEmptyEnum {
        enum_name: IdentUnique,
    },
    ErrorEmptyErrorMessage {
        enum_name: Ident,
        enum_variant_name: Ident,
    },
    /// The same error message is used by several error enum variants.
    ErrorDuplicatedErrorMessage {
        last_occurrence: Span,
        previous_occurrences: Vec<Span>,
    },
}
/// Elements that can be deprecated.
///
/// The `Display` impl below provides the capitalized, human-readable name
/// used at the start of deprecation messages.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DeprecatedElement {
    Struct,
    StructField,
    Enum,
    EnumVariant,
    Function,
    Const,
    Configurable,
}
impl fmt::Display for DeprecatedElement {
    /// Writes the capitalized, human-readable element name used in
    /// deprecation messages (e.g. "Struct field").
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Map to the static label first, then emit it with a single write.
        let label = match self {
            Self::Struct => "Struct",
            Self::StructField => "Struct field",
            Self::Enum => "Enum",
            Self::EnumVariant => "Enum variant",
            Self::Function => "Function",
            Self::Const => "Constant",
            Self::Configurable => "Configurable",
        };
        f.write_str(label)
    }
}
impl fmt::Display for Warning {
    // This trait requires `fmt` with this exact signature.
    //
    // Produces the plain-text, single-message rendering of each warning.
    // Variants that get a richer, span-annotated rendering are additionally
    // handled in `ToDiagnostic` on `CompileWarning`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use sway_types::style::*;
        use Warning::*;
        match self {
            NonClassCaseStructName { struct_name } => {
                write!(f,
                "Struct name \"{}\" is not idiomatic. Structs should have a ClassCase name, like \
                \"{}\".",
                struct_name,
                to_upper_camel_case(struct_name.as_str())
                )
            }
            NonClassCaseTypeParameter { name } => {
                write!(f,
                "Type parameter \"{}\" is not idiomatic. TypeParameters should have a ClassCase name, like \
                \"{}\".",
                name,
                to_upper_camel_case(name.as_str())
                )
            }
            NonClassCaseTraitName { name } => {
                write!(f,
                "Trait name \"{}\" is not idiomatic. Traits should have a ClassCase name, like \
                \"{}\".",
                name,
                to_upper_camel_case(name.as_str())
                )
            }
            NonClassCaseEnumName { enum_name } => write!(
                f,
                "Enum \"{}\"'s capitalization is not idiomatic. Enums should have a ClassCase \
                name, like \"{}\".",
                enum_name,
                to_upper_camel_case(enum_name.as_str())
            ),
            NonSnakeCaseStructFieldName { field_name } => write!(
                f,
                "Struct field name \"{}\" is not idiomatic. Struct field names should have a \
                snake_case name, like \"{}\".",
                field_name,
                to_snake_case(field_name.as_str())
            ),
            NonClassCaseEnumVariantName { variant_name } => write!(
                f,
                "Enum variant name \"{}\" is not idiomatic. Enum variant names should be \
                ClassCase, like \"{}\".",
                variant_name,
                to_upper_camel_case(variant_name.as_str())
            ),
            NonSnakeCaseFunctionName { name } => {
                write!(f,
                "Function name \"{}\" is not idiomatic. Function names should be snake_case, like \
                \"{}\".",
                name,
                to_snake_case(name.as_str())
                )
            }
            NonScreamingSnakeCaseConstName { name } => {
                write!(
                    f,
                    "Constant name \"{name}\" is not idiomatic. Constant names should be SCREAMING_SNAKE_CASE, like \
                    \"{}\".",
                    to_screaming_snake_case(name.as_str()),
                )
            },
            UnusedReturnValue { r#type } => write!(
                f,
                "This returns a value of type \"{type}\", which is not assigned to anything and is \
                ignored."
            ),
            SimilarMethodFound { lib, module, name } => write!(
                f,
                "A method with the same name was found for type {name} in dependency \"{lib}::{module}\". \
                Traits must be in scope in order to access their methods. "
            ),
            ShadowsOtherSymbol { name } => write!(
                f,
                "This shadows another symbol in this scope with the same name \"{name}\"."
            ),
            AsmBlockIsEmpty => write!(
                f,
                "This ASM block is empty."
            ),
            UninitializedAsmRegShadowsItem { constant_or_configurable_or_variable, item } => write!(
                f,
                "This uninitialized register is shadowing a {}. You probably meant to also initialize it, like \"{item}: {item}\".",
                // Lowercase the "Constant"/"Configurable"/"Variable" label for mid-sentence use.
                constant_or_configurable_or_variable.to_ascii_lowercase(),
            ),
            OverridingTraitImplementation => write!(
                f,
                "This trait implementation overrides another one that was previously defined."
            ),
            DeadDeclaration => write!(f, "This declaration is never used."),
            DeadEnumDeclaration => write!(f, "This enum is never used."),
            DeadStructDeclaration => write!(f, "This struct is never used."),
            DeadFunctionDeclaration => write!(f, "This function is never called."),
            UnreachableCode => write!(f, "This code is unreachable."),
            DeadEnumVariant { variant_name } => {
                write!(f, "Enum variant {variant_name} is never constructed.")
            }
            DeadTrait => write!(f, "This trait is never implemented."),
            DeadMethod => write!(f, "This method is never called."),
            StructFieldNeverRead => write!(f, "This struct field is never accessed."),
            ShadowingReservedRegister { reg_name } => write!(
                f,
                "This register declaration shadows the reserved register, \"{reg_name}\"."
            ),
            DeadStorageDeclaration => write!(
                f,
                "This storage declaration is never accessed and can be removed."
            ),
            DeadStorageDeclarationForFunction { unneeded_attrib } => write!(
                f,
                "This function's storage attributes declaration does not match its \
                actual storage access pattern: '{unneeded_attrib}' attribute(s) can be removed."
            ),
            MatchExpressionUnreachableArm { .. } => write!(f, "This match arm is unreachable."),
            UnknownAttribute { attribute, .. } => write!(f, "Unknown attribute \"{attribute}\"."),
            UnknownAttributeArg { attribute, arg, expected_args } => write!(
                f,
                "\"{arg}\" is an unknown argument for attribute \"{attribute}\". Known arguments are: {}.", sequence_to_str(expected_args, Enclosing::DoubleQuote, usize::MAX)
            ),
            EffectAfterInteraction {effect, effect_in_suggestion, block_name} =>
                write!(f, "{effect} after external contract interaction in function or method \"{block_name}\". \
                Consider {effect_in_suggestion} before calling another contract"),
            UsingDeprecated { deprecated_element_name, deprecated_element, help } =>
                write!(f, "{deprecated_element} \"{deprecated_element_name}\" is deprecated. {}", help.as_ref().unwrap_or(&"".into())),
            InherentImplForExternalType { type_name, .. } =>
                write!(
                    f,
                    "Inherent implementation for `{type_name}` must be defined in the package that defines the type."
                ),
            DuplicatedStorageKey { first_field_full_name, second_field_full_name, key, .. } =>
                write!(f, "Two storage fields have the same storage key.\nFirst field: {first_field_full_name}\nSecond field: {second_field_full_name}\nKey: {key}"),
            ErrorTypeEmptyEnum { enum_name } =>
                write!(f, "Empty error type enum \"{enum_name}\" can never be instantiated and used in `panic` expressions."),
            ErrorEmptyErrorMessage { enum_name, enum_variant_name } =>
                write!(f, "Error enum variant \"{enum_name}::{enum_variant_name}\" has an empty error message. Consider adding a helpful error message."),
            ErrorDuplicatedErrorMessage { previous_occurrences, .. } =>
                // Singular phrasing when there is exactly one previous occurrence,
                // otherwise spell out the count ("two times", ...).
                write!(f, "This error message is duplicated{}. Consider using a unique error message for every error variant.",
                    if previous_occurrences.len() == 1 {
                        "".to_string()
                    } else {
                        format!(" {} times", num_to_str(previous_occurrences.len()))
                    }
                ),
        }
    }
}
// Shared help line for warnings that are scheduled to become hard errors.
// NOTE(review): `dead_code` is allowed although this is referenced below
// (e.g. by `InherentImplForExternalType`) — confirm whether the allow is stale.
#[allow(dead_code)]
const FUTURE_HARD_ERROR_HELP: &str =
    "In future versions of Sway this warning will become a hard error.";
impl ToDiagnostic for CompileWarning {
    /// Builds the rich, span-annotated diagnostic for this warning.
    ///
    /// Variants matched explicitly below get tailored issues, hints, and help
    /// lines; all remaining variants fall through to the catch-all arm, which
    /// reuses the plain `Display` message as the issue text.
    fn to_diagnostic(&self, source_engine: &sway_types::SourceEngine) -> Diagnostic {
        let code = Code::warnings;
        use sway_types::style::*;
        use Warning::*;
        match &self.warning_content {
            NonScreamingSnakeCaseConstName { name } => Diagnostic {
                reason: Some(Reason::new(code(1), "Constant name is not idiomatic".to_string())),
                issue: Issue::warning(
                    source_engine,
                    name.span(),
                    format!("Constant \"{name}\" should be SCREAMING_SNAKE_CASE."),
                ),
                hints: vec![
                    Hint::help(
                        source_engine,
                        name.span(),
                        format!("Consider renaming it to, e.g., \"{}\".", to_screaming_snake_case(name.as_str())),
                    ),
                ],
                help: vec![
                    // Idiom: no interpolation here, so `to_string` replaces the needless `format!`.
                    "In Sway, ABIs, structs, traits, and enums are CapitalCase.".to_string(),
                    "Modules, variables, and functions are snake_case, while constants are SCREAMING_SNAKE_CASE.".to_string(),
                ],
            },
            MatchExpressionUnreachableArm { match_value, match_type, preceding_arms, unreachable_arm, is_last_arm, is_catch_all_arm } => Diagnostic {
                reason: Some(Reason::new(code(1), "Match arm is unreachable".to_string())),
                issue: Issue::warning(
                    source_engine,
                    unreachable_arm.clone(),
                    match (*is_last_arm, *is_catch_all_arm) {
                        (true, true) => format!("Last catch-all match arm `{}` is unreachable.", unreachable_arm.as_str()),
                        _ => format!("Match arm `{}` is unreachable.", unreachable_arm.as_str())
                    }
                ),
                hints: vec![
                    Hint::info(
                        source_engine,
                        match_value.clone(),
                        format!("The expression to match on is of type \"{match_type}\".")
                    ),
                    // `preceding_arms` is Right(span) for an interior catch-all arm,
                    // Left(spans) for a run of preceding non catch-all arms.
                    if preceding_arms.is_right() {
                        Hint::help(
                            source_engine,
                            preceding_arms.as_ref().unwrap_right().clone(),
                            format!("Catch-all arm `{}` makes all match arms below it unreachable.", preceding_arms.as_ref().unwrap_right().as_str())
                        )
                    }
                    else {
                        Hint::info(
                            source_engine,
                            Span::join_all(preceding_arms.as_ref().unwrap_left().clone()),
                            if *is_last_arm {
                                format!("Preceding match arms already match all possible values of `{}`.", match_value.as_str())
                            }
                            else {
                                format!("Preceding match arms already match all the values that `{}` can match.", unreachable_arm.as_str())
                            }
                        )
                    }
                ],
                help: if preceding_arms.is_right() {
                    let catch_all_arm = preceding_arms.as_ref().unwrap_right().as_str();
                    vec![
                        "Catch-all patterns make sense only in last match arms.".to_string(),
                        format!("Consider removing the catch-all arm `{catch_all_arm}` or making it the last arm."),
                        format!("Consider removing the unreachable arms below the `{catch_all_arm}` arm."),
                    ]
                }
                else if *is_last_arm && *is_catch_all_arm {
                    vec![
                        "Catch-all patterns are often used in last match arms.".to_string(),
                        format!("But in this case, the preceding arms already match all possible values of `{}`.", match_value.as_str()),
                        "Consider removing the unreachable last catch-all arm.".to_string(),
                    ]
                }
                else {
                    vec![
                        "Consider removing the unreachable arm.".to_string(),
                    ]
                }
            },
            UninitializedAsmRegShadowsItem { constant_or_configurable_or_variable, item } => Diagnostic {
                reason: Some(Reason::new(code(1), format!("Uninitialized ASM register is shadowing a {}", constant_or_configurable_or_variable.to_ascii_lowercase()))),
                issue: Issue::warning(
                    source_engine,
                    self.span(),
                    format!("Uninitialized register \"{item}\" is shadowing a {} of the same name.", constant_or_configurable_or_variable.to_ascii_lowercase()),
                ),
                hints: {
                    let mut hints = vec![
                        Hint::info(
                            source_engine,
                            item.span(),
                            format!("{constant_or_configurable_or_variable} \"{item}\" is declared here.")
                        ),
                    ];
                    hints.append(&mut Hint::multi_help(
                        source_engine,
                        &self.span(),
                        vec![
                            format!("Are you trying to initialize the register to the value of the {}?", constant_or_configurable_or_variable.to_ascii_lowercase()),
                            format!("In that case, you must do it explicitly: `{item}: {item}`."),
                            format!("Otherwise, to avoid the confusion with the shadowed {}, consider renaming the register \"{item}\".", constant_or_configurable_or_variable.to_ascii_lowercase()),
                        ]
                    ));
                    hints
                },
                help: vec![],
            },
            AsmBlockIsEmpty => Diagnostic {
                reason: Some(Reason::new(code(1), "ASM block is empty".to_string())),
                issue: Issue::warning(
                    source_engine,
                    self.span(),
                    "This ASM block is empty.".to_string(),
                ),
                hints: vec![],
                help: vec![
                    "Consider adding assembly instructions or a return register to the ASM block, or removing the block altogether.".to_string(),
                ],
            },
            DuplicatedStorageKey { first_field, first_field_full_name, first_field_key_is_compiler_generated, second_field, second_field_full_name, second_field_key_is_compiler_generated, key } => Diagnostic {
                reason: Some(Reason::new(code(1), "Two storage fields have the same storage key".to_string())),
                issue: Issue::warning(
                    source_engine,
                    first_field.span(),
                    format!("\"{first_field_full_name}\" has the same storage key as \"{second_field_full_name}\"."),
                ),
                hints: vec![
                    Hint::info(
                        source_engine,
                        second_field.span(),
                        format!("\"{second_field_full_name}\" is declared here."),
                    ),
                ],
                // Explain where the colliding key comes from: a compiler-generated
                // hash of the field name, or explicit `in` keys on both fields.
                help: vec![
                    if *first_field_key_is_compiler_generated || *second_field_key_is_compiler_generated {
                        format!("The key of \"{}\" is generated by the compiler using the following formula:",
                            if *first_field_key_is_compiler_generated {
                                first_field_full_name
                            } else {
                                second_field_full_name
                            }
                        )
                    } else {
                        "Both keys are explicitly defined by using the `in` keyword.".to_string()
                    },
                    if *first_field_key_is_compiler_generated || *second_field_key_is_compiler_generated {
                        format!("{}sha256((0u8, \"{}\"))",
                            Indent::Single,
                            if *first_field_key_is_compiler_generated {
                                first_field_full_name
                            } else {
                                second_field_full_name
                            }
                        )
                    } else {
                        Diagnostic::help_none()
                    },
                    format!("The common key is: {key}.")
                ],
            },
            UnknownAttribute { attribute, known_attributes } => Diagnostic {
                reason: Some(Reason::new(code(1), "Attribute is unknown".to_string())),
                issue: Issue::warning(
                    source_engine,
                    attribute.span(),
                    format!("\"{attribute}\" attribute is unknown.")
                ),
                hints: vec![did_you_mean_help(source_engine, attribute.span(), known_attributes.iter(), 2, Enclosing::DoubleQuote)],
                help: vec![
                    "Unknown attributes are allowed and can be used by third-party tools,".to_string(),
                    "but the compiler ignores them.".to_string(),
                ],
            },
            UnknownAttributeArg { attribute, arg, expected_args } => Diagnostic {
                reason: Some(Reason::new(code(1), "Attribute argument is unknown".to_string())),
                issue: Issue::warning(
                    source_engine,
                    arg.span(),
                    format!("\"{arg}\" is an unknown argument for attribute \"{attribute}\".")
                ),
                hints: {
                    // Present the known arguments inline for short lists, or as
                    // an indented list when there are more than three.
                    let mut hints = vec![did_you_mean_help(source_engine, arg.span(), expected_args, 2, Enclosing::DoubleQuote)];
                    if expected_args.len() == 1 {
                        hints.push(Hint::help(source_engine, arg.span(), format!("The only known argument is \"{}\".", expected_args[0])));
                    } else if expected_args.len() <= 3 {
                        hints.push(Hint::help(source_engine, arg.span(), format!("Known arguments are {}.", sequence_to_str(expected_args, Enclosing::DoubleQuote, usize::MAX))));
                    } else {
                        hints.push(Hint::help(source_engine, arg.span(), "Known arguments are:".to_string()));
                        hints.append(&mut Hint::multi_help(source_engine, &arg.span(), sequence_to_list(expected_args, Indent::Single, usize::MAX)))
                    }
                    hints
                },
                help: vec![
                    format!("Unknown attribute arguments are allowed for some attributes like \"{attribute}\"."),
                    "They can be used by third-party tools, but the compiler ignores them.".to_string(),
                ],
            },
            UsingDeprecated { deprecated_element, deprecated_element_name, help } => Diagnostic {
                reason: Some(Reason::new(code(1), format!("{deprecated_element} is deprecated"))),
                issue: Issue::warning(
                    source_engine,
                    self.span(),
                    format!("{deprecated_element} \"{deprecated_element_name}\" is deprecated."),
                ),
                hints: help.as_ref().map_or(vec![], |help| vec![
                    Hint::help(
                        source_engine,
                        self.span(),
                        help.clone(),
                    ),
                ]),
                help: vec![],
            },
            InherentImplForExternalType { type_name, type_definition_span } => Diagnostic {
                reason: Some(Reason::new(
                    code(1),
                    "coherence violation: inherent implementations must be defined in the type's defining package".into()
                )),
                issue: Issue::warning(
                    source_engine,
                    self.span(),
                    format!(
                        "cannot define inherent implementation for `{type_name}`: type is defined in a different package"
                    ),
                ),
                hints: match type_definition_span.clone() {
                    Some(def_span) => vec![Hint::info(
                        source_engine,
                        def_span,
                        format!("Type `{type_name}` is defined here."),
                    )],
                    None => vec![],
                },
                help: vec![
                    FUTURE_HARD_ERROR_HELP.to_string(),
                    Diagnostic::help_empty_line(),
                    "move this impl into the package that defines the type".to_string(),
                    "or define and use a local trait instead to avoid the orphan rule".to_string(),
                ],
            },
            ErrorTypeEmptyEnum { enum_name } => Diagnostic {
                reason: Some(Reason::new(code(1), "Empty error type enum cannot be used in `panic` expressions".to_string())),
                issue: Issue::warning(
                    source_engine,
                    enum_name.span(),
                    format!("Error type enum \"{enum_name}\" is empty and can never be used in `panic` expressions."),
                ),
                hints: vec![],
                help: vec![
                    "Empty enums with no enum variants can never be instantiated.".to_string(),
                    "Thus, they cannot have instances to use as arguments in `panic` expressions.".to_string(),
                    Diagnostic::help_empty_line(),
                    format!("Consider adding enum variants to \"{enum_name}\" and attributing them"),
                    "with the `#[error]` attribute.".to_string(),
                ],
            },
            ErrorEmptyErrorMessage { enum_name, enum_variant_name } => Diagnostic {
                reason: Some(Reason::new(code(1), "Error message is empty".to_string())),
                issue: Issue::warning(
                    source_engine,
                    self.span(),
                    format!("Error enum variant \"{enum_name}::{enum_variant_name}\" has an empty error message."),
                ),
                hints: vec![
                    Hint::help(
                        source_engine,
                        self.span(),
                        "Consider adding a helpful error message here.".to_string(),
                    )
                ],
                help: vec![],
            },
            ErrorDuplicatedErrorMessage { last_occurrence, previous_occurrences } => Diagnostic {
                reason: Some(Reason::new(code(1), "Error message is duplicated".to_string())),
                // Fix: this diagnostic is a `CompileWarning` registered under
                // `Code::warnings`, so the issue must carry warning severity,
                // consistently with every other arm in this impl. It was
                // previously built with `Issue::error`.
                issue: Issue::warning(
                    source_engine,
                    last_occurrence.clone(),
                    "This error message is duplicated.".to_string(),
                ),
                hints: {
                    let (first_occurrence, other_occurrences) = previous_occurrences.split_first().expect("there is at least one previous occurrence in `previous_occurrences`");
                    let mut hints = vec![Hint::info(source_engine, first_occurrence.clone(), "It is already used here.".to_string())];
                    other_occurrences.iter().for_each(|occurrence| hints.push(Hint::info(source_engine, occurrence.clone(), "And here.".to_string())));
                    hints.push(Hint::help(source_engine, last_occurrence.clone(), "Consider using a unique error message for every error variant.".to_string()));
                    hints
                },
                help: vec![],
            },
            UnusedReturnValue { r#type } => Diagnostic {
                reason: Some(Reason::new(code(1), "Returned value is ignored".to_string())),
                issue: Issue::warning(
                    source_engine,
                    self.span(),
                    "This returns a value which is not assigned to anything and is ignored.".to_string(),
                ),
                hints: vec![
                    Hint::help(
                        source_engine,
                        self.span(),
                        format!("The returned value has type \"{type}\"."),
                    )
                ],
                help: vec![
                    "If you want to intentionally ignore the returned value, use `let _ = ...`:".to_string(),
                    format!("{}let _ = {};", Indent::Single, first_line(self.span.as_str(), true)),
                ],
            },
            _ => Diagnostic {
                    // TODO: Temporarily we use self here to achieve backward compatibility.
                    //       In general, self must not be used and will not be used once we
                    //       switch to our own #[error] macro. All the values for the formatting
                    //       of a diagnostic must come from the enum variant parameters.
                    issue: Issue::warning(source_engine, self.span(), format!("{}", self.warning_content)),
                    ..Default::default()
            }
        }
    }
}
/// A single trait implementation collected for a trait-impls dump.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CollectedTraitImpl {
    /// Span of the `impl` item in the source.
    pub impl_span: Span,
    /// Name of the implemented trait.
    pub trait_name: String,
}
/// Informational (non-warning, non-error) compiler messages.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Info {
    /// Lists the trait implementations found for a type.
    ImplTraitsForType { impls: Vec<CollectedTraitImpl> },
}
impl fmt::Display for Info {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use Info::*;
match self {
ImplTraitsForType { impls } => {
write!(
f,
"Implemented traits: \"{:?}\"",
impls
.iter()
.map(|i| i.impl_span.as_str())
.collect::<Vec<_>>()
)
}
}
}
}
/// An informational compiler message together with its source location.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CompileInfo {
    /// The source location the info points at.
    pub span: Span,
    /// The concrete info variant with its payload.
    pub content: Info,
}
impl Spanned for CompileInfo {
fn span(&self) -> Span {
self.span.clone()
}
}
impl CompileInfo {
    /// The id of the source file this info originates from, if any.
    pub fn source_id(&self) -> Option<SourceId> {
        self.span.source_id().cloned()
    }
    /// Renders the plain-text info message (delegates to `Info`'s `Display`).
    pub fn to_friendly_string(&self) -> String {
        self.content.to_string()
    }
}
impl ToDiagnostic for CompileInfo {
    /// Builds the rich diagnostic for an informational message: the issue
    /// points at the inspected element, with one help hint per collected
    /// trait implementation.
    fn to_diagnostic(&self, source_engine: &sway_types::SourceEngine) -> Diagnostic {
        // NOTE(review): infos reuse the warnings code namespace here — confirm
        // whether a dedicated info code is intended.
        let code = Code::warnings;
        use Info::*;
        match &self.content {
            ImplTraitsForType { impls } => Diagnostic {
                reason: Some(Reason::new(
                    code(1),
                    "Trait impls dump was requested.".to_string(),
                )),
                issue: Issue::info(
                    source_engine,
                    self.span(),
                    "Matching implemented traits for this type.".to_string(),
                ),
                hints: impls
                    .iter()
                    .map(|i| {
                        Hint::help(
                            source_engine,
                            i.impl_span.clone(),
                            format!("trait is {}", i.trait_name.clone()),
                        )
                    })
                    .collect::<Vec<_>>(),
                help: vec![],
            },
        }
    }
}
#[cfg(test)]
mod test {
use sway_types::style::*;
#[test]
fn detect_styles() {
let snake_cases = [
"hello",
"__hello",
"blah32",
"some_words_here",
"___some_words_here",
];
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/lex_error.rs | sway-error/src/lex_error.rs | use sway_types::{ast::Delimiter, Ident, Span, Spanned};
use thiserror::Error;
/// A lexing failure: a specific [LexErrorKind] anchored to a source [Span].
#[derive(Error, Debug, Clone, PartialEq, Eq, PartialOrd, Hash)]
#[error("{}", kind)]
pub struct LexError {
    /// Location of the failure in the source text.
    pub span: Span,
    /// The specific failure; its `Display` is forwarded as this error's message.
    pub kind: LexErrorKind,
}
/// The specific ways lexing can fail.
///
/// NOTE(review): the `position`/`*_position`/`unclosed_indices` fields appear
/// to be offsets into the source text — confirm against the lexer; the
/// enclosing [LexError] carries the full span.
#[derive(Error, Debug, Clone, PartialEq, Eq, PartialOrd, Hash)]
pub enum LexErrorKind {
    #[error("unclosed multiline comment")]
    UnclosedMultilineComment { unclosed_indices: Vec<usize> },
    #[error("unexpected close delimiter")]
    UnexpectedCloseDelimiter {
        position: usize,
        close_delimiter: Delimiter,
    },
    #[error("mismatched delimiters")]
    MismatchedDelimiters {
        open_position: usize,
        close_position: usize,
        open_delimiter: Delimiter,
        close_delimiter: Delimiter,
    },
    #[error("unclosed delimiter")]
    UnclosedDelimiter {
        open_position: usize,
        open_delimiter: Delimiter,
    },
    #[error("unclosed string literal")]
    UnclosedStringLiteral { position: usize },
    #[error("unclosed char literal")]
    UnclosedCharLiteral { position: usize },
    #[error("expected close quote")]
    ExpectedCloseQuote { position: usize },
    #[error("incomplete hex int literal")]
    IncompleteHexIntLiteral { position: usize },
    #[error("incomplete binary int literal")]
    IncompleteBinaryIntLiteral { position: usize },
    #[error("incomplete octal int literal")]
    IncompleteOctalIntLiteral { position: usize },
    #[error("invalid int suffix: {}", suffix)]
    InvalidIntSuffix { suffix: Ident },
    #[error("invalid character")]
    InvalidCharacter { position: usize, character: char },
    #[error("invalid hex escape")]
    InvalidHexEscape,
    #[error("unicode escape missing brace")]
    UnicodeEscapeMissingBrace { position: usize },
    #[error("invalid unicode escape digit")]
    InvalidUnicodeEscapeDigit { position: usize },
    #[error("unicode escape out of range")]
    UnicodeEscapeOutOfRange { position: usize },
    #[error("unicode escape represents an invalid char value")]
    UnicodeEscapeInvalidCharValue { span: Span },
    #[error("unicode text direction codepoint in literal")]
    UnicodeTextDirInLiteral { position: usize, character: char },
    #[error("invalid escape code")]
    InvalidEscapeCode { position: usize },
}
impl Spanned for LexError {
    /// Owned copy of the error's source span.
    fn span(&self) -> Span {
        self.span.to_owned()
    }
}
impl LexError {
    /// Borrowing accessor for the span; avoids the clone done by `Spanned::span`.
    pub fn span_ref(&self) -> &Span {
        &self.span
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/error.rs | sway-error/src/error.rs | use crate::convert_parse_tree_error::{
get_attribute_type, get_expected_attributes_args_multiplicity_msg, AttributeType,
ConvertParseTreeError,
};
use crate::diagnostic::{Code, Diagnostic, Hint, Issue, Reason, ToDiagnostic};
use crate::formatting::*;
use crate::lex_error::LexError;
use crate::parser_error::{ParseError, ParseErrorKind};
use crate::type_error::TypeError;
use core::fmt;
use std::fmt::Formatter;
use sway_types::style::to_snake_case;
use sway_types::{BaseIdent, Ident, IdentUnique, SourceEngine, Span, Spanned};
use thiserror::Error;
use self::ShadowingSource::*;
use self::StructFieldUsageContext::*;
/// Distinguishes ABI interfaces from trait interfaces in error messages,
/// carrying the interface's name. Rendered by the `Display` impl below.
#[derive(Error, Debug, Clone, PartialEq, Eq, Hash)]
pub enum InterfaceName {
    Abi(Ident),
    Trait(Ident),
}
impl fmt::Display for InterfaceName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
InterfaceName::Abi(name) => write!(f, "ABI \"{name}\""),
InterfaceName::Trait(name) => write!(f, "trait \"{name}\""),
}
}
}
// TODO: Since moving to using Idents instead of strings, there are a lot of redundant spans in
// this type. When replacing Strings + Spans with Idents, be aware of the rule explained below.
// When defining error structures that display identifiers, we prefer passing Idents over Strings.
// The error span can come from that same Ident or can be a different span.
// We handle those two cases in the following way:
// - If the error span equals Ident's span, we use IdentUnique and never the plain Ident.
// - If the error span is different then Ident's span, we pass Ident and Span as two separate fields.
//
// The reason for this rule is clearly communicating the difference of the two cases in every error,
// as well as avoiding issues with the error message deduplication explained below.
//
// Deduplication of error messages might remove errors that are actually not duplicates because
// although they point to the same Ident (in terms of the identifier's name), the span can be different.
// Deduplication works on hashes and Ident's hash contains only the name and not the span.
// That's why we always use IdentUnique whenever we extract the span from the provided Ident.
// Using IdentUnique also clearly communicates that we are extracting the span from the
// provided identifier.
#[derive(Error, Debug, Clone, PartialEq, Eq, Hash)]
pub enum CompileError {
#[error("\"const generics\" are not supported here.")]
ConstGenericNotSupportedHere { span: Span },
#[error("This expression is not supported as lengths.")]
LengthExpressionNotSupported { span: Span },
#[error(
"This needs \"{feature}\" to be enabled, but it is currently disabled. For more details go to {url}."
)]
FeatureIsDisabled {
feature: String,
url: String,
span: Span,
},
#[error(
"There was an error while evaluating the evaluation order for the module dependency graph."
)]
ModuleDepGraphEvaluationError {},
#[error("A cyclic reference was found between the modules: {}.",
modules.iter().map(|ident| ident.as_str().to_string())
.collect::<Vec<_>>()
.join(", "))]
ModuleDepGraphCyclicReference { modules: Vec<BaseIdent> },
#[error("Variable \"{var_name}\" does not exist in this scope.")]
UnknownVariable { var_name: Ident, span: Span },
#[error("Identifier \"{name}\" was used as a variable, but it is actually a {what_it_is}.")]
NotAVariable {
name: Ident,
what_it_is: &'static str,
span: Span,
},
#[error("{feature} is currently not implemented.")]
Unimplemented {
/// The description of the unimplemented feature,
/// formulated in a way that fits into common ending
/// "is currently not implemented."
/// E.g., "Using something".
feature: String,
/// Help lines. Empty if there is no additional help.
/// To get an empty line between the help lines,
/// insert a [String] containing only a space: `" ".to_string()`.
help: Vec<String>,
span: Span,
},
#[error("{0}")]
TypeError(TypeError),
#[error("Error parsing input: {err:?}")]
ParseError { span: Span, err: String },
#[error(
"Internal compiler error: {0}\nPlease file an issue on the repository and include the \
code that triggered this error."
)]
Internal(&'static str, Span),
#[error(
"Internal compiler error: {0}\nPlease file an issue on the repository and include the \
code that triggered this error."
)]
InternalOwned(String, Span),
#[error(
"Predicate declaration contains no main function. Predicates require a main function."
)]
NoPredicateMainFunction(Span),
#[error("A predicate's main function must return a boolean.")]
PredicateMainDoesNotReturnBool(Span),
#[error("Script declaration contains no main function. Scripts require a main function.")]
NoScriptMainFunction(Span),
#[error("Fallback function already defined in scope.")]
MultipleDefinitionsOfFallbackFunction { name: Ident, span: Span },
#[error("Function \"{name}\" was already defined in scope.")]
MultipleDefinitionsOfFunction { name: Ident, span: Span },
#[error("Name \"{name}\" is defined multiple times.")]
MultipleDefinitionsOfName { name: Ident, span: Span },
#[error("Constant \"{name}\" was already defined in scope.")]
MultipleDefinitionsOfConstant { name: Ident, new: Span, old: Span },
#[error("Type \"{name}\" was already defined in scope.")]
MultipleDefinitionsOfType { name: Ident, span: Span },
#[error("Variable \"{}\" is already defined in match arm.", first_definition.as_str())]
MultipleDefinitionsOfMatchArmVariable {
match_value: Span,
match_type: String,
first_definition: Span,
first_definition_is_struct_field: bool,
duplicate: Span,
duplicate_is_struct_field: bool,
},
#[error(
"Assignment to an immutable variable. Variable \"{decl_name} is not declared as mutable."
)]
AssignmentToNonMutableVariable {
/// Variable name pointing to the name in the variable declaration.
decl_name: Ident,
/// The complete left-hand side of the assignment.
lhs_span: Span,
},
#[error(
"Assignment to a {}. {} cannot be assigned to.",
if *is_configurable {
"configurable"
} else {
"constant"
},
if *is_configurable {
"Configurables"
} else {
"Constants"
}
)]
AssignmentToConstantOrConfigurable {
/// Constant or configurable name pointing to the name in the constant declaration.
decl_name: Ident,
is_configurable: bool,
/// The complete left-hand side of the assignment.
lhs_span: Span,
},
#[error(
"This assignment target cannot be assigned to, because {} is {}{decl_friendly_type_name} and not a mutable variable.",
if let Some(decl_name) = decl_name {
format!("\"{decl_name}\"")
} else {
"this".to_string()
},
a_or_an(decl_friendly_type_name)
)]
DeclAssignmentTargetCannotBeAssignedTo {
/// Name of the declared variant, pointing to the name in the declaration.
decl_name: Option<Ident>,
/// Friendly name of the type of the declaration. E.g., "function", or "struct".
decl_friendly_type_name: &'static str,
/// The complete left-hand side of the assignment.
lhs_span: Span,
},
#[error("This reference is not a reference to a mutable value (`&mut`).")]
AssignmentViaNonMutableReference {
/// Name of the reference, if the left-hand side of the assignment is a reference variable,
/// pointing to the name in the reference variable declaration.
///
/// `None` if the assignment LHS is an arbitrary expression and not a variable.
decl_reference_name: Option<Ident>,
/// [Span] of the right-hand side of the reference variable definition,
/// if the left-hand side of the assignment is a reference variable.
decl_reference_rhs: Option<Span>,
/// The type of the reference, if the left-hand side of the assignment is a reference variable,
/// expected to start with `&`.
decl_reference_type: String,
span: Span,
},
#[error(
"Cannot call method \"{method_name}\" on variable \"{variable_name}\" because \
\"{variable_name}\" is not declared as mutable."
)]
MethodRequiresMutableSelf {
method_name: Ident,
variable_name: Ident,
span: Span,
},
#[error(
"This parameter was declared as mutable, which is not supported yet, did you mean to use ref mut?"
)]
MutableParameterNotSupported { param_name: Ident, span: Span },
#[error("Cannot pass immutable argument to mutable parameter.")]
ImmutableArgumentToMutableParameter { span: Span },
#[error("ref mut or mut parameter is not allowed for contract ABI function.")]
RefMutableNotAllowedInContractAbi { param_name: Ident, span: Span },
#[error("Reference to a mutable value cannot reference a constant.")]
RefMutCannotReferenceConstant {
/// Constant, as accessed in code. E.g.:
/// - `MY_CONST`
/// - `LIB_CONST_ALIAS`
/// - `::lib::module::SOME_CONST`
constant: String,
span: Span,
},
#[error("Reference to a mutable value cannot reference an immutable variable.")]
RefMutCannotReferenceImmutableVariable {
/// Variable name pointing to the name in the variable declaration.
decl_name: Ident,
span: Span,
},
#[error(
"Cannot call associated function \"{fn_name}\" as a method. Use associated function \
syntax instead."
)]
AssociatedFunctionCalledAsMethod { fn_name: Ident, span: Span },
#[error(
"Generic type \"{name}\" is not in scope. Perhaps you meant to specify type parameters in \
the function signature? For example: \n`fn \
{fn_name}<{comma_separated_generic_params}>({args}) -> ... `"
)]
TypeParameterNotInTypeScope {
name: Ident,
span: Span,
comma_separated_generic_params: String,
fn_name: Ident,
args: String,
},
#[error(
"expected: {expected} \n\
found: {given} \n\
help: The definition of this {decl_type} must \
match the one in the {interface_name} declaration."
)]
MismatchedTypeInInterfaceSurface {
interface_name: InterfaceName,
span: Span,
decl_type: String,
given: String,
expected: String,
},
#[error("Trait \"{name}\" cannot be found in the current scope.")]
UnknownTrait { span: Span, name: Ident },
#[error("Function \"{name}\" is not a part of {interface_name}'s interface surface.")]
FunctionNotAPartOfInterfaceSurface {
name: Ident,
interface_name: InterfaceName,
span: Span,
},
#[error("Constant \"{name}\" is not a part of {interface_name}'s interface surface.")]
ConstantNotAPartOfInterfaceSurface {
name: Ident,
interface_name: InterfaceName,
span: Span,
},
#[error("Type \"{name}\" is not a part of {interface_name}'s interface surface.")]
TypeNotAPartOfInterfaceSurface {
name: Ident,
interface_name: InterfaceName,
span: Span,
},
#[error("Constants are missing from this trait implementation: {}",
missing_constants.iter().map(|ident| ident.as_str().to_string())
.collect::<Vec<_>>()
.join("\n"))]
MissingInterfaceSurfaceConstants {
missing_constants: Vec<BaseIdent>,
span: Span,
},
#[error("Associated types are missing from this trait implementation: {}",
missing_types.iter().map(|ident| ident.as_str().to_string())
.collect::<Vec<_>>()
.join("\n"))]
MissingInterfaceSurfaceTypes {
missing_types: Vec<BaseIdent>,
span: Span,
},
#[error("Functions are missing from this trait implementation: {}",
missing_functions.iter().map(|ident| ident.as_str().to_string())
.collect::<Vec<_>>()
.join("\n"))]
MissingInterfaceSurfaceMethods {
missing_functions: Vec<BaseIdent>,
span: Span,
},
#[error("Expected {} type {} for \"{name}\", but instead found {}.", expected, if *expected == 1usize { "argument" } else { "arguments" }, given)]
IncorrectNumberOfTypeArguments {
name: Ident,
given: usize,
expected: usize,
span: Span,
},
#[error("\"{name}\" does not take type arguments.")]
DoesNotTakeTypeArguments { name: Ident, span: Span },
#[error("\"{name}\" does not take type arguments as prefix.")]
DoesNotTakeTypeArgumentsAsPrefix { name: Ident, span: Span },
#[error("Type arguments are not allowed for this type.")]
TypeArgumentsNotAllowed { span: Span },
#[error("\"{name}\" needs type arguments.")]
NeedsTypeArguments { name: Ident, span: Span },
#[error(
"Enum with name \"{name}\" could not be found in this scope. Perhaps you need to import \
it?"
)]
EnumNotFound { name: Ident, span: Span },
/// This error is used only for error recovery and is not emitted as a compiler
/// error to the final compilation output. The compiler emits the cumulative error
/// [CompileError::StructInstantiationMissingFields] given below, and that one also
/// only if the struct can actually be instantiated.
#[error("Instantiation of the struct \"{struct_name}\" is missing field \"{field_name}\".")]
StructInstantiationMissingFieldForErrorRecovery {
field_name: Ident,
/// Original, non-aliased struct name.
struct_name: Ident,
span: Span,
},
#[error("Instantiation of the struct \"{struct_name}\" is missing {} {}.",
if field_names.len() == 1 { "field" } else { "fields" },
field_names.iter().map(|name| format!("\"{name}\"")).collect::<Vec::<_>>().join(", "))]
StructInstantiationMissingFields {
field_names: Vec<Ident>,
/// Original, non-aliased struct name.
struct_name: Ident,
span: Span,
struct_decl_span: Span,
total_number_of_fields: usize,
},
#[error("Struct \"{struct_name}\" cannot be instantiated here because it has private fields.")]
StructCannotBeInstantiated {
/// Original, non-aliased struct name.
struct_name: Ident,
span: Span,
struct_decl_span: Span,
private_fields: Vec<Ident>,
/// All available public constructors if `is_in_storage_declaration` is false,
/// or only the public constructors that potentially evaluate to a constant
/// if `is_in_storage_declaration` is true.
constructors: Vec<String>,
/// True if the struct has only private fields.
all_fields_are_private: bool,
is_in_storage_declaration: bool,
struct_can_be_changed: bool,
},
#[error("Field \"{field_name}\" of the struct \"{struct_name}\" is private.")]
StructFieldIsPrivate {
field_name: IdentUnique,
/// Original, non-aliased struct name.
struct_name: Ident,
field_decl_span: Span,
struct_can_be_changed: bool,
usage_context: StructFieldUsageContext,
},
#[error("Field \"{field_name}\" does not exist in struct \"{struct_name}\".")]
StructFieldDoesNotExist {
field_name: IdentUnique,
/// Only public fields if `is_public_struct_access` is true.
available_fields: Vec<Ident>,
is_public_struct_access: bool,
/// Original, non-aliased struct name.
struct_name: Ident,
struct_decl_span: Span,
struct_is_empty: bool,
usage_context: StructFieldUsageContext,
},
#[error("Field \"{field_name}\" has multiple definitions.")]
StructFieldDuplicated { field_name: Ident, duplicate: Ident },
#[error("No function \"{expected_signature}\" found for type \"{type_name}\".{}",
if matching_methods.is_empty() {
"".to_string()
} else {
format!("\nDid you mean:\n{}",
matching_methods.iter().map(|m| format!("{}{m}", Indent::Double)).collect::<Vec<_>>().join("\n"))
}
)]
/// Note that _method_ here means **any function associated to a type**, with or without
/// the `self` argument.
MethodNotFound {
called_method: IdentUnique,
expected_signature: String,
type_name: String,
matching_methods: Vec<String>,
},
#[error("Module \"{name}\" could not be found.")]
ModuleNotFound { span: Span, name: String },
#[error("This expression has type \"{actually}\", which is not a struct. Fields can only be accessed on structs.")]
FieldAccessOnNonStruct {
actually: String,
/// Name of the storage variable, if the field access
/// happens within the access to a storage variable.
storage_variable: Option<String>,
/// Name of the field that is tried to be accessed.
field_name: IdentUnique,
span: Span,
},
#[error("This expression has type \"{actually}\", which is not a tuple. Elements can only be accessed on tuples.")]
TupleElementAccessOnNonTuple {
actually: String,
span: Span,
index: usize,
index_span: Span,
},
#[error("This expression has type \"{actually}\", which is not an indexable type.")]
NotIndexable { actually: String, span: Span },
#[error("\"{name}\" is a {actually}, not an enum.")]
NotAnEnum {
name: String,
span: Span,
actually: String,
},
#[error("This is a {actually}, not a struct.")]
NotAStruct { span: Span, actually: String },
#[error("This is a {actually}, not an enum.")]
DeclIsNotAnEnum { actually: String, span: Span },
#[error("This is a {actually}, not a struct.")]
DeclIsNotAStruct { actually: String, span: Span },
#[error("This is a {actually}, not a function.")]
DeclIsNotAFunction { actually: String, span: Span },
#[error("This is a {actually}, not a variable.")]
DeclIsNotAVariable { actually: String, span: Span },
#[error("This is a {actually}, not an ABI.")]
DeclIsNotAnAbi { actually: String, span: Span },
#[error("This is a {actually}, not a trait.")]
DeclIsNotATrait { actually: String, span: Span },
#[error("This is a {actually}, not an impl block.")]
DeclIsNotAnImplTrait { actually: String, span: Span },
#[error("This is a {actually}, not a trait function.")]
DeclIsNotATraitFn { actually: String, span: Span },
#[error("This is a {actually}, not storage.")]
DeclIsNotStorage { actually: String, span: Span },
#[error("This is a {actually}, not a constant")]
DeclIsNotAConstant { actually: String, span: Span },
#[error("This is a {actually}, not a type alias")]
DeclIsNotATypeAlias { actually: String, span: Span },
#[error("Could not find symbol \"{name}\" in this scope.")]
SymbolNotFound { name: Ident, span: Span },
#[error("Found multiple bindings for \"{name}\" in this scope.")]
SymbolWithMultipleBindings {
name: Ident,
paths: Vec<String>,
span: Span,
},
#[error("Symbol \"{name}\" is private.")]
ImportPrivateSymbol { name: Ident, span: Span },
#[error("Module \"{name}\" is private.")]
ImportPrivateModule { name: Ident, span: Span },
#[error(
"Because this if expression's value is used, an \"else\" branch is required and it must \
return type \"{r#type}\""
)]
NoElseBranch { span: Span, r#type: String },
#[error(
"Symbol \"{name}\" does not refer to a type, it refers to a {actually_is}. It cannot be \
used in this position."
)]
NotAType {
span: Span,
name: String,
actually_is: &'static str,
},
#[error(
"This enum variant requires an instantiation expression. Try initializing it with \
arguments in parentheses."
)]
MissingEnumInstantiator { span: Span },
#[error(
"This path must return a value of type \"{ty}\" from function \"{function_name}\", but it \
does not."
)]
PathDoesNotReturn {
span: Span,
ty: String,
function_name: Ident,
},
#[error(
"This register was not initialized in the initialization section of the ASM expression. \
Initialized registers are: {initialized_registers}"
)]
UnknownRegister {
span: Span,
initialized_registers: String,
},
#[error("This opcode takes an immediate value but none was provided.")]
MissingImmediate { span: Span },
#[error("This immediate value is invalid.")]
InvalidImmediateValue { span: Span },
#[error("Variant \"{variant_name}\" does not exist on enum \"{enum_name}\"")]
UnknownEnumVariant {
enum_name: Ident,
variant_name: Ident,
span: Span,
},
#[error("Unknown opcode: \"{op_name}\".")]
UnrecognizedOp { op_name: Ident, span: Span },
#[error("Cannot infer type for type parameter \"{ty}\". Insufficient type information provided. Try annotating its type.")]
UnableToInferGeneric { ty: String, span: Span },
#[error("The generic type parameter \"{ty}\" is unconstrained.")]
UnconstrainedGenericParameter { ty: String, span: Span },
#[error("Trait \"{trait_name}\" is not implemented for type \"{ty}\".")]
TraitConstraintNotSatisfied {
type_id: usize, // Used to filter errors in method application type check.
ty: String,
trait_name: String,
span: Span,
},
#[error(
"Expects trait constraint \"{param}: {trait_name}\" which is missing from type parameter \"{param}\"."
)]
TraitConstraintMissing {
param: String,
trait_name: String,
span: Span,
},
#[error("The value \"{val}\" is too large to fit in this 6-bit immediate spot.")]
Immediate06TooLarge { val: u64, span: Span },
#[error("The value \"{val}\" is too large to fit in this 12-bit immediate spot.")]
Immediate12TooLarge { val: u64, span: Span },
#[error("The value \"{val}\" is too large to fit in this 18-bit immediate spot.")]
Immediate18TooLarge { val: u64, span: Span },
#[error("The value \"{val}\" is too large to fit in this 24-bit immediate spot.")]
Immediate24TooLarge { val: u64, span: Span },
#[error(
"This op expects {expected} register(s) as arguments, but you provided {received} register(s)."
)]
IncorrectNumberOfAsmRegisters {
span: Span,
expected: usize,
received: usize,
},
#[error("This op does not take an immediate value.")]
UnnecessaryImmediate { span: Span },
#[error("This reference is ambiguous, and could refer to a module, enum, or function of the same name. Try qualifying the name with a path.")]
AmbiguousPath { span: Span },
#[error("This is a module path, and not an expression.")]
ModulePathIsNotAnExpression { module_path: String, span: Span },
#[error("Unknown type name.")]
UnknownType { span: Span },
#[error("Unknown type name \"{name}\".")]
UnknownTypeName { name: String, span: Span },
#[error("The file {file_path} could not be read: {stringified_error}")]
FileCouldNotBeRead {
span: Span,
file_path: String,
stringified_error: String,
},
#[error("This imported file must be a library. It must start with \"library;\"")]
ImportMustBeLibrary { span: Span },
#[error("An enum instantiaton cannot contain more than one value. This should be a single value of type {ty}.")]
MoreThanOneEnumInstantiator { span: Span, ty: String },
#[error("This enum variant represents the unit type, so it should not be instantiated with any value.")]
UnnecessaryEnumInstantiator { span: Span },
#[error("The enum variant `{ty}` is of type `unit`, so its constructor does not take arguments or parentheses. Try removing the ().")]
UnitVariantWithParenthesesEnumInstantiator { span: Span, ty: String },
#[error("Cannot find trait \"{name}\" in this scope.")]
TraitNotFound { name: String, span: Span },
#[error("Trait \"{trait_name}\" is not imported when calling \"{function_name}\".\nThe import is needed because \"{function_name}\" uses \"{trait_name}\" in one of its trait constraints.")]
TraitNotImportedAtFunctionApplication {
trait_name: String,
function_name: String,
function_call_site_span: Span,
trait_constraint_span: Span,
trait_candidates: Vec<String>,
},
#[error("This expression is not valid on the left hand side of a reassignment.")]
InvalidExpressionOnLhs { span: Span },
#[error("This code cannot be evaluated to a constant")]
CannotBeEvaluatedToConst { span: Span },
#[error(
"This code cannot be evaluated to a configurable because its size is not always limited."
)]
CannotBeEvaluatedToConfigurableSizeUnknown { span: Span },
#[error("{} \"{method_name}\" expects {expected} {} but you provided {received}.",
if *dot_syntax_used { "Method" } else { "Function" },
if *expected == 1usize { "argument" } else {"arguments"},
)]
TooManyArgumentsForFunction {
span: Span,
method_name: Ident,
dot_syntax_used: bool,
expected: usize,
received: usize,
},
#[error("{} \"{method_name}\" expects {expected} {} but you provided {received}.",
if *dot_syntax_used { "Method" } else { "Function" },
if *expected == 1usize { "argument" } else {"arguments"},
)]
TooFewArgumentsForFunction {
span: Span,
method_name: Ident,
dot_syntax_used: bool,
expected: usize,
received: usize,
},
#[error("The function \"{method_name}\" was called without parentheses. Try adding ().")]
MissingParenthesesForFunction { span: Span, method_name: Ident },
#[error("This type is invalid in a function selector. A contract ABI function selector must be a known sized type, not generic.")]
InvalidAbiType { span: Span },
#[error("This is a {actually_is}, not an ABI. An ABI cast requires a valid ABI to cast the address to.")]
NotAnAbi {
span: Span,
actually_is: &'static str,
},
#[error("An ABI can only be implemented for the `Contract` type, so this implementation of an ABI for type \"{ty}\" is invalid.")]
ImplAbiForNonContract { span: Span, ty: String },
#[error("Trait \"{trait_name}\" is already implemented for type \"{type_implementing_for}\".")]
ConflictingImplsForTraitAndType {
trait_name: String,
type_implementing_for: String,
type_implementing_for_unaliased: String,
existing_impl_span: Span,
second_impl_span: Span,
},
#[error(
"\"{marker_trait_full_name}\" is a marker trait and cannot be explicitly implemented."
)]
MarkerTraitExplicitlyImplemented {
marker_trait_full_name: String,
span: Span,
},
#[error("Duplicate definitions for the {decl_kind} \"{decl_name}\" for type \"{type_implementing_for}\".")]
DuplicateDeclDefinedForType {
decl_kind: String,
decl_name: String,
type_implementing_for: String,
type_implementing_for_unaliased: String,
existing_impl_span: Span,
second_impl_span: Span,
},
#[error("The function \"{fn_name}\" in {interface_name} is defined with {num_parameters} parameters, but the provided implementation has {provided_parameters} parameters.")]
IncorrectNumberOfInterfaceSurfaceFunctionParameters {
fn_name: Ident,
interface_name: InterfaceName,
num_parameters: usize,
provided_parameters: usize,
span: Span,
},
#[error("This parameter was declared as type {should_be}, but argument of type {provided} was provided.")]
ArgumentParameterTypeMismatch {
span: Span,
should_be: String,
provided: String,
},
#[error("Function {fn_name} is recursive, which is unsupported at this time.")]
RecursiveCall { fn_name: Ident, span: Span },
#[error(
"Function {fn_name} is recursive via {call_chain}, which is unsupported at this time."
)]
RecursiveCallChain {
fn_name: Ident,
call_chain: String, // Pretty list of symbols, e.g., "a, b and c".
span: Span,
},
#[error("Type {name} is recursive, which is unsupported at this time.")]
RecursiveType { name: Ident, span: Span },
#[error("Type {name} is recursive via {type_chain}, which is unsupported at this time.")]
RecursiveTypeChain {
name: Ident,
type_chain: String, // Pretty list of symbols, e.g., "a, b and c".
span: Span,
},
#[error("The GM (get-metadata) opcode, when called from an external context, will cause the VM to panic.")]
GMFromExternalContext { span: Span },
#[error("The MINT opcode cannot be used in an external context.")]
MintFromExternalContext { span: Span },
#[error("The BURN opcode cannot be used in an external context.")]
BurnFromExternalContext { span: Span },
#[error("Contract storage cannot be used in an external context.")]
ContractStorageFromExternalContext { span: Span },
#[error("The {opcode} opcode cannot be used in a predicate.")]
InvalidOpcodeFromPredicate { opcode: String, span: Span },
#[error("Index out of bounds; the length is {count} but the index is {index}.")]
ArrayOutOfBounds { index: u64, count: u64, span: Span },
#[error(
"Invalid range; the range end at index {end} is smaller than its start at index {start}"
)]
InvalidRangeEndGreaterThanStart { start: u64, end: u64, span: Span },
#[error("Tuple index {index} is out of bounds. The tuple has {count} element{}.", plural_s(*count))]
TupleIndexOutOfBounds {
index: usize,
count: usize,
tuple_type: String,
span: Span,
prefix_span: Span,
},
#[error("Constant requires expression.")]
ConstantRequiresExpression { span: Span },
#[error("Constants cannot be shadowed. {shadowing_source} \"{name}\" shadows constant of the same name.")]
ConstantsCannotBeShadowed {
/// Defines what shadows the constant.
///
/// Although being ready in the diagnostic, the `PatternMatchingStructFieldVar` option
/// is currently not used. Getting the information about imports and aliases while
/// type checking match branches is too much effort at the moment, compared to gained
/// additional clarity of the error message. We might add support for this option in
/// the future.
shadowing_source: ShadowingSource,
name: IdentUnique,
constant_span: Span,
constant_decl_span: Span,
is_alias: bool,
},
#[error("Configurables cannot be shadowed. {shadowing_source} \"{name}\" shadows configurable of the same name.")]
ConfigurablesCannotBeShadowed {
/// Defines what shadows the configurable.
///
/// Using configurable in pattern matching, expecting to behave same as a constant,
/// will result in [CompileError::ConfigurablesCannotBeMatchedAgainst].
/// Otherwise, we would end up with a very confusing error message that
/// a configurable cannot be shadowed by a variable.
/// In the, unlikely but equally confusing, case of a struct field pattern variable
/// named same as the configurable we also want to provide a better explanation
/// and `shadowing_source` helps us distinguish that case as well.
shadowing_source: ShadowingSource,
name: IdentUnique,
configurable_span: Span,
},
#[error("Configurables cannot be matched against. Configurable \"{name}\" cannot be used in pattern matching.")]
ConfigurablesCannotBeMatchedAgainst {
name: IdentUnique,
configurable_span: Span,
},
#[error(
"Constants cannot shadow variables. Constant \"{name}\" shadows variable of the same name."
)]
ConstantShadowsVariable {
name: IdentUnique,
variable_span: Span,
},
#[error("{existing_constant_or_configurable} of the name \"{name}\" already exists.")]
ConstantDuplicatesConstantOrConfigurable {
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/handler.rs | sway-error/src/handler.rs | use crate::{
error::CompileError,
warning::{CompileInfo, CompileWarning},
};
use core::cell::RefCell;
/// A handler with which you can emit diagnostics.
#[derive(Default, Debug, Clone)]
pub struct Handler {
    /// The inner handler.
    /// This construction is used to avoid `&mut` all over the compiler.
    /// `RefCell` provides single-threaded interior mutability, so diagnostics
    /// can be emitted through a shared `&Handler`.
    inner: RefCell<HandlerDiagnostics>,
}
/// Contains the actual data for `Handler`.
/// Modelled this way to afford an API using interior mutability.
/// Diagnostics are stored in the order in which they were emitted.
#[derive(Default, Debug, Clone)]
struct HandlerDiagnostics {
    /// The sink through which errors will be emitted.
    errors: Vec<CompileError>,
    /// The sink through which warnings will be emitted.
    warnings: Vec<CompileWarning>,
    /// The sink through which infos will be emitted.
    infos: Vec<CompileInfo>,
}
impl Handler {
    /// Creates a `Handler` pre-populated with the given diagnostics.
    pub fn from_parts(
        errors: Vec<CompileError>,
        warnings: Vec<CompileWarning>,
        infos: Vec<CompileInfo>,
    ) -> Self {
        Self {
            inner: RefCell::new(HandlerDiagnostics {
                errors,
                warnings,
                infos,
            }),
        }
    }

    /// Emit the error `err`.
    pub fn emit_err(&self, err: CompileError) -> ErrorEmitted {
        self.inner.borrow_mut().errors.push(err);
        ErrorEmitted { _priv: () }
    }

    /// Signals that compilation should be cancelled, without recording an
    /// additional error.
    pub fn cancel(&self) -> ErrorEmitted {
        ErrorEmitted { _priv: () }
    }

    /// Emit the warning `warn`.
    pub fn emit_warn(&self, warn: CompileWarning) {
        self.inner.borrow_mut().warnings.push(warn);
    }

    /// Emit the info `info`.
    pub fn emit_info(&self, info: CompileInfo) {
        self.inner.borrow_mut().infos.push(info);
    }

    /// Returns `true` if at least one error has been emitted.
    pub fn has_errors(&self) -> bool {
        !self.inner.borrow().errors.is_empty()
    }

    /// Returns a clone of the first emitted error satisfying the predicate `f`,
    /// or `None` if no such error exists.
    pub fn find_error(&self, f: impl FnMut(&&CompileError) -> bool) -> Option<CompileError> {
        self.inner.borrow().errors.iter().find(f).cloned()
    }

    /// Returns `true` if at least one warning has been emitted.
    pub fn has_warnings(&self) -> bool {
        !self.inner.borrow().warnings.is_empty()
    }

    /// Runs `f` with a fresh, scoped `Handler` and afterwards appends all
    /// diagnostics collected by that handler into `self`.
    ///
    /// Returns `Err` if the scoped handler collected any errors, even when
    /// the closure itself returned `Ok`.
    pub fn scope<T>(
        &self,
        f: impl FnOnce(&Handler) -> Result<T, ErrorEmitted>,
    ) -> Result<T, ErrorEmitted> {
        let scoped_handler = Handler::default();
        let closure_res = f(&scoped_handler);
        match self.append(scoped_handler) {
            Some(err) => Err(err),
            None => closure_res,
        }
    }

    /// Extract all the diagnostics from this handler.
    pub fn consume(self) -> (Vec<CompileError>, Vec<CompileWarning>, Vec<CompileInfo>) {
        let inner = self.inner.into_inner();
        (inner.errors, inner.warnings, inner.infos)
    }

    /// Moves all diagnostics from `other` into `self`, preserving their
    /// emission order within each category.
    ///
    /// Returns `Some(ErrorEmitted)` if `other` contained any errors,
    /// otherwise `None`.
    pub fn append(&self, other: Handler) -> Option<ErrorEmitted> {
        let other_has_errors = other.has_errors();
        let (errors, warnings, infos) = other.consume();
        for warn in warnings {
            self.emit_warn(warn);
        }
        for err in errors {
            self.emit_err(err);
        }
        for inf in infos {
            self.emit_info(inf);
        }
        if other_has_errors {
            Some(ErrorEmitted { _priv: () })
        } else {
            None
        }
    }

    /// Removes duplicate errors and warnings while preserving their
    /// original order.
    pub fn dedup(&self) {
        let mut inner = self.inner.borrow_mut();
        // `mem::take` moves the vectors out without cloning their contents;
        // the temporarily empty vectors are replaced right away. The previous
        // implementation cloned both vectors just to rebuild them.
        let errors = std::mem::take(&mut inner.errors);
        inner.errors = dedup_unsorted(errors);
        let warnings = std::mem::take(&mut inner.warnings);
        inner.warnings = dedup_unsorted(warnings);
    }

    /// Retains only the errors specified by the predicate.
    ///
    /// In other words, remove all errors `e` for which `f(&e)` returns `false`.
    /// This method operates in place, visiting each error exactly once in the
    /// original order, and preserves the order of the retained errors.
    pub fn retain_err<F>(&self, f: F)
    where
        F: FnMut(&CompileError) -> bool,
    {
        self.inner.borrow_mut().errors.retain(f)
    }

    /// Maps all errors from `other` into this handler using `f`. Errors for
    /// which `f` returns `None` are dropped. Returns `Err` if at least one
    /// error was mapped and emitted, otherwise `Ok(())`.
    pub fn map_and_emit_errors_from(
        &self,
        other: Handler,
        mut f: impl FnMut(CompileError) -> Option<CompileError>,
    ) -> Result<(), ErrorEmitted> {
        let mut emitted = Ok(());
        let (errs, _, _) = other.consume();
        for err in errs {
            if let Some(err) = (f)(err) {
                emitted = Err(self.emit_err(err));
            }
        }
        emitted
    }
}
/// Proof that an error was emitted through a `Handler`.
///
/// The private unit field prevents struct-literal construction outside this
/// module; values are produced by methods like `Handler::emit_err` and
/// `Handler::cancel`. (Note that the `Default` derive still allows
/// `ErrorEmitted::default()` to be called from anywhere.)
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ErrorEmitted {
    _priv: (),
}
/// Deduplicates `data` while preserving the original ordering.
///
/// We want compile errors and warnings to retain their ordering, since typically
/// they are grouped by relevance. Stdlib `Vec::dedup` only removes *consecutive*
/// duplicates (it assumes sorted data), and collecting into a hash set would
/// destroy the order. Instead, each item is kept only on its first occurrence,
/// tracked via a `HashSet`, which keeps the pass at O(n) expected time.
/// (The previous comment claimed a brute-force vector scan; the implementation
/// has used a `HashSet` for the "seen" check.)
fn dedup_unsorted<T: Eq + std::hash::Hash + Clone>(mut data: Vec<T>) -> Vec<T> {
    use std::collections::HashSet;
    let mut seen = HashSet::new();
    // `insert` returns `false` for items already seen, so `retain` drops
    // every occurrence after the first.
    data.retain(|item| seen.insert(item.clone()));
    data
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/formatting.rs | sway-error/src/formatting.rs | //! This module contains various helper functions for easier formatting and creation of user-friendly messages.
use std::{
borrow::Cow,
cmp::{self, Ordering},
fmt::{self, Display},
};
use sway_types::{SourceEngine, SourceId, Span};
use crate::diagnostic::Hint;
/// Returns the file name (with extension) for the provided `source_id`,
/// or `None` if the `source_id` is `None` or the file name cannot be
/// obtained from the `source_engine`.
pub fn get_file_name(source_engine: &SourceEngine, source_id: Option<&SourceId>) -> Option<String> {
    source_id.and_then(|source_id| source_engine.get_file_name(source_id))
}
/// Returns reading-friendly textual representation for `num` smaller than or equal to 10
/// ("zero", "one", ..., "ten"), or its numeric representation if it is greater than 10.
pub fn num_to_str(num: usize) -> String {
    // Spelled-out words for 0..=10; anything past the table falls back to digits.
    const WORDS: [&str; 11] = [
        "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten",
    ];
    match WORDS.get(num) {
        Some(word) => (*word).to_string(),
        None => num.to_string(),
    }
}
/// Returns reading-friendly textual representation for `num` smaller than or equal to 10
/// or its numeric representation if it is greater than 10.
///
/// Unlike [num_to_str], zero is rendered as "none".
pub fn num_to_str_or_none(num: usize) -> String {
    match num {
        0 => "none".to_string(),
        _ => num_to_str(num),
    }
}
/// Characters used to enclose each item when rendering sequences,
/// e.g., in [sequence_to_str].
pub enum Enclosing {
    /// Items are rendered without any enclosing characters.
    #[allow(dead_code)]
    None,
    /// Items are enclosed in double quotes: `"item"`.
    DoubleQuote,
}
impl Display for Enclosing {
    /// Writes the enclosing character itself (nothing for `None`,
    /// a double quote for `DoubleQuote`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let symbol = match self {
            Self::None => "",
            Self::DoubleQuote => "\"",
        };
        f.write_str(symbol)
    }
}
/// Indentation prefix used when rendering list items,
/// e.g., in [sequence_to_list].
pub enum Indent {
    /// No indentation.
    #[allow(dead_code)]
    None,
    /// Four spaces of indentation.
    Single,
    /// Eight spaces of indentation.
    Double,
}
impl Display for Indent {
    /// Writes the indentation as literal spaces (0, 4, or 8).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let spaces = match self {
            Self::None => "",
            Self::Single => "    ",
            Self::Double => "        ",
        };
        f.write_str(spaces)
    }
}
/// Returns reading-friendly textual representation of the `sequence`, with comma-separated
/// items and each item optionally enclosed in the specified `enclosing`.
/// If the sequence has more than `max_items` the remaining items are replaced
/// with the text "and <number> more".
///
/// E.g.:
/// - \[a\] => "a"
/// - \[a, b\] => "a" and "b"
/// - \[a, b, c\] => "a", "b", and "c"
/// - \[a, b, c, d\] => "a", "b", "c", and one more
/// - \[a, b, c, d, e\] => "a", "b", "c", and two more
///
/// (Note: for three or more shown items a comma precedes the "and", matching
/// the `"{}, {} {}"` format used in the implementation.)
///
/// Panics if the `sequence` is empty, or `max_items` is zero.
pub fn sequence_to_str<T>(sequence: &[T], enclosing: Enclosing, max_items: usize) -> String
where
    T: Display,
{
    sequence_to_str_impl(sequence, enclosing, max_items, "and")
}
/// Returns reading-friendly textual representation of the `sequence`, with comma-separated
/// items and each item optionally enclosed in the specified `enclosing`.
/// If the sequence has more than `max_items` the remaining items are replaced
/// with the text "or <number> more".
///
/// E.g.:
/// - \[a\] => "a"
/// - \[a, b\] => "a" or "b"
/// - \[a, b, c\] => "a", "b", or "c"
/// - \[a, b, c, d\] => "a", "b", "c", or one more
/// - \[a, b, c, d, e\] => "a", "b", "c", or two more
///
/// (Note: for three or more shown items a comma precedes the "or", matching
/// the `"{}, {} {}"` format used in the implementation.)
///
/// Panics if the `sequence` is empty, or `max_items` is zero.
pub fn sequence_to_str_or<T>(sequence: &[T], enclosing: Enclosing, max_items: usize) -> String
where
    T: Display,
{
    sequence_to_str_impl(sequence, enclosing, max_items, "or")
}
/// Shared implementation for [sequence_to_str] and [sequence_to_str_or];
/// `and_or` is the conjunction inserted before the last shown item
/// ("and" or "or").
///
/// Panics if the `sequence` is empty, or `max_items` is zero.
fn sequence_to_str_impl<T>(
    sequence: &[T],
    enclosing: Enclosing,
    max_items: usize,
    and_or: &str,
) -> String
where
    T: Display,
{
    assert!(
        !sequence.is_empty(),
        "Sequence to display must not be empty."
    );
    assert!(
        max_items > 0,
        "Maximum number of items to display must be greater than zero."
    );
    // Never show more items than the sequence actually has.
    let max_items = cmp::min(max_items, sequence.len());
    let (to_display, remaining) = sequence.split_at(max_items);
    // Renders one item wrapped in the chosen enclosing characters.
    let fmt_item = |item: &T| format!("{enclosing}{item}{enclosing}");
    if !remaining.is_empty() {
        // Some items are cut off: `"a", "b", "c", and two more`.
        format!(
            "{}, {} {} more",
            to_display
                .iter()
                .map(fmt_item)
                .collect::<Vec<_>>()
                .join(", "),
            and_or,
            num_to_str(remaining.len())
        )
    } else {
        match to_display {
            [] => unreachable!("There must be at least one item in the sequence."),
            // A single item is shown as-is, without a conjunction.
            [item] => fmt_item(item),
            // Exactly two items are joined by the conjunction only: `"a" and "b"`.
            [first_item, second_item] => {
                format!(
                    "{} {} {}",
                    fmt_item(first_item),
                    and_or,
                    fmt_item(second_item)
                )
            }
            // Three or more: comma-separated prefix, then the conjunction
            // before the last item: `"a", "b", and "c"`.
            _ => format!(
                "{}, {} {}",
                to_display
                    .split_last()
                    .unwrap()
                    .1
                    .iter()
                    .map(fmt_item)
                    .collect::<Vec::<_>>()
                    .join(", "),
                and_or,
                fmt_item(to_display.last().unwrap())
            ),
        }
    }
}
/// Returns reading-friendly textual representation of the `sequence`, rendered
/// as a vertical list in which every item is indented by `indent` and preceded
/// by a dash (-). If the sequence has more than `max_items` the remaining
/// items are replaced by a single "- and <number> more" entry.
///
/// E.g.:
/// * \[a, b, c, d, e\] with `max_items` == 2 =>
///   - a
///   - b
///   - and three more
///
/// Panics if the `sequence` is empty, or `max_items` is zero.
pub fn sequence_to_list<T>(sequence: &[T], indent: Indent, max_items: usize) -> Vec<String>
where
    T: Display,
{
    assert!(
        !sequence.is_empty(),
        "Sequence to display must not be empty."
    );
    assert!(
        max_items > 0,
        "Maximum number of items to display must be greater than zero."
    );
    let num_shown = cmp::min(max_items, sequence.len());
    let (shown, hidden) = sequence.split_at(num_shown);
    let mut result: Vec<String> = shown
        .iter()
        .map(|item| format!("{indent}- {item}"))
        .collect();
    if !hidden.is_empty() {
        result.push(format!("{indent}- and {} more", num_to_str(hidden.len())));
    }
    result
}
/// Returns "s" if `count` is different than 1, otherwise empty string.
/// Convenient for building simple plural of words.
pub fn plural_s(count: usize) -> &'static str {
    match count {
        1 => "",
        _ => "s",
    }
}
/// Returns "is" if `count` is 1, otherwise "are".
pub fn is_are(count: usize) -> &'static str {
    match count {
        1 => "is",
        _ => "are",
    }
}
/// Returns `singular` if `count` is 1, otherwise `plural`.
pub fn singular_plural<'a>(count: usize, singular: &'a str, plural: &'a str) -> &'a str {
    match count {
        1 => singular,
        _ => plural,
    }
}
/// Returns the short name of a type or function represented by the `full_name`.
/// Convenient for subsequent showing only the short name of a full name that was
/// already shown.
///
/// The `full_name` is expected to be a call path with or without generic parameters,
/// eventually prefixed with `&`s or `&mut`s for types.
///
/// E.g.:
/// - `SomeType<T>` -> `SomeType`
/// - `std::ops::Eq` -> `Eq`
/// - `&&mut some_lib::Struct<&A, &mut B>` -> `&&mut Struct`
/// - `some_lib::fns::some_function<A, B>` -> `some_function`
pub fn short_name(full_name: &str) -> String {
    // Strip the leading `&`s and `mut `s, remembering where the bare name begins.
    let mut bare = full_name;
    loop {
        if let Some(stripped) = bare.strip_prefix('&') {
            bare = stripped;
        } else if let Some(stripped) = bare.strip_prefix("mut ") {
            bare = stripped;
        } else {
            break;
        }
    }
    // The references prefix occupies exactly the bytes removed above.
    let refs = &full_name[..full_name.len() - bare.len()];
    // Drop the generic arguments, if any.
    let without_generics = match bare.find('<') {
        Some(lt_index) => &bare[..lt_index],
        None => bare,
    };
    // Keep only the last path segment, unless the name ends in `:`.
    let short = match without_generics.rfind(':') {
        Some(colon_index) if colon_index + 1 < without_generics.len() => {
            &without_generics[colon_index + 1..]
        }
        _ => without_generics,
    };
    format!("{refs}{short}")
}
/// Returns the indefinite article "a" or "an" that corresponds to the `word`,
/// or an empty string if no indefinite article fits the word.
///
/// Note that the function does not recognize plurals and assumes that the
/// `word` is in singular.
///
/// If an article is returned, it is followed by a space, e.g. "a ".
pub fn a_or_an<S: AsRef<str> + ?Sized>(word: &S) -> &'static str {
    match in_definite::is_an(word.as_ref()) {
        in_definite::Is::An => "an ",
        in_definite::Is::A => "a ",
        in_definite::Is::None => "",
    }
}
/// Returns the ordinal suffix for the given `num`.
/// Convenient for building ordinal numbers like "1st", "2nd", "3rd", "4th", etc.
pub fn ord_num_suffix(num: usize) -> &'static str {
    // 11, 12, and 13 (and 111, 112, ... etc.) are always "th".
    if (11..=13).contains(&(num % 100)) {
        return "th";
    }
    match num % 10 {
        1 => "st",
        2 => "nd", // typos:ignore
        3 => "rd",
        _ => "th",
    }
}
/// Returns `text` with the first character turned into ASCII uppercase.
///
/// Only an ASCII lowercase first character is changed; any other first
/// character (already uppercase, a digit, a symbol, or a non-ASCII character)
/// leaves the text untouched. In particular, a multi-byte first character no
/// longer causes a panic: the previous implementation sliced `text[0..1]`,
/// which panics when byte 1 is not a character boundary (e.g. for "éclair").
pub fn ascii_sentence_case(text: &String) -> Cow<String> {
    match text.chars().next() {
        Some(first) if first.is_ascii_lowercase() => {
            let mut result = text.clone();
            // Safe: the first character is ASCII, so `0..1` is a char boundary.
            result[0..1].make_ascii_uppercase();
            Cow::Owned(result)
        }
        // Empty text, or a first character that needs no (ASCII) uppercasing.
        _ => Cow::Borrowed(text),
    }
}
/// Returns the first line in `text`, up to the first `\n` if the `text` contains
/// multiple lines, and optionally adds ellipses "..." to the end of the line
/// if `with_ellipses` is true.
///
/// If the `text` is a single-line string, returns the original `text` borrowed,
/// without allocating.
///
/// Suitable for showing just the first line of a piece of code.
/// E.g., if `text` is:
///   if x {
///       0
///   } else {
///       1
///   }
/// the returned value, with ellipses, will be:
///   if x {...
pub fn first_line(text: &str, with_ellipses: bool) -> Cow<str> {
    // A single `find` both detects the multi-line case and yields the cut
    // point, avoiding the previous redundant `contains` + `find` double scan.
    match text.find('\n') {
        None => Cow::Borrowed(text),
        Some(index_of_new_line) => Cow::Owned(
            text[..index_of_new_line].to_string() + if with_ellipses { "..." } else { "" },
        ),
    }
}
/// Finds strings from an iterable of `possible_values` similar to a given value `v`.
/// Returns a vector of all possible values that exceed a similarity threshold,
/// sorted by similarity (most similar comes first). The returned vector will have
/// at most `max_num_of_suggestions` elements.
///
/// The implementation is taken and adapted from the [Clap project](https://github.com/clap-rs/clap/blob/50f7646cf72dd7d4e76d9284d76bdcdaceb7c049/clap_builder/src/parser/features/suggestions.rs#L11).
pub fn did_you_mean<T, I>(v: &str, possible_values: I, max_num_of_suggestions: usize) -> Vec<String>
where
    T: AsRef<str>,
    I: IntoIterator<Item = T>,
{
    // Score every candidate and keep only those above the similarity threshold.
    // Confidence of 0.7 so that bar -> baz is suggested.
    let mut scored = Vec::new();
    for possible_value in possible_values {
        let candidate = possible_value.as_ref();
        let confidence = strsim::jaro(v, candidate);
        if confidence > 0.7 {
            scored.push((confidence, candidate.to_owned()));
        }
    }
    // Most similar first.
    scored.sort_by(|left, right| right.0.partial_cmp(&left.0).unwrap_or(Ordering::Equal));
    scored.truncate(max_num_of_suggestions);
    scored.into_iter().map(|(_, suggestion)| suggestion).collect()
}
/// Returns a single line "Did you mean" [Hint::help]. E.g.: Did you mean "this" or "that"?
///
/// The input value is taken from the `span` and the help hint is positioned at that `span`.
/// Each suggestion is enclosed in `enclosing`.
pub fn did_you_mean_help<T, I>(
    source_engine: &SourceEngine,
    span: Span,
    possible_values: I,
    max_num_of_suggestions: usize,
    enclosing: Enclosing,
) -> Hint
where
    T: AsRef<str>,
    I: IntoIterator<Item = T>,
{
    let suggestions = did_you_mean(span.as_str(), possible_values, max_num_of_suggestions);
    if suggestions.is_empty() {
        return Hint::none();
    }
    let text = format!(
        "Did you mean {}?",
        sequence_to_str_or(&suggestions, enclosing, max_num_of_suggestions)
    );
    Hint::help(source_engine, span, text)
}
// `#[cfg(test)]` keeps the unit-test module out of non-test builds entirely;
// previously the module was compiled unconditionally.
#[cfg(test)]
mod test {
    #[test]
    fn test_short_name() {
        use super::short_name;
        let test = |full_name: &str, expected: &str| {
            let short_name = short_name(full_name);
            assert_eq!(short_name, expected, "Full name: {full_name}.");
        };
        test("SomeType", "SomeType");
        test("&SomeType", "&SomeType");
        test("&&&SomeType", "&&&SomeType");
        test("&mut &&mut SomeType", "&mut &&mut SomeType");
        test("&&&mut &mut SomeType", "&&&mut &mut SomeType");
        test("SomeType<T>", "SomeType");
        test("&SomeType<&T>", "&SomeType");
        test("&&&SomeType<&&&T>", "&&&SomeType");
        test("&mut &&mut SomeType<&mut &&mut T>", "&mut &&mut SomeType");
        test(
            "&&&mut &mut SomeType<&&&mut &mut T>",
            "&&&mut &mut SomeType",
        );
        test("std::ops::Eq", "Eq");
        test("some_lib::Struct<A, B>", "Struct");
        test("&&mut some_lib::Struct<&A, &mut B>", "&&mut Struct");
        test(
            "some_lib::Struct<some::other::lib::A, some::other::lib::B>",
            "Struct",
        );
        test(
            "&&&mut some_lib::Struct<some::other::lib::A, some::other::lib::B>",
            "&&&mut Struct",
        );
        test(
            "some_lib::fn::function<some::other::lib::A<T1, T2>, some::other::lib::B<T3>>",
            "function",
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/parser_error.rs | sway-error/src/parser_error.rs | use sway_types::ast::PunctKind;
use sway_types::{Ident, Span};
use thiserror::Error;
#[derive(Debug, Error, Clone, PartialEq, Eq, Hash)]
pub enum ParseErrorKind {
#[error("Expected an import name, group of imports, or `*`.")]
ExpectedImportNameGroupOrGlob,
#[error("Expected an item.")]
ExpectedAnItem,
#[error("Expected {} element.",
if *is_only_documented {
"a documented"
} else {
"an annotated"
}
)]
ExpectedAnAnnotatedElement {
/// True if the element is only documented with
/// doc comments but without any other
/// inner or outer attributes in the annotations.
is_only_documented: bool,
},
#[error("Expected an inner doc comment (`//!`) to be at the top of the module file.")]
ExpectedInnerDocCommentAtTheTopOfFile,
#[error("Expected a comma or closing parenthesis in function arguments.")]
ExpectedCommaOrCloseParenInFnArgs,
#[error("Unknown assembly instruction.")]
UnrecognizedOpCode {
known_op_codes: &'static [&'static str],
},
#[error("Unexpected token in statement.")]
UnexpectedTokenInStatement,
#[error("This expression cannot be assigned to.")]
UnassignableExpression {
/// The friendly name of the kind of the expression
/// that makes the overall expression unassignable.
/// E.g., "function call", or "struct instantiation".
erroneous_expression_kind: &'static str,
/// [Span] that points to either the whole left-hand
/// side of the reassignment, or to a [Span] of an
/// erroneous nested expression, if only a part of
/// the assignment target expression is erroneous.
erroneous_expression_span: Span,
},
#[error("Unexpected token after array index.")]
UnexpectedTokenAfterArrayIndex,
#[error("Invalid literal to use as a field name.")]
InvalidLiteralFieldName,
#[error("Invalid statement.")]
InvalidStatement,
#[error("Invalid item.")]
InvalidItem,
#[error("Integer field names cannot have type suffixes.")]
IntFieldWithTypeSuffix,
#[error("Expected a field name.")]
ExpectedFieldName,
#[error("Expected a comma or closing parenthesis in this tuple or parenthesized expression.")]
ExpectedCommaOrCloseParenInTupleOrParenExpression,
#[error("Expected an expression.")]
ExpectedExpression,
#[error("Unexpected token after array length.")]
UnexpectedTokenAfterArrayLength,
#[error("Expected a comma, semicolon or closing bracket when parsing this array.")]
ExpectedCommaSemicolonOrCloseBracketInArray,
#[error("Unexpected token after asm return type.")]
UnexpectedTokenAfterAsmReturnType,
#[error("Malformed asm immediate value.")]
MalformedAsmImmediate,
#[error("Expected an identifier.")]
ExpectedIdent,
#[error("Expected an pattern.")]
ExpectedPattern,
#[error("Unexpected token after str length.")]
UnexpectedTokenAfterStrLength,
#[error("Expected a type.")]
ExpectedType,
#[error("Unexpected token after array type length.")]
UnexpectedTokenAfterArrayTypeLength,
#[error("Expected an opening brace.")]
ExpectedOpenBrace,
#[error("Expected an opening parenthesis.")]
ExpectedOpenParen,
#[error("Expected an opening square bracket.")]
ExpectedOpenBracket,
#[error("Expected a literal.")]
ExpectedLiteral,
#[error("Expected a module kind (script, contract, predicate, or library).")]
ExpectedModuleKind,
#[error("Expected `{}`.", kinds.iter().map(PunctKind::as_char).collect::<String>())]
ExpectedPunct { kinds: Vec<PunctKind> },
#[error("Expected `{}`.", word)]
ExpectedKeyword { word: &'static str },
#[error("Unexpected token after abi address.")]
UnexpectedTokenAfterAbiAddress,
#[error("Expected an attribute.")]
ExpectedAnAttribute,
#[error("Unexpected token after an attribute.")]
UnexpectedTokenAfterAttribute,
#[error("Identifiers cannot begin with a double underscore, as that naming convention is reserved for compiler intrinsics.")]
InvalidDoubleUnderscore,
#[error("Unexpected rest token, must be at the end of pattern.")]
UnexpectedRestPattern,
#[error("Identifier cannot be a reserved keyword.")]
ReservedKeywordIdentifier,
#[error("Unnecessary visibility qualifier, `{}` is implied here.", visibility)]
UnnecessaryVisibilityQualifier { visibility: Ident },
#[error("Expected a doc comment.")]
ExpectedDocComment,
#[error("Use the `struct` keyword to define records, instead of `class`.")]
UnexpectedClass,
#[error("Field projections, e.g., `foo.bar` cannot have type arguments.")]
FieldProjectionWithGenericArgs,
#[error("Unexpected token after __ptr type.")]
UnexpectedTokenAfterPtrType,
#[error("Unexpected token after __slice type.")]
UnexpectedTokenAfterSliceType,
#[error("Expected a path type.")]
ExpectedPathType,
#[error("Expected ':'. Enum variants must be in the form `Variant: ()`, `Variant: <type>`, or `Variant: (<type1>, ..., <typeN>)`. E.g., `Foo: (), or `Bar: (bool, u32)`.")]
MissingColonInEnumTypeField {
variant_name: Ident,
tuple_contents: Option<Span>,
},
#[error("Expected storage key of type U256.")]
ExpectedStorageKeyU256,
}
/// A parser error: the [ParseErrorKind] describing what went wrong,
/// located at `span` in the source code. Displays as the kind's message.
#[derive(Debug, Error, Clone, PartialEq, Eq, Hash)]
#[error("{}", kind)]
pub struct ParseError {
    pub span: Span,
    pub kind: ParseErrorKind,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-error/src/diagnostic.rs | sway-error/src/diagnostic.rs | use std::{path::PathBuf, vec};
use sway_types::{SourceEngine, Span};
/// Provides detailed, rich description of a compile error or warning.
#[derive(Debug, Default)]
pub struct Diagnostic {
    pub reason: Option<Reason>, // TODO: Make mandatory once we remove all old-style warnings and errors.
    /// The main label describing the error or warning itself.
    pub issue: Issue,
    /// Additional labels, potentially pointing to other places in source code.
    pub hints: Vec<Hint>,
    /// Free-standing help lines shown in the diagnostic's footer.
    pub help: Vec<String>,
}
impl Diagnostic {
    /// For backward compatibility purposes. True if the diagnostic
    /// was defined before the detailed diagnostics were introduced.
    /// An old-style diagnostic contains just the issue.
    pub fn is_old_style(&self) -> bool {
        self.reason.is_none() && self.hints.is_empty() && self.help.is_empty()
    }
    /// The overall severity of this diagnostic, derived from the issue's
    /// label type. The issue can never have a `Help` label type; this is
    /// enforced by the [Issue] constructors.
    pub fn level(&self) -> Level {
        match self.issue.label_type {
            LabelType::Error => Level::Error,
            LabelType::Warning => Level::Warning,
            LabelType::Info => Level::Info,
            _ => unreachable!("The diagnostic level can be only Error, Warning, or Info, and this is enforced via Diagnostics API.")
        }
    }
    pub fn reason(&self) -> Option<&Reason> {
        self.reason.as_ref()
    }
    pub fn issue(&self) -> &Issue {
        &self.issue
    }
    /// All the labels, potentially in different source files.
    /// The issue's label, if it is in source, comes first,
    /// followed by the in-source hints in their original order.
    pub fn labels(&self) -> Vec<&Label> {
        let mut labels = Vec::<&Label>::new();
        if self.issue.is_in_source() {
            labels.push(&self.issue);
        }
        for hint in self.hints.iter().filter(|hint| hint.is_in_source()) {
            labels.push(hint);
        }
        labels
    }
    /// All the labels in the source file found at `source_path`.
    pub fn labels_in_source(&self, source_path: &SourcePath) -> Vec<&Label> {
        self.labels()
            .iter()
            // Safe unwrapping because all the labels are in source.
            .filter(|&label| label.source_path().unwrap() == source_path)
            .copied()
            .collect()
    }
    // All the labels that occur in the same source file where the diagnostic issue occurs.
    pub fn labels_in_issue_source(&self) -> Vec<&Label> {
        if !self.issue.is_in_source() {
            return vec![];
        }
        // Safe unwrapping because the issue is in source.
        self.labels_in_source(self.issue.source_path().unwrap())
    }
    /// The non-empty help lines; empty entries (see [Diagnostic::help_none])
    /// are filtered out.
    pub fn help(&self) -> impl Iterator<Item = &String> + '_ {
        self.help.iter().filter(|help| !help.is_empty())
    }
    /// A help text that will never be displayed. Convenient when defining help lines
    /// that are displayed only when a condition is met.
    pub fn help_none() -> String {
        String::new()
    }
    /// Displays an empty line in the help footer.
    /// Convenient when defining visual separations within suggestions.
    pub fn help_empty_line() -> String {
        String::from(" ")
    }
    /// All the source files that are related to the diagnostic.
    /// This means the source file of the issue itself as well
    /// as source files of all the hints.
    pub fn related_sources(&self, include_issue_source: bool) -> Vec<&SourcePath> {
        let mut source_files = vec![];
        let issue_is_in_source = self.issue.is_in_source();
        // All `source_path()` unwrappings are safe because we check the existence
        // of source in case of an issue, and `self.labels()` returns
        // only labels that are in source.
        if issue_is_in_source && include_issue_source {
            source_files.push(self.issue.source_path().unwrap());
        }
        for hint in self.labels() {
            let file = hint.source_path().unwrap();
            // Skip the issue's source file when the caller asked to exclude it.
            if !include_issue_source
                && issue_is_in_source
                && file == self.issue.source_path().unwrap()
            {
                continue;
            }
            // Deduplicate, preserving first-seen order.
            if !source_files.contains(&file) {
                source_files.push(file)
            }
        }
        source_files
    }
}
/// The severity of a [Diagnostic].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Level {
    Info,
    Warning,
    #[default]
    Error,
}
/// The kind of a [Label]. Unlike [Level], it additionally
/// distinguishes `Help` labels.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum LabelType {
    #[default]
    Info,
    Help,
    Warning,
    Error,
}
/// Diagnostic message related to a span of source code in a source file.
///
/// If the message in a particular situation cannot be related to a span
/// in a known source file (e.g., when importing symbols)
/// the span must be set to [Span::dummy]. Such messages without a valid span
/// will be ignored.
///
/// E.g., a note like 'The function "{name}" is defined here.'
/// will be displayed only when we have access to the source
/// code in which the function is defined.
///
/// We can also have error messages that are not related to any particular
/// place in code.
#[derive(Debug)]
pub struct Label {
    label_type: LabelType,
    span: Span,
    text: String,
    /// `None` if the `span` does not point into a known source file.
    source_path: Option<SourcePath>,
}
impl Label {
    pub fn info(source_engine: &SourceEngine, span: Span, text: String) -> Label {
        Self::new(source_engine, LabelType::Info, span, text)
    }

    pub fn help(source_engine: &SourceEngine, span: Span, text: String) -> Label {
        Self::new(source_engine, LabelType::Help, span, text)
    }

    pub fn warning(source_engine: &SourceEngine, span: Span, text: String) -> Label {
        Self::new(source_engine, LabelType::Warning, span, text)
    }

    pub fn error(source_engine: &SourceEngine, span: Span, text: String) -> Label {
        Self::new(source_engine, LabelType::Error, span, text)
    }

    fn new(source_engine: &SourceEngine, label_type: LabelType, span: Span, text: String) -> Label {
        Label {
            // Resolve the source file before `span` is moved into the label.
            source_path: Self::get_source_path(source_engine, &span),
            label_type,
            span,
            text,
        }
    }

    /// True if the `Label` is actually related to a span of source code in a source file.
    pub fn is_in_source(&self) -> bool {
        self.source_path.is_some() && self.span.start() < self.span.end()
    }

    pub fn label_type(&self) -> LabelType {
        self.label_type
    }

    pub fn span(&self) -> &Span {
        &self.span
    }

    pub fn text(&self) -> &str {
        self.text.as_str()
    }

    pub fn source_path(&self) -> Option<&SourcePath> {
        self.source_path.as_ref()
    }

    /// Resolves the [SourcePath] of `span`, or `None` if the span
    /// has no source id.
    fn get_source_path(source_engine: &SourceEngine, span: &Span) -> Option<SourcePath> {
        let source_id = span.source_id().cloned()?;
        let path_buf = source_engine.get_path(&source_id);
        let path_string = path_buf.to_string_lossy().to_string();
        Some(SourcePath {
            path_buf,
            path_string,
        })
    }
}
impl Default for Label {
fn default() -> Self {
Self {
label_type: LabelType::Info,
span: Span::dummy(),
text: "".to_string(),
source_path: None,
}
}
}
/// The main [Label] of a [Diagnostic], describing the actual error or warning.
#[derive(Debug)]
pub struct Issue {
    label: Label,
}
impl Issue {
pub fn warning(source_engine: &SourceEngine, span: Span, text: String) -> Self {
Self {
label: Label::warning(source_engine, span, text),
}
}
pub fn error(source_engine: &SourceEngine, span: Span, text: String) -> Self {
Self {
label: Label::error(source_engine, span, text),
}
}
pub fn info(source_engine: &SourceEngine, span: Span, text: String) -> Self {
Self {
label: Label::info(source_engine, span, text),
}
}
}
impl Default for Issue {
fn default() -> Self {
Self {
label: Label {
label_type: LabelType::Error,
..Default::default()
},
}
}
}
impl std::ops::Deref for Issue {
    type Target = Label;
    // An `Issue` is just a constrained `Label`; expose the label's API directly.
    fn deref(&self) -> &Self::Target {
        &self.label
    }
}
/// An additional [Label] of a [Diagnostic], pointing to related places in code.
#[derive(Debug, Default)]
pub struct Hint {
    label: Label,
}
impl Hint {
    pub fn info(source_engine: &SourceEngine, span: Span, text: String) -> Self {
        Self {
            label: Label::info(source_engine, span, text),
        }
    }

    pub fn underscored_info(source_engine: &SourceEngine, span: Span) -> Self {
        Self::info(source_engine, span, "".to_string())
    }

    pub fn multi_info(source_engine: &SourceEngine, span: &Span, hints: Vec<String>) -> Vec<Self> {
        Self::multi(source_engine, span, hints, Self::info)
    }

    pub fn help(source_engine: &SourceEngine, span: Span, text: String) -> Self {
        Self {
            label: Label::help(source_engine, span, text),
        }
    }

    pub fn multi_help(source_engine: &SourceEngine, span: &Span, hints: Vec<String>) -> Vec<Self> {
        Self::multi(source_engine, span, hints, Self::help)
    }

    pub fn warning(source_engine: &SourceEngine, span: Span, text: String) -> Self {
        Self {
            label: Label::warning(source_engine, span, text),
        }
    }

    pub fn multi_warning(
        source_engine: &SourceEngine,
        span: &Span,
        hints: Vec<String>,
    ) -> Vec<Self> {
        Self::multi(source_engine, span, hints, Self::warning)
    }

    pub fn error(source_engine: &SourceEngine, span: Span, text: String) -> Self {
        Self {
            label: Label::error(source_engine, span, text),
        }
    }

    pub fn multi_error(source_engine: &SourceEngine, span: &Span, hints: Vec<String>) -> Vec<Self> {
        Self::multi(source_engine, span, hints, Self::error)
    }

    /// A [Hint] that will never be displayed. Convenient when defining [Hint]s that
    /// are displayed only if a condition is met.
    pub fn none() -> Self {
        Self {
            label: Label::default(),
        }
    }

    /// Builds one [Hint] per text in `hints`, all pointing at the same `span`,
    /// using the given constructor. Shared by all the `multi_*` methods above,
    /// which previously duplicated this body.
    fn multi(
        source_engine: &SourceEngine,
        span: &Span,
        hints: Vec<String>,
        new_hint: fn(&SourceEngine, Span, String) -> Self,
    ) -> Vec<Self> {
        hints
            .into_iter()
            .map(|hint| new_hint(source_engine, span.clone(), hint))
            .collect()
    }
}
impl std::ops::Deref for Hint {
    type Target = Label;
    // A `Hint` is just a constrained `Label`; expose the label's API directly.
    fn deref(&self) -> &Self::Target {
        &self.label
    }
}
/// A path to a source file, kept both as a [PathBuf]
/// and as its string rendering.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct SourcePath {
    path_buf: PathBuf,
    path_string: String,
}
impl SourcePath {
    /// The path as a filesystem path.
    pub fn as_path_buf(&self) -> &PathBuf {
        &self.path_buf
    }

    /// The path rendered as a string.
    pub fn as_str(&self) -> &str {
        self.path_string.as_str()
    }
}
/// Describes the different areas that we have in the
/// sway-error crate. It allows grouping of diagnostics
/// and ensuring that we have unique diagnostic code
/// numbers in each of the groups.
///
/// Each area maps to a unique code prefix; see [DiagnosticArea::prefix].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum DiagnosticArea {
    #[default]
    LexicalAnalysis,
    Parsing,
    ParseTreeConversion,
    TypeChecking,
    SemanticAnalysis,
    Warnings,
    Migrations,
}
impl DiagnosticArea {
pub fn prefix(&self) -> &'static str {
match self {
Self::LexicalAnalysis => "E0",
Self::Parsing => "E1",
Self::ParseTreeConversion => "E2",
Self::TypeChecking => "E3",
Self::SemanticAnalysis => "E4",
Self::Warnings => "W0",
Self::Migrations => "M0",
}
}
}
/// A diagnostic code: the [DiagnosticArea]'s prefix followed by a
/// three-digit `number` unique within the area, e.g. "E4001".
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Code {
    area: DiagnosticArea,
    /// The code number within the `area`; must be between 1 and 998.
    number: u16,
    /// The precomputed textual form, e.g. "W0042".
    text: String,
}
impl Code {
    pub fn lexical_analysis(number: u16) -> Code {
        Self::new(DiagnosticArea::LexicalAnalysis, number)
    }

    pub fn parsing(number: u16) -> Code {
        Self::new(DiagnosticArea::Parsing, number)
    }

    pub fn parse_tree_conversion(number: u16) -> Code {
        Self::new(DiagnosticArea::ParseTreeConversion, number)
    }

    pub fn type_checking(number: u16) -> Code {
        Self::new(DiagnosticArea::TypeChecking, number)
    }

    // Return type spelled `Code` for consistency with the sibling constructors
    // (was `Self`, which is the same type).
    pub fn semantic_analysis(number: u16) -> Code {
        Self::new(DiagnosticArea::SemanticAnalysis, number)
    }

    pub fn warnings(number: u16) -> Code {
        Self::new(DiagnosticArea::Warnings, number)
    }

    pub fn migrations(number: u16) -> Code {
        Self::new(DiagnosticArea::Migrations, number)
    }

    /// Creates a [Code] whose text is the area prefix followed by the
    /// zero-padded `number`, e.g. "E4042".
    fn new(area: DiagnosticArea, number: u16) -> Self {
        // Typos fixed in the message: "then" -> "than".
        debug_assert!(
            0 < number && number < 999,
            "The diagnostic code number must be greater than zero and smaller than 999."
        );
        Self {
            area,
            number,
            text: format!("{}{:03}", area.prefix(), number),
        }
    }

    pub fn as_str(&self) -> &str {
        self.text.as_ref()
    }
}
/// The reason behind a [Diagnostic]: its unique [Code]
/// and a human-readable description.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Reason {
    code: Code,
    description: String,
}
impl Reason {
pub fn new(code: Code, description: String) -> Self {
Self { code, description }
}
pub fn code(&self) -> &str {
self.code.as_str()
}
pub fn description(&self) -> &str {
self.description.as_ref()
}
}
/// Conversion of errors and warnings into rich [Diagnostic]s.
pub trait ToDiagnostic {
    fn to_diagnostic(&self, source_engine: &SourceEngine) -> Diagnostic;
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/attribute.rs | sway-ast/src/attribute.rs | use crate::priv_prelude::*;
/// An element of type `T` together with the attributes annotating it.
#[derive(Clone, Debug, Serialize)]
pub struct Annotated<T> {
    pub attributes: Vec<AttributeDecl>,
    pub value: T,
}
// Storage access and purity.
pub const STORAGE_ATTRIBUTE_NAME: &str = "storage";
pub const STORAGE_READ_ARG_NAME: &str = "read";
pub const STORAGE_WRITE_ARG_NAME: &str = "write";
// Function inlining.
pub const INLINE_ATTRIBUTE_NAME: &str = "inline";
pub const INLINE_NEVER_ARG_NAME: &str = "never";
pub const INLINE_ALWAYS_ARG_NAME: &str = "always";
// Payable functions.
pub const PAYABLE_ATTRIBUTE_NAME: &str = "payable";
// Fallback functions.
pub const FALLBACK_ATTRIBUTE_NAME: &str = "fallback";
// Documentation comments.
// Note that because "doc-comment" is not a valid identifier,
// doc-comment attributes cannot be declared in code.
// They are exclusively created by the compiler to denote
// doc comments, `///` and `//!`.
pub const DOC_COMMENT_ATTRIBUTE_NAME: &str = "doc-comment";
// In-language unit testing.
pub const TEST_ATTRIBUTE_NAME: &str = "test";
pub const TEST_SHOULD_REVERT_ARG_NAME: &str = "should_revert";
// Allow warnings.
pub const ALLOW_ATTRIBUTE_NAME: &str = "allow";
pub const ALLOW_DEAD_CODE_ARG_NAME: &str = "dead_code";
pub const ALLOW_DEPRECATED_ARG_NAME: &str = "deprecated";
// Conditional compilation.
pub const CFG_ATTRIBUTE_NAME: &str = "cfg";
pub const CFG_TARGET_ARG_NAME: &str = "target";
pub const CFG_PROGRAM_TYPE_ARG_NAME: &str = "program_type";
// Deprecation.
pub const DEPRECATED_ATTRIBUTE_NAME: &str = "deprecated";
pub const DEPRECATED_NOTE_ARG_NAME: &str = "note";
// Error types.
pub const ERROR_TYPE_ATTRIBUTE_NAME: &str = "error_type";
pub const ERROR_ATTRIBUTE_NAME: &str = "error";
pub const ERROR_M_ARG_NAME: &str = "m";
// Backtracing.
pub const TRACE_ATTRIBUTE_NAME: &str = "trace";
pub const TRACE_NEVER_ARG_NAME: &str = "never";
pub const TRACE_ALWAYS_ARG_NAME: &str = "always";
// Abi names.
pub const ABI_NAME_ATTRIBUTE_NAME: &str = "abi_name";
pub const ABI_NAME_NAME_ARG_NAME: &str = "name";
// Events and indexing.
pub const EVENT_ATTRIBUTE_NAME: &str = "event";
pub const INDEXED_ATTRIBUTE_NAME: &str = "indexed";
// NOTE(review): the `error_type`, `error`, and `trace` attribute names declared
// above are not listed here — presumably intentional (e.g., gated features),
// but worth confirming.
pub const KNOWN_ATTRIBUTE_NAMES: &[&str] = &[
    STORAGE_ATTRIBUTE_NAME,
    DOC_COMMENT_ATTRIBUTE_NAME,
    TEST_ATTRIBUTE_NAME,
    INLINE_ATTRIBUTE_NAME,
    PAYABLE_ATTRIBUTE_NAME,
    ALLOW_ATTRIBUTE_NAME,
    CFG_ATTRIBUTE_NAME,
    DEPRECATED_ATTRIBUTE_NAME,
    FALLBACK_ATTRIBUTE_NAME,
    ABI_NAME_ATTRIBUTE_NAME,
    EVENT_ATTRIBUTE_NAME,
    INDEXED_ATTRIBUTE_NAME,
];
/// An attribute declaration. Attribute declaration
/// can potentially have an arbitrary number of [Attribute]s.
/// [Attribute]s can potentially have any number of [AttributeArg]s.
/// Each [AttributeArg] can have a value assigned.
///
/// E.g.:
///
/// ```ignore
/// #[attribute]
/// #[attribute_1, attribute_2]
/// #[attribute()]
/// #[attribute(arg)]
/// #[attribute(arg_1, arg_2)]
/// #[attribute(arg_1 = "value", arg_2 = true)]
/// #[attribute_1, attribute_2(arg_1), attribute_3(arg_1, arg_2 = true)]
/// ```
///
/// [AttributeDecl]s can be _inner_ or _outer_, as explained in [AttributeHashKind].
// TODO: Currently, inner attributes are supported only on module doc comments,
// those starting with `//!`.
// See: https://github.com/FuelLabs/sway/issues/6924
#[derive(Clone, Debug, Serialize)]
pub struct AttributeDecl {
    /// Whether this is an inner (`#!`) or outer (`#`) attribute declaration.
    pub hash_kind: AttributeHashKind,
    /// The comma-separated [Attribute]s within the square brackets.
    pub attribute: SquareBrackets<Punctuated<Attribute, CommaToken>>,
}
impl AttributeDecl {
    /// Creates the `doc-comment` [AttributeDecl] for a single line of an outer comment. E.g.:
    /// ```ignore
    /// /// This is an outer comment.
    /// ```
    /// The `span` is the overall span: `/// This is an outer comment.`.
    /// The `content_span` is the span of the content, without the leading `///`: ` This is an outer comment.`.
    pub fn new_outer_doc_comment(span: Span, content_span: Span) -> Self {
        Self::new_doc_comment(
            span.clone(),
            content_span,
            AttributeHashKind::Outer(HashToken::new(span)),
        )
    }
    /// Creates the `doc-comment` [AttributeDecl] for a single line of an inner comment. E.g.:
    /// ```ignore
    /// //! This is an inner comment.
    /// ```
    /// The `span` is the overall span: `//! This is an inner comment.`.
    /// The `content_span` is the span of the content, without the leading `//!`: ` This is an inner comment.`.
    pub fn new_inner_doc_comment(span: Span, content_span: Span) -> Self {
        Self::new_doc_comment(
            span.clone(),
            content_span,
            AttributeHashKind::Inner(HashBangToken::new(span)),
        )
    }
    /// Builds a `doc-comment` attribute declaration whose single attribute has
    /// one argument carrying the comment line's content as its name.
    fn new_doc_comment(span: Span, content_span: Span, hash_kind: AttributeHashKind) -> Self {
        // TODO: Store the comment line in an argument value as
        // discussed in https://github.com/FuelLabs/sway/issues/6938.
        let name = Ident::new_no_trim(content_span.clone());
        AttributeDecl {
            hash_kind,
            attribute: SquareBrackets::new(
                Punctuated::single(Attribute {
                    name: Ident::new_with_override(
                        DOC_COMMENT_ATTRIBUTE_NAME.to_string(),
                        span.clone(),
                    ),
                    args: Some(Parens::new(
                        Punctuated::single(AttributeArg { name, value: None }),
                        content_span,
                    )),
                }),
                span,
            ),
        }
    }
    /// `self` is a doc comment, either an inner (`//!`) or outer (`///`).
    pub fn is_doc_comment(&self) -> bool {
        // A doc comment declaration contains exactly one attribute,
        // and that attribute is the `doc-comment` attribute.
        self.attribute.inner.value_separator_pairs.is_empty()
            && self
                .attribute
                .inner
                .final_value_opt
                .as_ref()
                .is_some_and(|attr| attr.is_doc_comment())
    }
    pub fn is_inner(&self) -> bool {
        matches!(self.hash_kind, AttributeHashKind::Inner(_))
    }
}
impl Spanned for AttributeDecl {
    fn span(&self) -> Span {
        // `AttributeHashKind` knows its own span; join it with the
        // span of the bracketed attributes.
        Span::join(self.hash_kind.span(), &self.attribute.span())
    }
}
/// Denotes if an [AttributeDecl] is an _inner_ or _outer_ attribute declaration.
///
/// E.g.:
/// ```ignore
/// /// This is an outer doc comment.
/// /// It annotates the struct `Foo`.
/// struct Foo {}
///
/// // This is an outer attribute.
/// // It annotates the function `fun`.
/// #[inline(always)]
/// fn fun() {}
///
/// //! This is an inner doc comment.
/// //! It annotates the library module.
/// library;
///
/// // This is an inner attribute.
/// // It annotates whichever item it is declared in.
/// #![allow(dead_code)]
/// ```
#[derive(Clone, Debug, Serialize)]
pub enum AttributeHashKind {
    /// Inner specifies that the attribute annotates
    /// the item that the attribute is declared within.
    Inner(HashBangToken),
    /// Outer specifies that the attribute annotates
    /// the item that immediately follows the attribute.
    Outer(HashToken),
}
impl Spanned for AttributeHashKind {
    /// The span of the `#!` or `#` token, respectively.
    fn span(&self) -> Span {
        match self {
            Self::Inner(hash_bang_token) => hash_bang_token.span(),
            Self::Outer(hash_token) => hash_token.span(),
        }
    }
}
/// A single argument of an [Attribute], e.g. `arg` or `arg = "value"`.
#[derive(Clone, Debug, Serialize)]
pub struct AttributeArg {
    pub name: Ident,
    /// The literal assigned to the argument, if any.
    pub value: Option<Literal>,
}
impl Spanned for AttributeArg {
    /// The span of the name alone, or of `name = value` if a value is assigned.
    fn span(&self) -> Span {
        match &self.value {
            Some(value) => Span::join(self.name.span(), &value.span()),
            None => self.name.span(),
        }
    }
}
/// A single attribute within an [AttributeDecl]: a name optionally
/// followed by parenthesized arguments, e.g. `storage(read)`.
#[derive(Clone, Debug, Serialize)]
pub struct Attribute {
    pub name: Ident,
    pub args: Option<Parens<Punctuated<AttributeArg, CommaToken>>>,
}
impl Attribute {
    /// True if this is the compiler-generated `doc-comment` attribute.
    pub fn is_doc_comment(&self) -> bool {
        DOC_COMMENT_ATTRIBUTE_NAME == self.name.as_str()
    }
    /// True if this is the conditional-compilation `cfg` attribute.
    pub fn is_cfg(&self) -> bool {
        CFG_ATTRIBUTE_NAME == self.name.as_str()
    }
}
impl Spanned for Attribute {
    /// The span of the name alone, or of the name joined with the
    /// parenthesized arguments, if present.
    fn span(&self) -> Span {
        match &self.args {
            Some(args) => Span::join(self.name.span(), &args.span()),
            None => self.name.span(),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/path.rs | sway-ast/src/path.rs | use crate::priv_prelude::*;
/// A path in expression position, e.g. `foo::bar::<T>::baz`.
#[derive(Clone, Debug, Serialize)]
pub struct PathExpr {
    // Optional leading root: an optional qualified path root
    // (`<T as Trait>`) together with the leading `::` token.
    pub root_opt: Option<(Option<AngleBrackets<QualifiedPathRoot>>, DoubleColonToken)>,
    // The first path segment.
    pub prefix: PathExprSegment,
    // The remaining segments, each preceded by a `::` token.
    pub suffix: Vec<(DoubleColonToken, PathExprSegment)>,
    // Path expressions with an incomplete suffix are needed to do
    // parser recovery on inputs like `foo::`.
    #[serde(skip_serializing)]
    pub incomplete_suffix: bool,
}
/// A single segment of a [PathExpr], e.g. `bar::<T>` in `foo::bar::<T>`.
#[derive(Clone, Debug, Serialize)]
pub struct PathExprSegment {
    // The segment's name.
    pub name: Ident,
    // Optional turbofish generic arguments: the `::` token
    // followed by the angle-bracketed arguments.
    pub generics_opt: Option<(DoubleColonToken, GenericArgs)>,
}
impl PathExpr {
    /// Returns the final segment of the path: the last suffix segment,
    /// or the prefix when there is no suffix.
    pub fn last_segment(&self) -> &PathExprSegment {
        match self.suffix.last() {
            Some((_, segment)) => segment,
            None => &self.prefix,
        }
    }

    /// Mutable variant of [PathExpr::last_segment].
    pub fn last_segment_mut(&mut self) -> &mut PathExprSegment {
        match self.suffix.last_mut() {
            Some((_, segment)) => segment,
            None => &mut self.prefix,
        }
    }
}
impl Spanned for PathExpr {
    /// Spans from the qualified root / leading `::` (or the prefix, if
    /// there is no root) through the last suffix segment (or the prefix,
    /// if there is no suffix).
    fn span(&self) -> Span {
        let start = if let Some((qualified_path_root_opt, double_colon_token)) = &self.root_opt {
            qualified_path_root_opt
                .as_ref()
                .map(|root| root.span())
                .unwrap_or_else(|| double_colon_token.span())
        } else {
            self.prefix.span()
        };
        let end = self
            .suffix
            .last()
            .map(|(_, segment)| segment.span())
            .unwrap_or_else(|| self.prefix.span());
        Span::join(start, &end)
    }
}
impl PathExpr {
    /// Converts this path into a plain identifier, if it is one: no root,
    /// no suffix, no generic arguments, and no parse-recovery artifacts.
    /// Otherwise, returns the unchanged path as the error value.
    #[allow(clippy::result_large_err)]
    pub fn try_into_ident(self) -> Result<Ident, PathExpr> {
        let is_plain_ident = self.root_opt.is_none()
            && self.suffix.is_empty()
            && self.prefix.generics_opt.is_none()
            && !self.incomplete_suffix;
        if is_plain_ident {
            Ok(self.prefix.name)
        } else {
            Err(self)
        }
    }
}
impl Spanned for PathExprSegment {
    /// Spans the segment name, extended through the generic
    /// arguments when present.
    fn span(&self) -> Span {
        let name_span = self.name.span();
        if let Some((_, generic_args)) = &self.generics_opt {
            Span::join(name_span, &generic_args.span())
        } else {
            name_span
        }
    }
}
/// A path in type position, e.g. `foo::Bar<T>`.
#[derive(Clone, Debug, Serialize)]
pub struct PathType {
    // Optional leading root: an optional qualified path root
    // (`<T as Trait>`) together with the leading `::` token.
    pub root_opt: Option<(Option<AngleBrackets<QualifiedPathRoot>>, DoubleColonToken)>,
    // The first path segment.
    pub prefix: PathTypeSegment,
    // The remaining segments, each preceded by a `::` token.
    pub suffix: Vec<(DoubleColonToken, PathTypeSegment)>,
}
impl PathType {
    /// Returns the final segment of the path: the last suffix segment,
    /// or the prefix when there is no suffix.
    pub fn last_segment(&self) -> &PathTypeSegment {
        match self.suffix.last() {
            Some((_, segment)) => segment,
            None => &self.prefix,
        }
    }

    /// Mutable variant of [PathType::last_segment].
    pub fn last_segment_mut(&mut self) -> &mut PathTypeSegment {
        match self.suffix.last_mut() {
            Some((_, segment)) => segment,
            None => &mut self.prefix,
        }
    }
}
impl Spanned for PathType {
    /// Spans from the qualified root / leading `::` (or the prefix, if
    /// there is no root) through the last suffix segment (or the prefix,
    /// if there is no suffix).
    fn span(&self) -> Span {
        let start = if let Some((qualified_path_root_opt, double_colon_token)) = &self.root_opt {
            qualified_path_root_opt
                .as_ref()
                .map(|root| root.span())
                .unwrap_or_else(|| double_colon_token.span())
        } else {
            self.prefix.span()
        };
        let end = self
            .suffix
            .last()
            .map(|(_, segment)| segment.span())
            .unwrap_or_else(|| self.prefix.span());
        Span::join(start, &end)
    }
}
/// A single segment of a [PathType], e.g. `Bar<T>` in `foo::Bar<T>`.
#[derive(Clone, Debug, Serialize)]
pub struct PathTypeSegment {
    // The segment's name.
    pub name: Ident,
    // Optional generic arguments. In type position the turbofish `::`
    // is optional, hence the inner `Option<DoubleColonToken>`.
    pub generics_opt: Option<(Option<DoubleColonToken>, GenericArgs)>,
}
impl Spanned for PathTypeSegment {
    /// Spans the segment name, extended through the generic
    /// arguments when present.
    fn span(&self) -> Span {
        let name_span = self.name.span();
        if let Some((_, generic_args)) = &self.generics_opt {
            Span::join(name_span, &generic_args.span())
        } else {
            name_span
        }
    }
}
/// The root of a qualified path, e.g. `T as Trait` in `<T as Trait>::method`.
#[derive(Clone, Debug, Serialize)]
pub struct QualifiedPathRoot {
    // The self type, e.g. `T`.
    pub ty: Box<Ty>,
    // The `as` token together with the trait the type is cast to.
    pub as_trait: (AsToken, Box<PathType>),
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/lib.rs | sway-ast/src/lib.rs | pub mod assignable;
pub mod attribute;
pub mod brackets;
pub mod expr;
pub mod generics;
pub mod intrinsics;
mod item;
pub mod keywords;
pub mod literal;
pub mod module;
pub mod path;
pub mod pattern;
mod priv_prelude;
pub mod punctuated;
pub mod statement;
pub mod submodule;
pub mod token;
pub mod ty;
pub mod where_clause;
pub use crate::{
assignable::Assignable,
attribute::AttributeDecl,
brackets::{AngleBrackets, Braces, Parens},
expr::{
asm::{AsmBlock, AsmRegisterDeclaration},
op_code::Instruction,
AbiCastArgs, CodeBlockContents, Expr, ExprArrayDescriptor, ExprStructField,
ExprTupleDescriptor, IfCondition, IfExpr, MatchBranch, MatchBranchKind,
},
generics::{GenericArgs, GenericParams},
intrinsics::*,
item::{
item_abi::ItemAbi,
item_configurable::{ConfigurableField, ItemConfigurable},
item_const::ItemConst,
item_enum::ItemEnum,
item_fn::ItemFn,
item_impl::{ImplItemParent, ItemImpl, ItemImplItem},
item_storage::{ItemStorage, StorageEntry, StorageField},
item_struct::ItemStruct,
item_trait::{ItemTrait, ItemTraitItem, Traits},
item_type_alias::ItemTypeAlias,
item_use::{ItemUse, UseTree},
FnArg, FnArgs, FnSignature, Item, ItemKind, TraitType, TypeField,
},
keywords::{CommaToken, DoubleColonToken, PubToken},
literal::{LitInt, LitIntType, Literal},
module::{Module, ModuleKind},
path::{PathExpr, PathExprSegment, PathType, PathTypeSegment, QualifiedPathRoot},
pattern::{Pattern, PatternStructField},
punctuated::Punctuated,
statement::{Statement, StatementLet},
submodule::Submodule,
ty::Ty,
where_clause::{WhereBound, WhereClause},
};
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/pattern.rs | sway-ast/src/pattern.rs | use sway_error::handler::ErrorEmitted;
use crate::priv_prelude::*;
/// A pattern, as used in `match` branches and `let` bindings.
#[derive(Clone, Debug, Serialize)]
pub enum Pattern {
    /// An or-pattern, e.g. `A | B`.
    Or {
        lhs: Box<Pattern>,
        pipe_token: PipeToken,
        rhs: Box<Pattern>,
    },
    /// The wildcard pattern `_`.
    Wildcard {
        underscore_token: UnderscoreToken,
    },
    /// A pattern made of a single ident, which could either be a variable or an enum variant
    AmbiguousSingleIdent(Ident),
    /// A variable binding, optionally `ref` and/or `mut`.
    Var {
        reference: Option<RefToken>, // TODO: (REFERENCES) Implement `ref`, `mut`, and `ref mut` when implementing matching of references.
        mutable: Option<MutToken>,
        name: Ident,
    },
    /// A literal pattern, e.g. `42` or `"text"`.
    Literal(Literal),
    /// A constant referred to by path, e.g. `Module::CONST`.
    Constant(PathExpr),
    /// A tuple-style constructor pattern, e.g. `Some(x)`.
    Constructor {
        path: PathExpr,
        args: Parens<Punctuated<Pattern, CommaToken>>,
    },
    /// A struct pattern, e.g. `Point { x, y }`.
    Struct {
        path: PathExpr,
        fields: Braces<Punctuated<PatternStructField, CommaToken>>,
    },
    /// A tuple pattern, e.g. `(a, b)`.
    Tuple(Parens<Punctuated<Pattern, CommaToken>>),
    // to handle parser recovery: Error represents an incomplete Constructor
    Error(Box<[Span]>, #[serde(skip_serializing)] ErrorEmitted),
}
impl Spanned for Pattern {
fn span(&self) -> Span {
match self {
Pattern::Or {
lhs,
pipe_token,
rhs,
} => Span::join(Span::join(lhs.span(), &pipe_token.span()), &rhs.span()),
Pattern::Wildcard { underscore_token } => underscore_token.span(),
Pattern::Var {
reference,
mutable,
name,
} => match (reference, mutable) {
(Some(ref_token), Some(mut_token)) => Span::join(
Span::join(ref_token.span(), &mut_token.span()),
&name.span(),
),
(Some(ref_token), None) => Span::join(ref_token.span(), &name.span()),
(None, Some(mut_token)) => Span::join(mut_token.span(), &name.span()),
(None, None) => name.span(),
},
Pattern::AmbiguousSingleIdent(ident) => ident.span(),
Pattern::Literal(literal) => literal.span(),
Pattern::Constant(path_expr) => path_expr.span(),
Pattern::Constructor { path, args } => Span::join(path.span(), &args.span()),
Pattern::Struct { path, fields } => Span::join(path.span(), &fields.span()),
Pattern::Tuple(pat_tuple) => pat_tuple.span(),
Pattern::Error(spans, _) => spans
.iter()
.cloned()
.reduce(|s1: Span, s2: Span| Span::join(s1, &s2))
.unwrap(),
}
}
}
/// A single field inside a struct pattern.
#[derive(Clone, Debug, Serialize)]
pub enum PatternStructField {
    /// The rest pattern `..`, matching all remaining fields.
    Rest {
        token: DoubleDotToken,
    },
    /// A named field, optionally with a sub-pattern after a colon,
    /// e.g. `x` or `x: 0`.
    Field {
        field_name: Ident,
        pattern_opt: Option<(ColonToken, Box<Pattern>)>,
    },
}
impl Spanned for PatternStructField {
fn span(&self) -> Span {
use PatternStructField::*;
match &self {
Rest { token } => token.span(),
Field {
field_name,
pattern_opt,
} => match pattern_opt {
Some((_colon_token, pattern)) => Span::join(field_name.span(), &pattern.span()),
None => field_name.span(),
},
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/where_clause.rs | sway-ast/src/where_clause.rs | use crate::priv_prelude::*;
/// A `where` clause, e.g. `where T: MyTrait, U: MyOtherTrait`.
#[derive(Clone, Debug, Serialize)]
pub struct WhereClause {
    // The `where` keyword.
    pub where_token: WhereToken,
    // The comma-separated list of bounds.
    pub bounds: Punctuated<WhereBound, CommaToken>,
}
/// A single bound inside a `where` clause, e.g. `T: MyTrait`.
#[derive(Clone, Debug, Serialize)]
pub struct WhereBound {
    // The bounded type parameter's name, e.g. `T`.
    pub ty_name: Ident,
    // The `:` token between the name and its bounds.
    pub colon_token: ColonToken,
    // The trait bounds, e.g. `MyTrait + MyOtherTrait`.
    pub bounds: Traits,
}
impl Spanned for WhereClause {
    /// Spans from the `where` keyword through the last bound; with a
    /// dangling trailing comma the span ends at that comma, and with no
    /// bounds at all it covers just the `where` keyword.
    fn span(&self) -> Span {
        let start = self.where_token.span();
        if let Some(where_bound) = &self.bounds.final_value_opt {
            Span::join(start, &where_bound.span())
        } else if let Some((_, comma_token)) = self.bounds.value_separator_pairs.last() {
            Span::join(start, &comma_token.span())
        } else {
            start
        }
    }
}
impl Spanned for WhereBound {
    /// Spans from the bounded type parameter's name through its bounds.
    fn span(&self) -> Span {
        let start = self.ty_name.span();
        let end = self.bounds.span();
        Span::join(start, &end)
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/module.rs | sway-ast/src/module.rs | use sway_types::SourceId;
use crate::priv_prelude::*;
/// A parsed module: the module kind declaration (e.g. `library;`)
/// followed by the module's items.
#[derive(Clone, Debug, Serialize)]
pub struct Module {
    // The module kind: script, contract, predicate, or library.
    pub kind: ModuleKind,
    // The `;` terminating the kind declaration.
    pub semicolon_token: SemicolonToken,
    // All items declared in the module, in source order.
    pub items: Vec<Item>,
}
impl Module {
    /// Iterates over all submodule (`mod`) items declared directly
    /// in this module.
    pub fn submodules(&self) -> impl Iterator<Item = &Submodule> {
        self.items.iter().filter_map(|item| match &item.value {
            ItemKind::Submodule(submodule) => Some(submodule),
            _ => None,
        })
    }

    /// The id of the source file this module was parsed from, if known.
    pub fn source_id(&self) -> Option<SourceId> {
        self.kind.span().source_id().copied()
    }
}
impl Spanned for Module {
    /// Spans from the module kind declaration through the last item,
    /// or just through the terminating `;` if the module has no items.
    fn span(&self) -> Span {
        let start = self.kind.span();
        let end = self
            .items
            .last()
            .map(|item| item.span())
            .unwrap_or_else(|| self.semicolon_token.span());
        Span::join(start, &end)
    }
}
/// The kind of a module, as declared by its first line
/// (`script;`, `contract;`, `predicate;`, or `library;`).
#[derive(Clone, Debug, Serialize)]
pub enum ModuleKind {
    Script { script_token: ScriptToken },
    Contract { contract_token: ContractToken },
    Predicate { predicate_token: PredicateToken },
    Library { library_token: LibraryToken },
}
impl ModuleKind {
/// [ModuleKind]'s friendly name string used for various reportings.
pub fn friendly_name(&self) -> &'static str {
use ModuleKind::*;
match self {
Script { .. } => "module kind (script)",
Contract { .. } => "module kind (contract)",
Predicate { .. } => "module kind (predicate)",
Library { .. } => "module kind (library)",
}
}
}
impl Spanned for ModuleKind {
fn span(&self) -> Span {
match self {
Self::Script { script_token } => script_token.span(),
Self::Contract { contract_token } => contract_token.span(),
Self::Predicate { predicate_token } => predicate_token.span(),
Self::Library { library_token } => library_token.span(),
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ast/src/priv_prelude.rs | sway-ast/src/priv_prelude.rs | pub use {
crate::{
assignable::Assignable,
attribute::Annotated,
brackets::{AngleBrackets, Braces, Parens, SquareBrackets},
expr::{
asm::{AsmBlock, AsmImmediate},
op_code::Instruction,
CodeBlockContents, Expr,
},
generics::{GenericArgs, GenericParams},
item::{
item_abi::ItemAbi,
item_configurable::ItemConfigurable,
item_const::ItemConst,
item_enum::ItemEnum,
item_fn::ItemFn,
item_impl::ItemImpl,
item_storage::ItemStorage,
item_struct::ItemStruct,
item_trait::{ItemTrait, Traits},
item_type_alias::ItemTypeAlias,
item_use::ItemUse,
FnSignature, Item, ItemKind, TraitType, TypeField,
},
keywords::*,
literal::Literal,
path::{PathExpr, PathType},
pattern::Pattern,
punctuated::Punctuated,
statement::Statement,
submodule::Submodule,
ty::Ty,
where_clause::WhereClause,
},
extension_trait::extension_trait,
num_bigint::BigUint,
serde::{Deserialize, Serialize},
sway_types::{
ast::{Delimiter, PunctKind},
Ident, Span, Spanned,
},
};
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.