repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/diagnostic/qualify.rs | sway-lsp/src/capabilities/code_actions/diagnostic/qualify.rs | use super::auto_import::get_call_paths_for_name;
use crate::capabilities::{
code_actions::{CodeActionContext, CODE_ACTION_QUALIFY_TITLE},
diagnostic::DiagnosticData,
};
use lsp_types::{
CodeAction as LspCodeAction, CodeActionKind, CodeActionOrCommand, Range, TextEdit,
WorkspaceEdit,
};
use serde_json::Value;
use std::collections::HashMap;
/// Returns a list of [CodeActionOrCommand] suggestions for qualifying an unknown symbol with a path.
///
/// Finds the first diagnostic carrying an unknown-symbol name, looks up every call path
/// matching that name, and produces one quick-fix action per call path that replaces the
/// symbol at the diagnostic's range with the fully-qualified path.
pub(crate) fn qualify_code_action(
    ctx: &CodeActionContext,
    diagnostics: &mut impl Iterator<Item = (Range, DiagnosticData)>,
) -> Option<Vec<CodeActionOrCommand>> {
    // Find a diagnostic that has the attached metadata indicating we should try to qualify the path.
    let (symbol_name, range) = diagnostics.find_map(|(range, diag)| {
        let name = diag.unknown_symbol_name?;
        Some((name, range))
    })?;
    // Check if there are any matching call paths to import using the name from the diagnostic data.
    let call_paths = get_call_paths_for_name(ctx, &symbol_name)?;
    // Create a list of code actions, one for each potential call path.
    let actions = call_paths
        .map(|call_path| {
            // Replace the unknown symbol in place with its fully-qualified call path.
            let text_edit = TextEdit {
                range,
                new_text: call_path.to_string(),
            };
            let changes = HashMap::from([(ctx.uri.clone(), vec![text_edit])]);
            CodeActionOrCommand::CodeAction(LspCodeAction {
                title: format!("{CODE_ACTION_QUALIFY_TITLE} `{call_path}`"),
                kind: Some(CodeActionKind::QUICKFIX),
                edit: Some(WorkspaceEdit {
                    changes: Some(changes),
                    ..Default::default()
                }),
                data: Some(Value::String(ctx.uri.to_string())),
                ..Default::default()
            })
        })
        .collect::<Vec<_>>();
    // Only report Some when at least one action was produced; None lets the caller
    // fall through to other providers.
    (!actions.is_empty()).then_some(actions)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/diagnostic/mod.rs | sway-lsp/src/capabilities/code_actions/diagnostic/mod.rs | mod auto_import;
mod qualify;
use crate::capabilities::{code_actions::CodeActionContext, diagnostic::DiagnosticData};
use lsp_types::CodeActionOrCommand;
use self::auto_import::import_code_action;
use self::qualify::qualify_code_action;
/// Returns a list of [CodeActionOrCommand] based on the relevant compiler diagnostics.
pub(crate) fn code_actions(ctx: &CodeActionContext) -> Option<Vec<CodeActionOrCommand>> {
    // Pair each diagnostic's range with its deserialized [DiagnosticData], skipping
    // diagnostics that carry no usable metadata.
    let diagnostics_with_data = ctx.diagnostics.iter().filter_map(|diag| {
        let data = diag.clone().data?;
        let parsed = serde_json::from_value::<DiagnosticData>(data).ok()?;
        Some((diag.range, parsed))
    });
    // Collect suggestions from both providers, then merge them into one list.
    let import_actions = import_code_action(ctx, &mut diagnostics_with_data.clone());
    let qualify_actions = qualify_code_action(ctx, &mut diagnostics_with_data.clone());
    match (import_actions, qualify_actions) {
        (Some(mut imports), Some(mut qualifies)) => {
            imports.append(&mut qualifies);
            Some(imports)
        }
        (Some(imports), None) => Some(imports),
        (None, Some(qualifies)) => Some(qualifies),
        (None, None) => None,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/diagnostic/auto_import.rs | sway-lsp/src/capabilities/code_actions/diagnostic/auto_import.rs | use crate::{
capabilities::{
code_actions::{CodeActionContext, CODE_ACTION_IMPORT_TITLE},
diagnostic::DiagnosticData,
},
core::token::{get_range_from_span, ParsedAstToken, SymbolKind, TypedAstToken},
};
use lsp_types::{
CodeAction as LspCodeAction, CodeActionKind, CodeActionOrCommand, Position, Range, TextEdit,
WorkspaceEdit,
};
use serde_json::Value;
use std::{
cmp::Ordering,
collections::{BTreeSet, HashMap},
iter,
};
use sway_core::language::{
parsed::ImportType,
ty::{
TyConstantDecl, TyDecl, TyFunctionDecl, TyIncludeStatement, TyTypeAliasDecl, TyUseStatement,
},
CallPath,
};
use sway_types::{Ident, Spanned};
/// Returns a list of [CodeActionOrCommand] suggestions for inserting a missing import.
///
/// Finds the first diagnostic carrying an unknown-symbol name, gathers every matching
/// call path, and emits one quick-fix action per call path that inserts a `use`
/// statement at the appropriate location in the file.
pub(crate) fn import_code_action(
    ctx: &CodeActionContext,
    diagnostics: &mut impl Iterator<Item = (Range, DiagnosticData)>,
) -> Option<Vec<CodeActionOrCommand>> {
    // Find a diagnostic that has the attached metadata indicating we should try to suggest an auto-import.
    let symbol_name = diagnostics.find_map(|(_, diag)| diag.unknown_symbol_name)?;
    // Check if there are any matching call paths to import using the name from the diagnostic data.
    let call_paths = get_call_paths_for_name(ctx, &symbol_name)?;
    // Collect the tokens we need to determine where to insert the import statement.
    let mut use_statements = Vec::<TyUseStatement>::new();
    let mut include_statements = Vec::<TyIncludeStatement>::new();
    let mut program_type_keyword = None;
    ctx.tokens.tokens_for_file(ctx.temp_uri).for_each(|item| {
        if let Some(TypedAstToken::TypedUseStatement(use_stmt)) = &item.value().as_typed() {
            use_statements.push(use_stmt.clone());
        } else if let Some(TypedAstToken::TypedIncludeStatement(include_stmt)) =
            &item.value().as_typed()
        {
            include_statements.push(include_stmt.clone());
        } else if item.value().kind == SymbolKind::ProgramTypeKeyword {
            if let Some(ParsedAstToken::Keyword(ident)) = &item.value().as_parsed() {
                program_type_keyword = Some(ident.clone());
            }
        }
    });
    // Create a list of code actions, one for each potential call path.
    let actions = call_paths
        .map(|call_path| {
            let text_edit = get_text_edit(
                &call_path,
                &use_statements,
                &include_statements,
                &program_type_keyword,
            );
            let changes = HashMap::from([(ctx.uri.clone(), vec![text_edit])]);
            CodeActionOrCommand::CodeAction(LspCodeAction {
                title: format!("{CODE_ACTION_IMPORT_TITLE} `{call_path}`"),
                kind: Some(CodeActionKind::QUICKFIX),
                edit: Some(WorkspaceEdit {
                    changes: Some(changes),
                    ..Default::default()
                }),
                data: Some(Value::String(ctx.uri.to_string())),
                ..Default::default()
            })
        })
        .collect::<Vec<_>>();
    // Only report Some when at least one action was produced; None lets the caller
    // fall through to other providers.
    (!actions.is_empty()).then_some(actions)
}
/// Returns an [Iterator] of [CallPath]s that match the given symbol name. The [CallPath]s are sorted
/// alphabetically.
///
/// NOTE(review): as written this always returns `Some`; the iterator is simply empty
/// when no token with this name resolves to an importable declaration.
pub(crate) fn get_call_paths_for_name<'s>(
    ctx: &'s CodeActionContext,
    symbol_name: &'s String,
) -> Option<impl 's + Iterator<Item = CallPath>> {
    let mut call_paths = ctx
        .tokens
        .tokens_for_name(symbol_name)
        .filter_map(move |item| {
            // If the typed token is a declaration, then we can import it.
            match item.value().as_typed().as_ref() {
                // Each declaration kind is resolved through its own decl-engine getter,
                // then converted to an import path relative to the current namespace.
                Some(TypedAstToken::TypedDeclaration(ty_decl)) => match ty_decl {
                    TyDecl::StructDecl(decl) => {
                        let struct_decl = ctx.engines.de().get_struct(&decl.decl_id);
                        let call_path = struct_decl
                            .call_path
                            .to_import_path(ctx.engines, ctx.namespace);
                        Some(call_path)
                    }
                    TyDecl::EnumDecl(decl) => {
                        let enum_decl = ctx.engines.de().get_enum(&decl.decl_id);
                        let call_path = enum_decl
                            .call_path
                            .to_import_path(ctx.engines, ctx.namespace);
                        Some(call_path)
                    }
                    TyDecl::TraitDecl(decl) => {
                        let trait_decl = ctx.engines.de().get_trait(&decl.decl_id);
                        let call_path = trait_decl
                            .call_path
                            .to_import_path(ctx.engines, ctx.namespace);
                        Some(call_path)
                    }
                    TyDecl::FunctionDecl(decl) => {
                        let function_decl = ctx.engines.de().get_function(&decl.decl_id);
                        let call_path = function_decl
                            .call_path
                            .to_import_path(ctx.engines, ctx.namespace);
                        Some(call_path)
                    }
                    TyDecl::ConstantDecl(decl) => {
                        let constant_decl = ctx.engines.de().get_constant(&decl.decl_id);
                        let call_path = constant_decl
                            .call_path
                            .to_import_path(ctx.engines, ctx.namespace);
                        Some(call_path)
                    }
                    TyDecl::TypeAliasDecl(decl) => {
                        let type_alias_decl = ctx.engines.de().get_type_alias(&decl.decl_id);
                        let call_path = type_alias_decl
                            .call_path
                            .to_import_path(ctx.engines, ctx.namespace);
                        Some(call_path)
                    }
                    // Other declaration kinds (e.g. variables, impls) are not importable.
                    _ => None,
                },
                // Tokens whose typed form already carries a call path.
                Some(TypedAstToken::TypedFunctionDeclaration(TyFunctionDecl {
                    call_path, ..
                })) => {
                    let call_path = call_path.to_import_path(ctx.engines, ctx.namespace);
                    Some(call_path)
                }
                Some(TypedAstToken::TypedConstantDeclaration(TyConstantDecl {
                    call_path, ..
                }))
                | Some(TypedAstToken::TypedTypeAliasDeclaration(TyTypeAliasDecl {
                    call_path,
                    ..
                })) => {
                    let call_path = call_path.to_import_path(ctx.engines, ctx.namespace);
                    Some(call_path)
                }
                _ => None,
            }
        })
        .collect::<Vec<_>>();
    // Sort so the resulting code actions are presented alphabetically.
    call_paths.sort();
    Some(call_paths.into_iter())
}
/// Returns a [TextEdit] to insert an import statement for the given [CallPath] in the appropriate location in the file.
///
/// To determine where to insert the import statement in the file, we try these options and do
/// one of the following, based on the contents of the file.
///
/// 1. Add the import to an existing import that has the same prefix.
/// 2. Insert the import on a new line relative to existing use statements.
/// 3. Insert the import on a new line after existing mod statements.
/// 4. Insert the import on a new line after the program type statement (e.g. `contract;`)
/// 5. If all else fails, insert it at the beginning of the file.
fn get_text_edit(
    call_path: &CallPath,
    use_statements: &[TyUseStatement],
    include_statements: &[TyIncludeStatement],
    program_type_keyword: &Option<Ident>,
) -> TextEdit {
    get_text_edit_for_group(call_path, use_statements)
        .or_else(|| get_text_edit_in_use_block(call_path, use_statements))
        // `unwrap_or_else` keeps the fallback lazy: it is only computed when neither
        // of the earlier strategies produced an edit (the previous `unwrap_or`
        // evaluated it unconditionally).
        .unwrap_or_else(|| {
            get_text_edit_fallback(call_path, include_statements, program_type_keyword)
        })
}
/// Returns a [TextEdit] that inserts the call path into the existing statement if there is an
/// existing [TyUseStatement] with the same prefix as the given [CallPath]. Otherwise, returns [None].
fn get_text_edit_for_group(
    call_path: &CallPath,
    use_statements: &[TyUseStatement],
) -> Option<TextEdit> {
    // Statements whose call path starts with the same prefix as the new import.
    let group_statements = use_statements.iter().filter(|use_stmt| {
        call_path
            .prefixes
            .iter()
            .zip(use_stmt.call_path.iter())
            .all(|(prefix, stmt_prefix)| prefix.as_str() == stmt_prefix.as_str())
    });
    let mut group_statement_span = None;
    let suffixes = group_statements
        .filter_map(|stmt| {
            // Set the group statement span if it hasn't been set yet. If it has been set, filter out
            // any statements that aren't part of the same import group.
            if group_statement_span.is_none() {
                group_statement_span = Some(stmt.span());
            } else if group_statement_span != Some(stmt.span()) {
                return None;
            }
            let name = match &stmt.import_type {
                ImportType::Star => "*".to_string(),
                ImportType::SelfImport(_) => "self".to_string(),
                ImportType::Item(ident) => ident.to_string(),
            };
            match &stmt.alias {
                Some(alias) => Some(format!("{name} as {alias}")),
                None => Some(name),
            }
        })
        .chain(iter::once(call_path.suffix.to_string()))
        // The `BTreeSet` both deduplicates and yields the suffixes in sorted order,
        // so no separate sorting pass is needed afterwards.
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    // If there were no imports with the same prefix, return None. Otherwise, build the text edit response.
    group_statement_span.map(|span| {
        let suffix_string = suffixes.join(", ");
        let prefix_string = call_path
            .prefixes
            .iter()
            .map(sway_types::BaseIdent::as_str)
            .collect::<Vec<_>>()
            .join("::");
        TextEdit {
            // Replace the whole existing statement with the expanded group import.
            range: get_range_from_span(&span),
            new_text: format!("use {prefix_string}::{{{suffix_string}}};"),
        }
    })
}
/// If there are existing [TyUseStatement]s, returns a [TextEdit] to insert the new import statement on the
/// line above or below an existing statement, ordered alphabetically.
fn get_text_edit_in_use_block(
    call_path: &CallPath,
    use_statements: &[TyUseStatement],
) -> Option<TextEdit> {
    // Pick the anchor statement: the lexicographically greatest existing statement
    // (comparing raw span text) that still sorts before the new import. Returns None
    // when there are no use statements at all.
    let after_statement = use_statements.iter().reduce(|acc, curr| {
        if call_path.span().as_str().cmp(curr.span().as_str()) == Ordering::Greater
            && curr.span().as_str().cmp(acc.span().as_str()) == Ordering::Greater
        {
            return curr;
        }
        acc
    })?;
    let after_range = get_range_from_span(&after_statement.span());
    // Insert below the anchor when the new import sorts after it, otherwise insert
    // above it (at the anchor's starting line).
    // NOTE(review): comparisons use the statements' raw source text, so ordering
    // depends on the literal `use ...;` spelling — confirm this is intended.
    let range_line = if call_path
        .span()
        .as_str()
        .cmp(after_statement.span().as_str())
        == Ordering::Greater
    {
        after_range.end.line + 1
    } else {
        after_range.start.line
    };
    Some(TextEdit {
        range: Range::new(Position::new(range_line, 0), Position::new(range_line, 0)),
        new_text: format!("use {call_path};\n"),
    })
}
/// Returns a [TextEdit] to insert an import statement either after the last mod statement, after the program
/// type statement, or at the beginning of the file.
fn get_text_edit_fallback(
    call_path: &CallPath,
    include_statements: &[TyIncludeStatement],
    program_type_keyword: &Option<Ident>,
) -> TextEdit {
    // Track the mod statement with the greatest span, i.e. the one appearing last.
    let mut last_include_span = None;
    for stmt in include_statements {
        let span = stmt.span();
        let is_later = match &last_include_span {
            Some(current) => span > *current,
            None => true,
        };
        if is_later {
            last_include_span = Some(span);
        }
    }
    // Prefer the line after the last mod statement, then the line after the program
    // type keyword (e.g. `contract;`), and finally line 1 as a last resort.
    let range_line = match last_include_span {
        Some(span) => get_range_from_span(&span).end.line + 1,
        None => match program_type_keyword {
            Some(keyword) => get_range_from_span(&keyword.span()).end.line + 1,
            None => 1,
        },
    };
    TextEdit {
        range: Range::new(Position::new(range_line, 0), Position::new(range_line, 0)),
        new_text: format!("\nuse {call_path};\n"),
    }
}
#[cfg(test)]
mod tests {
    use sway_core::language::Visibility;
    use sway_types::{span::Source, Span};
    use super::*;

    // Asserts that a [TextEdit] has the expected range and replacement text.
    fn assert_text_edit(text_edit: TextEdit, expected_range: Range, expected_text: String) {
        assert_eq!(text_edit.range, expected_range);
        assert_eq!(text_edit.new_text, expected_text);
    }

    // Builds a [CallPath] from raw prefix/suffix strings with synthetic spans.
    fn get_mock_call_path(prefixes: Vec<&str>, suffix: &str) -> CallPath {
        CallPath {
            prefixes: get_mock_prefixes(prefixes),
            suffix: Ident::new_no_span(suffix.to_string()),
            callpath_type: sway_core::language::CallPathType::Full,
        }
    }

    fn get_mock_prefixes(prefixes: Vec<&str>) -> Vec<Ident> {
        prefixes
            .into_iter()
            .map(|p| Ident::new(Span::from_string(p.into())))
            .collect()
    }

    // Builds prefix idents whose spans point into `src`; prefixes not found in
    // `src` are silently dropped.
    fn get_prefixes_from_src(src: &Source, prefixes: Vec<&str>) -> Vec<Ident> {
        prefixes
            .into_iter()
            .filter_map(|p| get_ident_from_src(src, p))
            .collect()
    }

    // Returns the span of the first occurrence of `text` in `src`, if any.
    fn get_span_from_src(src: &Source, text: &str) -> Option<Span> {
        let start = src.text.find(text)?;
        let end = start + text.len();
        Span::new(src.clone(), start, end, None)
    }

    fn get_ident_from_src(src: &Source, name: &str) -> Option<Ident> {
        let span = get_span_from_src(src, name)?;
        Some(Ident::new(span))
    }

    // Builds a [TyUseStatement] whose span covers `text` within `src`.
    fn get_use_stmt_from_src(
        src: &Source,
        prefixes: Vec<&str>,
        import_type: ImportType,
        text: &str,
    ) -> TyUseStatement {
        TyUseStatement {
            call_path: get_prefixes_from_src(src, prefixes),
            span: get_span_from_src(src, text).unwrap(),
            import_type,
            is_relative_to_package_root: false,
            alias: None,
        }
    }

    // Builds a [TyIncludeStatement] (a `mod` statement) whose span covers `text`.
    fn get_incl_stmt_from_src(src: &Source, mod_name: &str, text: &str) -> TyIncludeStatement {
        TyIncludeStatement {
            span: get_span_from_src(src, text).unwrap(),
            mod_name: get_ident_from_src(src, mod_name).unwrap(),
            visibility: Visibility::Private,
        }
    }

    // A new import sharing a prefix with an existing statement is merged into a
    // group import that replaces that statement.
    #[test]
    fn get_text_edit_existing_import() {
        let src = Source::new(
            r#"contract;
use a:b:C;
use b:c:*;
"#,
        );
        let new_call_path = get_mock_call_path(vec!["a", "b"], "D");
        let use_statements = vec![
            get_use_stmt_from_src(
                &src,
                Vec::from(["a", "b"]),
                ImportType::Item(get_ident_from_src(&src, "C").unwrap()),
                "use a:b:C;",
            ),
            get_use_stmt_from_src(&src, Vec::from(["b", "c"]), ImportType::Star, "use b:c:*;"),
        ];
        let include_statements = vec![];
        let program_type_keyword = get_ident_from_src(&src, "contract");
        let expected_range = Range::new(Position::new(2, 0), Position::new(2, 10));
        let expected_text = "use a::b::{C, D};".into();
        let text_edit = get_text_edit(
            &new_call_path,
            &use_statements,
            &include_statements,
            &program_type_keyword,
        );
        assert_text_edit(text_edit, expected_range, expected_text);
    }

    // A new import with no matching prefix is inserted on its own line within
    // the existing use block.
    #[test]
    fn get_text_edit_new_import() {
        let src = Source::new(
            r#"predicate;
use b:c:*;
"#,
        );
        let new_call_path = get_mock_call_path(vec!["a", "b"], "C");
        let use_statements = vec![get_use_stmt_from_src(
            &src,
            Vec::from(["b", "c"]),
            ImportType::Star,
            "use b:c:*;",
        )];
        let include_statements = vec![];
        let program_type_keyword = get_ident_from_src(&src, "predicate");
        let expected_range = Range::new(Position::new(2, 0), Position::new(2, 0));
        let expected_text = "use a::b::C;\n".into();
        let text_edit = get_text_edit(
            &new_call_path,
            &use_statements,
            &include_statements,
            &program_type_keyword,
        );
        assert_text_edit(text_edit, expected_range, expected_text);
    }

    // A new import matching an existing group import is inserted into the group,
    // keeping the suffixes sorted.
    #[test]
    fn get_text_edit_existing_group_import() {
        let src = Source::new(
            r#"contract;
use b:c:{D, F};
"#,
        );
        let new_call_path = get_mock_call_path(vec!["b", "c"], "E");
        let use_statements = vec![
            get_use_stmt_from_src(
                &src,
                Vec::from(["b", "c"]),
                ImportType::Item(get_ident_from_src(&src, "D").unwrap()),
                "use b:c:{D, F};",
            ),
            get_use_stmt_from_src(
                &src,
                Vec::from(["b", "c"]),
                ImportType::Item(get_ident_from_src(&src, "F").unwrap()),
                "use b:c:{D, F};",
            ),
        ];
        let include_statements = vec![];
        let program_type_keyword = get_ident_from_src(&src, "contract");
        let expected_range = Range::new(Position::new(2, 0), Position::new(2, 15));
        let expected_text = "use b::c::{D, E, F};".into();
        let text_edit = get_text_edit(
            &new_call_path,
            &use_statements,
            &include_statements,
            &program_type_keyword,
        );
        assert_text_edit(text_edit, expected_range, expected_text);
    }

    // With no use statements, the import goes on the line after the last mod statement.
    #[test]
    fn get_text_edit_after_mod() {
        let src = Source::new(
            r#"library;
mod my_module;
pub mod zz_module;
"#,
        );
        let new_call_path = get_mock_call_path(vec!["b", "c"], "D");
        let use_statements = vec![];
        let include_statements = vec![
            get_incl_stmt_from_src(&src, "my_module", "mod my_module;"),
            get_incl_stmt_from_src(&src, "zz_module", "pub mod zz_module"),
        ];
        let program_type_keyword = get_ident_from_src(&src, "library");
        let expected_range = Range::new(Position::new(4, 0), Position::new(4, 0));
        let expected_text = "\nuse b::c::D;\n".into();
        let text_edit = get_text_edit(
            &new_call_path,
            &use_statements,
            &include_statements,
            &program_type_keyword,
        );
        assert_text_edit(text_edit, expected_range, expected_text);
    }

    // With no use or mod statements, the import goes after the program type keyword.
    #[test]
    fn get_text_edit_after_program() {
        let src = Source::new(
            r#"script;
const HI: u8 = 0;
"#,
        );
        let new_call_path = get_mock_call_path(vec!["b", "c"], "D");
        let use_statements = vec![];
        let include_statements = vec![];
        let program_type_keyword = get_ident_from_src(&src, "script");
        let expected_range = Range::new(Position::new(1, 0), Position::new(1, 0));
        let expected_text = "\nuse b::c::D;\n".into();
        let text_edit = get_text_edit(
            &new_call_path,
            &use_statements,
            &include_statements,
            &program_type_keyword,
        );
        assert_text_edit(text_edit, expected_range, expected_text);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/common/generate_doc.rs | sway-lsp/src/capabilities/code_actions/common/generate_doc.rs | use crate::capabilities::code_actions::CodeAction;
use sway_core::{Engines, TypeId};
use sway_types::Spanned;
/// Shared helpers for code actions that generate doc-comment templates.
pub(crate) trait GenerateDocCodeAction<'a, T: Spanned>: CodeAction<'a, T> {
    /// Returns a placeholder description as a vector of strings.
    fn description_section(&self) -> Vec<String> {
        vec!["Add a brief description.".to_string()]
    }
    /// Returns a placeholder information section as a vector of strings.
    fn info_section(&self) -> Vec<String> {
        vec![
            String::new(),
            "### Additional Information".to_string(),
            String::new(),
            "Provide information beyond the core purpose or functionality.".to_string(),
        ]
    }
    /// Returns the default template: a description followed by the info section.
    fn default_template(&self) -> String {
        let lines: Vec<String> = vec![self.description_section(), self.info_section()]
            .into_iter()
            .flatten()
            .collect();
        self.format_lines(lines)
    }
    /// Formats a vector of lines into a doc comment [String].
    fn format_lines(&self, lines: Vec<String>) -> String {
        // Append each rendered line to one accumulator; the previous
        // `fold` + `format!` approach re-allocated the whole accumulated
        // string on every line (quadratic in total output size).
        let mut output = String::new();
        for line in lines {
            output.push_str(&format!("{}/// {}\n", self.indentation(), line));
        }
        output
    }
    /// Formats a list item with a name and type into a [String].
    fn formatted_list_item(
        &self,
        engines: &'a Engines,
        name: Option<String>,
        type_id: TypeId,
    ) -> String {
        let name_string = match name {
            Some(name) => format!("`{name}`: "),
            None => String::new(),
        };
        // Unit types render as plain `()`; everything else is wrapped in
        // brackets so rustdoc-style links resolve.
        let type_string = if engines.te().get(type_id).is_unit() {
            "()".to_string()
        } else {
            format!("[{}]", engines.help_out(type_id))
        };
        format!("* {name_string}{type_string} - Add description here")
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/common/basic_doc_comment.rs | sway-lsp/src/capabilities/code_actions/common/basic_doc_comment.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext, CODE_ACTION_DOC_TITLE};
use lsp_types::{Range, Url};
use sway_types::Spanned;
use super::generate_doc::GenerateDocCodeAction;
/// Code action that generates a basic placeholder doc-comment template above a declaration.
pub struct BasicDocCommentCodeAction<'a, T: Spanned> {
    // The declaration the doc comment will be attached to.
    decl: &'a T,
    // URI of the document the declaration lives in.
    uri: &'a Url,
}
// Uses the default doc-template implementations from [GenerateDocCodeAction].
impl<'a, T: Spanned> GenerateDocCodeAction<'a, T> for BasicDocCommentCodeAction<'a, T> {}
impl<'a, T: Spanned> CodeAction<'a, T> for BasicDocCommentCodeAction<'a, T> {
    fn new(ctx: &CodeActionContext<'a>, decl: &'a T) -> Self {
        Self { decl, uri: ctx.uri }
    }
    /// The inserted text is the default placeholder doc-comment template.
    fn new_text(&self) -> String {
        self.default_template()
    }
    /// Doc comments are inserted immediately before the declaration.
    fn range(&self) -> Range {
        self.range_before()
    }
    fn title(&self) -> String {
        CODE_ACTION_DOC_TITLE.to_string()
    }
    fn decl(&self) -> &T {
        self.decl
    }
    fn uri(&self) -> &Url {
        self.uri
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/common/mod.rs | sway-lsp/src/capabilities/code_actions/common/mod.rs | pub mod basic_doc_comment;
pub mod fn_doc_comment;
pub mod generate_doc;
pub mod generate_impl;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/common/fn_doc_comment.rs | sway-lsp/src/capabilities/code_actions/common/fn_doc_comment.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext, CODE_ACTION_DOC_TITLE};
use lsp_types::{Range, Url};
use sway_core::{language::ty::FunctionSignature, Engines};
use sway_types::{Named, Spanned};
use super::generate_doc::GenerateDocCodeAction;
/// Code action that generates a full function doc-comment template, including
/// argument, return, revert, storage-access, and example sections.
pub struct FnDocCommentCodeAction<'a, T: Spanned + Named + FunctionSignature> {
    // Engines are needed to resolve parameter and return types for the template.
    engines: &'a Engines,
    decl: &'a T,
    uri: &'a Url,
}
// Uses the default doc-template helpers from [GenerateDocCodeAction].
impl<'a, T: Spanned + Named + FunctionSignature> GenerateDocCodeAction<'a, T>
    for FnDocCommentCodeAction<'a, T>
{
}
impl<'a, T: Spanned + Named + FunctionSignature> CodeAction<'a, T>
    for FnDocCommentCodeAction<'a, T>
{
    fn new(ctx: &CodeActionContext<'a>, decl: &'a T) -> Self {
        Self {
            engines: ctx.engines,
            decl,
            uri: ctx.uri,
        }
    }
    /// Builds the complete doc-comment template: description, additional info,
    /// arguments, returns, reverts, storage accesses, and examples, in that order.
    fn new_text(&self) -> String {
        let lines: Vec<String> = vec![
            self.description_section(),
            self.info_section(),
            self.arguments_section(),
            self.returns_section(),
            self.reverts_section(),
            self.storage_access_section(),
            self.examples_section(),
        ]
        .into_iter()
        .flatten()
        .collect();
        self.format_lines(lines)
    }
    /// Doc comments are inserted immediately before the function declaration.
    fn range(&self) -> Range {
        self.range_before()
    }
    fn title(&self) -> String {
        CODE_ACTION_DOC_TITLE.to_string()
    }
    fn decl(&self) -> &T {
        self.decl
    }
    fn uri(&self) -> &Url {
        self.uri
    }
}
impl<T: Spanned + Named + FunctionSignature> FnDocCommentCodeAction<'_, T> {
    /// Formats the placeholder "Reverts" section into a vector of strings.
    fn reverts_section(&self) -> Vec<String> {
        vec![
            String::new(),
            "### Reverts".to_string(),
            String::new(),
            "* List any cases where the function will revert".to_string(),
        ]
    }
    /// Formats the placeholder "Number of Storage Accesses" section into a vector of strings.
    fn storage_access_section(&self) -> Vec<String> {
        vec![
            String::new(),
            "### Number of Storage Accesses".to_string(),
            String::new(),
            "* Reads: `0`".to_string(),
            "* Writes: `0`".to_string(),
            "* Clears: `0`".to_string(),
        ]
    }
    /// Formats the arguments of the function into a vector of strings.
    fn arguments_section(&self) -> Vec<String> {
        // Functions without parameters get no arguments section at all.
        if self.decl.parameters().is_empty() {
            return vec![];
        }
        let mut lines = vec![String::new(), "### Arguments".to_string(), String::new()];
        self.decl.parameters().iter().for_each(|param| {
            lines.push(self.formatted_list_item(
                self.engines,
                Some(param.name.to_string()),
                param.type_argument.type_id,
            ));
        });
        lines
    }
    /// Formats the return value of the function into a vector of strings.
    fn returns_section(&self) -> Vec<String> {
        // Unit-returning functions get no returns section.
        if self
            .engines
            .te()
            .get(self.decl.return_type().type_id)
            .is_unit()
        {
            return vec![];
        }
        vec![
            String::new(),
            "### Returns".to_string(),
            String::new(),
            self.formatted_list_item(self.engines, None, self.decl.return_type().type_id),
        ]
    }
    /// Generates examples of function usage and formats it into a vector of strings.
    fn examples_section(&self) -> Vec<String> {
        // The parameter names themselves serve as the example arguments.
        let example_args = self
            .decl
            .parameters()
            .iter()
            .map(|param| param.name.to_string())
            .collect::<Vec<String>>()
            .join(", ");
        let example = format!("let x = {}({});", self.decl.name(), example_args);
        vec![
            String::new(),
            "### Examples".to_string(),
            String::new(),
            "```sway".to_string(),
            example,
            "```".to_string(),
        ]
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/common/generate_impl.rs | sway-lsp/src/capabilities/code_actions/common/generate_impl.rs | use sway_core::{
transform::{AttributeKind, Attributes},
TypeParameter,
};
use sway_types::{Named, Spanned};
use crate::capabilities::code_actions::CodeAction;
// Type name used in the `for ...` clause of generated ABI impls.
pub(crate) const CONTRACT: &str = "Contract";
// Indentation unit used when generating code.
// NOTE(review): shown here as a single space — confirm the intended width against the repo.
pub(crate) const TAB: &str = " ";
/// Shared helpers for code actions that generate `impl` block skeletons.
pub(crate) trait GenerateImplCodeAction<'a, T: Spanned>: CodeAction<'a, T> {
    /// Returns a [String] holding the name of the declaration.
    fn decl_name(&self) -> String;
    /// Returns an optional [String] of the type parameters for the given [TypeParameter] vector.
    fn type_param_string(&self, type_params: &[TypeParameter]) -> Option<String> {
        if type_params.is_empty() {
            None
        } else {
            Some(
                type_params
                    .iter()
                    .map(|param| param.name().to_string())
                    .collect::<Vec<_>>()
                    .join(", "),
            )
        }
    }
    /// Returns a [String] of a generated impl with the optional `for <for_name>` signature.
    /// Can be used for both ABI and Struct impls.
    fn impl_string(
        &self,
        type_params: Option<String>,
        body: String,
        for_name: Option<String>,
    ) -> String {
        let for_string = match for_name {
            Some(name) => format!(" for {name}"),
            None => String::new(),
        };
        let type_param_string = match type_params {
            Some(params) => format!("<{params}>"),
            None => String::new(),
        };
        // The type parameter list appears twice on purpose: once on the `impl`
        // itself and once on the type, i.e. `impl<T> Name<T> for X { ... }`.
        format!(
            "\nimpl{} {}{}{} {{{}}}\n",
            type_param_string,
            self.decl_name(),
            type_param_string,
            for_string,
            body
        )
    }
    /// Returns a [String] of `attributes`, optionally excluding doc comments.
    fn attribute_string(&self, attributes: &Attributes, include_comments: bool) -> String {
        let attr_string = attributes
            .all()
            .filter_map(|attr| match attr.kind {
                AttributeKind::DocComment => {
                    // Doc comments are only kept when explicitly requested.
                    if include_comments {
                        return Some(format!("{}{}", TAB, attr.span.as_str()));
                    }
                    None
                }
                _ => Some(format!("{}{}", TAB, attr.span.as_str())),
            })
            .collect::<Vec<_>>()
            .join("\n");
        // Append a trailing newline only when something was rendered.
        // NOTE(review): `len() > 1` effectively means "non-empty" since any
        // rendered attribute is longer than one byte — confirm before changing.
        let attribute_padding = if attr_string.len() > 1 { "\n" } else { "" };
        format!("{attr_string}{attribute_padding}")
    }
    /// Returns a [String] of a generated function signature.
    fn fn_signature_string(
        &self,
        fn_name: String,
        params_string: String,
        attributes: &Attributes,
        return_type_string: String,
        body: Option<String>,
    ) -> String {
        // Doc comments are stripped from generated signatures.
        let attribute_string = self.attribute_string(attributes, false);
        let body_string = match body {
            Some(body) => format!(" {body} "),
            None => String::new(),
        };
        format!(
            "{attribute_string}{TAB}fn {fn_name}({params_string}){return_type_string} {{{body_string}}}",
        )
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/constant_decl/mod.rs | sway-lsp/src/capabilities/code_actions/constant_decl/mod.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::language::ty;
use super::common::basic_doc_comment::BasicDocCommentCodeAction;
/// Returns the code actions available for a constant declaration: a basic
/// doc-comment template.
pub(crate) fn code_actions(
    decl: &ty::TyConstantDecl,
    ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
    vec![BasicDocCommentCodeAction::new(ctx, decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/enum_decl/enum_impl.rs | sway-lsp/src/capabilities/code_actions/enum_decl/enum_impl.rs | use crate::capabilities::code_actions::{
common::generate_impl::{GenerateImplCodeAction, TAB},
CodeAction, CodeActionContext, CODE_ACTION_IMPL_TITLE,
};
use lsp_types::{Range, Url};
use sway_core::language::ty::TyEnumDecl;
/// Code action that generates an empty `impl` block for an enum declaration.
pub(crate) struct EnumImplCodeAction<'a> {
    decl: &'a TyEnumDecl,
    uri: &'a Url,
}
impl<'a> GenerateImplCodeAction<'a, TyEnumDecl> for EnumImplCodeAction<'a> {
    /// The enum's name is the suffix of its call path.
    fn decl_name(&self) -> String {
        self.decl.call_path.suffix.to_string()
    }
}
impl<'a> CodeAction<'a, TyEnumDecl> for EnumImplCodeAction<'a> {
    fn new(ctx: &CodeActionContext<'a>, decl: &'a TyEnumDecl) -> Self {
        Self { decl, uri: ctx.uri }
    }
    /// Generates `impl<...> Name<...> { }` with an empty, indented body.
    fn new_text(&self) -> String {
        self.impl_string(
            self.type_param_string(&self.decl.generic_parameters),
            format!("\n{TAB}\n"),
            None,
        )
    }
    fn title(&self) -> String {
        format!("{} `{}`", CODE_ACTION_IMPL_TITLE, self.decl_name())
    }
    /// The impl block is inserted after the enum declaration.
    fn range(&self) -> Range {
        self.range_after()
    }
    fn decl(&self) -> &TyEnumDecl {
        self.decl
    }
    fn uri(&self) -> &Url {
        self.uri
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/enum_decl/mod.rs | sway-lsp/src/capabilities/code_actions/enum_decl/mod.rs | pub(crate) mod enum_impl;
use self::enum_impl::EnumImplCodeAction;
use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::{decl_engine::id::DeclId, language::ty};
use super::common::basic_doc_comment::BasicDocCommentCodeAction;
/// Returns the code actions available for an enum declaration: an empty impl
/// block and a basic doc-comment template.
pub(crate) fn code_actions(
    decl_id: &DeclId<ty::TyEnumDecl>,
    ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
    // Clone the declaration out of the decl engine so both actions can borrow it.
    let decl = (*ctx.engines.de().get_enum(decl_id)).clone();
    vec![
        EnumImplCodeAction::new(ctx, &decl).code_action(),
        BasicDocCommentCodeAction::new(ctx, &decl).code_action(),
    ]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/abi_decl/abi_impl.rs | sway-lsp/src/capabilities/code_actions/abi_decl/abi_impl.rs | use crate::capabilities::code_actions::{
common::generate_impl::{GenerateImplCodeAction, CONTRACT},
CodeAction, CodeActionContext, CODE_ACTION_IMPL_TITLE,
};
use lsp_types::{Range, Url};
use sway_core::{
language::ty::{self, TyAbiDecl, TyFunctionParameter, TyTraitFn},
Engines,
};
use sway_types::Spanned;
/// Code action that generates a contract `impl` skeleton for an ABI declaration.
pub(crate) struct AbiImplCodeAction<'a> {
    // Engines are needed to resolve trait function signatures and return types.
    engines: &'a Engines,
    decl: &'a TyAbiDecl,
    uri: &'a Url,
}
impl<'a> GenerateImplCodeAction<'a, TyAbiDecl> for AbiImplCodeAction<'a> {
    fn decl_name(&self) -> String {
        self.decl.name.to_string()
    }
}
impl<'a> CodeAction<'a, TyAbiDecl> for AbiImplCodeAction<'a> {
    fn new(ctx: &CodeActionContext<'a>, decl: &'a TyAbiDecl) -> Self {
        Self {
            engines: ctx.engines,
            decl,
            uri: ctx.uri,
        }
    }
    /// Generates `impl <AbiName> for Contract { ... }` containing a stub for
    /// every function in the ABI's interface surface.
    fn new_text(&self) -> String {
        self.impl_string(
            None,
            self.fn_signatures_string(),
            Some(CONTRACT.to_string()),
        )
    }
    fn title(&self) -> String {
        format!("{} `{}`", CODE_ACTION_IMPL_TITLE, self.decl_name())
    }
    /// The impl block is inserted after the ABI declaration.
    fn range(&self) -> Range {
        self.range_after()
    }
    fn decl(&self) -> &TyAbiDecl {
        self.decl
    }
    fn uri(&self) -> &Url {
        self.uri
    }
}
impl AbiImplCodeAction<'_> {
fn return_type_string(&self, function_decl: &TyTraitFn) -> String {
let type_engine = self.engines.te();
// Unit is the implicit return type for ABI functions.
if type_engine.get(function_decl.return_type.type_id).is_unit() {
String::new()
} else {
format!(" -> {}", function_decl.return_type.span().as_str())
}
}
fn fn_signatures_string(&self) -> String {
let decl_engine = self.engines.de();
format!(
"\n{}\n",
self.decl
.interface_surface
.iter()
.map(|item| {
match item {
ty::TyTraitInterfaceItem::TraitFn(function_decl_ref) => {
let function_decl = decl_engine.get_trait_fn(function_decl_ref);
self.fn_signature_string(
function_decl.name.to_string(),
params_string(&function_decl.parameters),
&function_decl.attributes,
self.return_type_string(&function_decl),
None,
)
}
ty::TyTraitInterfaceItem::Constant(_)
| ty::TyTraitInterfaceItem::Type(_) => unreachable!(),
}
})
.collect::<Vec<String>>()
.join("\n")
)
}
}
fn params_string(params: &[TyFunctionParameter]) -> String {
params
.iter()
.map(|param| format!("{}: {}", param.name, param.type_argument.span().as_str()))
.collect::<Vec<String>>()
.join(", ")
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/abi_decl/mod.rs | sway-lsp/src/capabilities/code_actions/abi_decl/mod.rs | pub(crate) mod abi_impl;
use self::abi_impl::AbiImplCodeAction;
use super::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::{decl_engine::id::DeclId, language::ty::TyAbiDecl};
pub(crate) fn code_actions(
decl_id: &DeclId<TyAbiDecl>,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
let decl = ctx.engines.de().get_abi(decl_id);
vec![AbiImplCodeAction::new(ctx, &decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/trait_fn/mod.rs | sway-lsp/src/capabilities/code_actions/trait_fn/mod.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::language::ty;
use super::common::fn_doc_comment::FnDocCommentCodeAction;
pub(crate) fn code_actions(
decl: &ty::TyTraitFn,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
vec![FnDocCommentCodeAction::new(ctx, decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/capabilities/code_actions/struct_field/mod.rs | sway-lsp/src/capabilities/code_actions/struct_field/mod.rs | use crate::capabilities::code_actions::{CodeAction, CodeActionContext};
use lsp_types::CodeActionOrCommand;
use sway_core::language::ty;
use super::common::basic_doc_comment::BasicDocCommentCodeAction;
pub(crate) fn code_actions(
decl: &ty::TyStructField,
ctx: &CodeActionContext,
) -> Vec<CodeActionOrCommand> {
vec![BasicDocCommentCodeAction::new(ctx, decl).code_action()]
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/traverse/parsed_tree.rs | sway-lsp/src/traverse/parsed_tree.rs | #![allow(dead_code)]
use crate::{
core::{
token::{
desugared_op, type_info_to_symbol_kind, ParsedAstToken, SymbolKind, Token,
TypeDefinition,
},
token_map::TokenMap,
},
traverse::{adaptive_iter, Parse, ParseContext},
};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use sway_core::{
compiler_generated::{
is_generated_any_match_expression_var_name, is_generated_destructured_struct_var_name,
is_generated_tuple_var_name,
},
decl_engine::parsed_id::ParsedDeclId,
language::{
parsed::{
AbiCastExpression, AbiDeclaration, AmbiguousPathExpression, ArrayExpression,
ArrayIndexExpression, AstNode, AstNodeContent, ConfigurableDeclaration,
ConstGenericDeclaration, ConstantDeclaration, Declaration, DelineatedPathExpression,
EnumDeclaration, EnumVariant, Expression, ExpressionKind, ForLoopExpression,
FunctionApplicationExpression, FunctionDeclaration, FunctionParameter, IfExpression,
ImplItem, ImplSelfOrTrait, ImportType, IncludeStatement, IntrinsicFunctionExpression,
LazyOperatorExpression, MatchExpression, MethodApplicationExpression, MethodName,
ParseModule, ParseProgram, ParseSubmodule, QualifiedPathType, ReassignmentExpression,
ReassignmentTarget, RefExpression, Scrutinee, StorageAccessExpression,
StorageDeclaration, StorageEntry, StorageField, StorageNamespace, StructDeclaration,
StructExpression, StructExpressionField, StructField, StructScrutineeField,
SubfieldExpression, Supertrait, TraitDeclaration, TraitFn, TraitItem,
TraitTypeDeclaration, TupleIndexExpression, TypeAliasDeclaration, UseStatement,
VariableDeclaration, WhileLoopExpression,
},
CallPathTree, HasSubmodules, Literal,
},
transform::Attributes,
type_system::{GenericArgument, TypeParameter},
GenericTypeArgument, TraitConstraint, TypeInfo,
};
use sway_types::{Ident, Span, Spanned};
pub struct ParsedTree<'a> {
ctx: &'a ParseContext<'a>,
}
impl<'a> ParsedTree<'a> {
pub fn new(ctx: &'a ParseContext<'a>) -> Self {
Self { ctx }
}
pub fn traverse_node(&self, node: &AstNode) {
node.parse(self.ctx);
}
/// Collects module names from the mod statements
pub fn collect_module_spans(&self, parse_program: &ParseProgram) {
self.collect_parse_module(&parse_program.root);
}
fn collect_parse_module(&self, parse_module: &ParseModule) {
self.ctx.tokens.insert(
self.ctx
.ident(&Ident::new(parse_module.module_kind_span.clone())),
Token::from_parsed(
ParsedAstToken::LibrarySpan(parse_module.module_kind_span.clone()),
SymbolKind::Keyword,
),
);
for (
_,
ParseSubmodule {
module,
mod_name_span,
..
},
) in parse_module.submodules_recursive()
{
self.ctx.tokens.insert(
self.ctx.ident(&Ident::new(mod_name_span.clone())),
Token::from_parsed(ParsedAstToken::ModuleName, SymbolKind::Module),
);
self.collect_parse_module(module);
}
}
}
impl Parse for Attributes {
fn parse(&self, ctx: &ParseContext) {
self.all_as_slice()
.par_iter()
.filter(|attribute| !attribute.is_doc_comment())
.for_each_with(ctx, |ctx, attribute| {
ctx.tokens.insert(
ctx.ident(&attribute.name),
Token::from_parsed(
ParsedAstToken::Attribute(attribute.clone()),
SymbolKind::DeriveHelper,
),
);
});
}
}
impl Parse for AstNode {
fn parse(&self, ctx: &ParseContext) {
match &self.content {
AstNodeContent::Declaration(declaration) => declaration.parse(ctx),
AstNodeContent::Expression(expression) => {
expression.parse(ctx);
}
AstNodeContent::UseStatement(use_statement) => use_statement.parse(ctx),
AstNodeContent::IncludeStatement(include_statement) => include_statement.parse(ctx),
AstNodeContent::Error(_, _) => {}
}
}
}
impl Parse for Declaration {
fn parse(&self, ctx: &ParseContext) {
match self {
Declaration::VariableDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::FunctionDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::TraitDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::StructDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::EnumDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::EnumVariantDeclaration(_decl) => unreachable!(),
Declaration::ImplSelfOrTrait(decl_id) => decl_id.parse(ctx),
Declaration::AbiDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::ConstantDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::ConfigurableDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::ConstGenericDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::StorageDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::TypeAliasDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::TraitTypeDeclaration(decl_id) => decl_id.parse(ctx),
Declaration::TraitFnDeclaration(decl_id) => decl_id.parse(ctx),
}
}
}
impl Parse for UseStatement {
fn parse(&self, ctx: &ParseContext) {
if let Some(alias) = &self.alias {
ctx.tokens.insert(
ctx.ident(alias),
Token::from_parsed(
ParsedAstToken::UseStatement(self.clone()),
SymbolKind::Unknown,
),
);
}
adaptive_iter(&self.call_path, |prefix| {
ctx.tokens.insert(
ctx.ident(prefix),
Token::from_parsed(
ParsedAstToken::UseStatement(self.clone()),
SymbolKind::Module,
),
);
});
match &self.import_type {
ImportType::Item(item) => {
ctx.tokens.insert(
ctx.ident(item),
Token::from_parsed(
ParsedAstToken::UseStatement(self.clone()),
SymbolKind::Unknown,
),
);
}
ImportType::SelfImport(span) => {
ctx.tokens.insert(
ctx.ident(&Ident::new(span.clone())),
Token::from_parsed(
ParsedAstToken::UseStatement(self.clone()),
SymbolKind::Unknown,
),
);
}
ImportType::Star => {}
}
}
}
impl Parse for IncludeStatement {
fn parse(&self, ctx: &ParseContext) {
ctx.tokens.insert(
ctx.ident(&self.mod_name),
Token::from_parsed(
ParsedAstToken::IncludeStatement(self.clone()),
SymbolKind::Unknown,
),
);
}
}
impl Parse for Expression {
fn parse(&self, ctx: &ParseContext) {
match &self.kind {
ExpressionKind::Error(part_spans, _) => {
adaptive_iter(part_spans, |span| {
ctx.tokens.insert(
ctx.ident(&Ident::new(span.clone())),
Token::from_parsed(
ParsedAstToken::ErrorRecovery(span.clone()),
SymbolKind::Unknown,
),
);
});
}
ExpressionKind::Literal(value) => {
let symbol_kind = literal_to_symbol_kind(value);
ctx.tokens.insert(
ctx.ident(&Ident::new(self.span.clone())),
Token::from_parsed(ParsedAstToken::Expression(self.clone()), symbol_kind),
);
}
ExpressionKind::FunctionApplication(function_application_expression) => {
function_application_expression.parse(ctx);
}
ExpressionKind::LazyOperator(LazyOperatorExpression { lhs, rhs, .. }) => {
lhs.parse(ctx);
rhs.parse(ctx);
}
ExpressionKind::Variable(name) => {
if !(is_generated_tuple_var_name(name.as_str())
|| is_generated_any_match_expression_var_name(name.as_str()))
{
let symbol_kind = if is_generated_destructured_struct_var_name(name.as_str()) {
SymbolKind::Struct
} else if name.as_str() == "self" {
SymbolKind::SelfKeyword
} else {
SymbolKind::Variable
};
ctx.tokens.insert(
ctx.ident(name),
Token::from_parsed(ParsedAstToken::Expression(self.clone()), symbol_kind),
);
}
}
ExpressionKind::Tuple(fields) => {
adaptive_iter(fields, |field| field.parse(ctx));
}
ExpressionKind::TupleIndex(TupleIndexExpression {
prefix, index_span, ..
}) => {
prefix.parse(ctx);
ctx.tokens.insert(
ctx.ident(&Ident::new(index_span.clone())),
Token::from_parsed(
ParsedAstToken::Expression(self.clone()),
SymbolKind::NumericLiteral,
),
);
}
ExpressionKind::Array(array_expression) => {
array_expression.parse(ctx);
}
ExpressionKind::Struct(struct_expression) => {
struct_expression.parse(ctx);
}
ExpressionKind::CodeBlock(code_block) => {
adaptive_iter(&code_block.contents, |node| node.parse(ctx));
}
ExpressionKind::If(IfExpression {
condition,
then,
r#else,
..
}) => {
condition.parse(ctx);
then.parse(ctx);
if let Some(r#else) = r#else {
r#else.parse(ctx);
}
}
ExpressionKind::Match(MatchExpression {
value, branches, ..
}) => {
value.parse(ctx);
adaptive_iter(branches, |branch| {
branch.scrutinee.parse(ctx);
branch.result.parse(ctx);
});
}
ExpressionKind::Asm(asm) => {
adaptive_iter(&asm.registers, |register| {
if let Some(initializer) = ®ister.initializer {
initializer.parse(ctx);
}
});
}
ExpressionKind::MethodApplication(method_application_expression) => {
method_application_expression.parse(ctx);
}
ExpressionKind::Subfield(SubfieldExpression {
prefix,
field_to_access,
..
}) => {
prefix.parse(ctx);
ctx.tokens.insert(
ctx.ident(field_to_access),
Token::from_parsed(ParsedAstToken::Expression(self.clone()), SymbolKind::Field),
);
}
ExpressionKind::AmbiguousVariableExpression(ident) => {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Unknown),
);
}
ExpressionKind::AmbiguousPathExpression(path_expr) => {
path_expr.parse(ctx);
}
ExpressionKind::DelineatedPath(delineated_path_expression) => {
delineated_path_expression.parse(ctx);
}
ExpressionKind::AbiCast(abi_cast_expression) => {
abi_cast_expression.parse(ctx);
}
ExpressionKind::ArrayIndex(ArrayIndexExpression { prefix, index, .. }) => {
prefix.parse(ctx);
index.parse(ctx);
}
ExpressionKind::StorageAccess(StorageAccessExpression {
field_names,
namespace_names,
storage_keyword_span,
}) => {
let storage_ident = Ident::new(storage_keyword_span.clone());
ctx.tokens.insert(
ctx.ident(&storage_ident),
Token::from_parsed(ParsedAstToken::Ident(storage_ident), SymbolKind::Unknown),
);
adaptive_iter(namespace_names, |namespace_name| {
ctx.tokens.insert(
ctx.ident(namespace_name),
Token::from_parsed(
ParsedAstToken::Ident(namespace_name.clone()),
SymbolKind::Field,
),
);
});
adaptive_iter(field_names, |field_name| {
ctx.tokens.insert(
ctx.ident(field_name),
Token::from_parsed(
ParsedAstToken::Ident(field_name.clone()),
SymbolKind::Field,
),
);
});
}
ExpressionKind::IntrinsicFunction(intrinsic_function_expression) => {
intrinsic_function_expression.parse(ctx);
}
ExpressionKind::WhileLoop(WhileLoopExpression {
body, condition, ..
}) => {
adaptive_iter(&body.contents, |node| node.parse(ctx));
condition.parse(ctx);
}
ExpressionKind::ForLoop(ForLoopExpression { desugared }) => {
desugared.parse(ctx);
}
ExpressionKind::Reassignment(reassignment) => {
reassignment.parse(ctx);
}
ExpressionKind::ImplicitReturn(expr)
| ExpressionKind::Return(expr)
| ExpressionKind::Panic(expr)
| ExpressionKind::Ref(RefExpression { value: expr, .. })
| ExpressionKind::Deref(expr) => {
expr.parse(ctx);
}
// We are collecting these tokens in the lexed phase.
ExpressionKind::Break | ExpressionKind::Continue => {}
}
}
}
impl Parse for ReassignmentExpression {
fn parse(&self, ctx: &ParseContext) {
self.rhs.parse(ctx);
match &self.lhs {
ReassignmentTarget::ElementAccess(exp) | ReassignmentTarget::Deref(exp) => {
exp.parse(ctx);
}
}
}
}
impl Parse for IntrinsicFunctionExpression {
fn parse(&self, ctx: &ParseContext) {
ctx.tokens.insert(
ctx.ident(&self.name),
Token::from_parsed(
ParsedAstToken::Intrinsic(self.kind_binding.inner.clone()),
SymbolKind::Intrinsic,
),
);
adaptive_iter(&self.arguments, |arg| arg.parse(ctx));
adaptive_iter(&self.kind_binding.type_arguments.to_vec(), |type_arg| {
type_arg.parse(ctx);
});
}
}
impl Parse for AbiCastExpression {
fn parse(&self, ctx: &ParseContext) {
adaptive_iter(&self.abi_name.prefixes, |ident| {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Module),
);
});
ctx.tokens.insert(
ctx.ident(&self.abi_name.suffix),
Token::from_parsed(
ParsedAstToken::AbiCastExpression(self.clone()),
SymbolKind::Trait,
),
);
self.address.parse(ctx);
}
}
impl Parse for DelineatedPathExpression {
fn parse(&self, ctx: &ParseContext) {
let DelineatedPathExpression {
call_path_binding,
args,
} = self;
adaptive_iter(&call_path_binding.inner.call_path.prefixes, |ident| {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Enum),
);
});
ctx.tokens.insert(
ctx.ident(&call_path_binding.inner.call_path.suffix),
Token::from_parsed(
ParsedAstToken::DelineatedPathExpression(self.clone()),
SymbolKind::Variant,
),
);
adaptive_iter(&call_path_binding.type_arguments.to_vec(), |type_arg| {
type_arg.parse(ctx);
});
if let Some(args_vec) = args.as_ref() {
adaptive_iter(args_vec, |exp| exp.parse(ctx));
}
collect_qualified_path_root(ctx, call_path_binding.inner.qualified_path_root.clone());
}
}
impl Parse for AmbiguousPathExpression {
fn parse(&self, ctx: &ParseContext) {
let AmbiguousPathExpression {
call_path_binding,
args,
qualified_path_root,
} = self;
for ident in call_path_binding.inner.prefixes.iter().chain(
call_path_binding
.inner
.suffix
.before
.iter()
.map(|before| &before.inner),
) {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Enum),
);
}
ctx.tokens.insert(
ctx.ident(&call_path_binding.inner.suffix.suffix),
Token::from_parsed(
ParsedAstToken::AmbiguousPathExpression(self.clone()),
SymbolKind::Variant,
),
);
adaptive_iter(&call_path_binding.type_arguments.to_vec(), |type_arg| {
type_arg.parse(ctx);
});
adaptive_iter(args, |exp| exp.parse(ctx));
collect_qualified_path_root(ctx, qualified_path_root.clone().map(Box::new));
}
}
impl Parse for MethodApplicationExpression {
fn parse(&self, ctx: &ParseContext) {
let prefixes = match &self.method_name_binding.inner {
MethodName::FromType {
call_path_binding, ..
} => call_path_binding.inner.prefixes.clone(),
MethodName::FromTrait { call_path, .. } => call_path.prefixes.clone(),
_ => vec![],
};
if let MethodName::FromType {
call_path_binding, ..
} = &self.method_name_binding.inner
{
let (type_info, ident) = &call_path_binding.inner.suffix;
collect_type_info_token(ctx, type_info, Some(&ident.span()));
}
adaptive_iter(
&self.method_name_binding.type_arguments.to_vec(),
|type_arg| {
type_arg.parse(ctx);
},
);
// Don't collect applications of desugared operators due to mismatched ident lengths.
if !desugared_op(&prefixes) {
ctx.tokens.insert(
ctx.ident(&self.method_name_binding.inner.easy_name()),
Token::from_parsed(
ParsedAstToken::MethodApplicationExpression(self.clone()),
SymbolKind::Struct,
),
);
}
adaptive_iter(&self.arguments, |arg| arg.parse(ctx));
adaptive_iter(&self.contract_call_params, |param| param.parse(ctx));
}
}
impl Parse for Scrutinee {
fn parse(&self, ctx: &ParseContext) {
match self {
Scrutinee::CatchAll { .. } => (),
Scrutinee::Literal { ref value, span } => {
let token = Token::from_parsed(
ParsedAstToken::Scrutinee(self.clone()),
literal_to_symbol_kind(value),
);
ctx.tokens
.insert(ctx.ident(&Ident::new(span.clone())), token);
}
Scrutinee::Variable { name, .. } => {
ctx.tokens.insert(
ctx.ident(name),
// it could either be a variable or a constant
Token::from_parsed(
ParsedAstToken::Scrutinee(self.clone()),
SymbolKind::Unknown,
),
);
}
Scrutinee::StructScrutinee {
struct_name,
fields,
..
} => {
adaptive_iter(&struct_name.prefixes, |ident| {
let token = Token::from_parsed(
ParsedAstToken::Ident(ident.clone()),
SymbolKind::Struct,
);
ctx.tokens.insert(ctx.ident(ident), token);
});
ctx.tokens.insert(
ctx.ident(&struct_name.suffix),
Token::from_parsed(ParsedAstToken::Scrutinee(self.clone()), SymbolKind::Struct),
);
adaptive_iter(fields, |field| field.parse(ctx));
}
Scrutinee::EnumScrutinee {
call_path, value, ..
} => {
adaptive_iter(&call_path.prefixes, |ident| {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Enum),
);
});
let token = Token::from_parsed(
ParsedAstToken::Scrutinee(self.clone()),
SymbolKind::Variant,
);
ctx.tokens.insert(ctx.ident(&call_path.suffix), token);
value.parse(ctx);
}
Scrutinee::AmbiguousSingleIdent(ident) => {
let token =
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Unknown);
ctx.tokens.insert(ctx.ident(ident), token);
}
Scrutinee::Tuple { elems, .. } | Scrutinee::Or { elems, .. } => {
adaptive_iter(elems, |elem| elem.parse(ctx));
}
Scrutinee::Error { .. } => {
// FIXME: Left for @JoshuaBatty to use.
}
}
}
}
impl Parse for StructScrutineeField {
fn parse(&self, ctx: &ParseContext) {
let token = Token::from_parsed(
ParsedAstToken::StructScrutineeField(self.clone()),
SymbolKind::Field,
);
if let StructScrutineeField::Field {
field, scrutinee, ..
} = self
{
ctx.tokens.insert(ctx.ident(field), token);
if let Some(scrutinee) = scrutinee {
scrutinee.parse(ctx);
}
}
}
}
impl Parse for StructExpression {
fn parse(&self, ctx: &ParseContext) {
adaptive_iter(&self.call_path_binding.inner.prefixes, |ident| {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Struct),
);
});
let name = &self.call_path_binding.inner.suffix;
let symbol_kind = if name.as_str() == "Self" {
SymbolKind::SelfKeyword
} else {
SymbolKind::Struct
};
ctx.tokens.insert(
ctx.ident(name),
Token::from_parsed(ParsedAstToken::StructExpression(self.clone()), symbol_kind),
);
let type_arguments = &self.call_path_binding.type_arguments.to_vec();
adaptive_iter(type_arguments, |type_arg| type_arg.parse(ctx));
adaptive_iter(&self.fields, |field| field.parse(ctx));
}
}
impl Parse for StructExpressionField {
fn parse(&self, ctx: &ParseContext) {
ctx.tokens.insert(
ctx.ident(&self.name),
Token::from_parsed(
ParsedAstToken::StructExpressionField(self.clone()),
SymbolKind::Field,
),
);
self.value.parse(ctx);
}
}
impl Parse for ArrayExpression {
fn parse(&self, ctx: &ParseContext) {
match self {
ArrayExpression::Explicit {
contents,
length_span,
} => {
adaptive_iter(contents, |exp| exp.parse(ctx));
if let Some(length_span) = &length_span {
let ident = Ident::new(length_span.clone());
ctx.tokens.insert(
ctx.ident(&ident),
Token::from_parsed(
ParsedAstToken::Ident(ident.clone()),
SymbolKind::NumericLiteral,
),
);
}
}
ArrayExpression::Repeat { value, length } => {
value.parse(ctx);
length.parse(ctx);
}
}
}
}
impl Parse for FunctionApplicationExpression {
fn parse(&self, ctx: &ParseContext) {
// Don't collect applications of desugared operators due to mismatched ident lengths.
if !desugared_op(&self.call_path_binding.inner.prefixes) {
adaptive_iter(&self.call_path_binding.inner.prefixes, |ident| {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Module),
);
});
ctx.tokens.insert(
ctx.ident(&self.call_path_binding.inner.suffix),
Token::from_parsed(
ParsedAstToken::FunctionApplicationExpression(self.clone()),
SymbolKind::Function,
),
);
adaptive_iter(
&self.call_path_binding.type_arguments.to_vec(),
|type_arg| {
type_arg.parse(ctx);
},
);
}
adaptive_iter(&self.arguments, |exp| exp.parse(ctx));
}
}
impl Parse for ParsedDeclId<VariableDeclaration> {
fn parse(&self, ctx: &ParseContext) {
let var_decl = ctx.engines.pe().get_variable(self);
// Don't collect tokens if the idents are generated tuple or match desugaring names.
// The individual elements are handled in the subsequent VariableDeclaration's.
if !(is_generated_tuple_var_name(var_decl.name.as_str())
|| is_generated_any_match_expression_var_name(var_decl.name.as_str()))
{
let symbol_kind = if is_generated_destructured_struct_var_name(var_decl.name.as_str()) {
SymbolKind::Struct
} else {
SymbolKind::Variable
};
// We want to use the span from variable.name to construct a
// new Ident as the name_override_opt can be set to one of the
// const prefixes and not the actual token name.
let ident = if var_decl.name.is_raw_ident() {
Ident::new_with_raw(var_decl.name.span(), true)
} else {
Ident::new(var_decl.name.span())
};
ctx.tokens.insert(
ctx.ident(&ident),
Token::from_parsed(
ParsedAstToken::Declaration(Declaration::VariableDeclaration(*self)),
symbol_kind,
),
);
var_decl.type_ascription.parse(ctx);
}
var_decl.body.parse(ctx);
}
}
impl Parse for ParsedDeclId<FunctionDeclaration> {
fn parse(&self, ctx: &ParseContext) {
let token = Token::from_parsed(
ParsedAstToken::Declaration(Declaration::FunctionDeclaration(*self)),
SymbolKind::Function,
);
let fn_decl = ctx.engines.pe().get_function(self);
ctx.tokens.insert(ctx.ident(&fn_decl.name), token.clone());
adaptive_iter(&fn_decl.body.contents, |node| node.parse(ctx));
adaptive_iter(&fn_decl.parameters, |param| param.parse(ctx));
adaptive_iter(&fn_decl.type_parameters, |type_param| type_param.parse(ctx));
for (ident, constraints) in &fn_decl.where_clause {
ctx.tokens.insert(ctx.ident(ident), token.clone());
adaptive_iter(constraints, |constraint| constraint.parse(ctx));
}
fn_decl.return_type.parse(ctx);
fn_decl.attributes.parse(ctx);
}
}
impl Parse for ParsedDeclId<TraitDeclaration> {
fn parse(&self, ctx: &ParseContext) {
let trait_decl = ctx.engines.pe().get_trait(self);
ctx.tokens.insert(
ctx.ident(&trait_decl.name),
Token::from_parsed(
ParsedAstToken::Declaration(Declaration::TraitDeclaration(*self)),
SymbolKind::Trait,
),
);
adaptive_iter(&trait_decl.interface_surface, |item| match item {
TraitItem::TraitFn(trait_fn) => trait_fn.parse(ctx),
TraitItem::Constant(const_decl) => const_decl.parse(ctx),
TraitItem::Type(trait_type) => trait_type.parse(ctx),
TraitItem::Error(_, _) => {}
});
adaptive_iter(&trait_decl.methods, |func_dec| func_dec.parse(ctx));
adaptive_iter(&trait_decl.supertraits, |supertrait| supertrait.parse(ctx));
}
}
impl Parse for ParsedDeclId<StructDeclaration> {
fn parse(&self, ctx: &ParseContext) {
let struct_decl = ctx.engines.pe().get_struct(self);
ctx.tokens.insert(
ctx.ident(&struct_decl.name),
Token::from_parsed(
ParsedAstToken::Declaration(Declaration::StructDeclaration(*self)),
SymbolKind::Struct,
),
);
adaptive_iter(&struct_decl.fields, |field| field.parse(ctx));
adaptive_iter(&struct_decl.type_parameters, |type_param| {
type_param.parse(ctx);
});
struct_decl.attributes.parse(ctx);
}
}
impl Parse for ParsedDeclId<EnumDeclaration> {
fn parse(&self, ctx: &ParseContext) {
let enum_decl = ctx.engines.pe().get_enum(self);
ctx.tokens.insert(
ctx.ident(&enum_decl.name),
Token::from_parsed(
ParsedAstToken::Declaration(Declaration::EnumDeclaration(*self)),
SymbolKind::Enum,
),
);
adaptive_iter(&enum_decl.type_parameters, |type_param| {
type_param.parse(ctx);
});
adaptive_iter(&enum_decl.variants, |variant| variant.parse(ctx));
enum_decl.attributes.parse(ctx);
}
}
impl Parse for ParsedDeclId<ImplSelfOrTrait> {
fn parse(&self, ctx: &ParseContext) {
let impl_self_or_trait = ctx.engines.pe().get_impl_self_or_trait(self);
adaptive_iter(&impl_self_or_trait.trait_name.prefixes, |ident| {
ctx.tokens.insert(
ctx.ident(ident),
Token::from_parsed(ParsedAstToken::Ident(ident.clone()), SymbolKind::Module),
);
});
ctx.tokens.insert(
ctx.ident(&impl_self_or_trait.trait_name.suffix),
Token::from_parsed(
ParsedAstToken::Declaration(Declaration::ImplSelfOrTrait(*self)),
SymbolKind::Trait,
),
);
if let TypeInfo::Custom {
qualified_call_path,
type_arguments,
} = &&*ctx
.engines
.te()
.get(impl_self_or_trait.implementing_for.type_id)
{
ctx.tokens.insert(
ctx.ident(&qualified_call_path.call_path.suffix),
Token::from_parsed(
ParsedAstToken::Declaration(Declaration::ImplSelfOrTrait(*self)),
SymbolKind::Struct,
),
);
if let Some(type_arguments) = type_arguments {
adaptive_iter(type_arguments, |type_arg| type_arg.parse(ctx));
}
} else {
impl_self_or_trait.implementing_for.parse(ctx);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/traverse/typed_tree.rs | sway-lsp/src/traverse/typed_tree.rs | #![allow(dead_code)]
use crate::{
core::token::{
type_info_to_symbol_kind, SymbolKind, Token, TokenAstNode, TokenIdent, TypeDefinition,
TypedAstToken,
},
traverse::{adaptive_iter, Parse, ParseContext},
};
use dashmap::mapref::one::RefMut;
use rayon::iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator};
use sway_core::{
decl_engine::{id::DeclId, InterfaceDeclId},
language::{
parsed::{ImportType, QualifiedPathType, Supertrait},
ty::{self, GetDeclIdent, TyModule, TyReassignmentTarget, TySubmodule},
CallPathTree, CallPathType,
},
type_system::GenericArgument,
GenericTypeArgument, TraitConstraint, TypeId, TypeInfo,
};
use sway_error::handler::Handler;
use sway_types::{Ident, Named, Span, Spanned};
use sway_utils::iter_prefixes;
pub struct TypedTree<'a> {
ctx: &'a ParseContext<'a>,
}
impl<'a> TypedTree<'a> {
pub fn new(ctx: &'a ParseContext<'a>) -> Self {
Self { ctx }
}
pub fn traverse_node(&self, node: &ty::TyAstNode) {
node.parse(self.ctx);
}
/// Collects module names from the mod statements
pub fn collect_module_spans(&self, root: &TyModule) {
self.collect_module(root);
}
fn collect_module(&self, typed_module: &TyModule) {
for (
_,
TySubmodule {
module,
mod_name_span,
},
) in typed_module.submodules_recursive()
{
if let Some(mut token) = self
.ctx
.tokens
.try_get_mut_with_retry(&self.ctx.ident(&Ident::new(mod_name_span.clone())))
{
token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedModuleName);
token.type_def = Some(TypeDefinition::Ident(Ident::new(module.span.clone())));
}
self.collect_module(module);
}
}
}
impl Parse for ty::TyAstNode {
fn parse(&self, ctx: &ParseContext) {
match &self.content {
ty::TyAstNodeContent::Declaration(declaration) => declaration.parse(ctx),
ty::TyAstNodeContent::Expression(expression) => expression.parse(ctx),
ty::TyAstNodeContent::SideEffect(side_effect) => side_effect.parse(ctx),
ty::TyAstNodeContent::Error(_, _) => {}
};
}
}
impl Parse for ty::TyDecl {
fn parse(&self, ctx: &ParseContext) {
match self {
ty::TyDecl::VariableDecl(decl) => decl.parse(ctx),
ty::TyDecl::ConstantDecl(decl) => decl.parse(ctx),
ty::TyDecl::ConfigurableDecl(decl) => decl.parse(ctx),
ty::TyDecl::ConstGenericDecl(decl) => decl.parse(ctx),
ty::TyDecl::FunctionDecl(decl) => decl.parse(ctx),
ty::TyDecl::TraitDecl(decl) => decl.parse(ctx),
ty::TyDecl::StructDecl(decl) => decl.parse(ctx),
ty::TyDecl::EnumDecl(decl) => collect_enum(ctx, &decl.decl_id, self),
ty::TyDecl::EnumVariantDecl(decl) => collect_enum(ctx, decl.enum_ref.id(), self),
ty::TyDecl::ImplSelfOrTrait(decl) => decl.parse(ctx),
ty::TyDecl::AbiDecl(decl) => decl.parse(ctx),
ty::TyDecl::GenericTypeForFunctionScope(decl) => decl.parse(ctx),
ty::TyDecl::ErrorRecovery(_, _) => {}
ty::TyDecl::StorageDecl(decl) => decl.parse(ctx),
ty::TyDecl::TypeAliasDecl(decl) => decl.parse(ctx),
ty::TyDecl::TraitTypeDecl(decl) => decl.parse(ctx),
}
}
}
impl Parse for ty::TySideEffect {
fn parse(&self, ctx: &ParseContext) {
use ty::TySideEffectVariant::{IncludeStatement, UseStatement};
match &self.side_effect {
UseStatement(
use_statement @ ty::TyUseStatement {
call_path,
span: _,
import_type,
alias,
is_relative_to_package_root,
},
) => {
let full_path =
mod_path_to_full_path(call_path, *is_relative_to_package_root, ctx.namespace);
for (mod_path, ident) in iter_prefixes(&full_path).zip(&full_path) {
if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(ident)) {
token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedUseStatement(
use_statement.clone(),
));
if let Some(span) = ctx
.namespace
.module_from_absolute_path(mod_path)
.and_then(|tgt_submod| tgt_submod.span().clone())
{
token.type_def = Some(TypeDefinition::Ident(Ident::new(span)));
}
}
}
match &import_type {
ImportType::Item(item) => {
if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(item))
{
token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedUseStatement(
use_statement.clone(),
));
let mut symbol_kind = SymbolKind::Unknown;
let mut type_def = None;
if let Some(decl_ident) = ctx
.namespace
.module_from_absolute_path(&full_path)
.and_then(|module| {
module
.resolve_symbol(&Handler::default(), ctx.engines, item)
.ok()
})
.and_then(|(decl, _)| {
decl.expect_typed_ref().get_decl_ident(ctx.engines)
})
{
// Update the symbol kind to match the declarations symbol kind
if let Some(decl) =
ctx.tokens.try_get(&ctx.ident(&decl_ident)).try_unwrap()
{
symbol_kind = decl.value().kind.clone();
}
type_def = Some(TypeDefinition::Ident(decl_ident));
}
token.kind = symbol_kind.clone();
token.type_def.clone_from(&type_def);
// the alias should take on the same symbol kind and type definition
if let Some(alias) = alias {
if let Some(mut token) =
ctx.tokens.try_get_mut_with_retry(&ctx.ident(alias))
{
token.ast_node = TokenAstNode::Typed(
TypedAstToken::TypedUseStatement(use_statement.clone()),
);
token.kind = symbol_kind;
token.type_def = type_def;
}
}
}
}
ImportType::SelfImport(span) => {
if let Some(mut token) = ctx
.tokens
.try_get_mut_with_retry(&ctx.ident(&Ident::new(span.clone())))
{
token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedUseStatement(
use_statement.clone(),
));
if let Some(span) = ctx
.namespace
.module_from_absolute_path(&full_path)
.and_then(|tgt_submod| tgt_submod.span().clone())
{
token.type_def = Some(TypeDefinition::Ident(Ident::new(span)));
}
}
}
ImportType::Star => {}
}
}
IncludeStatement(
include_statement @ ty::TyIncludeStatement {
span: _,
mod_name,
visibility: _,
},
) => {
if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(mod_name)) {
token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedIncludeStatement(
include_statement.clone(),
));
if let Some(span) = ctx
.namespace
.root_module()
.submodule(std::slice::from_ref(mod_name))
.and_then(|tgt_submod| tgt_submod.span().clone())
{
token.type_def = Some(TypeDefinition::Ident(Ident::new(span)));
}
}
}
}
}
}
impl Parse for ty::TyExpression {
    /// Collects tokens for a typed expression and recurses into its
    /// sub-expressions. Each variant updates the matching entries in the token
    /// map with the typed AST node and, where a definition is known, a
    /// `TypeDefinition` used for go-to-definition.
    fn parse(&self, ctx: &ParseContext) {
        match &self.expression {
            ty::TyExpressionVariant::Literal { .. } => {
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&Ident::new(self.span.clone())))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                }
            }
            ty::TyExpressionVariant::FunctionApplication {
                call_path,
                contract_call_params,
                arguments,
                fn_ref,
                type_binding,
                method_target,
                ..
            } => {
                if let Some(type_binding) = type_binding {
                    adaptive_iter(&type_binding.type_arguments.to_vec(), |type_arg| {
                        collect_generic_argument(ctx, type_arg);
                    });
                }
                // Name of the type this function is implemented on, if any
                // (used to special-case the last call-path prefix below).
                let implementing_type_name = (*ctx.engines.de().get_function(fn_ref))
                    .clone()
                    .implementing_type
                    .and_then(|impl_type| impl_type.get_decl_ident(ctx.engines));
                let prefixes = if let Some(impl_type_name) = implementing_type_name {
                    // the last prefix of the call path is not a module but a type
                    if let Some((last, prefixes)) = call_path.prefixes.split_last() {
                        if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(last))
                        {
                            if let Some(method_target) = method_target {
                                token.ast_node = TokenAstNode::Typed(TypedAstToken::Ident(
                                    impl_type_name.clone(),
                                ));
                                token.type_def = Some(TypeDefinition::TypeId(*method_target));
                            }
                        }
                        prefixes
                    } else {
                        &call_path.prefixes
                    }
                } else {
                    &call_path.prefixes
                };
                collect_call_path_prefixes(ctx, prefixes, call_path.callpath_type);
                // The call-path suffix resolves to the function declaration's name.
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&call_path.suffix))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    let function_decl = ctx.engines.de().get_function(fn_ref);
                    token.type_def = Some(TypeDefinition::Ident(function_decl.name.clone()));
                }
                contract_call_params.values().for_each(|exp| exp.parse(ctx));
                adaptive_iter(arguments, |(ident, exp)| {
                    if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(ident)) {
                        token.ast_node = TokenAstNode::Typed(TypedAstToken::Ident(ident.clone()));
                    }
                    exp.parse(ctx);
                });
                let function_decl = ctx.engines.de().get_function(fn_ref);
                adaptive_iter(&function_decl.body.contents, |node| node.parse(ctx));
            }
            ty::TyExpressionVariant::LazyOperator { lhs, rhs, .. } => {
                lhs.parse(ctx);
                rhs.parse(ctx);
            }
            ty::TyExpressionVariant::ConstantExpression {
                ref decl,
                span,
                call_path,
                ..
            } => {
                collect_const_decl(ctx, decl, Some(&Ident::new(span.clone())));
                if let Some(call_path) = call_path {
                    collect_call_path_prefixes(ctx, &call_path.prefixes, call_path.callpath_type);
                }
            }
            ty::TyExpressionVariant::ConfigurableExpression {
                ref decl,
                span,
                call_path,
                ..
            } => {
                collect_configurable_decl(ctx, decl, Some(&Ident::new(span.clone())));
                if let Some(call_path) = call_path {
                    collect_call_path_prefixes(ctx, &call_path.prefixes, call_path.callpath_type);
                }
            }
            ty::TyExpressionVariant::ConstGenericExpression {
                ref decl,
                span,
                call_path,
                ..
            } => {
                collect_const_generic_decl(ctx, decl, Some(&Ident::new(span.clone())));
                collect_call_path_prefixes(ctx, &call_path.prefixes, call_path.callpath_type);
            }
            ty::TyExpressionVariant::VariableExpression {
                ref name,
                ref span,
                ref call_path,
                ..
            } => {
                if let Some(call_path) = call_path {
                    collect_call_path_prefixes(ctx, &call_path.prefixes, call_path.callpath_type);
                }
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&Ident::new(span.clone())))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    token.type_def = Some(TypeDefinition::Ident(name.clone()));
                }
            }
            ty::TyExpressionVariant::Tuple { fields } => {
                adaptive_iter(fields, |field| field.parse(ctx));
            }
            ty::TyExpressionVariant::ArrayExplicit {
                elem_type: _,
                contents,
            } => {
                adaptive_iter(contents, |exp| exp.parse(ctx));
            }
            ty::TyExpressionVariant::ArrayRepeat {
                elem_type: _,
                value,
                length,
            } => {
                value.parse(ctx);
                length.parse(ctx);
            }
            ty::TyExpressionVariant::ArrayIndex { prefix, index } => {
                prefix.parse(ctx);
                index.parse(ctx);
            }
            ty::TyExpressionVariant::StructExpression {
                fields,
                call_path_binding,
                ..
            } => {
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&call_path_binding.inner.suffix))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    token.type_def = Some(TypeDefinition::TypeId(self.return_type));
                }
                adaptive_iter(&call_path_binding.type_arguments.to_vec(), |type_arg| {
                    collect_generic_argument(ctx, type_arg);
                });
                collect_call_path_prefixes(
                    ctx,
                    &call_path_binding.inner.prefixes,
                    call_path_binding.inner.callpath_type,
                );
                // Each instantiated field resolves to the matching field of the
                // struct declaration, when that declaration can be found.
                adaptive_iter(fields, |field| {
                    if let Some(mut token) =
                        ctx.tokens.try_get_mut_with_retry(&ctx.ident(&field.name))
                    {
                        token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedExpression(
                            field.value.clone(),
                        ));
                        if let Some(struct_decl) = &ctx
                            .tokens
                            .struct_declaration_of_type_id(ctx.engines, &self.return_type)
                        {
                            struct_decl.fields.iter().for_each(|decl_field| {
                                if decl_field.name == field.name {
                                    token.type_def =
                                        Some(TypeDefinition::Ident(decl_field.name.clone()));
                                }
                            });
                        }
                    }
                    field.value.parse(ctx);
                });
            }
            ty::TyExpressionVariant::CodeBlock(code_block) => {
                adaptive_iter(&code_block.contents, |node| node.parse(ctx));
            }
            ty::TyExpressionVariant::FunctionParameter => {}
            ty::TyExpressionVariant::MatchExp {
                desugared,
                scrutinees,
            } => {
                // Order is important here, the expression must be processed first otherwise the
                // scrutinee information will get overwritten by processing the underlying tree of
                // conditions
                desugared.parse(ctx);
                adaptive_iter(scrutinees, |s| s.parse(ctx));
            }
            ty::TyExpressionVariant::IfExp {
                condition,
                then,
                r#else,
            } => {
                condition.parse(ctx);
                then.parse(ctx);
                if let Some(r#else) = r#else {
                    r#else.parse(ctx);
                }
            }
            ty::TyExpressionVariant::AsmExpression { registers, .. } => {
                adaptive_iter(registers, |r| {
                    if let Some(initializer) = &r.initializer {
                        initializer.parse(ctx);
                    }
                });
            }
            ty::TyExpressionVariant::StructFieldAccess {
                prefix,
                field_to_access,
                field_instantiation_span,
                ..
            } => {
                prefix.parse(ctx);
                if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(
                    &ctx.ident(&Ident::new(field_instantiation_span.clone())),
                ) {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    token.type_def = Some(TypeDefinition::Ident(field_to_access.name.clone()));
                }
            }
            ty::TyExpressionVariant::TupleElemAccess {
                prefix,
                elem_to_access_span,
                ..
            } => {
                prefix.parse(ctx);
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&Ident::new(elem_to_access_span.clone())))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                }
            }
            ty::TyExpressionVariant::EnumInstantiation {
                variant_name,
                variant_instantiation_span,
                enum_ref,
                contents,
                call_path_binding,
                ..
            } => {
                // The call-path suffix resolves to the enum declaration itself;
                // the variant span (below) resolves to the specific variant.
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&call_path_binding.inner.suffix))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    token.type_def = Some(TypeDefinition::Ident(enum_ref.name().clone()));
                }
                adaptive_iter(&call_path_binding.type_arguments.to_vec(), |type_arg| {
                    collect_generic_argument(ctx, type_arg);
                });
                collect_call_path_prefixes(
                    ctx,
                    &call_path_binding.inner.prefixes,
                    call_path_binding.inner.callpath_type,
                );
                if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(
                    &ctx.ident(&Ident::new(variant_instantiation_span.clone())),
                ) {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    token.type_def = Some(TypeDefinition::Ident(variant_name.clone()));
                }
                if let Some(contents) = contents.as_deref() {
                    contents.parse(ctx);
                }
            }
            ty::TyExpressionVariant::AbiCast {
                abi_name, address, ..
            } => {
                collect_call_path_prefixes(ctx, &abi_name.prefixes, abi_name.callpath_type);
                if let Some(mut token) = ctx
                    .tokens
                    .try_get_mut_with_retry(&ctx.ident(&abi_name.suffix))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                    // Resolve the ABI declaration through the namespace so the
                    // cast's name links back to its definition.
                    let full_path = mod_path_to_full_path(&abi_name.prefixes, false, ctx.namespace);
                    if let Some(abi_def_ident) = ctx
                        .namespace
                        .module_from_absolute_path(&full_path)
                        .and_then(|module| {
                            module
                                .resolve_symbol(&Handler::default(), ctx.engines, &abi_name.suffix)
                                .ok()
                        })
                        .and_then(|(decl, _)| decl.expect_typed_ref().get_decl_ident(ctx.engines))
                    {
                        token.type_def = Some(TypeDefinition::Ident(abi_def_ident));
                    }
                }
                address.parse(ctx);
            }
            ty::TyExpressionVariant::StorageAccess(storage_access) => {
                // collect storage keyword
                if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(
                    &ctx.ident(&Ident::new(storage_access.storage_keyword_span.clone())),
                ) {
                    token.ast_node = TokenAstNode::Typed(TypedAstToken::TypedStorageAccess(
                        storage_access.clone(),
                    ));
                    if let Some(storage) = ctx
                        .namespace
                        .root_module()
                        .root_items()
                        .get_declared_storage(ctx.engines.de())
                    {
                        token.type_def =
                            Some(TypeDefinition::Ident(storage.storage_keyword.clone()));
                    }
                }
                if let Some((head_field, tail_fields)) = storage_access.fields.split_first() {
                    // collect the first ident as a field of the storage definition
                    if let Some(mut token) = ctx
                        .tokens
                        .try_get_mut_with_retry(&ctx.ident(&head_field.name))
                    {
                        token.ast_node = TokenAstNode::Typed(
                            TypedAstToken::TypedStorageAccessDescriptor(head_field.clone()),
                        );
                        if let Some(storage_field) = ctx
                            .namespace
                            .root_module()
                            .root_items()
                            .get_declared_storage(ctx.engines.de())
                            .and_then(|storage| {
                                storage
                                    .fields
                                    .into_iter()
                                    .find(|f| f.name.as_str() == head_field.name.as_str())
                            })
                        {
                            token.type_def =
                                Some(TypeDefinition::Ident(storage_field.name.clone()));
                        }
                    }
                    // collect the rest of the idents as fields of their respective types
                    tail_fields
                        .par_iter()
                        .zip(storage_access.fields.par_iter().map(|f| f.type_id))
                        .for_each(|(field, container_type_id)| {
                            if let Some(mut token) =
                                ctx.tokens.try_get_mut_with_retry(&ctx.ident(&field.name))
                            {
                                token.ast_node =
                                    TokenAstNode::Typed(TypedAstToken::Ident(field.name.clone()));
                                match &*ctx.engines.te().get(container_type_id) {
                                    TypeInfo::Struct(decl_ref) => {
                                        if let Some(field_name) = ctx
                                            .engines
                                            .de()
                                            .get_struct(decl_ref)
                                            .fields
                                            .par_iter()
                                            .find_any(|struct_field| {
                                                struct_field.name.as_str() == field.name.as_str()
                                            })
                                            .map(|struct_field| struct_field.name.clone())
                                        {
                                            token.type_def =
                                                Some(TypeDefinition::Ident(field_name));
                                        }
                                    }
                                    _ => {
                                        token.type_def =
                                            Some(TypeDefinition::TypeId(field.type_id));
                                    }
                                }
                            }
                        });
                }
            }
            ty::TyExpressionVariant::IntrinsicFunction(kind) => {
                kind.parse(ctx);
            }
            ty::TyExpressionVariant::AbiName { .. } => {}
            ty::TyExpressionVariant::EnumTag { exp } => {
                exp.parse(ctx);
            }
            ty::TyExpressionVariant::UnsafeDowncast {
                exp,
                variant,
                call_path_decl: _,
            } => {
                exp.parse(ctx);
                if let Some(mut token) =
                    ctx.tokens.try_get_mut_with_retry(&ctx.ident(&variant.name))
                {
                    token.ast_node =
                        TokenAstNode::Typed(TypedAstToken::TypedExpression(self.clone()));
                }
            }
            ty::TyExpressionVariant::WhileLoop {
                body, condition, ..
            } => {
                condition.parse(ctx);
                adaptive_iter(&body.contents, |node| node.parse(ctx));
            }
            ty::TyExpressionVariant::ForLoop { desugared, .. } => {
                desugared.parse(ctx);
            }
            ty::TyExpressionVariant::Break | ty::TyExpressionVariant::Continue => (),
            ty::TyExpressionVariant::Reassignment(reassignment) => {
                reassignment.parse(ctx);
            }
            ty::TyExpressionVariant::ImplicitReturn(exp)
            | ty::TyExpressionVariant::Return(exp)
            | ty::TyExpressionVariant::Panic(exp)
            | ty::TyExpressionVariant::Ref(exp)
            | ty::TyExpressionVariant::Deref(exp) => {
                exp.parse(ctx);
            }
        }
    }
}
impl Parse for ty::TyVariableDecl {
    /// Records the declared variable name as a typed declaration token, then
    /// walks the type ascription's call path (if present) and the initializer.
    fn parse(&self, ctx: &ParseContext) {
        let name_ident = ctx.ident(&self.name);
        if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&name_ident) {
            let typed_decl = TypedAstToken::TypedDeclaration(ty::TyDecl::VariableDecl(
                Box::new(self.clone()),
            ));
            token.ast_node = TokenAstNode::Typed(typed_decl);
            token.type_def = Some(TypeDefinition::Ident(self.name.clone()));
        }
        if let Some(tree) = &self.type_ascription.call_path_tree {
            collect_call_type_argument_path_tree(ctx, tree, &self.type_ascription);
        }
        self.body.parse(ctx);
    }
}
impl Parse for ty::ConstantDecl {
    /// Resolves the constant from the decl engine and collects its tokens.
    fn parse(&self, ctx: &ParseContext) {
        collect_const_decl(ctx, &ctx.engines.de().get_constant(&self.decl_id), None);
    }
}
impl Parse for ty::ConfigurableDecl {
    /// Resolves the configurable from the decl engine and collects its tokens.
    fn parse(&self, ctx: &ParseContext) {
        collect_configurable_decl(ctx, &ctx.engines.de().get_configurable(&self.decl_id), None);
    }
}
impl Parse for ty::ConstGenericDecl {
    /// Resolves the const generic from the decl engine and collects its tokens.
    fn parse(&self, ctx: &ParseContext) {
        collect_const_generic_decl(ctx, &ctx.engines.de().get_const_generic(&self.decl_id), None);
    }
}
impl Parse for ty::TraitTypeDecl {
    /// Resolves the associated type from the decl engine and collects its tokens.
    fn parse(&self, ctx: &ParseContext) {
        let decl = ctx.engines.de().get_type(&self.decl_id);
        collect_trait_type_decl(ctx, &decl, &decl.span);
    }
}
impl Parse for ty::FunctionDecl {
    /// Collects tokens for a function declaration: its name, body, parameters,
    /// generic type parameters, return type, and where-clause constraints.
    fn parse(&self, ctx: &ParseContext) {
        let func_decl = ctx.engines.de().get_function(&self.decl_id);
        let typed_token = TypedAstToken::TypedFunctionDeclaration((*func_decl).clone());
        // The function-name token is its own definition.
        if let Some(mut token) = ctx
            .tokens
            .try_get_mut_with_retry(&ctx.ident(&func_decl.name))
        {
            token.ast_node = TokenAstNode::Typed(typed_token.clone());
            token.type_def = Some(TypeDefinition::Ident(func_decl.name.clone()));
        }
        adaptive_iter(&func_decl.body.contents, |node| node.parse(ctx));
        adaptive_iter(&func_decl.parameters, |param| param.parse(ctx));
        adaptive_iter(&func_decl.type_parameters, |type_param| {
            if let Some(type_param) = type_param.as_type_parameter() {
                collect_type_id(
                    ctx,
                    type_param.type_id,
                    &typed_token,
                    type_param.name.span(),
                );
            }
        });
        collect_generic_type_argument(ctx, &func_decl.return_type);
        // Where-clause idents link back to the generic type parameter of the
        // same name, when one exists.
        adaptive_iter(&func_decl.where_clause, |(ident, trait_constraints)| {
            adaptive_iter(trait_constraints, |constraint| {
                collect_trait_constraint(ctx, constraint);
            });
            if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(ident)) {
                token.ast_node = TokenAstNode::Typed(typed_token.clone());
                if let Some(param_decl_ident) = func_decl
                    .type_parameters
                    .par_iter()
                    .filter_map(|x| x.as_type_parameter())
                    .find_any(|type_param| type_param.name.as_str() == ident.as_str())
                    .map(|type_param| type_param.name.clone())
                {
                    token.type_def = Some(TypeDefinition::Ident(param_decl_ident));
                }
            }
        });
    }
}
impl Parse for ty::TraitDecl {
fn parse(&self, ctx: &ParseContext) {
let trait_decl = ctx.engines.de().get_trait(&self.decl_id);
if let Some(mut token) = ctx
.tokens
.try_get_mut_with_retry(&ctx.ident(&trait_decl.name))
{
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/traverse/dependency.rs | sway-lsp/src/traverse/dependency.rs | use crate::{
core::token::{ParsedAstToken, SymbolKind, Token, TokenAstNode, TypeDefinition, TypedAstToken},
traverse::ParseContext,
};
use sway_core::language::{
parsed::{AstNode, AstNodeContent, Declaration},
ty,
};
use sway_types::Named;
/// Insert Declaration tokens into the TokenMap.
pub fn collect_parsed_declaration(node: &AstNode, ctx: &ParseContext) {
    if let AstNodeContent::Declaration(declaration) = &node.content {
        let pe = ctx.engines.pe();
        // Map the declaration to the ident it introduces and its symbol kind;
        // declaration kinds without a single defining ident are skipped.
        let (ident, symbol_kind) = match declaration {
            Declaration::VariableDeclaration(decl_id) => {
                (pe.get_variable(decl_id).name.clone(), SymbolKind::Variable)
            }
            Declaration::StructDeclaration(decl_id) => {
                (pe.get_struct(decl_id).name.clone(), SymbolKind::Struct)
            }
            Declaration::TraitDeclaration(decl_id) => {
                (pe.get_trait(decl_id).name.clone(), SymbolKind::Trait)
            }
            Declaration::FunctionDeclaration(decl_id) => {
                (pe.get_function(decl_id).name.clone(), SymbolKind::Function)
            }
            Declaration::ConstantDeclaration(decl_id) => {
                (pe.get_constant(decl_id).name.clone(), SymbolKind::Const)
            }
            Declaration::EnumDeclaration(decl_id) => {
                (pe.get_enum(decl_id).name.clone(), SymbolKind::Enum)
            }
            _ => return,
        };
        let token = Token::from_parsed(
            ParsedAstToken::Declaration(declaration.clone()),
            symbol_kind,
        );
        ctx.tokens.insert(ctx.ident(&ident), token);
    }
}
/// Insert TypedDeclaration tokens into the TokenMap.
pub fn collect_typed_declaration(node: &ty::TyAstNode, ctx: &ParseContext) {
    if let ty::TyAstNodeContent::Declaration(declaration) = &node.content {
        let de = ctx.engines.de();
        // Resolve the declaration's defining ident; kinds without a single
        // defining ident are skipped.
        let ident = match declaration {
            ty::TyDecl::ConstantDecl(ty::ConstantDecl { decl_id }) => {
                de.get_constant(decl_id).name().clone()
            }
            ty::TyDecl::FunctionDecl(ty::FunctionDecl { decl_id }) => {
                de.get_function(decl_id).name().clone()
            }
            ty::TyDecl::TraitDecl(ty::TraitDecl { decl_id }) => {
                de.get_trait(decl_id).name().clone()
            }
            ty::TyDecl::StructDecl(ty::StructDecl { decl_id }) => {
                de.get_struct(decl_id).name().clone()
            }
            ty::TyDecl::EnumDecl(ty::EnumDecl { decl_id }) => {
                de.get_enum(decl_id).name().clone()
            }
            ty::TyDecl::VariableDecl(variable) => variable.name.clone(),
            _ => return,
        };
        if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&ctx.ident(&ident)) {
            token.ast_node =
                TokenAstNode::Typed(TypedAstToken::TypedDeclaration(declaration.clone()));
            token.type_def = Some(TypeDefinition::Ident(ident));
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/traverse/mod.rs | sway-lsp/src/traverse/mod.rs | use crate::core::{token::TokenIdent, token_map::TokenMap};
use rayon_cond::CondIterator;
use sway_core::{namespace::Package, Engines};
pub(crate) mod dependency;
pub(crate) mod lexed_tree;
pub(crate) mod parsed_tree;
pub(crate) mod typed_tree;
/// Shared, read-only state threaded through every AST traversal.
pub struct ParseContext<'a> {
    // Token map that traversals populate.
    tokens: &'a TokenMap,
    pub engines: &'a Engines,
    // Package namespace used to resolve symbols and modules.
    namespace: &'a Package,
}
impl<'a> ParseContext<'a> {
    /// Creates a new context borrowing the token map, engines, and namespace.
    pub fn new(tokens: &'a TokenMap, engines: &'a Engines, namespace: &'a Package) -> Self {
        Self {
            tokens,
            engines,
            namespace,
        }
    }

    /// Converts an [`sway_types::Ident`] into the [`TokenIdent`] key form used
    /// by the token map, resolving the source path via the source engine.
    pub fn ident(&self, ident: &sway_types::Ident) -> TokenIdent {
        TokenIdent::new(ident, self.engines.se())
    }
}
/// The `Parse` trait is used to parse tokens from an AST during traversal.
pub trait Parse {
    /// Collects tokens for `self` (and its children) into `ctx.tokens`.
    fn parse(&self, ctx: &ParseContext);
}
/// Determines the threshold a collection must meet to be processed in parallel.
const PARALLEL_THRESHOLD: usize = 8;
/// Iterates over items, choosing parallel or sequential execution based on size.
pub fn adaptive_iter<T, F>(items: &[T], action: F)
where
    T: Sync + Send,          // Required for parallel processing
    F: Fn(&T) + Sync + Send, // Action to be applied to each item
{
    // Iterate in parallel only when the slice is large enough to amortize the
    // scheduling overhead; otherwise fall back to a sequential pass.
    CondIterator::new(items, items.len() >= PARALLEL_THRESHOLD).for_each(action);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/traverse/lexed_tree.rs | sway-lsp/src/traverse/lexed_tree.rs | use crate::{
core::token::{ParsedAstToken, SymbolKind, Token},
traverse::{adaptive_iter, Parse, ParseContext},
};
use rayon::iter::{ParallelBridge, ParallelIterator};
use sway_ast::{
assignable::ElementAccess, attribute::Annotated, expr::LoopControlFlow, ty::TyTupleDescriptor,
Assignable, CodeBlockContents, ConfigurableField, Expr, ExprArrayDescriptor, ExprStructField,
ExprTupleDescriptor, FnArg, FnArgs, FnSignature, IfCondition, IfExpr, ItemAbi,
ItemConfigurable, ItemConst, ItemEnum, ItemFn, ItemImpl, ItemImplItem, ItemKind, ItemStorage,
ItemStruct, ItemTrait, ItemTypeAlias, ItemUse, MatchBranchKind, ModuleKind, Pattern,
PatternStructField, Statement, StatementLet, StorageEntry, StorageField, TraitType, Ty,
TypeField, UseTree,
};
use sway_core::language::{lexed::LexedProgram, HasSubmodules};
use sway_types::{Ident, Span, Spanned};
pub struct LexedTree<'a> {
    ctx: &'a ParseContext<'a>,
}

impl<'a> LexedTree<'a> {
    pub fn new(ctx: &'a ParseContext<'a>) -> Self {
        Self { ctx }
    }

    /// Parses a single annotated top-level item.
    pub fn traverse_node(&self, node: &Annotated<ItemKind>) {
        node.value.parse(self.ctx);
    }

    /// Collects the program-type keyword (`script`/`contract`/`predicate`/
    /// `library`) of the root module and of every submodule, recursively.
    pub fn collect_module_kinds(&self, lexed_program: &LexedProgram) {
        insert_module_kind(self.ctx, &lexed_program.root.tree.value.kind);
        for (_, dep) in lexed_program.root.submodules_recursive() {
            insert_module_kind(self.ctx, &dep.module.tree.value.kind);
        }
    }
}
/// Records the program-type keyword token carried by a module kind.
fn insert_module_kind(ctx: &ParseContext, kind: &ModuleKind) {
    // Every module kind carries exactly one program-type keyword; pull out its
    // span and insert a single token for it.
    let span = match kind {
        ModuleKind::Script { script_token } => script_token.span(),
        ModuleKind::Contract { contract_token } => contract_token.span(),
        ModuleKind::Predicate { predicate_token } => predicate_token.span(),
        ModuleKind::Library { library_token, .. } => library_token.span(),
    };
    insert_program_type_keyword(ctx, span);
}
/// Inserts a `SymbolKind::ProgramTypeKeyword` token covering `span`.
fn insert_program_type_keyword(ctx: &ParseContext, span: Span) {
    let ident = Ident::new(span);
    ctx.tokens.insert(
        ctx.ident(&ident),
        Token::from_parsed(
            ParsedAstToken::Keyword(ident.clone()),
            SymbolKind::ProgramTypeKeyword,
        ),
    );
}

/// Inserts a plain `SymbolKind::Keyword` token covering `span`.
fn insert_keyword(ctx: &ParseContext, span: Span) {
    let ident = Ident::new(span);
    ctx.tokens.insert(
        ctx.ident(&ident),
        Token::from_parsed(ParsedAstToken::Keyword(ident.clone()), SymbolKind::Keyword),
    );
}
impl Parse for ItemKind {
    /// Dispatches token collection to the concrete item kind; parse-error
    /// items collect nothing.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            ItemKind::Submodule(submod) => insert_keyword(ctx, submod.mod_token.span()),
            ItemKind::Use(item_use) => item_use.parse(ctx),
            ItemKind::Struct(item_struct) => item_struct.parse(ctx),
            ItemKind::Enum(item_enum) => item_enum.parse(ctx),
            ItemKind::Fn(item_func) => item_func.parse(ctx),
            ItemKind::Trait(item_trait) => item_trait.parse(ctx),
            ItemKind::Impl(item_impl) => item_impl.parse(ctx),
            ItemKind::Abi(item_abi) => item_abi.parse(ctx),
            ItemKind::Const(item_const) => item_const.parse(ctx),
            ItemKind::Storage(item_storage) => item_storage.parse(ctx),
            ItemKind::Configurable(item_configurable) => item_configurable.parse(ctx),
            ItemKind::TypeAlias(item_type_alias) => item_type_alias.parse(ctx),
            ItemKind::Error(_, _) => {}
        }
    }
}
impl Parse for Expr {
    /// Collects keyword tokens appearing inside an expression and recurses
    /// into sub-expressions. Variants without keywords or children fall
    /// through to the catch-all and collect nothing.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            Expr::AbiCast { abi_token, args } => {
                insert_keyword(ctx, abi_token.span());
                args.get().address.parse(ctx);
            }
            Expr::Struct { fields, .. } => {
                fields
                    .get()
                    .into_iter()
                    .par_bridge()
                    .for_each(|field| field.parse(ctx));
            }
            Expr::Tuple(tuple) => {
                tuple.get().parse(ctx);
            }
            Expr::Parens(parens) => {
                parens.get().parse(ctx);
            }
            Expr::Block(block) => {
                block.get().parse(ctx);
            }
            Expr::Array(array) => {
                array.get().parse(ctx);
            }
            Expr::Return {
                return_token,
                expr_opt,
            } => {
                insert_keyword(ctx, return_token.span());
                if let Some(expr) = expr_opt {
                    expr.parse(ctx);
                }
            }
            Expr::Panic {
                panic_token,
                expr_opt,
            } => {
                insert_keyword(ctx, panic_token.span());
                if let Some(expr) = expr_opt {
                    expr.parse(ctx);
                }
            }
            Expr::If(if_expr) => {
                if_expr.parse(ctx);
            }
            Expr::Match {
                match_token,
                value,
                branches,
            } => {
                insert_keyword(ctx, match_token.span());
                value.parse(ctx);
                adaptive_iter(branches.get(), |branch| {
                    branch.pattern.parse(ctx);
                    branch.kind.parse(ctx);
                });
            }
            Expr::While {
                while_token,
                condition,
                block,
            } => {
                insert_keyword(ctx, while_token.span());
                condition.parse(ctx);
                block.get().parse(ctx);
            }
            Expr::FuncApp { func, args } => {
                func.parse(ctx);
                args.get()
                    .into_iter()
                    .par_bridge()
                    .for_each(|expr| expr.parse(ctx));
            }
            Expr::Index { target, arg } => {
                target.parse(ctx);
                arg.get().parse(ctx);
            }
            Expr::MethodCall {
                target,
                contract_args_opt,
                args,
                ..
            } => {
                target.parse(ctx);
                if let Some(contract_args) = contract_args_opt {
                    contract_args
                        .get()
                        .into_iter()
                        .par_bridge()
                        .for_each(|expr| expr.parse(ctx));
                }
                args.get()
                    .into_iter()
                    .par_bridge()
                    .for_each(|expr| expr.parse(ctx));
            }
            Expr::FieldProjection { target, .. } => {
                target.parse(ctx);
            }
            Expr::TupleFieldProjection { target, .. } => {
                target.parse(ctx);
            }
            Expr::Ref {
                mut_token, expr, ..
            } => {
                if let Some(mut_token) = mut_token {
                    insert_keyword(ctx, mut_token.span());
                }
                expr.parse(ctx);
            }
            Expr::Deref { expr, .. } => {
                expr.parse(ctx);
            }
            Expr::Not { expr, .. } => {
                expr.parse(ctx);
            }
            // All binary operators only need their operands traversed.
            Expr::Mul { lhs, rhs, .. }
            | Expr::Div { lhs, rhs, .. }
            | Expr::Pow { lhs, rhs, .. }
            | Expr::Modulo { lhs, rhs, .. }
            | Expr::Add { lhs, rhs, .. }
            | Expr::Sub { lhs, rhs, .. }
            | Expr::Shl { lhs, rhs, .. }
            | Expr::Shr { lhs, rhs, .. }
            | Expr::BitAnd { lhs, rhs, .. }
            | Expr::BitXor { lhs, rhs, .. }
            | Expr::BitOr { lhs, rhs, .. }
            | Expr::Equal { lhs, rhs, .. }
            | Expr::NotEqual { lhs, rhs, .. }
            | Expr::LessThan { lhs, rhs, .. }
            | Expr::GreaterThan { lhs, rhs, .. }
            | Expr::LessThanEq { lhs, rhs, .. }
            | Expr::GreaterThanEq { lhs, rhs, .. }
            | Expr::LogicalAnd { lhs, rhs, .. }
            | Expr::LogicalOr { lhs, rhs, .. } => {
                lhs.parse(ctx);
                rhs.parse(ctx);
            }
            Expr::Reassignment {
                assignable, expr, ..
            } => {
                assignable.parse(ctx);
                expr.parse(ctx);
            }
            Expr::Break { break_token } => {
                insert_keyword(ctx, break_token.span());
            }
            Expr::Continue { continue_token } => {
                insert_keyword(ctx, continue_token.span());
            }
            _ => {}
        }
    }
}
impl Parse for ItemUse {
    /// Collects the optional visibility and `use` keywords, then walks the
    /// import tree.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.use_token.span());
        self.tree.parse(ctx);
    }
}
impl Parse for ItemStruct {
    /// Collects the optional visibility, `struct`, and `where` keywords, then
    /// parses each field.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.struct_token.span());
        if let Some(where_clause_opt) = &self.where_clause_opt {
            insert_keyword(ctx, where_clause_opt.where_token.span());
        }
        self.fields
            .get()
            .into_iter()
            .par_bridge()
            .for_each(|field| field.value.parse(ctx));
    }
}
impl Parse for ItemEnum {
    /// Collects the optional visibility, `enum`, and `where` keywords, then
    /// parses each variant's type field.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.enum_token.span());
        if let Some(where_clause_opt) = &self.where_clause_opt {
            insert_keyword(ctx, where_clause_opt.where_token.span());
        }
        self.fields
            .get()
            .into_iter()
            .par_bridge()
            .for_each(|field| field.value.parse(ctx));
    }
}
impl Parse for ItemFn {
    /// Parses the function's signature and body.
    fn parse(&self, ctx: &ParseContext) {
        self.fn_signature.parse(ctx);
        self.body.get().parse(ctx);
    }
}
impl Parse for ItemTrait {
    /// Collects trait keywords, then parses interface items and any provided
    /// default definitions.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.trait_token.span());
        if let Some(where_clause_opt) = &self.where_clause_opt {
            insert_keyword(ctx, where_clause_opt.where_token.span());
        }
        adaptive_iter(self.trait_items.get(), |annotated| match &annotated.value {
            sway_ast::ItemTraitItem::Fn(fn_sig, _) => fn_sig.parse(ctx),
            sway_ast::ItemTraitItem::Const(item_const, _) => item_const.parse(ctx),
            sway_ast::ItemTraitItem::Type(item_type, _) => item_type.parse(ctx),
            sway_ast::ItemTraitItem::Error(_, _) => {}
        });
        if let Some(trait_defs_opt) = &self.trait_defs_opt {
            adaptive_iter(trait_defs_opt.get(), |item| item.value.parse(ctx));
        }
    }
}
impl Parse for ItemImpl {
    /// Collects `impl`/`for`/`where` keywords, parses the implemented type,
    /// and then each item in the impl block.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.impl_token.span());
        if let Some((.., for_token)) = &self.trait_opt {
            insert_keyword(ctx, for_token.span());
        }
        self.ty.parse(ctx);
        if let Some(where_clause_opt) = &self.where_clause_opt {
            insert_keyword(ctx, where_clause_opt.where_token.span());
        }
        adaptive_iter(self.contents.get(), |item| match &item.value {
            ItemImplItem::Fn(fn_decl) => fn_decl.parse(ctx),
            ItemImplItem::Const(const_decl) => const_decl.parse(ctx),
            ItemImplItem::Type(type_decl) => type_decl.parse(ctx),
        });
    }
}
impl Parse for ItemAbi {
    /// Collects the `abi` keyword, then parses interface items and any
    /// provided default definitions.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.abi_token.span());
        adaptive_iter(self.abi_items.get(), |annotated| match &annotated.value {
            sway_ast::ItemTraitItem::Fn(fn_sig, _) => fn_sig.parse(ctx),
            sway_ast::ItemTraitItem::Const(item_const, _) => item_const.parse(ctx),
            sway_ast::ItemTraitItem::Type(item_type, _) => item_type.parse(ctx),
            sway_ast::ItemTraitItem::Error(_, _) => {}
        });
        if let Some(abi_defs_opt) = self.abi_defs_opt.as_ref() {
            adaptive_iter(abi_defs_opt.get(), |item| item.value.parse(ctx));
        }
    }
}
impl Parse for ItemConst {
    /// Collects the optional `pub` and the `const` keywords, then parses the
    /// optional type ascription and initializer.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.pub_token {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.const_token.span());
        if let Some((.., ty)) = self.ty_opt.as_ref() {
            ty.parse(ctx);
        }
        if let Some(expr) = self.expr_opt.as_ref() {
            expr.parse(ctx);
        }
    }
}
impl Parse for TraitType {
    /// Collects the `type` keyword and parses the optional assigned type.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.type_token.span());
        if let Some(ty) = self.ty_opt.as_ref() {
            ty.parse(ctx);
        }
    }
}
impl Parse for ItemStorage {
    /// Collects the `storage` keyword, then parses each storage entry.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.storage_token.span());
        self.entries
            .get()
            .into_iter()
            .par_bridge()
            .for_each(|entry| entry.value.parse(ctx));
    }
}
impl Parse for StorageEntry {
    /// Recurses into a namespaced group of entries, or parses the single
    /// storage field the entry holds.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(namespace) = &self.namespace {
            namespace
                .clone()
                .into_inner()
                .into_iter()
                .par_bridge()
                .for_each(|entry| entry.value.parse(ctx));
        } else if let Some(field) = &self.field {
            field.parse(ctx);
        }
    }
}
impl Parse for StorageField {
    /// Parses the field's type and initializer expression.
    fn parse(&self, ctx: &ParseContext) {
        self.ty.parse(ctx);
        self.initializer.parse(ctx);
    }
}
impl Parse for ItemConfigurable {
    /// Collects the `configurable` keyword, then parses each field.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.configurable_token.span());
        self.fields
            .get()
            .into_iter()
            .par_bridge()
            .for_each(|field| field.value.parse(ctx));
    }
}
impl Parse for ConfigurableField {
    /// Parses the field's type and initializer expression.
    fn parse(&self, ctx: &ParseContext) {
        self.ty.parse(ctx);
        self.initializer.parse(ctx);
    }
}
impl Parse for ItemTypeAlias {
    /// Collects the optional visibility and `type` keywords, then parses the
    /// aliased type.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.type_token.span());
        self.ty.parse(ctx);
    }
}
impl Parse for UseTree {
    /// Walks an import tree: recurses into groups and path suffixes, and
    /// collects the `as` keyword for renames. Other variants collect nothing.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            UseTree::Group { imports } => {
                imports.get().into_iter().par_bridge().for_each(|use_tree| {
                    use_tree.parse(ctx);
                });
            }
            UseTree::Rename { as_token, .. } => {
                insert_keyword(ctx, as_token.span());
            }
            UseTree::Path { suffix, .. } => {
                suffix.parse(ctx);
            }
            _ => {}
        }
    }
}
impl Parse for TypeField {
    /// Collects the field's optional visibility keyword and parses its type.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        self.ty.parse(ctx);
    }
}
impl Parse for Ty {
    /// Recurses into composite types; collects the `str` keyword for string
    /// arrays. Other type forms collect nothing here.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            Ty::Tuple(tuple) => {
                tuple.get().parse(ctx);
            }
            Ty::Array(array) => {
                let inner = array.get();
                inner.ty.parse(ctx);
                inner.length.parse(ctx);
            }
            Ty::StringArray { str_token, length } => {
                insert_keyword(ctx, str_token.span());
                length.get().parse(ctx);
            }
            _ => {}
        }
    }
}
impl Parse for FnSignature {
    /// Collects visibility/`fn`/`where` keywords and parses the argument list
    /// and optional return type.
    fn parse(&self, ctx: &ParseContext) {
        if let Some(visibility) = &self.visibility {
            insert_keyword(ctx, visibility.span());
        }
        insert_keyword(ctx, self.fn_token.span());
        self.arguments.get().parse(ctx);
        if let Some((.., ty)) = &self.return_type_opt {
            ty.parse(ctx);
        }
        if let Some(where_clause) = &self.where_clause_opt {
            insert_keyword(ctx, where_clause.where_token.span());
        }
    }
}
impl Parse for FnArgs {
    /// Parses each argument; for methods, also collects the `self`, `ref`,
    /// and `mut` keywords of the receiver.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            FnArgs::Static(punct) => {
                punct
                    .into_iter()
                    .par_bridge()
                    .for_each(|fn_arg| fn_arg.parse(ctx));
            }
            FnArgs::NonStatic {
                self_token,
                ref_self,
                mutable_self,
                args_opt,
            } => {
                insert_keyword(ctx, self_token.span());
                if let Some(ref_token) = ref_self {
                    insert_keyword(ctx, ref_token.span());
                }
                if let Some(mut_token) = mutable_self {
                    insert_keyword(ctx, mut_token.span());
                }
                if let Some((.., punct)) = args_opt {
                    punct
                        .into_iter()
                        .par_bridge()
                        .for_each(|fn_arg| fn_arg.parse(ctx));
                }
            }
        }
    }
}
impl Parse for FnArg {
    /// Parses the argument's pattern and type.
    fn parse(&self, ctx: &ParseContext) {
        self.pattern.parse(ctx);
        self.ty.parse(ctx);
    }
}
impl Parse for CodeBlockContents {
fn parse(&self, ctx: &ParseContext) {
adaptive_iter(&self.statements, |statement| {
statement.parse(ctx);
});
if let Some(expr) = self.final_expr_opt.as_ref() {
expr.parse(ctx);
}
}
}
impl Parse for Statement {
    /// Dispatches on the statement kind and walks its contents.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            Statement::Let(let_stmt) => let_stmt.parse(ctx),
            Statement::Expr { expr, .. } => expr.parse(ctx),
            Statement::Item(item) => item.value.parse(ctx),
            // Nothing to collect for statements that failed to parse.
            Statement::Error(_, _) => {}
        }
    }
}
impl Parse for StatementLet {
    /// Collects tokens for `let <pattern>[: <ty>] = <expr>;`.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.let_token.span());
        self.pattern.parse(ctx);
        // Type ascription is optional; the colon token itself is skipped.
        if let Some((.., ascribed_ty)) = &self.ty_opt {
            ascribed_ty.parse(ctx);
        }
        self.expr.parse(ctx);
    }
}
impl Parse for ExprArrayDescriptor {
    /// Walks an array literal, which is either an element list
    /// (`[a, b, c]`) or a repeat form (`[value; length]`).
    fn parse(&self, ctx: &ParseContext) {
        match self {
            ExprArrayDescriptor::Sequence(elements) => {
                elements
                    .into_iter()
                    .par_bridge()
                    .for_each(|element| element.parse(ctx));
            }
            ExprArrayDescriptor::Repeat { value, length, .. } => {
                value.parse(ctx);
                length.parse(ctx);
            }
        }
    }
}
impl Parse for IfExpr {
    /// Collects tokens for an `if` expression: the `if` keyword, the
    /// condition, the then-block, and any `else` / `else if` chain.
    fn parse(&self, ctx: &ParseContext) {
        insert_keyword(ctx, self.if_token.span());
        self.condition.parse(ctx);
        self.then_block.get().parse(ctx);
        if let Some((else_token, tail)) = &self.else_opt {
            insert_keyword(ctx, else_token.span());
            match tail {
                // Plain `else { ... }`.
                LoopControlFlow::Break(else_block) => else_block.get().parse(ctx),
                // `else if ...`: recurse into the nested if-expression.
                LoopControlFlow::Continue(nested_if) => nested_if.parse(ctx),
            }
        }
    }
}
impl Parse for IfCondition {
    /// Walks the condition of an `if`, which is either a plain boolean
    /// expression or an `if let`-style pattern match.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            IfCondition::Expr(cond) => cond.parse(ctx),
            IfCondition::Let {
                let_token,
                lhs,
                rhs,
                ..
            } => {
                insert_keyword(ctx, let_token.span());
                lhs.parse(ctx);
                rhs.parse(ctx);
            }
        }
    }
}
impl Parse for Pattern {
    /// Collects tokens for a match/binding pattern, recording `ref` and
    /// `mut` modifiers and descending into composite patterns.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            Pattern::Var {
                reference, mutable, ..
            } => {
                if let Some(ref_kw) = reference {
                    insert_keyword(ctx, ref_kw.span());
                }
                if let Some(mut_kw) = mutable {
                    insert_keyword(ctx, mut_kw.span());
                }
            }
            // Enum constructors and tuples both wrap a list of sub-patterns.
            Pattern::Constructor { args, .. } | Pattern::Tuple(args) => {
                args.get()
                    .into_iter()
                    .par_bridge()
                    .for_each(|sub_pattern| sub_pattern.parse(ctx));
            }
            Pattern::Struct { fields, .. } => {
                fields
                    .get()
                    .into_iter()
                    .par_bridge()
                    .for_each(|field| field.parse(ctx));
            }
            // Other pattern kinds carry no nested nodes collected here.
            _ => {}
        }
    }
}
impl Parse for PatternStructField {
    /// Walks a struct-pattern field, descending into the sub-pattern of
    /// `field: pattern` forms; shorthand fields hold nothing to collect.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            PatternStructField::Field {
                pattern_opt: Some((.., sub_pattern)),
                ..
            } => sub_pattern.parse(ctx),
            _ => {}
        }
    }
}
impl Parse for MatchBranchKind {
    /// Walks the right-hand side of a match arm, which is either a
    /// block or a bare expression.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            MatchBranchKind::Block { block, .. } => block.get().parse(ctx),
            MatchBranchKind::Expr { expr, .. } => expr.parse(ctx),
        }
    }
}
impl Parse for ExprStructField {
    /// Walks a struct-literal field, descending into the initializer of
    /// `field: expr` forms; shorthand fields hold nothing to collect.
    fn parse(&self, ctx: &ParseContext) {
        if let Some((.., initializer)) = self.expr_opt.as_ref() {
            initializer.parse(ctx);
        }
    }
}
impl Parse for ExprTupleDescriptor {
    /// Walks a tuple expression: the head element followed by the rest.
    /// The unit form has nothing to collect.
    fn parse(&self, ctx: &ParseContext) {
        if let ExprTupleDescriptor::Cons { head, tail, .. } = self {
            head.parse(ctx);
            tail.into_iter().par_bridge().for_each(|elem| elem.parse(ctx));
        }
    }
}
impl Parse for TyTupleDescriptor {
    /// Walks a tuple type: the head type followed by the rest. The unit
    /// type has nothing to collect.
    fn parse(&self, ctx: &ParseContext) {
        if let TyTupleDescriptor::Cons { head, tail, .. } = self {
            head.parse(ctx);
            tail.into_iter().par_bridge().for_each(|ty| ty.parse(ctx));
        }
    }
}
impl Parse for ElementAccess {
    /// Walks the target (and index argument, when present) of an
    /// element-access expression such as `a[i]`, `a.b`, or `a.0`.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            ElementAccess::Index { target, arg } => {
                target.parse(ctx);
                arg.get().parse(ctx);
            }
            // These three variants only need their target walked.
            ElementAccess::FieldProjection { target, .. }
            | ElementAccess::TupleFieldProjection { target, .. }
            | ElementAccess::Deref { target, .. } => target.parse(ctx),
            // A bare variable has no nested nodes.
            ElementAccess::Var(_) => {}
        }
    }
}
impl Parse for Assignable {
    /// Walks the left-hand side of an assignment.
    fn parse(&self, ctx: &ParseContext) {
        match self {
            Assignable::ElementAccess(access) => access.parse(ctx),
            Assignable::Deref { expr, .. } => expr.parse(ctx),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/test.rs | sway-lsp/src/utils/test.rs | rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false | |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/markup.rs | sway-lsp/src/utils/markup.rs | //! Markdown formatting.
//!
//! Sometimes, we want to display a "rich text" in the UI. At the moment, we use
//! markdown for this purpose.
//! Modified from rust-analyzer.
use crate::{
capabilities::hover::hover_link_contents::RelatedType, config::LspClient,
core::token::get_range_from_span, utils::document::get_url_from_span,
};
use serde_json::{json, Value};
use std::fmt::{self};
use sway_types::{SourceEngine, Span};
use urlencoding::encode;
const GO_TO_COMMAND: &str = "sway.goToLocation";
const PEEK_COMMAND: &str = "sway.peekLocations";
/// A handy wrapper around `String` for constructing markdown documents.
#[derive(Default, Debug)]
pub struct Markup {
    // The accumulated markdown text.
    text: String,
}
impl From<Markup> for String {
    /// Unwraps the markup into its underlying markdown string.
    fn from(value: Markup) -> Self {
        value.text
    }
}
impl From<String> for Markup {
fn from(text: String) -> Self {
Markup { text }
}
}
impl fmt::Display for Markup {
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the inner string's Display so that any width or
        // alignment flags on the formatter are honored.
        fmt::Display::fmt(&self.text, formatter)
    }
}
impl Markup {
    /// Creates a new empty `Markup`.
    pub fn new() -> Self {
        Self {
            text: String::new(),
        }
    }
    /// If contents is `Some`, format the contents within a sway code block.
    pub fn maybe_add_sway_block(self, contents: Option<String>) -> Self {
        match contents {
            Some(contents) => self.fenced_sway_block(&contents),
            None => self,
        }
    }
    /// Adds go-to links if there are any related types, a link to view implementations if there are any,
    /// or nothing if there are no related types or implementations. Only adds links for VSCode clients.
    pub fn maybe_add_links(
        self,
        source_engine: &SourceEngine,
        related_types: &[RelatedType],
        implementations: &[Span],
        client_config: &LspClient,
    ) -> Self {
        // The links built below use "command:" URIs (sway.* commands),
        // so clients other than VS Code get no links at all.
        if client_config != &LspClient::VsCode {
            return self;
        }
        if related_types.is_empty() {
            // Resolve each implementation span to a { uri, range } JSON
            // object; spans that fail to resolve to a URL are dropped.
            let locations = implementations
                .iter()
                .filter_map(|span| {
                    if let Ok(uri) = get_url_from_span(source_engine, span) {
                        let range = get_range_from_span(span);
                        Some(json!({ "uri": uri, "range": range }))
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>();
            // Only add a peek link when more than one implementation
            // location was resolved.
            if locations.len() > 1 {
                let args = json!([{ "locations": locations }]);
                let links_string = format!(
                    "[{} implementations]({} {})",
                    locations.len(),
                    command_uri(PEEK_COMMAND, &args),
                    quoted_tooltip("Go to implementations")
                );
                self.text(&links_string)
            } else {
                self
            }
        } else {
            // One go-to link per related type, separated by " | ".
            let links_string = related_types
                .iter()
                .map(|related_type| {
                    let args = json!([{ "uri": related_type.uri, "range": &related_type.range }]);
                    format!(
                        "[{}]({} {})",
                        related_type.name,
                        command_uri(GO_TO_COMMAND, &args),
                        quoted_tooltip(&related_type.callpath.to_string())
                    )
                })
                .collect::<Vec<_>>()
                .join(" | ");
            self.text(&format!("Go to {links_string}"))
        }
    }
    /// Contents will be formatted with sway syntax highlighting.
    pub fn fenced_sway_block(self, contents: &impl fmt::Display) -> Self {
        let code_block = format!("```sway\n{contents}\n```");
        self.text(&code_block)
    }
    /// Add text to the markup, preceded by a section separator when
    /// content already exists.
    pub fn text(self, contents: &str) -> Self {
        if !self.text.is_empty() {
            return self.line_separator().push_str(contents);
        }
        self.push_str(contents)
    }
    /// Add text without a line separator.
    fn push_str(mut self, contents: &str) -> Self {
        self.text.push_str(contents);
        self
    }
    /// Add a new section (markdown horizontal rule).
    fn line_separator(mut self) -> Self {
        self.text.push_str("\n---\n");
        self
    }
    /// Get the inner string as a str.
    pub fn as_str(&self) -> &str {
        self.text.as_str()
    }
}
/// Builds a markdown URI using the "command" scheme and args passed as encoded JSON.
fn command_uri(command: &str, args: &Value) -> String {
    // Bind the serialized JSON first so the Cow returned by `encode`
    // can borrow from it for the duration of the format call.
    let json_args = args.to_string();
    format!("command:{command}?{}", encode(&json_args))
}
/// Wraps tooltip text in double quotes, as markdown link titles require.
fn quoted_tooltip(text: &str) -> String {
    format!("\"{}\"", text)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/document.rs | sway-lsp/src/utils/document.rs | use crate::error::DirectoryError;
use lsp_types::Url;
use std::path::PathBuf;
use sway_types::{SourceEngine, Span};
/// Create a [Url] from a [`PathBuf`].
pub fn get_url_from_path(path: &PathBuf) -> Result<Url, DirectoryError> {
    match Url::from_file_path(path) {
        Ok(url) => Ok(url),
        // `from_file_path` reports failure as a bare `()`.
        Err(()) => Err(DirectoryError::UrlFromPathFailed {
            path: path.to_string_lossy().to_string(),
        }),
    }
}
/// Create a [`PathBuf`] from a [Url].
pub fn get_path_from_url(url: &Url) -> Result<PathBuf, DirectoryError> {
    match url.to_file_path() {
        Ok(path) => Ok(path),
        // `to_file_path` reports failure as a bare `()`.
        Err(()) => Err(DirectoryError::PathFromUrlFailed {
            url: url.to_string(),
        }),
    }
}
/// Create a [Url] from a [Span].
pub fn get_url_from_span(source_engine: &SourceEngine, span: &Span) -> Result<Url, DirectoryError> {
    // A span without a source id cannot be mapped back to a file.
    match span.source_id() {
        Some(source_id) => get_url_from_path(&source_engine.get_path(source_id)),
        None => Err(DirectoryError::UrlFromSpanFailed {
            span: span.as_str().to_string(),
        }),
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/keyword_docs.rs | sway-lsp/src/utils/keyword_docs.rs | #![allow(unused)]
use proc_macro2::{TokenStream, TokenTree};
use quote::ToTokens;
use std::{collections::hash_map::HashMap, fmt::Write};
use syn::{parse_quote, ItemMod};
// Ported from https://github.com/rust-lang/rust/blob/master/library/std/src/keyword_docs.rs
/// Documentation for sway keywords.
/// Primarily used for showing documentation on LSP hover requests.
///
/// Maps each keyword string to its markdown documentation text.
#[derive(Debug, Default)]
pub struct KeywordDocs(HashMap<String, String>);
impl KeywordDocs {
pub fn new() -> Self {
let pub_keyword: ItemMod = parse_quote! {
/// Make an item visible to others.
///
/// The keyword `pub` makes any module, function, or data structure accessible from inside
/// of external modules. The `pub` keyword may also be used in a `use` declaration to re-export
/// an identifier from a namespace.
mod pub_keyword {}
};
let use_keyword: ItemMod = parse_quote! {
/// Import or rename items from other crates or modules.
///
/// Usually a `use` keyword is used to shorten the path required to refer to a module item.
/// The keyword may appear in modules, blocks and even functions, usually at the top.
///
/// The most basic usage of the keyword is `use path::to::item;`,
/// though a number of convenient shortcuts are supported:
///
/// * Simultaneously binding a list of paths with a common prefix,
/// using the glob-like brace syntax `use a::b::{c, d, e::f, g::h::i};`
/// * Simultaneously binding a list of paths with a common prefix and their common parent module,
/// using the [`self`] keyword, such as `use a::b::{self, c, d::e};`
/// * Rebinding the target name as a new local name, using the syntax `use p::q::r as x;`.
/// This can also be used with the last two features: `use a::b::{self as ab, c as abc}`.
/// * Binding all paths matching a given prefix,
/// using the asterisk wildcard syntax `use a::b::*;`.
/// * Nesting groups of the previous features multiple times,
/// such as `use a::b::{self as ab, c, d::{*, e::f}};`
/// * Reexporting with visibility modifiers such as `pub use a::b;`
mod use_keyword {}
};
let as_keyword: ItemMod = parse_quote! {
/// Cast between types, or rename an import.
///
/// In general, any cast that can be performed via ascribing the type can also be done using `as`,
/// so instead of writing `let x: u32 = 123`, you can write `let x = 123 as u32` (note: `let x: u32
/// = 123` would be best in that situation). The same is not true in the other direction
///
/// `as` can also be used with the `_` placeholder when the destination type can be inferred. Note
/// that this can cause inference breakage and usually such code should use an explicit type for
/// both clarity and stability.
///
/// `as` is also used to rename imports in [`use`] statements:
///
/// ```sway
/// use foo::Foo as MyFoo;
/// ```
mod as_keyword {}
};
let struct_keyword: ItemMod = parse_quote! {
/// A type that is composed of other types.
///
/// Structs in Sway come in three flavors: Structs with named fields, tuple structs, and unit
/// structs.
///
/// ```sway
/// struct Regular {
/// field1: u8,
/// field2: u32,
/// pub field3: bool
/// }
///
/// struct Tuple(u32, u64);
///
/// struct Unit;
/// ```
///
/// Regular structs are the most commonly used. Each field defined within them has a name and a
/// type, and once defined can be accessed using `example_struct.field` syntax. The fields of a
/// struct share its mutability, so `foo.bar = 2;` would only be valid if `foo` was mutable. Adding
/// `pub` to a field makes it visible to code in other modules, as well as allowing it to be
/// directly accessed and modified.
///
/// Tuple structs are similar to regular structs, but its fields have no names. They are used like
/// tuples, with deconstruction possible via `let TupleStruct(x, y) = foo;` syntax. For accessing
/// individual variables, the same syntax is used as with regular tuples, namely `foo.0`, `foo.1`,
/// etc, starting at zero.
///
/// Unit structs are most commonly used as marker. They have a size of zero bytes, but unlike empty
/// enums they can be instantiated, making them isomorphic to the unit type `()`. Unit structs are
/// useful when you need to implement a trait on something, but don't need to store any data inside
/// it.
///
/// # Instantiation
///
/// Structs can be instantiated in different ways, all of which can be mixed and
/// matched as needed. The most common way to make a new struct is via a constructor method such as
/// `new()`, but when that isn't available (or you're writing the constructor itself), struct
/// literal syntax is used:
///
/// ```sway
/// # struct Foo { field1: u8, field2: u32, etc: bool }
/// let example = Foo {
/// field1: 42,
/// field2: 1024,
/// etc: true,
/// };
/// ```
///
/// It's only possible to directly instantiate a struct using struct literal syntax when all of its
/// fields are visible to you.
///
/// There are a handful of shortcuts provided to make writing constructors more convenient, most
/// common of which is the Field Init shorthand. When there is a variable and a field of the same
/// name, the assignment can be simplified from `field: field` into simply `field`. The following
/// example of a hypothetical constructor demonstrates this:
///
/// ```sway
/// struct User {
/// age: u8,
/// admin: bool,
/// }
///
/// impl User {
/// pub fn new(age: u8) -> Self {
/// Self {
/// age,
/// admin: false,
/// }
/// }
/// }
/// ```
///
/// Tuple structs are instantiated in the same way as tuples themselves, except with the struct's
/// name as a prefix: `Foo(123, false, 26)`.
///
/// Empty structs are instantiated with just their name, and don't need anything else. `let thing =
/// EmptyStruct;`
///
/// # Style conventions
///
/// Structs are always written in CamelCase, with few exceptions. While the trailing comma on a
/// struct's list of fields can be omitted, it's usually kept for convenience in adding and
/// removing fields down the line.
mod struct_keyword {}
};
let enum_keyword: ItemMod = parse_quote! {
/// A type that can be any one of several variants.
///
/// Enums in Sway are similar to those of other compiled languages like C, but have important
/// differences that make them considerably more powerful. What Sway calls enums are more commonly
/// known as [Algebraic Data Types][ADT] if you're coming from a functional programming background.
/// The important detail is that each enum variant can have data to go along with it.
///
/// ```sway
/// # struct Coord;
/// enum SimpleEnum {
/// FirstVariant,
/// SecondVariant,
/// ThirdVariant,
/// }
///
/// enum Location {
/// Unknown,
/// Anonymous,
/// Known(Coord),
/// }
///
/// enum ComplexEnum {
/// Nothing,
/// Something(u32),
/// LotsOfThings {
/// usual_struct_stuff: bool,
/// blah: u8,
/// }
/// }
///
/// enum EmptyEnum { }
/// ```
///
/// The first enum shown is the usual kind of enum you'd find in a C-style language. The second
/// shows off a hypothetical example of something storing location data, with `Coord` being any
/// other type that's needed, for example a struct. The third example demonstrates the kind of
/// data a variant can store, ranging from nothing, to a tuple, to an anonymous struct.
///
/// Instantiating enum variants involves explicitly using the enum's name as its namespace,
/// followed by one of its variants. `SimpleEnum::SecondVariant` would be an example from above.
/// When data follows along with a variant, such as with sway's built-in [`Option`] type, the data
/// is added as the type describes, for example `Option::Some(123)`. The same follows with
/// struct-like variants, with things looking like `ComplexEnum::LotsOfThings { usual_struct_stuff:
/// true, blah: 245, }`. Empty Enums are similar to [`!`] in that they cannot be
/// instantiated at all, and are used mainly to mess with the type system in interesting ways.
///
/// [ADT]: https://en.wikipedia.org/wiki/Algebraic_data_type
mod enum_keyword {}
};
let self_keyword: ItemMod = parse_quote! {
/// The receiver of a method, or the current module.
///
/// `self` is used in two situations: referencing the current module and marking
/// the receiver of a method.
///
/// In paths, `self` can be used to refer to the current module, either in a
/// [`use`] statement or in a path to access an element:
///
/// ```sway
/// use std::contract_id::{self, ContractId};
/// ```
///
/// Is functionally the same as:
///
/// ```sway
/// use std::contract_id;
/// use std::contract_id::ContractId;
/// ```
///
/// `self` as the current receiver for a method allows to omit the parameter
/// type most of the time. With the exception of this particularity, `self` is
/// used much like any other parameter:
///
/// ```sway
/// struct Foo(u32);
///
/// impl Foo {
/// // No `self`.
/// fn new() -> Self {
/// Self(0)
/// }
///
/// // Borrowing `self`.
/// fn value(&self) -> u32 {
/// self.0
/// }
///
/// // Updating `self` mutably.
/// fn clear(ref mut self) {
/// self.0 = 0
/// }
/// }
/// ```
mod self_keyword {}
};
let fn_keyword: ItemMod = parse_quote! {
/// Functions are the primary way code is executed within Sway. Function blocks, usually just
/// called functions, can be defined in a variety of different places and be assigned many
/// different attributes and modifiers.
///
/// Standalone functions that just sit within a module not attached to anything else are common,
/// but most functions will end up being inside [`impl`] blocks, either on another type itself, or
/// as a trait impl for that type.
///
/// ```sway
/// fn standalone_function() {
/// // code
/// }
///
/// pub fn public_thing(argument: bool) -> bool {
/// // code
/// true
/// }
///
/// struct Thing {
/// foo: u32,
/// }
///
/// impl Thing {
/// pub fn new() -> Self {
/// Self {
/// foo: 42,
/// }
/// }
/// }
/// ```
///
/// In addition to presenting fixed types in the form of `fn name(arg: type, ..) -> return_type`,
/// functions can also declare a list of type parameters along with trait bounds that they fall
/// into.
///
/// ```sway
/// fn add_points<T>(a: MyPoint<T>, b: MyPoint<T>) -> MyPoint<T> where T: MyAdd {
/// MyPoint {
/// x: a.x.my_add(b.x),
/// y: a.y.my_add(b.y),
/// }
/// }
/// ```
mod fn_keyword {}
};
let trait_keyword: ItemMod = parse_quote! {
/// A common interface for a group of types.
///
/// A `trait` is like an interface that data types can implement. When a type
/// implements a trait it can be treated abstractly as that trait using generics
/// or trait objects.
///
/// Traits can be made up of three varieties of associated items:
///
/// - functions and methods
/// - types
/// - constants
///
/// Traits may also contain additional type parameters. Those type parameters
/// or the trait itself can be constrained by other traits.
///
/// Traits can serve as markers or carry other logical semantics that
/// aren't expressed through their items. When a type implements that
/// trait it is promising to uphold its contract.
///
/// # Examples
///
/// Traits are declared using the `trait` keyword. Types can implement them
/// using [`impl`] `Trait` [`for`] `Type`:
///
/// ```sway
/// trait Setter<T> {
/// fn set(self, new_value: T) -> Self;
/// }
///
/// struct FooBarData<T> {
/// value: T
/// }
///
/// impl<T> Setter<T> for FooBarData<T> {
/// fn set(self, new_value: T) -> Self {
/// FooBarData {
/// value: new_value,
/// }
/// }
/// }
/// ```
mod trait_keyword {}
};
let impl_keyword: ItemMod = parse_quote! {
/// Implement some functionality for a type.
///
/// The `impl` keyword is primarily used to define implementations on types. Inherent
/// implementations are standalone, while trait implementations are used to implement traits for
/// types, or other traits.
///
/// Functions and consts can both be defined in an implementation. A function defined in an
/// `impl` block can be standalone, meaning it would be called like `Foo::bar()`. If the function
/// takes `self`, or `ref mut self` as its first argument, it can also be called using
/// method-call syntax, a familiar feature to any object oriented programmer, like `foo.bar()`.
///
/// ```sway
/// struct Example {
/// number: u32,
/// }
///
/// impl Example {
/// fn answer(ref mut self) {
/// self.number += 42;
/// }
///
/// fn get_number(self) -> u32 {
/// self.number
/// }
/// }
/// ```
mod impl_keyword {}
};
let const_keyword: ItemMod = parse_quote! {
/// Compile-time constants.
///
/// Sometimes a certain value is used many times throughout a program, and it can become
/// inconvenient to copy it over and over. What's more, it's not always possible or desirable to
/// make it a variable that gets carried around to each function that needs it. In these cases, the
/// `const` keyword provides a convenient alternative to code duplication:
///
/// ```sway
/// const NUMBER_1: u64 = 7;
///
/// let foo = 123 + NUMBER_1;
/// ```
///
/// Constants must be explicitly typed; unlike with `let`, you can't ignore their type and let the
/// compiler figure it out.
///
/// Constants should always be in `SCREAMING_SNAKE_CASE`.
mod const_keyword {}
};
let return_keyword: ItemMod = parse_quote! {
/// Return a value from a function.
///
/// A `return` marks the end of an execution path in a function:
///
/// ```sway
/// fn foo() -> u32 {
/// return 3;
/// }
/// assert(foo(), 3);
/// ```
///
/// `return` is not needed when the returned value is the last expression in the
/// function. In this case the `;` is omitted:
///
/// ```sway
/// fn foo() -> u32 {
/// 3
/// }
/// assert(foo(), 3);
/// ```
///
/// `return` returns from the function immediately (an "early return"):
///
/// ```sway
/// fn main() -> u64 {
/// let x = if true {
/// Result::Err::<u64, u32>(12)
/// } else {
/// return 10;
/// };
/// 44
/// }
/// ```
mod return_keyword {}
};
let if_keyword: ItemMod = parse_quote! {
/// Evaluate a block if a condition holds.
///
/// `if` is a familiar construct to most programmers, and is the main way you'll often do logic in
/// your code. However, unlike in most languages, `if` blocks can also act as expressions.
///
/// ```sway
/// if 1 == 2 {
/// log("whoops, mathematics broke");
/// revert(0);
/// } else {
/// log("everything's fine!");
/// }
///
/// let x = 5;
/// let y = if x == 5 {
/// 10
/// } else {
/// 15
/// };
/// assert(y == 10);
///
/// let opt = Some(5);
/// if let Some(x) = opt {
/// // x is 5
/// }
/// ```
///
/// Shown above are the three typical forms an `if` block comes in. First is the usual kind of
/// thing you'd see in many languages, with an optional `else` block. Second uses `if` as an
/// expression, which is only possible if all branches return the same type. An `if` expression can
/// be used everywhere you'd expect. The third kind of `if` block is an `if let` block, which
/// behaves similarly to using a `match` expression.
mod if_keyword {}
};
let else_keyword: ItemMod = parse_quote! {
/// What expression to evaluate when an [`if`] condition evaluates to [`false`].
///
/// `else` expressions are optional. When no else expressions are supplied it is assumed to evaluate
/// to the unit type `()`.
///
/// The type that the `else` blocks evaluate to must be compatible with the type that the `if` block
/// evaluates to.
///
/// As can be seen below, `else` must be followed by either: `if`, `if let`, or a block `{}` and it
/// will return the value of that expression.
///
/// ```sway
/// let condition = false;
/// let result = if condition == true {
/// 101
/// } else {
/// 102
/// };
/// assert(result == 102);
/// ```
///
/// There is possibly no limit to the number of `else` blocks that could follow an `if` expression
/// however if you have several then a [`match`] expression might be preferable.
mod else_keyword {}
};
let match_keyword: ItemMod = parse_quote! {
/// Control flow based on pattern matching.
///
/// `match` can be used to run code conditionally. Every pattern must
/// be handled exhaustively either explicitly or by using wildcards like
/// `_` in the `match`. Since `match` is an expression, values can also be
/// returned.
///
/// ```sway
/// let opt = None::<u32>;
/// let x = match opt {
/// Some(int) => int,
/// None => 10,
/// };
/// assert(x, 10);
/// ```
mod match_keyword {}
};
let mut_keyword: ItemMod = parse_quote! {
/// A mutable variable, reference, or pointer.
///
/// `mut` can be used in several situations. The first is mutable variables,
/// which can be used anywhere you can bind a value to a variable name. Some
/// examples:
///
/// ```sway
/// let mut a = 5;
/// a = 6;
/// assert(a, 6);
/// ```
///
/// The second is mutable references. They can be created from `mut` variables
/// and must be unique: no other variables can have a mutable reference, nor a
/// shared reference.
///
/// ```sway
/// // Taking a mutable reference.
/// fn takes_ref_mut_array(ref mut arr: [u64; 1]) {
/// arr[0] = 10;
/// }
/// ```
mod mut_keyword {}
};
let let_keyword: ItemMod = parse_quote! {
/// Bind a value to a variable.
///
/// The primary use for the `let` keyword is in `let` statements, which are used to introduce a new
/// set of variables into the current scope, as given by a pattern.
///
/// ```sway
/// let thing1: u32 = 100;
/// let thing2 = 200 + thing1;
///
/// let mut changing_thing = true;
/// changing_thing = false;
/// ```
///
/// The pattern is most commonly a single variable, which means no pattern matching is done and
/// the expression given is bound to the variable. Apart from that, patterns used in `let` bindings
/// can be as complicated as needed, given that the pattern is exhaustive. The type of the pattern
/// is optionally given afterwards, but if left blank is automatically inferred by the compiler if possible.
///
/// Variables in Sway are immutable by default, and require the `mut` keyword to be made mutable.
///
/// Multiple variables can be defined with the same name, known as shadowing. This doesn't affect
/// the original variable in any way beyond being unable to directly access it beyond the point of
/// shadowing. It continues to remain in scope, getting dropped only when it falls out of scope.
/// Shadowed variables don't need to have the same type as the variables shadowing them.
///
/// ```sway
/// let shadowing_example = true;
/// let shadowing_example: u32 = 123;
/// let shadowing_example = shadowing_example as u8;
/// ```
///
/// Other places the `let` keyword is used include along with [`if`], in the form of `if let`
/// expressions. They're useful if the pattern being matched isn't exhaustive, such as with
/// enumerations.
mod let_keyword {}
};
let while_keyword: ItemMod = parse_quote! {
/// Loop while a condition is upheld.
///
/// A `while` expression is used for predicate loops. The `while` expression runs the conditional
/// expression before running the loop body, then runs the loop body if the conditional
/// expression evaluates to `true`, or exits the loop otherwise.
///
/// ```sway
/// let mut counter = 0;
///
/// while counter < 10 {
/// log(counter);
/// counter += 1;
/// }
/// ```
///
/// A `while` expression cannot break with a value and always evaluates to `()`.
///
/// ```sway
/// let mut i = 1;
///
/// while i < 100 {
/// i *= 2;
/// if i == 64 {
/// break; // Exit when `i` is 64.
/// }
/// }
/// ```
mod while_keyword {}
};
let true_keyword: ItemMod = parse_quote! {
/// A value of type [`bool`] representing logical **true**.
///
/// Logically `true` is not equal to [`false`].
///
/// ## Control structures that check for **true**
///
/// Several of Sway's control structures will check for a `bool` condition evaluating to **true**.
///
/// * The condition in an [`if`] expression must be of type `bool`.
/// Whenever that condition evaluates to **true**, the `if` expression takes
/// on the value of the first block. If however, the condition evaluates
/// to `false`, the expression takes on value of the `else` block if there is one.
///
/// * [`while`] is another control flow construct expecting a `bool`-typed condition.
/// As long as the condition evaluates to **true**, the `while` loop will continually
/// evaluate its associated block.
///
/// * [`match`] arms can have guard clauses on them.
mod true_keyword {}
};
let false_keyword: ItemMod = parse_quote! {
/// A value of type [`bool`] representing logical **false**.
///
/// `false` is the logical opposite of [`true`].
///
/// See the documentation for [`true`] for more information.
mod false_keyword {}
};
let break_keyword: ItemMod = parse_quote! {
/// Exit early from a loop.
///
/// When `break` is encountered, execution of the associated loop body is
/// immediately terminated.
///
/// ```sway
/// let mut x = 0;
///
/// for x < 100 {
/// if x > 12 {
/// break;
/// }
/// x += 1;
/// }
///
/// assert(x == 12);
/// ```
mod break_keyword {}
};
let continue_keyword: ItemMod = parse_quote! {
/// Skip to the next iteration of a loop.
///
/// When `continue` is encountered, the current iteration is terminated, returning control to the
/// loop head, typically continuing with the next iteration.
///
/// ```sway
/// // Printing odd numbers by skipping even ones
/// for number in 1..=10 {
/// if number % 2 == 0 {
/// continue;
/// }
/// log(number);
/// }
/// ```
mod continue_keyword {}
};
let abi_keyword: ItemMod = parse_quote! {
/// Defines an Application Binary Interface (ABI).
///
/// An `abi` block defines a set of methods that a contract exposes externally. It acts as the
/// public interface for interacting with a smart contract. Only one `abi` block is allowed per
/// contract.
///
/// The methods defined within an `abi` block must be implemented in an associated [`impl`] block
/// for the contract.
///
/// ```sway
/// contract;
///
/// abi MyContract {
/// #[storage(read, write)]
/// fn update_counter(amount: u64) -> u64;
/// }
///
/// impl MyContract for Contract {
/// #[storage(read, write)]
/// fn update_counter(amount: u64) -> u64 {
/// let current = storage.counter;
/// storage.counter = current + amount;
/// storage.counter
/// }
/// }
/// ```
mod abi_keyword {}
};
// TODO
let str_keyword: ItemMod = parse_quote! {
mod str_keyword {}
};
// TODO
let for_keyword: ItemMod = parse_quote! {
mod for_keyword {}
};
// TODO
let where_keyword: ItemMod = parse_quote! {
mod where_keyword {}
};
// TODO
let ref_keyword: ItemMod = parse_quote! {
mod ref_keyword {}
};
// TODO
let script_keyword: ItemMod = parse_quote! {
mod script_keyword {}
};
// TODO
let contract_keyword: ItemMod = parse_quote! {
mod contract_keyword {}
};
// TODO
let predicate_keyword: ItemMod = parse_quote! {
mod predicate_keyword {}
};
// TODO
let library_keyword: ItemMod = parse_quote! {
mod library_keyword {}
};
// TODO
let mod_keyword: ItemMod = parse_quote! {
mod mod_keyword {}
};
// TODO
let storage_keyword: ItemMod = parse_quote! {
mod storage_keyword {}
};
// TODO
let asm_keyword: ItemMod = parse_quote! {
mod asm_keyword {}
};
// TODO
let deref_keyword: ItemMod = parse_quote! {
mod deref_keyword {}
};
// TODO
let configurable_keyword: ItemMod = parse_quote! {
mod configurable_keyword {}
};
// TODO
let type_keyword: ItemMod = parse_quote! {
mod type_keyword {}
};
// TODO
let panic_keyword: ItemMod = parse_quote! {
mod panic_keyword {}
};
let mut keyword_docs = HashMap::new();
let keywords = vec![
pub_keyword,
use_keyword,
as_keyword,
struct_keyword,
enum_keyword,
self_keyword,
fn_keyword,
trait_keyword,
impl_keyword,
for_keyword,
const_keyword,
return_keyword,
if_keyword,
else_keyword,
match_keyword,
mut_keyword,
let_keyword,
while_keyword,
where_keyword,
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/debug.rs | sway-lsp/src/utils/debug.rs | #![allow(dead_code)]
use crate::core::token::{Token, TokenIdent};
use dashmap::mapref::multiple::RefMulti;
use lsp_types::{Diagnostic, DiagnosticSeverity};
use sway_core::{
decl_engine::DeclEngine,
language::{ty, Literal},
};
/// Emits a warning diagnostic for every token that only reached the parsed
/// stage (i.e. was never resolved into a typed token).
pub(crate) fn generate_warnings_non_typed_tokens<'s, I>(tokens: I) -> Vec<Diagnostic>
where
    I: Iterator<Item = RefMulti<'s, TokenIdent, Token>>,
{
    tokens
        .filter(|entry| entry.value().as_parsed().is_some())
        .map(|entry| warning_from_ident(entry.key()))
        .collect()
}
/// Emits a warning diagnostic for every token in the map, unconditionally.
pub(crate) fn generate_warnings_for_parsed_tokens<'s, I>(tokens: I) -> Vec<Diagnostic>
where
    I: Iterator<Item = RefMulti<'s, TokenIdent, Token>>,
{
    let mut diagnostics = Vec::new();
    for entry in tokens {
        diagnostics.push(warning_from_ident(entry.key()));
    }
    diagnostics
}
/// Emits a warning diagnostic for every token that was resolved into a typed
/// token.
pub(crate) fn generate_warnings_for_typed_tokens<'s, I>(tokens: I) -> Vec<Diagnostic>
where
    I: Iterator<Item = RefMulti<'s, TokenIdent, Token>>,
{
    tokens
        .filter(|entry| entry.value().as_typed().is_some())
        .map(|entry| warning_from_ident(entry.key()))
        .collect()
}
fn warning_from_ident(ident: &TokenIdent) -> Diagnostic {
Diagnostic {
range: ident.range,
severity: Some(DiagnosticSeverity::WARNING),
message: String::new(),
..Default::default()
}
}
/// Renders the Sway type name of a literal for display purposes.
fn literal_to_string(literal: &Literal) -> String {
    match literal {
        Literal::U8(_) => String::from("u8"),
        Literal::U16(_) => String::from("u16"),
        Literal::U32(_) => String::from("u32"),
        // Untyped numerics default to u64.
        Literal::U64(_) | Literal::Numeric(_) => String::from("u64"),
        Literal::U256(_) => String::from("u256"),
        // String literals carry their byte length in the type.
        Literal::String(len) => format!("str[{}]", len.as_str().len()),
        Literal::Boolean(_) => String::from("bool"),
        Literal::B256(_) => String::from("b256"),
        Literal::Binary(_) => unreachable!("literals cannot be expressed in the language yet"),
    }
}
/// Print the AST nodes in a human readable format
/// by getting the types from the declaration engine
/// and formatting them into a String.
///
/// Each node is rendered via its `Debug` impl (pretty-printed), with
/// declarations first resolved through the declaration engine; a newline is
/// appended after every node.
pub(crate) fn print_decl_engine_types(
    all_nodes: &[ty::TyAstNode],
    decl_engine: &DeclEngine,
) -> String {
    all_nodes
        .iter()
        .map(|n| match &n.content {
            ty::TyAstNodeContent::Declaration(declaration) => match declaration {
                ty::TyDecl::ConstantDecl(ty::ConstantDecl { decl_id, .. }) => {
                    let const_decl = decl_engine.get_constant(decl_id);
                    format!("{const_decl:#?}")
                }
                ty::TyDecl::FunctionDecl(ty::FunctionDecl { decl_id, .. }) => {
                    let func_decl = decl_engine.get_function(decl_id);
                    format!("{func_decl:#?}")
                }
                ty::TyDecl::TraitDecl(ty::TraitDecl { decl_id, .. }) => {
                    let trait_decl = decl_engine.get_trait(decl_id);
                    format!("{trait_decl:#?}")
                }
                ty::TyDecl::StructDecl(ty::StructDecl { decl_id, .. }) => {
                    let struct_decl = decl_engine.get_struct(decl_id);
                    format!("{struct_decl:#?}")
                }
                ty::TyDecl::EnumDecl(ty::EnumDecl { decl_id, .. }) => {
                    let enum_decl = decl_engine.get_enum(decl_id);
                    format!("{enum_decl:#?}")
                }
                ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
                    let abi_decl = decl_engine.get_abi(decl_id);
                    format!("{abi_decl:#?}")
                }
                ty::TyDecl::StorageDecl(ty::StorageDecl { decl_id, .. }) => {
                    let storage_decl = decl_engine.get_storage(decl_id);
                    format!("{storage_decl:#?}")
                }
                // Other declaration kinds have no engine indirection to resolve.
                _ => format!("{declaration:#?}"),
            },
            ty::TyAstNodeContent::Expression(expression) => {
                format!("{expression:#?}")
            }
            ty::TyAstNodeContent::SideEffect(side_effect) => format!("{side_effect:#?}"),
            ty::TyAstNodeContent::Error(_, _) => "error".to_string(),
        })
        // Append in place: the previous `format!("{output}{s}\n")` re-allocated
        // and copied the whole accumulator on every node, which is O(n^2) in
        // the total output size.
        .fold(String::new(), |mut output, s| {
            output.push_str(&s);
            output.push('\n');
            output
        })
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/markdown.rs | sway-lsp/src/utils/markdown.rs | //! Transforms markdown
const SWAYDOC_FENCES: [&str; 2] = ["```", "~~~"];
/// Transforms markdown and takes care of any code blocks
/// to allow for syntax highlighting.
///
/// Sway fences are normalized to ```` ```sway ````, swaydoc-hidden lines
/// (`# …`) inside them are dropped, and `##`-escaped hashes inside any fence
/// are unescaped to a single `#`.
pub fn format_docs(src: &str) -> String {
    let mut output = Vec::new();
    let mut inside_fence = false;
    let mut fence_is_sway = false;
    for mut line in src.lines() {
        // Inside a sway fence, drop lines swaydoc would hide from rendering.
        if inside_fence && fence_is_sway && code_line_ignored_by_swaydoc(line) {
            continue;
        }
        let fence_header = SWAYDOC_FENCES
            .into_iter()
            .find_map(|fence| line.strip_prefix(fence));
        if let Some(header) = fence_header {
            inside_fence = !inside_fence;
            if inside_fence {
                fence_is_sway = is_sway_fence(header);
                if fence_is_sway {
                    // Normalize the opening fence so clients highlight as Sway.
                    line = "```sway";
                }
            }
        }
        if inside_fence {
            // `##` at the start of a code line escapes a literal `#`.
            let trimmed = line.trim_start();
            if let Some(unescaped) = trimmed.strip_prefix('#') {
                if unescaped.starts_with('#') {
                    line = unescaped;
                }
            }
        }
        output.push(line);
    }
    output.join("\n")
}
/// Returns `true` for code lines swaydoc hides from rendered output:
/// a lone `#`, or `#` followed by a space or a tab.
fn code_line_ignored_by_swaydoc(line: &str) -> bool {
    match line.trim() {
        "#" => true,
        trimmed => trimmed.starts_with("# ") || trimmed.starts_with("#\t"),
    }
}
// stripped down version of https://github.com/rust-lang/rust/blob/392ba2ba1a7d6c542d2459fb8133bebf62a4a423/src/librustdoc/html/markdown.rs#L810-L933
/// Decides whether a fence header (the text after ```` ``` ````) denotes Sway
/// code. An empty header counts as Sway; known rustdoc-style modifier tags
/// keep it Sway unless an unrecognized tag (e.g. `text`, `sh`) was seen.
fn is_sway_fence(s: &str) -> bool {
    let mut sway_tag_seen = false;
    let mut other_tag_seen = false;
    for token in s.trim().split([',', ' ', '\t']) {
        match token.trim() {
            // Empty fragments between separators are skipped.
            "" => {}
            "should_panic" | "no_run" | "ignore" | "allow_fail" => {
                sway_tag_seen = !other_tag_seen;
            }
            "sway" => sway_tag_seen = true,
            "test_harness" | "compile_fail" => sway_tag_seen = !other_tag_seen || sway_tag_seen,
            _ => other_tag_seen = true,
        }
    }
    !other_tag_seen || sway_tag_seen
}
#[cfg(test)]
mod tests {
    use super::*;

    // Unannotated fences are treated as Sway and normalized.
    #[test]
    fn test_format_docs_adds_sway() {
        let comment = "```\nfn some_sway() {}\n```";
        assert_eq!(format_docs(comment), "```sway\nfn some_sway() {}\n```");
    }

    // `text` fences are passed through untouched.
    #[test]
    fn test_format_docs_handles_plain_text() {
        let comment = "```text\nthis is plain text\n```";
        assert_eq!(format_docs(comment), "```text\nthis is plain text\n```");
    }

    // Unknown language tags are left as-is.
    #[test]
    fn test_format_docs_handles_non_sway() {
        let comment = "```sh\nsupposedly shell code\n```";
        assert_eq!(format_docs(comment), "```sh\nsupposedly shell code\n```");
    }

    // rustdoc-style modifier tags like `ignore` alias to Sway.
    #[test]
    fn test_format_docs_handles_sway_alias() {
        let comment = "```ignore\nlet z = 55;\n```";
        assert_eq!(format_docs(comment), "```sway\nlet z = 55;\n```");
    }

    // Extra modifiers after `sway` are dropped from the fence header.
    #[test]
    fn test_format_docs_handles_complex_code_block_attrs() {
        let comment = "```sway,no_run\nlet z = 55;\n```";
        assert_eq!(format_docs(comment), "```sway\nlet z = 55;\n```");
    }

    // `# `-prefixed lines inside a sway fence are hidden; `#ident` stays.
    #[test]
    fn test_format_docs_skips_comments_in_sway_block() {
        let comment =
            "```sway\n # skip1\n# skip2\n#stay1\nstay2\n#\n #\n # \n #\tskip3\n\t#\t\n```";
        assert_eq!(format_docs(comment), "```sway\n#stay1\nstay2\n```");
    }

    // Hiding only applies to sway fences, not to `text` fences.
    #[test]
    fn test_format_docs_does_not_skip_lines_if_plain_text() {
        let comment =
            "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```";
        assert_eq!(
            format_docs(comment),
            "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```",
        );
    }

    // Outside any fence, `#` lines are ordinary markdown and untouched.
    #[test]
    fn test_format_docs_keeps_comments_outside_of_sway_block() {
        let comment = " # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t";
        assert_eq!(format_docs(comment), comment);
    }

    #[test]
    fn test_format_docs_preserves_newlines() {
        let comment = "this\nis\nmultiline";
        assert_eq!(format_docs(comment), comment);
    }

    #[test]
    fn test_code_blocks_in_comments_marked_as_sway() {
        let comment = r#"```sway
fn main(){}
```
Some comment.
```
let a = 1;
```"#;
        assert_eq!(
            format_docs(comment),
            "```sway\nfn main(){}\n```\nSome comment.\n```sway\nlet a = 1;\n```"
        );
    }

    #[test]
    fn test_code_blocks_in_comments_marked_as_text() {
        let comment = r#"```text
filler
text
```
Some comment.
```
let a = 1;
```"#;
        assert_eq!(
            format_docs(comment),
            "```text\nfiller\ntext\n```\nSome comment.\n```sway\nlet a = 1;\n```"
        );
    }

    // `##` at the start of an in-fence line unescapes to a single `#`.
    #[test]
    fn test_format_docs_handles_escape_double_hashes() {
        let comment = r#"```sway
let s = "foo
## bar # baz";
```"#;
        assert_eq!(
            format_docs(comment),
            "```sway\nlet s = \"foo\n# bar # baz\";\n```"
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/mod.rs | sway-lsp/src/utils/mod.rs | pub(crate) mod attributes;
pub mod debug;
pub(crate) mod document;
pub mod keyword_docs;
pub mod markdown;
pub(crate) mod markup;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/utils/attributes.rs | sway-lsp/src/utils/attributes.rs | #![allow(dead_code)]
use crate::core::token::{ParsedAstToken, Token, TokenAstNode, TypedAstToken};
use sway_core::{
language::{parsed::Declaration, ty},
transform, Engines,
};
/// Gets attributes from typed token, falling back to parsed AST node if needed.
/// Callback can be used to retrieve doc comment attributes or storage attributes.
///
/// The callback is invoked at most once; token kinds that carry no attributes
/// are ignored and the callback never fires for them.
pub fn attributes<F>(engines: &Engines, token: &Token, mut callback: F)
where
    F: FnMut(&transform::Attributes),
{
    match &token.ast_node {
        // Typed tokens: attributes are read from the resolved declarations in
        // the decl engine.
        TokenAstNode::Typed(typed_token) => match typed_token {
            TypedAstToken::TypedDeclaration(decl) => match decl {
                ty::TyDecl::EnumDecl(ty::EnumDecl { decl_id, .. }) => {
                    let enum_decl = engines.de().get_enum(decl_id);
                    callback(&enum_decl.attributes);
                }
                ty::TyDecl::StructDecl(ty::StructDecl { decl_id, .. }) => {
                    let struct_decl = engines.de().get_struct(decl_id);
                    callback(&struct_decl.attributes);
                }
                ty::TyDecl::StorageDecl(ty::StorageDecl { decl_id, .. }) => {
                    let storage_decl = engines.de().get_storage(decl_id);
                    callback(&storage_decl.attributes);
                }
                ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
                    let abi_decl = engines.de().get_abi(decl_id);
                    callback(&abi_decl.attributes);
                }
                _ => {}
            },
            // These token kinds carry their attributes inline, no engine
            // lookup required.
            TypedAstToken::TypedFunctionDeclaration(fn_decl) => {
                callback(&fn_decl.attributes);
            }
            TypedAstToken::TypedConstantDeclaration(constant) => {
                callback(&constant.attributes);
            }
            TypedAstToken::TypedStorageField(field) => {
                callback(&field.attributes);
            }
            TypedAstToken::TypedStructField(field) => {
                callback(&field.attributes);
            }
            TypedAstToken::TypedTraitFn(trait_fn) => {
                callback(&trait_fn.attributes);
            }
            TypedAstToken::TypedEnumVariant(variant) => {
                callback(&variant.attributes);
            }
            TypedAstToken::TypedConfigurableDeclaration(configurable) => {
                callback(&configurable.attributes);
            }
            TypedAstToken::TypedTraitTypeDeclaration(trait_type) => {
                callback(&trait_type.attributes);
            }
            TypedAstToken::TypedTypeAliasDeclaration(type_alias) => {
                callback(&type_alias.attributes);
            }
            _ => {}
        },
        // Parsed tokens (fallback): declarations resolve through the
        // parsed-decl engine instead.
        TokenAstNode::Parsed(parsed_token) => match &parsed_token {
            ParsedAstToken::Declaration(declaration) => match declaration {
                Declaration::EnumDeclaration(decl_id) => {
                    let decl = engines.pe().get_enum(decl_id);
                    callback(&decl.attributes);
                }
                Declaration::FunctionDeclaration(decl_id) => {
                    let decl = engines.pe().get_function(decl_id);
                    callback(&decl.attributes);
                }
                Declaration::StructDeclaration(decl_id) => {
                    let decl = engines.pe().get_struct(decl_id);
                    callback(&decl.attributes);
                }
                Declaration::ConstantDeclaration(decl_id) => {
                    let decl = engines.pe().get_constant(decl_id);
                    callback(&decl.attributes);
                }
                Declaration::StorageDeclaration(decl_id) => {
                    let decl = engines.pe().get_storage(decl_id);
                    callback(&decl.attributes);
                }
                Declaration::AbiDeclaration(decl_id) => {
                    let decl = engines.pe().get_abi(decl_id);
                    callback(&decl.attributes);
                }
                _ => {}
            },
            ParsedAstToken::StorageField(field) => callback(&field.attributes),
            ParsedAstToken::StructField(field) => callback(&field.attributes),
            ParsedAstToken::TraitFn(decl_id) => {
                let decl = engines.pe().get_trait_fn(decl_id);
                callback(&decl.attributes);
            }
            ParsedAstToken::EnumVariant(variant) => callback(&variant.attributes),
            _ => {}
        },
    }
}
/// Invokes `callback` with the token's doc-comment attributes, if it has any.
/// The callback is skipped entirely when no doc comments are attached.
pub fn doc_comment_attributes<F>(engines: &Engines, token: &Token, mut callback: F)
where
    F: FnMut(&[&transform::Attribute]),
{
    attributes(engines, token, |attrs| {
        let doc_attrs: Vec<_> = attrs
            .of_kind(transform::AttributeKind::DocComment)
            .collect();
        if !doc_attrs.is_empty() {
            callback(&doc_attrs);
        }
    });
}
/// Invokes `callback` with the token's `#[storage(...)]` attributes, if any.
/// The callback is skipped entirely when no storage attributes are attached.
///
/// The bound is `FnMut`, consistent with `doc_comment_attributes`; this is
/// backward-compatible since every `Fn` closure is also `FnMut`.
pub fn storage_attributes<F>(engines: &Engines, token: &Token, mut callback: F)
where
    F: FnMut(&[&transform::Attribute]),
{
    attributes(engines, token, |attributes| {
        let attrs = attributes
            .of_kind(transform::AttributeKind::Storage)
            .collect::<Vec<_>>();
        if !attrs.is_empty() {
            callback(attrs.as_slice());
        }
    });
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/handlers/notification.rs | sway-lsp/src/handlers/notification.rs | //! This module is responsible for implementing handlers for Language Server
//! Protocol. This module specifically handles notification messages sent by the Client.
use crate::{
core::{document::Documents, session::Session, sync::SyncWorkspace},
error::LanguageServerError,
server_state::{CompilationContext, ServerState, TaskMessage},
};
use lsp_types::{
DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams,
DidSaveTextDocumentParams, FileChangeType, Url,
};
use std::{
collections::BTreeMap,
path::PathBuf,
sync::{atomic::Ordering, Arc},
};
/// Handles the LSP `textDocument/didOpen` notification.
///
/// Sets up the sync workspace and session for the opened file, stores the
/// document, queues a compilation, waits for the parse phase to finish, and
/// then publishes diagnostics for the file.
pub async fn handle_did_open_text_document(
    state: &ServerState,
    params: DidOpenTextDocumentParams,
) -> Result<(), LanguageServerError> {
    let file_uri = &params.text_document.uri;
    // Initialize the SyncWorkspace for this file if it doesn't exist.
    let sync_workspace = state.get_or_init_sync_workspace(file_uri).await?;
    // Get or create a session for the original file URI.
    let (uri, session) = state.uri_and_session_from_workspace(&params.text_document.uri)?;
    state.documents.handle_open_file(&uri).await;
    send_new_compilation_request(state, session.clone(), &uri, None, false, sync_workspace);
    // NOTE(review): flag is set *after* queueing the request, presumably so
    // `wait_for_parsing` below blocks until the queued compile runs — confirm.
    state.is_compiling.store(true, Ordering::SeqCst);
    state.wait_for_parsing().await;
    state
        .publish_diagnostics(uri, params.text_document.uri, session)
        .await;
    Ok(())
}
/// Queues a compilation request for the background compilation task.
///
/// If a compilation is already in flight it is flagged for retriggering, and
/// if the channel is full, stale queued compilation contexts are drained first
/// so the worker only ever sees the most recent request.
fn send_new_compilation_request(
    state: &ServerState,
    session: Arc<Session>,
    uri: &Url,
    version: Option<i32>,
    optimized_build: bool,
    sync_workspace: Arc<SyncWorkspace>,
) {
    // NOTE(review): LSP document versions are non-negative in practice, so
    // `as u64` should be lossless — confirm the client guarantees this.
    let file_versions = file_versions(&state.documents, uri, version.map(|v| v as u64));
    if state.is_compiling.load(Ordering::SeqCst) {
        // If we are already compiling, then we need to retrigger compilation
        state.retrigger_compilation.store(true, Ordering::SeqCst);
    }
    // Check if the channel is full. If it is, we want to ensure that the compilation
    // thread receives only the most recent value.
    if state.cb_tx.is_full() {
        while let Ok(TaskMessage::CompilationContext(_)) = state.cb_rx.try_recv() {
            // Loop will continue to remove `CompilationContext` messages
            // until the channel has no more of them.
        }
    }
    // A send failure only means the receiver is gone; ignored deliberately.
    let _ = state
        .cb_tx
        .send(TaskMessage::CompilationContext(CompilationContext {
            session: session.clone(),
            engines: state.engines.clone(),
            token_map: state.token_map.clone(),
            compiled_programs: state.compiled_programs.clone(),
            runnables: state.runnables.clone(),
            uri: uri.clone(),
            version,
            optimized_build,
            gc_options: state.config.read().garbage_collection.clone(),
            file_versions,
            sync: sync_workspace,
        }));
}
/// Handles the LSP `textDocument/didChange` notification.
///
/// Marks the file as dirty, applies the incremental content changes to the
/// in-memory document, and queues a recompilation tagged with the document
/// version.
pub async fn handle_did_change_text_document(
    state: &ServerState,
    params: DidChangeTextDocumentParams,
) -> Result<(), LanguageServerError> {
    // A failed dirty-mark is non-fatal: log and continue with the edit.
    if let Err(err) = state
        .pid_locked_files
        .mark_file_as_dirty(&params.text_document.uri)
    {
        tracing::warn!("Failed to mark file as dirty: {}", err);
    }
    let (uri, session) = state.uri_and_session_from_workspace(&params.text_document.uri)?;
    let sync_workspace = state.get_sync_workspace_for_uri(&params.text_document.uri)?;
    state
        .documents
        .write_changes_to_file(&uri, &params.content_changes)
        .await?;
    send_new_compilation_request(
        state,
        session.clone(),
        &uri,
        Some(params.text_document.version),
        // TODO: Set this back to true once https://github.com/FuelLabs/sway/issues/6576 is fixed.
        false,
        sync_workspace,
    );
    Ok(())
}
/// Builds the map of every tracked document path to its version: the edited
/// document gets `version`, all other documents get `None`.
fn file_versions(
    documents: &Documents,
    uri: &Url,
    version: Option<u64>,
) -> BTreeMap<PathBuf, Option<u64>> {
    // Hoisted: converting the URI is loop-invariant, so do it once instead of
    // once per tracked document. Panics on non-file URIs, as before.
    let edited_path = uri.to_file_path().unwrap();
    documents
        .iter()
        .map(|item| {
            let path = PathBuf::from(item.key());
            let v = if path == edited_path { version } else { None };
            (path, v)
        })
        .collect()
}
/// Handles the LSP `textDocument/didSave` notification.
///
/// Clears the dirty-file marker, queues a recompilation, waits for the parse
/// phase to finish, and republishes diagnostics for the file.
pub(crate) async fn handle_did_save_text_document(
    state: &ServerState,
    params: DidSaveTextDocumentParams,
) -> Result<(), LanguageServerError> {
    state
        .pid_locked_files
        .remove_dirty_flag(&params.text_document.uri)?;
    let (uri, session) = state.uri_and_session_from_workspace(&params.text_document.uri)?;
    let sync_workspace = state.get_sync_workspace_for_uri(&params.text_document.uri)?;
    send_new_compilation_request(state, session.clone(), &uri, None, false, sync_workspace);
    state.wait_for_parsing().await;
    state
        .publish_diagnostics(uri, params.text_document.uri, session)
        .await;
    Ok(())
}
/// Handles the LSP `workspace/didChangeWatchedFiles` notification.
///
/// For each change event, maps the workspace file to its temp-directory
/// counterpart and reacts per change type: re-syncs the manifest when
/// `Forc.toml` changes, and drops document state for deleted files.
pub(crate) fn handle_did_change_watched_files(
    state: &ServerState,
    params: DidChangeWatchedFilesParams,
) -> Result<(), LanguageServerError> {
    for event in params.changes {
        match state.get_sync_workspace_for_uri(&event.uri) {
            Ok(sync_workspace) => {
                let uri = sync_workspace.workspace_to_temp_url(&event.uri)?;
                match event.typ {
                    FileChangeType::CHANGED => {
                        // NOTE(review): this substring match fires for any URI
                        // that merely contains "Forc.toml" (e.g. "Forc.toml.bak");
                        // consider matching the path's file name exactly.
                        if event.uri.to_string().contains("Forc.toml") {
                            sync_workspace.sync_manifest()?;
                            // TODO: Recompile the project | see https://github.com/FuelLabs/sway/issues/7103
                        }
                    }
                    FileChangeType::DELETED => {
                        state.pid_locked_files.remove_dirty_flag(&event.uri)?;
                        let _ = state.documents.remove_document(&uri);
                    }
                    FileChangeType::CREATED => {
                        // TODO: handle this case
                    }
                    _ => {}
                }
            }
            Err(err) => {
                // Non-fatal: keep processing the remaining change events.
                tracing::error!("Failed to get sync workspace for {}: {}", event.uri, err);
            }
        }
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/handlers/request.rs | sway-lsp/src/handlers/request.rs | //! This module is responsible for implementing handlers for Language Server
//! Protocol. This module specifically handles requests.
use crate::{
capabilities,
core::session::{self, build_plan, program_id_from_path},
lsp_ext,
server_state::ServerState,
utils::debug,
};
use forc_tracing::{tracing_subscriber, FmtSpan, TracingWriter};
use lsp_types::{
CodeLens, CompletionResponse, DocumentFormattingParams, DocumentSymbolResponse,
InitializeResult, InlayHint, InlayHintParams, PrepareRenameResponse, RenameParams,
SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
SemanticTokensResult, TextDocumentIdentifier, Url, WorkspaceEdit,
};
use std::{
fs::File,
io::Write,
path::{Path, PathBuf},
};
use sway_types::{Ident, Spanned};
use sway_utils::PerformanceData;
use tower_lsp::jsonrpc::Result;
use tracing::metadata::LevelFilter;
/// Handles the LSP `initialize` request.
///
/// Applies client-provided initialization options to the server config,
/// starts a heartbeat that shuts the server down if the client process exits,
/// installs the tracing subscriber per the configured log level, and returns
/// the server's capabilities.
pub fn handle_initialize(
    state: &ServerState,
    params: &lsp_types::InitializeParams,
) -> Result<InitializeResult> {
    if let Some(initialization_options) = &params.initialization_options {
        let mut config = state.config.write();
        // Malformed options silently fall back to the default configuration.
        *config = serde_json::from_value(initialization_options.clone())
            .ok()
            .unwrap_or_default();
    }
    // Start a thread that will shutdown the server if the client process is no longer active.
    if let Some(client_pid) = params.process_id {
        state.spawn_client_heartbeat(client_pid as usize);
    }
    // Initializing tracing library based on the user's config
    let config = state.config.read();
    if config.logging.level != LevelFilter::OFF {
        tracing_subscriber::fmt::Subscriber::builder()
            .with_ansi(false)
            .with_max_level(config.logging.level)
            .with_span_events(FmtSpan::CLOSE)
            .with_writer(TracingWriter::Stderr)
            .init();
    }
    tracing::info!("Initializing the Sway Language Server");
    Ok(InitializeResult {
        server_info: None,
        capabilities: crate::server_capabilities(),
        ..InitializeResult::default()
    })
}
/// Handles `textDocument/documentSymbol`: returns the nested symbol tree for
/// the document, or `None` (after logging) if the workspace URI can't be
/// resolved.
pub async fn handle_document_symbol(
    state: &ServerState,
    params: lsp_types::DocumentSymbolParams,
) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
    let _ = state.wait_for_parsing().await;
    state
        .uri_from_workspace(&params.text_document.uri)
        .map(|uri| {
            session::document_symbols(
                &uri,
                &state.token_map,
                &state.engines.read(),
                &state.compiled_programs,
            )
            .map(DocumentSymbolResponse::Nested)
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/definition`: resolves the definition of the token at
/// the given position, or returns `None` (after logging) on failure.
pub fn handle_goto_definition(
    state: &ServerState,
    params: lsp_types::GotoDefinitionParams,
) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
    let position = params.text_document_position_params.position;
    state
        .sync_and_uri_from_workspace(&params.text_document_position_params.text_document.uri)
        .map(|(sync, uri)| {
            session::token_definition_response(
                &uri,
                position,
                &state.engines.read(),
                &state.token_map,
                &sync,
            )
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/completion`: produces completion items for the cursor
/// position, honoring the trigger character when the client supplies one.
pub fn handle_completion(
    state: &ServerState,
    params: lsp_types::CompletionParams,
) -> Result<Option<lsp_types::CompletionResponse>> {
    let position = params.text_document_position.position;
    // Empty string when the request carried no trigger character.
    let trigger_char = params
        .context
        .as_ref()
        .and_then(|ctx| ctx.trigger_character.as_deref())
        .unwrap_or("");
    state
        .uri_from_workspace(&params.text_document_position.text_document.uri)
        .map(|uri| {
            session::completion_items(
                &uri,
                position,
                trigger_char,
                &state.token_map,
                &state.engines.read(),
                &state.compiled_programs,
            )
            .map(CompletionResponse::Array)
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/hover`: returns hover content for the token under the
/// cursor, or `None` (after logging) on failure.
pub fn handle_hover(
    state: &ServerState,
    params: lsp_types::HoverParams,
) -> Result<Option<lsp_types::Hover>> {
    let position = params.text_document_position_params.position;
    state
        .sync_and_uri_from_workspace(&params.text_document_position_params.text_document.uri)
        .map(|(sync, uri)| {
            capabilities::hover::hover_data(state, sync, &state.engines.read(), &uri, position)
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/prepareRename`: checks whether the symbol at the
/// position is renameable. Any failure is logged and reported as `None`.
pub fn handle_prepare_rename(
    state: &ServerState,
    params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<PrepareRenameResponse>> {
    match state.sync_and_uri_from_workspace(&params.text_document.uri) {
        Ok((sync, uri)) => match capabilities::rename::prepare_rename(
            &state.engines.read(),
            &state.token_map,
            &uri,
            params.position,
            &sync,
        ) {
            Ok(response) => Ok(Some(response)),
            Err(e) => {
                tracing::error!("{}", e);
                Ok(None)
            }
        },
        Err(e) => {
            tracing::error!("{}", e);
            Ok(None)
        }
    }
}
/// Handles `textDocument/rename`: computes the workspace edit that renames the
/// symbol at the position. Any failure is logged and reported as `None`.
pub fn handle_rename(state: &ServerState, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
    let position = params.text_document_position.position;
    match state.sync_and_uri_from_workspace(&params.text_document_position.text_document.uri) {
        Ok((sync, uri)) => match capabilities::rename::rename(
            &state.engines.read(),
            &state.token_map,
            params.new_name,
            &uri,
            position,
            &sync,
        ) {
            Ok(edit) => Ok(Some(edit)),
            Err(e) => {
                tracing::error!("{}", e);
                Ok(None)
            }
        },
        Err(e) => {
            tracing::error!("{}", e);
            Ok(None)
        }
    }
}
/// Handles `textDocument/documentHighlight`: collects all occurrences of the
/// token under the cursor within the current document.
pub async fn handle_document_highlight(
    state: &ServerState,
    params: lsp_types::DocumentHighlightParams,
) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
    let _ = state.wait_for_parsing().await;
    let position = params.text_document_position_params.position;
    state
        .uri_from_workspace(&params.text_document_position_params.text_document.uri)
        .map(|uri| {
            capabilities::highlight::get_highlights(
                &state.engines.read(),
                &state.token_map,
                &uri,
                position,
            )
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/references`: finds all references to the token at the
/// given position across the workspace.
pub async fn handle_references(
    state: &ServerState,
    params: lsp_types::ReferenceParams,
) -> Result<Option<Vec<lsp_types::Location>>> {
    let _ = state.wait_for_parsing().await;
    let position = params.text_document_position.position;
    state
        .sync_and_uri_from_workspace(&params.text_document_position.text_document.uri)
        .map(|(sync, uri)| {
            session::token_references(
                &uri,
                position,
                &state.token_map,
                &state.engines.read(),
                &sync,
            )
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/formatting`: formats the whole document and returns
/// the resulting text edits, or `None` (after logging) on failure.
pub async fn handle_formatting(
    state: &ServerState,
    params: DocumentFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
    let _ = state.wait_for_parsing().await;
    let edits = state
        .uri_and_session_from_workspace(&params.text_document.uri)
        .and_then(|(uri, _)| capabilities::formatting::format_text(&state.documents, &uri));
    match edits {
        Ok(edits) => Ok(Some(edits)),
        Err(err) => {
            tracing::error!("{}", err.to_string());
            Ok(None)
        }
    }
}
/// Handles `textDocument/codeAction`: computes the code actions available for
/// the given range and diagnostics.
pub async fn handle_code_action(
    state: &ServerState,
    params: lsp_types::CodeActionParams,
) -> Result<Option<lsp_types::CodeActionResponse>> {
    let _ = state.wait_for_parsing().await;
    state
        .uri_from_workspace(&params.text_document.uri)
        .map(|temp_uri| {
            capabilities::code_actions(
                &state.engines.read(),
                &state.token_map,
                &params.range,
                &params.text_document.uri,
                &temp_uri,
                &params.context.diagnostics,
                &state.compiled_programs,
            )
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/codeLens`: returns the runnable code lenses for the
/// document.
pub async fn handle_code_lens(
    state: &ServerState,
    params: lsp_types::CodeLensParams,
) -> Result<Option<Vec<CodeLens>>> {
    let _ = state.wait_for_parsing().await;
    state
        .uri_from_workspace(&params.text_document.uri)
        .map(|url| Some(capabilities::code_lens::code_lens(&state.runnables, &url)))
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/semanticTokens/range`: returns semantic tokens for a
/// sub-range of the document.
pub async fn handle_semantic_tokens_range(
    state: &ServerState,
    params: SemanticTokensRangeParams,
) -> Result<Option<SemanticTokensRangeResult>> {
    let _ = state.wait_for_parsing().await;
    state
        .uri_from_workspace(&params.text_document.uri)
        .map(|uri| {
            capabilities::semantic_tokens::semantic_tokens_range(
                &state.token_map,
                &uri,
                &params.range,
            )
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/semanticTokens/full`: returns semantic tokens for the
/// entire document.
pub async fn handle_semantic_tokens_full(
    state: &ServerState,
    params: SemanticTokensParams,
) -> Result<Option<SemanticTokensResult>> {
    let _ = state.wait_for_parsing().await;
    state
        .uri_from_workspace(&params.text_document.uri)
        .map(|uri| capabilities::semantic_tokens::semantic_tokens_full(&state.token_map, &uri))
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Handles `textDocument/inlayHint`: produces inlay hints for the requested
/// range, honoring the user's inlay-hint configuration.
pub async fn handle_inlay_hints(
    state: &ServerState,
    params: InlayHintParams,
) -> Result<Option<Vec<InlayHint>>> {
    let _ = state.wait_for_parsing().await;
    state
        .uri_and_session_from_workspace(&params.text_document.uri)
        .map(|(uri, _)| {
            let config = &state.config.read().inlay_hints;
            capabilities::inlay_hints::inlay_hints(
                &state.engines.read(),
                &state.token_map,
                &uri,
                &params.range,
                config,
            )
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// This method is triggered by a command palette request in VScode
/// The 3 commands are: "show lexed ast", "show parsed ast" or "show typed ast"
///
/// If any of these commands are executed, the client requests this method
/// by calling the "sway/show_ast".
///
/// The function expects the URI of the current open file where the
/// request was made, and if the "lexed", "parsed" or "typed" ast was requested.
///
/// A formatted AST is written to a temporary file and the URI is
/// returned to the client so it can be opened and displayed in a
/// separate side panel.
pub fn handle_show_ast(
    state: &ServerState,
    params: &lsp_ext::ShowAstParams,
) -> Result<Option<TextDocumentIdentifier>> {
    match state.uri_from_workspace(&params.text_document.uri) {
        Ok(uri) => {
            // Convert the Uri to a PathBuf
            // NOTE(review): panics on non-file URIs and on program ids unknown
            // to the engines — confirm callers guarantee both invariants.
            let path = uri.to_file_path().unwrap();
            let program_id = program_id_from_path(&path, &state.engines.read()).unwrap();
            // Writes the formatted AST under `path` and returns its URI, or
            // `None` when the file can't be created.
            let write_ast_to_file =
                |path: &Path, ast_string: &String| -> Option<TextDocumentIdentifier> {
                    if let Ok(mut file) = File::create(path) {
                        let _ = writeln!(&mut file, "{ast_string}");
                        if let Ok(uri) = Url::from_file_path(path) {
                            // Return the tmp file path where the AST has been written to.
                            return Some(TextDocumentIdentifier::new(uri));
                        }
                    }
                    None
                };
            // Returns true if the current path matches the path of a submodule
            let path_is_submodule = |ident: &Ident, path: &PathBuf| -> bool {
                ident
                    .span()
                    .source_id()
                    .map(|p| state.engines.read().se().get_path(p))
                    == Some(path.clone())
            };
            let ast_path = PathBuf::from(params.save_path.path());
            {
                let program = state.compiled_programs.get(&program_id).unwrap();
                match params.ast_kind.as_str() {
                    "lexed" => {
                        // NOTE(review): `lexed_program` is cloned here but the
                        // root AST below is formatted from `program.lexed`; the
                        // clone is only used for the submodule walk (the parsed
                        // branch formats the clone instead).
                        let lexed_program = program.value().lexed.clone();
                        Ok({
                            let mut formatted_ast = format!("{:#?}", program.lexed);
                            for (ident, submodule) in &lexed_program.root.submodules {
                                if path_is_submodule(ident, &path) {
                                    // overwrite the root AST with the submodule AST
                                    formatted_ast = format!("{:#?}", submodule.module.tree);
                                }
                            }
                            write_ast_to_file(ast_path.join("lexed.rs").as_path(), &formatted_ast)
                        })
                    }
                    "parsed" => {
                        let parsed_program = program.value().parsed.clone();
                        Ok({
                            // Initialize the string with the AST from the root
                            let mut formatted_ast =
                                format!("{:#?}", parsed_program.root.tree.root_nodes);
                            for (ident, submodule) in &parsed_program.root.submodules {
                                if path_is_submodule(ident, &path) {
                                    // overwrite the root AST with the submodule AST
                                    formatted_ast =
                                        format!("{:#?}", submodule.module.tree.root_nodes);
                                }
                            }
                            write_ast_to_file(ast_path.join("parsed.rs").as_path(), &formatted_ast)
                        })
                    }
                    "typed" => {
                        // NOTE(review): panics if type checking never produced
                        // a typed program for this file — confirm acceptable.
                        let typed_program = program.value().typed.as_ref().unwrap();
                        Ok({
                            // Initialize the string with the AST from the root
                            let mut formatted_ast = debug::print_decl_engine_types(
                                &typed_program.root_module.all_nodes,
                                state.engines.read().de(),
                            );
                            for (ident, submodule) in &typed_program.root_module.submodules {
                                if path_is_submodule(ident, &path) {
                                    // overwrite the root AST with the submodule AST
                                    formatted_ast = debug::print_decl_engine_types(
                                        &submodule.module.all_nodes,
                                        state.engines.read().de(),
                                    );
                                }
                            }
                            write_ast_to_file(ast_path.join("typed.rs").as_path(), &formatted_ast)
                        })
                    }
                    // Unknown AST kinds are ignored.
                    _ => Ok(None),
                }
            }
        }
        Err(err) => {
            tracing::error!("{}", err.to_string());
            Ok(None)
        }
    }
}
/// This method is triggered when the user hits enter or pastes a newline in
/// the editor; returns the resulting workspace edit, if any.
pub fn handle_on_enter(
    state: &ServerState,
    params: &lsp_ext::OnEnterParams,
) -> Result<Option<WorkspaceEdit>> {
    state
        .sync_and_uri_from_workspace(&params.text_document.uri)
        .map(|(_, uri)| {
            // handle on_enter capabilities if they are enabled
            capabilities::on_enter(&state.config.read().on_enter, &state.documents, &uri, params)
        })
        .or_else(|err| {
            tracing::error!("{}", err.to_string());
            Ok(None)
        })
}
/// Returns a [String] of the GraphViz DOT representation of a graph.
///
/// Only the "build_plan" graph kind is currently supported; anything else
/// yields `None`.
pub fn handle_visualize(
    _state: &ServerState,
    params: &lsp_ext::VisualizeParams,
) -> Result<Option<String>> {
    if params.graph_kind.as_str() != "build_plan" {
        return Ok(None);
    }
    match build_plan(&params.text_document.uri) {
        Ok(plan) => Ok(Some(plan.visualize(Some("vscode://file".to_string())))),
        Err(err) => {
            tracing::error!("{}", err.to_string());
            Ok(None)
        }
    }
}
/// This method is triggered by the test suite to request the latest compilation metrics.
/// Returns one `(manifest path, metrics)` pair per compiled program.
pub(crate) fn metrics(state: &ServerState) -> Result<Option<Vec<(String, PerformanceData)>>> {
    let metrics: Vec<(String, PerformanceData)> = state
        .compiled_programs
        .iter()
        .map(|item| {
            let path = state
                .engines
                .read()
                .se()
                .get_manifest_path_from_program_id(item.key())
                .unwrap()
                .to_string_lossy()
                .to_string();
            (path, item.value().metrics.clone())
        })
        .collect();
    Ok(Some(metrics))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/token_map_ext.rs | sway-lsp/src/core/token_map_ext.rs | //! This module provides the `TokenMapExt` trait, which extends iterators over tokens with
//! additional functionality, such as finding all references of a given token in a TokenMap.
//!
//! The `TokenMapExt` trait is implemented for any iterator that yields (Ident, Token) pairs.
use crate::core::token::{Token, TokenIdent};
use dashmap::mapref::multiple::RefMulti;
use sway_core::Engines;
/// A trait for extending iterators with the `all_references_of_token` method.
pub trait TokenMapExt<'s>: Sized {
    /// Find all references in the TokenMap for a given token.
    ///
    /// This is useful for the highlighting and renaming LSP capabilities.
    fn all_references_of_token(
        self,
        token_to_match: &'s Token,
        engines: &'s Engines,
    ) -> AllReferencesOfToken<'s, Self>;
}
/// Implement `TokenMapExt` for any iterator that yields `RefMulti` for `TokenIdent, Token` pairs.
impl<'s, I> TokenMapExt<'s> for I
where
    I: Iterator<Item = RefMulti<'s, TokenIdent, Token>>,
{
    fn all_references_of_token(
        self,
        token_to_match: &'s Token,
        engines: &'s Engines,
    ) -> AllReferencesOfToken<'s, Self> {
        // Lazily wraps the underlying iterator; no filtering happens until
        // the returned iterator is polled.
        AllReferencesOfToken {
            token_to_match,
            engines,
            iter: self,
        }
    }
}
/// A custom iterator that returns all references of a given token.
pub struct AllReferencesOfToken<'s, I> {
    /// The token whose references we are searching for.
    token_to_match: &'s Token,
    /// Engines used to resolve a token's declaration ident.
    engines: &'s Engines,
    /// The underlying `(TokenIdent, Token)` iterator being filtered.
    iter: I,
}
impl<'s, I> Iterator for AllReferencesOfToken<'s, I>
where
    I: Iterator<Item = RefMulti<'s, TokenIdent, Token>>,
{
    type Item = RefMulti<'s, TokenIdent, Token>;
    /// Yields the next token entry that refers to (or is) the declaration of
    /// `token_to_match`.
    fn next(&mut self) -> Option<Self::Item> {
        // The declaration ident of the token we are matching against depends
        // only on `token_to_match` and `engines`, so it is loop-invariant:
        // compute it once per `next` call instead of once per entry.
        let decl_ident_to_match = self.token_to_match.declared_token_ident(self.engines);
        for entry in self.iter.by_ref() {
            let (ident, token) = entry.pair();
            // A reference either resolves to the same declaration ident...
            let is_same_type = decl_ident_to_match == token.declared_token_ident(self.engines);
            // ...or is the declaration itself.
            let is_decl_of_token = Some(ident) == decl_ident_to_match.as_ref();
            if decl_ident_to_match.is_some() && is_same_type || is_decl_of_token {
                return Some(entry);
            }
        }
        None
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/session.rs | sway-lsp/src/core/session.rs | use crate::{
capabilities::{
self,
diagnostic::DiagnosticMap,
runnable::{Runnable, RunnableMainFn, RunnableTestFn},
},
core::{
sync::SyncWorkspace,
token::{self, TypedAstToken},
token_map::{TokenMap, TokenMapExt},
},
error::{DirectoryError, DocumentError, LanguageServerError},
server_state::{self, CompilationContext, CompiledPrograms, RunnableMap},
traverse::{
dependency, lexed_tree::LexedTree, parsed_tree::ParsedTree, typed_tree::TypedTree,
ParseContext,
},
};
use forc_pkg as pkg;
use lsp_types::{
CompletionItem, DocumentSymbol, GotoDefinitionResponse, Location, Position, Range, Url,
};
use parking_lot::RwLock;
use pkg::{
manifest::{GenericManifestFile, ManifestFile},
BuildPlan,
};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{
collections::HashMap,
ops::Deref,
path::PathBuf,
sync::{atomic::AtomicBool, Arc},
time::SystemTime,
};
use sway_ast::{attribute::Annotated, ItemKind};
use sway_core::{
decl_engine::DeclEngine,
language::{
lexed::LexedProgram,
parsed::{AstNode, ParseProgram},
ty::{self},
HasSubmodules,
},
BuildTarget, Engines, LspConfig, Programs,
};
use sway_error::{
error::CompileError,
handler::Handler,
warning::{CompileInfo, CompileWarning},
};
use sway_types::{ProgramId, SourceEngine, Spanned};
/// A `Session` is used to store information about a single member in a workspace.
///
/// The API provides methods for responding to LSP requests from the server.
#[derive(Debug)]
pub struct Session {
    /// Caches the most recently resolved `BuildPlan` so repeated requests can
    /// skip dependency resolution when the manifest has not changed.
    pub build_plan_cache: BuildPlanCache,
    // Cached diagnostic results that require a lock to access. Readers will wait for writers to complete.
    pub diagnostics: Arc<RwLock<DiagnosticMap>>,
}
impl Default for Session {
    fn default() -> Self {
        Self::new()
    }
}
impl Session {
    /// Creates a session with an empty build-plan cache and no diagnostics.
    pub fn new() -> Self {
        Session {
            build_plan_cache: BuildPlanCache::default(),
            diagnostics: Arc::new(RwLock::new(DiagnosticMap::new())),
        }
    }
}
/// Clean up memory in the [TypeEngine] and [DeclEngine] for the user's workspace.
pub fn garbage_collect_program(
    engines: &mut Engines,
    sync: &SyncWorkspace,
) -> Result<(), LanguageServerError> {
    let _p = tracing::trace_span!("garbage_collect").entered();
    let manifest_dir = sync.temp_dir()?;
    // Resolve the program id in its own statement so the immutable borrow of
    // `engines` ends before the mutable `clear_program` call below.
    let maybe_program_id = engines.se().get_program_id_from_manifest_path(&manifest_dir);
    if let Some(program_id) = maybe_program_id {
        engines.clear_program(&program_id);
    }
    Ok(())
}
/// Clean up memory in the [TypeEngine] and [DeclEngine] for the modified file.
pub fn garbage_collect_module(engines: &mut Engines, uri: &Url) -> Result<(), LanguageServerError> {
    let module_path = uri.to_file_path().unwrap();
    // Look up the source id before taking the mutable borrow for clearing.
    let source_id = engines.se().get_source_id(&module_path);
    engines.clear_module(&source_id);
    Ok(())
}
/// Collects the workspace locations of every reference to the token at
/// `position`, or `None` when no token is found there.
pub fn token_references(
    url: &Url,
    position: Position,
    token_map: &TokenMap,
    engines: &Engines,
    sync: &SyncWorkspace,
) -> Option<Vec<Location>> {
    let _p = tracing::trace_span!("token_references").entered();
    let token = token_map.token_at_position(url, position)?;
    let locations = token_map
        .iter()
        .all_references_of_token(token.value(), engines)
        .filter_map(|entry| {
            let ident = entry.key();
            // Tokens without an on-disk path (e.g. generated code) are skipped.
            let file_url = Url::from_file_path(ident.path.as_ref()?).ok()?;
            let workspace_url = sync.to_workspace_url(file_url)?;
            Some(Location::new(workspace_url, ident.range))
        })
        .collect();
    Some(locations)
}
/// Returns the ranges of every reference (within the same file) to the token
/// at `position`, sorted by starting line, or `None` when no token is there.
pub fn token_ranges(
    engines: &Engines,
    token_map: &TokenMap,
    url: &Url,
    position: Position,
) -> Option<Vec<Range>> {
    let _p = tracing::trace_span!("token_ranges").entered();
    let mut token_ranges: Vec<_> = token_map
        .tokens_for_file(url)
        .all_references_of_token(token_map.token_at_position(url, position)?.value(), engines)
        .map(|item| item.key().range)
        .collect();
    // Present ranges in document order. `sort_by_key` states the intent
    // directly instead of a hand-written comparator; the sort stays stable so
    // same-line ranges keep their original relative order.
    token_ranges.sort_by_key(|range| range.start.line);
    Some(token_ranges)
}
/// Resolves a go-to-definition request for the token at `position`.
///
/// Returns `None` when there is no token at the position, the token has no
/// declaration, or the declaration's file cannot be mapped into the workspace.
pub fn token_definition_response(
    uri: &Url,
    position: Position,
    engines: &Engines,
    token_map: &TokenMap,
    sync: &SyncWorkspace,
) -> Option<GotoDefinitionResponse> {
    let _p = tracing::trace_span!("token_definition_response").entered();
    let token = token_map.token_at_position(uri, position)?;
    let decl_ident = token.value().declared_token_ident(engines)?;
    // Declarations without an on-disk path (e.g. generated code) yield None.
    let path = decl_ident.path?;
    // We use ok() here because we don't care about propagating the error from from_file_path.
    let url = Url::from_file_path(path).ok()?;
    let workspace_url = sync.to_workspace_url(url)?;
    Some(GotoDefinitionResponse::Scalar(Location::new(
        workspace_url,
        decl_ident.range,
    )))
}
/// Builds completion items for the position where `trigger_char` was typed.
///
/// Returns `None` when there is no cached program for the file, no token at
/// the shifted position, or the position is not inside a typed function.
pub fn completion_items(
    uri: &Url,
    position: Position,
    trigger_char: &str,
    token_map: &TokenMap,
    engines: &Engines,
    compiled_programs: &CompiledPrograms,
) -> Option<Vec<CompletionItem>> {
    let _p = tracing::trace_span!("completion_items").entered();
    let program = compiled_programs.program_from_uri(uri, engines)?;
    // Shift the position left past the trigger character (e.g. `.`) so we
    // look up the token being completed on rather than the trigger itself.
    let shifted_position = Position {
        line: position.line,
        character: position
            .character
            .saturating_sub(trigger_char.len() as u32 + 1),
    };
    let t = token_map.token_at_position(uri, shifted_position)?;
    let ident_to_complete = t.key();
    // Find the enclosing function declaration; its scope supplies the
    // namespace used to resolve completion candidates.
    let fn_tokens = token_map.tokens_at_position(engines, uri, shifted_position, Some(true));
    let fn_token = fn_tokens.first()?.value();
    if let Some(TypedAstToken::TypedFunctionDeclaration(fn_decl)) = fn_token.as_typed() {
        // NOTE(review): `typed.as_ref().unwrap()` assumes cached programs are
        // only stored with a successful typed AST — confirm; otherwise this
        // can panic.
        return Some(capabilities::completion::to_completion_items(
            &program.value().typed.as_ref().unwrap().namespace,
            engines,
            ident_to_complete,
            fn_decl,
            position,
        ));
    }
    None
}
/// Generate hierarchical document symbols for the given file.
///
/// Returns `None` when the url is not a file path or no compiled program is
/// cached for it.
pub fn document_symbols(
    url: &Url,
    token_map: &TokenMap,
    engines: &Engines,
    compiled_programs: &CompiledPrograms,
) -> Option<Vec<DocumentSymbol>> {
    let _p = tracing::trace_span!("document_symbols").entered();
    let path = url.to_file_path().ok()?;
    let program = compiled_programs.program_from_uri(url, engines)?;
    // Borrow the typed program straight from the cache entry instead of
    // cloning it (clippy: redundant_clone); the `program` guard stays alive
    // for the duration of the call, which keeps the borrow valid.
    let typed_program = program.value().typed.as_ref().unwrap();
    Some(capabilities::document_symbol::to_document_symbols(
        url,
        &path,
        typed_program,
        engines,
        token_map,
    ))
}
/// Create a [BuildPlan] from the given [Url] appropriate for the language server.
pub fn build_plan(uri: &Url) -> Result<BuildPlan, LanguageServerError> {
    let _p = tracing::trace_span!("build_plan").entered();
    // Every manifest-related failure reports the directory we attempted to load.
    let dir = || uri.path().into();
    let manifest = ManifestFile::from_dir(PathBuf::from(uri.path()))
        .map_err(|_| DocumentError::ManifestFileNotFound { dir: dir() })?;
    let member_manifests = manifest
        .member_manifests()
        .map_err(|_| DocumentError::MemberManifestsFailed { dir: dir() })?;
    let lock_path = manifest
        .lock_path()
        .map_err(|_| DocumentError::ManifestsLockPathFailed { dir: dir() })?;
    // TODO: Either we want LSP to deploy a local node in the background or we want this to
    // point to Fuel operated IPFS node.
    let ipfs_node = pkg::source::IPFSNode::Local;
    pkg::BuildPlan::from_lock_and_manifests(&lock_path, &member_manifests, false, false, &ipfs_node)
        .map_err(LanguageServerError::BuildPlanFailed)
}
/// Type-checks every package in `build_plan` via `forc_pkg::check`, returning
/// one `(Programs, Handler)` pair per package.
///
/// `retrigger_compilation` lets the caller cancel an in-flight compile;
/// `lsp_mode` tunes the compiler for language-server use.
pub fn compile(
    build_plan: &BuildPlan,
    engines: &Engines,
    retrigger_compilation: Option<Arc<AtomicBool>>,
    lsp_mode: Option<&LspConfig>,
) -> Result<Vec<(Option<Programs>, Handler)>, LanguageServerError> {
    let _p = tracing::trace_span!("compile").entered();
    pkg::check(
        build_plan,
        BuildTarget::default(),
        true,
        lsp_mode.cloned(),
        true,
        engines,
        retrigger_compilation,
        &[],
        &[sway_features::Feature::NewEncoding],
        sway_core::DbgGeneration::None,
    )
    .map_err(LanguageServerError::FailedToCompile)
}
/// Aggregated compiler output: (errors, warnings, infos).
type CompileResults = (Vec<CompileError>, Vec<CompileWarning>, Vec<CompileInfo>);
/// Traverses the compiled programs, populating the token map and the compiled
/// program cache, and returns the diagnostics produced by compilation.
///
/// `modified_file`, when present, restricts traversal to the program (and
/// nodes) belonging to that file so unchanged packages can be skipped.
pub fn traverse(
    member_path: PathBuf,
    results: Vec<(Option<Programs>, Handler)>,
    engines_original: Arc<RwLock<Engines>>,
    engines_clone: &Engines,
    token_map: &TokenMap,
    compiled_programs: &CompiledPrograms,
    modified_file: Option<&PathBuf>,
) -> Result<Option<CompileResults>, LanguageServerError> {
    let _p = tracing::trace_span!("traverse").entered();
    // Remove tokens for the modified file from the token map.
    if let Some(path) = modified_file {
        token_map.remove_tokens_for_file(path);
    }
    let mut diagnostics: CompileResults = (Vec::default(), Vec::default(), Vec::default());
    for (value, handler) in results.into_iter() {
        // We can convert these destructured elements to a Vec<Diagnostic> later on.
        let current_diagnostics = handler.consume();
        // NOTE(review): `diagnostics` is overwritten on every iteration, so
        // only the last package's diagnostics are returned — presumably the
        // workspace member is compiled last; confirm.
        diagnostics = current_diagnostics;
        let Some(Programs {
            lexed,
            parsed,
            typed,
            metrics,
        }) = value.as_ref()
        else {
            continue;
        };
        // Ensure that the typed program result is Ok before proceeding.
        // If it's an Err, it indicates a failure in generating the typed AST,
        // and we should return an error rather than panicking on unwrap.
        if typed.is_err() {
            return Err(LanguageServerError::FailedToParse);
        }
        let program_id = typed
            .as_ref()
            .unwrap() // safe to unwrap because we checked for Err above
            .namespace
            .current_package_ref()
            .program_id;
        let program_path = engines_clone
            .se()
            .get_manifest_path_from_program_id(&program_id)
            .unwrap();
        // Check if the cached AST was returned by the compiler for the users workspace.
        // If it was, then we need to use the original engines for traversal.
        //
        // This is due to the garbage collector removing types from the engines_clone
        // and they have not been re-added due to compilation being skipped.
        let engines_ref = engines_original.read();
        let engines = if program_path == member_path && metrics.reused_programs > 0 {
            &*engines_ref
        } else {
            engines_clone
        };
        // Convert the source_id to a path so we can use the manifest path to get the program_id.
        // This is used to store the metrics for the module.
        if let Some(source_id) = lexed.root.tree.value.span().source_id() {
            let path = engines.se().get_path(source_id);
            let program_id = program_id_from_path(&path, engines)?;
            if let Some(modified_file) = &modified_file {
                let modified_program_id = program_id_from_path(modified_file, engines)?;
                // We can skip traversing the programs for this iteration as they are unchanged.
                if program_id != modified_program_id {
                    // Update the metrics for the program before continuing. Otherwise we can't query if the program was reused.
                    if let Some(mut item) = compiled_programs.get_mut(&program_id) {
                        item.value_mut().metrics = metrics.clone();
                    }
                    continue;
                }
            }
        }
        // Recover the root module and package even from a failed typecheck,
        // as long as a partial root module exists.
        let (root_module, root) = match &typed {
            Ok(p) => (
                p.root_module.clone(),
                p.namespace.current_package_ref().clone(),
            ),
            Err(e) => {
                if let Some(root) = &e.root_module {
                    (root.deref().clone(), e.namespace.clone())
                } else {
                    return Err(LanguageServerError::FailedToParse);
                }
            }
        };
        // Create context with write guards to make readers wait until the update to token_map is complete.
        // This operation is fast because we already have the compile results.
        let ctx = ParseContext::new(token_map, engines, &root);
        // We do an extensive traversal of the users program to populate the token_map.
        // Perhaps we should do this for the workspace now as well and not just the workspace member?
        // if program_path == member_path {
        // Programs living under the LSP temp dir belong to the user's
        // workspace and receive the full three-pass traversal below.
        if program_path
            .to_str()
            .unwrap()
            .contains(SyncWorkspace::LSP_TEMP_PREFIX)
        {
            // First, populate our token_map with sway keywords.
            let lexed_tree = LexedTree::new(&ctx);
            lexed_tree.collect_module_kinds(lexed);
            parse_lexed_program(lexed, &ctx, modified_file, |an, _ctx| {
                lexed_tree.traverse_node(an)
            });
            // Next, populate our token_map with un-typed yet parsed ast nodes.
            let parsed_tree = ParsedTree::new(&ctx);
            parsed_tree.collect_module_spans(parsed);
            parse_ast_to_tokens(parsed, &ctx, modified_file, |an, _ctx| {
                parsed_tree.traverse_node(an)
            });
            // Finally, populate our token_map with typed ast nodes.
            let typed_tree = TypedTree::new(&ctx);
            typed_tree.collect_module_spans(&root_module);
            parse_ast_to_typed_tokens(&root_module, &ctx, modified_file, |node, _ctx| {
                typed_tree.traverse_node(node);
            });
        } else {
            // Collect tokens from dependencies and the standard library prelude.
            parse_ast_to_tokens(parsed, &ctx, modified_file, |an, ctx| {
                dependency::collect_parsed_declaration(an, ctx);
            });
            parse_ast_to_typed_tokens(&root_module, &ctx, modified_file, |node, ctx| {
                dependency::collect_typed_declaration(node, ctx);
            });
        }
        // Update the compiled program in the cache.
        let compiled_program = value.expect("value was checked above");
        if let Some(mut item) = compiled_programs.get_mut(&program_id) {
            *item.value_mut() = compiled_program;
        } else {
            compiled_programs.insert(program_id, compiled_program);
        }
    }
    Ok(Some(diagnostics))
}
/// Parses the project, updating the token map, cached diagnostics, and
/// runnables when reprocessing is needed.
pub fn parse_project(
    uri: &Url,
    engines_clone: &Engines,
    retrigger_compilation: Option<Arc<AtomicBool>>,
    ctx: &CompilationContext,
    lsp_mode: Option<&LspConfig>,
) -> Result<(), LanguageServerError> {
    let _p = tracing::trace_span!("parse_project").entered();
    let engines_original = ctx.engines.clone();
    // Reuse the cached build plan unless the workspace manifest changed.
    let build_plan = ctx
        .session
        .build_plan_cache
        .get_or_update(&ctx.sync.workspace_manifest_path(), || build_plan(uri))?;
    let token_map = ctx.token_map.clone();
    // Compile the project.
    let results = compile(&build_plan, engines_clone, retrigger_compilation, lsp_mode)?;
    // First check if results is empty or if all program values are None,
    // indicating an error occurred in the compiler
    if results.is_empty()
        || results
            .iter()
            .all(|(programs_opt, _)| programs_opt.is_none())
    {
        return Err(LanguageServerError::ProgramsIsNone);
    }
    let path = uri.to_file_path().unwrap();
    let member_path = ctx
        .sync
        .member_path(uri)
        .ok_or(DirectoryError::TempMemberDirNotFound)?;
    // Next check that the member path is present in the results.
    let found_program_for_member = results.iter().any(|(programs_opt, _handler)| {
        programs_opt.as_ref().is_some_and(|programs| {
            programs
                .typed
                .as_ref()
                .ok()
                .and_then(|typed| {
                    let program_id = typed.as_ref().namespace.current_package_ref().program_id();
                    engines_clone
                        .se()
                        .get_manifest_path_from_program_id(&program_id)
                })
                .is_some_and(|program_manifest_path| program_manifest_path == *member_path)
        })
    });
    if !found_program_for_member {
        // If we don't return an error here, then we will likely crash when trying to access the Engines
        // during traversal or when creating runnables.
        return Err(LanguageServerError::MemberProgramNotFound);
    }
    // Check if we need to reprocess the project.
    let (needs_reprocessing, modified_file) =
        server_state::needs_reprocessing(&ctx.token_map, &path, lsp_mode);
    // Only traverse and create runnables if we have no tokens yet, or if a file was modified
    if needs_reprocessing {
        let diagnostics = traverse(
            member_path,
            results,
            engines_original.clone(),
            engines_clone,
            &token_map,
            &ctx.compiled_programs,
            modified_file,
        )?;
        // Write diagnostics if not optimized build
        if let Some(LspConfig {
            optimized_build: false,
            ..
        }) = &lsp_mode
        {
            if let Some((errors, warnings, infos)) = &diagnostics {
                *ctx.session.diagnostics.write() = capabilities::diagnostic::get_diagnostics(
                    infos,
                    warnings,
                    errors,
                    engines_clone.se(),
                );
            }
        }
        if let Some(program) = ctx.compiled_programs.program_from_uri(uri, engines_clone) {
            // Check if the cached AST was returned by the compiler for the users workspace.
            // If it was, then we need to use the original engines.
            let engines = if program.value().metrics.reused_programs > 0 {
                &*engines_original.read()
            } else {
                engines_clone
            };
            create_runnables(
                &ctx.runnables,
                Some(program.value().typed.as_ref().unwrap()),
                engines.de(),
                engines.se(),
            );
        }
    }
    Ok(())
}
/// Parse the [LexedProgram] to populate the [TokenMap] with lexed nodes.
///
/// When `modified_file` is given, only items originating from that file are
/// visited; otherwise every item in the program (root and submodules) is.
pub fn parse_lexed_program(
    lexed_program: &LexedProgram,
    ctx: &ParseContext,
    modified_file: Option<&PathBuf>,
    f: impl Fn(&Annotated<ItemKind>, &ParseContext) + Sync,
) {
    let should_process = |item: &&Annotated<ItemKind>| {
        modified_file.is_none_or(|path| {
            item.span()
                .source_id()
                .is_some_and(|id| ctx.engines.se().get_path(id) == *path)
        })
    };
    let root_items = lexed_program.root.tree.value.items.iter();
    let submodule_items = lexed_program
        .root
        .submodules_recursive()
        .flat_map(|(_, submodule)| &submodule.module.tree.value.items);
    // Materialize the filtered set so rayon can process it in parallel.
    root_items
        .chain(submodule_items)
        .filter(should_process)
        .collect::<Vec<_>>()
        .par_iter()
        .for_each(|item| f(item, ctx));
}
/// Parse the [ParseProgram] AST to populate the [TokenMap] with parsed AST nodes.
///
/// When `modified_file` is given, only nodes originating from that file are
/// visited; otherwise every root node (root and submodules) is.
fn parse_ast_to_tokens(
    parse_program: &ParseProgram,
    ctx: &ParseContext,
    modified_file: Option<&PathBuf>,
    f: impl Fn(&AstNode, &ParseContext) + Sync,
) {
    let should_process = |node: &&AstNode| {
        modified_file.is_none_or(|path| {
            node.span
                .source_id()
                .is_some_and(|id| ctx.engines.se().get_path(id) == *path)
        })
    };
    let root_nodes = parse_program.root.tree.root_nodes.iter();
    let submodule_nodes = parse_program
        .root
        .submodules_recursive()
        .flat_map(|(_, submodule)| &submodule.module.tree.root_nodes);
    // Materialize the filtered set so rayon can process it in parallel.
    root_nodes
        .chain(submodule_nodes)
        .filter(should_process)
        .collect::<Vec<_>>()
        .par_iter()
        .for_each(|n| f(n, ctx));
}
/// Parse the [ty::TyProgram] AST to populate the [TokenMap] with typed AST nodes.
///
/// When `modified_file` is given, only nodes originating from that file are
/// visited; otherwise every node (root and submodules) is.
fn parse_ast_to_typed_tokens(
    root: &ty::TyModule,
    ctx: &ParseContext,
    modified_file: Option<&PathBuf>,
    f: impl Fn(&ty::TyAstNode, &ParseContext) + Sync,
) {
    let should_process = |node: &&ty::TyAstNode| {
        modified_file.is_none_or(|path| {
            node.span
                .source_id()
                .is_some_and(|id| ctx.engines.se().get_path(id) == *path)
        })
    };
    let submodule_nodes = root
        .submodules_recursive()
        .flat_map(|(_, submodule)| &submodule.module.all_nodes);
    // Materialize the filtered set so rayon can process it in parallel.
    root.all_nodes
        .iter()
        .chain(submodule_nodes)
        .filter(should_process)
        .collect::<Vec<_>>()
        .par_iter()
        .for_each(|n| f(n, ctx));
}
/// Create runnables for script main and all test functions.
///
/// Each affected path's entry in `runnables` is replaced wholesale with the
/// freshly collected set, so stale runnables do not accumulate.
fn create_runnables(
    runnables: &RunnableMap,
    typed_program: Option<&ty::TyProgram>,
    decl_engine: &DeclEngine,
    source_engine: &SourceEngine,
) {
    let _p = tracing::trace_span!("create_runnables").entered();
    let root_module = typed_program.map(|program| &program.root_module);
    // Use a local map to collect all runnables per path first
    let mut new_runnables: HashMap<PathBuf, Vec<Box<dyn Runnable>>> = HashMap::new();
    // Insert runnable test functions.
    for (decl, _) in root_module
        .into_iter()
        .flat_map(|x| x.test_fns_recursive(decl_engine))
    {
        // Get the span of the first attribute if it exists, otherwise use the span of the function name.
        let span = decl
            .attributes
            .first()
            .map_or_else(|| decl.name.span(), |attr| attr.span.clone());
        if let Some(source_id) = span.source_id() {
            let path = source_engine.get_path(source_id);
            let runnable = Box::new(RunnableTestFn {
                range: token::get_range_from_span(&span),
                test_name: Some(decl.name.to_string()),
            });
            new_runnables.entry(path).or_default().push(runnable);
        }
    }
    // Insert runnable main function if the program is a script.
    if let Some(ty::TyProgramKind::Script {
        ref main_function, ..
    }) = typed_program.map(|x| &x.kind)
    {
        let main_function = decl_engine.get_function(main_function);
        let span = main_function.name.span();
        if let Some(source_id) = span.source_id() {
            let path = source_engine.get_path(source_id);
            let runnable = Box::new(RunnableMainFn {
                range: token::get_range_from_span(&span),
                tree_type: sway_core::language::parsed::TreeType::Script,
            });
            new_runnables.entry(path).or_default().push(runnable);
        }
    }
    // Now overwrite each path's entry with the new complete vector.
    // Iterate the map directly — collecting into an intermediate Vec first
    // was a needless allocation (clippy: needless_collect).
    for (path, new_runnable_vec) in new_runnables {
        runnables.insert(path, new_runnable_vec);
    }
}
/// Resolves a `ProgramId` from a given `path` using the manifest directory.
pub fn program_id_from_path(
    path: &PathBuf,
    engines: &Engines,
) -> Result<ProgramId, DirectoryError> {
    // Walk up from `path` to the nearest directory containing a Forc manifest,
    // then ask the source engine which program that manifest belongs to.
    sway_utils::find_parent_manifest_dir(path)
        .and_then(|manifest_path| {
            engines
                .se()
                .get_program_id_from_manifest_path(&manifest_path)
        })
        .ok_or_else(|| DirectoryError::ProgramIdNotFound {
            path: path.to_string_lossy().to_string(),
        })
}
/// A cache for storing and retrieving BuildPlan objects.
#[derive(Debug, Clone)]
pub struct BuildPlanCache {
/// The cached BuildPlan and its last update time
cache: Arc<RwLock<Option<(BuildPlan, SystemTime)>>>,
}
impl Default for BuildPlanCache {
fn default() -> Self {
Self {
cache: Arc::new(RwLock::new(None)),
}
}
}
impl BuildPlanCache {
    /// Retrieves a BuildPlan from the cache or updates it if necessary.
    ///
    /// The cached plan is considered stale when the manifest file on disk was
    /// modified after the plan was cached; `update_fn` runs only when a
    /// (re)build is required.
    pub fn get_or_update<F>(
        &self,
        manifest_path: &Option<PathBuf>,
        update_fn: F,
    ) -> Result<BuildPlan, LanguageServerError>
    where
        F: FnOnce() -> Result<BuildPlan, LanguageServerError>,
    {
        // Decide staleness under a short-lived read lock; the lock is dropped
        // before `update_fn` runs so a slow build does not block readers.
        let should_update = {
            let cache = self.cache.read();
            manifest_path
                .as_ref()
                .and_then(|path| path.metadata().ok()?.modified().ok())
                // Without a manifest mtime, rebuild only when the cache is empty.
                .map_or(cache.is_none(), |time| {
                    cache.as_ref().is_none_or(|&(_, last)| time > last)
                })
        };
        if should_update {
            let new_plan = update_fn()?;
            // NOTE(review): two threads may both observe `should_update` and
            // race to rebuild; last writer wins, which appears benign here.
            let mut cache = self.cache.write();
            *cache = Some((new_plan.clone(), SystemTime::now()));
            Ok(new_plan)
        } else {
            let cache = self.cache.read();
            cache
                .as_ref()
                .map(|(plan, _)| plan.clone())
                .ok_or(LanguageServerError::BuildPlanCacheIsEmpty)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::GarbageCollectionConfig;
    use dashmap::DashMap;
    use sway_lsp_test_utils::{get_absolute_path, get_url};
    #[test]
    fn parse_project_returns_manifest_file_not_found() {
        // A fixtures directory with no Forc.toml should fail manifest discovery.
        let dir = get_absolute_path("sway-lsp/tests/fixtures");
        let uri = get_url(&dir);
        let engines_original = Arc::new(RwLock::new(Engines::default()));
        let engines = Engines::default();
        let session = Arc::new(Session::new());
        let sync = Arc::new(SyncWorkspace::new());
        let token_map = Arc::new(TokenMap::new());
        let ctx = CompilationContext {
            session,
            sync,
            token_map,
            engines: engines_original,
            compiled_programs: Arc::new(CompiledPrograms::new()),
            runnables: Arc::new(DashMap::new()),
            optimized_build: false,
            file_versions: Default::default(),
            uri: uri.clone(),
            version: None,
            gc_options: GarbageCollectionConfig::default(),
        };
        let result = parse_project(&uri, &engines, None, &ctx, None)
            .expect_err("expected ManifestFileNotFound");
        // The error must carry the directory we attempted to load.
        assert!(matches!(
            result,
            LanguageServerError::DocumentError(
                DocumentError::ManifestFileNotFound { dir: test_dir }
            )
            if test_dir == dir
        ));
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/document.rs | sway-lsp/src/core/document.rs | use std::{path::PathBuf, sync::Arc};
use crate::{
error::{DirectoryError, DocumentError, LanguageServerError},
utils::document,
};
use dashmap::DashMap;
use forc_util::fs_locking::PidFileLocking;
use lsp_types::{Position, Range, TextDocumentContentChangeEvent, Url};
use sway_utils::get_sway_files;
use tokio::{fs::File, io::AsyncWriteExt};
/// An in-memory copy of a text document plus bookkeeping for applying edits.
#[derive(Debug, Clone)]
pub struct TextDocument {
    /// Monotonically increasing edit counter; starts at 1 on load.
    version: i32,
    /// The document's path as a string (used as the map key in `Documents`).
    uri: String,
    /// Full text content of the document.
    content: String,
    /// Byte offset of the start of each line, used for position -> index math.
    line_offsets: Vec<usize>,
}
impl TextDocument {
    /// Loads a document from disk, computing its line-offset table.
    ///
    /// I/O errors are mapped onto `DocumentError` variants so callers can
    /// distinguish a missing file from a permissions problem.
    pub async fn build_from_path(path: &str) -> Result<Self, DocumentError> {
        tokio::fs::read_to_string(path)
            .await
            .map(|content| {
                let line_offsets = TextDocument::calculate_line_offsets(&content);
                Self {
                    version: 1,
                    uri: path.into(),
                    content,
                    line_offsets,
                }
            })
            .map_err(|e| match e.kind() {
                std::io::ErrorKind::NotFound => {
                    DocumentError::DocumentNotFound { path: path.into() }
                }
                std::io::ErrorKind::PermissionDenied => {
                    DocumentError::PermissionDenied { path: path.into() }
                }
                _ => DocumentError::IOError {
                    path: path.into(),
                    error: e.to_string(),
                },
            })
    }
    /// Returns the document's path string.
    pub fn get_uri(&self) -> &str {
        &self.uri
    }
    /// Returns the full text of the document.
    pub fn get_text(&self) -> &str {
        &self.content
    }
    /// Returns line `line` including its trailing newline, or `""` when the
    /// index is past the end of the document.
    pub fn get_line(&self, line: usize) -> &str {
        let start = self
            .line_offsets
            .get(line)
            .copied()
            .unwrap_or(self.content.len());
        let end = self
            .line_offsets
            .get(line + 1)
            .copied()
            .unwrap_or(self.content.len());
        &self.content[start..end]
    }
    /// Applies a single LSP content change.
    ///
    /// A change without a range replaces the whole document; otherwise the
    /// given range is validated and spliced in place.
    pub fn apply_change(
        &mut self,
        change: &TextDocumentContentChangeEvent,
    ) -> Result<(), DocumentError> {
        if let Some(range) = change.range {
            self.validate_range(range)?;
            let start_index = self.position_to_index(range.start);
            let end_index = self.position_to_index(range.end);
            // NOTE(review): `replace_range` panics when an index is not on a
            // UTF-8 char boundary; `position_to_index` does byte arithmetic,
            // so multi-byte content could trip this — confirm.
            self.content
                .replace_range(start_index..end_index, &change.text);
        } else {
            self.content.clone_from(&change.text);
        }
        // Offsets and version are refreshed after every accepted change.
        self.line_offsets = Self::calculate_line_offsets(&self.content);
        self.version += 1;
        Ok(())
    }
    // Rejects ranges that are inverted or extend past the end of the content.
    fn validate_range(&self, range: Range) -> Result<(), DocumentError> {
        let start = self.position_to_index(range.start);
        let end = self.position_to_index(range.end);
        if start > end || end > self.content.len() {
            return Err(DocumentError::InvalidRange { range });
        }
        Ok(())
    }
    // Converts an LSP position to a byte index into `content`.
    //
    // NOTE(review): LSP positions count UTF-16 code units by default, but this
    // treats `character` as a byte offset — confirm the negotiated position
    // encoding; the two differ on non-ASCII lines.
    fn position_to_index(&self, position: Position) -> usize {
        let line_offset = self
            .line_offsets
            .get(position.line as usize)
            .copied()
            .unwrap_or(self.content.len());
        line_offset + position.character as usize
    }
    // Byte offsets of each line start; index 0 is always 0.
    fn calculate_line_offsets(text: &str) -> Vec<usize> {
        let mut offsets = vec![0];
        for (i, c) in text.char_indices() {
            if c == '\n' {
                offsets.push(i + 1);
            }
        }
        offsets
    }
}
/// Thread-safe store of open documents, keyed by path string.
pub struct Documents(DashMap<String, TextDocument>);
impl Default for Documents {
    fn default() -> Self {
        Self::new()
    }
}
impl Documents {
    /// Creates an empty document store.
    pub fn new() -> Self {
        Documents(DashMap::new())
    }
    /// Loads and stores the document at `uri` if it is not already tracked.
    /// Read failures are silently ignored (best-effort open).
    pub async fn handle_open_file(&self, uri: &Url) {
        if !self.contains_key(uri.path()) {
            if let Ok(text_document) = TextDocument::build_from_path(uri.path()).await {
                let _ = self.store_document(text_document);
            }
        }
    }
    /// Asynchronously writes the changes to the file and updates the document.
    pub async fn write_changes_to_file(
        &self,
        uri: &Url,
        changes: &[TextDocumentContentChangeEvent],
    ) -> Result<(), LanguageServerError> {
        // Apply the edits in memory first so we persist exactly what we track.
        let src = self.update_text_document(uri, changes)?;
        let mut file =
            File::create(uri.path())
                .await
                .map_err(|err| DocumentError::UnableToCreateFile {
                    path: uri.path().to_string(),
                    err: err.to_string(),
                })?;
        file.write_all(src.as_bytes())
            .await
            .map_err(|err| DocumentError::UnableToWriteFile {
                path: uri.path().to_string(),
                err: err.to_string(),
            })?;
        Ok(())
    }
    /// Update the document at the given [Url] with the Vec of changes returned by the client.
    ///
    /// Returns the document's full text after all changes are applied.
    pub fn update_text_document(
        &self,
        uri: &Url,
        changes: &[TextDocumentContentChangeEvent],
    ) -> Result<String, DocumentError> {
        self.try_get_mut(uri.path())
            .try_unwrap()
            .ok_or_else(|| DocumentError::DocumentNotFound {
                path: uri.path().to_string(),
            })
            .and_then(|mut document| {
                for change in changes {
                    document.apply_change(change)?;
                }
                Ok(document.get_text().to_string())
            })
    }
    /// Get a clone of the document at the given [Url].
    pub fn get_text_document(&self, url: &Url) -> Result<TextDocument, DocumentError> {
        self.try_get(url.path())
            .try_unwrap()
            .ok_or_else(|| DocumentError::DocumentNotFound {
                path: url.path().to_string(),
            })
            .map(|document| document.clone())
    }
    /// Remove the text document, returning it if it was present.
    pub fn remove_document(&self, url: &Url) -> Result<TextDocument, DocumentError> {
        self.remove(url.path())
            .ok_or_else(|| DocumentError::DocumentNotFound {
                path: url.path().to_string(),
            })
            .map(|(_, text_document)| text_document)
    }
    /// Store the text document, failing if one is already stored for its uri.
    pub fn store_document(&self, text_document: TextDocument) -> Result<(), DocumentError> {
        let uri = text_document.get_uri().to_string();
        // Bug fix: the previous implementation called `insert` unconditionally
        // and only then reported `DocumentAlreadyStored`, meaning the existing
        // document had already been overwritten when the error was returned.
        // Check for the key first so an existing entry is left untouched.
        // (A concurrent insert between the check and the insert remains
        // possible, matching the original's lack of atomicity.)
        if self.contains_key(&uri) {
            return Err(DocumentError::DocumentAlreadyStored { path: uri });
        }
        self.insert(uri, text_document);
        Ok(())
    }
    /// Populate with sway files found in the workspace.
    pub async fn store_sway_files_from_temp(
        &self,
        temp_dir: PathBuf,
    ) -> Result<(), LanguageServerError> {
        for path_str in get_sway_files(temp_dir).iter().filter_map(|fp| fp.to_str()) {
            let text_doc = TextDocument::build_from_path(path_str).await?;
            self.store_document(text_doc)?;
        }
        Ok(())
    }
}
impl std::ops::Deref for Documents {
    type Target = DashMap<String, TextDocument>;
    // Expose the underlying map so callers can use DashMap methods directly.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Manages process-based file locking for multiple files.
pub struct PidLockedFiles {
    /// One lock handle per Url; presence in the map means the flag file was created.
    locks: DashMap<Url, Arc<PidFileLocking>>,
}
impl Default for PidLockedFiles {
    fn default() -> Self {
        Self::new()
    }
}
impl PidLockedFiles {
    /// Creates a tracker with no locked files.
    pub fn new() -> Self {
        Self {
            locks: DashMap::new(),
        }
    }
    /// Marks the specified file as "dirty" by creating a corresponding flag file.
    ///
    /// This function ensures the necessary directory structure exists before creating the flag file.
    /// If the file is already locked, this function will do nothing. This is to reduce the number of
    /// unnecessary file IO operations.
    pub fn mark_file_as_dirty(&self, uri: &Url) -> Result<(), LanguageServerError> {
        // NOTE(review): `contains_key` + `insert` is not atomic; two threads
        // could both create a lock for the same uri and the second insert
        // would replace the first Arc. Appears harmless here — confirm.
        if !self.locks.contains_key(uri) {
            let path = document::get_path_from_url(uri)?;
            let file_lock = Arc::new(PidFileLocking::lsp(path));
            file_lock
                .lock()
                .map_err(|e| DirectoryError::LspLocksDirFailed(e.to_string()))?;
            self.locks.insert(uri.clone(), file_lock);
        }
        Ok(())
    }
    /// Removes the corresponding flag file for the specified Url.
    ///
    /// If the flag file does not exist, this function will do nothing.
    pub fn remove_dirty_flag(&self, uri: &Url) -> Result<(), LanguageServerError> {
        if let Some((uri, file_lock)) = self.locks.remove(uri) {
            file_lock
                .release()
                .map_err(|err| DocumentError::UnableToRemoveFile {
                    path: uri.path().to_string(),
                    err: err.to_string(),
                })?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use sway_lsp_test_utils::get_absolute_path;
    // Happy path: building from an existing fixture populates every field.
    #[tokio::test]
    async fn build_from_path_returns_text_document() {
        let path = get_absolute_path("sway-lsp/tests/fixtures/cats.txt");
        let result = TextDocument::build_from_path(&path).await;
        assert!(result.is_ok(), "result = {result:?}");
        let document = result.unwrap();
        assert_eq!(document.version, 1);
        assert_eq!(document.uri, path);
        assert!(!document.content.is_empty());
        assert!(!document.line_offsets.is_empty());
    }
    // A missing file surfaces as `DocumentError::DocumentNotFound`.
    #[tokio::test]
    async fn build_from_path_returns_document_not_found_error() {
        let path = get_absolute_path("not/a/real/file/path");
        let result = TextDocument::build_from_path(&path)
            .await
            .expect_err("expected DocumentNotFound");
        assert_eq!(result, DocumentError::DocumentNotFound { path });
    }
    // Storing a document for the first time succeeds.
    #[tokio::test]
    async fn store_document_returns_empty_tuple() {
        let documents = Documents::new();
        let path = get_absolute_path("sway-lsp/tests/fixtures/cats.txt");
        let document = TextDocument::build_from_path(&path).await.unwrap();
        let result = documents.store_document(document);
        assert!(result.is_ok());
    }
    // Storing the same path twice must fail with `DocumentAlreadyStored`.
    #[tokio::test]
    async fn store_document_returns_document_already_stored_error() {
        let documents = Documents::new();
        let path = get_absolute_path("sway-lsp/tests/fixtures/cats.txt");
        let document = TextDocument::build_from_path(&path).await.unwrap();
        documents
            .store_document(document)
            .expect("expected successfully stored");
        let document = TextDocument::build_from_path(&path).await.unwrap();
        let result = documents
            .store_document(document)
            .expect_err("expected DocumentAlreadyStored");
        assert_eq!(result, DocumentError::DocumentAlreadyStored { path });
    }
    // Lines keep their trailing newline except for the final, unterminated line.
    #[test]
    fn get_line_returns_correct_line() {
        let content = "line1\nline2\nline3".to_string();
        let line_offsets = TextDocument::calculate_line_offsets(&content);
        let document = TextDocument {
            version: 1,
            uri: "test.sw".into(),
            content,
            line_offsets,
        };
        assert_eq!(document.get_line(0), "line1\n");
        assert_eq!(document.get_line(1), "line2\n");
        assert_eq!(document.get_line(2), "line3");
    }
    // An in-range edit replaces exactly the characters covered by the range.
    #[test]
    fn apply_change_updates_content_correctly() {
        let content = "Hello, world!".to_string();
        let line_offsets = TextDocument::calculate_line_offsets(&content);
        let mut document = TextDocument {
            version: 1,
            uri: "test.sw".into(),
            content,
            line_offsets,
        };
        let change = TextDocumentContentChangeEvent {
            range: Some(Range::new(Position::new(0, 7), Position::new(0, 12))),
            range_length: None,
            text: "Rust".into(),
        };
        document.apply_change(&change).unwrap();
        assert_eq!(document.get_text(), "Hello, Rust!");
    }
    // (line 1, character 2) => byte offset 8: "line1\n" is 6 bytes, plus 2.
    #[test]
    fn position_to_index_works_correctly() {
        let content = "line1\nline2\nline3".to_string();
        let line_offsets = TextDocument::calculate_line_offsets(&content);
        let document = TextDocument {
            version: 1,
            uri: "test.sw".into(),
            content,
            line_offsets,
        };
        assert_eq!(document.position_to_index(Position::new(1, 2)), 8);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/sync.rs | sway-lsp/src/core/sync.rs | use crate::{
error::{DirectoryError, DocumentError, LanguageServerError},
utils::document::{get_path_from_url, get_url_from_path, get_url_from_span},
};
use dashmap::DashMap;
use forc_pkg::manifest::{GenericManifestFile, ManifestFile};
use lsp_types::Url;
use std::{
fs,
path::{Path, PathBuf},
};
use sway_types::{SourceEngine, Span};
use sway_utils::{
constants::{LOCK_FILE_NAME, MANIFEST_FILE_NAME},
SWAY_EXTENSION,
};
use tempfile::Builder;
/// The two directory roots tracked by [SyncWorkspace]: the user's real
/// workspace and its temporary clone.
#[derive(Debug, Eq, PartialEq, Hash)]
pub enum Directory {
    /// The directory containing the user's workspace (where the Forc.toml lives).
    Manifest,
    /// The temporary clone of the workspace (see `create_temp_dir_from_workspace`).
    Temp,
}
/// Tracks the mapping between the user's workspace directory and the temporary
/// directory the server clones it into.
#[derive(Debug, Default)]
pub struct SyncWorkspace {
    // Maps each `Directory` role to its canonicalized path.
    pub directories: DashMap<Directory, PathBuf>,
}
impl SyncWorkspace {
    /// Prefix used when creating temp directories; also used to recognize them
    /// later (see `is_path_in_temp_workspace` and `remove_temp_dir`).
    pub const LSP_TEMP_PREFIX: &'static str = "SWAY_LSP_TEMP_DIR";
    /// Creates an empty workspace with no directories registered yet.
    pub fn new() -> Self {
        Self::default()
    }
/// Clean up the temp directory that was created once the server closes down.
pub fn remove_temp_dir(&self) {
if let Ok(dir) = self.temp_dir() {
// The `temp_path` we store is `random_dir/project_name`.
// So, we need to remove `random_dir` by getting the parent directory.
if let Some(parent_dir) = dir.parent() {
if parent_dir.file_name().is_some_and(|name| {
name.to_string_lossy()
.starts_with(SyncWorkspace::LSP_TEMP_PREFIX)
}) {
if let Err(e) = fs::remove_dir_all(parent_dir) {
tracing::warn!("Failed to remove temp base dir {:?}: {}", parent_dir, e);
} else {
tracing::debug!("Successfully removed temp base dir: {:?}", parent_dir);
}
}
}
}
}
    /// Creates a fresh temp directory and registers both it and
    /// `actual_workspace_root` (canonicalized) in `self.directories`.
    ///
    /// The clone lives at `<LSP_TEMP_PREFIX…>/<project_name>`; the prefixed
    /// parent directory is what `remove_temp_dir` later deletes.
    pub fn create_temp_dir_from_workspace(
        &self,
        actual_workspace_root: &Path,
    ) -> Result<(), LanguageServerError> {
        // The final component of the workspace root names the clone directory.
        let root_dir_name = actual_workspace_root
            .file_name()
            .and_then(|name| name.to_str())
            .ok_or_else(|| DirectoryError::CantExtractProjectName {
                dir: actual_workspace_root.to_string_lossy().to_string(),
            })?;
        let temp_dir_guard = Builder::new()
            .prefix(SyncWorkspace::LSP_TEMP_PREFIX)
            .tempdir()
            .map_err(|_| DirectoryError::TempDirFailed)?;
        // Construct the path for our specific workspace clone *inside* the directory managed by temp_dir_guard.
        let temp_workspace_base = temp_dir_guard.path().join(root_dir_name);
        fs::create_dir_all(&temp_workspace_base).map_err(|io_err| {
            tracing::error!(
                "Failed to create subdirectory {:?} in temp: {}",
                temp_workspace_base,
                io_err
            );
            DirectoryError::TempDirFailed
        })?;
        // Canonicalize both roots before storing them.
        let canonical_manifest_path = actual_workspace_root.canonicalize().map_err(|io_err| {
            tracing::warn!(
                "Failed to canonicalize manifest path {:?}: {}",
                actual_workspace_root,
                io_err
            );
            DirectoryError::CanonicalizeFailed
        })?;
        let canonical_temp_path = temp_workspace_base.canonicalize().map_err(|io_err| {
            tracing::warn!(
                "Failed to canonicalize temp path {:?}: {}",
                temp_workspace_base,
                io_err
            );
            DirectoryError::CanonicalizeFailed
        })?;
        self.directories
            .insert(Directory::Manifest, canonical_manifest_path);
        self.directories
            .insert(Directory::Temp, canonical_temp_path.clone());
        // Consume the guard to disable auto-cleanup.
        let _ = temp_dir_guard.keep();
        tracing::debug!(
            "SyncWorkspace: Manifest dir set to {:?}, Temp dir set to {:?}",
            actual_workspace_root,
            canonical_temp_path
        );
        Ok(())
    }
pub fn clone_manifest_dir_to_temp(&self) -> Result<(), DirectoryError> {
copy_dir_contents(self.manifest_dir()?, self.temp_dir()?)
.map_err(|_| DirectoryError::CopyContentsFailed)?;
Ok(())
}
    /// Convert the Url path from the client to point to the same file in our temp folder
    pub fn workspace_to_temp_url(&self, uri: &Url) -> Result<Url, DirectoryError> {
        // Strip the manifest-dir prefix and re-root the path under the temp dir.
        convert_url(uri, &self.temp_dir()?, &self.manifest_dir()?)
    }
    /// Convert the [Url] path from the temp folder to point to the same file in the users workspace.
    pub(crate) fn temp_to_workspace_url(&self, uri: &Url) -> Result<Url, DirectoryError> {
        // Inverse of `workspace_to_temp_url`: strip the temp prefix, re-root
        // under the manifest dir.
        convert_url(uri, &self.manifest_dir()?, &self.temp_dir()?)
    }
    /// If it is a path to a temp directory, convert the path in the [Span] to the same file in the user's
    /// workspace. Otherwise, return the span as-is.
    pub(crate) fn temp_to_workspace_span(
        &self,
        source_engine: &SourceEngine,
        span: &Span,
    ) -> Result<Span, DirectoryError> {
        let url = get_url_from_span(source_engine, span)?;
        if is_path_in_temp_workspace(&url) {
            let converted_url = convert_url(&url, &self.manifest_dir()?, &self.temp_dir()?)?;
            let converted_path = get_path_from_url(&converted_url)?;
            // Resolve a source id for the converted workspace path.
            let source_id = source_engine.get_source_id(&converted_path);
            // Byte offsets are kept; only the source id is swapped.
            let converted_span = Span::new(
                span.src().clone(),
                span.start(),
                span.end(),
                Some(source_id),
            );
            match converted_span {
                Some(span) => Ok(span),
                None => Err(DirectoryError::SpanFromPathFailed {
                    path: converted_path.to_string_lossy().to_string(),
                }),
            }
        } else {
            Ok(span.clone())
        }
    }
/// If path is part of the users workspace, then convert URL from temp to workspace dir.
/// Otherwise, pass through if it points to a dependency path
pub(crate) fn to_workspace_url(&self, url: Url) -> Option<Url> {
if is_path_in_temp_workspace(&url) {
Some(self.temp_to_workspace_url(&url).ok()?)
} else {
Some(url)
}
}
/// Returns the path to the Forc.toml of the workspace in the temp directory.
#[allow(dead_code)]
pub(crate) fn temp_manifest_path(&self) -> Option<PathBuf> {
self.temp_dir()
.map(|dir| dir.join(sway_utils::constants::MANIFEST_FILE_NAME))
.ok()
}
/// Returns the path to the Forc.toml of the workspace.
pub fn workspace_manifest_path(&self) -> Option<PathBuf> {
self.manifest_dir()
.map(|dir| dir.join(sway_utils::constants::MANIFEST_FILE_NAME))
.ok()
}
    /// Returns the path to the Forc.toml of the workspace member containing the given TEMP URI.
    /// This function assumes the input URI points to a file within the temporary cloned workspace.
    pub(crate) fn member_manifest_path(&self, temp_uri: &Url) -> Option<PathBuf> {
        let file_path_in_temp_member = get_path_from_url(temp_uri).ok()?;
        let temp_workspace_root_dir = self.temp_dir().ok()?;
        let manifest_file = ManifestFile::from_dir(&temp_workspace_root_dir).ok()?;
        match manifest_file {
            // Single package: the file must live under the package dir itself.
            ManifestFile::Package(pkg_manifest) => file_path_in_temp_member
                .starts_with(pkg_manifest.dir())
                .then(|| pkg_manifest.path().to_path_buf()),
            // Workspace: pick the first member whose dir contains the file.
            ManifestFile::Workspace(ws_manifest) => ws_manifest
                .member_pkg_manifests()
                .ok()?
                .filter_map(Result::ok)
                .find(|member_pkg| file_path_in_temp_member.starts_with(member_pkg.dir()))
                .map(|member_pkg| member_pkg.path().to_path_buf()),
        }
    }
pub fn member_path(&self, temp_uri: &Url) -> Option<PathBuf> {
let p = self.member_manifest_path(temp_uri)?;
let dir = p.parent()?;
Some(dir.to_path_buf())
}
    /// Read the Forc.toml and convert relative paths to absolute. Save into our temp directory.
    ///
    /// For a single package only its own manifest is synced. For a workspace,
    /// every member manifest is synced and then the root workspace Forc.toml.
    /// Failures to load individual manifests are logged and skipped rather than
    /// aborting the whole sync.
    pub fn sync_manifest(&self) -> Result<(), LanguageServerError> {
        let actual_manifest_dir = self.manifest_dir()?;
        let temp_manifest_dir = self.temp_dir()?;
        // Load the manifest from the *actual* workspace root to determine if it's a package or workspace
        match ManifestFile::from_dir(&actual_manifest_dir) {
            Ok(ManifestFile::Package(pkg_manifest_file)) => {
                let actual_pkg_manifest_path = pkg_manifest_file.path();
                let temp_pkg_manifest_path = temp_manifest_dir.join(
                    actual_pkg_manifest_path
                        .file_name()
                        .unwrap_or_else(|| std::ffi::OsStr::new(MANIFEST_FILE_NAME)),
                );
                tracing::debug!(
                    "Syncing single package manifest: {:?} to temp: {:?}",
                    actual_pkg_manifest_path,
                    temp_pkg_manifest_path
                );
                edit_manifest_dependency_paths(
                    pkg_manifest_file.dir(),
                    actual_pkg_manifest_path,
                    &temp_pkg_manifest_path,
                )?;
            }
            Ok(ManifestFile::Workspace(ws_manifest_file)) => {
                // Workspace: iterate through members and sync each member's manifest
                tracing::debug!("Syncing workspace members in: {:?}", actual_manifest_dir);
                match ws_manifest_file.member_pkg_manifests() {
                    Ok(member_manifests_iter) => {
                        for member_result in member_manifests_iter {
                            match member_result {
                                Ok(actual_member_pkg_manifest) => {
                                    let actual_member_manifest_path =
                                        actual_member_pkg_manifest.path();
                                    // Mirror the member's position relative to the
                                    // workspace root inside the temp directory.
                                    if let Ok(relative_member_path) = actual_member_manifest_path
                                        .strip_prefix(&actual_manifest_dir)
                                    {
                                        let temp_member_manifest_path =
                                            temp_manifest_dir.join(relative_member_path);
                                        tracing::debug!(
                                            "Syncing workspace member manifest: {:?} to temp: {:?}",
                                            actual_member_manifest_path,
                                            temp_member_manifest_path
                                        );
                                        edit_manifest_dependency_paths(
                                            actual_member_pkg_manifest.dir(),
                                            actual_member_manifest_path,
                                            &temp_member_manifest_path,
                                        )?;
                                    } else {
                                        tracing::error!(
                                            "Could not determine relative path for member: {:?}",
                                            actual_member_manifest_path
                                        );
                                    }
                                }
                                Err(e) => {
                                    tracing::error!(
                                        "Failed to load workspace member manifest: {}",
                                        e
                                    );
                                }
                            }
                        }
                    }
                    Err(e) => {
                        tracing::error!(
                            "Failed to get member manifests for workspace {:?}: {}",
                            actual_manifest_dir,
                            e
                        );
                    }
                }
                // Sync the root workspace Forc.toml itself
                let actual_root_workspace_toml_path = ws_manifest_file.path();
                let temp_root_workspace_toml_path = temp_manifest_dir.join(
                    actual_root_workspace_toml_path
                        .file_name()
                        .unwrap_or_else(|| std::ffi::OsStr::new(MANIFEST_FILE_NAME)),
                );
                tracing::debug!(
                    "Syncing root workspace manifest for patches: {:?} to temp: {:?}",
                    actual_root_workspace_toml_path,
                    temp_root_workspace_toml_path
                );
                edit_manifest_dependency_paths(
                    ws_manifest_file.dir(),
                    actual_root_workspace_toml_path,
                    &temp_root_workspace_toml_path,
                )?;
            }
            Err(e) => {
                tracing::error!(
                    "Failed to load manifest from actual directory {:?}: {}. Cannot sync manifest.",
                    actual_manifest_dir,
                    e
                );
            }
        }
        Ok(())
    }
/// Return the path to the projects manifest directory.
pub(crate) fn manifest_dir(&self) -> Result<PathBuf, DirectoryError> {
self.directories
.try_get(&Directory::Manifest)
.try_unwrap()
.map(|item| item.value().clone())
.ok_or(DirectoryError::ManifestDirNotFound)
}
/// Return the path to the temporary directory that was created for the current session.
pub(crate) fn temp_dir(&self) -> Result<PathBuf, DirectoryError> {
self.directories
.try_get(&Directory::Temp)
.try_unwrap()
.map(|item| item.value().clone())
.ok_or(DirectoryError::TempDirNotFound)
}
}
/// Check if the current path is part of the users workspace.
/// Returns false if the path is from a dependency
pub(crate) fn is_path_in_temp_workspace(uri: &Url) -> bool {
    // Temp clones are created with this prefix (see
    // `create_temp_dir_from_workspace`), so a substring test identifies them.
    uri.as_ref().contains(SyncWorkspace::LSP_TEMP_PREFIX)
}
/// Rebase `uri` out of the `to` directory tree and into the `from` directory
/// tree: the URI's path is stripped of the `to` prefix and re-joined onto `from`.
///
/// Errors with `StripPrefixError` if the URI's path does not live under `to`.
// `to` takes `&Path` instead of `&PathBuf` (clippy::ptr_arg); existing
// `&PathBuf` call sites deref-coerce unchanged.
fn convert_url(uri: &Url, from: &Path, to: &Path) -> Result<Url, DirectoryError> {
    let uri_path = PathBuf::from(uri.path());
    let relative = uri_path
        .strip_prefix(to)
        .map_err(DirectoryError::StripPrefixError)?;
    get_url_from_path(&from.join(relative))
}
/// Deserialize the manifest file and loop through the dependencies.
/// Check if the dependency is specifying a 'path'.
/// If so, check if the path is relative and convert the relative path to an absolute path.
/// Edit the toml entry using toml_edit with the absolute path.
/// Save the manifest to temp_dir/Forc.toml.
// Note: the first parameter was previously misspelled `manifset_dir`.
pub(crate) fn edit_manifest_dependency_paths(
    manifest_dir: &Path,
    manifest_path: &Path,
    temp_manifest_path: &Path,
) -> Result<(), LanguageServerError> {
    // Read and parse the original manifest
    let manifest_content =
        std::fs::read_to_string(manifest_path).map_err(|err| DocumentError::IOError {
            path: manifest_path.to_string_lossy().to_string(),
            error: err.to_string(),
        })?;
    let mut doc = manifest_content
        .parse::<toml_edit::DocumentMut>()
        .map_err(|err| DocumentError::IOError {
            path: manifest_path.to_string_lossy().to_string(),
            error: format!("Failed to parse TOML: {err}"),
        })?;
    let manifest =
        ManifestFile::from_file(manifest_path).map_err(|err| DocumentError::IOError {
            path: manifest_path.to_string_lossy().to_string(),
            error: err.to_string(),
        })?;
    // Only package manifests declare dependencies; workspace manifests are
    // written through unchanged.
    if let ManifestFile::Package(package) = manifest {
        // Process dependencies if they exist
        if let Some(deps) = &package.dependencies {
            if let Some(deps_table) = doc.get_mut("dependencies").and_then(|v| v.as_table_mut()) {
                process_dependencies(manifest_dir, deps, deps_table)?;
            }
        }
    }
    // Write the updated manifest to the temp file
    std::fs::write(temp_manifest_path, doc.to_string()).map_err(|err| {
        DocumentError::UnableToWriteFile {
            path: temp_manifest_path.to_string_lossy().to_string(),
            err: err.to_string(),
        }
    })?;
    Ok(())
}
/// Process dependencies and convert relative paths to absolute
fn process_dependencies(
    manifest_dir: &Path,
    deps: &std::collections::BTreeMap<String, forc_pkg::manifest::Dependency>,
    deps_table: &mut toml_edit::Table,
) -> Result<(), LanguageServerError> {
    for (name, dependency) in deps {
        // Only detailed dependencies can carry a `path` key; simple
        // version-string dependencies are left untouched.
        if let forc_pkg::manifest::Dependency::Detailed(details) = dependency {
            if let Some(rel_path) = &details.path {
                // Convert relative path to absolute
                let abs_path = manifest_dir
                    .join(rel_path)
                    .canonicalize()
                    .map_err(|_| DirectoryError::CanonicalizeFailed)?
                    .to_string_lossy()
                    .to_string();
                // Update the path in the TOML document
                if let Some(dep_item) = deps_table.get_mut(name) {
                    let path_value = toml_edit::Value::from(abs_path);
                    // NOTE(review): only inline tables (`dep = { path = "…" }`)
                    // are rewritten here; `[dependencies.dep]`-style tables
                    // fall through silently — confirm this is intended.
                    if let Some(table) = dep_item.as_inline_table_mut() {
                        table.insert("path", path_value);
                    }
                }
            }
        }
    }
    Ok(())
}
/// Copies only the specified files from the source directory to the target directory.
/// This function targets files ending with `.sw`, and the specific files `Forc.toml` and `Forc.lock`.
/// It returns `Ok(true)` if any relevant files were copied over, and `Ok(false)` if no such files were found.
fn copy_dir_contents(
    src_dir: impl AsRef<Path>,
    target_dir: impl AsRef<Path>,
) -> std::io::Result<bool> {
    // Whether this directory (or any subdirectory) contained a relevant file;
    // the target directory is only created lazily, on the first hit.
    let mut has_relevant_files = false;
    for entry in fs::read_dir(&src_dir)? {
        let entry = entry?;
        let path = entry.path();
        let ty = entry.file_type()?;
        if ty.is_dir() {
            // Recursively check the directory; if it has relevant files, create the target directory
            if copy_dir_contents(&path, target_dir.as_ref().join(entry.file_name()))? {
                has_relevant_files = true;
            }
        } else if let Some(file_name_os) = path.file_name() {
            if let Some(file_name) = file_name_os.to_str() {
                if file_name.ends_with(&format!(".{SWAY_EXTENSION}"))
                    || file_name == MANIFEST_FILE_NAME
                    || file_name == LOCK_FILE_NAME
                {
                    // `create_dir_all` also covers the case where a recursive
                    // call created this directory (as a parent) first.
                    if !has_relevant_files {
                        fs::create_dir_all(&target_dir)?;
                        has_relevant_files = true;
                    }
                    fs::copy(&path, target_dir.as_ref().join(file_name))?;
                }
            }
        }
    }
    Ok(has_relevant_files)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/token_map.rs | sway-lsp/src/core/token_map.rs | use crate::core::token::{self, Token, TokenIdent, TypedAstToken};
use dashmap::{
mapref::{
multiple::RefMulti,
one::{Ref, RefMut},
},
try_result::TryResult,
DashMap,
};
use lsp_types::{Position, Url};
use std::{path::PathBuf, thread, time::Duration};
use sway_core::{engine_threading::SpannedWithEngines, language::ty, type_system::TypeId, Engines};
use sway_types::{Ident, ProgramId};
// Re-export the TokenMapExt trait.
pub use crate::core::token_map_ext::TokenMapExt;
/// The TokenMap is the main data structure of the language server.
/// It stores all of the tokens that have been parsed and typechecked by the sway compiler.
///
/// The TokenMap is a wrapper around a [DashMap], which is a concurrent HashMap.
/// Keys are [TokenIdent]s, which carry the token's name, range and file path.
#[derive(Debug, Default)]
pub struct TokenMap(DashMap<TokenIdent, Token>);
impl<'a> TokenMap {
    /// Create a new token map.
    pub fn new() -> TokenMap {
        // Pre-allocate to reduce rehashing as tokens are inserted.
        TokenMap(DashMap::with_capacity(2048))
    }
/// Attempts to get a mutable reference to a token with retries on lock.
/// Retries up to 14 times with increasing backoff (1ns, 10ns, 100ns, 500ns, 1µs, 10µs, 100µs, 1ms, 10ms, 50ms, 100ms, 200ms, 500ms, 1s).
pub fn try_get_mut_with_retry(
&'a self,
ident: &TokenIdent,
) -> Option<RefMut<'a, TokenIdent, Token>> {
const MAX_RETRIES: usize = 14;
let backoff_times = [
1,
10,
100,
500,
1_000,
10_000,
100_000,
1_000_000,
10_000_000,
50_000_000,
100_000_000,
200_000_000,
500_000_000,
1_000_000_000,
]; // Backoff times in nanoseconds
for sleep in backoff_times.iter().take(MAX_RETRIES) {
match self.try_get_mut(ident) {
TryResult::Present(token) => return Some(token),
TryResult::Absent => return None,
TryResult::Locked => {
// Wait for the specified backoff time before retrying
let backoff_time = Duration::from_nanos(*sleep);
thread::sleep(backoff_time);
}
}
}
tracing::error!(
"Failed to get token after {} retries: {:#?}",
MAX_RETRIES,
ident
);
None // Return None if all retries are exhausted
}
/// Return an Iterator of tokens belonging to the provided [ProgramId].
pub fn tokens_for_program(
&self,
program_id: ProgramId,
) -> impl Iterator<Item = RefMulti<'_, TokenIdent, Token>> {
self.iter().filter_map(move |entry| {
entry
.key()
.program_id()
.filter(|&pid| pid == program_id)
.map(|_| entry)
})
}
/// Return an Iterator of tokens belonging to the provided [Url].
pub fn tokens_for_file<'s>(
&'s self,
uri: &'s Url,
) -> impl Iterator<Item = RefMulti<'s, TokenIdent, Token>> + 's {
self.iter().filter_map(move |entry| {
let ident_path = entry.key().path.clone();
ident_path.as_ref().and_then(|path| {
if path.to_str() == Some(uri.path()) {
Some(entry)
} else {
None
}
})
})
}
/// Return an Iterator of tokens matching the given name.
pub fn tokens_for_name<'s>(
&'s self,
name: &'s String,
) -> impl Iterator<Item = RefMulti<'s, TokenIdent, Token>> + 's {
self.iter().filter_map(move |entry| {
let ident = entry.key();
if &ident.name == name {
Some(entry)
} else {
None
}
})
}
/// Given a cursor [Position], return the [TokenIdent] of a token in the
/// Iterator if one exists at that position.
pub fn idents_at_position<'s, I>(
&'s self,
cursor_position: Position,
tokens: I,
) -> Vec<TokenIdent>
where
I: Iterator<Item = RefMulti<'s, TokenIdent, Token>>,
{
tokens
.filter_map(|entry| {
let ident = entry.key();
if cursor_position >= ident.range.start && cursor_position <= ident.range.end {
Some(ident.clone())
} else {
None
}
})
.collect()
}
/// Returns the first parent declaration found at the given cursor position.
///
/// For example, if the cursor is inside a function body, this function returns the function declaration.
pub fn parent_decl_at_position<'s>(
&'s self,
engines: &'s Engines,
uri: &'s Url,
position: Position,
) -> Option<RefMulti<'s, TokenIdent, Token>> {
self.tokens_at_position(engines, uri, position, None)
.into_iter()
.find_map(|entry| {
let (_, token) = entry.pair();
if let Some(TypedAstToken::TypedDeclaration(_)) = &token.as_typed() {
Some(entry)
} else {
None
}
})
}
/// Returns the first collected tokens that is at the cursor position.
pub fn token_at_position<'s>(
&'s self,
uri: &'s Url,
position: Position,
) -> Option<Ref<'s, TokenIdent, Token>> {
let tokens = self.tokens_for_file(uri);
self.idents_at_position(position, tokens)
.first()
.and_then(|ident| self.try_get(ident).try_unwrap())
}
    /// Returns all collected tokens that are at the given [Position] in the file.
    /// If `functions_only` is true, it only returns tokens of type [TypedAstToken::TypedFunctionDeclaration].
    ///
    /// This is different from `idents_at_position` because this searches the spans of token bodies, not
    /// just the spans of the token idents. For example, if we want to find out what function declaration
    /// the cursor is inside of, we need to search the body of the function declaration, not just the ident
    /// of the function declaration (the function name).
    pub fn tokens_at_position<'s>(
        &'s self,
        engines: &'s Engines,
        uri: &'s Url,
        position: Position,
        functions_only: Option<bool>,
    ) -> Vec<RefMulti<'s, TokenIdent, Token>> {
        let source_engine = engines.se();
        self.tokens_for_file(uri)
            .filter_map(move |entry| {
                let (ident, token) = entry.pair();
                // Widen the searched span to the whole declaration body where
                // one is available; otherwise fall back to the ident's own span.
                let token_ident = match &token.as_typed() {
                    Some(TypedAstToken::TypedFunctionDeclaration(decl))
                        if functions_only == Some(true) =>
                    {
                        TokenIdent::new(&Ident::new(decl.span.clone()), source_engine)
                    }
                    Some(TypedAstToken::TypedDeclaration(decl)) => {
                        TokenIdent::new(&Ident::new(decl.span(engines)), source_engine)
                    }
                    _ => ident.clone(),
                };
                if position >= token_ident.range.start && position <= token_ident.range.end {
                    if functions_only == Some(true) {
                        // Even within range, non-function tokens are filtered out.
                        if let Some(TypedAstToken::TypedFunctionDeclaration(_)) = &token.as_typed()
                        {
                            return Some(entry);
                        }
                        return None;
                    }
                    Some(entry)
                } else {
                    None
                }
            })
            .collect()
    }
/// Uses the [TypeId] to find the associated [ty::TyDecl] in the TokenMap.
///
/// This is useful when dealing with tokens that are of the [sway_core::language::ty::TyExpression] type in the AST.
/// For example, we can then use the `return_type` field which is a [TypeId] to retrieve the declaration Token.
pub fn declaration_of_type_id(
&self,
engines: &Engines,
type_id: &TypeId,
) -> Option<ty::TyDecl> {
token::ident_of_type_id(engines, type_id)
.and_then(|decl_ident| self.try_get(&decl_ident).try_unwrap())
.map(|item| item.value().clone())
.and_then(|token| token.as_typed().cloned())
.and_then(|typed_token| match typed_token {
TypedAstToken::TypedDeclaration(dec) => Some(dec),
_ => None,
})
}
/// Returns the [ty::TyStructDecl] associated with the TypeId if it exists
/// within the TokenMap.
pub fn struct_declaration_of_type_id(
&self,
engines: &Engines,
type_id: &TypeId,
) -> Option<ty::TyStructDecl> {
self.declaration_of_type_id(engines, type_id)
.and_then(|decl| match decl {
ty::TyDecl::StructDecl(ty::StructDecl { decl_id, .. }) => {
Some((*engines.de().get_struct(&decl_id)).clone())
}
_ => None,
})
}
/// Remove all tokens for the given file from the token map.
pub fn remove_tokens_for_file(&self, path_to_remove: &PathBuf) {
self.0
.retain(|key, _value| key.path.as_ref() != Some(path_to_remove));
}
}
/// Expose the wrapped [DashMap]'s methods directly on [TokenMap].
impl std::ops::Deref for TokenMap {
    type Target = DashMap<TokenIdent, Token>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/mod.rs | sway-lsp/src/core/mod.rs | pub mod document;
pub mod session;
pub mod sync;
pub(crate) mod token;
pub mod token_map;
pub mod token_map_ext;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/src/core/token.rs | sway-lsp/src/core/token.rs | use lsp_types::{Position, Range};
use std::path::PathBuf;
use sway_ast::Intrinsic;
use sway_core::{
decl_engine::parsed_id::ParsedDeclId,
language::{
parsed::{
AbiCastExpression, AmbiguousPathExpression, Declaration, DelineatedPathExpression,
EnumVariant, Expression, FunctionApplicationExpression, FunctionParameter,
IncludeStatement, MethodApplicationExpression, Scrutinee, StorageField,
StorageNamespace, StructExpression, StructExpressionField, StructField,
StructScrutineeField, Supertrait, TraitFn, UseStatement,
},
ty,
},
transform::Attribute,
type_system::{TypeId, TypeInfo, TypeParameter},
Engines, GenericTypeArgument, TraitConstraint, TypeEngine,
};
use sway_types::{Ident, ProgramId, SourceEngine, SourceId, Span, Spanned};
/// The `ParsedAstToken` holds the types produced by the [sway_core::language::parsed::ParseProgram].
/// These tokens have not been type-checked.
/// See this issue https://github.com/FuelLabs/sway/issues/2257 for more information about why they are
/// useful to the language server.
///
/// Most variants wrap the corresponding parse-stage AST node directly.
#[derive(Debug, Clone)]
pub enum ParsedAstToken {
    AbiCastExpression(AbiCastExpression),
    AmbiguousPathExpression(AmbiguousPathExpression),
    Attribute(Attribute),
    Declaration(Declaration),
    DelineatedPathExpression(DelineatedPathExpression),
    EnumVariant(EnumVariant),
    // Carries only the span of the region that failed to parse.
    ErrorRecovery(Span),
    Expression(Expression),
    FunctionApplicationExpression(FunctionApplicationExpression),
    FunctionParameter(FunctionParameter),
    Ident(Ident),
    ModuleName,
    IncludeStatement(IncludeStatement),
    Intrinsic(Intrinsic),
    Keyword(Ident),
    LibrarySpan(Span),
    MethodApplicationExpression(MethodApplicationExpression),
    Scrutinee(Scrutinee),
    StorageField(StorageField),
    StorageNamespace(StorageNamespace),
    StructExpression(StructExpression),
    StructExpressionField(StructExpressionField),
    StructField(StructField),
    StructScrutineeField(StructScrutineeField),
    Supertrait(Supertrait),
    TraitConstraint(TraitConstraint),
    TraitFn(ParsedDeclId<TraitFn>),
    TypeArgument(GenericTypeArgument),
    TypeParameter(TypeParameter),
    UseStatement(UseStatement),
}
/// The `TypedAstToken` holds the types produced by the [sway_core::language::ty::TyProgram].
///
/// Variants wrap type-checked AST nodes by value, hence the
/// `large_enum_variant` allow below.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
pub enum TypedAstToken {
    TypedTypeAliasDeclaration(ty::TyTypeAliasDecl),
    TypedDeclaration(ty::TyDecl),
    TypedExpression(ty::TyExpression),
    TypedScrutinee(ty::TyScrutinee),
    TyStructScrutineeField(ty::TyStructScrutineeField),
    TypedConstantDeclaration(ty::TyConstantDecl),
    TypedConfigurableDeclaration(ty::TyConfigurableDecl),
    TypedConstGenericDeclaration(ty::TyConstGenericDecl),
    TypedTraitTypeDeclaration(ty::TyTraitType),
    TypedFunctionDeclaration(ty::TyFunctionDecl),
    TypedFunctionParameter(ty::TyFunctionParameter),
    TypedStructField(ty::TyStructField),
    TypedEnumVariant(ty::TyEnumVariant),
    TypedTraitFn(ty::TyTraitFn),
    TypedSupertrait(Supertrait),
    TypedStorageField(ty::TyStorageField),
    TypedStorageAccess(ty::TyStorageAccess),
    TypedStorageAccessDescriptor(ty::TyStorageAccessDescriptor),
    TypedReassignment(ty::TyReassignment),
    TypedArgument(GenericTypeArgument),
    TypedParameter(TypeParameter),
    TypedTraitConstraint(TraitConstraint),
    TypedModuleName,
    TypedIncludeStatement(ty::TyIncludeStatement),
    TypedUseStatement(ty::TyUseStatement),
    Ident(Ident),
}
/// These variants are used to represent the semantic type of the [Token].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SymbolKind {
    /// Emitted for the boolean literals `true` and `false`.
    BoolLiteral,
    /// Emitted for builtin types like `u32`, and `str`.
    BuiltinType,
    /// Emitted for byte literals.
    ByteLiteral,
    /// Emitted for constants.
    Const,
    /// Emitted for derive helper attributes.
    DeriveHelper,
    /// Emitted for enums.
    Enum,
    /// Emitted for struct fields.
    Field,
    /// Emitted for free-standing & associated functions.
    Function,
    /// Emitted for compiler intrinsics.
    Intrinsic,
    /// Emitted for keywords.
    Keyword,
    /// Emitted for modules.
    Module,
    /// Emitted for numeric literals.
    NumericLiteral,
    /// Emitted for program type keywords — presumably `contract`/`script`/
    /// `predicate`/`library`; the original comment duplicated `Keyword`'s.
    /// TODO(review): confirm at the emit sites.
    ProgramTypeKeyword,
    /// Emitted for the self function parameter and self path-specifier.
    SelfKeyword,
    /// Emitted for the Self type parameter.
    SelfTypeKeyword,
    /// Emitted for string literals.
    StringLiteral,
    /// Emitted for structs.
    Struct,
    /// Emitted for traits.
    Trait,
    /// Emitted for associated types.
    TraitType,
    /// Emitted for type aliases.
    TypeAlias,
    /// Emitted for type parameters.
    TypeParameter,
    /// Emitted for generic tokens that have no mapping.
    Unknown,
    /// Emitted for non-self function parameters.
    ValueParam,
    /// Emitted for enum variants.
    Variant,
    /// Emitted for locals.
    Variable,
}
/// Where a token's type is defined: either resolved to a [TypeId] by the type
/// engine, or known only by its [Ident].
#[derive(Debug, Clone)]
pub enum TypeDefinition {
    TypeId(TypeId),
    Ident(Ident),
}
/// A token's AST node, from either the parse stage or the type-check stage.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
pub enum TokenAstNode {
    Parsed(ParsedAstToken),
    Typed(TypedAstToken),
}
/// The `Token` type is created during traversal of the parsed and typed AST's of a program.
/// It holds the parsed and typed data structures produced by the sway compiler.
/// It also holds the type definition & semantic type of the token if they could be inferred
/// during traversal of the AST's.
#[derive(Debug, Clone)]
pub struct Token {
    /// The parsed or typed AST node this token was created from.
    pub ast_node: TokenAstNode,
    /// Where this token's type is declared, if it could be inferred.
    pub type_def: Option<TypeDefinition>,
    /// The semantic [SymbolKind] inferred for this token.
    pub kind: SymbolKind,
}
impl Token {
/// Create a new token with the given [SymbolKind].
/// This function is intended to be used during traversal of the
/// [sway_core::language::parsed::ParseProgram] AST.
pub fn from_parsed(token: ParsedAstToken, kind: SymbolKind) -> Self {
Self {
ast_node: TokenAstNode::Parsed(token),
type_def: None,
kind,
}
}
/// Get the `AstToken`, if this is a parsed token.
pub fn as_parsed(&self) -> Option<&ParsedAstToken> {
match &self.ast_node {
TokenAstNode::Parsed(token) => Some(token),
_ => None,
}
}
/// Get the `TypedAstToken`, if this is a typed token.
pub fn as_typed(&self) -> Option<&TypedAstToken> {
match &self.ast_node {
TokenAstNode::Typed(token) => Some(token),
_ => None,
}
}
/// Return the [TokenIdent] of the declaration of the provided token.
pub fn declared_token_ident(&self, engines: &Engines) -> Option<TokenIdent> {
self.type_def.as_ref().and_then(|type_def| match type_def {
TypeDefinition::TypeId(type_id) => ident_of_type_id(engines, type_id),
TypeDefinition::Ident(ident) => Some(TokenIdent::new(ident, engines.se())),
})
}
}
/// A more convenient [Ident] type for use in the language server.
///
/// This type is used as the key in the [TokenMap]. It's constructed during AST traversal
/// where we compute the [Range] of the token and the convert [SourceId]'s to [PathBuf]'s.
/// Although this introduces a small amount of overhead while traversing, precomputing this
/// greatly speeds up performance in all other areas of the language server.
///
/// [TokenMap]: crate::core::token_map::TokenMap
/// [SourceId]: sway_types::SourceId
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct TokenIdent {
    /// The identifier's text.
    pub name: String,
    /// The identifier's position in its source file.
    pub range: Range,
    /// Path of the source file, resolved from `source_id` (see `TokenIdent::new`).
    pub path: Option<PathBuf>,
    pub source_id: Option<SourceId>,
    /// Whether the identifier was written with raw-identifier syntax.
    pub is_raw_ident: bool,
}
impl TokenIdent {
    /// Builds a `TokenIdent` from an [Ident], resolving its source id to a file path.
    pub fn new(ident: &Ident, se: &SourceEngine) -> Self {
        let span = ident.span();
        let source_id = span.source_id().copied();
        let path = source_id.map(|id| se.get_path(&id));
        Self {
            name: span.str(),
            range: get_range_from_span(&span),
            path,
            source_id,
            is_raw_ident: ident.is_raw_ident(),
        }
    }
    /// Whether the identifier was written with raw-identifier syntax.
    pub fn is_raw_ident(&self) -> bool {
        self.is_raw_ident
    }
    /// The [ProgramId] of the program this identifier originates from, if known.
    pub fn program_id(&self) -> Option<ProgramId> {
        self.source_id.map(|id| id.program_id())
    }
}
/// Manual `Hash`: hashes name, range fields, path and is_raw_ident.
/// `source_id` is omitted, but `path` is derived from it in `TokenIdent::new`,
/// so idents that compare equal still hash equally. The range fields are hashed
/// individually — presumably because `lsp_types::Range` lacks a `Hash` impl.
impl std::hash::Hash for TokenIdent {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.name.hash(state);
        self.range.start.line.hash(state);
        self.range.start.character.hash(state);
        self.range.end.line.hash(state);
        self.range.end.character.hash(state);
        self.path.hash(state);
        self.is_raw_ident.hash(state);
    }
}
/// Check if the given method is a [`core::ops`] application desugared from short-hand syntax like / + * - etc.
pub fn desugared_op(prefixes: &[Ident]) -> bool {
    // A desugared operator call is prefixed by exactly `core::ops`.
    let mut names = prefixes.iter().map(sway_types::BaseIdent::as_str);
    matches!((names.next(), names.next()), (Some("core"), Some("ops")))
}
/// Use the [TypeId] to look up the associated [TypeInfo] and return the [TokenIdent] if one is found.
pub fn ident_of_type_id(engines: &Engines, type_id: &TypeId) -> Option<TokenIdent> {
    // Only named types yield an ident; everything else (tuples, arrays, …)
    // returns `None`.
    let ident = match &*engines.te().get(*type_id) {
        TypeInfo::UnknownGeneric { name, .. } | TypeInfo::Alias { name, .. } => name.clone(),
        TypeInfo::Enum(decl_ref) => engines.de().get_enum(decl_ref).call_path.suffix.clone(),
        TypeInfo::Struct(decl_ref) => engines.de().get_struct(decl_ref).call_path.suffix.clone(),
        TypeInfo::Custom {
            qualified_call_path,
            ..
        } => qualified_call_path.call_path.suffix.clone(),
        _ => return None,
    };
    Some(TokenIdent::new(&ident, engines.se()))
}
/// Intended to be used during traversal of the [sway_core::language::parsed::ParseProgram] AST.
/// We can then use the [TypeInfo] to infer the semantic type of the token before type-checking.
pub fn type_info_to_symbol_kind(
type_engine: &TypeEngine,
type_info: &TypeInfo,
type_span: Option<&Span>,
) -> SymbolKind {
// This is necessary because the type engine resolves `Self` & `self` to the type it refers to.
// We want to keep the semantics of these keywords.
if let Some(type_span) = type_span {
if type_span.as_str() == "Self" {
return SymbolKind::SelfTypeKeyword;
} else if type_span.as_str() == "self" {
return SymbolKind::SelfKeyword;
}
}
match type_info {
TypeInfo::UnsignedInteger(..) | TypeInfo::Boolean | TypeInfo::B256 => {
SymbolKind::BuiltinType
}
TypeInfo::Numeric | TypeInfo::StringArray(..) => SymbolKind::NumericLiteral,
TypeInfo::Custom { .. } | TypeInfo::Struct { .. } | TypeInfo::Contract => {
SymbolKind::Struct
}
TypeInfo::Enum { .. } => SymbolKind::Enum,
TypeInfo::Array(elem_ty, ..) => {
let type_info = type_engine.get(elem_ty.type_id);
type_info_to_symbol_kind(type_engine, &type_info, Some(&elem_ty.span()))
}
TypeInfo::Slice(elem_ty) => {
let type_info = type_engine.get(elem_ty.type_id);
type_info_to_symbol_kind(type_engine, &type_info, Some(&elem_ty.span()))
}
_ => SymbolKind::Unknown,
}
}
/// Given a [Span], convert into a [Range] and return.
pub fn get_range_from_span(span: &Span) -> Range {
let start = span.start_line_col_one_index();
let end = span.end_line_col_one_index();
Range {
start: Position::new(start.line as u32 - 1, start.col as u32 - 1),
end: Position::new(end.line as u32 - 1, end.col as u32 - 1),
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/tests/lib.rs | sway-lsp/tests/lib.rs | #![recursion_limit = "256"]
pub mod integration;
use crate::integration::{code_actions, lsp};
use forc_pkg::manifest::{GenericManifestFile, ManifestFile};
use lsp_types::*;
use rayon::prelude::*;
use std::{
fs, panic,
path::PathBuf,
process::{Command, Stdio},
sync::Mutex,
};
use sway_lsp::{
config::LspClient,
handlers::{notification, request},
server_state::ServerState,
};
use sway_lsp_test_utils::*;
use tower_lsp::LspService;
/// Holds the information needed to check the response of a goto definition request.
#[derive(Debug)]
pub(crate) struct GotoDefinition<'a> {
req_uri: &'a Url,
req_line: u32,
req_char: u32,
def_line: u32,
def_start_char: u32,
def_end_char: u32,
def_path: &'a str,
}
/// Contains data required to evaluate a hover request response.
pub(crate) struct HoverDocumentation<'a> {
req_uri: &'a Url,
req_line: u32,
req_char: u32,
documentation: Vec<&'a str>,
}
/// Contains data required to evaluate a rename request.
pub(crate) struct Rename<'a> {
req_uri: &'a Url,
req_line: u32,
req_char: u32,
new_name: &'a str,
}
async fn open(server: &ServerState, entry_point: PathBuf) -> Url {
let (uri, sway_program) = load_sway_example(entry_point);
let params = DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "sway".to_string(),
version: 1,
text: sway_program,
},
};
let res = notification::handle_did_open_text_document(server, params).await;
assert!(res.is_ok());
uri
}
async fn init_and_open(service: &mut LspService<ServerState>, entry_point: PathBuf) -> Url {
let _ = lsp::initialize_request(service, &entry_point).await;
let _ = lsp::initialized_notification(service).await;
let (uri, sway_program) = load_sway_example(entry_point);
lsp::did_open_notification(service, &uri, &sway_program).await;
uri
}
pub async fn shutdown_and_exit(service: &mut LspService<ServerState>) {
let _ = lsp::shutdown_request(service).await;
lsp::exit_notification(service).await;
}
/// Executes an asynchronous block of code within a synchronous test function.
///
/// This macro simplifies the process of running asynchronous code inside
/// Rust tests, which are inherently synchronous. It creates a new Tokio runtime
/// and uses it to run the provided asynchronous code block to completion. This
/// approach is particularly useful in testing environments where asynchronous
/// operations need to be performed sequentially to avoid contention among async
/// resources.
///
/// Usage:
/// ```ignore
/// #[test]
/// fn my_async_test() {
/// run_async!({
/// // Your async code here.
/// });
/// }
/// ```
///
/// This was needed because directly using `#[tokio::test]` in a large test suite
/// with async operations can lead to issues such as test interference and resource
/// contention, which may result in flaky tests. By ensuring each test runs
/// sequentially with its own Tokio runtime, we mitigate these issues and improve
/// test reliability.
macro_rules! run_async {
($async_block:block) => {{
let rt = tokio::runtime::Runtime::new().expect("Failed to create a runtime");
rt.block_on(async { $async_block });
}};
}
// This macro allows us to spin up a server / client for testing
// It initializes and performs the necessary handshake and then loads
// the sway example that was passed into `example_dir`.
// It then runs the specific capability to test before gracefully shutting down.
// The capability argument is an async function.
macro_rules! test_lsp_capability {
($entry_point:expr, $capability:expr) => {{
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, $entry_point).await;
// Call the specific LSP capability function that was passed in.
let _ = $capability(&service.inner(), &uri).await;
shutdown_and_exit(&mut service).await;
}};
}
macro_rules! lsp_capability_test {
($test:ident, $capability:expr, $entry_path:expr) => {
#[test]
fn $test() {
run_async!({
test_lsp_capability!($entry_path, $capability);
});
}
};
}
#[test]
fn initialize() {
run_async!({
let (service, _) = LspService::new(ServerState::new);
let params = InitializeParams {
initialization_options: None,
..Default::default()
};
let _ = request::handle_initialize(service.inner(), ¶ms);
});
}
#[test]
fn did_open() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let _ = init_and_open(&mut service, e2e_test_dir().join("src/main.sw")).await;
service.inner().wait_for_parsing().await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn did_open_all_std_lib_files() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let files = sway_utils::helpers::get_sway_files(std_lib_dir().join("src"));
for file in files {
eprintln!("opening file: {:?}", file.as_path());
// If the workspace is not initialized, we need to initialize it
// Otherwise, we can just open the file
let file_url = Url::from_file_path(&file).unwrap();
let uri = if !service.inner().is_workspace_initialized(&file_url) {
init_and_open(&mut service, file.to_path_buf()).await
} else {
open(service.inner(), file.to_path_buf()).await
};
// Make sure that semantic tokens are successfully returned for the file
let semantic_tokens = lsp::get_semantic_tokens_full(service.inner(), &uri).await;
assert!(!semantic_tokens.data.is_empty());
}
shutdown_and_exit(&mut service).await;
});
}
// Opens all members in the workspaces and assert that we are able to return semantic tokens for each workspace member.
// TODO: this seems to take much longer to run than the test above. Investigate why. https://github.com/FuelLabs/sway/issues/7233
#[test]
fn did_open_all_members_in_examples() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let examples_workspace_dir = sway_workspace_dir().join("examples");
let test_programs_workspace_dir = sway_workspace_dir().join(in_language_test_dir());
let sdk_harness_workspace_dir = sway_workspace_dir().join(sdk_harness_test_projects_dir());
let workspace_dirs = vec![
&examples_workspace_dir,
&test_programs_workspace_dir,
&sdk_harness_workspace_dir,
];
for workspace_dir in workspace_dirs {
let member_manifests = ManifestFile::from_dir(workspace_dir)
.unwrap()
.member_manifests()
.unwrap();
// Open all workspace members and assert that we are able to return semantic tokens for each workspace member.
for (name, package_manifest) in &member_manifests {
eprintln!("compiling {name:?}");
let dir = package_manifest.path().parent().unwrap();
// If the workspace is not initialized, we need to initialize it
// Otherwise, we can just open the file
let path = dir.join("src/main.sw");
let uri = if service.inner().sync_workspaces.is_empty() {
init_and_open(&mut service, path).await
} else {
open(service.inner(), path).await
};
// Make sure that program was parsed and the token map is populated
let sync = service.inner().get_sync_workspace_for_uri(&uri).unwrap();
let tmp_uri = sync.workspace_to_temp_url(&uri).unwrap();
let num_tokens_for_file =
service.inner().token_map.tokens_for_file(&tmp_uri).count();
assert!(num_tokens_for_file > 0);
// Make sure that semantic tokens are successfully returned for the file
let semantic_tokens = lsp::get_semantic_tokens_full(service.inner(), &uri).await;
assert!(!semantic_tokens.data.is_empty());
// Make sure that document symbols are successfully returned for the file
let document_symbols = lsp::get_document_symbols(service.inner(), &uri).await;
assert!(!document_symbols.is_empty());
}
}
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn did_change() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await;
let _ = lsp::did_change_request(&mut service, &uri, 1, None).await;
service.inner().wait_for_parsing().await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn sync_with_updates_to_manifest_in_workspace() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let workspace_dir = test_fixtures_dir().join("workspace");
let path = workspace_dir.join("test-contract/src/main.sw");
let workspace_uri = init_and_open(&mut service, path.clone()).await;
// add test-library as a dependency to the test-contract manifest file
let test_lib_string = "test-library = { path = \"../test-library\" }";
let test_contract_manifest = workspace_dir.join("test-contract/Forc.toml");
let mut manifest_content = fs::read_to_string(&test_contract_manifest).unwrap();
manifest_content.push_str(test_lib_string);
fs::write(&test_contract_manifest, &manifest_content).unwrap();
// notify the server that the manifest file has changed
let params = DidChangeWatchedFilesParams {
changes: vec![FileEvent {
uri: workspace_uri.clone(),
typ: FileChangeType::CHANGED,
}],
};
lsp::did_change_watched_files_notification(&mut service, params).await;
// Check that the build plan now has 3 items
let (sync, uri, session) = service
.inner()
.sync_uri_and_session_from_workspace(&workspace_uri)
.unwrap();
let build_plan = session
.build_plan_cache
.get_or_update(&sync.workspace_manifest_path(), || {
sway_lsp::core::session::build_plan(&uri)
})
.unwrap();
assert_eq!(build_plan.compilation_order().len(), 3);
// cleanup: remove the test-library from the test-contract manifest file
manifest_content = manifest_content.replace(test_lib_string, "");
fs::write(&test_contract_manifest, &manifest_content).unwrap();
shutdown_and_exit(&mut service).await;
});
}
#[allow(dead_code)]
// #[test]
fn did_cache_test() {
run_async!({
let (mut service, _) = LspService::build(ServerState::new)
.custom_method("sway/metrics", ServerState::metrics)
.finish();
let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await;
let _ = lsp::did_change_request(&mut service, &uri, 1, None).await;
service.inner().wait_for_parsing().await;
let metrics = lsp::metrics_request(&mut service).await;
assert!(metrics.len() >= 2);
for (path, metrics) in metrics {
if path.contains("sway-lib-std") {
assert!(metrics.reused_programs >= 1);
}
}
shutdown_and_exit(&mut service).await;
});
}
#[allow(dead_code)]
// #[test]
fn did_change_stress_test() {
run_async!({
let (mut service, _) = LspService::build(ServerState::new)
.custom_method("sway/metrics", ServerState::metrics)
.finish();
let bench_dir = sway_workspace_dir().join("sway-lsp/tests/fixtures/benchmark");
let uri = init_and_open(&mut service, bench_dir.join("src/main.sw")).await;
let times = 400;
for version in 0..times {
let _ = lsp::did_change_request(&mut service, &uri, version + 1, None).await;
if version == 0 {
service.inner().wait_for_parsing().await;
}
let metrics = lsp::metrics_request(&mut service).await;
for (path, metrics) in metrics {
if path.contains("sway-lib-std") {
assert!(metrics.reused_programs >= 1);
}
}
}
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn did_change_stress_test_random_wait() {
run_async!({
let test_duration = tokio::time::Duration::from_secs(5 * 60); // 5 minutes timeout
let test_future = async {
setup_panic_hook();
let (mut service, _) = LspService::new(ServerState::new);
let example_dir = sway_workspace_dir()
.join(e2e_language_dir())
.join("generics_in_contract");
let uri = init_and_open(&mut service, example_dir.join("src/main.sw")).await;
let times = 60;
// Initialize cursor position
let mut cursor_line = 29;
for version in 0..times {
let params = lsp::simulate_keypress(&uri, version, &mut cursor_line);
let _ = lsp::did_change_request(&mut service, &uri, version, Some(params)).await;
if version == 0 {
service.inner().wait_for_parsing().await;
}
// wait for a random amount of time between 1-30ms
tokio::time::sleep(tokio::time::Duration::from_millis(
rand::random::<u64>() % 30 + 1,
))
.await;
// there is a 10% chance that a longer 100-800ms wait will be added
if rand::random::<u64>() % 10 < 1 {
tokio::time::sleep(tokio::time::Duration::from_millis(
rand::random::<u64>() % 700 + 100,
))
.await;
}
}
shutdown_and_exit(&mut service).await;
};
if tokio::time::timeout(test_duration, test_future)
.await
.is_err()
{
panic!(
"did_change_stress_test_random_wait did not complete within the timeout period."
);
}
});
}
// TODO: Fix this test, it goes in circles (def_start_char is expected to be 5 when its 7, 7 when its 5) Issue #7002
// #[test]
// fn lsp_syncs_with_workspace_edits() {
// run_async!({
// let (mut service, _) = LspService::new(ServerState::new);
// let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await;
// let mut go_to = GotoDefinition {
// req_uri: &uri,
// req_line: 44,
// req_char: 24,
// def_line: 19,
// def_start_char: 7,
// def_end_char: 11,
// def_path: uri.as_str(),
// };
// lsp::definition_check(service.inner(), &go_to).await;
// let _ = lsp::did_change_request(&mut service, &uri, 1, None).await;
// service.inner().wait_for_parsing().await;
// go_to.def_line = 20;
// lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 45, 24).await;
// shutdown_and_exit(&mut service).await;
// });
// }
#[test]
fn compilation_succeeds_when_triggered_from_module() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let _ = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/modules/src/test_mod.sw"),
)
.await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn show_ast() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, e2e_test_dir().join("src/main.sw")).await;
lsp::show_ast_request(service.inner(), &uri, "typed", None).await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
#[ignore = "`struct_field_access` test doesn't depend on `std` anymore which makes this test fail because the dependency graph is not the expected one."]
fn visualize() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, e2e_test_dir().join("src/main.sw")).await;
lsp::visualize_request(service.inner(), &uri, "build_plan").await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn code_lens() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, runnables_test_dir().join("src/main.sw")).await;
let response = lsp::code_lens_request(service.inner(), &uri).await;
let expected = vec![
CodeLens {
range: Range {
start: Position {
line: 4,
character: 3,
},
end: Position {
line: 4,
character: 7,
},
},
command: Some(lsp_types::Command {
title: "▶︎ Run".to_string(),
command: "sway.runScript".to_string(),
arguments: None,
}),
data: None,
},
CodeLens {
range: Range {
start: Position {
line: 8,
character: 0,
},
end: Position {
line: 8,
character: 7,
},
},
command: Some(lsp_types::Command {
title: "▶︎ Run Test".to_string(),
command: "sway.runTests".to_string(),
arguments: Some(vec![serde_json::json!({
"name": "test_foo"
})]),
}),
data: None,
},
CodeLens {
range: Range {
start: Position {
line: 13,
character: 0,
},
end: Position {
line: 13,
character: 7,
},
},
command: Some(lsp_types::Command {
title: "▶︎ Run Test".to_string(),
command: "sway.runTests".to_string(),
arguments: Some(vec![serde_json::json!({
"name": "test_bar"
})]),
}),
data: None,
},
];
assert_eq!(response.unwrap(), expected);
let uri = open(service.inner(), runnables_test_dir().join("src/other.sw")).await;
let response = lsp::code_lens_request(service.inner(), &uri).await;
let expected = vec![CodeLens {
range: Range {
start: Position {
line: 2,
character: 0,
},
end: Position {
line: 2,
character: 7,
},
},
command: Some(lsp_types::Command {
title: "▶︎ Run Test".to_string(),
command: "sway.runTests".to_string(),
arguments: Some(vec![serde_json::json!({
"name": "test_baz"
})]),
}),
data: None,
}];
assert_eq!(response.unwrap(), expected);
shutdown_and_exit(&mut service).await;
});
}
//------------------- GO TO DEFINITION -------------------//
#[test]
fn go_to_definition() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, doc_comments_dir().join("src/main.sw")).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 44,
req_char: 24,
def_line: 19,
def_start_char: 7,
def_end_char: 11,
def_path: uri.as_str(),
};
lsp::definition_check(service.inner(), &go_to).await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn go_to_definition_for_fields() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/fields/src/main.sw"),
)
.await;
let mut opt_go_to = GotoDefinition {
req_uri: &uri,
req_line: 5,
req_char: 8,
def_line: 85,
def_start_char: 9,
def_end_char: 15,
def_path: "sway-lib-std/src/option.sw",
};
// Option
lsp::definition_check(service.inner(), &opt_go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 5, 16).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 9, 9).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 9, 16).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 13, 12).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 13, 19).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 13, 34).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 13, 47).await;
let opt_go_to = GotoDefinition {
req_uri: &uri,
req_line: 17,
req_char: 10,
def_line: 0,
def_start_char: 0,
def_end_char: 0,
def_path: "sway-lsp/tests/fixtures/tokens/fields/src/foo.sw",
};
// foo
lsp::definition_check(service.inner(), &opt_go_to).await;
let opt_go_to = GotoDefinition {
req_uri: &uri,
req_line: 17,
req_char: 15,
def_line: 2,
def_start_char: 11,
def_end_char: 14,
def_path: "sway-lsp/tests/fixtures/tokens/fields/src/foo.sw",
};
// Foo
lsp::definition_check(service.inner(), &opt_go_to).await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn go_to_definition_inside_turbofish() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/turbofish/src/main.sw"),
)
.await;
let mut opt_go_to = GotoDefinition {
req_uri: &uri,
req_line: 15,
req_char: 12,
def_line: 85,
def_start_char: 9,
def_end_char: 15,
def_path: "sway-lib-std/src/option.sw",
};
// option.sw
lsp::definition_check(service.inner(), &opt_go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 16, 17).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 17, 29).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 18, 19).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 20, 13).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 21, 19).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 22, 29).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 23, 18).await;
lsp::definition_check_with_req_offset(service.inner(), &mut opt_go_to, 24, 26).await;
let mut res_go_to = GotoDefinition {
req_uri: &uri,
req_line: 20,
req_char: 19,
def_line: 65,
def_start_char: 9,
def_end_char: 15,
def_path: "sway-lib-std/src/result.sw",
};
// result.sw
lsp::definition_check(service.inner(), &res_go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut res_go_to, 21, 25).await;
lsp::definition_check_with_req_offset(service.inner(), &mut res_go_to, 22, 36).await;
lsp::definition_check_with_req_offset(service.inner(), &mut res_go_to, 23, 27).await;
lsp::definition_check_with_req_offset(service.inner(), &mut res_go_to, 24, 33).await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn go_to_definition_for_matches() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/matches/src/main.sw"),
)
.await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 14,
req_char: 10,
def_line: 10,
def_start_char: 6,
def_end_char: 19,
def_path: "sway-lsp/tests/fixtures/tokens/matches/src/main.sw",
};
// EXAMPLE_CONST
lsp::definition_check(service.inner(), &go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 19, 18).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 22, 18).await;
// TODO: Enable the below check once this issue is fixed: https://github.com/FuelLabs/sway/issues/5221
// lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 22, 30);
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 23, 16).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 28, 38).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 15,
req_char: 13,
def_line: 15,
def_start_char: 8,
def_end_char: 9,
def_path: "sway-lsp/tests/fixtures/tokens/matches/src/main.sw",
};
// a => a + 1
lsp::definition_check(service.inner(), &go_to).await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 25,
req_char: 19,
def_line: 85,
def_start_char: 9,
def_end_char: 15,
def_path: "sway-lib-std/src/option.sw",
};
// Option
lsp::definition_check(service.inner(), &go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 25, 33).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 26, 11).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 27, 11).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 27, 22).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 28, 11).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 28, 22).await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 25,
req_char: 27,
def_line: 89,
def_start_char: 4,
def_end_char: 8,
def_path: "sway-lib-std/src/option.sw",
};
// Some
lsp::definition_check(service.inner(), &go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 27, 17).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 28, 17).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 28, 30).await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 26,
req_char: 17,
def_line: 87,
def_start_char: 4,
def_end_char: 8,
def_path: "sway-lib-std/src/option.sw",
};
// None
lsp::definition_check(service.inner(), &go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 27, 30).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 34,
req_char: 11,
def_line: 2,
def_start_char: 7,
def_end_char: 20,
def_path: "sway-lsp/tests/fixtures/tokens/matches/src/main.sw",
};
// ExampleStruct
lsp::definition_check(service.inner(), &go_to).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 34,
req_char: 26,
def_line: 3,
def_start_char: 4,
def_end_char: 12,
def_path: "sway-lsp/tests/fixtures/tokens/matches/src/main.sw",
};
// ExampleStruct.variable
lsp::definition_check(service.inner(), &go_to).await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn go_to_definition_for_modules() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/modules/src/lib.sw"),
)
.await;
let opt_go_to = GotoDefinition {
req_uri: &uri,
req_line: 2,
req_char: 6,
def_line: 0,
def_start_char: 0,
def_end_char: 0,
def_path: "sway-lsp/tests/fixtures/tokens/modules/src/test_mod.sw",
};
// mod test_mod;
lsp::definition_check(service.inner(), &opt_go_to).await;
let uri = open(
service.inner(),
test_fixtures_dir().join("tokens/modules/src/test_mod.sw"),
)
.await;
let opt_go_to = GotoDefinition {
req_uri: &uri,
req_line: 2,
req_char: 6,
def_line: 0,
def_start_char: 0,
def_end_char: 0,
def_path: "sway-lsp/tests/fixtures/tokens/modules/src/test_mod/deep_mod.sw",
};
// mod deep_mod;
lsp::definition_check(service.inner(), &opt_go_to).await;
shutdown_and_exit(&mut service).await;
});
}
#[test]
fn go_to_definition_for_paths() {
run_async!({
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/paths/src/main.sw"),
)
.await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 10,
req_char: 13,
def_line: 0,
def_start_char: 0,
def_end_char: 0,
def_path: "sway-lib-std/src/lib.sw",
};
// std
lsp::definition_check(service.inner(), &go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 12, 14).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 16, 5).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 22, 13).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 7, 5).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 10,
req_char: 19,
def_line: 0,
def_start_char: 0,
def_end_char: 0,
def_path: "sway-lib-std/src/option.sw",
};
// option
lsp::definition_check(service.inner(), &go_to).await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 10,
req_char: 27,
def_line: 85,
def_start_char: 9,
def_end_char: 15,
def_path: "sway-lib-std/src/option.sw",
};
// Option
lsp::definition_check(service.inner(), &go_to).await;
lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 11, 14).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 12,
req_char: 17,
def_line: 0,
def_start_char: 0,
def_end_char: 0,
def_path: "sway-lib-std/src/vm.sw",
};
// vm
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/tests/integration/code_actions.rs | sway-lsp/tests/integration/code_actions.rs | //! This file contains methods used for simulating LSP code action json-rpc notifications and requests.
//! The methods are used to build and send requests and notifications to the LSP service
//! and assert the expected responses.
use lsp_types::*;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::collections::HashMap;
use sway_lsp::{
capabilities::diagnostic::DiagnosticData, handlers::request, server_state::ServerState,
};
fn create_code_action(
uri: Url,
title: String,
changes: HashMap<Url, Vec<TextEdit>>,
disabled: Option<CodeActionDisabled>,
kind: Option<CodeActionKind>,
) -> CodeActionOrCommand {
CodeActionOrCommand::CodeAction(CodeAction {
title,
kind,
diagnostics: None,
edit: Some(WorkspaceEdit {
changes: Some(changes),
document_changes: None,
change_annotations: None,
}),
command: None,
is_preferred: None,
disabled,
data: Some(serde_json::to_value(uri).unwrap()),
})
}
fn create_code_action_params(
uri: Url,
range: Range,
diagnostics: Option<Vec<Diagnostic>>,
) -> CodeActionParams {
CodeActionParams {
text_document: TextDocumentIdentifier { uri },
range,
context: CodeActionContext {
diagnostics: diagnostics.unwrap_or_default(),
only: None,
trigger_kind: Some(CodeActionTriggerKind::AUTOMATIC),
},
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
}
}
fn create_diagnostic_from_data(range: Range, data: DiagnosticData) -> Option<Vec<Diagnostic>> {
Some(vec![Diagnostic {
range,
data: Some(json!(data)),
..Default::default()
}])
}
fn create_changes_map(uri: &Url, range: Range, new_text: &str) -> HashMap<Url, Vec<TextEdit>> {
HashMap::from([(
uri.clone(),
vec![TextEdit {
range,
new_text: new_text.to_string(),
}],
)])
}
fn create_changes_for_import(
uri: &Url,
line: u32,
end_char: u32,
call_path: &str,
newline: &str,
) -> HashMap<Url, Vec<TextEdit>> {
let range = Range {
start: Position { line, character: 0 },
end: Position {
line,
character: end_char,
},
};
create_changes_map(uri, range, &format!("use {call_path};{newline}"))
}
fn create_changes_for_qualify(
uri: &Url,
line: u32,
start_char: u32,
end_char: u32,
call_path: &str,
) -> HashMap<Url, Vec<TextEdit>> {
let range = Range {
start: Position {
line,
character: start_char,
},
end: Position {
line,
character: end_char,
},
};
create_changes_map(uri, range, call_path)
}
async fn send_request(server: &ServerState, params: &CodeActionParams) -> Vec<CodeActionOrCommand> {
request::handle_code_action(server, params.clone())
.await
.unwrap()
.unwrap_or_else(|| panic!("Empty response from server for request: {params:#?}"))
}
pub(crate) async fn code_action_abi_request(server: &ServerState, uri: &Url) {
let params = create_code_action_params(
uri.clone(),
Range {
start: Position {
line: 27,
character: 4,
},
end: Position {
line: 27,
character: 9,
},
},
None,
);
let changes = create_changes_map(
uri,
Range {
start: Position {
line: 31,
character: 0,
},
end: Position {
line: 31,
character: 0,
},
},
"\nimpl FooABI for Contract {\n fn main() -> u64 {}\n}\n",
);
let expected = vec![create_code_action(
uri.clone(),
"Generate impl for `FooABI`".to_string(),
changes,
None,
Some(CodeActionKind::REFACTOR),
)];
let actual = send_request(server, ¶ms).await;
assert_eq!(expected, actual);
}
/// Verifies the "Generate a documentation template" code action on a free function.
pub(crate) async fn code_action_function_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    // Cursor inside the function signature at line 18.
    let params = create_code_action_params(
        uri.clone(),
        Range {
            start: pos(18, 4),
            end: pos(18, 4),
        },
        None,
    );
    // The doc template is inserted immediately above the function.
    let changes = create_changes_map(
        uri,
        Range {
            start: pos(18, 0),
            end: pos(18, 0),
        },
        "/// Add a brief description.\n/// \n/// ### Additional Information\n/// \n/// Provide information beyond the core purpose or functionality.\n/// \n/// ### Reverts\n/// \n/// * List any cases where the function will revert\n/// \n/// ### Number of Storage Accesses\n/// \n/// * Reads: `0`\n/// * Writes: `0`\n/// * Clears: `0`\n/// \n/// ### Examples\n/// \n/// ```sway\n/// let x = test();\n/// ```\n",
    );
    let expected = vec![create_code_action(
        uri.clone(),
        "Generate a documentation template".to_string(),
        changes,
        None,
        Some(CodeActionKind::REFACTOR),
    )];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies the "Generate a documentation template" code action on a trait
/// function; the template is indented to match the trait body.
pub(crate) async fn code_action_trait_fn_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    // Cursor on the trait function at line 10.
    let params = create_code_action_params(
        uri.clone(),
        Range {
            start: pos(10, 10),
            end: pos(10, 10),
        },
        None,
    );
    let changes = create_changes_map(
        uri,
        Range {
            start: pos(10, 0),
            end: pos(10, 0),
        },
        "    /// Add a brief description.\n    /// \n    /// ### Additional Information\n    /// \n    /// Provide information beyond the core purpose or functionality.\n    /// \n    /// ### Returns\n    /// \n    /// * [abi::Empty] - Add description here\n    /// \n    /// ### Reverts\n    /// \n    /// * List any cases where the function will revert\n    /// \n    /// ### Number of Storage Accesses\n    /// \n    /// * Reads: `0`\n    /// * Writes: `0`\n    /// * Clears: `0`\n    /// \n    /// ### Examples\n    /// \n    /// ```sway\n    /// let x = test_function();\n    /// ```\n",
    );
    let expected = vec![create_code_action(
        uri.clone(),
        "Generate a documentation template".to_string(),
        changes,
        None,
        Some(CodeActionKind::REFACTOR),
    )];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies the three code actions offered on a struct declaration:
/// an empty impl block, a `new` constructor, and a doc-comment template.
pub(crate) async fn code_action_struct_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    // Zero-width insertion point at the start of `line`.
    let insertion_point = |line| Range {
        start: pos(line, 0),
        end: pos(line, 0),
    };
    let refactor = |title: &str, changes| {
        create_code_action(
            uri.clone(),
            title.to_string(),
            changes,
            None,
            Some(CodeActionKind::REFACTOR),
        )
    };
    // Cursor on the struct name at line 19.
    let params = create_code_action_params(
        uri.clone(),
        Range {
            start: pos(19, 11),
            end: pos(19, 11),
        },
        None,
    );
    let expected = vec![
        refactor(
            "Generate impl for `Data`",
            create_changes_map(uri, insertion_point(25), "\nimpl Data {\n    \n}\n"),
        ),
        refactor(
            "Generate `new`",
            create_changes_map(uri, insertion_point(25), "\nimpl Data {\n    fn new(value: NumberOrString, address: u64) -> Self { Self { value, address } }\n}\n"),
        ),
        refactor(
            "Generate a documentation template",
            create_changes_map(uri, insertion_point(19), "/// Add a brief description.\n/// \n/// ### Additional Information\n/// \n/// Provide information beyond the core purpose or functionality.\n"),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies code actions on a generic struct. The `new` action is offered but
/// disabled because the struct already defines a `new` function.
pub(crate) async fn code_action_struct_type_params_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    // Zero-width insertion point at the start of `line`.
    let insertion_point = |line| Range {
        start: pos(line, 0),
        end: pos(line, 0),
    };
    let refactor = |title: &str, changes, disabled| {
        create_code_action(
            uri.clone(),
            title.to_string(),
            changes,
            disabled,
            Some(CodeActionKind::REFACTOR),
        )
    };
    // Cursor on the struct name at line 4.
    let params = create_code_action_params(
        uri.clone(),
        Range {
            start: pos(4, 9),
            end: pos(4, 9),
        },
        None,
    );
    let expected = vec![
        refactor(
            "Generate impl for `Data`",
            create_changes_map(uri, insertion_point(7), "\nimpl<T> Data<T> {\n    \n}\n"),
            None,
        ),
        refactor(
            "Generate `new`",
            create_changes_map(uri, insertion_point(9), "    fn new(value: T) -> Self { Self { value } }\n"),
            Some(CodeActionDisabled {
                reason: "Struct Data already has a `new` function".to_string(),
            }),
        ),
        refactor(
            "Generate a documentation template",
            create_changes_map(uri, insertion_point(4), "/// Add a brief description.\n/// \n/// ### Additional Information\n/// \n/// Provide information beyond the core purpose or functionality.\n"),
            None,
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies code actions on a struct that already has an impl block; the
/// generated impls are inserted after the existing one.
pub(crate) async fn code_action_struct_existing_impl_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    // Zero-width insertion point at the start of `line`.
    let insertion_point = |line| Range {
        start: pos(line, 0),
        end: pos(line, 0),
    };
    let refactor = |title: &str, changes| {
        create_code_action(
            uri.clone(),
            title.to_string(),
            changes,
            None,
            Some(CodeActionKind::REFACTOR),
        )
    };
    // Cursor on the struct name at line 2.
    let params = create_code_action_params(
        uri.clone(),
        Range {
            start: pos(2, 7),
            end: pos(2, 7),
        },
        None,
    );
    let expected = vec![
        refactor(
            "Generate impl for `A`",
            create_changes_map(uri, insertion_point(6), "\nimpl A {\n    \n}\n"),
        ),
        refactor(
            "Generate `new`",
            create_changes_map(
                uri,
                insertion_point(6),
                "\nimpl A {\n    fn new(a: u64, b: u64) -> Self { Self { a, b } }\n}\n",
            ),
        ),
        refactor(
            "Generate a documentation template",
            create_changes_map(uri, insertion_point(2), "/// Add a brief description.\n/// \n/// ### Additional Information\n/// \n/// Provide information beyond the core purpose or functionality.\n"),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies import/qualify quickfixes for unknown struct names, both from an
/// external library and from a local one.
pub(crate) async fn code_action_auto_import_struct_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    let quickfix = |title: String, changes| {
        create_code_action(uri.clone(), title, changes, None, Some(CodeActionKind::QUICKFIX))
    };
    // EvmAddress: external library
    let range = Range {
        start: pos(8, 12),
        end: pos(8, 22),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("EvmAddress".to_string()),
            },
        ),
    );
    let call_path = "std::vm::evm::evm_address::EvmAddress";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 8, 12, 22, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
    // DeepStruct: local library
    let range = Range {
        start: pos(17, 12),
        end: pos(17, 22),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("DeepStruct".to_string()),
            },
        ),
    );
    let call_path = "deep_mod::deeper_mod::DeepStruct";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 17, 12, 22, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies import/qualify quickfixes for unknown enum names.
// TODO: Add a test for an enum variant when https://github.com/FuelLabs/sway/issues/5188 is fixed.
pub(crate) async fn code_action_auto_import_enum_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    let quickfix = |title: String, changes| {
        create_code_action(uri.clone(), title, changes, None, Some(CodeActionKind::QUICKFIX))
    };
    // AuthError: external library
    let range = Range {
        start: pos(23, 28),
        end: pos(23, 37),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("AuthError".to_string()),
            },
        ),
    );
    let call_path = "std::auth::AuthError";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 23, 28, 37, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
    // DeepEnum: local library
    let range = Range {
        start: pos(16, 11),
        end: pos(16, 19),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("DeepEnum".to_string()),
            },
        ),
    );
    let call_path = "deep_mod::deeper_mod::DeepEnum";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 16, 11, 19, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies import/qualify quickfixes for an unknown function name.
// TODO: external library, test with `overflow``
// Tracking issue: https://github.com/FuelLabs/sway/issues/5191
pub(crate) async fn code_action_auto_import_function_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    let quickfix = |title: String, changes| {
        create_code_action(uri.clone(), title, changes, None, Some(CodeActionKind::QUICKFIX))
    };
    // deep_fun: local library
    let range = Range {
        start: pos(13, 4),
        end: pos(13, 12),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("deep_fun".to_string()),
            },
        ),
    );
    let call_path = "deep_mod::deeper_mod::deep_fun";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 13, 4, 12, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies import/qualify quickfixes for an unknown constant; the import fix
/// merges the constant into an existing `use test_mod::…` statement.
// TODO: external library, test with ZERO_B256
// Tracking issue: https://github.com/FuelLabs/sway/issues/5192
pub(crate) async fn code_action_auto_import_constant_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    let quickfix = |title: String, changes| {
        create_code_action(uri.clone(), title, changes, None, Some(CodeActionKind::QUICKFIX))
    };
    // TEST_CONST: import a constant from a local library
    let range = Range {
        start: pos(19, 12),
        end: pos(19, 22),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("TEST_CONST".to_string()),
            },
        ),
    );
    let call_path = "test_mod::TEST_CONST";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 23, "test_mod::{TEST_CONST, test_fun}", ""),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 19, 12, 22, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies import/qualify quickfixes for unknown trait names, both from an
/// external library and from a local one.
pub(crate) async fn code_action_auto_import_trait_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    let quickfix = |title: String, changes| {
        create_code_action(uri.clone(), title, changes, None, Some(CodeActionKind::QUICKFIX))
    };
    // TryFrom: external library
    let range = Range {
        start: pos(34, 5),
        end: pos(34, 12),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("TryFrom".to_string()),
            },
        ),
    );
    let call_path = "std::convert::TryFrom";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 34, 5, 12, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
    // DeepTrait: local library
    let range = Range {
        start: pos(30, 5),
        end: pos(30, 14),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("DeepTrait".to_string()),
            },
        ),
    );
    let call_path = "deep_mod::deeper_mod::DeepTrait";
    let expected = vec![
        quickfix(
            format!("Import `{call_path}`"),
            create_changes_for_import(uri, 5, 0, call_path, "\n"),
        ),
        quickfix(
            format!("Qualify as `{call_path}`"),
            create_changes_for_qualify(uri, 30, 5, 14, call_path),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
/// Verifies quickfixes when a symbol name resolves to several candidate
/// modules: an import and a qualify action is offered for each candidate.
// TODO: find an example in an external library
pub(crate) async fn code_action_auto_import_alias_request(server: &ServerState, uri: &Url) {
    let pos = |line, character| Position { line, character };
    let quickfix = |title: String, changes| {
        create_code_action(uri.clone(), title, changes, None, Some(CodeActionKind::QUICKFIX))
    };
    // A: local library with multiple possible imports
    let range = Range {
        start: pos(14, 4),
        end: pos(14, 5),
    };
    let params = create_code_action_params(
        uri.clone(),
        range,
        create_diagnostic_from_data(
            range,
            DiagnosticData {
                unknown_symbol_name: Some("A".to_string()),
            },
        ),
    );
    let call_path1 = "deep_mod::deeper_mod::A";
    let call_path2 = "test_mod::A";
    let expected = vec![
        quickfix(
            format!("Import `{call_path1}`"),
            create_changes_for_import(uri, 5, 0, call_path1, "\n"),
        ),
        quickfix(
            format!("Import `{call_path2}`"),
            create_changes_for_import(uri, 5, 23, "test_mod::{A, test_fun}", ""),
        ),
        quickfix(
            format!("Qualify as `{call_path1}`"),
            create_changes_for_qualify(uri, 14, 4, 5, call_path1),
        ),
        quickfix(
            format!("Qualify as `{call_path2}`"),
            create_changes_for_qualify(uri, 14, 4, 5, call_path2),
        ),
    ];
    let actual = send_request(server, &params).await;
    assert_eq!(expected, actual);
}
//! This file contains the methods used for simulating LSP json-rpc notifications and requests.
//! The methods are used to build and send requests and notifications to the LSP service
//! and assert the expected responses.
use crate::{GotoDefinition, HoverDocumentation, Rename};
use assert_json_diff::assert_json_eq;
use forc_pkg::manifest::GenericManifestFile;
use forc_pkg::manifest::ManifestFile;
use regex::Regex;
use serde_json::json;
use std::{
borrow::Cow,
path::{Path, PathBuf},
};
use sway_lsp::{
handlers::request,
lsp_ext::{ShowAstParams, VisualizeParams},
server_state::ServerState,
};
use sway_utils::PerformanceData;
use tower::{Service, ServiceExt};
use tower_lsp::{
jsonrpc::{Id, Request, Response},
lsp_types::*,
ExitedError, LspService,
};
/// Constructs a json-rpc request with the given method name, params, and id.
pub(crate) fn build_request_with_id(
    method: impl Into<Cow<'static, str>>,
    params: serde_json::Value,
    id: impl Into<Id>,
) -> Request {
    let builder = Request::build(method).params(params).id(id);
    builder.finish()
}
/// Waits until the service is ready, then dispatches `req` to it.
pub(crate) async fn call_request(
    service: &mut LspService<ServerState>,
    req: Request,
) -> Result<Option<Response>, ExitedError> {
    let ready_service = service.ready().await?;
    ready_service.call(req).await
}
/// The client capabilities advertised by the test client: only
/// workspace-folder support, everything else defaulted.
pub(crate) fn client_capabilities() -> ClientCapabilities {
    let workspace = WorkspaceClientCapabilities {
        workspace_folders: Some(true),
        ..Default::default()
    };
    ClientCapabilities {
        workspace: Some(workspace),
        ..Default::default()
    }
}
/// Sends an `initialize` request for the project containing `entry_point` and
/// asserts the server responds with exactly its advertised capabilities.
/// Returns the request so callers can replay or inspect it.
pub(crate) async fn initialize_request(
    service: &mut LspService<ServerState>,
    entry_point: &Path,
) -> Request {
    // Resolve the project root: prefer the directory of a discovered Forc.toml.
    let search_dir = entry_point.parent().unwrap_or_else(|| Path::new(""));
    let project_root_path_for_uri: PathBuf = match ManifestFile::from_dir(search_dir) {
        Ok(manifest_file) => {
            // Found a Forc.toml, use its directory
            manifest_file.dir().to_path_buf()
        }
        Err(_) => {
            // Forc.toml not found, assume search_dir is the intended project root for this test fixture.
            // This is common for minimal test cases that might only have a src/main.sw
            search_dir.to_path_buf()
        }
    };
    let root_uri = Url::from_directory_path(&project_root_path_for_uri).unwrap_or_else(|_| {
        panic!("Failed to create directory URL from project root: {project_root_path_for_uri:?}")
    });
    // Construct the InitializeParams using the defined client_capabilities
    let params = json!({
        "processId": Option::<u32>::None,
        "rootUri": Some(root_uri),
        "capabilities": client_capabilities(),
        "initializationOptions": Option::<serde_json::Value>::None,
    });
    let initialize_request = build_request_with_id("initialize", params, 1); // Renamed for clarity
    let response = call_request(service, initialize_request.clone()).await;
    // The server's reply must carry exactly the capabilities it advertises.
    let expected_initialize_result = json!({ "capabilities": sway_lsp::server_capabilities() });
    let expected_response = Response::from_ok(1.into(), expected_initialize_result);
    assert!(
        response.is_ok(),
        "Initialize request failed: {:?}",
        response.err()
    );
    assert_json_eq!(expected_response, response.ok().unwrap());
    initialize_request
}
/// Sends the `initialized` notification; notifications must get no reply.
pub(crate) async fn initialized_notification(service: &mut LspService<ServerState>) {
    let notification = Request::build("initialized").finish();
    let response = call_request(service, notification).await;
    assert_eq!(response, Ok(None));
}
/// Sends a `shutdown` request, asserts the server acknowledges it with a null
/// result, and returns the request for replay.
pub(crate) async fn shutdown_request(service: &mut LspService<ServerState>) -> Request {
    let shutdown = Request::build("shutdown").id(1).finish();
    let response = call_request(service, shutdown.clone()).await;
    assert_json_eq!(
        Response::from_ok(1.into(), json!(null)),
        response.ok().unwrap()
    );
    shutdown
}
/// Sends the `exit` notification; notifications must get no reply.
pub(crate) async fn exit_notification(service: &mut LspService<ServerState>) {
    let exit = Request::build("exit").finish();
    // `exit` is not used after this call, so no clone is needed
    // (the original cloned it redundantly).
    let response = call_request(service, exit).await;
    assert_eq!(response, Ok(None));
}
/// Sends a `workspace/didChangeWatchedFiles` notification with the given
/// params; notifications must get no reply.
pub(crate) async fn did_change_watched_files_notification(
    service: &mut LspService<ServerState>,
    params: DidChangeWatchedFilesParams,
) {
    let json_params = serde_json::to_value(params).unwrap();
    let notification = Request::build("workspace/didChangeWatchedFiles")
        .params(json_params)
        .finish();
    let response = call_request(service, notification).await;
    assert_eq!(response, Ok(None));
}
/// Sends a `textDocument/didOpen` notification for `uri` with the given file
/// contents; notifications must get no reply.
pub(crate) async fn did_open_notification(
    service: &mut LspService<ServerState>,
    uri: &Url,
    text: &str,
) {
    let params = json!({
        "textDocument": {
            "uri": uri,
            "languageId": "sway",
            "version": 1,
            "text": text,
        },
    });
    let notification = Request::build("textDocument/didOpen")
        .params(params)
        .finish();
    let response = call_request(service, notification).await;
    assert_eq!(response, Ok(None));
}
/// Sends a `textDocument/didChange` notification for `uri`. When `params` is
/// `None`, a default edit (a newline inserted at the start of line 1) is used.
/// Returns the request so callers can replay it.
pub(crate) async fn did_change_request(
    service: &mut LspService<ServerState>,
    uri: &Url,
    version: i32,
    params: Option<DidChangeTextDocumentParams>,
) -> Request {
    let params = params.unwrap_or_else(|| {
        // Default edit: zero-length range at line 1, column 0 (pure insertion).
        create_did_change_params(
            uri,
            version,
            Position {
                line: 1,
                character: 0,
            },
            Position {
                line: 1,
                character: 0,
            },
            0,
        )
    });
    let params: serde_json::value::Value = serde_json::to_value(params).unwrap();
    let did_change = Request::build("textDocument/didChange")
        .params(params)
        .finish();
    let response = call_request(service, did_change.clone()).await;
    // make sure to set is_compiling to true so the wait_for_parsing method can properly synchronize
    service
        .inner()
        .is_compiling
        .store(true, std::sync::atomic::Ordering::SeqCst);
    assert_eq!(response, Ok(None));
    did_change
}
/// Simulates a keypress at the current cursor position
/// 66% chance of enter keypress
/// 33% chance of backspace keypress
pub fn simulate_keypress(
    uri: &Url,
    version: i32,
    cursor_line: &mut u32,
) -> DidChangeTextDocumentParams {
    let press_enter = rand::random::<u64>() % 3 < 2;
    if press_enter {
        // Enter: insert a newline at the (previous) cursor line.
        *cursor_line += 1;
        let at = Position {
            line: *cursor_line - 1,
            character: 0,
        };
        create_did_change_params(uri, version, at, at, 0)
    } else {
        // Backspace: join the cursor line with the next one (never above line 1).
        if *cursor_line > 1 {
            *cursor_line -= 1;
        }
        let start = Position {
            line: *cursor_line,
            character: 0,
        };
        let end = Position {
            line: *cursor_line + 1,
            character: 0,
        };
        create_did_change_params(uri, version, start, end, 1)
    }
}
/// Sends a `sway/show_ast` request and asserts the server reports the AST was
/// written to `<save_path>/<ast_kind>.rs`.
pub(crate) async fn show_ast_request(
    server: &ServerState,
    uri: &Url,
    ast_kind: &str,
    save_path: Option<Url>,
) {
    // The path where the AST will be written to; defaults to "/tmp".
    let save_path = save_path.unwrap_or_else(|| Url::from_file_path(Path::new("/tmp")).unwrap());
    let params = ShowAstParams {
        text_document: TextDocumentIdentifier { uri: uri.clone() },
        ast_kind: ast_kind.to_string(),
        save_path: save_path.clone(),
    };
    let expected = TextDocumentIdentifier {
        uri: Url::parse(&format!("{save_path}/{ast_kind}.rs")).unwrap(),
    };
    let response = request::handle_show_ast(server, &params);
    assert_eq!(expected, response.unwrap().unwrap());
}
/// Sends a `sway/visualize` request and asserts the returned dependency graph
/// matches the expected DOT structure.
pub(crate) async fn visualize_request(server: &ServerState, uri: &Url, graph_kind: &str) {
    let params = VisualizeParams {
        text_document: TextDocumentIdentifier { uri: uri.clone() },
        graph_kind: graph_kind.to_string(),
    };
    let response = request::handle_visualize(server, &params).unwrap().unwrap();
    let re = Regex::new(r#"digraph \{
    0 \[ label = "std" shape = box URL = "vscode://file/[[:ascii:]]+/sway-lib-std/Forc.toml"\]
    1 \[ label = "struct_field_access" shape = box URL = "vscode://file/[[:ascii:]]+/struct_field_access/Forc.toml"\]
    1 -> 0 \[ \]
\}
"#).unwrap();
    // Use `is_match` instead of `find(..).unwrap()`: on a mismatch the old code
    // panicked on `None` with no context, while this reports the actual output.
    assert!(
        re.is_match(response.as_str()),
        "unexpected dependency graph output: {response}"
    );
}
/// Sends a `sway/metrics` request and decodes the response into
/// `(module path, performance data)` pairs.
pub(crate) async fn metrics_request(
    service: &mut LspService<ServerState>,
) -> Vec<(String, PerformanceData)> {
    // The request is consumed by the call; the original cloned it redundantly.
    let request = Request::build("sway/metrics").id(1).finish();
    let result = call_request(service, request).await.unwrap().unwrap();
    // Each array entry is a `[path, metrics]` pair.
    result
        .result()
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|v| {
            let path = v.get(0).unwrap().as_str().unwrap().to_string();
            let metric = serde_json::from_value(v.get(1).unwrap().clone()).unwrap();
            (path, metric)
        })
        .collect()
}
/// Requests the full set of semantic tokens for `uri`, panicking if the server
/// returns anything other than a token list.
pub(crate) async fn get_semantic_tokens_full(server: &ServerState, uri: &Url) -> SemanticTokens {
    let params = SemanticTokensParams {
        text_document: TextDocumentIdentifier { uri: uri.clone() },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    };
    let response = request::handle_semantic_tokens_full(server, params)
        .await
        .unwrap();
    match response {
        Some(SemanticTokensResult::Tokens(tokens)) => tokens,
        _ => panic!("Expected semantic tokens response"),
    }
}
/// Requests document symbols for `uri`, panicking unless the server answers
/// with the nested (hierarchical) symbol representation.
pub(crate) async fn get_document_symbols(server: &ServerState, uri: &Url) -> Vec<DocumentSymbol> {
    let params = DocumentSymbolParams {
        text_document: TextDocumentIdentifier { uri: uri.clone() },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    };
    let response = request::handle_document_symbol(server, params)
        .await
        .unwrap();
    if let Some(DocumentSymbolResponse::Nested(symbols)) = response {
        return symbols;
    }
    panic!("Expected nested document symbols response: {response:#?}");
}
/// Requests semantic tokens for `uri` and asserts at least one token is returned.
pub(crate) async fn semantic_tokens_request(server: &ServerState, uri: &Url) {
    let tokens = get_semantic_tokens_full(server, uri).await;
    assert!(!tokens.data.is_empty());
}
/// Requests document symbols for `uri`, panicking unless the server answers
/// with the nested (hierarchical) symbol representation.
pub(crate) async fn get_nested_document_symbols(
    server: &ServerState,
    uri: &Url,
) -> Vec<DocumentSymbol> {
    let params = DocumentSymbolParams {
        text_document: TextDocumentIdentifier { uri: uri.clone() },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    };
    let response = request::handle_document_symbol(server, params)
        .await
        .unwrap();
    match response {
        Some(DocumentSymbolResponse::Nested(symbols)) => symbols,
        _ => panic!("Expected nested document symbols response"),
    }
}
/// Asserts the expected symbol hierarchy for the doc_comments fixture:
/// an enum with variants, a struct with typed fields, and an impl block whose
/// `main` function nests a local variable.
pub(crate) async fn document_symbols_request(server: &ServerState, uri: &Url) {
    let symbols = get_nested_document_symbols(server, uri).await;
    // Check for enum with its variants
    let enum_symbol = symbols
        .iter()
        .find(|s| s.name == "NumberOrString")
        .expect("Should find NumberOrString enum");
    assert_eq!(enum_symbol.kind, SymbolKind::ENUM);
    let variants = enum_symbol
        .children
        .as_ref()
        .expect("Enum should have variants");
    assert_eq!(variants.len(), 2);
    assert!(variants.iter().any(|v| v.name == "Number"));
    assert!(variants.iter().any(|v| v.name == "String"));
    // Check for struct with its fields
    let struct_symbol = symbols
        .iter()
        .find(|s| s.name == "Data")
        .expect("Should find Data struct");
    assert_eq!(struct_symbol.kind, SymbolKind::STRUCT);
    let fields = struct_symbol
        .children
        .as_ref()
        .expect("Struct should have fields");
    assert_eq!(fields.len(), 2);
    // Field `detail` carries the declared type of each struct field.
    assert!(fields
        .iter()
        .any(|f| f.name == "value" && f.detail.as_deref() == Some("NumberOrString")));
    assert!(fields
        .iter()
        .any(|f| f.name == "address" && f.detail.as_deref() == Some("u64")));
    // Check for impl with nested function and variable
    let impl_symbol = symbols
        .iter()
        .find(|s| s.name == "impl FooABI for Contract")
        .expect("Should find impl block");
    let impl_fns = impl_symbol
        .children
        .as_ref()
        .expect("Impl should have functions");
    let main_fn = impl_fns
        .iter()
        .find(|f| f.name == "main")
        .expect("Should find main function");
    let vars = main_fn
        .children
        .as_ref()
        .expect("Function should have variables");
    assert!(vars
        .iter()
        .any(|v| v.name == "_data" && v.detail.as_deref() == Some("doc_comments::Data")));
}
/// Requests formatting for `uri` (4-space indentation) and asserts the server
/// produces at least one text edit.
pub(crate) async fn format_request(server: &ServerState, uri: &Url) {
    let options = FormattingOptions {
        tab_size: 4,
        insert_spaces: true,
        ..Default::default()
    };
    let params = DocumentFormattingParams {
        text_document: TextDocumentIdentifier { uri: uri.clone() },
        options,
        work_done_progress_params: Default::default(),
    };
    let edits = request::handle_formatting(server, params).await.unwrap();
    assert!(!edits.unwrap().is_empty());
}
/// Requests document highlights at (45, 37) and asserts both the declaration
/// and the usage of the symbol are reported.
pub(crate) async fn highlight_request(server: &ServerState, uri: &Url) {
    let params = DocumentHighlightParams {
        text_document_position_params: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri: uri.clone() },
            position: Position {
                line: 45,
                character: 37,
            },
        },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    };
    // Single-line highlight covering columns [start, end) on `line`.
    let highlight = |line, start, end| DocumentHighlight {
        range: Range {
            start: Position {
                line,
                character: start,
            },
            end: Position {
                line,
                character: end,
            },
        },
        kind: None,
    };
    let expected = vec![highlight(10, 4, 10), highlight(45, 35, 41)];
    let response = request::handle_document_highlight(server, params)
        .await
        .unwrap();
    assert_eq!(expected, response.unwrap());
}
/// Requests all references (excluding the declaration) for the symbol at
/// (15, 22) and compares them against the expected set, order-insensitively.
pub(crate) async fn references_request(server: &ServerState, uri: &Url) {
    let params = ReferenceParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri: uri.clone() },
            position: Position {
                line: 15,
                character: 22,
            },
        },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
        context: ReferenceContext {
            include_declaration: false,
        },
    };
    // Single-line location covering columns [start_char, end_char) on `line`.
    let location = |line, start_char, end_char| Location {
        uri: uri.clone(),
        range: Range {
            start: Position {
                line,
                character: start_char,
            },
            end: Position {
                line,
                character: end_char,
            },
        },
    };
    let mut expected = vec![
        location(12, 7, 11),
        location(15, 21, 25),
        location(15, 14, 18),
        location(13, 13, 17),
        location(3, 5, 9),
        location(14, 8, 12),
    ];
    let mut response = request::handle_references(server, params)
        .await
        .unwrap()
        .unwrap();
    // Sort both sides so the comparison doesn't depend on server ordering.
    response.sort_by(|a, b| a.range.start.cmp(&b.range.start));
    expected.sort_by(|a, b| a.range.start.cmp(&b.range.start));
    assert_eq!(expected, response);
}
/// Requests code lenses for `uri` and returns whatever the server provides.
pub(crate) async fn code_lens_request(server: &ServerState, uri: &Url) -> Option<Vec<CodeLens>> {
    let text_document = TextDocumentIdentifier { uri: uri.clone() };
    let params = CodeLensParams {
        text_document,
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    };
    request::handle_code_lens(server, params).await.unwrap()
}
// pub(crate) async fn completion_request(server: &ServerState, uri: &Url) {
// let params = CompletionParams {
// text_document_position: TextDocumentPositionParams {
// text_document: TextDocumentIdentifier { uri: uri.clone() },
// position: Position {
// line: 19,
// character: 8,
// },
// },
// work_done_progress_params: Default::default(),
// partial_result_params: Default::default(),
// context: Some(CompletionContext {
// trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
// trigger_character: Some(".".to_string()),
// }),
// };
// let res = request::handle_completion(server, params).await.unwrap();
// let expected = CompletionResponse::Array(vec![
// CompletionItem {
// label: "a".to_string(),
// kind: Some(CompletionItemKind::FIELD),
// label_details: Some(CompletionItemLabelDetails {
// detail: None,
// description: Some("bool".to_string()),
// }),
// ..Default::default()
// },
// CompletionItem {
// label: "get(…)".to_string(),
// kind: Some(CompletionItemKind::METHOD),
// label_details: Some(CompletionItemLabelDetails {
// detail: None,
// description: Some("fn(self, MyStruct) -> MyStruct".to_string()),
// }),
// text_edit: Some(CompletionTextEdit::Edit(TextEdit {
// range: Range {
// start: Position {
// line: 19,
// character: 8,
// },
// end: Position {
// line: 19,
// character: 8,
// },
// },
// new_text: "get(foo)".to_string(),
// })),
// ..Default::default()
// },
// ]);
// assert_eq!(expected, res.unwrap());
// }
/// Sends the goto-definition request described by `go_to` and asserts the
/// server resolves it to the expected file path and character range.
pub(crate) async fn definition_check<'a>(server: &ServerState, go_to: &'a GotoDefinition<'a>) {
    let params = GotoDefinitionParams {
        text_document_position_params: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier {
                uri: go_to.req_uri.clone(),
            },
            position: Position {
                line: go_to.req_line,
                character: go_to.req_char,
            },
        },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    };
    let res = request::handle_goto_definition(server, params.clone()).unwrap();
    // Include the failing request in the panic so fixtures are easy to debug.
    let unwrapped_response = res.as_ref().unwrap_or_else(|| {
        panic!(
            "Failed to deserialize response: {:?} input: {:#?}",
            res.clone(),
            params.clone(),
        );
    });
    if let GotoDefinitionResponse::Scalar(response) = unwrapped_response {
        let uri = response.uri.as_str();
        // The expected definition spans a single line.
        let range = json!({
            "end": {
                "character": go_to.def_end_char,
                "line": go_to.def_line,
            },
            "start": {
                "character": go_to.def_start_char,
                "line": go_to.def_line,
            }
        });
        assert_json_eq!(response.range, range);
        // Only the path suffix is checked — the prefix varies per machine.
        assert!(
            uri.ends_with(go_to.def_path),
            "{} doesn't end with {}",
            uri,
            go_to.def_path,
        );
    } else {
        panic!(
            "Expected GotoDefinitionResponse::Scalar with input {:#?}, got {:?}",
            params.clone(),
            res.clone(),
        );
    }
}
/// Re-targets `go_to` at (`req_line`, `req_char`) and re-runs
/// [`definition_check`], letting one fixture cover several request positions.
pub(crate) async fn definition_check_with_req_offset(
    server: &ServerState,
    go_to: &mut GotoDefinition<'_>,
    req_line: u32,
    req_char: u32,
) {
    go_to.req_line = req_line;
    go_to.req_char = req_char;
    definition_check(server, go_to).await;
}
/// Sends a hover request described by `hover_docs` and asserts the returned
/// markup contains every expected documentation snippet.
pub(crate) async fn hover_request<'a>(
    server: &ServerState,
    hover_docs: &'a HoverDocumentation<'a>,
) {
    let params = HoverParams {
        text_document_position_params: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier {
                uri: hover_docs.req_uri.clone(),
            },
            position: Position {
                line: hover_docs.req_line,
                character: hover_docs.req_char,
            },
        },
        work_done_progress_params: Default::default(),
    };
    let res = request::handle_hover(server, params.clone()).unwrap();
    let unwrapped_response = res.as_ref().unwrap_or_else(|| {
        panic!(
            "Failed to deserialize hover: {:?} input: {:#?}",
            res.clone(),
            params.clone(),
        );
    });
    if let HoverContents::Markup(markup_content) = &unwrapped_response.contents {
        hover_docs
            .documentation
            .iter()
            .for_each(|text| assert!(markup_content.value.contains(text)));
    } else {
        // BUG FIX: the arguments were swapped — the request (`params`) is the
        // input and the response (`res`) is what was received, matching the
        // diagnostics printed by `definition_check`.
        panic!(
            "Expected HoverContents::Markup with input {:#?}, got {:?}",
            params.clone(),
            res.clone(),
        );
    }
}
/// Sends a `textDocument/prepareRename` request at the position described by
/// `rename` and returns the server's answer.
pub(crate) async fn prepare_rename_request<'a>(
    server: &ServerState,
    rename: &'a Rename<'a>,
) -> Option<PrepareRenameResponse> {
    let position = Position {
        line: rename.req_line,
        character: rename.req_char,
    };
    let params = TextDocumentPositionParams {
        text_document: TextDocumentIdentifier {
            uri: rename.req_uri.clone(),
        },
        position,
    };
    request::handle_prepare_rename(server, params).unwrap()
}
/// Sends a `textDocument/rename` request renaming the symbol at the position
/// described by `rename` to `rename.new_name`, returning the resulting edit.
pub(crate) async fn rename_request<'a>(
    server: &ServerState,
    rename: &'a Rename<'a>,
) -> WorkspaceEdit {
    let text_document_position = TextDocumentPositionParams {
        text_document: TextDocumentIdentifier {
            uri: rename.req_uri.clone(),
        },
        position: Position {
            line: rename.req_line,
            character: rename.req_char,
        },
    };
    let params = RenameParams {
        text_document_position,
        new_name: rename.new_name.to_string(),
        work_done_progress_params: Default::default(),
    };
    request::handle_rename(server, params).unwrap().unwrap()
}
/// Builds a `didChange` notification that replaces `start..end` in the
/// document identified by `uri` (at `version`) with a single newline.
pub fn create_did_change_params(
    uri: &Url,
    version: i32,
    start: Position,
    end: Position,
    range_length: u32,
) -> DidChangeTextDocumentParams {
    let change = TextDocumentContentChangeEvent {
        range: Some(Range { start, end }),
        range_length: Some(range_length),
        text: "\n".into(),
    };
    DidChangeTextDocumentParams {
        text_document: VersionedTextDocumentIdentifier {
            uri: uri.clone(),
            version,
        },
        content_changes: vec![change],
    }
}
/// Builds a range from column 0 of `start_line` to column 0 of `end_line`.
#[allow(dead_code)]
pub(crate) fn range_from_start_and_end_line(start_line: u32, end_line: u32) -> Range {
    let at_col_zero = |line: u32| Position { line, character: 0 };
    Range {
        start: at_col_zero(start_line),
        end: at_col_zero(end_line),
    }
}
/// Requests inlay hints from the server for `range` in the given document,
/// panicking if the request fails or produces no hints.
pub(crate) async fn get_inlay_hints_for_range(
    server: &ServerState,
    uri: &Url,
    range: Range,
) -> Vec<InlayHint> {
    let text_document = TextDocumentIdentifier { uri: uri.clone() };
    let params = InlayHintParams {
        text_document,
        range,
        work_done_progress_params: Default::default(),
    };
    request::handle_inlay_hints(server, params)
        .await
        .unwrap()
        .unwrap()
}
pub(crate) async fn inlay_hints_request(server: &ServerState, uri: &Url) -> Option<Vec<InlayHint>> {
let range = Range {
start: Position {
line: 25,
character: 0,
},
end: Position {
line: 26,
character: 1,
},
};
let res = get_inlay_hints_for_range(server, uri, range).await;
let expected = vec![
InlayHint {
position: Position {
line: 25,
character: 25,
},
label: InlayHintLabel::String("foo: ".to_string()),
kind: Some(InlayHintKind::PARAMETER),
text_edits: None,
tooltip: None,
padding_left: Some(false),
padding_right: Some(false),
data: None,
},
InlayHint {
position: Position {
line: 25,
character: 28,
},
label: InlayHintLabel::String("bar: ".to_string()),
kind: Some(InlayHintKind::PARAMETER),
text_edits: None,
tooltip: None,
padding_left: Some(false),
padding_right: Some(false),
data: None,
},
InlayHint {
position: Position {
line: 25,
character: 31,
},
label: InlayHintLabel::String("long_argument_name: ".to_string()),
kind: Some(InlayHintKind::PARAMETER),
text_edits: None,
tooltip: None,
padding_left: Some(false),
padding_right: Some(false),
data: None,
},
InlayHint {
position: Position {
line: 25,
character: 10,
},
label: InlayHintLabel::String(": u64".to_string()),
kind: Some(InlayHintKind::TYPE),
text_edits: None,
tooltip: None,
padding_left: Some(false),
padding_right: Some(false),
data: None,
},
];
assert!(
compare_inlay_hint_vecs(&expected, &res),
"InlayHint vectors are not equal.\nExpected:\n{expected:#?}\n\nActual:\n{res:#?}"
);
Some(res)
}
// Field-by-field equality for two inlay hints; needed because `InlayHint`
// does not implement `PartialEq`.
fn compare_inlay_hints(a: &InlayHint, b: &InlayHint) -> bool {
    if a.position != b.position || a.kind != b.kind {
        return false;
    }
    a.text_edits == b.text_edits
        && a.padding_left == b.padding_left
        && a.padding_right == b.padding_right
        && a.data == b.data
        && compare_inlay_hint_labels(&a.label, &b.label)
        && compare_inlay_hint_tooltips(&a.tooltip, &b.tooltip)
}
// Two hint slices are equal when they have the same length and are pairwise equal.
fn compare_inlay_hint_vecs(a: &[InlayHint], b: &[InlayHint]) -> bool {
    if a.len() != b.len() {
        return false;
    }
    a.iter().zip(b.iter()).all(|(x, y)| compare_inlay_hints(x, y))
}
// Labels compare equal only when both are plain strings with identical text;
// any other label shape is treated as unequal.
fn compare_inlay_hint_labels(a: &InlayHintLabel, b: &InlayHintLabel) -> bool {
    if let (InlayHintLabel::String(x), InlayHintLabel::String(y)) = (a, b) {
        x == y
    } else {
        false
    }
}
// Tooltips are equal when both are absent, or both present with the same
// variant and payload; mismatched variants are unequal.
fn compare_inlay_hint_tooltips(a: &Option<InlayHintTooltip>, b: &Option<InlayHintTooltip>) -> bool {
    match (a, b) {
        (None, None) => true,
        (Some(InlayHintTooltip::String(x)), Some(InlayHintTooltip::String(y))) => x == y,
        (Some(InlayHintTooltip::MarkupContent(x)), Some(InlayHintTooltip::MarkupContent(y))) => {
            x.kind == y.kind && x.value == y.value
        }
        _ => false,
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/tests/integration/mod.rs | sway-lsp/tests/integration/mod.rs | pub(crate) mod code_actions;
pub(crate) mod lsp;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/tests/utils/src/lib.rs | sway-lsp/tests/utils/src/lib.rs | use assert_json_diff::assert_json_include;
use futures::StreamExt;
use lsp_types::Url;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use serde_json::Value;
use std::{
env, fs,
io::Read,
path::{Path, PathBuf},
};
use tokio::task::JoinHandle;
use tower_lsp::ClientSocket;
/// Reads the Sway source file at `src_path` and returns its `file://` URL
/// together with the file contents.
///
/// Panics when the file cannot be read or the path cannot form a file URL.
pub fn load_sway_example(src_path: PathBuf) -> (Url, String) {
    // fs::read_to_string sizes the buffer from file metadata and replaces the
    // manual File::open + read_to_string sequence.
    let sway_program = fs::read_to_string(&src_path).unwrap();
    let uri = Url::from_file_path(src_path).unwrap();
    (uri, sway_program)
}
/// Root of the Sway workspace: the parent of the current working directory.
pub fn sway_workspace_dir() -> PathBuf {
    let cwd = env::current_dir().unwrap();
    cwd.parent().unwrap().to_path_buf()
}
/// Workspace-relative path to the in-language test suite.
pub fn in_language_test_dir() -> PathBuf {
    PathBuf::from("test/src/in_language_tests")
}
/// Workspace-relative path to the SDK harness test projects.
pub fn sdk_harness_test_projects_dir() -> PathBuf {
    PathBuf::from("test/src/sdk-harness")
}
/// Workspace-relative path to the e2e "should pass" language tests.
pub fn e2e_language_dir() -> PathBuf {
    PathBuf::from("test/src/e2e_vm_tests/test_programs/should_pass/language")
}
/// Workspace-relative path to all e2e "should pass" test programs.
pub fn e2e_should_pass_dir() -> PathBuf {
    PathBuf::from("test/src/e2e_vm_tests/test_programs/should_pass")
}
/// Workspace-relative path to all e2e "should fail" test programs.
pub fn e2e_should_fail_dir() -> PathBuf {
    PathBuf::from("test/src/e2e_vm_tests/test_programs/should_fail")
}
/// Workspace-relative path to the e2e standard-library tests.
pub fn e2e_stdlib_dir() -> PathBuf {
    PathBuf::from("test/src/e2e_vm_tests/test_programs/should_pass/stdlib")
}
/// Workspace-relative path to the e2e unit tests.
pub fn e2e_unit_dir() -> PathBuf {
    PathBuf::from("test/src/e2e_vm_tests/test_programs/should_pass/unit_tests")
}
/// Absolute path to the `struct_field_access` e2e test project.
pub fn e2e_test_dir() -> PathBuf {
    sway_workspace_dir()
        .join(e2e_language_dir())
        .join("struct_field_access")
}
/// Absolute path to the Sway standard library crate.
pub fn std_lib_dir() -> PathBuf {
    sway_workspace_dir().join("sway-lib-std")
}
/// Absolute path to the `runnables` LSP test fixture.
pub fn runnables_test_dir() -> PathBuf {
    test_fixtures_dir().join("runnables")
}
/// Absolute path to the sway-lsp test fixture directory.
pub fn test_fixtures_dir() -> PathBuf {
    sway_workspace_dir().join("sway-lsp/tests/fixtures")
}
/// Absolute path to the `doc_comments` e2e test project.
pub fn doc_comments_dir() -> PathBuf {
    sway_workspace_dir()
        .join(e2e_language_dir())
        .join("doc_comments")
}
/// Absolute path to the `generic_impl_self` e2e test project.
pub fn generic_impl_self_dir() -> PathBuf {
    sway_workspace_dir()
        .join(e2e_language_dir())
        .join("generic_impl_self")
}
/// Absolute path to the `self_impl_reassignment` e2e test project.
pub fn self_impl_reassignment_dir() -> PathBuf {
    sway_workspace_dir()
        .join(e2e_language_dir())
        .join("self_impl_reassignment")
}
/// Resolves `path` against the Sway workspace root, returning it as a string.
pub fn get_absolute_path(path: &str) -> String {
    sway_workspace_dir().join(path).to_str().unwrap().into()
}
/// Builds a `file://` URL from an absolute filesystem path string.
pub fn get_url(absolute_path: &str) -> Url {
    Url::parse(&format!("file://{}", &absolute_path)).expect("expected URL")
}
/// Reads a JSON fixture file and parses it into a `serde_json::Value`.
pub fn get_fixture(path: PathBuf) -> Value {
    let text = std::fs::read_to_string(path).expect("Failed to read file");
    serde_json::from_str::<Value>(&text).expect("Failed to parse JSON")
}
/// Absolute path to the `storage_variables` example project.
pub fn sway_example_dir() -> PathBuf {
    sway_workspace_dir().join("examples/storage_variables")
}
// Check if the given directory contains `Forc.toml` at its root.
// Unreadable directories yield `false` rather than an error.
pub fn dir_contains_forc_manifest(path: &Path) -> bool {
    fs::read_dir(path)
        .map(|entries| {
            entries.flatten().any(|entry| {
                entry.path().file_name().and_then(|s| s.to_str()) == Some("Forc.toml")
            })
        })
        .unwrap_or(false)
}
/// Spawns a task that consumes exactly `expected_requests.len()` requests from
/// the client socket and asserts each one matches the corresponding expected
/// JSON value (method name plus any fields present in the expectation).
/// Await the returned handle to propagate assertion failures to the test.
pub async fn assert_server_requests(
    socket: ClientSocket,
    expected_requests: Vec<Value>,
) -> JoinHandle<()> {
    tokio::spawn(async move {
        // `take` bounds the stream so the task completes once all expected
        // requests have arrived.
        let request_stream = socket.take(expected_requests.len()).collect::<Vec<_>>();
        let requests = request_stream.await;
        assert_eq!(requests.len(), expected_requests.len());
        for (actual, expected) in requests.iter().zip(expected_requests.iter()) {
            assert_eq!(expected["method"], actual.method());
            // Assert that all other expected fields are present without requiring
            // all actual fields to be present. Specifically we need this for `uri`,
            // which can't be hardcoded in the test.
            assert_json_include!(
                expected: expected,
                actual: serde_json::to_value(actual.clone()).unwrap()
            );
        }
    })
}
/// Sleeps for a random 1–30 ms, with a 20% chance of an additional 100–1200 ms
/// delay, to shake out timing-dependent behavior in concurrency tests.
pub async fn random_delay() {
    let mut rng = SmallRng::from_entropy();
    // Short jitter applied on every call.
    let short_ms = rng.gen_range(1..=30);
    tokio::time::sleep(tokio::time::Duration::from_millis(short_ms)).await;
    // One call in five also waits substantially longer.
    if rng.gen_ratio(2, 10) {
        let long_ms = rng.gen_range(100..=1200);
        tokio::time::sleep(tokio::time::Duration::from_millis(long_ms)).await;
    }
}
/// Installs a panic hook that prints the panic (with backtraces enabled) and
/// then exits the whole process with status 1.
pub fn setup_panic_hook() {
    // Backtraces make the panic reports actionable in CI logs.
    std::env::set_var("RUST_BACKTRACE", "1");
    let default_hook = std::panic::take_hook();
    std::panic::set_hook(Box::new(move |panic_info| {
        // Delegate to the default hook for the actual message, then hard-exit
        // so a panicking helper thread cannot leave the process hanging.
        default_hook(panic_info);
        std::process::exit(1);
    }));
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/benches/bench_main.rs | sway-lsp/benches/bench_main.rs | #![recursion_limit = "256"]
mod lsp_benchmarks;
use codspeed_criterion_compat::criterion_main;
// Use Jemalloc during benchmarks
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
criterion_main! {
lsp_benchmarks::token_map::benches,
lsp_benchmarks::requests::benches,
lsp_benchmarks::compile::benches,
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/benches/lsp_benchmarks/compile.rs | sway-lsp/benches/lsp_benchmarks/compile.rs | use codspeed_criterion_compat::{black_box, criterion_group, Criterion};
use forc_pkg::manifest::{GenericManifestFile, ManifestFile};
use lsp_types::Url;
use parking_lot::RwLock;
use std::sync::Arc;
use sway_core::Engines;
use sway_lsp::core::session;
use tokio::runtime::Runtime;
const NUM_DID_CHANGE_ITERATIONS: usize = 10;
/// Criterion benchmarks covering the LSP compile/traverse pipeline and
/// repeated did-change compilations with caching enabled.
fn benchmarks(c: &mut Criterion) {
    // Compile the fixture project once up front; every benchmark reuses it.
    let (uri, session, state, _, sync) = Runtime::new()
        .unwrap()
        .block_on(async { black_box(super::compile_test_project().await) });
    let build_plan = session
        .build_plan_cache
        .get_or_update(&sync.workspace_manifest_path(), || {
            session::build_plan(&uri)
        })
        .unwrap();
    let mut lsp_mode = Some(sway_core::LspConfig {
        optimized_build: false,
        file_versions: Default::default(),
    });
    // Full compilation from a clean set of engines on every iteration.
    c.bench_function("compile", |b| {
        b.iter(|| {
            let engines = Engines::default();
            let _ = black_box(
                session::compile(&build_plan, &engines, None, lsp_mode.as_ref()).unwrap(),
            );
        })
    });
    // Token/AST traversal measured on a pre-compiled result.
    c.bench_function("traverse", |b| {
        let engines_original = Arc::new(RwLock::new(Engines::default()));
        let engines_clone = engines_original.read().clone();
        let results = black_box(
            session::compile(&build_plan, &engines_clone, None, lsp_mode.as_ref()).unwrap(),
        );
        let member_path = sync.member_path(&uri).unwrap();
        let modified_file = sway_lsp::server_state::modified_file(lsp_mode.as_ref());
        b.iter(|| {
            let _ = black_box(
                session::traverse(
                    member_path.clone(),
                    results.clone(),
                    engines_original.clone(),
                    &engines_clone,
                    &state.token_map,
                    &state.compiled_programs,
                    modified_file,
                )
                .unwrap(),
            );
        })
    });
    // Re-compilation with the optimized (cached) build path, as didChange uses.
    lsp_mode.as_mut().unwrap().optimized_build = true;
    c.bench_function("did_change_with_caching", |b| {
        let engines = Engines::default();
        b.iter(|| {
            for _ in 0..NUM_DID_CHANGE_ITERATIONS {
                let _ = black_box(
                    session::compile(&build_plan, &engines, None, lsp_mode.as_ref()).unwrap(),
                );
            }
        })
    });
    let examples_workspace_dir = super::sway_workspace_dir().join("examples");
    let member_manifests = ManifestFile::from_dir(examples_workspace_dir)
        .unwrap()
        .member_manifests()
        .unwrap();
    // Cost of building and compiling every member of the examples workspace.
    c.bench_function("open_all_example_workspace_members", |b| {
        b.iter(|| {
            let engines = Engines::default();
            for package_manifest in member_manifests.values() {
                let dir = Url::from_file_path(
                    package_manifest
                        .path()
                        .parent()
                        .unwrap()
                        .join("src/main.sw"),
                )
                .unwrap();
                let build_plan = session::build_plan(&dir).unwrap();
                let _ = black_box(
                    session::compile(&build_plan, &engines, None, lsp_mode.as_ref()).unwrap(),
                );
            }
        })
    });
    // Remove the temp dir after the benchmarks are done
    Runtime::new()
        .unwrap()
        .block_on(async { sync.remove_temp_dir() });
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(10)).sample_size(10);
targets = benchmarks
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/benches/lsp_benchmarks/token_map.rs | sway-lsp/benches/lsp_benchmarks/token_map.rs | use codspeed_criterion_compat::{black_box, criterion_group, Criterion};
use lsp_types::Position;
use tokio::runtime::Runtime;
/// Criterion benchmarks for token-map lookup operations, all executed against
/// a single pre-compiled fixture project at a fixed cursor position.
fn benchmarks(c: &mut Criterion) {
    let (uri, _, state, engines, sync) = Runtime::new()
        .unwrap()
        .block_on(async { black_box(super::compile_test_project().await) });
    // A position known to sit on a token in the fixture source.
    let position = Position::new(1716, 24);
    let path = uri.to_file_path().unwrap();
    let program_id = sway_lsp::core::session::program_id_from_path(&path, &engines).unwrap();
    c.bench_function("tokens_for_program", |b| {
        b.iter(|| {
            let _: Vec<_> = state.token_map.tokens_for_program(program_id).collect();
        })
    });
    c.bench_function("tokens_for_file", |b| {
        b.iter(|| {
            let _: Vec<_> = state.token_map.tokens_for_file(&uri).collect();
        })
    });
    c.bench_function("idents_at_position", |b| {
        b.iter(|| {
            state
                .token_map
                .idents_at_position(position, state.token_map.iter())
        })
    });
    c.bench_function("tokens_at_position", |b| {
        b.iter(|| {
            state
                .token_map
                .tokens_at_position(&engines, &uri, position, None)
        })
    });
    c.bench_function("token_at_position", |b| {
        b.iter(|| state.token_map.token_at_position(&uri, position))
    });
    c.bench_function("parent_decl_at_position", |b| {
        b.iter(|| {
            state
                .token_map
                .parent_decl_at_position(&engines, &uri, position)
        })
    });
    // Remove the temp dir after the benchmarks are done
    Runtime::new()
        .unwrap()
        .block_on(async { sync.remove_temp_dir() });
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(10));
targets = benchmarks
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/benches/lsp_benchmarks/mod.rs | sway-lsp/benches/lsp_benchmarks/mod.rs | pub mod compile;
pub mod requests;
pub mod token_map;
use lsp_types::Url;
use std::{path::PathBuf, sync::Arc};
use sway_core::{Engines, LspConfig};
use sway_lsp::{
config::GarbageCollectionConfig,
core::{
session::{self, Session},
sync::SyncWorkspace,
},
server_state::{CompilationContext, ServerState},
};
/// Compiles the benchmark fixture project once and returns the handles the
/// individual benchmarks need: the temp-workspace URI, session, server state,
/// engines, and sync-workspace handle.
pub async fn compile_test_project() -> (Url, Arc<Session>, ServerState, Engines, Arc<SyncWorkspace>)
{
    // Load the test project
    let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap();
    let state = ServerState::default();
    let engines_clone = state.engines.read().clone();
    let session = Arc::new(Session::new());
    // Benchmarks compile against a temp copy of the workspace, mirroring the server.
    let sync = state.get_or_init_sync_workspace(&uri).await.unwrap();
    let temp_uri = sync.workspace_to_temp_url(&uri).unwrap();
    state.documents.handle_open_file(&temp_uri).await;
    let ctx = CompilationContext {
        session: session.clone(),
        sync: sync.clone(),
        token_map: state.token_map.clone(),
        engines: state.engines.clone(),
        compiled_programs: state.compiled_programs.clone(),
        runnables: state.runnables.clone(),
        optimized_build: false,
        file_versions: Default::default(),
        uri: uri.clone(),
        version: None,
        gc_options: GarbageCollectionConfig::default(),
    };
    let lsp_mode = Some(LspConfig {
        optimized_build: ctx.optimized_build,
        file_versions: ctx.file_versions.clone(),
    });
    // Compile the project
    session::parse_project(&temp_uri, &engines_clone, None, &ctx, lsp_mode.as_ref()).unwrap();
    (temp_uri, session, state, engines_clone, sync)
}
/// Root of the Sway workspace: the parent of the current working directory.
pub fn sway_workspace_dir() -> PathBuf {
    let cwd = std::env::current_dir().unwrap();
    cwd.parent().unwrap().to_path_buf()
}
/// Location of the benchmark fixture project within the workspace.
pub fn benchmark_dir() -> PathBuf {
    let mut dir = sway_workspace_dir();
    dir.push("sway-lsp/tests/fixtures/benchmark");
    dir
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-lsp/benches/lsp_benchmarks/requests.rs | sway-lsp/benches/lsp_benchmarks/requests.rs | use codspeed_criterion_compat::{black_box, criterion_group, Criterion};
use lsp_types::{
CompletionResponse, DocumentSymbolResponse, Position, Range, TextDocumentContentChangeEvent,
TextDocumentIdentifier,
};
use sway_lsp::{capabilities, core::session, lsp_ext::OnEnterParams};
use tokio::runtime::Runtime;
/// Criterion benchmarks for the individual LSP request handlers, all executed
/// against a single pre-compiled fixture project.
fn benchmarks(c: &mut Criterion) {
    let (uri, _, state, engines, sync) = Runtime::new()
        .unwrap()
        .block_on(async { black_box(super::compile_test_project().await) });
    let config = sway_lsp::config::Config::default();
    // Position/range chosen to hit interesting tokens in the fixture source.
    let position = Position::new(1717, 24);
    let range = Range::new(Position::new(1628, 0), Position::new(1728, 0));
    c.bench_function("semantic_tokens", |b| {
        b.iter(|| capabilities::semantic_tokens::semantic_tokens_full(&state.token_map, &uri))
    });
    c.bench_function("document_symbol", |b| {
        b.iter(|| {
            session::document_symbols(&uri, &state.token_map, &engines, &state.compiled_programs)
                .map(DocumentSymbolResponse::Nested)
        })
    });
    c.bench_function("completion", |b| {
        // Completion uses its own position: just after a `.` trigger character.
        let position = Position::new(1698, 28);
        b.iter(|| {
            session::completion_items(
                &uri,
                position,
                ".",
                &state.token_map,
                &engines,
                &state.compiled_programs,
            )
            .map(CompletionResponse::Array)
        })
    });
    c.bench_function("hover", |b| {
        b.iter(|| capabilities::hover::hover_data(&state, sync.clone(), &engines, &uri, position))
    });
    c.bench_function("highlight", |b| {
        b.iter(|| {
            capabilities::highlight::get_highlights(&engines, &state.token_map, &uri, position)
        })
    });
    c.bench_function("find_all_references", |b| {
        b.iter(|| session::token_references(&uri, position, &state.token_map, &engines, &sync))
    });
    c.bench_function("goto_definition", |b| {
        b.iter(|| {
            session::token_definition_response(&uri, position, &engines, &state.token_map, &sync)
        })
    });
    c.bench_function("inlay_hints", |b| {
        b.iter(|| {
            capabilities::inlay_hints::inlay_hints(
                &engines,
                &state.token_map,
                &uri,
                &range,
                &config.inlay_hints,
            )
        })
    });
    c.bench_function("prepare_rename", |b| {
        b.iter(|| {
            capabilities::rename::prepare_rename(&engines, &state.token_map, &uri, position, &sync)
        })
    });
    c.bench_function("rename", |b| {
        b.iter(|| {
            capabilities::rename::rename(
                &engines,
                &state.token_map,
                "new_token_name".to_string(),
                &uri,
                position,
                &sync,
            )
        })
    });
    c.bench_function("code_action", |b| {
        let range = Range::new(Position::new(4, 10), Position::new(4, 10));
        b.iter(|| {
            capabilities::code_actions::code_actions(
                &engines,
                &state.token_map,
                &range,
                &uri,
                &uri,
                &vec![],
                &state.compiled_programs,
            )
        })
    });
    c.bench_function("code_lens", |b| {
        b.iter(|| capabilities::code_lens::code_lens(&state.runnables, &uri.clone()))
    });
    c.bench_function("on_enter", |b| {
        // Simulates the user pressing Enter inside a doc comment.
        let params = OnEnterParams {
            text_document: TextDocumentIdentifier::new(uri.clone()),
            content_changes: vec![TextDocumentContentChangeEvent {
                range: Some(Range::new(Position::new(3, 30), Position::new(3, 30))),
                range_length: Some(0),
                text: "\n".to_string(),
            }],
        };
        b.iter(|| {
            capabilities::on_enter::on_enter(&config.on_enter, &state.documents, &uri, &params)
        })
    });
    c.bench_function("format", |b| {
        b.iter(|| capabilities::formatting::format_text(&state.documents, &uri))
    });
    // Remove the temp dir after the benchmarks are done
    Runtime::new()
        .unwrap()
        .block_on(async { sync.remove_temp_dir() });
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(3));
targets = benchmarks
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
/// The three bracket families used to delimit token groups.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
}

impl Delimiter {
    /// The character that opens this delimiter kind.
    pub const fn as_open_char(self) -> char {
        match self {
            Self::Parenthesis => '(',
            Self::Brace => '{',
            Self::Bracket => '[',
        }
    }

    /// The character that closes this delimiter kind.
    pub const fn as_close_char(self) -> char {
        match self {
            Self::Parenthesis => ')',
            Self::Brace => '}',
            Self::Bracket => ']',
        }
    }
}
/// Every single-character punctuation token recognized by the lexer.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PunctKind {
    Semicolon,
    Colon,
    ForwardSlash,
    Comma,
    Star,
    Add,
    Sub,
    LessThan,
    GreaterThan,
    Equals,
    Dot,
    Bang,
    Percent,
    Ampersand,
    Caret,
    Pipe,
    Underscore,
    Sharp,
}

impl PunctKind {
    /// The source character this punctuation kind corresponds to.
    pub fn as_char(&self) -> char {
        match self {
            Self::Semicolon => ';',
            Self::Colon => ':',
            Self::ForwardSlash => '/',
            Self::Comma => ',',
            Self::Star => '*',
            Self::Add => '+',
            Self::Sub => '-',
            Self::LessThan => '<',
            Self::GreaterThan => '>',
            Self::Equals => '=',
            Self::Dot => '.',
            Self::Bang => '!',
            Self::Percent => '%',
            Self::Ampersand => '&',
            Self::Caret => '^',
            Self::Pipe => '|',
            Self::Underscore => '_',
            Self::Sharp => '#',
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/lib.rs | sway-types/src/lib.rs | use fuel_asm::Word;
use fuel_crypto::Hasher;
use fuel_tx::{Bytes32, ContractId};
use serde::{Deserialize, Serialize};
use std::hash::Hash;
use std::path::{Path, PathBuf};
use std::{io, iter, slice};
pub mod constants;
pub mod ident;
pub mod u256;
pub use ident::*;
pub mod integer_bits;
pub mod source_engine;
pub use source_engine::*;
pub mod span;
pub use span::*;
pub mod style;
pub mod ast;
pub type Id = [u8; Bytes32::LEN];
pub type Contract = [u8; ContractId::LEN];
/// A line/column location in a source file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Position {
    pub line: usize,
    pub col: usize,
}
/// Based on `<https://llvm.org/docs/CoverageMappingFormat.html#source-code-range>`
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Range {
    /// Beginning of the code range
    pub start: Position,
    /// End of the code range (inclusive)
    pub end: Position,
}
impl Range {
    /// Returns `true` when the range is not inverted: the start does not come
    /// after the end (same-line ranges require `start.col <= end.col`).
    pub const fn is_valid(&self) -> bool {
        self.start.line < self.end.line
            || self.start.line == self.end.line && self.start.col <= self.end.col
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Instruction {
    /// Relative to the `$is`
    pub pc: Word,
    /// Code range that translates to this point
    pub range: Range,
    /// Exit from the current scope?
    pub exit: bool,
}
impl Instruction {
    /// Serializes the instruction into a fixed 41-byte big-endian layout:
    /// pc (8) | start.line (8) | start.col (8) | end.line (8) | end.col (8) | exit (1).
    pub fn to_bytes(&self) -> [u8; 41] {
        let mut bytes = [0u8; 41];
        // Always convert to `u64` to avoid architectural variants of the bytes representation that
        // could lead to arch-dependent unique IDs
        bytes[..8].copy_from_slice(&(self.pc).to_be_bytes());
        bytes[8..16].copy_from_slice(&(self.range.start.line as u64).to_be_bytes());
        bytes[16..24].copy_from_slice(&(self.range.start.col as u64).to_be_bytes());
        bytes[24..32].copy_from_slice(&(self.range.end.line as u64).to_be_bytes());
        bytes[32..40].copy_from_slice(&(self.range.end.col as u64).to_be_bytes());
        bytes[40] = self.exit as u8;
        bytes
    }
    /// Concatenates the 41-byte encodings of every instruction in `iter`.
    pub fn bytes<'a>(iter: impl Iterator<Item = &'a Self>) -> Vec<u8> {
        // Need to return owned bytes because flatten is not supported by 1.53 for arrays bigger
        // than 32 bytes
        iter.map(Self::to_bytes)
            .fold::<Vec<u8>, _>(vec![], |mut v, b| {
                v.extend(b);
                v
            })
    }
}
/// Identifier of a program (package) within a compilation session.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, PartialOrd, Ord)]
pub struct ProgramId(u16);
impl ProgramId {
    /// Wraps a raw program id.
    pub fn new(id: u16) -> Self {
        Self(id)
    }
}
/// Identifier of a source file, with the owning [ProgramId] packed into the
/// bits above [SourceId::SOURCE_ID_BITS].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, PartialOrd, Ord)]
pub struct SourceId(u32);
impl SourceId {
    // The low 20 bits hold the per-program source index; the remaining upper
    // bits hold the program id.
    const SOURCE_ID_BITS: u32 = 20;
    const SOURCE_ID_MASK: u32 = (1 << Self::SOURCE_ID_BITS) - 1;
    /// Create a combined ID from program and source IDs.
    pub fn new(program_id: u16, source_id: u32) -> Self {
        SourceId(((program_id as u32) << Self::SOURCE_ID_BITS) | source_id)
    }
    /// The [ProgramId] that this [SourceId] was created from.
    pub fn program_id(&self) -> ProgramId {
        ProgramId::new((self.0 >> Self::SOURCE_ID_BITS) as u16)
    }
    /// ID of the source file without the [ProgramId] component.
    pub fn source_id(&self) -> u32 {
        self.0 & Self::SOURCE_ID_MASK
    }
}
/// Wrapper around the absolute path of a Sway source file.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, PartialOrd, Ord)]
pub struct Source {
    /// Absolute path to the source file
    path: PathBuf,
}
impl<T> From<T> for Source
where
    T: Into<PathBuf>,
{
    fn from(path: T) -> Self {
        Self { path: path.into() }
    }
}
impl AsRef<PathBuf> for Source {
    fn as_ref(&self) -> &PathBuf {
        &self.path
    }
}
impl AsRef<Path> for Source {
    fn as_ref(&self) -> &Path {
        self.path.as_ref()
    }
}
impl AsMut<PathBuf> for Source {
    fn as_mut(&mut self) -> &mut PathBuf {
        &mut self.path
    }
}
impl Source {
    /// Iterates over the UTF-8 bytes of the path string, erroring when the
    /// path is not valid UTF-8. Used to fold the source identity into
    /// content hashes (see [Context::id_from_repr]).
    pub fn bytes(&self) -> io::Result<slice::Iter<'_, u8>> {
        Ok(self
            .path
            .as_path()
            .to_str()
            .ok_or_else(|| {
                io::Error::other("Failed to get the string representation of the path!")
            })?
            .as_bytes()
            .iter())
    }
}
/// Contract call stack frame representation
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct CallFrame {
    /// Deterministic representation of the frame
    id: Id,
    /// Contract that contains the bytecodes of this frame. Currently only scripts are supported
    contract: Contract,
    /// Sway source code that compiles to this frame
    source: Source,
    /// Range of code that represents this frame
    range: Range,
    /// Set of instructions that describes this frame
    program: Vec<Instruction>,
}
impl CallFrame {
    /// Validates the source and all ranges, then derives the frame id by
    /// hashing the program bytes, contract id, and source path.
    pub fn new(
        contract: ContractId,
        source: Source,
        range: Range,
        program: Vec<Instruction>,
    ) -> io::Result<Self> {
        Context::validate_source(&source)?;
        Context::validate_range(iter::once(&range).chain(program.iter().map(|p| &p.range)))?;
        let contract = Contract::from(contract);
        let id = Context::id_from_repr(
            Instruction::bytes(program.iter())
                .iter()
                .chain(contract.iter())
                .chain(source.bytes()?),
        );
        Ok(Self {
            id,
            contract,
            source,
            range,
            program,
        })
    }
    /// Deterministic id of this frame.
    pub const fn id(&self) -> &Id {
        &self.id
    }
    /// Source file this frame was compiled from.
    pub const fn source(&self) -> &Source {
        &self.source
    }
    /// Code range covered by this frame.
    pub const fn range(&self) -> &Range {
        &self.range
    }
    /// Instructions belonging to this frame.
    pub fn program(&self) -> &[Instruction] {
        self.program.as_slice()
    }
    /// Contract that owns this frame's bytecode.
    pub fn contract(&self) -> ContractId {
        self.contract.into()
    }
}
/// Transaction script interpreter representation
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct TransactionScript {
    /// Deterministic representation of the script
    id: Id,
    /// Sway source code that compiles to this script
    source: Source,
    /// Range of code that represents this script
    range: Range,
    /// Set of instructions that describes this script
    program: Vec<Instruction>,
}
impl TransactionScript {
    /// Validates the source and all ranges, then derives the script id by
    /// hashing the program bytes and source path.
    pub fn new(source: Source, range: Range, program: Vec<Instruction>) -> io::Result<Self> {
        Context::validate_source(&source)?;
        Context::validate_range(iter::once(&range).chain(program.iter().map(|p| &p.range)))?;
        let id = Context::id_from_repr(
            Instruction::bytes(program.iter())
                .iter()
                .chain(source.bytes()?),
        );
        Ok(Self {
            id,
            source,
            range,
            program,
        })
    }
    /// Deterministic id of this script.
    pub const fn id(&self) -> &Id {
        &self.id
    }
    /// Source file this script was compiled from.
    pub const fn source(&self) -> &Source {
        &self.source
    }
    /// Code range covered by this script.
    pub const fn range(&self) -> &Range {
        &self.range
    }
    /// Instructions belonging to this script.
    pub fn program(&self) -> &[Instruction] {
        self.program.as_slice()
    }
}
// Representation of a debug context to be mapped from a sway source and consumed by the DAP-sway
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Context {
    /// Context backed by a contract call stack frame.
    CallFrame(CallFrame),
    /// Context backed by a transaction script.
    TransactionScript(TransactionScript),
}
impl From<CallFrame> for Context {
    fn from(frame: CallFrame) -> Self {
        Self::CallFrame(frame)
    }
}
impl From<TransactionScript> for Context {
    fn from(script: TransactionScript) -> Self {
        Self::TransactionScript(script)
    }
}
impl Context {
    /// Validates that `path` is an absolute path pointing at an existing
    /// regular file.
    ///
    /// # Errors
    /// `InvalidData` for relative or non-file paths; `NotFound` when the path
    /// does not exist.
    pub fn validate_source<P>(path: P) -> io::Result<()>
    where
        P: AsRef<Path>,
    {
        if !path.as_ref().is_absolute() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "The source path must be absolute!",
            ));
        }
        // Check existence before `is_file` so a missing path reports
        // `NotFound`; previously `is_file()` swallowed missing paths as
        // `InvalidData`, leaving the NotFound branch unreachable.
        if !path.as_ref().exists() {
            return Err(io::Error::new(
                io::ErrorKind::NotFound,
                "The source path must point to an existing file!",
            ));
        }
        if !path.as_ref().is_file() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "The source path must be a valid Sway source file!",
            ));
        }
        Ok(())
    }
    /// Validates that every range produced by the iterator is internally
    /// consistent (see [Range::is_valid]).
    pub fn validate_range<'a>(mut range: impl Iterator<Item = &'a Range>) -> io::Result<()> {
        // Bug fix: fail when *any* range is invalid. The previous condition
        // was inverted (`!range.any(|r| !r.is_valid())`) and returned the
        // "inconsistent" error precisely when every range was valid.
        if range.any(|r| !r.is_valid()) {
            Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "The provided source range is inconsistent!",
            ))
        } else {
            Ok(())
        }
    }
    /// Hashes an arbitrary byte stream into a deterministic [Id].
    pub fn id_from_repr<'a>(bytes: impl Iterator<Item = &'a u8>) -> Id {
        let bytes: Vec<u8> = bytes.copied().collect();
        *Hasher::hash(bytes.as_slice())
    }
    /// Deterministic id of the underlying frame or script.
    pub const fn id(&self) -> &Id {
        match self {
            Self::CallFrame(t) => t.id(),
            Self::TransactionScript(t) => t.id(),
        }
    }
    /// Source file of the underlying frame or script.
    pub const fn source(&self) -> &Source {
        match self {
            Self::CallFrame(t) => t.source(),
            Self::TransactionScript(t) => t.source(),
        }
    }
    /// Code range of the underlying frame or script.
    pub const fn range(&self) -> &Range {
        match self {
            Self::CallFrame(t) => t.range(),
            Self::TransactionScript(t) => t.range(),
        }
    }
    /// Instructions of the underlying frame or script.
    pub fn program(&self) -> &[Instruction] {
        match self {
            Self::CallFrame(t) => t.program(),
            Self::TransactionScript(t) => t.program(),
        }
    }
    /// Contract id when this context is a call frame, `None` for scripts.
    pub fn contract(&self) -> Option<ContractId> {
        match self {
            Self::CallFrame(t) => Some(t.contract()),
            _ => None,
        }
    }
}
pub type FxBuildHasher = std::hash::BuildHasherDefault<rustc_hash::FxHasher>;
pub type FxIndexMap<K, V> = indexmap::IndexMap<K, V, FxBuildHasher>;
pub type FxIndexSet<K> = indexmap::IndexSet<K, FxBuildHasher>;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/integer_bits.rs | sway-types/src/integer_bits.rs | use std::fmt;
#[derive(Eq, PartialEq, Hash, Debug, Clone, Copy, PartialOrd, Ord)]
pub enum IntegerBits {
    Eight,
    Sixteen,
    ThirtyTwo,
    SixtyFour,
    V256,
}
impl IntegerBits {
    /// Returns whether `v` cannot be represented in an integer of `self` bits.
    pub fn would_overflow(&self, v: u64) -> bool {
        // A u64 overflows a narrower width exactly when it exceeds that
        // width's maximum value; 64-bit and 256-bit integers hold any u64.
        match self {
            IntegerBits::Eight => v > u8::MAX as u64,
            IntegerBits::Sixteen => v > u16::MAX as u64,
            IntegerBits::ThirtyTwo => v > u32::MAX as u64,
            IntegerBits::SixtyFour | IntegerBits::V256 => false,
        }
    }
}
#[test]
fn would_overflow_tests() {
    assert!(!IntegerBits::Eight.would_overflow(0));
    assert!(!IntegerBits::Eight.would_overflow(0xFF));
    assert!(IntegerBits::Eight.would_overflow(0x100));
    assert!(!IntegerBits::Sixteen.would_overflow(0xFFFF));
    assert!(IntegerBits::Sixteen.would_overflow(0x10000));
    assert!(!IntegerBits::ThirtyTwo.would_overflow(0xFFFFFFFF));
    assert!(IntegerBits::ThirtyTwo.would_overflow(0x100000000));
    assert!(!IntegerBits::SixtyFour.would_overflow(0xFFFFFFFFFFFFFFFF));
    // Added boundary coverage: zero in the widest fixed width, and V256,
    // which can represent any u64 and so must never report overflow.
    assert!(!IntegerBits::SixtyFour.would_overflow(0));
    assert!(!IntegerBits::V256.would_overflow(u64::MAX));
}
impl fmt::Display for IntegerBits {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use IntegerBits::{Eight, Sixteen, SixtyFour, ThirtyTwo, V256};
let s = match self {
Eight => "eight",
Sixteen => "sixteen",
ThirtyTwo => "thirty two",
SixtyFour => "sixty four",
V256 => "256",
};
write!(f, "{s}")
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/u256.rs | sway-types/src/u256.rs | use num_bigint::{BigUint, ParseBigIntError, TryFromBigIntError};
use num_traits::Zero;
use serde::{Deserialize, Serialize};
use std::ops::{Not, Shl, Shr};
use thiserror::Error;
/// An unsigned 256-bit integer backed by an arbitrary-precision `BigUint`;
/// the `checked_*` operations in `impl U256` enforce the 256-bit range.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Hash, Serialize, Deserialize)]
pub struct U256(BigUint);
impl U256 {
    /// Builds a value from its 32-byte big-endian representation.
    pub fn from_be_bytes(bytes: &[u8; 32]) -> Self {
        let v = BigUint::from_bytes_be(bytes.as_slice());
        Self(v)
    }

    /// Returns the 32-byte big-endian representation, left-padded with zeros.
    ///
    /// # Panics
    ///
    /// Panics if the wrapped value does not fit into 32 bytes. This is
    /// possible because `From<BigUint>` performs no range check.
    pub fn to_be_bytes(&self) -> [u8; 32] {
        let mut v = self.0.to_bytes_be();
        // Left-pad with zeros up to the fixed 32-byte width; underflows
        // (and panics) here if the value is wider than 256 bits.
        let mut bytes = vec![0u8; 32 - v.len()];
        bytes.append(&mut v);
        assert!(bytes.len() == 32);
        bytes.try_into().expect("unexpected vector size")
    }

    /// Addition; `None` if the result exceeds 256 bits.
    pub fn checked_add(&self, other: &U256) -> Option<U256> {
        let r = &self.0 + &other.0;
        (r.bits() <= 256).then_some(Self(r))
    }

    /// Subtraction; `None` on underflow (`self < other`).
    pub fn checked_sub(&self, other: &U256) -> Option<U256> {
        (self.0 >= other.0).then(|| Self(&self.0 - &other.0))
    }

    /// Multiplication; `None` if the result exceeds 256 bits.
    pub fn checked_mul(&self, other: &U256) -> Option<U256> {
        let r = &self.0 * &other.0;
        (r.bits() <= 256).then_some(Self(r))
    }

    /// Division; `None` when `other` is zero.
    pub fn checked_div(&self, other: &U256) -> Option<U256> {
        other.0.is_zero().not().then(|| Self(&self.0 / &other.0))
    }

    /// Logical right shift; shifted-out bits are discarded, so this cannot fail.
    pub fn shr(&self, other: &u64) -> U256 {
        U256((&self.0).shr(other))
    }

    /// Left shift; `None` if the result exceeds 256 bits.
    pub fn checked_shl(&self, other: &u64) -> Option<U256> {
        let r = (&self.0).shl(other);
        (r.bits() <= 256).then_some(Self(r))
    }

    /// Remainder; `None` when `other` is zero.
    pub fn checked_rem(&self, other: &U256) -> Option<U256> {
        // Zero test mirrors `checked_div` for consistency (previously this
        // used an ad-hoc `== BigUint::ZERO` comparison).
        other.0.is_zero().not().then(|| Self(&self.0 % &other.0))
    }

    /// Returns `true` if the value is zero.
    pub fn is_zero(&self) -> bool {
        self.0.is_zero()
    }
}
// NOTE(review): all four impls format via `write!(f, "{…}", self.0)`, which
// builds a fresh formatter, so caller-supplied width/fill/`#` flags (e.g.
// `{:#x}` or `{:>10}`) are not forwarded to the inner `BigUint` — confirm
// this is intended; delegating to `self.0.fmt(f)` would forward them.
impl std::fmt::Display for U256 {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl std::fmt::LowerHex for U256 {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:x}", self.0)
    }
}
impl std::fmt::UpperHex for U256 {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:X}", self.0)
    }
}
impl std::fmt::Binary for U256 {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:b}", self.0)
    }
}
impl From<BigUint> for U256 {
    /// Wraps a `BigUint` without any range check.
    // NOTE(review): values wider than 256 bits are accepted here, and
    // `to_be_bytes` will panic on them — confirm callers guarantee the range.
    fn from(value: BigUint) -> Self {
        Self(value)
    }
}
impl From<u64> for U256 {
    /// Widens a `u64`; always in range.
    fn from(value: u64) -> Self {
        Self(BigUint::from(value))
    }
}
impl TryFrom<U256> for u64 {
    type Error = Error;
    /// Narrows to `u64`, failing with [`Error::TryIntoBigIntError`] when the
    /// value does not fit.
    fn try_from(value: U256) -> Result<Self, Self::Error> {
        value.0.try_into().map_err(Error::TryIntoBigIntError)
    }
}
/// Errors produced when parsing or converting [U256] values.
#[derive(Error, Debug)]
pub enum Error {
    /// A decimal string failed to parse into a big integer.
    #[error("{0}")]
    ParseBigIntError(ParseBigIntError),
    /// The value was too large for the requested primitive type.
    #[error("{0}")]
    TryIntoBigIntError(TryFromBigIntError<BigUint>),
}
impl std::str::FromStr for U256 {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let v = BigUint::from_str(s).map_err(Error::ParseBigIntError)?;
Ok(Self(v))
}
}
/// Bitwise AND of two borrowed values; result cannot exceed either operand's width.
impl<'a> std::ops::BitAnd<&'a U256> for &'a U256 {
    type Output = U256;
    fn bitand(self, rhs: Self) -> Self::Output {
        U256(&self.0 & &rhs.0)
    }
}
/// Bitwise OR of two borrowed values.
impl<'a> std::ops::BitOr<&'a U256> for &'a U256 {
    type Output = U256;
    fn bitor(self, rhs: Self) -> Self::Output {
        U256(&self.0 | &rhs.0)
    }
}
/// Bitwise XOR of two borrowed values.
impl<'a> std::ops::BitXor<&'a U256> for &'a U256 {
    type Output = U256;
    fn bitxor(self, rhs: Self) -> Self::Output {
        U256(&self.0 ^ &rhs.0)
    }
}
/// Remainder of two borrowed values.
// NOTE(review): unlike `checked_rem`, this presumably panics on a zero
// divisor (inherited from `BigUint`'s `Rem`) — confirm callers guard it.
impl<'a> std::ops::Rem<&'a U256> for &'a U256 {
    type Output = U256;
    fn rem(self, rhs: Self) -> Self::Output {
        U256(&self.0 % &rhs.0)
    }
}
impl std::ops::Not for &U256 {
    type Output = U256;
    /// Bitwise complement over the full fixed 32-byte (256-bit) width,
    /// so leading zero bytes are flipped to ones as well.
    fn not(self) -> Self::Output {
        let flipped = self.to_be_bytes().map(|byte| !byte);
        U256(BigUint::from_bytes_be(&flipped))
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/ident.rs | sway-types/src/ident.rs | use crate::{span::Span, Spanned};
use serde::{Deserialize, Serialize};
use std::{
borrow::Cow,
cmp::{Ord, Ordering},
fmt,
hash::{Hash, Hasher},
sync::Arc,
};
/// Implemented by items that expose a primary identifier.
pub trait Named {
    fn name(&self) -> &BaseIdent;
}
/// An identifier backed by the span of source text it was parsed from,
/// optionally carrying an overriding name.
#[derive(Clone, Serialize, Deserialize)]
pub struct BaseIdent {
    /// When set, this string (not the span's text) is the identifier's name.
    name_override_opt: Option<Arc<String>>,
    /// The region of source the identifier was derived from.
    span: Span,
    /// Whether the identifier was written with the `r#` raw prefix.
    is_raw_ident: bool,
}
impl BaseIdent {
    /// The textual name: the override when present, otherwise the span text.
    pub fn as_str(&self) -> &str {
        match &self.name_override_opt {
            Some(name) => name.as_str(),
            None => self.span.as_str(),
        }
    }

    /// Returns the identifier as a string, prefixed with `r#` if it is a raw identifier.
    pub fn as_raw_ident_str(&self) -> Cow<str> {
        if self.is_raw_ident {
            Cow::Owned(format!("r#{}", self.as_str()))
        } else {
            Cow::Borrowed(self.as_str())
        }
    }

    /// Whether the identifier was written with the `r#` prefix.
    pub fn is_raw_ident(&self) -> bool {
        self.is_raw_ident
    }

    /// The overriding name, if one was set at construction time.
    pub fn name_override_opt(&self) -> Option<&str> {
        self.name_override_opt.as_ref().map(|name| name.as_str())
    }

    /// Shared private constructor backing every public `new*` variant.
    fn assemble(name_override_opt: Option<Arc<String>>, span: Span, is_raw_ident: bool) -> Ident {
        Ident {
            name_override_opt,
            span,
            is_raw_ident,
        }
    }

    /// Creates an identifier from `span`, trimming surrounding whitespace.
    pub fn new(span: Span) -> Ident {
        Self::assemble(None, span.trim(), false)
    }

    /// Creates an identifier from `span` without trimming whitespace.
    pub fn new_no_trim(span: Span) -> Ident {
        Self::assemble(None, span, false)
    }

    /// Creates an identifier from a trimmed `span`, recording rawness.
    pub fn new_with_raw(span: Span, is_raw_ident: bool) -> Ident {
        Self::assemble(None, span.trim(), is_raw_ident)
    }

    /// Creates an identifier named `name_override` rather than the span text.
    pub fn new_with_override(name_override: String, span: Span) -> Ident {
        Self::assemble(Some(Arc::new(name_override)), span, false)
    }

    /// Creates an identifier with an explicit name and a dummy span.
    pub fn new_no_span(name: String) -> Ident {
        Self::assemble(Some(Arc::new(name)), Span::dummy(), false)
    }

    /// Placeholder identifier named "foo" with a dummy span.
    pub fn dummy() -> Ident {
        Self::assemble(Some(Arc::new("foo".into())), Span::dummy(), false)
    }
}
/// An [Ident] is an _identifier_ with a corresponding `span` from which it was derived.
/// It relies on a custom implementation of Hash which only looks at its textual name
/// representation, so that namespacing isn't reliant on the span itself, which will
/// often be different.
pub type Ident = BaseIdent;

// Hash only the textual name, so same-named identifiers from different spans
// collide — consistent with `PartialEq` below.
impl Hash for Ident {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}

// Equality, like hashing, is purely textual: spans are ignored.
impl PartialEq for Ident {
    fn eq(&self, other: &Self) -> bool {
        self.as_str() == other.as_str()
    }
}

// Ordering is lexicographic over the textual name.
impl Ord for Ident {
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_str().cmp(other.as_str())
    }
}

impl PartialOrd for Ident {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Eq for Ident {}

impl Spanned for Ident {
    fn span(&self) -> Span {
        self.span.clone()
    }
}

// `Display` and `Debug` render the same thing: the plain name.
impl fmt::Display for Ident {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{}", self.as_str())
    }
}

impl fmt::Debug for Ident {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{}", self.as_str())
    }
}
/// An [IdentUnique] is an _identifier_ with a corresponding `span` from which it was derived.
/// Its hash and equality implementation takes the full span into account, meaning that identifiers
/// are considered unique if they originate from different files.
#[derive(Debug, Clone)]
pub struct IdentUnique(BaseIdent);

impl From<Ident> for IdentUnique {
    fn from(item: Ident) -> Self {
        IdentUnique(item)
    }
}

impl From<&Ident> for IdentUnique {
    fn from(item: &Ident) -> Self {
        IdentUnique(item.clone())
    }
}
impl From<&IdentUnique> for Ident {
fn from(item: &IdentUnique) -> Self {
Ident {
name_override_opt: item.0.name_override_opt().map(|s| Arc::new(s.to_string())),
span: item.0.span(),
is_raw_ident: item.0.is_raw_ident(),
}
}
}
// Expose `Ident`'s read-only API directly on `IdentUnique`.
impl std::ops::Deref for IdentUnique {
    type Target = Ident;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Unlike `Ident`, hashing includes the span, so same-named identifiers from
// different locations hash differently.
impl Hash for IdentUnique {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.span().hash(state);
        self.0.as_str().hash(state);
    }
}

// Equality also requires identical spans, matching the hash above.
impl PartialEq for IdentUnique {
    fn eq(&self, other: &Self) -> bool {
        self.0.as_str() == other.0.as_str() && self.0.span() == other.0.span()
    }
}

impl Eq for IdentUnique {}

impl Spanned for IdentUnique {
    fn span(&self) -> Span {
        self.0.span()
    }
}

impl fmt::Display for IdentUnique {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{}", self.0.as_str())
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/source_engine.rs | sway-types/src/source_engine.rs | use crate::{span, LineCol, ProgramId, SourceId, Span};
use parking_lot::RwLock;
use std::{
collections::{BTreeSet, HashMap},
path::{Path, PathBuf},
str::FromStr,
};
use toml::Table;
/// The Source Engine manages a relationship between file paths and their corresponding
/// integer-based source IDs. Additionally, it maintains the reverse - a map that traces
/// back from a source ID to its original file path. The primary objective of this
/// system is to enable clients that need to reference a file path to do so using an
/// integer-based ID. This numeric representation can be stored more efficiently as
/// a key in a hashmap.
/// The Source Engine is designed to be thread-safe. Its internal structures are
/// secured by the RwLock mechanism. This allows its functions to be invoked using
/// a straightforward non-mutable reference, ensuring safe concurrent access.
#[derive(Debug, Default)]
pub struct SourceEngine {
    // Monotonic counter backing newly allocated `SourceId`s.
    next_source_id: RwLock<u32>,
    // Forward and reverse mappings between file paths and source ids.
    path_to_source_map: RwLock<HashMap<PathBuf, SourceId>>,
    source_to_path_map: RwLock<HashMap<SourceId, PathBuf>>,
    // Cache of file contents keyed by source id.
    source_to_buffer_map: RwLock<HashMap<SourceId, span::Source>>,
    // Monotonic counter backing newly allocated `ProgramId`s.
    next_program_id: RwLock<u16>,
    // Manifest (package) directory -> program id.
    manifest_path_to_program_map: RwLock<HashMap<PathBuf, ProgramId>>,
    /// Stores the package name and version for manifest path,
    /// if available in the Forc.toml file, or the fallback package name
    /// coming from the last part of the manifest path, and an empty version string.
    manifest_path_to_package_info: RwLock<HashMap<PathBuf, (String, String)>>,
    // Program id -> all source ids registered under it.
    module_to_sources_map: RwLock<HashMap<ProgramId, BTreeSet<SourceId>>>,
}

// Manual `Clone` because `RwLock` is not `Clone`: snapshot every field under
// its own short-lived read lock.
impl Clone for SourceEngine {
    fn clone(&self) -> Self {
        SourceEngine {
            next_source_id: RwLock::new(*self.next_source_id.read()),
            path_to_source_map: RwLock::new(self.path_to_source_map.read().clone()),
            source_to_path_map: RwLock::new(self.source_to_path_map.read().clone()),
            source_to_buffer_map: RwLock::new(self.source_to_buffer_map.read().clone()),
            next_program_id: RwLock::new(*self.next_program_id.read()),
            manifest_path_to_program_map: RwLock::new(
                self.manifest_path_to_program_map.read().clone(),
            ),
            manifest_path_to_package_info: RwLock::new(
                self.manifest_path_to_package_info.read().clone(),
            ),
            module_to_sources_map: RwLock::new(self.module_to_sources_map.read().clone()),
        }
    }
}
impl SourceEngine {
    /// Marker used in the paths of compiler-generated pseudo files.
    const AUTOGENERATED_PATH: &'static str = "<autogenerated>";

    /// Returns whether `span` originates from an autogenerated pseudo file,
    /// or `None` if the span carries no source id.
    pub fn is_span_in_autogenerated(&self, span: &crate::Span) -> Option<bool> {
        span.source_id().map(|s| self.is_source_id_autogenerated(s))
    }

    /// Returns whether the path registered for `source_id` marks an
    /// autogenerated pseudo file.
    pub fn is_source_id_autogenerated(&self, source_id: &SourceId) -> bool {
        // Use the shared marker constant instead of duplicating the
        // "<autogenerated>" literal, so the two cannot drift apart.
        self.get_path(source_id)
            .display()
            .to_string()
            .contains(Self::AUTOGENERATED_PATH)
    }

    /// Fetch the cached source buffer for `source_id`, replacing it if the caller
    /// provides newer text so future span lookups observe the latest file contents.
    pub fn get_or_create_source_buffer(
        &self,
        source_id: &SourceId,
        source: span::Source,
    ) -> span::Source {
        let mut map = self.source_to_buffer_map.write();
        if let Some(existing) = map.get_mut(source_id) {
            // Replace the cached source if the contents have changed so that
            // subsequent spans reflect the latest file text.
            if existing.text != source.text {
                *existing = source;
            }
            existing.clone()
        } else {
            map.insert(*source_id, source.clone());
            source
        }
    }

    /// This function retrieves an integer-based source ID for a provided path buffer.
    /// If an ID already exists for the given path, the function will return that
    /// existing ID. If not, a new ID will be created.
    pub fn get_source_id(&self, path: &PathBuf) -> SourceId {
        {
            let source_map = self.path_to_source_map.read();
            if let Some(source_id) = source_map.get(path) {
                return *source_id;
            }
        }
        let program_id = self.get_or_create_program_id_from_manifest_path(path);
        self.get_source_id_with_program_id(path, program_id)
    }

    /// Like [Self::get_source_id], but registers the path under the provided
    /// `program_id` instead of deriving one from the manifest path.
    pub fn get_source_id_with_program_id(&self, path: &PathBuf, program_id: ProgramId) -> SourceId {
        // Fast path: repeat lookups are served under a read lock only.
        {
            let source_map = self.path_to_source_map.read();
            if let Some(source_id) = source_map.get(path) {
                return *source_id;
            }
        }
        // Slow path: take the write lock and re-check. Previously the counter
        // was read *before* it was locked for the increment, so two threads
        // racing past the fast path could both mint the same `SourceId` for
        // different paths. Checking and inserting while holding the write
        // locks makes id allocation and registration atomic.
        let source_id = {
            let mut source_map = self.path_to_source_map.write();
            match source_map.get(path) {
                Some(source_id) => *source_id,
                None => {
                    let mut next_id = self.next_source_id.write();
                    let source_id = SourceId::new(program_id.0, *next_id);
                    *next_id += 1;
                    source_map.insert(path.clone(), source_id);
                    self.source_to_path_map
                        .write()
                        .insert(source_id, path.clone());
                    source_id
                }
            }
        };
        let mut module_map = self.module_to_sources_map.write();
        module_map.entry(program_id).or_default().insert(source_id);
        source_id
    }

    /// Return the associated autogenerated pseudo file for the passed `source_id`.
    /// Example: main.autogenerated.sw for main.sw
    ///
    /// Returns `None`, if `source_id` does not have a valid path.
    pub fn get_associated_autogenerated_source_id(&self, source_id: &SourceId) -> Option<SourceId> {
        let path = self.get_path(source_id);
        let file_name = PathBuf::from_str(path.file_name()?.to_str()?).ok()?;
        // Build "<stem>.<autogenerated>.<ext>" next to the original file.
        let path = path.with_file_name(format!(
            "{}.{}.{}",
            file_name.file_stem()?.to_str()?,
            Self::AUTOGENERATED_PATH,
            file_name.extension()?.to_str()?
        ));
        Some(self.get_source_id_with_program_id(&path, source_id.program_id()))
    }

    /// This function provides the file path corresponding to a specified source ID.
    ///
    /// # Panics
    ///
    /// Panics if `source_id` was not issued by this engine.
    pub fn get_path(&self, source_id: &SourceId) -> PathBuf {
        self.source_to_path_map
            .read()
            .get(source_id)
            .unwrap()
            .clone()
    }

    /// This function provides the [ProgramId] corresponding to a specified manifest file path.
    pub fn get_program_id_from_manifest_path(&self, path: impl AsRef<Path>) -> Option<ProgramId> {
        let manifest_path = sway_utils::find_parent_manifest_dir(&path)
            .unwrap_or_else(|| path.as_ref().to_path_buf());
        self.manifest_path_to_program_map
            .read()
            .get(&manifest_path)
            .copied()
    }

    /// Returns the [ProgramId] for the package containing `path`, allocating a
    /// fresh id when the manifest directory has not been seen before.
    pub fn get_or_create_program_id_from_manifest_path(&self, path: &PathBuf) -> ProgramId {
        let manifest_path = sway_utils::find_parent_manifest_dir(path).unwrap_or(path.clone());
        let mut module_map = self.manifest_path_to_program_map.write();
        *module_map.entry(manifest_path.clone()).or_insert_with(|| {
            // The counter lock nests inside the map's write lock; this is the
            // only place `next_program_id` is touched, so there is no
            // opposite-order acquisition that could deadlock.
            let mut next_id = self.next_program_id.write();
            *next_id += 1;
            ProgramId::new(*next_id)
        })
    }

    /// Returns the [PathBuf] associated with the provided [ProgramId], if it exists in the manifest_path_to_program_map.
    pub fn get_manifest_path_from_program_id(&self, program_id: &ProgramId) -> Option<PathBuf> {
        let path_to_module_map = self.manifest_path_to_program_map.read();
        // Reverse lookup: the map is keyed by path, so scan for the id.
        path_to_module_map
            .iter()
            .find(|(_, &id)| id == *program_id)
            .map(|(path, _)| path.clone())
    }

    /// This function provides the file name (with extension) corresponding to a specified source ID.
    pub fn get_file_name(&self, source_id: &SourceId) -> Option<String> {
        self.get_path(source_id)
            .as_path()
            .file_name()
            .map(|file_name| file_name.to_string_lossy())
            .map(|file_name| file_name.to_string())
    }

    /// All registered file paths, in sorted order.
    pub fn all_files(&self) -> Vec<PathBuf> {
        let s = self.source_to_path_map.read();
        let mut v = s.values().cloned().collect::<Vec<_>>();
        v.sort();
        v
    }

    /// All source ids registered under `program_id`, if any.
    pub fn get_source_ids_from_program_id(
        &self,
        program_id: ProgramId,
    ) -> Option<BTreeSet<SourceId>> {
        let s = self.module_to_sources_map.read();
        s.get(&program_id).cloned()
    }

    // TODO: Do we want to parse Forc.toml files here, within the `SourceEngine`?
    // Currently, we don't have any other option to obtain the original package name and version.
    // But ideally, this and potentially other information should be passed to the compiler
    // from the tooling layer.
    /// Reads the package name and version from `<manifest_path>/Forc.toml`,
    /// falling back to the last path component and an empty version string
    /// when the file is missing or unparsable.
    fn get_package_name_and_version(&self, manifest_path: &Path) -> (String, String) {
        fn get_fallback_package_name_and_version(manifest_path: &Path) -> (String, String) {
            // As a fallback, use the last part of the manifest path as the package name.
            // This should actually never happen, because we should always have a valid
            // Forc.toml file at the manifest path.
            let package_dir_name = manifest_path
                .iter()
                .next_back()
                .map(|p| p.to_string_lossy().to_string())
                .unwrap_or_else(|| "<unknown>".to_string());
            (package_dir_name, String::new())
        }
        fn get_project_field(toml: &Table, field_name: &str) -> Option<String> {
            toml.get("project")
                .and_then(|v| v.get(field_name))
                .and_then(|field| field.as_str())
                .map(|value| value.to_string())
        }
        let forc_toml_path = manifest_path.join("Forc.toml");
        if !forc_toml_path.exists() {
            return get_fallback_package_name_and_version(manifest_path);
        }
        let content = match std::fs::read_to_string(&forc_toml_path) {
            Ok(content) => content,
            Err(_) => return get_fallback_package_name_and_version(manifest_path),
        };
        let toml = match content.parse::<Table>() {
            Ok(toml) => toml,
            Err(_) => return get_fallback_package_name_and_version(manifest_path),
        };
        let package_name = get_project_field(&toml, "name").unwrap_or("<unknown>".to_string());
        let package_version = get_project_field(&toml, "version").unwrap_or_default();
        (package_name, package_version)
    }

    /// Resolves `span` to a [SourceLocation]: package, manifest-relative file
    /// path, and 1-indexed line/column. Returns [SourceLocation::unknown] for
    /// spans without a source id.
    pub fn get_source_location(&self, span: &Span) -> SourceLocation {
        let Some(source_id) = span.source_id() else {
            return SourceLocation::unknown();
        };
        let source_file = self.get_path(source_id);
        // Find the manifest path from the source file path.
        let program_id = self
            .get_program_id_from_manifest_path(&source_file)
            .expect("the `source_file` is retrieved from the `SourceEngine::get_path` function so the manifest path and program id must exist");
        let manifest_path = self.get_manifest_path_from_program_id(&program_id).expect(
            "the `program_id` is retrieved from the `SourceEngine` so the manifest path must exist",
        );
        // TODO: Use HashSet::get_or_insert_with once it gets available (currently experimental).
        // See: https://doc.rust-lang.org/std/collections/struct.HashSet.html#method.get_or_insert_with
        let mut package_infos = self.manifest_path_to_package_info.write();
        let (package_name, package_version) = &package_infos
            .entry(manifest_path.clone())
            .or_insert_with(|| self.get_package_name_and_version(&manifest_path));
        let pkg = if package_version.is_empty() {
            package_name.clone()
        } else {
            format!("{package_name}@{package_version}")
        };
        // Get the relative path of the source file with respect to the manifest path.
        let source_file = source_file
            .strip_prefix(&manifest_path)
            .expect("the `manifest_path` is a parent of the `source_file`")
            .to_string_lossy();
        let source_file = if let Some(source_file_no_leading_slash) = source_file.strip_prefix("/")
        {
            source_file_no_leading_slash.to_string()
        } else {
            source_file.to_string()
        };
        SourceLocation {
            pkg,
            file: source_file,
            loc: span.start_line_col_one_index(),
        }
    }
}
/// A location in a source code file, represented by a package name, relative file path,
/// and line/column within the file.
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct SourceLocation {
    /// The name and the version of the package that contains the source file,
    /// in the format "name@version".
    ///
    /// E.g., "my_lib@0.1.0".
    ///
    /// The version is optional and may be omitted. E.g., "my_lib".
    pub pkg: String,
    /// The path to the source file, relative to the package root.
    ///
    /// E.g., "src/main.sw".
    pub file: String,
    /// 1-indexed line/column of the location within the file.
    pub loc: LineCol,
}
impl SourceLocation {
    /// Creates a new unknown `SourceLocation` instance, which has
    /// package and file set to "<unknown>".
    pub fn unknown() -> Self {
        Self {
            pkg: "<unknown>".to_string(),
            file: "<unknown>".to_string(),
            loc: LineCol::default(),
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/span.rs | sway-types/src/span.rs | use crate::SourceId;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use std::{
cmp,
fmt::{self, Display},
hash::Hash,
sync::Arc,
};
// Shared placeholder span over an empty, file-less source; cloned by
// `Span::dummy` and compared against in `Span::is_dummy`.
lazy_static! {
    static ref DUMMY_SPAN: Span = Span::new(
        Source {
            text: Arc::from(""),
            line_starts: Arc::new(vec![])
        },
        0,
        0,
        None
    )
    .unwrap();
}

// remote="Self" is a serde pattern for post-deserialization code.
// See https://github.com/serde-rs/serde/issues/1118#issuecomment-1320706758
/// Source text together with a derived table of line-start byte offsets.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(transparent, remote = "Self")]
pub struct Source {
    pub text: Arc<str>,
    /// Derived from `text`, therefore not serialized (rebuilt on deserialize).
    #[serde(skip)]
    pub line_starts: Arc<Vec<usize>>,
}

// Delegate to the serde-derived "remote" implementation.
impl serde::Serialize for Source {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        Self::serialize(self, serializer)
    }
}

// Deserialize via the derived impl, then rebuild the skipped `line_starts`
// table from the text.
impl<'de> serde::Deserialize<'de> for Source {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let mut src = Self::deserialize(deserializer)?;
        src.line_starts = Self::calc_line_starts(&src.text);
        Ok(src)
    }
}
impl Source {
    /// Records the byte offset of the start of every line in `text`.
    /// Index 0 is always present, so line `i` begins at `line_starts[i]`.
    fn calc_line_starts(text: &str) -> Arc<Vec<usize>> {
        // Rough preallocation guess: about one line per 80 bytes of text.
        let mut lines_starts = Vec::with_capacity(text.len() / 80);
        lines_starts.push(0);
        for (idx, c) in text.char_indices() {
            if c == '\n' {
                lines_starts.push(idx + c.len_utf8())
            }
        }
        Arc::new(lines_starts)
    }
    /// Creates a `Source`, computing the line-start table eagerly.
    pub fn new(text: &str) -> Self {
        Self {
            text: Arc::from(text),
            line_starts: Self::calc_line_starts(text),
        }
    }
    /// Both lines and columns start at index 0
    // Out-of-range positions and empty sources map to (0, 0). The column is a
    // byte offset from the line start, not a character count.
    pub fn line_col_zero_index(&self, position: usize) -> LineCol {
        if position > self.text.len() || self.text.is_empty() {
            LineCol { line: 0, col: 0 }
        } else {
            // Binary search the line-start table: an exact hit means `position`
            // is the first byte of that line; otherwise the containing line is
            // the one starting at the previous entry.
            let (line, line_start) = match self.line_starts.binary_search(&position) {
                Ok(line) => (line, self.line_starts.get(line)),
                Err(0) => (0, None),
                Err(line) => (line - 1, self.line_starts.get(line - 1)),
            };
            line_start.map_or(LineCol { line: 0, col: 0 }, |line_start| LineCol {
                line,
                col: position - line_start,
            })
        }
    }
    /// Both lines and columns start at index 1
    pub fn line_col_one_index(&self, position: usize) -> LineCol {
        let LineCol { line, col } = self.line_col_zero_index(position);
        LineCol {
            line: line + 1,
            col: col + 1,
        }
    }
}
impl From<&str> for Source {
    /// Convenience conversion; equivalent to [Source::new].
    fn from(value: &str) -> Self {
        Self::new(value)
    }
}
/// Represents a span of the source code in a specific file.
#[derive(Clone, Serialize, Deserialize)]
pub struct Span {
    /// The original source code.
    src: Source,
    /// The byte position in the string of the start of the span.
    start: usize,
    /// The byte position in the string of the end of the span.
    end: usize,
    /// The source ID of the file from which this span originated.
    source_id: Option<SourceId>,
}

// Spans are only comparable when they share the same underlying source text
// (checked by `Arc` pointer identity); otherwise there is no meaningful order.
impl PartialOrd for Span {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        if !Arc::ptr_eq(&self.src.text, &other.src.text) {
            None
        } else {
            // Order by start position first, then by end position.
            match self.start.partial_cmp(&other.start) {
                Some(core::cmp::Ordering::Equal) => self.end.partial_cmp(&other.end),
                ord => ord,
            }
        }
    }
}

// Hash and equality deliberately ignore the source text and use only the byte
// offsets plus the source id.
impl Hash for Span {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.start.hash(state);
        self.end.hash(state);
        self.source_id.hash(state);
    }
}

// NOTE(review): `PartialOrd` keys on text pointer identity while `PartialEq`
// keys on `source_id` — confirm the two cannot disagree for spans of the same
// file whose text was loaded into two different `Arc`s.
impl PartialEq for Span {
    fn eq(&self, other: &Self) -> bool {
        self.start == other.start && self.end == other.end && self.source_id == other.source_id
    }
}

impl Eq for Span {}
impl From<Span> for std::ops::Range<usize> {
    /// Converts a [Span] into its raw byte range, discarding the source info.
    fn from(value: Span) -> Self {
        value.start..value.end
    }
}
impl Span {
    /// Returns a clone of the shared placeholder span (empty text, no file).
    pub fn dummy() -> Span {
        DUMMY_SPAN.clone()
    }

    /// Creates a span over `src[start..end]`.
    ///
    /// Returns `None` if `start..end` is not a valid char-boundary range of
    /// the source text.
    pub fn new(src: Source, start: usize, end: usize, source: Option<SourceId>) -> Option<Span> {
        // Validate bounds and char boundaries before storing the range.
        let _ = src.text.get(start..end)?;
        Some(Span {
            src,
            start,
            end,
            source_id: source,
        })
    }

    /// Creates an empty [Span], means a span whose [Span::start] and [Span::end] are the same.
    /// The resulting empty [Span] will point to the start of the provided `span` and
    /// be in the same file.
    pub fn empty_at_start(span: &Span) -> Span {
        Span::new(
            span.src().clone(),
            span.start(),
            span.start(),
            span.source_id().copied(),
        )
        .expect("the existing `span` is a valid `Span`")
    }

    /// Creates an empty [Span], means a span whose [Span::start] and [Span::end] are the same.
    /// The resulting empty [Span] will point to the end of the provided `span` and
    /// be in the same file.
    pub fn empty_at_end(span: &Span) -> Span {
        Span::new(
            span.src().clone(),
            span.end(),
            span.end(),
            span.source_id().copied(),
        )
        .expect("the existing `span` is a valid `Span`")
    }

    /// Creates a file-less span covering the whole of `source`.
    pub fn from_string(source: String) -> Span {
        let len = source.len();
        Span::new(Source::new(&source), 0, len, None).unwrap()
    }

    /// The underlying source this span points into.
    pub fn src(&self) -> &Source {
        &self.src
    }

    /// The id of the file this span originated from, if any.
    pub fn source_id(&self) -> Option<&SourceId> {
        self.source_id.as_ref()
    }

    /// Start byte offset (inclusive).
    pub fn start(&self) -> usize {
        self.start
    }

    /// End byte offset (exclusive).
    pub fn end(&self) -> usize {
        self.end
    }

    /// Both lines and columns start at index 1
    pub fn start_line_col_one_index(&self) -> LineCol {
        self.src.line_col_one_index(self.start)
    }

    /// Both lines and columns start at index 1
    pub fn end_line_col_one_index(&self) -> LineCol {
        self.src.line_col_one_index(self.end)
    }

    /// Returns an empty [Span] that points to the start of `self`.
    pub fn start_span(&self) -> Span {
        Self::empty_at_start(self)
    }

    /// Returns an empty [Span] that points to the end of `self`.
    pub fn end_span(&self) -> Span {
        Self::empty_at_end(self)
    }

    /// The covered text as an owned `String`.
    pub fn str(self) -> String {
        self.as_str().to_owned()
    }

    /// The covered text as a borrowed slice.
    pub fn as_str(&self) -> &str {
        &self.src.text[self.start..self.end]
    }

    /// The entire source text this span points into.
    pub fn input(&self) -> &str {
        &self.src.text
    }

    /// Returns this span shrunk to exclude leading/trailing whitespace.
    pub fn trim(self) -> Span {
        // Compute both deltas from a single borrow of the span text instead
        // of re-slicing it four times.
        let (start_delta, end_delta) = {
            let text = self.as_str();
            (
                text.len() - text.trim_start().len(),
                text.len() - text.trim_end().len(),
            )
        };
        Span {
            src: self.src,
            start: self.start + start_delta,
            end: self.end - end_delta,
            source_id: self.source_id,
        }
    }

    /// Creates a new span that points to very next char of the current span.
    ///
    /// ```ignore
    ///  let
    ///     ^ <- span returned
    ///  ^^^ <- original span
    /// ```
    pub fn next_char_utf8(&self) -> Option<Span> {
        let char = self.src.text[self.end..].chars().next()?;
        Some(Span {
            src: self.src.clone(),
            source_id: self.source_id,
            start: self.end,
            end: self.end + char.len_utf8(),
        })
    }

    /// This panics if the spans are not from the same file. This should
    /// only be used on spans that are actually next to each other.
    pub fn join(s1: Span, s2: &Span) -> Span {
        assert!(
            Arc::ptr_eq(&s1.src.text, &s2.src.text) && s1.source_id == s2.source_id,
            "Spans from different files cannot be joined.",
        );
        Span {
            src: s1.src,
            start: cmp::min(s1.start, s2.start),
            end: cmp::max(s1.end, s2.end),
            source_id: s1.source_id,
        }
    }

    /// Joins all spans into one; returns a dummy span for an empty iterator.
    /// Panics (via [Span::join]) if the spans come from different files.
    pub fn join_all(spans: impl IntoIterator<Item = Span>) -> Span {
        spans
            .into_iter()
            .reduce(|s1: Span, s2: Span| Span::join(s1, &s2))
            .unwrap_or_else(Span::dummy)
    }

    /// Returns the line and column start and end using index 1.
    pub fn line_col_one_index(&self) -> LineColRange {
        LineColRange {
            start: self.start_line_col_one_index(),
            end: self.end_line_col_one_index(),
        }
    }

    /// Whether this span equals the shared placeholder span.
    pub fn is_dummy(&self) -> bool {
        self.eq(&DUMMY_SPAN)
    }

    /// Whether this span covers no text.
    pub fn is_empty(&self) -> bool {
        self.start == self.end
    }

    /// Returns true if `self` contains `other`.
    pub fn contains(&self, other: &Span) -> bool {
        Arc::ptr_eq(&self.src.text, &other.src.text)
            && self.source_id == other.source_id
            && self.start <= other.start
            && self.end >= other.end
    }

    /// Returns a subset of this span until the first occurrence of the passed text.
    ///
    /// Returns `None` if `needle` does not occur within this span.
    pub fn subset_first_of(&self, needle: &str) -> Option<Span> {
        let text = &self.src().text;
        // Search only within this span. Previously the search ran from
        // `self.start` to the end of the file, so a needle occurring after
        // `self.end` produced a "subset" extending past this span.
        let needle_offset = text[self.start..self.end].find(needle)?;
        Span::new(
            self.src().clone(),
            self.start,
            self.start + needle_offset,
            self.source_id().cloned(),
        )
    }
}
impl fmt::Debug for Span {
    // Full debug output, including the text the span resolves to.
    #[cfg(not(feature = "no-span-debug"))]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("Span")
            .field("src (ptr)", &self.src.text.as_ptr())
            .field("source_id", &self.source_id)
            .field("start", &self.start)
            .field("end", &self.end)
            .field("as_str()", &self.as_str())
            .finish()
    }
    // With the `no-span-debug` feature, spans print as nothing — presumably
    // to keep large debug dumps compact/stable; confirm before relying on it.
    #[cfg(feature = "no-span-debug")]
    fn fmt(&self, _fmt: &mut fmt::Formatter) -> fmt::Result {
        Ok(())
    }
}

/// Implemented by anything that can report the source span it covers.
pub trait Spanned {
    fn span(&self) -> Span;
}

impl<T: Spanned> Spanned for Box<T> {
    fn span(&self) -> Span {
        (**self).span()
    }
}

/// A line/column pair; whether indices are 0- or 1-based is determined by the
/// function that produced the value.
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub struct LineCol {
    pub line: usize,
    pub col: usize,
}

/// Start and end positions of a span in line/column form.
pub struct LineColRange {
    pub start: LineCol,
    pub end: LineCol,
}
impl Display for LineColRange {
    /// Renders as `(line a:b, line c:d)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "({}, {})", self.start, self.end)
    }
}

impl Display for LineCol {
    /// Renders as `line <line>:<col>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "line {}:{}", self.line, self.col)
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/style.rs | sway-types/src/style.rs | /// Find the first index in the string which separates a lowercase character from an uppercase
/// character. Used for splitting words in a CamelCase style identifier.
fn find_camel_case_word_boundary(name: &str) -> Option<usize> {
let mut previous_char_was_lowercase = false;
for (index, c) in name.char_indices() {
if c.is_uppercase() && previous_char_was_lowercase {
return Some(index);
}
previous_char_was_lowercase = c.is_lowercase();
}
None
}
/// Split a CamelCase style identifier into words.
fn camel_case_split_words(mut name: &str) -> impl Iterator<Item = &str> {
    std::iter::from_fn(move || {
        if name.is_empty() {
            None
        } else {
            // Take everything up to the next boundary (or the rest of the
            // string) as one word, then continue from there.
            let split = find_camel_case_word_boundary(name).unwrap_or(name.len());
            let (word, rest) = name.split_at(split);
            name = rest;
            Some(word)
        }
    })
}
/// Split a string of unknown style into words.
/// Underscores act as separators first; each piece is then further split at
/// CamelCase boundaries.
fn split_words(name: &str) -> impl Iterator<Item = &str> {
    name.split('_').flat_map(camel_case_split_words)
}
/// Detect whether a name is written in snake_case.
/// Leading underscores are ignored.
pub fn is_snake_case(name: &str) -> bool {
    let body = name.trim_start_matches('_');
    // Reject interior double underscores and any uppercase character.
    !(body.contains("__") || body.chars().any(char::is_uppercase))
}
/// Detect whether a name is written in SCREAMING_SNAKE_CASE.
/// Leading underscores are ignored.
pub fn is_screaming_snake_case(name: &str) -> bool {
    let body = name.trim_start_matches('_');
    // Reject interior double underscores and any lowercase character.
    !(body.contains("__") || body.chars().any(char::is_lowercase))
}
/// Detect whether a name is written in UpperCamelCase.
/// Leading underscores are ignored.
pub fn is_upper_camel_case(name: &str) -> bool {
    let body = name.trim_start_matches('_');
    // Reject interior underscores and a lowercase first character.
    !(body.contains('_') || body.starts_with(char::is_lowercase))
}
/// Convert an identifier into snake_case. This is a best-guess at what the name would look
/// like if it were expressed in the correct style.
pub fn to_snake_case(name: &str) -> String {
    // Preserve any leading underscores verbatim.
    let boundary = name.find(|c| c != '_').unwrap_or(name.len());
    let (prefix, body) = name.split_at(boundary);
    let mut out = String::with_capacity(name.len());
    out.push_str(prefix);
    // Lowercase every word and join the words with single underscores.
    for (i, word) in split_words(body).enumerate() {
        if i > 0 {
            out.push('_');
        }
        out.extend(word.chars().flat_map(char::to_lowercase));
    }
    out
}
/// Convert a name into SCREAMING_SNAKE_CASE. This is a best-guess at what the name
/// would look like if it were expressed in the correct style.
pub fn to_screaming_snake_case(name: &str) -> String {
    // Preserve any leading underscores verbatim.
    let boundary = name.find(|c| c != '_').unwrap_or(name.len());
    let (prefix, body) = name.split_at(boundary);
    let mut out = String::with_capacity(name.len());
    out.push_str(prefix);
    // Uppercase every word and join the words with single underscores.
    for (i, word) in split_words(body).enumerate() {
        if i > 0 {
            out.push('_');
        }
        out.extend(word.chars().flat_map(char::to_uppercase));
    }
    out
}
/// Convert an identifier into UpperCamelCase. This is a best-guess at what the identifier would
/// look like if it were expressed in the correct style.
pub fn to_upper_camel_case(name: &str) -> String {
    // Preserve any leading underscores verbatim.
    let boundary = name.find(|c| c != '_').unwrap_or(name.len());
    let (prefix, body) = name.split_at(boundary);
    let mut out = String::with_capacity(name.len());
    out.push_str(prefix);
    // Capitalize the first character of each word, lowercase the rest,
    // and concatenate without separators.
    for word in split_words(body) {
        let mut chars = word.chars();
        if let Some(first) = chars.next() {
            out.extend(first.to_uppercase());
            out.extend(chars.flat_map(char::to_lowercase));
        }
    }
    out
}
// Unit tests for the case-detection and case-conversion helpers above.
#[cfg(test)]
mod test {
    use super::*;
    // Each detector must accept exactly its own style. Note that all-caps
    // single-word identifiers (e.g. `HELLO`) are intentionally accepted by
    // both the SCREAMING_SNAKE_CASE and the UpperCamelCase detectors, and
    // that mixed styles are rejected by all three.
    #[test]
    fn detect_styles() {
        let snake_case_idents = [
            "hello",
            "__hello",
            "blah32",
            "some_words_here",
            "___some_words_here",
        ];
        let screaming_snake_case_idents = ["SOME_WORDS_HERE", "___SOME_WORDS_HERE"];
        let upper_camel_case_idents = [
            "Hello",
            "__Hello",
            "Blah32",
            "SomeWordsHere",
            "___SomeWordsHere",
        ];
        let screaming_snake_case_or_upper_camel_case_idents = ["HELLO", "__HELLO", "BLAH32"];
        let styleless_idents = ["Mix_Of_Things", "__Mix_Of_Things", "FooBar_123"];
        for ident in &snake_case_idents {
            assert!(is_snake_case(ident));
            assert!(!is_screaming_snake_case(ident));
            assert!(!is_upper_camel_case(ident));
        }
        for ident in &screaming_snake_case_idents {
            assert!(!is_snake_case(ident));
            assert!(is_screaming_snake_case(ident));
            assert!(!is_upper_camel_case(ident));
        }
        for ident in &upper_camel_case_idents {
            assert!(!is_snake_case(ident));
            assert!(!is_screaming_snake_case(ident));
            assert!(is_upper_camel_case(ident));
        }
        for ident in &screaming_snake_case_or_upper_camel_case_idents {
            assert!(!is_snake_case(ident));
            assert!(is_screaming_snake_case(ident));
            assert!(is_upper_camel_case(ident));
        }
        for ident in &styleless_idents {
            assert!(!is_snake_case(ident));
            assert!(!is_screaming_snake_case(ident));
            assert!(!is_upper_camel_case(ident));
        }
    }
    // Conversion from every supported style (and from style-less mixes) into
    // snake_case; leading underscores are preserved.
    #[test]
    fn convert_to_snake_case() {
        assert_eq!("hello", to_snake_case("HELLO"));
        assert_eq!("___hello", to_snake_case("___HELLO"));
        assert_eq!("blah32", to_snake_case("BLAH32"));
        assert_eq!("some_words_here", to_snake_case("SOME_WORDS_HERE"));
        assert_eq!("___some_words_here", to_snake_case("___SOME_WORDS_HERE"));
        assert_eq!("hello", to_snake_case("Hello"));
        assert_eq!("___hello", to_snake_case("___Hello"));
        assert_eq!("blah32", to_snake_case("Blah32"));
        assert_eq!("some_words_here", to_snake_case("SomeWordsHere"));
        assert_eq!("___some_words_here", to_snake_case("___SomeWordsHere"));
        assert_eq!("some_words_here", to_snake_case("someWordsHere"));
        assert_eq!("___some_words_here", to_snake_case("___someWordsHere"));
        assert_eq!("mix_of_things", to_snake_case("Mix_Of_Things"));
        assert_eq!("__mix_of_things", to_snake_case("__Mix_Of_Things"));
        assert_eq!("foo_bar_123", to_snake_case("FooBar_123"));
    }
    // Conversion into SCREAMING_SNAKE_CASE; leading underscores are preserved.
    #[test]
    fn convert_to_screaming_snake_case() {
        assert_eq!("HELLO", to_screaming_snake_case("hello"));
        assert_eq!("___HELLO", to_screaming_snake_case("___hello"));
        assert_eq!("BLAH32", to_screaming_snake_case("blah32"));
        assert_eq!(
            "SOME_WORDS_HERE",
            to_screaming_snake_case("some_words_here")
        );
        assert_eq!(
            "___SOME_WORDS_HERE",
            to_screaming_snake_case("___some_words_here")
        );
        assert_eq!("HELLO", to_screaming_snake_case("Hello"));
        assert_eq!("___HELLO", to_screaming_snake_case("___Hello"));
        assert_eq!("BLAH32", to_screaming_snake_case("Blah32"));
        assert_eq!("SOME_WORDS_HERE", to_screaming_snake_case("SomeWordsHere"));
        assert_eq!(
            "___SOME_WORDS_HERE",
            to_screaming_snake_case("___SomeWordsHere")
        );
        assert_eq!("SOME_WORDS_HERE", to_screaming_snake_case("someWordsHere"));
        assert_eq!(
            "___SOME_WORDS_HERE",
            to_screaming_snake_case("___someWordsHere")
        );
        assert_eq!("MIX_OF_THINGS", to_screaming_snake_case("Mix_Of_Things"));
        assert_eq!(
            "__MIX_OF_THINGS",
            to_screaming_snake_case("__Mix_Of_Things")
        );
        assert_eq!("FOO_BAR_123", to_screaming_snake_case("FooBar_123"));
    }
    // Conversion into UpperCamelCase; leading underscores are preserved and
    // interior underscores are dropped.
    #[test]
    fn convert_to_upper_camel_case() {
        assert_eq!("Hello", to_upper_camel_case("hello"));
        assert_eq!("___Hello", to_upper_camel_case("___hello"));
        assert_eq!("Blah32", to_upper_camel_case("blah32"));
        assert_eq!("SomeWordsHere", to_upper_camel_case("some_words_here"));
        assert_eq!(
            "___SomeWordsHere",
            to_upper_camel_case("___some_words_here")
        );
        assert_eq!("Hello", to_upper_camel_case("HELLO"));
        assert_eq!("___Hello", to_upper_camel_case("___HELLO"));
        assert_eq!("Blah32", to_upper_camel_case("BLAH32"));
        assert_eq!("SomeWordsHere", to_upper_camel_case("SOME_WORDS_HERE"));
        assert_eq!(
            "___SomeWordsHere",
            to_upper_camel_case("___SOME_WORDS_HERE")
        );
        assert_eq!("SomeWordsHere", to_upper_camel_case("someWordsHere"));
        assert_eq!("___SomeWordsHere", to_upper_camel_case("___someWordsHere"));
        assert_eq!("MixOfThings", to_upper_camel_case("Mix_Of_Things"));
        assert_eq!("__MixOfThings", to_upper_camel_case("__Mix_Of_Things"));
        assert_eq!("FooBar123", to_upper_camel_case("FooBar_123"));
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-types/src/constants.rs | sway-types/src/constants.rs | //! Configurable yet non-changing constants for the compiler.
/// The default extension of [LANGUAGE_NAME] files, e.g. `main.sw`.
pub const DEFAULT_FILE_EXTENSION: &str = "sw";
/// After a large language name change PR, we decided to make this configurable. :)
pub const LANGUAGE_NAME: &str = "Sway";
/// The size, in bytes, of a single word in the FuelVM.
pub const VM_WORD_SIZE: u64 = 8;
/// Name of the `gas` contract call parameter.
pub const CONTRACT_CALL_GAS_PARAMETER_NAME: &str = "gas";
/// Name of the `coins` contract call parameter.
pub const CONTRACT_CALL_COINS_PARAMETER_NAME: &str = "coins";
/// Default value of the `coins` contract call parameter when not provided.
pub const CONTRACT_CALL_COINS_PARAMETER_DEFAULT_VALUE: u64 = 0;
/// Name of the `asset_id` contract call parameter.
pub const CONTRACT_CALL_ASSET_ID_PARAMETER_NAME: &str = "asset_id";
/// Default value (all zeros) of the `asset_id` contract call parameter when not provided.
pub const CONTRACT_CALL_ASSET_ID_PARAMETER_DEFAULT_VALUE: [u8; 32] = [0; 32];
/// The default entry point for scripts and predicates.
pub const DEFAULT_ENTRY_POINT_FN_NAME: &str = "main";
/// Name of the standard library package.
pub const STD: &str = "std";
/// Name of the prelude module.
pub const PRELUDE: &str = "prelude";
// NOTE(review): presumably the name of the compiler-provided contract id
// constant — confirm against usage sites.
pub const CONTRACT_ID: &str = "CONTRACT_ID";
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/reduced_std_libs.rs | test/src/reduced_std_libs.rs | //! This module contains functions for creating reduced versions of the `std` library.
use anyhow::{bail, Context, Ok, Result};
use core::result::Result::Ok as CoreOk;
use std::{
fs,
path::{Path, PathBuf},
};
/// Name of the directory that contains the reduced versions of the `std` library.
pub(crate) const REDUCED_STD_LIBS_DIR_NAME: &str = "reduced_std_libs";
/// Name of the per-library config file that lists the modules to include.
const REDUCED_LIB_CONFIG_FILE_NAME: &str = "reduced_lib.config";
/// Creates the reduced versions of the `std` libraries found in
/// [REDUCED_STD_LIBS_DIR_NAME]. The modules that go into each reduced
/// library are listed in its [REDUCED_LIB_CONFIG_FILE_NAME] file.
pub fn create() -> Result<()> {
    let manifest = env!("CARGO_MANIFEST_DIR");
    let std_lib_src_dir = format!("{}/../sway-lib-std/src", manifest);
    let reduced_libs_dir = format!("{}/src/e2e_vm_tests/{}", manifest, REDUCED_STD_LIBS_DIR_NAME);
    create_reduced_std_libs(&std_lib_src_dir, &reduced_libs_dir)
        .context("Cannot create reduced versions of the Sway Standard Library.")?;
    Ok(())
}
fn create_reduced_std_libs(std_lib_src_dir: &str, reduced_libs_dir: &str) -> Result<()> {
let std_lib_src_dir = Path::new(std_lib_src_dir);
let reduced_libs_dir = Path::new(reduced_libs_dir);
for reduced_lib_dir in get_reduced_libs(reduced_libs_dir)? {
let reduced_lib_config = reduced_lib_dir.join("reduced_lib.config");
if !reduced_lib_config.exists() {
bail!(format!("The config file \"{REDUCED_LIB_CONFIG_FILE_NAME}\" cannot be found for the reduced standard library \"{}\".\nThe config file must be at this location: {}",
reduced_lib_dir.components().next_back().unwrap().as_os_str().to_string_lossy(),
reduced_lib_config.as_os_str().to_string_lossy()
));
}
let modules = get_modules_from_config(&reduced_lib_config)?;
for module in modules {
let std_lib_module_path = std_lib_src_dir.join(&module);
let reduced_lib_module_path = reduced_lib_dir.join("src").join(&module);
copy_module(&std_lib_module_path, &reduced_lib_module_path)?;
}
}
Ok(())
}
/// Returns the path of every directory directly under `reduced_libs_dir`;
/// each such directory holds one reduced library.
fn get_reduced_libs(reduced_libs_dir: &Path) -> Result<Vec<PathBuf>> {
    let mut libs = Vec::new();
    for dir_entry in fs::read_dir(reduced_libs_dir)?.flatten() {
        if dir_entry.metadata()?.is_dir() {
            libs.push(dir_entry.path())
        }
    }
    Ok(libs)
}
/// Reads the reduced-library config file; every line of the file names one
/// module to include.
fn get_modules_from_config(config_file: &Path) -> Result<Vec<String>> {
    let content = fs::read_to_string(config_file)?;
    let modules = content.lines().map(str::to_string).collect();
    Ok(modules)
}
/// Copies the module file `from` to `to`, but only when the destination is
/// missing or older than the source (a timestamp-based incremental copy).
fn copy_module(from: &Path, to: &Path) -> Result<()> {
    let from_metadata = match fs::metadata(from) {
        CoreOk(metadata) => metadata,
        Err(err) => bail!("Cannot get metadata for module file {from:#?}: {err}"),
    };
    let should_copy = match fs::metadata(to) {
        // Destination exists: copy only if the source is strictly newer.
        CoreOk(to_metadata) => {
            let destination_time = to_metadata.modified()?;
            let source_time = from_metadata.modified()?;
            source_time > destination_time
        }
        // Destination file doesn't exist, copy always.
        Err(_) => true,
    };
    if !should_copy {
        return Ok(());
    }
    fs::create_dir_all(to.parent().unwrap())?;
    if let Err(err) = fs::copy(from, to) {
        bail!("Cannot copy module {from:#?}: {err}")
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/main.rs | test/src/main.rs | mod e2e_vm_tests;
mod ir_generation;
mod reduced_std_libs;
mod snapshot;
mod test_consistency;
use anyhow::Result;
use clap::Parser;
use forc::cli::shared::{IrCliOpt, PrintAsmCliOpt};
use forc_test::GasCostsSource;
use forc_tracing::init_tracing_subscriber;
use fuel_vm::prelude::GasCostsValues;
use std::str::FromStr;
use sway_core::{BuildTarget, IrCli, PrintAsm};
use tracing::Instrument;
// Command-line interface of the test runner. The `///` doc comments below are
// user-visible: clap renders them as `--help` text, so they are kept verbatim.
#[derive(Parser)]
struct Cli {
    /// Only run tests matching this regex
    #[arg(value_name = "REGEX")]
    include: Option<regex::Regex>,
    /// Exclude tests matching this regex
    #[arg(long, short, value_name = "REGEX")]
    exclude: Option<regex::Regex>,
    /// Skip all tests until a test matches this regex
    #[arg(long, value_name = "REGEX")]
    skip_until: Option<regex::Regex>,
    /// Only run tests with ABI JSON output validation
    #[arg(long, visible_alias = "abi")]
    abi_only: bool,
    /// Only run tests with no `std` dependencies
    #[arg(long, visible_alias = "no_std")]
    no_std_only: bool,
    /// Only run tests that deploy contracts
    #[arg(long, visible_alias = "contract")]
    contract_only: bool,
    /// Only run tests that run "forc test"
    #[arg(long, visible_alias = "forc-test")]
    forc_test_only: bool,
    /// Only run the first test
    #[arg(long, visible_alias = "first")]
    first_only: bool,
    /// Only run the tests that emit performance data (gas usages and bytecode sizes)
    #[arg(long)]
    perf_only: bool,
    /// Print out warnings, errors, and output of print options
    ///
    /// This option is ignored if tests are run in parallel.
    #[arg(long, env = "SWAY_TEST_VERBOSE")]
    verbose: bool,
    /// Compile Sway code in release mode
    #[arg(long, short)]
    release: bool,
    /// Intended for use in CI to ensure test lock files are up to date
    #[arg(long)]
    locked: bool,
    /// Build target
    #[arg(long, visible_alias = "target")]
    build_target: Option<String>,
    /// Update all output files
    #[arg(long)]
    update_output_files: bool,
    /// Print out the specified IR (separate options with comma), if the verbose option is on
    ///
    /// This option is ignored if tests are run in parallel.
    #[arg(long, num_args(1..=18), value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
    print_ir: Option<Vec<String>>,
    /// Verify the generated Sway IR (Intermediate Representation).
    #[arg(long, value_parser = clap::builder::PossibleValuesParser::new(IrCliOpt::cli_options()))]
    verify_ir: Option<Vec<String>>,
    /// Print out the specified ASM (separate options with comma), if the verbose option is on
    ///
    /// This option is ignored if tests are run in parallel.
    #[arg(long, num_args(1..=5), value_parser = clap::builder::PossibleValuesParser::new(&PrintAsmCliOpt::CLI_OPTIONS))]
    print_asm: Option<Vec<String>>,
    /// Print out the final bytecode, if the verbose option is on
    ///
    /// This option is ignored if tests are run in parallel.
    #[arg(long)]
    print_bytecode: bool,
    // Experimental feature flags are defined in `sway_features` and flattened
    // into this CLI.
    #[command(flatten)]
    experimental: sway_features::CliFields,
    /// Only run tests of a particular kind
    #[arg(long, short, num_args(1..=4), value_parser = clap::builder::PossibleValuesParser::new(&TestKindOpt::CLI_OPTIONS))]
    kind: Option<Vec<String>>,
    /// Run only the exact test provided by an absolute path to a `test.toml` or `test.<feature>.toml` file
    ///
    /// This flag is used internally for parallel test execution, and is not intended for general use.
    #[arg(long, hide = true)]
    exact: Option<String>,
    /// Run tests sequentially (not in parallel)
    #[arg(long, short)]
    sequential: bool,
    /// Write compilation output (e.g., bytecode, ABI JSON, storage slots JSON, etc.) to the filesystem
    ///
    /// This is primarily useful for troubleshooting test failures.
    /// Output files are written to the `out` directory within each test's directory.
    ///
    /// This option is ignored if tests are run in parallel.
    #[arg(long)]
    write_output: bool,
    /// Write performance data (gas usages and bytecode sizes) to the filesystem
    ///
    /// Output files are written to the `test/perf_out` directory.
    #[arg(long)]
    perf: bool,
    /// Source of the gas costs values used to calculate gas costs of
    /// unit tests and scripts executions.
    ///
    /// If not provided, a built-in set of gas costs values will be used.
    /// These are the gas costs values of the Fuel mainnet as of time of
    /// the release of the `forc` version being used.
    ///
    /// The mainnet and testnet options will fetch the current gas costs values from
    /// their respective networks.
    ///
    /// Alternatively, the gas costs values can be specified as a file path
    /// to a local JSON file containing the gas costs values.
    ///
    /// This option is ignored if tests are run in parallel.
    ///
    /// [possible values: built-in, mainnet, testnet, <FILE_PATH>]
    #[clap(long)]
    pub gas_costs: Option<GasCostsSource>,
}
/// Selects which kinds of tests to run.
#[derive(Default, Debug, Clone)]
pub struct TestKind {
    /// End-to-end VM tests.
    pub e2e: bool,
    /// IR generation tests.
    pub ir: bool,
    /// Snapshot tests.
    pub snapshot: bool,
}
impl TestKind {
    /// Enables every test kind.
    fn all() -> Self {
        Self {
            e2e: true,
            ir: true,
            snapshot: true,
        }
    }
}
/// Newtype that parses the `--kind` CLI values into a [TestKind].
pub struct TestKindOpt(pub TestKind);
impl TestKindOpt {
    // The accepted `--kind` values.
    const E2E: &'static str = "e2e";
    const IR: &'static str = "ir";
    const SNAPSHOT: &'static str = "snapshot";
    /// Shorthand that enables all test kinds at once.
    const ALL: &'static str = "all";
    /// All possible `--kind` values, used to configure clap's value parser.
    pub const CLI_OPTIONS: [&'static str; 4] = [Self::E2E, Self::IR, Self::SNAPSHOT, Self::ALL];
}
impl From<&Vec<String>> for TestKindOpt {
    /// Parses the `--kind` CLI values into a [TestKind]; `all` wins over any
    /// individual selection.
    fn from(value: &Vec<String>) -> Self {
        let has = |opt: &str| value.iter().any(|val| val.as_str() == opt);
        if has(Self::ALL) {
            return Self(TestKind::all());
        }
        Self(TestKind {
            e2e: has(Self::E2E),
            ir: has(Self::IR),
            snapshot: has(Self::SNAPSHOT),
        })
    }
}
/// Test-selection filters, assembled in `main` from the CLI flags.
#[derive(Debug, Clone)]
pub struct FilterConfig {
    /// Only run tests matching this regex.
    pub include: Option<regex::Regex>,
    /// Exclude tests matching this regex.
    pub exclude: Option<regex::Regex>,
    /// Skip all tests until a test matches this regex.
    pub skip_until: Option<regex::Regex>,
    /// Only run tests with ABI JSON output validation.
    pub abi_only: bool,
    /// Only run tests with no `std` dependencies.
    pub no_std_only: bool,
    /// Only run tests that deploy contracts.
    pub contract_only: bool,
    /// Only run the first test.
    pub first_only: bool,
    /// Only run tests that run "forc test".
    pub forc_test_only: bool,
    /// Only run tests that emit performance data.
    pub perf_only: bool,
}
/// Options controlling how tests are built and executed,
/// assembled in `main` from the CLI flags.
#[derive(Debug, Clone)]
pub struct RunConfig {
    /// The target to build for.
    pub build_target: BuildTarget,
    /// Require lock files to be up to date.
    pub locked: bool,
    /// Print warnings, errors, and output of print options.
    pub verbose: bool,
    /// Compile Sway code in release mode.
    pub release: bool,
    /// Update all output files.
    pub update_output_files: bool,
    /// Which IR stages to print (only effective when verbose).
    pub print_ir: IrCli,
    /// Which IR stages to verify.
    pub verify_ir: IrCli,
    /// Which ASM stages to print (only effective when verbose).
    pub print_asm: PrintAsm,
    /// Print the final bytecode (only effective when verbose).
    pub print_bytecode: bool,
    /// Experimental feature flags forwarded to the compiler.
    pub experimental: sway_features::CliFields,
    /// Write compilation output to the filesystem.
    pub write_output: bool,
    /// Write performance data to the filesystem.
    pub perf: bool,
    /// Gas costs values used for execution cost calculation.
    pub gas_costs_values: GasCostsValues,
}
/// Which test kinds to run and whether to run them sequentially.
#[derive(Debug, Clone)]
pub struct RunKindConfig {
    /// The kinds of tests to run.
    pub kind: TestKind,
    /// Run tests sequentially instead of in parallel.
    pub sequential: bool,
}
// We want to use the "current_thread" flavor because running
// Tokio runtime on another thread brings only overhead with
// no benefits, especially when running tests in parallel.
#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<()> {
    init_tracing_subscriber(Default::default());
    let cli = Cli::parse();
    // An unrecognized `--build-target` is a hard error.
    let build_target = match cli.build_target {
        Some(target) => match BuildTarget::from_str(target.as_str()) {
            Ok(target) => target,
            _ => panic!("Unexpected build target: {}", target),
        },
        None => BuildTarget::default(),
    };
    // The hidden `--exact` flag runs a single test; it is used internally for
    // parallel test execution and bypasses all filters and consistency checks.
    if let Some(exact) = &cli.exact {
        if !std::fs::exists(exact).unwrap_or(false) {
            panic!("The --exact test path does not exist: {exact}\nThe --exact path must be an absolute path to an existing `test.toml` or `test.<feature>.toml` file");
        }
        let run_config = RunConfig {
            // Take over options that are supported when running tests in parallel.
            locked: cli.locked,
            release: cli.release,
            build_target,
            experimental: cli.experimental,
            update_output_files: cli.update_output_files,
            verify_ir: cli
                .verify_ir
                .as_ref()
                .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
            perf: cli.perf,
            // Always use the built-in gas costs values when running tests in parallel.
            gas_costs_values: GasCostsValues::default(),
            // Ignore options that are not supported when running tests in parallel.
            print_ir: IrCli::none(),
            print_asm: PrintAsm::none(),
            print_bytecode: false,
            write_output: false,
            verbose: false,
        };
        e2e_vm_tests::run_exact(exact, &run_config).await?;
        return Ok(());
    }
    // `--kind` defaults to all kinds when not provided.
    let run_kind_config = RunKindConfig {
        kind: cli
            .kind
            .as_ref()
            .map_or(TestKind::all(), |opts| TestKindOpt::from(opts).0),
        sequential: cli.sequential,
    };
    let filter_config = FilterConfig {
        include: cli.include.clone(),
        exclude: cli.exclude,
        skip_until: cli.skip_until,
        abi_only: cli.abi_only,
        no_std_only: cli.no_std_only,
        contract_only: cli.contract_only,
        forc_test_only: cli.forc_test_only,
        first_only: cli.first_only,
        perf_only: cli.perf_only,
    };
    let run_config = RunConfig {
        locked: cli.locked,
        verbose: cli.verbose,
        release: cli.release,
        build_target,
        experimental: cli.experimental,
        update_output_files: cli.update_output_files,
        print_ir: cli
            .print_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        verify_ir: cli
            .verify_ir
            .as_ref()
            .map_or(IrCli::default(), |opts| IrCliOpt::from(opts).0),
        print_asm: cli
            .print_asm
            .as_ref()
            .map_or(PrintAsm::default(), |opts| PrintAsmCliOpt::from(opts).0),
        print_bytecode: cli.print_bytecode,
        write_output: cli.write_output,
        perf: cli.perf,
        gas_costs_values: cli.gas_costs.unwrap_or_default().provide_gas_costs()?,
    };
    // Check that the tests are consistent
    test_consistency::check()?;
    // Create reduced versions of the `std` library
    reduced_std_libs::create()?;
    // Run E2E tests
    if run_kind_config.kind.e2e {
        if run_kind_config.sequential {
            e2e_vm_tests::run_sequentially(&filter_config, &run_config)
                .instrument(tracing::trace_span!("E2E"))
                .await?;
        } else {
            e2e_vm_tests::run_in_parallel(&filter_config, &run_config)
                .instrument(tracing::trace_span!("E2E"))
                .await?;
        }
    }
    // Run IR tests. `--first-only` applies to E2E tests only, so IR and
    // snapshot tests are skipped when it is set.
    if run_kind_config.kind.ir && !filter_config.first_only {
        println!("\n");
        ir_generation::run(filter_config.include.as_ref(), cli.verbose, &run_config)
            .instrument(tracing::trace_span!("IR"))
            .await?;
    }
    // Run snapshot tests
    if run_kind_config.kind.snapshot && !filter_config.first_only {
        println!("\n");
        snapshot::run(filter_config.include.as_ref())
            .instrument(tracing::trace_span!("SNAPSHOT"))
            .await?;
    }
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/test_consistency.rs | test/src/test_consistency.rs | //! This module contains checks that ensure consistency of the tests.
use anyhow::{anyhow, bail, Context, Ok, Result};
use std::path::{Path, PathBuf};
use toml::{Table, Value};
use crate::reduced_std_libs::REDUCED_STD_LIBS_DIR_NAME;
/// Runs all test-consistency checks over the tests in `test/src`.
pub(crate) fn check() -> Result<()> {
    let all_tests_dir = PathBuf::from(format!("{}/src", env!("CARGO_MANIFEST_DIR")));
    check_test_forc_tomls(&all_tests_dir)?;
    check_redundant_gitignore_files(&all_tests_dir)?;
    Ok(())
}
fn check_redundant_gitignore_files(all_tests_dir: &Path) -> Result<()> {
let mut gitignores = vec![];
find_gitignores(&PathBuf::from(all_tests_dir), &mut gitignores);
return if gitignores.is_empty() {
Ok(())
} else {
let mut gitignores = gitignores
.iter()
.map(|file| file.to_string_lossy().to_string())
.collect::<Vec<_>>();
gitignores.sort();
Err(anyhow!("Redundant .gitignore files.\nTo fix the error, delete these redundant .gitignore files:\n{}", gitignores.join("\n")))
};
fn find_gitignores(path: &Path, gitignores: &mut Vec<PathBuf>) {
const IN_LANGUAGE_TESTS_GITIGNORE: &str = "in_language_tests/.gitignore";
if path.is_dir() {
for entry in std::fs::read_dir(path).unwrap() {
let entry = entry.unwrap().path();
let entry_name = entry.to_str().unwrap();
if entry_name.contains(REDUCED_STD_LIBS_DIR_NAME)
|| entry_name.contains(IN_LANGUAGE_TESTS_GITIGNORE)
{
continue;
}
find_gitignores(&entry, gitignores);
}
} else if path.is_file()
&& path
.file_name()
.map(|f| f.eq_ignore_ascii_case(".gitignore"))
.unwrap_or(false)
{
gitignores.push(path.to_path_buf());
}
}
}
/// Checks that every Forc.toml file has the authors, license,
/// and the name property properly set and that the std library
/// is properly imported.
fn check_test_forc_tomls(all_tests_dir: &Path) -> Result<()> {
    let mut forc_tomls = vec![];
    find_test_forc_tomls(&PathBuf::from(all_tests_dir), &mut forc_tomls);
    for forc_toml_path in forc_tomls {
        let forc_toml_file_name = forc_toml_path.as_os_str().to_string_lossy();
        let content = std::fs::read_to_string(&forc_toml_path).unwrap();
        let toml = content.parse::<Table>().unwrap();
        // Skip over workspace configs. We want to test only project configs.
        if content.starts_with("[workspace]") {
            continue;
        }
        // The project is expected to be named after its containing directory.
        let project_name = forc_toml_path
            .parent()
            .unwrap()
            .file_name()
            .unwrap()
            .to_string_lossy();
        check_test_forc_toml(&content, &toml, &project_name)
            .context(format!("Invalid test Forc.toml: {forc_toml_file_name}"))?;
    }
    return Ok(());
    // Runs all individual checks on a single Forc.toml and joins their error
    // messages into one error, so all problems are reported at once.
    fn check_test_forc_toml(content: &str, toml: &Table, project_name: &str) -> Result<()> {
        let mut errors = vec![];
        if let Some(error) = check_project_authors_field(toml).err() {
            errors.push(error.to_string());
        }
        if let Some(error) = check_project_license_field(toml).err() {
            errors.push(error.to_string());
        }
        if let Some(error) = check_project_name_field(content, toml, project_name).err() {
            errors.push(error.to_string());
        }
        if let Some(error) = check_std_dependency(toml).err() {
            errors.push(error.to_string());
        }
        if !errors.is_empty() {
            bail!("{}", errors.join("\n"));
        }
        Ok(())
    }
    // Ensures tests never rely on the implicit `std` dependency, but always
    // import the in-repo standard library explicitly.
    fn check_std_dependency(toml: &Table) -> Result<()> {
        return if let Some(implicit_std) = toml.get("project").and_then(|v| v.get("implicit-std")) {
            match implicit_std.as_bool() {
                Some(true) => Err(anyhow!("'project.implicit-std' cannot be set to `true` in tests. To import the standard library use, e.g., `std = {{ path = \"../<...>/sway-lib-std\" }}`.")),
                Some(false) => Ok(()),
                _ => Err(anyhow!("'project.implicit-std' value is invalid: `{implicit_std}`. In tests 'project.implicit-std' must be set to `false`.")),
            }
        } else {
            // 'implicit-std' is not explicitly set.
            // Since the default value for 'implicit-std' is `true` we either need to
            // set it explicitly to `false`, or explicitly import local std library.
            let imported_std = imported_lib(toml, "std");
            if imported_std.is_none() {
                Err(anyhow!("`implicit-std` is `true` by default. Either explicitly set it to `false`, or import the standard library by using, e.g., `std = {{ path = \"../<...>/sway-lib-std\" }}`."))
            } else {
                // At least one of the libraries is imported.
                // Let's check that the local library is imported.
                // NOTE(review): `imported_std.is_some()` is always true on this
                // branch, so the inner `if` is redundant.
                if imported_std.is_some() {
                    check_local_import(imported_std.unwrap(), "std")?;
                }
                Ok(())
            }
        };
        // Finds the dependency named `lib_name` in the dependencies or patch
        // sections, also recognizing renamed dependencies via their `package` field.
        fn imported_lib<'a>(toml: &'a Table, lib_name: &str) -> Option<&'a Value> {
            if let Some(import) = toml.get("dependencies").and_then(|v| v.get(lib_name)) {
                Some(import)
            // We don't have the straight import with the lib name but it can be
            // that we use some other name. In that case, the 'package' field still
            // must have the lib name. Let's try to find that one.
            } else if let Some(import) = toml
                .get("dependencies")
                .and_then(|v| v.as_table())
                .and_then(|t| {
                    t.values().find(|v| {
                        v.get("package")
                            .is_some_and(|p| p.as_str().unwrap_or_default() == lib_name)
                    })
                })
            {
                Some(import)
            } else {
                // We can have the library defined in the patch section.
                toml.get("patch")
                    .and_then(|patch| patch.get("https://github.com/fuellabs/sway"))
                    .and_then(|v| v.get(lib_name))
            }
        }
        // Verifies the dependency points into this repository via a relative
        // `path`, either the library itself or a reduced std library.
        fn check_local_import(lib: &Value, lib_name: &str) -> Result<()> {
            let is_local_import = lib
                .get("path")
                .map(|path| {
                    let path = path.as_str().unwrap_or_default();
                    path.ends_with(&format!("../../sway-lib-{lib_name}"))
                        || path
                            .contains(&format!("../../{REDUCED_STD_LIBS_DIR_NAME}/sway-lib-std-"))
                })
                .unwrap_or_default();
            if is_local_import {
                Ok(())
            } else {
                Err(anyhow!("'{lib_name}' library is not properly imported. It must be imported from the Sway repository by using a relative path, e.g., `{lib_name} = {{ path = \"../<...>/sway-lib-{lib_name}\" }}`."))
            }
        }
    }
    // The authors field must be a single-element array containing exactly AUTHOR.
    fn check_project_authors_field(toml: &Table) -> Result<()> {
        const AUTHOR: &str = "Fuel Labs <contact@fuel.sh>";
        if let Some(field) = toml.get("project").and_then(|v| v.get("authors")) {
            let err = |field: &Value| {
                Err(anyhow!("'project.authors' value is invalid: `{field}`. 'project.authors' field is mandatory and must be set to `[\"{AUTHOR}\"]`."))
            };
            match field.as_array() {
                Some(value) if value.len() == 1 => match value[0].as_str() {
                    Some(value) if value == AUTHOR => Ok(()),
                    _ => err(field),
                },
                _ => err(field),
            }
        } else {
            Err(anyhow!("'project.authors' field not found. 'project.authors' field is mandatory and must be set to `[\"{AUTHOR}\"]`."))
        }
    }
    fn check_project_license_field(toml: &Table) -> Result<()> {
        check_mandatory_project_field(toml, "license", "Apache-2.0")
    }
    fn check_project_name_field(content: &str, toml: &Table, name: &str) -> Result<()> {
        // In some tests, e.g., when testing workspaces we will
        // want to have specific project names. In that case, mark
        // the `name` field with this comment to skip testing it.
        // Parsed TOML does not contain information about comments.
        // That's why we need the whole string `content` here.
        if content.contains("# NAME_NO_CHECK") {
            return Ok(());
        }
        check_mandatory_project_field(toml, "name", name)
    }
    // Checks that `project.<field_name>` exists and equals `field_value`.
    fn check_mandatory_project_field(
        toml: &Table,
        field_name: &str,
        field_value: &str,
    ) -> Result<()> {
        if let Some(field) = toml.get("project").and_then(|v| v.get(field_name)) {
            match field.as_str() {
                Some(value) if value == field_value => Ok(()),
                _ => Err(anyhow!("'project.{field_name}' value is invalid: `{field}`. 'project.{field_name}' field is mandatory and must be set to `\"{field_value}\"`.")),
            }
        } else {
            Err(anyhow!("'project.{field_name}' field not found. 'project.{field_name}' field is mandatory and must be set to `\"{field_value}\"`."))
        }
    }
    // Recursively collects every Forc.toml under `path`, skipping the
    // generated reduced std libraries.
    fn find_test_forc_tomls(path: &Path, forc_tomls: &mut Vec<PathBuf>) {
        if path.is_dir() {
            for entry in std::fs::read_dir(path).unwrap() {
                let entry = entry.unwrap();
                if entry
                    .path()
                    .to_str()
                    .unwrap()
                    .contains(REDUCED_STD_LIBS_DIR_NAME)
                {
                    continue;
                }
                find_test_forc_tomls(&entry.path(), forc_tomls);
            }
        } else if path.is_file()
            && path
                .file_name()
                .map(|f| f.eq_ignore_ascii_case("forc.toml"))
                .unwrap_or(false)
        {
            forc_tomls.push(path.to_path_buf());
        }
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/ir_generation/mod.rs | test/src/ir_generation/mod.rs | use std::{
fs,
ops::Not,
path::{Path, PathBuf},
};
use anyhow::Result;
use colored::Colorize;
use sway_core::{
compile_ir_context_to_finalized_asm, compile_to_ast,
ir_generation::compile_program,
namespace::{self, Package},
BuildConfig, BuildTarget, Engines, OptLevel, PanicOccurrences, PanickingCallOccurrences,
};
use sway_error::handler::Handler;
use sway_features::ExperimentalFeatures;
use sway_ir::{
create_fn_inline_pass, register_known_passes, Backtrace, PassGroup, PassManager,
ARG_DEMOTION_NAME, CONST_DEMOTION_NAME, DCE_NAME, MEMCPYOPT_NAME, MISC_DEMOTION_NAME,
RET_DEMOTION_NAME,
};
use sway_types::ProgramId;
use crate::RunConfig;
/// The kind of compiler output a group of `filecheck` directives is checked against.
enum Checker {
    /// Check the generated IR (the default checker at the start of a file).
    Ir,
    /// Check the generated ASM.
    Asm,
    /// Check the IR after running the configured optimization passes.
    OptimizedIr { passes: Vec<String> },
}
impl Checker {
    /// Builds and configures checkers based on file comments. Every check between checkers directive
    /// are collected into the last started checker, "::check-ir::" being the default at the start
    /// of the file.
    /// Example:
    ///
    /// ```sway
    /// // ::check-ir::
    /// // ::check-ir-optimized::
    /// // ::check-ir-asm::
    /// ```
    ///
    /// # ::check-ir-optimized::
    ///
    /// Optimized IR checker can be configured with `pass: <PASSNAME or o1>`. When
    /// `o1` is chosen, all the configured passes are chosen automatically.
    ///
    /// ```sway
    /// // ::check-ir-optimized::
    /// // pass: o1
    /// ```
    pub fn new(input: impl AsRef<str>) -> Vec<(Checker, Option<filecheck::Checker>)> {
        let input = input.as_ref();
        // Accumulate (checker kind, directive text) pairs; an implicit IR
        // checker is always open at the start of the file.
        let mut checkers: Vec<(Checker, String)> = vec![(Checker::Ir, "".to_string())];
        for line in input.lines() {
            // Only open a new IR section if the current one is not already IR,
            // so the implicit default is not duplicated by an explicit directive.
            if line.contains("::check-ir::") && !matches!(checkers.last(), Some((Checker::Ir, _))) {
                checkers.push((Checker::Ir, "".to_string()));
            }
            if line.contains("::check-asm::") {
                checkers.push((Checker::Asm, "".to_string()));
            }
            if line.contains("::check-ir-optimized::") {
                checkers.push((Checker::OptimizedIr { passes: vec![] }, "".to_string()));
            }
            // `// pass: <NAME>` lines configure the most recent optimized-IR section.
            if let Some(pass) = line.strip_prefix("// pass: ") {
                if let Some((Checker::OptimizedIr { passes }, _)) = checkers.last_mut() {
                    passes.push(pass.trim().to_string());
                }
            }
            // Every comment line is collected as directive text for the
            // currently open checker.
            if line.starts_with("//") {
                let s = checkers.last_mut().unwrap();
                s.1.push_str(line);
                s.1.push('\n');
            }
        }
        // Turn each collected directive text into a `filecheck` checker;
        // sections with no directives yield `None`.
        let mut new_checkers = vec![];
        for (k, v) in checkers {
            let ir_checker = filecheck::CheckerBuilder::new()
                .text(&v)
                .unwrap()
                .finish()
                .is_empty()
                .not()
                .then(|| {
                    // Predefine regex variables usable in directives:
                    // VAL matches IR value names, ID identifiers, MD metadata refs.
                    filecheck::CheckerBuilder::new()
                        .text(
                            "regex: VAL=\\bv\\d+v\\d+\\b\n\
                                regex: ID=[_[:alpha:]][_0-9[:alpha:]]*\n\
                                regex: MD=!\\d+\n",
                        )
                        .unwrap()
                        .text(&v)
                        .unwrap()
                        .finish()
                });
            new_checkers.push((k, ir_checker));
        }
        new_checkers
    }
}
/// Will print `filecheck` report using colors: normal lines will be dimmed,
/// matches will be green and misses will be red.
fn pretty_print_error_report(error: &str) {
    // Prints the stashed source lines and clears the stash. Marker lines
    // (those containing "^~") are printed red on a failing directive and
    // green otherwise; all other stashed lines are printed bold.
    fn flush_stash(stash: &mut Vec<&str>, is_failure: bool) {
        for line in stash.drain(..) {
            if line.contains("^~") {
                if is_failure {
                    println!("{}", line.red())
                } else {
                    println!("{}", line.green())
                }
            } else {
                println!("{}", line.bold())
            }
        }
    }
    let mut stash = vec![];
    let mut lines = error.lines().peekable();
    while let Some(current) = lines.next() {
        if current.starts_with("> ") {
            // Source lines followed by a "^~" marker are stashed so they can
            // be colored together with the verdict line; others are dimmed.
            match lines.peek() {
                Some(next) if next.contains("^~") => stash.push(current),
                _ => println!("{}", current.bright_black()),
            }
        } else if current.starts_with("Matched") && current.contains("not: ") {
            // A matched `not:` directive is a failure.
            flush_stash(&mut stash, true);
            println!("{}", current.red())
        } else if current.starts_with("Matched") {
            flush_stash(&mut stash, false);
            println!("{current}")
        } else if current.starts_with("Define") {
            println!("{current}")
        } else if current.starts_with("Missed") && current.contains("check: ") {
            // A missed `check:` directive is a failure.
            flush_stash(&mut stash, true);
            println!("{}", current.red())
        } else if current.starts_with("Missed") && current.contains("not: ") {
            flush_stash(&mut stash, false);
            println!("{current}")
        } else {
            stash.push(current);
        }
    }
}
pub(super) async fn run(
filter_regex: Option<®ex::Regex>,
verbose: bool,
run_config: &RunConfig,
) -> Result<()> {
// Create new initial namespace for every test by reusing the precompiled
// standard libraries. The namespace, thus its root module, must have the
// name set.
const PACKAGE_NAME: &str = "test_lib";
let core_lib_name = sway_types::Ident::new_no_span(PACKAGE_NAME.to_string());
// Compile std library and reuse it when compiling tests.
let engines = Engines::default();
let build_target = BuildTarget::default();
let std_package = compile_std(build_target, &engines, run_config);
// Find all the tests.
let all_tests = discover_test_files();
let total_test_count = all_tests.len();
let mut run_test_count = 0;
all_tests
.into_iter()
.filter_map(|path| {
// Filter against the regex.
if path.to_str()
.and_then(|path_str| filter_regex.map(|regex| regex.is_match(path_str)))
.unwrap_or(true) {
// Read entire file.
let input_bytes = fs::read(&path).expect("Read entire Sway source.");
let input = String::from_utf8_lossy(&input_bytes);
let checkers = Checker::new(&input);
let mut optimisation_inline = false;
let mut target_fuelvm = false;
if let Some(first_line) = input.lines().next() {
optimisation_inline = first_line.contains("optimisation-inline");
target_fuelvm = first_line.contains("target-fuelvm");
}
Some((
path,
input_bytes,
checkers,
optimisation_inline,
target_fuelvm,
))
} else {
None
}
})
.for_each(
|(path, sway_str, checkers, optimisation_inline, target_fuelvm)| {
let test_file_name = path.file_name().unwrap().to_string_lossy().to_string();
tracing::info!("Testing {} ...", test_file_name.bold());
let experimental = ExperimentalFeatures {
new_encoding: false, // IR tests still need encoding v1 off.
// TODO: Properly support experimental features in IR tests.
..Default::default()
};
// TODO: Properly support backtrace build option in IR tests.
let backtrace = Backtrace::default();
// Compile to AST. We need to provide a faux build config otherwise the IR will have
// no span metadata.
let bld_cfg = sway_core::BuildConfig::root_from_file_name_and_manifest_path(
path.clone(),
PathBuf::from("/"),
build_target,
sway_core::DbgGeneration::Full,
);
// Include unit tests in the build.
let bld_cfg = bld_cfg.with_include_tests(true);
let sway_str = String::from_utf8_lossy(&sway_str);
let handler = Handler::default();
let mut initial_namespace = Package::new(core_lib_name.clone(), None, ProgramId::new(0), false);
initial_namespace.add_external("std".to_owned(), std_package.clone());
let compile_res = compile_to_ast(
&handler,
&engines,
sway_str.as_ref().into(),
initial_namespace,
Some(&bld_cfg),
PACKAGE_NAME,
None,
experimental
);
let (errors, _warnings, _infos) = handler.consume();
if !errors.is_empty() {
panic!(
"Failed to compile test {}:\n{}",
path.display(),
errors
.iter()
.map(|err| err.to_string())
.collect::<Vec<_>>()
.as_slice()
.join("\n")
);
}
let programs = compile_res
.expect("there were no errors, so there should be a program");
if verbose {
println!("Declaration Engine");
println!("-----------------------");
println!("{}", engines.de().pretty_print(&engines));
}
let typed_program = programs.typed.as_ref().unwrap();
// Compile to IR.
let include_tests = true;
let mut panic_occurrences = PanicOccurrences::default();
let mut panicking_call_occurrences = PanickingCallOccurrences::default();
let mut ir = compile_program(typed_program, &mut panic_occurrences, &mut panicking_call_occurrences, include_tests, &engines, experimental, backtrace)
.unwrap_or_else(|e| {
use sway_types::span::Spanned;
let e = e[0].clone();
let span = e.span();
panic!(
"Failed to compile test {}:\nError \"{e}\" at {}:{}\nCode: \"{}\"",
path.display(),
span.start(),
span.end(),
span.as_str()
);
});
ir.verify()
.unwrap_or_else(|err| {
panic!("IR verification failed for test {}:\n{err}", path.display());
});
// Perform Fuel target specific passes if requested.
if target_fuelvm {
// Manually run the FuelVM target passes. This will be encapsulated into an
// official `PassGroup` eventually.
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
register_known_passes(&mut pass_mgr);
pass_group.append_pass(CONST_DEMOTION_NAME);
pass_group.append_pass(ARG_DEMOTION_NAME);
pass_group.append_pass(RET_DEMOTION_NAME);
pass_group.append_pass(MISC_DEMOTION_NAME);
pass_group.append_pass(MEMCPYOPT_NAME);
pass_group.append_pass(DCE_NAME);
if pass_mgr.run(&mut ir, &pass_group).is_err() {
panic!(
"Failed to compile test {}:\n{}",
path.display(),
errors
.iter()
.map(|err| err.to_string())
.collect::<Vec<_>>()
.as_slice()
.join("\n")
);
}
}
let ir_output = sway_ir::printer::to_string(&ir);
for (k, checker) in checkers {
match (k, checker) {
(Checker::Ir, Some(checker)) => {
match checker.explain(&ir_output, filecheck::NO_VARIABLES)
{
Ok((success, error)) if !success || verbose => {
if !success || verbose {
println!("{}", "::check-ir::".bold());
pretty_print_error_report(&error);
}
if !success {
panic!("check-ir filecheck failed. See above.");
}
}
Err(e) => {
panic!("check-ir filecheck directive error: {e}");
}
_ => (),
};
}
(Checker::Ir, None) => {
panic!(
"IR test for {test_file_name} is missing mandatory FileCheck directives.\n\n\
Here's the IR output:\n{ir_output}",
);
}
(Checker::OptimizedIr { passes }, Some(checker)) => {
if passes.is_empty() {
panic!("No optimization passes were specified for ::check-ir-optimized::. Use `// pass: <PASSNAME>` in the very next line.");
}
let mut group = PassGroup::default();
for pass in passes {
if pass == "o1" {
group = sway_ir::create_o1_pass_group();
} else {
// pass needs a 'static str
let pass = Box::leak(Box::new(pass));
group.append_pass(pass.as_str());
}
}
let mut pass_mgr = PassManager::default();
register_known_passes(&mut pass_mgr);
// Parse the IR again avoiding mutating the original ir
let mut ir = sway_ir::parser::parse(
&ir_output,
engines.se(),
experimental,
backtrace,
)
.unwrap_or_else(|e| panic!("{}: {e}\n{ir_output}", path.display()));
let _ = pass_mgr.run(&mut ir, &group);
let ir_output = sway_ir::printer::to_string(&ir);
match checker.explain(&ir_output, filecheck::NO_VARIABLES)
{
Ok((success, error)) if !success || verbose => {
if !success || verbose {
println!("{}", "::check-ir-optimized::".bold());
pretty_print_error_report(&error);
}
if !success {
panic!("check-ir-optimized filecheck failed. See above.");
}
}
Err(e) => {
panic!("check-ir-optimized filecheck directive error: {e}");
}
_ => (),
};
}
(Checker::Asm, Some(checker)) => {
if optimisation_inline {
let mut pass_mgr = PassManager::default();
let mut pmgr_config = PassGroup::default();
let inline = pass_mgr.register(create_fn_inline_pass());
pmgr_config.append_pass(inline);
let inline_res = pass_mgr.run(&mut ir, &pmgr_config);
if inline_res.is_err() {
panic!(
"Failed to compile test {}:\n{}",
path.display(),
errors
.iter()
.map(|err| err.to_string())
.collect::<Vec<_>>()
.as_slice()
.join("\n")
);
}
}
// Compile to ASM.
let handler = Handler::default();
let asm_result = compile_ir_context_to_finalized_asm(
&handler,
&ir,
Some(&BuildConfig::dummy_for_asm_generation().with_optimization_level(OptLevel::Opt1))
);
let (errors, _warnings, _infos) = handler.consume();
if asm_result.is_err() || !errors.is_empty() {
println!("Errors when compiling {test_file_name} IR to ASM:\n");
for e in errors {
println!("{e}\n");
}
panic!();
};
let asm_output = asm_result
.map(|asm| format!("{asm}"))
.expect("Failed to stringify ASM for {test_file_name}.");
if checker.is_empty() {
panic!(
"ASM test for {} has the '::check-asm::' marker \
but is missing directives.\n\
Please either remove the marker or add some.\n\n\
Here's the ASM output:\n{asm_output}",
path.file_name().unwrap().to_string_lossy()
);
}
// Do ASM checks.
match checker.explain(&asm_output, filecheck::NO_VARIABLES) {
Ok((success, error)) => {
if !success || verbose {
println!("{}", "::check-asm::".bold());
pretty_print_error_report(&error);
}
if !success {
panic!("check-asm filecheck for {test_file_name} failed. See above.");
}
}
Err(e) => {
panic!("check-asm filecheck directive errors for {test_file_name}: {e}");
}
};
}
(_, _) => {
todo!("Unknown checker");
}
}
}
// Parse the IR again, and print it yet again to make sure that IR de/serialisation works.
let parsed_ir = sway_ir::parser::parse(&ir_output, engines.se(), experimental, backtrace)
.unwrap_or_else(|e| panic!("{}: {e}\n{ir_output}", path.display()));
let parsed_ir_output = sway_ir::printer::to_string(&parsed_ir);
let parsed_ir_2 = sway_ir::parser::parse(&parsed_ir_output, engines.se(), experimental, backtrace)
.unwrap_or_else(|e| panic!("{}: {e}\n{parsed_ir_output}", path.display()));
let parsed_ir_output_2 = sway_ir::printer::to_string(&parsed_ir_2);
if parsed_ir_output_2 != parsed_ir_output {
println!("Deserialized IR:");
tracing::error!("{}", prettydiff::diff_lines(&parsed_ir_output, &parsed_ir_output_2));
panic!("{} failed IR (de)serialization.", path.display());
}
run_test_count += 1;
},
);
if run_test_count == 0 {
tracing::warn!(
"No IR generation tests were run. Regex filter \"{}\" filtered out all {} tests.",
filter_regex
.map(|regex| regex.to_string())
.unwrap_or_default(),
total_test_count,
);
} else {
tracing::info!("_________________________________");
tracing::info!(
"IR tests result: {}. {} total, {} passed; {} failed; {} disabled",
"ok".green().bold(),
total_test_count,
run_test_count,
0,
total_test_count - run_test_count
);
}
// TODO: Make this return an Err once the panics above are converted to an error
Ok(())
}
fn discover_test_files() -> Vec<PathBuf> {
fn recursive_search(path: &Path, test_files: &mut Vec<PathBuf>) {
if path.is_dir() {
for entry in fs::read_dir(path).unwrap() {
recursive_search(&entry.unwrap().path(), test_files);
}
} else if path.is_file() && path.extension().map(|ext| ext == "sw").unwrap_or(false) {
test_files.push(path.to_path_buf());
}
}
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let tests_root_dir = format!("{manifest_dir}/src/ir_generation/tests");
let mut test_files = Vec::new();
recursive_search(&PathBuf::from(tests_root_dir), &mut test_files);
test_files
}
fn compile_std(
build_target: BuildTarget,
engines: &Engines,
run_config: &RunConfig,
) -> namespace::Package {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let libstd_root_dir = format!("{manifest_dir}/../sway-lib-std");
let check_cmd = forc::cli::CheckCommand {
build_target,
path: Some(libstd_root_dir),
offline_mode: true,
terse_mode: true,
disable_tests: false,
locked: false,
ipfs_node: None,
experimental: run_config.experimental.clone(),
dump_impls: None,
};
let res = match forc::test::forc_check::check(check_cmd, engines) {
Ok(res) => res,
Err(err) => {
panic!("Failed to compile sway-lib-std for IR tests: {err:?}")
}
};
match res.0 {
Some(typed_program) => typed_program.namespace.current_package_ref().clone(),
_ => {
let (errors, _warnings, _infos) = res.1.consume();
for err in errors {
println!("{err:?}");
}
panic!("Failed to compile sway-lib-std for IR tests.");
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/harness.rs | test/src/sdk-harness/test_projects/harness.rs | // Add test modules here:
mod abi_impl_methods_callable;
mod asset_id;
mod asset_ops;
mod auth;
mod block;
mod call_frames;
mod configurables_in_contract;
mod configurables_in_script;
mod context;
mod contract_bytecode;
mod ec_recover;
mod ec_recover_and_match_predicate;
mod events;
mod evm;
mod evm_ec_recover;
mod exponentiation;
mod generics_in_abi;
mod logging;
mod low_level_call;
mod messages;
mod methods;
mod option_field_order;
mod option_in_abi;
mod parsing_logs;
mod predicate_data_simple;
mod predicate_data_struct;
mod predicate_panic_expression;
mod private_struct_fields_in_storage_and_abi;
mod registers;
mod result_in_abi;
mod result_option_expect;
mod run_external_proxy;
mod run_external_proxy_with_storage;
mod script_data;
mod storage;
mod storage_access;
mod storage_bytes;
mod storage_init;
mod storage_map;
mod storage_map_nested;
mod storage_string;
mod storage_vec;
mod storage_vec_nested;
mod storage_vec_of_storage_string;
mod storage_vec_to_vec;
mod string_slice;
mod superabi;
mod superabi_supertrait;
mod time;
mod tx_fields;
mod type_aliases;
mod vec_in_abi;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/evm/mod.rs | test/src/sdk-harness/test_projects/evm/mod.rs | use fuels::{
prelude::*,
types::{Bits256, ContractId, EvmAddress},
};
abigen!(Contract(
name = "EvmTestContract",
abi = "test_projects/evm/out/release/evm-abi.json"
));
async fn get_evm_test_instance() -> (EvmTestContract<Wallet>, ContractId) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/evm/out/release/evm.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = EvmTestContract::new(id.clone(), wallet);
(instance, id.into())
}
#[tokio::test]
async fn can_call_from_literal() {
let (instance, _) = get_evm_test_instance().await;
let result = instance
.methods()
.evm_address_from_literal()
.call()
.await
.unwrap();
assert_eq!(
EvmAddress::from(
Bits256::from_hex_str(
"0x0606060606060606060606060606060606060606060606060606060606060606",
)
.unwrap()
),
result.value
);
}
#[tokio::test]
async fn can_call_from_argument() {
let (instance, _) = get_evm_test_instance().await;
let raw_address = [7; 32];
let result = instance
.methods()
.evm_address_from_argument(Bits256(raw_address))
.call()
.await
.unwrap();
assert_eq!(EvmAddress::from(Bits256(raw_address)), result.value);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage/mod.rs | test/src/sdk-harness/test_projects/storage/mod.rs | use fuels::{
prelude::*,
types::{Bits256, SizedAsciiString},
};
abigen!(Contract(
name = "TestStorageContract",
abi = "test_projects/storage/out/release/storage-abi.json",
));
async fn get_test_storage_instance() -> TestStorageContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/storage/out/release/storage.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
TestStorageContract::new(id.clone(), wallet)
}
#[tokio::test]
async fn can_store_and_get_bool() {
let instance = get_test_storage_instance().await;
let b = true;
// Test store
instance.methods().store_bool(b).call().await.unwrap();
let result = instance.methods().get_bool().call().await.unwrap();
assert_eq!(result.value, Some(b));
}
#[tokio::test]
async fn can_store_and_get_u8() {
let instance = get_test_storage_instance().await;
let n = 8;
// Test store
instance.methods().store_u8(n).call().await.unwrap();
let result = instance.methods().get_u8().call().await.unwrap();
assert_eq!(result.value, Some(n));
}
#[tokio::test]
async fn can_store_and_get_u16() {
let instance = get_test_storage_instance().await;
let n = 16;
// Test store
instance.methods().store_u16(n).call().await.unwrap();
let result = instance.methods().get_u16().call().await.unwrap();
assert_eq!(result.value, Some(n));
}
#[tokio::test]
async fn can_store_and_get_u32() {
let instance = get_test_storage_instance().await;
let n = 32;
// Test store
instance.methods().store_u32(n).call().await.unwrap();
let result = instance.methods().get_u32().call().await.unwrap();
assert_eq!(result.value, Some(n));
}
#[tokio::test]
async fn can_store_and_get_u64() {
let instance = get_test_storage_instance().await;
let n = 64;
// Test store
instance.methods().store_u64(n).call().await.unwrap();
let result = instance.methods().get_u64().call().await.unwrap();
assert_eq!(result.value, Some(n));
}
#[tokio::test]
async fn can_store_b256() {
let instance = get_test_storage_instance().await;
let n: Bits256 = Bits256([2; 32]);
// Test store
instance.methods().store_b256(n).call().await.unwrap();
let result = instance.methods().get_b256().call().await.unwrap();
assert_eq!(result.value, Some(n));
}
#[tokio::test]
async fn can_store_small_struct() {
let instance = get_test_storage_instance().await;
let s = SmallStruct { x: 42 };
// Test store
instance
.methods()
.store_small_struct(s.clone())
.call()
.await
.unwrap();
let result = instance.methods().get_small_struct().call().await.unwrap();
assert_eq!(result.value, Some(s));
}
#[tokio::test]
async fn can_store_medium_struct() {
let instance = get_test_storage_instance().await;
let s = MediumStruct { x: 42, y: 66 };
// Test store
instance
.methods()
.store_medium_struct(s.clone())
.call()
.await
.unwrap();
let result = instance.methods().get_medium_struct().call().await.unwrap();
assert_eq!(result.value, Some(s));
}
#[tokio::test]
async fn can_store_large_struct() {
let instance = get_test_storage_instance().await;
let s = LargeStruct {
x: 13,
y: Bits256([6; 32]),
z: 77,
};
// Test store
instance
.methods()
.store_large_struct(s.clone())
.call()
.await
.unwrap();
let result = instance.methods().get_large_struct().call().await.unwrap();
assert_eq!(result.value, Some(s));
}
#[tokio::test]
async fn can_store_very_large_struct() {
let instance = get_test_storage_instance().await;
let s = VeryLargeStruct {
x: 42,
y: Bits256([9; 32]),
z: Bits256([7; 32]),
};
instance
.methods()
.store_very_large_struct(s.clone())
.call()
.await
.unwrap();
let result = instance
.methods()
.get_very_large_struct()
.call()
.await
.unwrap();
assert_eq!(result.value, Some(s));
}
#[tokio::test]
async fn can_store_enum() {
let instance = get_test_storage_instance().await;
let e1 = StorageEnum::V1(Bits256([3; 32]));
// Test store
instance
.methods()
.store_enum(e1.clone())
.call()
.await
.unwrap();
let result = instance.methods().get_enum().call().await.unwrap();
assert_eq!(result.value, Some(e1));
let e2 = StorageEnum::V2(99);
instance
.methods()
.store_enum(e2.clone())
.call()
.await
.unwrap();
let result = instance.methods().get_enum().call().await.unwrap();
assert_eq!(result.value, Some(e2));
let e3 = StorageEnum::V3(Bits256([4; 32]));
instance
.methods()
.store_enum(e3.clone())
.call()
.await
.unwrap();
let result = instance.methods().get_enum().call().await.unwrap();
assert_eq!(result.value, Some(e3));
}
#[tokio::test]
async fn can_store_tuple() {
let instance = get_test_storage_instance().await;
let t = (Bits256([7; 32]), 8, Bits256([6; 32]));
// Test store
instance.methods().store_tuple(t).call().await.unwrap();
let result = instance.methods().get_tuple().call().await.unwrap();
assert_eq!(result.value, Some(t));
}
#[tokio::test]
async fn can_store_string() {
let instance = get_test_storage_instance().await;
let s = "fastest_modular_execution_layer".to_string();
// Test store
instance
.methods()
.store_string(SizedAsciiString::try_from(s.clone()).unwrap())
.call()
.await
.unwrap();
let result = instance.methods().get_string().call().await.unwrap();
assert_eq!(result.value, Some(SizedAsciiString::try_from(s).unwrap()));
}
#[tokio::test]
async fn can_store_array() {
let instance = get_test_storage_instance().await;
let a = [Bits256([153; 32]), Bits256([136; 32]), Bits256([119; 32])];
// Test store
instance.methods().store_array().call().await.unwrap();
let result = instance.methods().get_array().call().await.unwrap();
assert_eq!(result.value, Some(a));
}
#[tokio::test]
async fn can_store_non_inlined() {
let instance = get_test_storage_instance().await;
let result = instance.methods().storage_in_call().call().await.unwrap();
assert_eq!(result.value, 333);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_map_nested/mod.rs | test/src/sdk-harness/test_projects/storage_map_nested/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageMapNestedContract",
abi = "test_projects/storage_map_nested/out/release/storage_map_nested-abi.json",
));
async fn test_storage_map_nested_instance() -> TestStorageMapNestedContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/storage_map_nested/out/release/storage_map_nested.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
TestStorageMapNestedContract::new(id.clone(), wallet)
}
#[tokio::test]
async fn nested_map_1_access() {
let methods = test_storage_map_nested_instance().await.methods();
methods.nested_map_1_access().call().await.unwrap();
}
#[tokio::test]
async fn nested_map_2_access() {
let methods = test_storage_map_nested_instance().await.methods();
methods.nested_map_2_access().call().await.unwrap();
}
#[tokio::test]
async fn nested_map_3_access() {
let methods = test_storage_map_nested_instance().await.methods();
methods.nested_map_3_access().call().await.unwrap();
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/auth/mod.rs | test/src/sdk-harness/test_projects/auth/mod.rs | use fuels::{
accounts::{predicate::Predicate, signers::private_key::PrivateKeySigner},
prelude::*,
tx::UtxoId,
types::{
coin::{Coin},
coin_type::CoinType,
input::Input,
message::{Message, MessageStatus},
Bytes32, ContractId,
},
};
use std::str::FromStr;
abigen!(
Contract(
name = "AuthContract",
abi = "test_artifacts/auth_testing_contract/out/release/auth_testing_contract-abi.json"
),
Contract(
name = "AuthCallerContract",
abi = "test_artifacts/auth_caller_contract/out/release/auth_caller_contract-abi.json"
),
Predicate(
name = "AuthPredicate",
abi = "test_artifacts/auth_predicate/out/release/auth_predicate-abi.json"
),
);
#[tokio::test]
async fn is_external_from_sdk() {
let (auth_instance, _, _, _, _) = get_contracts().await;
let result = auth_instance
.methods()
.is_caller_external()
.call()
.await
.unwrap();
assert!(result.value);
}
#[tokio::test]
async fn msg_sender_from_sdk() {
let (auth_instance, _, _, _, wallet) = get_contracts().await;
let result = auth_instance
.methods()
.returns_msg_sender_address(wallet.address())
.call()
.await
.unwrap();
assert!(result.value);
}
#[tokio::test]
async fn msg_sender_from_contract() {
let (auth_instance, auth_id, caller_instance, caller_id, _) = get_contracts().await;
let result = caller_instance
.methods()
.call_auth_contract(auth_id, caller_id)
.with_contracts(&[&auth_instance])
.call()
.await
.unwrap();
assert!(result.value);
}
#[tokio::test]
async fn input_message_msg_sender_from_contract() {
// Wallet
let wallet_signer = PrivateKeySigner::random(&mut rand::thread_rng());
let deployment_signer = PrivateKeySigner::random(&mut rand::thread_rng());
// Setup coins and messages
let coins = setup_single_asset_coins(wallet_signer.address(), AssetId::BASE, 100, 1000);
let coins_2 = setup_single_asset_coins(deployment_signer.address(), AssetId::BASE, 100, 1000);
let total_coins = [coins, coins_2].concat();
let msg = setup_single_message(
Address::default(),
wallet_signer.address(),
DEFAULT_COIN_AMOUNT,
10.into(),
vec![],
);
let provider = setup_test_provider(total_coins.clone(), vec![msg.clone()], None, None)
.await
.unwrap();
let wallet = Wallet::new(wallet_signer, provider.clone());
let deployer_wallet = Wallet::new(deployment_signer, provider.clone());
// Setup contract
let id = Contract::load_from(
"test_artifacts/auth_testing_contract/out/release/auth_testing_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&deployer_wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = AuthContract::new(id.clone(), wallet.clone());
// Start building transactions
let call_handler = instance.methods().returns_msg_sender_address(msg.recipient);
let mut tb = call_handler
.transaction_builder()
.await
.unwrap()
.enable_burn(true);
// Inputs
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Message(
wallet
.get_messages()
.await
.unwrap()
.first()
.unwrap()
.clone(),
),
});
// Build transaction
tb.add_signer(wallet.signer().clone()).unwrap();
let tx = tb.build(provider.clone()).await.unwrap();
// Send and verify
let tx_status = provider.send_transaction_and_await_commit(tx).await.unwrap();
let response = call_handler.get_response(tx_status).unwrap();
assert!(response.value);
}
#[tokio::test]
async fn caller_addresses_from_messages() {
let signer_1 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_2 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_3 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_4 = PrivateKeySigner::random(&mut rand::thread_rng());
// Setup message
let message_amount = 10;
let message1 = Message {
sender: signer_1.address(),
recipient: signer_1.address(),
nonce: 0.into(),
amount: message_amount,
data: vec![],
da_height: 0,
status: MessageStatus::Unspent,
};
let message2 = Message {
sender: signer_2.address(),
recipient: signer_2.address(),
nonce: 1.into(),
amount: message_amount,
data: vec![],
da_height: 0,
status: MessageStatus::Unspent,
};
let message3 = Message {
sender: signer_3.address(),
recipient: signer_3.address(),
nonce: 2.into(),
amount: message_amount,
data: vec![],
da_height: 0,
status: MessageStatus::Unspent,
};
let mut message_vec: Vec<Message> = Vec::new();
message_vec.push(message1);
message_vec.push(message2);
message_vec.push(message3);
// Setup Coin
let coin_amount = 10;
let coin = Coin {
owner: signer_4.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 0),
amount: coin_amount,
asset_id: AssetId::default(),
};
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let provider = setup_test_provider(vec![coin], message_vec, Some(node_config), None)
.await
.unwrap();
let wallet1 = Wallet::new(signer_1, provider.clone());
let wallet2 = Wallet::new(signer_2, provider.clone());
let wallet3 = Wallet::new(signer_3, provider.clone());
let wallet4 = Wallet::new(signer_4, provider.clone());
let id_1 = Contract::load_from(
"test_artifacts/auth_testing_contract/out/release/auth_testing_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet4, TxPolicies::default())
.await
.unwrap()
.contract_id;
let auth_instance = AuthContract::new(id_1.clone(), wallet4.clone());
let result = auth_instance
.methods()
.returns_caller_addresses()
.call()
.await
.unwrap();
assert_eq!(result.value, vec![Address::from(*wallet4.address())]);
// Start building transactions
let call_handler = auth_instance.methods().returns_caller_addresses();
let mut tb = call_handler.transaction_builder().await.unwrap();
// Inputs
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Message(setup_single_message(
wallet1.address(),
wallet1.address(),
message_amount,
0.into(),
vec![],
)),
});
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Message(setup_single_message(
wallet2.address(),
wallet2.address(),
message_amount,
1.into(),
vec![],
)),
});
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Message(setup_single_message(
wallet3.address(),
wallet3.address(),
message_amount,
2.into(),
vec![],
)),
});
// Build transaction
tb.add_signer(wallet1.signer().clone()).unwrap();
tb.add_signer(wallet2.signer().clone()).unwrap();
tb.add_signer(wallet3.signer().clone()).unwrap();
let provider = wallet1.provider();
let tx = tb.enable_burn(true).build(provider.clone()).await.unwrap();
// Send and verify
let tx_status = provider.send_transaction_and_await_commit(tx).await.unwrap();
let result = call_handler.get_response(tx_status).unwrap();
assert!(result
.value
.contains(&Address::from(wallet1.address())));
assert!(result
.value
.contains(&Address::from(wallet2.address())));
assert!(result
.value
.contains(&Address::from(wallet3.address())));
}
#[tokio::test]
async fn caller_addresses_from_coins() {
let signer_1 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_2 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_3 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_4 = PrivateKeySigner::random(&mut rand::thread_rng());
// Setup Coin
let coin_amount = 10;
let coin1 = Coin {
owner: signer_1.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 0),
amount: coin_amount,
asset_id: AssetId::default(),
};
let coin2 = Coin {
owner: signer_2.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 1),
amount: coin_amount,
asset_id: AssetId::default(),
};
let coin3 = Coin {
owner: signer_3.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 2),
amount: coin_amount,
asset_id: AssetId::default(),
};
let coin4 = Coin {
owner: signer_4.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 3),
amount: coin_amount,
asset_id: AssetId::default(),
};
let mut coin_vec: Vec<Coin> = Vec::new();
coin_vec.push(coin1);
coin_vec.push(coin2);
coin_vec.push(coin3);
coin_vec.push(coin4);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let provider = setup_test_provider(coin_vec, vec![], Some(node_config), None)
.await
.unwrap();
let wallet1 = Wallet::new(signer_1, provider.clone());
let wallet2 = Wallet::new(signer_2, provider.clone());
let wallet3 = Wallet::new(signer_3, provider.clone());
let wallet4 = Wallet::new(signer_4, provider.clone());
let id_1 = Contract::load_from(
"test_artifacts/auth_testing_contract/out/release/auth_testing_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet4, TxPolicies::default())
.await
.unwrap()
.contract_id;
let auth_instance = AuthContract::new(id_1.clone(), wallet4.clone());
let result = auth_instance
.methods()
.returns_caller_addresses()
.call()
.await
.unwrap();
assert_eq!(result.value, vec![Address::from(*wallet4.address())]);
// Start building transactions
let call_handler = auth_instance.methods().returns_caller_addresses();
let mut tb = call_handler.transaction_builder().await.unwrap();
// Inputs
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Coin(Coin {
owner: wallet1.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 0),
amount: coin_amount,
asset_id: AssetId::default(),
}),
});
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Coin(Coin {
owner: wallet2.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 1),
amount: coin_amount,
asset_id: AssetId::default(),
}),
});
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Coin(Coin {
owner: wallet3.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 2),
amount: coin_amount,
asset_id: AssetId::default(),
}),
});
// Build transaction
tb.add_signer(wallet1.signer().clone()).unwrap();
tb.add_signer(wallet2.signer().clone()).unwrap();
tb.add_signer(wallet3.signer().clone()).unwrap();
let provider = wallet1.provider();
let tx = tb.enable_burn(true).build(provider.clone()).await.unwrap();
// Send and verify
let tx_status = provider.send_transaction_and_await_commit(tx).await.unwrap();
let result = call_handler.get_response(tx_status).unwrap();
assert!(result
.value
.contains(&Address::from(wallet1.address())));
assert!(result
.value
.contains(&Address::from(wallet2.address())));
assert!(result
.value
.contains(&Address::from(wallet3.address())));
}
#[tokio::test]
async fn caller_addresses_from_coins_and_messages() {
let signer_1 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_2 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_3 = PrivateKeySigner::random(&mut rand::thread_rng());
let signer_4 = PrivateKeySigner::random(&mut rand::thread_rng());
let message_amount = 10;
let message1 = Message {
sender: signer_1.address(),
recipient: signer_1.address(),
nonce: 0.into(),
amount: message_amount,
data: vec![],
da_height: 0,
status: MessageStatus::Unspent,
};
// Setup Coin
let coin_amount = 10;
let coin2 = Coin {
owner: signer_2.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 1),
amount: coin_amount,
asset_id: AssetId::default(),
};
let coin3 = Coin {
owner: signer_3.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 2),
amount: coin_amount,
asset_id: AssetId::default(),
};
let coin4 = Coin {
owner: signer_4.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 3),
amount: coin_amount,
asset_id: AssetId::default(),
};
let mut coin_vec: Vec<Coin> = Vec::new();
coin_vec.push(coin2);
coin_vec.push(coin3);
coin_vec.push(coin4);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let provider = setup_test_provider(coin_vec, vec![message1], Some(node_config), None)
.await
.unwrap();
let wallet1 = Wallet::new(signer_1, provider.clone());
let wallet2 = Wallet::new(signer_2, provider.clone());
let wallet3 = Wallet::new(signer_3, provider.clone());
let wallet4 = Wallet::new(signer_4, provider.clone());
let id_1 = Contract::load_from(
"test_artifacts/auth_testing_contract/out/release/auth_testing_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet4, TxPolicies::default())
.await
.unwrap()
.contract_id;
let auth_instance = AuthContract::new(id_1.clone(), wallet4.clone());
let result = auth_instance
.methods()
.returns_caller_addresses()
.call()
.await
.unwrap();
assert_eq!(result.value, vec![Address::from(*wallet4.address())]);
// Start building transactions
let call_handler = auth_instance.methods().returns_caller_addresses();
let mut tb = call_handler.transaction_builder().await.unwrap();
// Inputs
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Message(setup_single_message(
wallet1.address(),
wallet1.address(),
message_amount,
0.into(),
vec![],
)),
});
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Coin(Coin {
owner: wallet2.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 1),
amount: coin_amount,
asset_id: AssetId::default(),
}),
});
tb.inputs_mut().push(Input::ResourceSigned {
resource: CoinType::Coin(Coin {
owner: wallet3.address(),
utxo_id: UtxoId::new(Bytes32::zeroed(), 2),
amount: coin_amount,
asset_id: AssetId::default(),
}),
});
// Build transaction
tb.add_signer(wallet1.signer().clone()).unwrap();
tb.add_signer(wallet2.signer().clone()).unwrap();
tb.add_signer(wallet3.signer().clone()).unwrap();
let provider = wallet1.provider();
let tx = tb.enable_burn(true).build(provider.clone()).await.unwrap();
// Send and verify
let tx_status = provider.send_transaction_and_await_commit(tx).await.unwrap();
let result = call_handler.get_response(tx_status).unwrap();
assert!(result
.value
.contains(&Address::from(wallet1.address())));
assert!(result
.value
.contains(&Address::from(wallet2.address())));
assert!(result
.value
.contains(&Address::from(wallet3.address())));
}
async fn get_contracts() -> (
AuthContract<Wallet>,
ContractId,
AuthCallerContract<Wallet>,
ContractId,
Wallet,
) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id_1 = Contract::load_from(
"test_artifacts/auth_testing_contract/out/release/auth_testing_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let id_2 = Contract::load_from(
"test_artifacts/auth_caller_contract/out/release/auth_caller_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance_1 = AuthContract::new(id_1.clone(), wallet.clone());
let instance_2 = AuthCallerContract::new(id_2.clone(), wallet.clone());
(instance_1, id_1.into(), instance_2, id_2.into(), wallet)
}
#[tokio::test]
async fn can_get_predicate_address() {
// Setup Wallets
let asset_id = AssetId::default();
let wallets_config = WalletsConfig::new_multiple_assets(
2,
vec![AssetConfig {
id: asset_id,
num_coins: 1,
coin_amount: 1_000,
}],
);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let wallets = &launch_custom_provider_and_get_wallets(wallets_config, Some(node_config), None)
.await
.unwrap();
let first_wallet = &wallets[0];
let second_wallet = &wallets[1];
// Setup predicate.
let hex_predicate_address: &str =
"0xe549825429a82fde8b7906f216485a8a7641f9dab25252dabed67eb4e6262b29";
let predicate_address =
Address::from_str(hex_predicate_address).expect("failed to create Address from string");
let predicate_data = AuthPredicateEncoder::default()
.encode_data(predicate_address)
.unwrap();
let predicate: Predicate =
Predicate::load_from("test_artifacts/auth_predicate/out/release/auth_predicate.bin")
.unwrap()
.with_provider(first_wallet.try_provider().unwrap().clone())
.with_data(predicate_data);
// If this test fails, it can be that the predicate address got changed.
// Uncomment the next line, get the predicate address, and update it above.
// dbg!(&predicate);
// Next, we lock some assets in this predicate using the first wallet:
// First wallet transfers amount to predicate.
first_wallet
.transfer(predicate.address(), 500, asset_id, TxPolicies::default())
.await
.unwrap();
// Check predicate balance.
let balance = predicate
.get_asset_balance(&AssetId::default())
.await
.unwrap();
assert_eq!(balance, 500);
// Then we can transfer assets owned by the predicate via the Account trait:
let amount_to_unlock = 500;
// Will transfer if the correct predicate address is passed as an argument to the predicate
predicate
.transfer(
second_wallet.address(),
amount_to_unlock,
asset_id,
TxPolicies::default(),
)
.await
.unwrap();
// Predicate balance is zero.
let balance = predicate
.get_asset_balance(&AssetId::default())
.await
.unwrap();
assert_eq!(balance, 0);
// Second wallet balance is updated.
let balance = second_wallet
.get_asset_balance(&AssetId::default())
.await
.unwrap();
assert_eq!(balance, 1500);
}
#[tokio::test]
#[should_panic]
async fn when_incorrect_predicate_address_passed() {
// Setup Wallets
let asset_id = AssetId::default();
let wallets_config = WalletsConfig::new_multiple_assets(
2,
vec![AssetConfig {
id: asset_id,
num_coins: 1,
coin_amount: 1_000,
}],
);
let wallets = &launch_custom_provider_and_get_wallets(wallets_config, None, None)
.await
.unwrap();
let first_wallet = &wallets[0];
let second_wallet = &wallets[1];
// Setup predicate with incorrect address.
let hex_predicate_address: &str =
"0x61c2fbc40e1fe1602f8734928a6f1bf76d5d70f9c0407e6dc74e4bfdfb7ac392";
let predicate_address =
Address::from_str(hex_predicate_address).expect("failed to create Address from string");
let predicate_data = AuthPredicateEncoder::default()
.encode_data(predicate_address)
.unwrap();
let predicate: Predicate =
Predicate::load_from("test_artifacts/auth_predicate/out/release/auth_predicate.bin")
.unwrap()
.with_provider(first_wallet.try_provider().unwrap().clone())
.with_data(predicate_data);
// Next, we lock some assets in this predicate using the first wallet:
// First wallet transfers amount to predicate.
first_wallet
.transfer(predicate.address(), 500, asset_id, TxPolicies::default())
.await
.unwrap();
// Check predicate balance.
let balance = predicate
.get_asset_balance(&AssetId::default())
.await
.unwrap();
assert_eq!(balance, 500);
// Then we can transfer assets owned by the predicate via the Account trait:
let amount_to_unlock = 500;
// Will should fail to transfer
predicate
.transfer(
second_wallet.address(),
amount_to_unlock,
asset_id,
TxPolicies::default(),
)
.await
.unwrap();
}
#[tokio::test]
async fn can_get_predicate_address_in_message() {
// Setup predicate address.
let hex_predicate_address: &str =
"0xe549825429a82fde8b7906f216485a8a7641f9dab25252dabed67eb4e6262b29";
let predicate_address =
Address::from_str(hex_predicate_address).expect("failed to create Address from string");
// Setup message
let message_amount = 1;
let message = Message {
sender: Address::default(),
recipient: predicate_address,
nonce: 0.into(),
amount: message_amount,
data: vec![],
da_height: 0,
status: MessageStatus::Unspent,
};
let mut message_vec: Vec<Message> = Vec::new();
message_vec.push(message);
// Setup Coin
let coin_amount = 0;
let coin = Coin {
owner: predicate_address,
utxo_id: UtxoId::new(Bytes32::zeroed(), 0),
amount: coin_amount,
asset_id: AssetId::default(),
};
let mut coin_vec: Vec<Coin> = Vec::new();
coin_vec.push(coin);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let provider = setup_test_provider(coin_vec, message_vec, Some(node_config), None)
.await
.unwrap();
let wallet = Wallet::random(&mut rand::thread_rng(), provider);
// Setup predicate.
let predicate_data = AuthPredicateEncoder::default()
.encode_data(predicate_address)
.unwrap();
let predicate: Predicate =
Predicate::load_from("test_artifacts/auth_predicate/out/release/auth_predicate.bin")
.unwrap()
.with_provider(wallet.try_provider().unwrap().clone())
.with_data(predicate_data);
// If this test fails, it can be that the predicate address got changed.
// Uncomment the next line, get the predicate address, and update it above.
// dbg!(&predicate);
// Check predicate balance.
let balance = predicate
.get_asset_balance(&AssetId::default())
.await
.unwrap();
assert_eq!(balance, message_amount as u128);
// Spend the message
predicate
.transfer(
wallet.address(),
message_amount,
AssetId::default(),
TxPolicies::default(),
)
.await
.unwrap();
// The predicate has spent the funds
let predicate_balance = predicate
.get_asset_balance(&AssetId::default())
.await
.unwrap();
assert_eq!(predicate_balance, 0);
// Funds were transferred
let wallet_balance = wallet.get_asset_balance(&AssetId::default()).await.unwrap();
assert_eq!(wallet_balance, message_amount as u128);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/run_external_proxy/mod.rs | test/src/sdk-harness/test_projects/run_external_proxy/mod.rs | use fuels::{prelude::*, types::Bits256};
abigen!(Contract(
name = "RunExternalProxyContract",
abi = "test_projects/run_external_proxy/out/release/run_external_proxy-abi.json",
));
#[tokio::test]
async fn run_external_can_proxy_call() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let target_id = Contract::load_from(
"test_projects/run_external_target/out/release/run_external_target.bin",
LoadConfiguration::default()
.with_storage_configuration(StorageConfiguration::default().with_autoload(false)),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let configurables = RunExternalProxyContractConfigurables::default()
.with_TARGET(target_id.clone().into())
.unwrap();
let id = Contract::load_from(
"test_projects/run_external_proxy/out/release/run_external_proxy.bin",
LoadConfiguration::default().with_configurables(configurables),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = RunExternalProxyContract::new(id.clone(), wallet);
// Call "large_value"
// Will call run_external_proxy::large_value
// that will call run_external_target::large_value
// and return the value doubled.
let result = instance
.methods()
.large_value()
.with_contract_ids(&[target_id.clone().into()])
.call()
.await
.unwrap();
for r in result.tx_status.receipts.iter() {
match r {
Receipt::LogData { data, .. } => {
if let Some(data) = data {
if data.len() > 8 {
if let Ok(s) = std::str::from_utf8(&data[8..]) {
print!("{:?} ", s);
}
}
println!("{:?}", data);
}
}
_ => {}
}
}
let expected_large =
Bits256::from_hex_str("0x00000000000000000000000059F2f1fCfE2474fD5F0b9BA1E73ca90b143Eb8d0")
.unwrap();
assert_eq!(result.value, expected_large);
// Call "double_value"
// Will call run_external_proxy::double_value
// that will call run_external_target::double_value
// and return the value doubled.
let result = instance
.methods()
.double_value(42)
.with_contract_ids(&[target_id.clone().into()])
.call()
.await
.unwrap();
for r in result.tx_status.receipts.iter() {
match r {
Receipt::LogData { data, .. } => {
if let Some(data) = data {
if data.len() > 8 {
if let Ok(s) = std::str::from_utf8(&data[8..]) {
print!("{:?} ", s);
}
}
println!("{:?}", data);
}
}
_ => {}
}
}
assert_eq!(result.value, 84);
// Call "does_not_exist_in_the_target"
// Will call run_external_proxy::does_not_exist_in_the_target
// it will proxy the call to run_external_target,
// and endup in the fallback, fn that will triple the input value
let result = instance
.methods()
.does_not_exist_in_the_target(42)
.with_contract_ids(&[target_id.into()])
.call()
.await
.unwrap();
assert_eq!(result.value, 126);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/run_external_proxy_with_storage/mod.rs | test/src/sdk-harness/test_projects/run_external_proxy_with_storage/mod.rs | use fuels::{prelude::*, types::Bits256};
abigen!(Contract(
name = "RunExternalProxyContract",
abi = "test_projects/run_external_proxy_with_storage/out/release/run_external_proxy_with_storage-abi.json",
));
#[tokio::test]
async fn run_external_can_proxy_call() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let storage_configuration =
StorageConfiguration::default().add_slot_overrides_from_file("test_projects/run_external_target_with_storage/out/release/run_external_target_with_storage-storage_slots.json").unwrap();
let target_id = Contract::load_from(
"test_projects/run_external_target_with_storage/out/release/run_external_target_with_storage.bin",
LoadConfiguration::default()
.with_storage_configuration(storage_configuration.clone()),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let configurables = RunExternalProxyContractConfigurables::default()
.with_TARGET(target_id.clone().into())
.unwrap();
let id = Contract::load_from(
"test_projects/run_external_proxy_with_storage/out/release/run_external_proxy_with_storage.bin",
LoadConfiguration::default().with_configurables(configurables).with_storage_configuration(storage_configuration),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = RunExternalProxyContract::new(id.clone(), wallet);
// Call "large_value"
// Will call run_external_proxy::large_value
// that will call run_external_target::large_value
// and return the value doubled.
let result = instance
.methods()
.large_value()
.with_contract_ids(&[target_id.clone().into()])
.call()
.await
.unwrap();
for r in result.tx_status.receipts.iter() {
match r {
Receipt::LogData { data, .. } => {
if let Some(data) = data {
if data.len() > 8 {
if let Ok(s) = std::str::from_utf8(&data[8..]) {
print!("{:?} ", s);
}
}
println!("{:?}", data);
}
}
_ => {}
}
}
let expected_large =
Bits256::from_hex_str("0x00000000000000000000000059F2f1fCfE2474fD5F0b9BA1E73ca90b143Eb8d0")
.unwrap();
assert_eq!(result.value, expected_large);
// Call "double_value"
// Will call run_external_proxy::double_value
// that will call run_external_target::double_value
// and return the value doubled.
let result = instance
.methods()
.double_value(42)
.with_contract_ids(&[target_id.clone().into()])
.call()
.await
.unwrap();
for r in result.tx_status.receipts.iter() {
match r {
Receipt::LogData { data, .. } => {
if let Some(data) = data {
if data.len() > 8 {
if let Ok(s) = std::str::from_utf8(&data[8..]) {
print!("{:?} ", s);
}
}
println!("{:?}", data);
}
}
_ => {}
}
}
assert_eq!(result.value, 84);
// Call "does_not_exist_in_the_target"
// Will call run_external_proxy::does_not_exist_in_the_target
// it will proxy the call to run_external_target,
// and endup in the fallback, fn that will triple the input value
let result = instance
.methods()
.does_not_exist_in_the_target(42)
.with_contract_ids(&[target_id.into()])
.call()
.await
.unwrap();
assert_eq!(result.value, 126);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/testgen.rs | test/src/sdk-harness/test_projects/storage_vec/testgen.rs | #[macro_export]
macro_rules! testgen {
(
// Name of the module to create.
$module_name:ident,
// Path to the contract ABI (string literal required for `abigen!`).
$abi_path:expr,
// Type to test, as a string literal (required for binary and storage file names).
$type_label:expr,
// Type to test, as a Rust type declaration (required for function signatures).
$type_declaration:ty,
// Arguments of type `$type_declaration` to use in tests.
$arg0:expr,
$arg1:expr,
$arg2:expr,
$arg3:expr,
$arg4:expr
) => {
pub mod $module_name {
use fuels::prelude::*;
abigen!(Contract(
name = "MyContract",
abi = $abi_path
));
// Silences `super::*` warning; required for user-defined types.
#[allow(unused_imports)]
pub mod setup {
use super::*;
pub async fn get_contract_instance() -> MyContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
&format!(
"test_artifacts/storage_vec/svec_{}/out/release/svec_{}.bin",
$type_label,
$type_label,
),
LoadConfiguration::default()
.with_storage_configuration(StorageConfiguration::default()
.add_slot_overrides_from_file(
&format!(
"test_artifacts/storage_vec/svec_{}/out/release/svec_{}-storage_slots.json",
$type_label,
$type_label,
)
)
.unwrap()),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
MyContract::new(id.clone(), wallet)
}
}
// Silences `super::*` warning; required for user-defined types.
#[allow(unused_imports)]
pub mod wrappers {
use super::*;
pub async fn push(instance: &MyContract<Wallet>, value: $type_declaration) {
instance.methods()
.push(value)
.call()
.await
.unwrap();
}
pub async fn pop(instance: &MyContract<Wallet>) -> $type_declaration {
instance.methods()
.pop()
.call()
.await
.unwrap()
.value
}
pub async fn get(instance: &MyContract<Wallet>, index: u64) -> $type_declaration {
instance.methods()
.get(index)
.call()
.await
.unwrap()
.value
}
pub async fn remove(instance: &MyContract<Wallet>, index: u64) -> $type_declaration {
instance.methods()
.remove(index)
.call()
.await
.unwrap()
.value
}
pub async fn swap_remove(instance: &MyContract<Wallet>, index: u64) -> $type_declaration {
instance.methods()
.swap_remove(index)
.call()
.await
.unwrap()
.value
}
pub async fn set(instance: &MyContract<Wallet>, index: u64, value: $type_declaration) {
instance.methods()
.set(index, value)
.call()
.await
.unwrap();
}
pub async fn insert(instance: &MyContract<Wallet>, index: u64, value: $type_declaration) {
instance.methods()
.insert(index, value)
.call()
.await
.unwrap();
}
pub async fn len(instance: &MyContract<Wallet>) -> u64 {
instance.methods()
.len()
.call()
.await
.unwrap()
.value
}
pub async fn is_empty(instance: &MyContract<Wallet>) -> bool {
instance.methods()
.is_empty()
.call()
.await
.unwrap()
.value
}
pub async fn clear(instance: &MyContract<Wallet>) {
instance.methods()
.clear()
.call()
.await
.unwrap();
}
pub async fn swap(instance: &MyContract<Wallet>, index_0: u64, index_1: u64) {
instance.methods()
.swap(index_0, index_1)
.call()
.await
.unwrap();
}
pub async fn first(instance: &MyContract<Wallet>) -> $type_declaration {
instance.methods()
.first()
.call()
.await
.unwrap()
.value
}
pub async fn last(instance: &MyContract<Wallet>) -> $type_declaration {
instance.methods()
.last()
.call()
.await
.unwrap()
.value
}
pub async fn reverse(instance: &MyContract<Wallet>) {
instance.methods()
.reverse()
.call()
.await
.unwrap();
}
pub async fn fill(instance: &MyContract<Wallet>, value: $type_declaration) {
instance.methods()
.fill(value)
.call()
.await
.unwrap();
}
pub async fn resize(instance: &MyContract<Wallet>, new_len: u64, value: $type_declaration) {
instance.methods()
.resize(new_len, value)
.call()
.await
.unwrap();
}
}
// Silences `super::*` warning; required for user-defined types.
#[allow(unused_imports)]
pub mod success {
use super::{
*,
setup::get_contract_instance,
wrappers::*,
};
#[tokio::test]
async fn can_push() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(len(&instance).await, 1);
}
#[tokio::test]
async fn can_pop() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
assert_eq!(len(&instance).await, 1);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(pop(&instance).await, $arg0);
assert_eq!(len(&instance).await, 0);
}
#[tokio::test]
async fn can_get() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
assert_eq!(get(&instance, 0).await, $arg0);
}
#[tokio::test]
async fn can_remove() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
assert_eq!(remove(&instance, 2).await, $arg2);
assert_eq!(len(&instance).await, 3);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg3);
}
#[tokio::test]
async fn can_swap_remove() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
assert_eq!(swap_remove(&instance, 1).await, $arg1);
assert_eq!(len(&instance).await, 3);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg3);
assert_eq!(get(&instance, 2).await, $arg2);
}
#[tokio::test]
async fn can_set() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
set(&instance, 0, $arg3).await;
set(&instance, 1, $arg2).await;
set(&instance, 2, $arg1).await;
set(&instance, 3, $arg0).await;
assert_eq!(get(&instance, 0).await, $arg3);
assert_eq!(get(&instance, 1).await, $arg2);
assert_eq!(get(&instance, 2).await, $arg1);
assert_eq!(get(&instance, 3).await, $arg0);
}
#[tokio::test]
async fn can_insert() {
let instance = get_contract_instance().await;
insert(&instance, 0, $arg0).await;
assert_eq!(len(&instance).await, 1);
assert_eq!(get(&instance, 0).await, $arg0);
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
insert(&instance, 1, $arg4).await;
assert_eq!(len(&instance).await, 5);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg4);
assert_eq!(get(&instance, 2).await, $arg1);
assert_eq!(get(&instance, 3).await, $arg2);
assert_eq!(get(&instance, 4).await, $arg3);
}
#[tokio::test]
async fn can_get_len() {
let instance = get_contract_instance().await;
assert_eq!(len(&instance).await, 0);
push(&instance, $arg0).await;
assert_eq!(len(&instance).await, 1);
push(&instance, $arg1).await;
assert_eq!(len(&instance).await, 2);
}
#[tokio::test]
async fn can_confirm_emptiness() {
let instance = get_contract_instance().await;
assert!(is_empty(&instance).await);
push(&instance, $arg0).await;
assert!(!is_empty(&instance).await);
clear(&instance).await;
assert!(is_empty(&instance).await);
}
#[tokio::test]
async fn can_clear() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
assert!(!is_empty(&instance).await);
clear(&instance).await;
assert!(is_empty(&instance).await);
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert!(!is_empty(&instance).await);
clear(&instance).await;
assert!(is_empty(&instance).await);
}
#[tokio::test]
async fn can_swap() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
swap(&instance, 0, 3).await;
swap(&instance, 1, 2).await;
assert_eq!(get(&instance, 0).await, $arg3);
assert_eq!(get(&instance, 1).await, $arg2);
assert_eq!(get(&instance, 2).await, $arg1);
assert_eq!(get(&instance, 3).await, $arg0);
}
#[tokio::test]
async fn can_get_first() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
assert_eq!(len(&instance).await, 2);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(first(&instance).await, $arg0);
}
#[tokio::test]
async fn can_get_last() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
assert_eq!(len(&instance).await, 2);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(last(&instance).await, $arg1);
}
#[tokio::test]
async fn can_reverse_even_len() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
reverse(&instance).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg3);
assert_eq!(get(&instance, 1).await, $arg2);
assert_eq!(get(&instance, 2).await, $arg1);
assert_eq!(get(&instance, 3).await, $arg0);
reverse(&instance).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
}
#[tokio::test]
async fn can_reverse_odd_len() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
assert_eq!(len(&instance).await, 3);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
reverse(&instance).await;
assert_eq!(len(&instance).await, 3);
assert_eq!(get(&instance, 0).await, $arg2);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg0);
reverse(&instance).await;
assert_eq!(len(&instance).await, 3);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
}
#[tokio::test]
async fn can_fill() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
fill(&instance, $arg4).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg4);
assert_eq!(get(&instance, 1).await, $arg4);
assert_eq!(get(&instance, 2).await, $arg4);
assert_eq!(get(&instance, 3).await, $arg4);
}
#[tokio::test]
async fn can_resize_up() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
resize(&instance, 6, $arg4).await;
assert_eq!(len(&instance).await, 6);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
assert_eq!(get(&instance, 4).await, $arg4);
assert_eq!(get(&instance, 5).await, $arg4);
}
#[tokio::test]
async fn can_resize_down() {
let instance = get_contract_instance().await;
push(&instance, $arg0).await;
push(&instance, $arg1).await;
push(&instance, $arg2).await;
push(&instance, $arg3).await;
assert_eq!(len(&instance).await, 4);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
assert_eq!(get(&instance, 2).await, $arg2);
assert_eq!(get(&instance, 3).await, $arg3);
resize(&instance, 2, $arg4).await;
assert_eq!(len(&instance).await, 2);
assert_eq!(get(&instance, 0).await, $arg0);
assert_eq!(get(&instance, 1).await, $arg1);
}
}
// Silences `super::*` warning; required for user-defined types.
#[allow(unused_imports)]
pub mod failure {
use super::{
*,
setup::get_contract_instance,
wrappers::*,
};
#[tokio::test]
#[should_panic(expected = "revert_id: Some(0)")]
async fn cant_pop() {
let instance = get_contract_instance().await;
let _ = pop(&instance).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(0)")]
async fn cant_get() {
let instance = get_contract_instance().await;
get(&instance, 0).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(18446744073709486084)")]
async fn cant_remove() {
let instance = get_contract_instance().await;
let _ = remove(&instance, 0).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(18446744073709486084)")]
async fn cant_swap_remove() {
let instance = get_contract_instance().await;
let _ = swap_remove(&instance, 0).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(18446744073709486084)")]
async fn cant_set() {
let instance = get_contract_instance().await;
set(&instance, 1, $arg1).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(18446744073709486084)")]
async fn cant_insert() {
let instance = get_contract_instance().await;
insert(&instance, 1, $arg1).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(0)")]
async fn cant_get_first() {
let instance = get_contract_instance().await;
let _ = first(&instance).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(0)")]
async fn cant_get_last() {
let instance = get_contract_instance().await;
let _ = last(&instance).await;
}
#[tokio::test]
#[should_panic(expected = "revert_id: Some(18446744073709486084)")]
async fn cant_swap() {
let instance = get_contract_instance().await;
let _ = swap(&instance, 0, 1).await;
}
}
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_u32.rs | test/src/sdk-harness/test_projects/storage_vec/svec_u32.rs | testgen!(
test_u32_vec,
"test_artifacts/storage_vec/svec_u32/out/release/svec_u32-abi.json",
"u32",
u32,
1u32,
2u32,
3u32,
4u32,
5u32
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_u64.rs | test/src/sdk-harness/test_projects/storage_vec/svec_u64.rs | testgen!(
test_u64_vec,
"test_artifacts/storage_vec/svec_u64/out/release/svec_u64-abi.json",
"u64",
u64,
1u64,
2u64,
3u64,
4u64,
5u64
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_u16.rs | test/src/sdk-harness/test_projects/storage_vec/svec_u16.rs | testgen!(
test_u16_vec,
"test_artifacts/storage_vec/svec_u16/out/release/svec_u16-abi.json",
"u16",
u16,
1u16,
2u16,
3u16,
4u16,
5u16
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_b256.rs | test/src/sdk-harness/test_projects/storage_vec/svec_b256.rs | testgen!(
test_b256_vec,
"test_artifacts/storage_vec/svec_b256/out/release/svec_b256-abi.json",
"b256",
::fuels::types::Bits256,
::fuels::types::Bits256([1; 32]),
::fuels::types::Bits256([2; 32]),
::fuels::types::Bits256([3; 32]),
::fuels::types::Bits256([4; 32]),
::fuels::types::Bits256([5; 32])
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_array.rs | test/src/sdk-harness/test_projects/storage_vec/svec_array.rs | testgen!(
test_array_vec,
"test_artifacts/storage_vec/svec_array/out/release/svec_array-abi.json",
"array",
[u8; 3],
[1; 3],
[2; 3],
[3; 3],
[4; 3],
[5; 3]
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_tuple.rs | test/src/sdk-harness/test_projects/storage_vec/svec_tuple.rs | testgen!(
test_tuple_vec,
"test_artifacts/storage_vec/svec_tuple/out/release/svec_tuple-abi.json",
"tuple",
(u8, u8, u8),
(1, 1, 1),
(2, 2, 2),
(4, 4, 4),
(5, 5, 5),
(6, 6, 6)
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/mod.rs | test/src/sdk-harness/test_projects/storage_vec/mod.rs | #[macro_use]
mod testgen;
mod svec_array;
mod svec_b256;
mod svec_bool;
mod svec_enum;
mod svec_str;
mod svec_struct;
mod svec_tuple;
mod svec_u16;
mod svec_u32;
mod svec_u64;
mod svec_u8;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_struct.rs | test/src/sdk-harness/test_projects/storage_vec/svec_struct.rs | testgen!(
test_struct_vec,
"test_artifacts/storage_vec/svec_struct/out/release/svec_struct-abi.json",
"struct",
TestStruct,
TestStruct { a: true, b: 1 },
TestStruct { a: false, b: 2 },
TestStruct { a: true, b: 3 },
TestStruct { a: false, b: 4 },
TestStruct { a: true, b: 5 }
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_enum.rs | test/src/sdk-harness/test_projects/storage_vec/svec_enum.rs | testgen!(
test_enum_vec,
"test_artifacts/storage_vec/svec_enum/out/release/svec_enum-abi.json",
"enum",
TestEnum,
TestEnum::A(true),
TestEnum::A(false),
TestEnum::B(1),
TestEnum::B(3),
TestEnum::B(2)
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_u8.rs | test/src/sdk-harness/test_projects/storage_vec/svec_u8.rs | testgen!(
test_u8_vec,
"test_artifacts/storage_vec/svec_u8/out/release/svec_u8-abi.json",
"u8",
u8,
1u8,
2u8,
3u8,
4u8,
5u8
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_str.rs | test/src/sdk-harness/test_projects/storage_vec/svec_str.rs | testgen!(
test_str_vec,
"test_artifacts/storage_vec/svec_str/out/release/svec_str-abi.json",
"str",
::fuels::types::SizedAsciiString::<4>,
::fuels::types::SizedAsciiString::<4>::new("yeet".to_string()).unwrap(),
::fuels::types::SizedAsciiString::<4>::new("meow".to_string()).unwrap(),
::fuels::types::SizedAsciiString::<4>::new("kekw".to_string()).unwrap(),
::fuels::types::SizedAsciiString::<4>::new("gmgn".to_string()).unwrap(),
::fuels::types::SizedAsciiString::<4>::new("sway".to_string()).unwrap()
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec/svec_bool.rs | test/src/sdk-harness/test_projects/storage_vec/svec_bool.rs | testgen!(
test_bool_vec,
"test_artifacts/storage_vec/svec_bool/out/release/svec_bool-abi.json",
"bool",
bool,
true,
true,
false,
true,
false
);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/predicate_data_simple/mod.rs | test/src/sdk-harness/test_projects/predicate_data_simple/mod.rs | use fuel_vm::fuel_asm::{op, RegId};
use fuel_vm::fuel_tx;
use fuel_vm::fuel_tx::{Address, AssetId, Output};
use fuels::{
core::codec::{ABIEncoder, EncoderConfig},
prelude::*,
types::{input::Input, transaction_builders::ScriptTransactionBuilder, Token},
};
use std::str::FromStr;
async fn setup() -> (Vec<u8>, Address, Wallet, u64, AssetId) {
let predicate_code =
std::fs::read("test_projects/predicate_data_simple/out/release/predicate_data_simple.bin")
.unwrap();
let predicate_address = fuel_tx::Input::predicate_owner(&predicate_code);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let mut wallets = launch_custom_provider_and_get_wallets(
WalletsConfig::new(Some(1), None, None),
Some(node_config),
None,
)
.await
.unwrap();
let wallet = wallets.pop().unwrap();
(
predicate_code,
predicate_address,
wallet,
1000,
AssetId::default(),
)
}
async fn create_predicate(
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
) {
let provider = wallet.provider();
let wallet_coins = wallet
.get_asset_inputs_for_amount(
asset_id,
wallet.get_asset_balance(&asset_id).await.unwrap().into(),
None,
)
.await
.unwrap();
let output_coin = Output::coin(predicate_address, amount_to_predicate, asset_id);
let output_change = Output::change(wallet.clone().address().into(), 0, asset_id);
let mut tx = ScriptTransactionBuilder::prepare_transfer(
wallet_coins,
vec![output_coin, output_change],
Default::default(),
)
.with_script(op::ret(RegId::ONE).to_bytes().to_vec());
tx.add_signer(wallet.signer().clone()).unwrap();
let tx = tx.build(provider).await.unwrap();
provider.send_transaction_and_await_commit(tx).await.unwrap();
}
async fn submit_to_predicate(
predicate_code: Vec<u8>,
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
receiver_address: Address,
predicate_data: Vec<u8>,
) -> Result<()> {
let filter = ResourceFilter {
from: predicate_address,
asset_id: Some(asset_id),
amount: amount_to_predicate.into(),
..Default::default()
};
let utxo_predicate_hash = wallet
.provider()
.get_spendable_resources(filter)
.await
.unwrap();
let mut inputs = vec![];
let mut total_amount_in_predicate = 0;
for coin in utxo_predicate_hash {
inputs.push(Input::resource_predicate(
coin.clone(),
predicate_code.to_vec(),
predicate_data.clone(),
));
total_amount_in_predicate += coin.amount();
}
let output_coin = Output::coin(receiver_address, total_amount_in_predicate - 1, asset_id);
let output_change = Output::change(predicate_address, 0, asset_id);
let provider = wallet.provider();
let new_tx = ScriptTransactionBuilder::prepare_transfer(
inputs,
vec![output_coin, output_change],
Default::default(),
)
.with_tx_policies(TxPolicies::default().with_tip(1))
.build(provider)
.await
.unwrap();
wallet.provider().send_transaction_and_await_commit(new_tx).await.map(|_| ())
}
async fn get_balance(wallet: &Wallet, address: Address, asset_id: AssetId) -> u128 {
wallet
.provider()
.get_asset_balance(&address.into(), &asset_id)
.await
.unwrap()
}
#[tokio::test]
async fn valid_predicate_data_simple() {
let arg = Token::U32(12345_u32);
let args: Vec<Token> = vec![arg];
let predicate_data = ABIEncoder::new(EncoderConfig::default())
.encode(&args)
.unwrap();
let receiver_address =
Address::from_str("0xd926978a28a565531a06cbf5fab5402d6ee2021e5a5dce2d2f7c61e5521be109")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await
.expect("Failed to submit to predicate");
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(
receiver_balance_before + amount_to_predicate as u128 - 1,
receiver_balance_after
);
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, 0);
}
#[tokio::test]
async fn invalid_predicate_data_simple() {
let arg = Token::U32(1001_u32);
let args: Vec<Token> = vec![arg];
let predicate_data = ABIEncoder::new(EncoderConfig::default())
.encode(&args)
.unwrap();
let receiver_address =
Address::from_str("0xde97d8624a438121b86a1956544bd72ed68cd69f2c99555b08b1e8c51ffd511c")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await
.expect_err("Submitting to predicate should have failed");
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, receiver_balance_after);
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, amount_to_predicate as u128);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_map/try_insert.rs | test/src/sdk-harness/test_projects/storage_map/try_insert.rs | use super::*;
#[macro_export]
macro_rules! generate_try_insert_tests {
($input_string:expr, $key:expr, $value1:expr, $value2:expr) => {
paste::paste! {
#[tokio::test]
async fn [<try_insert_ $input_string _exists>]() {
let instance = test_storage_map_instance().await;
instance.methods().[<insert_into_ $input_string _map>]($key, $value1).call().await.unwrap();
let prev = instance.methods().[<get_from_ $input_string _map>]($key).call().await.unwrap().value;
assert_eq!(prev, Some($value1));
let result = instance.methods().[<try_insert_into_ $input_string _map>]($key, $value2).call().await.unwrap().value;
assert_eq!(result, Err(StorageMapError::OccupiedError($value1)));
let after = instance.methods().[<get_from_ $input_string _map>]($key).call().await.unwrap().value;
assert_eq!(after, Some($value1));
}
#[tokio::test]
async fn [<try_insert_ $input_string _does_not_exist>]() {
let instance = test_storage_map_instance().await;
let prev = instance.methods().[<get_from_ $input_string _map>]($key).call().await.unwrap().value;
assert_eq!(prev, None);
let result = instance.methods().[<try_insert_into_ $input_string _map>]($key, $value2).call().await.unwrap().value;
assert_eq!(result, Ok($value2));
let after = instance.methods().[<get_from_ $input_string _map>]($key).call().await.unwrap().value;
assert_eq!(after, Some($value2));
}
}
};
}
generate_try_insert_tests!(u64_to_bool, 1, true, false);
generate_try_insert_tests!(u64_to_u8, 1, 1, 2);
generate_try_insert_tests!(u64_to_u16, 1, 1, 2);
generate_try_insert_tests!(u64_to_u32, 1, 1, 2);
generate_try_insert_tests!(u64_to_u64, 1, 1, 2);
generate_try_insert_tests!(
u64_to_tuple,
1,
(Bits256([1; 32]), 1, true),
(Bits256([2; 32]), 2, false)
);
generate_try_insert_tests!(
u64_to_struct,
1,
Struct {
x: 1,
y: Bits256([1; 32]),
z: Bits256([2; 32])
},
Struct {
x: 2,
y: Bits256([3; 32]),
z: Bits256([4; 32])
}
);
generate_try_insert_tests!(u64_to_enum, 1, Enum::V1(Bits256([1; 32])), Enum::V2(2));
generate_try_insert_tests!(
u64_to_str,
1,
SizedAsciiString::try_from("aaaaaaaaaA").unwrap(),
SizedAsciiString::try_from("bbbbbbbbbB").unwrap()
);
generate_try_insert_tests!(
u64_to_array,
1,
[Bits256([1; 32]); 3],
[Bits256([2; 32]); 3]
);
generate_try_insert_tests!(bool_to_u64, true, 1, 2);
generate_try_insert_tests!(u8_to_u64, 1, 1, 2);
generate_try_insert_tests!(u16_to_u64, 1, 1, 2);
generate_try_insert_tests!(u32_to_u64, 1, 1, 2);
generate_try_insert_tests!(tuple_to_u64, (Bits256([1; 32]), 1, true), 1, 2);
generate_try_insert_tests!(
struct_to_u64,
Struct {
x: 1,
y: Bits256([1; 32]),
z: Bits256([2; 32])
},
1,
2
);
generate_try_insert_tests!(enum_to_u64, Enum::V1(Bits256([1; 32])), 1, 2);
generate_try_insert_tests!(
str_to_u64,
SizedAsciiString::try_from("aaaaaaaaaA").unwrap(),
1,
2
);
generate_try_insert_tests!(array_to_u64, [Bits256([1; 32]); 3], 1, 2);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_map/mod.rs | test/src/sdk-harness/test_projects/storage_map/mod.rs | use fuels::{
prelude::*,
types::{Bits256, SizedAsciiString},
};
pub mod try_insert;
abigen!(Contract(
name = "TestStorageMapContract",
abi = "test_projects/storage_map/out/release/storage_map-abi.json",
));
async fn test_storage_map_instance() -> TestStorageMapContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/storage_map/out/release/storage_map.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
TestStorageMapContract::new(id.clone(), wallet)
}
mod u64_to {
use super::*;
#[tokio::test]
async fn bool_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (1, 2, 3);
let (val1, val2, val3) = (true, false, true);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_bool_map(key1)
.call()
.await
.unwrap()
.value,
None
);
// Insert into u64 -> T storage maps
instance
.methods()
.insert_into_u64_to_bool_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_bool_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_bool_map(key3, val3)
.call()
.await
.unwrap();
// Get from u64 -> T storage maps
assert_eq!(
instance
.methods()
.get_from_u64_to_bool_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_bool_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_bool_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_bool_map(key1)
.call()
.await
.unwrap()
.value
);
assert_eq!(
instance
.methods()
.get_from_u64_to_bool_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn u8_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (1, 2, 3);
let (val1, val2, val3) = (8, 66, 99);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_u8_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_u8_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u8_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u8_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_u8_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u8_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u8_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_u8_map(key1)
.call()
.await
.unwrap()
.value
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u8_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn u16_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (6, 9, 1);
let (val1, val2, val3) = (9, 42, 100);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_u16_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_u16_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u16_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u16_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_u16_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u16_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u16_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_u16_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u16_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn u32_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (5, 99, 10);
let (val1, val2, val3) = (90, 2, 100);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_u32_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_u32_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u32_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u32_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_u32_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u32_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u32_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_u32_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u32_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn u64_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (50, 99, 1);
let (val1, val2, val3) = (90, 20, 10);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_u64_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u64_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_u64_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u64_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u64_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn tuple_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (50, 99, 10);
let (val1, val2, val3) = (
(Bits256([1; 32]), 42, true),
(Bits256([2; 32]), 24, true),
(Bits256([3; 32]), 99, true),
);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_tuple_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_tuple_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_tuple_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_tuple_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_tuple_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_tuple_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_tuple_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_tuple_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_tuple_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn struct_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (5, 9, 1);
let (val1, val2, val3) = (
Struct {
x: 42,
y: Bits256([66; 32]),
z: Bits256([99; 32]),
},
Struct {
x: 24,
y: Bits256([11; 32]),
z: Bits256([90; 32]),
},
Struct {
x: 77,
y: Bits256([55; 32]),
z: Bits256([12; 32]),
},
);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_struct_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_struct_map(key1, val1.clone())
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_struct_map(key2, val2.clone())
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_struct_map(key3, val3.clone())
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_struct_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_struct_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_struct_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_struct_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_struct_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn enum_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (44, 17, 1000);
let (val1, val2, val3) = (
Enum::V1(Bits256([66; 32])),
Enum::V2(42),
Enum::V3(Bits256([42; 32])),
);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_enum_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_enum_map(key1, val1.clone())
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_enum_map(key2, val2.clone())
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_enum_map(key3, val3.clone())
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_enum_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_enum_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u64_to_enum_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_enum_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_enum_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn string_map() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (9001, 1980, 1000);
let (val1, val2, val3) = (
"aaaaaaaaaA",
"bbbbbbbbbB",
"cccccccccC",
);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u64_to_str_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u64_to_str_map(key1, SizedAsciiString::try_from(val1).unwrap())
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_str_map(key2, SizedAsciiString::try_from(val2).unwrap())
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u64_to_str_map(key3, SizedAsciiString::try_from(val3).unwrap())
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u64_to_str_map(key1)
.call()
.await
.unwrap()
.value,
Some(SizedAsciiString::try_from(val1).unwrap())
);
assert_eq!(
instance
.methods()
.get_from_u64_to_str_map(key2)
.call()
.await
.unwrap()
.value,
Some(SizedAsciiString::try_from(val2).unwrap())
);
assert_eq!(
instance
.methods()
.get_from_u64_to_str_map(key3)
.call()
.await
.unwrap()
.value,
Some(SizedAsciiString::try_from(val3).unwrap())
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u64_to_str_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u64_to_str_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
}
mod to_u64_map {
use super::*;
#[tokio::test]
async fn from_bool() {
let instance = test_storage_map_instance().await;
let (key1, key2) = (true, false);
let (val1, val2) = (1, 2);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_bool_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_bool_to_u64_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_bool_to_u64_map(key2, val2)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_bool_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_bool_to_u64_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_bool_to_u64_map(key1)
.call()
.await
.unwrap()
.value
);
assert_eq!(
instance
.methods()
.get_from_bool_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn from_u8() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (8, 66, 99);
let (val1, val2, val3) = (1, 2, 3);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u8_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u8_to_u64_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u8_to_u64_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u8_to_u64_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u8_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u8_to_u64_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u8_to_u64_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u8_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u8_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn from_u16() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (9, 42, 100);
let (val1, val2, val3) = (6, 9, 1);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u16_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u16_to_u64_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u16_to_u64_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u16_to_u64_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u16_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u16_to_u64_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u16_to_u64_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u16_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u16_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn from_u32() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (90, 2, 100);
let (val1, val2, val3) = (5, 99, 10);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_u32_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_u32_to_u64_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u32_to_u64_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_u32_to_u64_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_u32_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_u32_to_u64_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_u32_to_u64_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_u32_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_u32_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn from_tuple() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (
(Bits256([1; 32]), 42, true),
(Bits256([2; 32]), 24, true),
(Bits256([3; 32]), 99, true),
);
let (val1, val2, val3) = (50, 99, 10);
// Nothing to read just yet
assert_eq!(
instance
.methods()
.get_from_tuple_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
instance
.methods()
.insert_into_tuple_to_u64_map(key1, val1)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_tuple_to_u64_map(key2, val2)
.call()
.await
.unwrap();
instance
.methods()
.insert_into_tuple_to_u64_map(key3, val3)
.call()
.await
.unwrap();
assert_eq!(
instance
.methods()
.get_from_tuple_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
Some(val1)
);
assert_eq!(
instance
.methods()
.get_from_tuple_to_u64_map(key2)
.call()
.await
.unwrap()
.value,
Some(val2)
);
assert_eq!(
instance
.methods()
.get_from_tuple_to_u64_map(key3)
.call()
.await
.unwrap()
.value,
Some(val3)
);
// Test `remove`
assert!(
instance
.methods()
.remove_from_tuple_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
);
assert_eq!(
instance
.methods()
.get_from_tuple_to_u64_map(key1)
.call()
.await
.unwrap()
.value,
None
);
}
#[tokio::test]
async fn from_struct() {
let instance = test_storage_map_instance().await;
let (key1, key2, key3) = (
Struct {
x: 42,
y: Bits256([66; 32]),
z: Bits256([99; 32]),
},
Struct {
x: 24,
y: Bits256([11; 32]),
z: Bits256([90; 32]),
},
Struct {
x: 77,
y: Bits256([55; 32]),
z: Bits256([12; 32]),
},
);
let (val1, val2, val3) = (5, 9, 1);
// Nothing to read just yet
assert_eq!(
instance
.methods()
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/tx_fields/mod.rs | test/src/sdk-harness/test_projects/tx_fields/mod.rs | use fuel_vm::fuel_crypto::Hasher;
use fuel_vm::fuel_tx::{ContractId, Input as TxInput};
use fuels::core::codec::EncoderConfig;
use fuels::types::transaction_builders::TransactionBuilder;
use fuels::{
accounts::{predicate::Predicate, signers::private_key::PrivateKeySigner},
prelude::*,
tx::StorageSlot,
types::{input::Input as SdkInput, output::Output as SdkOutput, Bits256, ChainId},
};
use std::fs;
const MESSAGE_DATA: [u8; 3] = [1u8, 2u8, 3u8];
const TX_CONTRACT_BYTECODE_PATH: &str = "test_artifacts/tx_contract/out/release/tx_contract.bin";
const TX_OUTPUT_PREDICATE_BYTECODE_PATH: &str =
"test_artifacts/tx_output_predicate/out/release/tx_output_predicate.bin";
const TX_OUTPUT_CONTRACT_BYTECODE_PATH: &str =
"test_artifacts/tx_output_contract/out/release/tx_output_contract.bin";
const TX_FIELDS_PREDICATE_BYTECODE_PATH: &str = "test_projects/tx_fields/out/release/tx_fields.bin";
const TX_CONTRACT_CREATION_PREDICATE_BYTECODE_PATH: &str =
"test_artifacts/tx_output_contract_creation_predicate/out/release/tx_output_contract_creation_predicate.bin";
const TX_TYPE_PREDICATE_BYTECODE_PATH: &str =
"test_artifacts/tx_type_predicate/out/release/tx_type_predicate.bin";
const TX_WITNESS_PREDICATE_BYTECODE_PATH: &str =
"test_artifacts/tx_witness_predicate/out/release/tx_witness_predicate.bin";
const TX_INPUT_COUNT_PREDICATE_BYTECODE_PATH: &str =
"test_artifacts/tx_input_count_predicate/out/release/tx_input_count_predicate.bin";
const TX_OUTPUT_COUNT_PREDICATE_BYTECODE_PATH: &str =
"test_artifacts/tx_output_count_predicate/out/release/tx_output_count_predicate.bin";
use crate::tx_fields::Output as SwayOutput;
use crate::tx_fields::Transaction as SwayTransaction;
abigen!(
Contract(
name = "TxContractTest",
abi = "test_artifacts/tx_contract/out/release/tx_contract-abi.json",
),
Contract(
name = "TxOutputContract",
abi = "test_artifacts/tx_output_contract/out/release/tx_output_contract-abi.json",
),
Predicate(
name = "TestPredicate",
abi = "test_projects/tx_fields/out/release/tx_fields-abi.json"
),
Predicate(
name = "TestOutputPredicate",
abi = "test_artifacts/tx_output_predicate/out/release/tx_output_predicate-abi.json"
),
Predicate(
name = "TestTxTypePredicate",
abi = "test_artifacts/tx_type_predicate/out/release/tx_type_predicate-abi.json"
),
Predicate(
name = "TestTxWitnessPredicate",
abi = "test_artifacts/tx_witness_predicate/out/release/tx_witness_predicate-abi.json"
),
Predicate(
name = "TestTxInputCountPredicate",
abi = "test_artifacts/tx_input_count_predicate/out/release/tx_input_count_predicate-abi.json"
),
Predicate(
name = "TestTxOutputCountPredicate",
abi = "test_artifacts/tx_output_count_predicate/out/release/tx_output_count_predicate-abi.json"
)
);
async fn get_contracts(msg_has_data: bool) -> (TxContractTest<Wallet>, ContractId, Wallet, Wallet) {
let wallet_signer = PrivateKeySigner::random(&mut rand::thread_rng());
let deployment_signer = PrivateKeySigner::random(&mut rand::thread_rng());
let mut deployment_coins = setup_single_asset_coins(
deployment_signer.address(),
AssetId::BASE,
120,
DEFAULT_COIN_AMOUNT,
);
let mut coins = setup_single_asset_coins(wallet_signer.address(), AssetId::BASE, 100, 1000);
coins.append(&mut deployment_coins);
let msg = setup_single_message(
Address::default(),
wallet_signer.address(),
DEFAULT_COIN_AMOUNT,
69.into(),
if msg_has_data {
MESSAGE_DATA.to_vec()
} else {
vec![]
},
);
let provider = setup_test_provider(coins.clone(), vec![msg], None, None)
.await
.unwrap();
let wallet = Wallet::new(wallet_signer, provider.clone());
let deployment_wallet = Wallet::new(deployment_signer, provider);
let contract_id = Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
.unwrap()
.deploy(&deployment_wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = TxContractTest::new(contract_id.clone(), wallet.clone());
(instance, contract_id.into(), wallet, deployment_wallet)
}
async fn generate_predicate_inputs(amount: u64, wallet: &Wallet) -> (Vec<u8>, SdkInput, TxInput) {
let provider = wallet.provider();
let predicate = Predicate::load_from(TX_FIELDS_PREDICATE_BYTECODE_PATH)
.unwrap()
.with_provider(provider.clone());
let predicate_code = predicate.code().to_vec();
let predicate_root = predicate.address();
let balance: u128 = wallet.get_asset_balance(&AssetId::default()).await.unwrap();
assert!(balance >= amount as u128);
wallet
.transfer(
predicate_root,
amount,
AssetId::default(),
TxPolicies::default(),
)
.await
.unwrap();
let predicate_input = predicate
.get_asset_inputs_for_amount(AssetId::default(), amount.into(), None)
.await
.unwrap()
.first()
.unwrap()
.to_owned();
let message = &wallet.get_messages().await.unwrap()[0];
let predicate_address: Address = predicate.address().into();
let predicate_message = TxInput::message_coin_predicate(
message.sender.clone().into(),
predicate_address,
message.amount,
message.nonce,
0,
predicate_code.clone(),
vec![],
);
(predicate_code, predicate_input, predicate_message)
}
async fn setup_output_predicate(
index: u64,
expected_output_type: SwayOutput,
) -> (Wallet, Wallet, Predicate, AssetId, AssetId) {
let asset_id1 = AssetId::default();
let asset_id2 = AssetId::new([2u8; 32]);
let wallets_config = WalletsConfig::new_multiple_assets(
2,
vec![
AssetConfig {
id: asset_id1,
num_coins: 1,
coin_amount: 1_000,
},
AssetConfig {
id: asset_id2,
num_coins: 1,
coin_amount: 1_000,
},
],
);
let mut wallets = launch_custom_provider_and_get_wallets(wallets_config, None, None)
.await
.unwrap();
let wallet1 = wallets.pop().unwrap();
let wallet2 = wallets.pop().unwrap();
let predicate_data = TestOutputPredicateEncoder::default()
.encode_data(
index,
Bits256([0u8; 32]),
Bits256(*wallet1.address()),
expected_output_type,
)
.unwrap();
let predicate = Predicate::load_from(TX_OUTPUT_PREDICATE_BYTECODE_PATH)
.unwrap()
.with_data(predicate_data)
.with_provider(wallet1.try_provider().unwrap().clone());
wallet1
.transfer(predicate.address(), 100, asset_id1, TxPolicies::default())
.await
.unwrap();
wallet1
.transfer(predicate.address(), 100, asset_id2, TxPolicies::default())
.await
.unwrap();
(wallet1, wallet2, predicate, asset_id1, asset_id2)
}
mod tx {
use super::*;
use fuels::types::{coin_type::CoinType, transaction::Transaction};
#[tokio::test]
async fn can_get_tx_type() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let result = contract_instance
.methods()
.get_tx_type()
.call()
.await
.unwrap();
// Script transactions are of type = 0
assert_eq!(result.value, super::Transaction::Script);
}
#[tokio::test]
async fn can_get_tip() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let tip = 3;
let result = contract_instance
.methods()
.get_tx_tip()
.with_tx_policies(TxPolicies::default().with_tip(tip))
.call()
.await
.unwrap();
assert_eq!(result.value, Some(tip));
let no_tip = contract_instance
.methods()
.get_tx_tip()
.call()
.await
.unwrap();
assert_eq!(no_tip.value, None);
}
#[tokio::test]
async fn can_get_script_gas_limit() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let script_gas_limit = 1792384;
let result = contract_instance
.methods()
.get_script_gas_limit()
.with_tx_policies(TxPolicies::default().with_script_gas_limit(script_gas_limit))
.call()
.await
.unwrap();
assert_eq!(result.value, script_gas_limit);
}
#[tokio::test]
async fn can_get_maturity() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let maturity = 0;
let result = contract_instance
.methods()
.get_tx_maturity()
.with_tx_policies(TxPolicies::default().with_maturity(maturity))
.call()
.await
.unwrap();
assert_eq!(result.value, Some(maturity as u32));
// Assert none is returned with no maturity
let no_maturity = contract_instance
.methods()
.get_tx_maturity()
.call()
.await
.unwrap();
assert_eq!(no_maturity.value, None);
}
#[tokio::test]
async fn can_get_expiration() {
let (contract_instance, _, wallet, _) = get_contracts(true).await;
let provider = wallet.try_provider().unwrap();
// This should be an error because we are not at the genesis block
let err = contract_instance
.methods()
.get_tx_expiration()
.with_tx_policies(TxPolicies::default().with_expiration(0))
.call()
.await
.expect_err("expiration reached");
assert!(err.to_string().contains("TransactionExpiration"));
let result = contract_instance
.methods()
.get_tx_expiration()
.with_tx_policies(TxPolicies::default().with_expiration(10))
.call()
.await
.unwrap();
assert_eq!(result.value, Some(10 as u32));
let result = contract_instance
.methods()
.get_tx_expiration()
.with_tx_policies(TxPolicies::default().with_expiration(1234567890))
.call()
.await
.unwrap();
assert_eq!(result.value, Some(1234567890 as u32));
let result = contract_instance
.methods()
.get_tx_expiration()
.with_tx_policies(TxPolicies::default().with_expiration(u32::MAX.into()))
.call()
.await
.unwrap();
assert_eq!(result.value, Some(u32::MAX));
// Assert none is returned with no expiration
let no_expiration = contract_instance
.methods()
.get_tx_expiration()
.call()
.await
.unwrap();
assert_eq!(no_expiration.value, None);
// Assert tx errors after expiration
let _ = provider.produce_blocks(15, None).await;
let err = contract_instance
.methods()
.get_tx_expiration()
.with_tx_policies(TxPolicies::default().with_expiration(10))
.call()
.await
.expect_err("expiration reached");
assert!(err.to_string().contains("TransactionExpiration"));
}
#[tokio::test]
async fn can_get_script_length() {
let (contract_instance, _, _, _) = get_contracts(true).await;
// TODO use programmatic script length https://github.com/FuelLabs/fuels-rs/issues/181
let script_length = 24;
let result = contract_instance
.methods()
.get_tx_script_length()
.call()
.await
.unwrap();
assert_eq!(result.value, Some(script_length));
}
#[tokio::test]
async fn can_get_script_data_length() {
let (contract_instance, _, _, _) = get_contracts(true).await;
// TODO make this programmatic.
let script_data_length = 121;
let result = contract_instance
.methods()
.get_tx_script_data_length()
.call()
.await
.unwrap();
assert_eq!(result.value, Some(script_data_length));
}
#[tokio::test]
async fn can_get_inputs_count() {
let (contract_instance, _, wallet, _) = get_contracts(false).await;
let message = &wallet.get_messages().await.unwrap()[0];
let response = contract_instance
.methods()
.get_tx_inputs_count()
.with_inputs(vec![SdkInput::ResourceSigned {
resource: CoinType::Message(message.clone()),
}])
.call()
.await
.unwrap();
assert_eq!(response.value, 2u64);
}
#[tokio::test]
async fn can_get_outputs_count() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let call_handler = contract_instance.methods().get_tx_outputs_count();
let tx = call_handler.build_tx().await.unwrap();
let outputs = tx.outputs();
let result = contract_instance
.methods()
.get_tx_outputs_count()
.call()
.await
.unwrap();
assert_eq!(result.value, outputs.len() as u16);
}
#[tokio::test]
async fn can_get_witnesses_count() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let witnesses_count = 1;
let result = contract_instance
.methods()
.get_tx_witnesses_count()
.call()
.await
.unwrap();
assert_eq!(result.value, witnesses_count);
}
#[tokio::test]
async fn can_get_witness_data_length() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let result = contract_instance
.methods()
.get_tx_witness_data_length(0)
.call()
.await
.unwrap();
assert_eq!(result.value, Some(64));
}
#[tokio::test]
async fn can_get_witness_data() {
let (contract_instance, _, wallet, _) = get_contracts(true).await;
let handler = contract_instance.methods().get_tx_witness_data(0);
let tx = handler.build_tx().await.unwrap();
let witnesses = tx.witnesses().clone();
let provider = wallet.provider();
let tx_status = provider.send_transaction_and_await_commit(tx).await.unwrap();
let receipts = tx_status
.take_receipts_checked(None)
.unwrap();
assert_eq!(receipts[1].data().unwrap()[8..72], *witnesses[0].as_vec());
}
#[tokio::test]
async fn can_get_script_bytecode_hash() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let tx = contract_instance
.methods()
.get_tx_script_bytecode_hash()
.build_tx()
.await
.unwrap();
let script = tx.script();
let hash = if script.len() > 1 {
Hasher::hash(script)
} else {
Hasher::hash(vec![])
};
let result = contract_instance
.methods()
.get_tx_script_bytecode_hash()
.call()
.await
.unwrap();
assert_eq!(result.value.unwrap(), Bits256(*hash));
}
#[tokio::test]
async fn can_get_tx_id() {
let (contract_instance, _, wallet, _) = get_contracts(true).await;
let handler = contract_instance.methods().get_tx_id();
let tx = handler.build_tx().await.unwrap();
let provider = wallet.provider();
let tx_status = provider.send_transaction_and_await_commit(tx.clone()).await.unwrap();
let receipts = tx_status
.take_receipts_checked(None)
.unwrap();
let byte_array: [u8; 32] = *tx.id(ChainId::new(0));
assert_eq!(receipts[1].data().unwrap(), byte_array);
}
#[tokio::test]
async fn can_get_tx_upload() {
// Prepare wallet and provider
let signer = PrivateKeySigner::random(&mut rand::thread_rng());
let num_coins = 100;
let coins = setup_single_asset_coins(
signer.address(),
AssetId::zeroed(),
num_coins,
DEFAULT_COIN_AMOUNT,
);
let provider = setup_test_provider(coins, vec![], None, None)
.await
.unwrap();
let wallet = Wallet::new(signer, provider.clone());
let consensus_params = provider.consensus_parameters().await.unwrap();
let base_asset_id = consensus_params.base_asset_id();
// Get the predicate
let predicate_data = TestTxTypePredicateEncoder::default()
.encode_data(SwayTransaction::Upload)
.unwrap();
let predicate: Predicate = Predicate::load_from(TX_TYPE_PREDICATE_BYTECODE_PATH)
.unwrap()
.with_provider(provider.clone())
.with_data(predicate_data);
let predicate_coin_amount = 100;
// Predicate has no funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
// Prepare bytecode and subsections
let bytecode = fs::read(TX_CONTRACT_BYTECODE_PATH).unwrap();
let subsection_size = 65536;
let subsections = UploadSubsection::split_bytecode(&bytecode, subsection_size).unwrap();
// Transfer enough funds to the predicate for each subsection
for _ in subsections.clone() {
wallet
.transfer(
predicate.address(),
predicate_coin_amount,
*base_asset_id,
TxPolicies::default(),
)
.await
.unwrap();
}
// Predicate has funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(
predicate_balance as usize,
predicate_coin_amount as usize * subsections.len()
);
// Upload each subsection in a separate transaction and include the predicate with the transaction.
for subsection in subsections {
let mut builder = UploadTransactionBuilder::prepare_subsection_upload(
subsection,
TxPolicies::default(),
);
// Inputs for predicate
let predicate_input = predicate
.get_asset_inputs_for_amount(*base_asset_id, 1, None)
.await
.unwrap();
// Outputs for predicate
let predicate_output =
wallet.get_asset_outputs_for_amount(wallet.address(), *base_asset_id, 1);
// Append the predicate to the transaction
builder.inputs.push(predicate_input.get(0).unwrap().clone());
builder
.outputs
.push(predicate_output.get(0).unwrap().clone());
wallet.add_witnesses(&mut builder).unwrap();
wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
// Submit the transaction
let tx = builder.build(&provider).await.unwrap();
provider.send_transaction_and_await_commit(tx).await.unwrap();
}
// The predicate has spent it's funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
}
#[tokio::test]
async fn can_get_witness_in_tx_upload() {
// Prepare wallet and provider
let signer = PrivateKeySigner::random(&mut rand::thread_rng());
let num_coins = 100;
let coins = setup_single_asset_coins(
signer.address(),
AssetId::zeroed(),
num_coins,
DEFAULT_COIN_AMOUNT,
);
let provider = setup_test_provider(coins, vec![], None, None)
.await
.unwrap();
let wallet = Wallet::new(signer, provider.clone());
let consensus_params = provider.consensus_parameters().await.unwrap();
let base_asset_id = consensus_params.base_asset_id();
// Prepare bytecode and subsections
let bytecode = fs::read(TX_CONTRACT_BYTECODE_PATH).unwrap();
let subsection_size = 65536;
let subsections = UploadSubsection::split_bytecode(&bytecode, subsection_size).unwrap();
// Upload each subsection in a separate transaction and include the predicate with the transaction.
for subsection in subsections.clone() {
let mut builder = UploadTransactionBuilder::prepare_subsection_upload(
subsection,
TxPolicies::default(),
);
// Prepare the predicate
let witnesses = builder.witnesses().clone();
let predicate_data = TestTxWitnessPredicateEncoder::new(EncoderConfig {
max_depth: 10,
max_tokens: 100_000,
})
.encode_data(
0,
witnesses.len() as u64 + 1,
witnesses[0].as_vec().len() as u64,
witnesses[0].as_vec().as_slice()[0..64].try_into().unwrap(),
)
.unwrap();
let predicate: Predicate = Predicate::load_from(TX_WITNESS_PREDICATE_BYTECODE_PATH)
.unwrap()
.with_provider(provider.clone())
.with_data(predicate_data);
let predicate_coin_amount = 100;
// Predicate has no funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
wallet
.transfer(
predicate.address(),
predicate_coin_amount,
*base_asset_id,
TxPolicies::default(),
)
.await
.unwrap();
// Predicate has funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(
predicate_balance as usize,
predicate_coin_amount as usize * subsections.len()
);
// Inputs for predicate
let predicate_input = predicate
.get_asset_inputs_for_amount(*base_asset_id, 1, None)
.await
.unwrap();
// Outputs for predicate
let predicate_output =
wallet.get_asset_outputs_for_amount(wallet.address(), *base_asset_id, 1);
// Append the predicate to the transaction
builder.inputs.push(predicate_input.get(0).unwrap().clone());
builder
.outputs
.push(predicate_output.get(0).unwrap().clone());
wallet.add_witnesses(&mut builder).unwrap();
wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
// Submit the transaction
let tx = builder.build(&provider).await.unwrap();
provider.send_transaction_and_await_commit(tx).await.unwrap();
// The predicate has spent it's funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
}
}
#[tokio::test]
async fn can_get_tx_blob() {
// Prepare wallet and provider
let signer = PrivateKeySigner::random(&mut rand::thread_rng());
let num_coins = 100;
let coins = setup_single_asset_coins(
signer.address(),
AssetId::zeroed(),
num_coins,
DEFAULT_COIN_AMOUNT,
);
let provider = setup_test_provider(coins, vec![], None, None)
.await
.unwrap();
let wallet = Wallet::new(signer, provider.clone());
let consensus_params = provider.consensus_parameters().await.unwrap();
let base_asset_id = consensus_params.base_asset_id();
// Get the predicate
let predicate_data = TestTxTypePredicateEncoder::default()
.encode_data(SwayTransaction::Blob)
.unwrap();
let predicate: Predicate = Predicate::load_from(TX_TYPE_PREDICATE_BYTECODE_PATH)
.unwrap()
.with_provider(provider.clone())
.with_data(predicate_data);
let predicate_coin_amount = 100;
// Predicate has no funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
// Transfer enough funds to the predicate
wallet
.transfer(
predicate.address(),
predicate_coin_amount,
*base_asset_id,
TxPolicies::default(),
)
.await
.unwrap();
// Predicate has funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance as usize, predicate_coin_amount as usize);
// Prepare blobs
let max_words_per_blob = 10_000;
let blobs = Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
.unwrap()
.convert_to_loader(max_words_per_blob)
.unwrap()
.blobs()
.to_vec();
let blob = blobs[0].clone();
// Inputs for predicate
let predicate_input = predicate
.get_asset_inputs_for_amount(*base_asset_id, 1, None)
.await
.unwrap();
// Outputs for predicate
let predicate_output =
wallet.get_asset_outputs_for_amount(wallet.address(), *base_asset_id, 1);
let mut builder = BlobTransactionBuilder::default().with_blob(blob);
// Append the predicate to the transaction
builder.inputs.push(predicate_input.get(0).unwrap().clone());
builder
.outputs
.push(predicate_output.get(0).unwrap().clone());
wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
wallet.add_witnesses(&mut builder).unwrap();
let tx = builder.build(&provider).await.unwrap();
provider
.send_transaction_and_await_commit(tx)
.await
.unwrap()
.check(None)
.unwrap();
// The predicate has spent it's funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
}
#[tokio::test]
async fn can_get_witness_in_tx_blob() {
// Prepare wallet and provider
let signer = PrivateKeySigner::random(&mut rand::thread_rng());
let num_coins = 100;
let coins = setup_single_asset_coins(
signer.address(),
AssetId::zeroed(),
num_coins,
DEFAULT_COIN_AMOUNT,
);
let provider = setup_test_provider(coins, vec![], None, None)
.await
.unwrap();
let wallet = Wallet::new(signer, provider.clone());
let consensus_params = provider.consensus_parameters().await.unwrap();
let base_asset_id = consensus_params.base_asset_id();
// Prepare blobs
let max_words_per_blob = 10_000;
let blobs = Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
.unwrap()
.convert_to_loader(max_words_per_blob)
.unwrap()
.blobs()
.to_vec();
let blob = blobs[0].clone();
let mut builder = BlobTransactionBuilder::default().with_blob(blob.clone());
// Prepare the predicate
let predicate_data = TestTxWitnessPredicateEncoder::new(EncoderConfig {
max_depth: 10,
max_tokens: 100_000,
})
.encode_data(
// Blob and witnesses are just wrappers for Vec<u8>, and function the same in case of Transaction::Blob, so using blobs here instead of witnesses
0,
blobs.len() as u64 + 1,
blob.len() as u64,
blob.bytes()[0..64].try_into().unwrap(),
)
.unwrap();
let predicate: Predicate = Predicate::load_from(TX_WITNESS_PREDICATE_BYTECODE_PATH)
.unwrap()
.with_provider(provider.clone())
.with_data(predicate_data);
let predicate_coin_amount = 100;
// Predicate has no funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
wallet
.transfer(
predicate.address(),
predicate_coin_amount,
*base_asset_id,
TxPolicies::default(),
)
.await
.unwrap();
// Predicate has funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance as usize, predicate_coin_amount as usize);
// Inputs for predicate
let predicate_input = predicate
.get_asset_inputs_for_amount(*base_asset_id, 1, None)
.await
.unwrap();
// Outputs for predicate
let predicate_output =
wallet.get_asset_outputs_for_amount(wallet.address(), *base_asset_id, 1);
// Append the predicate to the transaction
builder.inputs.push(predicate_input.get(0).unwrap().clone());
builder
.outputs
.push(predicate_output.get(0).unwrap().clone());
wallet.add_witnesses(&mut builder).unwrap();
wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
let tx = builder.build(provider.clone()).await.unwrap();
provider.send_transaction_and_await_commit(tx).await.unwrap();
// The predicate has spent it's funds
let predicate_balance = predicate.get_asset_balance(base_asset_id).await.unwrap();
assert_eq!(predicate_balance, 0);
}
}
mod inputs {
use super::*;
mod success {
use super::*;
#[tokio::test]
async fn can_get_input_type() {
let (contract_instance, _, _, _) = get_contracts(true).await;
let result = contract_instance
.methods()
.get_input_type(0)
.call()
.await
.unwrap();
assert_eq!(result.value, Some(Input::Contract));
let result = contract_instance
.methods()
.get_input_type(1)
.call()
.await
.unwrap();
assert_eq!(result.value, Some(Input::Coin));
// Assert invalid index returns None
let result = contract_instance
.methods()
.get_input_type(100) // 100 is a very high input index
.call()
.await
.unwrap();
assert_eq!(result.value, None);
}
#[tokio::test]
async fn can_get_tx_input_amount() {
let default_amount = 1000;
let (contract_instance, _, _, _) = get_contracts(true).await;
let result = contract_instance
.methods()
.get_input_amount(1)
.call()
.await
.unwrap();
assert_eq!(result.value, Some(default_amount));
// Assert invalid index returns None
let result = contract_instance
.methods()
.get_input_amount(0)
.call()
.await
.unwrap();
assert_eq!(result.value, None);
}
#[tokio::test]
async fn can_get_tx_input_coin_owner() {
let (contract_instance, _, wallet, _) = get_contracts(true).await;
let owner_result = contract_instance
.methods()
.get_input_coin_owner(1)
.call()
.await
.unwrap();
assert_eq!(owner_result.value, Some(wallet.address().into()));
// Assert invalid index returns None
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/type_aliases/mod.rs | test/src/sdk-harness/test_projects/type_aliases/mod.rs | use fuels::{
prelude::*,
types::{Bits256, Identity, SizedAsciiString},
};
abigen!(Contract(
name = "TypeAliasesTestContract",
abi = "test_projects/type_aliases/out/release/type_aliases-abi.json"
));
async fn get_type_aliases_instance() -> (TypeAliasesTestContract<Wallet>, ContractId) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/type_aliases/out/release/type_aliases.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = TypeAliasesTestContract::new(id.clone(), wallet);
(instance, id.into())
}
#[tokio::test]
async fn test_foo() -> Result<()> {
let (instance, _id) = get_type_aliases_instance().await;
let contract_methods = instance.methods();
let x = Bits256([1u8; 32]);
let y = [
Identity::ContractId(ContractId::new([1u8; 32])),
Identity::ContractId(ContractId::new([1u8; 32])),
];
let z = IdentityAliasWrapper { i: y[0].clone() };
let w = Generic { f: z.clone() };
let u = (x, x);
let s = SizedAsciiString::try_from("fuelfuel0").unwrap();
let (x_result, y_result, z_result, w_result, u_result, s_result) = contract_methods
.foo(x, y.clone(), z.clone(), w.clone(), u, s.clone())
.call()
.await
.unwrap()
.value;
assert_eq!(x, x_result);
assert_eq!(y, y_result);
assert_eq!(z, z_result);
assert_eq!(w, w_result);
assert_eq!(u, u_result);
assert_eq!(s, s_result);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/string_slice/mod.rs | test/src/sdk-harness/test_projects/string_slice/mod.rs | use fuel_vm::fuel_asm::{op, RegId};
use fuel_vm::fuel_tx;
use fuel_vm::fuel_tx::{Address, AssetId, Output};
use fuels::types::StaticStringToken;
use fuels::{
core::codec::{ABIEncoder, EncoderConfig},
prelude::*,
types::{input::Input, transaction_builders::ScriptTransactionBuilder, Token},
};
use std::str::FromStr;
abigen!(Contract(
name = "TestStringSlicePredicate",
abi = "test_projects/string_slice/string_slice_predicate/out/release/string_slice_predicate-abi.json",
));
async fn setup() -> (Vec<u8>, Address, Wallet, u64, AssetId) {
let predicate_code = std::fs::read(
"test_projects/string_slice/string_slice_predicate/out/release/string_slice_predicate.bin",
)
.unwrap();
let predicate_address = fuel_tx::Input::predicate_owner(&predicate_code);
let mut node_config = NodeConfig::default();
node_config.starting_gas_price = 0;
let mut wallets = launch_custom_provider_and_get_wallets(
WalletsConfig::new(Some(1), None, None),
Some(node_config),
None,
)
.await
.unwrap();
let wallet = wallets.pop().unwrap();
(
predicate_code,
predicate_address,
wallet,
1000,
AssetId::default(),
)
}
async fn create_predicate(
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
) {
let wallet_coins = wallet
.get_asset_inputs_for_amount(
asset_id,
wallet.get_asset_balance(&asset_id).await.unwrap().into(),
None,
)
.await
.unwrap();
let provider = wallet.provider();
let output_coin = Output::coin(predicate_address, amount_to_predicate, asset_id);
let output_change = Output::change(wallet.address().into(), 0, asset_id);
let mut tx = ScriptTransactionBuilder::prepare_transfer(
wallet_coins,
vec![output_coin, output_change],
Default::default(),
)
.with_script(op::ret(RegId::ONE).to_bytes().to_vec());
tx.add_signer(wallet.signer().clone()).unwrap();
let tx = tx.build(provider).await.unwrap();
provider.send_transaction_and_await_commit(tx).await.unwrap();
}
async fn get_balance(wallet: &Wallet, address: Address, asset_id: AssetId) -> u128 {
wallet
.provider()
.get_asset_balance(&address.into(), &asset_id)
.await
.unwrap()
}
async fn submit_to_predicate(
predicate_code: Vec<u8>,
predicate_address: Address,
wallet: &Wallet,
amount_to_predicate: u64,
asset_id: AssetId,
receiver_address: Address,
predicate_data: Vec<u8>,
) {
let filter = ResourceFilter {
from: predicate_address.into(),
asset_id: Some(asset_id),
amount: amount_to_predicate.into(),
..Default::default()
};
let provider = wallet.provider();
let utxo_predicate_hash = provider.get_spendable_resources(filter).await.unwrap();
let mut inputs = vec![];
let mut total_amount_in_predicate = 0;
for coin in utxo_predicate_hash {
inputs.push(Input::resource_predicate(
coin.clone(),
predicate_code.to_vec(),
predicate_data.clone(),
));
total_amount_in_predicate += coin.amount();
}
let output_coin = Output::coin(receiver_address, total_amount_in_predicate, asset_id);
let output_change = Output::change(predicate_address, 0, asset_id);
let new_tx = ScriptTransactionBuilder::prepare_transfer(
inputs,
vec![output_coin, output_change],
Default::default(),
)
.build(provider)
.await
.unwrap();
let _call_result = provider.send_transaction_and_await_commit(new_tx).await;
}
#[tokio::test]
async fn test_string_slice_predicate() {
let predicate_data = ABIEncoder::new(EncoderConfig::default())
.encode(&[Token::StringSlice(StaticStringToken::new(
"a".into(),
Some(1),
))])
.unwrap();
let receiver_address =
Address::from_str("0xde97d8624a438121b86a1956544bd72ed68cd69f2c99555b08b1e8c51ffd511c")
.unwrap();
let (predicate_code, predicate_address, wallet, amount_to_predicate, asset_id) = setup().await;
create_predicate(predicate_address, &wallet, amount_to_predicate, asset_id).await;
let receiver_balance_before = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(receiver_balance_before, 0);
submit_to_predicate(
predicate_code,
predicate_address,
&wallet,
amount_to_predicate,
asset_id,
receiver_address,
predicate_data,
)
.await;
let receiver_balance_after = get_balance(&wallet, receiver_address, asset_id).await;
assert_eq!(
receiver_balance_before + amount_to_predicate as u128,
receiver_balance_after
);
let predicate_balance = get_balance(&wallet, predicate_address, asset_id).await;
assert_eq!(predicate_balance, 0);
}
#[tokio::test]
async fn script_string_slice() -> Result<()> {
setup_program_test!(
Wallets("wallet"),
Abigen(Script(
name = "MyScript",
project = "test_projects/string_slice/script_string_slice",
)),
LoadScript(
name = "script_instance",
script = "MyScript",
wallet = "wallet"
)
);
let response = script_instance
.main("script-input".try_into()?)
.call()
.await?;
assert_eq!(response.value, "script-return");
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/contract_bytecode/mod.rs | test/src/sdk-harness/test_projects/contract_bytecode/mod.rs | use fuel_vm::fuel_tx::Contract as FuelsTxContract;
use fuels::{prelude::*, types::Bits256};
abigen!(Contract(
name = "ContractBytecodeTest",
abi = "test_projects/contract_bytecode/out/release/contract_bytecode-abi.json"
));
#[tokio::test]
async fn can_get_bytecode_root() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (contract_instance, id) = get_test_contract_instance(wallet).await;
let bytecode_root = contract_instance
.methods()
.get_contract_bytecode_root(ContractId::from(id.clone()))
.with_contracts(&[&contract_instance])
.call()
.await
.unwrap()
.value;
let contract_bytecode =
std::fs::read("test_projects/contract_bytecode/out/release/contract_bytecode.bin").unwrap();
let expected_bytecode_root = Bits256(*FuelsTxContract::root_from_code(contract_bytecode));
assert_eq!(expected_bytecode_root, bytecode_root);
}
async fn get_test_contract_instance(
wallet: Wallet,
) -> (ContractBytecodeTest<Wallet>, ContractId) {
let id = Contract::load_from(
"test_projects/contract_bytecode/out/release/contract_bytecode.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = ContractBytecodeTest::new(id.clone(), wallet);
(instance, id)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/logging/mod.rs | test/src/sdk-harness/test_projects/logging/mod.rs | use fuels::prelude::*;
#[tokio::test]
async fn run_valid() -> Result<()> {
abigen!(Script(
name = "Logging",
abi = "test_projects/logging/out/release/logging-abi.json",
));
let wallet = launch_provider_and_get_wallet().await.unwrap();
let bin_path = "test_projects/logging/out/release/logging.bin";
let instance = Logging::new(wallet.clone(), bin_path);
let response = instance.main().call().await?;
let correct_hex =
hex::decode("ef86afa9696cf0dc6385e2c407a6e159a1103cefb7e2ae0636fb33d3cb2a9e4a");
assert_eq!(
correct_hex.unwrap(),
response.tx_status.receipts[0].data().unwrap()
);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/script_bytecode/mod.rs | test/src/sdk-harness/test_projects/script_bytecode/mod.rs | use fuel_core::service::{Config, FuelService};
use fuel_core_client::{
client::FuelClient,
fuel_tx::{Bytes32, Receipt, Transaction},
};
use fuel_crypto::Hasher;
use fuels::prelude::*;
use fuels_contract::script::Script;
pub async fn run_compiled_script(binary_filepath: &str) -> Result<Vec<Receipt>, Error> {
let script_binary = std::fs::read(binary_filepath)?;
let server = FuelService::new_node(Config::local_node()).await.unwrap();
let client = FuelClient::from(server.bound_address);
let tx = Transaction::Script {
gas_price: 0,
gas_limit: 1000000,
maturity: 0,
byte_price: 0,
receipts_root: Default::default(),
script: script_binary, // Pass the compiled script into the tx
script_data: vec![],
inputs: vec![],
outputs: vec![],
witnesses: vec![vec![].into()],
metadata: None,
};
let script = Script::new(tx);
script.call(&client).await
}
#[tokio::test]
async fn check_script_bytecode_hash() {
// Calculate padded script hash (since memory is read in whole words, the bytecode will be padded)
let mut script_bytecode =
std::fs::read("test_projects/script_bytecode/out/release/script_bytecode.bin")
.unwrap()
.to_vec();
let padding = script_bytecode.len() % 8;
script_bytecode.append(&mut vec![0; padding]);
let script_hash = Hasher::hash(&script_bytecode); // This is the hard that must be hard-coded in the predicate
// Run script and get the hash it returns
let path_to_bin = "test_projects/script_bytecode/out/release/script_bytecode.bin";
let return_val = run_compiled_script(path_to_bin).await.unwrap();
let script_hash_from_vm =
unsafe { Bytes32::from_slice_unchecked(return_val[0].data().unwrap()) };
assert_eq!(script_hash_from_vm, script_hash);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/methods/mod.rs | test/src/sdk-harness/test_projects/methods/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "MethodsContract",
abi = "test_artifacts/methods_contract/out/release/methods_contract-abi.json",
));
#[tokio::test]
async fn run_methods_test() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let instance = get_methods_instance(wallet).await;
let result = instance
.methods()
.test_function()
.with_tx_policies(TxPolicies::default().with_script_gas_limit(2757))
.call()
.await
.unwrap();
// Increase the limit above and uncomment the line below to see how many gas is being used
// run with --nocapture
// dbg!(&result);
assert!(result.value);
}
async fn get_methods_instance(wallet: Wallet) -> MethodsContract<Wallet> {
let id = Contract::load_from(
"test_artifacts/methods_contract/out/release/methods_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
MethodsContract::new(id.clone(), wallet)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/option_in_abi/mod.rs | test/src/sdk-harness/test_projects/option_in_abi/mod.rs | use fuels::{prelude::*, types::Bits256};
use std::str::FromStr;
abigen!(Contract(
name = "OptionInAbiTestContract",
abi = "test_projects/option_in_abi/out/release/option_in_abi-abi.json"
));
async fn get_option_in_abi_instance() -> (OptionInAbiTestContract<Wallet>, ContractId) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/option_in_abi/out/release/option_in_abi.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = OptionInAbiTestContract::new(id.clone(), wallet);
(instance, id.into())
}
#[tokio::test]
async fn test_bool() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(true);
let response = contract_methods.bool_test(input).call().await?;
assert_eq!(input, response.value);
let input = Some(false);
let response = contract_methods.bool_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.bool_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u8() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(42);
let response = contract_methods.u8_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.u8_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u16() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(42);
let response = contract_methods.u16_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.u16_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u32() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(42);
let response = contract_methods.u32_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.u32_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_u64() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(42);
let response = contract_methods.u64_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.u64_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_b256() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(Bits256([1u8; 32]));
let response = contract_methods.b256_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.b256_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_struct() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(MyStruct {
first_field: Some(
Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
.unwrap(),
),
second_field: 42,
});
let response = contract_methods.struct_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = Some(MyStruct {
first_field: None,
second_field: 42,
});
let response = contract_methods.struct_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.struct_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_tuple() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some((
Some(
Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
.unwrap(),
),
42,
));
let response = contract_methods.tuple_test(input).call().await?;
assert_eq!(input, response.value);
let input = Some((None, 42));
let response = contract_methods.tuple_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.tuple_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_enum() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(MyEnum::FirstVariant(Some(
Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
.unwrap(),
)));
let response = contract_methods.enum_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = Some(MyEnum::FirstVariant(None));
let response = contract_methods.enum_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = Some(MyEnum::SecondVariant(42));
let response = contract_methods.enum_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.enum_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_array() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some([
Some(
Address::from_str("0x4242424242424242424242424242424242424242424242424242424242424242")
.unwrap(),
),
Some(
Address::from_str("0x6969696969696969696969696969696969696969696969696969696969696969")
.unwrap(),
),
Some(
Address::from_str("0x9999999999999999999999999999999999999999999999999999999999999999")
.unwrap(),
),
]);
let response = contract_methods.array_test(input).call().await?;
assert_eq!(input, response.value);
let input = Some([
None,
Some(
Address::from_str("0x6969696969696969696969696969696969696969696969696969696969696969")
.unwrap(),
),
None,
]);
let response = contract_methods.array_test(input).call().await?;
assert_eq!(input, response.value);
let input = Some([None, None, None]);
let response = contract_methods.array_test(input).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.array_test(input).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_string() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some("fuel".try_into().unwrap());
let response = contract_methods.string_test(input.clone()).call().await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods.string_test(input.clone()).call().await?;
assert_eq!(input, response.value);
Ok(())
}
#[tokio::test]
async fn test_result_in_option() -> Result<()> {
let (instance, _id) = get_option_in_abi_instance().await;
let contract_methods = instance.methods();
let input = Some(Ok("fuel".try_into().unwrap()));
let response = contract_methods
.result_in_option_test(input.clone())
.call()
.await?;
assert_eq!(input, response.value);
let input = Some(Err(SomeError::SomeErrorString("error".try_into().unwrap())));
let response = contract_methods
.result_in_option_test(input.clone())
.call()
.await?;
assert_eq!(input, response.value);
let input = None;
let response = contract_methods
.result_in_option_test(input.clone())
.call()
.await?;
assert_eq!(input, response.value);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/call_frames/mod.rs | test/src/sdk-harness/test_projects/call_frames/mod.rs | use fuels::{prelude::*, types::ContractId};
abigen!(Contract(
name = "CallFramesTestContract",
abi = "test_projects/call_frames/out/release/call_frames-abi.json"
));
async fn get_call_frames_instance() -> (CallFramesTestContract<Wallet>, ContractId) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/call_frames/out/release/call_frames.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = CallFramesTestContract::new(id.clone(), wallet);
(instance, id.into())
}
#[tokio::test]
async fn can_get_id_contract_id_this() {
let (instance, id) = get_call_frames_instance().await;
let result = instance
.methods()
.get_id_contract_id_this()
.call()
.await
.unwrap();
assert_eq!(result.value, id);
}
#[tokio::test]
async fn can_get_code_size() {
let (instance, _id) = get_call_frames_instance().await;
let result = instance.methods().get_code_size().call().await.unwrap();
// Check if codesize is between 1000 and 7000. Arbitrary endpoints, current codesize is 7208
// but the lower bound future proofs against compiler optimizations
dbg!(result.value);
assert!(result.value > 1000 && result.value < 7300);
}
#[tokio::test]
async fn can_get_first_param() {
let (instance, _id) = get_call_frames_instance().await;
let result = instance.methods().get_first_param().call().await.unwrap();
assert_eq!(result.value, 10480);
}
#[tokio::test]
async fn can_get_second_param_u64() {
let (instance, _id) = get_call_frames_instance().await;
let result = instance
.methods()
.get_second_param_u64(101)
.call()
.await
.unwrap();
assert_eq!(result.value, 10508);
}
#[tokio::test]
async fn can_get_second_param_bool() {
let (instance, _id) = get_call_frames_instance().await;
let result = instance.methods().get_second_param_bool(true);
let result = result.call().await.unwrap();
assert!(result.value);
}
#[tokio::test]
async fn can_get_second_param_struct() {
let (instance, _id) = get_call_frames_instance().await;
let expected = TestStruct {
value_0: 42,
value_1: true,
};
let result = instance
.methods()
.get_second_param_struct(expected.clone())
.call()
.await
.unwrap();
assert_eq!(result.value, expected);
}
#[tokio::test]
async fn can_get_second_param_multiple_params() {
let (instance, _id) = get_call_frames_instance().await;
let result = instance
.methods()
.get_second_param_multiple_params(true, 42)
.call()
.await
.unwrap();
assert_eq!(result.value, (true, 42));
}
#[tokio::test]
async fn can_get_second_param_multiple_params2() {
let (instance, _id) = get_call_frames_instance().await;
let expected_struct = TestStruct {
value_0: 42,
value_1: true,
};
let expected_struct2 = TestStruct2 { value: 100 };
let result = instance
.methods()
.get_second_param_multiple_params2(300, expected_struct.clone(), expected_struct2.clone())
.call()
.await
.unwrap();
assert_eq!(result.value, (300, expected_struct, expected_struct2));
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/block/mod.rs | test/src/sdk-harness/test_projects/block/mod.rs | use fuels::{prelude::*, types::Bits256};
use tai64::Tai64;
use tokio::time::{sleep, Duration};
abigen!(Contract(
name = "BlockTestContract",
abi = "test_projects/block/out/release/block-abi.json"
));
async fn get_block_instance() -> (BlockTestContract<Wallet>, ContractId, Provider) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let provider = wallet.provider();
let id = Contract::load_from(
"test_projects/block/out/release/block.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = BlockTestContract::new(id.clone(), wallet.clone());
(instance, id.into(), provider.clone())
}
#[tokio::test]
async fn can_get_block_height() {
let (instance, _id, _) = get_block_instance().await;
let block_0 = instance.methods().get_block_height().call().await.unwrap();
let block_1 = instance.methods().get_block_height().call().await.unwrap();
let block_2 = instance.methods().get_block_height().call().await.unwrap();
// Probably consecutive blocks but we may have multiple tx per block so be conservative to
// guarantee the stability of the test
assert!(block_1.value <= block_0.value + 1);
assert!(block_2.value <= block_1.value + 1);
}
#[tokio::test]
async fn can_get_header_hash_of_block() {
let (instance, _id, _) = get_block_instance().await;
let block_1 = instance.methods().get_block_height().call().await.unwrap();
let _block_2 = instance.methods().get_block_height().call().await.unwrap();
let result = instance
.methods()
.get_block_header_hash(block_1.value)
.call()
.await
.unwrap();
// TODO: when SDK supports getting block-header hash, compare it to hash returned by Sway std::block::block_header_hash()
assert_ne!(
result.value,
Bits256::from_hex_str("0x0000000000000000000000000000000000000000000000000000000000000000")
.unwrap()
);
}
#[tokio::test]
async fn can_get_timestamp() {
let (instance, _id, _) = get_block_instance().await;
let block_0_time = instance.methods().get_timestamp().call().await.unwrap();
let now = Tai64::now();
// This should really be zero in most cases, but be conservative to guarantee the stability of
// the test
assert!(now.0 - block_0_time.value <= 1);
// Wait 1 seconds and request another block
sleep(Duration::from_secs(1)).await;
let block_1_time = instance.methods().get_timestamp().call().await.unwrap();
// The difference should be 1 second in most cases, but be conservative to guarantee the
// stability of the test
assert!(
1 <= block_1_time.value - block_0_time.value
&& block_1_time.value - block_0_time.value <= 2
);
// Wait 2 seconds and request another block
sleep(Duration::from_secs(2)).await;
let block_2_time = instance.methods().get_timestamp().call().await.unwrap();
// The difference should be 2 seconds in most cases, but be conservative to guarantee the
// stability of the test
assert!(
2 <= block_2_time.value - block_1_time.value
&& block_2_time.value - block_1_time.value <= 3
);
}
#[tokio::test]
async fn can_get_timestamp_of_block() {
let (instance, _id, _) = get_block_instance().await;
let block_0 = instance
.methods()
.get_block_and_timestamp()
.call()
.await
.unwrap();
sleep(Duration::from_secs(1)).await;
let block_1 = instance
.methods()
.get_block_and_timestamp()
.call()
.await
.unwrap();
sleep(Duration::from_secs(2)).await;
let block_2 = instance
.methods()
.get_block_and_timestamp()
.call()
.await
.unwrap();
// Check that the result of `timestamp_of_block` matches the recorded result of `timestamp()`
// above called via `get_block_and_timestamp`.
assert_eq!(
instance
.methods()
.get_timestamp_of_block(block_0.value.0)
.call()
.await
.unwrap()
.value,
block_0.value.1
);
assert_eq!(
instance
.methods()
.get_timestamp_of_block(block_1.value.0)
.call()
.await
.unwrap()
.value,
block_1.value.1
);
assert_eq!(
instance
.methods()
.get_timestamp_of_block(block_2.value.0)
.call()
.await
.unwrap()
.value,
block_2.value.1
);
}
#[tokio::test]
async fn can_get_chain_id() {
let (instance, _id, provider) = get_block_instance().await;
let id = instance
.methods()
.get_chain_id()
.call()
.await
.unwrap();
assert_eq!(id.value, *provider.consensus_parameters().await.unwrap().chain_id());
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/messages/mod.rs | test/src/sdk-harness/test_projects/messages/mod.rs | use fuels::{prelude::*, types::Bits256};
abigen!(Contract(
name = "TestMessagesContract",
abi = "test_projects/messages/out/release/messages-abi.json"
));
async fn get_messages_contract_instance() -> (TestMessagesContract<Wallet>, ContractId, Wallet) {
let num_wallets = 1;
let coins_per_wallet = 1;
let amount_per_coin = 1_000_000;
let config = WalletsConfig::new(
Some(num_wallets),
Some(coins_per_wallet),
Some(amount_per_coin),
);
let wallets = launch_custom_provider_and_get_wallets(config, None, None)
.await
.unwrap();
let messages_contract_id = Contract::load_from(
"test_projects/messages/out/release/messages.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallets[0], TxPolicies::default())
.await
.unwrap()
.contract_id;
// Send assets to the contract to be able withdraw via `smo`.
wallets[0]
.force_transfer_to_contract(
messages_contract_id,
amount_per_coin >> 1,
AssetId::BASE,
TxPolicies::default(),
)
.await
.unwrap();
let messages_instance =
TestMessagesContract::new(messages_contract_id.clone(), wallets[0].clone());
(
messages_instance,
messages_contract_id.into(),
wallets[0].clone(),
)
}
#[tokio::test]
async fn can_send_bool_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = true;
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_bool(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(9, message_receipt.len().unwrap()); // smo ID + 1 bytes
assert_eq!(vec![1], message_receipt.data().unwrap()[8..9]);
}
#[tokio::test]
async fn can_send_u8_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = 42u8;
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_u8(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(9, message_receipt.len().unwrap()); // smo ID + 8 bytes
assert_eq!(vec![42], message_receipt.data().unwrap()[8..9]);
}
#[tokio::test]
async fn can_send_u16_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = 42u16;
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_u16(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(16, message_receipt.len().unwrap()); // smo ID + 8 bytes
assert_eq!(
vec![0, 0, 0, 0, 0, 0, 0, 42],
message_receipt.data().unwrap()[8..16]
);
}
#[tokio::test]
async fn can_send_u32_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = 42u32;
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_u32(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(16, message_receipt.len().unwrap()); // smo ID + 8 bytes
assert_eq!(
vec![0, 0, 0, 0, 0, 0, 0, 42],
message_receipt.data().unwrap()[8..16]
);
}
#[tokio::test]
async fn can_send_u64_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = 42u64;
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_u64(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(16, message_receipt.len().unwrap()); // smo ID + 8 bytes
assert_eq!(
vec![0, 0, 0, 0, 0, 0, 0, 42],
message_receipt.data().unwrap()[8..16]
);
}
#[tokio::test]
async fn can_send_b256_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = [1u8; 32];
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_b256(Bits256(*recipient_address), Bits256(message), amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(40, message_receipt.len().unwrap()); // smo ID + 32 bytes
assert_eq!(message.to_vec(), message_receipt.data().unwrap()[8..40]);
}
#[tokio::test]
async fn can_send_struct_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = MyStruct {
first_field: 42,
second_field: 69,
};
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_struct(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(24, message_receipt.len().unwrap()); // smo ID + 16 bytes
assert_eq!(
[
0, 0, 0, 0, 0, 0, 0, 42, // first field
0, 0, 0, 0, 0, 0, 0, 69, // second field
],
message_receipt.data().unwrap()[8..24]
);
}
#[tokio::test]
async fn can_send_enum_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = MyEnum::<Bits256>::SecondVariant(42);
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_enum(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(48, message_receipt.len().unwrap()); // smo ID + 8 bytes (tag) + 32 bytes (largest variant)
assert_eq!(
[
0, 0, 0, 0, 0, 0, 0, 1, // tag
0, 0, 0, 0, 0, 0, 0, 0, // padding
0, 0, 0, 0, 0, 0, 0, 0, // padding
0, 0, 0, 0, 0, 0, 0, 0, // padding
0, 0, 0, 0, 0, 0, 0, 42, // padding
],
message_receipt.data().unwrap()[8..48]
);
}
#[tokio::test]
async fn can_send_array_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = [42, 43, 44];
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_array(Bits256(*recipient_address), message, amount)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(32, message_receipt.len().unwrap()); // smo ID + 24 bytes
assert_eq!(
[
0, 0, 0, 0, 0, 0, 0, 42, // first element
0, 0, 0, 0, 0, 0, 0, 43, // second element
0, 0, 0, 0, 0, 0, 0, 44, // third element
],
message_receipt.data().unwrap()[8..32]
);
}
#[tokio::test]
async fn can_send_string_message() {
let (messages_instance, messages_contract_id, wallet) = get_messages_contract_instance().await;
let recipient_address: Address = wallet.address().into();
let message = "fuel";
let amount = 33u64;
let call_response = messages_instance
.methods()
.send_typed_message_string(
Bits256(*recipient_address),
message.try_into().unwrap(),
amount,
)
.call()
.await
.unwrap();
let message_receipt = call_response
.tx_status
.receipts
.iter()
.find(|&r| matches!(r, fuels::tx::Receipt::MessageOut { .. }))
.unwrap();
assert_eq!(*messages_contract_id, **message_receipt.sender().unwrap());
assert_eq!(&recipient_address, message_receipt.recipient().unwrap());
assert_eq!(amount, message_receipt.amount().unwrap());
assert_eq!(16, message_receipt.len().unwrap()); // smo ID + 4 bytes
assert_eq!(
[
102, // 'f'
117, // 'u'
101, // 'e'
108, // 'l'
],
message_receipt.data().unwrap()[8..12]
);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/time/mod.rs | test/src/sdk-harness/test_projects/time/mod.rs | use fuels::prelude::*;
use tokio::time::{sleep, Duration};
use std::time::{SystemTime, UNIX_EPOCH};
abigen!(Contract(
name = "TimeTestContract",
abi = "test_projects/time/out/release/time-abi.json"
));
async fn get_block_instance() -> (TimeTestContract<Wallet>, ContractId, Provider) {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let provider = wallet.provider();
let id = Contract::load_from(
"test_projects/time/out/release/time.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let instance = TimeTestContract::new(id.clone(), wallet.clone());
(instance, id.into(), provider.clone())
}
#[tokio::test]
async fn can_get_unix_timestamp() {
let (instance, _id, _) = get_block_instance().await;
let block_0_time = instance.methods().get_now().call().await.unwrap();
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_secs();
// This should really be zero in most cases, but be conservative to guarantee the stability of
// the test
assert!(now - block_0_time.value.unix <= 1);
// Wait 1 seconds and request another block
sleep(Duration::from_secs(1)).await;
let block_1_time = instance.methods().get_now().call().await.unwrap();
// The difference should be 1 second in most cases, but be conservative to guarantee the
// stability of the test
assert!(
1 <= block_1_time.value.unix - block_0_time.value.unix
&& block_1_time.value.unix - block_0_time.value.unix <= 2
);
// Wait 2 seconds and request another block
sleep(Duration::from_secs(2)).await;
let block_2_time = instance.methods().get_now().call().await.unwrap();
// The difference should be 2 seconds in most cases, but be conservative to guarantee the
// stability of the test
assert!(
2 <= block_2_time.value.unix - block_1_time.value.unix
&& block_2_time.value.unix - block_1_time.value.unix <= 3
);
}
#[tokio::test]
async fn can_get_unix_timestamp_of_block() {
let (instance, _id, _) = get_block_instance().await;
let block_0 = instance
.methods()
.get_height_and_time()
.call()
.await
.unwrap();
sleep(Duration::from_secs(1)).await;
let block_1 = instance
.methods()
.get_height_and_time()
.call()
.await
.unwrap();
sleep(Duration::from_secs(2)).await;
let block_2 = instance
.methods()
.get_height_and_time()
.call()
.await
.unwrap();
// Check that the result of `get_height_and_time` matches the recorded result of `Time::now()`
// above called via `get_height_and_time`.
assert_eq!(
instance
.methods()
.get_block(block_0.value.0)
.call()
.await
.unwrap()
.value,
block_0.value.1
);
assert_eq!(
instance
.methods()
.get_block(block_1.value.0)
.call()
.await
.unwrap()
.value,
block_1.value.1
);
assert_eq!(
instance
.methods()
.get_block(block_2.value.0)
.call()
.await
.unwrap()
.value,
block_2.value.1
);
}
#[tokio::test]
async fn can_convert_to_unix_time() {
let (instance, _id, _) = get_block_instance().await;
let (time_1, tia64_1) = instance
.methods()
.get_time_and_tia64()
.call()
.await
.unwrap()
.value;
sleep(Duration::from_secs(1)).await;
let (time_2, tia64_2) = instance
.methods()
.get_time_and_tia64()
.call()
.await
.unwrap()
.value;
sleep(Duration::from_secs(2)).await;
let (time_3, tia64_3) = instance
.methods()
.get_time_and_tia64()
.call()
.await
.unwrap()
.value;
assert_eq!(
instance
.methods()
.from_tia64(tia64_1)
.call()
.await
.unwrap()
.value,
time_1
);
assert_eq!(
instance
.methods()
.from_tia64(tia64_2)
.call()
.await
.unwrap()
.value,
time_2
);
assert_eq!(
instance
.methods()
.from_tia64(tia64_3)
.call()
.await
.unwrap()
.value,
time_3
);
}
#[tokio::test]
async fn can_convert_to_tai64_time() {
let (instance, _id, _) = get_block_instance().await;
let (time_1, tia64_1) = instance
.methods()
.get_time_and_tia64()
.call()
.await
.unwrap()
.value;
sleep(Duration::from_secs(1)).await;
let (time_2, tia64_2) = instance
.methods()
.get_time_and_tia64()
.call()
.await
.unwrap()
.value;
sleep(Duration::from_secs(2)).await;
let (time_3, tia64_3) = instance
.methods()
.get_time_and_tia64()
.call()
.await
.unwrap()
.value;
assert_eq!(
instance
.methods()
.into_tai64(time_1)
.call()
.await
.unwrap()
.value,
tia64_1
);
assert_eq!(
instance
.methods()
.into_tai64(time_2)
.call()
.await
.unwrap()
.value,
tia64_2
);
assert_eq!(
instance
.methods()
.into_tai64(time_3)
.call()
.await
.unwrap()
.value,
tia64_3
);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/configurables_in_contract/mod.rs | test/src/sdk-harness/test_projects/configurables_in_contract/mod.rs | use fuels::{prelude::*, types::SizedAsciiString};
// TODO Remove ignore when SDK supports encoding V1 for configurables
// https://github.com/FuelLabs/sway/issues/5727
#[tokio::test]
#[ignore]
async fn contract_uses_default_configurables() -> Result<()> {
abigen!(Contract(
name = "MyContract",
abi =
"test_projects/configurables_in_contract/out/release/configurables_in_contract-abi.json"
));
let wallet = launch_provider_and_get_wallet().await.unwrap();
let contract_id = Contract::load_from(
"test_projects/configurables_in_contract/out/release/configurables_in_contract.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await?
.contract_id;
let contract_instance = MyContract::new(contract_id, wallet.clone());
let response = contract_instance
.methods()
.return_configurables()
.call()
.await?;
let expected_value = (
8u8,
true,
[253u32, 254u32, 255u32],
"fuel".try_into()?,
StructWithGeneric {
field_1: 8u8,
field_2: 16,
},
EnumWithGeneric::VariantOne(true),
Address::new([0u8; 32]),
ContractId::new([0u8; 32]),
);
assert_eq!(response.value, expected_value);
Ok(())
}
// TODO Remove ignore when SDK supports encoding V1 for configurables
// https://github.com/FuelLabs/sway/issues/5727
#[tokio::test]
#[ignore]
async fn contract_configurables() -> Result<()> {
abigen!(Contract(
name = "MyContract",
abi =
"test_projects/configurables_in_contract/out/release/configurables_in_contract-abi.json"
));
let wallet = launch_provider_and_get_wallet().await.unwrap();
let new_str: SizedAsciiString<4> = "FUEL".try_into()?;
let new_struct = StructWithGeneric {
field_1: 16u8,
field_2: 32,
};
let new_enum = EnumWithGeneric::VariantTwo;
let new_address = Address::new([1u8; 32]);
let new_contract_id = ContractId::new([1u8; 32]);
let configurables = MyContractConfigurables::default()
.with_STR_4(new_str.clone())?
.with_STRUCT(new_struct.clone())?
.with_ENUM(new_enum.clone())?
.with_ADDRESS(new_address.clone())?
.with_MY_CONTRACT_ID(new_contract_id.clone())?;
let contract_id = Contract::load_from(
"test_projects/configurables_in_contract/out/release/configurables_in_contract.bin",
LoadConfiguration::default().with_configurables(configurables),
)?
.deploy(&wallet, TxPolicies::default())
.await?
.contract_id;
let contract_instance = MyContract::new(contract_id, wallet.clone());
let response = contract_instance
.methods()
.return_configurables()
.call()
.await?;
let expected_value = (
8u8,
true,
[253u32, 254u32, 255u32],
new_str,
new_struct,
new_enum,
new_address,
new_contract_id,
);
assert_eq!(response.value, expected_value);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/configurables_in_script/mod.rs | test/src/sdk-harness/test_projects/configurables_in_script/mod.rs | use fuels::{prelude::*, types::SizedAsciiString};
// TODO Remove ignore when SDK supports encoding V1 for configurables
// https://github.com/FuelLabs/sway/issues/5727
#[tokio::test]
#[ignore]
async fn script_uses_default_configurables() -> Result<()> {
abigen!(Script(
name = "MyScript",
abi = "test_projects/configurables_in_script/out/release/configurables_in_script-abi.json"
));
let wallet = launch_provider_and_get_wallet().await.unwrap();
let bin_path = "test_projects/configurables_in_script/out/release/configurables_in_script.bin";
let instance = MyScript::new(wallet, bin_path);
let response = instance.main().call().await?;
let expected_value = (
8u8,
true,
[253u32, 254u32, 255u32],
"fuel".try_into()?,
StructWithGeneric {
field_1: 8u8,
field_2: 16,
},
EnumWithGeneric::VariantOne(true),
);
assert_eq!(response.value, expected_value);
Ok(())
}
// TODO Remove ignore when SDK supports encoding V1 for configurables
// https://github.com/FuelLabs/sway/issues/5727
#[tokio::test]
#[ignore]
async fn script_configurables() -> Result<()> {
abigen!(Script(
name = "MyScript",
abi = "test_projects/configurables_in_script/out/release/configurables_in_script-abi.json"
));
let wallet = launch_provider_and_get_wallet().await.unwrap();
let bin_path = "test_projects/configurables_in_script/out/release/configurables_in_script.bin";
let instance = MyScript::new(wallet, bin_path);
let new_str: SizedAsciiString<4> = "FUEL".try_into()?;
let new_struct = StructWithGeneric {
field_1: 16u8,
field_2: 32,
};
let new_enum = EnumWithGeneric::VariantTwo;
let configurables = MyScriptConfigurables::default()
.with_STR_4(new_str.clone())?
.with_STRUCT(new_struct.clone())?
.with_ENUM(new_enum.clone())?;
let response = instance
.with_configurables(configurables)
.main()
.call()
.await?;
let expected_value = (
8u8,
true,
[253u32, 254u32, 255u32],
new_str,
new_struct,
new_enum,
);
pretty_assertions::assert_eq!(response.value, expected_value);
Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/script_data/mod.rs | test/src/sdk-harness/test_projects/script_data/mod.rs | use assert_matches::assert_matches;
use fuels::{prelude::*, tx::Receipt, types::transaction_builders::ScriptTransactionBuilder};
async fn call_script(script_data: Vec<u8>) -> Result<Vec<Receipt>> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let provider = wallet.provider();
let wallet_coins = wallet
.get_asset_inputs_for_amount(
AssetId::default(),
wallet
.get_asset_balance(&AssetId::default())
.await
.unwrap()
.into(),
None,
)
.await
.unwrap();
let mut tx =
ScriptTransactionBuilder::prepare_transfer(wallet_coins, vec![], Default::default())
.with_script(std::fs::read(
"test_projects/script_data/out/release/script_data.bin",
)?)
.with_script_data(script_data)
.enable_burn(true);
tx.add_signer(wallet.signer().clone()).unwrap();
let tx = tx.build(provider).await?;
let provider = wallet.provider();
let tx_status = provider.send_transaction_and_await_commit(tx).await.unwrap();
tx_status
.take_receipts_checked(None)
}
#[tokio::test]
async fn script_data() {
let correct_hex =
hex::decode("ef86afa9696cf0dc6385e2c407a6e159a1103cefb7e2ae0636fb33d3cb2a9e4a").unwrap();
let call_result = call_script(correct_hex.clone()).await;
assert_matches!(call_result, Ok(_));
let receipts = call_result.unwrap();
assert_eq!(correct_hex, receipts[0].data().unwrap());
let bad_hex =
hex::decode("bad6afa9696cf0dc6385e2c407a6e159a1103cefb7e2ae0636fb33d3cb2a9e4a").unwrap();
let call_result = call_script(bad_hex).await;
assert_matches!(call_result, Err(_));
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/exponentiation/mod.rs | test/src/sdk-harness/test_projects/exponentiation/mod.rs | use fuels::prelude::*;
use fuels::types::ContractId;
abigen!(Contract(
name = "TestPowContract",
abi = "test_artifacts/pow/out/release/pow-abi.json"
));
#[tokio::test]
#[should_panic(expected = "ArithmeticOverflow")]
async fn overflowing_pow_u64_panics() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u64_overflow(100u64, 100u32)
.call()
.await
.unwrap();
}
#[tokio::test]
// TODO won't overflow until https://github.com/FuelLabs/fuel-specs/issues/90 lands
// #[should_panic(expected = "ArithmeticOverflow")]
// Temporary fix in: https://github.com/FuelLabs/sway/pull/6340
#[should_panic]
async fn overflowing_pow_u32_panics() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u32_overflow(10u32, 11u32)
.call()
.await
.unwrap();
}
#[tokio::test]
#[should_panic(expected = "ArithmeticOverflow")]
async fn overflowing_pow_u32_panics_max() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u32_overflow(u32::MAX, u32::MAX)
.call()
.await
.unwrap();
}
#[tokio::test]
// TODO won't overflow until https://github.com/FuelLabs/fuel-specs/issues/90 lands
// #[should_panic(expected = "ArithmeticOverflow")]
// Temporary fix in: https://github.com/FuelLabs/sway/pull/6340
#[should_panic]
async fn overflowing_pow_u16_panics() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u16_overflow(10u16, 5u32)
.call()
.await
.unwrap();
}
#[tokio::test]
#[should_panic(expected = "ArithmeticOverflow")]
async fn overflowing_pow_u16_panics_max() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u16_overflow(u16::MAX, u32::MAX)
.call()
.await
.unwrap();
}
#[tokio::test]
// TODO won't overflow until https://github.com/FuelLabs/fuel-specs/issues/90 lands
// #[should_panic(expected = "ArithmeticOverflow")]
// Temporary fix in: https://github.com/FuelLabs/sway/pull/6340
#[should_panic]
async fn overflowing_pow_u8_panics() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u8_overflow(10u8, 3u32)
.call()
.await
.unwrap();
}
#[tokio::test]
#[should_panic(expected = "ArithmeticOverflow")]
async fn overflowing_pow_u8_panics_max() {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let (pow_instance, _) = get_pow_test_instance(wallet).await;
pow_instance
.methods()
.u8_overflow(u8::MAX, u32::MAX)
.call()
.await
.unwrap();
}
async fn get_pow_test_instance(wallet: Wallet) -> (TestPowContract<Wallet>, ContractId) {
let pow_id = Contract::load_from(
"test_artifacts/pow/out/release/pow.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
let pow_instance = TestPowContract::new(pow_id.clone(), wallet);
(pow_instance, pow_id.into())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/test/src/sdk-harness/test_projects/storage_vec_to_vec/mod.rs | test/src/sdk-harness/test_projects/storage_vec_to_vec/mod.rs | use fuels::prelude::*;
abigen!(Contract(
name = "TestStorageVecToVecContract",
abi = "test_projects/storage_vec_to_vec/out/release/storage_vec_to_vec-abi.json",
));
async fn test_storage_vec_to_vec_instance() -> TestStorageVecToVecContract<Wallet> {
let wallet = launch_provider_and_get_wallet().await.unwrap();
let id = Contract::load_from(
"test_projects/storage_vec_to_vec/out/release/storage_vec_to_vec.bin",
LoadConfiguration::default(),
)
.unwrap()
.deploy(&wallet, TxPolicies::default())
.await
.unwrap()
.contract_id;
TestStorageVecToVecContract::new(id.clone(), wallet)
}
#[tokio::test]
async fn test_conversion_u64() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![5, 7, 9, 11];
let _ = instance
.methods()
.store_vec_u64(test_vec.clone())
.call()
.await;
let returned_vec = instance
.methods()
.read_vec_u64()
.call()
.await
.unwrap()
.value;
assert_eq!(returned_vec.len(), 4);
assert_eq!(returned_vec, test_vec);
}
#[tokio::test]
async fn test_push_u64() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![5, 7, 9, 11];
let _ = instance
.methods()
.store_vec_u64(test_vec.clone())
.call()
.await;
let _ = instance.methods().push_vec_u64(13).call().await;
let returned_vec = instance
.methods()
.read_vec_u64()
.call()
.await
.unwrap()
.value;
let mut expected_vec = test_vec;
expected_vec.push(13);
assert_eq!(returned_vec.len(), 5);
assert_eq!(returned_vec, expected_vec);
}
#[tokio::test]
async fn test_pop_u64() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![5, 7, 9, 11];
let _ = instance
.methods()
.store_vec_u64(test_vec.clone())
.call()
.await;
assert_eq!(
11,
instance.methods().pop_vec_u64().call().await.unwrap().value
);
let returned_vec = instance
.methods()
.read_vec_u64()
.call()
.await
.unwrap()
.value;
let mut expected_vec = test_vec;
expected_vec.pop();
assert_eq!(returned_vec.len(), 3);
assert_eq!(returned_vec, expected_vec);
}
#[tokio::test]
async fn test_conversion_struct() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![
TestStruct {
val_1: 0,
val_2: 1,
val_3: 2,
},
TestStruct {
val_1: 1,
val_2: 2,
val_3: 3,
},
TestStruct {
val_1: 2,
val_2: 3,
val_3: 4,
},
TestStruct {
val_1: 3,
val_2: 4,
val_3: 5,
},
];
let _ = instance
.methods()
.store_vec_struct(test_vec.clone())
.call()
.await;
let returned_vec = instance
.methods()
.read_vec_struct()
.call()
.await
.unwrap()
.value;
assert_eq!(returned_vec.len(), 4);
assert_eq!(returned_vec, test_vec);
}
#[tokio::test]
async fn test_push_struct() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![
TestStruct {
val_1: 0,
val_2: 1,
val_3: 2,
},
TestStruct {
val_1: 1,
val_2: 2,
val_3: 3,
},
TestStruct {
val_1: 2,
val_2: 3,
val_3: 4,
},
TestStruct {
val_1: 3,
val_2: 4,
val_3: 5,
},
];
let test_struct = TestStruct {
val_1: 4,
val_2: 5,
val_3: 6,
};
let _ = instance
.methods()
.store_vec_struct(test_vec.clone())
.call()
.await;
let _ = instance
.methods()
.push_vec_struct(test_struct.clone())
.call()
.await;
let returned_vec = instance
.methods()
.read_vec_struct()
.call()
.await
.unwrap()
.value;
let mut expected_vec = test_vec;
expected_vec.push(test_struct);
assert_eq!(returned_vec.len(), 5);
assert_eq!(returned_vec, expected_vec);
}
#[tokio::test]
async fn test_pop_struct() {
let instance = test_storage_vec_to_vec_instance().await;
let test_struct = TestStruct {
val_1: 3,
val_2: 4,
val_3: 5,
};
let test_vec = vec![
TestStruct {
val_1: 0,
val_2: 1,
val_3: 2,
},
TestStruct {
val_1: 1,
val_2: 2,
val_3: 3,
},
TestStruct {
val_1: 2,
val_2: 3,
val_3: 4,
},
test_struct.clone(),
];
let _ = instance
.methods()
.store_vec_struct(test_vec.clone())
.call()
.await;
assert_eq!(
test_struct,
instance
.methods()
.pop_vec_struct()
.call()
.await
.unwrap()
.value
);
let returned_vec = instance
.methods()
.read_vec_struct()
.call()
.await
.unwrap()
.value;
let mut expected_vec = test_vec;
expected_vec.pop();
assert_eq!(returned_vec.len(), 3);
assert_eq!(returned_vec, expected_vec);
}
#[tokio::test]
async fn test_conversion_u8() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![5u8, 7u8, 9u8, 11u8];
let _ = instance
.methods()
.store_vec_u8(test_vec.clone())
.call()
.await;
let returned_vec = instance.methods().read_vec_u8().call().await.unwrap().value;
assert_eq!(returned_vec.len(), 4);
assert_eq!(returned_vec, test_vec);
}
#[tokio::test]
async fn test_push_u8() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![5u8, 7u8, 9u8, 11u8];
let _ = instance
.methods()
.store_vec_u8(test_vec.clone())
.call()
.await;
let _ = instance.methods().push_vec_u8(13u8).call().await;
let returned_vec = instance.methods().read_vec_u8().call().await.unwrap().value;
let mut expected_vec = test_vec;
expected_vec.push(13u8);
assert_eq!(returned_vec.len(), 5);
assert_eq!(returned_vec, expected_vec);
}
#[tokio::test]
async fn test_pop_u8() {
let instance = test_storage_vec_to_vec_instance().await;
let test_vec = vec![5u8, 7u8, 9u8, 11u8];
let _ = instance
.methods()
.store_vec_u8(test_vec.clone())
.call()
.await;
assert_eq!(
11u8,
instance.methods().pop_vec_u8().call().await.unwrap().value
);
let returned_vec = instance.methods().read_vec_u8().call().await.unwrap().value;
let mut expected_vec = test_vec;
expected_vec.pop();
assert_eq!(returned_vec.len(), 3);
assert_eq!(returned_vec, expected_vec);
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.