repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/swayfmt/src/module/mod.rs | swayfmt/src/module/mod.rs | use crate::{
comments::write_comments,
formatter::*,
utils::map::byte_span::{self, ByteSpan, LeafSpans},
};
use std::fmt::Write;
use sway_ast::{
keywords::{
ContractToken, Keyword, LibraryToken, PredicateToken, ScriptToken, SemicolonToken, Token,
},
Item, ItemKind, Module, ModuleKind,
};
use sway_types::Spanned;
pub(crate) mod item;
pub(crate) mod submodule;
impl Format for Module {
fn format(
&self,
formatted_code: &mut FormattedCode,
formatter: &mut Formatter,
) -> Result<(), FormatterError> {
write_comments(formatted_code, 0..self.span().start(), formatter)?;
self.kind.format(formatted_code, formatter)?;
writeln!(formatted_code, "{}", SemicolonToken::AS_STR)?;
// Format comments between module kind declaration and rest of items
if !self.items.is_empty() {
write_comments(
formatted_code,
0..self.items.first().unwrap().span().start(),
formatter,
)?;
}
let iter = self.items.iter();
let mut prev_item: Option<&Item> = None;
for item in iter.clone() {
if let Some(prev_item) = prev_item {
write_comments(
formatted_code,
prev_item.span().end()..item.span().start(),
formatter,
)?;
}
item.format(formatted_code, formatter)?;
if let ItemKind::Submodule { .. } = item.value {
// Do not print a newline after a submodule
} else {
writeln!(formatted_code)?;
}
prev_item = Some(item);
}
if let Some(prev_item) = prev_item {
write_comments(
formatted_code,
prev_item.span().end()..self.span().end(),
formatter,
)?;
}
Ok(())
}
}
impl Format for ModuleKind {
fn format(
&self,
formatted_code: &mut FormattedCode,
_formatter: &mut Formatter,
) -> Result<(), FormatterError> {
match self {
ModuleKind::Script { script_token: _ } => {
write!(formatted_code, "{}", ScriptToken::AS_STR)?
}
ModuleKind::Contract { contract_token: _ } => {
write!(formatted_code, "{}", ContractToken::AS_STR)?
}
ModuleKind::Predicate { predicate_token: _ } => {
write!(formatted_code, "{}", PredicateToken::AS_STR)?
}
ModuleKind::Library { library_token: _ } => {
write!(formatted_code, "{}", LibraryToken::AS_STR)?;
}
};
Ok(())
}
}
impl LeafSpans for Module {
fn leaf_spans(&self) -> Vec<ByteSpan> {
let mut collected_spans = vec![byte_span::STARTING_BYTE_SPAN];
collected_spans.append(&mut self.kind.leaf_spans());
collected_spans.push(ByteSpan::from(self.semicolon_token.span()));
collected_spans.append(&mut self.items.leaf_spans());
collected_spans
}
}
impl LeafSpans for ModuleKind {
fn leaf_spans(&self) -> Vec<ByteSpan> {
match self {
ModuleKind::Script { script_token } => {
vec![ByteSpan::from(script_token.span())]
}
ModuleKind::Contract { contract_token } => {
vec![ByteSpan::from(contract_token.span())]
}
ModuleKind::Predicate { predicate_token } => {
vec![ByteSpan::from(predicate_token.span())]
}
ModuleKind::Library { library_token } => {
vec![ByteSpan::from(library_token.span())]
}
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/swayfmt/src/module/item.rs | swayfmt/src/module/item.rs | use crate::{
formatter::*,
utils::map::byte_span::{ByteSpan, LeafSpans},
};
use sway_ast::ItemKind::{self, *};
impl Format for ItemKind {
fn format(
&self,
formatted_code: &mut FormattedCode,
formatter: &mut Formatter,
) -> Result<(), FormatterError> {
match self {
Submodule(item_mod) => item_mod.format(formatted_code, formatter),
Use(item_use) => item_use.format(formatted_code, formatter),
Struct(item_struct) => item_struct.format(formatted_code, formatter),
Enum(item_enum) => item_enum.format(formatted_code, formatter),
Fn(item_fn) => item_fn.format(formatted_code, formatter),
Trait(item_trait) => item_trait.format(formatted_code, formatter),
Impl(item_impl) => item_impl.format(formatted_code, formatter),
Abi(item_abi) => item_abi.format(formatted_code, formatter),
Const(item_const) => item_const.format(formatted_code, formatter),
Storage(item_storage) => item_storage.format(formatted_code, formatter),
Configurable(item_configurable) => item_configurable.format(formatted_code, formatter),
TypeAlias(item_type_alias) => item_type_alias.format(formatted_code, formatter),
Error(_, _) => Ok(()),
}
}
}
impl LeafSpans for ItemKind {
fn leaf_spans(&self) -> Vec<ByteSpan> {
match self {
Submodule(item_mod) => item_mod.leaf_spans(),
Struct(item_struct) => item_struct.leaf_spans(),
Enum(item_enum) => item_enum.leaf_spans(),
Fn(item_fn) => item_fn.leaf_spans(),
Abi(item_abi) => item_abi.leaf_spans(),
Const(item_const) => item_const.leaf_spans(),
Storage(item_storage) => item_storage.leaf_spans(),
Trait(item_trait) => item_trait.leaf_spans(),
Impl(item_impl) => item_impl.leaf_spans(),
Use(item_use) => item_use.leaf_spans(),
Configurable(item_configurable) => item_configurable.leaf_spans(),
TypeAlias(item_type_alias) => item_type_alias.leaf_spans(),
Error(spans, _) => {
vec![sway_types::Span::join_all(spans.iter().cloned()).into()]
}
}
}
}
pub trait ItemLenChars {
fn len_chars(&self) -> Result<usize, FormatterError>;
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/swayfmt/tests/mod.rs | swayfmt/tests/mod.rs | use indoc::indoc;
use swayfmt::{config::user_def::FieldAlignment, Formatter};
use test_macros::assert_eq_pretty;
/// Takes a configured formatter as input and formats a given input and checks the actual output against an
/// expected output. There are two format passes to ensure that the received output does not change on a second pass.
fn check_with_formatter(unformatted: &str, expected: &str, formatter: &mut Formatter) {
let first_formatted = Formatter::format(formatter, unformatted.into()).unwrap();
assert_eq_pretty!(first_formatted, expected);
let second_formatted = Formatter::format(formatter, first_formatted.as_str().into()).unwrap();
assert_eq_pretty!(second_formatted, first_formatted);
}
/// Formats a given input and checks the actual output against an expected
/// output by calling check_with_formatter() with a default Formatter as input.
fn check(unformatted: &str, expected: &str) {
let mut formatter = Formatter::default();
check_with_formatter(unformatted, expected, &mut formatter);
}
#[test]
fn module_doc_comments_persist() {
check(
indoc! {r#"
//! this is a module level doc comment
library;
"#},
indoc! {r#"
//! this is a module level doc comment
library;
"#},
)
}
#[test]
fn conserve_pub_mod() {
check(
indoc! {r#"
contract;
pub mod foo;
"#},
indoc! {r#"
contract;
pub mod foo;
"#},
)
}
#[test]
fn const_spacing() {
check(
indoc! {r#"
contract;
pub const TEST:u16=10;
"#},
indoc! {r#"
contract;
pub const TEST: u16 = 10;
"#},
)
}
#[test]
fn struct_alignment() {
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(40);
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo: u64,
baz : bool,
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo : u64,
baz : bool,
}
"#},
&mut formatter,
);
}
#[test]
#[ignore = "Bug in `swayfmt`. Activate this test once https://github.com/FuelLabs/sway/issues/6805 is fixed."]
fn struct_alignment_without_trailing_comma() {
// The last struct field does not have trailing comma.
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(40);
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo: u64,
baz : bool
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo : u64,
baz : bool,
}
"#},
&mut formatter,
);
}
#[test]
fn struct_alignment_with_public_fields() {
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(40);
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo: u64,
pub baz : bool,
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo : u64,
pub baz : bool,
}
"#},
&mut formatter,
);
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbazfoo: u64,
baz : bool,
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbazfoo : u64,
baz : bool,
}
"#},
&mut formatter,
);
}
#[test]
#[ignore = "Bug in `swayfmt`. Activate this test once https://github.com/FuelLabs/sway/issues/6805 is fixed."]
fn struct_alignment_with_public_fields_without_trailing_comma() {
// The last struct field does not have trailing comma.
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(40);
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo: u64,
pub baz : bool
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
barbazfoo : u64,
pub baz : bool,
}
"#},
&mut formatter,
);
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbazfoo: u64,
baz : bool
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbazfoo : u64,
baz : bool,
}
"#},
&mut formatter,
);
}
#[test]
fn struct_public_fields() {
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::Off;
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbaz: T,
foo: u64,
pub baz : bool,
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbaz: T,
foo: u64,
pub baz: bool,
}
"#},
&mut formatter,
);
}
#[test]
fn struct_public_fields_without_trailing_comma() {
// The last struct field does not have trailing comma.
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::Off;
check_with_formatter(
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbaz: T,
foo: u64,
pub baz : bool
}
"#},
indoc! {r#"
contract;
pub struct Foo<T, P> {
pub barbaz: T,
foo: u64,
pub baz: bool,
}
"#},
&mut formatter,
);
}
#[test]
fn struct_add_ending_comma() {
check(
indoc! {r#"
contract;
pub struct Foo {
bar: u64,
baz: bool
}
"#},
indoc! {r#"
contract;
pub struct Foo {
bar: u64,
baz: bool,
}
"#},
);
}
#[test]
fn enum_without_variant_alignment() {
check(
indoc! {r#"
contract;
enum Color {
Blue: (), Green: (),
Red: (),
Silver: () ,
Grey: () , }
"#},
indoc! {r#"
contract;
enum Color {
Blue: (),
Green: (),
Red: (),
Silver: (),
Grey: (),
}
"#},
);
}
#[test]
fn enum_with_variant_alignment() {
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(20);
check_with_formatter(
indoc! {r#"
contract;
enum Color {
Blue: (), Green: (),
Red: (),
Silver: () ,
Grey: () , }
"#},
indoc! {r#"
contract;
enum Color {
Blue : (),
Green : (),
Red : (),
Silver : (),
Grey : (),
}
"#},
&mut formatter,
);
}
#[test]
fn enum_without_variant_alignment_without_trailing_comma() {
// The last enum variant does not have trailing comma.
check(
indoc! {r#"
contract;
enum Color {
Blue: (), Green : (),
Red : (),
Silver: () ,
Grey: () }
"#},
indoc! {r#"
contract;
enum Color {
Blue: (),
Green: (),
Red: (),
Silver: (),
Grey: (),
}
"#},
);
}
#[test]
#[ignore = "Bug in `swayfmt`. Activate this test once https://github.com/FuelLabs/sway/issues/6805 is fixed."]
fn enum_with_variant_alignment_without_trailing_comma() {
// The last enum variant does not have trailing comma.
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(20);
check_with_formatter(
indoc! {r#"
contract;
enum Color {
Blue: (), Green : (),
Red : (),
Silver: () ,
Grey: () }
"#},
indoc! {r#"
contract;
enum Color {
Blue : (),
Green : (),
Red : (),
Silver : (),
Grey : (),
}
"#},
&mut formatter,
);
}
#[test]
fn configurable_without_alignment() {
check(
indoc! {r#"
contract;
configurable {
Blue: u64 = 0, Green: u64 = 0,
Red: u64=0,
Silver: u64= 0,
Grey: u64 =0, }
"#},
indoc! {r#"
contract;
configurable {
Blue: u64 = 0,
Green: u64 = 0,
Red: u64 = 0,
Silver: u64 = 0,
Grey: u64 = 0,
}
"#},
);
}
#[test]
fn configurable_with_alignment() {
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(20);
check_with_formatter(
indoc! {r#"
contract;
configurable {
Blue: u64 = 0, Green: u64 = 0,
Red: u64=0,
Silver: u64= 0,
Grey: u64 =0, }
"#},
indoc! {r#"
contract;
configurable {
Blue : u64 = 0,
Green : u64 = 0,
Red : u64 = 0,
Silver : u64 = 0,
Grey : u64 = 0,
}
"#},
&mut formatter,
);
}
#[test]
fn configurable_without_alignment_without_trailing_comma() {
// The last configurable does not have trailing comma.
check(
indoc! {r#"
contract;
configurable {
Blue: u64 = 0, Green: u64 = 0,
Red: u64=0,
Silver: u64= 0,
Grey: u64 =0 }
"#},
indoc! {r#"
contract;
configurable {
Blue: u64 = 0,
Green: u64 = 0,
Red: u64 = 0,
Silver: u64 = 0,
Grey: u64 = 0,
}
"#},
);
}
#[test]
#[ignore = "Bug in `swayfmt`. Activate this test once https://github.com/FuelLabs/sway/issues/6805 is fixed."]
fn configurable_with_alignment_without_trailing_comma() {
// The last configurable does not have trailing comma.
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(20);
check_with_formatter(
indoc! {r#"
contract;
configurable {
Blue: u64 = 0, Green: u64 = 0,
Red: u64=0,
Silver: u64= 0,
Grey: u64 =0 }
"#},
indoc! {r#"
contract;
configurable {
Blue : u64 = 0,
Green : u64 = 0,
Red : u64 = 0,
Silver : u64 = 0,
Grey : u64 = 0,
}
"#},
&mut formatter,
);
}
#[test]
fn item_abi_with_generics_and_attributes() {
check(
indoc! {r#"
contract;
abi StorageMapExample {
#[storage(write)]fn insert_into_map1(key: u64, value: u64);
fn hello(key: u64, value: u64);
}"#},
indoc! {r#"
contract;
abi StorageMapExample {
#[storage(write)]
fn insert_into_map1(key: u64, value: u64);
fn hello(key: u64, value: u64);
}
"#},
);
}
#[test]
fn multi_items() {
check(
indoc! {r#"
contract;
pub const TEST: u16 = 10;
pub const TEST1: u16 = 10;"#},
indoc! {r#"
contract;
pub const TEST: u16 = 10;
pub const TEST1: u16 = 10;
"#},
);
}
#[test]
fn ty_formatting() {
check(
indoc! {r#"
contract;
enum TestTy {
Infer:
_,
Array : [u8;
40],
String: str[
4
],
PathType : root::
example::
some_type,
TupleNil: (),
Tuple: ( u64,
u32
),
}"#},
indoc! {r#"
contract;
enum TestTy {
Infer: _,
Array: [u8; 40],
String: str[4],
PathType: root::example::some_type,
TupleNil: (),
Tuple: (u64, u32),
}
"#},
);
}
#[test]
fn storage_without_alignment() {
check(
indoc! {r#"
contract;
struct Type1 {
foo: u64,
}
struct Type2 {
bar: u64,
}
storage {
var1: Type1=Type1{ foo: 8 },
var2: Type2=Type2{ bar: 9 },
ns1 { var3: u64 = 1, ns2 { var4: u64 = 1 } },
}
"#},
indoc! {r#"
contract;
struct Type1 {
foo: u64,
}
struct Type2 {
bar: u64,
}
storage {
var1: Type1 = Type1 { foo: 8 },
var2: Type2 = Type2 { bar: 9 },
ns1 {
var3: u64 = 1,
ns2 {
var4: u64 = 1,
},
},
}
"#},
);
}
#[test]
fn storage_with_alignment() {
let mut formatter = Formatter::default();
formatter.config.structures.field_alignment = FieldAlignment::AlignFields(50);
check_with_formatter(
indoc! {r#"
contract;
struct Type1 {
foo: u64,
}
struct Type2 {
bar: u64,
}
storage {
long_var_name: Type1=Type1{ foo: 8 },
var2: Type2=Type2{ bar: 9 },
ns1 { var3: u64 = 1, ns2 { var4: u64 = 1, }, }, var5: u64 = 1
}
"#},
indoc! {r#"
contract;
struct Type1 {
foo : u64,
}
struct Type2 {
bar : u64,
}
storage {
long_var_name : Type1 = Type1 { foo: 8 },
var2 : Type2 = Type2 { bar: 9 },
ns1 {
var3 : u64 = 1,
ns2 {
var4 : u64 = 1,
},
},
var5 : u64 = 1
}
"#},
&mut formatter,
);
}
#[test]
fn storage_initializer() {
check(
indoc! {r#"
contract;
struct Type1 {
x: u64,
y: u64,
}
struct Type2 {
w: b256,
z: bool,
}
storage {
var1: Type1 = Type1 {
x: 0,
y:
0,
},
var2: Type2 = Type2 { w: 0x0000000000000000000000000000000000000000000000000000000000000000,z: false,
},
}"#},
indoc! {r#"
contract;
struct Type1 {
x: u64,
y: u64,
}
struct Type2 {
w: b256,
z: bool,
}
storage {
var1: Type1 = Type1 { x: 0, y: 0 },
var2: Type2 = Type2 {
w: 0x0000000000000000000000000000000000000000000000000000000000000000,
z: false,
},
}
"#},
);
}
#[test]
fn item_fn() {
check(
indoc! {r#"
contract;
pub fn hello( person: String ) -> String {let greeting = 42;greeting.to_string()}
fn goodbye() -> usize {let farewell: usize = 5; farewell }"#},
indoc! {r#"
contract;
pub fn hello(person: String) -> String {
let greeting = 42;
greeting.to_string()
}
fn goodbye() -> usize {
let farewell: usize = 5;
farewell
}
"#},
);
}
#[test]
fn same_line_where() {
check(
indoc! {r#"
contract;
pub fn hello( person: String ) -> String where T: Eq,{let greeting = 42;greeting.to_string()}"#},
indoc! {r#"
contract;
pub fn hello(person: String) -> String
where
T: Eq,
{
let greeting = 42;
greeting.to_string()
}
"#},
);
}
#[test]
fn trait_and_super_trait() {
check(
indoc! {r#"
library;
trait Person{ fn name( self )->String;fn age( self )->usize; }
trait Student:Person {fn university(self) -> String;}
trait Programmer {fn fav_language(self) -> String;}
trait CompSciStudent: Programmer+Student {fn git_username(self) -> String;}
trait TraitWithGenerics<T> where T: String {fn from(b: T) -> Self;}"#},
indoc! {r#"
library;
trait Person {
fn name(self) -> String;
fn age(self) -> usize;
}
trait Student: Person {
fn university(self) -> String;
}
trait Programmer {
fn fav_language(self) -> String;
}
trait CompSciStudent: Programmer + Student {
fn git_username(self) -> String;
}
trait TraitWithGenerics<T>
where
T: String,
{
fn from(b: T) -> Self;
}
"#},
);
}
#[test]
fn method_calls() {
let mut formatter = Formatter::default();
formatter.config.structures.small_structures_single_line = true;
formatter.config.whitespace.max_width = 220;
check_with_formatter(
indoc! {r#"
script;
struct Opts {
gas: u64,
coins: u64,
id: ContractId,
}
fn main( ) -> bool{
let default_gas = 1_000_000_000_000 ;let fuelcoin_id = ContractId::from(0x018f59fe434b323a5054e7bb41de983f4926a3c5d3e4e1f9f33b5f0f0e611889);
let balance_test_id = ContractId :: from( 0x597e5ddb1a6bec92a96a73e4f0bc6f6e3e7b21f5e03e1c812cd63cffac480463 ) ;
let fuel_coin = abi( TestFuelCoin, fuelcoin_id.into( ) ) ;
assert(fuelcoin_balance == 0);
fuel_coin.mint {
gas: default_gas
}
(11);
fuelcoin_balance = balance_of(fuelcoin_id, fuelcoin_id);
assert( fuelcoin_balance == 11 ) ;
fuel_coin.burn {
gas: default_gas
}
(7);
fuelcoin_balance = balance_of(fuelcoin_id, fuelcoin_id);
assert(fuelcoin_balance == 4);
fuel_coin.force_transfer {
gas: default_gas
}
(3, fuelcoin_id, balance_test_id);
fuelcoin_balance = balance_of(fuelcoin_id, fuelcoin_id);
let balance_test_contract_balance = balance_of(fuelcoin_id, balance_test_id);
assert(fuelcoin_balance == 1);
assert(balance_test_contract_balance == 3);
true
}"#},
indoc! {r#"
script;
struct Opts {
gas: u64,
coins: u64,
id: ContractId,
}
fn main() -> bool {
let default_gas = 1_000_000_000_000;
let fuelcoin_id = ContractId::from(0x018f59fe434b323a5054e7bb41de983f4926a3c5d3e4e1f9f33b5f0f0e611889);
let balance_test_id = ContractId::from(0x597e5ddb1a6bec92a96a73e4f0bc6f6e3e7b21f5e03e1c812cd63cffac480463);
let fuel_coin = abi(TestFuelCoin, fuelcoin_id.into());
assert(fuelcoin_balance == 0);
fuel_coin.mint { gas: default_gas }(11);
fuelcoin_balance = balance_of(fuelcoin_id, fuelcoin_id);
assert(fuelcoin_balance == 11);
fuel_coin.burn { gas: default_gas }(7);
fuelcoin_balance = balance_of(fuelcoin_id, fuelcoin_id);
assert(fuelcoin_balance == 4);
fuel_coin.force_transfer {
gas: default_gas,
}(3, fuelcoin_id, balance_test_id);
fuelcoin_balance = balance_of(fuelcoin_id, fuelcoin_id);
let balance_test_contract_balance = balance_of(fuelcoin_id, balance_test_id);
assert(fuelcoin_balance == 1);
assert(balance_test_contract_balance == 3);
true
}
"#},
&mut formatter,
);
}
#[test]
fn struct_comments() {
check(
indoc! {r#"
contract;
// This is a comment, for this one to be placed correctly we need to have Module visitor implemented
pub struct Foo { // Here is a comment
// Trying some ASCII art
baz:u64,
bazzz:u64
// ________ ___ ___ _______ ___ ___ ________ ________ ________
// |\ _____\\ \|\ \|\ ___ \ |\ \ |\ \ |\ __ \|\ __ \|\ ____\
// \ \ \__/\ \ \\\ \ \ __/|\ \ \ \ \ \ \ \ \|\ \ \ \|\ /\ \ \___|_
// \ \ __\\ \ \\\ \ \ \_|/_\ \ \ \ \ \ \ \ __ \ \ __ \ \_____ \
// \ \ \_| \ \ \\\ \ \ \_|\ \ \ \____ \ \ \____\ \ \ \ \ \ \|\ \|____|\ \
// \ \__\ \ \_______\ \_______\ \_______\ \ \_______\ \__\ \__\ \_______\____\_\ \
// \|__| \|_______|\|_______|\|_______| \|_______|\|__|\|__|\|_______|\_________\
// \|_________|
}
"#},
indoc! {r#"
contract;
// This is a comment, for this one to be placed correctly we need to have Module visitor implemented
pub struct Foo { // Here is a comment
// Trying some ASCII art
baz: u64,
bazzz: u64,
// ________ ___ ___ _______ ___ ___ ________ ________ ________
// |\ _____\\ \|\ \|\ ___ \ |\ \ |\ \ |\ __ \|\ __ \|\ ____\
// \ \ \__/\ \ \\\ \ \ __/|\ \ \ \ \ \ \ \ \|\ \ \ \|\ /\ \ \___|_
// \ \ __\\ \ \\\ \ \ \_|/_\ \ \ \ \ \ \ \ __ \ \ __ \ \_____ \
// \ \ \_| \ \ \\\ \ \ \_|\ \ \ \____ \ \ \____\ \ \ \ \ \ \|\ \|____|\ \
// \ \__\ \ \_______\ \_______\ \_______\ \ \_______\ \__\ \__\ \_______\____\_\ \
// \|__| \|_______|\|_______|\|_______| \|_______|\|__|\|__|\|_______|\_________\
// \|_________|
}
"#},
);
}
#[test]
fn comments_empty_struct() {
check(
indoc! {r#"
contract;
struct AlignMyComments {
// Align here please
// Overindented comment
// Underindented comment
}"#},
indoc! {r#"
contract;
struct AlignMyComments {
// Align here please
// Overindented comment
// Underindented comment
}
"#},
);
}
#[test]
fn comments_empty_traits() {
check(
indoc! {r#"
contract;
trait AlignMyComments {
// Align here please
// Overindented comment
// Underindented comment
}"#},
indoc! {r#"
contract;
trait AlignMyComments {
// Align here please
// Overindented comment
// Underindented comment
}
"#},
);
}
#[test]
fn comments_empty_fns() {
check(
indoc! {r#"
contract;
fn single_comment_same_line() { /* a comment */ }
fn single_comment_same_line_trailing() { // a comment
}
fn single_comment() -> bool {
// TODO: This is a TODO
}
fn multiline_comments() {
// Multi
// line
// comment
}"#},
indoc! {r#"
contract;
fn single_comment_same_line() { /* a comment */ }
fn single_comment_same_line_trailing() { // a comment
}
fn single_comment() -> bool {
// TODO: This is a TODO
}
fn multiline_comments() {
// Multi
// line
// comment
}
"#},
);
}
#[test]
fn enum_comments() {
check(
indoc! {r#"
contract;
pub enum Bazz { // Here is a comment
// Trying some ASCII art
baz: (),
bazzz: (),//-----
//--D--
//-----
}
"#},
indoc! {r#"
contract;
pub enum Bazz { // Here is a comment
// Trying some ASCII art
baz: (),
bazzz: (), //-----
//--D--
//-----
}
"#},
);
}
#[test]
fn fn_comments() {
check(
indoc! {r#"
contract;
// This is a comment before a fn
// This is another comment before a fn
fn hello_world( baz: /* this is a comment */ u64) { let x = 5; // This is a comment inside the block
}
"#},
indoc! {r#"
contract;
// This is a comment before a fn
// This is another comment before a fn
fn hello_world(baz: /* this is a comment */ u64) {
let x = 5; // This is a comment inside the block
}
"#},
);
}
#[test]
fn abi_comments() {
check(
indoc! {r#"
contract;
// This is an abi
abi StorageMapExample {
// insert_into_map is blah blah
#[storage(write)] // this is some other comment
fn insert_into_map(key: u64, value: u64);
// this is the last comment inside the StorageMapExample
}
// This is another abi
abi AnotherAbi {
// insert_into_map is blah blah
#[storage(write)]
fn update_map(key: u64, value: u64);
// this is some other comment
fn read(key: u64);
}
abi CommentsInBetween {
fn foo();
// This should not collapse below
// this is a comment
fn bar();
}
// This is another abi
abi Empty {
// Empty abi
}
"#},
indoc! {r#"
contract;
// This is an abi
abi StorageMapExample {
// insert_into_map is blah blah
#[storage(write)] // this is some other comment
fn insert_into_map(key: u64, value: u64);
// this is the last comment inside the StorageMapExample
}
// This is another abi
abi AnotherAbi {
// insert_into_map is blah blah
#[storage(write)]
fn update_map(key: u64, value: u64);
// this is some other comment
fn read(key: u64);
}
abi CommentsInBetween {
fn foo();
// This should not collapse below
// this is a comment
fn bar();
}
// This is another abi
abi Empty {
// Empty abi
}
"#},
);
}
#[test]
fn const_comments() {
check(
indoc! {r#"
contract;
pub const /* TEST: blah blah tests */ TEST: u16 = 10; // This is a comment next to a const"#},
indoc! {r#"
contract;
pub const /* TEST: blah blah tests */ TEST: u16 = 10; // This is a comment next to a const
"#},
);
}
#[test]
fn storage_comments() {
check(
indoc! {r#"
contract;
struct Type1 {
foo: u64,
}
struct Type2 {
bar: u64,
}
storage {
// Testing a comment inside storage
long_var_name: Type1=Type1{ foo: 8},
// Testing another comment
var2: Type2 = Type2{bar:9} // This is the last comment
}"#},
indoc! {r#"
contract;
struct Type1 {
foo: u64,
}
struct Type2 {
bar: u64,
}
storage {
// Testing a comment inside storage
long_var_name: Type1 = Type1 { foo: 8 },
// Testing another comment
var2: Type2 = Type2 { bar: 9 }, // This is the last comment
}
"#},
);
}
#[test]
fn trait_comments() {
check(
indoc! {r#"
contract;
// This is the programmer trait
trait Programmer {
// Returns fav languages of this Programmer.
fn fav_language(self) -> String;
}"#},
indoc! {r#"
contract;
// This is the programmer trait
trait Programmer {
// Returns fav languages of this Programmer.
fn fav_language(self) -> String;
}
"#},
);
}
#[test]
fn where_comment() {
check(
indoc! {r#"
contract;
pub fn hello( person: String ) -> String where /* This is next to where */ T: Eq, /*Here is a comment*/{let greeting = 42;greeting.to_string()}"#},
indoc! {r#"
contract;
pub fn hello(person: String) -> String
where /* This is next to where */
T: Eq, /*Here is a comment*/
{
let greeting = 42;
greeting.to_string()
}
"#},
);
}
#[test]
fn impl_spacing() {
check(
indoc! {r#"
script;
struct Foo {
bar: u64,
baz: bool,
}
trait Qux {
fn is_baz_true(self) -> bool;
}
impl<A , B , const N : u64> Qux<A, B> for
Foo
where
A : Qux,
B: Qux ,
{fn is_baz_true(self) -> bool {
self.baz
}}"#},
indoc! {r#"
script;
struct Foo {
bar: u64,
baz: bool,
}
trait Qux {
fn is_baz_true(self) -> bool;
}
impl<A, B, const N: u64> Qux<A, B> for Foo
where
A: Qux,
B: Qux,
{
fn is_baz_true(self) -> bool {
self.baz
}
}
"#},
);
}
#[test]
fn impl_without_generics() {
check(
indoc! {r#"
script;
struct Foo {
bar: u64,
baz: bool,
}
trait Qux {
fn is_baz_true(self) -> bool;
}
impl Qux for
Foo
{fn is_baz_true(self) -> bool {
self.baz
}}"#},
indoc! {r#"
script;
struct Foo {
bar: u64,
baz: bool,
}
trait Qux {
fn is_baz_true(self) -> bool;
}
impl Qux for Foo {
fn is_baz_true(self) -> bool {
self.baz
}
}
"#},
);
}
#[test]
fn newline_sequence_formatting() {
check(
indoc! {r#"
script;
fn main() {
let number: u64 = 10;
let number2: u64 = 20;
let number3: u64 = 30;
}"#},
indoc! {r#"
script;
fn main() {
let number: u64 = 10;
let number2: u64 = 20;
let number3: u64 = 30;
}
"#},
);
}
#[test]
fn inner_doc_comments() {
check(
indoc! {r#"
script;
enum Color {
//! Color is a Sway enum
blue: (),
red: ()
}
fn main() {
}"#},
indoc! {r#"
script;
enum Color {
//! Color is a Sway enum
blue: (),
red: (),
}
fn main() {}
"#},
);
}
#[test]
fn outer_doc_comments() {
check(
indoc! {r#"
script;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/variable.rs | sway-ir/src/variable.rs | //! A value representing a function-local variable.
use crate::{
context::Context,
irtype::{Type, TypeContent},
pretty::DebugWithContext,
Constant,
};
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, DebugWithContext)]
pub struct LocalVar(#[in_context(local_vars)] pub slotmap::DefaultKey);
#[doc(hidden)]
#[derive(Clone, DebugWithContext)]
pub struct LocalVarContent {
pub ptr_ty: Type,
pub initializer: Option<Constant>,
pub mutable: bool,
}
impl LocalVar {
/// Return a new local of a specific type with an optional [`Constant`] initializer. If a
/// local is marked as mutable then it is guaranteed to be on the stack rather than in
/// read-only memory.
pub fn new(
context: &mut Context,
ty: Type,
initializer: Option<Constant>,
mutable: bool,
) -> Self {
let ptr_ty = Type::new_typed_pointer(context, ty);
let content = LocalVarContent {
ptr_ty,
initializer,
mutable,
};
LocalVar(context.local_vars.insert(content))
}
/// Return the type of this local variable, which is always a pointer.
pub fn get_type(&self, context: &Context) -> Type {
context.local_vars[self.0].ptr_ty
}
/// Return the inner (pointed to) type.
pub fn get_inner_type(&self, context: &Context) -> Type {
let TypeContent::TypedPointer(inner_ty) = self.get_type(context).get_content(context)
else {
unreachable!("Local var type is always a pointer.")
};
*inner_ty
}
/// Return the initializer for this local variable.
pub fn get_initializer<'a>(&self, context: &'a Context) -> Option<&'a Constant> {
context.local_vars[self.0].initializer.as_ref()
}
/// Return whether this local variable is mutable.
pub fn is_mutable(&self, context: &Context) -> bool {
context.local_vars[self.0].mutable
}
/// Change this local variable's mutability.
pub fn set_mutable(&self, context: &mut Context, mutable: bool) {
context.local_vars[self.0].mutable = mutable;
}
}
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, DebugWithContext)]
pub struct GlobalVar(#[in_context(global_vars)] pub slotmap::DefaultKey);
#[doc(hidden)]
#[derive(Clone, DebugWithContext)]
pub struct GlobalVarContent {
pub ptr_ty: Type,
pub initializer: Option<Constant>,
pub mutable: bool,
}
impl GlobalVar {
/// Return a new Global of a specific type with an optional [`Constant`] initializer. If a
/// Global is marked as mutable then it is guaranteed to be on the stack rather than in
/// read-only memory.
pub fn new(
context: &mut Context,
ty: Type,
initializer: Option<Constant>,
mutable: bool,
) -> Self {
let ptr_ty = Type::new_typed_pointer(context, ty);
let content = GlobalVarContent {
ptr_ty,
initializer,
mutable,
};
GlobalVar(context.global_vars.insert(content))
}
/// Return the type of this Global variable, which is always a pointer.
pub fn get_type(&self, context: &Context) -> Type {
context.global_vars[self.0].ptr_ty
}
/// Return the inner (pointed to) type.
pub fn get_inner_type(&self, context: &Context) -> Type {
let TypeContent::TypedPointer(inner_ty) = self.get_type(context).get_content(context)
else {
unreachable!("Global var type is always a pointer.")
};
*inner_ty
}
/// Return the initializer for this Global variable.
pub fn get_initializer<'a>(&self, context: &'a Context) -> Option<&'a Constant> {
context.global_vars[self.0].initializer.as_ref()
}
/// Return whether this Global variable is mutable.
pub fn is_mutable(&self, context: &Context) -> bool {
context.global_vars[self.0].mutable
}
/// Change this Global variable's mutability.
pub fn set_mutable(&self, context: &mut Context, mutable: bool) {
context.global_vars[self.0].mutable = mutable;
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/lib.rs | sway-ir/src/lib.rs | //! Sway-IR is a library providing an intermediate representation for the
//! [Sway](https://github.com/FuelLabs/sway) compiler pipeline.
//!
//! It is inspired heavily by [LLVM](https://llvm.org/docs/LangRef.html) although it aims to remain
//! a much simpler system, providing only that which is required by Sway to target the Fuel virtual
//! machine. It takes the form of a
//! [static single assignment](https://en.wikipedia.org/wiki/Static_single_assignment_form) graph
//! and is designed primarily to make executable code optimization transforms powerful while
//! remaining relatively simple.
//!
//! The core data type is [`Context`] which contains all the IR state in an entity-component style
//! system. A [`Context`] contains one or more [`Module`]s, which in turn contain one or more
//! [`Function`]s. [`Function`]s have a set of arguments, contain a collection of local variables
//! and one or more [`Block`]s. [`Block`]s contain lists of [`Instruction`]s and may be joined as a
//! graph to represent program control flow.
//!
//! Other important data types are [`Value`], [`Type`] and [`Constant`]. Function arguments, local
//! variables, instructions and constants are all [`Value`]s.
//!
//! The optimization passes are found in the [optimize] module.
//!
//! # Note:
//!
//! Most of the public data types used in this library are in fact wrappers around a handle into
//! the context. The context uses the
//! [slotmap](https://github.com/orlp/slotmap) crate to maintain an entity
//! component system, or ECS.
//!
//! The nature of SSA is that it represents a graph of modules, functions, basic blocks and
//! instructions, which in Rust could be represented using references, [`Box`]es or [`std::rc::Rc`]
//! pointers. But the nature of optimization passes are to transform these graphs into generally
//! smaller or at least somehow more efficient versions of themselves, and so to avoid a lot of
//! copying and the interior mutability problem Sway-IR uses the ECS. Each handle implements
//! [`Copy`] and so is cheap to pass around by value, making changes to the ECS context simpler in
//! terms of satisfying the Rust borrow checker.
// For now it's easiest to just export absolutely everything to core_lang, we can refine the public
// API when it's closer to finished.
pub mod analysis;
pub use analysis::*;
pub mod asm;
pub use asm::*;
pub mod block;
pub use block::*;
pub mod constant;
pub use constant::*;
pub mod context;
pub use context::*;
pub mod error;
pub use error::*;
pub mod function;
pub use function::*;
pub mod instruction;
pub use instruction::*;
pub mod irtype;
pub use irtype::*;
pub mod metadata;
pub use metadata::*;
pub mod module;
pub use module::*;
pub mod optimize;
pub use optimize::*;
pub mod parser;
pub use parser::*;
pub mod variable;
pub use variable::*;
pub mod storage_key;
pub use storage_key::*;
pub mod pass_manager;
pub use pass_manager::*;
pub mod pretty;
pub use pretty::*;
pub mod printer;
pub use printer::*;
pub mod value;
pub use value::*;
pub mod verify;
pub use verify::*;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/asm.rs | sway-ir/src/asm.rs | //! An 'asm' block represents an opaque set of Fuel VM assembly instructions, embedded in place and
//! intended to be inserted as is into the assembly code generation.
//!
//! An [`AsmBlock`] has symbols for arguments and an optional return name and contains a list of
//! [`AsmInstruction`].
//!
//! The syntax in Sway for asm blocks is shown by this example, and [`AsmBlock`] represents it
//! symbolically:
//!
//! ```text
//! asm(r1: self, r2: other, r3) {
//! add r3 r2 r1;
//! r3: u64
//! }
//! ```
use sway_types::ident::Ident;
use crate::{
context::Context, irtype::Type, metadata::MetadataIndex, pretty::DebugWithContext, value::Value,
};
#[doc(hidden)]
#[derive(Clone, Debug, DebugWithContext)]
pub struct AsmBlock {
    /// Names of the registers bound by the block's argument list.
    pub args_names: Vec<Ident>,
    /// The ASM instructions making up the block's body.
    pub body: Vec<AsmInstruction>,
    /// The type of the value the block evaluates to.
    pub return_type: Type,
    /// The register whose value is returned, if the block returns one.
    pub return_name: Option<Ident>,
}
/// An argument passed into an ASM block, binding a register name to an optional initializer.
#[derive(Clone, Debug)]
pub struct AsmArg {
    /// The register name bound inside the ASM block (e.g. `r1` in `asm(r1: self, ...)`).
    pub name: Ident,
    /// The value used to initialize the register; `None` for uninitialized registers like `r3`
    /// in the module-level example.
    pub initializer: Option<Value>,
}
/// A single instruction within an [`AsmBlock`].
#[derive(Clone, Debug)]
pub struct AsmInstruction {
    /// The opcode name, e.g. `add` or `lw`.
    pub op_name: Ident,
    /// Register operand names.
    pub args: Vec<Ident>,
    /// An optional immediate operand.
    pub immediate: Option<Ident>,
    /// Optional metadata (e.g. source span info) attached to this instruction.
    pub metadata: Option<MetadataIndex>,
}
impl AsmInstruction {
    /// Build an instruction with the given op name, register arguments and optional immediate,
    /// none of which carry a real source span.
    ///
    /// Shared by all of the `*_no_span` convenience constructors below, which previously
    /// duplicated this construction verbatim.
    fn no_span(op_name: &str, args: Vec<String>, immediate: Option<String>) -> Self {
        AsmInstruction {
            // The op name gets a span synthesized from its own text, exactly as the original
            // constructors did; the operands get no span at all.
            op_name: Ident::new(sway_types::Span::from_string(op_name.to_owned())),
            args: args.into_iter().map(Ident::new_no_span).collect(),
            immediate: immediate.map(Ident::new_no_span),
            metadata: None,
        }
    }

    /// A `log ra rb rc rd` instruction with span-less operands.
    pub fn log_no_span(
        ra: impl Into<String>,
        rb: impl Into<String>,
        rc: impl Into<String>,
        rd: impl Into<String>,
    ) -> Self {
        Self::no_span(
            "log",
            vec![ra.into(), rb.into(), rc.into(), rd.into()],
            None,
        )
    }

    /// A `lw dst src offset` (load word) instruction; `offset` is an immediate operand.
    pub fn lw_no_span(
        dst: impl Into<String>,
        src: impl Into<String>,
        offset: impl Into<String>,
    ) -> Self {
        Self::no_span("lw", vec![dst.into(), src.into()], Some(offset.into()))
    }

    /// A `mul dst a b` instruction with span-less operands.
    pub fn mul_no_span(dst: impl Into<String>, a: impl Into<String>, b: impl Into<String>) -> Self {
        Self::no_span("mul", vec![dst.into(), a.into(), b.into()], None)
    }

    /// An `add dst a b` instruction with span-less operands.
    pub fn add_no_span(dst: impl Into<String>, a: impl Into<String>, b: impl Into<String>) -> Self {
        Self::no_span("add", vec![dst.into(), a.into(), b.into()], None)
    }

    /// A `sub dst a b` instruction with span-less operands.
    pub fn sub_no_span(dst: impl Into<String>, a: impl Into<String>, b: impl Into<String>) -> Self {
        Self::no_span("sub", vec![dst.into(), a.into(), b.into()], None)
    }
}
impl AsmBlock {
/// Create a new [`AsmBlock`] in the passed context and return its handle.
pub fn new(
args_names: Vec<Ident>,
body: Vec<AsmInstruction>,
return_type: Type,
return_name: Option<Ident>,
) -> Self {
AsmBlock {
args_names,
body,
return_type,
return_name,
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/instruction.rs | sway-ir/src/instruction.rs | //! Instructions for data manipulation, but mostly control flow.
//!
//! Since Sway abstracts most low level operations behind traits they are translated into function
//! calls which contain ASM blocks.
//!
//! Unfortunately, using opaque ASM blocks limits the effectiveness of certain optimizations and
//! this should be addressed in the future, perhaps by using compiler intrinsic calls instead of
//! the ASM blocks where possible. See: https://github.com/FuelLabs/sway/issues/855.
use rustc_hash::FxHashMap;
use sway_types::Ident;
use crate::{
asm::{AsmArg, AsmBlock},
block::Block,
context::Context,
function::Function,
irtype::Type,
pretty::DebugWithContext,
value::{Value, ValueDatum},
variable::LocalVar,
AsmInstruction, ConstantContent, GlobalVar, Module, StorageKey,
};
/// A branch target together with the values passed as the destination block's arguments.
#[derive(Debug, Clone, DebugWithContext)]
pub struct BranchToWithArgs {
    /// The destination block.
    pub block: Block,
    /// The values passed to the destination block's arguments.
    pub args: Vec<Value>,
}
/// A single IR instruction: an operation plus the block it belongs to.
#[derive(Debug, Clone, DebugWithContext)]
pub struct Instruction {
    /// The basic block containing this instruction.
    pub parent: Block,
    /// The operation this instruction performs.
    pub op: InstOp,
}
impl Instruction {
    /// Return the type of the value this instruction produces, if any.
    /// See [`InstOp::get_type`] for the per-operation rules.
    pub fn get_type(&self, context: &Context) -> Option<Type> {
        self.op.get_type(context)
    }
    /// Replace any values referenced by this instruction's operands according to `replace_map`
    /// (mapping old value to replacement value).
    pub fn replace_values(&mut self, replace_map: &FxHashMap<Value, Value>) {
        self.op.replace_values(replace_map)
    }
    /// Get the function containing this instruction, via its parent block.
    pub fn get_function(&self, context: &Context) -> Function {
        context.blocks[self.parent.0].function
    }
}
/// The operation performed by an [`Instruction`].
#[derive(Debug, Clone, DebugWithContext)]
pub enum InstOp {
    /// An opaque list of ASM instructions passed directly to codegen.
    AsmBlock(AsmBlock, Vec<AsmArg>),
    /// Unary arithmetic operations
    UnaryOp {
        op: UnaryOpKind,
        arg: Value,
    },
    /// Binary arithmetic operations
    BinaryOp {
        op: BinaryOpKind,
        arg1: Value,
        arg2: Value,
    },
    /// Cast the type of a value without changing its actual content.
    BitCast(Value, Type),
    /// An unconditional jump.
    Branch(BranchToWithArgs),
    /// A function call with a list of arguments.
    Call(Function, Vec<Value>),
    /// Cast a value's type from one pointer to another.
    CastPtr(Value, Type),
    /// Comparison between two values using various comparators and returning a boolean.
    Cmp(Predicate, Value, Value),
    /// A conditional jump with the boolean condition value and true or false destinations.
    ConditionalBranch {
        cond_value: Value,
        true_block: BranchToWithArgs,
        false_block: BranchToWithArgs,
    },
    /// A contract call with a list of arguments
    ContractCall {
        return_type: Type,
        name: Option<String>,
        params: Value,
        coins: Value,
        asset_id: Value,
        gas: Value,
    },
    /// Umbrella instruction variant for FuelVM-specific instructions
    FuelVm(FuelVmInstruction),
    /// Return a pointer to a local variable.
    GetLocal(LocalVar),
    /// Return a pointer to a global variable.
    GetGlobal(GlobalVar),
    /// Return a pointer to a configurable.
    GetConfig(Module, String),
    /// Return a module [`StorageKey`]; its type is taken from the key itself.
    GetStorageKey(StorageKey),
    /// Translate a pointer from a base to a nested element in an aggregate type.
    GetElemPtr {
        base: Value,
        elem_ptr_ty: Type,
        indices: Vec<Value>,
    },
    /// Re-interpret an integer value as pointer of some type
    IntToPtr(Value, Type),
    /// Read a value from a memory pointer.
    Load(Value),
    /// Copy a specified number of bytes between pointers.
    MemCopyBytes {
        dst_val_ptr: Value,
        src_val_ptr: Value,
        byte_len: u64,
    },
    /// Copy a value from one pointer to another.
    MemCopyVal {
        dst_val_ptr: Value,
        src_val_ptr: Value,
    },
    /// Clear a value, fills with zero
    MemClearVal {
        dst_val_ptr: Value,
    },
    /// No-op, handy as a placeholder instruction.
    Nop,
    /// Cast a pointer to an integer.
    PtrToInt(Value, Type),
    /// Return from a function.
    Ret(Value, Type),
    /// Write a value to a memory pointer.
    Store {
        dst_val_ptr: Value,
        stored_val: Value,
    },
    /// Allocate `count` objects of type `ty` on the heap.
    Alloc {
        ty: Type,
        count: Value,
    },
}
/// Metadata describing a logged event.
///
/// The value is packed into a `u64` by [`Self::encoded`] using the following layout:
/// - bits 63..=56: `version`
/// - bits 55..=48: `is_event` flag (stored as an 8-bit boolean for compatibility)
/// - bits 47..=40: `is_indexed` flag (stored as an 8-bit boolean for compatibility)
/// - bits 39..=32: `event_type_size` in bytes (fixed-size indexed payloads only)
/// - bits 31..=16: `num_elements` (number of indexed fields)
/// - bits 15..=0: reserved for future use (currently zeroed)
#[derive(Debug, Clone, Copy, DebugWithContext)]
pub struct LogEventData {
    /// Encoding scheme version; see [`Self::CURRENT_VERSION`].
    version: u8,
    /// True when the logged value is an event.
    is_event: bool,
    /// True when the event carries indexed fields.
    is_indexed: bool,
    /// Size in bytes of a fixed-size indexed payload; 0 when not indexed.
    event_type_size: u8,
    /// Number of indexed fields; 0 when not indexed.
    num_elements: u16,
}
impl Default for LogEventData {
fn default() -> Self {
Self {
version: Self::CURRENT_VERSION,
is_event: false,
is_indexed: false,
event_type_size: 0,
num_elements: 0,
}
}
}
impl LogEventData {
    /// The encoding scheme version currently produced.
    pub const CURRENT_VERSION: u8 = 0;

    /// Construct a descriptor from every field explicitly.
    pub fn new(
        version: u8,
        is_event: bool,
        is_indexed: bool,
        event_type_size: u8,
        num_elements: u16,
    ) -> Self {
        Self {
            version,
            is_event,
            is_indexed,
            event_type_size,
            num_elements,
        }
    }

    /// Build metadata for an event log.
    ///
    /// The result is marked indexed only when both a fixed field size and a non-zero field
    /// count are supplied; any other combination yields a plain, non-indexed event descriptor.
    pub fn for_event(indexed_field_size: Option<u8>, indexed_field_count: u16) -> Self {
        if let Some(size) = indexed_field_size {
            if indexed_field_count > 0 {
                return Self {
                    version: Self::CURRENT_VERSION,
                    is_event: true,
                    is_indexed: true,
                    event_type_size: size,
                    num_elements: indexed_field_count,
                };
            }
        }
        Self {
            version: Self::CURRENT_VERSION,
            is_event: true,
            is_indexed: false,
            event_type_size: 0,
            num_elements: 0,
        }
    }

    /// The encoding scheme version of this descriptor.
    pub fn version(&self) -> u8 {
        self.version
    }

    /// Whether the logged value is an event.
    pub fn is_event(&self) -> bool {
        self.is_event
    }

    /// Whether the event carries indexed fields.
    pub fn is_indexed(&self) -> bool {
        self.is_indexed
    }

    /// Size in bytes of a fixed-size indexed payload.
    pub fn event_type_size(&self) -> u8 {
        self.event_type_size
    }

    /// Number of indexed fields.
    pub fn num_elements(&self) -> u16 {
        self.num_elements
    }

    /// Pack this descriptor into the `u64` representation consumed by the backend.
    ///
    /// Boolean flags occupy whole bytes — rather than single bits — to match the FuelVM ABI
    /// expectations, leaving the low 16 bits free for future extensions without breaking the
    /// encoding.
    pub fn encoded(&self) -> u64 {
        let mut packed = u64::from(self.version) << 56;
        packed |= u64::from(self.is_event as u8) << 48;
        packed |= u64::from(self.is_indexed as u8) << 40;
        packed |= u64::from(self.event_type_size) << 32;
        packed |= u64::from(self.num_elements) << 16;
        packed
    }
}
/// FuelVM-specific operations, wrapped by [`InstOp::FuelVm`].
#[derive(Debug, Clone, DebugWithContext)]
pub enum FuelVmInstruction {
    /// Read transaction field `tx_field_id`, parameterized by `index` (the FuelVM `gtf`
    /// instruction).
    Gtf {
        index: Value,
        tx_field_id: u64,
    },
    /// Logs a value along with an identifier.
    Log {
        log_val: Value,
        log_ty: Type,
        log_id: Value,
        log_data: Option<LogEventData>,
    },
    /// Reads a special register in the VM.
    ReadRegister(Register),
    /// Revert VM execution.
    Revert(Value),
    /// - Sends a message to an output via the `smo` FuelVM instruction.
    /// - The first operand must be a `B256` representing the recipient.
    /// - The second operand is the message data being sent.
    /// - `message_size` and `coins` must be of type `U64`.
    Smo {
        recipient: Value,
        message: Value,
        message_size: Value,
        coins: Value,
    },
    /// Clears `number_of_slots` storage slots (`b256` each) starting at key `key`.
    StateClear {
        key: Value,
        number_of_slots: Value,
    },
    /// Reads `number_of_slots` slots (`b256` each) from storage starting at key `key` and stores
    /// them in memory starting at address `load_val`.
    StateLoadQuadWord {
        load_val: Value,
        key: Value,
        number_of_slots: Value,
    },
    /// Reads and returns single word from a storage slot.
    StateLoadWord(Value),
    /// Stores `number_of_slots` slots (`b256` each) starting at address `stored_val` in memory into
    /// storage starting at key `key`. `key` must be a `b256`.
    StateStoreQuadWord {
        stored_val: Value,
        key: Value,
        number_of_slots: Value,
    },
    /// Writes a single word to a storage slot. `key` must be a `b256` and the type of `stored_val`
    /// must be a `u64`.
    StateStoreWord {
        stored_val: Value,
        key: Value,
    },
    /// Wide (wider than a word) unary operation.  `result` is passed as an operand and carries
    /// the operation's type; NOTE(review): presumably a destination pointer — confirm in codegen.
    WideUnaryOp {
        op: UnaryOpKind,
        result: Value,
        arg: Value,
    },
    /// Wide binary operation; see the note on [`Self::WideUnaryOp`] regarding `result`.
    WideBinaryOp {
        op: BinaryOpKind,
        result: Value,
        arg1: Value,
        arg2: Value,
    },
    /// Wide modular operation over three arguments; see the note on [`Self::WideUnaryOp`]
    /// regarding `result`.
    WideModularOp {
        op: BinaryOpKind,
        result: Value,
        arg1: Value,
        arg2: Value,
        arg3: Value,
    },
    /// Wide comparison; produces a boolean directly (no `result` operand).
    WideCmpOp {
        op: Predicate,
        arg1: Value,
        arg2: Value,
    },
    /// Jump to code in memory.  A block terminator: it produces no value and takes no operands.
    JmpMem,
    /// Return a data slice described by `ptr` and `len`.  A block terminator.
    Retd {
        ptr: Value,
        len: Value,
    },
}
/// Comparison operations.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Predicate {
    /// Equality comparison.
    Equal,
    /// Less-than comparison.
    LessThan,
    /// Greater-than comparison.
    GreaterThan,
}
/// Unary operations.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum UnaryOpKind {
    /// Negation (`not`).
    Not,
}
/// Binary arithmetic, bitwise and shift operations.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum BinaryOpKind {
    Add,
    Sub,
    Mul,
    Div,
    And,
    Or,
    Xor,
    Mod,
    /// Right shift.
    Rsh,
    /// Left shift.
    Lsh,
}
/// Special registers in the Fuel Virtual Machine.
///
/// Read via [`FuelVmInstruction::ReadRegister`].
#[derive(Debug, Clone, Copy, Hash)]
pub enum Register {
    /// Contains overflow/underflow of addition, subtraction, and multiplication.
    Of,
    /// The program counter. Memory address of the current instruction.
    Pc,
    /// Memory address of bottom of current writable stack area.
    Ssp,
    /// Memory address on top of current writable stack area (points to free memory).
    Sp,
    /// Memory address of beginning of current call frame.
    Fp,
    /// Memory address below the current bottom of the heap (points to free memory).
    Hp,
    /// Error codes for particular operations.
    Error,
    /// Remaining gas globally.
    Ggas,
    /// Remaining gas in the context.
    Cgas,
    /// Received balance for this context.
    Bal,
    /// Pointer to the start of the currently-executing code.
    Is,
    /// Return value or pointer.
    Ret,
    /// Return value length in bytes.
    Retl,
    /// Flags register.
    Flag,
}
impl InstOp {
    /// Some [`Instruction`]s can return a value, but for some a return value doesn't make sense.
    ///
    /// Those which perform side effects such as writing to memory and also terminators such as
    /// `Ret` do not have a type.
    pub fn get_type(&self, context: &Context) -> Option<Type> {
        match self {
            // These all return something in particular.
            InstOp::AsmBlock(asm_block, _) => Some(asm_block.return_type),
            // Unary and binary ops take their type from the (first) argument.
            InstOp::UnaryOp { arg, .. } => arg.get_type(context),
            InstOp::BinaryOp { arg1, .. } => arg1.get_type(context),
            InstOp::BitCast(_, ty) => Some(*ty),
            InstOp::Call(function, _) => Some(context.functions[function.0].return_type),
            InstOp::CastPtr(_val, ty) => Some(*ty),
            InstOp::Cmp(..) => Some(Type::get_bool(context)),
            InstOp::ContractCall { return_type, .. } => Some(*return_type),
            InstOp::FuelVm(FuelVmInstruction::Gtf { .. }) => Some(Type::get_uint64(context)),
            InstOp::FuelVm(FuelVmInstruction::Log { .. }) => Some(Type::get_unit(context)),
            InstOp::FuelVm(FuelVmInstruction::ReadRegister(_)) => Some(Type::get_uint64(context)),
            InstOp::FuelVm(FuelVmInstruction::Smo { .. }) => Some(Type::get_unit(context)),
            // Load needs to strip the pointer from the source type.
            InstOp::Load(ptr_val) => match &context.values[ptr_val.0].value {
                ValueDatum::Argument(arg) => arg.ty.get_pointee_type(context),
                ValueDatum::Constant(cons) => {
                    cons.get_content(context).ty.get_pointee_type(context)
                }
                ValueDatum::Instruction(ins) => ins
                    .get_type(context)
                    .and_then(|ty| ty.get_pointee_type(context)),
            },
            // These return pointer types.
            InstOp::GetElemPtr { elem_ptr_ty, .. } => Some(*elem_ptr_ty),
            InstOp::GetLocal(local_var) => Some(local_var.get_type(context)),
            InstOp::GetGlobal(global_var) => Some(global_var.get_type(context)),
            // A missing config propagates as `None` via the `?`.
            InstOp::GetConfig(module, name) => Some(match module.get_config(context, name)? {
                crate::ConfigContent::V0 { ptr_ty, .. } => *ptr_ty,
                crate::ConfigContent::V1 { ptr_ty, .. } => *ptr_ty,
            }),
            InstOp::GetStorageKey(storage_key) => Some(storage_key.get_type(context)),
            InstOp::Alloc { ty: _, count: _ } => Some(Type::get_ptr(context)),
            // Use for casting between pointers and pointer-width integers.
            InstOp::IntToPtr(_, ptr_ty) => Some(*ptr_ty),
            InstOp::PtrToInt(_, int_ty) => Some(*int_ty),
            // These are all terminators which don't return, essentially. No type.
            InstOp::Branch(_)
            | InstOp::ConditionalBranch { .. }
            | InstOp::FuelVm(
                FuelVmInstruction::Revert(..)
                | FuelVmInstruction::JmpMem
                | FuelVmInstruction::Retd { .. },
            )
            | InstOp::Ret(..) => None,
            // No-op is also no-type.
            InstOp::Nop => None,
            // State load returns a u64, other state ops return a bool.
            InstOp::FuelVm(FuelVmInstruction::StateLoadWord(_)) => Some(Type::get_uint64(context)),
            InstOp::FuelVm(FuelVmInstruction::StateClear { .. })
            | InstOp::FuelVm(FuelVmInstruction::StateLoadQuadWord { .. })
            | InstOp::FuelVm(FuelVmInstruction::StateStoreQuadWord { .. })
            | InstOp::FuelVm(FuelVmInstruction::StateStoreWord { .. }) => {
                Some(Type::get_bool(context))
            }
            // Memory writes return unit.
            InstOp::MemCopyBytes { .. }
            | InstOp::MemCopyVal { .. }
            | InstOp::MemClearVal { .. }
            | InstOp::Store { .. } => Some(Type::get_unit(context)),
            // Wide Operations
            InstOp::FuelVm(FuelVmInstruction::WideUnaryOp { result, .. }) => {
                result.get_type(context)
            }
            InstOp::FuelVm(FuelVmInstruction::WideBinaryOp { result, .. }) => {
                result.get_type(context)
            }
            InstOp::FuelVm(FuelVmInstruction::WideCmpOp { .. }) => Some(Type::get_bool(context)),
            InstOp::FuelVm(FuelVmInstruction::WideModularOp { result, .. }) => {
                result.get_type(context)
            }
        }
    }
    /// Return every SSA [`Value`] this instruction uses as an operand.
    ///
    /// The order is significant: [`Self::set_operand`] addresses operands by their index in
    /// this list.
    pub fn get_operands(&self) -> Vec<Value> {
        match self {
            // Only ASM args that have an initializer count as operands.
            InstOp::AsmBlock(_, args) => args.iter().filter_map(|aa| aa.initializer).collect(),
            InstOp::BitCast(v, _) => vec![*v],
            InstOp::UnaryOp { op: _, arg } => vec![*arg],
            InstOp::BinaryOp { op: _, arg1, arg2 } => vec![*arg1, *arg2],
            InstOp::Branch(BranchToWithArgs { args, .. }) => args.clone(),
            InstOp::Call(_, vs) => vs.clone(),
            InstOp::CastPtr(val, _ty) => vec![*val],
            InstOp::Cmp(_, lhs, rhs) => vec![*lhs, *rhs],
            InstOp::ConditionalBranch {
                cond_value,
                true_block,
                false_block,
            } => {
                let mut v = vec![*cond_value];
                v.extend_from_slice(&true_block.args);
                v.extend_from_slice(&false_block.args);
                v
            }
            InstOp::ContractCall {
                return_type: _,
                name: _,
                params,
                coins,
                asset_id,
                gas,
            } => vec![*params, *coins, *asset_id, *gas],
            InstOp::GetElemPtr {
                base,
                elem_ptr_ty: _,
                indices,
            } => {
                // NOTE: the indices come first, followed by the base pointer.
                let mut vals = indices.clone();
                vals.push(*base);
                vals
            }
            InstOp::GetLocal(_local_var) => {
                // `GetLocal` returns an SSA `Value` but does not take any as an operand.
                vec![]
            }
            InstOp::GetGlobal(_global_var) => {
                // `GetGlobal` returns an SSA `Value` but does not take any as an operand.
                vec![]
            }
            InstOp::GetConfig(_, _) => {
                // `GetConfig` returns an SSA `Value` but does not take any as an operand.
                vec![]
            }
            InstOp::GetStorageKey(_) => {
                // `GetStorageKey` returns an SSA `Value` but does not take any as an operand.
                vec![]
            }
            InstOp::Alloc { ty: _, count } => vec![*count],
            InstOp::IntToPtr(v, _) => vec![*v],
            InstOp::Load(v) => vec![*v],
            InstOp::MemCopyBytes {
                dst_val_ptr,
                src_val_ptr,
                byte_len: _,
            } => {
                vec![*dst_val_ptr, *src_val_ptr]
            }
            InstOp::MemCopyVal {
                dst_val_ptr,
                src_val_ptr,
            } => {
                vec![*dst_val_ptr, *src_val_ptr]
            }
            InstOp::MemClearVal { dst_val_ptr } => {
                vec![*dst_val_ptr]
            }
            InstOp::Nop => vec![],
            InstOp::PtrToInt(v, _) => vec![*v],
            InstOp::Ret(v, _) => vec![*v],
            InstOp::Store {
                dst_val_ptr,
                stored_val,
            } => {
                vec![*dst_val_ptr, *stored_val]
            }
            InstOp::FuelVm(fuel_vm_instr) => match fuel_vm_instr {
                FuelVmInstruction::Gtf {
                    index,
                    tx_field_id: _,
                } => vec![*index],
                FuelVmInstruction::Log {
                    log_val, log_id, ..
                } => vec![*log_val, *log_id],
                FuelVmInstruction::ReadRegister(_) => vec![],
                FuelVmInstruction::Revert(v) => vec![*v],
                FuelVmInstruction::JmpMem => vec![],
                FuelVmInstruction::Smo {
                    recipient,
                    message,
                    message_size,
                    coins,
                } => vec![*recipient, *message, *message_size, *coins],
                FuelVmInstruction::StateClear {
                    key,
                    number_of_slots,
                } => vec![*key, *number_of_slots],
                FuelVmInstruction::StateLoadQuadWord {
                    load_val,
                    key,
                    number_of_slots,
                } => vec![*load_val, *key, *number_of_slots],
                FuelVmInstruction::StateLoadWord(key) => vec![*key],
                FuelVmInstruction::StateStoreQuadWord {
                    stored_val,
                    key,
                    number_of_slots,
                } => {
                    vec![*stored_val, *key, *number_of_slots]
                }
                FuelVmInstruction::StateStoreWord { stored_val, key } => vec![*stored_val, *key],
                // For the wide ops, `result` is itself an operand and comes first.
                FuelVmInstruction::WideUnaryOp { arg, result, .. } => vec![*result, *arg],
                FuelVmInstruction::WideBinaryOp {
                    arg1, arg2, result, ..
                } => vec![*result, *arg1, *arg2],
                FuelVmInstruction::WideCmpOp { arg1, arg2, .. } => vec![*arg1, *arg2],
                FuelVmInstruction::WideModularOp {
                    result,
                    arg1,
                    arg2,
                    arg3,
                    ..
                } => vec![*result, *arg1, *arg2, *arg3],
                FuelVmInstruction::Retd { ptr, len } => {
                    vec![*ptr, *len]
                }
            },
        }
    }
/// Set the operand at the given index to the provided value.
/// The indices are in the same order as returned by `get_operands`.
pub fn set_operand(&mut self, replacement: Value, idx: usize) {
match self {
InstOp::AsmBlock(_, args) => {
// Because get_operand only returns operands that have an
// initializer, we also iterate over only those, to match indices.
let mut cur_idx = 0;
for arg in args.iter_mut() {
if let Some(_asm_arg) = arg.initializer {
if cur_idx == idx {
arg.initializer = Some(replacement);
return;
}
cur_idx += 1;
}
}
panic!("Invalid index for AsmBlock");
}
InstOp::BitCast(v, _) | InstOp::UnaryOp { arg: v, .. } => {
if idx == 0 {
*v = replacement;
} else {
panic!("Invalid index for Op");
}
}
InstOp::BinaryOp { op: _, arg1, arg2 } => {
if idx == 0 {
*arg1 = replacement;
} else if idx == 1 {
*arg2 = replacement;
} else {
panic!("Invalid index for BinaryOp");
}
}
InstOp::Branch(BranchToWithArgs { args, .. }) => {
if idx < args.len() {
args[idx] = replacement;
} else {
panic!("Invalid index for Branch");
}
}
InstOp::Call(_, vs) => {
if idx < vs.len() {
vs[idx] = replacement;
} else {
panic!("Invalid index for Call");
}
}
InstOp::CastPtr(val, _ty) => {
if idx == 0 {
*val = replacement;
} else {
panic!("Invalid index for CastPtr");
}
}
InstOp::Cmp(_, lhs, rhs) => {
if idx == 0 {
*lhs = replacement;
} else if idx == 1 {
*rhs = replacement;
} else {
panic!("Invalid index for Cmp");
}
}
InstOp::ConditionalBranch {
cond_value,
true_block,
false_block,
} => {
if idx == 0 {
*cond_value = replacement;
} else if idx - 1 < true_block.args.len() {
true_block.args[idx - 1] = replacement;
} else if idx - 1 - true_block.args.len() < false_block.args.len() {
false_block.args[idx - 1 - true_block.args.len()] = replacement;
} else {
panic!("Invalid index for ConditionalBranch");
}
}
InstOp::ContractCall {
return_type: _,
name: _,
params,
coins,
asset_id,
gas,
} => {
if idx == 0 {
*params = replacement;
} else if idx == 1 {
*coins = replacement;
} else if idx == 2 {
*asset_id = replacement;
} else if idx == 3 {
*gas = replacement;
} else {
panic!("Invalid index for ContractCall");
}
}
InstOp::GetElemPtr {
base,
elem_ptr_ty: _,
indices,
} => {
use std::cmp::Ordering;
match idx.cmp(&indices.len()) {
Ordering::Less => {
indices[idx] = replacement;
}
Ordering::Equal => {
*base = replacement;
}
Ordering::Greater => {
panic!("Invalid index for GetElemPtr");
}
}
}
InstOp::GetLocal(_local_var) => {
// `GetLocal` returns an SSA `Value` but does not take any as an operand.
panic!("Invalid index for GetLocal");
}
InstOp::GetGlobal(_global_var) => {
// `GetGlobal` returns an SSA `Value` but does not take any as an operand.
panic!("Invalid index for GetGlobal");
}
InstOp::GetConfig(_, _) => {
// `GetConfig` returns an SSA `Value` but does not take any as an operand.
panic!("Invalid index for GetConfig");
}
InstOp::GetStorageKey(_) => {
// `GetStorageKey` returns an SSA `Value` but does not take any as an operand.
panic!("Invalid index for GetStorageKey");
}
InstOp::Alloc { ty: _, count } => {
if idx == 0 {
*count = replacement;
} else {
panic!("Invalid index for Alloc");
}
}
InstOp::IntToPtr(v, _) => {
if idx == 0 {
*v = replacement;
} else {
panic!("Invalid index for IntToPtr");
}
}
InstOp::Load(v) => {
if idx == 0 {
*v = replacement;
} else {
panic!("Invalid index for Load");
}
}
InstOp::MemCopyBytes {
dst_val_ptr,
src_val_ptr,
byte_len: _,
} => {
if idx == 0 {
*dst_val_ptr = replacement;
} else if idx == 1 {
*src_val_ptr = replacement;
} else {
panic!("Invalid index for MemCopyBytes");
}
}
InstOp::MemCopyVal {
dst_val_ptr,
src_val_ptr,
} => {
if idx == 0 {
*dst_val_ptr = replacement;
} else if idx == 1 {
*src_val_ptr = replacement;
} else {
panic!("Invalid index for MemCopyVal");
}
}
InstOp::MemClearVal { dst_val_ptr } => {
if idx == 0 {
*dst_val_ptr = replacement;
} else {
panic!("Invalid index for MemClearVal");
}
}
InstOp::Nop => (),
InstOp::PtrToInt(v, _) => {
if idx == 0 {
*v = replacement;
} else {
panic!("Invalid index for PtrToInt");
}
}
InstOp::Ret(v, _) => {
if idx == 0 {
*v = replacement;
} else {
panic!("Invalid index for Ret");
}
}
InstOp::Store {
dst_val_ptr,
stored_val,
} => {
if idx == 0 {
*dst_val_ptr = replacement;
} else if idx == 1 {
*stored_val = replacement;
} else {
panic!("Invalid index for Store");
}
}
InstOp::FuelVm(fuel_vm_instr) => match fuel_vm_instr {
FuelVmInstruction::Gtf {
index,
tx_field_id: _,
} => {
if idx == 0 {
*index = replacement;
} else {
panic!("Invalid index for Gtf");
}
}
FuelVmInstruction::Log {
log_val, log_id, ..
} => {
if idx == 0 {
*log_val = replacement;
} else if idx == 1 {
*log_id = replacement;
} else {
panic!("Invalid index for Log");
}
}
FuelVmInstruction::ReadRegister(_) => {
// `ReadRegister` returns an SSA `Value` but does not take any as an operand.
panic!("Invalid index for ReadRegister");
}
FuelVmInstruction::Revert(v) => {
if idx == 0 {
*v = replacement;
} else {
panic!("Invalid index for Revert");
}
}
FuelVmInstruction::JmpMem => {
// `JmpMem` does not take any operand.
panic!("Invalid index for JmpMem");
}
FuelVmInstruction::Smo {
recipient,
message,
message_size,
coins,
} => {
if idx == 0 {
*recipient = replacement;
} else if idx == 1 {
*message = replacement;
} else if idx == 2 {
*message_size = replacement;
} else if idx == 3 {
*coins = replacement;
} else {
panic!("Invalid index for Smo");
}
}
FuelVmInstruction::StateClear {
key,
number_of_slots,
} => {
if idx == 0 {
*key = replacement;
} else if idx == 1 {
*number_of_slots = replacement;
} else {
panic!("Invalid index for StateClear");
}
}
FuelVmInstruction::StateLoadQuadWord {
load_val,
key,
number_of_slots,
} => {
if idx == 0 {
*load_val = replacement;
} else if idx == 1 {
*key = replacement;
} else if idx == 2 {
*number_of_slots = replacement;
} else {
panic!("Invalid index for StateLoadQuadWord");
}
}
FuelVmInstruction::StateLoadWord(key) => {
if idx == 0 {
*key = replacement;
} else {
panic!("Invalid index for StateLoadWord");
}
}
FuelVmInstruction::StateStoreQuadWord {
stored_val,
key,
number_of_slots,
} => {
if idx == 0 {
*stored_val = replacement;
} else if idx == 1 {
*key = replacement;
} else if idx == 2 {
*number_of_slots = replacement;
} else {
panic!("Invalid index for StateStoreQuadWord");
}
}
FuelVmInstruction::StateStoreWord { stored_val, key } => {
if idx == 0 {
*stored_val = replacement;
} else if idx == 1 {
*key = replacement;
} else {
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/parser.rs | sway-ir/src/parser.rs | //! A parser for the printed IR, useful mostly for testing.
use sway_features::ExperimentalFeatures;
use sway_types::SourceEngine;
use crate::{context::Context, error::IrError, Backtrace};
// -------------------------------------------------------------------------------------------------
/// Parse a string produced by [`crate::printer::to_string`] into a new, verified [`Context`].
///
/// # Errors
///
/// Returns [`IrError::ParseFailure`] — carrying a short snippet of the input at the failure
/// point — when the text cannot be parsed, or a verifier error when the parsed IR is not well
/// formed.
pub fn parse<'eng>(
    input: &str,
    source_engine: &'eng SourceEngine,
    experimental: ExperimentalFeatures,
    backtrace: Backtrace,
) -> Result<Context<'eng>, IrError> {
    let irmod = ir_builder::parser::ir_descrs(input).map_err(|err| {
        // Attach up to ~20 bytes of the input at the failure offset to the error message.
        let tail = &input[err.location.offset..];
        let found = if tail.len() <= 20 {
            tail
        } else {
            // Don't slice mid-way through a multi-byte UTF-8 sequence — byte-index slicing
            // panics on a non-boundary.  Advance to the next char boundary (at most 3 bytes).
            let mut end = 20;
            while !tail.is_char_boundary(end) {
                end += 1;
            }
            &tail[..end]
        };
        IrError::ParseFailure(err.to_string(), found.into())
    })?;
    let ir = ir_builder::build_context(irmod, source_engine, experimental, backtrace)?;
    ir.verify()?;
    Ok(ir)
}
// -------------------------------------------------------------------------------------------------
mod ir_builder {
use slotmap::KeyData;
use std::convert::TryFrom;
use sway_features::ExperimentalFeatures;
use sway_types::{ident::Ident, span::Span, u256::U256, SourceEngine};
type MdIdxRef = u64;
peg::parser! {
pub(in crate::parser) grammar parser() for str {
pub(in crate::parser) rule ir_descrs() -> IrAstModule
= _ sop:script_or_predicate() eoi() {
sop
}
/ _ c:contract() eoi() {
c
}
rule script_or_predicate() -> IrAstModule
= kind:module_kind() "{" _ configs:init_config()* _ global_vars:global_var()* _ fn_decls:fn_decl()* "}" _
metadata:metadata_decls() {
IrAstModule {
kind,
configs,
global_vars,
storage_keys: vec![],
fn_decls,
metadata
}
}
rule module_kind() -> Kind
= "script" _ { Kind::Script }
/ "predicate" _ { Kind::Predicate }
rule contract() -> IrAstModule
= "contract" _ "{" _
configs:init_config()* _ global_vars:global_var()* _ storage_keys:storage_key()* _ fn_decls:fn_decl()* "}" _
metadata:metadata_decls() {
IrAstModule {
kind: crate::module::Kind::Contract,
configs,
global_vars,
storage_keys,
fn_decls,
metadata
}
}
rule global_var() -> IrAstGlobalVar
= "global" _ m:("mut" _)? name:path() _ ":" _ ty:ast_ty() init:global_init()? {
IrAstGlobalVar {
name,
ty,
init,
mutable: m.is_some(),
}
}
rule global_init() -> IrAstOperation
= "=" _ cv:op_const() {
cv
}
rule config_encoded_bytes() -> Vec<u8>
= "0x" s:$(hex_digit()*) _ {
hex_string_to_vec(s)
}
rule init_config() -> IrAstConfig
= value_name:value_assign() "config" _ val_ty:ast_ty() _ "," _ decode_fn:id() _ "," _ encoded_bytes:config_encoded_bytes()
metadata:comma_metadata_idx()? {
IrAstConfig {
value_name,
ty: val_ty,
encoded_bytes,
decode_fn,
metadata,
}
}
rule storage_key() -> IrAstStorageKey
= "storage_key" _ namespaces:path() "." fields:field_access() _ "=" _ slot:constant() _ offset:storage_key_offset()? _ field_id:storage_key_field_id()? {
IrAstStorageKey {
namespaces,
fields,
slot,
offset,
field_id,
}
}
rule storage_key_offset() -> IrAstConst
= ":" _ offset:constant() {
offset
}
rule storage_key_field_id() -> IrAstConst
= ":" _ field_id:constant() {
field_id
}
// A function declaration: qualifiers, name, optional selector, arguments,
// return type, optional metadata, then locals and basic blocks in braces.
rule fn_decl() -> IrAstFnDecl
    = is_public:is_public() _ is_original_entry:is_original_entry() _ is_entry:is_entry() _ is_fallback:is_fallback() _ "fn" _
        name:id() _ selector:selector_id()? _ "(" _
        args:(block_arg() ** comma()) ")" _ "->" _ ret_type:ast_ty()
        metadata:comma_metadata_idx()? "{" _
        locals:fn_local()*
        blocks:block_decl()*
        "}" _ {
            // TODO: Remove once old decoding is removed.
            // In the case of old decoding, every entry is at the same time an original entry, but in the IR
            // we mark them only as `entry`s so there is a bit of information lost at the roundtrip.
            // Remove this hack to recognize the new encoding once it becomes the only encoding.
            let is_original_entry = is_original_entry || (is_entry && !name.starts_with("__entry"));
            IrAstFnDecl {
                name,
                args,
                ret_type,
                is_public,
                metadata,
                locals,
                blocks,
                selector,
                is_entry,
                is_original_entry,
                is_fallback,
            }
    }

// Optional function qualifiers; the empty alternative makes each default to `false`.
rule is_public() -> bool
    = "pub" _ { true }
    / "" _ { false }

rule is_entry() -> bool
    = "entry" _ { true }
    / "" _ { false }

rule is_original_entry() -> bool
    = "entry_orig" _ { true }
    / "" _ { false }

rule is_fallback() -> bool
    = "fallback" _ { true }
    / "" _ { false }

// `<xxxxxxxx>` — exactly 8 hex digits forming a 4-byte function selector.
rule selector_id() -> [u8; 4]
    = "<" _ s:$(['0'..='9' | 'a'..='f' | 'A'..='F']*<8>) _ ">" _ {
        string_to_hex::<4>(s)
    }
// A typed, optionally metadata-annotated block (or function) argument.
rule block_arg() -> (IrAstTy, String, Option<MdIdxRef>)
    = name:id() mdi:metadata_idx()? ":" _ ty:ast_ty() {
        (ty, name, mdi)
    }

// `local [mut] <ty> <name> [= <const>]` — a function-local variable.
rule fn_local() -> (IrAstTy, String, Option<IrAstOperation>, bool)
    = "local" _ m:("mut" _)? ty:ast_ty() name:id() init:fn_local_init()? {
        (ty, name, init, m.is_some())
    }

rule fn_local_init() -> IrAstOperation
    = "=" _ cv:op_const() {
        cv
    }

// `<label>(<args>): <instructions>` — a basic block.
rule block_decl() -> IrAstBlock
    = label:id() "(" _ args:(block_arg() ** comma()) ")" _
        ":" _ instructions: instr_decl()* {
            IrAstBlock {
                label,
                args,
                instructions
            }
    }

// An instruction, optionally assigned to a value name and annotated with metadata.
rule instr_decl() -> IrAstInstruction
    = value_name:value_assign()? op:operation() metadata:comma_metadata_idx()? {
        IrAstInstruction {
            value_name,
            op,
            metadata,
        }
    }

// `<name> = ` — the assignment prefix of an instruction or configurable.
rule value_assign() -> String
    = name:id() "=" _ {
        name
    }

// `!<n>` — a reference to a metadata declaration.
rule metadata_idx() -> MdIdxRef
    = "!" idx:decimal() {
        idx
    }

rule comma_metadata_idx() -> MdIdxRef
    = "," _ mdi:metadata_idx() {
        mdi
    }

rule unary_op_kind() -> UnaryOpKind
    = "not" _ { UnaryOpKind::Not }

rule binary_op_kind() -> BinaryOpKind
    = "add" _ { BinaryOpKind::Add }
    / "sub" _ { BinaryOpKind::Sub }
    / "mul" _ { BinaryOpKind::Mul }
    / "div" _ { BinaryOpKind::Div }
    / "and" _ { BinaryOpKind::And }
    / "or" _ { BinaryOpKind::Or }
    / "xor" _ { BinaryOpKind::Xor }
    / "mod" _ { BinaryOpKind::Mod }
    / "rsh" _ { BinaryOpKind::Rsh }
    / "lsh" _ { BinaryOpKind::Lsh }
// All recognized instructions. NOTE: PEG alternatives are tried in order, so
// ordering matters where prefixes overlap (e.g. the `op_wide_*` rules must be
// tried before `op_unary`/`op_binary`).
// NOTE(review): `op_wide_modular_operation` and `op_state_clear` are defined
// below but are not listed here, so they are unreachable from `operation()` —
// confirm whether that is intentional.
rule operation() -> IrAstOperation
    = op_asm()
    / op_wide_unary()
    / op_wide_binary()
    / op_wide_cmp()
    / op_retd()
    / op_branch()
    / op_bitcast()
    / op_unary()
    / op_binary()
    / op_call()
    / op_cast_ptr()
    / op_cbr()
    / op_cmp()
    / op_const()
    / op_contract_call()
    / op_get_elem_ptr()
    / op_get_local()
    / op_get_global()
    / op_get_config()
    / op_get_storage_key()
    / op_gtf()
    / op_int_to_ptr()
    / op_load()
    / op_log()
    / op_mem_copy_bytes()
    / op_mem_copy_val()
    / op_mem_clear_val()
    / op_nop()
    / op_ptr_to_int()
    / op_read_register()
    / op_ret()
    / op_revert()
    / op_jmp_mem()
    / op_smo()
    / op_state_load_quad_word()
    / op_state_load_word()
    / op_state_store_quad_word()
    / op_state_store_word()
    / op_store()
    / op_alloc()
// `asm(<args>) -> <ret> [, !<md>] { <ops> }` — an inline assembly block.
rule op_asm() -> IrAstOperation
    = "asm" _ "(" _ args:(asm_arg() ** comma()) ")" _ ret:asm_ret() meta_idx:comma_metadata_idx()? "{" _
        ops:asm_op()*
    "}" _ {
        IrAstOperation::Asm(
            args,
            ret.0,
            ret.1,
            ops,
            meta_idx
        )
    }

rule op_bitcast() -> IrAstOperation
    = "bitcast" _ val:id() "to" _ ty:ast_ty() {
        IrAstOperation::BitCast(val, ty)
    }

rule op_unary() -> IrAstOperation
    = op: unary_op_kind() arg1:id() {
        IrAstOperation::UnaryOp(op, arg1)
    }

// NOTE(review): not referenced from `operation()` — see the note there.
rule op_wide_modular_operation() -> IrAstOperation
    = "wide" _ op:binary_op_kind() arg1:id() comma() arg2:id() comma() arg3:id() "to" _ result:id() {
        IrAstOperation::WideModularOp(op, arg1, arg2, arg3, result)
    }

rule op_wide_unary() -> IrAstOperation
    = "wide" _ op:unary_op_kind() arg:id() "to" _ result:id() {
        IrAstOperation::WideUnaryOp(op, arg, result)
    }

rule op_wide_binary() -> IrAstOperation
    = "wide" _ op:binary_op_kind() arg1:id() comma() arg2:id() "to" _ result:id() {
        IrAstOperation::WideBinaryOp(op, arg1, arg2, result)
    }

rule op_wide_cmp() -> IrAstOperation
    = "wide" _ "cmp" _ op:cmp_pred() arg1:id() arg2:id() {
        IrAstOperation::WideCmp(op, arg1, arg2)
    }

rule op_retd() -> IrAstOperation
    = "retd" _ arg1:id() _ arg2:id() {
        IrAstOperation::Retd(arg1, arg2)
    }

rule op_binary() -> IrAstOperation
    = op: binary_op_kind() arg1:id() comma() arg2:id() {
        IrAstOperation::BinaryOp(op, arg1, arg2)
    }

// `br <block>(<args>)` — unconditional branch.
rule op_branch() -> IrAstOperation
    = "br" _ to_block:id() "(" _ args:(id() ** comma()) ")" _ {
        IrAstOperation::Br(to_block, args)
    }

rule op_call() -> IrAstOperation
    = "call" _ callee:id() "(" _ args:(id() ** comma()) ")" _ {
        IrAstOperation::Call(callee, args)
    }

rule op_cast_ptr() -> IrAstOperation
    = "cast_ptr" _ val:id() "to" _ ty:ast_ty() {
        IrAstOperation::CastPtr(val, ty)
    }

// `cbr <cond>, <true block>(<args>), <false block>(<args>)` — conditional branch.
rule op_cbr() -> IrAstOperation
    = "cbr" _ cond:id() comma() tblock:id()
        "(" _ targs:(id() ** comma()) ")" _
        comma() fblock:id() "(" _ fargs:(id() ** comma()) ")" _ {
        IrAstOperation::Cbr(cond, tblock, targs, fblock, fargs)
    }

rule op_cmp() -> IrAstOperation
    = "cmp" _ p:cmp_pred() l:id() r:id() {
        IrAstOperation::Cmp(p, l, r)
    }

rule op_const() -> IrAstOperation
    = "const" _ val_ty:ast_ty() cv:constant() {
        IrAstOperation::Const(val_ty, cv)
    }

rule op_contract_call() -> IrAstOperation
    = "contract_call" _
    ty:ast_ty() _ name:id() _
    params:id() comma() coins:id() comma() asset_id:id() comma() gas:id() _ {
        IrAstOperation::ContractCall(ty, name, params, coins, asset_id, gas)
    }

// `get_elem_ptr <base>, <ty>, <idx>+` — address of an aggregate element.
rule op_get_elem_ptr() -> IrAstOperation
    = "get_elem_ptr" _ base:id() comma() ty:ast_ty() comma() idcs:(id() ++ comma()) {
        IrAstOperation::GetElemPtr(base, ty, idcs)
    }

// The type in the following `get_*` rules is parsed for readability but the
// name alone identifies the target, so the type is discarded.
rule op_get_local() -> IrAstOperation
    = "get_local" _ ast_ty() comma() name:id() {
        IrAstOperation::GetLocal(name)
    }

rule op_get_global() -> IrAstOperation
    = "get_global" _ ast_ty() comma() name:path() {
        IrAstOperation::GetGlobal(name)
    }

rule op_get_config() -> IrAstOperation
    = "get_config" _ ast_ty() comma() name:id() {
        IrAstOperation::GetConfig(name)
    }

rule op_get_storage_key() -> IrAstOperation
    = "get_storage_key" _ ast_ty() comma() namespaces:path() "." fields:field_access() {
        IrAstOperation::GetStorageKey(format!("{}.{}", namespaces.join("::"), fields.join(".")))
    }

rule op_gtf() -> IrAstOperation
    = "gtf" _ index:id() comma() tx_field_id:decimal() {
        IrAstOperation::Gtf(index, tx_field_id)
    }

rule op_int_to_ptr() -> IrAstOperation
    = "int_to_ptr" _ val:id() "to" _ ty:ast_ty() {
        IrAstOperation::IntToPtr(val, ty)
    }

rule op_alloc() -> IrAstOperation
    = "alloc" _ ty:ast_ty() "x" _ count:id() {
        IrAstOperation::Alloc(ty, count)
    }

rule op_load() -> IrAstOperation
    = "load" _ src:id() {
        IrAstOperation::Load(src)
    }

// Optional `log_data(...)` payload attached to a `log` instruction.
rule log_event_data() -> LogEventData
    = _ "log_data" _ "(" _
        "version" _ ":" _ version:decimal() comma()
        "is_event" _ ":" _ is_event:bool_lit() comma()
        "is_indexed" _ ":" _ is_indexed:bool_lit() comma()
        "event_type_size" _ ":" _ event_type_size:decimal() comma()
        "num_elements" _ ":" _ num_elements:decimal()
    _ ")" {
        LogEventData::new(
            u8::try_from(version).expect("log event data version must fit in u8"),
            is_event,
            is_indexed,
            u8::try_from(event_type_size)
                .expect("log event data size must fit in u8"),
            u16::try_from(num_elements)
                .expect("log event data count must fit in u16"),
        )
    }

rule op_log() -> IrAstOperation
    = "log" _ log_ty:ast_ty() log_val:id() comma() log_id:id() log_data:log_event_data()? _ {
        IrAstOperation::Log(log_ty, log_val, log_id, log_data)
    }

rule op_mem_copy_bytes() -> IrAstOperation
    = "mem_copy_bytes" _ dst_name:id() comma() src_name:id() comma() len:decimal() {
        IrAstOperation::MemCopyBytes(dst_name, src_name, len)
    }

rule op_mem_copy_val() -> IrAstOperation
    = "mem_copy_val" _ dst_name:id() comma() src_name:id() {
        IrAstOperation::MemCopyVal(dst_name, src_name)
    }

rule op_mem_clear_val() -> IrAstOperation
    = "mem_clear_val" _ dst_name:id() {
        IrAstOperation::MemClearVal(dst_name)
    }

rule op_nop() -> IrAstOperation
    = "nop" _ {
        IrAstOperation::Nop
    }

rule op_ptr_to_int() -> IrAstOperation
    = "ptr_to_int" _ val:id() "to" _ ty:ast_ty() {
        IrAstOperation::PtrToInt(val, ty)
    }

rule op_read_register() -> IrAstOperation
    = "read_register" _ r:reg_name() {
        IrAstOperation::ReadRegister(r)
    }

rule op_ret() -> IrAstOperation
    = "ret" _ ty:ast_ty() vn:id() {
        IrAstOperation::Ret(ty, vn)
    }

rule op_revert() -> IrAstOperation
    = "revert" _ vn:id() {
        IrAstOperation::Revert(vn)
    }

rule op_jmp_mem() -> IrAstOperation
    = "jmp_mem" _ {
        IrAstOperation::JmpMem
    }

rule op_smo() -> IrAstOperation
    = "smo" _
    recipient_and_message:id() comma() message_size:id() comma() output_index:id() comma() coins:id() _ {
        IrAstOperation::Smo(recipient_and_message, message_size, output_index, coins)
    }

// NOTE(review): not referenced from `operation()` — see the note there.
rule op_state_clear() -> IrAstOperation
    = "state_clear" _ "key" _ key:id() comma() number_of_slots:id() {
        IrAstOperation::StateClear(key, number_of_slots)
    }

rule op_state_load_quad_word() -> IrAstOperation
    = "state_load_quad_word" _ dst:id() comma() "key" _ key:id() comma() number_of_slots:id() {
        IrAstOperation::StateLoadQuadWord(dst, key, number_of_slots)
    }

rule op_state_load_word() -> IrAstOperation
    = "state_load_word" _ "key" _ key:id() {
        IrAstOperation::StateLoadWord(key)
    }

rule op_state_store_quad_word() -> IrAstOperation
    = "state_store_quad_word" _ src:id() comma() "key" _ key:id() comma() number_of_slots:id() {
        IrAstOperation::StateStoreQuadWord(src, key, number_of_slots)
    }

rule op_state_store_word() -> IrAstOperation
    = "state_store_word" _ src:id() comma() "key" _ key:id() {
        IrAstOperation::StateStoreWord(src, key)
    }

rule op_store() -> IrAstOperation
    = "store" _ val:id() "to" _ dst:id() {
        IrAstOperation::Store(val, dst)
    }
rule cmp_pred() -> Predicate
    = "eq" _ { Predicate::Equal }
    / "gt" _ { Predicate::GreaterThan }
    / "lt" _ { Predicate::LessThan }

// One of the named FuelVM registers readable via `read_register`.
rule reg_name() -> String
    = r:$("of" / "pc" / "ssp" / "sp" / "fp" / "hp" / "err" / "ggas" / "cgas" / "bal" / "is" / "ret" / "retl" / "flag") _ {
        r.to_string()
    }

// An ASM block argument with an optional initializer.
rule asm_arg() -> (Ident, Option<IrAstAsmArgInit>)
    = name:id_id() init:asm_arg_init()? {
        (name, init)
    }

// Initializer is either an immediate constant or a named value; the constant
// alternative is tried first.
rule asm_arg_init() -> IrAstAsmArgInit
    = ":" _ imm:constant() {
        IrAstAsmArgInit::Imm(imm)
    }
    / ":" _ var:id() {
        IrAstAsmArgInit::Var(var)
    }

// `-> <ty> [<reg>]` — the ASM block return type and optional return register.
rule asm_ret() -> (IrAstTy, Option<Ident>)
    = "->" _ ty:ast_ty() ret:id_id()? {
        (ty, ret)
    }

// A single ASM instruction: opcode, register args, optional immediate, metadata.
rule asm_op() -> IrAstAsmOp
    = name:id_id() args:asm_op_arg()* imm:asm_op_arg_imm()? meta_idx:comma_metadata_idx()? {
        IrAstAsmOp {
            name,
            args,
            imm,
            meta_idx
        }
    }

// A register argument — anything that is not an immediate (`iNNN`).
rule asm_op_arg() -> Ident
    = !asm_op_arg_imm() arg:id_id() {
        arg
    }

// An immediate argument of the form `i<decimal>`, kept as an identifier.
rule asm_op_arg_imm() -> Ident
    = imm:$("i" d:decimal()) {
        Ident::new(Span::new(imm.into(), 0, imm.len(), None).unwrap())
    }

// A constant value with an optional trailing metadata index.
rule constant() -> IrAstConst
    = value:constant_value() meta_idx:metadata_idx()? {
        IrAstConst {
            value,
            meta_idx
        }
    }
// Literal constant values. The `0x` alternative requires exactly 64 hex
// digits (a b256); it is tried before plain decimals.
rule constant_value() -> IrAstConstValue
    = "()" _ { IrAstConstValue::Unit }
    / "true" _ { IrAstConstValue::Bool(true) }
    / "false" _ { IrAstConstValue::Bool(false) }
    / "0x" s:$(hex_digit()*<64>) _ {
        IrAstConstValue::Hex256(string_to_hex::<32>(s))
    }
    / n:decimal() { IrAstConstValue::Number(n) }
    / string_const()
    / array_const()
    / struct_const()

rule string_const() -> IrAstConstValue
    = ['"'] chs:str_char()* ['"'] _ {
        IrAstConstValue::String(chs)
    }

rule str_char() -> u8
    // Match any of the printable characters except '"' and '\'.
    = c:$([' ' | '!' | '#'..='[' | ']'..='~']) {
        *c.as_bytes().first().unwrap()
    }
    / "\\x" h:hex_digit() l:hex_digit() {
        (h << 4) | l
    }

// There may be a better way to do this, dunno. In `str_char()` we're parsing '\xHH'
// from a hex byte to a u8. We do it by parsing each hex nybble into a u8 and then OR
// them together. In hex_digit(), to convert e.g., 'c' to 12, we match the pattern,
// convert the str into a u8 iterator, take the first value which is the ascii digit,
// convert the 'A'-'F' to uppercase by setting the 6th bit (0x20) and subtracting the
// right offset. Fiddly.
rule hex_digit() -> u8
    = d:$(['0'..='9']) {
        d.as_bytes().first().unwrap() - b'0'
    }
    / d:$(['a'..='f' | 'A'..='F']) {
        (d.as_bytes().first().unwrap() | 0x20) - b'a' + 10
    }

// `[<ty> <const>, ...]` — the element type is taken from the first element.
rule array_const() -> IrAstConstValue
    = "[" _ els:(field_or_element_const() ++ comma()) "]" _ {
        let el_ty = els[0].0.clone();
        let els = els.into_iter().map(|(_, cv)| cv).collect::<Vec<_>>();
        IrAstConstValue::Array(el_ty, els)
    }

rule struct_const() -> IrAstConstValue
    = "{" _ flds:(field_or_element_const() ** comma()) "}" _ {
        IrAstConstValue::Struct(flds)
    }

// A typed constant, or a typed `undef` placeholder.
rule field_or_element_const() -> (IrAstTy, IrAstConst)
    = ty:ast_ty() cv:constant() {
        (ty, cv)
    }
    / ty:ast_ty() "undef" _ {
        (ty.clone(), IrAstConst { value: IrAstConstValue::Undef, meta_idx: None })
    }
// All IR types. Ordering matters for overlapping prefixes, e.g. `__ptr` is
// tried before `ptr`.
rule ast_ty() -> IrAstTy
    = ("unit" / "()") _ { IrAstTy::Unit }
    / "bool" _ { IrAstTy::Bool }
    / "u8" _ { IrAstTy::U8 }
    / "u64" _ { IrAstTy::U64 }
    / "u256" _ { IrAstTy::U256 }
    / "b256" _ { IrAstTy::B256 }
    / "slice" _ { IrAstTy::Slice }
    / "__slice" _ "[" _ ty:ast_ty() "]" _ { IrAstTy::TypedSlice(Box::new(ty)) }
    / "string" _ "<" _ sz:decimal() ">" _ { IrAstTy::String(sz) }
    / array_ty()
    / struct_ty()
    / union_ty()
    / "__ptr" _ ty:ast_ty() _ { IrAstTy::TypedPtr(Box::new(ty)) }
    / "ptr" _ { IrAstTy::Ptr }
    / "never" _ { IrAstTy::Never }

rule array_ty() -> IrAstTy
    = "[" _ ty:ast_ty() ";" _ c:decimal() "]" _ {
        IrAstTy::Array(Box::new(ty), c)
    }

rule union_ty() -> IrAstTy
    = "(" _ tys:(ast_ty() ++ ("|" _)) ")" _ {
        IrAstTy::Union(tys)
    }

rule struct_ty() -> IrAstTy
    = "{" _ tys:(ast_ty() ** comma()) "}" _ {
        IrAstTy::Struct(tys)
    }

// An identifier as a plain string. The negative lookahead rejects bare type
// names so that e.g. a type annotation is not consumed as an identifier.
rule id() -> String
    = !(ast_ty() (" " "\n")) id:$(id_char0() id_char()*) _ {
        id.to_owned()
    }

// Same as `id()` but produced as a `sway_types::Ident` with a synthetic span.
rule id_id() -> Ident
    = !(ast_ty() (" " "\n")) id:$(id_char0() id_char()*) _ {
        Ident::new(Span::new(id.into(), 0, id.len(), None).unwrap())
    }

// `a::b::c` — a `::`-separated path.
rule path() -> Vec<String>
    = (id() ** "::")

// `a.b.c` — a `.`-separated field chain.
rule field_access() -> Vec<String>
    = (id() ** ".")
// Metadata decls are sensitive to the newlines since the assignee idx could belong to
// the previous decl otherwise. e.g.,
//
//   !1 = blah !2
//   !2 = 42
//
// If we did not make newlines significant we could parse the first struct as
// `!1 = blah !2 !2` and then get an error on the following `=`.
//
// An alternative is to put some other delimiter around naked indices, but using
// newlines below hasn't been that painful, so that'll do for now.
rule metadata_decls() -> Vec<(MdIdxRef, IrMetadatum)>
    = ds:(metadata_decl() ** nl()) _ {
        ds
    }

rule metadata_decl() -> (MdIdxRef, IrMetadatum)
    = idx:metadata_idx() "=" _ item:metadata_item() {
        (idx, item)
    }

// This rule (uniquely) does NOT discard the newline whitespace. `__` matches only
// spaces.
rule metadata_item() -> IrMetadatum
    = i:dec_digits() __ {
        IrMetadatum::Integer(i)
    }
    / "!" idx:dec_digits() __ {
        IrMetadatum::Index(idx)
    }
    / ['"'] s:$(([^ '"' | '\\'] / ['\\'] ['\\' | '"' ])+) ['"'] __ {
        // Metadata strings are printed with '\\' escaped; on parsing we unescape it.
        IrMetadatum::String(s.to_owned().replace("\\\\", "\\"))
    }
    / tag:$(id_char0() id_char()*) __ els:metadata_item()* {
        IrMetadatum::Struct(tag.to_owned(), els)
    }
    / "(" _ els:metadata_idx()*<2,> ")" __ {
        // Lists must contain at least 2 items, otherwise they needn't be lists.
        IrMetadatum::List(els)
    }

// Lexical building blocks for identifiers.
rule id_char0()
    = quiet!{ ['A'..='Z' | 'a'..='z' | '_'] }

rule id_char()
    = quiet!{ id_char0() / ['0'..='9'] }

rule decimal() -> u64
    = d:dec_digits() _ {
        d
    }

rule bool_lit() -> bool
    = "true" _ { true }
    / "false" _ { false }

// String of decimal digits without discarding whitespace. (Useful for newline
// sensitive metadata).
rule dec_digits() -> u64
    = ds:$("0" / ['1'..='9'] ['0'..='9']*) {
        ds.parse::<u64>().unwrap()
    }

rule comma()
    = quiet!{ "," _ }

// `_` skips any whitespace including newlines and comments; `__` skips only
// spaces and comments (newline-significant contexts).
rule _()
    = quiet!{ (space() / nl() / comment())* }

rule __()
    = quiet!{ (space() / comment())* }

rule space()
    = [' ' | '\t']

rule nl()
    = ['\n' | '\r']

rule comment()
    = "//" (!nl() [_])* nl()

rule eoi()
    = ![_] / expected!("end of input")
}
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
use crate::{
asm::{AsmArg, AsmInstruction},
block::Block,
constant::{ConstantContent, ConstantValue},
context::Context,
error::IrError,
function::Function,
instruction::{InstOp, Predicate, Register},
irtype::Type,
metadata::{MetadataIndex, Metadatum},
module::{Kind, Module},
value::Value,
variable::LocalVar,
Backtrace, BinaryOpKind, BlockArgument, ConfigContent, Constant, GlobalVar, Instruction,
LogEventData, StorageKey, UnaryOpKind, B256,
};
/// Parsed (textual) representation of an IR module, prior to being built into
/// a real `Module` in the context.
#[derive(Debug)]
pub(super) struct IrAstModule {
    kind: Kind,
    configs: Vec<IrAstConfig>,
    global_vars: Vec<IrAstGlobalVar>,
    // Always empty for scripts and predicates; only contracts parse these.
    storage_keys: Vec<IrAstStorageKey>,
    fn_decls: Vec<IrAstFnDecl>,
    metadata: Vec<(MdIdxRef, IrMetadatum)>,
}
/// A parsed `global` declaration: `global [mut] <name path> : <ty> [= <const>]`.
#[derive(Debug)]
pub(super) struct IrAstGlobalVar {
    name: Vec<String>,
    ty: IrAstTy,
    init: Option<IrAstOperation>,
    mutable: bool,
}
/// A parsed `storage_key` declaration with its namespace path, field chain,
/// slot constant, and optional offset/field-id constants.
#[derive(Debug)]
pub(super) struct IrAstStorageKey {
    namespaces: Vec<String>,
    fields: Vec<String>,
    slot: IrAstConst,
    offset: Option<IrAstConst>,
    field_id: Option<IrAstConst>,
}
/// A parsed function declaration: signature, locals, and basic blocks.
#[derive(Debug)]
struct IrAstFnDecl {
    name: String,
    args: Vec<(IrAstTy, String, Option<MdIdxRef>)>,
    ret_type: IrAstTy,
    is_public: bool,
    metadata: Option<MdIdxRef>,
    // (type, name, optional const initializer, mutable).
    locals: Vec<(IrAstTy, String, Option<IrAstOperation>, bool)>,
    blocks: Vec<IrAstBlock>,
    // 4-byte function selector, when declared as `<xxxxxxxx>`.
    selector: Option<[u8; 4]>,
    is_entry: bool,
    is_original_entry: bool,
    is_fallback: bool,
}
/// A parsed basic block: label, block arguments, and instruction list.
#[derive(Debug)]
struct IrAstBlock {
    label: String,
    args: Vec<(IrAstTy, String, Option<MdIdxRef>)>,
    instructions: Vec<IrAstInstruction>,
}
/// A parsed instruction, optionally bound to a value name and metadata index.
#[derive(Debug)]
struct IrAstInstruction {
    value_name: Option<String>,
    op: IrAstOperation,
    metadata: Option<MdIdxRef>,
}
#[derive(Debug)]
enum IrAstOperation {
Asm(
Vec<(Ident, Option<IrAstAsmArgInit>)>,
IrAstTy,
Option<Ident>,
Vec<IrAstAsmOp>,
Option<MdIdxRef>,
),
BitCast(String, IrAstTy),
UnaryOp(UnaryOpKind, String),
BinaryOp(BinaryOpKind, String, String),
Br(String, Vec<String>),
Call(String, Vec<String>),
CastPtr(String, IrAstTy),
Cbr(String, String, Vec<String>, String, Vec<String>),
Cmp(Predicate, String, String),
Const(IrAstTy, IrAstConst),
ContractCall(IrAstTy, String, String, String, String, String),
GetElemPtr(String, IrAstTy, Vec<String>),
GetLocal(String),
GetGlobal(Vec<String>),
GetConfig(String),
GetStorageKey(String),
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/pass_manager.rs | sway-ir/src/pass_manager.rs | use crate::{
create_arg_demotion_pass, create_arg_pointee_mutability_tagger_pass, create_ccp_pass,
create_const_demotion_pass, create_const_folding_pass, create_cse_pass, create_dce_pass,
create_dom_fronts_pass, create_dominators_pass, create_escaped_symbols_pass,
create_fn_dedup_debug_profile_pass, create_fn_dedup_release_profile_pass,
create_fn_inline_pass, create_globals_dce_pass, create_mem2reg_pass, create_memcpyopt_pass,
create_memcpyprop_reverse_pass, create_misc_demotion_pass, create_module_printer_pass,
create_module_verifier_pass, create_postorder_pass, create_ret_demotion_pass,
create_simplify_cfg_pass, create_sroa_pass, Context, Function, IrError, Module,
ARG_DEMOTION_NAME, ARG_POINTEE_MUTABILITY_TAGGER_NAME, CCP_NAME, CONST_DEMOTION_NAME,
CONST_FOLDING_NAME, CSE_NAME, DCE_NAME, FN_DEDUP_DEBUG_PROFILE_NAME,
FN_DEDUP_RELEASE_PROFILE_NAME, FN_INLINE_NAME, GLOBALS_DCE_NAME, MEM2REG_NAME, MEMCPYOPT_NAME,
MEMCPYPROP_REVERSE_NAME, MISC_DEMOTION_NAME, RET_DEMOTION_NAME, SIMPLIFY_CFG_NAME, SROA_NAME,
};
use downcast_rs::{impl_downcast, Downcast};
use rustc_hash::FxHashMap;
use std::{
any::{type_name, TypeId},
collections::{hash_map, HashSet},
};
/// Result of an analysis. Specific result must be downcasted to.
pub trait AnalysisResultT: Downcast {}
impl_downcast!(AnalysisResultT);

/// A type-erased, heap-allocated analysis result.
pub type AnalysisResult = Box<dyn AnalysisResultT>;
/// Program scope over which a pass executes.
pub trait PassScope {
    /// The slotmap key identifying this scope within the context arena,
    /// used to key cached analysis results.
    fn get_arena_idx(&self) -> slotmap::DefaultKey;
}
// A `Module` is identified by its arena key (its single tuple field).
impl PassScope for Module {
    fn get_arena_idx(&self) -> slotmap::DefaultKey {
        self.0
    }
}
// A `Function` is identified by its arena key (its single tuple field).
impl PassScope for Function {
    fn get_arena_idx(&self) -> slotmap::DefaultKey {
        self.0
    }
}
/// Is a pass an Analysis or a Transformation over the IR?
///
/// Analysis passes receive an immutable context; transformation passes receive
/// a mutable one and report whether they modified the IR.
#[derive(Clone)]
pub enum PassMutability<S: PassScope> {
    /// An analysis pass, producing an analysis result.
    Analysis(fn(&Context, analyses: &AnalysisResults, S) -> Result<AnalysisResult, IrError>),
    /// A pass over the IR that can possibly modify it.
    Transform(fn(&mut Context, analyses: &AnalysisResults, S) -> Result<bool, IrError>),
}
/// A concrete version of [PassScope]: a pass runner together with the scope
/// (whole module or single function) it executes over.
#[derive(Clone)]
pub enum ScopedPass {
    ModulePass(PassMutability<Module>),
    FunctionPass(PassMutability<Function>),
}
/// An analysis or transformation pass.
pub struct Pass {
    /// Pass identifier.
    pub name: &'static str,
    /// A short description.
    pub descr: &'static str,
    /// Other passes that this pass depends on. Dependencies must be analysis
    /// passes and must be registered before this pass (enforced by
    /// `PassManager::register`).
    pub deps: Vec<&'static str>,
    /// The executor.
    pub runner: ScopedPass,
}
impl Pass {
    /// Is this an analysis pass (one that produces a result without mutating the IR)?
    pub fn is_analysis(&self) -> bool {
        matches!(
            &self.runner,
            ScopedPass::ModulePass(PassMutability::Analysis(_))
                | ScopedPass::FunctionPass(PassMutability::Analysis(_))
        )
    }

    /// Is this a transformation pass? Exactly the complement of [Pass::is_analysis].
    pub fn is_transform(&self) -> bool {
        !self.is_analysis()
    }

    /// Does this pass execute over whole modules?
    pub fn is_module_pass(&self) -> bool {
        matches!(self.runner, ScopedPass::ModulePass(_))
    }

    /// Does this pass execute over individual functions?
    pub fn is_function_pass(&self) -> bool {
        matches!(self.runner, ScopedPass::FunctionPass(_))
    }
}
/// Cache of analysis results, keyed by result type and the scope it was
/// computed for.
#[derive(Default)]
pub struct AnalysisResults {
    // Hash from (AnalysisResultT, (PassScope, Scope Identity)) to an actual result.
    results: FxHashMap<(TypeId, (TypeId, slotmap::DefaultKey)), AnalysisResult>,
    // Maps a pass name to the TypeId of the result it most recently produced.
    name_typeid_map: FxHashMap<&'static str, TypeId>,
}
impl AnalysisResults {
    /// Get the results of an analysis.
    /// Example analyses.get_analysis_result::<DomTreeAnalysis>(foo).
    ///
    /// # Panics
    ///
    /// Panics if no result of type `T` is cached for `scope`, or if the cached
    /// result cannot be downcast to `T`.
    pub fn get_analysis_result<T: AnalysisResultT, S: PassScope + 'static>(&self, scope: S) -> &T {
        self.results
            .get(&(
                TypeId::of::<T>(),
                (TypeId::of::<S>(), scope.get_arena_idx()),
            ))
            .unwrap_or_else(|| {
                panic!(
                    "Internal error. Analysis result {} unavailable for {} with idx {:?}",
                    type_name::<T>(),
                    type_name::<S>(),
                    scope.get_arena_idx()
                )
            })
            .downcast_ref()
            .expect("AnalysisResult: Incorrect type")
    }

    /// Is an analysis result available at the given scope?
    fn is_analysis_result_available<S: PassScope + 'static>(
        &self,
        name: &'static str,
        scope: S,
    ) -> bool {
        // Translate the pass name to its result TypeId, then look up the cache.
        // A pass that never ran has no entry in `name_typeid_map`.
        self.name_typeid_map
            .get(name)
            .and_then(|result_typeid| {
                self.results
                    .get(&(*result_typeid, (TypeId::of::<S>(), scope.get_arena_idx())))
            })
            .is_some()
    }

    /// Add a new result.
    fn add_result<S: PassScope + 'static>(
        &mut self,
        name: &'static str,
        scope: S,
        result: AnalysisResult,
    ) {
        // The concrete (post-erasure) type id of the boxed result keys the cache.
        let result_typeid = (*result).type_id();
        self.results.insert(
            (result_typeid, (TypeId::of::<S>(), scope.get_arena_idx())),
            result,
        );
        self.name_typeid_map.insert(name, result_typeid);
    }

    /// Invalidate all results at a given scope.
    fn invalidate_all_results_at_scope<S: PassScope + 'static>(&mut self, scope: S) {
        self.results
            .retain(|(_result_typeid, (scope_typeid, scope_idx)), _v| {
                (*scope_typeid, *scope_idx) != (TypeId::of::<S>(), scope.get_arena_idx())
            });
    }
}
/// Options for printing [Pass]es in case of running them with printing requested.
///
/// Note that states of IR can always be printed by injecting the module printer pass
/// and just running the passes. That approach however offers less control over the
/// printing. E.g., requiring the printing to happen only if the previous passes
/// modified the IR cannot be done by simply injecting a module printer.
#[derive(Debug)]
pub struct PrintPassesOpts {
    /// Print the IR before the first pass runs.
    pub initial: bool,
    /// Print the IR after the last pass runs.
    pub r#final: bool,
    /// Only print after a listed pass if that pass actually modified the IR.
    pub modified_only: bool,
    /// Names of the passes after which to print.
    pub passes: HashSet<String>,
}
/// Options for verifying [Pass]es in case of running them with verifying requested.
///
/// Note that states of IR can always be verified by injecting the module verifier pass
/// and just running the passes. That approach however offers less control over the
/// verification. E.g., requiring the verification to happen only if the previous passes
/// modified the IR cannot be done by simply injecting a module verifier.
#[derive(Debug)]
pub struct VerifyPassesOpts {
    /// Verify the IR before the first pass runs.
    pub initial: bool,
    /// Verify the IR after the last pass runs.
    pub r#final: bool,
    /// Only verify after a listed pass if that pass actually modified the IR.
    pub modified_only: bool,
    /// Names of the passes after which to verify.
    pub passes: HashSet<String>,
}
/// Registry and runner for IR passes, with a cache of analysis results.
#[derive(Default)]
pub struct PassManager {
    // All registered passes, keyed by name.
    passes: FxHashMap<&'static str, Pass>,
    // Cached analysis results, invalidated when a transform modifies a scope.
    analyses: AnalysisResults,
}
impl PassManager {
    /// Names of all known optimization (transformation) passes.
    pub const OPTIMIZATION_PASSES: [&'static str; 15] = [
        FN_INLINE_NAME,
        SIMPLIFY_CFG_NAME,
        SROA_NAME,
        DCE_NAME,
        GLOBALS_DCE_NAME,
        FN_DEDUP_RELEASE_PROFILE_NAME,
        FN_DEDUP_DEBUG_PROFILE_NAME,
        MEM2REG_NAME,
        MEMCPYOPT_NAME,
        MEMCPYPROP_REVERSE_NAME,
        CONST_FOLDING_NAME,
        ARG_DEMOTION_NAME,
        CONST_DEMOTION_NAME,
        RET_DEMOTION_NAME,
        MISC_DEMOTION_NAME,
    ];

    /// Register a pass. Should be called only once for each pass.
    ///
    /// # Panics
    ///
    /// Panics if the pass is already registered, if a dependency is not yet
    /// registered, if a dependency is a transformation pass, or if a function
    /// pass depends on a module pass.
    pub fn register(&mut self, pass: Pass) -> &'static str {
        // Validate dependencies before inserting.
        for dep in &pass.deps {
            if let Some(dep_t) = self.lookup_registered_pass(dep) {
                if dep_t.is_transform() {
                    panic!(
                        "Pass {} cannot depend on a transformation pass {}",
                        pass.name, dep
                    );
                }
                if pass.is_function_pass() && dep_t.is_module_pass() {
                    panic!(
                        "Function pass {} cannot depend on module pass {}",
                        pass.name, dep
                    );
                }
            } else {
                panic!(
                    "Pass {} depends on a (yet) unregistered pass {}",
                    pass.name, dep
                );
            }
        }
        let pass_name = pass.name;
        match self.passes.entry(pass.name) {
            hash_map::Entry::Occupied(_) => {
                panic!("Trying to register an already registered pass");
            }
            hash_map::Entry::Vacant(entry) => {
                entry.insert(pass);
            }
        }
        pass_name
    }

    /// Run a single registered pass over every module in `ir`, running any
    /// (analysis) dependencies first if their results aren't cached.
    /// Returns true if the pass modified the IR.
    fn actually_run(&mut self, ir: &mut Context, pass: &'static str) -> Result<bool, IrError> {
        let mut modified = false;
        // Run a module-scoped pass, recursively satisfying its dependencies.
        fn run_module_pass(
            pm: &mut PassManager,
            ir: &mut Context,
            pass: &'static str,
            module: Module,
        ) -> Result<bool, IrError> {
            let mut modified = false;
            let pass_t = pm.passes.get(pass).expect("Unregistered pass");
            for dep in pass_t.deps.clone() {
                let dep_t = pm.passes.get(dep).expect("Unregistered dependent pass");
                // If pass registration allows transformations as dependents, we could remove this I guess.
                assert!(dep_t.is_analysis());
                match dep_t.runner {
                    ScopedPass::ModulePass(_) => {
                        if !pm.analyses.is_analysis_result_available(dep, module) {
                            run_module_pass(pm, ir, dep, module)?;
                        }
                    }
                    ScopedPass::FunctionPass(_) => {
                        // A function-pass dependency must be satisfied for every
                        // function in the module.
                        for f in module.function_iter(ir) {
                            if !pm.analyses.is_analysis_result_available(dep, f) {
                                run_function_pass(pm, ir, dep, f)?;
                            }
                        }
                    }
                }
            }
            // Get the pass again to satisfy the borrow checker.
            let pass_t = pm.passes.get(pass).expect("Unregistered pass");
            let ScopedPass::ModulePass(mp) = pass_t.runner.clone() else {
                panic!("Expected a module pass");
            };
            match mp {
                PassMutability::Analysis(analysis) => {
                    let result = analysis(ir, &pm.analyses, module)?;
                    pm.analyses.add_result(pass, module, result);
                }
                PassMutability::Transform(transform) => {
                    if transform(ir, &pm.analyses, module)? {
                        // The IR changed: cached analyses for the module and all
                        // its functions are now stale.
                        pm.analyses.invalidate_all_results_at_scope(module);
                        for f in module.function_iter(ir) {
                            pm.analyses.invalidate_all_results_at_scope(f);
                        }
                        modified = true;
                    }
                }
            }
            Ok(modified)
        }
        // Run a function-scoped pass, recursively satisfying its dependencies.
        fn run_function_pass(
            pm: &mut PassManager,
            ir: &mut Context,
            pass: &'static str,
            function: Function,
        ) -> Result<bool, IrError> {
            let mut modified = false;
            let pass_t = pm.passes.get(pass).expect("Unregistered pass");
            for dep in pass_t.deps.clone() {
                let dep_t = pm.passes.get(dep).expect("Unregistered dependent pass");
                // If pass registration allows transformations as dependents, we could remove this I guess.
                assert!(dep_t.is_analysis());
                match dep_t.runner {
                    ScopedPass::ModulePass(_) => {
                        panic!("Function pass {pass} cannot depend on module pass {dep}")
                    }
                    ScopedPass::FunctionPass(_) => {
                        if !pm.analyses.is_analysis_result_available(dep, function) {
                            run_function_pass(pm, ir, dep, function)?;
                        };
                    }
                }
            }
            // Get the pass again to satisfy the borrow checker.
            let pass_t = pm.passes.get(pass).expect("Unregistered pass");
            let ScopedPass::FunctionPass(fp) = pass_t.runner.clone() else {
                panic!("Expected a function pass");
            };
            match fp {
                PassMutability::Analysis(analysis) => {
                    let result = analysis(ir, &pm.analyses, function)?;
                    pm.analyses.add_result(pass, function, result);
                }
                PassMutability::Transform(transform) => {
                    if transform(ir, &pm.analyses, function)? {
                        pm.analyses.invalidate_all_results_at_scope(function);
                        modified = true;
                    }
                }
            }
            Ok(modified)
        }
        for m in ir.module_iter() {
            let pass_t = self.passes.get(pass).expect("Unregistered pass");
            let pass_runner = pass_t.runner.clone();
            match pass_runner {
                ScopedPass::ModulePass(_) => {
                    modified |= run_module_pass(self, ir, pass, m)?;
                }
                ScopedPass::FunctionPass(_) => {
                    modified |= run_function_pass(self, ir, pass, f)?;
                }
            }
        }
        Ok(modified)
    }

    /// Run the `passes` and return true if the `passes` modify the initial `ir`.
    pub fn run(&mut self, ir: &mut Context, passes: &PassGroup) -> Result<bool, IrError> {
        let mut modified = false;
        for pass in passes.flatten_pass_group() {
            modified |= self.actually_run(ir, pass)?;
        }
        Ok(modified)
    }

    /// Run the `passes` and return true if the `passes` modify the initial `ir`.
    /// The IR states are printed and verified according to the options provided.
    pub fn run_with_print_verify(
        &mut self,
        ir: &mut Context,
        passes: &PassGroup,
        print_opts: &PrintPassesOpts,
        verify_opts: &VerifyPassesOpts,
    ) -> Result<bool, IrError> {
        // Empty IRs are result of compiling dependencies. We don't want to print those.
        fn ir_is_empty(ir: &Context) -> bool {
            ir.functions.is_empty()
                && ir.blocks.is_empty()
                && ir.values.is_empty()
                && ir.local_vars.is_empty()
        }

        fn print_ir_after_pass(ir: &Context, pass: &Pass) {
            if !ir_is_empty(ir) {
                println!("// IR: [{}] {}", pass.name, pass.descr);
                println!("{ir}");
            }
        }

        fn print_initial_or_final_ir(ir: &Context, initial_or_final: &'static str) {
            if !ir_is_empty(ir) {
                println!("// IR: {initial_or_final}");
                println!("{ir}");
            }
        }

        if print_opts.initial {
            print_initial_or_final_ir(ir, "Initial");
        }
        if verify_opts.initial {
            ir.verify()?;
        }

        let mut modified = false;
        for pass in passes.flatten_pass_group() {
            let modified_in_pass = self.actually_run(ir, pass)?;
            if print_opts.passes.contains(pass) && (!print_opts.modified_only || modified_in_pass) {
                print_ir_after_pass(ir, self.lookup_registered_pass(pass).unwrap());
            }
            modified |= modified_in_pass;
            if verify_opts.passes.contains(pass) && (!verify_opts.modified_only || modified_in_pass)
            {
                ir.verify()?;
            }
        }

        if print_opts.r#final {
            print_initial_or_final_ir(ir, "Final");
        }
        if verify_opts.r#final {
            ir.verify()?;
        }

        Ok(modified)
    }

    /// Get reference to a registered pass.
    pub fn lookup_registered_pass(&self, name: &str) -> Option<&Pass> {
        self.passes.get(name)
    }

    /// A human-readable summary of all registered passes, one per line.
    pub fn help_text(&self) -> String {
        let summary = self
            .passes
            .iter()
            .map(|(name, pass)| format!("  {name:16} - {}", pass.descr))
            .collect::<Vec<_>>()
            .join("\n");

        format!("Valid pass names are:\n\n{summary}",)
    }
}
/// A group of passes.
/// Can contain sub-groups.
/// Passes run in the order they appear, with sub-groups flattened depth-first.
#[derive(Default)]
pub struct PassGroup(Vec<PassOrGroup>);
/// An individual pass, or a group (with possible subgroup) of passes.
pub enum PassOrGroup {
    Pass(&'static str),
    Group(PassGroup),
}
impl PassGroup {
    /// Flatten this group (including all nested subgroups, depth-first) into an
    /// ordered list of pass names.
    fn flatten_pass_group(&self) -> Vec<&'static str> {
        // Recursive walk appending pass names in declaration order.
        fn collect(group: &PassGroup, acc: &mut Vec<&'static str>) {
            for entry in &group.0 {
                match entry {
                    PassOrGroup::Pass(name) => acc.push(name),
                    PassOrGroup::Group(nested) => collect(nested, acc),
                }
            }
        }
        let mut flattened = Vec::new();
        collect(self, &mut flattened);
        flattened
    }

    /// Append a pass to this group.
    pub fn append_pass(&mut self, pass: &'static str) {
        self.0.push(PassOrGroup::Pass(pass));
    }

    /// Append a pass group.
    pub fn append_group(&mut self, group: PassGroup) {
        self.0.push(PassOrGroup::Group(group));
    }
}
/// A convenience utility to register known passes.
pub fn register_known_passes(pm: &mut PassManager) {
    // Analysis passes.
    pm.register(create_postorder_pass());
    pm.register(create_dominators_pass());
    pm.register(create_dom_fronts_pass());
    pm.register(create_escaped_symbols_pass());
    pm.register(create_module_printer_pass());
    pm.register(create_module_verifier_pass());
    // Optimization passes.
    pm.register(create_arg_pointee_mutability_tagger_pass());
    pm.register(create_fn_dedup_release_profile_pass());
    pm.register(create_fn_dedup_debug_profile_pass());
    pm.register(create_mem2reg_pass());
    pm.register(create_sroa_pass());
    pm.register(create_fn_inline_pass());
    pm.register(create_const_folding_pass());
    pm.register(create_ccp_pass());
    pm.register(create_simplify_cfg_pass());
    pm.register(create_globals_dce_pass());
    pm.register(create_dce_pass());
    pm.register(create_cse_pass());
    pm.register(create_arg_demotion_pass());
    pm.register(create_const_demotion_pass());
    pm.register(create_ret_demotion_pass());
    pm.register(create_misc_demotion_pass());
    pm.register(create_memcpyopt_pass());
    pm.register(create_memcpyprop_reverse_pass());
}
pub fn create_o1_pass_group() -> PassGroup {
// Create a create_ccp_passo specify which passes we want to run now.
let mut o1 = PassGroup::default();
// Configure to run our passes.
o1.append_pass(MEM2REG_NAME);
o1.append_pass(FN_DEDUP_RELEASE_PROFILE_NAME);
o1.append_pass(FN_INLINE_NAME);
o1.append_pass(ARG_POINTEE_MUTABILITY_TAGGER_NAME);
o1.append_pass(SIMPLIFY_CFG_NAME);
o1.append_pass(GLOBALS_DCE_NAME);
o1.append_pass(DCE_NAME);
o1.append_pass(FN_INLINE_NAME);
o1.append_pass(ARG_POINTEE_MUTABILITY_TAGGER_NAME);
o1.append_pass(CCP_NAME);
o1.append_pass(CONST_FOLDING_NAME);
o1.append_pass(SIMPLIFY_CFG_NAME);
o1.append_pass(CSE_NAME);
o1.append_pass(CONST_FOLDING_NAME);
o1.append_pass(SIMPLIFY_CFG_NAME);
o1.append_pass(GLOBALS_DCE_NAME);
o1.append_pass(DCE_NAME);
o1.append_pass(FN_DEDUP_RELEASE_PROFILE_NAME);
o1
}
/// Utility to insert a pass after every pass in the given group `pg`.
/// It preserves the `pg` group's structure. This means if `pg` has subgroups
/// and those have subgroups, the resulting [PassGroup] will have the
/// same subgroups, but with the `pass` inserted after every pass in every
/// subgroup, as well as all passes outside of any groups.
pub fn insert_after_each(pg: PassGroup, pass: &'static str) -> PassGroup {
fn insert_after_each_rec(pg: PassGroup, pass: &'static str) -> Vec<PassOrGroup> {
pg.0.into_iter()
.flat_map(|p_o_g| match p_o_g {
PassOrGroup::Group(group) => vec![PassOrGroup::Group(PassGroup(
insert_after_each_rec(group, pass),
))],
PassOrGroup::Pass(_) => vec![p_o_g, PassOrGroup::Pass(pass)],
})
.collect()
}
PassGroup(insert_after_each_rec(pg, pass))
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/module.rs | sway-ir/src/module.rs | //! A scope containing a collection of [`Function`]s and constant values.
//!
//! A module also has a 'kind' corresponding to the different Sway module types.
use std::{cell::Cell, collections::BTreeMap};
use crate::{
context::Context,
function::{Function, FunctionIterator},
Constant, GlobalVar, MetadataIndex, StorageKey, Type,
};
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Module(pub slotmap::DefaultKey);
#[doc(hidden)]
pub struct ModuleContent {
pub kind: Kind,
pub functions: Vec<Function>,
pub global_variables: BTreeMap<Vec<String>, GlobalVar>,
pub configs: BTreeMap<String, ConfigContent>,
pub storage_keys: BTreeMap<String, StorageKey>,
}
#[derive(Clone, Debug)]
pub enum ConfigContent {
V0 {
name: String,
ty: Type,
ptr_ty: Type,
constant: Constant,
opt_metadata: Option<MetadataIndex>,
},
V1 {
name: String,
ty: Type,
ptr_ty: Type,
encoded_bytes: Vec<u8>,
decode_fn: Cell<Function>,
opt_metadata: Option<MetadataIndex>,
},
}
/// The different 'kinds' of Sway module: `Contract`, `Library`, `Predicate` or `Script`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Kind {
Contract,
Library,
Predicate,
Script,
}
impl Module {
/// Return a new module of a specific kind.
pub fn new(context: &mut Context, kind: Kind) -> Module {
let content = ModuleContent {
kind,
functions: Vec::new(),
global_variables: BTreeMap::new(),
configs: BTreeMap::new(),
storage_keys: BTreeMap::new(),
};
Module(context.modules.insert(content))
}
/// Get this module's [`Kind`].
pub fn get_kind(&self, context: &Context) -> Kind {
context.modules[self.0].kind
}
/// Return an iterator over each of the [`Function`]s in this module.
pub fn function_iter(&self, context: &Context) -> FunctionIterator {
FunctionIterator::new(context, self)
}
/// Add a global variable value to this module.
pub fn add_global_variable(
&self,
context: &mut Context,
call_path: Vec<String>,
const_val: GlobalVar,
) {
context.modules[self.0]
.global_variables
.insert(call_path, const_val);
}
/// Add a value to the module global storage, by forcing the name to be unique if needed.
///
/// Will use the provided name as a hint and eventually rename it to guarantee insertion.
pub fn new_unique_global_var(
&self,
context: &mut Context,
name: String,
local_type: Type,
initializer: Option<Constant>,
mutable: bool,
) -> GlobalVar {
let module = &context.modules[self.0];
let new_name = if module.global_variables.contains_key(&vec![name.clone()]) {
// Assuming that we'll eventually find a unique name by appending numbers to the old
// one...
(0..)
.find_map(|n| {
let candidate = format!("{name}{n}");
if module
.global_variables
.contains_key(&vec![candidate.clone()])
{
None
} else {
Some(candidate)
}
})
.unwrap()
} else {
name
};
let gv = GlobalVar::new(context, local_type, initializer, mutable);
self.add_global_variable(context, vec![new_name], gv);
gv
}
/// Get a named global variable from this module, if found.
pub fn get_global_variable(
&self,
context: &Context,
call_path: &Vec<String>,
) -> Option<GlobalVar> {
context.modules[self.0]
.global_variables
.get(call_path)
.copied()
}
/// Lookup global variable name.
pub fn lookup_global_variable_name(
&self,
context: &Context,
global: &GlobalVar,
) -> Option<String> {
context.modules[self.0]
.global_variables
.iter()
.find(|(_key, val)| *val == global)
.map(|(key, _)| key.join("::"))
}
/// Add a config value to this module.
pub fn add_config(&self, context: &mut Context, name: String, content: ConfigContent) {
context.modules[self.0].configs.insert(name, content);
}
/// Get a named config content from this module, if found.
pub fn get_config<'a>(&self, context: &'a Context, name: &str) -> Option<&'a ConfigContent> {
context.modules[self.0].configs.get(name)
}
/// Add a storage key value to this module.
pub fn add_storage_key(&self, context: &mut Context, path: String, storage_key: StorageKey) {
context.modules[self.0]
.storage_keys
.insert(path, storage_key);
}
/// Get a storage key with the given `path` from this module, if found.
pub fn get_storage_key<'a>(&self, context: &'a Context, path: &str) -> Option<&'a StorageKey> {
context.modules[self.0].storage_keys.get(path)
}
/// Lookup storage key path.
pub fn lookup_storage_key_path<'a>(
&self,
context: &'a Context,
storage_key: &StorageKey,
) -> Option<&'a str> {
context.modules[self.0]
.storage_keys
.iter()
.find(|(_key, val)| *val == storage_key)
.map(|(key, _)| key.as_str())
}
/// Removed a function from the module. Returns true if function was found and removed.
///
/// **Use with care! Be sure the function is not an entry point nor called at any stage.**
pub fn remove_function(&self, context: &mut Context, function: &Function) {
context
.modules
.get_mut(self.0)
.expect("Module must exist in context.")
.functions
.retain(|mod_fn| mod_fn != function);
}
pub fn iter_configs<'a>(
&'a self,
context: &'a Context,
) -> impl Iterator<Item = &'a ConfigContent> + 'a {
context.modules[self.0].configs.values()
}
}
/// An iterator over [`Module`]s within a [`Context`].
pub struct ModuleIterator {
modules: Vec<slotmap::DefaultKey>,
next: usize,
}
impl ModuleIterator {
/// Return a new [`Module`] iterator.
pub fn new(context: &Context) -> ModuleIterator {
// Copy all the current modules indices, so they may be modified in the context during
// iteration.
ModuleIterator {
modules: context.modules.iter().map(|pair| pair.0).collect(),
next: 0,
}
}
}
impl Iterator for ModuleIterator {
type Item = Module;
fn next(&mut self) -> Option<Module> {
if self.next < self.modules.len() {
let idx = self.next;
self.next += 1;
Some(Module(self.modules[idx]))
} else {
None
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/analysis.rs | sway-ir/src/analysis.rs | pub mod call_graph;
pub use call_graph::*;
pub mod dominator;
pub use dominator::*;
pub mod memory_utils;
pub use memory_utils::*;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize.rs | sway-ir/src/optimize.rs | //! A collection of optimization passes.
//!
//! Each of these modules are a collection of typical code optimisation passes.
//!
//! Currently there is no pass manager, as there are only a couple of passes, but this is something
//! which will be added in the future.
//!
//! So each of the functions under this module will return a boolean indicating whether a
//! modification to the IR was made. Typically the passes will be just re-run until they no longer
//! make any such modifications, implying they've optimized as much possible.
//!
//! When writing passes one should keep in mind that when a modification is made then any iterators
//! over blocks or instructions can be invalidated, and starting over is a safer option than trying
//! to attempt multiple changes at once.
pub mod arg_demotion;
pub use arg_demotion::*;
pub mod arg_mutability_tagger;
pub use arg_mutability_tagger::*;
pub mod const_demotion;
pub use const_demotion::*;
pub mod constants;
pub use constants::*;
pub mod conditional_constprop;
pub use conditional_constprop::*;
pub mod cse;
pub use cse::*;
pub mod dce;
pub use dce::*;
pub mod inline;
pub use inline::*;
pub mod mem2reg;
pub use mem2reg::*;
pub mod memcpyopt;
pub use memcpyopt::*;
pub mod misc_demotion;
pub use misc_demotion::*;
pub mod ret_demotion;
pub use ret_demotion::*;
pub mod simplify_cfg;
pub use simplify_cfg::*;
pub mod sroa;
pub use sroa::*;
pub mod fn_dedup;
pub use fn_dedup::*;
mod target_fuel;
#[cfg(test)]
pub mod tests {
use crate::{Backtrace, PassGroup, PassManager};
use sway_features::ExperimentalFeatures;
use sway_types::SourceEngine;
/// This function parses the IR text representation and run the specified optimizers passes.
/// Then, depending on the `expected` parameter it checks if the IR was optimized or not.
///
/// This comparison is done by capturing all instructions with metadata "!0".
///
/// For example:
///
/// ```rust, ignore
/// assert_optimization(
/// &[CONST_FOLDING_NAME],
/// "entry fn main() -> u64 {
/// entry():
/// l = const u64 1
/// r = const u64 2
/// result = add l, r, !0
/// ret u64 result
/// }",
/// ["const u64 3"],
/// );
/// ```
pub(crate) fn assert_optimization<'a>(
passes: &[&'static str],
body: &str,
expected: Option<impl IntoIterator<Item = &'a str>>,
) {
let source_engine = SourceEngine::default();
let mut context = crate::parse(
&format!(
"script {{
{body}
}}
!0 = \"a.sw\"
"
),
&source_engine,
ExperimentalFeatures::default(),
Backtrace::default(),
)
.unwrap();
let mut pass_manager = PassManager::default();
crate::register_known_passes(&mut pass_manager);
let mut group = PassGroup::default();
for pass in passes {
group.append_pass(pass);
}
let before = context.to_string();
let modified = pass_manager.run(&mut context, &group).unwrap();
let after = context.to_string();
// print diff to help debug
if std::env::args().any(|x| x == "--nocapture") {
println!("{}", prettydiff::diff_lines(&before, &after));
}
assert_eq!(expected.is_some(), modified);
let Some(expected) = expected else {
return;
};
let actual = context
.to_string()
.lines()
.filter_map(|x| {
if x.contains(", !") {
Some(format!("{}\n", x.trim()))
} else {
None
}
})
.collect::<Vec<String>>();
assert!(!actual.is_empty());
let mut expected_matches = actual.len();
for (actual, expected) in actual.iter().zip(expected) {
if !actual.contains(expected) {
panic!("Actual: {actual:?} does not contains expected: {expected:?}. (Run with --nocapture to see a diff)");
} else {
expected_matches -= 1;
}
}
assert_eq!(expected_matches, 0);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/value.rs | sway-ir/src/value.rs | //! The base descriptor for various values within the IR.
//!
//! [`Value`]s can be function arguments, constants and instructions. [`Instruction`]s generally
//! refer to each other and to constants via the [`Value`] wrapper.
//!
//! Like most IR data structures they are `Copy` and cheap to pass around by value. They are
//! therefore also easy to replace, a common practice for optimization passes.
use rustc_hash::FxHashMap;
use crate::{
block::BlockArgument,
context::Context,
instruction::InstOp,
irtype::Type,
metadata::{combine, MetadataIndex},
pretty::DebugWithContext,
Block, Constant, Instruction,
};
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, DebugWithContext)]
pub struct Value(#[in_context(values)] pub slotmap::DefaultKey);
#[doc(hidden)]
#[derive(Debug, Clone, DebugWithContext)]
pub struct ValueContent {
pub value: ValueDatum,
pub metadata: Option<MetadataIndex>,
}
#[doc(hidden)]
#[derive(Debug, Clone, DebugWithContext)]
pub enum ValueDatum {
Argument(BlockArgument),
Constant(Constant),
Instruction(Instruction),
}
impl Value {
/// Return a new argument [`Value`].
pub fn new_argument(context: &mut Context, arg: BlockArgument) -> Value {
let content = ValueContent {
value: ValueDatum::Argument(arg),
metadata: None,
};
Value(context.values.insert(content))
}
/// Return a new constant [`Value`].
pub fn new_constant(context: &mut Context, constant: Constant) -> Value {
let content = ValueContent {
value: ValueDatum::Constant(constant),
metadata: None,
};
Value(context.values.insert(content))
}
/// Return a new `u64` constant [`Value`] set to `value`.
pub fn new_u64_constant(context: &mut Context, value: u64) -> Value {
let constant = crate::ConstantContent::new_uint(context, 64, value);
let constant = Constant::unique(context, constant);
Self::new_constant(context, constant)
}
/// Return a new instruction [`Value`].
pub fn new_instruction(context: &mut Context, block: Block, instruction: InstOp) -> Value {
let content = ValueContent {
value: ValueDatum::Instruction(Instruction {
op: instruction,
parent: block,
}),
metadata: None,
};
Value(context.values.insert(content))
}
/// Add some metadata to this value.
///
/// As a convenience the `md_idx` argument is an `Option`, in which case this function is a
/// no-op.
///
/// If there is no existing metadata then the new metadata are added alone. Otherwise the new
/// metadatum are added to the list of metadata.
pub fn add_metadatum(self, context: &mut Context, md_idx: Option<MetadataIndex>) -> Self {
if md_idx.is_some() {
let orig_md = context.values[self.0].metadata;
let new_md = combine(context, &orig_md, &md_idx);
context.values[self.0].metadata = new_md;
}
self
}
/// Return this value's metadata.
pub fn get_metadata(&self, context: &Context) -> Option<MetadataIndex> {
context.values[self.0].metadata
}
/// Return whether this is a constant value.
pub fn is_constant(&self, context: &Context) -> bool {
matches!(context.values[self.0].value, ValueDatum::Constant(_))
}
/// Return whether this value is an instruction, and specifically a 'terminator'.
///
/// A terminator is always the last instruction in a block (and may not appear anywhere else)
/// and is either a branch or return.
pub fn is_terminator(&self, context: &Context) -> bool {
match &context.values[self.0].value {
ValueDatum::Instruction(Instruction { op, .. }) => op.is_terminator(),
ValueDatum::Argument(..) | ValueDatum::Constant(..) => false,
}
}
/// If this value is an instruction and if any of its parameters is `old_val` then replace them
/// with `new_val`.
pub fn replace_instruction_value(&self, context: &mut Context, old_val: Value, new_val: Value) {
self.replace_instruction_values(context, &FxHashMap::from_iter([(old_val, new_val)]))
}
/// If this value is an instruction and if any of its parameters is in `replace_map` as
/// a key, replace it with the mapped value.
pub fn replace_instruction_values(
&self,
context: &mut Context,
replace_map: &FxHashMap<Value, Value>,
) {
if let ValueDatum::Instruction(instruction) =
&mut context.values.get_mut(self.0).unwrap().value
{
instruction.op.replace_values(replace_map);
}
}
/// Replace this value with another one, in-place.
pub fn replace(&self, context: &mut Context, other: ValueDatum) {
context.values[self.0].value = other;
}
/// Get a reference to this value as an instruction, iff it is one.
pub fn get_instruction<'a>(&self, context: &'a Context) -> Option<&'a Instruction> {
if let ValueDatum::Instruction(instruction) = &context.values[self.0].value {
Some(instruction)
} else {
None
}
}
/// Get a mutable reference to this value as an instruction, iff it is one.
pub fn get_instruction_mut<'a>(&self, context: &'a mut Context) -> Option<&'a mut Instruction> {
if let ValueDatum::Instruction(instruction) =
&mut context.values.get_mut(self.0).unwrap().value
{
Some(instruction)
} else {
None
}
}
/// Get a reference to this value as a constant, iff it is one.
pub fn get_constant<'a>(&self, context: &'a Context) -> Option<&'a Constant> {
if let ValueDatum::Constant(cn) = &context.values[self.0].value {
Some(cn)
} else {
None
}
}
/// Get a reference to this value as an argument, iff it is one.
pub fn get_argument<'a>(&self, context: &'a Context) -> Option<&'a BlockArgument> {
if let ValueDatum::Argument(arg) = &context.values[self.0].value {
Some(arg)
} else {
None
}
}
/// Get a mutable reference to this value as an argument, iff it is one.
pub fn get_argument_mut<'a>(&self, context: &'a mut Context) -> Option<&'a mut BlockArgument> {
if let ValueDatum::Argument(arg) = &mut context.values[self.0].value {
Some(arg)
} else {
None
}
}
/// Get the type for this value, if found.
///
/// Arguments and constants always have a type, but only some instructions do.
pub fn get_type(&self, context: &Context) -> Option<Type> {
match &context.values[self.0].value {
ValueDatum::Argument(BlockArgument { ty, .. }) => Some(*ty),
ValueDatum::Constant(c) => Some(c.get_content(context).ty),
ValueDatum::Instruction(ins) => ins.get_type(context),
}
}
/// Get the pointer inner type for this value, iff it is a pointer.
pub fn match_ptr_type(&self, context: &Context) -> Option<Type> {
self.get_type(context)
.and_then(|ty| ty.get_pointee_type(context))
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/function.rs | sway-ir/src/function.rs | //! A typical function data type.
//!
//! [`Function`] is named, takes zero or more arguments and has an optional return value. It
//! contains a collection of [`Block`]s.
//!
//! It also maintains a collection of local values which can be typically regarded as variables
//! existing in the function scope.
use std::collections::{BTreeMap, HashMap};
use std::fmt::Write;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
block::{Block, BlockIterator, Label},
context::Context,
error::IrError,
irtype::Type,
metadata::MetadataIndex,
module::Module,
value::{Value, ValueDatum},
variable::{LocalVar, LocalVarContent},
BlockArgument, BranchToWithArgs,
};
use crate::{Constant, InstOp};
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Function(pub slotmap::DefaultKey);
#[doc(hidden)]
pub struct FunctionContent {
pub name: String,
/// Display string representing the function in the ABI errors
/// related context (in "errorCodes" and "panickingCalls" sections).
// TODO: Explore how and if we should lazy evaluate `abi_errors_display`,
// only for functions that are actually used in ABI errors context.
// Having it precomputed for every function is a simple design.
// Lazy evaluation might be much more complex to implement and
// a premature optimization, considering that even for large
// project we compile <1500 functions.
pub abi_errors_display: String,
pub arguments: Vec<(String, Value)>,
pub return_type: Type,
pub blocks: Vec<Block>,
pub module: Module,
pub is_public: bool,
pub is_entry: bool,
/// True if the function was an entry, before getting wrapped
/// by the `__entry` function. E.g, a script `main` function.
pub is_original_entry: bool,
pub is_fallback: bool,
pub selector: Option<[u8; 4]>,
pub metadata: Option<MetadataIndex>,
pub local_storage: BTreeMap<String, LocalVar>, // BTree rather than Hash for deterministic ordering.
next_label_idx: u64,
}
impl Function {
/// Return a new [`Function`] handle.
///
/// Creates a [`Function`] in the `context` within `module` and returns a handle.
///
/// `name`, `args`, `return_type` and `is_public` are the usual suspects. `selector` is a
/// special value used for Sway contract calls; much like `name` is unique and not particularly
/// used elsewhere in the IR.
#[allow(clippy::too_many_arguments)]
pub fn new(
context: &mut Context,
module: Module,
name: String,
abi_errors_display: String,
args: Vec<(String, Type, Option<MetadataIndex>)>,
return_type: Type,
selector: Option<[u8; 4]>,
is_public: bool,
is_entry: bool,
is_original_entry: bool,
is_fallback: bool,
metadata: Option<MetadataIndex>,
) -> Function {
let content = FunctionContent {
name,
abi_errors_display,
// Arguments to a function are the arguments to its entry block.
// We set it up after creating the entry block below.
arguments: Vec::new(),
return_type,
blocks: Vec::new(),
module,
is_public,
is_entry,
is_original_entry,
is_fallback,
selector,
metadata,
local_storage: BTreeMap::new(),
next_label_idx: 0,
};
let func = Function(context.functions.insert(content));
context.modules[module.0].functions.push(func);
let entry_block = Block::new(context, func, Some("entry".to_owned()));
context
.functions
.get_mut(func.0)
.unwrap()
.blocks
.push(entry_block);
// Setup the arguments.
let arguments: Vec<_> = args
.into_iter()
.enumerate()
.map(|(idx, (name, ty, arg_metadata))| {
(
name,
Value::new_argument(
context,
BlockArgument {
block: entry_block,
idx,
ty,
is_immutable: false,
},
)
.add_metadatum(context, arg_metadata),
)
})
.collect();
context
.functions
.get_mut(func.0)
.unwrap()
.arguments
.clone_from(&arguments);
let (_, arg_vals): (Vec<_>, Vec<_>) = arguments.iter().cloned().unzip();
context.blocks.get_mut(entry_block.0).unwrap().args = arg_vals;
func
}
/// Create and append a new [`Block`] to this function.
pub fn create_block(&self, context: &mut Context, label: Option<Label>) -> Block {
let block = Block::new(context, *self, label);
let func = context.functions.get_mut(self.0).unwrap();
func.blocks.push(block);
block
}
/// Create and insert a new [`Block`] into this function.
///
/// The new block is inserted before `other`.
pub fn create_block_before(
&self,
context: &mut Context,
other: &Block,
label: Option<Label>,
) -> Result<Block, IrError> {
let block_idx = context.functions[self.0]
.blocks
.iter()
.position(|block| block == other)
.ok_or_else(|| {
let label = &context.blocks[other.0].label;
IrError::MissingBlock(label.clone())
})?;
let new_block = Block::new(context, *self, label);
context.functions[self.0]
.blocks
.insert(block_idx, new_block);
Ok(new_block)
}
/// Create and insert a new [`Block`] into this function.
///
/// The new block is inserted after `other`.
pub fn create_block_after(
&self,
context: &mut Context,
other: &Block,
label: Option<Label>,
) -> Result<Block, IrError> {
// We need to create the new block first (even though we may not use it on Err below) since
// we can't borrow context mutably twice.
let new_block = Block::new(context, *self, label);
let func = context.functions.get_mut(self.0).unwrap();
func.blocks
.iter()
.position(|block| block == other)
.map(|idx| {
func.blocks.insert(idx + 1, new_block);
new_block
})
.ok_or_else(|| {
let label = &context.blocks[other.0].label;
IrError::MissingBlock(label.clone())
})
}
/// Remove a [`Block`] from this function.
///
/// > Care must be taken to ensure the block has no predecessors otherwise the function will be
/// > made invalid.
pub fn remove_block(&self, context: &mut Context, block: &Block) -> Result<(), IrError> {
let label = block.get_label(context);
let func = context.functions.get_mut(self.0).unwrap();
let block_idx = func
.blocks
.iter()
.position(|b| b == block)
.ok_or(IrError::RemoveMissingBlock(label))?;
func.blocks.remove(block_idx);
Ok(())
}
/// Remove instructions from function that satisfy a given predicate.
pub fn remove_instructions<T: Fn(Value) -> bool>(&self, context: &mut Context, pred: T) {
for block in context.functions[self.0].blocks.clone() {
block.remove_instructions(context, &pred);
}
}
/// Get a new unique block label.
///
/// If `hint` is `None` then the label will be in the form `"blockN"` where N is an
/// incrementing decimal.
///
/// Otherwise if the hint is already unique to this function it will be returned. If not
/// already unique it will have N appended to it until it is unique.
pub fn get_unique_label(&self, context: &mut Context, hint: Option<String>) -> String {
match hint {
Some(hint) => {
if context.functions[self.0]
.blocks
.iter()
.any(|block| context.blocks[block.0].label == hint)
{
let idx = self.get_next_label_idx(context);
self.get_unique_label(context, Some(format!("{hint}{idx}")))
} else {
hint
}
}
None => {
let idx = self.get_next_label_idx(context);
self.get_unique_label(context, Some(format!("block{idx}")))
}
}
}
fn get_next_label_idx(&self, context: &mut Context) -> u64 {
let func = context.functions.get_mut(self.0).unwrap();
let idx = func.next_label_idx;
func.next_label_idx += 1;
idx
}
/// Return the number of blocks in this function.
pub fn num_blocks(&self, context: &Context) -> usize {
context.functions[self.0].blocks.len()
}
/// Return the number of instructions in this function.
///
/// The [crate::InstOp::AsmBlock] is counted as a single instruction,
/// regardless of the number of [crate::asm::AsmInstruction]s in the ASM block.
/// E.g., even if the ASM block is empty and contains no instructions, it
/// will still be counted as a single instruction.
///
/// If you want to count every ASM instruction as an instruction, use
/// `num_instructions_incl_asm_instructions` instead.
pub fn num_instructions(&self, context: &Context) -> usize {
self.block_iter(context)
.map(|block| block.num_instructions(context))
.sum()
}
/// Return the number of instructions in this function, including
/// the [crate::asm::AsmInstruction]s found in [crate::InstOp::AsmBlock]s.
///
/// Every [crate::asm::AsmInstruction] encountered in any of the ASM blocks
/// will be counted as an instruction. The [crate::InstOp::AsmBlock] itself
/// is not counted but rather replaced with the number of ASM instructions
/// found in the block. In other words, empty ASM blocks do not count as
/// instructions.
///
/// If you want to count [crate::InstOp::AsmBlock]s as single instructions, use
/// `num_instructions` instead.
pub fn num_instructions_incl_asm_instructions(&self, context: &Context) -> usize {
self.instruction_iter(context).fold(0, |num, (_, value)| {
match &value
.get_instruction(context)
.expect("We are iterating through the instructions.")
.op
{
InstOp::AsmBlock(asm, _) => num + asm.body.len(),
_ => num + 1,
}
})
}
/// Return the function name.
pub fn get_name<'a>(&self, context: &'a Context) -> &'a str {
&context.functions[self.0].name
}
/// Return the display string representing the function in the ABI errors
/// related context, in the "errorCodes" and "panickingCalls" sections.
pub fn get_abi_errors_display(&self, context: &Context) -> String {
context.functions[self.0].abi_errors_display.clone()
}
/// Return the module that this function belongs to.
pub fn get_module(&self, context: &Context) -> Module {
context.functions[self.0].module
}
/// Return the function entry (i.e., the first) block.
pub fn get_entry_block(&self, context: &Context) -> Block {
context.functions[self.0].blocks[0]
}
/// Return the attached metadata.
pub fn get_metadata(&self, context: &Context) -> Option<MetadataIndex> {
context.functions[self.0].metadata
}
/// Whether this function has a valid selector.
pub fn has_selector(&self, context: &Context) -> bool {
context.functions[self.0].selector.is_some()
}
/// Return the function selector, if it has one.
pub fn get_selector(&self, context: &Context) -> Option<[u8; 4]> {
context.functions[self.0].selector
}
/// Whether or not the function is a program entry point, i.e. `main`, `#[test]` fns or abi
/// methods.
pub fn is_entry(&self, context: &Context) -> bool {
context.functions[self.0].is_entry
}
/// Whether or not the function was a program entry point, i.e. `main`, `#[test]` fns or abi
/// methods, before it got wrapped within the `__entry` function.
pub fn is_original_entry(&self, context: &Context) -> bool {
context.functions[self.0].is_original_entry
}
/// Whether or not this function is a contract fallback function
pub fn is_fallback(&self, context: &Context) -> bool {
context.functions[self.0].is_fallback
}
// Get the function return type.
pub fn get_return_type(&self, context: &Context) -> Type {
context.functions[self.0].return_type
}
// Set a new function return type.
pub fn set_return_type(&self, context: &mut Context, new_ret_type: Type) {
context.functions.get_mut(self.0).unwrap().return_type = new_ret_type
}
/// Get the number of args.
pub fn num_args(&self, context: &Context) -> usize {
context.functions[self.0].arguments.len()
}
/// Get an arg value by name, if found.
pub fn get_arg(&self, context: &Context, name: &str) -> Option<Value> {
context.functions[self.0]
.arguments
.iter()
.find_map(|(arg_name, val)| (arg_name == name).then_some(val))
.copied()
}
/// Append an extra argument to the function signature.
///
/// NOTE: `arg` must be a `BlockArgument` value with the correct index otherwise `add_arg` will
/// panic.
pub fn add_arg<S: Into<String>>(&self, context: &mut Context, name: S, arg: Value) {
match context.values[arg.0].value {
ValueDatum::Argument(BlockArgument { idx, .. })
if idx == context.functions[self.0].arguments.len() =>
{
context.functions[self.0].arguments.push((name.into(), arg));
}
_ => panic!("Inconsistent function argument being added"),
}
}
/// Find the name of an arg by value.
pub fn lookup_arg_name<'a>(&self, context: &'a Context, value: &Value) -> Option<&'a String> {
context.functions[self.0]
.arguments
.iter()
.find_map(|(name, arg_val)| (arg_val == value).then_some(name))
}
/// Return an iterator for each of the function arguments.
pub fn args_iter<'a>(&self, context: &'a Context) -> impl Iterator<Item = &'a (String, Value)> {
context.functions[self.0].arguments.iter()
}
/// Is argument `i` marked immutable?
pub fn is_arg_immutable(&self, context: &Context, i: usize) -> bool {
if let Some((_, val)) = context.functions[self.0].arguments.get(i) {
if let ValueDatum::Argument(arg) = &context.values[val.0].value {
return arg.is_immutable;
}
}
false
}
/// Get a pointer to a local value by name, if found.
pub fn get_local_var(&self, context: &Context, name: &str) -> Option<LocalVar> {
context.functions[self.0].local_storage.get(name).copied()
}
/// Find the name of a local value by pointer.
pub fn lookup_local_name<'a>(
&self,
context: &'a Context,
var: &LocalVar,
) -> Option<&'a String> {
context.functions[self.0]
.local_storage
.iter()
.find_map(|(name, local_var)| if local_var == var { Some(name) } else { None })
}
    /// Add a value to the function local storage.
    ///
    /// The name must be unique to this function else an error is returned.
    ///
    /// NOTE(review): the `LocalVar` is allocated in the context before the name check, and on
    /// a clash the `insert` below has already replaced the old map entry — the
    /// `FunctionLocalClobbered` error path is not side-effect free. Confirm callers treat this
    /// error as fatal.
    pub fn new_local_var(
        &self,
        context: &mut Context,
        name: String,
        local_type: Type,
        initializer: Option<Constant>,
        mutable: bool,
    ) -> Result<LocalVar, IrError> {
        let var = LocalVar::new(context, local_type, initializer, mutable);
        let func = context.functions.get_mut(self.0).unwrap();
        // `insert` returns the previous entry if the name was taken; map that to an error,
        // otherwise report the freshly created var.
        func.local_storage
            .insert(name.clone(), var)
            .map(|_| Err(IrError::FunctionLocalClobbered(func.name.clone(), name)))
            .unwrap_or(Ok(var))
    }
    /// Add a value to the function local storage, by forcing the name to be unique if needed.
    ///
    /// Will use the provided name as a hint and rename to guarantee insertion.
    pub fn new_unique_local_var(
        &self,
        context: &mut Context,
        name: String,
        local_type: Type,
        initializer: Option<Constant>,
        mutable: bool,
    ) -> LocalVar {
        let func = &context.functions[self.0];
        let new_name = if func.local_storage.contains_key(&name) {
            // Assuming that we'll eventually find a unique name by appending numbers to the old
            // one...
            (0..)
                .find_map(|n| {
                    let candidate = format!("{name}{n}");
                    if func.local_storage.contains_key(&candidate) {
                        None
                    } else {
                        Some(candidate)
                    }
                })
                .unwrap()
        } else {
            name
        };
        // The chosen name is now guaranteed absent, so `new_local_var` cannot fail.
        self.new_local_var(context, new_name, local_type, initializer, mutable)
            .unwrap()
    }
/// Return an iterator to all of the values in this function's local storage.
pub fn locals_iter<'a>(
&self,
context: &'a Context,
) -> impl Iterator<Item = (&'a String, &'a LocalVar)> {
context.functions[self.0].local_storage.iter()
}
/// Remove given list of locals
pub fn remove_locals(&self, context: &mut Context, removals: &Vec<String>) {
for remove in removals {
if let Some(local) = context.functions[self.0].local_storage.remove(remove) {
context.local_vars.remove(local.0);
}
}
}
    /// Merge values from another [`Function`] into this one.
    ///
    /// The names of the merged values are guaranteed to be unique via the use of
    /// [`Function::new_unique_local_var`].
    ///
    /// Returns a map from the original pointers to the newly merged pointers.
    pub fn merge_locals_from(
        &self,
        context: &mut Context,
        other: Function,
    ) -> HashMap<LocalVar, LocalVar> {
        let mut var_map = HashMap::new();
        // Snapshot the other function's locals (name, handle, content) first so `context`
        // is free to be mutated in the loop below.
        let old_vars: Vec<(String, LocalVar, LocalVarContent)> = context.functions[other.0]
            .local_storage
            .iter()
            .map(|(name, var)| (name.clone(), *var, context.local_vars[var.0].clone()))
            .collect();
        for (name, old_var, old_var_content) in old_vars {
            // Recover the pointee type; local var types are always pointers.
            let old_ty = old_var_content
                .ptr_ty
                .get_pointee_type(context)
                .expect("LocalVar types are always pointers.");
            let new_var = self.new_unique_local_var(
                context,
                name.clone(),
                old_ty,
                old_var_content.initializer,
                old_var_content.mutable,
            );
            var_map.insert(old_var, new_var);
        }
        var_map
    }
/// Return an iterator to each block in this function.
pub fn block_iter(&self, context: &Context) -> BlockIterator {
BlockIterator::new(context, self)
}
    /// Return an iterator to each instruction in each block in this function.
    ///
    /// This is a convenience method for when all instructions in a function need to be inspected.
    /// The instruction value is returned from the iterator along with the block it belongs to.
    pub fn instruction_iter<'a>(
        &self,
        context: &'a Context,
    ) -> impl Iterator<Item = (Block, Value)> + 'a {
        context.functions[self.0]
            .blocks
            .iter()
            .flat_map(move |block| {
                // Pair every instruction with its owning block.
                block
                    .instruction_iter(context)
                    .map(move |ins_val| (*block, ins_val))
            })
    }
    /// Replace a value with another within this function.
    ///
    /// This is a convenience method which iterates over this function's blocks and calls
    /// [`Block::replace_values`] in turn.
    ///
    /// `starting_block` is an optimisation for when the first possible reference to `old_val` is
    /// known.
    pub fn replace_values(
        &self,
        context: &mut Context,
        replace_map: &FxHashMap<Value, Value>,
        starting_block: Option<Block>,
    ) {
        let mut block_iter = self.block_iter(context).peekable();
        if let Some(ref starting_block) = starting_block {
            // Skip blocks until we hit the starting block.
            while block_iter
                .next_if(|block| block != starting_block)
                .is_some()
            {}
        }
        // Rewrite every remaining block, including `starting_block` itself.
        for block in block_iter {
            block.replace_values(context, replace_map);
        }
    }
pub fn replace_value(
&self,
context: &mut Context,
old_val: Value,
new_val: Value,
starting_block: Option<Block>,
) {
let mut map = FxHashMap::<Value, Value>::default();
map.insert(old_val, new_val);
self.replace_values(context, &map, starting_block);
}
    /// A graphviz dot graph of the control-flow-graph.
    pub fn dot_cfg(&self, context: &Context) -> String {
        let mut worklist = Vec::<Block>::new();
        let mut visited = FxHashSet::<Block>::default();
        let entry = self.get_entry_block(context);
        let mut res = format!("digraph {} {{\n", self.get_name(context));
        // Depth-first walk over the CFG starting from the entry block.
        worklist.push(entry);
        while let Some(n) = worklist.pop() {
            // NOTE(review): a block is only marked visited when popped, so a block pushed by
            // two predecessors before its first visit is processed twice, duplicating its
            // outgoing edge lines. Graphviz tolerates duplicate edges, but confirm if exact
            // output matters.
            visited.insert(n);
            for BranchToWithArgs { block: n_succ, .. } in n.successors(context) {
                // `writeln!` appends its own newline on top of the trailing `\n` in the format
                // string, so each edge is followed by a blank line in the output.
                let _ = writeln!(
                    res,
                    "\t{} -> {}\n",
                    n.get_label(context),
                    n_succ.get_label(context)
                );
                if !visited.contains(&n_succ) {
                    worklist.push(n_succ);
                }
            }
        }
        res += "}\n";
        res
    }
}
/// An iterator over each [`Function`] in a [`Module`].
pub struct FunctionIterator {
    // Snapshot of the module's function keys, taken at construction time so the
    // context can be mutated during iteration.
    functions: Vec<slotmap::DefaultKey>,
    // Index of the next key to yield.
    next: usize,
}
impl FunctionIterator {
    /// Return a new iterator for the functions in `module`.
    pub fn new(context: &Context, module: &Module) -> FunctionIterator {
        // Snapshot the function keys up front so the context may be freely
        // modified while iterating.
        let functions = context.modules[module.0]
            .functions
            .iter()
            .map(|func| func.0)
            .collect();
        FunctionIterator { functions, next: 0 }
    }
}
impl Iterator for FunctionIterator {
    type Item = Function;

    fn next(&mut self) -> Option<Function> {
        // Yield the key at the cursor, if any, and advance.
        let key = self.functions.get(self.next).copied()?;
        self.next += 1;
        Some(Function(key))
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/block.rs | sway-ir/src/block.rs | //! Represents a 'basic block' of [`Instruction`]s in a control flow graph.
//!
//! [`Block`]s contain zero or more _non-terminating_ instructions and at most one _terminating_
//! instruction or _terminator_. Terminators are either branches or a return instruction and are
//! the last instruction in the block.
//!
//! Blocks also contain a single 'phi' instruction at its start. In
//! [SSA](https://en.wikipedia.org/wiki/Static_single_assignment_form) form 'phi' instructions are
//! used to merge values from preceding blocks.
//!
//! Every [`Function`] has at least one block, the first of which is usually labeled `entry`.
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
context::Context,
error::IrError,
function::Function,
instruction::{FuelVmInstruction, InstOp},
value::{Value, ValueDatum},
BranchToWithArgs, DebugWithContext, Instruction, InstructionInserter, InstructionIterator,
Module, Type,
};
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
///
/// `Block` is a cheap `Copy` key; the block's data lives in the context's block arena.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, DebugWithContext)]
pub struct Block(pub slotmap::DefaultKey);
impl Block {
    /// Get the [`Module`] owning the function this block belongs to.
    pub fn get_module<'a>(&self, context: &'a Context) -> &'a Module {
        let function = context.blocks[self.0].function;
        &context.functions[function.0].module
    }
}
#[doc(hidden)]
pub struct BlockContent {
    /// Block label, useful for printing.
    pub label: Label,
    /// The function containing this block.
    pub function: Function,
    /// List of instructions in the block, in execution order; the last one is
    /// expected to be the terminator.
    pub(crate) instructions: Vec<Value>,
    /// Block arguments: Another form of SSA PHIs.
    pub args: Vec<Value>,
    /// CFG predecessors, i.e., blocks which branch to this one.
    pub preds: FxHashSet<Block>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, DebugWithContext)]
pub struct BlockArgument {
    /// The block of which this is an argument.
    pub block: Block,
    /// idx'th argument of the block.
    pub idx: usize,
    /// The type of the argument value.
    pub ty: Type,
    /// Is this known to be immutable?
    pub is_immutable: bool,
}
impl BlockArgument {
/// Get the actual parameter passed to this block argument from `from_block`.
pub fn get_val_coming_from(&self, context: &Context, from_block: &Block) -> Option<Value> {
for BranchToWithArgs {
block: succ_block,
args,
} in from_block.successors(context)
{
if succ_block == self.block {
return Some(args[self.idx]);
}
}
None
}
/// Get the [Value] that this argument represents.
pub fn as_value(&self, context: &Context) -> Value {
self.block.get_arg(context, self.idx).unwrap()
}
}
/// Each block may be explicitly named. A [`Label`] is a simple `String` synonym.
///
/// Uniqueness is not enforced by the type itself; block creation routes labels
/// through `Function::get_unique_label`.
pub type Label = String;
impl Block {
/// Return a new block handle.
///
/// Creates a new Block belonging to `function` in the context and returns its handle. `label`
/// is optional and is used only when printing the IR.
pub fn new(context: &mut Context, function: Function, label: Option<String>) -> Block {
let label = function.get_unique_label(context, label);
let content = BlockContent {
label,
function,
instructions: vec![],
args: vec![],
preds: FxHashSet::default(),
};
Block(context.blocks.insert(content))
}
/// Get the parent function for this block.
pub fn get_function(&self, context: &Context) -> Function {
context.blocks[self.0].function
}
/// Create a new [`InstructionInserter`] to more easily append instructions to this block.
pub fn append<'a, 'eng>(
&self,
context: &'a mut Context<'eng>,
) -> InstructionInserter<'a, 'eng> {
InstructionInserter::new(context, *self, crate::InsertionPosition::End)
}
/// Get the label of this block. If it wasn't given one upon creation it will be a generated
/// label.
pub fn get_label(&self, context: &Context) -> String {
context.blocks[self.0].label.clone()
}
/// Set the label of this block. If the label isn't unique it will be made so.
pub fn set_label(&self, context: &mut Context, new_label: Option<Label>) {
let unique_label = self
.get_function(context)
.get_unique_label(context, new_label);
context.blocks[self.0].label = unique_label;
}
/// Get the number of instructions in this block.
pub fn num_instructions(&self, context: &Context) -> usize {
context.blocks[self.0].instructions.len()
}
/// Get the i'th block arg.
pub fn get_arg(&self, context: &Context, index: usize) -> Option<Value> {
context.blocks[self.0].args.get(index).cloned()
}
/// Get the number of predecessor blocks, i.e., blocks which branch to this one.
pub fn num_predecessors(&self, context: &Context) -> usize {
context.blocks[self.0].preds.len()
}
/// Add a new block argument of type `ty`. Returns its index.
pub fn new_arg(&self, context: &mut Context, ty: Type) -> usize {
let idx = context.blocks[self.0].args.len();
let arg_val = Value::new_argument(
context,
BlockArgument {
block: *self,
idx,
ty,
is_immutable: false,
},
);
context.blocks[self.0].args.push(arg_val);
idx
}
pub fn set_arg(&self, context: &mut Context, arg: Value) {
match context.values[arg.0].value {
ValueDatum::Argument(BlockArgument {
block,
idx,
ty: _,
is_immutable: _,
}) if block == *self && idx < context.blocks[self.0].args.len() => {
context.blocks[self.0].args[idx] = arg;
}
_ => panic!("Inconsistent block argument being set"),
}
}
/// Add a block argument, asserts that `arg` is suitable here.
pub fn add_arg(&self, context: &mut Context, arg: Value) {
match context.values[arg.0].value {
ValueDatum::Argument(BlockArgument {
block,
idx,
ty: _,
is_immutable: _,
}) if block == *self && idx == context.blocks[self.0].args.len() => {
context.blocks[self.0].args.push(arg);
}
_ => panic!("Inconsistent block argument being added"),
}
}
/// Get an iterator over this block's args.
pub fn arg_iter<'a>(&'a self, context: &'a Context) -> impl Iterator<Item = &'a Value> {
context.blocks[self.0].args.iter()
}
/// How many args does this block have?
pub fn num_args(&self, context: &Context) -> usize {
context.blocks[self.0].args.len()
}
/// Get an iterator over this block's predecessor blocks.
pub fn pred_iter<'a>(&'a self, context: &'a Context) -> impl Iterator<Item = &'a Block> {
context.blocks[self.0].preds.iter()
}
/// Add `from_block` to the set of predecessors of this block.
pub fn add_pred(&self, context: &mut Context, from_block: &Block) {
context.blocks[self.0].preds.insert(*from_block);
}
/// Remove `from_block` from the set of predecessors of this block.
pub fn remove_pred(&self, context: &mut Context, from_block: &Block) {
context.blocks[self.0].preds.remove(from_block);
}
/// Replace a `old_source` with `new_source` as a predecessor.
pub fn replace_pred(&self, context: &mut Context, old_source: &Block, new_source: &Block) {
self.remove_pred(context, old_source);
self.add_pred(context, new_source);
}
/// Get instruction at position `pos`.
///
/// Returns `None` if block is empty or if `pos` is out of bounds.
pub fn get_instruction_at(&self, context: &Context, pos: usize) -> Option<Value> {
context.blocks[self.0].instructions.get(pos).cloned()
}
/// Get a reference to the final instruction in the block, provided it is a terminator.
///
/// Returns `None` if the final instruction is not a terminator. This can only happen during IR
/// generation when the block is still being populated.
pub fn get_terminator<'a>(&self, context: &'a Context) -> Option<&'a Instruction> {
context.blocks[self.0].instructions.last().and_then(|val| {
// It's guaranteed to be an instruction value.
if let ValueDatum::Instruction(term_inst) = &context.values[val.0].value {
if term_inst.op.is_terminator() {
Some(term_inst)
} else {
None
}
} else {
None
}
})
}
/// Get a mutable reference to the final instruction in the block, provided it is a terminator.
///
/// Returns `None` if the final instruction is not a terminator. This can only happen during IR
/// generation when the block is still being populated.
pub fn get_terminator_mut<'a>(&self, context: &'a mut Context) -> Option<&'a mut Instruction> {
context.blocks[self.0].instructions.last().and_then(|val| {
// It's guaranteed to be an instruction value.
if let ValueDatum::Instruction(term_inst) = &mut context.values[val.0].value {
if term_inst.op.is_terminator() {
Some(term_inst)
} else {
None
}
} else {
None
}
})
}
/// Get the CFG successors (and the parameters passed to them) of this block.
pub(super) fn successors<'a>(&'a self, context: &'a Context) -> Vec<BranchToWithArgs> {
match self.get_terminator(context) {
Some(Instruction {
op:
InstOp::ConditionalBranch {
true_block,
false_block,
..
},
..
}) => vec![true_block.clone(), false_block.clone()],
Some(Instruction {
op: InstOp::Branch(block),
..
}) => vec![block.clone()],
_otherwise => Vec::new(),
}
}
/// For a particular successor (if it indeed is one), get the arguments passed.
pub fn get_succ_params(&self, context: &Context, succ: &Block) -> Vec<Value> {
self.successors(context)
.iter()
.find(|branch| &branch.block == succ)
.map_or(vec![], |branch| branch.args.clone())
}
    /// For a particular successor (if it indeed is one), get a mut ref to parameters passed.
    pub fn get_succ_params_mut<'a>(
        &'a self,
        context: &'a mut Context,
        succ: &Block,
    ) -> Option<&'a mut Vec<Value>> {
        // Inspect the terminator in place: only branch terminators carry successor args.
        match self.get_terminator_mut(context) {
            Some(Instruction {
                op:
                    InstOp::ConditionalBranch {
                        true_block,
                        false_block,
                        ..
                    },
                ..
            }) => {
                // `succ` may be either arm of the conditional branch.
                if true_block.block == *succ {
                    Some(&mut true_block.args)
                } else if false_block.block == *succ {
                    Some(&mut false_block.args)
                } else {
                    None
                }
            }
            Some(Instruction {
                op: InstOp::Branch(block),
                ..
            }) if block.block == *succ => Some(&mut block.args),
            // Not a branch, or `succ` is not a successor of this block.
            _ => None,
        }
    }
    /// Replace successor `old_succ` with `new_succ`.
    /// Updates `preds` of both `old_succ` and `new_succ`.
    pub(super) fn replace_successor(
        &self,
        context: &mut Context,
        old_succ: Block,
        new_succ: Block,
        new_params: Vec<Value>,
    ) {
        let mut modified = false;
        if let Some(term) = self.get_terminator_mut(context) {
            match term {
                Instruction {
                    op:
                        InstOp::ConditionalBranch {
                            true_block:
                                BranchToWithArgs {
                                    block: true_block,
                                    args: true_opds,
                                },
                            false_block:
                                BranchToWithArgs {
                                    block: false_block,
                                    args: false_opds,
                                },
                            cond_value: _,
                        },
                    ..
                } => {
                    // Both arms may name `old_succ`; each is patched independently.
                    if old_succ == *true_block {
                        modified = true;
                        *true_block = new_succ;
                        // Cloned here so `new_params` is still available for the false arm.
                        true_opds.clone_from(&new_params);
                    }
                    if old_succ == *false_block {
                        modified = true;
                        *false_block = new_succ;
                        // Last use of `new_params`, so it can be moved rather than cloned.
                        *false_opds = new_params
                    }
                }
                Instruction {
                    op: InstOp::Branch(BranchToWithArgs { block, args }),
                    ..
                } if *block == old_succ => {
                    *block = new_succ;
                    *args = new_params;
                    modified = true;
                }
                _ => (),
            }
        }
        // Keep both blocks' predecessor sets consistent with the rewritten edge.
        if modified {
            old_succ.remove_pred(context, self);
            new_succ.add_pred(context, self);
        }
    }
/// Return whether this block is already terminated by non-branching instructions,
/// means with instructions that cause either revert, or local or context returns.
/// Those instructions are: [InstOp::Ret], [FuelVmInstruction::Retd],
/// [FuelVmInstruction::JmpMem], and [FuelVmInstruction::Revert]).
pub fn is_terminated_by_return_or_revert(&self, context: &Context) -> bool {
self.get_terminator(context).is_some_and(|i| {
matches!(
i,
Instruction {
op: InstOp::Ret(..)
| InstOp::FuelVm(
FuelVmInstruction::Revert(..)
| FuelVmInstruction::JmpMem
| FuelVmInstruction::Retd { .. }
),
..
}
)
})
}
/// Replace a value within this block.
///
/// For every instruction within the block, any reference to `old_val` is replaced with
/// `new_val`.
pub fn replace_values(&self, context: &mut Context, replace_map: &FxHashMap<Value, Value>) {
for ins_idx in 0..context.blocks[self.0].instructions.len() {
let ins = context.blocks[self.0].instructions[ins_idx];
ins.replace_instruction_values(context, replace_map);
}
}
/// Remove an instruction from this block.
///
/// **NOTE:** We must be very careful! We mustn't remove the phi or the terminator. Some
/// extra checks should probably be performed here to avoid corruption! Ideally we use get a
/// user/uses system implemented. Using `Vec::remove()` is also O(n) which we may want to
/// avoid someday.
pub fn remove_instruction(&self, context: &mut Context, instr_val: Value) {
let ins = &mut context.blocks[self.0].instructions;
if let Some(pos) = ins.iter().position(|iv| *iv == instr_val) {
ins.remove(pos);
}
}
/// Remove an instruction at position `pos` from this block.
///
/// **NOTE:** We must be very careful! We mustn't remove the phi or the terminator. Some
/// extra checks should probably be performed here to avoid corruption! Ideally we use get a
/// user/uses system implemented. Using `Vec::remove()` is also O(n) which we may want to
/// avoid someday.
pub fn remove_instruction_at(&self, context: &mut Context, pos: usize) {
context.blocks[self.0].instructions.remove(pos);
}
/// Remove the last instruction in the block.
///
/// **NOTE:** The caller must be very careful if removing the terminator.
pub fn remove_last_instruction(&self, context: &mut Context) {
context.blocks[self.0].instructions.pop();
}
/// Remove instructions from block that satisfy a given predicate.
pub fn remove_instructions<T: Fn(Value) -> bool>(&self, context: &mut Context, pred: T) {
let ins = &mut context.blocks[self.0].instructions;
ins.retain(|value| !pred(*value));
}
/// Clear the current instruction list and take the one provided.
pub fn take_body(&self, context: &mut Context, new_insts: Vec<Value>) {
let _ = std::mem::replace(&mut (context.blocks[self.0].instructions), new_insts);
for inst in &context.blocks[self.0].instructions {
let ValueDatum::Instruction(inst) = &mut context.values[inst.0].value else {
continue;
};
inst.parent = *self;
}
}
/// Insert instruction(s) at the beginning of the block.
pub fn prepend_instructions(&self, context: &mut Context, mut insts: Vec<Value>) {
let block_ins = &mut context.blocks[self.0].instructions;
insts.append(block_ins);
context.blocks[self.0].instructions = insts;
}
    /// Replace an instruction in this block with another. Will return a ValueNotFound on error.
    /// Any use of the old instruction value will also be replaced by the new value throughout the
    /// owning function if `replace_uses` is set.
    pub fn replace_instruction(
        &self,
        context: &mut Context,
        old_instr_val: Value,
        new_instr_val: Value,
        replace_uses: bool,
    ) -> Result<(), IrError> {
        // Locate the old value within this block's instruction list.
        match context.blocks[self.0]
            .instructions
            .iter_mut()
            .find(|instr_val| *instr_val == &old_instr_val)
        {
            None => Err(IrError::ValueNotFound(
                "Attempting to replace instruction.".to_owned(),
            )),
            Some(instr_val) => {
                // Swap the value in place, then optionally rewrite every use of the old
                // value from this block onwards.
                *instr_val = new_instr_val;
                if replace_uses {
                    self.get_function(context).replace_value(
                        context,
                        old_instr_val,
                        new_instr_val,
                        Some(*self),
                    );
                }
                Ok(())
            }
        }
    }
    /// Split the block into two.
    ///
    /// This will create a new block and move the instructions at and following `split_idx` to it.
    /// Returns both blocks in control-flow order (predecessor first).
    pub fn split_at(&self, context: &mut Context, split_idx: usize) -> (Block, Block) {
        let function = context.blocks[self.0].function;
        // Splitting at index zero keeps the whole body in `self` and inserts an empty
        // block before it; otherwise `self` keeps the head and the tail moves to a new block.
        if split_idx == 0 {
            // We can just create a new empty block and put it before this one. We know that it
            // will succeed because self is definitely in the function, so we can unwrap().
            //
            // If self is the entry block then for now we need to rename it from 'entry' and call
            // our new block 'entry'.
            let new_block_name = (*self == self.get_function(context).get_entry_block(context))
                .then(|| {
                    self.set_label(context, None);
                    "entry".to_owned()
                });
            let new_block = function
                .create_block_before(context, self, new_block_name)
                .unwrap();
            // Move the block arguments to the new block. We collect because we want to mutate next.
            #[allow(clippy::needless_collect)]
            let args: Vec<_> = self.arg_iter(context).copied().collect();
            for arg in args.into_iter() {
                match &mut context.values[arg.0].value {
                    ValueDatum::Argument(BlockArgument {
                        block,
                        idx: _,
                        ty: _,
                        is_immutable: _,
                    }) => {
                        // We modify the Value in place to be a BlockArgument for the new block.
                        *block = new_block;
                    }
                    _ => unreachable!("Block arg value inconsistent"),
                }
                new_block.add_arg(context, arg);
            }
            context.blocks[self.0].args.clear();
            (new_block, *self)
        } else {
            // Again, we know that it will succeed because self is definitely in the function, and
            // so we can unwrap().
            let new_block = function.create_block_after(context, self, None).unwrap();
            // Split the instructions at the index and append them to the new block.
            let mut tail_instructions = context.blocks[self.0].instructions.split_off(split_idx);
            // Update the parent of tail_instructions.
            for instr in &tail_instructions {
                instr.get_instruction_mut(context).unwrap().parent = new_block;
            }
            context.blocks[new_block.0]
                .instructions
                .append(&mut tail_instructions);
            // If the terminator of the old block (now the new block) was a branch then we need to
            // update the destination block's preds.
            //
            // Copying the candidate blocks and putting them in a vector to avoid borrowing context
            // as immutable and then mutable in the loop body.
            for to_block in match new_block.get_terminator(context) {
                Some(Instruction {
                    op: InstOp::Branch(to_block),
                    ..
                }) => {
                    vec![to_block.block]
                }
                Some(Instruction {
                    op:
                        InstOp::ConditionalBranch {
                            true_block,
                            false_block,
                            ..
                        },
                    ..
                }) => {
                    vec![true_block.block, false_block.block]
                }
                _ => Vec::new(),
            } {
                to_block.replace_pred(context, self, &new_block);
            }
            (*self, new_block)
        }
    }
/// Return an instruction iterator for each instruction in this block.
pub fn instruction_iter(&self, context: &Context) -> InstructionIterator {
InstructionIterator::new(context, self)
}
}
/// An iterator over each block in a [`Function`].
pub struct BlockIterator {
    // Snapshot of the function's block keys, taken at construction time so the
    // context can be mutated during iteration.
    blocks: Vec<slotmap::DefaultKey>,
    // Index of the next key to yield.
    next: usize,
}
impl BlockIterator {
    /// Return a new iterator for each block in `function`.
    pub fn new(context: &Context, function: &Function) -> Self {
        // Snapshot the block keys up front so the context may be freely
        // modified while iterating.
        let blocks = context.functions[function.0]
            .blocks
            .iter()
            .map(|block| block.0)
            .collect();
        BlockIterator { blocks, next: 0 }
    }
}
impl Iterator for BlockIterator {
    type Item = Block;

    fn next(&mut self) -> Option<Block> {
        // Yield the key at the cursor, if any, and advance.
        let key = self.blocks.get(self.next).copied()?;
        self.next += 1;
        Some(Block(key))
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/error.rs | sway-ir/src/error.rs | /// These errors are for internal IR failures, not designed to be useful to a Sway developer, but
/// more for users of the `sway-ir` crate, i.e., compiler developers.
///
/// XXX They're not very rich and could do with a little more verbosity.
#[derive(Debug)]
pub enum IrError {
    // -- General structural, parsing and lookup errors. --
    FunctionLocalClobbered(String, String),
    InvalidMetadatum(String),
    InvalidPhi,
    MisplacedTerminator(String),
    MissingBlock(String),
    MissingTerminator(String),
    ParseFailure(String, String),
    RemoveMissingBlock(String),
    ValueNotFound(String),
    InconsistentParent(String, String, String),
    // -- Verifier errors: one variant per checked IR invariant. String payloads are
    // -- pre-rendered descriptions (e.g. type or block names) used by `Display`.
    VerifyArgumentValueIsNotArgument(String),
    VerifyUnaryOpIncorrectArgType,
    VerifyBinaryOpIncorrectArgType,
    VerifyBitcastBetweenInvalidTypes(String, String),
    VerifyBitcastUnknownSourceType,
    VerifyEntryBlockHasPredecessors(String, Vec<String>),
    VerifyBlockArgMalformed,
    VerifyBranchParamsMismatch,
    VerifyBranchToMissingBlock(String),
    VerifyCallArgTypeMismatch(String, String, String),
    VerifyCallToMissingFunction(String),
    VerifyCmpBadTypes(String, String),
    VerifyCmpTypeMismatch(String, String),
    VerifyCmpUnknownTypes,
    VerifyConditionExprNotABool,
    VerifyContractCallBadTypes(String),
    VerifyGepElementTypeNonPointer,
    // The optional `Value` payloads below identify the offending IR value; see
    // `get_problematic_value`.
    VerifyGepFromNonPointer(String, Option<Value>),
    VerifyGepInconsistentTypes(String, Option<crate::Value>),
    VerifyGepOnNonAggregate,
    VerifyGetNonExistentLocalVarPointer,
    VerifyGetNonExistentGlobalVarPointer,
    VerifyGetNonExistentConfigPointer,
    VerifyGetNonExistentStorageKeyPointer,
    VerifyGlobalMissingInitializer(String),
    VerifyInsertElementOfIncorrectType,
    VerifyInsertValueOfIncorrectType,
    VerifyIntToPtrFromNonIntegerType(String),
    VerifyIntToPtrToNonPointer(String),
    VerifyIntToPtrUnknownSourceType,
    VerifyAllocCountNotUint64,
    VerifyInvalidGtfIndexType,
    VerifyLoadFromNonPointer(String),
    VerifyLocalMissingInitializer(String, String),
    VerifyLogId,
    VerifyLogMismatchedTypes,
    VerifyLogEventDataVersion(u8),
    VerifyLogEventDataInvalid(String),
    VerifyMemcopyNonPointer(String),
    VerifyMemcopyMismatchedTypes(String, String),
    VerifyMemClearValNonPointer(String),
    VerifyPtrCastFromNonPointer(String),
    VerifyPtrCastToNonPointer(String),
    VerifyPtrToIntToNonInteger(String),
    VerifyReturnMismatchedTypes(String),
    VerifyRevertCodeBadType,
    VerifySmoBadMessageType,
    VerifySmoCoins,
    VerifySmoMessageSize,
    VerifySmoRecipientNonPointer(String),
    VerifySmoMessageNonPointer(String),
    VerifySmoRecipientBadType,
    VerifyStateAccessNumOfSlots,
    VerifyStateAccessQuadNonPointer(String),
    VerifyStateDestBadType(String),
    VerifyStateKeyBadType,
    VerifyStateKeyNonPointer(String),
    VerifyStoreMismatchedTypes(Option<Value>),
    VerifyStoreToNonPointer(String),
    VerifyUntypedValuePassedToFunction,
}
impl IrError {
    /// Return the IR [Value] associated with this error, when one was recorded.
    pub(crate) fn get_problematic_value(&self) -> Option<&Value> {
        match self {
            Self::VerifyGepFromNonPointer(_, v) | Self::VerifyGepInconsistentTypes(_, v) => {
                v.as_ref()
            }
            Self::VerifyStoreMismatchedTypes(v) => v.as_ref(),
            _ => None,
        }
    }
}
// No wrapped source error to expose, so the default trait methods suffice.
impl std::error::Error for IrError {}
use std::fmt;
use crate::Value;
use itertools::Itertools;
impl fmt::Display for IrError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
IrError::FunctionLocalClobbered(fn_str, var_str) => write!(
f,
"Local storage for function {fn_str} already has an entry for variable {var_str}."
),
IrError::InvalidMetadatum(why_str) => {
write!(f, "Unable to convert from invalid metadatum: {why_str}.")
}
IrError::InvalidPhi => write!(
f,
"Phi instruction has invalid block or value reference list."
),
IrError::MisplacedTerminator(blk_str) => {
write!(f, "Block {blk_str} has a misplaced terminator.")
}
IrError::MissingBlock(blk_str) => write!(f, "Unable to find block {blk_str}."),
IrError::MissingTerminator(blk_str) => {
write!(f, "Block {blk_str} is missing its terminator.")
}
IrError::ParseFailure(expecting, found) => {
write!(
f,
"Parse failure: expecting '{expecting}', found '{found}'."
)
}
IrError::RemoveMissingBlock(blk_str) => {
write!(f, "Unable to remove block {blk_str}; not found.")
}
IrError::ValueNotFound(reason) => {
write!(f, "Invalid value: {reason}.")
}
IrError::InconsistentParent(entity, expected_parent, found_parent) => {
write!(
f,
"For IR Entity (module/function/block) {entity}, expected parent to be {expected_parent}, \
but found {found_parent}."
)
}
IrError::VerifyArgumentValueIsNotArgument(callee) => write!(
f,
"Verification failed: Argument specifier for function '{callee}' is not an \
argument value."
),
IrError::VerifyBitcastUnknownSourceType => write!(
f,
"Verification failed: Bitcast unable to determine source type."
),
IrError::VerifyBitcastBetweenInvalidTypes(from_ty, to_ty) => write!(
f,
"Verification failed: Bitcast not allowed from a {from_ty} to a {to_ty}."
),
IrError::VerifyUnaryOpIncorrectArgType => {
write!(
f,
"Verification failed: Incorrect argument type for unary op"
)
}
IrError::VerifyBinaryOpIncorrectArgType => {
write!(
f,
"Verification failed: Incorrect argument type(s) for binary op"
)
}
IrError::VerifyBranchToMissingBlock(label) => {
write!(
f,
"Verification failed: \
Branch to block '{label}' is not a block in the current function."
)
}
IrError::VerifyCallArgTypeMismatch(callee, caller_ty, callee_ty) => {
write!(
f,
"Verification failed: Type mismatch found for call to '{callee}': {caller_ty} is not a {callee_ty}."
)
}
IrError::VerifyCallToMissingFunction(callee) => {
write!(
f,
"Verification failed: Call to invalid function '{callee}'."
)
}
IrError::VerifyCmpBadTypes(lhs_ty, rhs_ty) => {
write!(
f,
"Verification failed: Cannot compare non-integer types {lhs_ty} and {rhs_ty}."
)
}
IrError::VerifyCmpTypeMismatch(lhs_ty, rhs_ty) => {
write!(
f,
"Verification failed: \
Cannot compare values with different widths of {lhs_ty} and {rhs_ty}."
)
}
IrError::VerifyCmpUnknownTypes => {
write!(
f,
"Verification failed: Unable to determine type(s) of compared value(s)."
)
}
IrError::VerifyConditionExprNotABool => {
write!(
f,
"Verification failed: Expression used for conditional is not a boolean."
)
}
IrError::VerifyContractCallBadTypes(arg_name) => {
write!(
f,
"Verification failed: \
Argument {arg_name} passed to contract call has the incorrect type."
)
}
IrError::VerifyGepElementTypeNonPointer => {
write!(f, "Verification failed: GEP on a non-pointer.")
}
IrError::VerifyGepInconsistentTypes(error, _) => {
write!(
f,
"Verification failed: Struct field type mismatch: ({error})."
)
}
IrError::VerifyGepFromNonPointer(ty, _) => {
write!(
f,
"Verification failed: Struct access must be to a pointer value, not a {ty}."
)
}
IrError::VerifyGepOnNonAggregate => {
write!(
f,
"Verification failed: Attempt to access a field from a non struct."
)
}
IrError::VerifyGetNonExistentLocalVarPointer => {
write!(
f,
"Verification failed: Attempt to get pointer not found in function local variables."
)
}
IrError::VerifyGetNonExistentGlobalVarPointer => {
write!(
f,
"Verification failed: Attempt to get pointer not found in module global variables."
)
}
IrError::VerifyGetNonExistentConfigPointer => {
write!(
f,
"Verification failed: Attempt to get pointer not found in module configurables."
)
}
IrError::VerifyGetNonExistentStorageKeyPointer => {
write!(
f,
"Verification failed: Attempt to get pointer not found in module storage keys."
)
}
IrError::VerifyInsertElementOfIncorrectType => {
write!(
f,
"Verification failed: Attempt to insert value of incorrect type into an array."
)
}
IrError::VerifyInsertValueOfIncorrectType => {
write!(
f,
"Verification failed: Attempt to insert value of incorrect type into a struct."
)
}
IrError::VerifyIntToPtrFromNonIntegerType(ty) => {
write!(f, "Verification failed: int_to_ptr cannot be from a {ty}.")
}
IrError::VerifyIntToPtrToNonPointer(ty) => {
write!(
f,
"Verification failed: int_to_ptr cannot be to a non-pointer {ty}."
)
}
IrError::VerifyIntToPtrUnknownSourceType => write!(
f,
"Verification failed: int_to_ptr unable to determine source type."
),
IrError::VerifyAllocCountNotUint64 => {
write!(
f,
"Verification failed: alloc instruction count must be a u64 integer."
)
}
IrError::VerifyLoadFromNonPointer(ty) => {
write!(
f,
"Verification failed: Load cannot be from a non-pointer {ty}."
)
}
IrError::VerifyMemcopyNonPointer(ty) => {
write!(
f,
"Verification failed: mem_copy cannot be to or from a non-pointer {ty}.",
)
}
IrError::VerifyMemcopyMismatchedTypes(dst_ty, src_ty) => {
write!(
f,
"Verification failed: mem_copy cannot be from {src_ty} pointer to {dst_ty} \
pointer.",
)
}
IrError::VerifyMemClearValNonPointer(ty) => {
write!(
f,
"Verification failed: mem_clear_val argument is not a pointer {ty}.",
)
}
IrError::VerifyReturnMismatchedTypes(fn_str) => write!(
f,
"Verification failed: \
Function {fn_str} return type must match its RET instructions."
),
IrError::VerifyEntryBlockHasPredecessors(function_name, predecessors) => {
let plural_s = if predecessors.len() == 1 { "" } else { "s" };
write!(
f,
"Verification failed: Entry block of the function \"{function_name}\" has {}predecessor{}. \
The predecessor{} {} {}.",
if predecessors.len() == 1 {
"a "
} else {
""
},
plural_s,
plural_s,
if predecessors.len() == 1 {
"is"
} else {
"are"
},
predecessors.iter().map(|block_label| format!("\"{block_label}\"")).collect_vec().join(", ")
)
}
IrError::VerifyBlockArgMalformed => {
write!(f, "Verification failed: Block argument is malformed")
}
IrError::VerifyBranchParamsMismatch => {
write!(
f,
"Verification failed: Block parameter passed in branch is malformed"
)
}
IrError::VerifyPtrCastFromNonPointer(ty) => {
write!(
f,
"Verification failed: Pointer cast from non pointer {ty}."
)
}
IrError::VerifyPtrCastToNonPointer(ty) => {
write!(f, "Verification failed: Pointer cast to non pointer {ty}.")
}
IrError::VerifyPtrToIntToNonInteger(ty) => {
write!(f, "Verification failed: Pointer cast to non integer {ty}.")
}
IrError::VerifyStateAccessNumOfSlots => {
write!(
f,
"Verification failed: Number of slots for state access must be an integer."
)
}
IrError::VerifyStateAccessQuadNonPointer(ty) => {
write!(
f,
"Verification failed: \
State quad access must be to or from a pointer, not a {ty}."
)
}
IrError::VerifyStateKeyBadType => {
write!(
f,
"Verification failed: State load or store key must be a b256 pointer."
)
}
IrError::VerifyStateKeyNonPointer(ty) => {
write!(
f,
"Verification failed: State load or store key must be a pointer, not a {ty}."
)
}
IrError::VerifyStateDestBadType(ty) => {
write!(
f,
"Verification failed: State access operation must be to a {ty} pointer."
)
}
IrError::VerifyStoreMismatchedTypes(_) => {
write!(
f,
"Verification failed: Store value and pointer type mismatch."
)
}
IrError::VerifyStoreToNonPointer(ty) => {
write!(f, "Store must be to a pointer, not a {ty}.")
}
IrError::VerifyUntypedValuePassedToFunction => write!(
f,
"Verification failed: An untyped/void value has been passed to a function call."
),
IrError::VerifyInvalidGtfIndexType => write!(
f,
"Verification failed: An non-integer value has been passed to a 'gtf' instruction."
),
IrError::VerifyLogId => {
write!(f, "Verification failed: log ID must be an integer.")
}
IrError::VerifyLogMismatchedTypes => {
write!(
f,
"Verification failed: log type must match the type of the value being logged."
)
}
IrError::VerifyLogEventDataVersion(version) => {
write!(
f,
"Verification failed: unsupported log event metadata version {version}."
)
}
IrError::VerifyLogEventDataInvalid(reason) => {
write!(
f,
"Verification failed: invalid log event metadata ({reason})."
)
}
IrError::VerifyRevertCodeBadType => {
write!(
f,
"Verification failed: error code for revert must be a u64."
)
}
IrError::VerifySmoRecipientBadType => {
write!(
f,
"Verification failed: the `smo` must have a `b256` as its first argument."
)
}
IrError::VerifySmoBadMessageType => {
write!(
f,
"Verification failed: the second arg of of `smo` must be a struct."
)
}
IrError::VerifySmoMessageSize => {
write!(
f,
"Verification failed: smo message size must be an integer."
)
}
IrError::VerifySmoRecipientNonPointer(ty) => {
write!(
f,
"Verification failed: the first arg of `smo` cannot be a non-pointer of {ty}."
)
}
IrError::VerifySmoMessageNonPointer(ty) => {
write!(
f,
"Verification failed: the second arg of `smo` cannot be a non-pointer of {ty}."
)
}
IrError::VerifySmoCoins => {
write!(
f,
"Verification failed: smo coins value must be an integer."
)
}
IrError::VerifyGlobalMissingInitializer(global_name) => {
write!(
f,
"Verification failed: Immutable global variable {global_name}\
is missing an initializer."
)
}
IrError::VerifyLocalMissingInitializer(local_name, func_name) => {
write!(
f,
"Verification failed: Immutable local variable {local_name} in function \
{func_name} is missing an initializer."
)
}
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/printer.rs | sway-ir/src/printer.rs | //! Print (or serialize) IR to human and machine readable text.
//!
//! This module implements a document based pretty-printer. A couple of 3rd party pretty printing
//! crates were assessed but didn't seem to work as well as this simple version, which is quite
//! effective.
use std::collections::{BTreeMap, HashMap};
use slotmap::Key;
use sway_types::SourceEngine;
use crate::{
asm::*,
block::Block,
constant::{ConstantContent, ConstantValue},
context::Context,
function::{Function, FunctionContent},
instruction::{FuelVmInstruction, InstOp, Predicate, Register},
metadata::{MetadataIndex, Metadatum},
module::{Kind, ModuleContent},
value::{Value, ValueContent, ValueDatum},
AnalysisResult, AnalysisResultT, AnalysisResults, BinaryOpKind, BlockArgument, ConfigContent,
IrError, Module, Pass, PassMutability, ScopedPass, UnaryOpKind,
};
#[derive(Debug)]
pub(crate) enum Doc {
Empty,
Space,
Comma,
Text(String),
Line(Box<Doc>),
Pair(Box<Doc>, Box<Doc>),
List(Vec<Doc>),
ListSep(Vec<Doc>, Box<Doc>),
Parens(Box<Doc>),
Indent(i64, Box<Doc>),
}
impl Doc {
pub(crate) fn text<S: Into<String>>(s: S) -> Self {
Doc::Text(s.into())
}
fn line(doc: Doc) -> Self {
Doc::Line(Box::new(doc))
}
pub(crate) fn text_line<S: Into<String>>(s: S) -> Self {
Doc::Line(Box::new(Doc::Text(s.into())))
}
fn indent(n: i64, doc: Doc) -> Doc {
Doc::Indent(n, Box::new(doc))
}
fn list_sep(docs: Vec<Doc>, sep: Doc) -> Doc {
Doc::ListSep(docs, Box::new(sep))
}
fn in_parens_comma_sep(docs: Vec<Doc>) -> Doc {
Doc::Parens(Box::new(Doc::list_sep(docs, Doc::Comma)))
}
pub(crate) fn append(self, doc: Doc) -> Doc {
match (&self, &doc) {
(Doc::Empty, _) => doc,
(_, Doc::Empty) => self,
_ => Doc::Pair(Box::new(self), Box::new(doc)),
}
}
fn and(self, doc: Doc) -> Doc {
match doc {
Doc::Empty => doc,
_ => Doc::Pair(Box::new(self), Box::new(doc)),
}
}
pub(crate) fn build(self) -> String {
build_doc(self, 0)
}
}
/// Pretty-print a whole [`Context`] to a string.
///
/// The output from this function must always be suitable for [crate::parser::parse].
pub fn to_string(context: &Context) -> String {
context_print(context, &|_, doc| doc)
}
pub(crate) fn context_print(context: &Context, map_doc: &impl Fn(&Value, Doc) -> Doc) -> String {
let mut md_namer = MetadataNamer::default();
context
.modules
.iter()
.fold(Doc::Empty, |doc, (_, module)| {
doc.append(module_to_doc(context, &mut md_namer, module, map_doc))
})
.append(md_namer.to_doc(context))
.build()
}
pub(crate) fn block_print(
context: &Context,
function: Function,
block: Block,
map_doc: &impl Fn(&Value, Doc) -> Doc,
) -> String {
let mut md_namer = MetadataNamer::default();
let mut namer = Namer::new(function);
block_to_doc(context, &mut md_namer, &mut namer, &block, map_doc).build()
}
pub struct ModulePrinterResult;
impl AnalysisResultT for ModulePrinterResult {}
/// Pass to print a module to stdout.
pub fn module_printer_pass(
context: &Context,
_analyses: &AnalysisResults,
module: Module,
) -> Result<AnalysisResult, IrError> {
let mut md_namer = MetadataNamer::default();
print!(
"{}",
module_to_doc(
context,
&mut md_namer,
context.modules.get(module.0).unwrap(),
&|_, doc| doc
)
.append(md_namer.to_doc(context))
.build()
);
Ok(Box::new(ModulePrinterResult))
}
/// Print a module to stdout.
pub fn module_print(context: &Context, _analyses: &AnalysisResults, module: Module) {
let mut md_namer = MetadataNamer::default();
println!(
"{}",
module_to_doc(
context,
&mut md_namer,
context.modules.get(module.0).unwrap(),
&|_, doc| doc
)
.append(md_namer.to_doc(context))
.build()
);
}
/// Print a function to stdout.
pub fn function_print<W: std::fmt::Write>(
w: &mut W,
context: &Context,
function: Function,
metadata: bool,
) -> Result<(), std::fmt::Error> {
let mut md_namer = MetadataNamer::default();
let doc = function_to_doc(
context,
&mut md_namer,
&mut Namer::new(function),
context.functions.get(function.0).unwrap(),
&|_, doc| doc,
);
let doc = if metadata {
doc.append(md_namer.to_doc(context))
} else {
doc
};
write!(w, "{}", doc.build())
}
/// Print an instruction to stdout.
pub fn instruction_print(context: &Context, ins_value: &Value) {
let mut md_namer = MetadataNamer::default();
let block = ins_value
.get_instruction(context)
.expect("Calling instruction printer on non-instruction value")
.parent;
let function = block.get_function(context);
let mut namer = Namer::new(function);
println!(
"{}",
instruction_to_doc(context, &mut md_namer, &mut namer, &block, ins_value).build()
);
}
pub const MODULE_PRINTER_NAME: &str = "module-printer";
pub fn create_module_printer_pass() -> Pass {
Pass {
name: MODULE_PRINTER_NAME,
descr: "Print module to stdout",
deps: vec![],
runner: ScopedPass::ModulePass(PassMutability::Analysis(module_printer_pass)),
}
}
fn module_to_doc<'a>(
context: &'a Context,
md_namer: &mut MetadataNamer,
module: &'a ModuleContent,
map_doc: &impl Fn(&Value, Doc) -> Doc,
) -> Doc {
Doc::line(Doc::Text(format!(
"{} {{",
match module.kind {
Kind::Contract => "contract",
Kind::Library => "library",
Kind::Predicate => "predicate",
Kind::Script => "script",
}
)))
.append(Doc::indent(
4,
Doc::List(
module
.configs
.values()
.map(|value| config_to_doc(context, value, md_namer))
.collect(),
),
))
.append(if !module.configs.is_empty() {
Doc::line(Doc::Empty)
} else {
Doc::Empty
})
.append(Doc::indent(
4,
Doc::List(
module
.global_variables
.iter()
.map(|(name, var)| {
let var_content = &context.global_vars[var.0];
let init_doc = match &var_content.initializer {
Some(const_val) => Doc::text(format!(
" = const {}",
const_val.get_content(context).as_lit_string(context)
)),
None => Doc::Empty,
};
let mut_string = if var_content.mutable { "mut " } else { "" };
Doc::line(
Doc::text(format!(
"{}global {} : {}",
mut_string,
name.join("::"),
var.get_inner_type(context).as_string(context),
))
.append(init_doc),
)
})
.collect(),
),
))
.append(if !module.global_variables.is_empty() {
Doc::line(Doc::Empty)
} else {
Doc::Empty
})
.append(Doc::indent(
4,
Doc::List(
module
.storage_keys
.iter()
.map(|(name, storage_key)| {
let (slot, offset, field_id) = storage_key.get_parts(context);
Doc::line(
// If the storage key's path doesn't have struct field names,
// which is 99% of the time, we will display only the slot,
// to avoid clattering.
Doc::text(format!(
"storage_key {name} = 0x{slot:x}{}{}",
if offset != 0 || slot != field_id {
format!(" : {offset}")
} else {
"".to_string()
},
if slot != field_id {
format!(" : 0x{field_id:x}")
} else {
"".to_string()
},
)),
)
})
.collect(),
),
))
.append(if !module.storage_keys.is_empty() {
Doc::line(Doc::Empty)
} else {
Doc::Empty
})
.append(Doc::indent(
4,
Doc::list_sep(
module
.functions
.iter()
.map(|function| {
function_to_doc(
context,
md_namer,
&mut Namer::new(*function),
&context.functions[function.0],
map_doc,
)
})
.collect(),
Doc::line(Doc::Empty),
),
))
.append(Doc::text_line("}"))
}
fn config_to_doc(
context: &Context,
configurable: &ConfigContent,
md_namer: &mut MetadataNamer,
) -> Doc {
match configurable {
ConfigContent::V0 {
name,
constant,
opt_metadata,
..
} => Doc::line(
Doc::text(format!(
"{} = config {}",
name,
constant.get_content(context).as_lit_string(context)
))
.append(md_namer.md_idx_to_doc(context, opt_metadata)),
),
ConfigContent::V1 {
name,
ty,
encoded_bytes,
decode_fn,
opt_metadata,
..
} => {
let ty = ty.as_string(context);
let bytes = encoded_bytes
.iter()
.map(|b| format!("{b:02x}"))
.collect::<Vec<String>>()
.concat();
Doc::line(
Doc::text(format!(
"{} = config {}, {}, 0x{}",
name,
ty,
decode_fn.get().get_name(context),
bytes,
))
.append(md_namer.md_idx_to_doc(context, opt_metadata)),
)
}
}
}
fn function_to_doc<'a>(
context: &'a Context,
md_namer: &mut MetadataNamer,
namer: &mut Namer,
function: &'a FunctionContent,
map_doc: &impl Fn(&Value, Doc) -> Doc,
) -> Doc {
let public = if function.is_public { "pub " } else { "" };
let entry = if function.is_entry { "entry " } else { "" };
// TODO: Remove outer `if` once old encoding is fully removed.
// This is an intentional "complication" so that we see
// explicit using of `new_encoding` here.
// For the time being, for the old encoding, we don't want
// to show both `entry` and `entry_orig` although both
// values will be true.
// TODO: When removing old encoding, remove also the TODO in the
// `rule fn_decl()` definition of the IR parser.
let original_entry = if context.experimental.new_encoding {
if function.is_original_entry {
"entry_orig "
} else {
""
}
} else if !function.is_entry && function.is_original_entry {
"entry_orig "
} else {
""
};
let fallback = if function.is_fallback {
"fallback "
} else {
""
};
Doc::line(
Doc::text(format!(
"{}{}{}{}fn {}",
public, entry, original_entry, fallback, function.name
))
.append(
function
.selector
.map(|bytes| {
Doc::text(format!(
"<{:02x}{:02x}{:02x}{:02x}>",
bytes[0], bytes[1], bytes[2], bytes[3]
))
})
.unwrap_or(Doc::Empty),
)
.append(Doc::in_parens_comma_sep(
function
.arguments
.iter()
.map(|(name, arg_val)| {
if let ValueContent {
value: ValueDatum::Argument(BlockArgument { ty, .. }),
metadata,
..
} = &context.values[arg_val.0]
{
Doc::text(name)
.append(
Doc::Space.and(md_namer.md_idx_to_doc_no_comma(context, metadata)),
)
.append(Doc::text(format!(": {}", ty.as_string(context))))
} else {
unreachable!("Unexpected non argument value for function arguments.")
}
})
.collect(),
))
.append(Doc::text(format!(
" -> {}",
function.return_type.as_string(context)
)))
.append(md_namer.md_idx_to_doc(context, &function.metadata))
.append(Doc::text(" {")),
)
.append(Doc::indent(
4,
Doc::list_sep(
vec![
Doc::List(
function
.local_storage
.iter()
.map(|(name, var)| {
let var_content = &context.local_vars[var.0];
let init_doc = match &var_content.initializer {
Some(const_val) => Doc::text(format!(
" = const {}",
const_val.get_content(context).as_lit_string(context)
)),
None => Doc::Empty,
};
let mut_str = if var_content.mutable { "mut " } else { "" };
Doc::line(
// Print the inner, pointed-to type in the locals list.
Doc::text(format!(
"local {mut_str}{} {name}",
var.get_inner_type(context).as_string(context)
))
.append(init_doc),
)
})
.collect(),
),
Doc::list_sep(
function
.blocks
.iter()
.map(|block| block_to_doc(context, md_namer, namer, block, map_doc))
.collect(),
Doc::line(Doc::Empty),
),
],
Doc::line(Doc::Empty),
),
))
.append(Doc::text_line("}"))
}
fn block_to_doc(
context: &Context,
md_namer: &mut MetadataNamer,
namer: &mut Namer,
block: &Block,
map_doc: &impl Fn(&Value, Doc) -> Doc,
) -> Doc {
let block_content = &context.blocks[block.0];
Doc::line(
Doc::text(block_content.label.to_string()).append(
Doc::in_parens_comma_sep(
block
.arg_iter(context)
.map(|arg_val| {
Doc::text(namer.name(context, arg_val)).append(Doc::text(format!(
": {}",
arg_val.get_type(context).unwrap().as_string(context)
)))
})
.collect(),
)
.append(Doc::Text(":".to_string())),
),
)
.append(Doc::List(
block
.instruction_iter(context)
.map(|current_value| {
let doc = instruction_to_doc(context, md_namer, namer, block, ¤t_value);
(map_doc)(¤t_value, doc)
})
.collect(),
))
}
fn constant_to_doc(
context: &Context,
md_namer: &mut MetadataNamer,
namer: &mut Namer,
const_val: &Value,
) -> Doc {
if let ValueContent {
value: ValueDatum::Constant(constant),
metadata,
} = &context.values[const_val.0]
{
Doc::line(
Doc::text(format!(
"{} = const {}",
namer.name(context, const_val),
constant.get_content(context).as_lit_string(context)
))
.append(md_namer.md_idx_to_doc(context, metadata)),
)
} else {
unreachable!("Not a constant value.")
}
}
fn maybe_constant_to_doc(
context: &Context,
md_namer: &mut MetadataNamer,
namer: &mut Namer,
maybe_const_val: &Value,
) -> Doc {
// Create a new doc only if value is new and unknown, and is a constant.
if !namer.is_known(maybe_const_val) && maybe_const_val.is_constant(context) {
constant_to_doc(context, md_namer, namer, maybe_const_val)
} else {
Doc::Empty
}
}
fn instruction_to_doc<'a>(
context: &'a Context,
md_namer: &mut MetadataNamer,
namer: &mut Namer,
block: &Block,
ins_value: &'a Value,
) -> Doc {
if let ValueContent {
value: ValueDatum::Instruction(instruction),
metadata,
} = &context.values[ins_value.0]
{
match &instruction.op {
InstOp::AsmBlock(asm, args) => {
asm_block_to_doc(context, md_namer, namer, ins_value, asm, args, metadata)
}
InstOp::BitCast(value, ty) => maybe_constant_to_doc(context, md_namer, namer, value)
.append(Doc::line(
Doc::text(format!(
"{} = bitcast {} to {}",
namer.name(context, ins_value),
namer.name(context, value),
ty.as_string(context),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
)),
InstOp::Alloc { ty, count } => maybe_constant_to_doc(context, md_namer, namer, count)
.append(Doc::line(
Doc::text(format!(
"{} = alloc {} x {}",
namer.name(context, ins_value),
ty.as_string(context),
namer.name(context, count),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
)),
InstOp::UnaryOp { op, arg } => {
let op_str = match op {
UnaryOpKind::Not => "not",
};
maybe_constant_to_doc(context, md_namer, namer, arg).append(Doc::line(
Doc::text(format!(
"{} = {op_str} {}",
namer.name(context, ins_value),
namer.name(context, arg),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
))
}
InstOp::BinaryOp { op, arg1, arg2 } => {
let op_str = match op {
BinaryOpKind::Add => "add",
BinaryOpKind::Sub => "sub",
BinaryOpKind::Mul => "mul",
BinaryOpKind::Div => "div",
BinaryOpKind::And => "and",
BinaryOpKind::Or => "or",
BinaryOpKind::Xor => "xor",
BinaryOpKind::Mod => "mod",
BinaryOpKind::Rsh => "rsh",
BinaryOpKind::Lsh => "lsh",
};
maybe_constant_to_doc(context, md_namer, namer, arg1)
.append(maybe_constant_to_doc(context, md_namer, namer, arg2))
.append(Doc::line(
Doc::text(format!(
"{} = {op_str} {}, {}",
namer.name(context, ins_value),
namer.name(context, arg1),
namer.name(context, arg2),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
))
}
InstOp::Branch(to_block) =>
// Handle possibly constant block parameters
{
to_block
.args
.iter()
.fold(Doc::Empty, |doc, param| {
doc.append(maybe_constant_to_doc(context, md_namer, namer, param))
})
.append(Doc::line(
Doc::text(format!("br {}", context.blocks[to_block.block.0].label,))
.append(
Doc::in_parens_comma_sep(
to_block
.args
.iter()
.map(|arg_val| Doc::text(namer.name(context, arg_val)))
.collect(),
)
.append(md_namer.md_idx_to_doc(context, metadata)),
),
))
}
InstOp::Call(func, args) => args
.iter()
.fold(Doc::Empty, |doc, arg_val| {
doc.append(maybe_constant_to_doc(context, md_namer, namer, arg_val))
})
.append(Doc::line(
Doc::text(format!(
"{} = call {}",
namer.name(context, ins_value),
context.functions[func.0].name
))
.append(Doc::in_parens_comma_sep(
args.iter()
.map(|arg_val| Doc::text(namer.name(context, arg_val)))
.collect(),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
)),
InstOp::CastPtr(val, ty) => Doc::line(
Doc::text(format!(
"{} = cast_ptr {} to {}",
namer.name(context, ins_value),
namer.name(context, val),
ty.as_string(context)
))
.append(md_namer.md_idx_to_doc(context, metadata)),
),
InstOp::Cmp(pred, lhs_value, rhs_value) => {
let pred_str = match pred {
Predicate::Equal => "eq",
Predicate::LessThan => "lt",
Predicate::GreaterThan => "gt",
};
maybe_constant_to_doc(context, md_namer, namer, lhs_value)
.append(maybe_constant_to_doc(context, md_namer, namer, rhs_value))
.append(Doc::line(
Doc::text(format!(
"{} = cmp {pred_str} {} {}",
namer.name(context, ins_value),
namer.name(context, lhs_value),
namer.name(context, rhs_value),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
))
}
InstOp::ConditionalBranch {
cond_value,
true_block,
false_block,
} => {
let true_label = &context.blocks[true_block.block.0].label;
let false_label = &context.blocks[false_block.block.0].label;
// Handle possibly constant block parameters
let doc = true_block.args.iter().fold(
maybe_constant_to_doc(context, md_namer, namer, cond_value),
|doc, param| doc.append(maybe_constant_to_doc(context, md_namer, namer, param)),
);
let doc = false_block.args.iter().fold(doc, |doc, param| {
doc.append(maybe_constant_to_doc(context, md_namer, namer, param))
});
doc.append(Doc::line(
Doc::text(format!("cbr {}", namer.name(context, cond_value),)).append(
Doc::text(format!(", {true_label}")).append(
Doc::in_parens_comma_sep(
true_block
.args
.iter()
.map(|arg_val| Doc::text(namer.name(context, arg_val)))
.collect(),
)
.append(
Doc::text(format!(", {false_label}")).append(
Doc::in_parens_comma_sep(
false_block
.args
.iter()
.map(|arg_val| Doc::text(namer.name(context, arg_val)))
.collect(),
)
.append(md_namer.md_idx_to_doc(context, metadata)),
),
),
),
),
))
}
InstOp::ContractCall {
return_type,
name,
params,
coins,
asset_id,
gas,
} => maybe_constant_to_doc(context, md_namer, namer, coins)
.append(maybe_constant_to_doc(context, md_namer, namer, asset_id))
.append(maybe_constant_to_doc(context, md_namer, namer, gas))
.append(Doc::line(
Doc::text(format!(
"{} = contract_call {} {} {}, {}, {}, {}",
namer.name(context, ins_value),
return_type.as_string(context),
name.as_deref().unwrap_or(""),
namer.name(context, params),
namer.name(context, coins),
namer.name(context, asset_id),
namer.name(context, gas),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
)),
InstOp::FuelVm(fuel_vm_instr) => match fuel_vm_instr {
FuelVmInstruction::Gtf { index, tx_field_id } => {
maybe_constant_to_doc(context, md_namer, namer, index).append(Doc::line(
Doc::text(format!(
"{} = gtf {}, {}",
namer.name(context, ins_value),
namer.name(context, index),
tx_field_id,
))
.append(md_namer.md_idx_to_doc(context, metadata)),
))
}
FuelVmInstruction::Log {
log_val,
log_ty,
log_id,
log_data,
} => {
let log_val_doc = maybe_constant_to_doc(context, md_namer, namer, log_val);
let log_id_doc = maybe_constant_to_doc(context, md_namer, namer, log_id);
let log_val_name = namer.name(context, log_val);
let log_id_name = namer.name(context, log_id);
let base_doc = Doc::text(format!(
"log {} {}, {}",
log_ty.as_string(context),
log_val_name,
log_id_name,
));
let log_doc = if let Some(data) = log_data {
base_doc
.append(Doc::Space)
.append(Doc::text(format!(
"log_data(version: {}, is_event: {}, is_indexed: {}, event_type_size: {}, num_elements: {})",
data.version(),
data.is_event(),
data.is_indexed(),
data.event_type_size(),
data.num_elements(),
)))
} else {
base_doc
};
log_val_doc.append(log_id_doc).append(Doc::line(
log_doc.append(md_namer.md_idx_to_doc(context, metadata)),
))
}
FuelVmInstruction::ReadRegister(reg) => Doc::line(
Doc::text(format!(
"{} = read_register {}",
namer.name(context, ins_value),
match reg {
Register::Of => "of",
Register::Pc => "pc",
Register::Ssp => "ssp",
Register::Sp => "sp",
Register::Fp => "fp",
Register::Hp => "hp",
Register::Error => "err",
Register::Ggas => "ggas",
Register::Cgas => "cgas",
Register::Bal => "bal",
Register::Is => "is",
Register::Ret => "ret",
Register::Retl => "retl",
Register::Flag => "flag",
},
))
.append(md_namer.md_idx_to_doc(context, metadata)),
),
FuelVmInstruction::Revert(v) => maybe_constant_to_doc(context, md_namer, namer, v)
.append(Doc::line(
Doc::text(format!("revert {}", namer.name(context, v),))
.append(md_namer.md_idx_to_doc(context, metadata)),
)),
FuelVmInstruction::JmpMem => Doc::line(
Doc::text("jmp_mem".to_string())
.append(md_namer.md_idx_to_doc(context, metadata)),
),
FuelVmInstruction::Smo {
recipient,
message,
message_size,
coins,
} => maybe_constant_to_doc(context, md_namer, namer, recipient)
.append(maybe_constant_to_doc(context, md_namer, namer, message))
.append(maybe_constant_to_doc(
context,
md_namer,
namer,
message_size,
))
.append(maybe_constant_to_doc(context, md_namer, namer, coins))
.append(Doc::line(
Doc::text(format!(
"smo {}, {}, {}, {}",
namer.name(context, recipient),
namer.name(context, message),
namer.name(context, message_size),
namer.name(context, coins),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
)),
FuelVmInstruction::StateClear {
key,
number_of_slots,
} => maybe_constant_to_doc(context, md_namer, namer, number_of_slots).append(
Doc::line(
Doc::text(format!(
"state_clear key {}, {}",
namer.name(context, key),
namer.name(context, number_of_slots),
))
.append(md_namer.md_idx_to_doc(context, metadata)),
),
),
FuelVmInstruction::StateLoadQuadWord {
load_val,
key,
number_of_slots,
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/irtype.rs | sway-ir/src/irtype.rs | //! Each of the valid `Value` types.
//!
//! These generally mimic the Sway types with a couple of exceptions:
//! - [`Type::Unit`] is still a discrete type rather than an empty tuple. This may change in the
//! future.
//! - [`Type::Union`] is a sum type which resembles a C union. Each member of the union uses the
//! same storage and the size of the union is the size of the largest member.
//!
//! [`Aggregate`] is an abstract collection of [`Type`]s used for structs, unions and arrays,
//! though see below for future improvements around splitting arrays into a different construct.
use crate::{context::Context, pretty::DebugWithContext, ConstantContent, ConstantValue, Value};
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Type(pub slotmap::DefaultKey);
impl DebugWithContext for Type {
fn fmt_with_context(
&self,
formatter: &mut std::fmt::Formatter,
context: &Context,
) -> std::fmt::Result {
self.get_content(context)
.fmt_with_context(formatter, context)
}
}
#[derive(Debug, Clone, DebugWithContext, Hash, PartialEq, Eq)]
pub enum TypeContent {
Never,
Unit,
Bool,
Uint(u16),
B256,
StringSlice,
StringArray(u64),
Array(Type, u64),
Union(Vec<Type>),
Struct(Vec<Type>),
Slice,
Pointer,
TypedPointer(Type),
TypedSlice(Type),
}
impl Type {
fn get_or_create_unique_type(context: &mut Context, t: TypeContent) -> Type {
// Trying to avoiding cloning t unless we're creating a new type.
#[allow(clippy::map_entry)]
if !context.type_map.contains_key(&t) {
let new_type = Type(context.types.insert(t.clone()));
context.type_map.insert(t, new_type);
new_type
} else {
context.type_map.get(&t).copied().unwrap()
}
}
/// Get Type if it already exists.
pub fn get_type(context: &Context, t: &TypeContent) -> Option<Type> {
context.type_map.get(t).copied()
}
pub fn create_basic_types(context: &mut Context) {
Self::get_or_create_unique_type(context, TypeContent::Never);
Self::get_or_create_unique_type(context, TypeContent::Unit);
Self::get_or_create_unique_type(context, TypeContent::Bool);
Self::get_or_create_unique_type(context, TypeContent::Uint(8));
Self::get_or_create_unique_type(context, TypeContent::Uint(16));
Self::get_or_create_unique_type(context, TypeContent::Uint(32));
Self::get_or_create_unique_type(context, TypeContent::Uint(64));
Self::get_or_create_unique_type(context, TypeContent::Uint(256));
Self::get_or_create_unique_type(context, TypeContent::B256);
Self::get_or_create_unique_type(context, TypeContent::Slice);
Self::get_or_create_unique_type(context, TypeContent::Pointer);
}
/// Get the content for this [Type].
pub fn get_content<'a>(&self, context: &'a Context) -> &'a TypeContent {
&context.types[self.0]
}
/// Get never type
pub fn get_never(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Never).expect("create_basic_types not called")
}
/// Get unit type
pub fn get_unit(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Unit).expect("create_basic_types not called")
}
/// Get bool type
pub fn get_bool(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Bool).expect("create_basic_types not called")
}
/// New unsigned integer type
pub fn new_uint(context: &mut Context, width: u16) -> Type {
Self::get_or_create_unique_type(context, TypeContent::Uint(width))
}
/// New u8 type
pub fn get_uint8(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Uint(8)).expect("create_basic_types not called")
}
/// New u16 type
pub fn get_uint16(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Uint(16)).expect("create_basic_types not called")
}
/// New u32 type
pub fn get_uint32(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Uint(32)).expect("create_basic_types not called")
}
/// New u64 type
pub fn get_uint64(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Uint(64)).expect("create_basic_types not called")
}
/// New u64 type
pub fn get_uint256(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Uint(256)).expect("create_basic_types not called")
}
/// Get unsigned integer type
pub fn get_uint(context: &Context, width: u16) -> Option<Type> {
Self::get_type(context, &TypeContent::Uint(width))
}
/// Get B256 type
pub fn get_b256(context: &Context) -> Type {
Self::get_type(context, &TypeContent::B256).expect("create_basic_types not called")
}
/// Get untyped pointer type
pub fn get_ptr(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Pointer).expect("create_basic_types not called")
}
pub fn new_untyped_slice(context: &mut Context) -> Type {
Self::get_or_create_unique_type(context, TypeContent::Slice)
}
/// Get string type
pub fn new_string_array(context: &mut Context, len: u64) -> Type {
Self::get_or_create_unique_type(context, TypeContent::StringArray(len))
}
/// Get array type
pub fn new_array(context: &mut Context, elm_ty: Type, len: u64) -> Type {
Self::get_or_create_unique_type(context, TypeContent::Array(elm_ty, len))
}
/// Get union type
pub fn new_union(context: &mut Context, fields: Vec<Type>) -> Type {
Self::get_or_create_unique_type(context, TypeContent::Union(fields))
}
/// Get struct type
pub fn new_struct(context: &mut Context, fields: Vec<Type>) -> Type {
Self::get_or_create_unique_type(context, TypeContent::Struct(fields))
}
/// New pointer type
pub fn new_typed_pointer(context: &mut Context, to_ty: Type) -> Type {
Self::get_or_create_unique_type(context, TypeContent::TypedPointer(to_ty))
}
/// Get slice type
pub fn get_slice(context: &Context) -> Type {
Self::get_type(context, &TypeContent::Slice).expect("create_basic_types not called")
}
/// Get typed slice type
pub fn get_typed_slice(context: &mut Context, item_ty: Type) -> Type {
Self::get_or_create_unique_type(context, TypeContent::TypedSlice(item_ty))
}
/// Return a string representation of type, used for printing.
pub fn as_string(&self, context: &Context) -> String {
let sep_types_str = |agg_content: &Vec<Type>, sep: &str| {
agg_content
.iter()
.map(|ty| ty.as_string(context))
.collect::<Vec<_>>()
.join(sep)
};
match self.get_content(context) {
TypeContent::Never => "never".into(),
TypeContent::Unit => "()".into(),
TypeContent::Bool => "bool".into(),
TypeContent::Uint(nbits) => format!("u{nbits}"),
TypeContent::B256 => "b256".into(),
TypeContent::StringSlice => "str".into(),
TypeContent::StringArray(n) => format!("string<{n}>"),
TypeContent::Array(ty, cnt) => {
format!("[{}; {}]", ty.as_string(context), cnt)
}
TypeContent::Union(agg) => {
format!("( {} )", sep_types_str(agg, " | "))
}
TypeContent::Struct(agg) => {
format!("{{ {} }}", sep_types_str(agg, ", "))
}
TypeContent::Slice => "slice".into(),
TypeContent::Pointer => "ptr".into(),
TypeContent::TypedSlice(ty) => format!("__slice[{}]", ty.as_string(context)),
TypeContent::TypedPointer(ty) => format!("__ptr {}", ty.as_string(context)),
}
}
/// Compare a type to this one for equivalence.
/// `PartialEq` does not take into account the special case for Unions below.
pub fn eq(&self, context: &Context, other: &Type) -> bool {
    // NOTE: Arm order is significant below. The wildcard union arms and the
    // `Never` coercion arm must stay exactly where they are: an earlier exact
    // arm (e.g. union-vs-union) must win before a wildcard arm applies.
    match (self.get_content(context), other.get_content(context)) {
        (TypeContent::Unit, TypeContent::Unit) => true,
        (TypeContent::Bool, TypeContent::Bool) => true,
        (TypeContent::Uint(l), TypeContent::Uint(r)) => l == r,
        (TypeContent::B256, TypeContent::B256) => true,
        (TypeContent::StringSlice, TypeContent::StringSlice) => true,
        (TypeContent::StringArray(l), TypeContent::StringArray(r)) => l == r,
        // Arrays are equal when both the element type and the length match.
        (TypeContent::Array(l, llen), TypeContent::Array(r, rlen)) => {
            llen == rlen && l.eq(context, r)
        }
        (TypeContent::TypedSlice(l), TypeContent::TypedSlice(r)) => l.eq(context, r),
        // Structs and unions compare member-wise, in order.
        (TypeContent::Struct(l), TypeContent::Struct(r))
        | (TypeContent::Union(l), TypeContent::Union(r)) => {
            l.len() == r.len() && l.iter().zip(r.iter()).all(|(l, r)| l.eq(context, r))
        }
        // Unions are special. We say unions are equivalent to any of their variant types.
        (_, TypeContent::Union(_)) => other.eq(context, self),
        (TypeContent::Union(l), _) => l.iter().any(|field_ty| other.eq(context, field_ty)),
        // Never type can coerce into any other type.
        (TypeContent::Never, _) => true,
        (TypeContent::Slice, TypeContent::Slice) => true,
        (TypeContent::Pointer, TypeContent::Pointer) => true,
        (TypeContent::TypedPointer(l), TypeContent::TypedPointer(r)) => l.eq(context, r),
        _ => false,
    }
}
/// Is Never type
pub fn is_never(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Never => true,
        _ => false,
    }
}
/// Is bool type
pub fn is_bool(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Bool => true,
        _ => false,
    }
}
/// Is unit type
pub fn is_unit(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Unit => true,
        _ => false,
    }
}
/// Is unsigned integer type
pub fn is_uint(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Uint(_) => true,
        _ => false,
    }
}
/// Is u8 type
pub fn is_uint8(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Uint(8) => true,
        _ => false,
    }
}
/// Is u32 type
pub fn is_uint32(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Uint(32) => true,
        _ => false,
    }
}
/// Is u64 type
pub fn is_uint64(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Uint(64) => true,
        _ => false,
    }
}
/// Is unsigned integer type of specific width
pub fn is_uint_of(&self, context: &Context, width: u16) -> bool {
    match self.get_content(context) {
        TypeContent::Uint(actual_width) => *actual_width == width,
        _ => false,
    }
}
/// Is B256 type
pub fn is_b256(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::B256 => true,
        _ => false,
    }
}
/// Is string slice type
pub fn is_string_slice(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::StringSlice => true,
        _ => false,
    }
}
/// Is string array type
pub fn is_string_array(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::StringArray(_) => true,
        _ => false,
    }
}
/// Is array type
pub fn is_array(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Array(..) => true,
        _ => false,
    }
}
/// Is union type
pub fn is_union(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Union(_) => true,
        _ => false,
    }
}
/// Is struct type
pub fn is_struct(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Struct(_) => true,
        _ => false,
    }
}
/// Is enum type
pub fn is_enum(&self, context: &Context) -> bool {
    // We have to do some painful special handling here for enums, which are tagged unions.
    // This really should be handled by the IR more explicitly and is something that will
    // hopefully be addressed by https://github.com/FuelLabs/sway/issues/2819#issuecomment-1256930392
    // Enums are at the moment represented as structs with two fields, first one being
    // the tag and second the union of variants. Enums are the only place we currently use unions
    // which makes the below heuristics valid.
    if !self.is_struct(context) {
        return false;
    }
    match self.get_field_types(context).as_slice() {
        // Exactly two fields: an unsigned-integer tag followed by the variant union.
        [tag_ty, variants_ty] => tag_ty.is_uint(context) && variants_ty.is_union(context),
        _ => false,
    }
}
/// Is aggregate type: struct, union, enum or array.
pub fn is_aggregate(&self, context: &Context) -> bool {
// Notice that enums are structs of tags and unions.
self.is_struct(context) || self.is_union(context) || self.is_array(context)
}
/// Returns true if `self` is a slice type.
pub fn is_slice(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::Slice => true,
        _ => false,
    }
}
// TODO: (REFERENCES) Check all the usages of `is_ptr`.
/// Returns true if `self` is a pointer type (typed or untyped).
pub fn is_ptr(&self, context: &Context) -> bool {
    match self.get_content(context) {
        TypeContent::TypedPointer(_) | TypeContent::Pointer => true,
        _ => false,
    }
}
/// Get pointed to type iff `self` is a typed pointer.
pub fn get_pointee_type(&self, context: &Context) -> Option<Type> {
    match self.get_content(context) {
        TypeContent::TypedPointer(pointee) => Some(*pointee),
        _ => None,
    }
}
/// Get width of an integer type.
pub fn get_uint_width(&self, context: &Context) -> Option<u16> {
    match self.get_content(context) {
        TypeContent::Uint(width) => Some(*width),
        _ => None,
    }
}
/// What's the type of the struct/array value indexed by indices.
pub fn get_indexed_type(&self, context: &Context, indices: &[u64]) -> Option<Type> {
    // Indexing with no indices is meaningless.
    if indices.is_empty() {
        return None;
    }
    indices.iter().try_fold(*self, |current, &index| {
        // Try struct/union fields first, then fall back to arrays with an
        // in-bounds index.
        if let Some(field_ty) = current.get_field_type(context, index) {
            return Some(field_ty);
        }
        match current.get_content(context) {
            TypeContent::Array(elem_ty, len) if index < *len => Some(*elem_ty),
            _ => None,
        }
    })
}
/// What's the type of the struct/array value indexed by indices.
pub fn get_value_indexed_type(&self, context: &Context, indices: &[Value]) -> Option<Type> {
    // For each index value: if it is a constant uint, first try to treat the
    // current type as a struct/union and fetch the field type (this fails for
    // arrays); otherwise fall back to the array element type, which in turn
    // fails for non-arrays.
    indices.iter().try_fold(*self, |ty, idx_val| {
        let const_idx = idx_val
            .get_constant(context)
            .and_then(|const_ref| match const_ref.get_content(context).value {
                ConstantValue::Uint(n) => Some(n),
                _ => None,
            });
        const_idx
            .and_then(|idx| ty.get_field_type(context, idx))
            .or_else(|| ty.get_array_elem_type(context))
    })
}
/// What's the offset, in bytes, of the indexed element?
/// Returns `None` on invalid indices.
/// Panics if `self` is not an aggregate (struct, union, or array).
pub fn get_indexed_offset(&self, context: &Context, indices: &[u64]) -> Option<u64> {
    // Walk the indices, tracking the (type, byte offset) reached so far.
    indices
        .iter()
        .try_fold((*self, 0), |(ty, accum_offset), idx| {
            if ty.is_struct(context) {
                // Sum up all sizes of all previous fields.
                // Every struct field is aligned to word boundary.
                let prev_idxs_offset = (0..(*idx)).try_fold(0, |accum, pre_idx| {
                    ty.get_field_type(context, pre_idx)
                        .map(|field_ty| accum + field_ty.size(context).in_bytes_aligned())
                })?;
                ty.get_field_type(context, *idx)
                    .map(|field_ty| (field_ty, accum_offset + prev_idxs_offset))
            } else if ty.is_union(context) {
                // Union variants have their raw size in bytes and are
                // left padded within the union.
                let union_size_in_bytes = ty.size(context).in_bytes();
                ty.get_field_type(context, *idx).map(|field_ty| {
                    (
                        field_ty,
                        accum_offset
                            + (union_size_in_bytes - field_ty.size(context).in_bytes()),
                    )
                })
            } else {
                assert!(
                    ty.is_array(context),
                    "Expected aggregate type. Got {}.",
                    ty.as_string(context)
                );
                // Arrays are packed: offset of element `idx` is element size * idx.
                // (Previously this re-fetched the element type and `unwrap()`ed it;
                // `elem_ty` is already the element type, so use it directly.)
                ty.get_array_elem_type(context).map(|elem_ty| {
                    let elem_offset = elem_ty.size(context).in_bytes() * idx;
                    (elem_ty, accum_offset + elem_offset)
                })
            }
        })
        .map(|(_final_ty, offset)| offset)
}
/// What's the offset, in bytes, of the value indexed element?
/// It may not always be possible to determine statically.
pub fn get_value_indexed_offset(&self, context: &Context, indices: &[Value]) -> Option<u64> {
    // Every index must be a constant uint, otherwise the offset cannot be
    // determined statically.
    let mut const_indices = Vec::with_capacity(indices.len());
    for idx in indices {
        match idx.get_constant(context).map(|c| c.get_content(context)) {
            Some(ConstantContent {
                value: ConstantValue::Uint(n),
                ty: _,
            }) => const_indices.push(*n),
            _ => return None,
        }
    }
    self.get_indexed_offset(context, &const_indices)
}
/// Get the type of the struct/union field at `idx`, or `None` when `self`
/// is not an aggregate or the index is out of bounds.
pub fn get_field_type(&self, context: &Context, idx: u64) -> Option<Type> {
    match self.get_content(context) {
        TypeContent::Struct(fields) | TypeContent::Union(fields) => {
            fields.get(idx as usize).cloned()
        }
        // Trying to index a non-aggregate.
        _ => None,
    }
}
/// Get the type of the array element, if applicable.
pub fn get_array_elem_type(&self, context: &Context) -> Option<Type> {
    match *self.get_content(context) {
        TypeContent::Array(elem_ty, _) => Some(elem_ty),
        _ => None,
    }
}
/// Get the element type of the typed slice, if applicable.
pub fn get_typed_slice_elem_type(&self, context: &Context) -> Option<Type> {
    match *self.get_content(context) {
        TypeContent::TypedSlice(elem_ty) => Some(elem_ty),
        _ => None,
    }
}
/// Get the length of the array, if applicable.
pub fn get_array_len(&self, context: &Context) -> Option<u64> {
    match *self.get_content(context) {
        TypeContent::Array(_, len) => Some(len),
        _ => None,
    }
}
/// Get the length of a string array, if applicable.
pub fn get_string_len(&self, context: &Context) -> Option<u64> {
    match *self.get_content(context) {
        TypeContent::StringArray(len) => Some(len),
        _ => None,
    }
}
/// Get the type of each field of a struct Type. Empty vector otherwise.
pub fn get_field_types(&self, context: &Context) -> Vec<Type> {
    if let TypeContent::Struct(fields) | TypeContent::Union(fields) = self.get_content(context) {
        fields.clone()
    } else {
        vec![]
    }
}
/// Get the offset, in bytes, and the [Type] of the struct field at the index `field_idx`, if `self` is a struct,
/// otherwise `None`.
/// Panics if the `field_idx` is out of bounds.
pub fn get_struct_field_offset_and_type(
    &self,
    context: &Context,
    field_idx: u64,
) -> Option<(u64, Type)> {
    if !self.is_struct(context) {
        return None;
    }
    let field_idx = field_idx as usize;
    let field_types = self.get_field_types(context);
    // Struct fields are aligned to word boundary, so the offset of the
    // requested field is the sum of the aligned sizes of all preceding fields.
    let mut offset_in_bytes = 0;
    for field_ty in field_types.iter().take(field_idx) {
        offset_in_bytes += field_ty.size(context).in_bytes_aligned();
    }
    Some((offset_in_bytes, field_types[field_idx]))
}
/// Get the offset, in bytes, and the [Type] of the union field at the index `field_idx`, if `self` is a union,
/// otherwise `None`.
/// Panics if the `field_idx` is out of bounds.
pub fn get_union_field_offset_and_type(
    &self,
    context: &Context,
    field_idx: u64,
) -> Option<(u64, Type)> {
    if !self.is_union(context) {
        return None;
    }
    let field_type = self.get_field_types(context)[field_idx as usize];
    // Union variants have their raw size in bytes and are left padded within
    // the union: a variant of size `s` in a union of size `u` sits at `u - s`.
    let offset_in_bytes =
        self.size(context).in_bytes() - field_type.size(context).in_bytes();
    Some((offset_in_bytes, field_type))
}
/// Returns the memory size of the [Type].
/// The returned `TypeSize::in_bytes` will provide the raw memory size of the `self`,
/// when it's not embedded in an aggregate.
pub fn size(&self, context: &Context) -> TypeSize {
    // NOTE: Arm order matters for the `Uint` arms: specific widths must be
    // matched before the catch-all `Uint(_) => unreachable!()`.
    match self.get_content(context) {
        // Zero-sized types.
        TypeContent::Unit | TypeContent::Never => TypeSize::new(0),
        TypeContent::Uint(8) | TypeContent::Bool => TypeSize::new(1),
        // All integers larger than a byte are words since FuelVM only has memory operations on those two units.
        TypeContent::Uint(16)
        | TypeContent::Uint(32)
        | TypeContent::Uint(64)
        | TypeContent::TypedPointer(_)
        | TypeContent::Pointer => TypeSize::new(8),
        TypeContent::Uint(256) => TypeSize::new(32),
        // No other integer widths are constructible.
        TypeContent::Uint(_) => unreachable!(),
        // Slices occupy two words (pointer + length; see the memory_layout tests).
        TypeContent::Slice => TypeSize::new(16),
        TypeContent::TypedSlice(..) => TypeSize::new(16),
        TypeContent::B256 => TypeSize::new(32),
        TypeContent::StringSlice => TypeSize::new(16),
        // String arrays are padded up to the word boundary (ABI requirement,
        // see the memory_layout tests for the rationale).
        TypeContent::StringArray(n) => {
            TypeSize::new(super::size_bytes_round_up_to_word_alignment!(*n))
        }
        // Arrays are packed: element count times the raw element size.
        TypeContent::Array(el_ty, cnt) => TypeSize::new(cnt * el_ty.size(context).in_bytes()),
        TypeContent::Struct(field_tys) => {
            // Sum up all the field sizes, aligned to words.
            TypeSize::new(
                field_tys
                    .iter()
                    .map(|field_ty| field_ty.size(context).in_bytes_aligned())
                    .sum(),
            )
        }
        TypeContent::Union(field_tys) => {
            // Find the max size for field sizes.
            TypeSize::new(
                field_tys
                    .iter()
                    .map(|field_ty| field_ty.size(context).in_bytes_aligned())
                    .max()
                    .unwrap_or(0),
            )
        }
    }
}
}
// This is a mouthful...
/// Rounds a byte count up to the next multiple of 8 (the FuelVM word size).
/// NOTE: `$bytes_expr` is expanded (and therefore evaluated) twice; pass only
/// cheap, side-effect-free expressions.
#[macro_export]
macro_rules! size_bytes_round_up_to_word_alignment {
    ($bytes_expr: expr) => {
        ($bytes_expr + 7) - (($bytes_expr + 7) % 8)
    };
}
/// A helper to check if an Option<Type> value is of a particular Type.
pub trait TypeOption {
    /// Returns `true` iff `self` is `Some` and the contained type satisfies `pred`.
    fn is(&self, pred: fn(&Type, &Context) -> bool, context: &Context) -> bool;
}
impl TypeOption for Option<Type> {
    /// `true` iff the option holds a type for which `pred` holds.
    fn is(&self, pred: fn(&Type, &Context) -> bool, context: &Context) -> bool {
        matches!(self, Some(ty) if pred(ty, context))
    }
}
/// Provides information about a size of a type, raw and aligned to word boundaries.
#[derive(Clone, Debug)]
pub struct TypeSize {
    // Raw (unaligned) size in bytes; alignment is applied on demand by accessors.
    size_in_bytes: u64,
}
impl TypeSize {
pub fn new(size_in_bytes: u64) -> Self {
Self { size_in_bytes }
}
/// Returns the actual (unaligned) size of the type in bytes.
pub fn in_bytes(&self) -> u64 {
self.size_in_bytes
}
/// Returns the size of the type in bytes, aligned to word boundary.
pub fn in_bytes_aligned(&self) -> u64 {
(self.size_in_bytes + 7) - ((self.size_in_bytes + 7) % 8)
}
/// Returns the size of the type in words (aligned to word boundary).
pub fn in_words(&self) -> u64 {
self.size_in_bytes.div_ceil(8)
}
}
/// Provides information about padding expected when laying values in memory.
/// Padding depends on the type of the value, but also on the embedding of
/// the value in aggregates. E.g., in an array of `u8`, each `u8` is "padded"
/// to its size of one byte while as a struct field, it will be right padded
/// to 8 bytes.
#[derive(Clone, Debug, serde::Serialize)]
pub enum Padding {
    /// The value is left padded, up to `target_size` bytes.
    Left { target_size: usize },
    /// The value is right padded, up to `target_size` bytes.
    Right { target_size: usize },
}
impl Padding {
/// Returns the default [Padding] for `u8`.
pub fn default_for_u8(_value: u8) -> Self {
// Dummy _value is used only to ensure correct usage at the call site.
Self::Right { target_size: 1 }
}
/// Returns the default [Padding] for `u64`.
pub fn default_for_u64(_value: u64) -> Self {
// Dummy _value is used only to ensure correct usage at the call site.
Self::Right { target_size: 8 }
}
/// Returns the default [Padding] for a byte array.
pub fn default_for_byte_array(value: &[u8]) -> Self {
Self::Right {
target_size: value.len(),
}
}
/// Returns the default [Padding] for an aggregate.
/// `aggregate_size` is the overall size of the aggregate in bytes.
pub fn default_for_aggregate(aggregate_size: usize) -> Self {
Self::Right {
target_size: aggregate_size,
}
}
/// The target size in bytes.
pub fn target_size(&self) -> usize {
use Padding::*;
match self {
Left { target_size } | Right { target_size } => *target_size,
}
}
}
#[cfg(test)]
mod tests {
pub use super::*;
/// Unit tests in this module document and assert decisions on memory layout.
mod memory_layout {
use super::*;
use crate::{Backtrace, Context};
use once_cell::sync::Lazy;
use sway_features::ExperimentalFeatures;
use sway_types::SourceEngine;
#[test]
/// Bool, when not embedded in aggregates, has a size of 1 byte.
fn boolean() {
let context = create_context();
let s_bool = Type::get_bool(&context).size(&context);
assert_eq!(s_bool.in_bytes(), 1);
}
#[test]
/// Unit, when not embedded in aggregates, has a size of 0 bytes.
fn unit() {
let context = create_context();
let s_unit = Type::get_unit(&context).size(&context);
assert_eq!(s_unit.in_bytes(), 0);
}
#[test]
/// `u8`, when not embedded in aggregates, has a size of 1 byte.
fn unsigned_u8() {
let context = create_context();
let s_u8 = Type::get_uint8(&context).size(&context);
assert_eq!(s_u8.in_bytes(), 1);
}
#[test]
/// `u16`, `u32`, and `u64,`, when not embedded in aggregates, have a size of 8 bytes/1 word.
fn unsigned_u16_u32_u64() {
let context = create_context();
let s_u16 = Type::get_uint16(&context).size(&context);
let s_u32 = Type::get_uint32(&context).size(&context);
let s_u64 = Type::get_uint64(&context).size(&context);
assert_eq!(s_u16.in_bytes(), 8);
assert_eq!(s_u16.in_bytes(), s_u16.in_bytes_aligned());
assert_eq!(s_u32.in_bytes(), 8);
assert_eq!(s_u32.in_bytes(), s_u32.in_bytes_aligned());
assert_eq!(s_u64.in_bytes(), 8);
assert_eq!(s_u64.in_bytes(), s_u64.in_bytes_aligned());
}
#[test]
/// `u256`, when not embedded in aggregates, has a size of 32 bytes.
fn unsigned_u256() {
let context = create_context();
let s_u256 = Type::get_uint256(&context).size(&context);
assert_eq!(s_u256.in_bytes(), 32);
assert_eq!(s_u256.in_bytes(), s_u256.in_bytes_aligned());
}
#[test]
/// Pointer to any type, when not embedded in aggregates, has a size of 8 bytes/1 word.
fn pointer() {
let mut context = create_context();
for ty in all_sample_types(&mut context) {
let s_ptr = Type::new_typed_pointer(&mut context, ty).size(&context);
assert_eq!(s_ptr.in_bytes(), 8);
assert_eq!(s_ptr.in_bytes(), s_ptr.in_bytes_aligned());
}
assert_eq!(Type::get_ptr(&context).size(&context).in_bytes(), 8);
assert_eq!(
Type::get_ptr(&context).size(&context).in_bytes(),
Type::get_ptr(&context).size(&context).in_bytes_aligned()
);
}
#[test]
/// Slice, when not embedded in aggregates, has a size of 16 bytes/2 words.
/// The first word is the pointer to the actual content, and the second the
/// length of the slice.
fn slice() {
let context = create_context();
let s_slice = Type::get_slice(&context).size(&context);
assert_eq!(s_slice.in_bytes(), 16);
assert_eq!(s_slice.in_bytes(), s_slice.in_bytes_aligned());
}
#[test]
/// `B256`, when not embedded in aggregates, has a size of 32 bytes.
fn b256() {
let context = create_context();
let s_b256 = Type::get_b256(&context).size(&context);
assert_eq!(s_b256.in_bytes(), 32);
assert_eq!(s_b256.in_bytes(), s_b256.in_bytes_aligned());
}
#[test]
/// String slice, when not embedded in aggregates, has a size of 16 bytes/2 words.
/// The first word is the pointer to the actual content, and the second the
/// length of the slice.
fn string_slice() {
let mut context = create_context();
let s_slice = Type::get_or_create_unique_type(&mut context, TypeContent::StringSlice)
.size(&context);
assert_eq!(s_slice.in_bytes(), 16);
assert_eq!(s_slice.in_bytes(), s_slice.in_bytes_aligned());
}
#[test]
/// String array, when not embedded in aggregates, has a size in bytes of its length, aligned to the word boundary.
/// Note that this differs from other arrays, which are packed but not, in addition, aligned to the word boundary.
/// The reason we have the alignment/padding in case of string arrays, is because of the current ABI encoding.
/// The output receipt returned by a contract call can be a string array, and the way the output is encoded
/// (at least for small strings) is by literally putting the ASCII bytes in the return value register.
/// For string arrays smaller than 8 bytes this poses a problem, because we have to fill the register with something
/// or start reading memory that isn't ours. And the workaround was to simply pad all string arrays with zeroes so
/// they're all at least 8 bytes long.
/// Thus, changing this behavior would be a breaking change in ABI compatibility.
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
//! Code to validate the IR in a [`Context`].
//!
//! During creation, deserialization and optimization the IR should be verified to be in a
//! consistent valid state, using the functions in this module.
use itertools::Itertools;
use crate::{
context::Context,
error::IrError,
function::Function,
instruction::{FuelVmInstruction, InstOp, Predicate},
irtype::Type,
metadata::{MetadataIndex, Metadatum},
printer,
value::{Value, ValueDatum},
variable::LocalVar,
AnalysisResult, AnalysisResultT, AnalysisResults, BinaryOpKind, Block, BlockArgument,
BranchToWithArgs, Doc, GlobalVar, LogEventData, Module, Pass, PassMutability, ScopedPass,
StorageKey, TypeOption, UnaryOpKind,
};
/// Marker result type produced by the module-verifier analysis pass
/// (the pass carries no data; success is the result).
pub struct ModuleVerifierResult;
impl AnalysisResultT for ModuleVerifierResult {}
/// Verify module
pub fn module_verifier(
    context: &Context,
    _analyses: &AnalysisResults,
    module: Module,
) -> Result<AnalysisResult, IrError> {
    // Propagate any verification error; on success report the marker result.
    match context.verify_module(module) {
        Ok(()) => Ok(Box::new(ModuleVerifierResult)),
        Err(err) => Err(err),
    }
}
/// The name under which the module-verifier pass is registered.
pub const MODULE_VERIFIER_NAME: &str = "module-verifier";
/// Builds the [Pass] descriptor for the module verifier analysis.
pub fn create_module_verifier_pass() -> Pass {
    let runner = ScopedPass::ModulePass(PassMutability::Analysis(module_verifier));
    Pass {
        name: MODULE_VERIFIER_NAME,
        descr: "Verify module",
        deps: Vec::new(),
        runner,
    }
}
impl Context<'_> {
    /// Verify the contents of this [`Context`] is valid.
    pub fn verify(&self) -> Result<(), IrError> {
        // Verify every module in the context; fail fast on the first error.
        for (module, _) in &self.modules {
            let module = Module(module);
            self.verify_module(module)?;
        }
        Ok(())
    }
    /// Verifies every function in the module, then checks that immutable
    /// global variables all have initializers.
    fn verify_module(&self, module: Module) -> Result<(), IrError> {
        for function in module.function_iter(self) {
            self.verify_function(module, function)?;
        }
        // Check that globals have initializers if they are not mutable.
        for global in &self.modules[module.0].global_variables {
            if !global.1.is_mutable(self) && global.1.get_initializer(self).is_none() {
                let global_name = module.lookup_global_variable_name(self, global.1);
                return Err(IrError::VerifyGlobalMissingInitializer(
                    global_name.unwrap_or_else(|| "<unknown>".to_owned()),
                ));
            }
        }
        Ok(())
    }
    /// Verifies a single function: its parent link, the shape of its entry
    /// block, every basic block, and the function's metadata.
    fn verify_function(&self, cur_module: Module, function: Function) -> Result<(), IrError> {
        // The function must actually belong to the module being verified.
        if function.get_module(self) != cur_module {
            return Err(IrError::InconsistentParent(
                function.get_name(self).into(),
                format!("Module_Index_{:?}", cur_module.0),
                format!("Module_Index_{:?}", function.get_module(self).0),
            ));
        }
        // The entry block must never be a branch target.
        let entry_block = function.get_entry_block(self);
        if entry_block.num_predecessors(self) != 0 {
            return Err(IrError::VerifyEntryBlockHasPredecessors(
                function.get_name(self).to_string(),
                entry_block
                    .pred_iter(self)
                    .map(|block| block.get_label(self))
                    .collect(),
            ));
        }
        // Ensure that the entry block arguments are same as function arguments.
        if function.num_args(self) != entry_block.num_args(self) {
            return Err(IrError::VerifyBlockArgMalformed);
        }
        for ((_, func_arg), block_arg) in function.args_iter(self).zip(entry_block.arg_iter(self)) {
            if func_arg != block_arg {
                return Err(IrError::VerifyBlockArgMalformed);
            }
        }
        // Check that locals have initializers if they aren't mutable.
        // TODO: This check is disabled because we incorrect create
        // immutable locals without initializers at many places.
        // for local in &self.functions[function.0].local_storage {
        //     if !local.1.is_mutable(self) && local.1.get_initializer(self).is_none() {
        //         return Err(IrError::VerifyLocalMissingInitializer(
        //             local.0.to_string(),
        //             function.get_name(self).to_string(),
        //         ));
        //     }
        // }
        for block in function.block_iter(self) {
            self.verify_block(cur_module, function, block)?;
        }
        self.verify_metadata(function.get_metadata(self))?;
        Ok(())
    }
    /// Verifies a basic block: parent link, block arguments, all its
    /// instructions, and that it ends with exactly one terminator.
    /// On failure, prints a diagnostic rendering of the offending IR.
    fn verify_block(
        &self,
        cur_module: Module,
        cur_function: Function,
        cur_block: Block,
    ) -> Result<(), IrError> {
        if cur_block.get_function(self) != cur_function {
            return Err(IrError::InconsistentParent(
                cur_block.get_label(self),
                cur_function.get_name(self).into(),
                cur_block.get_function(self).get_name(self).into(),
            ));
        }
        // Note: this also skips unreferenced blocks holding a single
        // instruction, not only strictly empty ones.
        if cur_block.num_instructions(self) <= 1 && cur_block.num_predecessors(self) == 0 {
            // Empty unreferenced blocks are a harmless artefact.
            return Ok(());
        }
        // Each block argument must record its own position in the block.
        for (arg_idx, arg_val) in cur_block.arg_iter(self).enumerate() {
            match self.values[arg_val.0].value {
                ValueDatum::Argument(BlockArgument { idx, .. }) if idx == arg_idx => (),
                _ => return Err(IrError::VerifyBlockArgMalformed),
            }
        }
        let r = InstructionVerifier {
            context: self,
            cur_module,
            cur_function,
            cur_block,
        }
        .verify_instructions();
        // Help to understand the verification failure
        // If the error knows the problematic value, prints everything with the error highlighted,
        // if not, print only the block to help pinpoint the issue
        if let Err(error) = &r {
            println!(
                "Verification failed at {}::{}",
                cur_function.get_name(self),
                cur_block.get_label(self)
            );
            let block = if let Some(problematic_value) = error.get_problematic_value() {
                printer::context_print(self, &|current_value: &Value, doc: Doc| {
                    if *current_value == *problematic_value {
                        // ANSI red for the highlighted error line.
                        doc.append(Doc::text_line(format!("\x1b[0;31m^ {error}\x1b[0m")))
                    } else {
                        doc
                    }
                })
            } else {
                printer::block_print(self, cur_function, cur_block, &|_, doc| doc)
            };
            println!("{block}");
        }
        r?;
        // Count terminators and remember whether the *last* instruction is one.
        let (last_is_term, num_terms) =
            cur_block
                .instruction_iter(self)
                .fold((false, 0), |(_, n), ins| {
                    if ins.is_terminator(self) {
                        (true, n + 1)
                    } else {
                        (false, n)
                    }
                });
        if !last_is_term {
            Err(IrError::MissingTerminator(
                cur_block.get_label(self).clone(),
            ))
        } else if num_terms != 1 {
            Err(IrError::MisplacedTerminator(
                cur_block.get_label(self).clone(),
            ))
        } else {
            Ok(())
        }
    }
    /// Recursively verifies metadata entries; currently only checks that
    /// struct metadata tags are valid identifiers.
    fn verify_metadata(&self, md_idx: Option<MetadataIndex>) -> Result<(), IrError> {
        // For now we check only that struct tags are valid identifiers.
        if let Some(md_idx) = md_idx {
            match &self.metadata[md_idx.0] {
                Metadatum::List(md_idcs) => {
                    for md_idx in md_idcs {
                        self.verify_metadata(Some(*md_idx))?;
                    }
                }
                Metadatum::Struct(tag, ..) => {
                    // We could import Regex to match it, but it's a simple identifier style pattern:
                    // alpha start char, alphanumeric for the rest, or underscore anywhere.
                    if tag.is_empty() {
                        return Err(IrError::InvalidMetadatum(
                            "Struct has empty tag.".to_owned(),
                        ));
                    }
                    let mut chs = tag.chars();
                    let ch0 = chs.next().unwrap();
                    if !(ch0.is_ascii_alphabetic() || ch0 == '_')
                        || chs.any(|ch| !(ch.is_ascii_alphanumeric() || ch == '_'))
                    {
                        return Err(IrError::InvalidMetadatum(format!(
                            "Invalid struct tag: '{tag}'."
                        )));
                    }
                }
                _otherwise => (),
            }
        }
        Ok(())
    }
}
/// Verifies the instructions of a single block, carrying the enclosing
/// module/function/block for parent-consistency checks and lookups.
struct InstructionVerifier<'a, 'eng> {
    context: &'a Context<'eng>,
    cur_module: Module,
    cur_function: Function,
    cur_block: Block,
}
impl InstructionVerifier<'_, '_> {
/// Walks every instruction in the block, checks its parent link, dispatches
/// to the per-opcode verifier, and finally verifies its metadata.
fn verify_instructions(&self) -> Result<(), IrError> {
    for ins in self.cur_block.instruction_iter(self.context) {
        let value_content = &self.context.values[ins.0];
        let ValueDatum::Instruction(instruction) = &value_content.value else {
            unreachable!("The value must be an instruction, because it is retrieved via block instruction iterator.")
        };
        // The instruction must claim this block as its parent.
        if instruction.parent != self.cur_block {
            return Err(IrError::InconsistentParent(
                format!("Instr_{:?}", ins.0),
                self.cur_block.get_label(self.context),
                instruction.parent.get_label(self.context),
            ));
        }
        // Per-opcode verification dispatch.
        match &instruction.op {
            InstOp::AsmBlock(..) => (),
            InstOp::BitCast(value, ty) => self.verify_bitcast(value, ty)?,
            InstOp::UnaryOp { op, arg } => self.verify_unary_op(op, arg)?,
            InstOp::BinaryOp { op, arg1, arg2 } => self.verify_binary_op(op, arg1, arg2)?,
            InstOp::Branch(block) => self.verify_br(block)?,
            InstOp::Call(func, args) => self.verify_call(func, args)?,
            InstOp::CastPtr(val, ty) => self.verify_cast_ptr(val, ty)?,
            InstOp::Cmp(pred, lhs_value, rhs_value) => {
                self.verify_cmp(pred, lhs_value, rhs_value)?
            }
            InstOp::ConditionalBranch {
                cond_value,
                true_block,
                false_block,
            } => self.verify_cbr(cond_value, true_block, false_block)?,
            InstOp::ContractCall {
                params,
                coins,
                asset_id,
                gas,
                ..
            } => self.verify_contract_call(params, coins, asset_id, gas)?,
            // XXX move the fuelvm verification into a module
            InstOp::FuelVm(fuel_vm_instr) => match fuel_vm_instr {
                FuelVmInstruction::Gtf { index, tx_field_id } => {
                    self.verify_gtf(index, tx_field_id)?
                }
                FuelVmInstruction::Log {
                    log_val,
                    log_ty,
                    log_id,
                    log_data,
                } => self.verify_log(log_val, log_ty, log_id, log_data)?,
                FuelVmInstruction::ReadRegister(_) => (),
                FuelVmInstruction::JmpMem => (),
                FuelVmInstruction::Revert(val) => self.verify_revert(val)?,
                FuelVmInstruction::Smo {
                    recipient,
                    message,
                    message_size,
                    coins,
                } => self.verify_smo(recipient, message, message_size, coins)?,
                FuelVmInstruction::StateClear {
                    key,
                    number_of_slots,
                } => self.verify_state_clear(key, number_of_slots)?,
                FuelVmInstruction::StateLoadWord(key) => self.verify_state_load_word(key)?,
                // Quad-word load and store share the same access checks.
                FuelVmInstruction::StateLoadQuadWord {
                    load_val: dst_val,
                    key,
                    number_of_slots,
                }
                | FuelVmInstruction::StateStoreQuadWord {
                    stored_val: dst_val,
                    key,
                    number_of_slots,
                } => self.verify_state_access_quad(dst_val, key, number_of_slots)?,
                FuelVmInstruction::StateStoreWord {
                    stored_val: dst_val,
                    key,
                } => self.verify_state_store_word(dst_val, key)?,
                FuelVmInstruction::WideUnaryOp { op, result, arg } => {
                    self.verify_wide_unary_op(op, result, arg)?
                }
                FuelVmInstruction::WideBinaryOp {
                    op,
                    result,
                    arg1,
                    arg2,
                } => self.verify_wide_binary_op(op, result, arg1, arg2)?,
                FuelVmInstruction::WideModularOp {
                    op,
                    result,
                    arg1,
                    arg2,
                    arg3,
                } => self.verify_wide_modular_op(op, result, arg1, arg2, arg3)?,
                FuelVmInstruction::WideCmpOp { op, arg1, arg2 } => {
                    self.verify_wide_cmp(op, arg1, arg2)?
                }
                FuelVmInstruction::Retd { .. } => (),
            },
            InstOp::GetElemPtr {
                base,
                elem_ptr_ty,
                indices,
            } => self.verify_get_elem_ptr(&ins, base, elem_ptr_ty, indices)?,
            InstOp::GetLocal(local_var) => self.verify_get_local(local_var)?,
            InstOp::GetGlobal(global_var) => self.verify_get_global(global_var)?,
            InstOp::GetConfig(_, name) => self.verify_get_config(self.cur_module, name)?,
            InstOp::GetStorageKey(storage_key) => self.verify_get_storage_key(storage_key)?,
            InstOp::IntToPtr(value, ty) => self.verify_int_to_ptr(value, ty)?,
            InstOp::Load(ptr) => self.verify_load(ptr)?,
            InstOp::Alloc { ty, count } => self.verify_alloc(ty, count)?,
            InstOp::MemCopyBytes {
                dst_val_ptr,
                src_val_ptr,
                byte_len,
            } => self.verify_mem_copy_bytes(dst_val_ptr, src_val_ptr, byte_len)?,
            InstOp::MemCopyVal {
                dst_val_ptr,
                src_val_ptr,
            } => self.verify_mem_copy_val(dst_val_ptr, src_val_ptr)?,
            InstOp::MemClearVal { dst_val_ptr } => self.verify_mem_clear_val(dst_val_ptr)?,
            InstOp::Nop => (),
            InstOp::PtrToInt(val, ty) => self.verify_ptr_to_int(val, ty)?,
            InstOp::Ret(val, ty) => self.verify_ret(val, ty)?,
            InstOp::Store {
                dst_val_ptr,
                stored_val,
            } => self.verify_store(&ins, dst_val_ptr, stored_val)?,
        };
        // Verify the instruction metadata too.
        self.context.verify_metadata(value_content.metadata)?;
    }
    Ok(())
}
/// Checks that a `bitcast` goes between types no wider than a word.
fn verify_bitcast(&self, value: &Value, ty: &Type) -> Result<(), IrError> {
    // The bitsize of bools and unit is 1 which obviously won't match a typical uint. LLVM
    // would use `trunc` or `zext` to make types match sizes before casting. Until we have
    // similar we'll just make sure the sizes are <= 64 bits.
    let val_ty = value
        .get_type(self.context)
        .ok_or(IrError::VerifyBitcastUnknownSourceType)?;
    let too_wide = |t: &Type| self.type_bit_size(t).is_some_and(|sz| sz > 64);
    if too_wide(&val_ty) || too_wide(ty) {
        return Err(IrError::VerifyBitcastBetweenInvalidTypes(
            val_ty.as_string(self.context),
            ty.as_string(self.context),
        ));
    }
    Ok(())
}
/// Checks the operand type of a unary operation.
fn verify_unary_op(&self, op: &UnaryOpKind, arg: &Value) -> Result<(), IrError> {
    let arg_ty = arg
        .get_type(self.context)
        .ok_or(IrError::VerifyUnaryOpIncorrectArgType)?;
    match op {
        UnaryOpKind::Not => {
            // `not` is only defined for unsigned integers and `b256`.
            if arg_ty.is_uint(self.context) || arg_ty.is_b256(self.context) {
                Ok(())
            } else {
                Err(IrError::VerifyUnaryOpIncorrectArgType)
            }
        }
    }
}
/// Checks that both operands of a wide comparison are pointers
/// (wide values are always passed indirectly).
fn verify_wide_cmp(&self, _: &Predicate, arg1: &Value, arg2: &Value) -> Result<(), IrError> {
    let is_ptr_operand = |val: &Value| {
        val.get_type(self.context)
            .is_some_and(|ty| ty.is_ptr(self.context))
    };
    if is_ptr_operand(arg1) && is_ptr_operand(arg2) {
        Ok(())
    } else {
        Err(IrError::VerifyBinaryOpIncorrectArgType)
    }
}
/// Checks that the result and all three operands of a wide modular
/// operation are pointers (wide values are always passed indirectly).
fn verify_wide_modular_op(
    &self,
    _op: &BinaryOpKind,
    result: &Value,
    arg1: &Value,
    arg2: &Value,
    arg3: &Value,
) -> Result<(), IrError> {
    for val in [result, arg1, arg2, arg3] {
        let ty = val
            .get_type(self.context)
            .ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
        if !ty.is_ptr(self.context) {
            return Err(IrError::VerifyBinaryOpIncorrectArgType);
        }
    }
    Ok(())
}
/// Checks operand/result types of a wide binary operation: everything is
/// passed by pointer, except that a shift amount is an immediate `u64`.
fn verify_wide_binary_op(
    &self,
    op: &BinaryOpKind,
    result: &Value,
    arg1: &Value,
    arg2: &Value,
) -> Result<(), IrError> {
    let result_ty = result
        .get_type(self.context)
        .ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
    let arg1_ty = arg1
        .get_type(self.context)
        .ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
    let arg2_ty = arg2
        .get_type(self.context)
        .ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
    let args_ok = match op {
        // Shifts rhs are 64 bits
        BinaryOpKind::Lsh | BinaryOpKind::Rsh => {
            arg1_ty.is_ptr(self.context)
                && arg2_ty.is_uint64(self.context)
                && result_ty.is_ptr(self.context)
        }
        BinaryOpKind::Add
        | BinaryOpKind::Sub
        | BinaryOpKind::Mul
        | BinaryOpKind::Div
        | BinaryOpKind::And
        | BinaryOpKind::Or
        | BinaryOpKind::Xor
        | BinaryOpKind::Mod => {
            arg1_ty.is_ptr(self.context)
                && arg2_ty.is_ptr(self.context)
                && result_ty.is_ptr(self.context)
        }
    };
    if args_ok {
        Ok(())
    } else {
        Err(IrError::VerifyBinaryOpIncorrectArgType)
    }
}
fn verify_wide_unary_op(
&self,
_op: &UnaryOpKind,
result: &Value,
arg: &Value,
) -> Result<(), IrError> {
let result_ty = result
.get_type(self.context)
.ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
let arg_ty = arg
.get_type(self.context)
.ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
if !arg_ty.is_ptr(self.context) || !result_ty.is_ptr(self.context) {
return Err(IrError::VerifyBinaryOpIncorrectArgType);
}
Ok(())
}
fn verify_binary_op(
&self,
op: &BinaryOpKind,
arg1: &Value,
arg2: &Value,
) -> Result<(), IrError> {
let arg1_ty = arg1
.get_type(self.context)
.ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
let arg2_ty = arg2
.get_type(self.context)
.ok_or(IrError::VerifyBinaryOpIncorrectArgType)?;
match op {
// Shifts can have the rhs with different type
BinaryOpKind::Lsh | BinaryOpKind::Rsh => {
let is_lhs_ok = arg1_ty.is_uint(self.context) || arg1_ty.is_b256(self.context);
if !is_lhs_ok || !arg2_ty.is_uint(self.context) {
return Err(IrError::VerifyBinaryOpIncorrectArgType);
}
}
BinaryOpKind::Add | BinaryOpKind::Sub => {
if !(arg1_ty.eq(self.context, &arg2_ty) && arg1_ty.is_uint(self.context)
|| arg1_ty.is_ptr(self.context) && arg2_ty.is_uint64(self.context))
{
return Err(IrError::VerifyBinaryOpIncorrectArgType);
}
}
BinaryOpKind::Mul | BinaryOpKind::Div | BinaryOpKind::Mod => {
if !arg1_ty.eq(self.context, &arg2_ty) || !arg1_ty.is_uint(self.context) {
return Err(IrError::VerifyBinaryOpIncorrectArgType);
}
}
BinaryOpKind::And | BinaryOpKind::Or | BinaryOpKind::Xor => {
if !arg1_ty.eq(self.context, &arg2_ty)
|| !(arg1_ty.is_uint(self.context) || arg1_ty.is_b256(self.context))
{
return Err(IrError::VerifyBinaryOpIncorrectArgType);
}
}
}
Ok(())
}
fn verify_br(&self, dest_block: &BranchToWithArgs) -> Result<(), IrError> {
if !self
.cur_function
.block_iter(self.context)
.contains(&dest_block.block)
{
Err(IrError::VerifyBranchToMissingBlock(
self.context.blocks[dest_block.block.0].label.clone(),
))
} else {
self.verify_dest_args(dest_block)
}
}
fn verify_call(&self, callee: &Function, args: &[Value]) -> Result<(), IrError> {
let callee_content = &self.context.functions[callee.0];
if !self.cur_module.function_iter(self.context).contains(callee) {
return Err(IrError::VerifyCallToMissingFunction(
callee_content.name.clone(),
));
}
let callee_arg_types = callee_content
.arguments
.iter()
.map(|(_, arg_val)| {
if let ValueDatum::Argument(BlockArgument { ty, .. }) =
&self.context.values[arg_val.0].value
{
Ok(*ty)
} else {
Err(IrError::VerifyArgumentValueIsNotArgument(
callee_content.name.clone(),
))
}
})
.collect::<Result<Vec<Type>, IrError>>()?;
for (opt_caller_arg_type, callee_arg_type) in args
.iter()
.map(|val| val.get_type(self.context))
.zip(callee_arg_types.iter())
{
if opt_caller_arg_type.is_none() {
return Err(IrError::VerifyUntypedValuePassedToFunction);
}
let caller_arg_type = opt_caller_arg_type.as_ref().unwrap();
if !caller_arg_type.eq(self.context, callee_arg_type) {
return Err(IrError::VerifyCallArgTypeMismatch(
callee_content.name.clone(),
caller_arg_type.as_string(self.context),
callee_arg_type.as_string(self.context),
));
}
}
Ok(())
}
fn verify_cast_ptr(&self, val: &Value, ty: &Type) -> Result<(), IrError> {
if !(val
.get_type(self.context)
.is_some_and(|ty| ty.is_ptr(self.context)))
{
let ty = val
.get_type(self.context)
.map(|ty| ty.as_string(self.context))
.unwrap_or("Unknown".into());
return Err(IrError::VerifyPtrCastFromNonPointer(ty));
}
if !ty.is_ptr(self.context) {
Err(IrError::VerifyPtrCastToNonPointer(
ty.as_string(self.context),
))
} else {
Ok(())
}
}
fn verify_dest_args(&self, dest: &BranchToWithArgs) -> Result<(), IrError> {
if dest.block.num_args(self.context) != dest.args.len() {
return Err(IrError::VerifyBranchParamsMismatch);
}
for (arg_idx, dest_param) in dest.block.arg_iter(self.context).enumerate() {
match dest.args.get(arg_idx) {
Some(actual)
if dest_param
.get_type(self.context)
.unwrap()
.eq(self.context, &actual.get_type(self.context).unwrap()) => {}
_ =>
// TODO: https://github.com/FuelLabs/sway/pull/2880
{
// return Err(IrError::VerifyBranchParamsMismatch)
}
}
}
Ok(())
}
fn verify_cbr(
&self,
cond_val: &Value,
true_block: &BranchToWithArgs,
false_block: &BranchToWithArgs,
) -> Result<(), IrError> {
if !cond_val
.get_type(self.context)
.is(Type::is_bool, self.context)
{
Err(IrError::VerifyConditionExprNotABool)
} else if !self
.cur_function
.block_iter(self.context)
.contains(&true_block.block)
{
Err(IrError::VerifyBranchToMissingBlock(
self.context.blocks[true_block.block.0].label.clone(),
))
} else if !self
.cur_function
.block_iter(self.context)
.contains(&false_block.block)
{
Err(IrError::VerifyBranchToMissingBlock(
self.context.blocks[false_block.block.0].label.clone(),
))
} else {
self.verify_dest_args(true_block)
.and_then(|()| self.verify_dest_args(false_block))
}
}
fn verify_cmp(
&self,
_pred: &Predicate,
lhs_value: &Value,
rhs_value: &Value,
) -> Result<(), IrError> {
// Comparisons must be between integers or equivalent pointers at this stage.
match (
lhs_value.get_type(self.context),
rhs_value.get_type(self.context),
) {
(Some(lhs_ty), Some(rhs_ty)) => {
if !lhs_ty.eq(self.context, &rhs_ty) {
Err(IrError::VerifyCmpTypeMismatch(
lhs_ty.as_string(self.context),
rhs_ty.as_string(self.context),
))
} else if lhs_ty.is_bool(self.context)
|| lhs_ty.is_uint(self.context)
|| lhs_ty.is_ptr(self.context)
|| lhs_ty.is_b256(self.context)
{
Ok(())
} else {
Err(IrError::VerifyCmpBadTypes(
lhs_ty.as_string(self.context),
rhs_ty.as_string(self.context),
))
}
}
_otherwise => Err(IrError::VerifyCmpUnknownTypes),
}
}
fn verify_contract_call(
&self,
params: &Value,
coins: &Value,
asset_id: &Value,
gas: &Value,
) -> Result<(), IrError> {
if !self.context.experimental.new_encoding {
// - The params must be a struct with the B256 address, u64 selector and u64 address to
// user args.
// - The coins and gas must be u64s.
// - The asset_id must be a B256
let fields = params
.get_type(self.context)
.and_then(|ty| ty.get_pointee_type(self.context))
.map_or_else(std::vec::Vec::new, |ty| ty.get_field_types(self.context));
if fields.len() != 3
|| !fields[0].is_b256(self.context)
|| !fields[1].is_uint64(self.context)
|| !fields[2].is_uint64(self.context)
{
Err(IrError::VerifyContractCallBadTypes("params".to_owned()))
} else {
Ok(())
}
.and_then(|_| {
if coins
.get_type(self.context)
.is(Type::is_uint64, self.context)
{
Ok(())
} else {
Err(IrError::VerifyContractCallBadTypes("coins".to_owned()))
}
})
.and_then(|_| {
if asset_id
.get_type(self.context)
.and_then(|ty| ty.get_pointee_type(self.context))
.is(Type::is_b256, self.context)
{
Ok(())
} else {
Err(IrError::VerifyContractCallBadTypes("asset_id".to_owned()))
}
})
.and_then(|_| {
if gas.get_type(self.context).is(Type::is_uint64, self.context) {
Ok(())
} else {
Err(IrError::VerifyContractCallBadTypes("gas".to_owned()))
}
})
} else {
Ok(())
}
}
fn verify_get_elem_ptr(
&self,
ins: &Value,
base: &Value,
elem_ptr_ty: &Type,
indices: &[Value],
) -> Result<(), IrError> {
let base_ty =
self.get_ptr_type(base, |s| IrError::VerifyGepFromNonPointer(s, Some(*ins)))?;
if !base_ty.is_aggregate(self.context) {
return Err(IrError::VerifyGepOnNonAggregate);
}
let Some(elem_inner_ty) = elem_ptr_ty.get_pointee_type(self.context) else {
return Err(IrError::VerifyGepElementTypeNonPointer);
};
if indices.is_empty() {
return Err(IrError::VerifyGepInconsistentTypes(
"Empty Indices".into(),
Some(*base),
));
}
let index_ty = base_ty.get_value_indexed_type(self.context, indices);
if self.opt_ty_not_eq(&Some(elem_inner_ty), &index_ty) {
return Err(IrError::VerifyGepInconsistentTypes(
format!(
"Element type \"{}\" versus index type {:?}",
elem_inner_ty.as_string(self.context),
index_ty.map(|x| x.as_string(self.context))
),
Some(*ins),
));
}
Ok(())
}
fn verify_get_local(&self, local_var: &LocalVar) -> Result<(), IrError> {
if !self.context.functions[self.cur_function.0]
.local_storage
.values()
.any(|var| var == local_var)
{
Err(IrError::VerifyGetNonExistentLocalVarPointer)
} else {
Ok(())
}
}
fn verify_get_global(&self, global_var: &GlobalVar) -> Result<(), IrError> {
if !self.context.modules[self.cur_module.0]
.global_variables
.values()
.any(|var| var == global_var)
{
Err(IrError::VerifyGetNonExistentGlobalVarPointer)
} else {
Ok(())
}
}
fn verify_get_config(&self, module: Module, name: &str) -> Result<(), IrError> {
if !self.context.modules[module.0].configs.contains_key(name) {
Err(IrError::VerifyGetNonExistentConfigPointer)
} else {
Ok(())
}
}
fn verify_get_storage_key(&self, storage_key: &StorageKey) -> Result<(), IrError> {
if !self.context.modules[self.cur_module.0]
.storage_keys
.values()
.any(|key| key == storage_key)
{
Err(IrError::VerifyGetNonExistentStorageKeyPointer)
} else {
Ok(())
}
}
fn verify_gtf(&self, index: &Value, _tx_field_id: &u64) -> Result<(), IrError> {
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/constant.rs | sway-ir/src/constant.rs | //! [`Constant`] is a typed constant value.
use std::hash::{Hash, Hasher};
use crate::{context::Context, irtype::Type, pretty::DebugWithContext, value::Value, Padding};
use rustc_hash::FxHasher;
use sway_types::u256::U256;
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, DebugWithContext)]
pub struct Constant(#[in_context(values)] pub slotmap::DefaultKey);
impl Constant {
/// Get or create a unique constant with given contents.
pub fn unique(context: &mut Context, constant: ConstantContent) -> Constant {
let mut hasher = FxHasher::default();
constant.hash(&mut hasher);
let hash = hasher.finish();
// Insert a new entry if it doesn't exist.
context.constants_map.entry(hash).or_default();
let constants = context.constants_map.get(&hash).unwrap();
// If the constant already exists, return it.
for c in constants.iter() {
if context.constants.get(c.0).unwrap().eq(context, &constant) {
return *c;
}
}
let constant = Constant(context.constants.insert(constant));
// Re-borrow the constants map (mutably this time) to insert the new constant.
let constants = context.constants_map.get_mut(&hash).unwrap();
constants.push(constant);
constant
}
/// Get the contents of a unique constant
pub fn get_content<'a>(&self, context: &'a Context) -> &'a ConstantContent {
context
.constants
.get(self.0)
.expect("Constants are global immutable data, they must live through the context")
}
}
/// A [`Type`] and constant value, including [`ConstantValue::Undef`] for uninitialized constants.
#[derive(Debug, Clone, DebugWithContext, Hash)]
pub struct ConstantContent {
pub ty: Type,
pub value: ConstantValue,
}
pub type B256 = U256;
/// A constant representation of each of the supported [`Type`]s.
#[derive(Debug, Clone, DebugWithContext, Hash)]
pub enum ConstantValue {
Undef,
Unit,
Bool(bool),
Uint(u64),
U256(U256),
B256(B256),
String(Vec<u8>),
Array(Vec<ConstantContent>),
Slice(Vec<ConstantContent>),
Struct(Vec<ConstantContent>),
Reference(Box<ConstantContent>),
RawUntypedSlice(Vec<u8>),
}
/// A [Constant] with its required [Padding].
/// If the [Padding] is `None` the default [Padding] for the
/// [Constant] type is expected.
type ConstantWithPadding<'a> = (&'a ConstantContent, Option<Padding>);
impl ConstantContent {
pub fn new_unit(context: &Context) -> Self {
ConstantContent {
ty: Type::get_unit(context),
value: ConstantValue::Unit,
}
}
pub fn new_bool(context: &Context, b: bool) -> Self {
ConstantContent {
ty: Type::get_bool(context),
value: ConstantValue::Bool(b),
}
}
/// For numbers bigger than u64 see `new_uint256`.
pub fn new_uint(context: &mut Context, nbits: u16, n: u64) -> Self {
ConstantContent {
ty: Type::new_uint(context, nbits),
value: match nbits {
256 => ConstantValue::U256(n.into()),
_ => ConstantValue::Uint(n),
},
}
}
pub fn new_uint256(context: &mut Context, n: U256) -> Self {
ConstantContent {
ty: Type::new_uint(context, 256),
value: ConstantValue::U256(n),
}
}
pub fn new_b256(context: &Context, bytes: [u8; 32]) -> Self {
ConstantContent {
ty: Type::get_b256(context),
value: ConstantValue::B256(B256::from_be_bytes(&bytes)),
}
}
pub fn new_string(context: &mut Context, string: Vec<u8>) -> Self {
ConstantContent {
ty: Type::new_string_array(context, string.len() as u64),
value: ConstantValue::String(string),
}
}
pub fn new_untyped_slice(context: &mut Context, bytes: Vec<u8>) -> Self {
ConstantContent {
ty: Type::new_untyped_slice(context),
value: ConstantValue::RawUntypedSlice(bytes),
}
}
pub fn new_array(context: &mut Context, elm_ty: Type, elems: Vec<ConstantContent>) -> Self {
ConstantContent {
ty: Type::new_array(context, elm_ty, elems.len() as u64),
value: ConstantValue::Array(elems),
}
}
pub fn new_struct(
context: &mut Context,
field_tys: Vec<Type>,
fields: Vec<ConstantContent>,
) -> Self {
ConstantContent {
ty: Type::new_struct(context, field_tys),
value: ConstantValue::Struct(fields),
}
}
pub fn get_undef(ty: Type) -> Self {
ConstantContent {
ty,
value: ConstantValue::Undef,
}
}
pub fn get_unit(context: &mut Context) -> Value {
let new_const_contents = ConstantContent::new_unit(context);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
pub fn get_bool(context: &mut Context, value: bool) -> Value {
let new_const_contents = ConstantContent::new_bool(context, value);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
pub fn get_uint(context: &mut Context, nbits: u16, value: u64) -> Value {
let new_const_contents = ConstantContent::new_uint(context, nbits, value);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
pub fn get_uint256(context: &mut Context, value: U256) -> Value {
let new_const_contents = ConstantContent::new_uint256(context, value);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
pub fn get_b256(context: &mut Context, value: [u8; 32]) -> Value {
let new_const_contents = ConstantContent::new_b256(context, value);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
pub fn get_string(context: &mut Context, value: Vec<u8>) -> Value {
let new_const_contents = ConstantContent::new_string(context, value);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
pub fn get_untyped_slice(context: &mut Context, value: Vec<u8>) -> Value {
let new_const_contents = ConstantContent::new_untyped_slice(context, value);
let new_const = Constant::unique(context, new_const_contents);
Value::new_constant(context, new_const)
}
/// `value` must be created as an array constant first, using [`Constant::new_array()`].
pub fn get_array(context: &mut Context, value: ConstantContent) -> Value {
assert!(value.ty.is_array(context));
let new_const = Constant::unique(context, value);
Value::new_constant(context, new_const)
}
/// `value` must be created as a struct constant first, using [`Constant::new_struct()`].
pub fn get_struct(context: &mut Context, value: ConstantContent) -> Value {
assert!(value.ty.is_struct(context));
let new_const = Constant::unique(context, value);
Value::new_constant(context, new_const)
}
/// Returns the tag and the value of an enum constant if `self` is an enum constant,
/// otherwise `None`.
fn extract_enum_tag_and_value(
&self,
context: &Context,
) -> Option<(&ConstantContent, &ConstantContent)> {
if !self.ty.is_enum(context) {
return None;
}
let elems = match &self.value {
ConstantValue::Struct(elems) if elems.len() == 2 => elems,
_ => return None, // This should never be the case. If we have an enum, it is a struct with exactly two elements.
};
Some((&elems[0], &elems[1]))
}
/// Returns enum tag and value as [Constant]s, together with their [Padding]s,
/// if `self` is an enum [Constant], otherwise `None`.
pub fn enum_tag_and_value_with_paddings(
&self,
context: &Context,
) -> Option<(ConstantWithPadding, ConstantWithPadding)> {
if !self.ty.is_enum(context) {
return None;
}
let tag_and_value_with_paddings = self
.elements_of_aggregate_with_padding(context)
.expect("Enums are aggregates.");
debug_assert!(tag_and_value_with_paddings.len() == 2, "In case of enums, `elements_of_aggregate_with_padding` must return exactly two elements, the tag and the value.");
let tag = tag_and_value_with_paddings[0].clone();
let value = tag_and_value_with_paddings[1].clone();
Some((tag, value))
}
/// Returns elements of an array with the expected padding for each array element
/// if `self` is an array [Constant], otherwise `None`.
pub fn array_elements_with_padding(
&self,
context: &Context,
) -> Option<Vec<ConstantWithPadding>> {
if !self.ty.is_array(context) {
return None;
}
self.elements_of_aggregate_with_padding(context)
}
/// Returns fields of a struct with the expected padding for each field
/// if `self` is a struct [Constant], otherwise `None`.
pub fn struct_fields_with_padding(
&self,
context: &Context,
) -> Option<Vec<ConstantWithPadding>> {
if !self.ty.is_struct(context) {
return None;
}
self.elements_of_aggregate_with_padding(context)
}
/// Returns elements of an aggregate constant with the expected padding for each element
/// if `self` is an aggregate (struct, enum, or array), otherwise `None`.
/// If the returned [Padding] is `None` the default [Padding] for the type
/// is expected.
/// If the aggregate constant is an enum, the returned [Vec] has exactly two elements,
/// the first being the tag and the second the value of the enum variant.
fn elements_of_aggregate_with_padding(
&self,
context: &Context,
) -> Option<Vec<(&ConstantContent, Option<Padding>)>> {
// We need a special handling in case of enums.
if let Some((tag, value)) = self.extract_enum_tag_and_value(context) {
let tag_with_padding = (tag, None);
// Enum variants are left padded to the word boundary, and the size
// of each variant is the size of the union.
// We know we have an enum here, means exactly two fields in the struct
// second of which is the union.
let target_size = self.ty.get_field_types(context)[1]
.size(context)
.in_bytes_aligned() as usize;
let value_with_padding = (value, Some(Padding::Left { target_size }));
return Some(vec![tag_with_padding, value_with_padding]);
}
match &self.value {
// Individual array elements do not have additional padding.
ConstantValue::Array(elems) => Some(elems.iter().map(|el| (el, None)).collect()),
// Each struct field is right padded to the word boundary.
ConstantValue::Struct(elems) => Some(
elems
.iter()
.map(|el| {
let target_size = el.ty.size(context).in_bytes_aligned() as usize;
(el, Some(Padding::Right { target_size }))
})
.collect(),
),
_ => None,
}
}
/// Compare two Constant values. Can't impl PartialOrder because of context.
pub fn eq(&self, context: &Context, other: &Self) -> bool {
self.ty.eq(context, &other.ty)
&& match (&self.value, &other.value) {
// Two Undefs are *NOT* equal (PartialEq allows this).
(ConstantValue::Undef, _) | (_, ConstantValue::Undef) => false,
(ConstantValue::Unit, ConstantValue::Unit) => true,
(ConstantValue::Bool(l0), ConstantValue::Bool(r0)) => l0 == r0,
(ConstantValue::Uint(l0), ConstantValue::Uint(r0)) => l0 == r0,
(ConstantValue::U256(l0), ConstantValue::U256(r0)) => l0 == r0,
(ConstantValue::B256(l0), ConstantValue::B256(r0)) => l0 == r0,
(ConstantValue::String(l0), ConstantValue::String(r0)) => l0 == r0,
(ConstantValue::Array(l0), ConstantValue::Array(r0))
| (ConstantValue::Struct(l0), ConstantValue::Struct(r0)) => {
l0.iter().zip(r0.iter()).all(|(l0, r0)| l0.eq(context, r0))
}
_ => false,
}
}
pub fn as_uint(&self) -> Option<u64> {
match &self.value {
ConstantValue::Uint(v) => Some(*v),
_ => None,
}
}
pub fn as_bool(&self) -> Option<bool> {
match &self.value {
ConstantValue::Bool(v) => Some(*v),
_ => None,
}
}
pub fn as_u256(&self) -> Option<U256> {
match &self.value {
ConstantValue::U256(v) => Some(v.clone()),
_ => None,
}
}
pub fn as_b256(&self) -> Option<B256> {
match &self.value {
ConstantValue::B256(v) => Some(v.clone()),
_ => None,
}
}
pub fn as_string(&self) -> Option<String> {
match &self.value {
ConstantValue::String(v) => Some(
String::from_utf8(v.clone())
.expect("compilation ensures that the string slice is a valid UTF-8 sequence"),
),
_ => None,
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/context.rs | sway-ir/src/context.rs | //! The main handle to an IR instance.
//!
//! [`Context`] contains several
//! [slotmap](https://github.com/orlp/slotmap) collections to maintain the
//! IR ECS.
//!
//! It is passed around as a mutable reference to many of the Sway-IR APIs.
use rustc_hash::FxHashMap;
use slotmap::{DefaultKey, SlotMap};
use sway_features::ExperimentalFeatures;
use sway_types::SourceEngine;
use crate::{
block::BlockContent,
function::FunctionContent,
metadata::Metadatum,
module::{Kind, ModuleContent, ModuleIterator},
value::ValueContent,
variable::LocalVarContent,
Constant, ConstantContent, GlobalVarContent, StorageKeyContent, Type, TypeContent,
};
// Copy of `sway_core::build_config::Backtrace`, which cannot
// be used here directly due to circular dependency.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum Backtrace {
All,
#[default]
AllExceptNever,
OnlyAlways,
None,
}
/// The main IR context handle.
///
/// Every module, function, block and value is stored here. Some aggregate metadata is also
/// managed by the context.
pub struct Context<'eng> {
pub source_engine: &'eng SourceEngine,
pub(crate) modules: SlotMap<DefaultKey, ModuleContent>,
pub(crate) functions: SlotMap<DefaultKey, FunctionContent>,
pub(crate) blocks: SlotMap<DefaultKey, BlockContent>,
pub(crate) values: SlotMap<DefaultKey, ValueContent>,
pub(crate) local_vars: SlotMap<DefaultKey, LocalVarContent>,
pub(crate) global_vars: SlotMap<DefaultKey, GlobalVarContent>,
pub(crate) storage_keys: SlotMap<DefaultKey, StorageKeyContent>,
pub(crate) types: SlotMap<DefaultKey, TypeContent>,
pub(crate) type_map: FxHashMap<TypeContent, Type>,
pub(crate) constants: SlotMap<DefaultKey, ConstantContent>,
// Maps the hash of a ConstantContent to the list of constants with that hash.
pub(crate) constants_map: FxHashMap<u64, Vec<Constant>>,
pub(crate) metadata: SlotMap<DefaultKey, Metadatum>,
pub program_kind: Kind,
pub experimental: ExperimentalFeatures,
pub backtrace: Backtrace,
next_unique_sym_tag: u64,
next_unique_panic_error_code: u64,
next_unique_panicking_call_id: u64,
}
impl<'eng> Context<'eng> {
pub fn new(
source_engine: &'eng SourceEngine,
experimental: ExperimentalFeatures,
backtrace: Backtrace,
) -> Self {
let mut def = Self {
source_engine,
modules: Default::default(),
functions: Default::default(),
blocks: Default::default(),
values: Default::default(),
local_vars: Default::default(),
global_vars: Default::default(),
storage_keys: Default::default(),
types: Default::default(),
type_map: Default::default(),
constants: Default::default(),
constants_map: Default::default(),
metadata: Default::default(),
next_unique_sym_tag: 0,
next_unique_panic_error_code: 0,
// The next unique panicking call ID starts at 1, as 0 is reserved
// for the "there was no panicking call" case.
next_unique_panicking_call_id: 1,
program_kind: Kind::Contract,
experimental,
backtrace,
};
Type::create_basic_types(&mut def);
def
}
pub fn source_engine(&self) -> &'eng SourceEngine {
self.source_engine
}
/// Return an iterator for every module in this context.
pub fn module_iter(&self) -> ModuleIterator {
ModuleIterator::new(self)
}
/// Get a globally unique symbol.
///
/// The name will be in the form `"anon_N"`, where `N` is an incrementing decimal.
pub fn get_unique_name(&mut self) -> String {
format!("anon_{}", self.get_unique_symbol_id())
}
/// Get a globally unique symbol id.
pub fn get_unique_symbol_id(&mut self) -> u64 {
let sym = self.next_unique_sym_tag;
self.next_unique_sym_tag += 1;
sym
}
/// Get the next, unique, panic error code.
pub fn get_unique_panic_error_code(&mut self) -> u64 {
let code = self.next_unique_panic_error_code;
self.next_unique_panic_error_code += 1;
code
}
/// Get the next, unique, panicking call ID.
pub fn get_unique_panicking_call_id(&mut self) -> u64 {
let id = self.next_unique_panicking_call_id;
self.next_unique_panicking_call_id += 1;
id
}
}
use std::fmt::{Display, Error, Formatter};
impl Display for Context<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(f, "{}", crate::printer::to_string(self))
}
}
impl From<Context<'_>> for String {
fn from(context: Context) -> Self {
crate::printer::to_string(&context)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/metadata.rs | sway-ir/src/metadata.rs | use sway_types::SourceId;
/// Associated metadata attached mostly to values.
///
/// Each value (instruction, function argument or constant) has associated metadata which helps
/// describe properties which aren't required for code generation, but help with other
/// introspective tools (e.g., the debugger) or compiler error messages.
///
/// The metadata themselves are opaque to `sway-ir` and are represented with simple value types;
/// integers, strings, symbols (tags) and lists.
use crate::context::Context;
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct MetadataIndex(pub slotmap::DefaultKey);
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum Metadatum {
Integer(u64),
Index(MetadataIndex),
String(String),
SourceId(SourceId),
Struct(String, Vec<Metadatum>),
List(Vec<MetadataIndex>),
}
/// Combine two metadata indices into one.
///
/// When multiple indices are attached to an IR value or function they must go in a list. It is
/// rare for `MetadataIndex` to exist outside of an `Option` though, so we may want to combine two
/// optional indices when we might end up with only one or the other, or maybe even None.
///
/// This function conveniently has all the logic to return the simplest combination of two
/// `Option<MetadataIndex>`s.
pub fn combine(
context: &mut Context,
md_idx_a: &Option<MetadataIndex>,
md_idx_b: &Option<MetadataIndex>,
) -> Option<MetadataIndex> {
match (md_idx_a, md_idx_b) {
(None, None) => None,
(Some(_), None) => *md_idx_a,
(None, Some(_)) => *md_idx_b,
(Some(idx_a), Some(idx_b)) => {
// Rather than potentially making lists of lists, if either are already list we can
// merge them together.
let mut new_list = Vec::new();
if let Metadatum::List(lst_a) = &context.metadata[idx_a.0] {
new_list.append(&mut lst_a.clone());
} else {
new_list.push(*idx_a);
}
if let Metadatum::List(lst_b) = &context.metadata[idx_b.0] {
new_list.append(&mut lst_b.clone());
} else {
new_list.push(*idx_b);
}
Some(MetadataIndex(
context.metadata.insert(Metadatum::List(new_list)),
))
}
}
}
impl MetadataIndex {
pub fn new_integer(context: &mut Context, int: u64) -> Self {
MetadataIndex(context.metadata.insert(Metadatum::Integer(int)))
}
pub fn new_index(context: &mut Context, idx: MetadataIndex) -> Self {
MetadataIndex(context.metadata.insert(Metadatum::Index(idx)))
}
pub fn new_source_id(context: &mut Context, id: SourceId) -> Self {
MetadataIndex(context.metadata.insert(Metadatum::SourceId(id)))
}
pub fn new_string<S: Into<String>>(context: &mut Context, s: S) -> Self {
MetadataIndex(context.metadata.insert(Metadatum::String(s.into())))
}
pub fn new_struct<S: Into<String>>(
context: &mut Context,
tag: S,
fields: Vec<Metadatum>,
) -> Self {
MetadataIndex(
context
.metadata
.insert(Metadatum::Struct(tag.into(), fields)),
)
}
pub fn new_list(context: &mut Context, els: Vec<MetadataIndex>) -> Self {
MetadataIndex(context.metadata.insert(Metadatum::List(els)))
}
pub fn get_content<'a>(&self, context: &'a Context) -> &'a Metadatum {
&context.metadata[self.0]
}
}
impl Metadatum {
pub fn unwrap_integer(&self) -> Option<u64> {
if let Metadatum::Integer(n) = self {
Some(*n)
} else {
None
}
}
pub fn unwrap_index(&self) -> Option<MetadataIndex> {
if let Metadatum::Index(idx) = self {
Some(*idx)
} else {
None
}
}
pub fn unwrap_string(&self) -> Option<&str> {
if let Metadatum::String(s) = self {
Some(s)
} else {
None
}
}
pub fn unwrap_source_id(&self) -> Option<&SourceId> {
if let Metadatum::SourceId(id) = self {
Some(id)
} else {
None
}
}
pub fn unwrap_struct<'a>(&'a self, tag: &str, num_fields: usize) -> Option<&'a [Metadatum]> {
match self {
Metadatum::Struct(t, fs) if t == tag && fs.len() == num_fields => Some(fs),
_otherwise => None,
}
}
pub fn unwrap_list(&self) -> Option<&[MetadataIndex]> {
if let Metadatum::List(els) = self {
Some(els)
} else {
None
}
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/pretty.rs | sway-ir/src/pretty.rs | pub use sway_ir_macros::*;
use {crate::Context, std::fmt};
pub struct WithContext<'a, 'c, 'eng, T: ?Sized> {
thing: &'a T,
context: &'c Context<'eng>,
}
pub trait DebugWithContext {
fn fmt_with_context(&self, formatter: &mut fmt::Formatter, context: &Context) -> fmt::Result;
fn with_context<'a, 'c, 'eng>(
&'a self,
context: &'c Context<'eng>,
) -> WithContext<'a, 'c, 'eng, Self> {
WithContext {
thing: self,
context,
}
}
}
impl<T> DebugWithContext for &T
where
T: fmt::Debug,
{
fn fmt_with_context(&self, formatter: &mut fmt::Formatter, _context: &Context) -> fmt::Result {
fmt::Debug::fmt(self, formatter)
}
}
impl<T> fmt::Debug for WithContext<'_, '_, '_, T>
where
T: DebugWithContext,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let WithContext { thing, context } = self;
(*thing).fmt_with_context(formatter, context)
}
}
impl<T> DebugWithContext for Vec<T>
where
T: DebugWithContext,
{
fn fmt_with_context(&self, formatter: &mut fmt::Formatter, context: &Context) -> fmt::Result {
formatter
.debug_list()
.entries(self.iter().map(|value| (*value).with_context(context)))
.finish()
}
}
impl<T> DebugWithContext for [T]
where
T: DebugWithContext,
{
fn fmt_with_context(&self, formatter: &mut fmt::Formatter, context: &Context) -> fmt::Result {
formatter
.debug_list()
.entries(self.iter().map(|value| (*value).with_context(context)))
.finish()
}
}
impl<T> DebugWithContext for Option<T>
where
T: DebugWithContext,
{
fn fmt_with_context(&self, formatter: &mut fmt::Formatter, context: &Context) -> fmt::Result {
match self {
Some(value) => formatter
.debug_tuple("Some")
.field(&(*value).with_context(context))
.finish(),
None => formatter.write_str("None"),
}
}
}
macro_rules! tuple_impl (
($($ty:ident,)*) => {
impl<$($ty,)*> DebugWithContext for ($($ty,)*)
where
$($ty: DebugWithContext,)*
{
#[allow(unused_mut)]
#[allow(unused_variables)]
#[allow(non_snake_case)]
fn fmt_with_context(&self, formatter: &mut fmt::Formatter, context: &Context) -> fmt::Result {
let ($($ty,)*) = self;
let mut debug_tuple = &mut formatter.debug_tuple("");
$(
debug_tuple = debug_tuple.field(&(*$ty).with_context(context));
)*
debug_tuple.finish()
}
}
};
);
macro_rules! tuple_impls (
() => {
tuple_impl!();
};
($head:ident, $($tail:ident,)*) => {
tuple_impls!($($tail,)*);
tuple_impl!($head, $($tail,)*);
};
);
tuple_impls!(T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,);
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/storage_key.rs | sway-ir/src/storage_key.rs | //! A value representing a storage key. Every storage field access in the program
//! corresponds to a [StorageKey].
use std::vec;
use crate::{
context::Context, irtype::Type, pretty::DebugWithContext, Constant, ConstantContent,
ConstantValue, B256,
};
/// A wrapper around an [ECS](https://github.com/orlp/slotmap) handle into the
/// [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, DebugWithContext)]
pub struct StorageKey(#[in_context(storage_keys)] pub slotmap::DefaultKey);
#[doc(hidden)]
#[derive(Clone, DebugWithContext)]
pub struct StorageKeyContent {
pub ptr_ty: Type,
pub key: Constant,
}
impl StorageKey {
pub fn new(context: &mut Context, slot: [u8; 32], offset: u64, field_id: [u8; 32]) -> Self {
// Construct `ptr { b256, u64, b256 }`.
let b256_ty = Type::get_b256(context);
let uint64_ty = Type::get_uint64(context);
let key_ty = Type::new_struct(context, vec![b256_ty, uint64_ty, b256_ty]);
let ptr_ty = Type::new_typed_pointer(context, key_ty);
let slot = ConstantContent::new_b256(context, slot);
let offset = ConstantContent::new_uint(context, 64, offset);
let field_id = ConstantContent::new_b256(context, field_id);
let key = ConstantContent::new_struct(
context,
vec![b256_ty, uint64_ty, b256_ty],
vec![slot, offset, field_id],
);
let key = Constant::unique(context, key);
let content = StorageKeyContent { ptr_ty, key };
StorageKey(context.storage_keys.insert(content))
}
/// Return the storage key type, which is always `ptr { b256, u64, b256 }`.
pub fn get_type(&self, context: &Context) -> Type {
context.storage_keys[self.0].ptr_ty
}
/// Return the storage key, which is a constant of type `{ b256, u64, b256 }`.
pub fn get_key(&self, context: &Context) -> Constant {
context.storage_keys[self.0].key
}
/// Return the three parts of this storage key: `(slot, offset, field_id)`.
pub fn get_parts<'a>(&self, context: &'a Context) -> (&'a B256, u64, &'a B256) {
let ConstantContent {
value: ConstantValue::Struct(fields),
..
} = &context.storage_keys[self.0].key.get_content(context)
else {
unreachable!("`StorageKey::key` constant content is a struct with three fields");
};
let ConstantContent {
value: ConstantValue::B256(slot),
..
} = &fields[0]
else {
unreachable!("storage key slot is a `B256` constant");
};
let ConstantContent {
value: ConstantValue::Uint(offset),
..
} = &fields[1]
else {
unreachable!("storage key offset is a `u64` constant");
};
let ConstantContent {
value: ConstantValue::B256(field_id),
..
} = &fields[2]
else {
unreachable!("storage key field_id is a `B256` constant");
};
(slot, *offset, field_id)
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/cse.rs | sway-ir/src/optimize/cse.rs | //! Value numbering based common subexpression elimination.
//! Reference: Value Driven Redundancy Elimination - Loren Taylor Simpson.
use core::panic;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use slotmap::Key;
use std::{
collections::hash_map,
fmt::Debug,
hash::{Hash, Hasher},
};
use crate::{
AnalysisResults, BinaryOpKind, Context, DebugWithContext, DomTree, Function, InstOp, IrError,
Pass, PassMutability, PostOrder, Predicate, ScopedPass, Type, UnaryOpKind, Value,
DOMINATORS_NAME, POSTORDER_NAME,
};
pub const CSE_NAME: &str = "cse";
pub fn create_cse_pass() -> Pass {
Pass {
name: CSE_NAME,
descr: "Common subexpression elimination",
runner: ScopedPass::FunctionPass(PassMutability::Transform(cse)),
deps: vec![POSTORDER_NAME, DOMINATORS_NAME],
}
}
#[derive(Clone, Copy, Eq, PartialEq, Hash, DebugWithContext)]
enum ValueNumber {
// Top of the lattice = Don't know = uninitialized
Top,
// Belongs to a congruence class represented by the inner value.
Number(Value),
}
impl Debug for ValueNumber {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Top => write!(f, "Top"),
Self::Number(arg0) => write!(f, "v{:?}", arg0.0.data()),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, DebugWithContext)]
enum Expr {
Phi(Vec<ValueNumber>),
UnaryOp {
op: UnaryOpKind,
arg: ValueNumber,
},
BinaryOp {
op: BinaryOpKind,
arg1: ValueNumber,
arg2: ValueNumber,
},
BitCast(ValueNumber, Type),
CastPtr(ValueNumber, Type),
Cmp(Predicate, ValueNumber, ValueNumber),
GetElemPtr {
base: ValueNumber,
elem_ptr_ty: Type,
indices: Vec<ValueNumber>,
},
IntToPtr(ValueNumber, Type),
PtrToInt(ValueNumber, Type),
}
/// Convert an instruction to an expression for hashing
/// Instructions that we don't handle will have their value numbers be equal to themselves.
fn instr_to_expr(context: &Context, vntable: &VNTable, instr: Value) -> Option<Expr> {
match &instr.get_instruction(context).unwrap().op {
InstOp::AsmBlock(_, _) => None,
InstOp::UnaryOp { op, arg } => Some(Expr::UnaryOp {
op: *op,
arg: vntable.value_map.get(arg).cloned().unwrap(),
}),
InstOp::BinaryOp { op, arg1, arg2 } => Some(Expr::BinaryOp {
op: *op,
arg1: vntable.value_map.get(arg1).cloned().unwrap(),
arg2: vntable.value_map.get(arg2).cloned().unwrap(),
}),
InstOp::BitCast(val, ty) => Some(Expr::BitCast(
vntable.value_map.get(val).cloned().unwrap(),
*ty,
)),
InstOp::Branch(_) => None,
InstOp::Call(_, _) => None,
InstOp::CastPtr(val, ty) => Some(Expr::CastPtr(
vntable.value_map.get(val).cloned().unwrap(),
*ty,
)),
InstOp::Cmp(pred, val1, val2) => Some(Expr::Cmp(
*pred,
vntable.value_map.get(val1).cloned().unwrap(),
vntable.value_map.get(val2).cloned().unwrap(),
)),
InstOp::ConditionalBranch { .. } => None,
InstOp::ContractCall { .. } => None,
InstOp::FuelVm(_) => None,
InstOp::GetLocal(_) => None,
InstOp::GetGlobal(_) => None,
InstOp::GetConfig(_, _) => None,
InstOp::GetStorageKey(_) => None,
InstOp::GetElemPtr {
base,
elem_ptr_ty,
indices,
} => Some(Expr::GetElemPtr {
base: vntable.value_map.get(base).cloned().unwrap(),
elem_ptr_ty: *elem_ptr_ty,
indices: indices
.iter()
.map(|idx| vntable.value_map.get(idx).cloned().unwrap())
.collect(),
}),
InstOp::IntToPtr(val, ty) => Some(Expr::IntToPtr(
vntable.value_map.get(val).cloned().unwrap(),
*ty,
)),
InstOp::Load(_) => None,
InstOp::Alloc { .. } => None,
InstOp::MemCopyBytes { .. } => None,
InstOp::MemCopyVal { .. } => None,
InstOp::MemClearVal { .. } => None,
InstOp::Nop => None,
InstOp::PtrToInt(val, ty) => Some(Expr::PtrToInt(
vntable.value_map.get(val).cloned().unwrap(),
*ty,
)),
InstOp::Ret(_, _) => None,
InstOp::Store { .. } => None,
}
}
/// Convert a PHI argument to Expr
fn phi_to_expr(context: &Context, vntable: &VNTable, phi_arg: Value) -> Expr {
let phi_arg = phi_arg.get_argument(context).unwrap();
let phi_args = phi_arg
.block
.pred_iter(context)
.map(|pred| {
let incoming_val = phi_arg
.get_val_coming_from(context, pred)
.expect("No parameter from predecessor");
vntable.value_map.get(&incoming_val).cloned().unwrap()
})
.collect();
Expr::Phi(phi_args)
}
#[derive(Default)]
struct VNTable {
value_map: FxHashMap<Value, ValueNumber>,
expr_map: FxHashMap<Expr, ValueNumber>,
}
impl Debug for VNTable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "value_map:")?;
self.value_map.iter().for_each(|(key, value)| {
if format!("v{:?}", key.0.data()) == "v620v3" {
writeln!(f, "\tv{:?} -> {:?}", key.0.data(), value).expect("writeln! failed");
}
});
Ok(())
}
}
/// Wrapper around [DomTree::dominates] to work at instruction level.
/// Does `inst1` dominate `inst2` ?
fn dominates(context: &Context, dom_tree: &DomTree, inst1: Value, inst2: Value) -> bool {
let block1 = match &context.values[inst1.0].value {
crate::ValueDatum::Argument(arg) => arg.block,
crate::ValueDatum::Constant(_) => {
panic!("Shouldn't be querying dominance info for constants")
}
crate::ValueDatum::Instruction(i) => i.parent,
};
let block2 = match &context.values[inst2.0].value {
crate::ValueDatum::Argument(arg) => arg.block,
crate::ValueDatum::Constant(_) => {
panic!("Shouldn't be querying dominance info for constants")
}
crate::ValueDatum::Instruction(i) => i.parent,
};
if block1 == block2 {
let inst1_idx = block1
.instruction_iter(context)
.position(|inst| inst == inst1)
// Not found indicates a block argument
.unwrap_or(0);
let inst2_idx = block1
.instruction_iter(context)
.position(|inst| inst == inst2)
// Not found indicates a block argument
.unwrap_or(0);
inst1_idx < inst2_idx
} else {
dom_tree.dominates(block1, block2)
}
}
pub fn cse(
context: &mut Context,
analyses: &AnalysisResults,
function: Function,
) -> Result<bool, IrError> {
let mut vntable = VNTable::default();
// Function arg values map to themselves.
for arg in function.args_iter(context) {
vntable.value_map.insert(arg.1, ValueNumber::Number(arg.1));
}
// Map all other arg values map to Top.
for block in function.block_iter(context).skip(1) {
for arg in block.arg_iter(context) {
vntable.value_map.insert(*arg, ValueNumber::Top);
}
}
// Initialize all instructions and constants. Constants need special treatment.
// They don't have PartialEq implemented. So we need to value number them manually.
// This map maps the hash of a constant value to all possible collisions of it.
let mut const_map = FxHashMap::<u64, Vec<Value>>::default();
for (_, inst) in function.instruction_iter(context) {
vntable.value_map.insert(inst, ValueNumber::Top);
for (const_opd_val, const_opd_const) in inst
.get_instruction(context)
.unwrap()
.op
.get_operands()
.iter()
.filter_map(|opd| opd.get_constant(context).map(|copd| (opd, copd)))
{
let mut state = FxHasher::default();
const_opd_const.hash(&mut state);
let hash = state.finish();
if let Some(existing_const) = const_map.get(&hash).and_then(|consts| {
consts.iter().find(|val| {
let c = val
.get_constant(context)
.expect("const_map can only contain consts");
const_opd_const == c
})
}) {
vntable
.value_map
.insert(*const_opd_val, ValueNumber::Number(*existing_const));
} else {
const_map
.entry(hash)
.and_modify(|consts| consts.push(*const_opd_val))
.or_insert_with(|| vec![*const_opd_val]);
vntable
.value_map
.insert(*const_opd_val, ValueNumber::Number(*const_opd_val));
}
}
}
// We need to iterate over the blocks in RPO.
let post_order: &PostOrder = analyses.get_analysis_result(function);
// RPO based value number (Sec 4.2).
let mut changed = true;
while changed {
changed = false;
// For each block in RPO:
for (block_idx, block) in post_order.po_to_block.iter().rev().enumerate() {
// Process PHIs and then the other instructions.
if block_idx != 0 {
// Entry block arguments are not PHIs.
for (phi, expr_opt) in block
.arg_iter(context)
.map(|arg| (*arg, Some(phi_to_expr(context, &vntable, *arg))))
.collect_vec()
{
let expr = expr_opt.expect("PHIs must always translate to a valid Expr");
// We first try to see if PHIs can be simplified into a single value.
let vn = {
let Expr::Phi(ref phi_args) = expr else {
panic!("Expr must be a PHI")
};
phi_args
.iter()
.map(|vn| Some(*vn))
.reduce(|vn1, vn2| {
// Here `None` indicates Bottom of the lattice.
if let (Some(vn1), Some(vn2)) = (vn1, vn2) {
match (vn1, vn2) {
(ValueNumber::Top, ValueNumber::Top) => {
Some(ValueNumber::Top)
}
(ValueNumber::Top, ValueNumber::Number(vn))
| (ValueNumber::Number(vn), ValueNumber::Top) => {
Some(ValueNumber::Number(vn))
}
(ValueNumber::Number(vn1), ValueNumber::Number(vn2)) => {
(vn1 == vn2).then_some(ValueNumber::Number(vn1))
}
}
} else {
None
}
})
.flatten()
// The PHI couldn't be simplified to a single ValueNumber.
.unwrap_or(ValueNumber::Number(phi))
};
match vntable.value_map.entry(phi) {
hash_map::Entry::Occupied(occ) if *occ.get() == vn => {}
_ => {
changed = true;
vntable.value_map.insert(phi, vn);
}
}
}
}
for (inst, expr_opt) in block
.instruction_iter(context)
.map(|instr| (instr, instr_to_expr(context, &vntable, instr)))
.collect_vec()
{
// lookup(expr, x)
let vn = if let Some(expr) = expr_opt {
match vntable.expr_map.entry(expr) {
hash_map::Entry::Occupied(occ) => *occ.get(),
hash_map::Entry::Vacant(vac) => *(vac.insert(ValueNumber::Number(inst))),
}
} else {
// Instructions that always map to their own value number
// (i.e., they can never be equal to some other instruction).
ValueNumber::Number(inst)
};
match vntable.value_map.entry(inst) {
hash_map::Entry::Occupied(occ) if *occ.get() == vn => {}
_ => {
changed = true;
vntable.value_map.insert(inst, vn);
}
}
}
}
vntable.expr_map.clear();
}
// create a partition of congruent (equal) values.
let mut partition = FxHashMap::<ValueNumber, FxHashSet<Value>>::default();
vntable.value_map.iter().for_each(|(v, vn)| {
// If v is a constant or its value number is itself, don't add to the partition.
// The latter condition is so that we have only > 1 sized partitions.
if v.is_constant(context)
|| matches!(vn, ValueNumber::Top)
|| matches!(vn, ValueNumber::Number(v2) if (v == v2 || v2.is_constant(context)))
{
return;
}
partition
.entry(*vn)
.and_modify(|part| {
part.insert(*v);
})
.or_insert(vec![*v].into_iter().collect());
});
// For convenience, now add back back `v` into `partition[VN[v]]` if it isn't already there.
partition.iter_mut().for_each(|(vn, v_part)| {
let ValueNumber::Number(v) = vn else {
panic!("We cannot have Top at this point");
};
v_part.insert(*v);
assert!(
v_part.len() > 1,
"We've only created partitions with size greater than 1"
);
});
// There are two ways to replace congruent values (see the paper cited, Sec 5).
// 1. Dominator based. If v1 and v2 are equal, v1 dominates v2, we just remove v2
// and replace its uses with v1. Simple, and what we're going to do.
// 2. AVAIL based. More powerful, but also requires a data-flow-analysis for AVAIL
// and later on, mem2reg again since replacements will need breaking SSA.
let dom_tree: &DomTree = analyses.get_analysis_result(function);
let mut replace_map = FxHashMap::<Value, Value>::default();
let mut modified = false;
// Check every set in the partition.
partition.iter().for_each(|(_leader, vals)| {
// Iterate over every pair of values, checking if one can replace the other.
for v_pair in vals.iter().combinations(2) {
let (v1, v2) = (*v_pair[0], *v_pair[1]);
if dominates(context, dom_tree, v1, v2) {
modified = true;
replace_map.insert(v2, v1);
} else if dominates(context, dom_tree, v2, v1) {
modified = true;
replace_map.insert(v1, v2);
}
}
});
function.replace_values(context, &replace_map, None);
Ok(modified)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/ret_demotion.rs | sway-ir/src/optimize/ret_demotion.rs | /// Return value demotion.
///
/// This pass demotes 'by-value' function return types to 'by-reference` pointer types, based on
/// target specific parameters.
///
/// An extra argument pointer is added to the function.
/// The return value is mem_copied to the new argument instead of being returned by value.
use crate::{
AnalysisResults, BlockArgument, ConstantContent, Context, Function, InstOp, Instruction,
InstructionInserter, IrError, Module, Pass, PassMutability, ScopedPass, Type, Value,
};
pub const RET_DEMOTION_NAME: &str = "ret-demotion";
pub fn create_ret_demotion_pass() -> Pass {
Pass {
name: RET_DEMOTION_NAME,
descr: "Demotion of by-value function return values to by-reference",
deps: Vec::new(),
runner: ScopedPass::ModulePass(PassMutability::Transform(ret_val_demotion)),
}
}
pub fn ret_val_demotion(
context: &mut Context,
_analyses: &AnalysisResults,
module: Module,
) -> Result<bool, IrError> {
// This is a module pass because we need to update all the callers of a function if we change
// its signature.
let mut changed = false;
for function in module.function_iter(context) {
// Reject non-candidate.
let ret_type = function.get_return_type(context);
if !super::target_fuel::is_demotable_type(context, &ret_type) {
// Return type fits in a register.
continue;
}
changed = true;
// Change the function signature.
let ptr_ret_type = Type::new_typed_pointer(context, ret_type);
let unit_ty = Type::get_unit(context);
// The storage for the return value must be determined. For entry-point functions it's a new
// local and otherwise it's an extra argument.
let entry_block = function.get_entry_block(context);
let ptr_arg_val = if function.is_entry(context) {
// Entry functions return a pointer to the original return type.
function.set_return_type(context, ptr_ret_type);
// Create a local variable to hold the return value.
let ret_var = function.new_unique_local_var(
context,
"__ret_value".to_owned(),
ret_type,
None,
false,
);
// Insert the return value pointer at the start of the entry block.
let get_ret_var =
Value::new_instruction(context, entry_block, InstOp::GetLocal(ret_var));
entry_block.prepend_instructions(context, vec![get_ret_var]);
get_ret_var
} else {
// non-entry functions now return unit.
function.set_return_type(context, unit_ty);
let ptr_arg_val = Value::new_argument(
context,
BlockArgument {
block: entry_block,
idx: function.num_args(context),
ty: ptr_ret_type,
is_immutable: false,
},
);
function.add_arg(context, "__ret_value", ptr_arg_val);
entry_block.add_arg(context, ptr_arg_val);
ptr_arg_val
};
// Gather the blocks which are returning.
let ret_blocks = function
.block_iter(context)
.filter_map(|block| {
block.get_terminator(context).and_then(|term| {
if let InstOp::Ret(ret_val, _ty) = term.op {
Some((block, ret_val))
} else {
None
}
})
})
.collect::<Vec<_>>();
// Update each `ret` to store the return value to the 'out' arg and then return the pointer.
for (ret_block, ret_val) in ret_blocks {
// This is a special case where we're replacing the terminator. We can just pop it off the
// end of the block and add new instructions.
let last_instr_pos = ret_block.num_instructions(context) - 1;
let orig_ret_val = ret_block.get_instruction_at(context, last_instr_pos);
ret_block.remove_instruction_at(context, last_instr_pos);
let md_idx = orig_ret_val.and_then(|val| val.get_metadata(context));
ret_block
.append(context)
.store(ptr_arg_val, ret_val)
.add_metadatum(context, md_idx);
if !function.is_entry(context) {
let unit_ret = ConstantContent::get_unit(context);
ret_block
.append(context)
.ret(unit_ret, unit_ty)
.add_metadatum(context, md_idx);
} else {
// Entry functions still return the pointer to the return value.
ret_block
.append(context)
.ret(ptr_arg_val, ptr_ret_type)
.add_metadatum(context, md_idx);
}
}
// If the function isn't an entry point we need to update all the callers to pass the extra
// argument.
if !function.is_entry(context) {
update_callers(context, function, ret_type);
}
}
Ok(changed)
}
fn update_callers(context: &mut Context, function: Function, ret_type: Type) {
// Now update all the callers to pass the return value argument. Find all the call sites for
// this function.
let call_sites = context
.module_iter()
.flat_map(|module| module.function_iter(context))
.flat_map(|ref call_from_func| {
call_from_func
.block_iter(context)
.flat_map(|ref block| {
block
.instruction_iter(context)
.filter_map(|instr_val| {
if let Instruction {
op: InstOp::Call(call_to_func, _),
..
} = instr_val
.get_instruction(context)
.expect("`instruction_iter()` must return instruction values.")
{
(*call_to_func == function).then_some((
*call_from_func,
*block,
instr_val,
))
} else {
None
}
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
// Create a local var to receive the return value for each call site. Replace the `call`
// instruction with a `get_local`, an updated `call` and a `load`.
for (calling_func, calling_block, call_val) in call_sites {
// First make a new local variable.
let loc_var = calling_func.new_unique_local_var(
context,
"__ret_val".to_owned(),
ret_type,
None,
false,
);
let get_loc_val = Value::new_instruction(context, calling_block, InstOp::GetLocal(loc_var));
// Next we need to copy the original `call` but add the extra arg.
let Some(Instruction {
op: InstOp::Call(_, args),
..
}) = call_val.get_instruction(context)
else {
unreachable!("`call_val` is definitely a call instruction.");
};
let mut new_args = args.clone();
new_args.push(get_loc_val);
let new_call_val =
Value::new_instruction(context, calling_block, InstOp::Call(function, new_args));
// And finally load the value from the new local var.
let load_val = Value::new_instruction(context, calling_block, InstOp::Load(get_loc_val));
calling_block
.replace_instruction(context, call_val, get_loc_val, false)
.unwrap();
let mut inserter = InstructionInserter::new(
context,
calling_block,
crate::InsertionPosition::After(get_loc_val),
);
inserter.insert_slice(&[new_call_val, load_val]);
// Replace the old call with the new load.
calling_func.replace_value(context, call_val, load_val, None);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/target_fuel.rs | sway-ir/src/optimize/target_fuel.rs | //! Some Fuel VM specific utilities.
use crate::{
context::Context,
irtype::{Type, TypeContent},
};
/// Return whether a [Type] _cannot_ fit in a Fuel VM register and requires 'demotion'.
pub(super) fn is_demotable_type(context: &Context, ty: &Type) -> bool {
match ty.get_content(context) {
TypeContent::Unit
| TypeContent::Bool
| TypeContent::TypedPointer(_)
| TypeContent::Pointer => false,
TypeContent::Uint(bits) => *bits > 64,
_ => true,
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/fn_dedup.rs | sway-ir/src/optimize/fn_dedup.rs | //! ## Deduplicate functions.
//!
//! If two functions are functionally identical, eliminate one
//! and replace all calls to it with a call to the retained one.
//!
//! This pass shouldn't be required once the monomorphiser stops
//! generating a new function for each instantiation even when the exact
//! same instantiation exists.
use std::hash::{Hash, Hasher};
use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use crate::{
build_call_graph, callee_first_order, AnalysisResults, Block, Context, Function, InstOp,
Instruction, IrError, MetadataIndex, Metadatum, Module, Pass, PassMutability, ScopedPass,
Value,
};
pub const FN_DEDUP_DEBUG_PROFILE_NAME: &str = "fn-dedup-debug";
pub const FN_DEDUP_RELEASE_PROFILE_NAME: &str = "fn-dedup-release";
pub fn create_fn_dedup_release_profile_pass() -> Pass {
Pass {
name: FN_DEDUP_RELEASE_PROFILE_NAME,
descr: "Function deduplication with metadata ignored",
deps: vec![],
runner: ScopedPass::ModulePass(PassMutability::Transform(dedup_fn_release_profile)),
}
}
pub fn create_fn_dedup_debug_profile_pass() -> Pass {
Pass {
name: FN_DEDUP_DEBUG_PROFILE_NAME,
descr: "Function deduplication with metadata considered",
deps: vec![],
runner: ScopedPass::ModulePass(PassMutability::Transform(dedup_fn_debug_profile)),
}
}
// Functions that are equivalent are put in the same set.
struct EqClass {
// Map a function hash to its equivalence class.
hash_set_map: FxHashMap<u64, FxHashSet<Function>>,
// Once we compute the hash of a function, it's noted here.
function_hash_map: FxHashMap<Function, u64>,
}
fn hash_fn(
context: &Context,
function: Function,
eq_class: &mut EqClass,
ignore_metadata: bool,
) -> u64 {
let state = &mut FxHasher::default();
// A unique, but only in this function, ID for values.
let localised_value_id: &mut FxHashMap<Value, u64> = &mut FxHashMap::default();
// A unique, but only in this function, ID for blocks.
let localised_block_id: &mut FxHashMap<Block, u64> = &mut FxHashMap::default();
// A unique, but only in this function, ID for MetadataIndex.
let metadata_hashes: &mut FxHashMap<MetadataIndex, u64> = &mut FxHashMap::default();
// TODO: We could do a similar localised ID'ing of local variable names
// and ASM block arguments too, thereby slightly relaxing the equality check.
fn get_localised_id<T: Eq + Hash>(t: T, map: &mut FxHashMap<T, u64>) -> u64 {
let cur_count = map.len();
*map.entry(t).or_insert(cur_count as u64)
}
fn hash_value(
context: &Context,
v: Value,
localised_value_id: &mut FxHashMap<Value, u64>,
metadata_hashes: &mut FxHashMap<MetadataIndex, u64>,
hasher: &mut FxHasher,
ignore_metadata: bool,
) {
let val = &context.values.get(v.0).unwrap().value;
std::mem::discriminant(val).hash(hasher);
match val {
crate::ValueDatum::Argument(_) | crate::ValueDatum::Instruction(_) => {
get_localised_id(v, localised_value_id).hash(hasher)
}
crate::ValueDatum::Constant(c) => c.hash(hasher),
}
if let Some(m) = &context.values.get(v.0).unwrap().metadata {
if !ignore_metadata {
hash_metadata(context, *m, metadata_hashes, hasher)
}
}
}
fn hash_metadata(
context: &Context,
m: MetadataIndex,
metadata_hashes: &mut FxHashMap<MetadataIndex, u64>,
hasher: &mut FxHasher,
) {
if let Some(hash) = metadata_hashes.get(&m) {
return hash.hash(hasher);
}
let md_contents = context
.metadata
.get(m.0)
.expect("Orphan / missing metadata");
let descr = std::mem::discriminant(md_contents);
let state = &mut FxHasher::default();
// We temporarily set the discriminant as the hash.
descr.hash(state);
metadata_hashes.insert(m, state.finish());
fn internal(
context: &Context,
m: &Metadatum,
metadata_hashes: &mut FxHashMap<MetadataIndex, u64>,
hasher: &mut FxHasher,
) {
match m {
Metadatum::Integer(i) => i.hash(hasher),
Metadatum::Index(mdi) => hash_metadata(context, *mdi, metadata_hashes, hasher),
Metadatum::String(s) => s.hash(hasher),
Metadatum::SourceId(sid) => sid.hash(hasher),
Metadatum::Struct(name, fields) => {
name.hash(hasher);
fields
.iter()
.for_each(|field| internal(context, field, metadata_hashes, hasher));
}
Metadatum::List(l) => l
.iter()
.for_each(|i| hash_metadata(context, *i, metadata_hashes, hasher)),
}
}
internal(context, md_contents, metadata_hashes, hasher);
let m_hash = state.finish();
metadata_hashes.insert(m, m_hash);
m_hash.hash(hasher);
}
// Start with the function return type.
function.get_return_type(context).hash(state);
// ... and local variables.
for (local_name, local_var) in function.locals_iter(context) {
local_name.hash(state);
if let Some(init) = local_var.get_initializer(context) {
init.hash(state);
}
local_var.get_type(context).hash(state);
}
// Process every block, first its arguments and then the instructions.
for block in function.block_iter(context) {
get_localised_id(block, localised_block_id).hash(state);
for &arg in block.arg_iter(context) {
get_localised_id(arg, localised_value_id).hash(state);
arg.get_argument(context).unwrap().ty.hash(state);
}
for inst in block.instruction_iter(context) {
get_localised_id(inst, localised_value_id).hash(state);
let inst = inst.get_instruction(context).unwrap();
std::mem::discriminant(&inst.op).hash(state);
// Hash value inputs to instructions in one-go.
for v in inst.op.get_operands() {
hash_value(
context,
v,
localised_value_id,
metadata_hashes,
state,
ignore_metadata,
);
}
// Hash non-value inputs.
match &inst.op {
crate::InstOp::AsmBlock(asm_block, args) => {
for arg in args
.iter()
.map(|arg| &arg.name)
.chain(asm_block.args_names.iter())
{
arg.as_str().hash(state);
}
if let Some(return_name) = &asm_block.return_name {
return_name.as_str().hash(state);
}
asm_block.return_type.hash(state);
for asm_inst in &asm_block.body {
asm_inst.op_name.as_str().hash(state);
for arg in &asm_inst.args {
arg.as_str().hash(state);
}
if let Some(imm) = &asm_inst.immediate {
imm.as_str().hash(state);
}
}
}
crate::InstOp::UnaryOp { op, .. } => op.hash(state),
crate::InstOp::BinaryOp { op, .. } => op.hash(state),
crate::InstOp::BitCast(_, ty) => ty.hash(state),
crate::InstOp::Branch(b) => {
get_localised_id(b.block, localised_block_id).hash(state)
}
crate::InstOp::Call(callee, _) => {
match eq_class.function_hash_map.get(callee) {
Some(callee_hash) => {
callee_hash.hash(state);
}
None => {
// We haven't processed this callee yet. Just hash its name.
callee.get_name(context).hash(state);
}
}
}
crate::InstOp::CastPtr(_, ty) => ty.hash(state),
crate::InstOp::Cmp(p, _, _) => p.hash(state),
crate::InstOp::ConditionalBranch {
cond_value: _,
true_block,
false_block,
} => {
get_localised_id(true_block.block, localised_block_id).hash(state);
get_localised_id(false_block.block, localised_block_id).hash(state);
}
crate::InstOp::ContractCall { name, .. } => {
name.hash(state);
}
crate::InstOp::FuelVm(fuel_vm_inst) => {
std::mem::discriminant(fuel_vm_inst).hash(state);
match fuel_vm_inst {
crate::FuelVmInstruction::Gtf { tx_field_id, .. } => {
tx_field_id.hash(state)
}
crate::FuelVmInstruction::Log { log_ty, .. } => log_ty.hash(state),
crate::FuelVmInstruction::ReadRegister(reg) => reg.hash(state),
crate::FuelVmInstruction::Revert(_)
| crate::FuelVmInstruction::JmpMem
| crate::FuelVmInstruction::Smo { .. }
| crate::FuelVmInstruction::StateClear { .. }
| crate::FuelVmInstruction::StateLoadQuadWord { .. }
| crate::FuelVmInstruction::StateLoadWord(_)
| crate::FuelVmInstruction::StateStoreQuadWord { .. }
| crate::FuelVmInstruction::StateStoreWord { .. } => (),
crate::FuelVmInstruction::WideUnaryOp { op, .. } => op.hash(state),
crate::FuelVmInstruction::WideBinaryOp { op, .. } => op.hash(state),
crate::FuelVmInstruction::WideModularOp { op, .. } => op.hash(state),
crate::FuelVmInstruction::WideCmpOp { op, .. } => op.hash(state),
crate::FuelVmInstruction::Retd { ptr, len } => {
ptr.hash(state);
len.hash(state);
}
}
}
crate::InstOp::GetLocal(local) => function
.lookup_local_name(context, local)
.unwrap()
.hash(state),
crate::InstOp::GetGlobal(global) => function
.get_module(context)
.lookup_global_variable_name(context, global)
.unwrap()
.hash(state),
crate::InstOp::GetConfig(_, name) => name.hash(state),
crate::InstOp::GetStorageKey(storage_key) => function
.get_module(context)
.lookup_storage_key_path(context, storage_key)
.unwrap()
.hash(state),
crate::InstOp::GetElemPtr { elem_ptr_ty, .. } => elem_ptr_ty.hash(state),
crate::InstOp::IntToPtr(_, ty) => ty.hash(state),
crate::InstOp::Load(_) => (),
crate::InstOp::MemCopyBytes { byte_len, .. } => byte_len.hash(state),
crate::InstOp::MemCopyVal { .. }
| crate::InstOp::MemClearVal { .. }
| crate::InstOp::Nop => (),
crate::InstOp::PtrToInt(_, ty) => ty.hash(state),
crate::InstOp::Ret(_, ty) => ty.hash(state),
crate::InstOp::Store { .. } => (),
crate::InstOp::Alloc { ty, count: _ } => ty.hash(state),
}
}
}
state.finish()
}
/// Deduplicate equivalent functions in `module`.
///
/// Functions are bucketed into equivalence classes keyed by a structural hash
/// (`hash_fn`). Callees are hashed before callers so that a caller's hash is
/// stable with respect to its (possibly already classified) callees. Every
/// call whose callee is not the representative of its class (the first
/// element of the class set) is redirected to that representative, the same
/// redirection is applied to `V1` configurable decode functions, and the
/// displaced duplicates are removed from the module.
///
/// When `ignore_metadata` is true, functions differing only in metadata
/// (e.g. spans) are still considered equal.
///
/// Returns `Ok(true)` iff any call site or decode function was rewritten.
pub fn dedup_fns(
    context: &mut Context,
    _: &AnalysisResults,
    module: Module,
    ignore_metadata: bool,
) -> Result<bool, IrError> {
    let mut modified = false;
    let eq_class = &mut EqClass {
        hash_set_map: FxHashMap::default(),
        function_hash_map: FxHashMap::default(),
    };
    let mut dups_to_delete = vec![];
    // Hash callees before callers: a function's hash depends on its callees'.
    let cg = build_call_graph(context, &context.modules.get(module.0).unwrap().functions);
    let callee_first = callee_first_order(&cg);
    for function in callee_first {
        let hash = hash_fn(context, function, eq_class, ignore_metadata);
        eq_class
            .hash_set_map
            .entry(hash)
            .and_modify(|class| {
                class.insert(function);
            })
            // `or_insert_with` avoids building the singleton set eagerly when
            // the class already exists (Clippy `or_fun_call`).
            .or_insert_with(|| vec![function].into_iter().collect());
        eq_class.function_hash_map.insert(function, hash);
    }
    // Let's go over the entire module, replacing calls to functions
    // with their representatives in the equivalence class.
    for function in module.function_iter(context) {
        let mut replacements = vec![];
        for (_block, inst) in function.instruction_iter(context) {
            let Some(Instruction {
                op: InstOp::Call(callee, args),
                ..
            }) = inst.get_instruction(context)
            else {
                continue;
            };
            let Some(callee_hash) = eq_class.function_hash_map.get(callee) else {
                continue;
            };
            // If the representative (first element in the set) is different, we need to replace.
            let Some(callee_rep) = eq_class
                .hash_set_map
                .get(callee_hash)
                .and_then(|f| f.iter().next())
                .filter(|rep| *rep != callee)
            else {
                continue;
            };
            dups_to_delete.push(*callee);
            replacements.push((inst, args.clone(), callee_rep));
        }
        if !replacements.is_empty() {
            modified = true;
        }
        for (inst, args, callee_rep) in replacements {
            inst.replace(
                context,
                crate::ValueDatum::Instruction(Instruction {
                    op: InstOp::Call(*callee_rep, args.clone()),
                    parent: inst.get_instruction(context).unwrap().parent,
                }),
            );
        }
    }
    // Replace config decode fns
    for config in module.iter_configs(context) {
        if let crate::ConfigContent::V1 { decode_fn, .. } = config {
            let f = decode_fn.get();
            let Some(callee_hash) = eq_class.function_hash_map.get(&f) else {
                continue;
            };
            // If the representative (first element in the set) is different, we need to replace.
            let Some(callee_rep) = eq_class
                .hash_set_map
                .get(callee_hash)
                .and_then(|f| f.iter().next())
                .filter(|rep| *rep != &f)
            else {
                continue;
            };
            dups_to_delete.push(decode_fn.get());
            decode_fn.replace(*callee_rep);
            // FIX: previously only call-site rewrites set `modified`, so a
            // decode-fn-only rewrite mutated the IR while reporting `false`.
            modified = true;
        }
    }
    // Remove replaced functions
    for function in dups_to_delete {
        module.remove_function(context, &function);
    }
    Ok(modified)
}
/// Debug-profile entry point for function deduplication: delegates to
/// [`dedup_fns`] with `ignore_metadata = false`, so functions that differ
/// only in metadata (e.g. spans used for diagnostics) are NOT merged.
fn dedup_fn_debug_profile(
    context: &mut Context,
    analysis_results: &AnalysisResults,
    module: Module,
) -> Result<bool, IrError> {
    dedup_fns(context, analysis_results, module, false)
}
/// Release-profile entry point for function deduplication: delegates to
/// [`dedup_fns`] with `ignore_metadata = true`, allowing functions that
/// differ only in metadata to be merged for smaller output.
fn dedup_fn_release_profile(
    context: &mut Context,
    analysis_results: &AnalysisResults,
    module: Module,
) -> Result<bool, IrError> {
    dedup_fns(context, analysis_results, module, true)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/const_demotion.rs | sway-ir/src/optimize/const_demotion.rs | /// Constant value demotion.
///
/// This pass demotes 'by-value' constant types to 'by-reference` pointer types, based on target
/// specific parameters.
///
/// Storage for constant values is created on the stack in variables which are initialized with the
/// original values.
use crate::{
AnalysisResults, Block, Constant, Context, Function, InstOp, IrError, Pass, PassMutability,
ScopedPass, Value,
};
use rustc_hash::FxHashMap;
use sway_types::FxIndexMap;
pub const CONST_DEMOTION_NAME: &str = "const-demotion";
/// Builds the [`Pass`] descriptor for constant demotion. The pass runs per
/// function (see [`const_demotion`]) and has no analysis dependencies.
pub fn create_const_demotion_pass() -> Pass {
    Pass {
        name: CONST_DEMOTION_NAME,
        descr: "Demotion of by-value constants to by-reference",
        deps: Vec::new(),
        runner: ScopedPass::FunctionPass(PassMutability::Transform(const_demotion)),
    }
}
/// Demote by-value constant operands to by-reference locals.
///
/// For every demotable constant operand (per the target rules in
/// `super::target_fuel`), a local variable initialized with the constant is
/// created, and the operand is replaced by a `get_local` + `load` pair
/// prepended to the block that used it.
///
/// Returns `Ok(true)` iff at least one constant was demoted.
pub fn const_demotion(
    context: &mut Context,
    _: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    // Find all candidate constant values and their wrapped constants.
    let mut candidate_values: FxIndexMap<Block, Vec<(Value, Constant)>> = FxIndexMap::default();
    for (block, inst) in function.instruction_iter(context) {
        let operands = inst.get_instruction(context).unwrap().op.get_operands();
        for val in operands.iter() {
            if let Some(c) = val.get_constant(context) {
                if super::target_fuel::is_demotable_type(context, &c.get_content(context).ty) {
                    // `entry(..).or_default()` performs a single lookup and
                    // replaces the verbose `Occupied`/`Vacant` match.
                    candidate_values.entry(block).or_default().push((*val, *c));
                }
            }
        }
    }
    if candidate_values.is_empty() {
        return Ok(false);
    }
    for (block, cands) in candidate_values {
        let mut replace_map: FxHashMap<Value, Value> = FxHashMap::default();
        // The new instructions we're going to insert at the start of this block.
        let mut this_block_new = Vec::new();
        for (c_val, c) in cands {
            // Create a variable for const.
            let var = function.new_unique_local_var(
                context,
                "__const".to_owned(),
                c.get_content(context).ty,
                Some(c),
                false,
            );
            let var_val = Value::new_instruction(context, block, InstOp::GetLocal(var));
            let load_val = Value::new_instruction(context, block, InstOp::Load(var_val));
            replace_map.insert(c_val, load_val);
            this_block_new.push(var_val);
            this_block_new.push(load_val);
        }
        block.replace_values(context, &replace_map);
        block.prepend_instructions(context, this_block_new);
    }
    Ok(true)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/arg_demotion.rs | sway-ir/src/optimize/arg_demotion.rs | /// Function argument demotion.
///
/// This pass demotes 'by-value' function arg types to 'by-reference` pointer types, based on target
/// specific parameters.
use crate::{
AnalysisResults, Block, BlockArgument, Context, Function, InstOp, Instruction,
InstructionInserter, IrError, Module, Pass, PassMutability, ScopedPass, Type, Value,
ValueDatum,
};
use rustc_hash::FxHashMap;
pub const ARG_DEMOTION_NAME: &str = "arg-demotion";
/// Builds the [`Pass`] descriptor for argument demotion. The pass runs per
/// module (see [`arg_demotion`]) because changing a function's signature
/// requires rewriting call sites in other functions too; it has no analysis
/// dependencies.
pub fn create_arg_demotion_pass() -> Pass {
    Pass {
        name: ARG_DEMOTION_NAME,
        descr: "Demotion of by-value function arguments to by-reference",
        deps: Vec::new(),
        runner: ScopedPass::ModulePass(PassMutability::Transform(arg_demotion)),
    }
}
/// Demote by-value function arguments and block arguments to by-reference
/// pointers across the whole module.
///
/// Returns `Ok(true)` iff any function or block signature was changed.
pub fn arg_demotion(
    context: &mut Context,
    _: &AnalysisResults,
    module: Module,
) -> Result<bool, IrError> {
    let mut result = false;
    // This is a module pass because modifying the signature of a function may affect the
    // call sites in other functions, requiring their modification as well.
    for function in module.function_iter(context) {
        result |= fn_arg_demotion(context, function)?;
        // We also need to be sure that block args within this function are demoted.
        for block in function.block_iter(context) {
            result |= demote_block_signature(context, &function, block);
        }
    }
    Ok(result)
}
/// Demote the demotable by-value arguments of a single function and fix up
/// every call site in the entire context to pass pointers instead.
///
/// Returns `Ok(false)` without touching the IR when no argument qualifies.
fn fn_arg_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // The criteria for now for demotion is whether the arg type is larger than 64-bits or is an
    // aggregate. This info should be instead determined by a target info analysis pass.
    // Find candidate argument indices.
    let candidate_args = function
        .args_iter(context)
        .enumerate()
        .filter_map(|(idx, (_name, arg_val))| {
            arg_val.get_type(context).and_then(|ty| {
                super::target_fuel::is_demotable_type(context, &ty).then_some((idx, ty))
            })
        })
        .collect::<Vec<(usize, Type)>>();
    if candidate_args.is_empty() {
        return Ok(false);
    }
    // Find all the call sites for this function.
    // NOTE: scans every module in the context since callers may live outside
    // this function's own module; results are collected eagerly so no borrow
    // of `context` is held across the mutations below.
    let call_sites = context
        .module_iter()
        .flat_map(|module| module.function_iter(context))
        .flat_map(|function| function.block_iter(context))
        .flat_map(|block| {
            block
                .instruction_iter(context)
                .filter_map(|instr_val| {
                    if let InstOp::Call(call_to_func, _) = instr_val
                        .get_instruction(context)
                        .expect("`instruction_iter()` must return instruction values.")
                        .op
                    {
                        (call_to_func == function).then_some((block, instr_val))
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>()
        })
        .collect::<Vec<(Block, Value)>>();
    // Demote the function signature and the arg uses.
    demote_fn_signature(context, &function, &candidate_args);
    // We need to convert the caller arg value at *every* call site from a by-value to a
    // by-reference. To do this we create local storage for the value, store it to the variable
    // and pass a pointer to it.
    for (call_block, call_val) in call_sites {
        demote_caller(context, &function, call_block, call_val, &candidate_args);
    }
    Ok(true)
}
/// Rewrite `function`'s signature in place so each arg listed in `arg_idcs`
/// becomes a pointer to its original type, then prepend a `load` from each
/// new pointer arg to the entry block so existing uses still see the
/// by-value form.
fn demote_fn_signature(context: &mut Context, function: &Function, arg_idcs: &[(usize, Type)]) {
    // Change the types of the arg values in place to their pointer counterparts.
    let entry_block = function.get_entry_block(context);
    let old_arg_vals = arg_idcs
        .iter()
        .map(|(arg_idx, arg_ty)| {
            let ptr_ty = Type::new_typed_pointer(context, *arg_ty);
            // Create a new block arg, same as the old one but with a different type.
            let blk_arg_val = entry_block
                .get_arg(context, *arg_idx)
                .expect("Entry block args should be mirror of function args.");
            let ValueDatum::Argument(block_arg) = context.values[blk_arg_val.0].value else {
                panic!("Block argument is not of right Value kind");
            };
            let new_blk_arg_val = Value::new_argument(
                context,
                BlockArgument {
                    ty: ptr_ty,
                    ..block_arg
                },
            );
            // Set both function and block arg to the new one.
            entry_block.set_arg(context, new_blk_arg_val);
            let (_name, fn_arg_val) = &mut context.functions[function.0].arguments[*arg_idx];
            *fn_arg_val = new_blk_arg_val;
            // Pair old and new so uses of the old value can be patched below.
            (blk_arg_val, new_blk_arg_val)
        })
        .collect::<Vec<_>>();
    // For each of the old args, which have had their types changed, insert a `load` instruction.
    let mut replace_map = FxHashMap::default();
    let mut new_inserts = Vec::new();
    for (old_arg_val, new_arg_val) in old_arg_vals {
        let load_from_new_arg =
            Value::new_instruction(context, entry_block, InstOp::Load(new_arg_val));
        new_inserts.push(load_from_new_arg);
        replace_map.insert(old_arg_val, load_from_new_arg);
    }
    entry_block.prepend_instructions(context, new_inserts);
    // Replace all uses of the old arg with the loads.
    function.replace_values(context, &replace_map, None);
}
/// Rewrite a single call site of a demoted function: for each demoted arg
/// index a temporary local is created, the original by-value argument is
/// stored to it just before the call, and the call is replaced by one that
/// passes the temporary's pointer instead.
fn demote_caller(
    context: &mut Context,
    function: &Function,
    call_block: Block,
    call_val: Value,
    arg_idcs: &[(usize, Type)],
) {
    // For each argument we update its type by storing the original value to a local variable and
    // passing its pointer. We return early above if arg_idcs is empty but reassert it here to be
    // sure.
    assert!(!arg_idcs.is_empty());
    // Grab the original args and copy them.
    let Some(Instruction {
        op: InstOp::Call(_, args),
        ..
    }) = call_val.get_instruction(context)
    else {
        unreachable!("`call_val` is definitely a call instruction.");
    };
    // Create a copy of the args to be updated. And use a new vec of instructions to insert to
    // avoid borrowing the block instructions mutably in the loop.
    let mut args = args.clone();
    let mut new_instrs = Vec::with_capacity(arg_idcs.len() * 2);
    let call_function = call_block.get_function(context);
    for (arg_idx, arg_ty) in arg_idcs {
        // First we make a new local variable.
        let loc_var = call_function.new_unique_local_var(
            context,
            "__tmp_arg".to_owned(),
            *arg_ty,
            None,
            false,
        );
        let get_loc_val = Value::new_instruction(context, call_block, InstOp::GetLocal(loc_var));
        // Before the call we store the original arg value to the new local var.
        let store_val = Value::new_instruction(
            context,
            call_block,
            InstOp::Store {
                dst_val_ptr: get_loc_val,
                stored_val: args[*arg_idx],
            },
        );
        // Use the local var as the new arg.
        args[*arg_idx] = get_loc_val;
        // Insert the new `get_local` and the `store`.
        new_instrs.push(get_loc_val);
        new_instrs.push(store_val);
    }
    // Replace call with the new one with updated args.
    let new_call_val = Value::new_instruction(context, call_block, InstOp::Call(*function, args));
    call_block
        .replace_instruction(context, call_val, new_call_val, false)
        .unwrap();
    // Insert new_instrs before the call.
    let mut inserter = InstructionInserter::new(
        context,
        call_block,
        crate::InsertionPosition::Before(new_call_val),
    );
    inserter.insert_slice(&new_instrs);
    // Replace the old call with the new call.
    // NOTE(review): `replace_instruction` above swapped the instruction in the
    // block; this additionally patches remaining *uses* of the old call value.
    call_function.replace_value(context, call_val, new_call_val, None);
}
/// Demote the demotable by-value arguments of a single block to pointers:
/// rewrite the block signature, prepend loads for existing uses, and make
/// every predecessor pass the address of a shared temporary instead of the
/// value itself.
///
/// Returns `true` iff the block signature was changed.
fn demote_block_signature(context: &mut Context, function: &Function, block: Block) -> bool {
    let candidate_args = block
        .arg_iter(context)
        .enumerate()
        .filter_map(|(idx, arg_val)| {
            arg_val.get_type(context).and_then(|ty| {
                super::target_fuel::is_demotable_type(context, &ty).then_some((idx, *arg_val, ty))
            })
        })
        .collect::<Vec<_>>();
    if candidate_args.is_empty() {
        return false;
    }
    let mut replace_map = FxHashMap::default();
    let mut new_inserts = Vec::new();
    // Update the block signature for each candidate arg. Create a replacement load for each one.
    for (_arg_idx, arg_val, arg_ty) in &candidate_args {
        let ptr_ty = Type::new_typed_pointer(context, *arg_ty);
        // Create a new block arg, same as the old one but with a different type.
        let ValueDatum::Argument(block_arg) = context.values[arg_val.0].value else {
            panic!("Block argument is not of right Value kind");
        };
        let new_blk_arg_val = Value::new_argument(
            context,
            BlockArgument {
                ty: ptr_ty,
                ..block_arg
            },
        );
        block.set_arg(context, new_blk_arg_val);
        let load_val = Value::new_instruction(context, block, InstOp::Load(new_blk_arg_val));
        new_inserts.push(load_val);
        replace_map.insert(*arg_val, load_val);
    }
    block.prepend_instructions(context, new_inserts);
    // Replace the arg uses with the loads.
    function.replace_values(context, &replace_map, None);
    // Find the predecessors to this block and for each one use a temporary and pass its address to
    // this block. We create a temporary for each block argument and they can be 'shared' between
    // different predecessors since only one at a time can be the actual predecessor.
    let arg_vars = candidate_args
        .into_iter()
        .map(|(idx, arg_val, arg_ty)| {
            let local_var = function.new_unique_local_var(
                context,
                "__tmp_block_arg".to_owned(),
                arg_ty,
                None,
                false,
            );
            (idx, arg_val, local_var)
        })
        .collect::<Vec<(usize, Value, crate::LocalVar)>>();
    let preds = block.pred_iter(context).copied().collect::<Vec<Block>>();
    for pred in preds {
        for (arg_idx, _arg_val, arg_var) in &arg_vars {
            // Get the value which is being passed to the block at this index.
            let arg_val = pred.get_succ_params(context, &block)[*arg_idx];
            // Insert a `get_local` and `store` for each candidate argument and insert them at the
            // end of this block, before the terminator.
            let get_local_val = Value::new_instruction(context, pred, InstOp::GetLocal(*arg_var));
            let store_val = Value::new_instruction(
                context,
                pred,
                InstOp::Store {
                    dst_val_ptr: get_local_val,
                    stored_val: arg_val,
                },
            );
            // `At(num_instructions - 1)` places the pair immediately before
            // the predecessor's terminator.
            let mut inserter = InstructionInserter::new(
                context,
                pred,
                crate::InsertionPosition::At(pred.num_instructions(context) - 1),
            );
            inserter.insert_slice(&[get_local_val, store_val]);
            // Replace the use of the old argument with the `get_local` pointer value.
            let term_val = pred
                .get_terminator_mut(context)
                .expect("A predecessor must have a terminator");
            term_val.replace_values(&FxHashMap::from_iter([(arg_val, get_local_val)]));
        }
    }
    true
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/misc_demotion.rs | sway-ir/src/optimize/misc_demotion.rs | use std::ops::Not;
/// Miscellaneous value demotion.
///
/// This pass demotes miscellaneous 'by-value' types to 'by-reference' pointer types, based on
/// target specific parameters.
///
/// Current special cases are:
/// - log arguments: These can be any type and should be demoted to pointers if possible.
/// - Fuel ASM block arguments: These are assumed to be pointers for 'by-reference' values.
/// - Fuel ASM block return values: These are also assumed to be pointers for 'by-reference'
/// values.
/// - Fuel Wide binary operators: Demote binary operands bigger than 64 bits.
use crate::{
asm::AsmArg, AnalysisResults, BinaryOpKind, Constant, ConstantContent, Context,
FuelVmInstruction, Function, InstOp, InstructionInserter, IrError, Pass, PassMutability,
Predicate, ScopedPass, Type, UnaryOpKind, Value,
};
use rustc_hash::FxHashMap;
pub const MISC_DEMOTION_NAME: &str = "misc-demotion";
/// Builds the [`Pass`] descriptor for the miscellaneous demotions. The pass
/// runs per function (see [`misc_demotion`]) and has no analysis
/// dependencies.
pub fn create_misc_demotion_pass() -> Pass {
    Pass {
        name: MISC_DEMOTION_NAME,
        descr: "Miscellaneous by-value demotions to by-reference",
        deps: Vec::new(),
        runner: ScopedPass::FunctionPass(PassMutability::Transform(misc_demotion)),
    }
}
/// Run every miscellaneous demotion on `function` and report whether any of
/// them changed the IR.
///
/// Each sub-pass is executed unconditionally — results are folded together
/// with `|=` so no demotion is ever short-circuited away.
pub fn misc_demotion(
    context: &mut Context,
    _: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    let mut modified = false;
    modified |= log_demotion(context, function)?;
    modified |= asm_block_arg_demotion(context, function)?;
    modified |= asm_block_ret_demotion(context, function)?;
    modified |= ptr_to_int_demotion(context, function)?;
    modified |= wide_binary_op_demotion(context, function)?;
    modified |= wide_shift_op_demotion(context, function)?;
    modified |= wide_cmp_demotion(context, function)?;
    modified |= wide_unary_op_demotion(context, function)?;
    Ok(modified)
}
/// Demote demotable `log` operands: the logged value is stored to a fresh
/// local and the `log` instruction is rebuilt to take a pointer to that
/// local instead of the value itself.
///
/// Returns `Ok(false)` when no `log` instruction has a demotable operand.
fn log_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Find all log instructions.
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            instr_val.get_instruction(context).and_then(|instr| {
                // Is the instruction a Log?
                if let InstOp::FuelVm(FuelVmInstruction::Log {
                    log_val,
                    log_ty,
                    log_id,
                    log_data,
                }) = instr.op
                {
                    super::target_fuel::is_demotable_type(context, &log_ty)
                        .then_some((block, instr_val, log_val, log_ty, log_id, log_data))
                } else {
                    None
                }
            })
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    // Take the logged value, store it in a temporary local, and replace it with its pointer in the
    // log instruction.
    for (block, log_instr_val, logged_val, logged_ty, log_id_val, log_data) in candidates {
        // Create a variable for the arg, a get_local for it and a store.
        let loc_var =
            function.new_unique_local_var(context, "__log_arg".to_owned(), logged_ty, None, false);
        let get_loc_val = Value::new_instruction(context, block, InstOp::GetLocal(loc_var));
        let store_val = Value::new_instruction(
            context,
            block,
            InstOp::Store {
                dst_val_ptr: get_loc_val,
                stored_val: logged_val,
            },
        );
        // We need to replace the log instruction because we're changing the type to a pointer.
        let ptr_ty = Type::new_typed_pointer(context, logged_ty);
        let new_log_instr_val = Value::new_instruction(
            context,
            block,
            InstOp::FuelVm(FuelVmInstruction::Log {
                log_val: get_loc_val,
                log_ty: ptr_ty,
                log_id: log_id_val,
                log_data,
            }),
        );
        // NOTE: We don't need to replace the uses of the old log instruction as it doesn't return a
        // value. (It's a 'statement' rather than an 'expression'.)
        block
            .replace_instruction(context, log_instr_val, new_log_instr_val, false)
            .unwrap();
        // Put these two _before_ it.
        let mut inserter = InstructionInserter::new(
            context,
            block,
            crate::InsertionPosition::Before(new_log_instr_val),
        );
        inserter.insert_slice(&[get_loc_val, store_val]);
    }
    Ok(true)
}
/// Demote demotable ASM block initializer args: each by-value initializer is
/// stored to a fresh local before the ASM block, and the block's arg is
/// replaced with the `get_local` pointer.
///
/// Returns `Ok(false)` when no ASM block has a demotable initializer.
fn asm_block_arg_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Gather the ASM blocks with reference type args.
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            instr_val.get_instruction(context).and_then(|instr| {
                // Is the instruction an ASM block?
                if let InstOp::AsmBlock(_asm_block, args) = &instr.op {
                    let ref_args = args
                        .iter()
                        .filter_map(
                            |AsmArg {
                                 name: _,
                                 initializer,
                             }| {
                                initializer.and_then(|init_val| {
                                    init_val.get_type(context).and_then(|ty| {
                                        super::target_fuel::is_demotable_type(context, &ty)
                                            .then_some((init_val, ty))
                                    })
                                })
                            },
                        )
                        .collect::<Vec<_>>();
                    (!ref_args.is_empty()).then_some((block, instr_val, ref_args))
                } else {
                    None
                }
            })
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    for (block, asm_block_instr_val, ref_args) in candidates {
        let mut replace_map = FxHashMap::default();
        let mut temporaries = Vec::new();
        for (ref_arg_val, ref_arg_ty) in ref_args {
            // Create temporaries for each of the by-reference args.
            let loc_var = function.new_unique_local_var(
                context,
                "__asm_arg".to_owned(),
                ref_arg_ty,
                None,
                false,
            );
            // Create `get_local`s and `store`s for each one.
            let get_loc_val = Value::new_instruction(context, block, InstOp::GetLocal(loc_var));
            let store_val = Value::new_instruction(
                context,
                block,
                InstOp::Store {
                    dst_val_ptr: get_loc_val,
                    stored_val: ref_arg_val,
                },
            );
            replace_map.insert(ref_arg_val, get_loc_val);
            temporaries.push(get_loc_val);
            temporaries.push(store_val);
        }
        // Insert the temporaries into the block.
        let mut inserter = InstructionInserter::new(
            context,
            block,
            crate::InsertionPosition::Before(asm_block_instr_val),
        );
        inserter.insert_slice(&temporaries);
        // Replace the args with the `get_local`s in the ASM block.
        asm_block_instr_val.replace_instruction_values(context, &replace_map);
    }
    Ok(true)
}
/// Demote demotable ASM block return types: the block is rebuilt to return a
/// pointer, a `load` from that pointer is inserted right after it, and all
/// uses of the old ASM block value are redirected to the load.
///
/// Returns `Ok(false)` when no ASM block returns a demotable type.
fn asm_block_ret_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Gather the ASM blocks which return a reference type.
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            instr_val.get_instruction(context).and_then(|instr| {
                // Is the instruction an ASM block?
                if let InstOp::AsmBlock(asm_block, args) = &instr.op {
                    let ret_ty = asm_block.return_type;
                    super::target_fuel::is_demotable_type(context, &ret_ty).then_some((
                        block,
                        instr_val,
                        asm_block.clone(),
                        args.clone(),
                        ret_ty,
                    ))
                } else {
                    None
                }
            })
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    let mut replace_map = FxHashMap::default();
    for (block, asm_block_instr_val, mut asm_block, asm_args, ret_ty) in candidates {
        // Change the ASM block return type to be a pointer.
        let ret_ptr_ty = Type::new_typed_pointer(context, ret_ty);
        asm_block.return_type = ret_ptr_ty;
        let new_asm_block =
            Value::new_instruction(context, block, InstOp::AsmBlock(asm_block, asm_args));
        // Insert a load after the block.
        let load_val = Value::new_instruction(context, block, InstOp::Load(new_asm_block));
        block
            .replace_instruction(context, asm_block_instr_val, new_asm_block, false)
            .unwrap();
        let mut inserter = InstructionInserter::new(
            context,
            block,
            crate::InsertionPosition::After(new_asm_block),
        );
        inserter.insert(load_val);
        // Replace uses of the old ASM block with the new load.
        replace_map.insert(asm_block_instr_val, load_val);
    }
    function.replace_values(context, &replace_map, None);
    Ok(true)
}
/// Demote demotable `ptr_to_int` operands (generated by `__addr_of()`).
///
/// When the operand is itself a `load`, the `ptr_to_int` is pointed straight
/// at the loaded pointer; otherwise the value is stored to a fresh local and
/// the `ptr_to_int` takes the local's `get_local` pointer instead.
///
/// Returns `Ok(false)` when no `ptr_to_int` has a demotable operand.
fn ptr_to_int_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Find all ptr_to_int instructions, which are generated by the __addr_of() intrinsic.
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            instr_val.get_instruction(context).and_then(|instr| {
                // Is the instruction a PtrToInt?
                if let InstOp::PtrToInt(ptr_val, _int_ty) = instr.op {
                    ptr_val.get_type(context).and_then(|ptr_ty| {
                        super::target_fuel::is_demotable_type(context, &ptr_ty)
                            .then_some((block, instr_val, ptr_val, ptr_ty))
                    })
                } else {
                    None
                }
            })
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    for (block, ptr_to_int_instr_val, ptr_val, ptr_ty) in candidates {
        // If the ptr_val is a load from a memory location, we can just refer to that.
        // (Direct `if let` on the op replaces the former `match … { Load(v) =>
        // Some(v), _ => None }` wrapped in another `if let`.)
        if let Some(instr) = ptr_val.get_instruction(context) {
            if let InstOp::Load(loaded_val) = instr.op {
                ptr_to_int_instr_val.replace_instruction_value(context, ptr_val, loaded_val);
                continue;
            }
        }
        // Take the ptr_to_int value, store it in a temporary local, and replace it with its pointer in
        // the ptr_to_int instruction.
        // Create a variable for the arg, a get_local for it and a store.
        let loc_var = function.new_unique_local_var(
            context,
            "__ptr_to_int_arg".to_owned(),
            ptr_ty,
            None,
            false,
        );
        let get_loc_val = Value::new_instruction(context, block, InstOp::GetLocal(loc_var));
        let store_val = Value::new_instruction(
            context,
            block,
            InstOp::Store {
                dst_val_ptr: get_loc_val,
                stored_val: ptr_val,
            },
        );
        // Put these two _before_ ptr_to_int_instr_val.
        let mut inserter = InstructionInserter::new(
            context,
            block,
            crate::InsertionPosition::Before(ptr_to_int_instr_val),
        );
        inserter.insert_slice(&[get_loc_val, store_val]);
        // Replace the argument to ptr_to_int.
        ptr_to_int_instr_val.replace_instruction_value(context, ptr_val, get_loc_val);
    }
    Ok(true)
}
/// Find all binary operations on types bigger than 64 bits
/// and demote them to fuel specific `wide binary ops`, that
/// work only on pointers
/// Demote 256-bit binary operations (`u256`/`b256` operands) to the
/// pointer-based `WideBinaryOp`/`WideModularOp` FuelVM instructions.
///
/// Each operand that is not already a pointer is spilled to a fresh local;
/// the result is written through a `__wide_result` local and read back with a
/// `load` that replaces the original binary op value.
///
/// Returns `Ok(false)` when no qualifying binary op exists.
fn wide_binary_op_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Find all intrinsics on wide operators
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            use BinaryOpKind as B;
            let InstOp::BinaryOp {
                op: B::Add | B::Sub | B::Mul | B::Div | B::Mod | B::And | B::Or | B::Xor,
                arg1,
                arg2,
            } = instr_val.get_instruction(context)?.op
            else {
                return None;
            };
            let arg1_type = arg1.get_type(context);
            let arg2_type = arg2.get_type(context);
            // Only ops where *both* operands are u256 or both are b256 qualify.
            match (arg1_type, arg2_type) {
                (Some(arg1_type), Some(arg2_type))
                    if arg1_type.is_uint_of(context, 256) && arg2_type.is_uint_of(context, 256) =>
                {
                    Some((block, instr_val))
                }
                (Some(arg1_type), Some(arg2_type))
                    if arg1_type.is_b256(context) && arg2_type.is_b256(context) =>
                {
                    Some((block, instr_val))
                }
                _ => None,
            }
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    // Now create a local for the result
    // get ptr to each arg
    // and store the result after
    for (block, binary_op_instr_val) in candidates {
        let InstOp::BinaryOp { op, arg1, arg2 } = binary_op_instr_val
            .get_instruction(context)
            .cloned()
            .unwrap()
            .op
        else {
            continue;
        };
        let binary_op_metadata = binary_op_instr_val.get_metadata(context);
        let arg1_ty = arg1.get_type(context).unwrap();
        let arg1_metadata = arg1.get_metadata(context);
        let arg2_ty = arg2.get_type(context).unwrap();
        let arg2_metadata = arg2.get_metadata(context);
        let operand_ty = arg1.get_type(context).unwrap();
        // Result local; the wide op writes through this pointer.
        let result_local = function.new_unique_local_var(
            context,
            "__wide_result".to_owned(),
            operand_ty,
            None,
            true,
        );
        let get_result_local =
            Value::new_instruction(context, block, InstOp::GetLocal(result_local))
                .add_metadatum(context, binary_op_metadata);
        let load_result_local =
            Value::new_instruction(context, block, InstOp::Load(get_result_local))
                .add_metadatum(context, binary_op_metadata);
        // If arg1 is not a pointer, store it to a local
        let lhs_store = if !arg1_ty.is_ptr(context) {
            let lhs_local = function.new_unique_local_var(
                context,
                "__wide_lhs".to_owned(),
                operand_ty,
                None,
                false,
            );
            let get_lhs_local = Value::new_instruction(context, block, InstOp::GetLocal(lhs_local))
                .add_metadatum(context, arg1_metadata);
            let store_lhs_local = Value::new_instruction(
                context,
                block,
                InstOp::Store {
                    dst_val_ptr: get_lhs_local,
                    stored_val: arg1,
                },
            )
            .add_metadatum(context, arg1_metadata);
            Some((get_lhs_local, store_lhs_local))
        } else {
            None
        };
        // If spilled, use the spill pointer; otherwise the arg itself (already a
        // pointer) must still be inserted as an instruction later.
        let (arg1_needs_insert, get_arg1) = if let Some((lhs_local, _)) = &lhs_store {
            (false, *lhs_local)
        } else {
            (true, arg1)
        };
        // If arg2 is not a pointer, store it to a local
        let rhs_store = if !arg2_ty.is_ptr(context) {
            let rhs_local = function.new_unique_local_var(
                context,
                "__wide_rhs".to_owned(),
                operand_ty,
                None,
                false,
            );
            let get_rhs_local = Value::new_instruction(context, block, InstOp::GetLocal(rhs_local))
                .add_metadatum(context, arg2_metadata);
            // NOTE(review): despite the name, this stores the RHS value.
            let store_lhs_local = Value::new_instruction(
                context,
                block,
                InstOp::Store {
                    dst_val_ptr: get_rhs_local,
                    stored_val: arg2,
                },
            )
            .add_metadatum(context, arg2_metadata);
            Some((get_rhs_local, store_lhs_local))
        } else {
            None
        };
        let (arg2_needs_insert, get_arg2) = if let Some((rhs_local, _)) = &rhs_store {
            (false, *rhs_local)
        } else {
            (true, arg2)
        };
        // For MOD we need a local zero as RHS of the add operation
        let (wide_op, get_local_zero) = match op {
            BinaryOpKind::Mod => {
                let initializer = ConstantContent::new_uint(context, 256, 0);
                let initializer = Constant::unique(context, initializer);
                let local_zero = function.new_unique_local_var(
                    context,
                    "__wide_zero".to_owned(),
                    operand_ty,
                    Some(initializer),
                    true,
                );
                let get_local_zero =
                    Value::new_instruction(context, block, InstOp::GetLocal(local_zero))
                        .add_metadatum(context, binary_op_metadata);
                // MOD is implemented as a modular op: (arg1 + zero) mod arg2.
                (
                    Value::new_instruction(
                        context,
                        block,
                        InstOp::FuelVm(FuelVmInstruction::WideModularOp {
                            op,
                            result: get_result_local,
                            arg1: get_arg1,
                            arg2: get_local_zero,
                            arg3: get_arg2,
                        }),
                    )
                    .add_metadatum(context, binary_op_metadata),
                    Some(get_local_zero),
                )
            }
            _ => (
                Value::new_instruction(
                    context,
                    block,
                    InstOp::FuelVm(FuelVmInstruction::WideBinaryOp {
                        op,
                        arg1: get_arg1,
                        arg2: get_arg2,
                        result: get_result_local,
                    }),
                )
                .add_metadatum(context, binary_op_metadata),
                None,
            ),
        };
        // Assert all operands are pointers
        assert!(get_arg1.get_type(context).unwrap().is_ptr(context));
        assert!(get_arg2.get_type(context).unwrap().is_ptr(context));
        assert!(get_result_local.get_type(context).unwrap().is_ptr(context));
        if let Some(get_local_zero) = &get_local_zero {
            assert!(get_local_zero.get_type(context).unwrap().is_ptr(context));
        }
        block
            .replace_instruction(context, binary_op_instr_val, load_result_local, true)
            .unwrap();
        // Build the instruction sequence to insert before the result load, in
        // dependency order: spills, zero local (MOD), operand `get_local`s,
        // result `get_local`, then the wide op itself.
        let mut additional_instrs = Vec::new();
        // lhs
        if let Some((get_lhs_local, store_lhs_local)) = lhs_store {
            additional_instrs.push(get_lhs_local);
            additional_instrs.push(store_lhs_local);
        }
        // Only for MOD
        if let Some(get_local_zero) = get_local_zero {
            additional_instrs.push(get_local_zero);
        }
        //rhs
        if let Some((get_rhs_local, store_rhs_local)) = rhs_store {
            additional_instrs.push(get_rhs_local);
            additional_instrs.push(store_rhs_local);
        }
        if arg1_needs_insert {
            additional_instrs.push(get_arg1);
        }
        if arg2_needs_insert {
            additional_instrs.push(get_arg2);
        }
        additional_instrs.push(get_result_local);
        additional_instrs.push(wide_op);
        let mut inserter = InstructionInserter::new(
            context,
            block,
            crate::InsertionPosition::Before(load_result_local),
        );
        inserter.insert_slice(&additional_instrs);
    }
    Ok(true)
}
/// Find all cmp operations on types bigger than 64 bits
/// and demote them to fuel specific `wide cmp ops`, that
/// work only on pointers
/// Demote 256-bit comparisons (`u256`/`b256` operands) to the pointer-based
/// `WideCmpOp` FuelVM instruction.
///
/// Operands that are not already pointers are spilled to fresh locals; the
/// original `cmp` value is replaced by the wide cmp (which, unlike the wide
/// binary ops, produces its boolean result directly).
///
/// Returns `Ok(false)` when no qualifying comparison exists.
fn wide_cmp_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Find all cmp on wide operators
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            let InstOp::Cmp(
                Predicate::Equal | Predicate::LessThan | Predicate::GreaterThan,
                arg1,
                arg2,
            ) = instr_val.get_instruction(context)?.op
            else {
                return None;
            };
            let arg1_type = arg1.get_type(context);
            let arg2_type = arg2.get_type(context);
            // Only comparisons where both operands are u256 or both b256 qualify.
            match (arg1_type, arg2_type) {
                (Some(arg1_type), Some(arg2_type))
                    if arg1_type.is_uint_of(context, 256) && arg2_type.is_uint_of(context, 256) =>
                {
                    Some((block, instr_val))
                }
                (Some(arg1_type), Some(arg2_type))
                    if arg1_type.is_b256(context) && arg2_type.is_b256(context) =>
                {
                    Some((block, instr_val))
                }
                _ => None,
            }
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    // Get ptr to each arg
    for (block, cmp_instr_val) in candidates {
        let InstOp::Cmp(op, arg1, arg2) =
            cmp_instr_val.get_instruction(context).cloned().unwrap().op
        else {
            continue;
        };
        let cmp_op_metadata = cmp_instr_val.get_metadata(context);
        let arg1_ty = arg1.get_type(context).unwrap();
        let arg1_metadata = arg1.get_metadata(context);
        let arg2_ty = arg2.get_type(context).unwrap();
        let arg2_metadata = arg2.get_metadata(context);
        // If arg1 is not a pointer, store it to a local
        let lhs_store = arg1_ty.is_ptr(context).not().then(|| {
            let lhs_local = function.new_unique_local_var(
                context,
                "__wide_lhs".to_owned(),
                arg1_ty,
                None,
                false,
            );
            let get_lhs_local = Value::new_instruction(context, block, InstOp::GetLocal(lhs_local))
                .add_metadatum(context, arg1_metadata);
            let store_lhs_local = Value::new_instruction(
                context,
                block,
                InstOp::Store {
                    dst_val_ptr: get_lhs_local,
                    stored_val: arg1,
                },
            )
            .add_metadatum(context, arg1_metadata);
            (get_lhs_local, store_lhs_local)
        });
        // If spilled, compare via the spill pointer; otherwise the arg itself
        // (already a pointer) must still be inserted as an instruction later.
        let (arg1_needs_insert, get_arg1) = if let Some((lhs_local, _)) = &lhs_store {
            (false, *lhs_local)
        } else {
            (true, arg1)
        };
        // If arg2 is not a pointer, store it to a local
        let rhs_store = arg2_ty.is_ptr(context).not().then(|| {
            let rhs_local = function.new_unique_local_var(
                context,
                "__wide_rhs".to_owned(),
                arg1_ty,
                None,
                false,
            );
            let get_rhs_local = Value::new_instruction(context, block, InstOp::GetLocal(rhs_local))
                .add_metadatum(context, arg2_metadata);
            // NOTE(review): despite the name, this stores the RHS value.
            let store_lhs_local = Value::new_instruction(
                context,
                block,
                InstOp::Store {
                    dst_val_ptr: get_rhs_local,
                    stored_val: arg2,
                },
            )
            .add_metadatum(context, arg2_metadata);
            (get_rhs_local, store_lhs_local)
        });
        let (arg2_needs_insert, get_arg2) = if let Some((rhs_local, _)) = &rhs_store {
            (false, *rhs_local)
        } else {
            (true, arg2)
        };
        // Assert all operands are pointers
        assert!(get_arg1.get_type(context).unwrap().is_ptr(context));
        assert!(get_arg2.get_type(context).unwrap().is_ptr(context));
        let wide_op = Value::new_instruction(
            context,
            block,
            InstOp::FuelVm(FuelVmInstruction::WideCmpOp {
                op,
                arg1: get_arg1,
                arg2: get_arg2,
            }),
        )
        .add_metadatum(context, cmp_op_metadata);
        // `true` — also replace uses of the old cmp value with the wide op.
        block
            .replace_instruction(context, cmp_instr_val, wide_op, true)
            .unwrap();
        let mut additional_instrs = Vec::new();
        // lhs
        if let Some((get_lhs_local, store_lhs_local)) = lhs_store {
            additional_instrs.push(get_lhs_local);
            additional_instrs.push(store_lhs_local);
        }
        //rhs
        if let Some((get_rhs_local, store_rhs_local)) = rhs_store {
            additional_instrs.push(get_rhs_local);
            additional_instrs.push(store_rhs_local);
        }
        if arg1_needs_insert {
            additional_instrs.push(get_arg1);
        }
        if arg2_needs_insert {
            additional_instrs.push(get_arg2);
        }
        let mut inserter =
            InstructionInserter::new(context, block, crate::InsertionPosition::Before(wide_op));
        inserter.insert_slice(&additional_instrs);
    }
    Ok(true)
}
/// Find all unary operations on types bigger than 64 bits
/// and demote them to fuel specific `wide ops`, that
/// work only on pointers
fn wide_unary_op_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
    // Find all `Not` unary ops whose operand is u256 or b256.
    let candidates = function
        .instruction_iter(context)
        .filter_map(|(block, instr_val)| {
            let InstOp::UnaryOp {
                op: UnaryOpKind::Not,
                arg,
            } = instr_val.get_instruction(context)?.op
            else {
                return None;
            };
            match arg.get_type(context) {
                Some(t) if t.is_uint_of(context, 256) || t.is_b256(context) => {
                    Some((block, instr_val))
                }
                _ => None,
            }
        })
        .collect::<Vec<_>>();
    if candidates.is_empty() {
        return Ok(false);
    }
    // Now create a local for the result
    // get ptr to each arg
    // and store the result after
    for (block, unary_op_instr_val) in candidates {
        let InstOp::UnaryOp { arg, .. } = unary_op_instr_val
            .get_instruction(context)
            .cloned()
            .unwrap()
            .op
        else {
            continue;
        };
        let unary_op_metadata = unary_op_instr_val.get_metadata(context);
        let arg_ty = arg.get_type(context).unwrap();
        let arg_metadata = arg.get_metadata(context);
        // Local receiving the wide op's result; it is loaded afterwards and the
        // load replaces the original unary instruction's value.
        let result_local =
            function.new_unique_local_var(context, "__wide_result".to_owned(), arg_ty, None, true);
        let get_result_local =
            Value::new_instruction(context, block, InstOp::GetLocal(result_local))
                .add_metadatum(context, unary_op_metadata);
        let load_result_local =
            Value::new_instruction(context, block, InstOp::Load(get_result_local))
                .add_metadatum(context, unary_op_metadata);
        // If the argument is not a pointer, store it to a local so the wide op
        // can take its address.
        let arg_store = arg_ty.is_ptr(context).not().then(|| {
            let arg_local = function.new_unique_local_var(
                context,
                "__wide_lhs".to_owned(),
                arg_ty,
                None,
                false,
            );
            let get_arg_local = Value::new_instruction(context, block, InstOp::GetLocal(arg_local))
                .add_metadatum(context, arg_metadata);
            let store_arg_local = Value::new_instruction(
                context,
                block,
                InstOp::Store {
                    dst_val_ptr: get_arg_local,
                    stored_val: arg,
                },
            )
            .add_metadatum(context, arg_metadata);
            (get_arg_local, store_arg_local)
        });
        let (arg_needs_insert, get_arg) = if let Some((arg_local, _)) = &arg_store {
            (false, *arg_local)
        } else {
            (true, arg)
        };
        // Assert all operands are pointers
        assert!(get_arg.get_type(context).unwrap().is_ptr(context));
        assert!(get_result_local.get_type(context).unwrap().is_ptr(context));
        let wide_op = Value::new_instruction(
            context,
            block,
            InstOp::FuelVm(FuelVmInstruction::WideUnaryOp {
                op: UnaryOpKind::Not,
                arg: get_arg,
                result: get_result_local,
            }),
        )
        .add_metadatum(context, unary_op_metadata);
        block
            .replace_instruction(context, unary_op_instr_val, load_result_local, true)
            .unwrap();
        // Collect the support instructions and insert them just before the
        // replacement load, preserving the required evaluation order.
        let mut additional_instrs = Vec::new();
        if let Some((get_arg_local, store_arg_local)) = arg_store {
            additional_instrs.push(get_arg_local);
            additional_instrs.push(store_arg_local);
        }
        if arg_needs_insert {
            additional_instrs.push(get_arg);
        }
        additional_instrs.push(get_result_local);
        additional_instrs.push(wide_op);
        let mut inserter = InstructionInserter::new(
            context,
            block,
            crate::InsertionPosition::Before(load_result_local),
        );
        inserter.insert_slice(&additional_instrs);
    }
    Ok(true)
}
/// Find all shift operations on types bigger than 64 bits
/// and demote them to fuel specific `wide binary ops`, that
/// work only on pointers
fn wide_shift_op_demotion(context: &mut Context, function: Function) -> Result<bool, IrError> {
// Find all intrinsics on wide operators
let candidates = function
.instruction_iter(context)
.filter_map(|(block, instr_val)| {
let instr = instr_val.get_instruction(context)?;
let InstOp::BinaryOp {
op: BinaryOpKind::Lsh | BinaryOpKind::Rsh,
arg1,
arg2,
} = instr.op
else {
return None;
};
let arg1_type = arg1.get_type(context);
let arg2_type = arg2.get_type(context);
match (arg1_type, arg2_type) {
(Some(arg1_type), Some(arg2_type))
if arg1_type.is_uint_of(context, 256) && arg2_type.is_uint64(context) =>
{
Some((block, instr_val))
}
(Some(arg1_type), Some(arg2_type))
if arg1_type.is_b256(context) && arg2_type.is_uint64(context) =>
{
Some((block, instr_val))
}
_ => None,
}
})
.collect::<Vec<_>>();
if candidates.is_empty() {
return Ok(false);
}
// Now create a local for the result
// get ptr to each arg
// and store the result after
for (block, binary_op_instr_val) in candidates {
let InstOp::BinaryOp { op, arg1, arg2 } = binary_op_instr_val
.get_instruction(context)
.cloned()
.unwrap()
.op
else {
continue;
};
let binary_op_metadata = binary_op_instr_val.get_metadata(context);
let arg1_ty = arg1.get_type(context).unwrap();
let arg1_metadata = arg1.get_metadata(context);
let arg2_ty = arg2.get_type(context).unwrap();
let operand_ty = arg1.get_type(context).unwrap();
let result_local = function.new_unique_local_var(
context,
"__wide_result".to_owned(),
operand_ty,
None,
true,
);
let get_result_local =
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/inline.rs | sway-ir/src/optimize/inline.rs | //! Function inlining.
//!
//! Function inlining is pretty hairy so these passes must be maintained with care.
use std::{cell::RefCell, collections::HashMap};
use rustc_hash::FxHashMap;
use crate::{
asm::AsmArg,
block::Block,
call_graph, compute_post_order,
context::Context,
error::IrError,
function::Function,
instruction::{FuelVmInstruction, InstOp},
irtype::Type,
metadata::{combine, MetadataIndex},
value::{Value, ValueContent, ValueDatum},
variable::LocalVar,
AnalysisResults, BlockArgument, Instruction, Module, Pass, PassMutability, ScopedPass,
};
pub const FN_INLINE_NAME: &str = "inline";

/// Build the [Pass] descriptor for the function-inlining transform.
///
/// The pass is registered under [FN_INLINE_NAME], declares no analysis
/// dependencies, and runs [fn_inline] as a module-level transform.
pub fn create_fn_inline_pass() -> Pass {
    let runner = ScopedPass::ModulePass(PassMutability::Transform(fn_inline));
    Pass {
        runner,
        name: FN_INLINE_NAME,
        descr: "Function inlining",
        deps: Vec::new(),
    }
}
/// This is a copy of sway_core::inline::Inline.
/// TODO: Reuse: Depend on sway_core? Move it to sway_types?
#[derive(Debug)]
pub enum Inline {
    /// The function is requested to always be inlined
    /// (parsed from the `"always"` metadata string).
    Always,
    /// The function is requested to never be inlined
    /// (parsed from the `"never"` metadata string).
    Never,
}
/// Extract an [Inline] request from an optional metadata index.
///
/// The metadata may be a single entry or a list; for a list each element is
/// tried in turn and the first that decodes as an `inline` struct wins. The
/// struct is expected to carry one string field: `"always"` or `"never"`;
/// any other string yields `None`.
pub fn metadata_to_inline(context: &Context, md_idx: Option<MetadataIndex>) -> Option<Inline> {
    // Decode a single metadata entry into an `Inline`, if it is one.
    let decode = |md_idx: MetadataIndex| -> Option<Inline> {
        let fields = md_idx.get_content(context).unwrap_struct("inline", 1)?;
        match fields[0].unwrap_string()? {
            "always" => Some(Inline::Always),
            "never" => Some(Inline::Never),
            _otherwise => None,
        }
    };
    let md_idx = md_idx?;
    match md_idx.get_content(context).unwrap_list() {
        // A list: try every element until one decodes.
        Some(md_idcs) => md_idcs.iter().copied().find_map(decode),
        // A single entry: decode it directly.
        None => decode(md_idx),
    }
}
/// The `inline` pass entry point: inline profitable calls throughout `module`.
///
/// Functions are visited in callee-first order, so a callee is fully processed
/// before being considered for inlining into its callers. Returns whether any
/// function was modified.
pub fn fn_inline(
    context: &mut Context,
    _: &AnalysisResults,
    module: Module,
) -> Result<bool, IrError> {
    // Inspect ALL calls and count how often each function is called.
    let mut call_counts: HashMap<Function, u64> = HashMap::new();
    for func in module.function_iter(context) {
        for (_block, ins) in func.instruction_iter(context) {
            if let Some(Instruction {
                op: InstOp::Call(callee, _args),
                ..
            }) = ins.get_instruction(context)
            {
                *call_counts.entry(*callee).or_insert(0) += 1;
            }
        }
    }

    let inline_heuristic = |ctx: &Context, func: &Function, _call_site: &Value| {
        // The encoding code in the `__entry` functions contains pointer patterns that mark
        // escape analysis and referred symbols as incomplete. This effectively forbids
        // optimizations like SROA and DCE. If we inline original entries, like e.g., `main`,
        // the code in them will also not be optimized. Therefore, we forbid inlining of
        // original entries into `__entry`.
        if func.is_original_entry(ctx) {
            return false;
        }

        match metadata_to_inline(ctx, func.get_metadata(ctx)) {
            Some(Inline::Always) => {
                // TODO: check if inlining of function is possible
                // return true;
            }
            Some(Inline::Never) => {
                return false;
            }
            None => {}
        }

        // A function with exactly one call site is definitely worth inlining.
        if call_counts.get(func).copied().unwrap_or(0) == 1 {
            return true;
        }

        // So is a function that is (still) small.
        const MAX_INLINE_INSTRS_COUNT: usize = 12;
        func.num_instructions_incl_asm_instructions(ctx) <= MAX_INLINE_INSTRS_COUNT
    };

    let cg =
        call_graph::build_call_graph(context, &module.function_iter(context).collect::<Vec<_>>());

    let mut modified = false;
    for function in call_graph::callee_first_order(&cg) {
        modified |= inline_some_function_calls(context, &function, inline_heuristic)?;
    }
    Ok(modified)
}
/// Inline all calls made from a specific function, effectively removing all `Call` instructions.
///
/// e.g., If this is applied to main() then all calls in the program are removed. This is
/// obviously dangerous for recursive functions, in which case this pass would inline forever.
pub fn inline_all_function_calls(
context: &mut Context,
function: &Function,
) -> Result<bool, IrError> {
inline_some_function_calls(context, function, |_, _, _| true)
}
/// Inline function calls based on a provided heuristic predicate.
///
/// There are many things to consider when deciding to inline a function. For example:
/// - The size of the function, especially if smaller than the call overhead size.
/// - The stack frame size of the function.
/// - The number of calls made to the function or if the function is called inside a loop.
/// - A particular call has constant arguments implying further constant folding.
/// - An attribute request, e.g., #[always_inline], #[never_inline].
pub fn inline_some_function_calls<F: Fn(&Context, &Function, &Value) -> bool>(
    context: &mut Context,
    function: &Function,
    predicate: F,
) -> Result<bool, IrError> {
    // Find call sites that pass the predicate.
    // We use a RefCell so that the inliner can modify the value
    // when it moves other instructions (which could be in call_data) after an inline.
    let (call_sites, call_data): (Vec<_>, FxHashMap<_, _>) = function
        .instruction_iter(context)
        .filter_map(|(block, call_val)| match context.values[call_val.0].value {
            ValueDatum::Instruction(Instruction {
                op: InstOp::Call(inlined_function, _),
                ..
            }) => predicate(context, &inlined_function, &call_val).then_some((
                call_val,
                (call_val, RefCell::new((block, inlined_function))),
            )),
            _ => None,
        })
        .unzip();
    for call_site in &call_sites {
        // Re-read the (possibly updated) containing block for this call site:
        // inlining an earlier call may have split its block, in which case
        // `inline_function_call` rewrote the `call_data` entry.
        let call_site_in = call_data.get(call_site).unwrap();
        let (block, inlined_function) = *call_site_in.borrow();
        if function == &inlined_function {
            // We can't inline a function into itself.
            continue;
        }
        inline_function_call(
            context,
            *function,
            block,
            *call_site,
            inlined_function,
            &call_data,
        )?;
    }
    // NOTE(review): this reports "modified" whenever any call site matched the
    // predicate, even if every matching site was self-recursive and skipped
    // above — confirm callers only rely on this as a coarse signal.
    Ok(!call_data.is_empty())
}
/// A utility to get a predicate which can be passed to inline_some_function_calls() based on
/// certain sizes of the function. If a constraint is None then any size is assumed to be
/// acceptable.
///
/// The max_stack_size is a bit tricky, as the IR doesn't really know (or care) about the size of
/// types. See the source code for how it works.
pub fn is_small_fn(
    max_blocks: Option<usize>,
    max_instrs: Option<usize>,
    max_stack_size: Option<usize>,
) -> impl Fn(&Context, &Function, &Value) -> bool {
    // A rough "element count" for a type, standing in for a real size
    // computation: arrays multiply, unions take the largest variant, structs
    // sum their fields, everything else counts as 1.
    fn count_type_elements(context: &Context, ty: &Type) -> usize {
        // This is meant to just be a heuristic rather than be super accurate.
        if ty.is_array(context) {
            let elem_ty = ty.get_array_elem_type(context).unwrap();
            count_type_elements(context, &elem_ty) * ty.get_array_len(context).unwrap() as usize
        } else if ty.is_union(context) {
            ty.get_field_types(context)
                .iter()
                .map(|ty| count_type_elements(context, ty))
                .max()
                .unwrap_or(1)
        } else if ty.is_struct(context) {
            ty.get_field_types(context)
                .iter()
                .map(|ty| count_type_elements(context, ty))
                .sum()
        } else {
            1
        }
    }
    move |context: &Context, function: &Function, _call_site: &Value| -> bool {
        // Guard-clause style: bail out on the first violated constraint; a
        // `None` constraint accepts any size.
        if let Some(limit) = max_blocks {
            if function.num_blocks(context) > limit {
                return false;
            }
        }
        if let Some(limit) = max_instrs {
            if function.num_instructions_incl_asm_instructions(context) > limit {
                return false;
            }
        }
        if let Some(limit) = max_stack_size {
            let estimated_size: usize = function
                .locals_iter(context)
                .map(|(_name, ptr)| count_type_elements(context, &ptr.get_inner_type(context)))
                .sum();
            if estimated_size > limit {
                return false;
            }
        }
        true
    }
}
/// Inline a function to a specific call site within another function.
///
/// The destination function, block and call site must be specified along with the function to
/// inline.
///
/// Mechanically: the containing block is split right after the call, the call
/// is removed, the callee's blocks and locals are merged in with fresh names,
/// and every callee instruction is copied over with its operands remapped
/// (see `inline_instruction`). `call_data` entries for call sites moved by
/// the split are updated in place via their `RefCell`s.
pub fn inline_function_call(
    context: &mut Context,
    function: Function,
    block: Block,
    call_site: Value,
    inlined_function: Function,
    call_data: &FxHashMap<Value, RefCell<(Block, Function)>>,
) -> Result<(), IrError> {
    // Split the block at right after the call site.
    let call_site_idx = block
        .instruction_iter(context)
        .position(|v| v == call_site)
        .unwrap();
    let (pre_block, post_block) = block.split_at(context, call_site_idx + 1);
    if post_block != block {
        // We need to update call_data for every call_site that was in block.
        for inst in post_block.instruction_iter(context).filter(|inst| {
            matches!(
                context.values[inst.0].value,
                ValueDatum::Instruction(Instruction {
                    op: InstOp::Call(..),
                    ..
                })
            )
        }) {
            if let Some(call_info) = call_data.get(&inst) {
                call_info.borrow_mut().0 = post_block;
            }
        }
    }
    // Remove the call from the pre_block instructions. It's still in the context.values[] though.
    pre_block.remove_last_instruction(context);
    // Returned values, if any, go to `post_block`, so a block arg there.
    // We don't expect `post_block` to already have any block args.
    if post_block.new_arg(context, call_site.get_type(context).unwrap()) != 0 {
        panic!("Expected newly created post_block to not have block args")
    }
    // Every use of the call's result now reads the new block argument instead.
    function.replace_value(
        context,
        call_site,
        post_block.get_arg(context, 0).unwrap(),
        None,
    );
    // Take the locals from the inlined function and add them to this function. `value_map` is a
    // map from the original local ptrs to the new ptrs.
    let ptr_map = function.merge_locals_from(context, inlined_function);
    let mut value_map = HashMap::new();
    // Add the mapping from argument values in the inlined function to the args passed to the call.
    if let ValueDatum::Instruction(Instruction {
        op: InstOp::Call(_, passed_vals),
        ..
    }) = &context.values[call_site.0].value
    {
        for (arg_val, passed_val) in context.functions[inlined_function.0]
            .arguments
            .iter()
            .zip(passed_vals.iter())
        {
            value_map.insert(arg_val.1, *passed_val);
        }
    }
    // Get the metadata attached to the function call which may need to be propagated to the
    // inlined instructions.
    let metadata = context.values[call_site.0].metadata;
    // Now remove the call altogether.
    context.values.remove(call_site.0);
    // Insert empty blocks from the inlined function between our split blocks, and create a mapping
    // from old blocks to new. We need this when inlining branch instructions, so they branch to
    // the new blocks.
    //
    // We map the entry block in the inlined function (which we know must exist) to our `pre_block`
    // from the split above. We'll start appending inlined instructions to that block rather than
    // a new one (with a redundant branch to it from the `pre_block`).
    let inlined_fn_name = inlined_function.get_name(context).to_owned();
    let mut block_map = HashMap::new();
    let mut block_iter = context.functions[inlined_function.0]
        .blocks
        .clone()
        .into_iter();
    block_map.insert(block_iter.next().unwrap(), pre_block);
    block_map = block_iter.fold(block_map, |mut block_map, inlined_block| {
        let inlined_block_label = inlined_block.get_label(context);
        let new_block = function
            .create_block_before(
                context,
                &post_block,
                Some(format!("{inlined_fn_name}_{inlined_block_label}")),
            )
            .unwrap();
        block_map.insert(inlined_block, new_block);
        // We collect so that context can be mutably borrowed later.
        let inlined_args: Vec<_> = inlined_block.arg_iter(context).copied().collect();
        // Recreate each block argument on the new block and record the
        // old-arg -> new-arg mapping for operand translation.
        for inlined_arg in inlined_args {
            if let ValueDatum::Argument(BlockArgument {
                block: _,
                idx: _,
                ty,
                is_immutable: _,
            }) = &context.values[inlined_arg.0].value
            {
                let index = new_block.new_arg(context, *ty);
                value_map.insert(inlined_arg, new_block.get_arg(context, index).unwrap());
            } else {
                unreachable!("Expected a block argument")
            }
        }
        block_map
    });
    // Use a reverse-post-order traversal to ensure that definitions are seen before uses.
    let inlined_block_iter = compute_post_order(context, &inlined_function)
        .po_to_block
        .into_iter()
        .rev();
    // We now have a mapping from old blocks to new (currently empty) blocks, and a mapping from
    // old values (locals and args at this stage) to new values. We can copy instructions over,
    // translating their blocks and values to refer to the new ones. The value map is still live
    // as we add new instructions which replace the old ones to it too.
    for ref block in inlined_block_iter {
        for ins in block.instruction_iter(context) {
            inline_instruction(
                context,
                block_map.get(block).unwrap(),
                &post_block,
                &ins,
                &block_map,
                &mut value_map,
                &ptr_map,
                metadata,
            );
        }
    }
    Ok(())
}
/// Clone a single instruction from the function being inlined into `new_block`,
/// translating operand values, branch targets, and locals through the supplied
/// maps. `Ret` is rewritten as a branch to `post_block` carrying the returned
/// value as a block argument. The newly created value is recorded in
/// `value_map` so subsequent instructions can reference it.
#[allow(clippy::too_many_arguments)]
fn inline_instruction(
    context: &mut Context,
    new_block: &Block,
    post_block: &Block,
    instruction: &Value,
    block_map: &HashMap<Block, Block>,
    value_map: &mut HashMap<Value, Value>,
    local_map: &HashMap<LocalVar, LocalVar>,
    fn_metadata: Option<MetadataIndex>,
) {
    // Util to translate old blocks to new. If an old block isn't in the map then we panic, since
    // it should be guaranteed to be there...that's a bug otherwise.
    let map_block = |old_block| *block_map.get(&old_block).unwrap();
    // Util to translate old values to new. If an old value isn't in the map then it (should be)
    // a const, which we can just keep using.
    let map_value = |old_val: Value| value_map.get(&old_val).copied().unwrap_or(old_val);
    let map_local = |old_local| local_map.get(&old_local).copied().unwrap();
    // The instruction needs to be cloned into the new block, with each value and/or block
    // translated using the above maps. Most of these are relatively cheap as Instructions
    // generally are lightweight, except maybe ASM blocks, but we're able to re-use the block
    // content since it's a black box and not concerned with Values, Blocks or Pointers.
    //
    // We need to clone the instruction here, which is unfortunate. Maybe in the future we
    // restructure instructions somehow, so we don't need a persistent `&Context` to access them.
    if let ValueContent {
        value: ValueDatum::Instruction(old_ins),
        metadata: val_metadata,
    } = context.values[instruction.0].clone()
    {
        // Combine the function metadata with this instruction metadata so we don't lose the
        // function metadata after inlining.
        let metadata = combine(context, &fn_metadata, &val_metadata);
        let new_ins = match old_ins.op {
            InstOp::AsmBlock(asm, args) => {
                let new_args = args
                    .iter()
                    .map(|AsmArg { name, initializer }| AsmArg {
                        name: name.clone(),
                        initializer: initializer.map(map_value),
                    })
                    .collect();
                // We can re-use the old asm block with the updated args.
                new_block.append(context).asm_block_from_asm(asm, new_args)
            }
            InstOp::BitCast(value, ty) => new_block.append(context).bitcast(map_value(value), ty),
            InstOp::UnaryOp { op, arg } => new_block.append(context).unary_op(op, map_value(arg)),
            InstOp::BinaryOp { op, arg1, arg2 } => {
                new_block
                    .append(context)
                    .binary_op(op, map_value(arg1), map_value(arg2))
            }
            // For `br` and `cbr` below we don't need to worry about the phi values, they're
            // adjusted later in `inline_function_call()`.
            InstOp::Branch(b) => new_block.append(context).branch(
                map_block(b.block),
                b.args.iter().map(|v| map_value(*v)).collect(),
            ),
            InstOp::Call(f, args) => new_block.append(context).call(
                f,
                args.iter()
                    .map(|old_val: &Value| map_value(*old_val))
                    .collect::<Vec<Value>>()
                    .as_slice(),
            ),
            InstOp::CastPtr(val, ty) => new_block.append(context).cast_ptr(map_value(val), ty),
            InstOp::Cmp(pred, lhs_value, rhs_value) => {
                new_block
                    .append(context)
                    .cmp(pred, map_value(lhs_value), map_value(rhs_value))
            }
            InstOp::ConditionalBranch {
                cond_value,
                true_block,
                false_block,
            } => new_block.append(context).conditional_branch(
                map_value(cond_value),
                map_block(true_block.block),
                map_block(false_block.block),
                true_block.args.iter().map(|v| map_value(*v)).collect(),
                false_block.args.iter().map(|v| map_value(*v)).collect(),
            ),
            InstOp::ContractCall {
                return_type,
                name,
                params,
                coins,
                asset_id,
                gas,
            } => new_block.append(context).contract_call(
                return_type,
                name,
                map_value(params),
                map_value(coins),
                map_value(asset_id),
                map_value(gas),
            ),
            // Fuel-VM specific instructions: each operand value is remapped,
            // non-value payloads (registers, type ids, etc.) pass through.
            InstOp::FuelVm(fuel_vm_instr) => match fuel_vm_instr {
                FuelVmInstruction::Gtf { index, tx_field_id } => {
                    new_block.append(context).gtf(map_value(index), tx_field_id)
                }
                FuelVmInstruction::Log {
                    log_val,
                    log_ty,
                    log_id,
                    log_data,
                } => new_block.append(context).log(
                    map_value(log_val),
                    log_ty,
                    map_value(log_id),
                    log_data,
                ),
                FuelVmInstruction::ReadRegister(reg) => {
                    new_block.append(context).read_register(reg)
                }
                FuelVmInstruction::Revert(val) => new_block.append(context).revert(map_value(val)),
                FuelVmInstruction::JmpMem => new_block.append(context).jmp_mem(),
                FuelVmInstruction::Smo {
                    recipient,
                    message,
                    message_size,
                    coins,
                } => new_block.append(context).smo(
                    map_value(recipient),
                    map_value(message),
                    map_value(message_size),
                    map_value(coins),
                ),
                FuelVmInstruction::StateClear {
                    key,
                    number_of_slots,
                } => new_block
                    .append(context)
                    .state_clear(map_value(key), map_value(number_of_slots)),
                FuelVmInstruction::StateLoadQuadWord {
                    load_val,
                    key,
                    number_of_slots,
                } => new_block.append(context).state_load_quad_word(
                    map_value(load_val),
                    map_value(key),
                    map_value(number_of_slots),
                ),
                FuelVmInstruction::StateLoadWord(key) => {
                    new_block.append(context).state_load_word(map_value(key))
                }
                FuelVmInstruction::StateStoreQuadWord {
                    stored_val,
                    key,
                    number_of_slots,
                } => new_block.append(context).state_store_quad_word(
                    map_value(stored_val),
                    map_value(key),
                    map_value(number_of_slots),
                ),
                FuelVmInstruction::StateStoreWord { stored_val, key } => new_block
                    .append(context)
                    .state_store_word(map_value(stored_val), map_value(key)),
                FuelVmInstruction::WideUnaryOp { op, arg, result } => new_block
                    .append(context)
                    .wide_unary_op(op, map_value(arg), map_value(result)),
                FuelVmInstruction::WideBinaryOp {
                    op,
                    arg1,
                    arg2,
                    result,
                } => new_block.append(context).wide_binary_op(
                    op,
                    map_value(arg1),
                    map_value(arg2),
                    map_value(result),
                ),
                FuelVmInstruction::WideModularOp {
                    op,
                    result,
                    arg1,
                    arg2,
                    arg3,
                } => new_block.append(context).wide_modular_op(
                    op,
                    map_value(result),
                    map_value(arg1),
                    map_value(arg2),
                    map_value(arg3),
                ),
                FuelVmInstruction::WideCmpOp { op, arg1, arg2 } => new_block
                    .append(context)
                    .wide_cmp_op(op, map_value(arg1), map_value(arg2)),
                FuelVmInstruction::Retd { ptr, len } => new_block
                    .append(context)
                    .retd(map_value(ptr), map_value(len)),
            },
            InstOp::GetElemPtr {
                base,
                elem_ptr_ty,
                indices,
            } => {
                let elem_ty = elem_ptr_ty.get_pointee_type(context).unwrap();
                new_block.append(context).get_elem_ptr(
                    map_value(base),
                    elem_ty,
                    indices.iter().map(|idx| map_value(*idx)).collect(),
                )
            }
            InstOp::GetLocal(local_var) => {
                new_block.append(context).get_local(map_local(local_var))
            }
            InstOp::GetGlobal(global_var) => new_block.append(context).get_global(global_var),
            InstOp::GetStorageKey(storage_key) => {
                new_block.append(context).get_storage_key(storage_key)
            }
            InstOp::GetConfig(module, name) => new_block.append(context).get_config(module, name),
            InstOp::Alloc { ty, count } => new_block.append(context).alloc(ty, map_value(count)),
            InstOp::IntToPtr(value, ty) => {
                new_block.append(context).int_to_ptr(map_value(value), ty)
            }
            InstOp::Load(src_val) => new_block.append(context).load(map_value(src_val)),
            InstOp::MemCopyBytes {
                dst_val_ptr,
                src_val_ptr,
                byte_len,
            } => new_block.append(context).mem_copy_bytes(
                map_value(dst_val_ptr),
                map_value(src_val_ptr),
                byte_len,
            ),
            InstOp::MemCopyVal {
                dst_val_ptr,
                src_val_ptr,
            } => new_block
                .append(context)
                .mem_copy_val(map_value(dst_val_ptr), map_value(src_val_ptr)),
            InstOp::MemClearVal { dst_val_ptr } => new_block
                .append(context)
                .mem_clear_val(map_value(dst_val_ptr)),
            InstOp::Nop => new_block.append(context).nop(),
            InstOp::PtrToInt(value, ty) => {
                new_block.append(context).ptr_to_int(map_value(value), ty)
            }
            // We convert `ret` to `br post_block` and add the returned value as a phi value.
            InstOp::Ret(val, _) => new_block
                .append(context)
                .branch(*post_block, vec![map_value(val)]),
            InstOp::Store {
                dst_val_ptr,
                stored_val,
            } => new_block
                .append(context)
                .store(map_value(dst_val_ptr), map_value(stored_val)),
        }
        .add_metadatum(context, metadata);
        value_map.insert(*instruction, new_ins);
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/memcpyopt.rs | sway-ir/src/optimize/memcpyopt.rs | //! Optimisations related to mem_copy.
//! - replace a `store` directly from a `load` with a `mem_copy_val`.
use indexmap::IndexMap;
use itertools::{Either, Itertools};
use rustc_hash::{FxHashMap, FxHashSet};
use sway_types::{FxIndexMap, FxIndexSet};
use crate::{
get_gep_symbol, get_loaded_symbols, get_referred_symbol, get_referred_symbols,
get_stored_symbols, memory_utils, AnalysisResults, Block, Context, EscapedSymbols,
FuelVmInstruction, Function, InstOp, Instruction, InstructionInserter, IrError, LocalVar, Pass,
PassMutability, ReferredSymbols, ScopedPass, Symbol, Type, Value, ValueDatum,
ESCAPED_SYMBOLS_NAME,
};
pub const MEMCPYOPT_NAME: &str = "memcpyopt";

/// Build the [Pass] descriptor for the mem-copy optimization pass.
///
/// Registered under [MEMCPYOPT_NAME]; it depends on the escaped-symbols
/// analysis and runs [mem_copy_opt] as a transforming function pass.
pub fn create_memcpyopt_pass() -> Pass {
    let runner = ScopedPass::FunctionPass(PassMutability::Transform(mem_copy_opt));
    Pass {
        runner,
        name: MEMCPYOPT_NAME,
        descr: "Optimizations related to MemCopy instructions",
        deps: vec![ESCAPED_SYMBOLS_NAME],
    }
}
/// Run the mem-copy related rewrites over `function` in sequence, reporting
/// whether any of them changed the IR.
pub fn mem_copy_opt(
    context: &mut Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    // The three rewrites are applied one after another, in this order.
    let prememcpy_changed = local_copy_prop_prememcpy(context, analyses, function)?;
    let to_memcopy_changed = load_store_to_memcopy(context, function)?;
    let copy_prop_changed = local_copy_prop(context, analyses, function)?;
    Ok(prememcpy_changed | to_memcopy_changed | copy_prop_changed)
}
/// A pre-`memcpy` local copy propagation: when the single store to a
/// destination local is a store of a value loaded from a source local (and
/// the safety conditions enumerated below hold), uses of the destination are
/// redirected to the source and the redundant store is deleted.
fn local_copy_prop_prememcpy(
    context: &mut Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    struct InstInfo {
        // The block containing the instruction.
        block: Block,
        // Relative (use only for comparison) position of instruction in `block`.
        pos: usize,
    }
    // If the analysis result is incomplete we cannot do any safe optimizations here.
    // Calculating the candidates below relies on complete result of an escape analysis.
    let escaped_symbols = match analyses.get_analysis_result(function) {
        EscapedSymbols::Complete(syms) => syms,
        EscapedSymbols::Incomplete(_) => return Ok(false),
    };
    // All instructions that load from the `Symbol`.
    let mut loads_map = FxHashMap::<Symbol, Vec<Value>>::default();
    // All instructions that store to the `Symbol`.
    let mut stores_map = FxHashMap::<Symbol, Vec<Value>>::default();
    // All load and store instructions.
    let mut instr_info_map = FxHashMap::<Value, InstInfo>::default();
    for (pos, (block, inst)) in function.instruction_iter(context).enumerate() {
        let info = || InstInfo { block, pos };
        let inst_e = inst.get_instruction(context).unwrap();
        match inst_e {
            Instruction {
                op: InstOp::Load(src_val_ptr),
                ..
            } => {
                if let Some(local) = get_referred_symbol(context, *src_val_ptr) {
                    loads_map
                        .entry(local)
                        .and_modify(|loads| loads.push(inst))
                        .or_insert(vec![inst]);
                    instr_info_map.insert(inst, info());
                }
            }
            Instruction {
                op: InstOp::Store { dst_val_ptr, .. },
                ..
            } => {
                if let Some(local) = get_referred_symbol(context, *dst_val_ptr) {
                    stores_map
                        .entry(local)
                        .and_modify(|stores| stores.push(inst))
                        .or_insert(vec![inst]);
                    instr_info_map.insert(inst, info());
                }
            }
            _ => (),
        }
    }
    let mut to_delete = FxHashSet::<Value>::default();
    // Candidates for replacements. The map's key `Symbol` is the
    // destination `Symbol` that can be replaced with the
    // map's value `Symbol`, the source.
    // Replacement is possible (among other criteria explained below)
    // only if the Store of the source is the only storing to the destination.
    let candidates: FxHashMap<Symbol, Symbol> = function
        .instruction_iter(context)
        .enumerate()
        .filter_map(|(pos, (block, instr_val))| {
            // 1. Go through all the Store instructions whose source is
            //    a Load instruction...
            instr_val
                .get_instruction(context)
                .and_then(|instr| {
                    // Is the instruction a Store?
                    if let Instruction {
                        op:
                            InstOp::Store {
                                dst_val_ptr,
                                stored_val,
                            },
                        ..
                    } = instr
                    {
                        get_gep_symbol(context, *dst_val_ptr).and_then(|dst_local| {
                            stored_val
                                .get_instruction(context)
                                .map(|src_instr| (src_instr, stored_val, dst_local))
                        })
                    } else {
                        None
                    }
                })
                .and_then(|(src_instr, stored_val, dst_local)| {
                    // Is the Store source a Load?
                    if let Instruction {
                        op: InstOp::Load(src_val_ptr),
                        ..
                    } = src_instr
                    {
                        get_gep_symbol(context, *src_val_ptr)
                            .map(|src_local| (stored_val, dst_local, src_local))
                    } else {
                        None
                    }
                })
                .and_then(|(src_load, dst_local, src_local)| {
                    // 2. ... and pick the (dest_local, src_local) pairs that fulfill the
                    //    below criteria, in other words, where `dest_local` can be
                    //    replaced with `src_local`.
                    let (temp_empty1, temp_empty2, temp_empty3) = (vec![], vec![], vec![]);
                    let dst_local_stores = stores_map.get(&dst_local).unwrap_or(&temp_empty1);
                    let src_local_stores = stores_map.get(&src_local).unwrap_or(&temp_empty2);
                    let dst_local_loads = loads_map.get(&dst_local).unwrap_or(&temp_empty3);
                    // This must be the only store of dst_local.
                    if dst_local_stores.len() != 1 || dst_local_stores[0] != instr_val
                        ||
                        // All stores of src_local must be in the same block, prior to src_load.
                        !src_local_stores.iter().all(|store_val|{
                            let instr_info = instr_info_map.get(store_val).unwrap();
                            let src_load_info = instr_info_map.get(src_load).unwrap();
                            instr_info.block == block && instr_info.pos < src_load_info.pos
                        })
                        ||
                        // All loads of dst_local must be after this instruction, in the same block.
                        !dst_local_loads.iter().all(|load_val| {
                            let instr_info = instr_info_map.get(load_val).unwrap();
                            instr_info.block == block && instr_info.pos > pos
                        })
                        // We don't deal with symbols that escape.
                        || escaped_symbols.contains(&dst_local)
                        || escaped_symbols.contains(&src_local)
                        // We don't deal with partial copies.
                        || dst_local.get_type(context) != src_local.get_type(context)
                        // We don't replace the destination when it's an arg.
                        || matches!(dst_local, Symbol::Arg(_))
                    {
                        None
                    } else {
                        to_delete.insert(instr_val);
                        Some((dst_local, src_local))
                    }
                })
        })
        .collect();
    // If we have A replaces B and B replaces C, then A must replace C also.
    // Recursively searches for the final replacement for the `local`.
    // Returns `None` if the `local` cannot be replaced.
    fn get_replace_with(candidates: &FxHashMap<Symbol, Symbol>, local: &Symbol) -> Option<Symbol> {
        candidates
            .get(local)
            .map(|replace_with| get_replace_with(candidates, replace_with).unwrap_or(*replace_with))
    }
    // If the source is an Arg, we replace uses of destination with Arg.
    // Otherwise (`get_local`), we replace the local symbol in-place.
    enum ReplaceWith {
        InPlaceLocal(LocalVar),
        Value(Value),
    }
    // Because we can't borrow context for both iterating and replacing, do it in 2 steps.
    // `replaces` are the original GetLocal instructions with the corresponding replacements
    // of their arguments.
    let replaces: Vec<_> = function
        .instruction_iter(context)
        .filter_map(|(_block, value)| match value.get_instruction(context) {
            Some(Instruction {
                op: InstOp::GetLocal(local),
                ..
            }) => get_replace_with(&candidates, &Symbol::Local(*local)).map(|replace_with| {
                (
                    value,
                    match replace_with {
                        Symbol::Local(local) => ReplaceWith::InPlaceLocal(local),
                        Symbol::Arg(ba) => {
                            ReplaceWith::Value(ba.block.get_arg(context, ba.idx).unwrap())
                        }
                    },
                )
            }),
            _ => None,
        })
        .collect();
    let mut value_replace = FxHashMap::<Value, Value>::default();
    for (value, replace_with) in replaces.into_iter() {
        match replace_with {
            ReplaceWith::InPlaceLocal(replacement_var) => {
                let Some(&Instruction {
                    op: InstOp::GetLocal(redundant_var),
                    parent,
                }) = value.get_instruction(context)
                else {
                    panic!("earlier match now fails");
                };
                // Mutability must be preserved: if the replaced local was
                // mutable, the replacement has to be as well.
                if redundant_var.is_mutable(context) {
                    replacement_var.set_mutable(context, true);
                }
                value.replace(
                    context,
                    ValueDatum::Instruction(Instruction {
                        op: InstOp::GetLocal(replacement_var),
                        parent,
                    }),
                )
            }
            ReplaceWith::Value(replace_with) => {
                value_replace.insert(value, replace_with);
            }
        }
    }
    function.replace_values(context, &value_replace, None);
    // Delete stores to the replaced local.
    let blocks: Vec<Block> = function.block_iter(context).collect();
    for block in blocks {
        block.remove_instructions(context, |value| to_delete.contains(&value));
    }
    // NOTE(review): this returns `true` even when no candidates were found and
    // nothing was changed — confirm callers only use this as a coarse
    // "may have modified" signal.
    Ok(true)
}
/// Deconstruct a memcpy into (dst_val_ptr, src_val_ptr, copy_len).
///
/// Returns `None` when `inst` is not one of the two memcpy instruction forms.
fn deconstruct_memcpy(context: &Context, inst: Value) -> Option<(Value, Value, u64)> {
    let Instruction { op, .. } = inst.get_instruction(context).unwrap();
    match op {
        // Byte-wise copy: the length is carried by the instruction itself.
        InstOp::MemCopyBytes {
            dst_val_ptr,
            src_val_ptr,
            byte_len,
        } => Some((*dst_val_ptr, *src_val_ptr, *byte_len)),
        // Value copy: the length is the size of the destination's pointee type.
        InstOp::MemCopyVal {
            dst_val_ptr,
            src_val_ptr,
        } => Some((
            *dst_val_ptr,
            *src_val_ptr,
            memory_utils::pointee_size(context, *dst_val_ptr),
        )),
        _ => None,
    }
}
/// Copy propagation of `memcpy`s within a block.
fn local_copy_prop(
context: &mut Context,
analyses: &AnalysisResults,
function: Function,
) -> Result<bool, IrError> {
// If the analysis result is incomplete we cannot do any safe optimizations here.
// The `gen_new_copy` and `process_load` functions below rely on the fact that the
// analyzed symbols do not escape, something we cannot guarantee in case of
// an incomplete collection of escaped symbols.
let escaped_symbols = match analyses.get_analysis_result(function) {
EscapedSymbols::Complete(syms) => syms,
EscapedSymbols::Incomplete(_) => return Ok(false),
};
// Currently (as we scan a block) available `memcpy`s.
let mut available_copies: FxHashSet<Value>;
// Map a symbol to the available `memcpy`s of which it's a source.
let mut src_to_copies: FxIndexMap<Symbol, FxIndexSet<Value>>;
// Map a symbol to the available `memcpy`s of which it's a destination.
// (multiple `memcpy`s for the same destination may be available when
// they are partial / field writes, and don't alias).
let mut dest_to_copies: FxIndexMap<Symbol, FxIndexSet<Value>>;
// If a value (symbol) is found to be defined, remove it from our tracking.
//
// `value` is a pointer being (re)defined and `len` is the number of bytes
// written through it. Any tracked `memcpy` whose source or destination may
// alias the written range is dropped from `available_copies`, and the two
// index maps are then pruned to agree with `available_copies`.
fn kill_defined_symbol(
    context: &Context,
    value: Value,
    len: u64,
    available_copies: &mut FxHashSet<Value>,
    src_to_copies: &mut FxIndexMap<Symbol, FxIndexSet<Value>>,
    dest_to_copies: &mut FxIndexMap<Symbol, FxIndexSet<Value>>,
) {
    match get_referred_symbols(context, value) {
        ReferredSymbols::Complete(rs) => {
            for sym in rs {
                // Invalidate copies that read from a region the write may touch.
                if let Some(copies) = src_to_copies.get_mut(&sym) {
                    for copy in &*copies {
                        let (_, src_ptr, copy_size) = deconstruct_memcpy(context, *copy)
                            .expect("Expected copy instruction");
                        if memory_utils::may_alias(context, value, len, src_ptr, copy_size) {
                            available_copies.remove(copy);
                        }
                    }
                }
                // Invalidate copies whose destination the write may clobber.
                if let Some(copies) = dest_to_copies.get_mut(&sym) {
                    for copy in &*copies {
                        let (dest_ptr, copy_size) = match copy.get_instruction(context).unwrap()
                        {
                            Instruction {
                                op:
                                    InstOp::MemCopyBytes {
                                        dst_val_ptr,
                                        src_val_ptr: _,
                                        byte_len,
                                    },
                                ..
                            } => (*dst_val_ptr, *byte_len),
                            Instruction {
                                op:
                                    InstOp::MemCopyVal {
                                        dst_val_ptr,
                                        src_val_ptr: _,
                                    },
                                ..
                            } => (
                                *dst_val_ptr,
                                memory_utils::pointee_size(context, *dst_val_ptr),
                            ),
                            _ => panic!("Unexpected copy instruction"),
                        };
                        if memory_utils::may_alias(context, value, len, dest_ptr, copy_size) {
                            available_copies.remove(copy);
                        }
                    }
                }
            }
            // Update src_to_copies and dest_to_copies to remove every copy not in available_copies.
            src_to_copies.retain(|_, copies| {
                copies.retain(|copy| available_copies.contains(copy));
                !copies.is_empty()
            });
            dest_to_copies.retain(|_, copies| {
                copies.retain(|copy| available_copies.contains(copy));
                !copies.is_empty()
            });
        }
        ReferredSymbols::Incomplete(_) => {
            // We don't know every symbol `value` may refer to, so any tracked
            // copy might alias the write.
            // The only safe thing we can do is to clear all information.
            available_copies.clear();
            src_to_copies.clear();
            dest_to_copies.clear();
        }
    }
}
// Record a newly available copy (`copy_inst`: src_val_ptr -> dst_val_ptr).
//
// The copy is tracked only when both pointers are GEPs on known, non-escaping
// symbols; otherwise we cannot reason about it safely and it is ignored.
#[allow(clippy::too_many_arguments)]
fn gen_new_copy(
    context: &Context,
    escaped_symbols: &FxHashSet<Symbol>,
    copy_inst: Value,
    dst_val_ptr: Value,
    src_val_ptr: Value,
    available_copies: &mut FxHashSet<Value>,
    src_to_copies: &mut FxIndexMap<Symbol, FxIndexSet<Value>>,
    dest_to_copies: &mut FxIndexMap<Symbol, FxIndexSet<Value>>,
) {
    if let (Some(dst_sym), Some(src_sym)) = (
        get_gep_symbol(context, dst_val_ptr),
        get_gep_symbol(context, src_val_ptr),
    ) {
        // We don't deal with symbols that escape.
        if escaped_symbols.contains(&dst_sym) || escaped_symbols.contains(&src_sym) {
            return;
        }
        // `or_default` avoids eagerly building a throwaway one-element set when
        // the entry already exists (the previous `and_modify` + eager
        // `or_insert([copy_inst].into_iter().collect())` always allocated one).
        dest_to_copies.entry(dst_sym).or_default().insert(copy_inst);
        src_to_copies.entry(src_sym).or_default().insert(copy_inst);
        available_copies.insert(copy_inst);
    }
}
// A GEP to be newly created: `base[indices...]`, producing a pointer of type
// `elem_ptr_ty`. Used when a load can be redirected to the source of a memcpy
// but no existing value computes the required address.
struct ReplGep {
    base: Symbol,
    elem_ptr_ty: Type,
    indices: Vec<Value>,
}
// How to replace a load's source pointer: either reuse an existing value
// (`OldGep`) or materialize a fresh GEP (`NewGep`).
enum Replacement {
    OldGep(Value),
    NewGep(ReplGep),
}
// Try to redirect a load-like use of `src_val_ptr` (performed by `inst`) to
// the source of an available `memcpy` whose destination it reads from.
//
// On success the replacement is recorded in `replacements` (keyed by `inst`)
// and `true` is returned; otherwise returns `false`.
fn process_load(
    context: &Context,
    escaped_symbols: &FxHashSet<Symbol>,
    inst: Value,
    src_val_ptr: Value,
    dest_to_copies: &FxIndexMap<Symbol, FxIndexSet<Value>>,
    replacements: &mut FxHashMap<Value, (Value, Replacement)>,
) -> bool {
    // For every `memcpy` that src_val_ptr is a destination of,
    // check if we can do the load from the source of that memcpy.
    if let Some(src_sym) = get_referred_symbol(context, src_val_ptr) {
        // We don't deal with symbols that escape.
        if escaped_symbols.contains(&src_sym) {
            return false;
        }
        for memcpy in dest_to_copies
            .get(&src_sym)
            .iter()
            .flat_map(|set| set.iter())
        {
            let (dst_ptr_memcpy, src_ptr_memcpy, copy_len) =
                deconstruct_memcpy(context, *memcpy).expect("Expected copy instruction");
            // If the location where we're loading from exactly matches the destination of
            // the memcpy, just load from the source pointer of the memcpy.
            // TODO: In both the arms below, we check that the pointer type
            // matches. This isn't really needed as the copy happens and the
            // data we want is safe to access. But we just don't know how to
            // generate the right GEP always. So that's left for another day.
            if memory_utils::must_alias(
                context,
                src_val_ptr,
                memory_utils::pointee_size(context, src_val_ptr),
                dst_ptr_memcpy,
                copy_len,
            ) {
                // Replace src_val_ptr with src_ptr_memcpy.
                if src_val_ptr.get_type(context) == src_ptr_memcpy.get_type(context) {
                    replacements
                        .insert(inst, (src_val_ptr, Replacement::OldGep(src_ptr_memcpy)));
                    return true;
                }
            } else {
                // if the memcpy copies the entire symbol, we could
                // insert a new GEP from the source of the memcpy.
                if let (Some(memcpy_src_sym), Some(memcpy_dst_sym), Some(new_indices)) = (
                    get_gep_symbol(context, src_ptr_memcpy),
                    get_gep_symbol(context, dst_ptr_memcpy),
                    memory_utils::combine_indices(context, src_val_ptr),
                ) {
                    let memcpy_src_sym_type = memcpy_src_sym
                        .get_type(context)
                        .get_pointee_type(context)
                        .unwrap();
                    let memcpy_dst_sym_type = memcpy_dst_sym
                        .get_type(context)
                        .get_pointee_type(context)
                        .unwrap();
                    // Only when both symbols have the same type and the memcpy
                    // covers that type entirely can the load's indices be
                    // re-applied on top of the source symbol.
                    if memcpy_src_sym_type == memcpy_dst_sym_type
                        && memcpy_dst_sym_type.size(context).in_bytes() == copy_len
                    {
                        replacements.insert(
                            inst,
                            (
                                src_val_ptr,
                                Replacement::NewGep(ReplGep {
                                    base: memcpy_src_sym,
                                    elem_ptr_ty: src_val_ptr.get_type(context).unwrap(),
                                    indices: new_indices,
                                }),
                            ),
                        );
                        return true;
                    }
                }
            }
        }
    }
    false
}
let mut modified = false;
for block in function.block_iter(context) {
// A `memcpy` itself has a `load`, so we can `process_load` on it.
// If, as a result, we've marked the source of this `memcpy` for optimization,
// it itself cannot be "generated" as a new candidate `memcpy`.
// This is the reason we run a loop on the block till there's no more
// optimization possible. We could track just the changes and do it
// all in one go, but that would complicate the algorithm. So I've
// marked this as a TODO for now (#4600).
loop {
available_copies = FxHashSet::default();
src_to_copies = IndexMap::default();
dest_to_copies = IndexMap::default();
// Replace the load/memcpy source pointer with something else.
let mut replacements = FxHashMap::default();
// Kill tracked copies that may be mutated through any of `args`
// (pointer arguments handed to a call or an ASM block).
fn kill_escape_args(
    context: &Context,
    args: &Vec<Value>,
    available_copies: &mut FxHashSet<Value>,
    src_to_copies: &mut FxIndexMap<Symbol, FxIndexSet<Value>>,
    dest_to_copies: &mut FxIndexMap<Symbol, FxIndexSet<Value>>,
) {
    for arg in args {
        match get_referred_symbols(context, *arg) {
            ReferredSymbols::Complete(rs) => {
                // We can't see how much the callee writes through the pointer,
                // so conservatively assume the full size of the largest symbol
                // the argument may refer to.
                let max_size = rs
                    .iter()
                    .filter_map(|sym| {
                        sym.get_type(context)
                            .get_pointee_type(context)
                            .map(|pt| pt.size(context).in_bytes())
                    })
                    .max()
                    .unwrap_or(0);
                kill_defined_symbol(
                    context,
                    *arg,
                    max_size,
                    available_copies,
                    src_to_copies,
                    dest_to_copies,
                );
            }
            ReferredSymbols::Incomplete(_) => {
                // The only safe thing we can do is to clear all information.
                available_copies.clear();
                src_to_copies.clear();
                dest_to_copies.clear();
                break;
            }
        }
    }
}
for inst in block.instruction_iter(context) {
match inst.get_instruction(context).unwrap() {
Instruction {
op: InstOp::Call(callee, args),
..
} => {
let (immutable_args, mutable_args): (Vec<_>, Vec<_>) =
args.iter().enumerate().partition_map(|(arg_idx, arg)| {
if callee.is_arg_immutable(context, arg_idx) {
Either::Left(*arg)
} else {
Either::Right(*arg)
}
});
// whichever args may get mutated, we kill them.
kill_escape_args(
context,
&mutable_args,
&mut available_copies,
&mut src_to_copies,
&mut dest_to_copies,
);
// args that aren't mutated can be treated as a "load" (for the purposes
// of optimization).
for arg in immutable_args {
process_load(
context,
escaped_symbols,
inst,
arg,
&dest_to_copies,
&mut replacements,
);
}
}
Instruction {
op: InstOp::AsmBlock(_, args),
..
} => {
let args = args.iter().filter_map(|arg| arg.initializer).collect();
kill_escape_args(
context,
&args,
&mut available_copies,
&mut src_to_copies,
&mut dest_to_copies,
);
}
Instruction {
op: InstOp::IntToPtr(_, _),
..
} => {
// The only safe thing we can do is to clear all information.
available_copies.clear();
src_to_copies.clear();
dest_to_copies.clear();
}
Instruction {
op: InstOp::Load(src_val_ptr),
..
} => {
process_load(
context,
escaped_symbols,
inst,
*src_val_ptr,
&dest_to_copies,
&mut replacements,
);
}
Instruction {
op: InstOp::MemCopyBytes { .. } | InstOp::MemCopyVal { .. },
..
} => {
let (dst_val_ptr, src_val_ptr, copy_len) =
deconstruct_memcpy(context, inst).expect("Expected copy instruction");
kill_defined_symbol(
context,
dst_val_ptr,
copy_len,
&mut available_copies,
&mut src_to_copies,
&mut dest_to_copies,
);
// If this memcpy itself can be optimized, we do just that, and not "gen" a new one.
if !process_load(
context,
escaped_symbols,
inst,
src_val_ptr,
&dest_to_copies,
&mut replacements,
) {
gen_new_copy(
context,
escaped_symbols,
inst,
dst_val_ptr,
src_val_ptr,
&mut available_copies,
&mut src_to_copies,
&mut dest_to_copies,
);
}
}
Instruction {
op:
InstOp::Store {
dst_val_ptr,
stored_val: _,
},
..
} => {
kill_defined_symbol(
context,
*dst_val_ptr,
memory_utils::pointee_size(context, *dst_val_ptr),
&mut available_copies,
&mut src_to_copies,
&mut dest_to_copies,
);
}
Instruction {
op:
InstOp::FuelVm(
FuelVmInstruction::WideBinaryOp { result, .. }
| FuelVmInstruction::WideUnaryOp { result, .. }
| FuelVmInstruction::WideModularOp { result, .. }
| FuelVmInstruction::StateLoadQuadWord {
load_val: result, ..
},
),
..
} => {
kill_defined_symbol(
context,
*result,
memory_utils::pointee_size(context, *result),
&mut available_copies,
&mut src_to_copies,
&mut dest_to_copies,
);
}
_ => (),
}
}
if replacements.is_empty() {
break;
} else {
modified = true;
}
// If we have any NewGep replacements, insert those new GEPs into the block.
// Since the new instructions need to be just before the value load that they're
// going to be used in, we copy all the instructions into a new vec
// and just replace the contents of the basic block.
let mut new_insts = vec![];
for inst in block.instruction_iter(context) {
if let Some(replacement) = replacements.remove(&inst) {
let (to_replace, replacement) = match replacement {
(to_replace, Replacement::OldGep(v)) => (to_replace, v),
(
to_replace,
Replacement::NewGep(ReplGep {
base,
elem_ptr_ty,
indices,
}),
) => {
let base = match base {
Symbol::Local(local) => {
let base = Value::new_instruction(
context,
block,
InstOp::GetLocal(local),
);
new_insts.push(base);
base
}
Symbol::Arg(block_arg) => {
block_arg.block.get_arg(context, block_arg.idx).unwrap()
}
};
let v = Value::new_instruction(
context,
block,
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | true |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/mem2reg.rs | sway-ir/src/optimize/mem2reg.rs | use indexmap::IndexMap;
/// Promote local memory to SSA registers.
/// This pass is essentially SSA construction. A good readable reference is:
/// https://www.cs.princeton.edu/~appel/modern/c/
/// We use block arguments instead of explicit PHI nodes. Conceptually,
/// they are both the same.
use rustc_hash::FxHashMap;
use std::collections::HashSet;
use sway_utils::mapped_stack::MappedStack;
use crate::{
AnalysisResults, Block, BranchToWithArgs, Constant, Context, DomFronts, DomTree, Function,
InstOp, Instruction, IrError, LocalVar, Pass, PassMutability, PostOrder, ScopedPass, Type,
Value, ValueDatum, DOMINATORS_NAME, DOM_FRONTS_NAME, POSTORDER_NAME,
};
pub const MEM2REG_NAME: &str = "mem2reg";
/// Build the descriptor for the `mem2reg` transform pass.
///
/// The pass depends on the post-order, dominator-tree and dominance-frontier
/// analyses having been run first.
pub fn create_mem2reg_pass() -> Pass {
    let deps = vec![POSTORDER_NAME, DOMINATORS_NAME, DOM_FRONTS_NAME];
    Pass {
        descr: "Promotion of memory to SSA registers",
        name: MEM2REG_NAME,
        deps,
        runner: ScopedPass::FunctionPass(PassMutability::Transform(promote_to_registers)),
    }
}
// Check if a value is a valid (for our optimization) local pointer.
//
// Only a direct `get_local` qualifies; on success the local's name is
// returned together with the `LocalVar` itself.
fn get_validate_local_var(
    context: &Context,
    function: &Function,
    val: &Value,
) -> Option<(String, LocalVar)> {
    let ValueDatum::Instruction(Instruction {
        op: InstOp::GetLocal(local_var),
        ..
    }) = context.values[val.0].value
    else {
        return None;
    };
    function
        .lookup_local_name(context, &local_var)
        .map(|name| (name.clone(), local_var))
}
// A type is promotable to an SSA register iff it is an atomic value fitting
// in a 64-bit word: unit, bool, a pointer, or an unsigned int of width <= 64.
fn is_promotable_type(context: &Context, ty: Type) -> bool {
    if ty.is_unit(context) || ty.is_bool(context) || ty.is_ptr(context) {
        return true;
    }
    ty.is_uint(context) && ty.get_uint_width(context).unwrap() <= 64
}
// Returns those locals that can be promoted to SSA registers.
fn filter_usable_locals(context: &mut Context, function: &Function) -> HashSet<String> {
    // The size of an SSA register is target specific. Here we're going to just stick with atomic
    // types which can fit in 64-bits.
    let mut locals: HashSet<String> = function
        .locals_iter(context)
        .filter_map(|(name, var)| {
            let ty = var.get_inner_type(context);
            is_promotable_type(context, ty).then_some(name.clone())
        })
        .collect();
    // Now drop any local whose address flows somewhere we cannot rewrite.
    for (_, inst) in function.instruction_iter(context) {
        match context.values[inst.0].value {
            // Loads are fine: the loaded value will simply replace the load.
            ValueDatum::Instruction(Instruction {
                op: InstOp::Load(_),
                ..
            }) => {}
            ValueDatum::Instruction(Instruction {
                op:
                    InstOp::Store {
                        dst_val_ptr: _,
                        stored_val,
                    },
                ..
            }) => {
                // Make sure that a local's address isn't stored.
                // E.g., in cases like `let r = &some_local;`.
                if let Some((local, _)) = get_validate_local_var(context, function, &stored_val) {
                    locals.remove(&local);
                }
            }
            _ => {
                // Make sure that no local escapes into instructions we don't understand.
                let operands = inst.get_instruction(context).unwrap().op.get_operands();
                for opd in operands {
                    if let Some((local, ..)) = get_validate_local_var(context, function, &opd) {
                        locals.remove(&local);
                    }
                }
            }
        }
    }
    locals
}
// For each block, compute the set of locals that are live-in.
//
// Standard backward dataflow over the CFG, iterated to a fixed point:
// a load of a tracked local is a use (makes it live), a store is a
// definition (kills liveness above it).
//
// TODO: Use rustc_index::bit_set::ChunkedBitSet by mapping local names to indices.
// This will allow more efficient set operations.
pub fn compute_livein(
    context: &mut Context,
    function: &Function,
    po: &PostOrder,
    locals: &HashSet<String>,
) -> FxHashMap<Block, HashSet<String>> {
    let mut result = FxHashMap::<Block, HashSet<String>>::default();
    for block in &po.po_to_block {
        result.insert(*block, HashSet::<String>::default());
    }
    let mut changed = true;
    while changed {
        changed = false;
        for block in &po.po_to_block {
            // we begin by unioning the liveins at successor blocks.
            let mut cur_live = HashSet::<String>::default();
            for BranchToWithArgs { block: succ, .. } in block.successors(context) {
                let succ_livein = &result[&succ];
                cur_live.extend(succ_livein.iter().cloned());
            }
            // Scan the instructions, in reverse.
            for inst in block.instruction_iter(context).rev() {
                match context.values[inst.0].value {
                    ValueDatum::Instruction(Instruction {
                        op: InstOp::Load(ptr),
                        ..
                    }) => {
                        // Use of a tracked local: it becomes live here.
                        let local_var = get_validate_local_var(context, function, &ptr);
                        match local_var {
                            Some((local, ..)) if locals.contains(&local) => {
                                cur_live.insert(local);
                            }
                            _ => {}
                        }
                    }
                    ValueDatum::Instruction(Instruction {
                        op: InstOp::Store { dst_val_ptr, .. },
                        ..
                    }) => {
                        // Definition of a tracked local: kills its liveness.
                        let local_var = get_validate_local_var(context, function, &dst_val_ptr);
                        match local_var {
                            Some((local, _)) if locals.contains(&local) => {
                                cur_live.remove(&local);
                            }
                            _ => (),
                        }
                    }
                    _ => (),
                }
            }
            if result[block] != cur_live {
                // Whatever's live now, is the live-in for the block.
                // NOTE(review): the stored set is extended rather than replaced;
                // this assumes live-in sets only grow across iterations — confirm.
                result.get_mut(block).unwrap().extend(cur_live);
                changed = true;
            }
        }
    }
    result
}
/// Promote loads of global constants to SSA registers.
/// We promote only non-mutable globals of copy types.
fn promote_globals(context: &mut Context, function: &Function) -> Result<bool, IrError> {
    // First pass: find every `load` of a `get_global` on a non-mutable,
    // promotable global, and remember its constant initializer.
    let mut load_to_const = FxHashMap::<Value, Constant>::default();
    for (_, inst) in function.instruction_iter(context) {
        let ValueDatum::Instruction(Instruction {
            op: InstOp::Load(ptr),
            ..
        }) = context.values[inst.0].value
        else {
            continue;
        };
        let ValueDatum::Instruction(Instruction {
            op: InstOp::GetGlobal(global_var),
            ..
        }) = context.values[ptr.0].value
        else {
            continue;
        };
        if global_var.is_mutable(context)
            || !is_promotable_type(context, global_var.get_inner_type(context))
        {
            continue;
        }
        let constant = *global_var
            .get_initializer(context)
            .expect("`global_var` is not mutable so it must be initialized");
        load_to_const.insert(inst, constant);
    }
    if load_to_const.is_empty() {
        return Ok(false);
    }
    // Second pass: materialize the constants and swap them in for the loads.
    let replacements: FxHashMap<_, _> = load_to_const
        .into_iter()
        .map(|(load, constant)| (load, Value::new_constant(context, constant)))
        .collect();
    function.replace_values(context, &replacements, None);
    Ok(true)
}
/// Promote memory values that are accessed via load/store to SSA registers.
///
/// Runs global-constant promotion first, then local promotion; reports
/// whether either of the two changed the function.
pub fn promote_to_registers(
    context: &mut Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    let globals_changed = promote_globals(context, &function)?;
    let locals_changed = promote_locals(context, analyses, function)?;
    Ok(globals_changed || locals_changed)
}
/// Promote locals to registers. We promote only locals of copy types,
/// whose every use is in a `get_local` without offsets, and the result of
/// such a `get_local` is used only in a load or a store.
///
/// Classic SSA construction in two phases: (1) insert PHIs (block arguments)
/// at the dominance frontiers of every store, (2) walk the dominator tree
/// renaming loads/stores to the reaching SSA value.
pub fn promote_locals(
    context: &mut Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    let safe_locals = filter_usable_locals(context, &function);
    if safe_locals.is_empty() {
        return Ok(false);
    }
    let po: &PostOrder = analyses.get_analysis_result(function);
    let dom_tree: &DomTree = analyses.get_analysis_result(function);
    let dom_fronts: &DomFronts = analyses.get_analysis_result(function);
    let liveins = compute_livein(context, &function, po, &safe_locals);
    // A list of the PHIs we insert in this transform.
    let mut new_phi_tracker = HashSet::<(String, Block)>::new();
    // A map from newly inserted block args to the Local that it's a PHI for.
    let mut worklist = Vec::<(String, Type, Block)>::new();
    let mut phi_to_local = FxHashMap::<Value, String>::default();
    // Insert PHIs for each definition (store) at its dominance frontiers.
    // Start by adding the existing definitions (stores) to a worklist,
    // in program order (reverse post order). This is for faster convergence (or maybe not).
    for (block, inst) in po
        .po_to_block
        .iter()
        .rev()
        .flat_map(|b| b.instruction_iter(context).map(|i| (*b, i)))
    {
        if let ValueDatum::Instruction(Instruction {
            op: InstOp::Store { dst_val_ptr, .. },
            ..
        }) = context.values[inst.0].value
        {
            match get_validate_local_var(context, &function, &dst_val_ptr) {
                Some((local, var)) if safe_locals.contains(&local) => {
                    worklist.push((local, var.get_inner_type(context), block));
                }
                _ => (),
            }
        }
    }
    // Transitively add PHIs, till nothing more to do.
    // A new PHI is itself a definition, so it may require further PHIs at its
    // own dominance frontier (iterated dominance frontier). PHIs are inserted
    // only where the local is actually live-in (pruned SSA).
    while let Some((local, ty, known_def)) = worklist.pop() {
        for df in dom_fronts[&known_def].iter() {
            if !new_phi_tracker.contains(&(local.clone(), *df)) && liveins[df].contains(&local) {
                // Insert PHI for this local at block df.
                let index = df.new_arg(context, ty);
                phi_to_local.insert(df.get_arg(context, index).unwrap(), local.clone());
                new_phi_tracker.insert((local.clone(), *df));
                // Add df to the worklist.
                worklist.push((local.clone(), ty, *df));
            }
        }
    }
    // We're just left with rewriting the loads and stores into SSA.
    // For efficiency, we first collect the rewrites
    // and then apply them all together in the next step.
    //
    // This is the renaming phase: a pre-order walk of the dominator tree,
    // maintaining for each local a stack of its currently reaching value.
    #[allow(clippy::too_many_arguments)]
    fn record_rewrites(
        context: &mut Context,
        function: &Function,
        dom_tree: &DomTree,
        node: Block,
        safe_locals: &HashSet<String>,
        phi_to_local: &FxHashMap<Value, String>,
        name_stack: &mut MappedStack<String, Value>,
        rewrites: &mut FxHashMap<Value, Value>,
        deletes: &mut Vec<(Block, Value)>,
    ) {
        // Whatever new definitions we find in this block, they must be popped
        // when we're done. So let's keep track of that locally as a count.
        let mut num_local_pushes = IndexMap::<String, u32>::new();
        // Start with relevant block args, they are new definitions.
        for arg in node.arg_iter(context) {
            if let Some(local) = phi_to_local.get(arg) {
                name_stack.push(local.clone(), *arg);
                num_local_pushes
                    .entry(local.clone())
                    .and_modify(|count| *count += 1)
                    .or_insert(1);
            }
        }
        for inst in node.instruction_iter(context) {
            match context.values[inst.0].value {
                ValueDatum::Instruction(Instruction {
                    op: InstOp::Load(ptr),
                    ..
                }) => {
                    let local_var = get_validate_local_var(context, function, &ptr);
                    match local_var {
                        Some((local, var)) if safe_locals.contains(&local) => {
                            // We should replace all uses of inst with new_stack[local].
                            let new_val = match name_stack.get(&local) {
                                Some(val) => *val,
                                None => {
                                    // Nothing on the stack, let's attempt to get the initializer
                                    let constant = *var
                                        .get_initializer(context)
                                        .expect("We're dealing with an uninitialized value");
                                    Value::new_constant(context, constant)
                                }
                            };
                            rewrites.insert(inst, new_val);
                            deletes.push((node, inst));
                        }
                        _ => (),
                    }
                }
                ValueDatum::Instruction(Instruction {
                    op:
                        InstOp::Store {
                            dst_val_ptr,
                            stored_val,
                        },
                    ..
                }) => {
                    let local_var = get_validate_local_var(context, function, &dst_val_ptr);
                    match local_var {
                        Some((local, _)) if safe_locals.contains(&local) => {
                            // Henceforth, everything that's dominated by this inst must use stored_val
                            // instead of loading from dst_val.
                            name_stack.push(local.clone(), stored_val);
                            num_local_pushes
                                .entry(local)
                                .and_modify(|count| *count += 1)
                                .or_insert(1);
                            deletes.push((node, inst));
                        }
                        _ => (),
                    }
                }
                _ => (),
            }
        }
        // Update arguments to successor blocks (i.e., PHI args).
        for BranchToWithArgs { block: succ, .. } in node.successors(context) {
            let args: Vec<_> = succ.arg_iter(context).copied().collect();
            // For every arg of succ, if it's in phi_to_local,
            // we pass, as arg, the top value of local
            for arg in args {
                if let Some(local) = phi_to_local.get(&arg) {
                    let ptr = function.get_local_var(context, local).unwrap();
                    let new_val = match name_stack.get(local) {
                        Some(val) => *val,
                        None => {
                            // Nothing on the stack, let's attempt to get the initializer
                            let constant = *ptr
                                .get_initializer(context)
                                .expect("We're dealing with an uninitialized value");
                            Value::new_constant(context, constant)
                        }
                    };
                    let params = node.get_succ_params_mut(context, &succ).unwrap();
                    params.push(new_val);
                }
            }
        }
        // Process dominator children.
        for child in dom_tree.children(node) {
            record_rewrites(
                context,
                function,
                dom_tree,
                child,
                safe_locals,
                phi_to_local,
                name_stack,
                rewrites,
                deletes,
            );
        }
        // Pop from the names stack.
        for (local, pushes) in num_local_pushes.iter() {
            for _ in 0..*pushes {
                name_stack.pop(local);
            }
        }
    }
    let mut name_stack = MappedStack::<String, Value>::default();
    let mut value_replacement = FxHashMap::<Value, Value>::default();
    let mut delete_insts = Vec::<(Block, Value)>::new();
    record_rewrites(
        context,
        &function,
        dom_tree,
        function.get_entry_block(context),
        &safe_locals,
        &phi_to_local,
        &mut name_stack,
        &mut value_replacement,
        &mut delete_insts,
    );
    // Apply the rewrites.
    function.replace_values(context, &value_replacement, None);
    // Delete the loads and stores.
    for (block, inst) in delete_insts {
        block.remove_instruction(context, inst);
    }
    Ok(true)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/sroa.rs | sway-ir/src/optimize/sroa.rs | //! Scalar Replacement of Aggregates
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
combine_indices, get_gep_referred_symbols, get_loaded_ptr_values, get_stored_ptr_values,
pointee_size, AnalysisResults, Constant, ConstantValue, Context, EscapedSymbols, Function,
InstOp, IrError, LocalVar, Pass, PassMutability, ScopedPass, Symbol, Type, Value,
ESCAPED_SYMBOLS_NAME,
};
pub const SROA_NAME: &str = "sroa";
/// Build the descriptor for the `sroa` (scalar replacement of aggregates)
/// transform pass. Requires the escaped-symbols analysis to have run first.
pub fn create_sroa_pass() -> Pass {
    let deps = vec![ESCAPED_SYMBOLS_NAME];
    Pass {
        descr: "Scalar replacement of aggregates",
        name: SROA_NAME,
        deps,
        runner: ScopedPass::FunctionPass(PassMutability::Transform(sroa)),
    }
}
// Split a local aggregate variable into its constituent scalars.
// Returns a map from the offset of each scalar field to the new local created for it.
fn split_aggregate(
    context: &mut Context,
    function: Function,
    local_aggr: LocalVar,
) -> FxHashMap<u32, LocalVar> {
    let ty = local_aggr
        .get_type(context)
        .get_pointee_type(context)
        .expect("Local not a pointer");
    assert!(ty.is_aggregate(context));
    let mut res = FxHashMap::default();
    // New locals are named `<aggregate name><byte offset>`.
    let aggr_base_name = function
        .lookup_local_name(context, &local_aggr)
        .cloned()
        .unwrap_or("".to_string());
    // Recursively walk `ty`, creating one new local per leaf (non-demotable)
    // field and recording it in `map` under the field's byte offset.
    fn split_type(
        context: &mut Context,
        function: Function,
        aggr_base_name: &String,
        map: &mut FxHashMap<u32, LocalVar>,
        ty: Type,
        initializer: Option<Constant>,
        base_off: &mut u32,
    ) {
        // Extract the `idx`-th element of an array/struct constant initializer.
        fn constant_index(context: &mut Context, c: &Constant, idx: usize) -> Constant {
            match &c.get_content(context).value {
                ConstantValue::Array(cs) | ConstantValue::Struct(cs) => Constant::unique(
                    context,
                    cs.get(idx)
                        .expect("Malformed initializer. Cannot index into sub-initializer")
                        .clone(),
                ),
                _ => panic!("Expected only array or struct const initializers"),
            }
        }
        if !super::target_fuel::is_demotable_type(context, &ty) {
            // Leaf: create a dedicated local for this scalar field and
            // advance the running offset by its size.
            let ty_size: u32 = ty.size(context).in_bytes().try_into().unwrap();
            let name = aggr_base_name.clone() + &base_off.to_string();
            let scalarised_local =
                function.new_unique_local_var(context, name, ty, initializer, false);
            map.insert(*base_off, scalarised_local);
            *base_off += ty_size;
        } else {
            // Aggregate: recurse into each member, threading the offset
            // (and the member's slice of the constant initializer) through.
            let mut i = 0;
            while let Some(member_ty) = ty.get_indexed_type(context, &[i]) {
                let initializer = initializer
                    .as_ref()
                    .map(|c| constant_index(context, c, i as usize));
                split_type(
                    context,
                    function,
                    aggr_base_name,
                    map,
                    member_ty,
                    initializer,
                    base_off,
                );
                // Offsets between struct members are rounded up to word
                // alignment; no rounding is applied between array elements.
                if ty.is_struct(context) {
                    *base_off = crate::size_bytes_round_up_to_word_alignment!(*base_off);
                }
                i += 1;
            }
        }
    }
    let mut base_off = 0;
    split_type(
        context,
        function,
        &aggr_base_name,
        &mut res,
        ty,
        local_aggr.get_initializer(context).cloned(),
        &mut base_off,
    );
    res
}
/// Promote aggregates to scalars, so that other optimizations
/// such as mem2reg can treat them as any other SSA value.
pub fn sroa(
context: &mut Context,
analyses: &AnalysisResults,
function: Function,
) -> Result<bool, IrError> {
let escaped_symbols: &EscapedSymbols = analyses.get_analysis_result(function);
let candidates = candidate_symbols(context, escaped_symbols, function);
if candidates.is_empty() {
return Ok(false);
}
// We now split each candidate into constituent scalar variables.
let offset_scalar_map: FxHashMap<Symbol, FxHashMap<u32, LocalVar>> = candidates
.iter()
.map(|sym| {
let Symbol::Local(local_aggr) = sym else {
panic!("Expected only local candidates")
};
(*sym, split_aggregate(context, function, *local_aggr))
})
.collect();
let mut scalar_replacements = FxHashMap::<Value, Value>::default();
for block in function.block_iter(context) {
let mut new_insts = Vec::new();
for inst in block.instruction_iter(context) {
if let InstOp::MemCopyVal {
dst_val_ptr,
src_val_ptr,
} = inst.get_instruction(context).unwrap().op
{
let src_syms = get_gep_referred_symbols(context, src_val_ptr);
let dst_syms = get_gep_referred_symbols(context, dst_val_ptr);
// If neither source nor dest needs rewriting, we skip.
let src_sym = src_syms
.iter()
.next()
.filter(|src_sym| candidates.contains(src_sym));
let dst_sym = dst_syms
.iter()
.next()
.filter(|dst_sym| candidates.contains(dst_sym));
if src_sym.is_none() && dst_sym.is_none() {
new_insts.push(inst);
continue;
}
struct ElmDetail {
offset: u32,
r#type: Type,
indices: Vec<u32>,
}
// compute the offsets at which each (nested) field in our pointee type is at.
fn calc_elm_details(
context: &Context,
details: &mut Vec<ElmDetail>,
ty: Type,
base_off: &mut u32,
base_index: &mut Vec<u32>,
) {
if !super::target_fuel::is_demotable_type(context, &ty) {
let ty_size: u32 = ty.size(context).in_bytes().try_into().unwrap();
details.push(ElmDetail {
offset: *base_off,
r#type: ty,
indices: base_index.clone(),
});
*base_off += ty_size;
} else {
assert!(ty.is_aggregate(context));
base_index.push(0);
let mut i = 0;
while let Some(member_ty) = ty.get_indexed_type(context, &[i]) {
calc_elm_details(context, details, member_ty, base_off, base_index);
i += 1;
*base_index.last_mut().unwrap() += 1;
if ty.is_struct(context) {
*base_off =
crate::size_bytes_round_up_to_word_alignment!(*base_off);
}
}
base_index.pop();
}
}
let mut local_base_offset = 0;
let mut local_base_index = vec![];
let mut elm_details = vec![];
calc_elm_details(
context,
&mut elm_details,
src_val_ptr
.get_type(context)
.unwrap()
.get_pointee_type(context)
.expect("Unable to determine pointee type of pointer"),
&mut local_base_offset,
&mut local_base_index,
);
// Handle the source pointer first.
let mut elm_local_map = FxHashMap::default();
if let Some(src_sym) = src_sym {
// The source symbol is a candidate. So it has been split into scalars.
// Load each of these into a SSA variable.
let base_offset = combine_indices(context, src_val_ptr)
.and_then(|indices| {
src_sym
.get_type(context)
.get_pointee_type(context)
.and_then(|pointee_ty| {
pointee_ty.get_value_indexed_offset(context, &indices)
})
})
.expect("Source of memcpy was incorrectly identified as a candidate.")
as u32;
for detail in elm_details.iter() {
let elm_offset = detail.offset;
let actual_offset = elm_offset + base_offset;
let remapped_var = offset_scalar_map
.get(src_sym)
.unwrap()
.get(&actual_offset)
.unwrap();
let scalarized_local =
Value::new_instruction(context, block, InstOp::GetLocal(*remapped_var));
let load =
Value::new_instruction(context, block, InstOp::Load(scalarized_local));
elm_local_map.insert(elm_offset, load);
new_insts.push(scalarized_local);
new_insts.push(load);
}
} else {
// The source symbol is not a candidate. So it won't be split into scalars.
// We must use GEPs to load each individual element into an SSA variable.
for ElmDetail {
offset,
r#type,
indices,
} in &elm_details
{
let elm_addr = if indices.is_empty() {
// We're looking at a pointer to a scalar, so no GEP needed.
src_val_ptr
} else {
let elm_index_values = indices
.iter()
.map(|&index| Value::new_u64_constant(context, index.into()))
.collect();
let elem_ptr_ty = Type::new_typed_pointer(context, *r#type);
let gep = Value::new_instruction(
context,
block,
InstOp::GetElemPtr {
base: src_val_ptr,
elem_ptr_ty,
indices: elm_index_values,
},
);
new_insts.push(gep);
gep
};
let load = Value::new_instruction(context, block, InstOp::Load(elm_addr));
elm_local_map.insert(*offset, load);
new_insts.push(load);
}
}
if let Some(dst_sym) = dst_sym {
// The dst symbol is a candidate. So it has been split into scalars.
// Store to each of these from the SSA variable we created above.
let base_offset = combine_indices(context, dst_val_ptr)
.and_then(|indices| {
dst_sym
.get_type(context)
.get_pointee_type(context)
.and_then(|pointee_ty| {
pointee_ty.get_value_indexed_offset(context, &indices)
})
})
.expect("Source of memcpy was incorrectly identified as a candidate.")
as u32;
for detail in elm_details.iter() {
let elm_offset = detail.offset;
let actual_offset = elm_offset + base_offset;
let remapped_var = offset_scalar_map
.get(dst_sym)
.unwrap()
.get(&actual_offset)
.unwrap();
let scalarized_local =
Value::new_instruction(context, block, InstOp::GetLocal(*remapped_var));
let loaded_source = elm_local_map
.get(&elm_offset)
.expect("memcpy source not loaded");
let store = Value::new_instruction(
context,
block,
InstOp::Store {
dst_val_ptr: scalarized_local,
stored_val: *loaded_source,
},
);
new_insts.push(scalarized_local);
new_insts.push(store);
}
} else {
// The dst symbol is not a candidate. So it won't be split into scalars.
// We must use GEPs to store to each individual element from its SSA variable.
for ElmDetail {
offset,
r#type,
indices,
} in elm_details
{
let elm_addr = if indices.is_empty() {
// We're looking at a pointer to a scalar, so no GEP needed.
dst_val_ptr
} else {
let elm_index_values = indices
.iter()
.map(|&index| Value::new_u64_constant(context, index.into()))
.collect();
let elem_ptr_ty = Type::new_typed_pointer(context, r#type);
let gep = Value::new_instruction(
context,
block,
InstOp::GetElemPtr {
base: dst_val_ptr,
elem_ptr_ty,
indices: elm_index_values,
},
);
new_insts.push(gep);
gep
};
let loaded_source = elm_local_map
.get(&offset)
.expect("memcpy source not loaded");
let store = Value::new_instruction(
context,
block,
InstOp::Store {
dst_val_ptr: elm_addr,
stored_val: *loaded_source,
},
);
new_insts.push(store);
}
}
// We've handled the memcpy. it's been replaced with other instructions.
continue;
}
let loaded_pointers = get_loaded_ptr_values(context, inst);
let stored_pointers = get_stored_ptr_values(context, inst);
for ptr in loaded_pointers.iter().chain(stored_pointers.iter()) {
let syms = get_gep_referred_symbols(context, *ptr);
if let Some(sym) = syms
.iter()
.next()
.filter(|sym| syms.len() == 1 && candidates.contains(sym))
{
let Some(offset) = combine_indices(context, *ptr).and_then(|indices| {
sym.get_type(context)
.get_pointee_type(context)
.and_then(|pointee_ty| {
pointee_ty.get_value_indexed_offset(context, &indices)
})
}) else {
continue;
};
let remapped_var = offset_scalar_map
.get(sym)
.unwrap()
.get(&(offset as u32))
.unwrap();
let scalarized_local =
Value::new_instruction(context, block, InstOp::GetLocal(*remapped_var));
new_insts.push(scalarized_local);
scalar_replacements.insert(*ptr, scalarized_local);
}
}
new_insts.push(inst);
}
block.take_body(context, new_insts);
}
function.replace_values(context, &scalar_replacements, None);
Ok(true)
}
// Is the aggregate type something that we can handle?
fn is_processable_aggregate(context: &Context, ty: Type) -> bool {
fn check_sub_types(context: &Context, ty: Type) -> bool {
match ty.get_content(context) {
crate::TypeContent::Unit => true,
crate::TypeContent::Bool => true,
crate::TypeContent::Uint(width) => *width <= 64,
crate::TypeContent::B256 => false,
crate::TypeContent::Array(elm_ty, _) => check_sub_types(context, *elm_ty),
crate::TypeContent::Union(_) => false,
crate::TypeContent::Struct(fields) => {
fields.iter().all(|ty| check_sub_types(context, *ty))
}
crate::TypeContent::Slice => false,
crate::TypeContent::TypedSlice(..) => false,
crate::TypeContent::Pointer => true,
crate::TypeContent::TypedPointer(_) => true,
crate::TypeContent::StringSlice => false,
crate::TypeContent::StringArray(_) => false,
crate::TypeContent::Never => false,
}
}
ty.is_aggregate(context) && check_sub_types(context, ty)
}
// Filter out candidates that may not be profitable to scalarise.
// This can be tuned in detail in the future when we have real benchmarks.
fn profitability(context: &Context, function: Function, candidates: &mut FxHashSet<Symbol>) {
    // If a candidate is sufficiently big and there's at least one memcpy
    // accessing a big part of it, it may not be wise to scalarise it.
    for (_, inst) in function.instruction_iter(context) {
        let InstOp::MemCopyVal {
            dst_val_ptr,
            src_val_ptr,
        } = inst.get_instruction(context).unwrap().op
        else {
            continue;
        };
        if pointee_size(context, dst_val_ptr) <= 200 {
            continue;
        }
        // Both endpoints of a large copy are dropped from the candidate set.
        let dst_syms = get_gep_referred_symbols(context, dst_val_ptr);
        let src_syms = get_gep_referred_symbols(context, src_val_ptr);
        for sym in dst_syms.union(&src_syms) {
            candidates.remove(sym);
        }
    }
}
/// Only the following aggregates can be scalarised:
/// 1. Does not escape.
/// 2. Is always accessed via a scalar (register sized) field.
///    i.e., The entire aggregate or a sub-aggregate isn't loaded / stored.
///    (with an exception of `mem_copy_val` which we can handle).
/// 3. Never accessed via non-const indexing.
/// 4. Not aliased via a pointer that may point to more than one symbol.
fn candidate_symbols(
    context: &Context,
    escaped_symbols: &EscapedSymbols,
    function: Function,
) -> FxHashSet<Symbol> {
    // If escape analysis was incomplete we cannot trust it; conservatively
    // scalarise nothing.
    let escaped_symbols = match escaped_symbols {
        EscapedSymbols::Complete(syms) => syms,
        EscapedSymbols::Incomplete(_) => return FxHashSet::<_>::default(),
    };
    // Start with every non-escaping local whose pointee is an aggregate we
    // know how to split into scalars.
    let mut candidates: FxHashSet<Symbol> = function
        .locals_iter(context)
        .filter_map(|(_, l)| {
            let sym = Symbol::Local(*l);
            (!escaped_symbols.contains(&sym)
                && l.get_type(context)
                    .get_pointee_type(context)
                    .is_some_and(|pointee_ty| is_processable_aggregate(context, pointee_ty)))
            .then_some(sym)
        })
        .collect();
    // We walk the function to remove from `candidates`, any local that is
    // 1. accessed by a bigger-than-register sized load / store.
    //    (we make an exception for load / store in `mem_copy_val` as that can be handled).
    // 2. OR accessed via a non-const indexing.
    // 3. OR aliased to a pointer that may point to more than one symbol.
    for (_, inst) in function.instruction_iter(context) {
        let loaded_pointers = get_loaded_ptr_values(context, inst);
        let stored_pointers = get_stored_ptr_values(context, inst);
        let inst = inst.get_instruction(context).unwrap();
        for ptr in loaded_pointers.iter().chain(stored_pointers.iter()) {
            let syms = get_gep_referred_symbols(context, *ptr);
            // Rule 4: an access that may touch several symbols disqualifies
            // all of them.
            if syms.len() != 1 {
                for sym in &syms {
                    candidates.remove(sym);
                }
                continue;
            }
            // Rule 3 (any non-constant index) or rule 2 (a demotable, i.e.
            // bigger-than-register, pointee accessed outside `mem_copy_val`).
            if combine_indices(context, *ptr)
                .is_some_and(|indices| indices.iter().any(|idx| !idx.is_constant(context)))
                || ptr.match_ptr_type(context).is_some_and(|pointee_ty| {
                    super::target_fuel::is_demotable_type(context, &pointee_ty)
                        && !matches!(inst.op, InstOp::MemCopyVal { .. })
                })
            {
                candidates.remove(syms.iter().next().unwrap());
            }
        }
    }
    // Finally drop candidates that are unlikely to be worth splitting.
    profitability(context, function, &mut candidates);
    candidates
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/conditional_constprop.rs | sway-ir/src/optimize/conditional_constprop.rs | //! When a value is guaranteed to have a constant value in a region of the CFG,
//! this optimization replaces uses of that value with the constant in that region.
use rustc_hash::FxHashMap;
use crate::{
AnalysisResults, Context, DomTree, Function, InstOp, Instruction, IrError, Pass,
PassMutability, Predicate, ScopedPass, DOMINATORS_NAME,
};
pub const CCP_NAME: &str = "ccp";

/// Create the pass descriptor for conditional constant propagation.
///
/// Depends on the dominator tree analysis ([DOMINATORS_NAME]), which the
/// transform uses to find the region dominated by a branch target.
pub fn create_ccp_pass() -> Pass {
    Pass {
        name: CCP_NAME,
        // Corrected typo: was "Conditional constant proparagion".
        descr: "Conditional constant propagation",
        deps: vec![DOMINATORS_NAME],
        runner: ScopedPass::FunctionPass(PassMutability::Transform(ccp)),
    }
}
/// Conditional constant propagation: when an `eq` compare against a constant
/// guards a branch, every use of the non-constant operand inside the region
/// dominated by the taken (`true`) block can be replaced by the constant.
pub fn ccp(
    context: &mut Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    let dom_tree: &DomTree = analyses.get_analysis_result(function);
    // In the set of blocks dominated by `key`, replace all uses of `val.0` with `val.1`.
    let mut dom_region_replacements = FxHashMap::default();
    for block in function.block_iter(context) {
        let term = block
            .get_terminator(context)
            .expect("Malformed block: no terminator");
        if let InstOp::ConditionalBranch {
            cond_value,
            true_block,
            false_block,
        } = &term.op
        {
            if let Some(Instruction {
                parent: _,
                op: InstOp::Cmp(pred, v1, v2),
            }) = cond_value.get_instruction(context)
            {
                // NOTE: `^` binds tighter than `&&`, so the inner parenthesised
                // expression reads as (exactly one operand is constant) AND
                // (the true block has a single predecessor). The single
                // predecessor requirement guarantees that dominance by the
                // true block implies the equality actually held.
                if true_block.block != false_block.block
                    && matches!(pred, Predicate::Equal)
                    && (v1.is_constant(context) ^ v2.is_constant(context)
                        && true_block.block.num_predecessors(context) == 1)
                {
                    // Map the non-constant operand to the constant one.
                    if v1.is_constant(context) {
                        dom_region_replacements.insert(true_block.block, (*v2, *v1));
                    } else {
                        dom_region_replacements.insert(true_block.block, (*v1, *v2));
                    }
                }
            }
        }
    }
    // lets walk the dominator tree from the root.
    let root_block = function.get_entry_block(context);
    if dom_region_replacements.is_empty() {
        return Ok(false);
    }
    // Iterative DFS over the dominator tree. Each stack entry remembers which
    // child to visit next when control returns to that block. `replacements`
    // accumulates the substitutions active along the current tree path.
    let mut stack = vec![(root_block, 0)];
    let mut replacements = FxHashMap::default();
    while let Some((block, next_child)) = stack.last().cloned() {
        let cur_replacement_opt = dom_region_replacements.get(&block);
        if next_child == 0 {
            // Preorder processing: activate this block's replacement (if any)
            // for the whole dominated subtree.
            if let Some(cur_replacement) = cur_replacement_opt {
                replacements.insert(cur_replacement.0, cur_replacement.1);
            }
            // walk the current block.
            block.replace_values(context, &replacements);
        }
        // walk children.
        if let Some(child) = dom_tree.child(block, next_child) {
            // When we arrive back at "block" next time, we should process the next child.
            stack.last_mut().unwrap().1 = next_child + 1;
            // Go on to process the child.
            stack.push((child, 0));
        } else {
            // No children left to process. Start postorder processing:
            // deactivate this block's replacement before leaving its subtree.
            if let Some(cur_replacement) = cur_replacement_opt {
                replacements.remove(&cur_replacement.0);
            }
            stack.pop();
        }
    }
    Ok(true)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/arg_mutability_tagger.rs | sway-ir/src/optimize/arg_mutability_tagger.rs | //! Tags function pointer arguments as immutable based on their usage.
use crate::{
build_call_graph, callee_first_order, AnalysisResults, BinaryOpKind, Context,
FuelVmInstruction, Function, InstOp, IrError, Module, Pass, PassMutability, ScopedPass, Value,
ValueDatum,
};
use rustc_hash::{FxHashMap, FxHashSet};
pub const ARG_POINTEE_MUTABILITY_TAGGER_NAME: &str = "arg_pointee_mutability_tagger";

/// Build the descriptor for the argument-pointee mutability tagging pass.
pub fn create_arg_pointee_mutability_tagger_pass() -> Pass {
    let runner = ScopedPass::ModulePass(PassMutability::Transform(arg_pointee_mutability_tagger));
    Pass {
        name: ARG_POINTEE_MUTABILITY_TAGGER_NAME,
        descr: "Tags function pointer arguments as immutable based on their usage",
        deps: Vec::new(),
        runner,
    }
}
/// Pass runner: analyze the whole module and mark every pointer argument
/// whose pointee is never directly mutated as immutable.
fn arg_pointee_mutability_tagger(
    context: &mut Context,
    _analysis_results: &AnalysisResults,
    module: Module,
) -> Result<bool, IrError> {
    let fn_mutability: ArgPointeeMutabilityResult =
        compute_arg_pointee_mutability(context, module)?;

    // Phase 1 (read-only borrow of the context): collect every argument that
    // the analysis proved immutable.
    let mut immutable_args = Vec::new();
    for f in module.function_iter(context) {
        assert!(fn_mutability.is_analyzed(f));
        for (arg_idx, (_arg_name, arg_val)) in f.args_iter(context).enumerate() {
            if matches!(
                fn_mutability.get_mutability(f, arg_idx),
                ArgPointeeMutability::Immutable
            ) {
                immutable_args.push(*arg_val);
            }
        }
    }

    // Phase 2 (mutating borrow): tag the collected arguments.
    let modified = !immutable_args.is_empty();
    for arg_val in immutable_args {
        arg_val
            .get_argument_mut(context)
            .expect("arg is an argument")
            .is_immutable = true;
    }
    Ok(modified)
}
#[derive(Debug, Clone, PartialEq)]
/// The mutability of a pointer function argument's pointee.
pub enum ArgPointeeMutability {
    /// The function never directly writes through this argument pointer.
    Immutable,
    /// The function may directly write through this argument pointer
    /// (or the analysis had to conservatively assume so).
    Mutable,
    /// The argument is not a pointer, so pointee mutability does not apply.
    NotAPointer,
}

/// Result of the arg pointee mutability analysis, for the arguments of each function.
/// Maps each analyzed function to a per-argument mutability vector
/// (indexed by argument position).
#[derive(Default)]
pub struct ArgPointeeMutabilityResult(FxHashMap<Function, Vec<ArgPointeeMutability>>);

impl ArgPointeeMutabilityResult {
    /// Get the mutability of the pointee for a function argument.
    /// Panics on invalid function or argument index.
    pub fn get_mutability(&self, function: Function, arg_index: usize) -> ArgPointeeMutability {
        self.0.get(&function).unwrap()[arg_index].clone()
    }

    /// Does function have a result?
    pub fn is_analyzed(&self, function: Function) -> bool {
        self.0.contains_key(&function)
    }
}
/// For every function argument that is a pointer, determine if that function
/// may directly mutate the corresponding pointee.
/// The word "directly" is important here, as it does not consider
/// indirect mutations through contained pointers or references.
pub fn compute_arg_pointee_mutability(
    context: &Context,
    module: Module,
) -> Result<ArgPointeeMutabilityResult, IrError> {
    // Analyze callees before their callers so call sites can consult the
    // already-computed results of the functions they invoke.
    let call_graph = build_call_graph(context, &context.modules.get(module.0).unwrap().functions);
    let order = callee_first_order(&call_graph);
    let mut res = ArgPointeeMutabilityResult::default();
    for function in order.iter() {
        analyse_fn(context, *function, &mut res)?;
    }
    Ok(res)
}
// For every definition, what / where are its uses?
fn compute_def_use_chains(ctx: &Context, function: Function) -> FxHashMap<Value, FxHashSet<Value>> {
    let mut def_use: FxHashMap<Value, FxHashSet<Value>> = FxHashMap::default();
    for block in function.block_iter(ctx) {
        // A formal block argument "uses" every actual value passed for it
        // by each predecessor.
        for pred in block.pred_iter(ctx) {
            for formal_arg in block.arg_iter(ctx) {
                let actual_arg = formal_arg
                    .get_argument(ctx)
                    .unwrap()
                    .get_val_coming_from(ctx, pred)
                    .unwrap();
                def_use.entry(actual_arg).or_default().insert(*formal_arg);
            }
        }
        // An instruction "uses" each of its operands.
        for inst in block.instruction_iter(ctx) {
            let op = &inst.get_instruction(ctx).unwrap().op;
            for operand in op.get_operands() {
                def_use.entry(operand).or_default().insert(inst);
            }
        }
    }
    def_use
}
/// Compute pointee mutability for every argument of `function`, recording the
/// result in `res`. For each pointer argument, a worklist walk over its uses
/// (and the uses of values aliasing it) looks for any instruction that may
/// write through the pointer; callees are expected to have been analyzed
/// already (callee-first order).
fn analyse_fn(
    ctx: &Context,
    function: Function,
    res: &mut ArgPointeeMutabilityResult,
) -> Result<(), IrError> {
    assert!(
        !res.is_analyzed(function),
        "Function {} already analyzed",
        function.get_name(ctx)
    );
    let mut has_atleast_one_pointer_arg = false;
    let mut arg_mutabilities = function
        .args_iter(ctx)
        .map(|(_arg_name, arg)| {
            if arg.get_type(ctx).is_some_and(|t| t.is_ptr(ctx)) {
                has_atleast_one_pointer_arg = true;
                // Assume that pointer arguments are not mutable by default;
                // the walk below downgrades them to Mutable on evidence.
                ArgPointeeMutability::Immutable
            } else {
                ArgPointeeMutability::NotAPointer
            }
        })
        .collect::<Vec<_>>();
    if !has_atleast_one_pointer_arg {
        // If there are no pointer arguments, we can skip further analysis.
        res.0.insert(function, arg_mutabilities);
        return Ok(());
    }
    let def_use = compute_def_use_chains(ctx, function);
    'analyse_next_arg: for (arg_idx, (_arg_name, arg)) in function.args_iter(ctx).enumerate() {
        if matches!(
            arg_mutabilities[arg_idx],
            ArgPointeeMutability::NotAPointer | ArgPointeeMutability::Mutable
        ) {
            continue;
        }
        // Known aliases of this argument. Also serves as a visited set.
        let mut aliases: FxHashSet<Value> = FxHashSet::default();
        let mut in_worklist = FxHashSet::default();
        let mut worklist = vec![];
        // Start with the argument value itself.
        in_worklist.insert(*arg);
        worklist.push(*arg);
        while let Some(value) = worklist.pop() {
            in_worklist.remove(&value);
            if !aliases.insert(value) {
                // If we already visited this value, skip it.
                continue;
            }
            match &ctx.values.get(value.0).unwrap().value {
                ValueDatum::Instruction(inst) => match &inst.op {
                    InstOp::ConditionalBranch { .. } | InstOp::Branch(_) => {
                        // Branch instructions do not mutate anything.
                        // They do pass arguments to the next block,
                        // but that is captured by that argument itself being
                        // considered a use.
                    }
                    InstOp::Cmp(_, _, _) | InstOp::Ret(_, _) => (),
                    InstOp::UnaryOp { .. }
                    | InstOp::BitCast(_, _)
                    | InstOp::GetLocal(_)
                    | InstOp::GetGlobal(_)
                    | InstOp::GetConfig(_, _)
                    | InstOp::GetStorageKey(_)
                    | InstOp::IntToPtr(_, _)
                    | InstOp::Alloc { .. }
                    | InstOp::Nop => {
                        panic!("Pointers shouldn't be used in these instructions");
                    }
                    InstOp::BinaryOp { op, .. } => {
                        match op {
                            BinaryOpKind::Add | BinaryOpKind::Sub => {
                                // The result of a pointer add or sub is an alias to the pointer.
                                // Add uses of this instruction to worklist.
                                def_use
                                    .get(&value)
                                    .cloned()
                                    .unwrap_or_default()
                                    .iter()
                                    .for_each(|r#use| {
                                        in_worklist.insert(*r#use);
                                        worklist.push(*r#use);
                                    });
                            }
                            BinaryOpKind::Mul
                            | BinaryOpKind::Div
                            | BinaryOpKind::And
                            | BinaryOpKind::Or
                            | BinaryOpKind::Xor
                            | BinaryOpKind::Mod
                            | BinaryOpKind::Rsh
                            | BinaryOpKind::Lsh => {
                                panic!("Pointers shouldn't be used in these operations");
                            }
                        }
                    }
                    InstOp::PtrToInt(..)
                    | InstOp::ContractCall { .. }
                    | InstOp::AsmBlock(..)
                    | InstOp::Store { .. } => {
                        // It's a store, or we can't trace this anymore. Assume the worst.
                        *arg_mutabilities.get_mut(arg_idx).unwrap() = ArgPointeeMutability::Mutable;
                        continue 'analyse_next_arg;
                    }
                    InstOp::CastPtr(..) | InstOp::GetElemPtr { .. } => {
                        // The result is now an alias of the argument. Process it.
                        def_use
                            .get(&value)
                            .cloned()
                            .unwrap_or_default()
                            .iter()
                            .for_each(|r#use| {
                                in_worklist.insert(*r#use);
                                worklist.push(*r#use);
                            });
                    }
                    InstOp::Load(_) => {
                        // Since we don't worry about pointers that are indirectly mutated,
                        // (i.e., inside the loaded value) we're done here.
                    }
                    InstOp::MemClearVal { dst_val_ptr }
                    | InstOp::MemCopyBytes { dst_val_ptr, .. }
                    | InstOp::MemCopyVal { dst_val_ptr, .. } => {
                        // If the destination is an alias of the argument pointer,
                        // then the argument is being mutated. (We could be here
                        // because the source pointer is a use of the argument pointer,
                        // but that doesn't indicate mutability).
                        if in_worklist.contains(dst_val_ptr) || aliases.contains(dst_val_ptr) {
                            // If the destination pointer is the same as the argument pointer,
                            // we can assume that the pointee is mutable.
                            *arg_mutabilities.get_mut(arg_idx).unwrap() =
                                ArgPointeeMutability::Mutable;
                            continue 'analyse_next_arg;
                        }
                    }
                    InstOp::Call(callee, actual_params) => {
                        let Some(callee_mutability) = res.0.get(callee) else {
                            // Callee not analyzed yet (e.g. recursion/cycle in
                            // the call graph): assume the worst.
                            *arg_mutabilities.get_mut(arg_idx).unwrap() =
                                ArgPointeeMutability::Mutable;
                            continue 'analyse_next_arg;
                        };
                        for (caller_param_idx, caller_param) in actual_params.iter().enumerate() {
                            if callee_mutability[caller_param_idx] == ArgPointeeMutability::Mutable
                            {
                                // The callee mutates the parameter at caller_param_idx
                                // If what we're passing at that position is an alias of our argument,
                                // then we mark that our argument is mutable.
                                if in_worklist.contains(caller_param)
                                    || aliases.contains(caller_param)
                                {
                                    *arg_mutabilities.get_mut(arg_idx).unwrap() =
                                        ArgPointeeMutability::Mutable;
                                }
                            }
                        }
                    }
                    InstOp::FuelVm(vmop) => match vmop {
                        FuelVmInstruction::Gtf { .. }
                        | FuelVmInstruction::Log { .. }
                        | FuelVmInstruction::ReadRegister(_)
                        | FuelVmInstruction::Revert(_)
                        | FuelVmInstruction::JmpMem
                        | FuelVmInstruction::Smo { .. }
                        | FuelVmInstruction::StateClear { .. } => {}
                        FuelVmInstruction::StateLoadQuadWord { load_val, .. } => {
                            // If the loaded value is an alias of the argument pointer,
                            // then the argument is being mutated.
                            if in_worklist.contains(load_val) || aliases.contains(load_val) {
                                *arg_mutabilities.get_mut(arg_idx).unwrap() =
                                    ArgPointeeMutability::Mutable;
                                continue 'analyse_next_arg;
                            }
                        }
                        FuelVmInstruction::StateLoadWord(_)
                        | FuelVmInstruction::StateStoreWord { .. } => {}
                        FuelVmInstruction::StateStoreQuadWord { .. } => {}
                        FuelVmInstruction::WideUnaryOp { result, .. }
                        | FuelVmInstruction::WideBinaryOp { result, .. }
                        | FuelVmInstruction::WideModularOp { result, .. } => {
                            // If the result is an alias of the argument pointer,
                            // then the argument is being mutated.
                            if in_worklist.contains(result) || aliases.contains(result) {
                                *arg_mutabilities.get_mut(arg_idx).unwrap() =
                                    ArgPointeeMutability::Mutable;
                                continue 'analyse_next_arg;
                            }
                        }
                        FuelVmInstruction::WideCmpOp { .. } => {}
                        FuelVmInstruction::Retd { .. } => {}
                    },
                },
                ValueDatum::Argument(_) => {
                    // Add all users of this argument to the worklist.
                    def_use
                        .get(&value)
                        .cloned()
                        .unwrap_or_default()
                        .iter()
                        .for_each(|r#use| {
                            in_worklist.insert(*r#use);
                            worklist.push(*r#use);
                        });
                }
                ValueDatum::Constant(_) => panic!("Constants cannot be users"),
            }
        }
    }
    res.0.insert(function, arg_mutabilities);
    Ok(())
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/constants.rs | sway-ir/src/optimize/constants.rs | //! Optimization passes for manipulating constant values.
use crate::{
constant::{ConstantContent, ConstantValue},
context::Context,
error::IrError,
function::Function,
instruction::InstOp,
value::ValueDatum,
AnalysisResults, BranchToWithArgs, Constant, Instruction, Pass, PassMutability, Predicate,
ScopedPass,
};
use rustc_hash::FxHashMap;
pub const CONST_FOLDING_NAME: &str = "const-folding";

/// Build the descriptor for the constant folding pass.
pub fn create_const_folding_pass() -> Pass {
    let runner = ScopedPass::FunctionPass(PassMutability::Transform(fold_constants));
    Pass {
        name: CONST_FOLDING_NAME,
        descr: "Constant folding",
        deps: Vec::new(),
        runner,
    }
}
/// Find constant expressions which can be reduced to fewer operations.
pub fn fold_constants(
    context: &mut Context,
    _: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    let mut modified = false;
    // Try the folders in order. `||` short-circuits, so as soon as one of
    // them rewrites something we restart from the first folder, exactly
    // mirroring a chain of `if ... { continue; }` checks.
    loop {
        let changed = combine_cmp(context, &function)
            || combine_cbr(context, &function)?
            || combine_binary_op(context, &function)
            || remove_useless_binary_op(context, &function)
            || combine_unary_op(context, &function);
        // Other folders can be chained here with `||`.
        if !changed {
            break;
        }
        modified = true;
    }
    Ok(modified)
}
/// Fold a conditional branch on a constant boolean into an unconditional
/// branch to the taken destination, fixing up predecessor lists of both
/// successors. Folds at most one branch per call; returns `Ok(true)` if a
/// fold happened and an error if the condition constant is not a bool.
fn combine_cbr(context: &mut Context, function: &Function) -> Result<bool, IrError> {
    let candidate = function
        .instruction_iter(context)
        .find_map(
            |(in_block, inst_val)| match &context.values[inst_val.0].value {
                ValueDatum::Instruction(Instruction {
                    op:
                        InstOp::ConditionalBranch {
                            cond_value,
                            true_block,
                            false_block,
                        },
                    ..
                }) if cond_value.is_constant(context) => {
                    match &cond_value
                        .get_constant(context)
                        .unwrap()
                        .get_content(context)
                        .value
                    {
                        // Tuple layout: (inst, block, taken dest, dead dest).
                        ConstantValue::Bool(true) => Some(Ok((
                            inst_val,
                            in_block,
                            true_block.clone(),
                            false_block.clone(),
                        ))),
                        ConstantValue::Bool(false) => Some(Ok((
                            inst_val,
                            in_block,
                            false_block.clone(),
                            true_block.clone(),
                        ))),
                        _ => Some(Err(IrError::VerifyConditionExprNotABool)),
                    }
                }
                _ => None,
            },
        )
        .transpose()?;
    candidate.map_or(
        Ok(false),
        |(
            cbr,
            from_block,
            dest,
            BranchToWithArgs {
                block: no_more_dest,
                ..
            },
        )| {
            // `no_more_dest` will no longer have from_block as a predecessor.
            no_more_dest.remove_pred(context, &from_block);
            // Although our cbr already branched to `dest`, in case
            // `no_more_dest` and `dest` are the same, we'll need to re-add
            // `from_block` as a predecessor for `dest`.
            dest.block.add_pred(context, &from_block);
            cbr.replace(
                context,
                ValueDatum::Instruction(Instruction {
                    op: InstOp::Branch(dest),
                    parent: cbr.get_instruction(context).unwrap().parent,
                }),
            );
            Ok(true)
        },
    )
}
/// Fold a `cmp` instruction whose operands are both constants into a
/// constant boolean, removing the instruction. Folds at most one compare
/// per call; returns `true` if a fold happened.
fn combine_cmp(context: &mut Context, function: &Function) -> bool {
    let candidate = function
        .instruction_iter(context)
        .find_map(
            |(block, inst_val)| match &context.values[inst_val.0].value {
                ValueDatum::Instruction(Instruction {
                    op: InstOp::Cmp(pred, val1, val2),
                    ..
                }) if val1.is_constant(context) && val2.is_constant(context) => {
                    let val1 = val1.get_constant(context).unwrap();
                    let val2 = val2.get_constant(context).unwrap();
                    use ConstantValue::*;
                    match pred {
                        // Unique'd constants compare directly.
                        Predicate::Equal => Some((inst_val, block, val1 == val2)),
                        Predicate::GreaterThan => {
                            let r = match (
                                &val1.get_content(context).value,
                                &val2.get_content(context).value,
                            ) {
                                (Uint(val1), Uint(val2)) => val1 > val2,
                                (U256(val1), U256(val2)) => val1 > val2,
                                (B256(val1), B256(val2)) => val1 > val2,
                                _ => {
                                    unreachable!(
                                        "Type checker allowed non integer value for GreaterThan"
                                    )
                                }
                            };
                            Some((inst_val, block, r))
                        }
                        Predicate::LessThan => {
                            let r = match (
                                &val1.get_content(context).value,
                                &val2.get_content(context).value,
                            ) {
                                (Uint(val1), Uint(val2)) => val1 < val2,
                                (U256(val1), U256(val2)) => val1 < val2,
                                (B256(val1), B256(val2)) => val1 < val2,
                                _ => {
                                    // Fixed: this message previously said "GreaterThan".
                                    unreachable!(
                                        "Type checker allowed non integer value for LessThan"
                                    )
                                }
                            };
                            Some((inst_val, block, r))
                        }
                    }
                }
                _ => None,
            },
        );
    candidate.is_some_and(|(inst_val, block, cn_replace)| {
        let const_content = ConstantContent::new_bool(context, cn_replace);
        let constant = crate::Constant::unique(context, const_content);
        // Replace this `cmp` instruction with a constant.
        inst_val.replace(context, ValueDatum::Constant(constant));
        block.remove_instruction(context, inst_val);
        true
    })
}
/// Fold a binary op whose operands are both constants into a constant,
/// removing the instruction. Operations whose checked evaluation fails
/// (overflow, division by zero, oversized shift) are left untouched so the
/// runtime semantics (revert) are preserved. Folds at most one op per call.
fn combine_binary_op(context: &mut Context, function: &Function) -> bool {
    let candidate = function
        .instruction_iter(context)
        .find_map(
            |(block, inst_val)| match &context.values[inst_val.0].value {
                ValueDatum::Instruction(Instruction {
                    op: InstOp::BinaryOp { op, arg1, arg2 },
                    ..
                }) if arg1.is_constant(context) && arg2.is_constant(context) => {
                    let val1 = arg1.get_constant(context).unwrap().get_content(context);
                    let val2 = arg2.get_constant(context).unwrap().get_content(context);
                    use crate::BinaryOpKind::*;
                    use ConstantValue::*;
                    let v = match (op, &val1.value, &val2.value) {
                        (Add, Uint(l), Uint(r)) => l.checked_add(*r).map(Uint),
                        (Add, U256(l), U256(r)) => l.checked_add(r).map(U256),
                        (Sub, Uint(l), Uint(r)) => l.checked_sub(*r).map(Uint),
                        (Sub, U256(l), U256(r)) => l.checked_sub(r).map(U256),
                        (Mul, Uint(l), Uint(r)) => l.checked_mul(*r).map(Uint),
                        (Mul, U256(l), U256(r)) => l.checked_mul(r).map(U256),
                        (Div, Uint(l), Uint(r)) => l.checked_div(*r).map(Uint),
                        (Div, U256(l), U256(r)) => l.checked_div(r).map(U256),
                        (And, Uint(l), Uint(r)) => Some(Uint(l & r)),
                        (And, U256(l), U256(r)) => Some(U256(l & r)),
                        (Or, Uint(l), Uint(r)) => Some(Uint(l | r)),
                        (Or, U256(l), U256(r)) => Some(U256(l | r)),
                        (Xor, Uint(l), Uint(r)) => Some(Uint(l ^ r)),
                        (Xor, U256(l), U256(r)) => Some(U256(l ^ r)),
                        (Mod, Uint(l), Uint(r)) => l.checked_rem(*r).map(Uint),
                        (Mod, U256(l), U256(r)) => l.checked_rem(r).map(U256),
                        (Rsh, Uint(l), Uint(r)) => u32::try_from(*r)
                            .ok()
                            .and_then(|r| l.checked_shr(r).map(Uint)),
                        // NOTE(review): U256 right-shift uses plain `shr` while
                        // left-shift uses `checked_shl` — presumably `shr` is
                        // total for any shift amount; confirm against the U256 API.
                        (Rsh, U256(l), Uint(r)) => Some(U256(l.shr(r))),
                        (Lsh, Uint(l), Uint(r)) => u32::try_from(*r)
                            .ok()
                            .and_then(|r| l.checked_shl(r).map(Uint)),
                        (Lsh, U256(l), Uint(r)) => l.checked_shl(r).map(U256),
                        _ => None,
                    };
                    // The folded result keeps the type of the left operand.
                    v.map(|value| (inst_val, block, ConstantContent { ty: val1.ty, value }))
                }
                _ => None,
            },
        );
    // Replace this binary op instruction with a constant.
    candidate.is_some_and(|(inst_val, block, new_value)| {
        let new_value = Constant::unique(context, new_value);
        inst_val.replace(context, ValueDatum::Constant(new_value));
        block.remove_instruction(context, inst_val);
        true
    })
}
/// Remove a binary op with an identity-element operand (`x + 0`, `x * 1`,
/// `x / 1`, `x - 0`), replacing all uses of its result with the other
/// operand. Handles at most one op per call; returns `true` on a rewrite.
fn remove_useless_binary_op(context: &mut Context, function: &Function) -> bool {
    use crate::BinaryOpKind::*;
    use ConstantValue::*;
    let candidate = function
        .instruction_iter(context)
        .find_map(|(block, candidate)| {
            let ValueDatum::Instruction(Instruction {
                op: InstOp::BinaryOp { op, arg1, arg2 },
                ..
            }) = &context.values[candidate.0].value
            else {
                return None;
            };
            if !arg1.is_constant(context) && !arg2.is_constant(context) {
                return None;
            }
            let val1 = arg1
                .get_constant(context)
                .map(|x| &x.get_content(context).value);
            let val2 = arg2
                .get_constant(context)
                .map(|x| &x.get_content(context).value);
            let replacement = match (op, val1, val2) {
                // 0 + arg2, 1 * arg2
                (Add, Some(Uint(0)), _) | (Mul, Some(Uint(1)), _) => *arg2,
                // arg1 + 0, arg1 * 1, arg1 / 1, arg1 - 0
                (Add, _, Some(Uint(0)))
                | (Mul, _, Some(Uint(1)))
                | (Div, _, Some(Uint(1)))
                | (Sub, _, Some(Uint(0))) => *arg1,
                _ => return None,
            };
            Some((block, candidate, replacement))
        });
    candidate.is_some_and(|(block, old_value, new_value)| {
        let replace_map = FxHashMap::from_iter([(old_value, new_value)]);
        function.replace_values(context, &replace_map, None);
        block.remove_instruction(context, old_value);
        true
    })
}
/// Fold a unary op on a constant operand into a constant, removing the
/// instruction. Handles at most one op per call; returns `true` on a fold.
fn combine_unary_op(context: &mut Context, function: &Function) -> bool {
    let candidate = function
        .instruction_iter(context)
        .find_map(|(block, inst_val)| {
            let ValueDatum::Instruction(Instruction {
                op: InstOp::UnaryOp { op, arg },
                ..
            }) = &context.values[inst_val.0].value
            else {
                return None;
            };
            if !arg.is_constant(context) {
                return None;
            }
            let content = arg.get_constant(context).unwrap().get_content(context);
            use crate::UnaryOpKind::*;
            use ConstantValue::*;
            let folded = match (op, &content.value) {
                (Not, Uint(v)) => content.ty.get_uint_width(context).and_then(|width| {
                    // Mask the bitwise-not down to the operand's width.
                    let max = match width {
                        8 => u8::MAX as u64,
                        16 => u16::MAX as u64,
                        32 => u32::MAX as u64,
                        64 => u64::MAX,
                        _ => return None,
                    };
                    Some(Uint((!v) & max))
                }),
                (Not, U256(v)) => Some(U256(!v)),
                _ => None,
            };
            folded.map(|value| {
                (
                    inst_val,
                    block,
                    ConstantContent {
                        ty: content.ty,
                        value,
                    },
                )
            })
        });
    // Replace this unary op instruction with a constant.
    candidate.is_some_and(|(inst_val, block, new_value)| {
        let constant = Constant::unique(context, new_value);
        inst_val.replace(context, ValueDatum::Constant(constant));
        block.remove_instruction(context, inst_val);
        true
    })
}
#[cfg(test)]
mod tests {
    use crate::{optimize::tests::*, CONST_FOLDING_NAME};

    // Build a one-function IR module applying `opcode` to constant operand(s)
    // `l` (and optionally `r`), run const folding, and assert the folded
    // result (or, with `result: None`, that no fold happened).
    fn assert_operator(t: &str, opcode: &str, l: &str, r: Option<&str>, result: Option<&str>) {
        let expected = result.map(|result| format!("const {t} {result}"));
        let expected = expected.as_ref().map(|x| vec![x.as_str()]);
        let body = format!(
            "
    entry fn main() -> {t} {{
        entry():
        l = const {t} {l}
        {r_inst}
        result = {opcode} l, {result_inst} !0
        ret {t} result
    }}
",
            r_inst = r.map_or("".into(), |r| format!("r = const {t} {r}")),
            result_inst = r.map_or("", |_| " r,")
        );
        assert_optimization(&[CONST_FOLDING_NAME], &body, expected);
    }

    #[test]
    fn unary_op_are_optimized() {
        assert_operator("u64", "not", &u64::MAX.to_string(), None, Some("0"));
    }

    #[test]
    fn binary_op_are_optimized() {
        // u64
        assert_operator("u64", "add", "1", Some("1"), Some("2"));
        assert_operator("u64", "sub", "1", Some("1"), Some("0"));
        assert_operator("u64", "mul", "2", Some("2"), Some("4"));
        assert_operator("u64", "div", "10", Some("5"), Some("2"));
        assert_operator("u64", "mod", "12", Some("5"), Some("2"));
        assert_operator("u64", "rsh", "16", Some("1"), Some("8"));
        assert_operator("u64", "lsh", "16", Some("1"), Some("32"));
        assert_operator(
            "u64",
            "and",
            &0x00FFF.to_string(),
            Some(&0xFFF00.to_string()),
            Some(&0xF00.to_string()),
        );
        assert_operator(
            "u64",
            "or",
            &0x00FFF.to_string(),
            Some(&0xFFF00.to_string()),
            Some(&0xFFFFF.to_string()),
        );
        assert_operator(
            "u64",
            "xor",
            &0x00FFF.to_string(),
            Some(&0xFFF00.to_string()),
            Some(&0xFF0FF.to_string()),
        );
    }

    // Checked evaluation fails (overflow, div/mod by zero, oversized shift),
    // so the op must be left unfolded to preserve runtime revert semantics.
    #[test]
    fn binary_op_are_not_optimized() {
        assert_operator("u64", "add", &u64::MAX.to_string(), Some("1"), None);
        assert_operator("u64", "sub", "0", Some("1"), None);
        assert_operator("u64", "mul", &u64::MAX.to_string(), Some("2"), None);
        assert_operator("u64", "div", "1", Some("0"), None);
        assert_operator("u64", "mod", "1", Some("0"), None);
        assert_operator("u64", "rsh", "1", Some("64"), None);
        assert_operator("u64", "lsh", "1", Some("64"), None);
    }

    // The driver loop restarts after each fold, so chains of foldable ops
    // collapse completely in one pass invocation.
    #[test]
    fn ok_chain_optimization() {
        // Unary operator
        // `sub 1` is used to guarantee that the assert string is unique
        assert_optimization(
            &[CONST_FOLDING_NAME],
            "
        entry fn main() -> u64 {
            entry():
            a = const u64 18446744073709551615
            b = not a, !0
            c = not b, !0
            d = const u64 1
            result = sub c, d, !0
            ret u64 result
        }
    ",
            Some(["const u64 18446744073709551614"]),
        );

        // Binary Operators
        assert_optimization(
            &[CONST_FOLDING_NAME],
            "
        entry fn main() -> u64 {
            entry():
            l0 = const u64 1
            r0 = const u64 2
            l1 = add l0, r0, !0
            r1 = const u64 3
            result = add l1, r1, !0
            ret u64 result
        }
    ",
            Some(["const u64 6"]),
        );
    }

    #[test]
    fn ok_remove_useless_mul() {
        assert_optimization(
            &[CONST_FOLDING_NAME],
            "entry fn main() -> u64 {
            local u64 LOCAL
            entry():
            zero = const u64 0, !0
            one = const u64 1, !0
            l_ptr = get_local __ptr u64, LOCAL, !0
            l = load l_ptr, !0
            result1 = mul l, one, !0
            result2 = mul one, result1, !0
            result3 = add result2, zero, !0
            result4 = add zero, result3, !0
            result5 = div result4, one, !0
            result6 = sub result5, zero, !0
            ret u64 result6, !0
        }",
            Some([
                "v3v1 = get_local __ptr u64, LOCAL",
                "v4v1 = load v3v1",
                "ret u64 v4v1",
            ]),
        );
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/dce.rs | sway-ir/src/optimize/dce.rs | //! ## Dead Code Elimination
//!
//! This optimization removes unused definitions. The pass is a combination of:
//! 1. A liveness analysis that keeps track of the uses of a definition,
//! 2. At the time of inspecting a definition, if it has no uses, it is removed.
//!
//! This pass does not do CFG transformations. That is handled by `simplify_cfg`.
use itertools::Itertools;
use rustc_hash::FxHashSet;
use crate::{
get_gep_referred_symbols, get_referred_symbols, memory_utils, AnalysisResults, Context,
EscapedSymbols, Function, GlobalVar, InstOp, Instruction, IrError, LocalVar, Module, Pass,
PassMutability, ReferredSymbols, ScopedPass, Symbol, Value, ValueDatum, ESCAPED_SYMBOLS_NAME,
};
use std::collections::{HashMap, HashSet};
pub const DCE_NAME: &str = "dce";
pub fn create_dce_pass() -> Pass {
    // Function-scoped transform. DCE depends on the escaped-symbols analysis
    // so it knows which stores may be observable outside the function.
    Pass {
        name: DCE_NAME,
        descr: "Dead code elimination",
        deps: vec![ESCAPED_SYMBOLS_NAME],
        runner: ScopedPass::FunctionPass(PassMutability::Transform(dce)),
    }
}
pub const GLOBALS_DCE_NAME: &str = "globals-dce";
pub fn create_globals_dce_pass() -> Pass {
    // Module-scoped transform with no analysis dependencies: it computes its
    // own reachability from the module's entry points.
    Pass {
        name: GLOBALS_DCE_NAME,
        descr: "Dead globals (functions and variables) elimination",
        runner: ScopedPass::ModulePass(PassMutability::Transform(globals_dce)),
        deps: vec![],
    }
}
/// Can `val` be removed without changing observable behavior?
///
/// Non-instruction values (constants, arguments) are trivially eliminable
/// here; whether they are actually removed is decided by their use counts.
/// An instruction is eliminable if it neither terminates a block nor may
/// have a side effect, or if it is a store that [is_removable_store]
/// proves can never be observed.
fn can_eliminate_value(
    context: &Context,
    val: Value,
    num_symbol_loaded: &NumSymbolLoaded,
    escaped_symbols: &EscapedSymbols,
) -> bool {
    let Some(inst) = val.get_instruction(context) else {
        return true;
    };
    (!inst.op.is_terminator() && !inst.op.may_have_side_effect())
        || is_removable_store(context, val, num_symbol_loaded, escaped_symbols)
}
/// Is `val` a store-like instruction (`store`, `mem_copy_bytes`,
/// `mem_copy_val`) whose effect can never be observed?
///
/// Removability requires complete information on all three fronts:
/// - the escaped-symbols analysis must be `Complete`,
/// - the per-symbol load counts must be `Known`, and
/// - the set of symbols referred to by the destination pointer must be
///   `Complete`.
/// Given that, a store is removable when every destination symbol neither
/// escapes the function nor is ever loaded from. Any incompleteness means
/// we must conservatively keep the store.
fn is_removable_store(
    context: &Context,
    val: Value,
    num_symbol_loaded: &NumSymbolLoaded,
    escaped_symbols: &EscapedSymbols,
) -> bool {
    let escaped_symbols = match escaped_symbols {
        EscapedSymbols::Complete(syms) => syms,
        EscapedSymbols::Incomplete(_) => return false,
    };
    let num_symbol_loaded = match num_symbol_loaded {
        NumSymbolLoaded::Unknown => return false,
        NumSymbolLoaded::Known(known_num_symbol_loaded) => known_num_symbol_loaded,
    };
    match val.get_instruction(context).unwrap().op {
        InstOp::MemCopyBytes { dst_val_ptr, .. }
        | InstOp::MemCopyVal { dst_val_ptr, .. }
        | InstOp::Store { dst_val_ptr, .. } => {
            let syms = get_referred_symbols(context, dst_val_ptr);
            match syms {
                // Removable only if no destination symbol escapes or is loaded.
                ReferredSymbols::Complete(syms) => syms.iter().all(|sym| {
                    !escaped_symbols.contains(sym)
                        && num_symbol_loaded.get(sym).map_or(0, |uses| *uses) == 0
                }),
                // We cannot guarantee that the destination is not used.
                ReferredSymbols::Incomplete(_) => false,
            }
        }
        // Anything that isn't a store-like instruction is not handled here.
        _ => false,
    }
}
/// How many times a [Symbol] gets loaded from, directly or indirectly.
/// This number is either exactly `Known` for all the symbols loaded from, or is
/// considered to be `Unknown` for all the symbols.
enum NumSymbolLoaded {
    /// The analysis was incomplete; no load count can be trusted.
    Unknown,
    /// Exact load count per symbol; a symbol absent from the map has zero loads.
    Known(HashMap<Symbol, u32>),
}
/// Instructions that store to a [Symbol], directly or indirectly.
/// These instructions are either exactly `Known` for all the symbols stored to, or is
/// considered to be `Unknown` for all the symbols.
enum StoresOfSymbol {
    /// The analysis was incomplete; the store lists cannot be trusted.
    Unknown,
    /// All store instructions per symbol; absence from the map means no stores.
    Known(HashMap<Symbol, Vec<Value>>),
}
/// Return the values that `value` depends on:
/// - for an instruction: its operands;
/// - for a block argument: the value passed for it from each predecessor of
///   its block (one entry per predecessor);
/// - otherwise (e.g. constants): nothing.
fn get_operands(value: Value, context: &Context) -> Vec<Value> {
    if let Some(inst) = value.get_instruction(context) {
        inst.op.get_operands()
    } else if let Some(arg) = value.get_argument(context) {
        arg.block
            .pred_iter(context)
            .map(|pred| {
                arg.get_val_coming_from(context, pred)
                    .expect("Block arg doesn't have value passed from predecessor")
            })
            .collect()
    } else {
        vec![]
    }
}
/// Perform dead code (if any) elimination and return true if the `function` is modified.
///
/// The pass runs in two phases:
/// 1. a single scan over all instructions that counts SSA uses, `get_local`
///    uses, and — when fully computable — per-symbol load counts and the
///    store instructions per symbol;
/// 2. a worklist, seeded with all currently unused instructions and block
///    args, that transitively removes values whose last use disappears,
///    including stores that become dead once their symbol's last load goes.
/// Dead values are collected in a "cemetery" and physically removed (along
/// with now-unused block args and locals) only after the worklist drains.
pub fn dce(
    context: &mut Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    // For DCE, we need to proceed with the analysis even if we have
    // incomplete list of escaped symbols, because we could have
    // unused instructions in code. Removing unused instructions is
    // independent of having any escaping symbols.
    let escaped_symbols: &EscapedSymbols = analyses.get_analysis_result(function);
    // Number of uses that an instruction / block arg has. This number is always known.
    let mut num_ssa_uses: HashMap<Value, u32> = HashMap::new();
    // Number of times a local is accessed via `get_local`. This number is always known.
    let mut num_local_uses: HashMap<LocalVar, u32> = HashMap::new();
    // Number of times a symbol, local or a function argument, is loaded, directly or indirectly. This number can be unknown.
    let mut num_symbol_loaded: NumSymbolLoaded = NumSymbolLoaded::Known(HashMap::new());
    // Instructions that store to a symbol, directly or indirectly. This information can be unknown.
    let mut stores_of_sym: StoresOfSymbol = StoresOfSymbol::Known(HashMap::new());
    // TODO: (REFERENCES) Update this logic once `mut arg: T`s are implemented.
    // Currently, only `ref mut arg` arguments can be stored to,
    // which means they can be loaded from the caller.
    // Once we support `mut arg` in general, this will not be
    // the case anymore and we will need to distinguish between
    // `mut arg: T`, `arg: &mut T`, etc.
    // Every argument is assumed to be loaded from (from the caller),
    // so stores to it shouldn't be eliminated.
    if let NumSymbolLoaded::Known(known_num_symbol_loaded) = &mut num_symbol_loaded {
        for sym in function
            .args_iter(context)
            .flat_map(|arg| get_gep_referred_symbols(context, arg.1))
        {
            known_num_symbol_loaded
                .entry(sym)
                .and_modify(|count| *count += 1)
                .or_insert(1);
        }
    }
    // Go through each instruction and update use counters.
    for (_block, inst) in function.instruction_iter(context) {
        // Load counts stay `Known` only while every instruction's loaded
        // symbols can be determined completely.
        if let NumSymbolLoaded::Known(known_num_symbol_loaded) = &mut num_symbol_loaded {
            match memory_utils::get_loaded_symbols(context, inst) {
                ReferredSymbols::Complete(loaded_symbols) => {
                    for sym in loaded_symbols {
                        known_num_symbol_loaded
                            .entry(sym)
                            .and_modify(|count| *count += 1)
                            .or_insert(1);
                    }
                }
                ReferredSymbols::Incomplete(_) => num_symbol_loaded = NumSymbolLoaded::Unknown,
            }
        }
        // Same for the per-symbol store lists.
        if let StoresOfSymbol::Known(known_stores_of_sym) = &mut stores_of_sym {
            match memory_utils::get_stored_symbols(context, inst) {
                ReferredSymbols::Complete(stored_symbols) => {
                    for stored_sym in stored_symbols {
                        known_stores_of_sym
                            .entry(stored_sym)
                            .and_modify(|stores| stores.push(inst))
                            .or_insert(vec![inst]);
                    }
                }
                ReferredSymbols::Incomplete(_) => stores_of_sym = StoresOfSymbol::Unknown,
            }
        }
        // A local is used if it is accessed via `get_local`.
        let inst = inst.get_instruction(context).unwrap();
        if let InstOp::GetLocal(local) = inst.op {
            num_local_uses
                .entry(local)
                .and_modify(|count| *count += 1)
                .or_insert(1);
        }
        // An instruction or block-arg is used if it is an operand in another instruction.
        let opds = inst.op.get_operands();
        for opd in opds {
            match context.values[opd.0].value {
                ValueDatum::Instruction(_) | ValueDatum::Argument(_) => {
                    num_ssa_uses
                        .entry(opd)
                        .and_modify(|count| *count += 1)
                        .or_insert(1);
                }
                ValueDatum::Constant(_) => {}
            }
        }
    }
    // The list of all unused or `Store` instruction. Note that the `Store` instruction does
    // not result in a value, and will, thus, always be treated as unused and will not
    // have an entry in `num_inst_uses`. So, to collect unused or `Store` instructions it
    // is sufficient to filter those that are not used.
    let mut worklist = function
        .instruction_iter(context)
        .filter_map(|(_, inst)| (!num_ssa_uses.contains_key(&inst)).then_some(inst))
        .collect::<Vec<_>>();
    // Seed the worklist with unused block args as well.
    let dead_args = function
        .block_iter(context)
        .flat_map(|block| {
            block
                .arg_iter(context)
                .filter_map(|arg| (!num_ssa_uses.contains_key(arg)).then_some(*arg))
                .collect_vec()
        })
        .collect_vec();
    worklist.extend(dead_args);
    let mut modified = false;
    let mut cemetery = FxHashSet::default();
    while let Some(dead) = worklist.pop() {
        if !can_eliminate_value(context, dead, &num_symbol_loaded, escaped_symbols)
            || cemetery.contains(&dead)
        {
            continue;
        }
        // Process dead's operands.
        let opds = get_operands(dead, context);
        for opd in opds {
            // Reduce the use count of the operand used in the dead instruction.
            // If it reaches 0, add it to the worklist, since it is not used
            // anywhere else.
            match context.values[opd.0].value {
                ValueDatum::Instruction(_) | ValueDatum::Argument(_) => {
                    let nu = num_ssa_uses.get_mut(&opd).unwrap();
                    *nu -= 1;
                    if *nu == 0 {
                        worklist.push(opd);
                    }
                }
                ValueDatum::Constant(_) => {}
            }
        }
        if dead.get_instruction(context).is_some() {
            // If the `dead` instruction was the only instruction loading from a `sym`bol,
            // after removing it, there will be no loads anymore, so all the stores to
            // that `sym`bol can be added to the worklist.
            if let ReferredSymbols::Complete(loaded_symbols) =
                memory_utils::get_loaded_symbols(context, dead)
            {
                if let (
                    NumSymbolLoaded::Known(known_num_symbol_loaded),
                    StoresOfSymbol::Known(known_stores_of_sym),
                ) = (&mut num_symbol_loaded, &mut stores_of_sym)
                {
                    for sym in loaded_symbols {
                        let nu = known_num_symbol_loaded.get_mut(&sym).unwrap();
                        *nu -= 1;
                        if *nu == 0 {
                            for store in known_stores_of_sym.get(&sym).unwrap_or(&vec![]) {
                                worklist.push(*store);
                            }
                        }
                    }
                }
            }
        }
        // Mark for removal; physical deletion happens after the worklist drains.
        cemetery.insert(dead);
        // Keep the `get_local` counters in sync so unused locals can be
        // removed at the end.
        if let ValueDatum::Instruction(Instruction {
            op: InstOp::GetLocal(local),
            ..
        }) = context.values[dead.0].value
        {
            let count = num_local_uses.get_mut(&local).unwrap();
            *count -= 1;
        }
        modified = true;
    }
    // Remove all dead instructions and arguments.
    // We collect here and below because we want &mut Context for modifications.
    for block in function.block_iter(context).collect_vec() {
        if block != function.get_entry_block(context) {
            // dead_args[arg_idx] indicates whether the argument is dead.
            let dead_args = block
                .arg_iter(context)
                .map(|arg| cemetery.contains(arg))
                .collect_vec();
            for pred in block.pred_iter(context).cloned().collect_vec() {
                let params = pred
                    .get_succ_params_mut(context, &block)
                    .expect("Invalid IR");
                let mut index = 0;
                // Remove parameters passed to a dead argument.
                params.retain(|_| {
                    let retain = !dead_args[index];
                    index += 1;
                    retain
                });
            }
            // Remove the dead argument itself.
            let mut index = 0;
            context.blocks[block.0].args.retain(|_| {
                let retain = !dead_args[index];
                index += 1;
                retain
            });
            // Update the self-index stored in each arg.
            for (arg_idx, arg) in block.arg_iter(context).cloned().enumerate().collect_vec() {
                let arg = arg.get_argument_mut(context).unwrap();
                arg.idx = arg_idx;
            }
        }
        block.remove_instructions(context, |inst| cemetery.contains(&inst));
    }
    // Finally, drop locals that no surviving `get_local` refers to.
    let local_removals: Vec<_> = function
        .locals_iter(context)
        .filter_map(|(name, local)| {
            (num_local_uses.get(local).cloned().unwrap_or(0) == 0).then_some(name.clone())
        })
        .collect();
    if !local_removals.is_empty() {
        modified = true;
        function.remove_locals(context, &local_removals);
    }
    Ok(modified)
}
/// Remove entire functions and globals from a module based on whether they are called / used or not,
/// using a list of root 'entry' functions to perform a search.
///
/// Functions which are `pub` will not be removed and only functions within the passed [`Module`]
/// are considered for removal.
pub fn globals_dce(
    context: &mut Context,
    _: &AnalysisResults,
    module: Module,
) -> Result<bool, IrError> {
    // Reachability roots: configurables' decode functions plus every
    // entry / fallback function of the module.
    let mut called_fns: HashSet<Function> = HashSet::new();
    let mut used_globals: HashSet<GlobalVar> = HashSet::new();
    // config decode fns are roots too.
    for config in context.modules[module.0].configs.iter() {
        if let crate::ConfigContent::V1 { decode_fn, .. } = config.1 {
            grow_called_function_used_globals_set(
                context,
                decode_fn.get(),
                &mut called_fns,
                &mut used_globals,
            );
        }
    }
    // expand all called fns
    for entry_fn in module
        .function_iter(context)
        .filter(|func| func.is_entry(context) || func.is_fallback(context))
    {
        grow_called_function_used_globals_set(
            context,
            entry_fn,
            &mut called_fns,
            &mut used_globals,
        );
    }
    let mut modified = false;
    // Remove dead globals: keep only those reached via `get_global`.
    let m = &mut context.modules[module.0];
    let cur_num_globals = m.global_variables.len();
    m.global_variables.retain(|_, g| used_globals.contains(g));
    modified |= cur_num_globals != m.global_variables.len();
    // Gather the functions in the module which aren't called. It's better to collect them
    // separately first so as to avoid any issues with invalidating the function iterator.
    let dead_fns = module
        .function_iter(context)
        .filter(|f| !called_fns.contains(f))
        .collect::<Vec<_>>();
    for dead_fn in &dead_fns {
        module.remove_function(context, dead_fn);
    }
    modified |= !dead_fns.is_empty();
    Ok(modified)
}
// Recursively find all the functions called by an entry function.
// Also collects every global accessed via `get_global` along the way.
// `called_set` doubles as the visited set, so each function's body is
// scanned at most once and call cycles terminate.
fn grow_called_function_used_globals_set(
    context: &Context,
    caller: Function,
    called_set: &mut HashSet<Function>,
    used_globals: &mut HashSet<GlobalVar>,
) {
    if called_set.insert(caller) {
        // We haven't seen caller before. Iterate for all that it calls.
        let mut callees = HashSet::new();
        for (_block, value) in caller.instruction_iter(context) {
            let inst = value.get_instruction(context).unwrap();
            match &inst.op {
                InstOp::Call(f, _args) => {
                    callees.insert(*f);
                }
                InstOp::GetGlobal(g) => {
                    used_globals.insert(*g);
                }
                _otherwise => (),
            }
        }
        // Recurse after the scan to avoid borrowing issues while iterating.
        callees.into_iter().for_each(|func| {
            grow_called_function_used_globals_set(context, func, called_set, used_globals);
        });
    }
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/optimize/simplify_cfg.rs | sway-ir/src/optimize/simplify_cfg.rs | //! ## Simplify Control Flow Graph
//!
//! The optimizations here aim to reduce the complexity in control flow by removing basic blocks.
//! This may be done by removing 'dead' blocks which are no longer called (or in other words, have
//! no predecessors) or by merging blocks which are linked by a single unconditional branch.
//!
//! Removing blocks will make the IR neater and more efficient but will also remove indirection of
//! data flow via PHI instructions which in turn can make analyses for passes like constant folding
//! much simpler.
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
block::Block, context::Context, error::IrError, function::Function, instruction::InstOp,
value::ValueDatum, AnalysisResults, BranchToWithArgs, Instruction, InstructionInserter, Pass,
PassMutability, ScopedPass, Value,
};
pub const SIMPLIFY_CFG_NAME: &str = "simplify-cfg";
pub fn create_simplify_cfg_pass() -> Pass {
    // Function-scoped transform with no analysis dependencies.
    Pass {
        name: SIMPLIFY_CFG_NAME,
        descr: "Simplify the control flow graph (CFG)",
        runner: ScopedPass::FunctionPass(PassMutability::Transform(simplify_cfg)),
        deps: vec![],
    }
}
pub fn simplify_cfg(
    context: &mut Context,
    _: &AnalysisResults,
    function: Function,
) -> Result<bool, IrError> {
    // Run the sub-passes in a fixed order. The final dead-block removal
    // cleans up blocks that the earlier steps may have made unreachable.
    let mut changed = remove_dead_blocks(context, &function)?;
    changed |= merge_blocks(context, &function)?;
    changed |= unlink_empty_blocks(context, &function)?;
    changed |= remove_dead_blocks(context, &function)?;
    Ok(changed)
}
/// Bypass blocks that consist of nothing but a single unconditional branch
/// and take no arguments: each predecessor is retargeted directly to the
/// branch's destination, forwarding the arguments the branch was passing.
/// The bypassed block itself is left in place (now unreachable) for a later
/// `remove_dead_blocks` run to delete. The entry block is never a candidate
/// (`skip(1)`).
fn unlink_empty_blocks(context: &mut Context, function: &Function) -> Result<bool, IrError> {
    let mut modified = false;
    let candidates: Vec<_> = function
        .block_iter(context)
        .skip(1)
        .filter_map(|block| {
            match block.get_terminator(context) {
                // Except for a branch, we don't want anything else.
                // If the block has PHI nodes, then values merge here. Cannot remove the block.
                Some(Instruction {
                    op: InstOp::Branch(to_block),
                    ..
                }) if block.num_instructions(context) <= 1 && block.num_args(context) == 0 => {
                    Some((block, to_block.clone()))
                }
                _ => None,
            }
        })
        .collect();
    for (
        block,
        BranchToWithArgs {
            block: to_block,
            args: cur_params,
        },
    ) in candidates
    {
        // If `to_block`'s predecessors and `block`'s predecessors intersect,
        // AND `to_block` has an arg, then we have that pred branching to to_block
        // with different args. While that's valid IR, it's harder to generate
        // ASM for it, so let's just skip that for now.
        if to_block.num_args(context) > 0
            && to_block.pred_iter(context).any(|to_block_pred| {
                block
                    .pred_iter(context)
                    .any(|block_pred| block_pred == to_block_pred)
            })
        {
            // We cannot filter this out in candidates itself because this condition
            // may get updated *during* this optimization (i.e., inside this loop).
            continue;
        }
        let preds: Vec<_> = block.pred_iter(context).copied().collect();
        for pred in preds {
            // Whatever parameters "block" passed to "to_block", that
            // should now go from "pred" to "to_block".
            let params_from_pred = pred.get_succ_params(context, &block);
            let new_params = cur_params
                .iter()
                .map(|cur_param| match &context.values[cur_param.0].value {
                    ValueDatum::Argument(arg) if arg.block == block => {
                        // An argument should map to the actual parameter passed.
                        params_from_pred[arg.idx]
                    }
                    _ => *cur_param,
                })
                .collect();
            pred.replace_successor(context, block, to_block, new_params);
            modified = true;
        }
    }
    Ok(modified)
}
/// Delete every block that is not reachable from the entry block.
fn remove_dead_blocks(context: &mut Context, function: &Function) -> Result<bool, IrError> {
    // Forward reachability walk from the entry block.
    let entry_block = function.get_entry_block(context);
    let mut reachable = std::collections::HashSet::<Block>::new();
    let mut pending = vec![entry_block];
    reachable.insert(entry_block);
    while let Some(block) = pending.pop() {
        for BranchToWithArgs { block: succ, .. } in block.successors(context) {
            // `insert` tells us whether this successor is newly discovered.
            if reachable.insert(succ) {
                pending.push(succ);
            }
        }
    }
    // Unlink and delete every block the walk never reached.
    let mut modified = false;
    for block in function.block_iter(context) {
        if !reachable.contains(&block) {
            modified = true;
            for BranchToWithArgs { block: succ, .. } in block.successors(context) {
                succ.remove_pred(context, &block);
            }
            function.remove_block(context, &block)?;
        }
    }
    Ok(modified)
}
/// Merge chains of blocks linked by a single unconditional branch where the
/// target has exactly one predecessor. The whole chain is folded into its
/// first block: the target's args are rewritten to the passed parameters,
/// its instructions are appended, and the emptied blocks are removed.
fn merge_blocks(context: &mut Context, function: &Function) -> Result<bool, IrError> {
    // Check if block branches solely to another block B, and that B has exactly one predecessor.
    fn check_candidate(context: &Context, from_block: Block) -> Option<(Block, Block)> {
        from_block
            .get_terminator(context)
            .and_then(|term| match term {
                Instruction {
                    op:
                        InstOp::Branch(BranchToWithArgs {
                            block: to_block, ..
                        }),
                    ..
                } if to_block.num_predecessors(context) == 1 => Some((from_block, *to_block)),
                _ => None,
            })
    }
    let blocks: Vec<_> = function.block_iter(context).collect();
    let mut deleted_blocks = FxHashSet::<Block>::default();
    // Arg values of merged blocks -> the parameter values that replace them;
    // applied in one go at the end.
    let mut replace_map: FxHashMap<Value, Value> = FxHashMap::default();
    let mut modified = false;
    for from_block in blocks {
        if deleted_blocks.contains(&from_block) {
            continue;
        }
        // Find a block with an unconditional branch terminator which branches to a block with that
        // single predecessor.
        let twin_blocks = check_candidate(context, from_block);
        // If not found then abort here.
        let mut block_chain = match twin_blocks {
            Some((from_block, to_block)) => vec![from_block, to_block],
            None => continue,
        };
        // There may be more blocks which are also singly paired with these twins, so iteratively
        // search for more blocks in a chain which can be all merged into one.
        loop {
            match check_candidate(context, block_chain.last().copied().unwrap()) {
                None => {
                    // There is no twin for this block.
                    break;
                }
                Some(next_pair) => {
                    block_chain.push(next_pair.1);
                }
            }
        }
        // Keep a copy of the final block in the chain so we can adjust the successors below.
        let final_to_block = block_chain.last().copied().unwrap();
        let final_to_block_succs = final_to_block.successors(context);
        // The first block in the chain will be extended with the contents of the rest of the blocks in
        // the chain, which we'll call `from_block` since we're branching from here to the next one.
        let mut block_chain = block_chain.into_iter();
        let from_block = block_chain.next().unwrap();
        // Loop for the rest of the chain, to all the `to_block`s.
        for to_block in block_chain {
            let from_params = from_block.get_succ_params(context, &to_block);
            // We collect here so that we can have &mut Context later on.
            let to_blocks: Vec<_> = to_block.arg_iter(context).copied().enumerate().collect();
            for (arg_idx, to_block_arg) in to_blocks {
                // replace all uses of `to_block_arg` with the parameter from `from_block`.
                replace_map.insert(to_block_arg, from_params[arg_idx]);
            }
            // Update the parent block field for every instruction
            // in `to_block` to `from_block`.
            for val in to_block.instruction_iter(context) {
                let instr = val.get_instruction_mut(context).unwrap();
                instr.parent = from_block;
            }
            // Drop the terminator from `from_block`.
            from_block.remove_last_instruction(context);
            // Move instructions from `to_block` to `from_block`.
            let to_block_instructions = to_block.instruction_iter(context).collect::<Vec<_>>();
            let mut inserter =
                InstructionInserter::new(context, from_block, crate::InsertionPosition::End);
            inserter.insert_slice(&to_block_instructions);
            // Remove `to_block`.
            function.remove_block(context, &to_block)?;
            deleted_blocks.insert(to_block);
        }
        // Adjust the successors to the final `to_block` to now be successors of the fully merged
        // `from_block`.
        for BranchToWithArgs { block: succ, .. } in final_to_block_succs {
            succ.replace_pred(context, &final_to_block, &from_block)
        }
        modified = true;
    }
    if !replace_map.is_empty() {
        assert!(modified);
        function.replace_values(context, &replace_map, None);
    }
    Ok(modified)
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/analysis/dominator.rs | sway-ir/src/analysis/dominator.rs | use crate::{
block::Block, AnalysisResult, AnalysisResultT, AnalysisResults, BranchToWithArgs, Context,
Function, IrError, Pass, PassMutability, ScopedPass, Value,
};
use indexmap::IndexSet;
/// Dominator tree and related algorithms.
/// The algorithms implemented here are from the paper
// "A Simple, Fast Dominance Algorithm" -- Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy.
use rustc_hash::{FxHashMap, FxHashSet};
use std::fmt::Write;
use sway_types::{FxIndexMap, FxIndexSet};
/// Represents a node in the dominator tree.
pub struct DomTreeNode {
    /// The immediate dominator of self.
    /// `None` only for the tree root, i.e. the function's entry block.
    pub parent: Option<Block>,
    /// The blocks that self immediately dominates.
    pub children: Vec<Block>,
}
impl DomTreeNode {
    /// Create a node with the given immediate dominator and no children yet.
    pub fn new(parent: Option<Block>) -> DomTreeNode {
        DomTreeNode {
            parent,
            children: Vec::new(),
        }
    }
}
// The dominator tree is represented by mapping each Block to its DomTreeNode.
#[derive(Default)]
pub struct DomTree(FxIndexMap<Block, DomTreeNode>);
impl AnalysisResultT for DomTree {}
// Dominance frontier sets.
pub type DomFronts = FxIndexMap<Block, FxIndexSet<Block>>;
impl AnalysisResultT for DomFronts {}
/// Post ordering of blocks in the CFG.
pub struct PostOrder {
pub block_to_po: FxHashMap<Block, usize>,
pub po_to_block: Vec<Block>,
}
impl AnalysisResultT for PostOrder {}
pub const POSTORDER_NAME: &str = "postorder";
pub fn create_postorder_pass() -> Pass {
    // Standalone analysis; other passes (e.g. dominators) depend on it.
    Pass {
        name: POSTORDER_NAME,
        descr: "Postorder traversal of the control-flow graph",
        runner: ScopedPass::FunctionPass(PassMutability::Analysis(compute_post_order_pass)),
        deps: vec![],
    }
}
pub fn compute_post_order_pass(
    context: &Context,
    _: &AnalysisResults,
    function: Function,
) -> Result<AnalysisResult, IrError> {
    // Thin adapter so the pass manager can run `compute_post_order`.
    let po = compute_post_order(context, &function);
    Ok(Box::new(po))
}
/// Compute the post-order traversal of the CFG.
/// Beware: Unreachable blocks aren't part of the result.
///
/// Implemented as a recursive DFS from the entry block. Note that `on_stack`
/// is never popped, so it effectively acts as a "visited" set: every reachable
/// block is numbered exactly once, and back-edges in cyclic CFGs simply
/// terminate the recursion. The entry block is always last in post-order,
/// which the final assert checks.
pub fn compute_post_order(context: &Context, function: &Function) -> PostOrder {
    let mut res = PostOrder {
        block_to_po: FxHashMap::default(),
        po_to_block: Vec::default(),
    };
    let entry = function.get_entry_block(context);
    let mut counter = 0;
    let mut on_stack = FxHashSet::<Block>::default();
    // DFS helper: number `n` after all of its successors have been numbered.
    fn post_order(
        context: &Context,
        n: Block,
        res: &mut PostOrder,
        on_stack: &mut FxHashSet<Block>,
        counter: &mut usize,
    ) {
        if on_stack.contains(&n) {
            return;
        }
        on_stack.insert(n);
        for BranchToWithArgs { block: n_succ, .. } in n.successors(context) {
            post_order(context, n_succ, res, on_stack, counter);
        }
        res.block_to_po.insert(n, *counter);
        res.po_to_block.push(n);
        *counter += 1;
    }
    post_order(context, entry, &mut res, &mut on_stack, &mut counter);
    // We could assert the whole thing, but it'd be expensive.
    assert!(res.po_to_block.last().unwrap() == &entry);
    res
}
pub const DOMINATORS_NAME: &str = "dominators";
pub fn create_dominators_pass() -> Pass {
    // Needs the post-order numbering computed by the postorder pass.
    Pass {
        name: DOMINATORS_NAME,
        descr: "Dominator tree computation",
        runner: ScopedPass::FunctionPass(PassMutability::Analysis(compute_dom_tree)),
        deps: vec![POSTORDER_NAME],
    }
}
/// Compute the dominator tree for the CFG.
///
/// Iterative data-flow algorithm from "A Simple, Fast Dominance Algorithm"
/// (Cooper, Harvey, Kennedy): walk the blocks in reverse post-order,
/// repeatedly intersecting the immediate dominators of already-processed
/// predecessors, until a fixed point is reached. Unreachable blocks (absent
/// from the post-order) are skipped throughout.
fn compute_dom_tree(
    context: &Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<AnalysisResult, IrError> {
    let po: &PostOrder = analyses.get_analysis_result(function);
    let mut dom_tree = DomTree::default();
    let entry = function.get_entry_block(context);
    // This is to make the algorithm happy. It'll be changed to None later.
    dom_tree.0.insert(entry, DomTreeNode::new(Some(entry)));
    // initialize the dominators tree. This allows us to do dom_tree[b] fearlessly.
    // Note that we just previously initialized "entry", so we skip that here.
    for b in po.po_to_block.iter().take(po.po_to_block.len() - 1) {
        dom_tree.0.insert(*b, DomTreeNode::new(None));
    }
    let mut changed = true;
    while changed {
        changed = false;
        // For all nodes, b, in reverse postorder (except start node)
        for b in po.po_to_block.iter().rev().skip(1) {
            // new_idom <- first (processed) predecessor of b (pick one)
            // "Processed" here means: has a higher post-order number than b,
            // i.e. was already visited in this reverse post-order sweep.
            let mut new_idom = b
                .pred_iter(context)
                .find(|p| {
                    // "p" may not be reachable, and hence not in dom_tree.
                    po.block_to_po
                        .get(p)
                        .is_some_and(|p_po| *p_po > po.block_to_po[b])
                })
                .cloned()
                .unwrap();
            let picked_pred = new_idom;
            // for all other (reachable) predecessors, p, of b:
            for p in b
                .pred_iter(context)
                .filter(|p| **p != picked_pred && po.block_to_po.contains_key(p))
            {
                if dom_tree.0[p].parent.is_some() {
                    // if doms[p] already calculated
                    new_idom = intersect(po, &dom_tree, *p, new_idom);
                }
            }
            let b_node = dom_tree.0.get_mut(b).unwrap();
            match b_node.parent {
                Some(idom) if idom == new_idom => {}
                _ => {
                    b_node.parent = Some(new_idom);
                    changed = true;
                }
            }
        }
    }
    // Find the nearest common dominator of two blocks,
    // using the partially computed dominator tree.
    // The two "fingers" climb the tree until they meet; higher post-order
    // numbers are closer to the entry.
    fn intersect(
        po: &PostOrder,
        dom_tree: &DomTree,
        mut finger1: Block,
        mut finger2: Block,
    ) -> Block {
        while finger1 != finger2 {
            while po.block_to_po[&finger1] < po.block_to_po[&finger2] {
                finger1 = dom_tree.0[&finger1].parent.unwrap();
            }
            while po.block_to_po[&finger2] < po.block_to_po[&finger1] {
                finger2 = dom_tree.0[&finger2].parent.unwrap();
            }
        }
        finger1
    }
    // Fix the root.
    dom_tree.0.get_mut(&entry).unwrap().parent = None;
    // Build the children.
    let child_parent: Vec<_> = dom_tree
        .0
        .iter()
        .filter_map(|(n, n_node)| n_node.parent.map(|n_parent| (*n, n_parent)))
        .collect();
    for (child, parent) in child_parent {
        dom_tree.0.get_mut(&parent).unwrap().children.push(child);
    }
    Ok(Box::new(dom_tree))
}
impl DomTree {
    /// Does `dominator` dominate `dominatee`?
    ///
    /// Walks up the `parent` chain from `dominatee` to the root. Note that
    /// indexing panics if a block on the walk is not in the tree (e.g. a
    /// block unreachable at the time the tree was computed).
    pub fn dominates(&self, dominator: Block, dominatee: Block) -> bool {
        let mut node_opt = Some(dominatee);
        while let Some(node) = node_opt {
            if node == dominator {
                return true;
            }
            node_opt = self.0[&node].parent;
        }
        false
    }
    /// Get an iterator over the children nodes
    pub fn children(&self, node: Block) -> impl Iterator<Item = Block> + '_ {
        self.0[&node].children.iter().cloned()
    }
    /// Get i'th child of a given node
    pub fn child(&self, node: Block, i: usize) -> Option<Block> {
        self.0[&node].children.get(i).cloned()
    }
    /// Does the instruction `dominator` dominate the instruction `dominatee`?
    ///
    /// Both values must be instructions (unwraps otherwise). If they live in
    /// the same block, their relative order in that block decides; otherwise
    /// this reduces to block-level [DomTree::dominates].
    pub fn dominates_instr(&self, context: &Context, dominator: Value, dominatee: Value) -> bool {
        let dominator_inst = dominator.get_instruction(context).unwrap();
        let dominatee_inst = dominatee.get_instruction(context).unwrap();
        if dominator == dominatee {
            return true;
        }
        let dominator_block = dominator_inst.parent;
        let dominatee_block = dominatee_inst.parent;
        if dominator_block == dominatee_block {
            // Same block, but different instructions.
            // Check the order of instructions in the block.
            let mut found_dominator = false;
            for instr in dominator_block.instruction_iter(context) {
                if instr == dominator {
                    found_dominator = true;
                }
                if instr == dominatee {
                    return found_dominator;
                }
            }
            false
        } else {
            self.dominates(dominator_block, dominatee_block)
        }
    }
}
pub const DOM_FRONTS_NAME: &str = "dominance-frontiers";
pub fn create_dom_fronts_pass() -> Pass {
    // Needs the dominator tree produced by the dominators pass.
    Pass {
        name: DOM_FRONTS_NAME,
        descr: "Dominance frontiers computation",
        runner: ScopedPass::FunctionPass(PassMutability::Analysis(compute_dom_fronts)),
        deps: vec![DOMINATORS_NAME],
    }
}
/// Compute dominance frontiers set for each block.
///
/// Uses the dominance-frontier algorithm from the same paper as the tree
/// computation (Cooper, Harvey, Kennedy): for every join point `b` (two or
/// more predecessors), walk up the dominator tree from each reachable
/// predecessor until `b`'s immediate dominator is reached, adding `b` to the
/// frontier of every block passed on the way.
fn compute_dom_fronts(
    context: &Context,
    analyses: &AnalysisResults,
    function: Function,
) -> Result<AnalysisResult, IrError> {
    let dom_tree: &DomTree = analyses.get_analysis_result(function);
    let mut res = DomFronts::default();
    for (b, _) in dom_tree.0.iter() {
        res.insert(*b, IndexSet::default());
    }
    // for all nodes, b
    for (b, _) in dom_tree.0.iter() {
        // if the number of predecessors of b >= 2
        if b.num_predecessors(context) > 1 {
            // unwrap() is safe as b is not "entry", and hence must have idom.
            let b_idom = dom_tree.0[b].parent.unwrap();
            // for all (reachable) predecessors, p, of b
            for p in b.pred_iter(context).filter(|&p| dom_tree.0.contains_key(p)) {
                let mut runner = *p;
                while runner != b_idom {
                    // add b to runner’s dominance frontier set
                    res.get_mut(&runner).unwrap().insert(*b);
                    runner = dom_tree.0[&runner].parent.unwrap();
                }
            }
        }
    }
    Ok(Box::new(res))
}
/// Print dominator tree in the graphviz dot format.
pub fn print_dot(context: &Context, func_name: &str, dom_tree: &DomTree) -> String {
    // One dot edge per (immediate dominator -> dominated block) pair; the
    // root has no parent and so produces no edge.
    let mut out = format!("digraph {func_name} {{\n");
    for (block, node) in dom_tree.0.iter() {
        let Some(parent) = node.parent else { continue };
        let _ = writeln!(
            out,
            "\t{} -> {}",
            parent.get_label(context),
            block.get_label(context)
        );
    }
    out.push_str("}\n");
    out
}
/// Print dominator frontiers information.
pub fn print_dom_fronts(context: &Context, func_name: &str, dom_fronts: &DomFronts) -> String {
    // One line per block: "\t<label>: <frontier labels separated by spaces> ".
    let mut out = format!("Dominance frontiers set for {func_name}:\n");
    for (block, frontier) in dom_fronts.iter() {
        out.push('\t');
        out.push_str(&block.get_label(context));
        out.push_str(": ");
        for f in frontier {
            out.push_str(&f.get_label(context));
            out.push(' ');
        }
        out.push('\n');
    }
    out
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/analysis/call_graph.rs | sway-ir/src/analysis/call_graph.rs | /// Build call graphs for the program being compiled.
/// If a function F1 calls function F2, then the call
/// graph has an edge F1->F2.
use crate::{Context, Function, InstOp, Instruction, ValueDatum};
use indexmap::IndexSet;
use sway_types::{FxIndexMap, FxIndexSet};
pub type CallGraph = FxIndexMap<Function, FxIndexSet<Function>>;
/// Build call graph considering all providing functions.
pub fn build_call_graph(ctx: &Context, functions: &[Function]) -> CallGraph {
    let mut graph = CallGraph::default();
    for caller in functions {
        // Every function gets a node, even if it makes no calls at all.
        let callees = graph.entry(*caller).or_insert_with(IndexSet::default);
        for (_block, inst_val) in caller.instruction_iter(ctx) {
            // Record an edge caller -> callee for each `call` instruction.
            if let ValueDatum::Instruction(Instruction {
                op: InstOp::Call(callee, _),
                ..
            }) = ctx.values[inst_val.0].value
            {
                callees.insert(callee);
            }
        }
    }
    graph
}
/// Given a call graph, return reverse topological sort
/// (post order traversal), i.e., If A calls B, then B
/// occurs before A in the returned Vec.
pub fn callee_first_order(cg: &CallGraph) -> Vec<Function> {
    // DFS post-order: a function is emitted only after all of its callees.
    fn visit(
        cg: &CallGraph,
        seen: &mut FxIndexSet<Function>,
        order: &mut Vec<Function>,
        node: Function,
    ) {
        // `insert` returns false when the node was already visited,
        // which also terminates recursion on call cycles.
        if !seen.insert(node) {
            return;
        }
        for callee in &cg[&node] {
            visit(cg, seen, order, *callee);
        }
        order.push(node);
    }
    let mut order = Vec::new();
    let mut seen = FxIndexSet::<Function>::default();
    for node in cg.keys() {
        visit(cg, &mut seen, &mut order, *node);
    }
    order
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/analysis/memory_utils.rs | sway-ir/src/analysis/memory_utils.rs | //! An analysis to compute symbols that escape out from a function.
//! This could be into another function, or via `ptr_to_int` etc.
//! Any transformations involving such symbols are unsafe.
use indexmap::IndexSet;
use rustc_hash::FxHashSet;
use sway_types::{FxIndexMap, FxIndexSet};
use crate::{
AnalysisResult, AnalysisResultT, AnalysisResults, BlockArgument, Context, FuelVmInstruction,
Function, InstOp, Instruction, IrError, LocalVar, Pass, PassMutability, ScopedPass, Type,
Value, ValueDatum,
};
pub const ESCAPED_SYMBOLS_NAME: &str = "escaped-symbols";
/// Creates the [Pass] that computes, per function, the [Symbol]s that escape
/// the function or cannot be fully analyzed (see [compute_escaped_symbols]).
pub fn create_escaped_symbols_pass() -> Pass {
    Pass {
        name: ESCAPED_SYMBOLS_NAME,
        descr: "Symbols that escape or cannot be analyzed",
        deps: vec![],
        // An analysis pass: it only produces results and never mutates the IR.
        runner: ScopedPass::FunctionPass(PassMutability::Analysis(compute_escaped_symbols_pass)),
    }
}
/// A memory "symbol": the root object that a pointer value can refer to.
#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
pub enum Symbol {
    /// A function-local variable.
    Local(LocalVar),
    /// A block argument; collected as a symbol when it belongs to the
    /// function's `entry` block, i.e., when it is a function argument.
    Arg(BlockArgument),
}
impl Symbol {
pub fn get_type(&self, context: &Context) -> Type {
match self {
Symbol::Local(l) => l.get_type(context),
Symbol::Arg(ba) => ba.ty,
}
}
pub fn _get_name(&self, context: &Context, function: Function) -> String {
match self {
Symbol::Local(l) => function.lookup_local_name(context, l).unwrap().clone(),
Symbol::Arg(ba) => format!("{}[{}]", ba.block.get_label(context), ba.idx),
}
}
}
/// Get [Symbol]s, both [Symbol::Local]s and [Symbol::Arg]s, reachable
/// from the `val` via chain of [InstOp::GetElemPtr] (GEP) instructions.
/// A `val` can, via GEP instructions, refer indirectly to none, or one
/// or more symbols.
///
/// If the `val` is not a pointer, an empty set is returned.
///
/// Note that this function does not return [Symbol]s potentially reachable
/// via referencing (`&`), dereferencing (`*`), and raw pointers (`__addr_of`)
/// and is thus suitable for all IR analysis and manipulation that deals
/// strictly with GEP access.
///
/// To acquire all [Symbol]s reachable from the `val`, use [get_referred_symbols] instead.
pub fn get_gep_referred_symbols(context: &Context, val: Value) -> FxIndexSet<Symbol> {
    // A GEP-only traversal can always see through the whole chain, so the
    // result must be the `Complete` variant.
    let ReferredSymbols::Complete(symbols) = get_symbols(context, val, true) else {
        unreachable!("In the case of GEP access, the set of returned symbols is always complete.")
    };
    symbols
}
/// Provides [Symbol]s, both [Symbol::Local]s and [Symbol::Arg]s, reachable
/// from a certain [Value] via chain of [InstOp::GetElemPtr] (GEP) instructions
/// or via [InstOp::IntToPtr] and [InstOp::PtrToInt] instruction patterns
/// specific to references, both referencing (`&`) and dereferencing (`*`),
/// and raw pointers, via `__addr_of`.
pub enum ReferredSymbols {
    /// Guarantees that all [Symbol]s reachable from the particular [Value]
    /// are collected, thus, that there are no escapes or pointer accesses
    /// in the scope that _might_ result in symbols indirectly related to
    /// the [Value] but not reachable only via GEP, or references, or
    /// raw pointers only.
    Complete(FxIndexSet<Symbol>),
    /// Denotes that there _might_ be [Symbol]s out of returned ones that
    /// are related to the particular [Value], but not reachable only via GEP,
    /// or references, or raw pointers.
    /// I.e., the collected set is a subset of all related [Symbol]s.
    Incomplete(FxIndexSet<Symbol>),
}
impl ReferredSymbols {
pub fn new(is_complete: bool, symbols: FxIndexSet<Symbol>) -> Self {
if is_complete {
Self::Complete(symbols)
} else {
Self::Incomplete(symbols)
}
}
/// Returns the referred [Symbol]s and the information if they are
/// complete (true) or incomplete (false).
pub fn consume(self) -> (bool, FxIndexSet<Symbol>) {
let is_complete = matches!(self, ReferredSymbols::Complete(_));
let syms = match self {
ReferredSymbols::Complete(syms) | ReferredSymbols::Incomplete(syms) => syms,
};
(is_complete, syms)
}
}
/// Get [Symbol]s, both [Symbol::Local]s and [Symbol::Arg]s, reachable
/// from the `val` via chain of [InstOp::GetElemPtr] (GEP) instructions
/// or via [InstOp::IntToPtr] and [InstOp::PtrToInt] instruction patterns
/// specific to references, both referencing (`&`) and dereferencing (`*`),
/// and raw pointers, via `__addr_of`.
/// A `val` can, via these instructions, refer indirectly to none, or one
/// or more symbols.
///
/// Note that *this function does not perform any escape analysis*. E.g., if a
/// local symbol gets passed by `raw_ptr` or `&T` to a function and returned
/// back from the function via the same `raw_ptr` or `&T` the value returned
/// from the function will not be tracked back to the original symbol and the
/// symbol will not be collected as referred.
///
/// This means that, even if the result contains [Symbol]s, it _might_ be that
/// there are still other [Symbol]s in scope related to the `val`. E.g., in case
/// of branching, where the first branch directly returns `& local_var_a`
/// and the second branch, indirectly over a function call as explained above,
/// `& local_var_b`, only the `local_var_a` will be returned as a result.
///
/// Therefore, the function returns the [ReferredSymbols] enum to denote
/// if the returned set of symbols is guaranteed to be complete, or if it is
/// incomplete.
///
/// If the `val` is not a pointer, an empty set is returned and marked as
/// [ReferredSymbols::Complete].
pub fn get_referred_symbols(context: &Context, val: Value) -> ReferredSymbols {
    // `gep_only = false`: besides GEP chains, also follow the
    // reference/raw-pointer patterns (`int_to_ptr`/`ptr_to_int`).
    get_symbols(context, val, false)
}
/// Get [Symbol]s, both [Symbol::Local]s and [Symbol::Arg]s, reachable
/// from the `val`.
///
/// If `gep_only` is `true` only the [Symbol]s reachable via GEP instructions
/// are returned. Otherwise, the result also contains [Symbol]s reachable
/// via referencing (`&`) and dereferencing (`*`).
///
/// If the `val` is not a pointer, an empty set is returned and marked as
/// [ReferredSymbols::Complete].
fn get_symbols(context: &Context, val: Value, gep_only: bool) -> ReferredSymbols {
    // The input to this recursive function is always a pointer.
    // The function tracks backwards where the pointer is coming from.
    fn get_symbols_rec(
        context: &Context,
        symbols: &mut FxIndexSet<Symbol>,
        visited: &mut FxHashSet<Value>,
        ptr: Value,
        gep_only: bool,
        is_complete: &mut bool,
    ) {
        // Collects the symbols reachable via a block argument `arg`.
        // An argument of the `entry` block is a function argument and is
        // itself a [Symbol]; otherwise, the search continues through the
        // values that the argument receives from each predecessor block.
        fn get_argument_symbols(
            context: &Context,
            symbols: &mut FxIndexSet<Symbol>,
            visited: &mut FxHashSet<Value>,
            arg: BlockArgument,
            gep_only: bool,
            is_complete: &mut bool,
        ) {
            if arg.block.get_label(context) == "entry" {
                symbols.insert(Symbol::Arg(arg));
            } else {
                arg.block
                    .pred_iter(context)
                    .map(|pred| arg.get_val_coming_from(context, pred).unwrap())
                    .for_each(|v| {
                        get_symbols_rec(context, symbols, visited, v, gep_only, is_complete)
                    })
            }
        }
        // Same as [get_argument_symbols] above, but for a block argument that
        // holds a `u64` address rather than a pointer.
        fn get_symbols_from_u64_address_argument(
            context: &Context,
            symbols: &mut FxIndexSet<Symbol>,
            visited: &mut FxHashSet<Value>,
            u64_address_arg: BlockArgument,
            is_complete: &mut bool,
        ) {
            if u64_address_arg.block.get_label(context) == "entry" {
                // The u64 address is coming from a function argument.
                // Same as in the case of a pointer coming from a function argument,
                // we collect it.
                symbols.insert(Symbol::Arg(u64_address_arg));
            } else {
                u64_address_arg
                    .block
                    .pred_iter(context)
                    .map(|pred| u64_address_arg.get_val_coming_from(context, pred).unwrap())
                    .for_each(|v| {
                        get_symbols_from_u64_address_rec(context, symbols, visited, v, is_complete)
                    })
            }
        }
        // The input to this recursive function is always a `u64` holding an address.
        // The below chain of instructions are specific to patterns where pointers
        // are obtained from `u64` addresses and vice versa. This includes:
        // - referencing and dereferencing
        // - raw pointers (`__addr_of`)
        // - GTF intrinsic
        fn get_symbols_from_u64_address_rec(
            context: &Context,
            symbols: &mut FxIndexSet<Symbol>,
            visited: &mut FxHashSet<Value>,
            u64_address: Value,
            is_complete: &mut bool,
        ) {
            match context.values[u64_address.0].value {
                // Follow the sources of the address, and for every source address,
                // recursively come back to this function.
                ValueDatum::Argument(arg) => get_symbols_from_u64_address_argument(
                    context,
                    symbols,
                    visited,
                    arg,
                    is_complete,
                ),
                // 1. Patterns related to references and raw pointers.
                ValueDatum::Instruction(Instruction {
                    // The address is coming from a `raw_pointer` or `&T` variable.
                    op: InstOp::Load(_loaded_from),
                    ..
                }) => {
                    // TODO: https://github.com/FuelLabs/sway/issues/6065
                    // We want to track sources of loaded addresses.
                    // Currently we don't and simply mark the result as incomplete.
                    *is_complete = false;
                }
                ValueDatum::Instruction(Instruction {
                    op: InstOp::PtrToInt(ptr_value, _),
                    ..
                }) => get_symbols_rec(context, symbols, visited, ptr_value, false, is_complete),
                // 2. The address is coming from a GTF instruction.
                ValueDatum::Instruction(Instruction {
                    // There cannot be a symbol behind it, and so the returned set is complete.
                    op: InstOp::FuelVm(FuelVmInstruction::Gtf { .. }),
                    ..
                }) => (),
                // In other cases, e.g., getting the integer address from an unsafe pointer
                // arithmetic, or as a function result, etc. we bail out and mark the
                // collection as not being guaranteed to be a complete set of all referred symbols.
                _ => {
                    *is_complete = false;
                }
            }
        }
        // Guard against revisiting values, e.g., when reached via multiple
        // predecessor blocks or cycles in the data flow.
        if visited.contains(&ptr) {
            return;
        }
        visited.insert(ptr);
        match context.values[ptr.0].value {
            // A local variable: the root of a chain; collect it.
            ValueDatum::Instruction(Instruction {
                op: InstOp::GetLocal(local),
                ..
            }) => {
                symbols.insert(Symbol::Local(local));
            }
            // A GEP: continue the search at its base pointer.
            ValueDatum::Instruction(Instruction {
                op: InstOp::GetElemPtr { base, .. },
                ..
            }) => get_symbols_rec(context, symbols, visited, base, gep_only, is_complete),
            // A pointer obtained from a `u64` address: switch to the
            // address-tracking traversal.
            ValueDatum::Instruction(Instruction {
                op: InstOp::IntToPtr(u64_address, _),
                ..
            }) if !gep_only => get_symbols_from_u64_address_rec(
                context,
                symbols,
                visited,
                u64_address,
                is_complete,
            ),
            // We've reached a configurable at the top of the chain.
            // There cannot be a symbol behind it, and so the returned set is complete.
            ValueDatum::Instruction(Instruction {
                op: InstOp::GetConfig(_, _),
                ..
            }) if !gep_only => (),
            // We've reached a global at the top of the chain.
            // There cannot be a symbol behind it, and so the returned set is complete.
            ValueDatum::Instruction(Instruction {
                op: InstOp::GetGlobal(_),
                ..
            }) if !gep_only => (),
            // We've reached a storage key at the top of the chain.
            // There cannot be a symbol behind it, and so the returned set is complete.
            ValueDatum::Instruction(Instruction {
                op: InstOp::GetStorageKey(_),
                ..
            }) if !gep_only => (),
            // Note that in this case, the pointer itself is coming from a `Load`,
            // and not an address. So, we just continue following the pointer.
            ValueDatum::Instruction(Instruction {
                op: InstOp::Load(loaded_from),
                ..
            }) if !gep_only => get_symbols_rec(
                context,
                symbols,
                visited,
                loaded_from,
                gep_only,
                is_complete,
            ),
            // A pointer cast: follow the original, uncast pointer.
            ValueDatum::Instruction(Instruction {
                op: InstOp::CastPtr(ptr_to_cast, _),
                ..
            }) if !gep_only => get_symbols_rec(
                context,
                symbols,
                visited,
                ptr_to_cast,
                gep_only,
                is_complete,
            ),
            ValueDatum::Argument(arg) => {
                get_argument_symbols(context, symbols, visited, arg, gep_only, is_complete)
            }
            // We've reached a constant at the top of the chain.
            // There cannot be a symbol behind it, and so the returned set is complete.
            ValueDatum::Constant(_) if !gep_only => (),
            _ if !gep_only => {
                // In other cases, e.g., getting the pointer from an ASM block,
                // or as a function result, etc., we cannot track the value up the chain
                // and cannot guarantee that the value is not coming from some of the symbols.
                // So, we bail out and mark the collection as not being guaranteed to be
                // a complete set of all referred symbols.
                *is_complete = false;
            }
            // In the case of GEP only access, the returned set is always complete.
            _ => (),
        }
    }
    // A non-pointer value cannot refer to any symbol; the empty set is complete.
    if !val.get_type(context).is_some_and(|t| t.is_ptr(context)) {
        return ReferredSymbols::new(true, IndexSet::default());
    }
    let mut visited = FxHashSet::default();
    let mut symbols = IndexSet::default();
    // Assume completeness; the traversal clears this flag when it must bail out.
    let mut is_complete = true;
    get_symbols_rec(
        context,
        &mut symbols,
        &mut visited,
        val,
        gep_only,
        &mut is_complete,
    );
    ReferredSymbols::new(is_complete, symbols)
}
/// Return the single [Symbol] referred by `val` via a GEP chain, or `None`
/// if `val` refers to no symbol or to more than one.
pub fn get_gep_symbol(context: &Context, val: Value) -> Option<Symbol> {
    let syms = get_gep_referred_symbols(context, val);
    if syms.len() == 1 {
        syms.iter().next().cloned()
    } else {
        None
    }
}
/// Return [Symbol] referred by `val` if there is _exactly one_ symbol referred,
/// or `None` if there are no [Symbol]s referred or if there is more than one
/// referred.
pub fn get_referred_symbol(context: &Context, val: Value) -> Option<Symbol> {
    match get_referred_symbols(context, val) {
        // Only an unambiguous, complete, single-element result qualifies.
        ReferredSymbols::Complete(syms) if syms.len() == 1 => syms.iter().next().cloned(),
        // An incomplete result might hide further symbols; a complete result
        // with zero or several symbols has no single symbol to return.
        _ => None,
    }
}
/// The result of the escape analysis (see [compute_escaped_symbols]):
/// the [Symbol]s of a function that escape it.
pub enum EscapedSymbols {
    /// Guarantees that all escaping [Symbol]s are collected.
    Complete(FxHashSet<Symbol>),
    /// Denotes that there _might_ be additional escaping [Symbol]s
    /// out of the collected ones.
    Incomplete(FxHashSet<Symbol>),
}
// Marker impl: allows [EscapedSymbols] to be stored as a pass analysis result.
impl AnalysisResultT for EscapedSymbols {}
/// The runner of the escaped-symbols pass; type-erases the result of
/// [compute_escaped_symbols] into an [AnalysisResult].
pub fn compute_escaped_symbols_pass(
    context: &Context,
    _analyses: &AnalysisResults,
    function: Function,
) -> Result<AnalysisResult, IrError> {
    Ok(Box::new(compute_escaped_symbols(context, &function)))
}
/// Computes the [Symbol]s in `function` that escape it: symbols whose
/// pointers are passed to ASM blocks, passed as mutable call arguments,
/// used as contract call parameters, cast to integers, or stored somewhere.
fn compute_escaped_symbols(context: &Context, function: &Function) -> EscapedSymbols {
    // Adds all symbols referred by `val` to `result`, clearing `is_complete`
    // if the referred symbols of `val` could not be fully collected.
    let add_from_val = |result: &mut FxHashSet<Symbol>, val: &Value, is_complete: &mut bool| {
        let (complete, syms) = get_referred_symbols(context, *val).consume();
        *is_complete &= complete;
        syms.iter().for_each(|s| {
            result.insert(*s);
        });
    };
    let mut result = FxHashSet::default();
    let mut is_complete = true;
    for (_block, inst) in function.instruction_iter(context) {
        match &inst.get_instruction(context).unwrap().op {
            // Pointers handed to inline ASM escape the analysis.
            InstOp::AsmBlock(_, args) => {
                for arg_init in args.iter().filter_map(|arg| arg.initializer) {
                    add_from_val(&mut result, &arg_init, &mut is_complete)
                }
            }
            InstOp::UnaryOp { .. } => (),
            InstOp::BinaryOp { .. } => (),
            InstOp::BitCast(_, _) => (),
            InstOp::Branch(_) => (),
            InstOp::Call(callee, args) => args
                .iter()
                .enumerate()
                .filter(|(arg_idx, _arg)| {
                    // Immutable arguments are not considered as escaping symbols.
                    !callee.is_arg_immutable(context, *arg_idx)
                })
                .for_each(|(_, v)| add_from_val(&mut result, v, &mut is_complete)),
            InstOp::CastPtr(ptr, _) => add_from_val(&mut result, ptr, &mut is_complete),
            InstOp::Cmp(_, _, _) => (),
            InstOp::ConditionalBranch { .. } => (),
            InstOp::ContractCall { params, .. } => {
                add_from_val(&mut result, params, &mut is_complete)
            }
            InstOp::FuelVm(_) => (),
            InstOp::GetLocal(_) => (),
            InstOp::GetGlobal(_) => (),
            InstOp::GetConfig(_, _) => (),
            InstOp::GetStorageKey(_) => (),
            InstOp::GetElemPtr { .. } => (),
            InstOp::IntToPtr(_, _) => (),
            InstOp::Load(_) => (),
            InstOp::MemCopyBytes { .. } => (),
            InstOp::MemCopyVal { .. } => (),
            InstOp::MemClearVal { .. } => (),
            InstOp::Nop => (),
            // The pointer becomes a plain integer whose uses cannot be tracked.
            InstOp::PtrToInt(v, _) => add_from_val(&mut result, v, &mut is_complete),
            InstOp::Ret(_, _) => (),
            // The stored value may be a pointer that is later read back.
            InstOp::Store { stored_val, .. } => {
                add_from_val(&mut result, stored_val, &mut is_complete)
            }
            InstOp::Alloc { .. } => (),
        }
    }
    if is_complete {
        EscapedSymbols::Complete(result)
    } else {
        EscapedSymbols::Incomplete(result)
    }
}
/// Pointers that may possibly be loaded from the instruction `inst`.
pub fn get_loaded_ptr_values(context: &Context, inst: Value) -> Vec<Value> {
    match &inst.get_instruction(context).unwrap().op {
        // Instructions that never read through a pointer operand.
        InstOp::UnaryOp { .. }
        | InstOp::BinaryOp { .. }
        | InstOp::BitCast(_, _)
        | InstOp::Branch(_)
        | InstOp::ConditionalBranch { .. }
        | InstOp::Cmp(_, _, _)
        | InstOp::Nop
        | InstOp::CastPtr(_, _)
        | InstOp::GetLocal(_)
        | InstOp::GetGlobal(_)
        | InstOp::GetConfig(_, _)
        | InstOp::GetStorageKey(_)
        | InstOp::GetElemPtr { .. }
        | InstOp::IntToPtr(_, _) => vec![],
        InstOp::PtrToInt(src_val_ptr, _) => vec![*src_val_ptr],
        InstOp::ContractCall {
            params,
            coins,
            asset_id,
            ..
        } => vec![*params, *coins, *asset_id],
        // Conservatively, every call argument may be read by the callee.
        InstOp::Call(_, args) => args.clone(),
        // Conservatively, every initialized ASM block argument may be read.
        InstOp::AsmBlock(_, args) => args.iter().filter_map(|val| val.initializer).collect(),
        InstOp::MemClearVal { .. } => vec![],
        // Instructions with a single pointer operand that is read from.
        InstOp::MemCopyBytes { src_val_ptr, .. }
        | InstOp::MemCopyVal { src_val_ptr, .. }
        | InstOp::Ret(src_val_ptr, _)
        | InstOp::Load(src_val_ptr)
        | InstOp::FuelVm(FuelVmInstruction::Log {
            log_val: src_val_ptr,
            ..
        })
        | InstOp::FuelVm(FuelVmInstruction::StateLoadWord(src_val_ptr))
        | InstOp::FuelVm(FuelVmInstruction::StateStoreWord {
            key: src_val_ptr, ..
        })
        | InstOp::FuelVm(FuelVmInstruction::StateLoadQuadWord {
            key: src_val_ptr, ..
        })
        | InstOp::FuelVm(FuelVmInstruction::StateClear {
            key: src_val_ptr, ..
        }) => vec![*src_val_ptr],
        // Instructions with two pointer operands that are read from.
        InstOp::FuelVm(FuelVmInstruction::StateStoreQuadWord {
            stored_val: memopd1,
            key: memopd2,
            ..
        })
        | InstOp::FuelVm(FuelVmInstruction::Smo {
            recipient: memopd1,
            message: memopd2,
            ..
        }) => vec![*memopd1, *memopd2],
        // A store only writes through its destination pointer.
        InstOp::Store { dst_val_ptr: _, .. } => vec![],
        InstOp::Alloc { .. } => vec![],
        InstOp::FuelVm(FuelVmInstruction::Gtf { .. })
        | InstOp::FuelVm(FuelVmInstruction::ReadRegister(_))
        | InstOp::FuelVm(FuelVmInstruction::Revert(_) | FuelVmInstruction::JmpMem) => vec![],
        InstOp::FuelVm(FuelVmInstruction::WideUnaryOp { arg, .. }) => vec![*arg],
        InstOp::FuelVm(FuelVmInstruction::WideBinaryOp { arg1, arg2, .. })
        | InstOp::FuelVm(FuelVmInstruction::WideCmpOp { arg1, arg2, .. }) => {
            vec![*arg1, *arg2]
        }
        InstOp::FuelVm(FuelVmInstruction::WideModularOp {
            arg1, arg2, arg3, ..
        }) => vec![*arg1, *arg2, *arg3],
        InstOp::FuelVm(FuelVmInstruction::Retd { ptr, .. }) => vec![*ptr],
    }
}
/// [Symbol]s that may possibly, directly or indirectly, be loaded from the instruction `inst`.
pub fn get_loaded_symbols(context: &Context, inst: Value) -> ReferredSymbols {
    let mut symbols = IndexSet::default();
    let mut all_complete = true;
    for ptr in get_loaded_ptr_values(context, inst) {
        // The overall result is complete only if every operand's symbol
        // set is complete.
        let (complete, referred) = get_referred_symbols(context, ptr).consume();
        all_complete &= complete;
        symbols.extend(referred);
    }
    ReferredSymbols::new(all_complete, symbols)
}
/// Pointers that may possibly be stored to the instruction `inst`.
pub fn get_stored_ptr_values(context: &Context, inst: Value) -> Vec<Value> {
    match &inst.get_instruction(context).unwrap().op {
        // Instructions that never write through a pointer operand.
        InstOp::UnaryOp { .. }
        | InstOp::BinaryOp { .. }
        | InstOp::BitCast(_, _)
        | InstOp::Branch(_)
        | InstOp::ConditionalBranch { .. }
        | InstOp::Cmp(_, _, _)
        | InstOp::Nop
        | InstOp::PtrToInt(_, _)
        | InstOp::Ret(_, _)
        | InstOp::CastPtr(_, _)
        | InstOp::GetLocal(_)
        | InstOp::GetGlobal(_)
        | InstOp::GetConfig(_, _)
        | InstOp::GetStorageKey(_)
        | InstOp::GetElemPtr { .. }
        | InstOp::IntToPtr(_, _) => vec![],
        InstOp::ContractCall { params, .. } => vec![*params],
        // Conservatively, every call argument may be written by the callee.
        InstOp::Call(_, args) => args.clone(),
        // Conservatively, every initialized ASM block argument may be written.
        InstOp::AsmBlock(_, args) => args.iter().filter_map(|val| val.initializer).collect(),
        // Instructions that write through their destination pointer.
        InstOp::MemCopyBytes { dst_val_ptr, .. }
        | InstOp::MemCopyVal { dst_val_ptr, .. }
        | InstOp::MemClearVal { dst_val_ptr }
        | InstOp::Store { dst_val_ptr, .. } => vec![*dst_val_ptr],
        InstOp::Load(_) => vec![],
        InstOp::Alloc { .. } => vec![],
        InstOp::FuelVm(vmop) => match vmop {
            FuelVmInstruction::Gtf { .. }
            | FuelVmInstruction::Log { .. }
            | FuelVmInstruction::ReadRegister(_)
            | FuelVmInstruction::Revert(_)
            | FuelVmInstruction::JmpMem
            | FuelVmInstruction::Smo { .. }
            | FuelVmInstruction::StateClear { .. } => vec![],
            FuelVmInstruction::StateLoadQuadWord { load_val, .. } => vec![*load_val],
            FuelVmInstruction::StateLoadWord(_) | FuelVmInstruction::StateStoreWord { .. } => {
                vec![]
            }
            FuelVmInstruction::StateStoreQuadWord { stored_val: _, .. } => vec![],
            // Wide operations write their result through a result pointer.
            FuelVmInstruction::WideUnaryOp { result, .. }
            | FuelVmInstruction::WideBinaryOp { result, .. }
            | FuelVmInstruction::WideModularOp { result, .. } => vec![*result],
            FuelVmInstruction::WideCmpOp { .. } => vec![],
            _ => vec![],
        },
    }
}
/// [Symbol]s that may possibly, directly or indirectly, be stored to the instruction `inst`.
pub fn get_stored_symbols(context: &Context, inst: Value) -> ReferredSymbols {
    let mut symbols = IndexSet::default();
    let mut all_complete = true;
    for ptr in get_stored_ptr_values(context, inst) {
        // The overall result is complete only if every operand's symbol
        // set is complete.
        let (complete, referred) = get_referred_symbols(context, ptr).consume();
        all_complete &= complete;
        symbols.extend(referred);
    }
    ReferredSymbols::new(all_complete, symbols)
}
/// Combine a series of GEPs into one.
pub fn combine_indices(context: &Context, val: Value) -> Option<Vec<Value>> {
    match &context.values[val.0].value {
        // A local variable or an argument is the root of the chain:
        // no indices collected so far.
        ValueDatum::Instruction(Instruction {
            op: InstOp::GetLocal(_),
            ..
        }) => Some(Vec::new()),
        ValueDatum::Argument(_) => Some(Vec::new()),
        // A GEP: first combine the indices of the base pointer, then append
        // this GEP's own indices.
        ValueDatum::Instruction(Instruction {
            op: InstOp::GetElemPtr { base, indices, .. },
            ..
        }) => {
            let mut combined = combine_indices(context, *base)?;
            combined.extend_from_slice(indices);
            Some(combined)
        }
        // Anything else cannot be folded into a single index list.
        _ => None,
    }
}
/// Given a memory pointer instruction, compute the offset of indexed element,
/// for each symbol that it may alias to.
/// If for any symbol we can't compute it, return None.
pub fn get_memory_offsets(context: &Context, val: Value) -> Option<FxIndexMap<Symbol, u64>> {
    let mut offsets: FxIndexMap<Symbol, u64> = FxIndexMap::default();
    for sym in get_gep_referred_symbols(context, val) {
        // The offset of the access within `sym` is the pointee type's offset
        // at the (combined) GEP indices of `val`.
        let pointee_ty = sym.get_type(context).get_pointee_type(context)?;
        let offset =
            pointee_ty.get_value_indexed_offset(context, &combine_indices(context, val)?)?;
        offsets.insert(sym, offset);
    }
    Some(offsets)
}
/// Can memory ranges [val1, val1+len1] and [val2, val2+len2] overlap?
/// Conservatively returns true if cannot statically determine.
pub fn may_alias(context: &Context, val1: Value, len1: u64, val2: Value, len2: u64) -> bool {
    // Without known offsets for both pointers we must stay conservative.
    let Some(offsets1) = get_memory_offsets(context, val1) else {
        return true;
    };
    let Some(offsets2) = get_memory_offsets(context, val2) else {
        return true;
    };
    // The accesses may alias iff they share a symbol and their
    // [offset, offset + len) ranges intersect.
    offsets1.iter().any(|(sym, off1)| {
        offsets2.get(sym).is_some_and(|off2| {
            (*off1 <= *off2 && *off1 + len1 > *off2) || (*off2 <= *off1 && *off2 + len2 > *off1)
        })
    })
}
/// Are memory ranges [val1, val1+len1] and [val2, val2+len2] exactly the same?
/// Conservatively returns false if cannot statically determine.
pub fn must_alias(context: &Context, val1: Value, len1: u64, val2: Value, len2: u64) -> bool {
    match (
        get_memory_offsets(context, val1),
        get_memory_offsets(context, val2),
    ) {
        // Exact aliasing can only be proven when each side resolves to a
        // unique symbol, with identical offsets and access lengths.
        (Some(offsets1), Some(offsets2)) if offsets1.len() == 1 && offsets2.len() == 1 => {
            let (sym1, off1) = offsets1.iter().next().unwrap();
            let (sym2, off2) = offsets2.iter().next().unwrap();
            sym1 == sym2 && off1 == off2 && len1 == len2
        }
        _ => false,
    }
}
/// For a pointer argument `ptr_val`, what's the size of its pointee.
pub fn pointee_size(context: &Context, ptr_val: Value) -> u64 {
    let ptr_ty = ptr_val.get_type(context).unwrap();
    let pointee_ty = ptr_ty
        .get_pointee_type(context)
        .expect("Expected arg to be a pointer");
    pointee_ty.size(context).in_bytes()
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/src/bin/opt.rs | sway-ir/src/bin/opt.rs | use std::{
io::{BufReader, BufWriter, Read, Write},
process::exit,
};
use anyhow::anyhow;
use sway_features::ExperimentalFeatures;
use sway_ir::{
insert_after_each, register_known_passes, Backtrace, PassGroup, PassManager,
MODULE_PRINTER_NAME, MODULE_VERIFIER_NAME,
};
use sway_types::SourceEngine;
// -------------------------------------------------------------------------------------------------
fn main() -> Result<(), anyhow::Error> {
    // Register every known optimisation pass so that command line arguments
    // can be resolved to registered passes by name.
    let mut pass_mgr = PassManager::default();
    register_known_passes(&mut pass_mgr);

    // Parse the command line; the order in which passes appear is preserved.
    let config = ConfigBuilder::build(&pass_mgr, std::env::args())?;

    // Read the IR source from the input file, or from stdin when none given.
    let ir_text = read_from_input(&config.input_path)?;
    let source_engine = SourceEngine::default();
    // Parse it. XXX Improve this error message too.
    let mut ir = sway_ir::parser::parse(
        &ir_text,
        &source_engine,
        ExperimentalFeatures::default(),
        Backtrace::default(),
    )?;

    // Queue the requested passes, in command line order.
    let mut pass_group = PassGroup::default();
    for pass in config.passes {
        pass_group.append_pass(pass);
    }

    // Optionally interleave a printer and/or verifier after every pass.
    if config.print_after_each {
        pass_group = insert_after_each(pass_group, MODULE_PRINTER_NAME);
    }
    if config.verify_after_each {
        pass_group = insert_after_each(pass_group, MODULE_VERIFIER_NAME);
    }

    pass_mgr.run(&mut ir, &pass_group)?;

    // Emit the transformed IR to the output file, or to stdout when none given.
    write_to_output(ir, &config.output_path)?;
    Ok(())
}
/// Reads the whole input as a (lossily decoded) UTF-8 string, from the file
/// at `path_str`, or from stdin when no path is given.
fn read_from_input(path_str: &Option<String>) -> std::io::Result<String> {
    let mut bytes = Vec::new();
    if let Some(path) = path_str {
        let file = std::fs::File::open(path)?;
        BufReader::new(file).read_to_end(&mut bytes)?;
    } else {
        BufReader::new(std::io::stdin()).read_to_end(&mut bytes)?;
    }
    // Tolerate invalid UTF-8 by replacing bad sequences rather than failing.
    Ok(String::from_utf8_lossy(&bytes).into_owned())
}
/// Writes `ir_str` to the file at `path_str`, or to stdout when no path is
/// given.
///
/// # Errors
///
/// Returns any I/O error raised while creating, writing, or flushing the
/// output file.
fn write_to_output<S: Into<String>>(ir_str: S, path_str: &Option<String>) -> std::io::Result<()> {
    match path_str {
        None => {
            println!("{}", ir_str.into());
            Ok(())
        }
        Some(path_str) => {
            let file = std::fs::File::create(path_str)?;
            let mut writer = BufWriter::new(file);
            // `write` may perform a partial write and silently drop the rest;
            // `write_all` guarantees the whole string is written or errors.
            writer.write_all(ir_str.into().as_bytes())?;
            // Flush explicitly: `Drop` for `BufWriter` swallows flush errors.
            writer.flush()
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Using a bespoke CLI parser since the order in which passes are specified is important.
/// The fully parsed command line configuration.
#[derive(Default)]
struct Config {
    input_path: Option<String>,  // `-i`; read from stdin when `None`.
    output_path: Option<String>, // `-o`; print to stdout when `None`.
    verify_after_each: bool,     // `-verify-after-each`
    print_after_each: bool,      // `-print-after-each`
    _time_passes: bool,          // currently never set by the parser
    _stats: bool,                // currently never set by the parser
    passes: Vec<&'static str>,   // pass names, in command line order
}
// This is a little clumsy in that it needs to consume items from the iterator carefully in each
// method to ensure we don't enter a weird state.
struct ConfigBuilder<'a, I: Iterator<Item = String>> {
    next: Option<String>,      // One-item lookahead into the argument stream.
    rest: I,                   // The remaining, not yet consumed arguments.
    cfg: Config,               // The configuration built up so far.
    pass_mgr: &'a PassManager, // Used to validate pass names and print help.
}
impl<I: Iterator<Item = String>> ConfigBuilder<'_, I> {
    /// Parses the full argument stream `rest` into a [Config].
    fn build(pass_mgr: &PassManager, mut rest: I) -> Result<Config, anyhow::Error> {
        rest.next(); // Skip the first arg which is the binary name.
        let next = rest.next();
        ConfigBuilder {
            next,
            rest,
            cfg: Config::default(),
            pass_mgr,
        }
        .build_root()
    }

    /// Dispatches on the next argument; the recursion terminates when the
    /// argument stream is exhausted.
    fn build_root(mut self) -> Result<Config, anyhow::Error> {
        match self.next {
            None => Ok(self.cfg),
            Some(opt) => {
                self.next = self.rest.next();
                match opt.as_str() {
                    "-i" => self.build_input(),
                    "-o" => self.build_output(),
                    "-verify-after-each" => {
                        self.cfg.verify_after_each = true;
                        self.build_root()
                    }
                    "-print-after-each" => {
                        self.cfg.print_after_each = true;
                        self.build_root()
                    }
                    "-h" => {
                        print!(
                            "Usage: opt [passname...] -i input_file -o output_file\n\n{}",
                            self.pass_mgr.help_text()
                        );
                        print!("\n\nIn the absence of -i or -o options, input is taken from stdin and output is printed to stdout.\n");
                        exit(0);
                    }
                    name => {
                        // Anything starting with '-' is an (unknown) option;
                        // everything else is expected to be a pass name.
                        if matches!(opt.chars().next(), Some('-')) {
                            Err(anyhow!("Unrecognised option '{opt}'."))
                        } else {
                            self.build_pass(name)
                        }
                    }
                }
            }
        }
    }

    /// Consumes the argument of `-i` as the input path.
    fn build_input(mut self) -> Result<Config, anyhow::Error> {
        match self.next {
            None => Err(anyhow!("-i option requires an argument.")),
            Some(path) => {
                self.cfg.input_path = Some(path);
                self.next = self.rest.next();
                self.build_root()
            }
        }
    }

    /// Consumes the argument of `-o` as the output path.
    fn build_output(mut self) -> Result<Config, anyhow::Error> {
        match self.next {
            None => Err(anyhow!("-o option requires an argument.")),
            Some(path) => {
                self.cfg.output_path = Some(path);
                self.next = self.rest.next();
                self.build_root()
            }
        }
    }

    /// Records a pass by name, after checking it is a registered pass.
    fn build_pass(mut self, name: &str) -> Result<Config, anyhow::Error> {
        if let Some(pass) = self.pass_mgr.lookup_registered_pass(name) {
            self.cfg.passes.push(pass.name);
            self.build_root()
        } else {
            Err(anyhow!(
                "Unrecognised pass name '{name}'.\n\n{}",
                self.pass_mgr.help_text()
            ))
        }
    }
}
// -------------------------------------------------------------------------------------------------
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/tests/tests.rs | sway-ir/tests/tests.rs | use std::path::PathBuf;
use itertools::Itertools;
use sway_features::ExperimentalFeatures;
use sway_ir::{
create_arg_demotion_pass, create_arg_pointee_mutability_tagger_pass, create_ccp_pass,
create_const_demotion_pass, create_const_folding_pass, create_cse_pass, create_dce_pass,
create_dom_fronts_pass, create_dominators_pass, create_escaped_symbols_pass,
create_mem2reg_pass, create_memcpyopt_pass, create_memcpyprop_reverse_pass,
create_misc_demotion_pass, create_postorder_pass, create_ret_demotion_pass,
create_simplify_cfg_pass, metadata_to_inline, optimize as opt, register_known_passes,
Backtrace, Context, Function, IrError, PassGroup, PassManager, Value, DCE_NAME,
FN_DEDUP_DEBUG_PROFILE_NAME, FN_DEDUP_RELEASE_PROFILE_NAME, GLOBALS_DCE_NAME, MEM2REG_NAME,
SROA_NAME,
};
use sway_types::SourceEngine;
// -------------------------------------------------------------------------------------------------
// Utility for finding test files and running FileCheck. See actual pass invocations below.
fn run_tests<F: Fn(&str, &mut Context) -> bool>(sub_dir: &str, opt_fn: F) {
    let source_engine = SourceEngine::default();
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    let dir: PathBuf = format!("{manifest_dir}/tests/{sub_dir}").into();
    for entry in std::fs::read_dir(dir).unwrap() {
        let path = entry.unwrap().path();
        let input_bytes = std::fs::read(&path).unwrap();
        let input = String::from_utf8_lossy(&input_bytes);
        let experimental = ExperimentalFeatures {
            new_encoding: false,
            // TODO: Properly support experimental features in IR tests.
            ..Default::default()
        };
        // TODO: Properly support backtrace build option in IR tests.
        let backtrace = Backtrace::default();
        let mut ir = sway_ir::parser::parse(&input, &source_engine, experimental, backtrace)
            .unwrap_or_else(|parse_err| {
                println!("{}: {parse_err}", path.display());
                panic!()
            });
        // The first line of the test file may carry per-test parameters,
        // which are interpreted by `opt_fn`.
        let first_line = input.split('\n').next().unwrap();
        // The tests should return true, indicating they made modifications.
        assert!(
            opt_fn(first_line, &mut ir),
            "Pass returned false (no changes made to {}).",
            path.display()
        );
        // The optimized IR must still pass verification.
        ir.verify().unwrap_or_else(|err| {
            println!("{err}");
            panic!();
        });
        // Check the optimized output against the FileCheck directives
        // embedded in the test file itself.
        let output = sway_ir::printer::to_string(&ir);
        let chkr = filecheck::CheckerBuilder::new()
            .text(&input)
            .unwrap()
            .finish();
        if chkr.is_empty() {
            println!("{output}");
            panic!("No filecheck directives found in test: {}", path.display());
        }
        match chkr.explain(&output, filecheck::NO_VARIABLES) {
            Ok((success, report)) if !success => {
                println!("--- FILECHECK FAILED FOR {}", path.display());
                println!("{report}");
                panic!()
            }
            Err(e) => {
                panic!("filecheck directive error while checking: {e}");
            }
            _ => (),
        }
    }
}
// Utility for finding test files and running IR verifier tests.
// Each test file must contain an IR code that is parsable,
// but does not pass IR verification.
// Each test file must contain exactly one `// error: ...` line
// that specifies the expected IR verification error.
/// Runs every IR verifier test found in `tests/<sub_dir>`.
///
/// Each test file must parse successfully but fail IR verification, and must
/// declare exactly one expected error via a `// error: ...` comment line.
fn run_ir_verifier_tests(sub_dir: &str) {
    let source_engine = SourceEngine::default();
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    let dir: PathBuf = format!("{manifest_dir}/tests/{sub_dir}").into();
    for entry in std::fs::read_dir(dir).unwrap() {
        let path = entry.unwrap().path();
        let input_bytes = std::fs::read(&path).unwrap();
        let input = String::from_utf8_lossy(&input_bytes);
        // Extract the expected error, declared as a `// error: ...` line.
        let expected_errors = input
            .lines()
            .filter(|line| line.starts_with("// error: "))
            .collect_vec();
        let expected_error = match expected_errors[..] {
            [] => {
                println!(
                    "--- IR verifier test does not contain the expected error: {}",
                    path.display()
                );
                println!("The expected error must be specified by using the `// error: ` comment.");
                println!("E.g., `// error: This is the expected error`");
                println!("There must be exactly one error specified in each IR verifier test.");
                panic!();
            }
            [err] => err.replace("// error: ", ""),
            _ => {
                println!(
                    "--- IR verifier test contains more than one expected error: {}",
                    path.display()
                );
                println!(
                    "There must be exactly one expected error specified in each IR verifier test."
                );
                println!("The specified expected errors were:");
                println!("{}", expected_errors.join("\n"));
                panic!();
            }
        };
        let parse_result = sway_ir::parser::parse(
            &input,
            &source_engine,
            ExperimentalFeatures::default(),
            Backtrace::default(),
        );
        match parse_result {
            // Parsing *and* verifying succeeded: the test is broken, since it
            // must produce a verification error.
            Ok(_) => {
                println!(
                    "--- Parsing and validating an IR verifier test passed without errors: {}",
                    path.display()
                );
                println!("The expected IR validation error was: {expected_error}");
                panic!();
            }
            // A parse failure is also a broken test: the IR must be parsable.
            Err(err @ IrError::ParseFailure(_, _)) => {
                println!(
                    "--- Parsing of an IR verifier test failed: {}",
                    path.display()
                );
                println!(
                    "IR verifier test must be parsable and result in an IR verification error."
                );
                println!("The parsing error was: {err}");
                panic!();
            }
            // A verification error: it must match the declared expectation.
            Err(err) => {
                let err = format!("{err}");
                if !err.contains(&expected_error) {
                    println!("--- IR verifier test failed: {}", path.display());
                    println!("The expected error was: {expected_error}");
                    println!("The actual IR verification error was: {err}");
                    panic!();
                }
            }
        }
    }
}
// -------------------------------------------------------------------------------------------------
#[test]
fn inline() {
run_tests("inline", |first_line, ir: &mut Context| {
let mut words = first_line.split(' ').collect::<Vec<_>>();
let params = if words.is_empty() || words.remove(0) != "//" {
Vec::new()
} else {
words
};
let funcs = ir
.module_iter()
.flat_map(|module| module.function_iter(ir))
.collect::<Vec<_>>();
if params.contains(&"all") {
// Just inline everything, replacing all CALL instructions.
funcs.into_iter().fold(false, |acc, func| {
opt::inline_all_function_calls(ir, &func).unwrap() || acc
})
} else {
// Get the parameters from the first line. See the inline/README.md for details. If
// there aren't any found then there won't be any constraints and it'll be the
// equivalent of asking to inline everything.
let (max_blocks, max_instrs, max_stack) =
params
.windows(2)
.fold(
(None, None, None),
|acc @ (b, i, s), param_and_arg| match param_and_arg[0] {
"blocks" => (param_and_arg[1].parse().ok(), i, s),
"instrs" => (b, param_and_arg[1].parse().ok(), s),
"stack" => (b, i, param_and_arg[1].parse().ok()),
_ => acc,
},
);
funcs.into_iter().fold(false, |acc, func| {
let predicate = |context: &Context, function: &Function, call_site: &Value| {
let attributed_inline =
metadata_to_inline(context, function.get_metadata(context));
match attributed_inline {
Some(opt::Inline::Never) => false,
Some(opt::Inline::Always) => true,
None => (opt::is_small_fn(max_blocks, max_instrs, max_stack))(
context, function, call_site,
),
}
};
opt::inline_some_function_calls(ir, &func, predicate).unwrap() || acc
})
}
})
}
// -------------------------------------------------------------------------------------------------
// Clippy suggests using the map iterator below directly instead of collecting from it first, but
// if we try that then we have borrowing issues with `ir` which is used within the closure.
#[allow(clippy::needless_collect)]
#[test]
fn constants() {
run_tests("constants", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_const_folding_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn ccp() {
run_tests("ccp", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
pass_mgr.register(create_postorder_pass());
pass_mgr.register(create_dominators_pass());
let pass = pass_mgr.register(create_ccp_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn simplify_cfg() {
run_tests("simplify_cfg", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_simplify_cfg_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn dce() {
run_tests("dce", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
pass_mgr.register(create_escaped_symbols_pass());
let mutability_tagger = pass_mgr.register(create_arg_pointee_mutability_tagger_pass());
pass_group.append_pass(mutability_tagger);
let pass = pass_mgr.register(create_dce_pass());
pass_group.append_pass(pass);
// Some tests require multiple passes of DCE to be run,
// this also reflects our actual compiler pipeline where DCE runs multiple times.
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn cse() {
run_tests("cse", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
pass_mgr.register(create_postorder_pass());
pass_mgr.register(create_dominators_pass());
let pass = pass_mgr.register(create_cse_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn mem2reg() {
run_tests("mem2reg", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
pass_mgr.register(create_postorder_pass());
pass_mgr.register(create_dominators_pass());
pass_mgr.register(create_dom_fronts_pass());
let pass = pass_mgr.register(create_mem2reg_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn demote_arg() {
run_tests("demote_arg", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_arg_demotion_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn demote_const() {
run_tests("demote_const", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_const_demotion_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn demote_ret() {
run_tests("demote_ret", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_ret_demotion_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn demote_misc() {
run_tests("demote_misc", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_misc_demotion_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn memcpyopt() {
run_tests("memcpyopt", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let mutability_tagger = pass_mgr.register(create_arg_pointee_mutability_tagger_pass());
pass_group.append_pass(mutability_tagger);
pass_mgr.register(create_escaped_symbols_pass());
let pass = pass_mgr.register(create_memcpyopt_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn memcpy_prop() {
run_tests("memcpy_prop", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
let pass = pass_mgr.register(create_memcpyprop_reverse_pass());
pass_group.append_pass(pass);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn sroa() {
run_tests("sroa", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
register_known_passes(&mut pass_mgr);
pass_group.append_pass(SROA_NAME);
pass_group.append_pass(MEM2REG_NAME);
pass_group.append_pass(DCE_NAME);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn globals_dce() {
run_tests("globals_dce", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
register_known_passes(&mut pass_mgr);
pass_group.append_pass(GLOBALS_DCE_NAME);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
// -------------------------------------------------------------------------------------------------
#[allow(clippy::needless_collect)]
#[test]
fn fndedup_debug() {
run_tests("fn_dedup/debug", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
register_known_passes(&mut pass_mgr);
pass_group.append_pass(FN_DEDUP_DEBUG_PROFILE_NAME);
pass_group.append_pass(GLOBALS_DCE_NAME);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
#[allow(clippy::needless_collect)]
#[test]
fn fndedup_release() {
run_tests("fn_dedup/release", |_first_line, ir: &mut Context| {
let mut pass_mgr = PassManager::default();
let mut pass_group = PassGroup::default();
register_known_passes(&mut pass_mgr);
pass_group.append_pass(FN_DEDUP_RELEASE_PROFILE_NAME);
pass_group.append_pass(GLOBALS_DCE_NAME);
pass_mgr.run(ir, &pass_group).unwrap()
})
}
#[test]
fn verify() {
run_ir_verifier_tests("verify")
}
// -------------------------------------------------------------------------------------------------
#[test]
fn serialize() {
// This isn't running a pass, it's just confirming that the IR can be loaded and printed, and
// FileCheck can just confirm certain instructions came out OK.
run_tests("serialize", |_, _: &mut Context| true)
}
// -------------------------------------------------------------------------------------------------
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-ir/sway-ir-macros/src/lib.rs | sway-ir/sway-ir-macros/src/lib.rs | use {
itertools::Itertools,
proc_macro::TokenStream,
quote::{format_ident, quote},
syn::{
parse_macro_input, Attribute, Data, DeriveInput, Fields, FieldsNamed, FieldsUnnamed, Ident,
Variant,
},
};
#[proc_macro_derive(DebugWithContext, attributes(in_context))]
pub fn derive_debug_with_context(input: TokenStream) -> TokenStream {
let DeriveInput {
ident,
generics,
data,
..
} = parse_macro_input!(input);
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let type_name = ident.to_string();
let body = match data {
Data::Struct(data_struct) => match &data_struct.fields {
Fields::Named(fields_named) => {
let (field_names, fmt_fields) = fmt_fields_named(&type_name, fields_named);
quote! {
let #ident { #(#field_names,)* } = self;
#fmt_fields
}
}
Fields::Unnamed(fields_unnamed) => {
let (field_names, fmt_fields) = fmt_fields_unnamed(&type_name, fields_unnamed);
quote! {
let #ident(#(#field_names,)*) = self;
#fmt_fields
}
}
Fields::Unit => {
quote! {
formatter.write_str(#type_name)
}
}
},
Data::Enum(data_enum) => {
let branches = {
data_enum.variants.iter().map(|variant| {
let Variant {
ident: variant_ident,
fields,
..
} = variant;
let type_variant_name = format!("{type_name}::{variant_ident}");
match fields {
Fields::Named(fields_named) => {
let (field_names, fmt_fields) =
fmt_fields_named(&type_variant_name, fields_named);
quote! {
#ident::#variant_ident { #(#field_names,)* } => {
#fmt_fields
},
}
}
Fields::Unnamed(fields_unnamed) => {
let (field_names, fmt_fields) =
fmt_fields_unnamed(&type_variant_name, fields_unnamed);
quote! {
#ident::#variant_ident(#(#field_names,)*) => {
#fmt_fields
},
}
}
Fields::Unit => {
quote! {
#ident::#variant_ident => {
formatter.write_str(#type_variant_name)
},
}
}
}
})
};
quote! {
match self {
#(#branches)*
}
}
}
Data::Union(_) => {
panic!("#[derive(DebugWithContext)] cannot be used on unions");
}
};
let output = quote! {
impl #impl_generics DebugWithContext for #ident #ty_generics
#where_clause
{
fn fmt_with_context<'a, 'c>(
&'a self,
formatter: &mut std::fmt::Formatter,
context: &'c Context,
) -> std::fmt::Result {
#body
}
}
};
output.into()
}
fn fmt_fields_named<'i>(
name: &str,
fields_named: &'i FieldsNamed,
) -> (Vec<&'i Ident>, proc_macro2::TokenStream) {
let field_names = {
fields_named
.named
.iter()
.map(|field| field.ident.as_ref().unwrap())
.collect::<Vec<_>>()
};
let fmt_fields = {
fields_named
.named
.iter()
.zip(field_names.iter())
.map(|(field, name)| {
let name_str = name.to_string();
let expr = pass_through_context(name, &field.attrs);
quote! {
debug_struct = debug_struct.field(#name_str, &#expr);
}
})
};
let token_tree = quote! {
let mut debug_struct = &mut formatter.debug_struct(#name);
#(#fmt_fields)*
debug_struct.finish()
};
(field_names, token_tree)
}
fn fmt_fields_unnamed(
name: &str,
fields_unnamed: &FieldsUnnamed,
) -> (Vec<Ident>, proc_macro2::TokenStream) {
let field_names = {
(0..fields_unnamed.unnamed.len())
.map(|i| format_ident!("field_{}", i))
.collect::<Vec<_>>()
};
let fmt_fields = {
fields_unnamed
.unnamed
.iter()
.zip(field_names.iter())
.map(|(field, name)| {
let expr = pass_through_context(name, &field.attrs);
quote! {
debug_tuple = debug_tuple.field(&#expr);
}
})
};
let token_tree = quote! {
let mut debug_tuple = &mut formatter.debug_tuple(#name);
#(#fmt_fields)*
debug_tuple.finish()
};
(field_names, token_tree)
}
fn pass_through_context(field_name: &Ident, attrs: &[Attribute]) -> proc_macro2::TokenStream {
let context_field_opt = {
attrs
.iter()
.filter_map(|attr| {
let attr_name = attr.path().get_ident()?;
if attr_name != "in_context" {
return None;
}
let context_field = {
try_parse_context_field_from_attr(attr)
.expect("malformed #[in_context(..)] attribute")
};
Some(context_field)
})
.dedup()
.at_most_one()
.expect("multiple #[in_context(..)] attributes on field")
};
match context_field_opt {
None => {
quote! {
#field_name.with_context(context)
}
}
Some(context_field) => {
quote! {
context.#context_field[*#field_name].with_context(context)
}
}
}
}
fn try_parse_context_field_from_attr(attr: &Attribute) -> Option<Ident> {
let mut context_fields = Vec::new();
let _ = attr.parse_nested_meta(|nested_meta| {
context_fields.push(nested_meta.path.get_ident().unwrap().clone());
Ok(())
});
if context_fields.len() != 1 {
None
} else {
context_fields.pop()
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-utils/src/lib.rs | sway-utils/src/lib.rs | pub mod constants;
pub mod helpers;
pub mod mapped_stack;
pub mod performance;
pub use constants::*;
pub use helpers::*;
pub use mapped_stack::*;
pub use performance::*;
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-utils/src/mapped_stack.rs | sway-utils/src/mapped_stack.rs | use std::collections::HashMap;
/// A HashMap that can hold multiple values and
/// fetch values in a LIFO manner. Rust's MultiMap
/// handles values in a FIFO manner.
#[derive(Debug)]
pub struct MappedStack<K: std::cmp::Eq + std::hash::Hash, V> {
container: HashMap<K, Vec<V>>,
}
impl<K: std::cmp::Eq + std::hash::Hash, V> MappedStack<K, V> {
pub fn new() -> MappedStack<K, V> {
MappedStack {
container: HashMap::<K, Vec<V>>::new(),
}
}
pub fn push(&mut self, k: K, v: V) {
match self.container.get_mut(&k) {
Some(val_vec) => {
val_vec.push(v);
}
None => {
self.container.insert(k, vec![v]);
}
}
}
pub fn get(&self, k: &K) -> Option<&V> {
self.container.get(k).and_then(|val_vec| val_vec.last())
}
pub fn get_mut(&mut self, k: &K) -> Option<&mut V> {
self.container
.get_mut(k)
.and_then(|val_vec| val_vec.last_mut())
}
pub fn pop(&mut self, k: &K) {
if let Some(val_vec) = self.container.get_mut(k) {
val_vec.pop();
if val_vec.is_empty() {
self.container.remove(k);
}
}
}
}
impl<K: std::cmp::Eq + std::hash::Hash, V> Default for MappedStack<K, V> {
fn default() -> Self {
Self::new()
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-utils/src/helpers.rs | sway-utils/src/helpers.rs | use crate::constants;
use std::{
ffi::OsStr,
fs,
path::{Path, PathBuf},
};
use walkdir::WalkDir;
pub fn get_sway_files(path: PathBuf) -> Vec<PathBuf> {
let mut files = vec![];
let mut dir_entries = vec![path];
while let Some(next_dir) = dir_entries.pop() {
if let Ok(read_dir) = fs::read_dir(&next_dir) {
for entry in read_dir.filter_map(Result::ok) {
let path = entry.path();
if path.is_dir() {
dir_entries.push(path);
} else if is_sway_file(&path) {
files.push(path);
}
}
}
}
files
}
pub fn is_sway_file(file: &Path) -> bool {
file.is_file() && file.extension() == Some(OsStr::new(constants::SWAY_EXTENSION))
}
/// Create an iterator over all prefixes in a slice, smallest first
///
/// ```
/// # use sway_utils::iter_prefixes;
/// let val = [1, 2, 3];
/// let mut it = iter_prefixes(&val);
/// assert_eq!(it.next(), Some([1].as_slice()));
/// assert_eq!(it.next(), Some([1, 2].as_slice()));
/// assert_eq!(it.next(), Some([1, 2, 3].as_slice()));
/// assert_eq!(it.next(), None);
/// ```
pub fn iter_prefixes<T>(slice: &[T]) -> impl DoubleEndedIterator<Item = &[T]> {
(1..=slice.len()).map(move |len| &slice[..len])
}
/// Continually go down in the file tree until a Forc manifest file is found.
pub fn find_nested_manifest_dir(starter_path: &Path) -> Option<PathBuf> {
find_nested_dir_with_file(starter_path, constants::MANIFEST_FILE_NAME)
}
/// Continually go down in the file tree until a specified file is found.
///
/// Starts the search from child dirs of `starter_path`.
pub fn find_nested_dir_with_file(starter_path: &Path, file_name: &str) -> Option<PathBuf> {
let starter_dir = if starter_path.is_dir() {
starter_path
} else {
starter_path.parent()?
};
WalkDir::new(starter_path).into_iter().find_map(|e| {
let entry = e.ok()?;
if entry.path() != starter_dir.join(file_name) && entry.file_name() == OsStr::new(file_name)
{
let mut entry = entry.path().to_path_buf();
entry.pop();
Some(entry)
} else {
None
}
})
}
/// Continually go up in the file tree until a specified file is found.
///
/// Starts the search from `starter_path`.
pub fn find_parent_dir_with_file<P: AsRef<Path>>(
starter_path: P,
file_name: &str,
) -> Option<PathBuf> {
let mut path = std::fs::canonicalize(starter_path).ok()?;
let root_path = PathBuf::from("/");
while path != root_path {
path.push(file_name);
if path.exists() {
path.pop();
return Some(path);
}
path.pop();
path.pop();
}
None
}
/// Continually go up in the file tree until a Forc manifest file is found.
pub fn find_parent_manifest_dir<P: AsRef<Path>>(starter_path: P) -> Option<PathBuf> {
find_parent_dir_with_file(starter_path, constants::MANIFEST_FILE_NAME)
}
/// Continually go up in the file tree until a Forc manifest file is found and the given predicate
/// returns true.
pub fn find_parent_manifest_dir_with_check<T: AsRef<Path>, F>(
starter_path: T,
check: F,
) -> Option<PathBuf>
where
F: Fn(&Path) -> bool,
{
find_parent_manifest_dir(&starter_path).and_then(|manifest_dir| {
// If given check satisfies, return the current dir; otherwise, start searching from the parent.
if check(&manifest_dir) {
Some(manifest_dir)
} else {
manifest_dir
.parent()
.and_then(|parent_dir| find_parent_manifest_dir_with_check(parent_dir, check))
}
})
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-utils/src/constants.rs | sway-utils/src/constants.rs | pub const MANIFEST_FILE_NAME: &str = "Forc.toml";
pub const LOCK_FILE_NAME: &str = "Forc.lock";
pub const TEST_MANIFEST_FILE_NAME: &str = "Cargo.toml";
pub const TEST_DIRECTORY: &str = "tests";
pub const SWAY_EXTENSION: &str = "sw";
pub const USER_FORC_DIRECTORY: &str = ".forc";
pub const SRC_DIR: &str = "src";
pub const DEFAULT_NODE_URL: &str = "http://127.0.0.1:4000";
pub const LANGUAGE_NAME: &str = "Sway";
pub const STORAGE_DOMAIN: [u8; 1] = [0u8];
pub const STORAGE_TOP_LEVEL_NAMESPACE: &str = "storage";
pub const STORAGE_NAMESPACE_SEPARATOR: &str = "::";
pub const STORAGE_FIELD_SEPARATOR: &str = ".";
pub const STRUCT_FIELD_SEPARATOR: &str = ".";
pub const LIB_ENTRY: &str = "lib.sw";
pub const MAIN_ENTRY: &str = "main.sw";
pub const FORC_INIT_MANIFEST_AUTHOR: &str = "FORC_INIT_MANIFEST_AUTHOR";
pub const DEFAULT_IPFS_GATEWAY_URL: &str = "https://ipfs.io";
pub const DEFAULT_REGISTRY_IPFS_GATEWAY_URL: &str = "http://ipfs.forc.pub";
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-utils/src/performance.rs | sway-utils/src/performance.rs | use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PerformanceMetric {
pub phase: String,
pub elapsed: f64,
pub memory_usage: Option<u64>,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct PerformanceData {
pub bytecode_size: usize,
pub metrics: Vec<PerformanceMetric>,
pub reused_programs: u64,
}
#[derive(serde::Serialize, Clone)]
pub struct FunctionEntryPoint {
/// The original entry point function name.
pub fn_name: String,
/// The immediate instruction offset at which the entry function begins.
pub imm: u64,
/// The function selector (only `Some` for contract ABI methods).
pub selector: Option<[u8; 4]>,
}
#[macro_export]
// Time the given expression and print/save the result.
macro_rules! time_expr {
($pkg_name:expr, $description:expr, $key:expr, $expression:expr, $build_config:expr, $data:expr) => {{
use std::io::{BufRead, Read, Write};
if let Some(cfg) = $build_config {
if cfg.profile {
println!("/dyno start {} {}", $pkg_name, $description);
let output = { $expression };
println!("/dyno stop {} {}", $pkg_name, $description);
output
} else if cfg.time_phases || cfg.metrics_outfile.is_some() {
let expr_start = std::time::Instant::now();
let output = { $expression };
let elapsed = expr_start.elapsed();
if cfg.time_phases {
println!(" Time elapsed to {}: {:?}", $description, elapsed);
}
if cfg.metrics_outfile.is_some() {
#[cfg(not(target_os = "macos"))]
let memory_usage = {
use sysinfo::{System, SystemExt};
let mut sys = System::new();
sys.refresh_system();
Some(sys.used_memory())
};
#[cfg(target_os = "macos")]
let memory_usage = None;
$data.metrics.push(PerformanceMetric {
phase: $key.to_string(),
elapsed: elapsed.as_secs_f64(),
memory_usage,
});
}
output
} else {
$expression
}
} else {
$expression
}
}};
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
FuelLabs/sway | https://github.com/FuelLabs/sway/blob/cc8f867043f3ec2c14ec4088d449cde603929a80/sway-features/src/lib.rs | sway-features/src/lib.rs | use std::collections::HashMap;
use clap::{Parser, ValueEnum};
macro_rules! features {
($($name:ident = $enabled:literal, $url:literal),* $(,)?) => {
paste::paste! {
#[derive(Copy, Clone, Debug, ValueEnum, PartialEq, Eq, Hash)]
#[value(rename_all = "snake")]
pub enum Feature {
$(
[<$name:camel>],
)*
}
impl Feature {
pub const CFG: &[&str] = &[
$(
stringify!([<experimental_ $name:snake>]),
)*
];
pub fn name(&self) -> &'static str {
match self {
$(
Feature::[<$name:camel>] => {
stringify!([<$name:snake>])
},
)*
}
}
pub fn url(&self) -> &'static str {
match self {
$(
Feature::[<$name:camel>] => {
$url
},
)*
}
}
pub fn error_because_is_disabled(&self, span: &sway_types::Span) -> sway_error::error::CompileError {
match self {
$(
Self::[<$name:camel>] => {
sway_error::error::CompileError::FeatureIsDisabled {
feature: stringify!([<$name:snake>]).into(),
url: $url.into(),
span: span.clone()
}
},
)*
}
}
}
impl std::str::FromStr for Feature {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
$(
stringify!([<$name:snake>]) => {
Ok(Self::[<$name:camel>])
},
)*
_ => Err(Error::UnknownFeature(s.to_string())),
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct ExperimentalFeatures {
$(
pub [<$name:snake>]: bool,
)*
}
impl std::default::Default for ExperimentalFeatures {
fn default() -> Self {
Self {
$(
[<$name:snake>]: $enabled,
)*
}
}
}
impl ExperimentalFeatures {
pub fn set_enabled_by_name(&mut self, feature: &str, enabled: bool) -> Result<(), Error> {
let feature = feature.trim();
match feature {
$(
stringify!([<$name:snake>]) => {
self.[<$name:snake>] = enabled;
Ok(())
},
)*
"" => Ok(()),
_ => Err(Error::UnknownFeature(feature.to_string())),
}
}
pub fn set_enabled(&mut self, feature: Feature, enabled: bool) {
match feature {
$(
Feature::[<$name:camel>] => {
self.[<$name:snake>] = enabled
},
)*
}
}
/// Used for testing if a `#[cfg(...)]` feature is enabled.
/// Already prepends "experimental_" to the feature name.
pub fn is_enabled_for_cfg(&self, cfg: &str) -> Result<bool, Error> {
match cfg {
$(
stringify!([<experimental_ $name:snake>]) => Ok(self.[<$name:snake>]),
)*
_ => Err(Error::UnknownFeature(cfg.to_string()))
}
}
$(
pub fn [<with_ $name:snake>](mut self, enabled: bool) -> Self {
self.[<$name:snake>] = enabled;
self
}
)*
}
}
};
}
impl ExperimentalFeatures {
/// Experimental features will be applied in the following order:
/// 1 - manifest (no specific order)
/// 2 - cli_no_experimental
/// 3 - cli_experimental
/// 4 - FORC_NO_EXPERIMENTAL (env var)
/// 5 - FORC_EXPERIMENTAL (env var)
pub fn new(
manifest: &HashMap<String, bool>,
cli_experimental: &[Feature],
cli_no_experimental: &[Feature],
) -> Result<ExperimentalFeatures, Error> {
let mut experimental = ExperimentalFeatures::default();
experimental.parse_from_package_manifest(manifest)?;
for f in cli_no_experimental {
experimental.set_enabled(*f, false);
}
for f in cli_experimental {
experimental.set_enabled(*f, true);
}
experimental.parse_from_environment_variables()?;
Ok(experimental)
}
}
features! {
new_encoding = true,
"https://github.com/FuelLabs/sway/issues/5727",
references = true,
"https://github.com/FuelLabs/sway/issues/5063",
const_generics = true,
"https://github.com/FuelLabs/sway/issues/6860",
new_hashing = true,
"https://github.com/FuelLabs/sway/issues/7256",
}
#[derive(Clone, Debug, Default, Parser)]
pub struct CliFields {
/// Comma separated list of all experimental features that will be enabled
#[clap(long, value_delimiter = ',')]
pub experimental: Vec<Feature>,
/// Comma separated list of all experimental features that will be disabled
#[clap(long, value_delimiter = ',')]
pub no_experimental: Vec<Feature>,
}
impl CliFields {
pub fn experimental_as_cli_string(&self) -> Option<String> {
Self::features_as_cli_string(&self.experimental)
}
pub fn no_experimental_as_cli_string(&self) -> Option<String> {
Self::features_as_cli_string(&self.no_experimental)
}
fn features_as_cli_string(features: &[Feature]) -> Option<String> {
if features.is_empty() {
None
} else {
Some(
features
.iter()
.map(|f| f.name())
.collect::<Vec<_>>()
.join(","),
)
}
}
}
#[derive(Debug)]
pub enum Error {
ParseError(String),
UnknownFeature(String),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::ParseError(feature) => f.write_fmt(format_args!(
"Experimental feature \"{feature}\" cannot be parsed."
)),
Error::UnknownFeature(feature) => {
f.write_fmt(format_args!("Unknown experimental feature: \"{feature}\"."))
}
}
}
}
impl ExperimentalFeatures {
#[allow(clippy::iter_over_hash_type)]
pub fn parse_from_package_manifest(
&mut self,
experimental: &std::collections::HashMap<String, bool>,
) -> Result<(), Error> {
for (feature, enabled) in experimental {
self.set_enabled_by_name(feature, *enabled)?;
}
Ok(())
}
/// Enable and disable features using comma separated feature names from
/// environment variables "FORC_EXPERIMENTAL" and "FORC_NO_EXPERIMENTAL".
pub fn parse_from_environment_variables(&mut self) -> Result<(), Error> {
if let Ok(features) = std::env::var("FORC_NO_EXPERIMENTAL") {
self.parse_comma_separated_list(&features, false)?;
}
if let Ok(features) = std::env::var("FORC_EXPERIMENTAL") {
self.parse_comma_separated_list(&features, true)?;
}
Ok(())
}
pub fn parse_comma_separated_list(
&mut self,
features: impl AsRef<str>,
enabled: bool,
) -> Result<(), Error> {
for feature in features.as_ref().split(',') {
self.set_enabled_by_name(feature, enabled)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
struct RollbackEnvVar(String, Option<String>);
impl RollbackEnvVar {
pub fn new(name: &str) -> Self {
let old = std::env::var(name).ok();
RollbackEnvVar(name.to_string(), old)
}
}
impl Drop for RollbackEnvVar {
fn drop(&mut self) {
if let Some(old) = self.1.take() {
std::env::set_var(&self.0, old);
}
}
}
#[test]
fn ok_parse_experimental_features() {
let _old = RollbackEnvVar::new("FORC_EXPERIMENTAL");
let _old = RollbackEnvVar::new("FORC_NO_EXPERIMENTAL");
let mut features = ExperimentalFeatures {
new_encoding: false,
..Default::default()
};
std::env::set_var("FORC_EXPERIMENTAL", "new_encoding");
std::env::set_var("FORC_NO_EXPERIMENTAL", "");
assert!(!features.new_encoding);
let _ = features.parse_from_environment_variables();
assert!(features.new_encoding);
std::env::set_var("FORC_EXPERIMENTAL", "");
std::env::set_var("FORC_NO_EXPERIMENTAL", "new_encoding");
assert!(features.new_encoding);
let _ = features.parse_from_environment_variables();
assert!(!features.new_encoding);
}
}
| rust | Apache-2.0 | cc8f867043f3ec2c14ec4088d449cde603929a80 | 2026-01-04T15:31:58.694488Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-daemon/build.rs | crates/atuin-daemon/build.rs | use std::{env, fs, path::PathBuf};
use protox::prost::Message;
fn main() -> std::io::Result<()> {
let proto_paths = ["proto/history.proto"];
let proto_include_dirs = ["proto"];
let file_descriptors = protox::compile(proto_paths, proto_include_dirs).unwrap();
let file_descriptor_path = PathBuf::from(env::var_os("OUT_DIR").expect("OUT_DIR not set"))
.join("file_descriptor_set.bin");
fs::write(&file_descriptor_path, file_descriptors.encode_to_vec()).unwrap();
tonic_build::configure()
.build_server(true)
.file_descriptor_set_path(&file_descriptor_path)
.skip_protoc_run()
.compile_protos(&proto_paths, &proto_include_dirs)
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-daemon/src/lib.rs | crates/atuin-daemon/src/lib.rs | pub mod client;
pub mod history;
pub mod server;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-daemon/src/client.rs | crates/atuin-daemon/src/client.rs | use eyre::{Context, Result};
#[cfg(windows)]
use tokio::net::TcpStream;
use tonic::transport::{Channel, Endpoint, Uri};
use tower::service_fn;
use hyper_util::rt::TokioIo;
#[cfg(unix)]
use tokio::net::UnixStream;
use atuin_client::history::History;
use crate::history::{
EndHistoryRequest, StartHistoryRequest, history_client::HistoryClient as HistoryServiceClient,
};
/// Thin wrapper around the tonic-generated gRPC client for the daemon's
/// history service, hiding the transport setup from callers.
pub struct HistoryClient {
    client: HistoryServiceClient<Channel>,
}
// Wrap the grpc client
impl HistoryClient {
#[cfg(unix)]
pub async fn new(path: String) -> Result<Self> {
let log_path = path.clone();
let channel = Endpoint::try_from("http://atuin_local_daemon:0")?
.connect_with_connector(service_fn(move |_: Uri| {
let path = path.clone();
async move {
Ok::<_, std::io::Error>(TokioIo::new(UnixStream::connect(path.clone()).await?))
}
}))
.await
.wrap_err_with(|| {
format!(
"failed to connect to local atuin daemon at {}. Is it running?",
&log_path
)
})?;
let client = HistoryServiceClient::new(channel);
Ok(HistoryClient { client })
}
#[cfg(not(unix))]
pub async fn new(port: u64) -> Result<Self> {
let channel = Endpoint::try_from("http://atuin_local_daemon:0")?
.connect_with_connector(service_fn(move |_: Uri| {
let url = format!("127.0.0.1:{port}");
async move {
Ok::<_, std::io::Error>(TokioIo::new(TcpStream::connect(url.clone()).await?))
}
}))
.await
.wrap_err_with(|| {
format!(
"failed to connect to local atuin daemon at 127.0.0.1:{port}. Is it running?"
)
})?;
let client = HistoryServiceClient::new(channel);
Ok(HistoryClient { client })
}
pub async fn start_history(&mut self, h: History) -> Result<String> {
let req = StartHistoryRequest {
command: h.command,
cwd: h.cwd,
hostname: h.hostname,
session: h.session,
timestamp: h.timestamp.unix_timestamp_nanos() as u64,
};
let resp = self.client.start_history(req).await?;
Ok(resp.into_inner().id)
}
pub async fn end_history(
&mut self,
id: String,
duration: u64,
exit: i64,
) -> Result<(String, u64)> {
let req = EndHistoryRequest { id, duration, exit };
let resp = self.client.end_history(req).await?;
let resp = resp.into_inner();
Ok((resp.id, resp.idx))
}
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-daemon/src/history.rs | crates/atuin-daemon/src/history.rs | tonic::include_proto!("history");
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-daemon/src/server.rs | crates/atuin-daemon/src/server.rs | use eyre::WrapErr;
use atuin_client::encryption;
use atuin_client::history::store::HistoryStore;
use atuin_client::record::sqlite_store::SqliteStore;
use atuin_client::settings::Settings;
#[cfg(unix)]
use std::path::PathBuf;
use std::sync::Arc;
use time::OffsetDateTime;
use tracing::{Level, instrument};
use atuin_client::database::{Database, Sqlite as HistoryDatabase};
use atuin_client::history::{History, HistoryId};
use dashmap::DashMap;
use eyre::Result;
use tonic::{Request, Response, Status, transport::Server};
use crate::history::history_server::{History as HistorySvc, HistoryServer};
use crate::history::{EndHistoryReply, EndHistoryRequest, StartHistoryReply, StartHistoryRequest};
mod sync;
/// gRPC service state for the daemon's history endpoints.
#[derive(Debug)]
pub struct HistoryService {
    // A store for WIP history
    // This is history that has not yet been completed, aka a command that's currently running.
    running: Arc<DashMap<HistoryId, History>>,
    // Durable storage: the encrypted record store and the local sqlite db.
    store: HistoryStore,
    history_db: HistoryDatabase,
}
impl HistoryService {
    /// Build a history service with an empty set of in-flight commands.
    pub fn new(store: HistoryStore, history_db: HistoryDatabase) -> Self {
        let running = Arc::new(DashMap::new());
        Self {
            running,
            store,
            history_db,
        }
    }
}
#[tonic::async_trait()]
impl HistorySvc for HistoryService {
    /// Record the start of a command. The entry is held in memory only; it
    /// is not persisted until `end_history` completes it.
    #[instrument(skip_all, level = Level::INFO)]
    async fn start_history(
        &self,
        request: Request<StartHistoryRequest>,
    ) -> Result<Response<StartHistoryReply>, Status> {
        let running = self.running.clone();
        let req = request.into_inner();

        // The client sends nanoseconds since the unix epoch.
        let timestamp =
            OffsetDateTime::from_unix_timestamp_nanos(req.timestamp as i128).map_err(|_| {
                Status::invalid_argument(
                    "failed to parse timestamp as unix time (expected nanos since epoch)",
                )
            })?;

        let h: History = History::daemon()
            .timestamp(timestamp)
            .command(req.command)
            .cwd(req.cwd)
            .session(req.session)
            .hostname(req.hostname)
            .build()
            .into();

        // The old behaviour had us inserting half-finished history records into the database
        // The new behaviour no longer allows that.
        // History that's running is stored in-memory by the daemon, and only committed when
        // complete.
        // If anyone relied on the old behaviour, we could perhaps insert to the history db here
        // too. I'd rather keep it pure, unless that ends up being the case.
        let id = h.id.clone();
        tracing::info!(id = id.to_string(), "start history");
        running.insert(id.clone(), h);

        let reply = StartHistoryReply { id: id.to_string() };

        Ok(Response::new(reply))
    }

    /// Complete a previously started command: fill in exit code and
    /// duration, save it to the sqlite db, then push it to the record store.
    /// Returns NOT_FOUND if the id has no in-flight entry.
    #[instrument(skip_all, level = Level::INFO)]
    async fn end_history(
        &self,
        request: Request<EndHistoryRequest>,
    ) -> Result<Response<EndHistoryReply>, Status> {
        let running = self.running.clone();
        let req = request.into_inner();

        let id = HistoryId(req.id);

        if let Some((_, mut history)) = running.remove(&id) {
            history.exit = req.exit;
            // A duration of 0 means "not measured by the client": derive it
            // from the recorded start time instead.
            history.duration = match req.duration {
                0 => i64::try_from(
                    (OffsetDateTime::now_utc() - history.timestamp).whole_nanoseconds(),
                )
                .expect("failed to convert calculated duration to i64"),
                value => i64::try_from(value).expect("failed to get i64 duration"),
            };

            // Perhaps allow the incremental build to handle this entirely.
            self.history_db
                .save(&history)
                .await
                .map_err(|e| Status::internal(format!("failed to write to db: {e:?}")))?;

            tracing::info!(
                id = id.0.to_string(),
                duration = history.duration,
                "end history"
            );

            let (id, idx) =
                self.store.push(history).await.map_err(|e| {
                    Status::internal(format!("failed to push record to store: {e:?}"))
                })?;

            let reply = EndHistoryReply {
                id: id.0.to_string(),
                idx,
            };

            return Ok(Response::new(reply));
        }

        Err(Status::not_found(format!(
            "could not find history with id: {id}"
        )))
    }
}
/// Resolve when SIGTERM or SIGINT arrives; if we own the socket file
/// (`socket` is `Some`), delete it before returning so the next start can
/// bind cleanly.
#[cfg(unix)]
async fn shutdown_signal(socket: Option<PathBuf>) {
    use tokio::signal::unix::{signal, SignalKind};

    let mut sigterm =
        signal(SignalKind::terminate()).expect("failed to register sigterm handler");
    let mut sigint =
        signal(SignalKind::interrupt()).expect("failed to register sigint handler");

    // Wait for whichever signal fires first.
    tokio::select! {
        _ = sigterm.recv() => {}
        _ = sigint.recv() => {}
    }

    eprintln!("Removing socket...");
    if let Some(socket) = socket {
        std::fs::remove_file(socket).expect("failed to remove socket");
    }
    eprintln!("Shutting down...");
}
/// Resolve when the user hits ctrl-c, letting the server drain and exit.
#[cfg(windows)]
async fn shutdown_signal() {
    let mut ctrl_c =
        tokio::signal::windows::ctrl_c().expect("failed to register signal handler");
    ctrl_c.recv().await;
    eprintln!("Shutting down...");
}
/// Serve the gRPC history service over a unix domain socket until a
/// shutdown signal arrives.
#[cfg(unix)]
async fn start_server(settings: Settings, history: HistoryService) -> Result<()> {
    use tokio::net::UnixListener;
    use tokio_stream::wrappers::UnixListenerStream;

    let socket_path = settings.daemon.socket_path;

    // Either adopt a listener handed over by systemd socket activation
    // (linux only), or bind the configured socket ourselves. The bool
    // records whether we own the socket file and must delete it on shutdown.
    let (uds, cleanup) = if cfg!(target_os = "linux") && settings.daemon.systemd_socket {
        #[cfg(target_os = "linux")]
        {
            use eyre::OptionExt;
            tracing::info!("getting systemd socket");
            let listener = listenfd::ListenFd::from_env()
                .take_unix_listener(0)?
                .ok_or_eyre("missing systemd socket")?;
            listener.set_nonblocking(true)?;
            // Warn when systemd's socket path differs from the path clients
            // are configured to dial.
            let actual_path = listener
                .local_addr()
                .context("getting systemd socket's path")
                .and_then(|addr| {
                    addr.as_pathname()
                        .ok_or_eyre("systemd socket missing path")
                        .map(|path| path.to_owned())
                });
            match actual_path {
                Ok(actual_path) => {
                    tracing::info!("listening on systemd socket: {actual_path:?}");
                    if actual_path != std::path::Path::new(&socket_path) {
                        tracing::warn!(
                            "systemd socket is not at configured client path: {socket_path:?}"
                        );
                    }
                }
                Err(err) => {
                    tracing::warn!(
                        "could not detect systemd socket path, ensure that it's at the configured path: {socket_path:?}, error: {err:?}"
                    );
                }
            }
            // false: systemd owns the socket file; don't delete it on shutdown.
            (UnixListener::from_std(listener)?, false)
        }
        // `cfg!` above is a runtime value check, so this arm still has to
        // exist on non-linux unix targets — but can never be reached there.
        #[cfg(not(target_os = "linux"))]
        unreachable!()
    } else {
        tracing::info!("listening on unix socket {socket_path:?}");
        (UnixListener::bind(socket_path.clone())?, true)
    };
    let uds_stream = UnixListenerStream::new(uds);

    Server::builder()
        .add_service(HistoryServer::new(history))
        .serve_with_incoming_shutdown(
            uds_stream,
            shutdown_signal(cleanup.then_some(socket_path.into())),
        )
        .await?;

    Ok(())
}
/// Serve the gRPC history service on a local TCP port (platforms without
/// unix sockets) until a shutdown signal arrives.
#[cfg(not(unix))]
async fn start_server(settings: Settings, history: HistoryService) -> Result<()> {
    use tokio::net::TcpListener;
    use tokio_stream::wrappers::TcpListenerStream;

    // Loopback only — the daemon is not meant to be reachable remotely.
    let port = settings.daemon.tcp_port;
    let listener = TcpListener::bind(format!("127.0.0.1:{port}")).await?;
    tracing::info!("listening on tcp port {:?}", port);

    Server::builder()
        .add_service(HistoryServer::new(history))
        .serve_with_incoming_shutdown(TcpListenerStream::new(listener), shutdown_signal())
        .await?;

    Ok(())
}
// break the above down when we end up with multiple services

/// Boot the daemon: spawn the background sync worker, then serve the gRPC
/// history service until a shutdown signal arrives.
pub async fn listen(
    settings: Settings,
    store: SqliteStore,
    history_db: HistoryDatabase,
) -> Result<()> {
    let encryption_key: [u8; 32] = encryption::load_key(&settings)
        .context("could not load encryption key")?
        .into();
    let host_id = Settings::host_id().expect("failed to get host_id");

    let history_store = HistoryStore::new(store.clone(), host_id, encryption_key);
    let service = HistoryService::new(history_store.clone(), history_db.clone());

    // The sync loop runs as an independent task alongside the server.
    tokio::spawn(sync::worker(
        settings.clone(),
        store,
        history_store,
        history_db,
    ));

    start_server(settings, service).await
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-daemon/src/server/sync.rs | crates/atuin-daemon/src/server/sync.rs | use eyre::Result;
use rand::Rng;
use tokio::time::{self, MissedTickBehavior};
use atuin_client::database::Sqlite as HistoryDatabase;
use atuin_client::{
encryption,
history::store::HistoryStore,
record::{sqlite_store::SqliteStore, sync},
settings::Settings,
};
use atuin_dotfiles::store::{AliasStore, var::VarStore};
pub async fn worker(
settings: Settings,
store: SqliteStore,
history_store: HistoryStore,
history_db: HistoryDatabase,
) -> Result<()> {
tracing::info!("booting sync worker");
let encryption_key: [u8; 32] = encryption::load_key(&settings)?.into();
let host_id = Settings::host_id().expect("failed to get host_id");
let alias_store = AliasStore::new(store.clone(), host_id, encryption_key);
let var_store = VarStore::new(store.clone(), host_id, encryption_key);
// Don't backoff by more than 30 mins (with a random jitter of up to 1 min)
let max_interval: f64 = 60.0 * 30.0 + rand::thread_rng().gen_range(0.0..60.0);
let mut ticker = time::interval(time::Duration::from_secs(settings.daemon.sync_frequency));
// IMPORTANT: without this, if we miss ticks because a sync takes ages or is otherwise delayed,
// we may end up running a lot of syncs in a hot loop. No bueno!
ticker.set_missed_tick_behavior(MissedTickBehavior::Skip);
loop {
ticker.tick().await;
tracing::info!("sync worker tick");
if !settings.logged_in() {
tracing::debug!("not logged in, skipping sync tick");
continue;
}
let res = sync::sync(&settings, &store).await;
if let Err(e) = res {
tracing::error!("sync tick failed with {e}");
let mut rng = rand::thread_rng();
let mut new_interval = ticker.period().as_secs_f64() * rng.gen_range(2.0..2.2);
if new_interval > max_interval {
new_interval = max_interval;
}
ticker = time::interval(time::Duration::from_secs(new_interval as u64));
ticker.reset_after(time::Duration::from_secs(new_interval as u64));
tracing::error!("backing off, next sync tick in {new_interval}");
} else {
let (uploaded, downloaded) = res.unwrap();
tracing::info!(
uploaded = ?uploaded,
downloaded = ?downloaded,
"sync complete"
);
history_store
.incremental_build(&history_db, &downloaded)
.await?;
alias_store.build().await?;
var_store.build().await?;
// Reset backoff on success
if ticker.period().as_secs() != settings.daemon.sync_frequency {
ticker = time::interval(time::Duration::from_secs(settings.daemon.sync_frequency));
}
// store sync time
tokio::task::spawn_blocking(Settings::save_sync_time).await??;
}
}
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server-sqlite/build.rs | crates/atuin-server-sqlite/build.rs | // generated by `sqlx migrate build-script`
// Build script generated by `sqlx migrate build-script`: asks Cargo to watch
// the migrations directory so the crate is rebuilt when migrations change.
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server-sqlite/src/wrappers.rs | crates/atuin-server-sqlite/src/wrappers.rs | use ::sqlx::{FromRow, Result};
use atuin_common::record::{EncryptedData, Host, Record};
use atuin_server_database::models::{History, Session, User};
use sqlx::{Row, sqlite::SqliteRow};
// Newtype wrappers around the shared server models. They exist so this crate
// can implement the foreign trait `sqlx::FromRow` for these foreign types
// (the orphan rule forbids implementing it on the models directly).
pub struct DbUser(pub User);
pub struct DbSession(pub Session);
pub struct DbHistory(pub History);
pub struct DbRecord(pub Record<EncryptedData>);
impl<'a> FromRow<'a, SqliteRow> for DbUser {
    /// Map a `users` table row onto the shared `User` model.
    fn from_row(row: &'a SqliteRow) -> Result<Self> {
        let user = User {
            id: row.try_get("id")?,
            username: row.try_get("username")?,
            email: row.try_get("email")?,
            password: row.try_get("password")?,
            // Backed by the nullable `verified_at` column.
            verified: row.try_get("verified_at")?,
        };
        Ok(Self(user))
    }
}
impl<'a> ::sqlx::FromRow<'a, SqliteRow> for DbSession {
fn from_row(row: &'a SqliteRow) -> ::sqlx::Result<Self> {
Ok(Self(Session {
id: row.try_get("id")?,
user_id: row.try_get("user_id")?,
token: row.try_get("token")?,
}))
}
}
impl<'a> ::sqlx::FromRow<'a, SqliteRow> for DbHistory {
fn from_row(row: &'a SqliteRow) -> ::sqlx::Result<Self> {
Ok(Self(History {
id: row.try_get("id")?,
client_id: row.try_get("client_id")?,
user_id: row.try_get("user_id")?,
hostname: row.try_get("hostname")?,
timestamp: row.try_get("timestamp")?,
data: row.try_get("data")?,
created_at: row.try_get("created_at")?,
}))
}
}
impl<'a> ::sqlx::FromRow<'a, SqliteRow> for DbRecord {
fn from_row(row: &'a SqliteRow) -> ::sqlx::Result<Self> {
let idx: i64 = row.try_get("idx")?;
let timestamp: i64 = row.try_get("timestamp")?;
let data = EncryptedData {
data: row.try_get("data")?,
content_encryption_key: row.try_get("cek")?,
};
Ok(Self(Record {
id: row.try_get("client_id")?,
host: Host::new(row.try_get("host")?),
idx: idx as u64,
timestamp: timestamp as u64,
version: row.try_get("version")?,
tag: row.try_get("tag")?,
data,
}))
}
}
impl From<DbRecord> for Record<EncryptedData> {
fn from(other: DbRecord) -> Record<EncryptedData> {
Record { ..other.0 }
}
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server-sqlite/src/lib.rs | crates/atuin-server-sqlite/src/lib.rs | use std::str::FromStr;
use async_trait::async_trait;
use atuin_common::{
record::{EncryptedData, HostId, Record, RecordIdx, RecordStatus},
utils::crypto_random_string,
};
use atuin_server_database::{
Database, DbError, DbResult, DbSettings,
models::{History, NewHistory, NewSession, NewUser, Session, User},
};
use futures_util::TryStreamExt;
use sqlx::{
Row,
sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions},
types::Uuid,
};
use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset};
use tracing::instrument;
use wrappers::{DbHistory, DbRecord, DbSession, DbUser};
mod wrappers;
/// Sqlite-backed implementation of the atuin server `Database` trait,
/// wrapping a sqlx connection pool.
#[derive(Clone)]
pub struct Sqlite {
    pool: sqlx::Pool<sqlx::sqlite::Sqlite>,
}
fn fix_error(error: sqlx::Error) -> DbError {
match error {
sqlx::Error::RowNotFound => DbError::NotFound,
error => DbError::Other(error.into()),
}
}
#[async_trait]
impl Database for Sqlite {
    /// Open (creating if missing) the sqlite database named by `db_uri`,
    /// switch it to WAL journaling, and run any pending migrations.
    async fn new(settings: &DbSettings) -> DbResult<Self> {
        let opts = SqliteConnectOptions::from_str(&settings.db_uri)
            .map_err(fix_error)?
            .journal_mode(SqliteJournalMode::Wal)
            .create_if_missing(true);
        let pool = SqlitePoolOptions::new()
            .connect_with(opts)
            .await
            .map_err(fix_error)?;
        sqlx::migrate!("./migrations")
            .run(&pool)
            .await
            .map_err(|error| DbError::Other(error.into()))?;
        Ok(Self { pool })
    }

    /// Fetch a session by its token.
    #[instrument(skip_all)]
    async fn get_session(&self, token: &str) -> DbResult<Session> {
        sqlx::query_as("select id, user_id, token from sessions where token = $1")
            .bind(token)
            .fetch_one(&self.pool)
            .await
            .map_err(fix_error)
            .map(|DbSession(session)| session)
    }

    /// Resolve a session token straight to the owning user in one query.
    #[instrument(skip_all)]
    async fn get_session_user(&self, token: &str) -> DbResult<User> {
        sqlx::query_as(
            "select users.id, users.username, users.email, users.password, users.verified_at from users
            inner join sessions
            on users.id = sessions.user_id
            and sessions.token = $1",
        )
        .bind(token)
        .fetch_one(&self.pool)
        .await
        .map_err(fix_error)
        .map(|DbUser(user)| user)
    }

    /// Insert a new session row for a user.
    #[instrument(skip_all)]
    async fn add_session(&self, session: &NewSession) -> DbResult<()> {
        let token: &str = &session.token;
        sqlx::query(
            "insert into sessions
                (user_id, token)
            values($1, $2)",
        )
        .bind(session.user_id)
        .bind(token)
        .execute(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(())
    }

    /// Fetch a user row by username.
    #[instrument(skip_all)]
    async fn get_user(&self, username: &str) -> DbResult<User> {
        sqlx::query_as(
            "select id, username, email, password, verified_at from users where username = $1",
        )
        .bind(username)
        .fetch_one(&self.pool)
        .await
        .map_err(fix_error)
        .map(|DbUser(user)| user)
    }

    /// Fetch the session belonging to a user.
    #[instrument(skip_all)]
    async fn get_user_session(&self, u: &User) -> DbResult<Session> {
        sqlx::query_as("select id, user_id, token from sessions where user_id = $1")
            .bind(u.id)
            .fetch_one(&self.pool)
            .await
            .map_err(fix_error)
            .map(|DbSession(session)| session)
    }

    /// Insert a new user, returning the generated row id.
    #[instrument(skip_all)]
    async fn add_user(&self, user: &NewUser) -> DbResult<i64> {
        let email: &str = &user.email;
        let username: &str = &user.username;
        let password: &str = &user.password;
        let res: (i64,) = sqlx::query_as(
            "insert into users
                (username, email, password)
            values($1, $2, $3)
            returning id",
        )
        .bind(username)
        .bind(email)
        .bind(password)
        .fetch_one(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(res.0)
    }

    /// Whether the user is verified (`verified_at` is non-null).
    #[instrument(skip_all)]
    async fn user_verified(&self, id: i64) -> DbResult<bool> {
        let res: (bool,) =
            sqlx::query_as("select verified_at is not null from users where id = $1")
                .bind(id)
                .fetch_one(&self.pool)
                .await
                .map_err(fix_error)?;
        Ok(res.0)
    }

    /// Stamp the user's `verified_at` with the current time.
    ///
    /// NOTE(review): `at time zone 'utc'` is PostgreSQL syntax — confirm this
    /// statement actually executes against sqlite.
    #[instrument(skip_all)]
    async fn verify_user(&self, id: i64) -> DbResult<()> {
        sqlx::query(
            "update users set verified_at = (current_timestamp at time zone 'utc') where id=$1",
        )
        .bind(id)
        .execute(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(())
    }

    /// Return the user's current verification token, minting a fresh one
    /// when none exists or the stored token has expired.
    #[instrument(skip_all)]
    async fn user_verification_token(&self, id: i64) -> DbResult<String> {
        // Tokens stay valid for 15 minutes from issue/refresh.
        const TOKEN_VALID_MINUTES: i64 = 15;
        // First we check if there is a verification token
        let token: Option<(String, sqlx::types::time::OffsetDateTime)> = sqlx::query_as(
            "select token, valid_until from user_verification_token where user_id = $1",
        )
        .bind(id)
        .fetch_optional(&self.pool)
        .await
        .map_err(fix_error)?;
        let token = if let Some((token, valid_until)) = token {
            // We have a token, AND it's still valid
            if valid_until > time::OffsetDateTime::now_utc() {
                token
            } else {
                // token has expired. generate a new one, return it
                let token = crypto_random_string::<24>();
                sqlx::query("update user_verification_token set token = $2, valid_until = $3 where user_id=$1")
                    .bind(id)
                    .bind(&token)
                    .bind(time::OffsetDateTime::now_utc() + time::Duration::minutes(TOKEN_VALID_MINUTES))
                    .execute(&self.pool)
                    .await
                    .map_err(fix_error)?;
                token
            }
        } else {
            // No token in the database! Generate one, insert it
            let token = crypto_random_string::<24>();
            sqlx::query("insert into user_verification_token (user_id, token, valid_until) values ($1, $2, $3)")
                .bind(id)
                .bind(&token)
                .bind(time::OffsetDateTime::now_utc() + time::Duration::minutes(TOKEN_VALID_MINUTES))
                .execute(&self.pool)
                .await
                .map_err(fix_error)?;
            token
        };
        Ok(token)
    }

    /// Overwrite the stored password for a user.
    #[instrument(skip_all)]
    async fn update_user_password(&self, user: &User) -> DbResult<()> {
        sqlx::query(
            "update users
            set password = $1
            where id = $2",
        )
        .bind(&user.password)
        .bind(user.id)
        .execute(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(())
    }

    /// Count history rows across all users.
    #[instrument(skip_all)]
    async fn total_history(&self) -> DbResult<i64> {
        let res: (i64,) = sqlx::query_as("select count(1) from history")
            .fetch_optional(&self.pool)
            .await
            .map_err(fix_error)?
            .unwrap_or((0,));
        Ok(res.0)
    }

    /// Count history rows belonging to one user.
    #[instrument(skip_all)]
    async fn count_history(&self, user: &User) -> DbResult<i64> {
        // The cache is new, and the user might not yet have a cache value.
        // They will have one as soon as they post up some new history, but handle that
        // edge case.
        let res: (i64,) = sqlx::query_as(
            "select count(1) from history
            where user_id = $1",
        )
        .bind(user.id)
        .fetch_one(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(res.0)
    }

    /// No cached count is maintained for the sqlite backend; always returns
    /// `NotFound` (callers presumably fall back to `count_history` — confirm
    /// at call sites).
    #[instrument(skip_all)]
    async fn count_history_cached(&self, _user: &User) -> DbResult<i64> {
        Err(DbError::NotFound)
    }

    /// Delete a user along with their sessions and history rows.
    #[instrument(skip_all)]
    async fn delete_user(&self, u: &User) -> DbResult<()> {
        sqlx::query("delete from sessions where user_id = $1")
            .bind(u.id)
            .execute(&self.pool)
            .await
            .map_err(fix_error)?;
        sqlx::query("delete from users where id = $1")
            .bind(u.id)
            .execute(&self.pool)
            .await
            .map_err(fix_error)?;
        sqlx::query("delete from history where user_id = $1")
            .bind(u.id)
            .execute(&self.pool)
            .await
            .map_err(fix_error)?;
        Ok(())
    }

    /// Soft-delete one history entry by stamping `deleted_at`.
    ///
    /// NOTE(review): uses `fetch_all` for an UPDATE statement; `execute`
    /// would be conventional — confirm no rows are expected back.
    async fn delete_history(&self, user: &User, id: String) -> DbResult<()> {
        sqlx::query(
            "update history
            set deleted_at = $3
            where user_id = $1
            and client_id = $2
            and deleted_at is null", // don't just keep setting it
        )
        .bind(user.id)
        .bind(id)
        .bind(time::OffsetDateTime::now_utc())
        .fetch_all(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(())
    }

    /// List client ids of all soft-deleted history entries for a user.
    #[instrument(skip_all)]
    async fn deleted_history(&self, user: &User) -> DbResult<Vec<String>> {
        // The cache is new, and the user might not yet have a cache value.
        // They will have one as soon as they post up some new history, but handle that
        // edge case.
        let res = sqlx::query(
            "select client_id from history
            where user_id = $1
            and deleted_at is not null",
        )
        .bind(user.id)
        .fetch_all(&self.pool)
        .await
        .map_err(fix_error)?;
        let res = res.iter().map(|row| row.get("client_id")).collect();
        Ok(res)
    }

    /// Remove every record-store row belonging to a user.
    async fn delete_store(&self, user: &User) -> DbResult<()> {
        sqlx::query(
            "delete from store
            where user_id = $1",
        )
        .bind(user.id)
        .execute(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(())
    }

    /// Insert a batch of encrypted records in one transaction, ignoring
    /// duplicates (`on conflict do nothing`).
    #[instrument(skip_all)]
    async fn add_records(&self, user: &User, records: &[Record<EncryptedData>]) -> DbResult<()> {
        let mut tx = self.pool.begin().await.map_err(fix_error)?;
        for i in records {
            // Surrogate primary key; the client-supplied id is stored
            // separately in `client_id`.
            let id = atuin_common::utils::uuid_v7();
            sqlx::query(
                "insert into store
                    (id, client_id, host, idx, timestamp, version, tag, data, cek, user_id)
                values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
                on conflict do nothing
                ",
            )
            .bind(id)
            .bind(i.id)
            .bind(i.host.id)
            .bind(i.idx as i64)
            .bind(i.timestamp as i64) // throwing away some data, but i64 is still big in terms of time
            .bind(&i.version)
            .bind(&i.tag)
            .bind(&i.data.data)
            .bind(&i.data.content_encryption_key)
            .bind(user.id)
            .execute(&mut *tx)
            .await
            .map_err(fix_error)?;
        }
        tx.commit().await.map_err(fix_error)?;
        Ok(())
    }

    /// Page through a user's records for one (host, tag) pair, ordered by
    /// idx ascending, starting at `start` (default 0), at most `count` rows.
    #[instrument(skip_all)]
    async fn next_records(
        &self,
        user: &User,
        host: HostId,
        tag: String,
        start: Option<RecordIdx>,
        count: u64,
    ) -> DbResult<Vec<Record<EncryptedData>>> {
        tracing::debug!("{:?} - {:?} - {:?}", host, tag, start);
        let start = start.unwrap_or(0);
        let records: Result<Vec<DbRecord>, DbError> = sqlx::query_as(
            "select client_id, host, idx, timestamp, version, tag, data, cek from store
            where user_id = $1
            and tag = $2
            and host = $3
            and idx >= $4
            order by idx asc
            limit $5",
        )
        .bind(user.id)
        .bind(tag.clone())
        .bind(host)
        .bind(start as i64)
        .bind(count as i64)
        .fetch_all(&self.pool)
        .await
        .map_err(fix_error);
        let ret = match records {
            Ok(records) => {
                let records: Vec<Record<EncryptedData>> = records
                    .into_iter()
                    .map(|f| {
                        let record: Record<EncryptedData> = f.into();
                        record
                    })
                    .collect();
                records
            }
            // An empty store is not an error; just return nothing.
            Err(DbError::NotFound) => {
                tracing::debug!("no records found in store: {:?}/{}", host, tag);
                return Ok(vec![]);
            }
            Err(e) => return Err(e),
        };
        Ok(ret)
    }

    /// Report the highest record idx per (host, tag) for a user's store.
    async fn status(&self, user: &User) -> DbResult<RecordStatus> {
        const STATUS_SQL: &str =
            "select host, tag, max(idx) from store where user_id = $1 group by host, tag";
        let res: Vec<(Uuid, String, i64)> = sqlx::query_as(STATUS_SQL)
            .bind(user.id)
            .fetch_all(&self.pool)
            .await
            .map_err(fix_error)?;
        let mut status = RecordStatus::new();
        for i in res {
            status.set_raw(HostId(i.0), i.1, i.2 as u64);
        }
        Ok(status)
    }

    /// Count a user's history rows with timestamps inside `range`
    /// (start inclusive, end exclusive).
    ///
    /// NOTE(review): the `::date` casts are PostgreSQL syntax — confirm
    /// behaviour on sqlite.
    #[instrument(skip_all)]
    async fn count_history_range(
        &self,
        user: &User,
        range: std::ops::Range<time::OffsetDateTime>,
    ) -> DbResult<i64> {
        let res: (i64,) = sqlx::query_as(
            "select count(1) from history
            where user_id = $1
            and timestamp >= $2::date
            and timestamp < $3::date",
        )
        .bind(user.id)
        .bind(into_utc(range.start))
        .bind(into_utc(range.end))
        .fetch_one(&self.pool)
        .await
        .map_err(fix_error)?;
        Ok(res.0)
    }

    /// Page a user's history for sync: rows created at/after
    /// `created_after`, with timestamps at/after `since`, excluding the
    /// requesting host's own entries.
    #[instrument(skip_all)]
    async fn list_history(
        &self,
        user: &User,
        created_after: time::OffsetDateTime,
        since: time::OffsetDateTime,
        host: &str,
        page_size: i64,
    ) -> DbResult<Vec<History>> {
        let res = sqlx::query_as(
            "select id, client_id, user_id, hostname, timestamp, data, created_at from history
            where user_id = $1
            and hostname != $2
            and created_at >= $3
            and timestamp >= $4
            order by timestamp asc
            limit $5",
        )
        .bind(user.id)
        .bind(host)
        .bind(into_utc(created_after))
        .bind(into_utc(since))
        .bind(page_size)
        .fetch(&self.pool)
        .map_ok(|DbHistory(h)| h)
        .try_collect()
        .await
        .map_err(fix_error)?;
        Ok(res)
    }

    /// Insert a batch of uploaded history rows in one transaction, ignoring
    /// duplicates (`on conflict do nothing`).
    #[instrument(skip_all)]
    async fn add_history(&self, history: &[NewHistory]) -> DbResult<()> {
        let mut tx = self.pool.begin().await.map_err(fix_error)?;
        for i in history {
            let client_id: &str = &i.client_id;
            let hostname: &str = &i.hostname;
            let data: &str = &i.data;
            sqlx::query(
                "insert into history
                    (client_id, user_id, hostname, timestamp, data)
                values ($1, $2, $3, $4, $5)
                on conflict do nothing
                ",
            )
            .bind(client_id)
            .bind(i.user_id)
            .bind(hostname)
            .bind(i.timestamp)
            .bind(data)
            .execute(&mut *tx)
            .await
            .map_err(fix_error)?;
        }
        tx.commit().await.map_err(fix_error)?;
        Ok(())
    }

    /// Fetch the user's earliest history entry by timestamp.
    #[instrument(skip_all)]
    async fn oldest_history(&self, user: &User) -> DbResult<History> {
        sqlx::query_as(
            "select id, client_id, user_id, hostname, timestamp, data, created_at from history
            where user_id = $1
            order by timestamp asc
            limit 1",
        )
        .bind(user.id)
        .fetch_one(&self.pool)
        .await
        .map_err(fix_error)
        .map(|DbHistory(h)| h)
    }
}
/// Normalise a timestamp to UTC, then drop the offset to produce the naive
/// `PrimitiveDateTime` that gets bound into SQL queries.
fn into_utc(x: OffsetDateTime) -> PrimitiveDateTime {
    let utc = x.to_offset(UtcOffset::UTC);
    PrimitiveDateTime::new(utc.date(), utc.time())
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-history/src/stats.rs | crates/atuin-history/src/stats.rs | use std::collections::{HashMap, HashSet};
use crossterm::style::{Color, ResetColor, SetAttribute, SetForegroundColor};
use serde::{Deserialize, Serialize};
use unicode_segmentation::UnicodeSegmentation;
use atuin_client::{history::History, settings::Settings, theme::Meaning, theme::Theme};
/// Aggregated command-history statistics, serializable for machine-readable
/// output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Stats {
    /// Total number of counted (non-ignored) history entries.
    pub total_commands: usize,
    /// Number of distinct command strings seen.
    pub unique_commands: usize,
    /// Most frequent command n-grams with their counts, most frequent first.
    pub top: Vec<(Vec<String>, usize)>,
}
/// Byte index of the first character that is not ASCII whitespace,
/// or `None` when the string is empty or all ASCII whitespace.
fn first_non_whitespace(s: &str) -> Option<usize> {
    s.find(|c: char| !c.is_ascii_whitespace())
}
/// Byte index of the first ASCII whitespace character, or the string's
/// length when it contains none.
fn first_whitespace(s: &str) -> usize {
    s.find(|c: char| c.is_ascii_whitespace()).unwrap_or(s.len())
}
/// Reduce a command line to the "interesting" part used for stats grouping:
/// strip a configured common prefix if present, then return either the first
/// word, or — for configured "common subcommand" tools — the tool name plus
/// its subcommand word.
fn interesting_command<'a>(settings: &Settings, mut command: &'a str) -> &'a str {
    // Sort by length so that we match the longest prefix first
    let mut common_prefix = settings.stats.common_prefix.clone();
    common_prefix.sort_by_key(|b| std::cmp::Reverse(b.len()));

    // Trim off the common prefix, if it exists
    for p in &common_prefix {
        if command.starts_with(p) {
            let i = p.len();
            let prefix = &command[..i];
            command = command[i..].trim_start();
            if command.is_empty() {
                // no commands following, just use the prefix
                return prefix;
            }
            break;
        }
    }

    // Sort the common_subcommands by length so that we match the longest subcommand first
    let mut common_subcommands = settings.stats.common_subcommands.clone();
    common_subcommands.sort_by_key(|b| std::cmp::Reverse(b.len()));

    // Check for a common subcommand
    for p in &common_subcommands {
        if command.starts_with(p) {
            // if the subcommand is the same length as the command, then we just use the subcommand
            if p.len() == command.len() {
                return command;
            }
            // otherwise we need to use the subcommand + the next word
            let non_whitespace = first_non_whitespace(&command[p.len()..]).unwrap_or(0);
            let j =
                p.len() + non_whitespace + first_whitespace(&command[p.len() + non_whitespace..]);
            return &command[..j];
        }
    }

    // Return the first word if there is no subcommand
    &command[..first_whitespace(command)]
}
/// Split a command line on unquoted `|` characters so each pipeline stage
/// can be counted as its own command; a backslash escapes the following
/// grapheme.
///
/// NOTE(review): the quote handling compares the whole span since the last
/// split (`command[start..current]`) against a single quote character —
/// it reads like it intends "the previous grapheme was not the same quote";
/// confirm intended behaviour for adjacent/empty quotes before changing it.
fn split_at_pipe(command: &str) -> Vec<&str> {
    let mut result = vec![];
    let mut quoted = false;
    let mut start = 0;
    let mut graphemes = UnicodeSegmentation::grapheme_indices(command, true);

    while let Some((i, c)) = graphemes.next() {
        let current = i;
        match c {
            "\"" => {
                if command[start..current] != *"\"" {
                    quoted = !quoted;
                }
            }
            "'" => {
                if command[start..current] != *"'" {
                    quoted = !quoted;
                }
            }
            // Consume the grapheme after a backslash so an escaped quote or
            // pipe is not treated as special.
            "\\" => if graphemes.next().is_some() {},
            "|" => {
                if !quoted {
                    // Drop the leading `|` itself from the next segment.
                    if current > start && command[start..].starts_with('|') {
                        start += 1;
                    }
                    result.push(&command[start..current]);
                    start = current;
                }
            }
            _ => {}
        }
    }
    // Emit the final segment, again skipping its leading `|` if present.
    if command[start..].starts_with('|') {
        start += 1;
    }
    result.push(&command[start..]);
    result
}
/// Strip leading `VAR=value` assignments from a command line, returning the
/// command proper (e.g. `FOO=1 BAR=2 make test` -> `make test`).
///
/// Scans graphemes while tracking quoting and backslash escapes so that an
/// `=` inside quotes does not make a token look like an assignment.
fn strip_leading_env_vars(command: &str) -> &str {
    // fast path: no equals sign, no environment variable
    if !command.contains('=') {
        return command;
    }

    // Scanner state for the current token.
    let mut in_token = false;
    let mut token_start_pos = 0;
    let mut in_single_quotes = false;
    let mut in_double_quotes = false;
    let mut escape_next = false;
    let mut has_equals_outside_quotes = false;

    for (i, g) in UnicodeSegmentation::grapheme_indices(command, true) {
        if escape_next {
            escape_next = false;
            continue;
        }

        // Remember where the current token began.
        if !in_token {
            token_start_pos = i;
        }

        match g {
            "\\" => {
                escape_next = true;
                in_token = true;
            }
            "'" if !in_double_quotes => {
                in_single_quotes = !in_single_quotes;
                in_token = true;
            }
            "\"" if !in_single_quotes => {
                in_double_quotes = !in_double_quotes;
                in_token = true;
            }
            "=" if !in_single_quotes && !in_double_quotes => {
                has_equals_outside_quotes = true;
                in_token = true;
            }
            " " | "\t" if !in_single_quotes && !in_double_quotes => {
                if in_token {
                    if !has_equals_outside_quotes {
                        // if we're not in an env var, we can break early
                        break;
                    }
                    in_token = false;
                    has_equals_outside_quotes = false;
                }
            }
            _ => {
                in_token = true;
            }
        }
    }

    // token_start_pos points at the first token with no unquoted `=`.
    command[token_start_pos..].trim()
}
/// Render `stats.top` as a coloured bar chart on stdout, followed by the
/// total and unique command counts.
///
/// NOTE(review): `max(...).unwrap()` panics if `stats.top` is empty; callers
/// presumably only pass stats produced by `compute`, which returns `None`
/// rather than an empty `top` — confirm.
pub fn pretty_print(stats: Stats, ngram_size: usize, theme: &Theme) {
    let max = stats.top.iter().map(|x| x.1).max().unwrap();
    // Digits needed to right-align the largest count.
    let num_pad = max.ilog10() as usize + 1;
    // Find the length of the longest command name for each column
    let column_widths = stats
        .top
        .iter()
        .map(|(commands, _)| commands.iter().map(|c| c.len()).collect::<Vec<usize>>())
        .fold(vec![0; ngram_size], |acc, item| {
            acc.iter()
                .zip(item.iter())
                .map(|(a, i)| *std::cmp::max(a, i))
                .collect()
        });
    for (command, count) in stats.top {
        let gray = SetForegroundColor(match theme.as_style(Meaning::Muted).foreground_color {
            Some(color) => color,
            None => Color::Grey,
        });
        let bold = SetAttribute(crossterm::style::Attribute::Bold);
        // Bar length scaled against the most frequent entry (0..=10 cells).
        let in_ten = 10 * count / max;
        print!("[");
        print!(
            "{}",
            SetForegroundColor(match theme.get_error().foreground_color {
                Some(color) => color,
                None => Color::Red,
            })
        );
        // Colour ramp along the bar: error colour first, switching to the
        // warning colour at cell 2 and the info colour at cell 5.
        for i in 0..in_ten {
            if i == 2 {
                print!(
                    "{}",
                    SetForegroundColor(match theme.get_warning().foreground_color {
                        Some(color) => color,
                        None => Color::Yellow,
                    })
                );
            }
            if i == 5 {
                print!(
                    "{}",
                    SetForegroundColor(match theme.get_info().foreground_color {
                        Some(color) => color,
                        None => Color::Green,
                    })
                );
            }
            print!("▮");
        }
        // Pad the bar out to a fixed width of 10 cells.
        for _ in in_ten..10 {
            print!(" ");
        }
        // Pad each n-gram component to its column width and join with ` | `.
        let formatted_command = command
            .iter()
            .zip(column_widths.iter())
            .map(|(cmd, width)| format!("{cmd:width$}"))
            .collect::<Vec<_>>()
            .join(" | ");
        println!(
            "{ResetColor}] {gray}{count:num_pad$}{ResetColor} {bold}{formatted_command}{ResetColor}"
        );
    }
    println!("Total commands: {}", stats.total_commands);
    println!("Unique commands: {}", stats.unique_commands);
}
/// Aggregate shell history into usage statistics.
///
/// Returns the `count` most frequent `ngram_size`-long command sequences,
/// together with totals for unique and unignored commands, or `None` when
/// nothing qualifies.
pub fn compute(
    settings: &Settings,
    history: &[History],
    count: usize,
    ngram_size: usize,
) -> Option<Stats> {
    let mut seen = HashSet::<&str>::with_capacity(history.len());
    let mut ngram_counts = HashMap::<Vec<&str>, usize>::with_capacity(history.len());
    let mut total_unignored = 0;
    for entry in history {
        // just in case it somehow has a leading tab or space or something
        // (legacy atuin didn't ignore space prefixes)
        let command = strip_leading_env_vars(entry.command.trim());
        let prefix = interesting_command(settings, command);
        if settings
            .stats
            .ignored_commands
            .iter()
            .any(|ignored| ignored == prefix)
        {
            continue;
        }
        total_unignored += 1;
        seen.insert(command);
        // Split a pipeline into its stages, counting every stage as a command
        // in its own right, then count each ngram of consecutive stages.
        let stages: Vec<&str> = split_at_pipe(command)
            .iter()
            .map(|stage| {
                let trimmed = stage.trim();
                seen.insert(trimmed);
                trimmed
            })
            .collect();
        for window in stages.windows(ngram_size) {
            let key: Vec<&str> = window
                .iter()
                .map(|c| interesting_command(settings, c))
                .collect();
            *ngram_counts.entry(key).or_default() += 1;
        }
    }
    let unique = seen.len();
    let mut ranked: Vec<_> = ngram_counts.into_iter().collect();
    ranked.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.1));
    ranked.truncate(count);
    if ranked.is_empty() {
        return None;
    }
    Some(Stats {
        unique_commands: unique,
        total_commands: total_unignored,
        top: ranked
            .into_iter()
            .map(|(cmds, n)| (cmds.into_iter().map(String::from).collect(), n))
            .collect(),
    })
}
// Unit tests covering stats computation, pipe splitting, env-var stripping
// and the "interesting command" heuristics.
#[cfg(test)]
mod tests {
    use atuin_client::history::History;
    use atuin_client::settings::Settings;
    use time::OffsetDateTime;

    use super::compute;
    use super::{interesting_command, split_at_pipe, strip_leading_env_vars};

    #[test]
    fn ignored_env_vars() {
        // Leading env assignments (even with a quoted '=' and emoji payload)
        // must not be counted as the command itself.
        let settings = Settings::utc();
        let history: History = History::capture()
            .timestamp(time::OffsetDateTime::now_utc())
            .command("FOO='BAR=🚀' echo foo")
            .cwd("/")
            .build()
            .into();
        let stats = compute(&settings, &[history], 10, 1).expect("failed to compute stats");
        assert_eq!(stats.top.first().unwrap().0, vec!["echo"]);
    }

    #[test]
    fn ignored_commands() {
        // Commands on the ignore list are excluded from both totals.
        let mut settings = Settings::utc();
        settings.stats.ignored_commands.push("cd".to_string());
        let history = [
            History::import()
                .timestamp(OffsetDateTime::now_utc())
                .command("cd foo")
                .build()
                .into(),
            History::import()
                .timestamp(OffsetDateTime::now_utc())
                .command("cargo build stuff")
                .build()
                .into(),
        ];
        let stats = compute(&settings, &history, 10, 1).expect("failed to compute stats");
        assert_eq!(stats.total_commands, 1);
        assert_eq!(stats.unique_commands, 1);
    }

    #[test]
    fn interesting_commands() {
        let settings = Settings::utc();
        assert_eq!(interesting_command(&settings, "cargo"), "cargo");
        assert_eq!(
            interesting_command(&settings, "cargo build foo bar"),
            "cargo build"
        );
        assert_eq!(
            interesting_command(&settings, "sudo cargo build foo bar"),
            "cargo build"
        );
        assert_eq!(interesting_command(&settings, "sudo"), "sudo");
    }

    // Test with spaces in the common_prefix
    #[test]
    fn interesting_commands_spaces() {
        let mut settings = Settings::utc();
        settings.stats.common_prefix.push("sudo test".to_string());
        assert_eq!(interesting_command(&settings, "sudo test"), "sudo test");
        assert_eq!(interesting_command(&settings, "sudo test "), "sudo test");
        assert_eq!(interesting_command(&settings, "sudo test foo bar"), "foo");
        assert_eq!(
            interesting_command(&settings, "sudo test foo bar"),
            "foo"
        );
        // Works with a common_subcommand as well
        assert_eq!(
            interesting_command(&settings, "sudo test cargo build foo bar"),
            "cargo build"
        );
        // We still match on just the sudo prefix
        assert_eq!(interesting_command(&settings, "sudo"), "sudo");
        assert_eq!(interesting_command(&settings, "sudo foo"), "foo");
    }

    // Test with spaces in the common_subcommand
    #[test]
    fn interesting_commands_spaces_subcommand() {
        let mut settings = Settings::utc();
        settings
            .stats
            .common_subcommands
            .push("cargo build".to_string());
        assert_eq!(interesting_command(&settings, "cargo build"), "cargo build");
        assert_eq!(
            interesting_command(&settings, "cargo build "),
            "cargo build"
        );
        assert_eq!(
            interesting_command(&settings, "cargo build foo bar"),
            "cargo build foo"
        );
        // Works with a common_prefix as well
        assert_eq!(
            interesting_command(&settings, "sudo cargo build foo bar"),
            "cargo build foo"
        );
        // We still match on just cargo as a subcommand
        assert_eq!(interesting_command(&settings, "cargo"), "cargo");
        assert_eq!(interesting_command(&settings, "cargo foo"), "cargo foo");
    }

    // Test with spaces in the common_prefix and common_subcommand
    #[test]
    fn interesting_commands_spaces_both() {
        let mut settings = Settings::utc();
        settings.stats.common_prefix.push("sudo test".to_string());
        settings
            .stats
            .common_subcommands
            .push("cargo build".to_string());
        assert_eq!(
            interesting_command(&settings, "sudo test cargo build"),
            "cargo build"
        );
        assert_eq!(
            interesting_command(&settings, "sudo test cargo build"),
            "cargo build"
        );
        assert_eq!(
            interesting_command(&settings, "sudo test cargo build "),
            "cargo build"
        );
        assert_eq!(
            interesting_command(&settings, "sudo test cargo build foo bar"),
            "cargo build foo"
        );
    }

    #[test]
    fn split_simple() {
        assert_eq!(split_at_pipe("fd | rg"), ["fd ", " rg"]);
    }

    #[test]
    fn split_multi() {
        assert_eq!(
            split_at_pipe("kubectl | jq | rg"),
            ["kubectl ", " jq ", " rg"]
        );
    }

    #[test]
    fn split_simple_quoted() {
        // Pipes inside single quotes are not split points.
        assert_eq!(
            split_at_pipe("foo | bar 'baz {} | quux' | xyzzy"),
            ["foo ", " bar 'baz {} | quux' ", " xyzzy"]
        );
    }

    #[test]
    fn split_multi_quoted() {
        assert_eq!(
            split_at_pipe("foo | bar 'baz \"{}\" | quux' | xyzzy"),
            ["foo ", " bar 'baz \"{}\" | quux' ", " xyzzy"]
        );
    }

    #[test]
    fn escaped_pipes() {
        // A backslash-escaped pipe stays inside its segment.
        assert_eq!(
            split_at_pipe("foo | bar baz \\| quux"),
            ["foo ", " bar baz \\| quux"]
        );
    }

    #[test]
    fn emoji() {
        assert_eq!(
            split_at_pipe("git commit -m \"🚀\""),
            ["git commit -m \"🚀\""]
        );
    }

    #[test]
    fn starts_with_pipe() {
        assert_eq!(
            split_at_pipe("| sed 's/[0-9a-f]//g'"),
            ["", " sed 's/[0-9a-f]//g'"]
        );
    }

    #[test]
    fn starts_with_spaces_and_pipe() {
        assert_eq!(
            split_at_pipe(" | sed 's/[0-9a-f]//g'"),
            [" ", " sed 's/[0-9a-f]//g'"]
        );
    }

    #[test]
    fn strip_leading_env_vars_simple() {
        assert_eq!(
            strip_leading_env_vars("FOO=bar BAZ=quux echo foo"),
            "echo foo"
        );
    }

    #[test]
    fn strip_leading_env_vars_quoted_single() {
        assert_eq!(strip_leading_env_vars("FOO='BAR=baz' echo foo"), "echo foo");
    }

    #[test]
    fn strip_leading_env_vars_quoted_double() {
        assert_eq!(
            strip_leading_env_vars("FOO=\"BAR=baz\" echo foo"),
            "echo foo"
        );
    }

    #[test]
    fn strip_leading_env_vars_quoted_single_and_double() {
        assert_eq!(
            strip_leading_env_vars("FOO='BAR=\"baz\"' echo foo \"BAR=quux\""),
            "echo foo \"BAR=quux\""
        );
    }

    #[test]
    fn strip_leading_env_vars_emojis() {
        assert_eq!(
            strip_leading_env_vars("FOO='BAR=🚀' echo foo \"BAR=quux\" foo"),
            "echo foo \"BAR=quux\" foo"
        );
    }

    #[test]
    fn strip_leading_env_vars_name_same_as_command() {
        // The env var's value matching the command name must not confuse the
        // tokenizer.
        assert_eq!(strip_leading_env_vars("FOO='bar' bar baz"), "bar baz");
    }
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-history/src/lib.rs | crates/atuin-history/src/lib.rs | pub mod sort;
pub mod stats;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-history/src/sort.rs | crates/atuin-history/src/sort.rs | use atuin_client::history::History;
type ScoredHistory = (f64, History);
// Fuzzy search already comes sorted by minspan
// This sorting should be applicable to all search modes, and solve the more "obvious" issues
// first.
// Later on, we can pass in context and do some boosts there too.
/// Re-rank search results for display: boost prefix matches most, substring
/// matches next, and nudge recent history upwards. Returns the input
/// re-ordered, best match first.
pub fn sort(query: &str, input: Vec<History>) -> Vec<History> {
    // This can totally be extended. We need to be _careful_ that it's not slow.
    // We also need to balance sorting db-side with sorting here. SQLite can do a lot,
    // but some things are just much easier/more doable in Rust.

    // Take "now" once, outside the loop: it is loop-invariant, and scoring
    // every entry against the same instant keeps ranking consistent.
    let now = time::OffsetDateTime::now_utc().unix_timestamp();
    let mut scored = input
        .into_iter()
        .map(|h| {
            // If history is _prefixed_ with the query, score it more highly
            let score = if h.command.starts_with(query) {
                2.0
            } else if h.command.contains(query) {
                1.75
            } else {
                1.0
            };
            // calculate how long ago the history was, in seconds
            let time = h.timestamp.unix_timestamp();
            let diff = std::cmp::max(1, now - time); // no /0 please
            // prefer newer history, but not hugely so as to offset the other scoring
            // the numbers will get super small over time, but I don't want time to overpower other
            // scoring
            #[allow(clippy::cast_precision_loss)]
            let time_score = 1.0 + (1.0 / diff as f64);
            (score * time_score, h)
        })
        .collect::<Vec<ScoredHistory>>();
    // total_cmp gives a total order over f64 (no unwrap, no panic path on
    // NaN); comparing b against a sorts descending.
    scored.sort_by(|a, b| b.0.total_cmp(&a.0));
    // Remove the scores and return the history
    scored.into_iter().map(|(_, h)| h).collect::<Vec<History>>()
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-history/benches/smart_sort.rs | crates/atuin-history/benches/smart_sort.rs | use atuin_client::history::History;
use atuin_history::sort::sort;
use rand::Rng;
/// Benchmark entry point: hands control to divan's runner.
fn main() {
    // Run registered benchmarks.
    divan::main();
}
// Smart sort usually runs on 200 entries, test on a few sizes
#[divan::bench(args=[100, 200, 400, 800, 1600, 10000])]
fn smart_sort(lines: usize) {
    // benchmark a few different sizes of "history"
    // first we need to generate some history. This will use a whole bunch of memory, sorry
    let mut rng = rand::thread_rng();
    let now = time::OffsetDateTime::now_utc().unix_timestamp();
    let possible_commands = ["echo", "ls", "cd", "grep", "atuin", "curl"];
    let mut commands = Vec::<History>::with_capacity(lines);
    for _ in 0..lines {
        // A random command from the pool, with a random timestamp anywhere
        // between the epoch and now.
        let command = possible_commands[rng.gen_range(0..possible_commands.len())];
        let command = History::import()
            .command(command)
            .timestamp(time::OffsetDateTime::from_unix_timestamp(rng.gen_range(0..now)).unwrap())
            .build()
            .into();
        commands.push(command);
    }
    // "curl" is in the pool, so both prefix and substring boosts get exercised.
    let _ = sort("curl", commands);
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/settings.rs | crates/atuin-scripts/src/settings.rs | rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false | |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/lib.rs | crates/atuin-scripts/src/lib.rs | pub mod database;
pub mod execution;
pub mod settings;
pub mod store;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/store.rs | crates/atuin-scripts/src/store.rs | use eyre::{Result, bail};
use atuin_client::record::sqlite_store::SqliteStore;
use atuin_client::record::{encryption::PASETO_V4, store::Store};
use atuin_common::record::{Host, HostId, Record, RecordId, RecordIdx};
use record::ScriptRecord;
use script::{SCRIPT_TAG, SCRIPT_VERSION, Script};
use crate::database::Database;
pub mod record;
pub mod script;
/// Event-sourced store for scripts, layered on the encrypted record store.
///
/// Script changes are appended as `ScriptRecord`s (create/update/delete)
/// under `SCRIPT_TAG`; `build` replays the whole log into a sqlite cache.
#[derive(Debug, Clone)]
pub struct ScriptStore {
    pub store: SqliteStore,
    pub host_id: HostId,
    // Key used to encrypt/decrypt record payloads via PASETO v4.
    pub encryption_key: [u8; 32],
}

impl ScriptStore {
    pub fn new(store: SqliteStore, host_id: HostId, encryption_key: [u8; 32]) -> Self {
        ScriptStore {
            store,
            host_id,
            encryption_key,
        }
    }

    /// Serialize, encrypt and append a record for this host, returning the
    /// new record's id and per-host index.
    async fn push_record(&self, record: ScriptRecord) -> Result<(RecordId, RecordIdx)> {
        let bytes = record.serialize()?;
        // Next index = last index for this host/tag + 1, or 0 if none yet.
        let idx = self
            .store
            .last(self.host_id, SCRIPT_TAG)
            .await?
            .map_or(0, |p| p.idx + 1);
        let record = Record::builder()
            .host(Host::new(self.host_id))
            .version(SCRIPT_VERSION.to_string())
            .tag(SCRIPT_TAG.to_string())
            .idx(idx)
            .data(bytes)
            .build();
        let id = record.id;
        self.store
            .push(&record.encrypt::<PASETO_V4>(&self.encryption_key))
            .await?;
        Ok((id, idx))
    }

    /// Append a "create" event for the given script.
    pub async fn create(&self, script: Script) -> Result<()> {
        let record = ScriptRecord::Create(script);
        self.push_record(record).await?;
        Ok(())
    }

    /// Append an "update" event for the given script.
    pub async fn update(&self, script: Script) -> Result<()> {
        let record = ScriptRecord::Update(script);
        self.push_record(record).await?;
        Ok(())
    }

    /// Append a "delete" event for the script with the given id.
    pub async fn delete(&self, script_id: uuid::Uuid) -> Result<()> {
        let record = ScriptRecord::Delete(script_id);
        self.push_record(record).await?;
        Ok(())
    }

    /// Decrypt and deserialize every script record in the store.
    ///
    /// Bails on records with an unknown version rather than skipping them.
    pub async fn scripts(&self) -> Result<Vec<ScriptRecord>> {
        let records = self.store.all_tagged(SCRIPT_TAG).await?;
        let mut ret = Vec::with_capacity(records.len());
        for record in records.into_iter() {
            let script = match record.version.as_str() {
                SCRIPT_VERSION => {
                    let decrypted = record.decrypt::<PASETO_V4>(&self.encryption_key)?;
                    ScriptRecord::deserialize(&decrypted.data, SCRIPT_VERSION)
                }
                version => bail!("unknown history version {version:?}"),
            }?;
            ret.push(script);
        }
        Ok(ret)
    }

    /// Replay the full event log into the sqlite database cache.
    pub async fn build(&self, database: Database) -> Result<()> {
        // Get all the scripts from the database - they are already sorted by timestamp
        let scripts = self.scripts().await?;
        for script in scripts {
            match script {
                ScriptRecord::Create(script) => {
                    database.save(&script).await?;
                }
                ScriptRecord::Update(script) => database.update(&script).await?,
                ScriptRecord::Delete(id) => database.delete(&id.to_string()).await?,
            }
        }
        Ok(())
    }
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/database.rs | crates/atuin-scripts/src/database.rs | use std::{path::Path, str::FromStr, time::Duration};
use atuin_common::utils;
use sqlx::{
Result, Row,
sqlite::{
SqliteConnectOptions, SqliteJournalMode, SqlitePool, SqlitePoolOptions, SqliteRow,
SqliteSynchronous,
},
};
use tokio::fs;
use tracing::debug;
use uuid::Uuid;
use crate::store::script::Script;
/// Thin wrapper around the scripts sqlite connection pool.
#[derive(Debug, Clone)]
pub struct Database {
    pub pool: SqlitePool,
}

impl Database {
    /// Open (creating if necessary) the scripts database at `path`, run
    /// migrations, and return the wrapped pool. `timeout` is the pool
    /// acquire timeout in seconds.
    ///
    /// Exits the process if `path` is a broken symlink: we can neither read
    /// it nor safely create a replacement.
    pub async fn new(path: impl AsRef<Path>, timeout: f64) -> Result<Self> {
        let path = path.as_ref();
        debug!("opening script sqlite database at {:?}", path);
        if utils::broken_symlink(path) {
            eprintln!(
                "Atuin: Script sqlite db path ({path:?}) is a broken symlink. Unable to read or create replacement."
            );
            std::process::exit(1);
        }
        // Make sure the parent directory exists before sqlite tries to
        // create the file.
        if !path.exists()
            && let Some(dir) = path.parent()
        {
            fs::create_dir_all(dir).await?;
        }
        let opts = SqliteConnectOptions::from_str(path.as_os_str().to_str().unwrap())?
            .journal_mode(SqliteJournalMode::Wal)
            .optimize_on_close(true, None)
            .synchronous(SqliteSynchronous::Normal)
            .with_regexp()
            .foreign_keys(true)
            .create_if_missing(true);
        let pool = SqlitePoolOptions::new()
            .acquire_timeout(Duration::from_secs_f64(timeout))
            .connect_with(opts)
            .await?;
        Self::setup_db(&pool).await?;
        Ok(Self { pool })
    }

    /// Report the sqlite library version (diagnostics).
    pub async fn sqlite_version(&self) -> Result<String> {
        sqlx::query_scalar("SELECT sqlite_version()")
            .fetch_one(&self.pool)
            .await
    }

    /// Apply all pending migrations.
    async fn setup_db(pool: &SqlitePool) -> Result<()> {
        debug!("running sqlite database setup");
        sqlx::migrate!("./migrations").run(pool).await?;
        Ok(())
    }

    /// Insert a script and its tags inside an existing transaction.
    /// `insert or ignore` makes re-saving an existing id a no-op.
    async fn save_raw(tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>, s: &Script) -> Result<()> {
        sqlx::query(
            "insert or ignore into scripts(id, name, description, shebang, script)
            values(?1, ?2, ?3, ?4, ?5)",
        )
        .bind(s.id.to_string())
        .bind(s.name.as_str())
        .bind(s.description.as_str())
        .bind(s.shebang.as_str())
        .bind(s.script.as_str())
        .execute(&mut **tx)
        .await?;
        for tag in s.tags.iter() {
            sqlx::query(
                "insert or ignore into script_tags(script_id, tag)
                values(?1, ?2)",
            )
            .bind(s.id.to_string())
            .bind(tag)
            .execute(&mut **tx)
            .await?;
        }
        Ok(())
    }

    /// Save a single script (and its tags) in one transaction.
    pub async fn save(&self, s: &Script) -> Result<()> {
        debug!("saving script to sqlite");
        let mut tx = self.pool.begin().await?;
        Self::save_raw(&mut tx, s).await?;
        tx.commit().await?;
        Ok(())
    }

    /// Save many scripts atomically — all of them commit or none do.
    pub async fn save_bulk(&self, s: &[Script]) -> Result<()> {
        debug!("saving scripts to sqlite");
        let mut tx = self.pool.begin().await?;
        for i in s {
            Self::save_raw(&mut tx, i).await?;
        }
        tx.commit().await?;
        Ok(())
    }

    /// Map a `scripts` row into a `Script` with an empty tag list; tags are
    /// fetched separately by the callers.
    fn query_script(row: SqliteRow) -> Script {
        let id = row.get("id");
        let name = row.get("name");
        let description = row.get("description");
        let shebang = row.get("shebang");
        let script = row.get("script");
        let id = Uuid::parse_str(id).unwrap();
        Script {
            id,
            name,
            description,
            shebang,
            script,
            tags: vec![],
        }
    }

    /// Map a `script_tags` row to its tag string.
    fn query_script_tags(row: SqliteRow) -> String {
        row.get("tag")
    }

    /// Load a single script by id, including its tags, or `None` if absent.
    #[allow(dead_code)]
    async fn load(&self, id: &str) -> Result<Option<Script>> {
        debug!("loading script item {}", id);
        let res = sqlx::query("select * from scripts where id = ?1")
            .bind(id)
            .map(Self::query_script)
            .fetch_optional(&self.pool)
            .await?;
        // intentionally not joining, don't want to duplicate the script data in memory a whole bunch.
        if let Some(mut script) = res {
            let tags = sqlx::query("select tag from script_tags where script_id = ?1")
                .bind(id)
                .map(Self::query_script_tags)
                .fetch_all(&self.pool)
                .await?;
            script.tags = tags;
            Ok(Some(script))
        } else {
            Ok(None)
        }
    }

    /// Return every script, each with its tags populated.
    pub async fn list(&self) -> Result<Vec<Script>> {
        debug!("listing scripts");
        let mut res = sqlx::query("select * from scripts")
            .map(Self::query_script)
            .fetch_all(&self.pool)
            .await?;
        // Fetch all the tags for each script
        for script in res.iter_mut() {
            let tags = sqlx::query("select tag from script_tags where script_id = ?1")
                .bind(script.id.to_string())
                .map(Self::query_script_tags)
                .fetch_all(&self.pool)
                .await?;
            script.tags = tags;
        }
        Ok(res)
    }

    /// Delete a script and its tags by id.
    ///
    /// NOTE(review): the two deletes are not wrapped in one transaction, so a
    /// failure between them could leave orphaned tag rows — confirm intended.
    pub async fn delete(&self, id: &str) -> Result<()> {
        debug!("deleting script {}", id);
        sqlx::query("delete from scripts where id = ?1")
            .bind(id)
            .execute(&self.pool)
            .await?;
        // delete all the tags for the script
        sqlx::query("delete from script_tags where script_id = ?1")
            .bind(id)
            .execute(&self.pool)
            .await?;
        Ok(())
    }

    /// Overwrite a script's fields and replace its tag set, atomically.
    pub async fn update(&self, s: &Script) -> Result<()> {
        debug!("updating script {:?}", s);
        let mut tx = self.pool.begin().await?;
        // Update the script's base fields
        sqlx::query("update scripts set name = ?1, description = ?2, shebang = ?3, script = ?4 where id = ?5")
            .bind(s.name.as_str())
            .bind(s.description.as_str())
            .bind(s.shebang.as_str())
            .bind(s.script.as_str())
            .bind(s.id.to_string())
            .execute(&mut *tx)
            .await?;
        // Delete all existing tags for this script
        sqlx::query("delete from script_tags where script_id = ?1")
            .bind(s.id.to_string())
            .execute(&mut *tx)
            .await?;
        // Insert new tags
        for tag in s.tags.iter() {
            sqlx::query(
                "insert or ignore into script_tags(script_id, tag)
                values(?1, ?2)",
            )
            .bind(s.id.to_string())
            .bind(tag)
            .execute(&mut *tx)
            .await?;
        }
        tx.commit().await?;
        Ok(())
    }

    /// Look a script up by (unique) name, including its tags.
    pub async fn get_by_name(&self, name: &str) -> Result<Option<Script>> {
        let res = sqlx::query("select * from scripts where name = ?1")
            .bind(name)
            .map(Self::query_script)
            .fetch_optional(&self.pool)
            .await?;
        let script = if let Some(mut script) = res {
            let tags = sqlx::query("select tag from script_tags where script_id = ?1")
                .bind(script.id.to_string())
                .map(Self::query_script_tags)
                .fetch_all(&self.pool)
                .await?;
            script.tags = tags;
            Some(script)
        } else {
            None
        };
        Ok(script)
    }
}
// Integration-style tests against an in-memory sqlite database.
#[cfg(test)]
mod test {
    use super::*;

    #[tokio::test]
    async fn test_list() {
        let db = Database::new("sqlite::memory:", 1.0).await.unwrap();
        let scripts = db.list().await.unwrap();
        assert_eq!(scripts.len(), 0);
        let script = Script::builder()
            .name("test".to_string())
            .description("test".to_string())
            .shebang("test".to_string())
            .script("test".to_string())
            .build();
        db.save(&script).await.unwrap();
        let scripts = db.list().await.unwrap();
        assert_eq!(scripts.len(), 1);
        assert_eq!(scripts[0].name, "test");
    }

    #[tokio::test]
    async fn test_save_load() {
        let db = Database::new("sqlite::memory:", 1.0).await.unwrap();
        let script = Script::builder()
            .name("test name".to_string())
            .description("test description".to_string())
            .shebang("test shebang".to_string())
            .script("test script".to_string())
            .build();
        db.save(&script).await.unwrap();
        // Round-trip: load must return exactly what was saved.
        let loaded = db.load(&script.id.to_string()).await.unwrap().unwrap();
        assert_eq!(loaded, script);
    }

    #[tokio::test]
    async fn test_save_bulk() {
        let db = Database::new("sqlite::memory:", 1.0).await.unwrap();
        let scripts = vec![
            Script::builder()
                .name("test name".to_string())
                .description("test description".to_string())
                .shebang("test shebang".to_string())
                .script("test script".to_string())
                .build(),
            Script::builder()
                .name("test name 2".to_string())
                .description("test description 2".to_string())
                .shebang("test shebang 2".to_string())
                .script("test script 2".to_string())
                .build(),
        ];
        db.save_bulk(&scripts).await.unwrap();
        let loaded = db.list().await.unwrap();
        assert_eq!(loaded.len(), 2);
        // NOTE(review): list() has no ORDER BY; this relies on sqlite
        // returning rows in insertion order.
        assert_eq!(loaded[0].name, "test name");
        assert_eq!(loaded[1].name, "test name 2");
    }

    #[tokio::test]
    async fn test_delete() {
        let db = Database::new("sqlite::memory:", 1.0).await.unwrap();
        let script = Script::builder()
            .name("test name".to_string())
            .description("test description".to_string())
            .shebang("test shebang".to_string())
            .script("test script".to_string())
            .build();
        db.save(&script).await.unwrap();
        assert_eq!(db.list().await.unwrap().len(), 1);
        db.delete(&script.id.to_string()).await.unwrap();
        let loaded = db.list().await.unwrap();
        assert_eq!(loaded.len(), 0);
    }
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/execution.rs | crates/atuin-scripts/src/execution.rs | use crate::store::script::Script;
use eyre::Result;
use std::collections::{HashMap, HashSet};
use std::process::Stdio;
use tempfile::NamedTempFile;
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::sync::mpsc;
use tokio::task;
use tracing::debug;
/// Build the full on-disk script text from a body and a shebang.
///
/// Falls back to `#!/usr/bin/env bash` when no shebang is supplied. If the
/// body already begins with its own `#!` line, the requested shebang is
/// placed above it verbatim; otherwise `#!` is prepended to the shebang.
pub fn build_executable_script(script: String, shebang: String) -> String {
    if shebang.is_empty() {
        // Default to bash if no shebang is provided
        return format!("#!/usr/bin/env bash\n{script}");
    }
    let first_line = if script.starts_with("#!") {
        shebang
    } else {
        format!("#!{shebang}")
    };
    format!("{first_line}\n{script}")
}
/// Represents the communication channels for an interactive script
pub struct ScriptSession {
    /// Channel to send input to the script
    pub stdin_tx: mpsc::Sender<String>,
    /// Exit code of the process once it completes
    pub exit_code_rx: mpsc::Receiver<i32>,
}

impl ScriptSession {
    /// Send input to the running script
    pub async fn send_input(&self, input: String) -> Result<(), mpsc::error::SendError<String>> {
        self.stdin_tx.send(input).await
    }

    /// Wait for the script to complete and get the exit code
    ///
    /// Returns `None` if the exit-code sender was dropped without reporting.
    pub async fn wait_for_exit(&mut self) -> Option<i32> {
        self.exit_code_rx.recv().await
    }
}
/// Build a minijinja environment holding the script body as its single
/// template, named "script". `trim_blocks` is enabled so block tags do not
/// leave stray newlines in the rendered output.
fn setup_template(script: &Script) -> Result<minijinja::Environment<'_>> {
    let mut env = minijinja::Environment::new();
    env.set_trim_blocks(true);
    env.add_template("script", script.script.as_str())?;
    Ok(env)
}
/// Template a script with the given context
///
/// Renders the script body as a minijinja template, substituting values from
/// `context` for its variables.
pub fn template_script(
    script: &Script,
    context: &HashMap<String, serde_json::Value>,
) -> Result<String> {
    let environment = setup_template(script)?;
    let tmpl = environment.get_template("script")?;
    Ok(tmpl.render(context)?)
}
/// Get the variables that need to be templated in a script
///
/// Returns every variable the template references that is not defined within
/// the template itself (minijinja's "undeclared variables" set, computed
/// without rendering).
pub fn template_variables(script: &Script) -> Result<HashSet<String>> {
    let env = setup_template(script)?;
    let template = env.get_template("script")?;
    Ok(template.undeclared_variables(true))
}
/// Execute a script interactively, allowing for ongoing stdin/stdout interaction
///
/// Writes the script to an executable temp file and spawns it directly,
/// falling back to invoking the interpreter parsed from the shebang when
/// direct execution fails. The child's stdout/stderr are streamed to this
/// process's stdout/stderr; input is fed through the returned session's
/// channel, and the exit code is delivered on the session's receiver.
pub async fn execute_script_interactive(
    script: String,
    shebang: String,
) -> Result<ScriptSession, Box<dyn std::error::Error + Send + Sync>> {
    // Create a temporary file for the script
    let temp_file = NamedTempFile::new()?;
    let temp_path = temp_file.path().to_path_buf();
    debug!("creating temp file at {}", temp_path.display());
    // Extract interpreter from shebang for fallback execution
    let interpreter = if !shebang.is_empty() {
        shebang.trim_start_matches("#!").trim().to_string()
    } else {
        "/usr/bin/env bash".to_string()
    };
    // Write script content to the temp file, including the shebang
    let full_script_content = build_executable_script(script.clone(), shebang.clone());
    debug!("writing script content to temp file");
    tokio::fs::write(&temp_path, &full_script_content).await?;
    // Make it executable on Unix systems
    #[cfg(unix)]
    {
        debug!("making script executable");
        use std::os::unix::fs::PermissionsExt;
        let mut perms = std::fs::metadata(&temp_path)?.permissions();
        perms.set_mode(0o755);
        std::fs::set_permissions(&temp_path, perms)?;
    }
    // Store the temp_file to prevent it from being dropped
    // This ensures it won't be deleted while the script is running
    let _keep_temp_file = temp_file;
    debug!("attempting direct script execution");
    let mut child_result = tokio::process::Command::new(temp_path.to_str().unwrap())
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn();
    // If direct execution fails, try using the interpreter
    if let Err(e) = &child_result {
        debug!("direct execution failed: {}, trying with interpreter", e);
        // When falling back to interpreter, remove the shebang from the file
        // Some interpreters don't handle scripts with shebangs well
        debug!("writing script content without shebang for interpreter execution");
        tokio::fs::write(&temp_path, &script).await?;
        // Parse the interpreter command
        let parts: Vec<&str> = interpreter.split_whitespace().collect();
        if !parts.is_empty() {
            let mut cmd = tokio::process::Command::new(parts[0]);
            // Add any interpreter args
            for i in parts.iter().skip(1) {
                cmd.arg(i);
            }
            // Add the script path
            cmd.arg(temp_path.to_str().unwrap());
            // Try with the interpreter
            child_result = cmd
                .stdin(Stdio::piped())
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .spawn();
        }
    }
    // If it still fails, return the error
    let mut child = match child_result {
        Ok(child) => child,
        Err(e) => {
            return Err(format!("Failed to execute script: {e}").into());
        }
    };
    // Get handles to stdin, stdout, stderr
    let mut stdin = child
        .stdin
        .take()
        .ok_or_else(|| "Failed to open child process stdin".to_string())?;
    let stdout = child
        .stdout
        .take()
        .ok_or_else(|| "Failed to open child process stdout".to_string())?;
    let stderr = child
        .stderr
        .take()
        .ok_or_else(|| "Failed to open child process stderr".to_string())?;
    // Create channels for the interactive session
    let (stdin_tx, mut stdin_rx) = mpsc::channel::<String>(32);
    let (exit_code_tx, exit_code_rx) = mpsc::channel::<i32>(1);
    // handle user stdin: forward anything received on the channel into the
    // child's stdin, flushing after each message.
    debug!("spawning stdin handler");
    tokio::spawn(async move {
        while let Some(input) = stdin_rx.recv().await {
            if let Err(e) = stdin.write_all(input.as_bytes()).await {
                eprintln!("Error writing to stdin: {e}");
                break;
            }
            if let Err(e) = stdin.flush().await {
                eprintln!("Error flushing stdin: {e}");
                break;
            }
        }
        // when the channel closes (sender dropped), we let stdin close naturally
    });
    // handle stdout: copy the child's stdout to ours, 1 KiB at a time.
    debug!("spawning stdout handler");
    let stdout_handle = task::spawn(async move {
        let mut stdout_reader = BufReader::new(stdout);
        let mut buffer = [0u8; 1024];
        let mut stdout_writer = tokio::io::stdout();
        loop {
            match stdout_reader.read(&mut buffer).await {
                Ok(0) => break, // End of stdout
                Ok(n) => {
                    if let Err(e) = stdout_writer.write_all(&buffer[0..n]).await {
                        eprintln!("Error writing to stdout: {e}");
                        break;
                    }
                    if let Err(e) = stdout_writer.flush().await {
                        eprintln!("Error flushing stdout: {e}");
                        break;
                    }
                }
                Err(e) => {
                    eprintln!("Error reading from process stdout: {e}");
                    break;
                }
            }
        }
    });
    // Process stderr in a separate task
    debug!("spawning stderr handler");
    let stderr_handle = task::spawn(async move {
        let mut stderr_reader = BufReader::new(stderr);
        let mut buffer = [0u8; 1024];
        let mut stderr_writer = tokio::io::stderr();
        loop {
            match stderr_reader.read(&mut buffer).await {
                Ok(0) => break, // End of stderr
                Ok(n) => {
                    if let Err(e) = stderr_writer.write_all(&buffer[0..n]).await {
                        eprintln!("Error writing to stderr: {e}");
                        break;
                    }
                    if let Err(e) = stderr_writer.flush().await {
                        eprintln!("Error flushing stderr: {e}");
                        break;
                    }
                }
                Err(e) => {
                    eprintln!("Error reading from process stderr: {e}");
                    break;
                }
            }
        }
    });
    // Spawn a task to wait for the child process to complete
    debug!("spawning exit code handler");
    let _keep_temp_file_clone = _keep_temp_file;
    tokio::spawn(async move {
        // Keep the temp file alive until the process completes
        let _temp_file_ref = _keep_temp_file_clone;
        // Wait for the child process to complete
        let status = match child.wait().await {
            Ok(status) => {
                debug!("Process exited with status: {:?}", status);
                status
            }
            Err(e) => {
                eprintln!("Error waiting for child process: {e}");
                // Send a default error code
                let _ = exit_code_tx.send(-1).await;
                return;
            }
        };
        // Wait for stdout/stderr tasks to complete
        if let Err(e) = stdout_handle.await {
            eprintln!("Error joining stdout task: {e}");
        }
        if let Err(e) = stderr_handle.await {
            eprintln!("Error joining stderr task: {e}");
        }
        // Send the exit code; -1 stands in when the process was killed by a
        // signal and has no code.
        let exit_code = status.code().unwrap_or(-1);
        debug!("Sending exit code: {}", exit_code);
        let _ = exit_code_tx.send(exit_code).await;
    });
    // Return the communication channels as a ScriptSession
    Ok(ScriptSession {
        stdin_tx,
        exit_code_rx,
    })
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/store/record.rs | crates/atuin-scripts/src/store/record.rs | use atuin_common::record::DecryptedData;
use eyre::{Result, eyre};
use uuid::Uuid;
use crate::store::script::SCRIPT_VERSION;
use super::script::Script;
/// A single event in the script event log.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ScriptRecord {
    Create(Script),
    Update(Script),
    Delete(Uuid),
}

impl ScriptRecord {
    /// Encode the record as msgpack: a one-byte discriminant (0 = create,
    /// 1 = delete, 2 = update) followed by the payload. This is a wire
    /// format — the byte layout must not change for a given SCRIPT_VERSION.
    pub fn serialize(&self) -> Result<DecryptedData> {
        use rmp::encode;
        let mut output = vec![];
        match self {
            ScriptRecord::Create(script) => {
                // 0 -> a script create
                encode::write_u8(&mut output, 0)?;
                let bytes = script.serialize()?;
                encode::write_bin(&mut output, &bytes.0)?;
            }
            ScriptRecord::Delete(id) => {
                // 1 -> a script delete
                encode::write_u8(&mut output, 1)?;
                encode::write_str(&mut output, id.to_string().as_str())?;
            }
            ScriptRecord::Update(script) => {
                // 2 -> a script update
                encode::write_u8(&mut output, 2)?;
                let bytes = script.serialize()?;
                encode::write_bin(&mut output, &bytes.0)?;
            }
        };
        Ok(DecryptedData(output))
    }

    /// Decode a record previously produced by [`ScriptRecord::serialize`].
    ///
    /// `version` must be `SCRIPT_VERSION`; anything else is an error.
    pub fn deserialize(data: &DecryptedData, version: &str) -> Result<Self> {
        use rmp::decode;
        // Adapter: rmp decode errors are not std errors, wrap them for eyre.
        fn error_report<E: std::fmt::Debug>(err: E) -> eyre::Report {
            eyre!("{err:?}")
        }
        match version {
            SCRIPT_VERSION => {
                let mut bytes = decode::Bytes::new(&data.0);
                let record_type = decode::read_u8(&mut bytes).map_err(error_report)?;
                match record_type {
                    // create
                    0 => {
                        // written by encode::write_bin above
                        let _ = decode::read_bin_len(&mut bytes).map_err(error_report)?;
                        let script = Script::deserialize(bytes.remaining_slice())?;
                        Ok(ScriptRecord::Create(script))
                    }
                    // delete
                    1 => {
                        let bytes = bytes.remaining_slice();
                        let (id, _) = decode::read_str_from_slice(bytes).map_err(error_report)?;
                        Ok(ScriptRecord::Delete(Uuid::parse_str(id)?))
                    }
                    // update
                    2 => {
                        // written by encode::write_bin above
                        let _ = decode::read_bin_len(&mut bytes).map_err(error_report)?;
                        let script = Script::deserialize(bytes.remaining_slice())?;
                        Ok(ScriptRecord::Update(script))
                    }
                    _ => Err(eyre!("unknown script record type {record_type}")),
                }
            }
            _ => Err(eyre!("unknown version {version:?}")),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The golden-byte tests below pin the exact on-wire encoding (a change
    // in these vectors means a sync-protocol break); the round-trip tests
    // check that deserialize inverts serialize for each record kind.
    #[test]
    fn test_serialize_create() {
        let script = Script::builder()
            .id(uuid::Uuid::parse_str("0195c825a35f7982bdb016168881cbc6").unwrap())
            .name("test".to_string())
            .description("test".to_string())
            .shebang("test".to_string())
            .tags(vec!["test".to_string()])
            .script("test".to_string())
            .build();
        let record = ScriptRecord::Create(script);
        let serialized = record.serialize().unwrap();
        assert_eq!(
            serialized.0,
            vec![
                204, 0, 196, 65, 150, 217, 36, 48, 49, 57, 53, 99, 56, 50, 53, 45, 97, 51, 53, 102,
                45, 55, 57, 56, 50, 45, 98, 100, 98, 48, 45, 49, 54, 49, 54, 56, 56, 56, 49, 99,
                98, 99, 54, 164, 116, 101, 115, 116, 164, 116, 101, 115, 116, 164, 116, 101, 115,
                116, 145, 164, 116, 101, 115, 116, 164, 116, 101, 115, 116
            ]
        );
    }
    #[test]
    fn test_serialize_delete() {
        let record = ScriptRecord::Delete(
            uuid::Uuid::parse_str("0195c825a35f7982bdb016168881cbc6").unwrap(),
        );
        let serialized = record.serialize().unwrap();
        assert_eq!(
            serialized.0,
            vec![
                204, 1, 217, 36, 48, 49, 57, 53, 99, 56, 50, 53, 45, 97, 51, 53, 102, 45, 55, 57,
                56, 50, 45, 98, 100, 98, 48, 45, 49, 54, 49, 54, 56, 56, 56, 49, 99, 98, 99, 54
            ]
        );
    }
    #[test]
    fn test_serialize_update() {
        let script = Script::builder()
            .id(uuid::Uuid::parse_str("0195c825a35f7982bdb016168881cbc6").unwrap())
            .name(String::from("test"))
            .description(String::from("test"))
            .shebang(String::from("test"))
            .tags(vec![String::from("test"), String::from("test2")])
            .script(String::from("test"))
            .build();
        let record = ScriptRecord::Update(script);
        let serialized = record.serialize().unwrap();
        assert_eq!(
            serialized.0,
            vec![
                204, 2, 196, 71, 150, 217, 36, 48, 49, 57, 53, 99, 56, 50, 53, 45, 97, 51, 53, 102,
                45, 55, 57, 56, 50, 45, 98, 100, 98, 48, 45, 49, 54, 49, 54, 56, 56, 56, 49, 99,
                98, 99, 54, 164, 116, 101, 115, 116, 164, 116, 101, 115, 116, 164, 116, 101, 115,
                116, 146, 164, 116, 101, 115, 116, 165, 116, 101, 115, 116, 50, 164, 116, 101, 115,
                116
            ],
        );
    }
    #[test]
    fn test_serialize_deserialize_create() {
        let script = Script::builder()
            .name("test".to_string())
            .description("test".to_string())
            .shebang("test".to_string())
            .tags(vec!["test".to_string()])
            .script("test".to_string())
            .build();
        let record = ScriptRecord::Create(script);
        let serialized = record.serialize().unwrap();
        let deserialized = ScriptRecord::deserialize(&serialized, SCRIPT_VERSION).unwrap();
        assert_eq!(record, deserialized);
    }
    #[test]
    fn test_serialize_deserialize_delete() {
        let record = ScriptRecord::Delete(
            uuid::Uuid::parse_str("0195c825a35f7982bdb016168881cbc6").unwrap(),
        );
        let serialized = record.serialize().unwrap();
        let deserialized = ScriptRecord::deserialize(&serialized, SCRIPT_VERSION).unwrap();
        assert_eq!(record, deserialized);
    }
    #[test]
    fn test_serialize_deserialize_update() {
        let script = Script::builder()
            .name("test".to_string())
            .description("test".to_string())
            .shebang("test".to_string())
            .tags(vec!["test".to_string()])
            .script("test".to_string())
            .build();
        let record = ScriptRecord::Update(script);
        let serialized = record.serialize().unwrap();
        let deserialized = ScriptRecord::deserialize(&serialized, SCRIPT_VERSION).unwrap();
        assert_eq!(record, deserialized);
    }
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-scripts/src/store/script.rs | crates/atuin-scripts/src/store/script.rs | use atuin_common::record::DecryptedData;
use eyre::{Result, bail, ensure};
use uuid::Uuid;
use rmp::{
decode::{self, Bytes},
encode,
};
use typed_builder::TypedBuilder;
pub const SCRIPT_VERSION: &str = "v0";
pub const SCRIPT_TAG: &str = "script";
pub const SCRIPT_LEN: usize = 20000; // 20kb max total len
#[derive(Debug, Clone, PartialEq, Eq, TypedBuilder)]
/// A script is a set of commands that can be run, with the specified shebang
pub struct Script {
    /// The id of the script
    #[builder(default = uuid::Uuid::new_v4())]
    pub id: Uuid,
    /// The name of the script
    pub name: String,
    /// The description of the script
    #[builder(default = String::new())]
    pub description: String,
    /// The interpreter of the script
    #[builder(default = String::new())]
    pub shebang: String,
    /// The tags of the script
    #[builder(default = Vec::new())]
    pub tags: Vec<String>,
    /// The script content
    pub script: String,
}
impl Script {
pub fn serialize(&self) -> Result<DecryptedData> {
// sort the tags first, to ensure consistent ordering
let mut tags = self.tags.clone();
tags.sort();
let mut output = vec![];
encode::write_array_len(&mut output, 6)?;
encode::write_str(&mut output, &self.id.to_string())?;
encode::write_str(&mut output, &self.name)?;
encode::write_str(&mut output, &self.description)?;
encode::write_str(&mut output, &self.shebang)?;
encode::write_array_len(&mut output, self.tags.len() as u32)?;
for tag in &tags {
encode::write_str(&mut output, tag)?;
}
encode::write_str(&mut output, &self.script)?;
Ok(DecryptedData(output))
}
pub fn deserialize(bytes: &[u8]) -> Result<Self> {
let mut bytes = decode::Bytes::new(bytes);
let nfields = decode::read_array_len(&mut bytes).unwrap();
ensure!(nfields == 6, "too many entries in v0 script record");
let bytes = bytes.remaining_slice();
let (id, bytes) = decode::read_str_from_slice(bytes).unwrap();
let (name, bytes) = decode::read_str_from_slice(bytes).unwrap();
let (description, bytes) = decode::read_str_from_slice(bytes).unwrap();
let (shebang, bytes) = decode::read_str_from_slice(bytes).unwrap();
let mut bytes = Bytes::new(bytes);
let tags_len = decode::read_array_len(&mut bytes).unwrap();
let mut bytes = bytes.remaining_slice();
let mut tags = Vec::new();
for _ in 0..tags_len {
let (tag, remaining) = decode::read_str_from_slice(bytes).unwrap();
tags.push(tag.to_owned());
bytes = remaining;
}
let (script, bytes) = decode::read_str_from_slice(bytes).unwrap();
if !bytes.is_empty() {
bail!("trailing bytes in encoded script record. malformed")
}
Ok(Script {
id: Uuid::parse_str(id).unwrap(),
name: name.to_owned(),
description: description.to_owned(),
shebang: shebang.to_owned(),
tags,
script: script.to_owned(),
})
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Pins the exact serialized byte layout — changing it breaks decoding
    // of already-synced records.
    #[test]
    fn test_serialize() {
        let script = Script {
            id: uuid::Uuid::parse_str("0195c825a35f7982bdb016168881cbc6").unwrap(),
            name: "test".to_string(),
            description: "test".to_string(),
            shebang: "test".to_string(),
            tags: vec!["test".to_string()],
            script: "test".to_string(),
        };
        let serialized = script.serialize().unwrap();
        assert_eq!(
            serialized.0,
            vec![
                150, 217, 36, 48, 49, 57, 53, 99, 56, 50, 53, 45, 97, 51, 53, 102, 45, 55, 57, 56,
                50, 45, 98, 100, 98, 48, 45, 49, 54, 49, 54, 56, 56, 56, 49, 99, 98, 99, 54, 164,
                116, 101, 115, 116, 164, 116, 101, 115, 116, 164, 116, 101, 115, 116, 145, 164,
                116, 101, 115, 116, 164, 116, 101, 115, 116
            ]
        );
    }
    // Round-trip: deserialize must exactly invert serialize.
    #[test]
    fn test_serialize_deserialize() {
        let script = Script {
            id: uuid::Uuid::new_v4(),
            name: "test".to_string(),
            description: "test".to_string(),
            shebang: "test".to_string(),
            tags: vec!["test".to_string()],
            script: "test".to_string(),
        };
        let serialized = script.serialize().unwrap();
        let deserialized = Script::deserialize(&serialized.0).unwrap();
        assert_eq!(script, deserialized);
    }
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/settings.rs | crates/atuin-server/src/settings.rs | use std::{io::prelude::*, path::PathBuf};
use atuin_server_database::DbSettings;
use config::{Config, Environment, File as ConfigFile, FileFormat};
use eyre::{Result, eyre};
use fs_err::{File, create_dir_all};
use serde::{Deserialize, Serialize};
static EXAMPLE_CONFIG: &str = include_str!("../server.toml");
/// Outbound email configuration for the server.
#[derive(Default, Clone, Debug, Deserialize, Serialize)]
pub struct Mail {
    // accept both `enabled` and `enable` spellings in config files
    #[serde(alias = "enable")]
    pub enabled: bool,
    /// Configuration for the postmark api client
    /// This is what we use for Atuin Cloud, the forum, etc.
    #[serde(default)]
    pub postmark: Postmark,
    /// Settings for account-verification emails.
    #[serde(default)]
    pub verification: MailVerification,
}
/// Credentials for the Postmark mail provider.
#[derive(Default, Clone, Debug, Deserialize, Serialize)]
pub struct Postmark {
    /// Postmark server token; `None` means Postmark sending is unconfigured.
    #[serde(alias = "token")]
    pub token: Option<String>,
}
/// Sender details for account-verification emails.
#[derive(Default, Clone, Debug, Deserialize, Serialize)]
pub struct MailVerification {
    /// The `From:` address used on verification emails.
    // NOTE: this field previously carried `#[serde(alias = "enable")]`, a
    // copy-paste from `Mail` that wrongly mapped a config key named
    // "enable" onto the sender address; the alias has been removed.
    pub from: String,
    /// The subject line used on verification emails.
    pub subject: String,
}
/// Prometheus metrics listener configuration.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Metrics {
    // accept both `enable` and `enabled` spellings in config files
    #[serde(alias = "enabled")]
    pub enable: bool,
    /// Address the metrics listener binds to.
    pub host: String,
    /// Port the metrics listener binds to.
    pub port: u16,
}
impl Default for Metrics {
fn default() -> Self {
Self {
enable: false,
host: String::from("127.0.0.1"),
port: 9001,
}
}
}
/// Top-level server configuration, loaded by [`Settings::new`].
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Settings {
    /// Address the API server binds to.
    pub host: String,
    /// Port the API server binds to.
    pub port: u16,
    /// Optional URL path prefix under which all routes are nested.
    pub path: String,
    /// Whether new accounts may be registered.
    pub open_registration: bool,
    /// Maximum accepted length of a single history entry.
    pub max_history_length: usize,
    /// Maximum accepted size of a single sync record.
    pub max_record_size: usize,
    /// Page size used when listing history/records.
    pub page_size: i64,
    /// Optional webhook (e.g. Discord) notified on registration.
    pub register_webhook_url: Option<String>,
    /// Display name used when posting to the registration webhook.
    pub register_webhook_username: String,
    pub metrics: Metrics,
    pub tls: Tls,
    pub mail: Mail,
    /// Enable legacy sync v1 routes (history-based sync)
    /// Set to false to use only the newer record-based sync
    pub sync_v1_enabled: bool,
    /// Advertise a version that is not what we are _actually_ running
    /// Many clients compare their version with api.atuin.sh, and if they differ, notify the user
    /// that an update is available.
    /// Now that we take beta releases, we should be able to advertise a different version to avoid
    /// notifying users when the server runs something that is not a stable release.
    pub fake_version: Option<String>,
    // database settings are flattened into the same config namespace
    #[serde(flatten)]
    pub db_settings: DbSettings,
}
impl Settings {
    /// Load server settings by layering built-in defaults, `ATUIN_*`
    /// environment variables, and `server.toml` (writing the bundled
    /// example config on first run if no file exists).
    pub fn new() -> Result<Self> {
        // Resolve the config file location; ATUIN_CONFIG_DIR overrides the
        // platform default config directory.
        let mut config_file = if let Ok(p) = std::env::var("ATUIN_CONFIG_DIR") {
            PathBuf::from(p)
        } else {
            let mut config_file = PathBuf::new();
            let config_dir = atuin_common::utils::config_dir();
            config_file.push(config_dir);
            config_file
        };
        config_file.push("server.toml");
        let mut config_builder = Config::builder()
            .set_default("host", "127.0.0.1")?
            .set_default("port", 8888)?
            .set_default("open_registration", false)?
            .set_default("max_history_length", 8192)?
            .set_default("max_record_size", 1024 * 1024 * 1024)? // pretty chonky
            .set_default("path", "")?
            .set_default("register_webhook_username", "")?
            .set_default("page_size", 1100)?
            .set_default("metrics.enable", false)?
            .set_default("metrics.host", "127.0.0.1")?
            .set_default("metrics.port", 9001)?
            .set_default("mail.enable", false)?
            .set_default("tls.enable", false)?
            .set_default("tls.cert_path", "")?
            .set_default("tls.pkey_path", "")?
            .set_default("sync_v1_enabled", true)?
            // env vars look like ATUIN_METRICS__ENABLE ("__" separates nesting)
            .add_source(
                Environment::with_prefix("atuin")
                    .prefix_separator("_")
                    .separator("__"),
            );
        // NOTE(review): the file source is added after the env source, so
        // values in server.toml take precedence over ATUIN_* env vars —
        // confirm this ordering is intended.
        config_builder = if config_file.exists() {
            config_builder.add_source(ConfigFile::new(
                config_file.to_str().unwrap(),
                FileFormat::Toml,
            ))
        } else {
            // create the config file (from the bundled example) if it does
            // not exist; the defaults above still apply for this run
            create_dir_all(config_file.parent().unwrap())?;
            let mut file = File::create(config_file)?;
            file.write_all(EXAMPLE_CONFIG.as_bytes())?;
            config_builder
        };
        let config = config_builder.build()?;
        config
            .try_deserialize()
            .map_err(|e| eyre!("failed to deserialize: {}", e))
    }
}
/// The bundled example `server.toml`, also written to disk on first run.
pub fn example_config() -> &'static str {
    EXAMPLE_CONFIG
}
/// TLS listener configuration; when disabled the server speaks plain HTTP.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct Tls {
    // accept both `enabled` and `enable` spellings in config files
    #[serde(alias = "enabled")]
    pub enable: bool,
    /// Path to the PEM certificate (chain) file.
    pub cert_path: PathBuf,
    /// Path to the PEM private key file.
    pub pkey_path: PathBuf,
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/router.rs | crates/atuin-server/src/router.rs | use async_trait::async_trait;
use atuin_common::api::{ATUIN_CARGO_VERSION, ATUIN_HEADER_VERSION, ErrorResponse};
use axum::{
Router,
extract::{FromRequestParts, Request},
http::{self, request::Parts},
middleware::Next,
response::{IntoResponse, Response},
routing::{delete, get, patch, post},
};
use eyre::Result;
use tower::ServiceBuilder;
use tower_http::trace::TraceLayer;
use super::handlers;
use crate::{
handlers::{ErrorResponseStatus, RespExt},
metrics,
settings::Settings,
};
use atuin_server_database::{Database, DbError, models::User};
/// Axum extractor that authenticates a request from its
/// `Authorization: Token <session>` header and yields the matching user.
pub struct UserAuth(pub User);
#[async_trait]
impl<DB: Send + Sync> FromRequestParts<AppState<DB>> for UserAuth
where
    DB: Database,
{
    type Rejection = ErrorResponseStatus<'static>;
    /// Extract and validate the `Authorization: Token <session>` header,
    /// resolving the session token to a user via the database.
    ///
    /// Rejections: 400 for a missing/malformed header or wrong scheme,
    /// 403 for an unknown session, 500 on database failure.
    async fn from_request_parts(
        req: &mut Parts,
        state: &AppState<DB>,
    ) -> Result<Self, Self::Rejection> {
        let auth_header = req
            .headers
            .get(http::header::AUTHORIZATION)
            .ok_or_else(|| {
                ErrorResponse::reply("missing authorization header")
                    .with_status(http::StatusCode::BAD_REQUEST)
            })?;
        let auth_header = auth_header.to_str().map_err(|_| {
            ErrorResponse::reply("invalid authorization header encoding")
                .with_status(http::StatusCode::BAD_REQUEST)
        })?;
        // header is "<scheme> <token>"; only the "Token" scheme is accepted
        let (typ, token) = auth_header.split_once(' ').ok_or_else(|| {
            ErrorResponse::reply("invalid authorization header encoding")
                .with_status(http::StatusCode::BAD_REQUEST)
        })?;
        if typ != "Token" {
            return Err(
                ErrorResponse::reply("invalid authorization header encoding")
                    .with_status(http::StatusCode::BAD_REQUEST),
            );
        }
        let user = state
            .database
            .get_session_user(token)
            .await
            .map_err(|e| match e {
                DbError::NotFound => ErrorResponse::reply("session not found")
                    .with_status(http::StatusCode::FORBIDDEN),
                DbError::Other(e) => {
                    tracing::error!(error = ?e, "could not query user session");
                    ErrorResponse::reply("could not query user session")
                        .with_status(http::StatusCode::INTERNAL_SERVER_ERROR)
                }
            })?;
        Ok(UserAuth(user))
    }
}
/// Fallback handler for unmatched routes.
async fn teapot() -> impl IntoResponse {
    // This used to return 418: 🫖
    // Much as it was fun, it wasn't as useful or informative as it should be
    let status = http::StatusCode::NOT_FOUND;
    (status, "404 not found")
}
/// Attach the traditional `X-Clacks-Overhead` memorial header to every
/// response.
async fn clacks_overhead(request: Request, next: Next) -> Response {
    const HEADER_NAME: &str = "X-Clacks-Overhead";
    const HEADER_VALUE: &str = "GNU Terry Pratchett, Kris Nova";
    let mut response = next.run(request).await;
    response
        .headers_mut()
        .insert(HEADER_NAME, HEADER_VALUE.parse().unwrap());
    response
}
/// Ensure that we only try and sync with clients on the same major version
async fn semver(request: Request, next: Next) -> Response {
    let mut response = next.run(request).await;
    // advertise the server's cargo version; clients compare majors
    let advertised = ATUIN_CARGO_VERSION.parse().unwrap();
    response
        .headers_mut()
        .insert(ATUIN_HEADER_VERSION, advertised);
    response
}
/// Shared application state handed to every handler: the database handle
/// plus the parsed server settings.
#[derive(Clone)]
pub struct AppState<DB: Database> {
    pub database: DB,
    pub settings: Settings,
}
/// Build the full application router: health/index routes, optional legacy
/// v1 sync routes, account and record routes, optional path-prefix nesting,
/// and the shared middleware stack.
pub fn router<DB: Database>(database: DB, settings: Settings) -> Router {
    let mut routes = Router::new()
        .route("/", get(handlers::index))
        .route("/healthz", get(handlers::health::health_check));
    // Sync v1 routes - can be disabled in favor of record-based sync
    if settings.sync_v1_enabled {
        routes = routes
            .route("/sync/count", get(handlers::history::count))
            .route("/sync/history", get(handlers::history::list))
            .route("/sync/calendar/:focus", get(handlers::history::calendar))
            .route("/sync/status", get(handlers::status::status))
            .route("/history", post(handlers::history::add))
            .route("/history", delete(handlers::history::delete));
    }
    let routes = routes
        .route("/user/:username", get(handlers::user::get))
        .route("/account", delete(handlers::user::delete))
        .route("/account/password", patch(handlers::user::change_password))
        .route("/register", post(handlers::user::register))
        .route("/login", post(handlers::user::login))
        .route("/record", post(handlers::record::post))
        .route("/record", get(handlers::record::index))
        .route("/record/next", get(handlers::record::next))
        .route("/api/v0/me", get(handlers::v0::me::get))
        .route("/api/v0/account/verify", post(handlers::user::verify_user))
        .route(
            "/api/v0/account/send-verification",
            post(handlers::user::send_verification),
        )
        .route("/api/v0/record", post(handlers::v0::record::post))
        .route("/api/v0/record", get(handlers::v0::record::index))
        .route("/api/v0/record/next", get(handlers::v0::record::next))
        .route("/api/v0/store", delete(handlers::v0::store::delete));
    // Optionally nest everything under a configured path prefix.
    let path = settings.path.as_str();
    if path.is_empty() {
        routes
    } else {
        Router::new().nest(path, routes)
    }
    .fallback(teapot)
    .with_state(AppState { database, settings })
    // middleware runs for every route, including the fallback
    .layer(
        ServiceBuilder::new()
            .layer(axum::middleware::from_fn(clacks_overhead))
            .layer(TraceLayer::new_for_http())
            .layer(axum::middleware::from_fn(metrics::track_metrics))
            .layer(axum::middleware::from_fn(semver)),
    )
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/lib.rs | crates/atuin-server/src/lib.rs | #![forbid(unsafe_code)]
use std::future::Future;
use std::net::SocketAddr;
use atuin_server_database::Database;
use axum::{Router, serve};
use axum_server::Handle;
use axum_server::tls_rustls::RustlsConfig;
use eyre::{Context, Result, eyre};
mod handlers;
mod metrics;
mod router;
mod utils;
pub use settings::Settings;
pub use settings::example_config;
pub mod settings;
use tokio::net::TcpListener;
use tokio::signal;
#[cfg(target_family = "unix")]
async fn shutdown_signal() {
let mut term = signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to register signal handler");
let mut interrupt = signal::unix::signal(signal::unix::SignalKind::interrupt())
.expect("failed to register signal handler");
tokio::select! {
_ = term.recv() => {},
_ = interrupt.recv() => {},
};
eprintln!("Shutting down gracefully...");
}
/// Resolve when Ctrl-C is received.
#[cfg(target_family = "windows")]
async fn shutdown_signal() {
    let mut ctrl_c = signal::windows::ctrl_c().expect("failed to register signal handler");
    ctrl_c.recv().await;
    eprintln!("Shutting down gracefully...");
}
/// Start the server on `addr`, choosing TLS or plain TCP based on the
/// settings, and run until a shutdown signal is received.
pub async fn launch<Db: Database>(settings: Settings, addr: SocketAddr) -> Result<()> {
    if settings.tls.enable {
        launch_with_tls::<Db>(settings, addr, shutdown_signal()).await
    } else {
        launch_with_tcp_listener::<Db>(
            settings,
            TcpListener::bind(addr)
                .await
                .context("could not connect to socket")?,
            shutdown_signal(),
        )
        .await
    }
}
/// Serve the app over a pre-bound plain-TCP listener until `shutdown`
/// resolves (useful for tests that bind their own port).
pub async fn launch_with_tcp_listener<Db: Database>(
    settings: Settings,
    listener: TcpListener,
    shutdown: impl Future<Output = ()> + Send + 'static,
) -> Result<()> {
    let r = make_router::<Db>(settings).await?;
    serve(listener, r.into_make_service())
        .with_graceful_shutdown(shutdown)
        .await?;
    Ok(())
}
/// Serve the app over TLS on `addr` until `shutdown` resolves, then drain
/// connections gracefully.
async fn launch_with_tls<Db: Database>(
    settings: Settings,
    addr: SocketAddr,
    shutdown: impl Future<Output = ()>,
) -> Result<()> {
    // rustls needs a process-wide crypto provider installed before any
    // TLS config can be built.
    rustls::crypto::ring::default_provider()
        .install_default()
        .map_err(|_| eyre!("Failed to install default crypto provider"))?;
    let rustls_config = RustlsConfig::from_pem_file(
        settings.tls.cert_path.clone(),
        settings.tls.pkey_path.clone(),
    )
    .await
    .map_err(|_| eyre!("Failed to load TLS key and/or certificate"))?;
    let r = make_router::<Db>(settings).await?;
    let handle = Handle::new();
    let server = axum_server::bind_rustls(addr, rustls_config)
        .handle(handle.clone())
        .serve(r.into_make_service());
    // Run until the server exits on its own or the shutdown future fires;
    // in the latter case ask axum_server to drain gracefully.
    tokio::select! {
        _ = server => {}
        _ = shutdown => {
            handle.graceful_shutdown(None);
        }
    }
    Ok(())
}
// The separate listener means it's much easier to ensure metrics are not accidentally exposed to
// the public.
/// Serve `/metrics` (Prometheus text format) on its own host:port until a
/// shutdown signal is received.
pub async fn launch_metrics_server(host: String, port: u16) -> Result<()> {
    let listener = TcpListener::bind((host, port))
        .await
        .context("failed to bind metrics tcp")?;
    let recorder_handle = metrics::setup_metrics_recorder();
    let router = Router::new().route(
        "/metrics",
        axum::routing::get(move || std::future::ready(recorder_handle.render())),
    );
    serve(listener, router.into_make_service())
        .with_graceful_shutdown(shutdown_signal())
        .await?;
    Ok(())
}
/// Connect to the database described by `settings` and build the
/// application router around it.
async fn make_router<Db: Database>(settings: Settings) -> Result<Router, eyre::Error> {
    let db = Db::new(&settings.db_settings)
        .await
        .wrap_err_with(|| format!("failed to connect to db: {:?}", settings.db_settings))?;
    Ok(router::router(db, settings))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/utils.rs | crates/atuin-server/src/utils.rs | use eyre::Result;
use semver::{Version, VersionReq};
/// Return whether `user_agent` (formatted like "atuin/x.y.z") satisfies the
/// semver requirement `req`; an empty user agent never matches.
pub fn client_version_min(user_agent: &str, req: &str) -> Result<bool> {
    if user_agent.is_empty() {
        return Ok(false);
    }
    let requirement = VersionReq::parse(req)?;
    let client = Version::parse(&user_agent.replace("atuin/", ""))?;
    Ok(requirement.matches(&client))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/metrics.rs | crates/atuin-server/src/metrics.rs | use std::time::Instant;
use axum::{
extract::{MatchedPath, Request},
middleware::Next,
response::IntoResponse,
};
use metrics_exporter_prometheus::{Matcher, PrometheusBuilder, PrometheusHandle};
/// Install the global Prometheus metrics recorder and return a handle used
/// to render `/metrics`. Panics if a recorder is already installed, which
/// is only expected once at startup.
pub fn setup_metrics_recorder() -> PrometheusHandle {
    // bucket boundaries (seconds) for the request-duration histogram
    const EXPONENTIAL_SECONDS: &[f64] = &[
        0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0,
    ];
    PrometheusBuilder::new()
        .set_buckets_for_metric(
            Matcher::Full("http_requests_duration_seconds".to_string()),
            EXPONENTIAL_SECONDS,
        )
        .unwrap()
        .install_recorder()
        .unwrap()
}
/// Middleware to record some common HTTP metrics
/// Generic over B to allow for arbitrary body types (eg Vec<u8>, Streams, a deserialized thing, etc)
/// Someday tower-http might provide a metrics middleware: https://github.com/tower-rs/tower-http/issues/57
///
/// Records `http_requests_total` and `http_requests_duration_seconds`,
/// labelled by method, matched route path, and response status.
pub async fn track_metrics(req: Request, next: Next) -> impl IntoResponse {
    let start = Instant::now();
    // Prefer the matched route template (low cardinality) over the raw URI.
    let path = match req.extensions().get::<MatchedPath>() {
        Some(matched_path) => matched_path.as_str().to_owned(),
        _ => req.uri().path().to_owned(),
    };
    let method = req.method().clone();
    // Run the rest of the request handling first, so we can measure it and get response
    // codes.
    let response = next.run(req).await;
    let latency = start.elapsed().as_secs_f64();
    let status = response.status().as_u16().to_string();
    let labels = [
        ("method", method.to_string()),
        ("path", path),
        ("status", status),
    ];
    metrics::counter!("http_requests_total", &labels).increment(1);
    metrics::histogram!("http_requests_duration_seconds", &labels).record(latency);
    response
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/user.rs | crates/atuin-server/src/handlers/user.rs | use std::borrow::Borrow;
use std::collections::HashMap;
use std::time::Duration;
use argon2::{
Algorithm, Argon2, Params, PasswordHash, PasswordHasher, PasswordVerifier, Version,
password_hash::SaltString,
};
use axum::{
Json,
extract::{Path, State},
http::StatusCode,
};
use metrics::counter;
use postmark::{Query, reqwest::PostmarkClient};
use rand::rngs::OsRng;
use tracing::{debug, error, info, instrument};
use super::{ErrorResponse, ErrorResponseStatus, RespExt};
use crate::router::{AppState, UserAuth};
use atuin_server_database::{
Database, DbError,
models::{NewSession, NewUser},
};
use reqwest::header::CONTENT_TYPE;
use atuin_common::{api::*, utils::crypto_random_string};
/// Check `password` against a PHC-format Argon2id `hash`.
/// Any failure (unparseable hash or wrong password) yields `false`.
pub fn verify_str(hash: &str, password: &str) -> bool {
    match PasswordHash::new(hash) {
        Ok(parsed) => Argon2::new(Algorithm::Argon2id, Version::V0x13, Params::default())
            .verify_password(password.as_bytes(), &parsed)
            .is_ok(),
        Err(_) => false,
    }
}
// Fire the Discord registration webhook exactly once, best effort ("at
// most once"). Failures are logged and otherwise ignored — registration
// must never depend on the webhook being reachable.
async fn send_register_hook(url: &str, username: String, registered: String) {
    let payload = HashMap::from([
        ("username", username),
        ("content", format!("{registered} has just signed up!")),
    ]);
    let result = reqwest::Client::new()
        .post(url)
        .timeout(Duration::new(5, 0))
        .header(CONTENT_TYPE, "application/json")
        .json(&payload)
        .send()
        .await;
    match result {
        Ok(_) => info!("register webhook sent ok!"),
        Err(e) => error!("failed to send register webhook: {}", e),
    }
}
/// Look up a user by username, returning just the public profile.
/// 404 if the user does not exist, 500 on database failure.
#[instrument(skip_all, fields(user.username = username.as_str()))]
pub async fn get<DB: Database>(
    Path(username): Path<String>,
    state: State<AppState<DB>>,
) -> Result<Json<UserResponse>, ErrorResponseStatus<'static>> {
    let db = &state.0.database;
    let user = match db.get_user(username.as_ref()).await {
        Ok(user) => user,
        Err(DbError::NotFound) => {
            debug!("user not found: {}", username);
            return Err(ErrorResponse::reply("user not found").with_status(StatusCode::NOT_FOUND));
        }
        Err(DbError::Other(err)) => {
            error!("database error: {}", err);
            return Err(ErrorResponse::reply("database error")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR));
        }
    };
    // only expose the username, never email or password hash
    Ok(Json(UserResponse {
        username: user.username,
    }))
}
/// Register a new account and create its first session token.
///
/// Rejects when registration is closed or the username contains characters
/// outside `[A-Za-z0-9-]`. On success, optionally notifies the configured
/// registration webhook and bumps the registration counter.
#[instrument(skip_all)]
pub async fn register<DB: Database>(
    state: State<AppState<DB>>,
    Json(register): Json<RegisterRequest>,
) -> Result<Json<RegisterResponse>, ErrorResponseStatus<'static>> {
    if !state.settings.open_registration {
        return Err(
            ErrorResponse::reply("this server is not open for registrations")
                .with_status(StatusCode::BAD_REQUEST),
        );
    }
    // NOTE(review): an empty username passes this loop vacuously — confirm
    // whether the database layer rejects it.
    for c in register.username.chars() {
        match c {
            'a'..='z' | 'A'..='Z' | '0'..='9' | '-' => {}
            _ => {
                return Err(ErrorResponse::reply(
                    "Only alphanumeric and hyphens (-) are allowed in usernames",
                )
                .with_status(StatusCode::BAD_REQUEST));
            }
        }
    }
    let hashed = hash_secret(&register.password);
    let new_user = NewUser {
        email: register.email.clone(),
        username: register.username.clone(),
        password: hashed,
    };
    let db = &state.0.database;
    let user_id = match db.add_user(&new_user).await {
        Ok(id) => id,
        Err(e) => {
            error!("failed to add user: {}", e);
            return Err(
                ErrorResponse::reply("failed to add user").with_status(StatusCode::BAD_REQUEST)
            );
        }
    };
    // 24 bytes encoded as base64
    let token = crypto_random_string::<24>();
    let new_session = NewSession {
        user_id,
        token: (&token).into(),
    };
    if let Some(url) = &state.settings.register_webhook_url {
        // Could probs be run on another thread, but it's ok atm
        send_register_hook(
            url,
            state.settings.register_webhook_username.clone(),
            register.username,
        )
        .await;
    }
    counter!("atuin_users_registered").increment(1);
    match db.add_session(&new_session).await {
        Ok(_) => Ok(Json(RegisterResponse { session: token })),
        Err(e) => {
            error!("failed to add session: {}", e);
            Err(ErrorResponse::reply("failed to register user")
                .with_status(StatusCode::BAD_REQUEST))
        }
    }
}
/// Delete the authenticated user's account.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn delete<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<Json<DeleteUserResponse>, ErrorResponseStatus<'static>> {
    debug!("request to delete user {}", user.id);
    match state.0.database.delete_user(&user).await {
        Ok(_) => {
            counter!("atuin_users_deleted").increment(1);
            Ok(Json(DeleteUserResponse {}))
        }
        Err(e) => {
            error!("failed to delete user: {}", e);
            Err(ErrorResponse::reply("failed to delete user")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR))
        }
    }
}
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn send_verification<DB: Database>(
UserAuth(user): UserAuth,
state: State<AppState<DB>>,
) -> Result<Json<SendVerificationResponse>, ErrorResponseStatus<'static>> {
let settings = state.0.settings;
debug!("request to verify user {}", user.username);
if !settings.mail.enabled {
return Ok(Json(SendVerificationResponse {
email_sent: false,
verified: false,
}));
}
if user.verified.is_some() {
return Ok(Json(SendVerificationResponse {
email_sent: false,
verified: true,
}));
}
// TODO: if we ever add another mail provider, can match on them all here.
let postmark_token = match settings.mail.postmark.token {
Some(token) => token,
_ => {
error!("Failed to verify email: got None for postmark token");
return Err(ErrorResponse::reply("mail not configured")
.with_status(StatusCode::INTERNAL_SERVER_ERROR));
}
};
let db = &state.0.database;
let verification_token = db
.user_verification_token(user.id)
.await
.expect("Failed to verify");
debug!("Generated verification token, emailing user");
let client = PostmarkClient::builder()
.base_url("https://api.postmarkapp.com/")
.server_token(postmark_token)
.build();
let req = postmark::api::email::SendEmailRequest::builder()
.from(settings.mail.verification.from)
.subject(settings.mail.verification.subject)
.to(user.email)
.body(postmark::api::Body::text(format!(
"Please run the following command to finalize your Atuin account verification. It is valid for 15 minutes:\n\natuin account verify --token '{verification_token}'"
)))
.build();
req.execute(&client)
.await
.expect("postmark email request failed");
debug!("Email sent");
Ok(Json(SendVerificationResponse {
email_sent: true,
verified: false,
}))
}
/// Mark the authenticated user verified if the submitted token matches the
/// stored verification token; idempotent for already-verified users.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn verify_user<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
    Json(token_request): Json<VerificationTokenRequest>,
) -> Result<Json<VerificationTokenResponse>, ErrorResponseStatus<'static>> {
    let db = state.0.database;
    if user.verified.is_some() {
        return Ok(Json(VerificationTokenResponse { verified: true }));
    }
    let token = db.user_verification_token(user.id).await.map_err(|e| {
        error!("Failed to read user token: {e}");
        ErrorResponse::reply("Failed to verify").with_status(StatusCode::INTERNAL_SERVER_ERROR)
    })?;
    // NOTE(review): plain `==` is not a constant-time comparison; confirm
    // whether timing attacks on the verification token matter here.
    if token_request.token == token {
        db.verify_user(user.id).await.map_err(|e| {
            error!("Failed to verify user: {e}");
            ErrorResponse::reply("Failed to verify").with_status(StatusCode::INTERNAL_SERVER_ERROR)
        })?;
    } else {
        info!(
            "Incorrect verification token {} vs {}",
            token_request.token, token
        );
        return Ok(Json(VerificationTokenResponse { verified: false }));
    }
    Ok(Json(VerificationTokenResponse { verified: true }))
}
/// Change the authenticated user's password after re-checking the current
/// one; 401 on a wrong current password, 500 on database failure.
#[instrument(skip_all, fields(user.id = user.id, change_password))]
pub async fn change_password<DB: Database>(
    UserAuth(mut user): UserAuth,
    state: State<AppState<DB>>,
    Json(change_password): Json<ChangePasswordRequest>,
) -> Result<Json<ChangePasswordResponse>, ErrorResponseStatus<'static>> {
    // The caller must prove knowledge of the current password first.
    if !verify_str(
        user.password.as_str(),
        change_password.current_password.borrow(),
    ) {
        return Err(
            ErrorResponse::reply("password is not correct").with_status(StatusCode::UNAUTHORIZED)
        );
    }
    user.password = hash_secret(&change_password.new_password);
    let db = &state.0.database;
    if let Err(e) = db.update_user_password(&user).await {
        error!("failed to change user password: {}", e);
        return Err(ErrorResponse::reply("failed to change user password")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR));
    };
    Ok(Json(ChangePasswordResponse {}))
}
/// Log a user in: look up the account and its session, then check the
/// password. 404 for unknown user/session, 401 for a wrong password,
/// 500 on database failure.
#[instrument(skip_all, fields(user.username = login.username.as_str()))]
pub async fn login<DB: Database>(
    state: State<AppState<DB>>,
    login: Json<LoginRequest>,
) -> Result<Json<LoginResponse>, ErrorResponseStatus<'static>> {
    let db = &state.0.database;
    let user = match db.get_user(login.username.borrow()).await {
        Ok(u) => u,
        Err(DbError::NotFound) => {
            return Err(ErrorResponse::reply("user not found").with_status(StatusCode::NOT_FOUND));
        }
        Err(DbError::Other(e)) => {
            error!("failed to get user {}: {}", login.username.clone(), e);
            return Err(ErrorResponse::reply("database error")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR));
        }
    };
    let session = match db.get_user_session(&user).await {
        Ok(u) => u,
        Err(DbError::NotFound) => {
            debug!("user session not found for user id={}", user.id);
            return Err(ErrorResponse::reply("user not found").with_status(StatusCode::NOT_FOUND));
        }
        Err(DbError::Other(err)) => {
            error!("database error for user {}: {}", login.username, err);
            return Err(ErrorResponse::reply("database error")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR));
        }
    };
    // password check happens last, after both lookups succeed
    let verified = verify_str(user.password.as_str(), login.password.borrow());
    if !verified {
        debug!(user = user.username, "login failed");
        return Err(
            ErrorResponse::reply("password is not correct").with_status(StatusCode::UNAUTHORIZED)
        );
    }
    debug!(user = user.username, "login success");
    Ok(Json(LoginResponse {
        session: session.token,
    }))
}
/// Hash a password with Argon2id (version 0x13, default params) and a
/// freshly generated random salt, returning the PHC-format hash string.
///
/// The `.unwrap()` panics if hashing fails — presumably only possible with
/// invalid params/salt, which the defaults avoid (confirm against argon2 docs).
fn hash_secret(password: &str) -> String {
    let arg2 = Argon2::new(Algorithm::Argon2id, Version::V0x13, Params::default());
    let salt = SaltString::generate(&mut OsRng);
    let hash = arg2.hash_password(password.as_bytes(), &salt).unwrap();
    hash.to_string()
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/status.rs | crates/atuin-server/src/handlers/status.rs | use axum::{Json, extract::State, http::StatusCode};
use tracing::instrument;
use super::{ErrorResponse, ErrorResponseStatus, RespExt};
use crate::router::{AppState, UserAuth};
use atuin_server_database::Database;
use atuin_common::api::*;
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Sync status for the calling user: history count, deleted ids, server
/// version, and the server's page size.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn status<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<Json<StatusResponse>, ErrorResponseStatus<'static>> {
    let db = &state.0.database;
    // Deleted-history lookup is best-effort: a failure degrades to an empty list.
    let deleted = db.deleted_history(&user).await.unwrap_or(vec![]);
    let count = match db.count_history_cached(&user).await {
        // By default read out the cached value
        Ok(count) => count,
        // If that fails, fallback on a full COUNT. Cache is built on a POST
        // only
        Err(_) => match db.count_history(&user).await {
            Ok(count) => count,
            Err(_) => {
                return Err(ErrorResponse::reply("failed to query history count")
                    .with_status(StatusCode::INTERNAL_SERVER_ERROR));
            }
        },
    };
    tracing::debug!(user = user.username, "requested sync status");
    Ok(Json(StatusResponse {
        count,
        deleted,
        username: user.username,
        version: VERSION.to_string(),
        page_size: state.settings.page_size,
    }))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/history.rs | crates/atuin-server/src/handlers/history.rs | use std::{collections::HashMap, convert::TryFrom};
use axum::{
Json,
extract::{Path, Query, State},
http::{HeaderMap, StatusCode},
};
use metrics::counter;
use time::{Month, UtcOffset};
use tracing::{debug, error, instrument};
use super::{ErrorResponse, ErrorResponseStatus, RespExt};
use crate::{
router::{AppState, UserAuth},
utils::client_version_min,
};
use atuin_server_database::{
Database,
calendar::{TimePeriod, TimePeriodInfo},
models::NewHistory,
};
use atuin_common::api::*;
/// Return the user's total history count, preferring the cached value.
///
/// The cache is only built on POST, so a cache miss falls back to a full
/// COUNT; if both queries fail the handler answers 500.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn count<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<Json<CountResponse>, ErrorResponseStatus<'static>> {
    let db = &state.0.database;
    match db.count_history_cached(&user).await {
        // By default read out the cached value
        Ok(count) => Ok(Json(CountResponse { count })),
        // If that fails, fallback on a full COUNT. Cache is built on a POST
        // only
        Err(_) => match db.count_history(&user).await {
            Ok(count) => Ok(Json(CountResponse { count })),
            Err(_) => Err(ErrorResponse::reply("failed to query history count")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR)),
        },
    }
}
/// Return one page of history for sync.
///
/// Clients reporting a version >= 15.0.0 (via User-Agent) get the server's
/// configured page size; older clients are pinned to 100. Timestamps before
/// the Unix epoch are rejected with 400.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn list<DB: Database>(
    req: Query<SyncHistoryRequest>,
    UserAuth(user): UserAuth,
    headers: HeaderMap,
    state: State<AppState<DB>>,
) -> Result<Json<SyncHistoryResponse>, ErrorResponseStatus<'static>> {
    let db = &state.0.database;
    // Missing or non-UTF8 User-Agent degrades to "", i.e. the legacy page size.
    let agent = headers
        .get("user-agent")
        .map_or("", |v| v.to_str().unwrap_or(""));
    let variable_page_size = client_version_min(agent, ">=15.0.0").unwrap_or(false);
    let page_size = if variable_page_size {
        state.settings.page_size
    } else {
        100
    };
    if req.sync_ts.unix_timestamp_nanos() < 0 || req.history_ts.unix_timestamp_nanos() < 0 {
        error!("client asked for history from < epoch 0");
        counter!("atuin_history_epoch_before_zero").increment(1);
        return Err(
            ErrorResponse::reply("asked for history from before epoch 0")
                .with_status(StatusCode::BAD_REQUEST),
        );
    }
    let history = db
        .list_history(&user, req.sync_ts, req.history_ts, &req.host, page_size)
        .await;
    if let Err(e) = history {
        error!("failed to load history: {}", e);
        return Err(ErrorResponse::reply("failed to load history")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR));
    }
    // Safe: the Err case returned above.
    let history: Vec<String> = history
        .unwrap()
        .iter()
        .map(|i| i.data.to_string())
        .collect();
    debug!(
        "loaded {} items of history for user {}",
        history.len(),
        user.id
    );
    counter!("atuin_history_returned").increment(history.len() as u64);
    Ok(Json(SyncHistoryResponse { history }))
}
/// Delete a single history item, identified by its client-assigned id.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn delete<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
    Json(req): Json<DeleteHistoryRequest>,
) -> Result<Json<MessageResponse>, ErrorResponseStatus<'static>> {
    let db = &state.0.database;
    // user_id is the ID of the history, as set by the user (the server has its own ID)
    let deleted = db.delete_history(&user, req.client_id).await;
    if let Err(e) = deleted {
        error!("failed to delete history: {}", e);
        return Err(ErrorResponse::reply("failed to delete history")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR));
    }
    Ok(Json(MessageResponse {
        message: String::from("deleted OK"),
    }))
}
/// Upload a batch of history items.
///
/// Items whose payload exceeds `max_history_length` are dropped (0 means
/// unlimited) so the rest of the batch can still be stored; only a failed
/// DB insert produces an error response.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn add<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
    Json(req): Json<Vec<AddHistoryRequest>>,
) -> Result<(), ErrorResponseStatus<'static>> {
    let State(AppState { database, settings }) = state;
    debug!("request to add {} history items", req.len());
    counter!("atuin_history_uploaded").increment(req.len() as u64);
    let mut history: Vec<NewHistory> = req
        .into_iter()
        .map(|h| NewHistory {
            client_id: h.id,
            user_id: user.id,
            hostname: h.hostname,
            timestamp: h.timestamp,
            data: h.data,
        })
        .collect();
    history.retain(|h| {
        // keep if within limit, or limit is 0 (unlimited)
        let keep = h.data.len() <= settings.max_history_length || settings.max_history_length == 0;
        // Don't return an error here. We want to insert as much of the
        // history list as we can, so log the error and continue going.
        if !keep {
            counter!("atuin_history_too_long").increment(1);
            tracing::warn!(
                "history too long, got length {}, max {}",
                h.data.len(),
                settings.max_history_length
            );
        }
        keep
    });
    if let Err(e) = database.add_history(&history).await {
        error!("failed to add history: {}", e);
        return Err(ErrorResponse::reply("failed to add history")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR));
    };
    Ok(())
}
/// Query parameters for the calendar endpoint.
#[derive(serde::Deserialize, Debug)]
pub struct CalendarQuery {
    /// Year to query; defaults to 0 when absent.
    #[serde(default = "serde_calendar::zero")]
    year: i32,
    /// Month to query (1-12, validated in the handler); defaults to 1.
    #[serde(default = "serde_calendar::one")]
    month: u8,
    /// Client timezone offset; defaults to UTC.
    #[serde(default = "serde_calendar::utc")]
    tz: UtcOffset,
}
/// Serde `default = "..."` providers for [`CalendarQuery`].
mod serde_calendar {
    use time::UtcOffset;
    /// Default year: 0.
    pub fn zero() -> i32 {
        0
    }
    /// Default month: January.
    pub fn one() -> u8 {
        1
    }
    /// Default timezone: UTC.
    pub fn utc() -> UtcOffset {
        UtcOffset::UTC
    }
}
/// Calendar aggregation: bucket the user's history by year/month/day.
///
/// `focus` (path segment) selects the granularity; an invalid focus or
/// out-of-range month is a 400, a failed query a 500.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn calendar<DB: Database>(
    Path(focus): Path<String>,
    Query(params): Query<CalendarQuery>,
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<Json<HashMap<u64, TimePeriodInfo>>, ErrorResponseStatus<'static>> {
    let focus = focus.as_str();
    let year = params.year;
    let month = Month::try_from(params.month).map_err(|e| ErrorResponseStatus {
        error: ErrorResponse {
            reason: e.to_string().into(),
        },
        status: StatusCode::BAD_REQUEST,
    })?;
    let period = match focus {
        "year" => TimePeriod::Year,
        "month" => TimePeriod::Month { year },
        "day" => TimePeriod::Day { year, month },
        _ => {
            return Err(ErrorResponse::reply("invalid focus: use year/month/day")
                .with_status(StatusCode::BAD_REQUEST));
        }
    };
    let db = &state.0.database;
    let focus = db.calendar(&user, period, params.tz).await.map_err(|_| {
        ErrorResponse::reply("failed to query calendar")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR)
    })?;
    Ok(Json(focus))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/record.rs | crates/atuin-server/src/handlers/record.rs | use axum::{Json, http::StatusCode, response::IntoResponse};
use serde_json::json;
use tracing::instrument;
use super::{ErrorResponse, ErrorResponseStatus, RespExt};
use crate::router::UserAuth;
use atuin_common::record::{EncryptedData, Record};
/// Upload endpoint for the old (deprecated) record store.
///
/// Always rejects with 400 so that old clients stop syncing via this path.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn post(UserAuth(user): UserAuth) -> Result<(), ErrorResponseStatus<'static>> {
    // anyone who has actually used the old record store (a very small number) will see this error
    // upon trying to sync.
    // 1. The status endpoint will say that the server has nothing
    // 2. The client will try to upload local records
    // 3. Sync will fail with this error
    // If the client has no local records, they will see the empty index and do nothing. For the
    // vast majority of users, this is the case.
    Err(
        ErrorResponse::reply("record store deprecated; please upgrade")
            .with_status(StatusCode::BAD_REQUEST),
    )
}
/// Old record-store index: always reports an empty host map, telling
/// clients there is nothing to download.
/// NOTE(review): the body is returned via `to_string().into_response()`,
/// not `Json`, so the content-type may not be application/json — confirm
/// clients accept this.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn index(UserAuth(user): UserAuth) -> axum::response::Response {
    let ret = json!({
        "hosts": {}
    });
    ret.to_string().into_response()
}
/// Old record-store download endpoint: the store is deprecated, so there
/// are never any records to return.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn next(
    UserAuth(user): UserAuth,
) -> Result<Json<Vec<Record<EncryptedData>>>, ErrorResponseStatus<'static>> {
    Ok(Json(Vec::new()))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/mod.rs | crates/atuin-server/src/handlers/mod.rs | use atuin_common::api::{ErrorResponse, IndexResponse};
use atuin_server_database::Database;
use axum::{Json, extract::State, http, response::IntoResponse};
use crate::router::AppState;
pub mod health;
pub mod history;
pub mod record;
pub mod status;
pub mod user;
pub mod v0;
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Server front page: homage text, total history count, and the version
/// advertised to clients (`fake_version` overrides the real one when set).
pub async fn index<DB: Database>(state: State<AppState<DB>>) -> Json<IndexResponse> {
    let homage = r#""Through the fathomless deeps of space swims the star turtle Great A'Tuin, bearing on its back the four giant elephants who carry on their shoulders the mass of the Discworld." -- Sir Terry Pratchett"#;
    // Error with a -1 response
    // It's super unlikely this will happen
    let count = state.database.total_history().await.unwrap_or(-1);
    let version = state
        .settings
        .fake_version
        .clone()
        .unwrap_or(VERSION.to_string());
    Json(IndexResponse {
        homage: homage.to_string(),
        total_history: count,
        version,
    })
}
/// Render the error as its HTTP status plus the JSON error body.
impl IntoResponse for ErrorResponseStatus<'_> {
    fn into_response(self) -> axum::response::Response {
        (self.status, Json(self.error)).into_response()
    }
}
/// An [`ErrorResponse`] body paired with the HTTP status to send it with.
pub struct ErrorResponseStatus<'a> {
    pub error: ErrorResponse<'a>,
    pub status: http::StatusCode,
}
/// Convenience helpers for building error replies.
pub trait RespExt<'a> {
    /// Attach an HTTP status code to this error body.
    fn with_status(self, status: http::StatusCode) -> ErrorResponseStatus<'a>;
    /// Build an error body from a reason string.
    fn reply(reason: &'a str) -> Self;
}
impl<'a> RespExt<'a> for ErrorResponse<'a> {
fn with_status(self, status: http::StatusCode) -> ErrorResponseStatus<'a> {
ErrorResponseStatus {
error: self,
status,
}
}
fn reply(reason: &'a str) -> ErrorResponse<'a> {
Self {
reason: reason.into(),
}
}
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/health.rs | crates/atuin-server/src/handlers/health.rs | use axum::{Json, http, response::IntoResponse};
use serde::Serialize;
/// JSON body returned by the health-check endpoint.
#[derive(Serialize)]
pub struct HealthResponse {
    /// Always "healthy" when the server can answer at all.
    pub status: &'static str,
}
/// Liveness probe: always answers 200 OK with a static body.
/// Performs no database or dependency checks.
pub async fn health_check() -> impl IntoResponse {
    (
        http::StatusCode::OK,
        Json(HealthResponse { status: "healthy" }),
    )
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/v0/store.rs | crates/atuin-server/src/handlers/v0/store.rs | use axum::{extract::Query, extract::State, http::StatusCode};
use metrics::counter;
use serde::Deserialize;
use tracing::{error, instrument};
use crate::{
handlers::{ErrorResponse, ErrorResponseStatus, RespExt},
router::{AppState, UserAuth},
};
use atuin_server_database::Database;
/// Query parameters for store deletion (currently none are defined).
#[derive(Deserialize)]
pub struct DeleteParams {}
/// Delete the calling user's entire sync store.
///
/// Responds 500 (and bumps `atuin_store_delete_failed`) if the database
/// delete fails; bumps `atuin_store_deleted` on success.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn delete<DB: Database>(
    _params: Query<DeleteParams>,
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<(), ErrorResponseStatus<'static>> {
    let State(AppState {
        database,
        settings: _,
    }) = state;
    if let Err(e) = database.delete_store(&user).await {
        counter!("atuin_store_delete_failed").increment(1);
        error!("failed to delete store {e:?}");
        return Err(ErrorResponse::reply("failed to delete store")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR));
    }
    counter!("atuin_store_deleted").increment(1);
    Ok(())
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/v0/me.rs | crates/atuin-server/src/handlers/v0/me.rs | use axum::Json;
use tracing::instrument;
use crate::handlers::ErrorResponseStatus;
use crate::router::UserAuth;
use atuin_common::api::*;
/// Return the authenticated user's username.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn get(
    UserAuth(user): UserAuth,
) -> Result<Json<MeResponse>, ErrorResponseStatus<'static>> {
    Ok(Json(MeResponse {
        username: user.username,
    }))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/v0/record.rs | crates/atuin-server/src/handlers/v0/record.rs | use axum::{Json, extract::Query, extract::State, http::StatusCode};
use metrics::counter;
use serde::Deserialize;
use tracing::{error, instrument};
use crate::{
handlers::{ErrorResponse, ErrorResponseStatus, RespExt},
router::{AppState, UserAuth},
};
use atuin_server_database::Database;
use atuin_common::record::{EncryptedData, HostId, Record, RecordIdx, RecordStatus};
/// Upload a batch of records to the store.
///
/// The whole batch is rejected with 400 if any record exceeds
/// `max_record_size` (0 means unlimited) — unlike history upload, nothing
/// is partially stored.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn post<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
    Json(records): Json<Vec<Record<EncryptedData>>>,
) -> Result<(), ErrorResponseStatus<'static>> {
    let State(AppState { database, settings }) = state;
    tracing::debug!(
        count = records.len(),
        user = user.username,
        "request to add records"
    );
    counter!("atuin_record_uploaded").increment(records.len() as u64);
    let keep = records
        .iter()
        .all(|r| r.data.data.len() <= settings.max_record_size || settings.max_record_size == 0);
    if !keep {
        counter!("atuin_record_too_large").increment(1);
        return Err(
            ErrorResponse::reply("could not add records; record too large")
                .with_status(StatusCode::BAD_REQUEST),
        );
    }
    if let Err(e) = database.add_records(&user, &records).await {
        error!("failed to add record: {}", e);
        return Err(ErrorResponse::reply("failed to add record")
            .with_status(StatusCode::INTERNAL_SERVER_ERROR));
    };
    Ok(())
}
/// Return the record-store status/index for the calling user.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn index<DB: Database>(
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<Json<RecordStatus>, ErrorResponseStatus<'static>> {
    let State(AppState {
        database,
        settings: _,
    }) = state;
    let record_index = match database.status(&user).await {
        Ok(index) => index,
        Err(e) => {
            error!("failed to get record index: {}", e);
            return Err(ErrorResponse::reply("failed to calculate record index")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR));
        }
    };
    tracing::debug!(user = user.username, "record index request");
    Ok(Json(record_index))
}
/// Query parameters for paging records out of the store.
#[derive(Deserialize)]
pub struct NextParams {
    /// Host whose records to read.
    host: HostId,
    /// Tag selecting which record stream to read.
    tag: String,
    /// Index to resume from; presumably the stream start when absent —
    /// confirm against the `next_records` implementation.
    start: Option<RecordIdx>,
    /// Maximum number of records to return.
    count: u64,
}
/// Page up to `count` records for one (host, tag) pair out of the store.
#[instrument(skip_all, fields(user.id = user.id))]
pub async fn next<DB: Database>(
    params: Query<NextParams>,
    UserAuth(user): UserAuth,
    state: State<AppState<DB>>,
) -> Result<Json<Vec<Record<EncryptedData>>>, ErrorResponseStatus<'static>> {
    let State(AppState {
        database,
        settings: _,
    }) = state;
    let params = params.0;
    let records = match database
        .next_records(&user, params.host, params.tag, params.start, params.count)
        .await
    {
        Ok(records) => records,
        Err(e) => {
            // NOTE(review): this message says "record index" but the failing
            // call is the next-records query — looks copy-pasted from index().
            error!("failed to get record index: {}", e);
            return Err(ErrorResponse::reply("failed to calculate record index")
                .with_status(StatusCode::INTERNAL_SERVER_ERROR));
        }
    };
    counter!("atuin_record_downloaded").increment(records.len() as u64);
    Ok(Json(records))
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-server/src/handlers/v0/mod.rs | crates/atuin-server/src/handlers/v0/mod.rs | pub(crate) mod me;
pub(crate) mod record;
pub(crate) mod store;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-common/src/lib.rs | crates/atuin-common/src/lib.rs | #![deny(unsafe_code)]
/// Defines a new UUID type wrapper
///
/// Generates a transparent newtype around `Uuid` plus the sqlx `Type`,
/// `Decode` and `Encode` impls needed to use it directly in queries; every
/// impl simply delegates to the underlying `Uuid`.
macro_rules! new_uuid {
    ($name:ident) => {
        #[derive(
            Debug,
            Copy,
            Clone,
            PartialEq,
            Eq,
            Hash,
            PartialOrd,
            Ord,
            serde::Serialize,
            serde::Deserialize,
        )]
        #[serde(transparent)]
        pub struct $name(pub Uuid);
        // SQL column type is whatever Uuid maps to for this database.
        impl<DB: sqlx::Database> sqlx::Type<DB> for $name
        where
            Uuid: sqlx::Type<DB>,
        {
            fn type_info() -> <DB as sqlx::Database>::TypeInfo {
                Uuid::type_info()
            }
        }
        // Decode: read a Uuid, wrap it in the newtype.
        impl<'r, DB: sqlx::Database> sqlx::Decode<'r, DB> for $name
        where
            Uuid: sqlx::Decode<'r, DB>,
        {
            fn decode(
                value: DB::ValueRef<'r>,
            ) -> std::result::Result<Self, sqlx::error::BoxDynError> {
                Uuid::decode(value).map(Self)
            }
        }
        // Encode: forward to the inner Uuid.
        impl<'q, DB: sqlx::Database> sqlx::Encode<'q, DB> for $name
        where
            Uuid: sqlx::Encode<'q, DB>,
        {
            fn encode_by_ref(
                &self,
                buf: &mut DB::ArgumentBuffer<'q>,
            ) -> Result<sqlx::encode::IsNull, Box<dyn std::error::Error + Send + Sync + 'static>>
            {
                self.0.encode_by_ref(buf)
            }
        }
    };
}
pub mod api;
pub mod record;
pub mod shell;
pub mod utils;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-common/src/shell.rs | crates/atuin-common/src/shell.rs | use std::{ffi::OsStr, path::Path, process::Command};
use serde::Serialize;
use sysinfo::{Process, System, get_current_pid};
use thiserror::Error;
/// Shells that atuin knows how to detect and integrate with.
#[derive(PartialEq)]
pub enum Shell {
    Sh,
    Bash,
    Fish,
    Zsh,
    Xonsh,
    Nu,
    Powershell,
    /// Anything we failed to recognize.
    Unknown,
}
/// Lowercase canonical name of the shell; the inverse of
/// `Shell::from_string` for known variants.
impl std::fmt::Display for Shell {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Shell::Sh => "sh",
            Shell::Bash => "bash",
            Shell::Zsh => "zsh",
            Shell::Fish => "fish",
            Shell::Xonsh => "xonsh",
            Shell::Nu => "nu",
            Shell::Powershell => "powershell",
            Shell::Unknown => "unknown",
        })
    }
}
/// Errors from shell detection and shell command execution.
#[derive(Debug, Error, Serialize)]
pub enum ShellError {
    /// The platform or shell is not supported by the current implementation.
    #[error("shell not supported")]
    NotSupported,
    /// Launching or running the shell command failed.
    #[error("failed to execute shell command: {0}")]
    ExecError(String),
}
impl Shell {
    /// Detect the shell that launched atuin by inspecting the parent process.
    ///
    /// Panics if the current PID, its process entry, or the parent process
    /// cannot be resolved.
    pub fn current() -> Shell {
        let sys = System::new_all();
        let process = sys
            .process(get_current_pid().expect("Failed to get current PID"))
            .expect("Process with current pid does not exist");
        let parent = sys
            .process(process.parent().expect("Atuin running with no parent!"))
            .expect("Process with parent pid does not exist");
        // Login shells may be reported as e.g. "-zsh"; strip the leading dash.
        let shell = parent.name().trim().to_lowercase();
        let shell = shell.strip_prefix('-').unwrap_or(&shell);
        Shell::from_string(shell.to_string())
    }
    /// Read the shell from the ATUIN_SHELL env var; Unknown when unset.
    pub fn from_env() -> Shell {
        std::env::var("ATUIN_SHELL").map_or(Shell::Unknown, |shell| {
            Shell::from_string(shell.trim().to_lowercase())
        })
    }
    /// Path to this shell's user config file, for the shells we know about.
    /// Returns None when the home dir can't be found or the shell is
    /// unhandled.
    pub fn config_file(&self) -> Option<std::path::PathBuf> {
        let mut path = if let Some(base) = directories::BaseDirs::new() {
            base.home_dir().to_owned()
        } else {
            return None;
        };
        // TODO: handle all shells
        match self {
            Shell::Bash => path.push(".bashrc"),
            Shell::Zsh => path.push(".zshrc"),
            Shell::Fish => path.push(".config/fish/config.fish"),
            _ => return None,
        };
        Some(path)
    }
    /// Best-effort attempt to determine the default shell
    /// This implementation will be different across different platforms
    /// Caller should ensure to handle Shell::Unknown correctly
    pub fn default_shell() -> Result<Shell, ShellError> {
        let sys = System::name().unwrap_or("".to_string()).to_lowercase();
        // TODO: Support Linux
        // I'm pretty sure we can use /etc/passwd there, though there will probably be some issues
        let path = if sys.contains("darwin") {
            // This works in my testing so far
            Shell::Sh.run_interactive([
                "dscl localhost -read \"/Local/Default/Users/$USER\" shell | awk '{print $2}'",
            ])?
        } else if cfg!(windows) {
            return Ok(Shell::Powershell);
        } else {
            Shell::Sh.run_interactive(["getent passwd $LOGNAME | cut -d: -f7"])?
        };
        // The command output is a path like /bin/zsh; the file name is the shell.
        let path = Path::new(path.trim());
        let shell = path.file_name();
        if shell.is_none() {
            return Err(ShellError::NotSupported);
        }
        Ok(Shell::from_string(
            shell.unwrap().to_string_lossy().to_string(),
        ))
    }
    /// Map a (lowercase) shell name to a variant; unrecognized names map to
    /// Unknown.
    pub fn from_string(name: String) -> Shell {
        match name.as_str() {
            "bash" => Shell::Bash,
            "fish" => Shell::Fish,
            "zsh" => Shell::Zsh,
            "xonsh" => Shell::Xonsh,
            "nu" => Shell::Nu,
            "sh" => Shell::Sh,
            "powershell" => Shell::Powershell,
            _ => Shell::Unknown,
        }
    }
    /// Returns true if the shell is posix-like
    /// Note that while fish is not posix compliant, it behaves well enough for our current
    /// featureset that this does not matter.
    pub fn is_posixish(&self) -> bool {
        matches!(self, Shell::Bash | Shell::Fish | Shell::Zsh)
    }
    /// Run `args` in this shell and capture stdout.
    ///
    /// Non-powershell shells get `-ic` (interactive) so user rc files are
    /// loaded; powershell receives the args directly.
    /// NOTE(review): stdout is decoded with `String::from_utf8(...).unwrap()`,
    /// which panics on non-UTF-8 output.
    pub fn run_interactive<I, S>(&self, args: I) -> Result<String, ShellError>
    where
        I: IntoIterator<Item = S>,
        S: AsRef<OsStr>,
    {
        let shell = self.to_string();
        let output = if self == &Self::Powershell {
            Command::new(shell)
                .args(args)
                .output()
                .map_err(|e| ShellError::ExecError(e.to_string()))?
        } else {
            Command::new(shell)
                .arg("-ic")
                .args(args)
                .output()
                .map_err(|e| ShellError::ExecError(e.to_string()))?
        };
        Ok(String::from_utf8(output.stdout).unwrap())
    }
}
/// Name of the shell that launched the current process (or of `parent`,
/// when given), lowercased with any leading login-shell dash stripped.
///
/// Mirrors the detection in `Shell::current` but returns the raw name.
/// `sys` is constructed unconditionally because the fallback branch borrows
/// `Process` entries from it.
pub fn shell_name(parent: Option<&Process>) -> String {
    let sys = System::new_all();
    let parent = if let Some(parent) = parent {
        parent
    } else {
        let process = sys
            .process(get_current_pid().expect("Failed to get current PID"))
            .expect("Process with current pid does not exist");
        sys.process(process.parent().expect("Atuin running with no parent!"))
            .expect("Process with parent pid does not exist")
    };
    // Login shells may be reported as "-bash"; strip the leading dash.
    let shell = parent.name().trim().to_lowercase();
    let shell = shell.strip_prefix('-').unwrap_or(&shell);
    shell.to_string()
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-common/src/calendar.rs | crates/atuin-common/src/calendar.rs | // Calendar data
use serde::{Serialize, Deserialize};
/// Granularity for calendar aggregation.
/// NOTE(review): variant names break Rust's UpperCamelCase convention
/// (YEAR vs Year); renaming would break callers, so they are left as-is.
pub enum TimePeriod {
    YEAR,
    MONTH,
    DAY,
}
/// Aggregated history activity for one calendar bucket.
#[derive(Debug, Serialize, Deserialize)]
pub struct TimePeriodInfo {
    /// Number of history entries in this period.
    pub count: u64,
    // TODO: Use this for merkle tree magic
    pub hash: String,
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-common/src/api.rs | crates/atuin-common/src/api.rs | use lazy_static::lazy_static;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use time::OffsetDateTime;
// the usage of X- prefixed headers has been deprecated for quite a long time, it turns out
/// HTTP header carrying the client's atuin version.
pub static ATUIN_HEADER_VERSION: &str = "Atuin-Version";
/// This crate's version, baked in by cargo at compile time.
pub static ATUIN_CARGO_VERSION: &str = env!("CARGO_PKG_VERSION");
lazy_static! {
    /// Parsed semver of this crate's version.
    /// NOTE(review): std::sync::LazyLock could replace lazy_static here —
    /// confirm MSRV before changing.
    pub static ref ATUIN_VERSION: Version =
        Version::parse(ATUIN_CARGO_VERSION).expect("failed to parse self semver");
}
/// A user's public profile.
#[derive(Debug, Serialize, Deserialize)]
pub struct UserResponse {
    pub username: String,
}
/// Payload for account registration.
#[derive(Debug, Serialize, Deserialize)]
pub struct RegisterRequest {
    pub email: String,
    pub username: String,
    pub password: String,
}
/// Successful registration: the new session token.
#[derive(Debug, Serialize, Deserialize)]
pub struct RegisterResponse {
    pub session: String,
}
/// Empty body confirming account deletion.
#[derive(Debug, Serialize, Deserialize)]
pub struct DeleteUserResponse {}
/// Result of requesting a verification email.
#[derive(Debug, Serialize, Deserialize)]
pub struct SendVerificationResponse {
    pub email_sent: bool,
    pub verified: bool,
}
/// Token submitted to verify an account.
#[derive(Debug, Serialize, Deserialize)]
pub struct VerificationTokenRequest {
    pub token: String,
}
/// Whether account verification succeeded.
#[derive(Debug, Serialize, Deserialize)]
pub struct VerificationTokenResponse {
    pub verified: bool,
}
/// Payload for changing the account password.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChangePasswordRequest {
    pub current_password: String,
    pub new_password: String,
}
/// Empty body confirming a password change.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChangePasswordResponse {}
/// Login credentials.
#[derive(Debug, Serialize, Deserialize)]
pub struct LoginRequest {
    pub username: String,
    pub password: String,
}
/// Successful login: the session token.
#[derive(Debug, Serialize, Deserialize)]
pub struct LoginResponse {
    pub session: String,
}
/// One history item as uploaded by a client.
#[derive(Debug, Serialize, Deserialize)]
pub struct AddHistoryRequest {
    /// Client-assigned id for this item.
    pub id: String,
    #[serde(with = "time::serde::rfc3339")]
    pub timestamp: OffsetDateTime,
    /// Opaque history payload (presumably encrypted client-side — confirm).
    pub data: String,
    pub hostname: String,
}
/// Total history count for a user.
#[derive(Debug, Serialize, Deserialize)]
pub struct CountResponse {
    pub count: i64,
}
/// Parameters for paging through history during sync.
#[derive(Debug, Serialize, Deserialize)]
pub struct SyncHistoryRequest {
    #[serde(with = "time::serde::rfc3339")]
    pub sync_ts: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub history_ts: OffsetDateTime,
    pub host: String,
}
/// One page of history payloads.
#[derive(Debug, Serialize, Deserialize)]
pub struct SyncHistoryResponse {
    pub history: Vec<String>,
}
/// Generic error body; Cow lets static reason strings avoid allocation.
#[derive(Debug, Serialize, Deserialize)]
pub struct ErrorResponse<'a> {
    pub reason: Cow<'a, str>,
}
/// Server front-page info.
#[derive(Debug, Serialize, Deserialize)]
pub struct IndexResponse {
    pub homage: String,
    pub version: String,
    pub total_history: i64,
}
/// Per-user sync status.
#[derive(Debug, Serialize, Deserialize)]
pub struct StatusResponse {
    pub count: i64,
    pub username: String,
    pub deleted: Vec<String>,
    // These could/should also go on the index of the server
    // However, we do not request the server index as a part of normal sync
    // I'd rather slightly increase the size of this response, than add an extra HTTP request
    pub page_size: i64, // max page size supported by the server
    pub version: String,
}
/// Request to delete a single history item, by its client-assigned id.
#[derive(Debug, Serialize, Deserialize)]
pub struct DeleteHistoryRequest {
    pub client_id: String,
}
/// Simple human-readable acknowledgement.
#[derive(Debug, Serialize, Deserialize)]
pub struct MessageResponse {
    pub message: String,
}
/// The authenticated caller's identity.
#[derive(Debug, Serialize, Deserialize)]
pub struct MeResponse {
    pub username: String,
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-common/src/utils.rs | crates/atuin-common/src/utils.rs | use std::borrow::Cow;
use std::env;
use std::path::PathBuf;
use eyre::{Result, eyre};
use base64::prelude::{BASE64_URL_SAFE_NO_PAD, Engine};
use getrandom::getrandom;
use uuid::Uuid;
/// Generate N random bytes, using a cryptographically secure source
///
/// Panics rather than returning weak randomness if the OS RNG fails.
pub fn crypto_random_bytes<const N: usize>() -> [u8; N] {
    // rand say they are in principle safe for crypto purposes, but that it is perhaps a better
    // idea to use getrandom for things such as passwords.
    let mut ret = [0u8; N];
    getrandom(&mut ret).expect("Failed to generate random bytes!");
    ret
}
/// Generate N random bytes using a cryptographically secure source, return encoded as a string
///
/// Encoding is URL-safe base64 with no padding, so the output can be
/// embedded in URLs directly.
pub fn crypto_random_string<const N: usize>() -> String {
    let bytes = crypto_random_bytes::<N>();
    // We only use this to create a random string, and won't be reversing it to find the original
    // data - no padding is OK there. It may be in URLs.
    BASE64_URL_SAFE_NO_PAD.encode(bytes)
}
/// A new time-ordered (v7) UUID.
pub fn uuid_v7() -> Uuid {
    Uuid::now_v7()
}
/// A new random (v4) UUID, formatted without hyphens.
pub fn uuid_v4() -> String {
    Uuid::new_v4().as_simple().to_string()
}
/// True when `path` directly contains a `.git` entry.
pub fn has_git_dir(path: &str) -> bool {
    PathBuf::from(path).join(".git").exists()
}
// detect if any parent dir has a git repo in it
// I really don't want to bring in libgit for something simple like this
// If we start to do anything more advanced, then perhaps
/// Walk up from `path` looking for a directory that contains `.git`.
///
/// Returns the repo root when found, None when the walk reaches the
/// filesystem root without finding one. Neither `path` nor the repo need
/// to exist for the walk itself to be safe.
pub fn in_git_repo(path: &str) -> Option<PathBuf> {
    let mut gitdir = PathBuf::from(path);
    // Checking `.git` via Path::join (instead of round-tripping through
    // &str) avoids a panic on non-UTF-8 paths.
    while gitdir.parent().is_some() && !gitdir.join(".git").exists() {
        gitdir.pop();
    }
    // No parent? then we hit root, finding no git
    if gitdir.parent().is_some() {
        return Some(gitdir);
    }
    None
}
// TODO: more reliable, more tested
// I don't want to use ProjectDirs, it puts config in awkward places on
// mac. Data too. Seems to be more intended for GUI apps.
/// The user's home directory. Panics if $HOME is unset.
#[cfg(not(target_os = "windows"))]
pub fn home_dir() -> PathBuf {
    let home = std::env::var("HOME").expect("$HOME not found");
    PathBuf::from(home)
}
#[cfg(target_os = "windows")]
pub fn home_dir() -> PathBuf {
let home = std::env::var("USERPROFILE").expect("%userprofile% not found");
PathBuf::from(home)
}
/// Atuin's config dir: $XDG_CONFIG_HOME/atuin, or ~/.config/atuin.
pub fn config_dir() -> PathBuf {
    let config_dir =
        std::env::var("XDG_CONFIG_HOME").map_or_else(|_| home_dir().join(".config"), PathBuf::from);
    config_dir.join("atuin")
}
/// Atuin's data dir: $XDG_DATA_HOME/atuin, or ~/.local/share/atuin.
pub fn data_dir() -> PathBuf {
    let data_dir = std::env::var("XDG_DATA_HOME")
        .map_or_else(|_| home_dir().join(".local").join("share"), PathBuf::from);
    data_dir.join("atuin")
}
/// Runtime dir: $XDG_RUNTIME_DIR, falling back to the data dir.
pub fn runtime_dir() -> PathBuf {
    std::env::var("XDG_RUNTIME_DIR").map_or_else(|_| data_dir(), PathBuf::from)
}
/// Cache directory for synced dotfiles.
pub fn dotfiles_cache_dir() -> PathBuf {
    // In most cases, this will be ~/.local/share/atuin/dotfiles/cache
    let data_dir = std::env::var("XDG_DATA_HOME")
        .map_or_else(|_| home_dir().join(".local").join("share"), PathBuf::from);
    data_dir.join("atuin").join("dotfiles").join("cache")
}
/// The current working directory as a string.
///
/// Prefers $PWD over `env::current_dir()` so symlinked paths are preserved;
/// falls back to the empty string if neither source is available.
pub fn get_current_dir() -> String {
    if let Ok(pwd) = env::var("PWD") {
        return pwd;
    }
    env::current_dir()
        .map(|dir| dir.display().to_string())
        .unwrap_or_default()
}
/// True when `path` is a symlink whose target does not exist.
pub fn broken_symlink<P: Into<PathBuf>>(path: P) -> bool {
    let link: PathBuf = path.into();
    // `exists()` follows symlinks, so a dangling link reports false there
    // while still being a symlink itself.
    !link.exists() && link.is_symlink()
}
/// Extension trait for anything string-like that escapes ASCII control
/// characters into caret notation (`^G`, `^?`, ...).
///
/// Intended to help prevent control characters being printed and interpreted
/// by the terminal when printing history, and to make commands shown in the
/// interactive search reflect what was actually run.
pub trait Escapable: AsRef<str> {
    fn escape_control(&self) -> Cow<'_, str> {
        let src = self.as_ref();
        // Fast path: nothing to escape, borrow the input untouched.
        if !src.contains(|c: char| c.is_ascii_control()) {
            return src.into();
        }
        let mut escaped = String::with_capacity(src.len());
        for ch in src.chars() {
            if ch.is_ascii_control() {
                // Caret notation: DEL (0x7F) -> ^?, all others -> code + 64.
                escaped.push('^');
                escaped.push(match ch {
                    '\x7f' => '?',
                    c => char::from_u32(c as u32 + 64).unwrap(),
                });
            } else {
                escaped.push(ch);
            }
        }
        escaped.into()
    }
}
/// Strip one layer of matching quotes (`"`, `'` or `` ` ``) from `s`.
///
/// Unquoted input passes through unchanged; mismatched quotes or input
/// shorter than two characters is an error.
pub fn unquote(s: &str) -> Result<String> {
    if s.chars().count() < 2 {
        return Err(eyre!("not enough chars"));
    }
    let opening = s.chars().next().unwrap();
    // Anything not starting with a quote character is returned as-is.
    if !matches!(opening, '"' | '\'' | '`') {
        return Ok(s.to_string());
    }
    if s.chars().last().unwrap() != opening {
        return Err(eyre!("unexpected eof, quotes do not match"));
    }
    // The checks above guarantee the first and last characters are ASCII
    // quotes, so slicing one byte off each end cannot split a UTF-8 sequence.
    Ok(s[1..s.len() - 1].to_string())
}
impl<T: AsRef<str>> Escapable for T {}
#[allow(unsafe_code)]
#[cfg(test)]
mod tests {
use pretty_assertions::assert_ne;
use super::*;
use std::collections::HashSet;
#[cfg(not(windows))]
#[test]
fn test_dirs() {
// these tests need to be run sequentially to prevent race condition
test_config_dir_xdg();
test_config_dir();
test_data_dir_xdg();
test_data_dir();
}
#[cfg(not(windows))]
fn test_config_dir_xdg() {
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("HOME") };
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::set_var("XDG_CONFIG_HOME", "/home/user/custom_config") };
assert_eq!(
config_dir(),
PathBuf::from("/home/user/custom_config/atuin")
);
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("XDG_CONFIG_HOME") };
}
#[cfg(not(windows))]
fn test_config_dir() {
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::set_var("HOME", "/home/user") };
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("XDG_CONFIG_HOME") };
assert_eq!(config_dir(), PathBuf::from("/home/user/.config/atuin"));
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("HOME") };
}
#[cfg(not(windows))]
fn test_data_dir_xdg() {
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("HOME") };
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::set_var("XDG_DATA_HOME", "/home/user/custom_data") };
assert_eq!(data_dir(), PathBuf::from("/home/user/custom_data/atuin"));
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("XDG_DATA_HOME") };
}
#[cfg(not(windows))]
fn test_data_dir() {
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::set_var("HOME", "/home/user") };
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("XDG_DATA_HOME") };
assert_eq!(data_dir(), PathBuf::from("/home/user/.local/share/atuin"));
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { env::remove_var("HOME") };
}
#[test]
fn uuid_is_unique() {
let how_many: usize = 1000000;
// for peace of mind
let mut uuids: HashSet<Uuid> = HashSet::with_capacity(how_many);
// there will be many in the same millisecond
for _ in 0..how_many {
let uuid = uuid_v7();
uuids.insert(uuid);
}
assert_eq!(uuids.len(), how_many);
}
#[test]
fn escape_control_characters() {
use super::Escapable;
// CSI colour sequence
assert_eq!("\x1b[31mfoo".escape_control(), "^[[31mfoo");
// Tabs count as control chars
assert_eq!("foo\tbar".escape_control(), "foo^Ibar");
// space is in control char range but should be excluded
assert_eq!("two words".escape_control(), "two words");
// unicode multi-byte characters
let s = "🐢\x1b[32m🦀";
assert_eq!(s.escape_control(), s.replace("\x1b", "^["));
}
#[test]
fn escape_no_control_characters() {
use super::Escapable as _;
assert!(matches!(
"no control characters".escape_control(),
Cow::Borrowed(_)
));
assert!(matches!(
"with \x1b[31mcontrol\x1b[0m characters".escape_control(),
Cow::Owned(_)
));
}
#[test]
fn dumb_random_test() {
// Obviously not a test of randomness, but make sure we haven't made some
// catastrophic error
assert_ne!(crypto_random_string::<1>(), crypto_random_string::<1>());
assert_ne!(crypto_random_string::<2>(), crypto_random_string::<2>());
assert_ne!(crypto_random_string::<4>(), crypto_random_string::<4>());
assert_ne!(crypto_random_string::<8>(), crypto_random_string::<8>());
assert_ne!(crypto_random_string::<16>(), crypto_random_string::<16>());
assert_ne!(crypto_random_string::<32>(), crypto_random_string::<32>());
}
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-common/src/record.rs | crates/atuin-common/src/record.rs | use std::collections::HashMap;
use eyre::Result;
use serde::{Deserialize, Serialize};
use typed_builder::TypedBuilder;
use uuid::Uuid;
#[derive(Clone, Debug, PartialEq)]
pub struct DecryptedData(pub Vec<u8>);
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EncryptedData {
pub data: String,
pub content_encryption_key: String,
}
#[derive(Debug, PartialEq, PartialOrd, Ord, Eq)]
pub struct Diff {
pub host: HostId,
pub tag: String,
pub local: Option<RecordIdx>,
pub remote: Option<RecordIdx>,
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct Host {
pub id: HostId,
pub name: String,
}
impl Host {
    /// Create a host entry with the given id and an empty (not-yet-known)
    /// name.
    pub fn new(id: HostId) -> Self {
        Host {
            id,
            name: String::new(),
        }
    }
}
new_uuid!(RecordId);
new_uuid!(HostId);
pub type RecordIdx = u64;
/// A single record stored inside of our local database
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TypedBuilder)]
pub struct Record<Data> {
/// a unique ID
#[builder(default = RecordId(crate::utils::uuid_v7()))]
pub id: RecordId,
/// The integer record ID. This is only unique per (host, tag).
pub idx: RecordIdx,
/// The unique ID of the host.
// TODO(ellie): Optimize the storage here. We use a bunch of IDs, and currently store
// as strings. I would rather avoid normalization, so store as UUID binary instead of
// encoding to a string and wasting much more storage.
pub host: Host,
/// The creation time in nanoseconds since unix epoch
#[builder(default = time::OffsetDateTime::now_utc().unix_timestamp_nanos() as u64)]
pub timestamp: u64,
/// The version the data in the entry conforms to
// However we want to track versions for this tag, eg v2
pub version: String,
/// The type of data we are storing here. Eg, "history"
pub tag: String,
/// Some data. This can be anything you wish to store. Use the tag field to know how to handle it.
pub data: Data,
}
/// Extra data from the record that should be encoded in the data
#[derive(Debug, Copy, Clone)]
pub struct AdditionalData<'a> {
pub id: &'a RecordId,
pub idx: &'a u64,
pub version: &'a str,
pub tag: &'a str,
pub host: &'a HostId,
}
impl<Data> Record<Data> {
    /// Build the successor of this record: same host, version and tag, with
    /// `idx` bumped by one and `data` as the new payload.
    ///
    /// Fields not set here come from the builder defaults — a freshly
    /// generated `RecordId` and the current timestamp — so the result is a
    /// brand-new record, not a copy of this one.
    pub fn append(&self, data: Vec<u8>) -> Record<DecryptedData> {
        Record::builder()
            .host(self.host.clone())
            .version(self.version.clone())
            .idx(self.idx + 1)
            .tag(self.tag.clone())
            .data(DecryptedData(data))
            .build()
    }
}
/// An index representing the current state of the record stores
/// This can be both remote, or local, and compared in either direction
#[derive(Debug, Serialize, Deserialize)]
pub struct RecordStatus {
// A map of host -> tag -> max(idx)
pub hosts: HashMap<HostId, HashMap<String, RecordIdx>>,
}
impl Default for RecordStatus {
fn default() -> Self {
Self::new()
}
}
impl Extend<(HostId, String, RecordIdx)> for RecordStatus {
    /// Bulk-insert `(host, tag, tail idx)` entries via [`RecordStatus::set_raw`].
    /// Later entries for the same (host, tag) pair overwrite earlier ones.
    fn extend<T: IntoIterator<Item = (HostId, String, RecordIdx)>>(&mut self, iter: T) {
        for (host, tag, tail_idx) in iter {
            self.set_raw(host, tag, tail_idx);
        }
    }
}
impl RecordStatus {
    /// An empty status index with no known hosts.
    pub fn new() -> RecordStatus {
        RecordStatus {
            hosts: HashMap::new(),
        }
    }

    /// Insert a new tail record into the store
    pub fn set(&mut self, tail: Record<DecryptedData>) {
        self.set_raw(tail.host.id, tail.tag, tail.idx)
    }

    /// Record `tail_id` as the latest known idx for the (host, tag) pair,
    /// replacing any previous value.
    pub fn set_raw(&mut self, host: HostId, tag: String, tail_id: RecordIdx) {
        self.hosts.entry(host).or_default().insert(tag, tail_id);
    }

    /// The latest known idx for the (host, tag) pair, if any.
    pub fn get(&self, host: HostId, tag: String) -> Option<RecordIdx> {
        self.hosts.get(&host).and_then(|v| v.get(&tag)).cloned()
    }

    /// Diff this index with another, likely remote index.
    /// The two diffs can then be reconciled, and the optimal change set calculated
    /// Returns a tuple, with (host, tag, Option(OTHER))
    /// OTHER is set to the value of the idx on the other machine. If it is greater than our index,
    /// then we need to do some downloading. If it is smaller, then we need to do some uploading
    /// Note that we cannot upload if we are not the owner of the record store - hosts can only
    /// write to their own store.
    pub fn diff(&self, other: &Self) -> Vec<Diff> {
        let mut ret = Vec::new();

        // First, we check if other has everything that self has
        for (host, tag_map) in self.hosts.iter() {
            for (tag, idx) in tag_map.iter() {
                match other.get(*host, tag.clone()) {
                    // The other store is all up to date! No diff.
                    Some(t) if t.eq(idx) => continue,

                    // The other store does exist, and it is either ahead or behind us. A diff regardless
                    Some(t) => ret.push(Diff {
                        host: *host,
                        tag: tag.clone(),
                        local: Some(*idx),
                        remote: Some(t),
                    }),

                    // The other store does not exist :O
                    None => ret.push(Diff {
                        host: *host,
                        tag: tag.clone(),
                        local: Some(*idx),
                        remote: None,
                    }),
                };
            }
        }

        // At this point, there is a single case we have not yet considered.
        // If the other store knows of a tag that we are not yet aware of, then the diff will be missed
        // account for that!
        for (host, tag_map) in other.hosts.iter() {
            for (tag, idx) in tag_map.iter() {
                match self.get(*host, tag.clone()) {
                    // If we have this host/tag combo, the comparison and diff will have already happened above
                    Some(_) => continue,

                    None => ret.push(Diff {
                        host: *host,
                        tag: tag.clone(),
                        remote: Some(*idx),
                        local: None,
                    }),
                };
            }
        }

        // Stability is a nice property to have
        ret.sort();
        ret
    }
}
/// An encryption scheme for record payloads.
pub trait Encryption {
    /// Decrypt `data` with `old_key` and encrypt it again with `new_key`,
    /// keeping the same additional data. Useful when rotating keys.
    fn re_encrypt(
        data: EncryptedData,
        ad: AdditionalData,
        old_key: &[u8; 32],
        new_key: &[u8; 32],
    ) -> Result<EncryptedData> {
        let data = Self::decrypt(data, ad, old_key)?;
        Ok(Self::encrypt(data, ad, new_key))
    }

    /// Encrypt `data` under `key`; `ad` carries the record's identifying
    /// fields (presumably authenticated by implementations — see the
    /// concrete `Encryption` impls).
    fn encrypt(data: DecryptedData, ad: AdditionalData, key: &[u8; 32]) -> EncryptedData;

    /// Decrypt `data` under `key`; error conditions are
    /// implementation-defined.
    fn decrypt(data: EncryptedData, ad: AdditionalData, key: &[u8; 32]) -> Result<DecryptedData>;
}
impl Record<DecryptedData> {
    /// Encrypt this record's payload with `key` using scheme `E`, producing a
    /// `Record<EncryptedData>`.
    ///
    /// The record's identifying fields (id, idx, version, tag, host id) are
    /// handed to the cipher as `AdditionalData`; every other field is carried
    /// over unchanged.
    pub fn encrypt<E: Encryption>(self, key: &[u8; 32]) -> Record<EncryptedData> {
        let ad = AdditionalData {
            id: &self.id,
            version: &self.version,
            tag: &self.tag,
            host: &self.host.id,
            idx: &self.idx,
        };
        Record {
            data: E::encrypt(self.data, ad, key),
            id: self.id,
            host: self.host,
            idx: self.idx,
            timestamp: self.timestamp,
            version: self.version,
            tag: self.tag,
        }
    }
}
impl Record<EncryptedData> {
    /// Decrypt this record's payload with `key` using scheme `E`.
    ///
    /// The same `AdditionalData` fields used during encryption are supplied
    /// to the cipher; all non-payload fields are carried over unchanged.
    ///
    /// # Errors
    /// Propagates whatever `E::decrypt` reports (e.g. a wrong key).
    pub fn decrypt<E: Encryption>(self, key: &[u8; 32]) -> Result<Record<DecryptedData>> {
        let ad = AdditionalData {
            id: &self.id,
            version: &self.version,
            tag: &self.tag,
            host: &self.host.id,
            idx: &self.idx,
        };
        Ok(Record {
            data: E::decrypt(self.data, ad, key)?,
            id: self.id,
            host: self.host,
            idx: self.idx,
            timestamp: self.timestamp,
            version: self.version,
            tag: self.tag,
        })
    }

    /// Re-encrypt the payload from `old_key` to `new_key` (key rotation)
    /// without changing any other field.
    ///
    /// # Errors
    /// Propagates whatever `E::re_encrypt` reports (e.g. `old_key` failing to
    /// decrypt the payload).
    pub fn re_encrypt<E: Encryption>(
        self,
        old_key: &[u8; 32],
        new_key: &[u8; 32],
    ) -> Result<Record<EncryptedData>> {
        let ad = AdditionalData {
            id: &self.id,
            version: &self.version,
            tag: &self.tag,
            host: &self.host.id,
            idx: &self.idx,
        };
        Ok(Record {
            data: E::re_encrypt(self.data, ad, old_key, new_key)?,
            id: self.id,
            host: self.host,
            idx: self.idx,
            timestamp: self.timestamp,
            version: self.version,
            tag: self.tag,
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::record::{Host, HostId};

    use super::{DecryptedData, Diff, Record, RecordStatus};
    use pretty_assertions::assert_eq;

    /// A record with a random host id and tag so individual tests cannot
    /// collide with each other.
    fn test_record() -> Record<DecryptedData> {
        Record::builder()
            .host(Host::new(HostId(crate::utils::uuid_v7())))
            .version("v1".into())
            .tag(crate::utils::uuid_v7().simple().to_string())
            .data(DecryptedData(vec![0, 1, 2, 3]))
            .idx(0)
            .build()
    }

    #[test]
    fn record_index() {
        let mut index = RecordStatus::new();
        let record = test_record();
        index.set(record.clone());

        let tail = index.get(record.host.id, record.tag);

        assert_eq!(
            record.idx,
            tail.expect("tail not in store"),
            "tail in store did not match"
        );
    }

    #[test]
    fn record_index_overwrite() {
        let mut index = RecordStatus::new();
        let record = test_record();
        let child = record.append(vec![1, 2, 3]);

        index.set(record.clone());
        index.set(child.clone());

        let tail = index.get(record.host.id, record.tag);

        assert_eq!(
            child.idx,
            tail.expect("tail not in store"),
            "tail in store did not match"
        );
    }

    #[test]
    fn record_index_no_diff() {
        // Here, they both have the same version and should have no diff
        let mut index1 = RecordStatus::new();
        let mut index2 = RecordStatus::new();

        let record1 = test_record();

        index1.set(record1.clone());
        index2.set(record1);

        let diff = index1.diff(&index2);

        assert_eq!(0, diff.len(), "expected empty diff");
    }

    #[test]
    fn record_index_single_diff() {
        // Here, they both have the same stores, but one is ahead by a single record
        let mut index1 = RecordStatus::new();
        let mut index2 = RecordStatus::new();

        let record1 = test_record();
        let record2 = record1.append(vec![1, 2, 3]);

        index1.set(record1);
        index2.set(record2.clone());

        let diff = index1.diff(&index2);

        assert_eq!(1, diff.len(), "expected single diff");
        assert_eq!(
            diff[0],
            Diff {
                host: record2.host.id,
                tag: record2.tag,
                remote: Some(1),
                local: Some(0)
            }
        );
    }

    #[test]
    fn record_index_multi_diff() {
        // A much more complex case, with a bunch more checks
        let mut index1 = RecordStatus::new();
        let mut index2 = RecordStatus::new();

        let store1record1 = test_record();
        let store1record2 = store1record1.append(vec![1, 2, 3]);

        let store2record1 = test_record();
        let store2record2 = store2record1.append(vec![1, 2, 3]);

        let store3record1 = test_record();

        let store4record1 = test_record();

        // index1 only knows about the first two entries of the first two stores
        index1.set(store1record1);
        index1.set(store2record1);

        // index2 is fully up to date with the first two stores, and knows of a third
        index2.set(store1record2);
        index2.set(store2record2);
        index2.set(store3record1);

        // index1 knows of a 4th store
        index1.set(store4record1);

        let diff1 = index1.diff(&index2);
        let diff2 = index2.diff(&index1);

        // both diffs the same length
        assert_eq!(4, diff1.len());
        assert_eq!(4, diff2.len());

        // both diffs should be ALMOST the same. They will agree on which hosts and tags
        // require updating, but the "other" value will not be the same.
        let smol_diff_1: Vec<(HostId, String)> =
            diff1.iter().map(|v| (v.host, v.tag.clone())).collect();
        // BUG FIX: this previously projected `diff1` a second time, which made
        // the assertion below compare diff1 with itself and pass vacuously.
        // It must project `diff2`; `diff()` sorts its output, so the
        // (host, tag) projections of the two diffs line up.
        let smol_diff_2: Vec<(HostId, String)> =
            diff2.iter().map(|v| (v.host, v.tag.clone())).collect();

        assert_eq!(smol_diff_1, smol_diff_2);

        // diffing with yourself = no diff
        assert_eq!(index1.diff(&index1).len(), 0);
        assert_eq!(index2.diff(&index2).len(), 0);
    }
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/settings.rs | crates/atuin-client/src/settings.rs | use std::{
collections::HashMap, convert::TryFrom, fmt, io::prelude::*, path::PathBuf, str::FromStr,
};
use atuin_common::record::HostId;
use atuin_common::utils;
use clap::ValueEnum;
use config::{
Config, ConfigBuilder, Environment, File as ConfigFile, FileFormat, builder::DefaultState,
};
use eyre::{Context, Error, Result, bail, eyre};
use fs_err::{File, create_dir_all};
use humantime::parse_duration;
use regex::RegexSet;
use semver::Version;
use serde::{Deserialize, Serialize};
use serde_with::DeserializeFromStr;
use time::{
OffsetDateTime, UtcOffset,
format_description::{FormatItem, well_known::Rfc3339},
macros::format_description,
};
use uuid::Uuid;
pub const HISTORY_PAGE_SIZE: i64 = 100;
pub const LAST_SYNC_FILENAME: &str = "last_sync_time";
pub const LAST_VERSION_CHECK_FILENAME: &str = "last_version_check_time";
pub const LATEST_VERSION_FILENAME: &str = "latest_version";
pub const HOST_ID_FILENAME: &str = "host_id";
static EXAMPLE_CONFIG: &str = include_str!("../config.toml");
mod dotfiles;
mod kv;
mod scripts;
#[derive(Clone, Debug, Deserialize, Copy, ValueEnum, PartialEq, Serialize)]
pub enum SearchMode {
#[serde(rename = "prefix")]
Prefix,
#[serde(rename = "fulltext")]
#[clap(aliases = &["fulltext"])]
FullText,
#[serde(rename = "fuzzy")]
Fuzzy,
#[serde(rename = "skim")]
Skim,
}
impl SearchMode {
    /// Short uppercase label shown for the active search mode.
    pub fn as_str(&self) -> &'static str {
        match self {
            SearchMode::Prefix => "PREFIX",
            SearchMode::FullText => "FULLTXT",
            SearchMode::Fuzzy => "FUZZY",
            SearchMode::Skim => "SKIM",
        }
    }

    /// The mode that cycling should advance to after `self`.
    ///
    /// The cycle is Prefix -> FullText -> (Skim or Fuzzy) -> Prefix, where
    /// Skim is chosen instead of Fuzzy only when the user's configured
    /// `search_mode` is Skim.
    pub fn next(&self, settings: &Settings) -> Self {
        match self {
            SearchMode::Prefix => SearchMode::FullText,
            // if the user is using skim, we go to skim
            SearchMode::FullText if settings.search_mode == SearchMode::Skim => SearchMode::Skim,
            // otherwise fuzzy.
            SearchMode::FullText => SearchMode::Fuzzy,
            SearchMode::Fuzzy | SearchMode::Skim => SearchMode::Prefix,
        }
    }
}
#[derive(Clone, Debug, Deserialize, Copy, PartialEq, Eq, ValueEnum, Serialize)]
pub enum FilterMode {
#[serde(rename = "global")]
Global = 0,
#[serde(rename = "host")]
Host = 1,
#[serde(rename = "session")]
Session = 2,
#[serde(rename = "directory")]
Directory = 3,
#[serde(rename = "workspace")]
Workspace = 4,
#[serde(rename = "session-preload")]
SessionPreload = 5,
}
impl FilterMode {
pub fn as_str(&self) -> &'static str {
match self {
FilterMode::Global => "GLOBAL",
FilterMode::Host => "HOST",
FilterMode::Session => "SESSION",
FilterMode::Directory => "DIRECTORY",
FilterMode::Workspace => "WORKSPACE",
FilterMode::SessionPreload => "SESSION+",
}
}
}
#[derive(Clone, Debug, Deserialize, Copy, Serialize)]
pub enum ExitMode {
#[serde(rename = "return-original")]
ReturnOriginal,
#[serde(rename = "return-query")]
ReturnQuery,
}
// FIXME: Can use upstream Dialect enum if https://github.com/stevedonovan/chrono-english/pull/16 is merged
// FIXME: Above PR was merged, but dependency was changed to interim (fork of chrono-english) in the ... interim
#[derive(Clone, Debug, Deserialize, Copy, Serialize)]
pub enum Dialect {
#[serde(rename = "us")]
Us,
#[serde(rename = "uk")]
Uk,
}
impl From<Dialect> for interim::Dialect {
fn from(d: Dialect) -> interim::Dialect {
match d {
Dialect::Uk => interim::Dialect::Uk,
Dialect::Us => interim::Dialect::Us,
}
}
}
/// Type wrapper around `time::UtcOffset` to support a wider variety of timezone formats.
///
/// Note that the parsing of this struct needs to be done before starting any
/// multithreaded runtime, otherwise it will fail on most Unix systems.
///
/// See: <https://github.com/atuinsh/atuin/pull/1517#discussion_r1447516426>
#[derive(Clone, Copy, Debug, Eq, PartialEq, DeserializeFromStr, Serialize)]
pub struct Timezone(pub UtcOffset);
impl fmt::Display for Timezone {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
/// format: <+|-><hour>[:<minute>[:<second>]]
static OFFSET_FMT: &[FormatItem<'_>] = format_description!(
"[offset_hour sign:mandatory padding:none][optional [:[offset_minute padding:none][optional [:[offset_second padding:none]]]]]"
);
impl FromStr for Timezone {
    type Err = Error;

    /// Parse a timezone spec: `"l"`/`"local"` for the system offset (falling
    /// back to UTC when it cannot be determined), `"0"`/`"utc"` for UTC, or a
    /// signed offset like `+9`, `-5:30` (see `OFFSET_FMT`). Named timezones
    /// are not supported.
    fn from_str(s: &str) -> Result<Self> {
        // local timezone
        if matches!(s.to_lowercase().as_str(), "l" | "local") {
            // There have been some timezone issues, related to errors fetching it on some
            // platforms
            // Rather than fail to start, fallback to UTC. The user should still be able to specify
            // their timezone manually in the config file.
            let offset = UtcOffset::current_local_offset().unwrap_or(UtcOffset::UTC);
            return Ok(Self(offset));
        }

        if matches!(s.to_lowercase().as_str(), "0" | "utc") {
            let offset = UtcOffset::UTC;
            return Ok(Self(offset));
        }

        // offset from UTC
        if let Ok(offset) = UtcOffset::parse(s, OFFSET_FMT) {
            return Ok(Self(offset));
        }

        // IDEA: Currently named timezones are not supported, because the well-known crate
        // for this is `chrono_tz`, which is not really interoperable with the datetime crate
        // that we currently use - `time`. If ever we migrate to using `chrono`, this would
        // be a good feature to add.
        bail!(r#""{s}" is not a valid timezone spec"#)
    }
}
#[derive(Clone, Debug, Deserialize, Copy, Serialize)]
pub enum Style {
#[serde(rename = "auto")]
Auto,
#[serde(rename = "full")]
Full,
#[serde(rename = "compact")]
Compact,
}
#[derive(Clone, Debug, Deserialize, Copy, Serialize)]
pub enum WordJumpMode {
#[serde(rename = "emacs")]
Emacs,
#[serde(rename = "subl")]
Subl,
}
#[derive(Clone, Debug, Deserialize, Copy, PartialEq, Eq, ValueEnum, Serialize)]
pub enum KeymapMode {
#[serde(rename = "emacs")]
Emacs,
#[serde(rename = "vim-normal")]
VimNormal,
#[serde(rename = "vim-insert")]
VimInsert,
#[serde(rename = "auto")]
Auto,
}
impl KeymapMode {
pub fn as_str(&self) -> &'static str {
match self {
KeymapMode::Emacs => "EMACS",
KeymapMode::VimNormal => "VIMNORMAL",
KeymapMode::VimInsert => "VIMINSERT",
KeymapMode::Auto => "AUTO",
}
}
}
// We want to translate the config to crossterm::cursor::SetCursorStyle, but
// the original type does not implement trait serde::Deserialize unfortunately.
// It seems impossible to implement Deserialize for external types when it is
// used in HashMap (https://stackoverflow.com/questions/67142663). We instead
// define an adapter type.
#[derive(Clone, Debug, Deserialize, Copy, PartialEq, Eq, ValueEnum, Serialize)]
pub enum CursorStyle {
#[serde(rename = "default")]
DefaultUserShape,
#[serde(rename = "blink-block")]
BlinkingBlock,
#[serde(rename = "steady-block")]
SteadyBlock,
#[serde(rename = "blink-underline")]
BlinkingUnderScore,
#[serde(rename = "steady-underline")]
SteadyUnderScore,
#[serde(rename = "blink-bar")]
BlinkingBar,
#[serde(rename = "steady-bar")]
SteadyBar,
}
impl CursorStyle {
pub fn as_str(&self) -> &'static str {
match self {
CursorStyle::DefaultUserShape => "DEFAULT",
CursorStyle::BlinkingBlock => "BLINKBLOCK",
CursorStyle::SteadyBlock => "STEADYBLOCK",
CursorStyle::BlinkingUnderScore => "BLINKUNDERLINE",
CursorStyle::SteadyUnderScore => "STEADYUNDERLINE",
CursorStyle::BlinkingBar => "BLINKBAR",
CursorStyle::SteadyBar => "STEADYBAR",
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Stats {
#[serde(default = "Stats::common_prefix_default")]
pub common_prefix: Vec<String>, // sudo, etc. commands we want to strip off
#[serde(default = "Stats::common_subcommands_default")]
pub common_subcommands: Vec<String>, // kubectl, commands we should consider subcommands for
#[serde(default = "Stats::ignored_commands_default")]
pub ignored_commands: Vec<String>, // cd, ls, etc. commands we want to completely hide from stats
}
impl Stats {
    /// Default wrapper commands (privilege escalation) stripped from the
    /// front of a command line before stats are computed.
    fn common_prefix_default() -> Vec<String> {
        ["sudo", "doas"].iter().map(|s| (*s).to_string()).collect()
    }

    /// Default set of commands whose first argument should be treated as a
    /// subcommand when aggregating stats.
    fn common_subcommands_default() -> Vec<String> {
        [
            "apt", "cargo", "composer", "dnf", "docker", "dotnet", "git", "go", "ip", "jj",
            "kubectl", "nix", "nmcli", "npm", "pecl", "pnpm", "podman", "port", "systemctl",
            "tmux", "yarn",
        ]
        .iter()
        .map(|s| (*s).to_string())
        .collect()
    }

    /// By default no commands are hidden from stats.
    fn ignored_commands_default() -> Vec<String> {
        Vec::new()
    }
}
impl Default for Stats {
fn default() -> Self {
Self {
common_prefix: Self::common_prefix_default(),
common_subcommands: Self::common_subcommands_default(),
ignored_commands: Self::ignored_commands_default(),
}
}
}
#[derive(Clone, Debug, Deserialize, Default, Serialize)]
pub struct Sync {
pub records: bool,
}
#[derive(Clone, Debug, Deserialize, Default, Serialize)]
pub struct Keys {
pub scroll_exits: bool,
pub exit_past_line_start: bool,
pub accept_past_line_end: bool,
pub accept_past_line_start: bool,
pub accept_with_backspace: bool,
pub prefix: String,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Preview {
pub strategy: PreviewStrategy,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Theme {
/// Name of desired theme ("default" for base)
pub name: String,
/// Whether any available additional theme debug should be shown
pub debug: Option<bool>,
/// How many levels of parenthood will be traversed if needed
pub max_depth: Option<u8>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Daemon {
/// Use the daemon to sync
/// If enabled, requires a running daemon with `atuin daemon`
#[serde(alias = "enable")]
pub enabled: bool,
/// The daemon will handle sync on an interval. How often to sync, in seconds.
pub sync_frequency: u64,
/// The path to the unix socket used by the daemon
pub socket_path: String,
/// Use a socket passed via systemd's socket activation protocol, instead of the path
pub systemd_socket: bool,
/// The port that should be used for TCP on non unix systems
pub tcp_port: u64,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Search {
/// The list of enabled filter modes, in order of priority.
pub filters: Vec<FilterMode>,
}
impl Default for Preview {
fn default() -> Self {
Self {
strategy: PreviewStrategy::Auto,
}
}
}
impl Default for Theme {
fn default() -> Self {
Self {
name: "".to_string(),
debug: None::<bool>,
max_depth: Some(10),
}
}
}
impl Default for Daemon {
fn default() -> Self {
Self {
enabled: false,
sync_frequency: 300,
socket_path: "".to_string(),
systemd_socket: false,
tcp_port: 8889,
}
}
}
impl Default for Search {
fn default() -> Self {
Self {
filters: vec![
FilterMode::Global,
FilterMode::Host,
FilterMode::Session,
FilterMode::SessionPreload,
FilterMode::Workspace,
FilterMode::Directory,
],
}
}
}
// The preview height strategy also takes max_preview_height into account.
#[derive(Clone, Debug, Deserialize, Copy, PartialEq, Eq, ValueEnum, Serialize)]
pub enum PreviewStrategy {
// Preview height is calculated for the length of the selected command.
#[serde(rename = "auto")]
Auto,
// Preview height is calculated for the length of the longest command stored in the history.
#[serde(rename = "static")]
Static,
// max_preview_height is used as fixed height.
#[serde(rename = "fixed")]
Fixed,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Settings {
pub dialect: Dialect,
pub timezone: Timezone,
pub style: Style,
pub auto_sync: bool,
pub update_check: bool,
pub sync_address: String,
pub sync_frequency: String,
pub db_path: String,
pub record_store_path: String,
pub key_path: String,
pub session_path: String,
pub search_mode: SearchMode,
pub filter_mode: Option<FilterMode>,
pub filter_mode_shell_up_key_binding: Option<FilterMode>,
pub search_mode_shell_up_key_binding: Option<SearchMode>,
pub shell_up_key_binding: bool,
pub inline_height: u16,
pub inline_height_shell_up_key_binding: Option<u16>,
pub invert: bool,
pub show_preview: bool,
pub max_preview_height: u16,
pub show_help: bool,
pub show_tabs: bool,
pub show_numeric_shortcuts: bool,
pub auto_hide_height: u16,
pub exit_mode: ExitMode,
pub keymap_mode: KeymapMode,
pub keymap_mode_shell: KeymapMode,
pub keymap_cursor: HashMap<String, CursorStyle>,
pub word_jump_mode: WordJumpMode,
pub word_chars: String,
pub scroll_context_lines: usize,
pub history_format: String,
pub prefers_reduced_motion: bool,
pub store_failed: bool,
#[serde(with = "serde_regex", default = "RegexSet::empty", skip_serializing)]
pub history_filter: RegexSet,
#[serde(with = "serde_regex", default = "RegexSet::empty", skip_serializing)]
pub cwd_filter: RegexSet,
pub secrets_filter: bool,
pub workspaces: bool,
pub ctrl_n_shortcuts: bool,
pub network_connect_timeout: u64,
pub network_timeout: u64,
pub local_timeout: f64,
pub enter_accept: bool,
pub smart_sort: bool,
pub command_chaining: bool,
#[serde(default)]
pub stats: Stats,
#[serde(default)]
pub sync: Sync,
#[serde(default)]
pub keys: Keys,
#[serde(default)]
pub preview: Preview,
#[serde(default)]
pub dotfiles: dotfiles::Settings,
#[serde(default)]
pub daemon: Daemon,
#[serde(default)]
pub search: Search,
#[serde(default)]
pub theme: Theme,
#[serde(default)]
pub scripts: scripts::Settings,
#[serde(default)]
pub kv: kv::Settings,
}
impl Settings {
pub fn utc() -> Self {
Self::builder()
.expect("Could not build default")
.set_override("timezone", "0")
.expect("failed to override timezone with UTC")
.build()
.expect("Could not build config")
.try_deserialize()
.expect("Could not deserialize config")
}
fn save_to_data_dir(filename: &str, value: &str) -> Result<()> {
let data_dir = atuin_common::utils::data_dir();
let data_dir = data_dir.as_path();
let path = data_dir.join(filename);
fs_err::write(path, value)?;
Ok(())
}
fn read_from_data_dir(filename: &str) -> Option<String> {
let data_dir = atuin_common::utils::data_dir();
let data_dir = data_dir.as_path();
let path = data_dir.join(filename);
if !path.exists() {
return None;
}
let value = fs_err::read_to_string(path);
value.ok()
}
fn save_current_time(filename: &str) -> Result<()> {
Settings::save_to_data_dir(
filename,
OffsetDateTime::now_utc().format(&Rfc3339)?.as_str(),
)?;
Ok(())
}
fn load_time_from_file(filename: &str) -> Result<OffsetDateTime> {
let value = Settings::read_from_data_dir(filename);
match value {
Some(v) => Ok(OffsetDateTime::parse(v.as_str(), &Rfc3339)?),
None => Ok(OffsetDateTime::UNIX_EPOCH),
}
}
pub fn save_sync_time() -> Result<()> {
Settings::save_current_time(LAST_SYNC_FILENAME)
}
pub fn save_version_check_time() -> Result<()> {
Settings::save_current_time(LAST_VERSION_CHECK_FILENAME)
}
pub fn last_sync() -> Result<OffsetDateTime> {
Settings::load_time_from_file(LAST_SYNC_FILENAME)
}
pub fn last_version_check() -> Result<OffsetDateTime> {
Settings::load_time_from_file(LAST_VERSION_CHECK_FILENAME)
}
pub fn host_id() -> Option<HostId> {
let id = Settings::read_from_data_dir(HOST_ID_FILENAME);
if let Some(id) = id {
let parsed =
Uuid::from_str(id.as_str()).expect("failed to parse host ID from local directory");
return Some(HostId(parsed));
}
let uuid = atuin_common::utils::uuid_v7();
Settings::save_to_data_dir(HOST_ID_FILENAME, uuid.as_simple().to_string().as_ref())
.expect("Could not write host ID to data dir");
Some(HostId(uuid))
}
pub fn should_sync(&self) -> Result<bool> {
if !self.auto_sync || !PathBuf::from(self.session_path.as_str()).exists() {
return Ok(false);
}
if self.sync_frequency == "0" {
return Ok(true);
}
match parse_duration(self.sync_frequency.as_str()) {
Ok(d) => {
let d = time::Duration::try_from(d)?;
Ok(OffsetDateTime::now_utc() - Settings::last_sync()? >= d)
}
Err(e) => Err(eyre!("failed to check sync: {}", e)),
}
}
pub fn logged_in(&self) -> bool {
let session_path = self.session_path.as_str();
PathBuf::from(session_path).exists()
}
pub fn session_token(&self) -> Result<String> {
if !self.logged_in() {
return Err(eyre!("Tried to load session; not logged in"));
}
let session_path = self.session_path.as_str();
Ok(fs_err::read_to_string(session_path)?)
}
#[cfg(feature = "check-update")]
fn needs_update_check(&self) -> Result<bool> {
let last_check = Settings::last_version_check()?;
let diff = OffsetDateTime::now_utc() - last_check;
// Check a max of once per hour
Ok(diff.whole_hours() >= 1)
}
#[cfg(feature = "check-update")]
async fn latest_version(&self) -> Result<Version> {
// Default to the current version, and if that doesn't parse, a version so high it's unlikely to ever
// suggest upgrading.
let current =
Version::parse(env!("CARGO_PKG_VERSION")).unwrap_or(Version::new(100000, 0, 0));
if !self.needs_update_check()? {
// Worst case, we don't want Atuin to fail to start because something funky is going on with
// version checking.
let version = tokio::task::spawn_blocking(|| {
Settings::read_from_data_dir(LATEST_VERSION_FILENAME)
})
.await
.expect("file task panicked");
let version = match version {
Some(v) => Version::parse(&v).unwrap_or(current),
None => current,
};
return Ok(version);
}
#[cfg(feature = "sync")]
let latest = crate::api_client::latest_version().await.unwrap_or(current);
#[cfg(not(feature = "sync"))]
let latest = current;
let latest_encoded = latest.to_string();
tokio::task::spawn_blocking(move || {
Settings::save_version_check_time()?;
Settings::save_to_data_dir(LATEST_VERSION_FILENAME, &latest_encoded)?;
Ok::<(), eyre::Report>(())
})
.await
.expect("file task panicked")?;
Ok(latest)
}
// Return Some(latest version) if an update is needed. Otherwise, none.
#[cfg(feature = "check-update")]
pub async fn needs_update(&self) -> Option<Version> {
if !self.update_check {
return None;
}
let current =
Version::parse(env!("CARGO_PKG_VERSION")).unwrap_or(Version::new(100000, 0, 0));
let latest = self.latest_version().await;
if latest.is_err() {
return None;
}
let latest = latest.unwrap();
if latest > current {
return Some(latest);
}
None
}
pub fn default_filter_mode(&self, git_root: bool) -> FilterMode {
self.filter_mode
.filter(|x| self.search.filters.contains(x))
.or_else(|| {
self.search
.filters
.iter()
.find(|x| match (x, git_root, self.workspaces) {
(FilterMode::Workspace, true, true) => true,
(FilterMode::Workspace, _, _) => false,
(_, _, _) => true,
})
.copied()
})
.unwrap_or(FilterMode::Global)
}
#[cfg(not(feature = "check-update"))]
pub async fn needs_update(&self) -> Option<Version> {
None
}
pub fn builder() -> Result<ConfigBuilder<DefaultState>> {
let data_dir = atuin_common::utils::data_dir();
let db_path = data_dir.join("history.db");
let record_store_path = data_dir.join("records.db");
let kv_path = data_dir.join("kv.db");
let scripts_path = data_dir.join("scripts.db");
let socket_path = atuin_common::utils::runtime_dir().join("atuin.sock");
let key_path = data_dir.join("key");
let session_path = data_dir.join("session");
Ok(Config::builder()
.set_default("history_format", "{time}\t{command}\t{duration}")?
.set_default("db_path", db_path.to_str())?
.set_default("record_store_path", record_store_path.to_str())?
.set_default("key_path", key_path.to_str())?
.set_default("session_path", session_path.to_str())?
.set_default("dialect", "us")?
.set_default("timezone", "local")?
.set_default("auto_sync", true)?
.set_default("update_check", cfg!(feature = "check-update"))?
.set_default("sync_address", "https://api.atuin.sh")?
.set_default("sync_frequency", "5m")?
.set_default("search_mode", "fuzzy")?
.set_default("filter_mode", None::<String>)?
.set_default("style", "compact")?
.set_default("inline_height", 40)?
.set_default("show_preview", true)?
.set_default("preview.strategy", "auto")?
.set_default("max_preview_height", 4)?
.set_default("show_help", true)?
.set_default("show_tabs", true)?
.set_default("show_numeric_shortcuts", true)?
.set_default("auto_hide_height", 8)?
.set_default("invert", false)?
.set_default("exit_mode", "return-original")?
.set_default("word_jump_mode", "emacs")?
.set_default(
"word_chars",
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789",
)?
.set_default("scroll_context_lines", 1)?
.set_default("shell_up_key_binding", false)?
.set_default("workspaces", false)?
.set_default("ctrl_n_shortcuts", false)?
.set_default("secrets_filter", true)?
.set_default("network_connect_timeout", 5)?
.set_default("network_timeout", 30)?
.set_default("local_timeout", 2.0)?
// enter_accept defaults to false here, but true in the default config file. The dissonance is
// intentional!
// Existing users will get the default "False", so we don't mess with any potential
// muscle memory.
// New users will get the new default, that is more similar to what they are used to.
.set_default("enter_accept", false)?
.set_default("sync.records", true)?
.set_default("keys.scroll_exits", true)?
.set_default("keys.accept_past_line_end", true)?
.set_default("keys.exit_past_line_start", true)?
.set_default("keys.accept_past_line_start", false)?
.set_default("keys.accept_with_backspace", false)?
.set_default("keys.prefix", "a")?
.set_default("keymap_mode", "emacs")?
.set_default("keymap_mode_shell", "auto")?
.set_default("keymap_cursor", HashMap::<String, String>::new())?
.set_default("smart_sort", false)?
.set_default("command_chaining", false)?
.set_default("store_failed", true)?
.set_default("daemon.sync_frequency", 300)?
.set_default("daemon.enabled", false)?
.set_default("daemon.socket_path", socket_path.to_str())?
.set_default("daemon.systemd_socket", false)?
.set_default("daemon.tcp_port", 8889)?
.set_default("kv.db_path", kv_path.to_str())?
.set_default("scripts.db_path", scripts_path.to_str())?
.set_default(
"search.filters",
vec![
"global",
"host",
"session",
"workspace",
"directory",
"session-preload",
],
)?
.set_default("theme.name", "default")?
.set_default("theme.debug", None::<bool>)?
.set_default(
"prefers_reduced_motion",
std::env::var("NO_MOTION")
.ok()
.map(|_| config::Value::new(None, config::ValueKind::Boolean(true)))
.unwrap_or_else(|| config::Value::new(None, config::ValueKind::Boolean(false))),
)?
.add_source(
Environment::with_prefix("atuin")
.prefix_separator("_")
.separator("__"),
))
}
pub fn new() -> Result<Self> {
let config_dir = atuin_common::utils::config_dir();
let data_dir = atuin_common::utils::data_dir();
create_dir_all(&config_dir)
.wrap_err_with(|| format!("could not create dir {config_dir:?}"))?;
create_dir_all(&data_dir).wrap_err_with(|| format!("could not create dir {data_dir:?}"))?;
let mut config_file = if let Ok(p) = std::env::var("ATUIN_CONFIG_DIR") {
PathBuf::from(p)
} else {
let mut config_file = PathBuf::new();
config_file.push(config_dir);
config_file
};
config_file.push("config.toml");
let mut config_builder = Self::builder()?;
config_builder = if config_file.exists() {
config_builder.add_source(ConfigFile::new(
config_file.to_str().unwrap(),
FileFormat::Toml,
))
} else {
let mut file = File::create(config_file).wrap_err("could not create config file")?;
file.write_all(EXAMPLE_CONFIG.as_bytes())
.wrap_err("could not write default config file")?;
config_builder
};
let config = config_builder.build()?;
let mut settings: Settings = config
.try_deserialize()
.map_err(|e| eyre!("failed to deserialize: {}", e))?;
// all paths should be expanded
settings.db_path = Self::expand_path(settings.db_path)?;
settings.record_store_path = Self::expand_path(settings.record_store_path)?;
settings.key_path = Self::expand_path(settings.key_path)?;
settings.session_path = Self::expand_path(settings.session_path)?;
settings.daemon.socket_path = Self::expand_path(settings.daemon.socket_path)?;
Ok(settings)
}
fn expand_path(path: String) -> Result<String> {
shellexpand::full(&path)
.map(|p| p.to_string())
.map_err(|e| eyre!("failed to expand path: {}", e))
}
pub fn example_config() -> &'static str {
EXAMPLE_CONFIG
}
pub fn paths_ok(&self) -> bool {
let paths = [
&self.db_path,
&self.record_store_path,
&self.key_path,
&self.session_path,
];
paths.iter().all(|p| !utils::broken_symlink(p))
}
}
impl Default for Settings {
fn default() -> Self {
// if this panics something is very wrong, as the default config
// does not build or deserialize into the settings struct
Self::builder()
.expect("Could not build default")
.build()
.expect("Could not build config")
.try_deserialize()
.expect("Could not deserialize config")
}
}
#[cfg(test)]
pub(crate) fn test_local_timeout() -> f64 {
std::env::var("ATUIN_TEST_LOCAL_TIMEOUT")
.ok()
.and_then(|x| x.parse().ok())
// this hardcoded value should be replaced by a simple way to get the
// default local_timeout of Settings if possible
.unwrap_or(2.0)
}
#[cfg(test)]
mod tests {
use std::str::FromStr;
use eyre::Result;
use super::Timezone;
#[test]
fn can_parse_offset_timezone_spec() -> Result<()> {
assert_eq!(Timezone::from_str("+02")?.0.as_hms(), (2, 0, 0));
assert_eq!(Timezone::from_str("-04")?.0.as_hms(), (-4, 0, 0));
assert_eq!(Timezone::from_str("+05:30")?.0.as_hms(), (5, 30, 0));
assert_eq!(Timezone::from_str("-09:30")?.0.as_hms(), (-9, -30, 0));
// single digit hours are allowed
assert_eq!(Timezone::from_str("+2")?.0.as_hms(), (2, 0, 0));
assert_eq!(Timezone::from_str("-4")?.0.as_hms(), (-4, 0, 0));
assert_eq!(Timezone::from_str("+5:30")?.0.as_hms(), (5, 30, 0));
assert_eq!(Timezone::from_str("-9:30")?.0.as_hms(), (-9, -30, 0));
// fully qualified form
assert_eq!(Timezone::from_str("+09:30:00")?.0.as_hms(), (9, 30, 0));
assert_eq!(Timezone::from_str("-09:30:00")?.0.as_hms(), (-9, -30, 0));
// these offsets don't really exist but are supported anyway
assert_eq!(Timezone::from_str("+0:5")?.0.as_hms(), (0, 5, 0));
assert_eq!(Timezone::from_str("-0:5")?.0.as_hms(), (0, -5, 0));
assert_eq!(Timezone::from_str("+01:23:45")?.0.as_hms(), (1, 23, 45));
assert_eq!(Timezone::from_str("-01:23:45")?.0.as_hms(), (-1, -23, -45));
// require a leading sign for clarity
assert!(Timezone::from_str("5").is_err());
assert!(Timezone::from_str("10:30").is_err());
Ok(())
}
#[test]
fn can_choose_workspace_filters_when_in_git_context() -> Result<()> {
let mut settings = super::Settings::default();
settings.search.filters = vec![
super::FilterMode::Workspace,
super::FilterMode::Host,
super::FilterMode::Directory,
super::FilterMode::Session,
super::FilterMode::Global,
];
settings.workspaces = true;
assert_eq!(
settings.default_filter_mode(true),
super::FilterMode::Workspace,
);
Ok(())
}
#[test]
fn wont_choose_workspace_filters_when_not_in_git_context() -> Result<()> {
let mut settings = super::Settings::default();
settings.search.filters = vec![
super::FilterMode::Workspace,
super::FilterMode::Host,
super::FilterMode::Directory,
super::FilterMode::Session,
super::FilterMode::Global,
];
settings.workspaces = true;
assert_eq!(settings.default_filter_mode(false), super::FilterMode::Host,);
Ok(())
}
#[test]
fn wont_choose_workspace_filters_when_workspaces_disabled() -> Result<()> {
let mut settings = super::Settings::default();
settings.search.filters = vec![
super::FilterMode::Workspace,
super::FilterMode::Host,
super::FilterMode::Directory,
super::FilterMode::Session,
super::FilterMode::Global,
];
settings.workspaces = false;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | true |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/register.rs | crates/atuin-client/src/register.rs | use eyre::Result;
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use crate::{api_client, settings::Settings};
pub async fn register(
settings: &Settings,
username: String,
email: String,
password: String,
) -> Result<String> {
let session =
api_client::register(settings.sync_address.as_str(), &username, &email, &password).await?;
let path = settings.session_path.as_str();
let mut file = File::create(path).await?;
file.write_all(session.session.as_bytes()).await?;
let _key = crate::encryption::load_key(settings)?;
Ok(session.session)
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/lib.rs | crates/atuin-client/src/lib.rs | #![deny(unsafe_code)]
#[macro_use]
extern crate log;
#[cfg(feature = "sync")]
pub mod api_client;
#[cfg(feature = "sync")]
pub mod sync;
pub mod database;
pub mod encryption;
pub mod history;
pub mod import;
pub mod login;
pub mod logout;
pub mod ordering;
pub mod plugin;
pub mod record;
pub mod register;
pub mod secrets;
pub mod settings;
pub mod theme;
mod utils;
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/theme.rs | crates/atuin-client/src/theme.rs | use config::{Config, File as ConfigFile, FileFormat};
use lazy_static::lazy_static;
use log;
use palette::named;
use serde::{Deserialize, Serialize};
use serde_json;
use std::collections::HashMap;
use std::error;
use std::io::{Error, ErrorKind};
use std::path::PathBuf;
use strum_macros;
static DEFAULT_MAX_DEPTH: u8 = 10;
// Collection of settable "meanings" that can have colors set.
// NOTE: You can add a new meaning here without breaking backwards compatibility but please:
// - update the atuin/docs repository, which has a list of available meanings
// - add a fallback in the MEANING_FALLBACKS below, so that themes which do not have it
// get a sensible fallback (see Title as an example)
#[derive(
Serialize, Deserialize, Copy, Clone, Hash, Debug, Eq, PartialEq, strum_macros::Display,
)]
#[strum(serialize_all = "camel_case")]
pub enum Meaning {
AlertInfo,
AlertWarn,
AlertError,
Annotation,
Base,
Guidance,
Important,
Title,
Muted,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ThemeConfig {
// Definition of the theme
pub theme: ThemeDefinitionConfigBlock,
// Colors
pub colors: HashMap<Meaning, String>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ThemeDefinitionConfigBlock {
/// Name of theme ("default" for base)
pub name: String,
/// Whether any theme should be treated as a parent _if available_
pub parent: Option<String>,
}
use crossterm::style::{Attribute, Attributes, Color, ContentStyle};
// For now, a theme is loaded as a mapping of meanings to colors, but it may be desirable to
// expand that in the future to general styles, so we populate a Meaning->ContentStyle hashmap.
pub struct Theme {
pub name: String,
pub parent: Option<String>,
pub styles: HashMap<Meaning, ContentStyle>,
}
// Themes have a number of convenience functions for the most commonly used meanings.
// The general purpose `as_style` routine gives back a style, but for ease-of-use and to keep
// theme-related boilerplate minimal, the convenience functions give a color.
impl Theme {
// This is the base "default" color, for general text
pub fn get_base(&self) -> ContentStyle {
self.styles[&Meaning::Base]
}
pub fn get_info(&self) -> ContentStyle {
self.get_alert(log::Level::Info)
}
pub fn get_warning(&self) -> ContentStyle {
self.get_alert(log::Level::Warn)
}
pub fn get_error(&self) -> ContentStyle {
self.get_alert(log::Level::Error)
}
// The alert meanings may be chosen by the Level enum, rather than the methods above
// or the full Meaning enum, to simplify programmatic selection of a log-level.
pub fn get_alert(&self, severity: log::Level) -> ContentStyle {
self.styles[ALERT_TYPES.get(&severity).unwrap()]
}
pub fn new(
name: String,
parent: Option<String>,
styles: HashMap<Meaning, ContentStyle>,
) -> Theme {
Theme {
name,
parent,
styles,
}
}
pub fn closest_meaning<'a>(&self, meaning: &'a Meaning) -> &'a Meaning {
if self.styles.contains_key(meaning) {
meaning
} else if MEANING_FALLBACKS.contains_key(meaning) {
self.closest_meaning(&MEANING_FALLBACKS[meaning])
} else {
&Meaning::Base
}
}
// General access - if you have a meaning, this will give you a (crossterm) style
pub fn as_style(&self, meaning: Meaning) -> ContentStyle {
self.styles[self.closest_meaning(&meaning)]
}
// Turns a map of meanings to colornames into a theme
// If theme-debug is on, then we will print any colornames that we cannot load,
// but we do not have this on in general, as it could print unfiltered text to the terminal
// from a theme TOML file. However, it will always return a theme, falling back to
// defaults on error, so that a TOML file does not break loading
pub fn from_foreground_colors(
name: String,
parent: Option<&Theme>,
foreground_colors: HashMap<Meaning, String>,
debug: bool,
) -> Theme {
let styles: HashMap<Meaning, ContentStyle> = foreground_colors
.iter()
.map(|(name, color)| {
(
*name,
StyleFactory::from_fg_string(color).unwrap_or_else(|err| {
if debug {
log::warn!("Tried to load string as a color unsuccessfully: ({name}={color}) {err}");
}
ContentStyle::default()
}),
)
})
.collect();
Theme::from_map(name, parent, &styles)
}
// Boil down a meaning-color hashmap into a theme, by taking the defaults
// for any unknown colors
fn from_map(
name: String,
parent: Option<&Theme>,
overrides: &HashMap<Meaning, ContentStyle>,
) -> Theme {
let styles = match parent {
Some(theme) => Box::new(theme.styles.clone()),
None => Box::new(DEFAULT_THEME.styles.clone()),
}
.iter()
.map(|(name, color)| match overrides.get(name) {
Some(value) => (*name, *value),
None => (*name, *color),
})
.collect();
Theme::new(name, parent.map(|p| p.name.clone()), styles)
}
}
// Use palette to get a color from a string name, if possible
fn from_string(name: &str) -> Result<Color, String> {
if name.is_empty() {
return Err("Empty string".into());
}
let first_char = name.chars().next().unwrap();
match first_char {
'#' => {
let hexcode = &name[1..];
let vec: Vec<u8> = hexcode
.chars()
.collect::<Vec<char>>()
.chunks(2)
.map(|pair| u8::from_str_radix(pair.iter().collect::<String>().as_str(), 16))
.filter_map(|n| n.ok())
.collect();
if vec.len() != 3 {
return Err("Could not parse 3 hex values from string".into());
}
Ok(Color::Rgb {
r: vec[0],
g: vec[1],
b: vec[2],
})
}
'@' => {
// For full flexibility, we need to use serde_json, given
// crossterm's approach.
serde_json::from_str::<Color>(format!("\"{}\"", &name[1..]).as_str())
.map_err(|_| format!("Could not convert color name {name} to Crossterm color"))
}
_ => {
let srgb = named::from_str(name).ok_or("No such color in palette")?;
Ok(Color::Rgb {
r: srgb.red,
g: srgb.green,
b: srgb.blue,
})
}
}
}
pub struct StyleFactory {}
impl StyleFactory {
fn from_fg_string(name: &str) -> Result<ContentStyle, String> {
match from_string(name) {
Ok(color) => Ok(Self::from_fg_color(color)),
Err(err) => Err(err),
}
}
// For succinctness, if we are confident that the name will be known,
// this routine is available to keep the code readable
fn known_fg_string(name: &str) -> ContentStyle {
Self::from_fg_string(name).unwrap()
}
fn from_fg_color(color: Color) -> ContentStyle {
ContentStyle {
foreground_color: Some(color),
..ContentStyle::default()
}
}
fn from_fg_color_and_attributes(color: Color, attributes: Attributes) -> ContentStyle {
ContentStyle {
foreground_color: Some(color),
attributes,
..ContentStyle::default()
}
}
}
// Built-in themes. Rather than having extra files added before any theming
// is available, this gives a couple of basic options, demonstrating the use
// of themes: autumn and marine
lazy_static! {
static ref ALERT_TYPES: HashMap<log::Level, Meaning> = {
HashMap::from([
(log::Level::Info, Meaning::AlertInfo),
(log::Level::Warn, Meaning::AlertWarn),
(log::Level::Error, Meaning::AlertError),
])
};
static ref MEANING_FALLBACKS: HashMap<Meaning, Meaning> = {
HashMap::from([
(Meaning::Guidance, Meaning::AlertInfo),
(Meaning::Annotation, Meaning::AlertInfo),
(Meaning::Title, Meaning::Important),
])
};
static ref DEFAULT_THEME: Theme = {
Theme::new(
"default".to_string(),
None,
HashMap::from([
(
Meaning::AlertError,
StyleFactory::from_fg_color(Color::DarkRed),
),
(
Meaning::AlertWarn,
StyleFactory::from_fg_color(Color::DarkYellow),
),
(
Meaning::AlertInfo,
StyleFactory::from_fg_color(Color::DarkGreen),
),
(
Meaning::Annotation,
StyleFactory::from_fg_color(Color::DarkGrey),
),
(
Meaning::Guidance,
StyleFactory::from_fg_color(Color::DarkBlue),
),
(
Meaning::Important,
StyleFactory::from_fg_color_and_attributes(
Color::White,
Attributes::from(Attribute::Bold),
),
),
(Meaning::Muted, StyleFactory::from_fg_color(Color::Grey)),
(Meaning::Base, ContentStyle::default()),
]),
)
};
static ref BUILTIN_THEMES: HashMap<&'static str, Theme> = {
HashMap::from([
("default", HashMap::new()),
(
"(none)",
HashMap::from([
(Meaning::AlertError, ContentStyle::default()),
(Meaning::AlertWarn, ContentStyle::default()),
(Meaning::AlertInfo, ContentStyle::default()),
(Meaning::Annotation, ContentStyle::default()),
(Meaning::Guidance, ContentStyle::default()),
(Meaning::Important, ContentStyle::default()),
(Meaning::Muted, ContentStyle::default()),
(Meaning::Base, ContentStyle::default()),
]),
),
(
"autumn",
HashMap::from([
(
Meaning::AlertError,
StyleFactory::known_fg_string("saddlebrown"),
),
(
Meaning::AlertWarn,
StyleFactory::known_fg_string("darkorange"),
),
(Meaning::AlertInfo, StyleFactory::known_fg_string("gold")),
(
Meaning::Annotation,
StyleFactory::from_fg_color(Color::DarkGrey),
),
(Meaning::Guidance, StyleFactory::known_fg_string("brown")),
]),
),
(
"marine",
HashMap::from([
(
Meaning::AlertError,
StyleFactory::known_fg_string("yellowgreen"),
),
(Meaning::AlertWarn, StyleFactory::known_fg_string("cyan")),
(
Meaning::AlertInfo,
StyleFactory::known_fg_string("turquoise"),
),
(
Meaning::Annotation,
StyleFactory::known_fg_string("steelblue"),
),
(
Meaning::Base,
StyleFactory::known_fg_string("lightsteelblue"),
),
(Meaning::Guidance, StyleFactory::known_fg_string("teal")),
]),
),
])
.iter()
.map(|(name, theme)| (*name, Theme::from_map(name.to_string(), None, theme)))
.collect()
};
}
// To avoid themes being repeatedly loaded, we store them in a theme manager
pub struct ThemeManager {
loaded_themes: HashMap<String, Theme>,
debug: bool,
override_theme_dir: Option<String>,
}
// Theme-loading logic
impl ThemeManager {
pub fn new(debug: Option<bool>, theme_dir: Option<String>) -> Self {
Self {
loaded_themes: HashMap::new(),
debug: debug.unwrap_or(false),
override_theme_dir: match theme_dir {
Some(theme_dir) => Some(theme_dir),
None => std::env::var("ATUIN_THEME_DIR").ok(),
},
}
}
// Try to load a theme from a `{name}.toml` file in the theme directory. If an override is set
// for the theme dir (via ATUIN_THEME_DIR env) we should load the theme from there
pub fn load_theme_from_file(
&mut self,
name: &str,
max_depth: u8,
) -> Result<&Theme, Box<dyn error::Error>> {
let mut theme_file = if let Some(p) = &self.override_theme_dir {
if p.is_empty() {
return Err(Box::new(Error::new(
ErrorKind::NotFound,
"Empty theme directory override and could not find theme elsewhere",
)));
}
PathBuf::from(p)
} else {
let config_dir = atuin_common::utils::config_dir();
let mut theme_file = if let Ok(p) = std::env::var("ATUIN_CONFIG_DIR") {
PathBuf::from(p)
} else {
let mut theme_file = PathBuf::new();
theme_file.push(config_dir);
theme_file
};
theme_file.push("themes");
theme_file
};
let theme_toml = format!["{name}.toml"];
theme_file.push(theme_toml);
let mut config_builder = Config::builder();
config_builder = config_builder.add_source(ConfigFile::new(
theme_file.to_str().unwrap(),
FileFormat::Toml,
));
let config = config_builder.build()?;
self.load_theme_from_config(name, config, max_depth)
}
pub fn load_theme_from_config(
&mut self,
name: &str,
config: Config,
max_depth: u8,
) -> Result<&Theme, Box<dyn error::Error>> {
let debug = self.debug;
let theme_config: ThemeConfig = match config.try_deserialize() {
Ok(tc) => tc,
Err(e) => {
return Err(Box::new(Error::new(
ErrorKind::InvalidInput,
format!(
"Failed to deserialize theme: {}",
if debug {
e.to_string()
} else {
"set theme debug on for more info".to_string()
}
),
)));
}
};
let colors: HashMap<Meaning, String> = theme_config.colors;
let parent: Option<&Theme> = match theme_config.theme.parent {
Some(parent_name) => {
if max_depth == 0 {
return Err(Box::new(Error::new(
ErrorKind::InvalidInput,
"Parent requested but we hit the recursion limit",
)));
}
Some(self.load_theme(parent_name.as_str(), Some(max_depth - 1)))
}
None => Some(self.load_theme("default", Some(max_depth - 1))),
};
if debug && name != theme_config.theme.name {
log::warn!(
"Your theme config name is not the name of your loaded theme {} != {}",
name,
theme_config.theme.name
);
}
let theme = Theme::from_foreground_colors(theme_config.theme.name, parent, colors, debug);
let name = name.to_string();
self.loaded_themes.insert(name.clone(), theme);
let theme = self.loaded_themes.get(&name).unwrap();
Ok(theme)
}
// Check if the requested theme is loaded and, if not, then attempt to get it
// from the builtins or, if not there, from file
pub fn load_theme(&mut self, name: &str, max_depth: Option<u8>) -> &Theme {
if self.loaded_themes.contains_key(name) {
return self.loaded_themes.get(name).unwrap();
}
let built_ins = &BUILTIN_THEMES;
match built_ins.get(name) {
Some(theme) => theme,
None => match self.load_theme_from_file(name, max_depth.unwrap_or(DEFAULT_MAX_DEPTH)) {
Ok(theme) => theme,
Err(err) => {
log::warn!("Could not load theme {name}: {err}");
built_ins.get("(none)").unwrap()
}
},
}
}
}
#[cfg(test)]
mod theme_tests {
use super::*;
#[test]
fn test_can_load_builtin_theme() {
let mut manager = ThemeManager::new(Some(false), Some("".to_string()));
let theme = manager.load_theme("autumn", None);
assert_eq!(
theme.as_style(Meaning::Guidance).foreground_color,
from_string("brown").ok()
);
}
#[test]
fn test_can_create_theme() {
let mut manager = ThemeManager::new(Some(false), Some("".to_string()));
let mytheme = Theme::new(
"mytheme".to_string(),
None,
HashMap::from([(
Meaning::AlertError,
StyleFactory::known_fg_string("yellowgreen"),
)]),
);
manager.loaded_themes.insert("mytheme".to_string(), mytheme);
let theme = manager.load_theme("mytheme", None);
assert_eq!(
theme.as_style(Meaning::AlertError).foreground_color,
from_string("yellowgreen").ok()
);
}
#[test]
fn test_can_fallback_when_meaning_missing() {
let mut manager = ThemeManager::new(Some(false), Some("".to_string()));
// We use title as an example of a meaning that is not defined
// even in the base theme.
assert!(!DEFAULT_THEME.styles.contains_key(&Meaning::Title));
let config = Config::builder()
.add_source(ConfigFile::from_str(
"
[theme]
name = \"title_theme\"
[colors]
Guidance = \"white\"
AlertInfo = \"zomp\"
",
FileFormat::Toml,
))
.build()
.unwrap();
let theme = manager
.load_theme_from_config("config_theme", config, 1)
.unwrap();
// Correctly picks overridden color.
assert_eq!(
theme.as_style(Meaning::Guidance).foreground_color,
from_string("white").ok()
);
// Does not fall back to any color.
assert_eq!(theme.as_style(Meaning::AlertInfo).foreground_color, None);
// Even for the base.
assert_eq!(theme.as_style(Meaning::Base).foreground_color, None);
// Falls back to red as meaning missing from theme, so picks base default.
assert_eq!(
theme.as_style(Meaning::AlertError).foreground_color,
Some(Color::DarkRed)
);
// Falls back to Important as Title not available.
assert_eq!(
theme.as_style(Meaning::Title).foreground_color,
theme.as_style(Meaning::Important).foreground_color,
);
let title_config = Config::builder()
.add_source(ConfigFile::from_str(
"
[theme]
name = \"title_theme\"
[colors]
Title = \"white\"
AlertInfo = \"zomp\"
",
FileFormat::Toml,
))
.build()
.unwrap();
let title_theme = manager
.load_theme_from_config("title_theme", title_config, 1)
.unwrap();
assert_eq!(
title_theme.as_style(Meaning::Title).foreground_color,
Some(Color::White)
);
}
#[test]
fn test_no_fallbacks_are_circular() {
let mytheme = Theme::new("mytheme".to_string(), None, HashMap::from([]));
MEANING_FALLBACKS
.iter()
.for_each(|pair| assert_eq!(mytheme.closest_meaning(pair.0), &Meaning::Base))
}
#[test]
fn test_can_get_colors_via_convenience_functions() {
let mut manager = ThemeManager::new(Some(true), Some("".to_string()));
let theme = manager.load_theme("default", None);
assert_eq!(theme.get_error().foreground_color.unwrap(), Color::DarkRed);
assert_eq!(
theme.get_warning().foreground_color.unwrap(),
Color::DarkYellow
);
assert_eq!(theme.get_info().foreground_color.unwrap(), Color::DarkGreen);
assert_eq!(theme.get_base().foreground_color, None);
assert_eq!(
theme.get_alert(log::Level::Error).foreground_color.unwrap(),
Color::DarkRed
)
}
#[test]
fn test_can_use_parent_theme_for_fallbacks() {
testing_logger::setup();
let mut manager = ThemeManager::new(Some(false), Some("".to_string()));
// First, we introduce a base theme
let solarized = Config::builder()
.add_source(ConfigFile::from_str(
"
[theme]
name = \"solarized\"
[colors]
Guidance = \"white\"
AlertInfo = \"pink\"
",
FileFormat::Toml,
))
.build()
.unwrap();
let solarized_theme = manager
.load_theme_from_config("solarized", solarized, 1)
.unwrap();
assert_eq!(
solarized_theme
.as_style(Meaning::AlertInfo)
.foreground_color,
from_string("pink").ok()
);
// Then we introduce a derived theme
let unsolarized = Config::builder()
.add_source(ConfigFile::from_str(
"
[theme]
name = \"unsolarized\"
parent = \"solarized\"
[colors]
AlertInfo = \"red\"
",
FileFormat::Toml,
))
.build()
.unwrap();
let unsolarized_theme = manager
.load_theme_from_config("unsolarized", unsolarized, 1)
.unwrap();
// It will take its own values
assert_eq!(
unsolarized_theme
.as_style(Meaning::AlertInfo)
.foreground_color,
from_string("red").ok()
);
// ...or fall back to the parent
assert_eq!(
unsolarized_theme
.as_style(Meaning::Guidance)
.foreground_color,
from_string("white").ok()
);
testing_logger::validate(|captured_logs| assert_eq!(captured_logs.len(), 0));
// If the parent is not found, we end up with the no theme colors or styling
// as this is considered a (soft) error state.
let nunsolarized = Config::builder()
.add_source(ConfigFile::from_str(
"
[theme]
name = \"nunsolarized\"
parent = \"nonsolarized\"
[colors]
AlertInfo = \"red\"
",
FileFormat::Toml,
))
.build()
.unwrap();
let nunsolarized_theme = manager
.load_theme_from_config("nunsolarized", nunsolarized, 1)
.unwrap();
assert_eq!(
nunsolarized_theme
.as_style(Meaning::Guidance)
.foreground_color,
None
);
testing_logger::validate(|captured_logs| {
assert_eq!(captured_logs.len(), 1);
assert_eq!(
captured_logs[0].body,
"Could not load theme nonsolarized: Empty theme directory override and could not find theme elsewhere"
);
assert_eq!(captured_logs[0].level, log::Level::Warn)
});
}
#[test]
fn test_can_debug_theme() {
testing_logger::setup();
[true, false].iter().for_each(|debug| {
let mut manager = ThemeManager::new(Some(*debug), Some("".to_string()));
let config = Config::builder()
.add_source(ConfigFile::from_str(
"
[theme]
name = \"mytheme\"
[colors]
Guidance = \"white\"
AlertInfo = \"xinetic\"
",
FileFormat::Toml,
))
.build()
.unwrap();
manager
.load_theme_from_config("config_theme", config, 1)
.unwrap();
testing_logger::validate(|captured_logs| {
if *debug {
assert_eq!(captured_logs.len(), 2);
assert_eq!(
captured_logs[0].body,
"Your theme config name is not the name of your loaded theme config_theme != mytheme"
);
assert_eq!(captured_logs[0].level, log::Level::Warn);
assert_eq!(
captured_logs[1].body,
"Tried to load string as a color unsuccessfully: (AlertInfo=xinetic) No such color in palette"
);
assert_eq!(captured_logs[1].level, log::Level::Warn)
} else {
assert_eq!(captured_logs.len(), 0)
}
})
})
}
#[test]
fn test_can_parse_color_strings_correctly() {
assert_eq!(
from_string("brown").unwrap(),
Color::Rgb {
r: 165,
g: 42,
b: 42
}
);
assert_eq!(from_string(""), Err("Empty string".into()));
["manatee", "caput mortuum", "123456"]
.iter()
.for_each(|inp| {
assert_eq!(from_string(inp), Err("No such color in palette".into()));
});
assert_eq!(
from_string("#ff1122").unwrap(),
Color::Rgb {
r: 255,
g: 17,
b: 34
}
);
["#1122", "#ffaa112", "#brown"].iter().for_each(|inp| {
assert_eq!(
from_string(inp),
Err("Could not parse 3 hex values from string".into())
);
});
assert_eq!(from_string("@dark_grey").unwrap(), Color::DarkGrey);
assert_eq!(
from_string("@rgb_(255,255,255)").unwrap(),
Color::Rgb {
r: 255,
g: 255,
b: 255
}
);
assert_eq!(from_string("@ansi_(255)").unwrap(), Color::AnsiValue(255));
["@", "@DarkGray", "@Dark 4ay", "@ansi(256)"]
.iter()
.for_each(|inp| {
assert_eq!(
from_string(inp),
Err(format!(
"Could not convert color name {inp} to Crossterm color"
))
);
});
}
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/ordering.rs | crates/atuin-client/src/ordering.rs | use minspan::minspan;
use super::{history::History, settings::SearchMode};
pub fn reorder_fuzzy(mode: SearchMode, query: &str, res: Vec<History>) -> Vec<History> {
match mode {
SearchMode::Fuzzy => reorder(query, |x| &x.command, res),
_ => res,
}
}
fn reorder<F, A>(query: &str, f: F, res: Vec<A>) -> Vec<A>
where
F: Fn(&A) -> &String,
A: Clone,
{
let mut r = res.clone();
let qvec = &query.chars().collect();
r.sort_by_cached_key(|h| {
// TODO for fzf search we should sum up scores for each matched term
let (from, to) = match minspan::span(qvec, &(f(h).chars().collect())) {
Some(x) => x,
// this is a little unfortunate: when we are asked to match a query that is found nowhere,
// we don't want to return a None, as the comparison behaviour would put the worst matches
// at the front. therefore, we'll return a set of indices that are one larger than the longest
// possible legitimate match. This is meaningless except as a comparison.
None => (0, res.len()),
};
1 + to - from
});
r
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/sync.rs | crates/atuin-client/src/sync.rs | use std::collections::HashSet;
use std::iter::FromIterator;
use eyre::Result;
use atuin_common::api::AddHistoryRequest;
use crypto_secretbox::Key;
use time::OffsetDateTime;
use crate::{
api_client,
database::Database,
encryption::{decrypt, encrypt, load_key},
settings::Settings,
};
/// Return the lowercase hex encoding of the SHA-256 digest of `string`.
pub fn hash_str(string: &str) -> String {
    use sha2::{Digest, Sha256};
    let digest = Sha256::digest(string.as_bytes());
    hex::encode(digest)
}
// Currently sync is kinda naive, and basically just pages backwards through
// history. This means newly added stuff shows up properly! We also just use
// the total count in each database to indicate whether a sync is needed.
// I think this could be massively improved! If we had a way of easily
// indicating count per time period (hour, day, week, year, etc) then we can
// easily pinpoint where we are missing data and what needs downloading. Start
// with year, then find the week, then the day, then the hour, then download it
// all! The current naive approach will do for now.
// Check if remote has things we don't, and if so, download them.
//
// Pages backwards through remote history until the local count catches up
// with the remote count. Entries the remote has already marked deleted are
// tombstoned (command blanked, `deleted_at` set) before being saved, so we
// never persist a live copy of something that was synced then deleted.
//
// Returns (num downloaded, total local).
async fn sync_download(
    key: &Key,
    force: bool,
    client: &api_client::Client<'_>,
    db: &impl Database,
) -> Result<(i64, i64)> {
    debug!("starting sync download");
    let remote_status = client.status().await?;
    let remote_count = remote_status.count;
    // useful to ensure we don't even save something that hasn't yet been synced + deleted
    let remote_deleted =
        HashSet::<&str>::from_iter(remote_status.deleted.iter().map(String::as_str));
    let initial_local = db.history_count(true).await?;
    let mut local_count = initial_local;
    // A forced sync ignores the stored watermark and restarts from the epoch.
    let mut last_sync = if force {
        OffsetDateTime::UNIX_EPOCH
    } else {
        Settings::last_sync()?
    };
    let mut last_timestamp = OffsetDateTime::UNIX_EPOCH;
    let host = if force { Some(String::from("")) } else { None };
    while remote_count > local_count {
        let page = client
            .get_history(last_sync, last_timestamp, host.clone())
            .await?;
        let history: Vec<_> = page
            .history
            .iter()
            // TODO: handle deletion earlier in this chain
            // Each payload is a JSON-serialized encrypted record; the old
            // expect message wrongly claimed base64.
            .map(|h| serde_json::from_str(h).expect("invalid JSON in history payload"))
            .map(|h| decrypt(h, key).expect("failed to decrypt history! check your key"))
            .map(|mut h| {
                if remote_deleted.contains(h.id.0.as_str()) {
                    h.deleted_at = Some(time::OffsetDateTime::now_utc());
                    h.command = String::from("");
                }
                h
            })
            .collect();
        db.save_bulk(&history).await?;
        local_count = db.history_count(true).await?;
        let remote_page_size = std::cmp::max(remote_status.page_size, 0) as usize;
        // A short page means we've reached the end of remote history.
        if history.len() < remote_page_size {
            break;
        }
        let page_last = history
            .last()
            .expect("could not get last element of page")
            .timestamp;
        // in the case of a small sync frequency, it's possible for history to
        // be "lost" between syncs. In this case we need to rewind the sync
        // timestamps
        if page_last == last_timestamp {
            last_timestamp = OffsetDateTime::UNIX_EPOCH;
            last_sync -= time::Duration::hours(1);
        } else {
            last_timestamp = page_last;
        }
    }
    // Apply remote deletions locally; entries we never had are just logged.
    for i in remote_status.deleted {
        // we will update the stored history to have this data
        // pretty much everything can be nullified
        match db.load(i.as_str()).await? {
            Some(h) => {
                db.delete(h).await?;
            }
            _ => {
                info!(
                    "could not delete history with id {}, not found locally",
                    i.as_str()
                );
            }
        }
    }
    Ok((local_count - initial_local, local_count))
}
// Check if we have things remote doesn't, and if so, upload them.
//
// Walks local history backwards from "now" in pages of
// `remote_status.page_size`, encrypting each entry client-side and posting it
// until the remote count catches up with the local count. Afterwards, local
// deletions the remote does not yet know about are propagated.
async fn sync_upload(
    key: &Key,
    _force: bool,
    client: &api_client::Client<'_>,
    db: &impl Database,
) -> Result<()> {
    debug!("starting sync upload");
    let remote_status = client.status().await?;
    // IDs the remote already considers deleted — skip re-sending those below.
    let remote_deleted: HashSet<String> = HashSet::from_iter(remote_status.deleted.clone());
    let initial_remote_count = client.count().await?;
    let mut remote_count = initial_remote_count;
    let local_count = db.history_count(true).await?;
    debug!("remote has {remote_count}, we have {local_count}");
    // first just try the most recent set
    let mut cursor = OffsetDateTime::now_utc();
    while local_count > remote_count {
        // Page of local entries before the cursor; an empty page means we've
        // exhausted local history.
        let last = db.before(cursor, remote_status.page_size).await?;
        let mut buffer = Vec::new();
        if last.is_empty() {
            break;
        }
        for i in last {
            // The server only ever sees ciphertext plus a hashed hostname.
            let data = encrypt(&i, key)?;
            let data = serde_json::to_string(&data)?;
            let add_hist = AddHistoryRequest {
                id: i.id.to_string(),
                timestamp: i.timestamp,
                data,
                hostname: hash_str(&i.hostname),
            };
            buffer.push(add_hist);
        }
        // anything left over outside of the 100 block size
        client.post_history(&buffer).await?;
        // `buffer` is non-empty here: we broke out above when `last` was empty.
        // The cursor advances to the page's last timestamp (presumably the
        // oldest entry of the page — TODO confirm `db.before` ordering).
        cursor = buffer.last().unwrap().timestamp;
        remote_count = client.count().await?;
        debug!("upload cursor: {cursor:?}");
    }
    // Propagate local deletions the remote hasn't recorded yet.
    let deleted = db.deleted().await?;
    for i in deleted {
        if remote_deleted.contains(&i.id.to_string()) {
            continue;
        }
        info!("deleting {} on remote", i.id);
        client.delete_history(i).await?;
    }
    Ok(())
}
/// Run a full sync cycle against the configured server: upload local history
/// first, then download anything the remote has that we don't.
pub async fn sync(settings: &Settings, force: bool, db: &impl Database) -> Result<()> {
    let api = api_client::Client::new(
        &settings.sync_address,
        settings.session_token()?.as_str(),
        settings.network_connect_timeout,
        settings.network_timeout,
    )?;
    // Record the sync time before starting, matching existing behaviour.
    Settings::save_sync_time()?;
    // encryption key
    let key = load_key(settings)?;
    sync_upload(&key, force, &api, db).await?;
    let (downloaded, _total_local) = sync_download(&key, force, &api, db).await?;
    debug!("sync downloaded {downloaded}");
    Ok(())
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
atuinsh/atuin | https://github.com/atuinsh/atuin/blob/8a010fed33ce19a9ddc589196c73c07ba7ba88e7/crates/atuin-client/src/login.rs | crates/atuin-client/src/login.rs | use std::path::PathBuf;
use atuin_common::api::LoginRequest;
use eyre::{Context, Result, bail};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use crate::{
api_client,
encryption::{Key, decode_key, encode_key, load_key},
record::{sqlite_store::SqliteStore, store::Store},
settings::Settings,
};
/// Log in to the sync server and persist both the session token and the
/// encryption key.
///
/// `key` may be given either as a BIP39 mnemonic or as the raw base64
/// encoding. If a key file already exists locally and the provided key
/// differs, the local store is re-encrypted with the new key before it is
/// written. Returns the new session token.
pub async fn login(
    settings: &Settings,
    store: &SqliteStore,
    username: String,
    password: String,
    key: String,
) -> Result<String> {
    // try parse the key as a mnemonic...
    let key = match bip39::Mnemonic::from_phrase(&key, bip39::Language::English) {
        Ok(mnemonic) => encode_key(Key::from_slice(mnemonic.entropy()))?,
        Err(err) => {
            match err.downcast_ref::<bip39::ErrorKind>() {
                Some(err) => {
                    match err {
                        // assume they copied in the base64 key
                        bip39::ErrorKind::InvalidWord => key,
                        bip39::ErrorKind::InvalidChecksum => {
                            bail!("key mnemonic was not valid")
                        }
                        bip39::ErrorKind::InvalidKeysize(_)
                        | bip39::ErrorKind::InvalidWordLength(_)
                        | bip39::ErrorKind::InvalidEntropyLength(_, _) => {
                            bail!("key was not the correct length")
                        }
                    }
                }
                _ => {
                    // unknown error. assume they copied the base64 key
                    key
                }
            }
        }
    };
    let key_path = settings.key_path.as_str();
    let key_path = PathBuf::from(key_path);
    if !key_path.exists() {
        // No key stored yet: validate the provided key, then write it out.
        if decode_key(key.clone()).is_err() {
            bail!("the specified key was invalid");
        }
        let mut file = File::create(key_path).await?;
        file.write_all(key.as_bytes()).await?;
    } else {
        // we now know that the user has logged in specifying a key, AND that the key path
        // exists
        // 1. check if the saved key and the provided key match. if so, nothing to do.
        // 2. if not, re-encrypt the local history and overwrite the key
        let current_key: [u8; 32] = load_key(settings)?.into();
        let encoded = key.clone(); // gonna want to save it in a bit
        let new_key: [u8; 32] = decode_key(key)
            .context("could not decode provided key - is not valid base64")?
            .into();
        if new_key != current_key {
            println!("\nRe-encrypting local store with new key");
            // Restored `&current_key` here: the source contained a
            // mis-encoded token (`¤t_key`) that would not compile.
            store.re_encrypt(&current_key, &new_key).await?;
            println!("Writing new key");
            let mut file = File::create(key_path).await?;
            file.write_all(encoded.as_bytes()).await?;
        }
    }
    let session = api_client::login(
        settings.sync_address.as_str(),
        LoginRequest { username, password },
    )
    .await?;
    let session_path = settings.session_path.as_str();
    let mut file = File::create(session_path).await?;
    file.write_all(session.session.as_bytes()).await?;
    Ok(session.session)
}
| rust | MIT | 8a010fed33ce19a9ddc589196c73c07ba7ba88e7 | 2026-01-04T15:36:14.139439Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.