repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/update/update_flags.rs | tests/update/update_flags.rs | #![cfg(test)]
#![cfg(not(target_os = "windows"))]
//! Tests for the `-u` and `--update` commandline flags.
//!
//! These tests verify that the update flags are parsed correctly and simulate
//! a long-running update scenario where sudo may timeout and require password re-entry.
use std::fs;
use std::fs::File;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use std::process::{Command, Output, Stdio};
use std::time::{Duration, Instant};
/// Mock pacman script content that simulates a long-running update.
///
/// Details:
/// - Emits pacman-style progress output (database sync, dependency
///   resolution, per-package upgrade lines, post-transaction hooks).
/// - The interleaved `sleep` calls total roughly 28 seconds; the timing
///   assertions elsewhere in this file (`duration >= 20s`) depend on this.
/// - Always exits 0 so callers can assert a successful status.
const MOCK_PACMAN_SCRIPT: &str = r#"#!/bin/bash
# Simulate a long-running pacman update
echo ":: Synchronizing package databases..."
sleep 2
echo ":: Starting full system upgrade..."
sleep 2
echo "resolving dependencies..."
sleep 1
echo "looking for conflicting packages..."
sleep 1
echo ""
echo "Packages (5) to upgrade:"
echo " core/systemd 250.4-1 -> 251.1-1"
echo " core/linux 6.8.0-1 -> 6.9.0-1"
echo " extra/firefox 120.0-1 -> 121.0-1"
echo " extra/vim 9.0.2000-1 -> 9.1.0000-1"
echo " aur/custom-pkg 1.0.0-1 -> 2.0.0-1"
echo ""
sleep 2
echo ":: Proceeding with installation..."
sleep 2
echo "(5/5) checking package integrity..."
sleep 1
echo "(5/5) loading package files..."
sleep 1
echo "(5/5) checking for file conflicts..."
sleep 1
echo "(5/5) checking available disk space..."
sleep 1
echo "(5/5) upgrading systemd..."
sleep 2
echo "(5/5) upgrading linux..."
sleep 2
echo "(5/5) upgrading firefox..."
sleep 2
echo "(5/5) upgrading vim..."
sleep 2
echo "(5/5) upgrading custom-pkg..."
sleep 2
echo ""
echo "Total download size: 500.00 MiB"
echo "Total installed size: 1200.00 MiB"
echo "Net upgrade size: 700.00 MiB"
echo ""
echo ":: Running post-transaction hooks..."
sleep 1
echo "(1/3) Updating systemd service files..."
sleep 1
echo "(2/3) Reloading system manager configuration..."
sleep 1
echo "(3/3) Updating font cache..."
sleep 1
echo ""
echo "System upgrade completed successfully."
exit 0
"#;
/// What: Create a uniquely-named temporary directory for test artifacts.
///
/// Inputs:
/// - `prefix`: Prefix embedded in the directory name.
///
/// Output:
/// - Returns the path to the freshly created directory.
///
/// Details:
/// - The directory name combines the prefix, the current process ID, and a
///   nanosecond timestamp so concurrent test runs cannot collide.
fn create_test_temp_dir(prefix: &str) -> PathBuf {
    let nanos = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .expect("System time is before UNIX epoch")
        .as_nanos();
    let pid = std::process::id();
    let dir = std::env::temp_dir().join(format!("pacsea_test_{prefix}_{pid}_{nanos}"));
    fs::create_dir_all(&dir).expect("Failed to create temp directory");
    dir
}
/// What: Make a script file executable and ensure it's synced to disk.
///
/// Inputs:
/// - `script_path`: Path to the script file.
///
/// Output:
/// - The file at `script_path` carries mode 0o755 and is durably on disk.
///
/// Details:
/// - Sets the file mode to 0o755 so the shell can execute it.
/// - Re-opens the file and calls `sync_all` so an immediate spawn does not
///   hit "Text file busy" from a not-yet-flushed write.
fn make_script_executable(script_path: &Path) {
    let metadata = fs::metadata(script_path).expect("Failed to read script metadata");
    let mut permissions = metadata.permissions();
    permissions.set_mode(0o755);
    fs::set_permissions(script_path, permissions).expect("Failed to set script permissions");
    // Flush file contents and metadata to disk before anyone executes it.
    File::open(script_path)
        .expect("Failed to open script file for syncing")
        .sync_all()
        .expect("Failed to sync script file to disk");
}
/// What: Create a mock sudo script that tracks password requests.
///
/// Inputs:
/// - `script_path`: Path where the script should be created.
/// - `password_log_path`: Path to the password log file.
///
/// Output:
/// - Creates an executable mock sudo script at the specified path.
///
/// Details:
/// - The generated script refuses to run without stdin (mimicking a failed
///   passwordless `sudo`), reads one line as the password, appends a
///   timestamped entry to the log file, and then exec's the wrapped command.
/// - The log file lets callers count how many times a password was supplied.
fn create_mock_sudo_script(script_path: &Path, password_log_path: &Path) {
    let password_log_str = password_log_path.to_string_lossy();
    let sudo_script = format!(
        r#"#!/bin/bash
# Mock sudo that simulates password requirement and timeout
# This simulates the scenario where sudo requires password at the beginning
# and should NOT require it again even if the update takes a long time
PASSWORD_LOG="{password_log_str}"
# Check if password was provided via stdin (sudo -S)
if [ -t 0 ]; then
# No stdin, passwordless sudo attempt
echo "Passwordless sudo not available" >&2
exit 1
fi
# Read password from stdin
read -r PASSWORD < /dev/stdin
# Log password request (without the actual password)
echo "$(date +%s): Password provided" >> "$PASSWORD_LOG"
# Simulate sudo timestamp - in real scenario, this would be set by sudo
# For testing, we simulate that the password is valid for the entire update
# The actual implementation should handle this by using sudo -v to refresh timestamp
# or by providing password once and using it for all commands
# Execute the actual command (simulate pacman update)
exec "$@"
"#
    );
    // Write and fsync inside a scope so the handle closes before chmod + exec.
    {
        let mut script_file =
            File::create(script_path).expect("Failed to create mock sudo script");
        script_file
            .write_all(sudo_script.as_bytes())
            .expect("Failed to write mock sudo script");
        script_file
            .sync_all()
            .expect("Failed to sync sudo script file to disk");
    }
    make_script_executable(script_path);
}
/// What: Create a mock pacman script that simulates a long-running update.
///
/// Inputs:
/// - `script_path`: Path where the script should be created.
///
/// Output:
/// - Creates an executable mock pacman script at the specified path.
///
/// Details:
/// - Writes the shared `MOCK_PACMAN_SCRIPT` contents, fsyncs them, and then
///   marks the file executable via `make_script_executable`.
fn create_mock_pacman_script(script_path: &Path) {
    // Write the script body and force it to disk before making it executable.
    let mut script_file = File::create(script_path).expect("Failed to create mock pacman script");
    script_file
        .write_all(MOCK_PACMAN_SCRIPT.as_bytes())
        .expect("Failed to write mock pacman script");
    script_file
        .sync_all()
        .expect("Failed to sync pacman script file to disk");
    // Close the handle explicitly before chmod + exec ("Text file busy" guard).
    drop(script_file);
    make_script_executable(script_path);
}
/// What: Run the mock update command and verify the results.
///
/// Inputs:
/// - `mock_sudo_path`: Path to the mock sudo script.
/// - `mock_pacman_path`: Path to the mock pacman script.
/// - `password_log_path`: Path to the password log file.
///
/// Output:
/// - Returns the command output and duration.
///
/// Details:
/// - Runs `echo 'testpassword' | mock_sudo mock_pacman -Syu --noconfirm`
///   through `bash -c` and measures the wall-clock time of the pipeline.
/// - Asserts the update succeeded and took at least 20 seconds (the mock
///   pacman script sleeps long enough to simulate a long-running upgrade).
/// - Asserts the password was logged exactly once, proving no re-prompt.
/// - Asserts stdout contains the expected progress and success messages.
fn run_and_verify_sudo_timeout_test(
    mock_sudo_path: &Path,
    mock_pacman_path: &Path,
    password_log_path: &Path,
) -> (Output, Duration) {
    let start = Instant::now();
    let output = Command::new("bash")
        .arg("-c")
        .arg(format!(
            "echo 'testpassword' | {} {} -Syu --noconfirm",
            mock_sudo_path.display(),
            mock_pacman_path.display()
        ))
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .expect("Failed to execute mock update");
    let duration = start.elapsed();
    // Verify the update completed successfully
    assert!(
        output.status.success(),
        "Mock update should complete successfully"
    );
    // Verify it took a reasonable amount of time (simulating long-running update)
    assert!(
        duration >= Duration::from_secs(20),
        "Mock update should take at least 20 seconds. Actual duration: {duration:?}"
    );
    // Verify password was only requested ONCE (not multiple times)
    assert!(
        password_log_path.exists(),
        "Password log file should exist. This indicates the mock sudo script failed to create the log file, \
which means the password tracking mechanism is not working correctly."
    );
    // `unwrap_or_default` keeps the original best-effort read semantics.
    let password_requests = fs::read_to_string(password_log_path).unwrap_or_default();
    let request_count = password_requests.lines().count();
    // assert_eq! over assert!(a == b): idiomatic and prints both values on failure.
    assert_eq!(
        request_count, 1,
        "Password should be requested only once, but was requested {request_count} times. \
This indicates the implementation may not properly handle sudo timeout during long updates."
    );
    // Verify output contains expected update messages
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(
        stdout.contains("Synchronizing package databases"),
        "Output should contain synchronization message"
    );
    assert!(
        stdout.contains("System upgrade completed successfully"),
        "Output should contain success message"
    );
    (output, duration)
}
/// What: Verify that the `-u` short flag triggers the update handler.
///
/// Inputs:
/// - Command line: `pacsea -u`
///
/// Output:
/// - The binary should attempt to run the update process.
///
/// Details:
/// - Runs the built binary with the short update flag and inspects its output.
/// - Since `handle_update()` exits, the trigger is detected either through
///   update-related text in the output or a conventional 0/1 exit code.
#[test]
#[ignore = "Long-running simulation test, only run manually"]
fn test_update_short_flag_triggers_update() {
    // The pacsea binary is expected two directories above the test executable.
    let binary_path = std::env::current_exe()
        .expect("Failed to get current exe")
        .parent()
        .expect("Failed to get parent dir")
        .parent()
        .expect("Failed to get parent parent dir")
        .join("pacsea");
    // Skip when the binary has not been built (e.g. during `cargo check`).
    if !binary_path.exists() {
        eprintln!("Skipping test: binary not found at {binary_path:?}");
        return;
    }
    // Run with `-u`; stdin is closed so a password prompt cannot hang the test.
    let output = Command::new(&binary_path)
        .arg("-u")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .expect("Failed to execute pacsea");
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    let exit_code = output.status.code();
    // Any of these markers in either stream indicates the update path ran.
    let markers = ["update", "pacman", "Syu", "Updating", "password"];
    let has_update_message = markers
        .iter()
        .any(|needle| stderr.contains(needle) || stdout.contains(needle));
    // Exit code 0 (success) or 1 (failure) both mean the handler executed.
    let has_valid_exit_code = matches!(exit_code, Some(0 | 1));
    assert!(
        has_update_message || has_valid_exit_code,
        "Update handler should have been triggered. \
Expected update-related messages or exit code 0/1. \
Exit code: {exit_code:?}, stdout: {stdout}, stderr: {stderr}"
    );
}
/// What: Verify that the `--update` long flag triggers the update handler.
///
/// Inputs:
/// - Command line: `pacsea --update`
///
/// Output:
/// - The binary should attempt to run the update process.
///
/// Details:
/// - Same detection approach as the short-flag test, exercising the long form.
#[test]
#[ignore = "Long-running simulation test, only run manually"]
fn test_update_long_flag_triggers_update() {
    // The pacsea binary is expected two directories above the test executable.
    let binary_path = std::env::current_exe()
        .expect("Failed to get current exe")
        .parent()
        .expect("Failed to get parent dir")
        .parent()
        .expect("Failed to get parent parent dir")
        .join("pacsea");
    // Skip when the binary has not been built.
    if !binary_path.exists() {
        eprintln!("Skipping test: binary not found at {binary_path:?}");
        return;
    }
    // Run with `--update`; stdin stays closed so no prompt can block.
    let output = Command::new(&binary_path)
        .arg("--update")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .expect("Failed to execute pacsea");
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    let exit_code = output.status.code();
    // Update-related text in either stream counts as evidence the handler ran.
    let indicators = ["update", "pacman", "Syu", "Updating", "password"];
    let has_update_message = indicators
        .iter()
        .any(|needle| stderr.contains(needle) || stdout.contains(needle));
    // A clean 0/1 exit also indicates the handler executed to completion.
    let has_valid_exit_code = matches!(exit_code, Some(0 | 1));
    assert!(
        has_update_message || has_valid_exit_code,
        "Update handler should have been triggered. \
Expected update-related messages or exit code 0/1. \
Exit code: {exit_code:?}, stdout: {stdout}, stderr: {stderr}"
    );
}
/// What: Simulate sudo password timeout scenario during long-running update.
///
/// Inputs:
/// - A mock sudo that requires password and has a short timeout.
/// - A long-running update process that exceeds sudo's timestamp timeout.
///
/// Output:
/// - Verifies that password is provided once at the beginning and NOT required again.
///
/// Details:
/// - This test simulates the scenario where:
///   1. User provides sudo password at the beginning of the update
///   2. Update runs for a long time (exceeding sudo's `timestamp_timeout`)
///   3. Password should NOT be required again (implementation should handle this)
/// - The test creates a mock sudo wrapper that simulates password requirement and timeout.
/// - Verifies that the update handler properly handles sudo password to avoid re-prompting.
/// - This is a manual integration test for long-running scenarios.
/// - The test is ignored by default because it requires:
///   - Actual system setup with sudo
///   - Long-running simulation (may take several minutes)
///   - Manual execution outside of CI
/// - For unit-level testing, see `tests/install/password_prompt.rs::integration_password_prompt_timeout_error`
/// - Run manually with: `cargo test -- --ignored test_sudo_password_timeout_during_long_update`
#[test]
#[ignore = "Long-running simulation test, only run manually"]
fn test_sudo_password_timeout_during_long_update() {
    // Skip in CI or when bash/sudo are missing. `which <tool>` exits non-zero
    // when the tool is absent, so the exit status must be checked; the old
    // `output().is_err()` only caught a failure to spawn `which` itself and
    // never actually detected a missing tool.
    let tool_available = |tool: &str| {
        Command::new("which")
            .arg(tool)
            .output()
            .is_ok_and(|out| out.status.success())
    };
    if std::env::var("CI").is_ok() || !tool_available("bash") || !tool_available("sudo") {
        return;
    }
    let temp_dir = create_test_temp_dir("sudo_timeout");
    let password_log = temp_dir.join("password_requests.log");
    let mock_sudo_path = temp_dir.join("mock_sudo");
    let mock_pacman_path = temp_dir.join("mock_pacman");
    create_mock_sudo_script(&mock_sudo_path, &password_log);
    create_mock_pacman_script(&mock_pacman_path);
    // Test the scenario: password provided once, used for long-running update
    // This simulates: echo 'password' | sudo -S pacman -Syu --noconfirm
    let (_output, _duration) =
        run_and_verify_sudo_timeout_test(&mock_sudo_path, &mock_pacman_path, &password_log);
    // Clean up
    let _ = fs::remove_file(&mock_sudo_path);
    let _ = fs::remove_file(&mock_pacman_path);
    let _ = fs::remove_file(&password_log);
    let _ = fs::remove_dir_all(&temp_dir);
}
/// What: Simulate a long-running update that would require password re-entry.
///
/// Inputs:
/// - A mock update process that takes longer than sudo's default timeout (typically 15 minutes).
///
/// Output:
/// - Verifies that the update process can handle long-running operations.
///
/// Details:
/// - This test simulates the scenario where an update takes a long time,
///   potentially causing sudo's timestamp to expire and requiring password re-entry.
/// - The shared `create_mock_pacman_script` helper supplies the long-running mock.
/// - In a real scenario, if sudo times out during a long update, the user would need to
///   provide the password again. This test verifies the update process structure can handle
///   such scenarios by simulating the long-running nature of updates.
#[test]
#[ignore = "Long-running simulation test, only run manually"]
fn test_long_running_update_simulation() {
    // Skip in CI or when bash is missing. `which bash` exits non-zero when
    // bash is absent, so check the status — the previous `output().is_err()`
    // only detected a failure to spawn `which` itself.
    let bash_available = Command::new("which")
        .arg("bash")
        .output()
        .is_ok_and(|out| out.status.success());
    if std::env::var("CI").is_ok() || !bash_available {
        return;
    }
    let temp_dir = create_test_temp_dir("update");
    let mock_script_path = temp_dir.join("mock_pacman_update.sh");
    // Reuse the shared helper instead of duplicating the write/sync/chmod steps.
    create_mock_pacman_script(&mock_script_path);
    // Test that the script runs and takes a reasonable amount of time
    // (simulating a long-running update)
    let start = Instant::now();
    let output = Command::new(&mock_script_path)
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .expect("Failed to execute mock script");
    let duration = start.elapsed();
    // Verify the script completed successfully
    assert!(
        output.status.success(),
        "Mock update script should complete successfully"
    );
    // Verify it took a reasonable amount of time (at least 20 seconds for the simulated update)
    // In a real long-running update, this could be 15+ minutes, which would cause
    // sudo's timestamp to expire, requiring password re-entry
    assert!(
        duration >= Duration::from_secs(20),
        "Mock update should take at least 20 seconds to simulate long-running operation. Actual duration: {duration:?}"
    );
    // Verify output contains expected update messages
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(
        stdout.contains("Synchronizing package databases"),
        "Output should contain synchronization message"
    );
    assert!(
        stdout.contains("System upgrade completed successfully"),
        "Output should contain success message"
    );
    // Clean up
    let _ = fs::remove_file(&mock_script_path);
    let _ = fs::remove_dir_all(&temp_dir);
}
/// What: Test that individual command exit codes are correctly detected in combined command execution.
///
/// Inputs:
/// - A scenario where pacman fails but AUR helper succeeds.
///
/// Output:
/// - Verifies that pacman failure is correctly detected even when AUR helper succeeds.
///
/// Details:
/// - This test verifies the fix for the bug where `status.success()` was used to check
///   both pacman and AUR helper success, incorrectly marking pacman as successful when
///   pacman failed but AUR helper succeeded.
/// - The combined command should capture individual exit codes and detect failures correctly.
#[test]
#[ignore = "Requires actual pacsea binary and system setup"]
fn test_individual_exit_codes_in_combined_command() {
    // Skip in CI or when bash is missing. `which bash` exits non-zero when
    // bash is absent, so the exit status must be checked — a bare
    // `output().is_err()` only detects a failure to spawn `which` itself.
    let bash_available = Command::new("which")
        .arg("bash")
        .output()
        .is_ok_and(|out| out.status.success());
    if std::env::var("CI").is_ok() || !bash_available {
        return;
    }
    // The shared helper builds the same `pacsea_test_exit_codes_<pid>_<nanos>`
    // directory name the previous inline code produced.
    let temp_dir = create_test_temp_dir("exit_codes");
    // Create a mock pacman that always fails with exit code 1.
    let mock_pacman_path = temp_dir.join("mock_pacman");
    let pacman_script = r#"#!/bin/bash
# Mock pacman that fails
echo "error: failed to prepare transaction"
echo "error: target not found: some-package"
exit 1
"#;
    fs::write(&mock_pacman_path, pacman_script).expect("Failed to write mock pacman script");
    make_script_executable(&mock_pacman_path);
    // Create a mock AUR helper that always succeeds.
    let mock_aur_path = temp_dir.join("mock_aur");
    let aur_script = r#"#!/bin/bash
# Mock AUR helper that succeeds
echo ":: Synchronizing package databases..."
echo "there is nothing to do"
exit 0
"#;
    fs::write(&mock_aur_path, aur_script).expect("Failed to write mock AUR script");
    make_script_executable(&mock_aur_path);
    // Test combined command with exit code capture
    // This simulates: command1; PACMAN_EXIT=$?; command2; AUR_EXIT=$?
    // Note: AUR helper uses -Sua (AUR only) after pacman -Syu has already run
    let combined_cmd = format!(
        "{} -Syu --noconfirm; PACMAN_EXIT=$?; echo 'PACMAN_EXIT='$PACMAN_EXIT; {} -Sua --noconfirm; AUR_EXIT=$?; echo 'AUR_EXIT='$AUR_EXIT; exit $((PACMAN_EXIT | AUR_EXIT))",
        mock_pacman_path.display(),
        mock_aur_path.display()
    );
    let output = Command::new("bash")
        .arg("-c")
        .arg(&combined_cmd)
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .expect("Failed to execute combined command");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    // Verify that individual exit codes are captured
    assert!(
        stdout.contains("PACMAN_EXIT=1") || stderr.contains("PACMAN_EXIT=1"),
        "Should capture pacman exit code as 1. stdout: {stdout}, stderr: {stderr}"
    );
    assert!(
        stdout.contains("AUR_EXIT=0") || stderr.contains("AUR_EXIT=0"),
        "Should capture AUR exit code as 0. stdout: {stdout}, stderr: {stderr}"
    );
    // The combined exit code should be non-zero (since pacman failed)
    // Using bitwise OR: 1 | 0 = 1
    assert!(
        !output.status.success(),
        "Combined command should fail when pacman fails, even if AUR succeeds"
    );
    // Clean up
    let _ = fs::remove_file(&mock_pacman_path);
    let _ = fs::remove_file(&mock_aur_path);
    let _ = fs::remove_dir_all(&temp_dir);
}
/// What: Test that empty passwords are rejected during password prompt.
///
/// Inputs:
/// - An empty password string.
///
/// Output:
/// - Verifies that empty passwords are rejected with an appropriate error.
///
/// Details:
/// - Guards against the bug where an empty password from
///   `rpassword::prompt_password` was forwarded to sudo, which then failed.
/// - Empty (and whitespace-only, after trimming) passwords must be caught
///   before any sudo command is built.
#[test]
fn test_empty_password_rejected() {
    // A freshly constructed string must be classified as empty.
    let blank = String::new();
    assert!(
        blank.is_empty(),
        "Empty password should be detected as empty"
    );
    // A real password must not be classified as empty.
    let filled = "test123".to_string();
    assert!(
        !filled.is_empty(),
        "Non-empty password should not be detected as empty"
    );
    // Whitespace-only input collapses to empty once trimmed.
    let spaces = " ".to_string();
    assert!(
        spaces.trim().is_empty(),
        "Whitespace-only password should be considered empty after trimming"
    );
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/update/system_update_ui.rs | tests/update/system_update_ui.rs | //! UI tests for system update modal.
//!
//! Tests cover:
//! - `SystemUpdate` modal structure
//! - Update options state
//!
//! Note: These tests verify modal state structure rather than actual rendering.
#![cfg(test)]
use pacsea::state::{AppState, Modal};
#[test]
/// What: Test `SystemUpdate` modal structure.
///
/// Inputs:
/// - `SystemUpdate` modal populated with a mix of enabled and disabled options.
///
/// Output:
/// - Every field of the modal reads back exactly as constructed.
///
/// Details:
/// - Builds an `AppState` holding a `SystemUpdate` modal and destructures it
///   back out, checking each field against the value it was created with.
fn ui_system_update_modal_structure() {
    let app = AppState {
        modal: Modal::SystemUpdate {
            do_mirrors: true,
            do_pacman: true,
            force_sync: false,
            do_aur: false,
            do_cache: true,
            country_idx: 1,
            countries: vec!["Worldwide".to_string(), "United States".to_string()],
            mirror_count: 15,
            cursor: 2,
        },
        ..Default::default()
    };
    // Pull every field back out; any other variant is a test failure.
    let Modal::SystemUpdate {
        do_mirrors,
        do_pacman,
        force_sync,
        do_aur,
        do_cache,
        country_idx,
        countries,
        mirror_count,
        cursor,
    } = app.modal
    else {
        panic!("Expected SystemUpdate modal");
    };
    assert!(do_mirrors);
    assert!(do_pacman);
    assert!(!force_sync);
    assert!(!do_aur);
    assert!(do_cache);
    assert_eq!(country_idx, 1);
    assert_eq!(countries.len(), 2);
    assert_eq!(mirror_count, 15);
    assert_eq!(cursor, 2);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/update/system_update_integration.rs | tests/update/system_update_integration.rs | //! Integration tests for the system update process.
//!
//! Tests cover:
//! - System update modal state
//! - Update command building
//! - Different update options (mirrors, pacman, AUR, cache)
//! - `ExecutorRequest::Update` creation
//! - Password prompt for sudo commands
//! - Full update sequence
#![cfg(test)]
use pacsea::install::{ExecutorOutput, ExecutorRequest};
use pacsea::state::modal::{PasswordPurpose, PreflightHeaderChips};
use pacsea::state::{AppState, Modal, PreflightAction, PreflightTab};
#[test]
/// What: Test system update modal state creation.
///
/// Inputs:
/// - `SystemUpdate` modal with mirrors and pacman enabled, everything else off.
///
/// Output:
/// - Each field reads back exactly as constructed.
///
/// Details:
/// - Round-trips a `SystemUpdate` modal through `AppState` and destructures it
///   to verify no field is lost or altered.
fn integration_system_update_modal_state() {
    let app = AppState {
        modal: Modal::SystemUpdate {
            do_mirrors: true,
            do_pacman: true,
            force_sync: false,
            do_aur: false,
            do_cache: false,
            country_idx: 0,
            countries: vec!["Worldwide".to_string(), "United States".to_string()],
            mirror_count: 10,
            cursor: 0,
        },
        ..Default::default()
    };
    // Destructure the modal back out; any other variant fails the test.
    let Modal::SystemUpdate {
        do_mirrors,
        do_pacman,
        force_sync,
        do_aur,
        do_cache,
        country_idx,
        countries,
        mirror_count,
        cursor,
    } = app.modal
    else {
        panic!("Expected SystemUpdate modal");
    };
    assert!(do_mirrors);
    assert!(do_pacman);
    assert!(!force_sync);
    assert!(!do_aur);
    assert!(!do_cache);
    assert_eq!(country_idx, 0);
    assert_eq!(countries.len(), 2);
    assert_eq!(mirror_count, 10);
    assert_eq!(cursor, 0);
}
#[test]
/// What: Test system update command building.
///
/// Inputs:
/// - Different update options combinations.
///
/// Output:
/// - Commands are built correctly.
///
/// Details:
/// - Checks the shape of the command strings used for the different update
///   scenarios (pacman upgrade, AUR helper fallback chain, cache cleanup).
/// - Note: Actual execution spawns terminal, so this tests command structure only.
fn integration_system_update_command_building() {
    // Full-system upgrade via pacman.
    let pacman_update = "sudo pacman -Syu --noconfirm";
    assert!(pacman_update.contains("pacman"));
    assert!(pacman_update.contains("-Syu"));
    assert!(pacman_update.contains("--noconfirm"));
    // AUR upgrade tries paru first, then yay, else reports no helper found.
    let aur_update = "if command -v paru >/dev/null 2>&1; then \
        paru -Syu --noconfirm; \
        elif command -v yay >/dev/null 2>&1; then \
        yay -Syu --noconfirm; \
        else \
        echo 'No AUR helper (paru/yay) found.'; \
        fi";
    assert!(aur_update.contains("paru") || aur_update.contains("yay"));
    assert!(aur_update.contains("-Syu"));
    // Package cache cleanup.
    let cache_cleanup = "sudo pacman -Sc --noconfirm";
    assert!(cache_cleanup.contains("pacman"));
    assert!(cache_cleanup.contains("-Sc"));
}
#[test]
/// What: Test system update with all options enabled.
///
/// Inputs:
/// - `SystemUpdate` modal with every toggle switched on.
///
/// Output:
/// - All four option flags read back as `true`.
///
/// Details:
/// - Confirms the modal can represent mirrors, pacman, AUR, and cache
///   updates being requested at the same time.
fn integration_system_update_all_options() {
    let app = AppState {
        modal: Modal::SystemUpdate {
            do_mirrors: true,
            do_pacman: true,
            force_sync: false,
            do_aur: true,
            do_cache: true,
            country_idx: 0,
            countries: vec!["Worldwide".to_string()],
            mirror_count: 20,
            cursor: 0,
        },
        ..Default::default()
    };
    // Only the four option flags matter here; the rest is ignored.
    let Modal::SystemUpdate {
        do_mirrors,
        do_pacman,
        do_aur,
        do_cache,
        ..
    } = app.modal
    else {
        panic!("Expected SystemUpdate modal");
    };
    assert!(do_mirrors);
    assert!(do_pacman);
    assert!(do_aur);
    assert!(do_cache);
}
#[test]
/// What: Test system update with no options selected.
///
/// Inputs:
/// - `SystemUpdate` modal with all options disabled.
///
/// Output:
/// - All four option flags read back as `false`.
///
/// Details:
/// - Verifies the modal state when nothing is selected. (The UI layer is
///   expected to surface an alert for this case — not exercised here.)
fn integration_system_update_no_options() {
    let app = AppState {
        modal: Modal::SystemUpdate {
            do_mirrors: false,
            do_pacman: false,
            force_sync: false,
            do_aur: false,
            do_cache: false,
            country_idx: 0,
            countries: vec!["Worldwide".to_string()],
            mirror_count: 10,
            cursor: 0,
        },
        ..Default::default()
    };
    // Only the four option flags matter here; the rest is ignored.
    let Modal::SystemUpdate {
        do_mirrors,
        do_pacman,
        do_aur,
        do_cache,
        ..
    } = app.modal
    else {
        panic!("Expected SystemUpdate modal");
    };
    assert!(!do_mirrors);
    assert!(!do_pacman);
    assert!(!do_aur);
    assert!(!do_cache);
}
#[test]
/// What: Test that system update uses `ExecutorRequest` instead of spawning terminals.
///
/// Inputs:
/// - System update action triggered through `handle_system_update_enter`.
///
/// Output:
/// - `pending_update_commands` should be set and `PasswordPrompt` modal shown.
/// - After password entry, `pending_executor_request` should be set with `ExecutorRequest::Update`.
///
/// Details:
/// - Reproduces the two state transitions the handlers are expected to make:
///   `handle_system_update_enter` stashes the commands and opens the password
///   prompt; the password handler then consumes the commands, queues an
///   `ExecutorRequest::Update`, and switches to the `PreflightExec` modal.
/// - Since `handle_system_update_enter` is private, the expected states are
///   constructed directly and every intermediate invariant is asserted.
fn integration_system_update_uses_executor_not_terminal() {
    let mut app = AppState {
        dry_run: false,
        ..Default::default()
    };
    // Step 1: mirror handle_system_update_enter — stash the update commands
    // and transition to the password prompt.
    let cmds = vec!["sudo pacman -Syu --noconfirm".to_string()];
    app.pending_update_commands = Some(cmds.clone());
    app.modal = Modal::PasswordPrompt {
        purpose: PasswordPurpose::Update,
        items: Vec::new(),
        input: String::new(),
        cursor: 0,
        error: None,
    };
    assert!(
        app.pending_update_commands.is_some(),
        "System update must set pending_update_commands before password prompt"
    );
    // The modal must be a PasswordPrompt whose purpose is Update.
    let Modal::PasswordPrompt { purpose, .. } = &app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert!(
        matches!(purpose, PasswordPurpose::Update),
        "Password purpose should be Update"
    );
    // Step 2: mirror the password handler — consume the stashed commands,
    // queue an ExecutorRequest::Update, and switch to the exec modal.
    let password = Some("test_password".to_string());
    let update_cmds = app
        .pending_update_commands
        .take()
        .expect("pending_update_commands should be set");
    app.pending_executor_request = Some(ExecutorRequest::Update {
        commands: update_cmds,
        password: password.clone(),
        dry_run: app.dry_run,
    });
    app.modal = Modal::PreflightExec {
        items: Vec::new(),
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: Vec::new(),
        abortable: false,
        header_chips: PreflightHeaderChips::default(),
        success: None,
    };
    assert!(
        app.pending_executor_request.is_some(),
        "System update must use ExecutorRequest instead of spawning terminals"
    );
    // The queued request must be an Update carrying the commands and password.
    let Some(ExecutorRequest::Update {
        commands,
        password: req_password,
        dry_run,
    }) = &app.pending_executor_request
    else {
        panic!("Expected ExecutorRequest::Update");
    };
    assert_eq!(
        *commands, cmds,
        "Update request should have the correct commands"
    );
    assert_eq!(
        *req_password, password,
        "Password should be set from password prompt"
    );
    assert!(!dry_run, "Dry run should be false");
    // The UI must now be on the executor/preflight screen.
    if !matches!(app.modal, Modal::PreflightExec { .. }) {
        panic!("Expected modal to transition to PreflightExec");
    }
}
#[test]
/// What: Test `ExecutorRequest::Update` with mirror update command.
///
/// Inputs:
/// - Mirror update command with reflector.
///
/// Output:
/// - `ExecutorRequest::Update` with correct mirror command.
///
/// Details:
/// - Verifies mirror update command structure using reflector.
fn integration_system_update_mirror_command() {
let country = "Germany";
let mirror_count = 10;
let mirror_cmd = format!(
"sudo reflector --country {country} --latest {mirror_count} --sort rate --save /etc/pacman.d/mirrorlist"
);
let request = ExecutorRequest::Update {
commands: vec![mirror_cmd],
password: None,
dry_run: false,
};
match request {
ExecutorRequest::Update { commands, .. } => {
assert_eq!(commands.len(), 1);
assert!(commands[0].contains("reflector"));
assert!(commands[0].contains("Germany"));
assert!(commands[0].contains("10"));
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test `ExecutorRequest::Update` with pacman update command.
///
/// Inputs:
/// - Pacman system update command.
///
/// Output:
/// - `ExecutorRequest::Update` with correct pacman command.
///
/// Details:
/// - Verifies pacman -Syu command structure.
fn integration_system_update_pacman_command() {
let pacman_cmd = "sudo pacman -Syu --noconfirm".to_string();
let request = ExecutorRequest::Update {
commands: vec![pacman_cmd],
password: Some("testpassword".to_string()),
dry_run: false,
};
match request {
ExecutorRequest::Update {
commands,
password,
dry_run,
} => {
assert_eq!(commands.len(), 1);
assert!(commands[0].contains("pacman"));
assert!(commands[0].contains("-Syu"));
assert!(commands[0].contains("--noconfirm"));
assert_eq!(password, Some("testpassword".to_string()));
assert!(!dry_run);
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test `ExecutorRequest::Update` with AUR update command.
///
/// Inputs:
/// - AUR helper detection and update command.
///
/// Output:
/// - `ExecutorRequest::Update` with correct AUR command.
///
/// Details:
/// - Verifies AUR update command structure with paru/yay fallback.
fn integration_system_update_aur_command() {
let aur_cmd = "if command -v paru >/dev/null 2>&1; then \
paru -Syu --noconfirm; \
elif command -v yay >/dev/null 2>&1; then \
yay -Syu --noconfirm; \
else \
echo 'No AUR helper (paru/yay) found.'; \
fi"
.to_string();
let request = ExecutorRequest::Update {
commands: vec![aur_cmd],
password: None, // AUR helpers typically don't need sudo upfront
dry_run: false,
};
match request {
ExecutorRequest::Update { commands, .. } => {
assert_eq!(commands.len(), 1);
assert!(commands[0].contains("paru"));
assert!(commands[0].contains("yay"));
assert!(commands[0].contains("-Syu"));
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test `ExecutorRequest::Update` with cache cleanup command.
///
/// Inputs:
/// - Cache cleanup command for pacman.
///
/// Output:
/// - `ExecutorRequest::Update` with correct cache cleanup command.
///
/// Details:
/// - Verifies pacman -Sc command structure.
fn integration_system_update_cache_command() {
let cache_cmd = "sudo pacman -Sc --noconfirm".to_string();
let request = ExecutorRequest::Update {
commands: vec![cache_cmd],
password: Some("testpassword".to_string()),
dry_run: false,
};
match request {
ExecutorRequest::Update {
commands, password, ..
} => {
assert_eq!(commands.len(), 1);
assert!(commands[0].contains("pacman"));
assert!(commands[0].contains("-Sc"));
assert!(commands[0].contains("--noconfirm"));
assert_eq!(password, Some("testpassword".to_string()));
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test full system update sequence with all commands.
///
/// Inputs:
/// - All update options enabled (mirrors, pacman, AUR, cache).
///
/// Output:
/// - `ExecutorRequest::Update` with all commands in sequence.
///
/// Details:
/// - Verifies that full update sequence includes all commands.
fn integration_system_update_full_sequence() {
let commands = vec![
"sudo reflector --country Worldwide --latest 20 --sort rate --save /etc/pacman.d/mirrorlist".to_string(),
"sudo pacman -Syu --noconfirm".to_string(),
"if command -v paru >/dev/null 2>&1; then paru -Syu --noconfirm; elif command -v yay >/dev/null 2>&1; then yay -Syu --noconfirm; fi".to_string(),
"sudo pacman -Sc --noconfirm".to_string(),
];
let request = ExecutorRequest::Update {
commands,
password: Some("testpassword".to_string()),
dry_run: false,
};
match request {
ExecutorRequest::Update { commands, .. } => {
assert_eq!(commands.len(), 4);
assert!(commands[0].contains("reflector"));
assert!(commands[1].contains("pacman") && commands[1].contains("-Syu"));
assert!(commands[2].contains("paru") || commands[2].contains("yay"));
assert!(commands[3].contains("pacman") && commands[3].contains("-Sc"));
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test system update triggers password prompt for sudo commands.
///
/// Inputs:
/// - System update modal confirmed.
///
/// Output:
/// - Password prompt modal is shown for sudo commands.
///
/// Details:
/// - Verifies that update operations requiring sudo show password prompt.
fn integration_system_update_password_prompt() {
let mut app = AppState {
modal: Modal::SystemUpdate {
do_mirrors: false,
do_pacman: true,
force_sync: false,
do_aur: false,
do_cache: false,
country_idx: 0,
countries: vec!["Worldwide".to_string()],
mirror_count: 10,
cursor: 0,
},
pending_exec_header_chips: Some(PreflightHeaderChips::default()),
..Default::default()
};
// Simulate update confirmation - transition to PasswordPrompt
app.modal = Modal::PasswordPrompt {
purpose: PasswordPurpose::Update,
items: vec![],
input: String::new(),
cursor: 0,
error: None,
};
match app.modal {
Modal::PasswordPrompt { purpose, items, .. } => {
assert_eq!(purpose, PasswordPurpose::Update);
assert!(
items.is_empty(),
"Update password prompt should have empty items"
);
}
_ => panic!("Expected PasswordPrompt modal"),
}
}
#[test]
/// What: Test system update transitions to `PreflightExec` after password.
///
/// Inputs:
/// - Password submitted for system update.
///
/// Output:
/// - Modal transitions to `PreflightExec` with empty items.
///
/// Details:
/// - Verifies update flow after password submission.
fn integration_system_update_to_preflight_exec() {
let mut app = AppState {
modal: Modal::PasswordPrompt {
purpose: PasswordPurpose::Update,
items: vec![],
input: "testpassword".to_string(),
cursor: 12,
error: None,
},
pending_exec_header_chips: Some(PreflightHeaderChips::default()),
..Default::default()
};
// Extract password
let password = if let Modal::PasswordPrompt { ref input, .. } = app.modal {
if input.trim().is_empty() {
None
} else {
Some(input.clone())
}
} else {
None
};
// Simulate transition to PreflightExec
let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
app.modal = Modal::PreflightExec {
items: vec![],
action: PreflightAction::Install,
tab: PreflightTab::Summary,
verbose: false,
log_lines: vec![],
abortable: false,
header_chips,
success: None,
};
// Set executor request
app.pending_executor_request = Some(ExecutorRequest::Update {
commands: vec!["sudo pacman -Syu --noconfirm".to_string()],
password,
dry_run: false,
});
// Verify modal
match app.modal {
Modal::PreflightExec { items, .. } => {
assert!(
items.is_empty(),
"Update PreflightExec should have empty items"
);
}
_ => panic!("Expected PreflightExec modal"),
}
// Verify executor request
match app.pending_executor_request {
Some(ExecutorRequest::Update { password, .. }) => {
assert_eq!(password, Some("testpassword".to_string()));
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test system update dry-run mode.
///
/// Inputs:
/// - System update with `dry_run` enabled.
///
/// Output:
/// - `ExecutorRequest::Update` with `dry_run=true`.
///
/// Details:
/// - Verifies dry-run mode is respected for updates.
fn integration_system_update_dry_run() {
let request = ExecutorRequest::Update {
commands: vec!["sudo pacman -Syu --noconfirm".to_string()],
password: None,
dry_run: true,
};
match request {
ExecutorRequest::Update { dry_run, .. } => {
assert!(dry_run);
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test system update cursor navigation.
///
/// Inputs:
/// - `SystemUpdate` modal with cursor at different positions.
///
/// Output:
/// - Cursor position is correctly tracked.
///
/// Details:
/// - Verifies cursor navigation within the update modal.
fn integration_system_update_cursor_navigation() {
let app = AppState {
modal: Modal::SystemUpdate {
do_mirrors: true,
do_pacman: true,
force_sync: false,
do_aur: true,
do_cache: true,
country_idx: 0,
countries: vec!["Worldwide".to_string()],
mirror_count: 10,
cursor: 3, // On cache option (index 3)
},
..Default::default()
};
match app.modal {
Modal::SystemUpdate { cursor, .. } => {
assert_eq!(cursor, 3);
}
_ => panic!("Expected SystemUpdate modal"),
}
}
#[test]
/// What: Test system update country selection.
///
/// Inputs:
/// - `SystemUpdate` modal with different country selection.
///
/// Output:
/// - Country index is correctly tracked.
///
/// Details:
/// - Verifies country selection for reflector mirror update.
fn integration_system_update_country_selection() {
let countries = vec![
"Worldwide".to_string(),
"United States".to_string(),
"Germany".to_string(),
"France".to_string(),
];
let app = AppState {
modal: Modal::SystemUpdate {
do_mirrors: true,
do_pacman: false,
force_sync: false,
do_aur: false,
do_cache: false,
country_idx: 2, // Germany
countries,
mirror_count: 15,
cursor: 0,
},
..Default::default()
};
match app.modal {
Modal::SystemUpdate {
country_idx,
countries: modal_countries,
mirror_count,
..
} => {
assert_eq!(country_idx, 2);
assert_eq!(modal_countries[country_idx], "Germany");
assert_eq!(mirror_count, 15);
}
_ => panic!("Expected SystemUpdate modal"),
}
}
#[test]
/// What: Test `ExecutorRequest::Update` with simulated network failure.
///
/// Inputs:
/// - Update request that will fail due to network error.
///
/// Output:
/// - Request structure is correct, error handling can occur.
///
/// Details:
/// - Verifies update request can be created even when network will fail.
/// - Tests error handling in update sequence when network fails mid-operation.
fn integration_system_update_network_failure() {
let commands = vec![
"sudo pacman -Syu --noconfirm".to_string(),
"if command -v paru >/dev/null 2>&1; then paru -Syu --noconfirm; fi".to_string(),
];
let request = ExecutorRequest::Update {
commands,
password: Some("testpassword".to_string()),
dry_run: false,
};
match request {
ExecutorRequest::Update { commands, .. } => {
assert_eq!(commands.len(), 2);
// Request structure is valid even if network will fail
}
_ => panic!("Expected ExecutorRequest::Update"),
}
}
#[test]
/// What: Test network error display in `PreflightExec` modal during system update.
///
/// Inputs:
/// - `PreflightExec` modal with network failure during update.
///
/// Output:
/// - Error message is displayed in `log_lines`.
/// - Modal state reflects error condition.
///
/// Details:
/// - Verifies network errors during update are displayed to user.
fn integration_system_update_network_error_display() {
let mut app = AppState {
modal: Modal::PreflightExec {
items: vec![],
action: PreflightAction::Install, // Update uses Install action
tab: PreflightTab::Summary,
verbose: false,
log_lines: vec![":: Synchronizing package databases...".to_string()],
abortable: false,
header_chips: PreflightHeaderChips::default(),
success: None,
},
..Default::default()
};
// Simulate network error during update
if let Modal::PreflightExec {
ref mut log_lines, ..
} = app.modal
{
log_lines.push("error: failed to retrieve 'core.db' from mirror".to_string());
log_lines.push("error: Failed to connect to host (network unreachable)".to_string());
}
match app.modal {
Modal::PreflightExec { log_lines, .. } => {
assert_eq!(log_lines.len(), 3);
assert!(log_lines[1].contains("failed to retrieve"));
assert!(log_lines[2].contains("network unreachable"));
}
_ => panic!("Expected PreflightExec modal"),
}
}
#[test]
/// What: Test `ExecutorOutput::Error` handling for network failure during update sequence.
///
/// Inputs:
/// - `ExecutorOutput::Error` with network failure message during update.
///
/// Output:
/// - Error is properly handled and displayed.
///
/// Details:
/// - Verifies update sequence handles network failures correctly.
fn integration_system_update_network_error_handling() {
let error_output =
ExecutorOutput::Error("Failed to connect to host (network unreachable)".to_string());
// Simulate error being received during update
let mut app = AppState {
modal: Modal::PreflightExec {
items: vec![],
action: PreflightAction::Install,
tab: PreflightTab::Summary,
verbose: false,
log_lines: vec![":: Starting full system upgrade...".to_string()],
abortable: false,
header_chips: PreflightHeaderChips::default(),
success: None,
},
..Default::default()
};
// Simulate error being added to log_lines
if let ExecutorOutput::Error(msg) = &error_output
&& let Modal::PreflightExec {
ref mut log_lines, ..
} = app.modal
{
log_lines.push(format!("ERROR: {msg}"));
}
match app.modal {
Modal::PreflightExec { log_lines, .. } => {
assert_eq!(log_lines.len(), 2);
assert!(log_lines[1].contains("ERROR:"));
assert!(log_lines[1].contains("network unreachable"));
}
_ => panic!("Expected PreflightExec modal"),
}
}
#[test]
/// What: Verify system update completion does NOT clear `install_list`.
///
/// Inputs:
/// - System update completes (`PreflightExec` with empty items)
/// - User has packages in `install_list` before update
///
/// Output:
/// - `install_list` is preserved, NOT cleared
/// - `pending_install_names` should NOT be set for system updates
///
/// Details:
/// - System updates use empty items vector since they don't involve specific packages
/// - The Install action completion handler should skip `pending_install_names` tracking
/// when items is empty to avoid clearing `install_list` due to vacuously true check
/// - Regression test for bug: empty `pending_install_names` causes `install_list` to be cleared
fn integration_system_update_preserves_install_list() {
use pacsea::state::{PackageItem, Source};
// Create app with packages in install_list (queued for installation)
let mut app = AppState {
install_list: vec![
PackageItem {
name: "neovim".to_string(),
version: "0.10.0-1".to_string(),
description: "Vim-fork focused on extensibility and usability".to_string(),
source: Source::Official {
repo: "extra".to_string(),
arch: "x86_64".to_string(),
},
popularity: None,
out_of_date: None,
orphaned: false,
},
PackageItem {
name: "ripgrep".to_string(),
version: "14.0.0-1".to_string(),
description: "A search tool that combines ag with grep".to_string(),
source: Source::Official {
repo: "extra".to_string(),
arch: "x86_64".to_string(),
},
popularity: None,
out_of_date: None,
orphaned: false,
},
],
// Simulate system update in progress (PreflightExec with empty items)
modal: Modal::PreflightExec {
items: Vec::new(), // System update has NO items
action: PreflightAction::Install, // System update uses Install action
tab: PreflightTab::Summary,
verbose: false,
log_lines: vec![":: Starting full system upgrade...".to_string()],
abortable: false,
header_chips: PreflightHeaderChips::default(),
success: None,
},
..Default::default()
};
// Verify install_list has packages before update
assert_eq!(
app.install_list.len(),
2,
"install_list should have 2 packages queued"
);
assert_eq!(app.install_list[0].name, "neovim");
assert_eq!(app.install_list[1].name, "ripgrep");
// Simulate system update completion - this is what handle_executor_output does
// For system updates, items is empty so installed_names would be empty
if let Modal::PreflightExec { items, action, .. } = &app.modal
&& matches!(action, PreflightAction::Install)
{
// BUG CONDITION: If we set pending_install_names to empty vec,
// the tick handler will clear install_list (vacuously true check)
// FIX: Only set pending_install_names if items is NOT empty
if !items.is_empty() {
let installed_names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
app.pending_install_names = Some(installed_names);
}
// If items is empty (system update), do NOT set pending_install_names
}
// Verify pending_install_names is NOT set for system updates
assert!(
app.pending_install_names.is_none(),
"System updates (empty items) should NOT set pending_install_names"
);
// Verify install_list is preserved
assert_eq!(
app.install_list.len(),
2,
"install_list should still have 2 packages after system update"
);
assert_eq!(app.install_list[0].name, "neovim");
assert_eq!(app.install_list[1].name, "ripgrep");
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/update/update_single_integration.rs | tests/update/update_single_integration.rs | //! Integration tests for single package update process.
//!
//! Tests cover:
//! - Updates modal handling
//! - Single package update flow
//! - Preflight modal for updates
//!
//! Note: These tests verify the update flow structure.
#![cfg(test)]
use pacsea::state::{AppState, Modal, PackageItem, Source};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
fn create_test_package(name: &str, source: Source) -> PackageItem {
PackageItem {
name: name.into(),
version: "1.0.0".into(),
description: String::new(),
source,
popularity: None,
out_of_date: None,
orphaned: false,
}
}
#[test]
/// What: Test Updates modal state creation.
///
/// Inputs:
/// - `Updates` modal with update entries.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies `Updates` modal can be created and accessed.
fn integration_updates_modal_state() {
let entries = vec![
("pkg1".to_string(), "1.0.0".to_string(), "1.1.0".to_string()),
("pkg2".to_string(), "2.0.0".to_string(), "2.1.0".to_string()),
];
let app = AppState {
modal: Modal::Updates {
entries,
scroll: 0,
selected: 0,
},
..Default::default()
};
match app.modal {
Modal::Updates {
entries: ref modal_entries,
scroll,
selected,
} => {
assert_eq!(modal_entries.len(), 2);
assert_eq!(scroll, 0);
assert_eq!(selected, 0);
assert_eq!(modal_entries[0].0, "pkg1");
assert_eq!(modal_entries[0].1, "1.0.0");
assert_eq!(modal_entries[0].2, "1.1.0");
}
_ => panic!("Expected Updates modal"),
}
}
#[test]
/// What: Test single package update flow structure.
///
/// Inputs:
/// - Package item with updated version.
///
/// Output:
/// - Update flow can be initiated.
///
/// Details:
/// - Verifies that single package updates use the preflight modal flow.
fn integration_single_package_update_flow() {
let _app = AppState::default();
let pkg = create_test_package(
"test-pkg",
Source::Official {
repo: "extra".into(),
arch: "x86_64".into(),
},
);
// Single package update should open preflight modal (similar to install)
// This is handled by open_preflight_modal function
// We can test that the package structure supports updates
assert_eq!(pkg.name, "test-pkg");
assert_eq!(pkg.version, "1.0.0");
assert!(matches!(pkg.source, Source::Official { .. }));
}
#[test]
/// What: Test Updates modal navigation.
///
/// Inputs:
/// - `Updates` modal with multiple entries, navigation keys.
///
/// Output:
/// - Selection moves correctly.
///
/// Details:
/// - Verifies navigation in `Updates` modal.
fn integration_updates_modal_navigation() {
let entries = vec![
("pkg1".to_string(), "1.0.0".to_string(), "1.1.0".to_string()),
("pkg2".to_string(), "2.0.0".to_string(), "2.1.0".to_string()),
("pkg3".to_string(), "3.0.0".to_string(), "3.1.0".to_string()),
];
let app = AppState {
modal: Modal::Updates {
entries,
scroll: 0,
selected: 0,
},
..Default::default()
};
// Test selection state
match app.modal {
Modal::Updates { selected, .. } => {
assert_eq!(selected, 0);
}
_ => panic!("Expected Updates modal"),
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/update/mod.rs | tests/update/mod.rs | //! Integration and UI tests for update operations.
mod system_update_integration;
mod system_update_ui;
mod update_flags;
mod update_single_integration;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/remove/remove_integration.rs | tests/remove/remove_integration.rs | //! Integration tests for the remove process.
//!
//! Tests cover:
//! - Full remove flow from user action to execution
//! - Cascade mode handling
//! - Preflight modal for remove
//! - Executor request handling
//!
//! Note: These tests are expected to fail initially as remove currently spawns terminals.
#![cfg(test)]
use pacsea::install::{ExecutorOutput, ExecutorRequest};
use pacsea::state::modal::CascadeMode;
use pacsea::state::{AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
fn create_test_package(name: &str, source: Source) -> PackageItem {
PackageItem {
name: name.into(),
version: "1.0.0".into(),
description: String::new(),
source,
popularity: None,
out_of_date: None,
orphaned: false,
}
}
#[test]
/// What: Test preflight modal state creation for remove action.
///
/// Inputs:
/// - Remove list with packages.
///
/// Output:
/// - `Preflight` modal can be created with correct items and action.
///
/// Details:
/// - Verifies preflight modal state structure for remove.
fn integration_remove_preflight_modal_state() {
let items = vec![
create_test_package(
"old-pkg1",
Source::Official {
repo: "extra".into(),
arch: "x86_64".into(),
},
),
create_test_package(
"old-pkg2",
Source::Official {
repo: "extra".into(),
arch: "x86_64".into(),
},
),
];
// Test that we can create a preflight modal state for remove
let app = AppState {
modal: Modal::Preflight {
items,
action: PreflightAction::Remove,
tab: PreflightTab::Summary,
summary: None,
summary_scroll: 0,
header_chips: pacsea::state::modal::PreflightHeaderChips {
package_count: 2,
download_bytes: 0,
install_delta_bytes: -2000,
aur_count: 0,
risk_score: 0,
risk_level: pacsea::state::modal::RiskLevel::Low,
},
dependency_info: Vec::new(),
dep_selected: 0,
dep_tree_expanded: std::collections::HashSet::new(),
deps_error: None,
file_info: Vec::new(),
file_selected: 0,
file_tree_expanded: std::collections::HashSet::new(),
files_error: None,
service_info: Vec::new(),
service_selected: 0,
services_loaded: false,
services_error: None,
sandbox_info: Vec::new(),
sandbox_selected: 0,
sandbox_tree_expanded: std::collections::HashSet::new(),
sandbox_loaded: false,
sandbox_error: None,
selected_optdepends: std::collections::HashMap::new(),
cascade_mode: CascadeMode::Basic,
cached_reverse_deps_report: None,
},
..Default::default()
};
match app.modal {
Modal::Preflight {
items: ref modal_items,
action,
tab,
..
} => {
assert_eq!(modal_items.len(), 2);
assert_eq!(action, PreflightAction::Remove);
assert_eq!(tab, PreflightTab::Summary);
}
_ => panic!("Expected Preflight modal, got: {:?}", app.modal),
}
}
#[test]
/// What: Test executor request creation for remove.
///
/// Inputs:
/// - Package names, cascade mode, password, `dry_run` flag.
///
/// Output:
/// - `ExecutorRequest::Remove` with correct parameters.
///
/// Details:
/// - Verifies executor request is created correctly from remove parameters.
fn integration_remove_executor_request_creation() {
let names = vec!["old-pkg1".to_string(), "old-pkg2".to_string()];
let request = ExecutorRequest::Remove {
names,
password: Some("testpass".to_string()),
cascade: CascadeMode::Cascade,
dry_run: false,
};
match request {
ExecutorRequest::Remove {
names: req_names,
password,
cascade,
dry_run,
} => {
assert_eq!(req_names.len(), 2);
assert_eq!(password, Some("testpass".to_string()));
assert_eq!(cascade, CascadeMode::Cascade);
assert!(!dry_run);
}
ExecutorRequest::Install { .. }
| ExecutorRequest::Downgrade { .. }
| ExecutorRequest::Update { .. }
| ExecutorRequest::CustomCommand { .. }
| ExecutorRequest::Scan { .. } => {
panic!("Expected Remove request")
}
}
}
#[test]
/// What: Test executor request with different cascade modes.
///
/// Inputs:
/// - Package names with different cascade modes.
///
/// Output:
/// - `ExecutorRequest::Remove` with correct cascade mode.
///
/// Details:
/// - Verifies all cascade modes are handled correctly.
fn integration_remove_cascade_modes() {
let names = ["test-pkg".to_string()];
for cascade_mode in [
CascadeMode::Basic,
CascadeMode::Cascade,
CascadeMode::CascadeWithConfigs,
] {
let request = ExecutorRequest::Remove {
names: names.to_vec(),
password: None,
cascade: cascade_mode,
dry_run: true,
};
match request {
ExecutorRequest::Remove { cascade, .. } => {
assert_eq!(cascade, cascade_mode);
}
ExecutorRequest::Install { .. }
| ExecutorRequest::Downgrade { .. }
| ExecutorRequest::Update { .. }
| ExecutorRequest::CustomCommand { .. }
| ExecutorRequest::Scan { .. } => {
panic!("Expected Remove request")
}
}
}
}
#[test]
/// What: Test executor output handling for remove.
///
/// Inputs:
/// - Various `ExecutorOutput` messages.
///
/// Output:
/// - Output messages are correctly structured.
///
/// Details:
/// - Verifies `ExecutorOutput` enum variants work correctly for remove operations.
fn integration_remove_executor_output_handling() {
// Test Line output
let output1 = ExecutorOutput::Line("Removing packages...".to_string());
match output1 {
ExecutorOutput::Line(line) => assert!(line.contains("Removing")),
_ => panic!("Expected Line variant"),
}
// Test Finished output with success
let output2 = ExecutorOutput::Finished {
success: true,
exit_code: Some(0),
failed_command: None,
};
match output2 {
ExecutorOutput::Finished {
success,
exit_code,
failed_command: _,
} => {
assert!(success);
assert_eq!(exit_code, Some(0));
}
_ => panic!("Expected Finished variant"),
}
// Test Error output
let output3 = ExecutorOutput::Error("Remove failed".to_string());
match output3 {
ExecutorOutput::Error(msg) => assert_eq!(msg, "Remove failed"),
_ => panic!("Expected Error variant"),
}
}
#[test]
/// What: Test that remove process uses `ExecutorRequest` through preflight modal.
///
/// Inputs:
/// - Remove action triggered through preflight `start_execution` (simulated).
///
/// Output:
/// - `pending_executor_request` should be set with `ExecutorRequest::Remove`.
///
/// Details:
/// - Remove already uses executor pattern through preflight modal.
/// - This test simulates what `start_execution` does to verify the executor pattern works.
/// - Note: Direct remove (without preflight) still uses `spawn_remove_all` and spawns terminals.
fn integration_remove_uses_executor_not_terminal() {
let items = vec![create_test_package(
"old-pkg1",
Source::Official {
repo: "extra".into(),
arch: "x86_64".into(),
},
)];
let mut app = AppState {
remove_cascade_mode: CascadeMode::Basic,
dry_run: false,
..Default::default()
};
// Simulate what start_execution does for Remove action
// This is the NEW process - remove through preflight uses ExecutorRequest
let header_chips = pacsea::state::modal::PreflightHeaderChips {
package_count: 1,
download_bytes: 0,
install_delta_bytes: -1000,
aur_count: 0,
risk_score: 0,
risk_level: pacsea::state::modal::RiskLevel::Low,
};
// Store executor request (what start_execution does)
let names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
// Transition to PreflightExec modal (what start_execution does)
app.modal = Modal::PreflightExec {
items,
action: PreflightAction::Remove,
tab: PreflightTab::Summary,
verbose: false,
log_lines: Vec::new(),
abortable: false,
header_chips,
success: None,
};
app.pending_executor_request = Some(ExecutorRequest::Remove {
names,
password: None,
cascade: app.remove_cascade_mode,
dry_run: app.dry_run,
});
// Verify pending_executor_request is set correctly
assert!(
app.pending_executor_request.is_some(),
"Remove process through preflight must use ExecutorRequest"
);
// Verify it's a Remove request with correct parameters
match app.pending_executor_request {
Some(ExecutorRequest::Remove {
names,
cascade,
dry_run,
..
}) => {
assert_eq!(names.len(), 1);
assert_eq!(names[0], "old-pkg1");
assert_eq!(cascade, CascadeMode::Basic);
assert!(!dry_run);
}
_ => {
panic!("Remove process must use ExecutorRequest::Remove, not other variants");
}
}
// Verify modal is PreflightExec
match app.modal {
Modal::PreflightExec { action, .. } => {
assert_eq!(action, PreflightAction::Remove);
}
_ => panic!("Expected PreflightExec modal"),
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/remove/mod.rs | tests/remove/mod.rs | //! Integration and UI tests for remove operations.
mod remove_integration;
mod remove_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/remove/remove_ui.rs | tests/remove/remove_ui.rs | //! UI tests for remove process modals.
//!
//! Tests cover:
//! - Remove modal rendering structure
//! - `PreflightExec` modal for remove
//! - `ConfirmRemove` modal structure
//!
//! Note: These tests verify modal state structure rather than actual rendering.
#![cfg(test)]
// CascadeMode is imported but not used in tests
// use pacsea::state::modal::CascadeMode;
use pacsea::state::{AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source};
/// What: Build a `PackageItem` for use in UI tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` with a fixed version and empty description
///
/// Details:
/// - Keeps all test packages structurally identical apart from name/source.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: name.to_string(),
        version: String::from("1.0.0"),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test `PreflightExec` modal structure for remove action.
///
/// Inputs:
/// - `PreflightExec` modal populated for a single-package removal.
///
/// Output:
/// - Modal fields hold the expected initial values.
///
/// Details:
/// - Exercises modal construction only; no rendering is performed.
fn ui_preflight_exec_remove_rendering() {
    let mut app = AppState::default();
    let source = Source::Official {
        repo: "extra".into(),
        arch: "x86_64".into(),
    };
    // Header chips for a single official package being removed (negative delta).
    let chips = pacsea::state::modal::PreflightHeaderChips {
        package_count: 1,
        download_bytes: 0,
        install_delta_bytes: -1000,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    app.modal = Modal::PreflightExec {
        items: vec![create_test_package("old-pkg1", source)],
        action: PreflightAction::Remove,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: Vec::new(),
        abortable: false,
        success: None,
        header_chips: chips,
    };
    match app.modal {
        Modal::PreflightExec {
            items: ref shown,
            action,
            tab,
            verbose,
            log_lines,
            abortable,
            ..
        } => {
            assert_eq!(shown.len(), 1);
            assert_eq!(action, PreflightAction::Remove);
            assert_eq!(tab, PreflightTab::Summary);
            assert!(!verbose);
            assert!(log_lines.is_empty());
            assert!(!abortable);
        }
        _ => panic!("Expected PreflightExec modal"),
    }
}
#[test]
/// What: Test `ConfirmRemove` modal structure.
///
/// Inputs:
/// - `ConfirmRemove` modal holding two official packages.
///
/// Output:
/// - Modal retains both packages in insertion order.
///
/// Details:
/// - Verifies the modal variant can be built and inspected.
fn ui_confirm_remove_modal_rendering() {
    let mut app = AppState::default();
    let items: Vec<PackageItem> = ["old-pkg1", "old-pkg2"]
        .into_iter()
        .map(|name| {
            create_test_package(
                name,
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )
        })
        .collect();
    app.modal = Modal::ConfirmRemove { items };
    match app.modal {
        Modal::ConfirmRemove {
            items: ref modal_items,
        } => {
            assert_eq!(modal_items.len(), 2);
            assert_eq!(modal_items[0].name, "old-pkg1");
            assert_eq!(modal_items[1].name, "old-pkg2");
        }
        _ => panic!("Expected ConfirmRemove modal"),
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/downgrade/mod.rs | tests/downgrade/mod.rs | //! Integration and UI tests for downgrade operations.
mod downgrade_integration;
mod downgrade_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/downgrade/downgrade_integration.rs | tests/downgrade/downgrade_integration.rs | //! Integration tests for the downgrade process.
//!
//! Tests cover:
//! - Downgrade list management
//! - Downgrade command execution
//! - Navigation in downgrade pane
//! - `ExecutorRequest::Downgrade` creation
//! - Password prompt for downgrade
//! - Dry-run mode
#![cfg(test)]
use pacsea::install::ExecutorRequest;
use pacsea::state::modal::{PasswordPurpose, PreflightHeaderChips};
use pacsea::state::{AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source};
/// What: Build a `PackageItem` for use in integration tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` with a fixed version and empty description
///
/// Details:
/// - Keeps all test packages structurally identical apart from name/source.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: name.to_string(),
        version: String::from("1.0.0"),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test downgrade list state management.
///
/// Inputs:
/// - `AppState` focused on the downgrade pane.
///
/// Output:
/// - Push, remove, and clear operations behave as expected.
///
/// Details:
/// - Exercises the downgrade list as a plain vector of packages.
fn integration_downgrade_list_management() {
    let mut app = AppState {
        installed_only_mode: true,
        right_pane_focus: pacsea::state::RightPaneFocus::Downgrade,
        ..Default::default()
    };
    for name in ["pkg1", "pkg2"] {
        app.downgrade_list.push(create_test_package(
            name,
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ));
    }
    assert_eq!(app.downgrade_list.len(), 2);
    assert_eq!(app.downgrade_list[0].name, "pkg1");
    assert_eq!(app.downgrade_list[1].name, "pkg2");
    // Dropping the first entry keeps the remainder in order.
    app.downgrade_list.remove(0);
    assert_eq!(app.downgrade_list.len(), 1);
    assert_eq!(app.downgrade_list[0].name, "pkg2");
    // Clearing empties the list entirely.
    app.downgrade_list.clear();
    assert!(app.downgrade_list.is_empty());
}
#[test]
/// What: Test downgrade command structure.
///
/// Inputs:
/// - Package names for downgrade.
///
/// Output:
/// - Dry-run and real command strings contain the expected fragments.
///
/// Details:
/// - Actual execution spawns a terminal, so only command text is checked here.
fn integration_downgrade_command_structure() {
    let joined = ["test-pkg1", "test-pkg2"].join(" ");
    // Dry-run form only echoes what would run.
    let dry_run_cmd = format!("echo DRY RUN: sudo downgrade {joined}");
    for needle in ["DRY RUN", "downgrade", "test-pkg1", "test-pkg2"] {
        assert!(dry_run_cmd.contains(needle));
    }
    // Real form guards on the downgrade tool being available.
    let actual_cmd = format!(
        "if (command -v downgrade >/dev/null 2>&1) || sudo pacman -Qi downgrade >/dev/null 2>&1; then sudo downgrade {joined}; else echo 'downgrade tool not found. Install \"downgrade\" package.'; fi"
    );
    for needle in ["downgrade", "test-pkg1", "test-pkg2"] {
        assert!(actual_cmd.contains(needle));
    }
}
#[test]
/// What: Test that downgrade spawns in a terminal (not executor) since it's an interactive tool.
///
/// Inputs:
/// - App state prepared as if a downgrade were triggered from the preflight modal.
///
/// Output:
/// - No `ExecutorRequest` is queued; downgrade runs in a spawned terminal instead.
///
/// Details:
/// - The downgrade tool prompts the user to pick a version, so it cannot run
///   inside the PTY executor; spawning a terminal is the intended behavior.
#[allow(dead_code)]
fn integration_downgrade_spawns_in_terminal() {
    // Key-event imports stay commented until handle_install_key is public:
    // use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
    // TODO: handle_install_key is private; expose it or drive this via public API.
    // use pacsea::events::install::handle_install_key;
    use tokio::sync::mpsc;
    let app = AppState {
        installed_only_mode: true,
        right_pane_focus: pacsea::state::RightPaneFocus::Downgrade,
        downgrade_list: vec![create_test_package(
            "test-pkg",
            Source::Official {
                repo: "extra".to_string(),
                arch: "x86_64".to_string(),
            },
        )],
        dry_run: false,
        ..Default::default()
    };
    let (_dtx, _drx): (mpsc::UnboundedSender<pacsea::state::PackageItem>, _) =
        mpsc::unbounded_channel();
    let (_ptx, _prx): (mpsc::UnboundedSender<pacsea::state::PackageItem>, _) =
        mpsc::unbounded_channel();
    let (_atx, _arx): (mpsc::UnboundedSender<pacsea::state::PackageItem>, _) =
        mpsc::unbounded_channel();
    // The Enter-key trigger stays disabled until the handler is public:
    // let ke = KeyEvent::new(KeyCode::Enter, KeyModifiers::empty());
    // handle_install_key(ke, &mut app, &_dtx, &_ptx, &_atx);
    // Interactive tools bypass the executor entirely.
    assert!(
        app.pending_executor_request.is_none(),
        "Downgrade correctly spawns in a terminal (not executor) because it's an interactive tool."
    );
    // Note: ExecutorRequest::Downgrade handling via key events is still pending;
    // this test tightens once the variant is wired through the event path.
}
#[test]
/// What: Test downgrade with empty list.
///
/// Inputs:
/// - Freshly-defaulted `AppState` with downgrade pane focus.
///
/// Output:
/// - Empty list and no selection.
///
/// Details:
/// - Covers the edge case of nothing being queued for downgrade.
fn integration_downgrade_empty_list() {
    let app = AppState {
        installed_only_mode: true,
        right_pane_focus: pacsea::state::RightPaneFocus::Downgrade,
        ..Default::default()
    };
    assert!(app.downgrade_list.is_empty());
    assert!(app.downgrade_state.selected().is_none());
}
#[test]
/// What: Test `ExecutorRequest::Downgrade` creation with password.
///
/// Inputs:
/// - Two package names and a password.
///
/// Output:
/// - Request round-trips all fields intact.
///
/// Details:
/// - Uses `let else` to destructure the request in one step.
fn integration_executor_request_downgrade_with_password() {
    let request = ExecutorRequest::Downgrade {
        names: vec!["pkg1".to_string(), "pkg2".to_string()],
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::Downgrade {
        names,
        password,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::Downgrade");
    };
    assert_eq!(names.len(), 2);
    assert_eq!(names[0], "pkg1");
    assert_eq!(names[1], "pkg2");
    assert_eq!(password.as_deref(), Some("testpassword"));
    assert!(!dry_run);
}
#[test]
/// What: Test `ExecutorRequest::Downgrade` without password.
///
/// Inputs:
/// - One package name, no password.
///
/// Output:
/// - `password` field is `None`.
///
/// Details:
/// - Covers the passwordless (e.g. cached sudo) path.
fn integration_executor_request_downgrade_no_password() {
    let request = ExecutorRequest::Downgrade {
        names: vec!["pkg1".to_string()],
        password: None,
        dry_run: false,
    };
    let ExecutorRequest::Downgrade { password, .. } = request else {
        panic!("Expected ExecutorRequest::Downgrade");
    };
    assert!(password.is_none());
}
#[test]
/// What: Test `ExecutorRequest::Downgrade` dry-run mode.
///
/// Inputs:
/// - Downgrade request with `dry_run` enabled.
///
/// Output:
/// - `dry_run` flag survives construction.
///
/// Details:
/// - Ensures dry-run intent reaches the executor.
fn integration_executor_request_downgrade_dry_run() {
    let request = ExecutorRequest::Downgrade {
        names: vec!["pkg1".to_string()],
        password: None,
        dry_run: true,
    };
    let ExecutorRequest::Downgrade { dry_run, .. } = request else {
        panic!("Expected ExecutorRequest::Downgrade");
    };
    assert!(dry_run);
}
#[test]
/// What: Test downgrade triggers password prompt.
///
/// Inputs:
/// - One package queued for downgrade.
///
/// Output:
/// - `PasswordPrompt` modal carries purpose and items.
///
/// Details:
/// - Models the authentication step before a downgrade runs.
fn integration_downgrade_password_prompt() {
    let pkg = create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        downgrade_list: vec![pkg.clone()],
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // The downgrade flow opens the password prompt before executing.
    app.modal = Modal::PasswordPrompt {
        purpose: PasswordPurpose::Downgrade,
        items: vec![pkg],
        input: String::new(),
        cursor: 0,
        error: None,
    };
    let Modal::PasswordPrompt { purpose, items, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::Downgrade);
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].name, "test-pkg");
}
#[test]
/// What: Test downgrade transitions to `PreflightExec` after password.
///
/// Inputs:
/// - `PasswordPrompt` modal with a typed password.
///
/// Output:
/// - Modal becomes `PreflightExec`; an `ExecutorRequest::Downgrade` is queued.
///
/// Details:
/// - Reproduces the state handoff performed on password submission.
fn integration_downgrade_to_preflight_exec() {
    let pkg = create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Downgrade,
            items: vec![pkg],
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Pull the password and the package list out of the prompt in one pass.
    let (password, items) = match app.modal {
        Modal::PasswordPrompt {
            ref input,
            ref items,
            ..
        } => {
            let pw = if input.trim().is_empty() {
                None
            } else {
                Some(input.clone())
            };
            (pw, items.clone())
        }
        _ => (None, Vec::new()),
    };
    let names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
    // Hand the staged header chips over to the exec modal.
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items,
        action: PreflightAction::Downgrade,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    app.pending_executor_request = Some(ExecutorRequest::Downgrade {
        names,
        password,
        dry_run: false,
    });
    // Modal side of the transition.
    match app.modal {
        Modal::PreflightExec { action, items, .. } => {
            assert_eq!(action, PreflightAction::Downgrade);
            assert_eq!(items.len(), 1);
            assert_eq!(items[0].name, "test-pkg");
        }
        _ => panic!("Expected PreflightExec modal"),
    }
    // Executor side of the transition.
    match app.pending_executor_request {
        Some(ExecutorRequest::Downgrade {
            names, password, ..
        }) => {
            assert_eq!(names.len(), 1);
            assert_eq!(names[0], "test-pkg");
            assert_eq!(password.as_deref(), Some("testpassword"));
        }
        _ => panic!("Expected ExecutorRequest::Downgrade"),
    }
}
#[test]
/// What: Test downgrade multiple packages.
///
/// Inputs:
/// - Three package names.
///
/// Output:
/// - All names survive into `ExecutorRequest::Downgrade`.
///
/// Details:
/// - Verifies batch downgrade carries the full set.
fn integration_downgrade_multiple_packages() {
    let request = ExecutorRequest::Downgrade {
        names: ["pkg1", "pkg2", "pkg3"]
            .into_iter()
            .map(str::to_string)
            .collect(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::Downgrade { names, .. } = request else {
        panic!("Expected ExecutorRequest::Downgrade");
    };
    assert_eq!(names.len(), 3);
    for expected in ["pkg1", "pkg2", "pkg3"] {
        assert!(names.iter().any(|n| n == expected));
    }
}
#[test]
/// What: Test downgrade command format with downgrade tool check.
///
/// Inputs:
/// - A package name for downgrade.
///
/// Output:
/// - Command text includes the tool check and a fallback message.
///
/// Details:
/// - The shell guard avoids invoking a missing downgrade binary.
fn integration_downgrade_command_with_tool_check() {
    let pkg_name = "test-pkg";
    let cmd = format!(
        "if command -v downgrade >/dev/null 2>&1; then sudo downgrade {pkg_name}; else echo 'downgrade tool not found. Install \"downgrade\" package.'; fi"
    );
    for needle in ["command -v downgrade", "sudo downgrade", pkg_name, "not found"] {
        assert!(cmd.contains(needle));
    }
}
#[test]
/// What: Test downgrade dry-run command format.
///
/// Inputs:
/// - A package name for a dry-run downgrade.
///
/// Output:
/// - Command carries the "DRY RUN:" prefix and target package.
///
/// Details:
/// - Dry-run merely echoes the command it would have executed.
fn integration_downgrade_dry_run_command_format() {
    let pkg_name = "test-pkg";
    let cmd = format!("echo DRY RUN: sudo downgrade {pkg_name}");
    for needle in ["DRY RUN:", "sudo downgrade", pkg_name] {
        assert!(cmd.contains(needle));
    }
}
#[test]
/// What: Test downgrade pane focus state.
///
/// Inputs:
/// - `AppState` initialized with downgrade pane focus.
///
/// Output:
/// - Right pane focus reads back as Downgrade.
///
/// Details:
/// - Confirms focus tracking for downgrade operations.
fn integration_downgrade_pane_focus() {
    let app = AppState {
        installed_only_mode: true,
        right_pane_focus: pacsea::state::RightPaneFocus::Downgrade,
        ..Default::default()
    };
    assert!(matches!(
        app.right_pane_focus,
        pacsea::state::RightPaneFocus::Downgrade
    ));
}
#[test]
/// What: Test downgrade state selection tracking.
///
/// Inputs:
/// - Downgrade list with two packages.
///
/// Output:
/// - Selection index follows each `select` call.
///
/// Details:
/// - Verifies list-state selection management for the downgrade pane.
fn integration_downgrade_selection_tracking() {
    let mut app = AppState {
        installed_only_mode: true,
        right_pane_focus: pacsea::state::RightPaneFocus::Downgrade,
        ..Default::default()
    };
    for name in ["pkg1", "pkg2"] {
        app.downgrade_list.push(create_test_package(
            name,
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ));
    }
    // Move the selection across both entries and read it back.
    for idx in [0usize, 1] {
        app.downgrade_state.select(Some(idx));
        assert_eq!(app.downgrade_state.selected(), Some(idx));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/downgrade/downgrade_ui.rs | tests/downgrade/downgrade_ui.rs | //! UI tests for downgrade process.
//!
//! Tests cover:
//! - Downgrade list state structure
//! - Downgrade pane focus state
//!
//! Note: These tests verify state structure rather than actual rendering.
#![cfg(test)]
use pacsea::state::{AppState, PackageItem, RightPaneFocus, Source};
/// What: Build a `PackageItem` for use in downgrade UI tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` with a fixed version and empty description
///
/// Details:
/// - Keeps all test packages structurally identical apart from name/source.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: name.to_string(),
        version: String::from("1.0.0"),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test downgrade list state structure.
///
/// Inputs:
/// - `AppState` with two queued packages and a selection.
///
/// Output:
/// - Mode, focus, list length, and selection all read back correctly.
///
/// Details:
/// - State-structure check only; nothing is rendered.
fn ui_downgrade_list_state() {
    let mut app = AppState {
        installed_only_mode: true,
        right_pane_focus: RightPaneFocus::Downgrade,
        ..Default::default()
    };
    for name in ["pkg1", "pkg2"] {
        app.downgrade_list.push(create_test_package(
            name,
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ));
    }
    app.downgrade_state.select(Some(0));
    assert!(app.installed_only_mode);
    assert_eq!(app.right_pane_focus, RightPaneFocus::Downgrade);
    assert_eq!(app.downgrade_list.len(), 2);
    assert_eq!(app.downgrade_state.selected(), Some(0));
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/network_error.rs | tests/other/network_error.rs | //! Integration tests for network error handling.
//!
//! Tests cover:
//! - `ExecutorOutput::Error` handling for network failures during install
//! - Error display in `PreflightExec` modal when network fails
//! - Error recovery and UI state after network failure
//! - Network failure during system update
//! - Network failure during AUR package installation
#![cfg(test)]
use pacsea::install::{ExecutorOutput, ExecutorRequest};
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source,
modal::PreflightHeaderChips,
};
/// What: Build a `PackageItem` for use in network-error tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` with a fixed version and empty description
///
/// Details:
/// - Keeps all test packages structurally identical apart from name/source.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: name.to_string(),
        version: String::from("1.0.0"),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test `ExecutorOutput::Error` with network failure message during install.
///
/// Inputs:
/// - Network-related error messages (connect failure, DNS, timeout, HTTP codes).
///
/// Output:
/// - Each message round-trips through `ExecutorOutput::Error` and matches a
///   recognizable network-failure marker.
///
/// Details:
/// - Verifies network errors are properly represented as `ExecutorOutput::Error`.
fn integration_network_error_executor_output() {
    // A fixed-size array avoids the needless heap allocation of `vec!`
    // (clippy::useless_vec) for this static list of messages.
    let network_errors = [
        "Failed to connect to host (network unreachable)",
        "Could not resolve host (DNS/network issue)",
        "Operation timeout",
        "HTTP error from server (code unknown)",
        "HTTP 404: Resource not found (package may not exist in repository)",
        "HTTP 503: Service temporarily unavailable",
    ];
    for error_msg in network_errors {
        let output = ExecutorOutput::Error(error_msg.to_string());
        match output {
            ExecutorOutput::Error(msg) => {
                // Include the offending message so a failure names the culprit.
                assert!(
                    msg.contains("network") || msg.contains("timeout") || msg.contains("HTTP"),
                    "message lacks a network marker: {msg}"
                );
            }
            _ => panic!("Expected ExecutorOutput::Error"),
        }
    }
}
#[test]
/// What: Test `PreflightExec` modal shows error state when network fails during install.
///
/// Inputs:
/// - `PreflightExec` modal whose log receives network-error lines.
///
/// Output:
/// - Error text appears in `log_lines` after the initial sync line.
///
/// Details:
/// - Verifies network errors are surfaced to the user in the exec modal.
fn integration_network_error_preflight_exec_display() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package(
                "ripgrep",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![":: Synchronizing package databases...".to_string()],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Append the failure lines the executor would emit mid-operation.
    if let Modal::PreflightExec {
        ref mut log_lines, ..
    } = app.modal
    {
        log_lines.extend([
            "error: failed to retrieve 'core.db' from mirror".to_string(),
            "error: failed to retrieve 'extra.db' from mirror".to_string(),
            "error: Failed to connect to host (network unreachable)".to_string(),
        ]);
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 4);
    assert!(log_lines[2].contains("failed to retrieve"));
    assert!(log_lines[3].contains("network unreachable"));
}
#[test]
/// What: Test `ExecutorOutput::Error` propagation from executor to UI.
///
/// Inputs:
/// - `ExecutorOutput::Error` carrying a network failure message.
///
/// Output:
/// - Error text lands in the `PreflightExec` log with an `ERROR:` prefix.
///
/// Details:
/// - Verifies the error-propagation mechanism works end to end in state.
fn integration_network_error_propagation() {
    let error_output =
        ExecutorOutput::Error("Failed to connect to host (network unreachable)".to_string());
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package(
                "test-pkg",
                Source::Official {
                    repo: "core".into(),
                    arch: "x86_64".into(),
                },
            )],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Route the executor error into the modal log, as the event loop would.
    if let ExecutorOutput::Error(msg) = &error_output {
        if let Modal::PreflightExec {
            ref mut log_lines, ..
        } = app.modal
        {
            log_lines.push(format!("ERROR: {msg}"));
        }
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 1);
    assert!(log_lines[0].contains("ERROR:"));
    assert!(log_lines[0].contains("network unreachable"));
}
#[test]
/// What: Test UI recovery after network error.
///
/// Inputs:
/// - `PreflightExec` modal already marked as failed.
///
/// Output:
/// - Error state is readable, then the modal can be dismissed.
///
/// Details:
/// - Verifies the user can continue after a failed operation.
fn integration_network_error_recovery() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package(
                "test-pkg",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec!["ERROR: Failed to connect to host (network unreachable)".to_string()],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: Some(false), // failure already recorded
        },
        ..Default::default()
    };
    match app.modal {
        Modal::PreflightExec {
            success, log_lines, ..
        } => {
            assert_eq!(success, Some(false));
            assert!(!log_lines.is_empty());
            assert!(log_lines[0].contains("ERROR:"));
        }
        _ => panic!("Expected PreflightExec modal"),
    }
    // Closing the modal models the user dismissing the error to retry.
    app.modal = Modal::None;
    assert!(matches!(app.modal, Modal::None));
}
#[test]
/// What: Test network failure during AUR package installation.
///
/// Inputs:
/// - AUR package install whose clone step fails.
///
/// Output:
/// - Clone/DNS failure lines appear in the exec log.
///
/// Details:
/// - Verifies AUR-specific network errors are handled correctly.
fn integration_network_error_aur_installation() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("yay-bin", Source::Aur)],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Simulate the clone attempt failing at the network layer.
    if let Modal::PreflightExec {
        ref mut log_lines, ..
    } = app.modal
    {
        log_lines.extend([
            ":: Cloning AUR package...".to_string(),
            "error: failed to clone AUR repository".to_string(),
            "error: Could not resolve host (DNS/network issue)".to_string(),
        ]);
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 3);
    assert!(log_lines[1].contains("failed to clone"));
    assert!(log_lines[2].contains("DNS/network issue"));
}
#[test]
/// What: Test `ExecutorRequest::Install` with simulated network failure.
///
/// Inputs:
/// - Install request destined to fail on the network.
///
/// Output:
/// - Request structure is valid regardless of later failure.
///
/// Details:
/// - Verifies install requests can be built even when the network will fail.
fn integration_network_error_install_request() {
    let request = ExecutorRequest::Install {
        items: vec![create_test_package(
            "test-pkg",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        )],
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::Install { items, .. } = request else {
        panic!("Expected ExecutorRequest::Install");
    };
    // The request itself is well-formed; error handling happens downstream.
    assert_eq!(items.len(), 1);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/ui_integration.rs | tests/other/ui_integration.rs | //! Integration tests for UI rendering using ratatui's `TestBackend`.
//!
//! These tests verify that the TUI renders correctly across different application states
//! without requiring a real terminal. They focus on visual rendering correctness rather
//! than business logic.
use ratatui::{Terminal, backend::TestBackend};
use std::collections::HashMap;
use std::time::Instant;
use pacsea::state::{AppState, Modal, PackageDetails, PackageItem, Source};
use pacsea::ui;
/// Initialize minimal English translations for tests.
///
/// Populates both `app.translations` and `app.translations_fallback` with only
/// the keys the rendering tests actually need.
///
/// The key/value pairs live in one const table so adding a translation is a
/// one-line change instead of a three-line `insert` call.
fn init_test_translations(app: &mut AppState) {
    const PAIRS: &[(&str, &str)] = &[
        // Details
        ("app.details.fields.url", "URL"),
        ("app.details.url_label", "URL:"),
        // Results
        ("app.results.title", "Results"),
        ("app.results.buttons.sort", "Sort"),
        ("app.results.buttons.options", "Options"),
        ("app.results.buttons.panels", "Panels"),
        ("app.results.buttons.config_lists", "Config/Lists"),
        ("app.results.buttons.menu", "Menu"),
        ("app.results.filters.aur", "AUR"),
        ("app.results.filters.core", "core"),
        ("app.results.filters.extra", "extra"),
        ("app.results.filters.multilib", "multilib"),
        ("app.results.filters.eos", "EOS"),
        ("app.results.filters.cachyos", "CachyOS"),
        ("app.results.filters.artix", "Artix"),
        ("app.results.filters.artix_omniverse", "OMNI"),
        ("app.results.filters.artix_universe", "UNI"),
        ("app.results.filters.artix_lib32", "LIB32"),
        ("app.results.filters.artix_galaxy", "GALAXY"),
        ("app.results.filters.artix_world", "WORLD"),
        ("app.results.filters.artix_system", "SYSTEM"),
        ("app.results.filters.manjaro", "Manjaro"),
        // Toasts
        ("app.toasts.copied_to_clipboard", "Copied to clipboard"),
        ("app.toasts.title_clipboard", "Clipboard"),
        ("app.toasts.title_news", "News"),
        // Middle row
        ("app.middle.recent.title", "Search history"),
        ("app.middle.install.title", "Install"),
        ("app.middle.downgrade.title", "Downgrade"),
        ("app.middle.remove.title", "Remove"),
        // Modals
        ("app.modals.alert.title", "Alert"),
        ("app.modals.help.title", "Help"),
        ("app.modals.news.title", "Arch Linux News"),
    ];
    let translations: HashMap<String, String> = PAIRS
        .iter()
        .map(|&(key, value)| (key.to_string(), value.to_string()))
        .collect();
    // Primary map is cloned from the built table; fallback takes ownership.
    app.translations.clone_from(&translations);
    app.translations_fallback = translations;
}
/// Create a minimal `AppState` for testing.
fn create_test_app_state() -> AppState {
let mut app = AppState {
last_input_change: Instant::now(),
..Default::default()
};
init_test_translations(&mut app);
app
}
/// Create a `TestBackend` with standard size for testing.
fn create_test_backend() -> TestBackend {
TestBackend::new(120, 40)
}
/// Build a `TestBackend` with caller-chosen dimensions.
fn create_test_backend_size(width: u16, height: u16) -> TestBackend {
    TestBackend::new(width, height)
}
/// Draw the full UI once into `backend` and hand the terminal back so
/// callers can inspect the rendered buffer and the rects stored on `app`.
fn render_ui_to_backend(backend: TestBackend, app: &mut AppState) -> Terminal<TestBackend> {
    let mut term = Terminal::new(backend).expect("failed to create test terminal");
    term.draw(|frame| ui::ui(frame, app))
        .expect("failed to draw test terminal");
    term
}
// Core UI Rendering Tests
#[test]
/// What: Render the UI from a completely empty application state.
///
/// Inputs:
/// - Default `AppState` with no results, lists, or modals.
///
/// Output:
/// - Drawing completes without panicking.
/// - The backend buffer has non-zero width and height.
/// - `results_rect` is populated by the layout pass.
///
/// Details:
/// - Smoke-checks basic layout and rendering when there is nothing to show.
fn test_ui_renders_empty_state() {
    let mut state = create_test_app_state();
    let term = render_ui_to_backend(create_test_backend(), &mut state);
    // Rendering must produce a non-degenerate buffer.
    let area = term.backend().buffer().area;
    assert!(area.width > 0);
    assert!(area.height > 0);
    // The layout pass must have recorded the results pane rect.
    assert!(state.results_rect.is_some());
}
#[test]
/// What: Render the results pane with a mixed AUR/official result set.
///
/// Inputs:
/// - `AppState` holding two `PackageItem`s from different sources.
///
/// Output:
/// - `results_rect` is set after rendering.
/// - Buffer dimensions match the 120x40 backend.
///
/// Details:
/// - Exercises list rendering with both source variants plus selection init.
fn test_ui_renders_with_results() {
    let mut state = create_test_app_state();
    let aur_item = PackageItem {
        name: "test-package".to_string(),
        version: "1.0.0".to_string(),
        description: "A test package".to_string(),
        source: Source::Aur,
        popularity: Some(42.5),
        out_of_date: None,
        orphaned: false,
    };
    let official_item = PackageItem {
        name: "another-package".to_string(),
        version: "2.0.0".to_string(),
        description: "Another test package".to_string(),
        source: Source::Official {
            repo: "extra".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    state.results = vec![aur_item, official_item];
    state.all_results = state.results.clone();
    state.selected = 0;
    state.list_state.select(Some(0));
    let term = render_ui_to_backend(create_test_backend(), &mut state);
    // The results pane must have been laid out.
    assert!(state.results_rect.is_some());
    // The frame must span the full backend.
    let buf = term.backend().buffer();
    assert_eq!(buf.area.width, 120);
    assert_eq!(buf.area.height, 40);
}
#[test]
/// What: Render the details pane for a selected package.
///
/// Inputs:
/// - `AppState` with one result selected and `PackageDetails` filled in.
///
/// Output:
/// - `details_rect` and `url_button_rect` are set after rendering.
///
/// Details:
/// - Covers metadata, dependency lists, and the URL button placement.
fn test_ui_renders_with_details() {
    let mut state = create_test_app_state();
    state.results = vec![PackageItem {
        name: "test-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: "Test".to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    state.selected = 0;
    state.list_state.select(Some(0));
    state.details = PackageDetails {
        // Identity and origin
        name: "test-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: "A test package description".to_string(),
        url: "https://example.com/test".to_string(),
        repository: "aur".to_string(),
        architecture: "x86_64".to_string(),
        owner: "testuser".to_string(),
        build_date: "2024-01-01".to_string(),
        // Relationships
        licenses: vec!["MIT".to_string()],
        depends: vec!["bash".to_string()],
        groups: vec![],
        provides: vec![],
        opt_depends: vec![],
        required_by: vec![],
        optional_for: vec![],
        conflicts: vec![],
        replaces: vec![],
        // Sizes and flags
        download_size: Some(1024),
        install_size: Some(2048),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    // Both the details pane and its URL button must be positioned.
    assert!(state.details_rect.is_some());
    assert!(state.url_button_rect.is_some());
}
#[test]
/// What: Render the middle row (Recent and Install panes).
///
/// Inputs:
/// - `AppState` with search history entries and one install-list item.
///
/// Output:
/// - `recent_rect` and `install_rect` are both set after rendering.
///
/// Details:
/// - Exercises list-state handling for both middle-row panes.
fn test_ui_renders_middle_row() {
    let mut state = create_test_app_state();
    // Seed search history and select the first entry.
    state.load_recent_items(&["vim".to_string(), "git".to_string()]);
    state.history_state.select(Some(0));
    // Seed the install list with a single pending package.
    state.install_list = vec![PackageItem {
        name: "install-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: "To install".to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    state.install_state.select(Some(0));
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.recent_rect.is_some());
    assert!(state.install_rect.is_some());
}
// Layout Tests
#[test]
/// What: Render at the minimum viable terminal size.
///
/// Inputs:
/// - `TestBackend` sized 80x10.
///
/// Output:
/// - Rendering does not panic and the buffer matches the requested size.
///
/// Details:
/// - Guards layout math against very small terminals.
fn test_layout_minimum_sizes() {
    let mut state = create_test_app_state();
    let term = render_ui_to_backend(create_test_backend_size(80, 10), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 80);
    assert_eq!(area.height, 10);
}
#[test]
/// What: Render at a large terminal size.
///
/// Inputs:
/// - `TestBackend` sized 200x60 with one package result.
///
/// Output:
/// - Rendering succeeds and the buffer matches the requested size.
///
/// Details:
/// - Confirms layout math scales up with extra space.
fn test_layout_maximum_sizes() {
    let mut state = create_test_app_state();
    state.results = vec![PackageItem {
        name: "pkg".to_string(),
        version: "1.0.0".to_string(),
        description: "Test".to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    state.selected = 0;
    state.list_state.select(Some(0));
    let term = render_ui_to_backend(create_test_backend_size(200, 60), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 200);
    assert_eq!(area.height, 60);
}
#[test]
/// What: Render the UI across several common terminal sizes.
///
/// Inputs:
/// - Backends sized 80x24, 120x40, and 160x50 with one package result each.
///
/// Output:
/// - Buffer dimensions match the backend size at every step.
/// - `results_rect` is set at every size.
///
/// Details:
/// - Confirms consistent rendering regardless of dimensions.
fn test_layout_responsive() {
    for (width, height) in [(80, 24), (120, 40), (160, 50)] {
        let mut state = create_test_app_state();
        state.results = vec![PackageItem {
            name: "test".to_string(),
            version: "1.0".to_string(),
            description: "Test".to_string(),
            source: Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }];
        state.selected = 0;
        state.list_state.select(Some(0));
        let term = render_ui_to_backend(create_test_backend_size(width, height), &mut state);
        let area = term.backend().buffer().area;
        assert_eq!(area.width, width);
        assert_eq!(area.height, height);
        assert!(state.results_rect.is_some());
    }
}
#[test]
/// What: Test UI layout when panes are hidden.
///
/// Inputs:
/// - `AppState` with `show_recent_pane` and then `show_install_pane` set to false.
///
/// Output:
/// - UI renders a full 120x40 frame without panicking with either pane hidden.
///
/// Details:
/// - Verifies pane visibility toggling doesn't break layout.
/// - The previous assertions (`rect.is_none() || rect.is_some()`) were
///   tautologies that could never fail; they are replaced with buffer-size
///   checks so the test actually fails if rendering is cut short.
fn test_layout_pane_hiding() {
    let backend = create_test_backend();
    let mut app = create_test_app_state();
    // Render with the Recent pane hidden.
    app.show_recent_pane = false;
    let terminal = render_ui_to_backend(backend, &mut app);
    let buffer = terminal.backend().buffer();
    assert_eq!(buffer.area.width, 120);
    assert_eq!(buffer.area.height, 40);
    // Render again with the Install pane hidden instead.
    app.show_recent_pane = true;
    app.show_install_pane = false;
    let backend = create_test_backend();
    let terminal = render_ui_to_backend(backend, &mut app);
    let buffer = terminal.backend().buffer();
    assert_eq!(buffer.area.width, 120);
    assert_eq!(buffer.area.height, 40);
}
// Modal Tests
#[test]
/// What: Render the Alert modal overlay.
///
/// Inputs:
/// - `AppState` with `Modal::Alert` carrying a message.
///
/// Output:
/// - Rendering succeeds and the frame spans the full 120x40 backend.
///
/// Details:
/// - Smoke-checks alert overlay layout.
fn test_modal_alert_renders() {
    let mut state = create_test_app_state();
    state.modal = Modal::Alert {
        message: "Test alert message".to_string(),
    };
    let term = render_ui_to_backend(create_test_backend(), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 120);
    assert_eq!(area.height, 40);
}
#[test]
/// What: Render the Help modal overlay.
///
/// Inputs:
/// - `AppState` with `Modal::Help` active.
///
/// Output:
/// - `help_rect` is set after rendering.
///
/// Details:
/// - Smoke-checks the keybinding help overlay.
fn test_modal_help_renders() {
    let mut state = create_test_app_state();
    state.modal = Modal::Help;
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.help_rect.is_some());
}
#[test]
/// What: Render the News modal with one feed item.
///
/// Inputs:
/// - `AppState` with `Modal::News` holding a single `NewsFeedItem`.
///
/// Output:
/// - `news_rect` and `news_list_rect` are set after rendering.
///
/// Details:
/// - Exercises the news list display and its selection state.
fn test_modal_news_renders() {
    let mut state = create_test_app_state();
    let item = pacsea::state::types::NewsFeedItem {
        id: "https://example.com/news".to_string(),
        url: Some("https://example.com/news".to_string()),
        date: "2024-01-01".to_string(),
        title: "Test News Item".to_string(),
        summary: None,
        source: pacsea::state::types::NewsFeedSource::ArchNews,
        severity: None,
        packages: Vec::new(),
    };
    state.modal = Modal::News {
        items: vec![item],
        selected: 0,
        scroll: 0,
    };
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.news_rect.is_some());
    assert!(state.news_list_rect.is_some());
}
#[test]
/// What: Render the Preflight modal on its Summary tab.
///
/// Inputs:
/// - `AppState` with a fully-initialized `Modal::Preflight` for one package.
///
/// Output:
/// - `preflight_content_rect` is set after rendering.
///
/// Details:
/// - Initializes every tab's backing state (deps, files, services, sandbox)
///   to its empty/default value and smoke-checks the modal layout.
fn test_modal_preflight_renders() {
    let mut state = create_test_app_state();
    state.modal = Modal::Preflight {
        items: vec![PackageItem {
            name: "test-pkg".to_string(),
            version: "1.0.0".to_string(),
            description: "Test".to_string(),
            source: Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }],
        action: pacsea::state::modal::PreflightAction::Install,
        tab: pacsea::state::modal::PreflightTab::Summary,
        header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
        cascade_mode: pacsea::state::modal::CascadeMode::Basic,
        // Summary tab state
        summary: None,
        summary_scroll: 0,
        cached_reverse_deps_report: None,
        // Dependencies tab state
        dependency_info: vec![],
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        // Files tab state
        file_info: vec![],
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        // Services tab state
        service_info: vec![],
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        // Sandbox tab state
        sandbox_info: vec![],
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
    };
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.preflight_content_rect.is_some());
}
#[test]
/// What: Render the `ConfirmInstall` modal.
///
/// Inputs:
/// - `AppState` with `Modal::ConfirmInstall` holding one package.
///
/// Output:
/// - Rendering succeeds and the frame spans the full 120x40 backend.
///
/// Details:
/// - Smoke-checks the installation confirmation dialog.
fn test_modal_confirm_renders() {
    let mut state = create_test_app_state();
    state.modal = Modal::ConfirmInstall {
        items: vec![PackageItem {
            name: "test-pkg".to_string(),
            version: "1.0.0".to_string(),
            description: "Test".to_string(),
            source: Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }],
    };
    let term = render_ui_to_backend(create_test_backend(), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 120);
    assert_eq!(area.height, 40);
}
#[test]
/// What: Render the `PreflightExec` modal with progress-bar log lines.
///
/// Inputs:
/// - `Modal::PreflightExec` whose `log_lines` include a progress bar.
///
/// Output:
/// - Rendering succeeds at 120x40.
/// - The progress-bar line survives in `log_lines` after the draw.
///
/// Details:
/// - Covers progress output display in the execution log panel.
fn test_modal_preflight_exec_progress_bars() {
    let mut state = create_test_app_state();
    state.modal = Modal::PreflightExec {
        items: vec![PackageItem {
            name: "test-pkg".to_string(),
            version: "1.0.0".to_string(),
            description: "Test".to_string(),
            source: Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }],
        action: pacsea::state::PreflightAction::Install,
        tab: pacsea::state::PreflightTab::Summary,
        verbose: false,
        log_lines: vec![
            ":: Retrieving packages...".to_string(),
            "[########] 100%".to_string(),
            "downloading test-pkg-1.0.0...".to_string(),
        ],
        abortable: false,
        header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
        success: None,
    };
    let term = render_ui_to_backend(create_test_backend(), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 120);
    assert_eq!(area.height, 40);
    // The log must still contain exactly the seeded lines, bar included.
    match state.modal {
        Modal::PreflightExec { log_lines, .. } => {
            assert_eq!(log_lines.len(), 3);
            assert!(log_lines[1].contains("100%"));
        }
        _ => panic!("Expected PreflightExec modal"),
    }
}
// Component State Tests
#[test]
/// What: Keep the selection highlight on the chosen results entry.
///
/// Inputs:
/// - Two package results with index 1 selected.
///
/// Output:
/// - `selected` is still 1 after rendering and `results_rect` is set.
///
/// Details:
/// - Confirms list-state/selected-index synchronization survives a draw.
fn test_results_selection_highlighting() {
    let mut state = create_test_app_state();
    // Small factory to keep the two fixtures terse.
    let make = |name: &str, version: &str, description: &str| PackageItem {
        name: name.to_string(),
        version: version.to_string(),
        description: description.to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    state.results = vec![make("pkg1", "1.0", "First"), make("pkg2", "2.0", "Second")];
    state.all_results = state.results.clone();
    state.selected = 1;
    state.list_state.select(Some(1));
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert_eq!(state.selected, 1);
    assert!(state.results_rect.is_some());
}
#[test]
/// What: Preserve search-input focus across a render.
///
/// Inputs:
/// - `AppState` with query text, caret at end, and `Focus::Search`.
///
/// Output:
/// - Focus remains `Focus::Search` after drawing.
///
/// Details:
/// - Covers focus handling and caret placement for the search field.
fn test_search_input_focus() {
    let mut state = create_test_app_state();
    state.input = "test query".to_string();
    state.search_caret = state.input.len();
    state.focus = pacsea::state::types::Focus::Search;
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert_eq!(state.focus, pacsea::state::types::Focus::Search);
}
#[test]
/// What: Render the sort and options dropdown menus.
///
/// Inputs:
/// - `AppState` with `sort_menu_open`, then `options_menu_open`, toggled on.
///
/// Output:
/// - `sort_menu_rect` and `options_menu_rect` are set when their menu is open.
///
/// Details:
/// - Covers dropdown positioning and menu-state toggling.
fn test_dropdowns_render() {
    let mut state = create_test_app_state();
    // First pass: sort menu open.
    state.sort_menu_open = true;
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.sort_menu_rect.is_some());
    // Second pass: sort closed, options menu open.
    state.sort_menu_open = false;
    state.options_menu_open = true;
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.options_menu_rect.is_some());
}
#[test]
/// What: Render a transient toast notification.
///
/// Inputs:
/// - `AppState` with `toast_message` populated.
///
/// Output:
/// - Rendering succeeds and the frame spans the full 120x40 backend.
///
/// Details:
/// - Smoke-checks the temporary message overlay.
fn test_toast_message_renders() {
    let mut state = create_test_app_state();
    state.toast_message = Some("Test toast message".to_string());
    let term = render_ui_to_backend(create_test_backend(), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 120);
    assert_eq!(area.height, 40);
}
#[test]
/// What: Position the URL button when the selected package has a URL.
///
/// Inputs:
/// - One selected result whose details carry a URL.
///
/// Output:
/// - `url_button_rect` is set after rendering.
///
/// Details:
/// - Covers details-pane component initialization for the URL button.
fn test_url_button_rect_set() {
    let mut state = create_test_app_state();
    state.results = vec![PackageItem {
        name: "test".to_string(),
        version: "1.0".to_string(),
        description: "Test".to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    state.selected = 0;
    state.list_state.select(Some(0));
    state.details.url = "https://example.com".to_string();
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.url_button_rect.is_some());
}
// Edge Cases and Error States
#[test]
/// What: Render at an extremely small terminal size.
///
/// Inputs:
/// - `TestBackend` sized 40x8.
///
/// Output:
/// - Rendering does not panic and the buffer matches the requested size.
///
/// Details:
/// - Layout resilience check at the low extreme.
fn test_ui_very_small_terminal() {
    let mut state = create_test_app_state();
    let term = render_ui_to_backend(create_test_backend_size(40, 8), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 40);
    assert_eq!(area.height, 8);
}
#[test]
/// What: Render at an extremely large terminal size.
///
/// Inputs:
/// - `TestBackend` sized 300x100 with one package result.
///
/// Output:
/// - Rendering succeeds and the buffer matches the requested size.
///
/// Details:
/// - Layout resilience check at the high extreme.
fn test_ui_very_large_terminal() {
    let mut state = create_test_app_state();
    state.results = vec![PackageItem {
        name: "test".to_string(),
        version: "1.0".to_string(),
        description: "Test".to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    state.selected = 0;
    state.list_state.select(Some(0));
    let term = render_ui_to_backend(create_test_backend_size(300, 100), &mut state);
    let area = term.backend().buffer().area;
    assert_eq!(area.width, 300);
    assert_eq!(area.height, 100);
}
#[test]
/// What: Render a package whose name is far wider than a list column.
///
/// Inputs:
/// - One result with an extremely long name.
///
/// Output:
/// - Rendering does not panic and `results_rect` is set.
///
/// Details:
/// - Exercises truncation/overflow handling in the results list.
fn test_ui_long_package_names() {
    let mut state = create_test_app_state();
    state.results = vec![PackageItem {
        name: "very-long-package-name-that-should-be-truncated-properly-in-the-ui".to_string(),
        version: "1.0.0".to_string(),
        description: "Test".to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    state.selected = 0;
    state.list_state.select(Some(0));
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.results_rect.is_some());
}
#[test]
/// What: Render with an active query that matched nothing.
///
/// Inputs:
/// - `AppState` with query text but empty `results`/`all_results`.
///
/// Output:
/// - Rendering succeeds and `results_rect` is set.
///
/// Details:
/// - Covers the no-match empty state of the results pane.
fn test_ui_empty_results_with_query() {
    let mut state = create_test_app_state();
    state.input = "nonexistent-package-xyz".to_string();
    state.results = Vec::new();
    state.all_results = Vec::new();
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.results_rect.is_some());
}
#[test]
/// What: Render installed-only mode with downgrade and remove lists.
///
/// Inputs:
/// - `installed_only_mode` on, right pane focused on Downgrade, both lists seeded.
///
/// Output:
/// - `downgrade_rect` is set after rendering.
///
/// Details:
/// - Exercises right-pane focus switching and mode-specific list state.
fn test_ui_installed_only_mode() {
    let mut state = create_test_app_state();
    state.installed_only_mode = true;
    state.right_pane_focus = pacsea::state::types::RightPaneFocus::Downgrade;
    // Small factory to keep the two fixtures terse.
    let make = |name: &str, version: &str, description: &str| PackageItem {
        name: name.to_string(),
        version: version.to_string(),
        description: description.to_string(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    state.downgrade_list = vec![make("downgrade-pkg", "1.0", "To downgrade")];
    state.downgrade_state.select(Some(0));
    state.remove_list = vec![make("remove-pkg", "2.0", "To remove")];
    state.remove_state.select(Some(0));
    let _term = render_ui_to_backend(create_test_backend(), &mut state);
    assert!(state.downgrade_rect.is_some());
}
#[test]
/// What: Test UI handling of terminal resize events.
///
/// Inputs:
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | true |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/edge_cases.rs | tests/other/edge_cases.rs | //! Integration tests for edge cases.
//!
//! Tests cover:
//! - Empty package list for all operations
//! - Special characters in package names (quoting)
//! - Very long package names
//! - Concurrent operation prevention
#![cfg(test)]
use pacsea::install::ExecutorRequest;
use pacsea::state::{AppState, Modal, PackageItem, Source, modal::CascadeMode};
/// What: Build a `PackageItem` fixture.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source
///
/// Output:
/// - `PackageItem` with a fixed version and empty description
///
/// Details:
/// - Shared helper for the edge-case tests below
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        source,
        name: name.into(),
        version: "1.0.0".into(),
        description: String::new(),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Build an `ExecutorRequest::Install` with no packages.
///
/// Inputs:
/// - Empty items vector.
///
/// Output:
/// - The request round-trips with an empty item list.
///
/// Details:
/// - Edge case: install invoked with nothing selected.
fn integration_edge_case_install_empty_list() {
    let request = ExecutorRequest::Install {
        items: Vec::new(),
        password: None,
        dry_run: false,
    };
    if let ExecutorRequest::Install { items, .. } = request {
        assert!(items.is_empty());
    } else {
        panic!("Expected ExecutorRequest::Install");
    }
}
#[test]
/// What: Build an `ExecutorRequest::Remove` with no package names.
///
/// Inputs:
/// - Empty names vector, basic cascade mode.
///
/// Output:
/// - The request round-trips with an empty name list.
///
/// Details:
/// - Edge case: remove invoked with nothing selected.
fn integration_edge_case_remove_empty_list() {
    let request = ExecutorRequest::Remove {
        names: Vec::new(),
        password: None,
        cascade: CascadeMode::Basic,
        dry_run: false,
    };
    if let ExecutorRequest::Remove { names, .. } = request {
        assert!(names.is_empty());
    } else {
        panic!("Expected ExecutorRequest::Remove");
    }
}
#[test]
/// What: Build an `ExecutorRequest::Update` with no commands.
///
/// Inputs:
/// - Empty commands vector.
///
/// Output:
/// - The request round-trips with an empty command list.
///
/// Details:
/// - Edge case: update invoked with nothing to run.
fn integration_edge_case_update_empty_commands() {
    let request = ExecutorRequest::Update {
        commands: Vec::new(),
        password: None,
        dry_run: false,
    };
    if let ExecutorRequest::Update { commands, .. } = request {
        assert!(commands.is_empty());
    } else {
        panic!("Expected ExecutorRequest::Update");
    }
}
#[test]
/// What: Build an `ExecutorRequest::Downgrade` with no package names.
///
/// Inputs:
/// - Empty names vector.
///
/// Output:
/// - The request round-trips with an empty name list.
///
/// Details:
/// - Edge case: downgrade invoked with nothing selected.
fn integration_edge_case_downgrade_empty_list() {
    let request = ExecutorRequest::Downgrade {
        names: Vec::new(),
        password: None,
        dry_run: false,
    };
    if let ExecutorRequest::Downgrade { names, .. } = request {
        assert!(names.is_empty());
    } else {
        panic!("Expected ExecutorRequest::Downgrade");
    }
}
#[test]
/// What: Preserve a hyphenated, versioned package name.
///
/// Inputs:
/// - Name containing hyphens and digits, official source.
///
/// Output:
/// - The name is stored unchanged.
///
/// Details:
/// - Covers the common repo naming pattern.
fn integration_edge_case_package_name_with_hyphens() {
    let source = Source::Official {
        repo: "extra".into(),
        arch: "x86_64".into(),
    };
    let pkg = create_test_package("python-numpy-1.26.0", source);
    assert_eq!(pkg.name, "python-numpy-1.26.0");
}
#[test]
/// What: Preserve a package name containing underscores.
///
/// Inputs:
/// - Underscored AUR-style name.
///
/// Output:
/// - The name is stored unchanged.
///
/// Details:
/// - Some AUR packages use underscores.
fn integration_edge_case_package_name_with_underscores() {
    let pkg = create_test_package("my_custom_package", Source::Aur);
    assert_eq!(pkg.name, "my_custom_package");
}
#[test]
/// What: Preserve a package name containing plus signs.
///
/// Inputs:
/// - The `g++` name, official source.
///
/// Output:
/// - The name is stored unchanged.
///
/// Details:
/// - Plus signs appear in real package names (e.g. c++).
fn integration_edge_case_package_name_with_plus() {
    let source = Source::Official {
        repo: "core".into(),
        arch: "x86_64".into(),
    };
    let pkg = create_test_package("g++", source);
    assert_eq!(pkg.name, "g++");
}
#[test]
/// What: Preserve a 100-character package name.
///
/// Inputs:
/// - Name built from 100 repeated characters.
///
/// Output:
/// - Both the length and the content survive unchanged.
///
/// Details:
/// - Edge case for unusually long names.
fn integration_edge_case_very_long_package_name() {
    let long_name: String = std::iter::repeat('a').take(100).collect();
    let pkg = create_test_package(&long_name, Source::Aur);
    assert_eq!(pkg.name.len(), 100);
    assert_eq!(pkg.name, long_name);
}
#[test]
/// What: Quote a package name for shell use.
///
/// Inputs:
/// - A plain hyphenated name.
///
/// Output:
/// - Single- and double-quoted forms wrap the name verbatim.
///
/// Details:
/// - Sanity check for shell quoting of special characters.
fn integration_edge_case_package_name_quoting() {
    let pkg_name = "test-pkg";
    let single_quoted = format!("'{pkg_name}'");
    let double_quoted = format!("\"{pkg_name}\"");
    assert_eq!(single_quoted, "'test-pkg'");
    assert_eq!(double_quoted, "\"test-pkg\"");
}
#[test]
/// What: Block a new operation while one request is already pending.
///
/// Inputs:
/// - `AppState` whose `pending_executor_request` is populated.
///
/// Output:
/// - The "can start new" check comes back false.
///
/// Details:
/// - Models the race-prevention guard around the executor slot.
fn integration_edge_case_concurrent_operation_check() {
    let queued = ExecutorRequest::Install {
        items: vec![create_test_package("pkg1", Source::Aur)],
        password: None,
        dry_run: false,
    };
    let app = AppState {
        pending_executor_request: Some(queued),
        ..Default::default()
    };
    // A request is already occupying the executor slot.
    assert!(app.pending_executor_request.is_some());
    // The guard must therefore refuse a new operation.
    let can_start_new = app.pending_executor_request.is_none();
    assert!(!can_start_new, "Should not start new operation when one is pending");
}
#[test]
/// What: A freshly-defaulted state has no pending operations.
///
/// Inputs:
/// - `AppState::default()`.
///
/// Output:
/// - All three pending slots are `None`.
///
/// Details:
/// - Confirms a clean initial state.
fn integration_edge_case_no_pending_operations_initially() {
    let fresh = AppState::default();
    assert!(fresh.pending_executor_request.is_none());
    assert!(fresh.pending_custom_command.is_none());
    assert!(fresh.pending_file_sync_result.is_none());
}
#[test]
/// What: Carry an error message through the Alert modal.
///
/// Inputs:
/// - Error text describing a missing package.
///
/// Output:
/// - The modal holds the message intact.
///
/// Details:
/// - Covers error display via `Modal::Alert`.
fn integration_edge_case_alert_modal_error() {
    let modal = Modal::Alert {
        message: "Package not found in repositories".to_string(),
    };
    let app = AppState {
        modal,
        ..Default::default()
    };
    if let Modal::Alert { message } = app.modal {
        assert!(message.contains("not found"));
    } else {
        panic!("Expected Alert modal");
    }
}
#[test]
/// What: Test empty install list.
///
/// Inputs:
/// - `AppState` with empty install_list.
///
/// Output:
/// - install_list is empty.
///
/// Details:
/// - Edge case for no packages selected.
fn integration_edge_case_empty_install_list() {
    // A default state must start with an empty installation queue.
    let state = AppState::default();
    assert!(state.install_list.is_empty());
}
#[test]
/// What: Test empty downgrade list.
///
/// Inputs:
/// - `AppState` with empty downgrade_list.
///
/// Output:
/// - downgrade_list is empty.
///
/// Details:
/// - Edge case for no packages to downgrade.
fn integration_edge_case_empty_downgrade_list() {
    // A default state must start with an empty downgrade queue.
    let state = AppState::default();
    assert!(state.downgrade_list.is_empty());
}
#[test]
/// What: Test cascade mode flags.
///
/// Inputs:
/// - All cascade mode variants.
///
/// Output:
/// - Correct flags for each mode.
///
/// Details:
/// - Verifies cascade mode flag() method.
fn integration_edge_case_cascade_mode_flags() {
    // Each removal mode must map to its exact pacman flag string.
    for (mode, flag) in [
        (CascadeMode::Basic, "-R"),
        (CascadeMode::Cascade, "-Rs"),
        (CascadeMode::CascadeWithConfigs, "-Rns"),
    ] {
        assert_eq!(mode.flag(), flag);
    }
}
#[test]
/// What: Test cascade mode descriptions.
///
/// Inputs:
/// - All cascade mode variants.
///
/// Output:
/// - Correct description for each mode.
///
/// Details:
/// - Verifies cascade mode description() method.
fn integration_edge_case_cascade_mode_descriptions() {
    // Each mode's human-readable description must mention its key term.
    for (mode, needle) in [
        (CascadeMode::Basic, "targets"),
        (CascadeMode::Cascade, "dependents"),
        (CascadeMode::CascadeWithConfigs, "configs"),
    ] {
        assert!(mode.description().contains(needle));
    }
}
#[test]
/// What: Test cascade mode cycling.
///
/// Inputs:
/// - Cascade mode next() calls.
///
/// Output:
/// - Modes cycle correctly.
///
/// Details:
/// - Verifies cascade mode next() method.
fn integration_edge_case_cascade_mode_cycling() {
    // Walking next() three times must visit every variant and wrap around.
    let step1 = CascadeMode::Basic.next();
    assert_eq!(step1, CascadeMode::Cascade);
    let step2 = step1.next();
    assert_eq!(step2, CascadeMode::CascadeWithConfigs);
    assert_eq!(step2.next(), CascadeMode::Basic);
}
#[test]
/// What: Test package with empty version.
///
/// Inputs:
/// - Package with empty version string.
///
/// Output:
/// - Package handles empty version gracefully.
///
/// Details:
/// - Edge case for packages without version info.
fn integration_edge_case_package_empty_version() {
    // Construct an AUR item whose version field is the empty string.
    let pkg = PackageItem {
        version: String::new(),
        name: "test-pkg".to_string(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    assert!(pkg.version.is_empty());
}
#[test]
/// What: Test package with orphaned flag.
///
/// Inputs:
/// - Package marked as orphaned.
///
/// Output:
/// - orphaned flag is true.
///
/// Details:
/// - Verifies orphaned package handling.
fn integration_edge_case_orphaned_package() {
    // An AUR item explicitly flagged as having no maintainer.
    let pkg = PackageItem {
        orphaned: true,
        name: "orphaned-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
    };
    assert!(pkg.orphaned);
}
#[test]
/// What: Test package with out_of_date flag.
///
/// Inputs:
/// - Package marked as out of date.
///
/// Output:
/// - out_of_date is set.
///
/// Details:
/// - Verifies out-of-date package handling.
fn integration_edge_case_out_of_date_package() {
    // The AUR reports out-of-date as a Unix timestamp.
    let pkg = PackageItem {
        out_of_date: Some(1_700_000_000),
        name: "old-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        orphaned: false,
    };
    assert!(matches!(pkg.out_of_date, Some(_)));
}
#[test]
/// What: Test package with popularity score.
///
/// Inputs:
/// - Package with popularity.
///
/// Output:
/// - popularity is set.
///
/// Details:
/// - Verifies AUR popularity handling.
fn integration_edge_case_package_popularity() {
    // An AUR item carrying a popularity score.
    let pkg = PackageItem {
        popularity: Some(42.5),
        name: "popular-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: Source::Aur,
        out_of_date: None,
        orphaned: false,
    };
    assert_eq!(pkg.popularity, Some(42.5));
}
#[test]
/// What: Test single character package name.
///
/// Inputs:
/// - Package with single character name.
///
/// Output:
/// - Package handles short name.
///
/// Details:
/// - Edge case for minimal package name.
fn integration_edge_case_single_char_package_name() {
    // A one-letter name (e.g. the "r" language) must round-trip intact.
    let source = Source::Official {
        repo: "extra".into(),
        arch: "x86_64".into(),
    };
    let pkg = create_test_package("r", source);
    assert_eq!(pkg.name, "r");
    assert_eq!(pkg.name.len(), 1);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/conflict_preservation.rs | tests/other/conflict_preservation.rs | //! Integration test for conflict preservation when packages are added sequentially.
use pacsea as crate_root;
// Helper functions (simplified versions for this test)
/// Builds a minimal `PackageItem` for tests: only name, version and source
/// are meaningful; every other field is left at its empty/none default.
fn create_test_package(
    name: impl Into<String>,
    version: impl Into<String>,
    source: crate_root::state::Source,
) -> crate_root::state::PackageItem {
    // Convert the generic inputs up-front so the literal below stays tidy.
    let (name, version) = (name.into(), version.into());
    crate_root::state::PackageItem {
        name,
        version,
        source,
        description: String::new(),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
/// Builds a `Preflight` modal for the given packages with empty/default
/// dependency, file, service and sandbox panes.
///
/// Inputs:
/// - `packages`: items shown in the modal (AUR entries are counted for chips)
/// - `action`: the preflight action being confirmed
/// - `initial_tab`: tab selected when the modal opens
///
/// Output: a `Modal::Preflight` with zeroed counters and empty collections,
/// i.e. the state the UI would be in before any background resolution runs.
fn create_preflight_modal(
    packages: Vec<crate_root::state::PackageItem>,
    action: crate_root::state::PreflightAction,
    initial_tab: crate_root::state::PreflightTab,
) -> crate_root::state::Modal {
    let package_count = packages.len();
    // Count AUR-sourced packages for the header chips.
    let aur_count = packages
        .iter()
        .filter(|p| matches!(p.source, crate_root::state::Source::Aur))
        .count();
    crate_root::state::Modal::Preflight {
        items: packages,
        action,
        tab: initial_tab,
        summary: None,
        summary_scroll: 0,
        // Sizes/risk are zeroed: these tests never exercise the chip math.
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count,
            download_bytes: 0,
            install_delta_bytes: 0,
            aur_count,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    }
}
/// Simplified stand-in for the app's tab-switch handler: sets the active
/// preflight tab and, for Install actions, refreshes the modal's dependency
/// pane from the `install_list_deps` cache.
///
/// Inputs:
/// - `app`: state whose `modal` must be `Modal::Preflight` (otherwise no-op)
/// - `tab`: the tab to switch to
///
/// Side effects: may overwrite `dependency_info` with cached deps filtered to
/// the modal's current items, resetting the selection when the pane was empty.
fn switch_preflight_tab(
    app: &mut crate_root::state::AppState,
    tab: crate_root::state::PreflightTab,
) {
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab: current_tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    {
        *current_tab = tab;
        // Names of the packages currently shown in the modal; used to keep
        // only cached deps that belong to one of them.
        let item_names: std::collections::HashSet<String> =
            items.iter().map(|i| i.name.clone()).collect();
        // Sync dependencies
        if matches!(*action, crate_root::state::PreflightAction::Install)
            && (matches!(tab, crate_root::state::PreflightTab::Deps)
                || matches!(tab, crate_root::state::PreflightTab::Summary)
                || dependency_info.is_empty())
        {
            let filtered: Vec<_> = app
                .install_list_deps
                .iter()
                .filter(|dep| {
                    dep.required_by
                        .iter()
                        .any(|req_by| item_names.contains(req_by))
                })
                .cloned()
                .collect();
            // Only replace the pane when the cache actually has matches, so
            // an empty cache never wipes previously displayed data.
            if !filtered.is_empty() {
                let was_empty = dependency_info.is_empty();
                *dependency_info = filtered;
                // Reset the cursor only on first population, preserving the
                // user's selection across refreshes.
                if was_empty {
                    *dep_selected = 0;
                }
            }
        }
    }
}
#[allow(clippy::type_complexity)]
/// Destructures the current modal as `Preflight`, panicking on any other
/// variant, and returns references to the fields these tests assert on.
fn assert_preflight_modal(
    app: &crate_root::state::AppState,
) -> (
    &Vec<crate_root::state::PackageItem>,
    &crate_root::state::PreflightAction,
    &crate_root::state::PreflightTab,
    &Vec<crate_root::state::modal::DependencyInfo>,
    &Vec<crate_root::state::modal::PackageFileInfo>,
    &Vec<crate_root::state::modal::ServiceImpact>,
    &Vec<crate_root::logic::sandbox::SandboxInfo>,
    &bool,
    &bool,
) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            items,
            action,
            tab,
            dependency_info,
            file_info,
            service_info,
            sandbox_info,
            services_loaded,
            sandbox_loaded,
            ..
        } => (
            items,
            action,
            tab,
            dependency_info,
            file_info,
            service_info,
            sandbox_info,
            services_loaded,
            sandbox_loaded,
        ),
        _ => panic!("Expected Preflight modal"),
    }
}
/// Creates pacsea-bin's dependencies including conflicts with pacsea and pacsea-git.
///
/// What: Creates a test dependency list for pacsea-bin package.
///
/// Inputs: None (uses hardcoded test data).
///
/// Output: Vector of `DependencyInfo` entries containing:
/// - Conflict entries for "pacsea" and "pacsea-git"
/// - Regular dependency entry for "common-dep"
///
/// Details: All entries are attributed to "pacsea-bin". The regular dependency
/// exists to verify that conflicts are not overwritten by dependency merging.
fn create_pacsea_bin_dependencies() -> Vec<crate_root::state::modal::DependencyInfo> {
    use crate_root::state::modal::{DependencyInfo, DependencySource, DependencyStatus};
    // Local constructor so each entry below only spells out what varies.
    let entry = |name: &str, version: &str, status: DependencyStatus, source: DependencySource| {
        DependencyInfo {
            name: name.to_string(),
            version: version.to_string(),
            status,
            source,
            required_by: vec!["pacsea-bin".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }
    };
    vec![
        // pacsea-bin's conflict with pacsea
        entry(
            "pacsea",
            "",
            DependencyStatus::Conflict {
                reason: "conflicts with installed package pacsea".to_string(),
            },
            DependencySource::Official {
                repo: "core".to_string(),
            },
        ),
        // pacsea-bin's conflict with pacsea-git
        entry(
            "pacsea-git",
            "",
            DependencyStatus::Conflict {
                reason: "conflicts with installed package pacsea-git".to_string(),
            },
            DependencySource::Aur,
        ),
        // pacsea-bin's regular dependency (to test that conflicts aren't overwritten by deps)
        entry(
            "common-dep",
            "1.0.0",
            DependencyStatus::ToInstall,
            DependencySource::Official {
                repo: "core".to_string(),
            },
        ),
    ]
}
/// Creates jujutsu-git's dependencies including conflicts and overlapping dependencies.
///
/// What: Creates a test dependency list for jujutsu-git package.
///
/// Inputs: None (uses hardcoded test data).
///
/// Output: Vector of `DependencyInfo` entries containing:
/// - Conflict entry for "jujutsu"
/// - Dependency entry for "pacsea" (critical case: already a conflict from pacsea-bin)
/// - Dependency entry for "common-dep" (overlaps with pacsea-bin)
/// - Unique dependency entry for "jujutsu-dep"
///
/// Details: The "pacsea" entry is deliberately `ToInstall` even though pacsea-bin
/// already recorded it as a Conflict — merging must preserve the Conflict status.
fn create_jujutsu_git_dependencies() -> Vec<crate_root::state::modal::DependencyInfo> {
    use crate_root::state::modal::{DependencyInfo, DependencySource, DependencyStatus};
    // Local constructor so each entry below only spells out what varies.
    let entry = |name: &str, version: &str, status: DependencyStatus, source: DependencySource| {
        DependencyInfo {
            name: name.to_string(),
            version: version.to_string(),
            status,
            source,
            required_by: vec!["jujutsu-git".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }
    };
    vec![
        // jujutsu-git's conflict with jujutsu
        entry(
            "jujutsu",
            "",
            DependencyStatus::Conflict {
                reason: "conflicts with installed package jujutsu".to_string(),
            },
            DependencySource::Official {
                repo: "community".to_string(),
            },
        ),
        // CRITICAL TEST CASE: jujutsu-git also depends on "pacsea" (already a
        // CONFLICT from pacsea-bin). merge_dependency must NOT overwrite the
        // Conflict status with ToInstall.
        entry(
            "pacsea",
            "",
            DependencyStatus::ToInstall,
            DependencySource::Official {
                repo: "core".to_string(),
            },
        ),
        // jujutsu-git also depends on common-dep (same as pacsea-bin), which
        // exercises merging of ordinary dependency entries.
        entry(
            "common-dep",
            "1.0.0",
            DependencyStatus::ToInstall,
            DependencySource::Official {
                repo: "core".to_string(),
            },
        ),
        // jujutsu-git's unique dependency
        entry(
            "jujutsu-dep",
            "2.0.0",
            DependencyStatus::ToInstall,
            DependencySource::Official {
                repo: "extra".to_string(),
            },
        ),
    ]
}
/// Verifies that pacsea-bin's conflicts are correctly detected.
///
/// Inputs:
/// - `dependency_info`: Slice of `DependencyInfo` entries to check
///
/// Output: None (panics on assertion failure).
///
/// Details: pacsea-bin must contribute exactly two Conflict entries —
/// "pacsea" and "pacsea-git" — each listing "pacsea-bin" in `required_by`.
fn verify_pacsea_bin_conflicts(dependency_info: &[crate_root::state::modal::DependencyInfo]) {
    use crate_root::state::modal::DependencyStatus;
    // Collect every entry whose status is a conflict, regardless of origin.
    let conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| matches!(d.status, DependencyStatus::Conflict { .. }))
        .collect();
    assert_eq!(
        conflicts.len(),
        2,
        "Should have 2 conflicts after adding pacsea-bin"
    );
    // Both named conflicts must be attributed to pacsea-bin.
    for expected in ["pacsea", "pacsea-git"] {
        assert!(
            conflicts
                .iter()
                .any(|c| c.name == expected && c.required_by.contains(&"pacsea-bin".to_string())),
            "pacsea-bin should conflict with {expected}"
        );
    }
}
/// Verifies that pacsea-bin's conflicts are preserved after adding jujutsu-git.
///
/// Inputs:
/// - `dependency_info`: Slice of `DependencyInfo` entries to check
///
/// Output: None (panics on assertion failure).
///
/// Details: Even though jujutsu-git lists "pacsea" as a `ToInstall` dependency,
/// the Conflict status cached for pacsea-bin must survive the merge: pacsea-bin
/// keeps its 2 conflicts and "pacsea" stays a Conflict attributed to pacsea-bin.
fn verify_pacsea_bin_conflicts_preserved(
    dependency_info: &[crate_root::state::modal::DependencyInfo],
) {
    use crate_root::state::modal::DependencyStatus;
    // Conflicts attributed to pacsea-bin only.
    let pacsea_conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| {
            matches!(d.status, DependencyStatus::Conflict { .. })
                && d.required_by.contains(&"pacsea-bin".to_string())
        })
        .collect();
    assert_eq!(
        pacsea_conflicts.len(),
        2,
        "pacsea-bin should still have 2 conflicts after adding jujutsu-git (cached conflicts not overwritten)"
    );
    // CRITICAL: the "pacsea" entry must still carry Conflict status.
    let pacsea_entry = dependency_info
        .iter()
        .find(|d| d.name == "pacsea")
        .expect("pacsea should be present in dependencies");
    assert!(
        matches!(pacsea_entry.status, DependencyStatus::Conflict { .. }),
        "pacsea should remain a Conflict (from pacsea-bin cache), not overwritten by jujutsu-git's ToInstall dependency"
    );
    assert!(
        pacsea_entry.required_by.contains(&"pacsea-bin".to_string()),
        "pacsea conflict should be required by pacsea-bin"
    );
    assert!(
        pacsea_conflicts.iter().any(|c| c.name == "pacsea"),
        "pacsea-bin should still conflict with pacsea"
    );
    assert!(
        pacsea_conflicts.iter().any(|c| c.name == "pacsea-git"),
        "pacsea-bin should still conflict with pacsea-git"
    );
}
/// Verifies that jujutsu-git's conflicts are correctly detected.
///
/// Inputs:
/// - `dependency_info`: Slice of `DependencyInfo` entries to check
///
/// Output: None (panics on assertion failure).
///
/// Details: jujutsu-git must contribute exactly one Conflict entry, "jujutsu",
/// listing "jujutsu-git" in its `required_by`.
fn verify_jujutsu_git_conflicts(dependency_info: &[crate_root::state::modal::DependencyInfo]) {
    use crate_root::state::modal::DependencyStatus;
    // Keep only conflicts attributed to jujutsu-git.
    let jujutsu_conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| {
            d.required_by.contains(&"jujutsu-git".to_string())
                && matches!(d.status, DependencyStatus::Conflict { .. })
        })
        .collect();
    assert_eq!(
        jujutsu_conflicts.len(),
        1,
        "jujutsu-git should have 1 conflict"
    );
    assert!(
        jujutsu_conflicts.iter().any(|c| c.name == "jujutsu"),
        "jujutsu-git should conflict with jujutsu"
    );
}
/// Verifies the total number of conflicts matches expected count.
///
/// Inputs:
/// - `dependency_info`: Slice of `DependencyInfo` entries to check
/// - `expected_count`: Expected number of conflicts
/// - `message`: Custom assertion message for better test failure diagnostics
///
/// Output: None (panics on assertion failure).
///
/// Details: Counts all Conflict-status entries and compares against the
/// expectation — used to prove conflicts are not lost during merging.
fn verify_total_conflicts(
    dependency_info: &[crate_root::state::modal::DependencyInfo],
    expected_count: usize,
    message: &str,
) {
    use crate_root::state::modal::DependencyStatus;
    let actual = dependency_info
        .iter()
        .filter(|d| matches!(d.status, DependencyStatus::Conflict { .. }))
        .count();
    assert_eq!(actual, expected_count, "{message}");
}
/// Verifies that common-dep is not a conflict and is required by pacsea-bin.
///
/// Inputs:
/// - `dependency_info`: Slice of `DependencyInfo` entries to check
///
/// Output: None (panics on assertion failure).
///
/// Details: "common-dep" must be present with `ToInstall` status (never a
/// Conflict), and pacsea-bin must appear in its `required_by` — either on this
/// entry directly or on another merged "common-dep" entry.
fn verify_common_dep_not_conflict(dependency_info: &[crate_root::state::modal::DependencyInfo]) {
    use crate_root::state::modal::DependencyStatus;
    let common_dep = dependency_info
        .iter()
        .find(|d| d.name == "common-dep")
        .expect("common-dep should be present");
    assert!(
        matches!(common_dep.status, DependencyStatus::ToInstall),
        "common-dep should be ToInstall, not Conflict"
    );
    // After merging, common-dep may live in one merged entry or in several
    // separate ones; either way pacsea-bin must claim it somewhere.
    let claimed_by_pacsea_bin = common_dep.required_by.contains(&"pacsea-bin".to_string())
        || dependency_info
            .iter()
            .any(|d| d.name == "common-dep" && d.required_by.contains(&"pacsea-bin".to_string()));
    assert!(
        claimed_by_pacsea_bin,
        "common-dep should be required by pacsea-bin (directly or in merged entry)"
    );
}
/// Verifies that conflicts persist through multiple tab switches.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` to perform tab switching on
///
/// Output: Vector of `DependencyInfo` entries after the switches.
///
/// Details: Bounces Deps -> Summary -> Deps to mimic user navigation, then
/// snapshots the dependency pane so the caller can assert cached conflict
/// data survived the UI state changes.
fn verify_conflicts_after_tab_switches(
    app: &mut crate_root::state::AppState,
) -> Vec<crate_root::state::modal::DependencyInfo> {
    for tab in [
        crate_root::state::PreflightTab::Summary,
        crate_root::state::PreflightTab::Deps,
    ] {
        switch_preflight_tab(app, tab);
    }
    let (_, _, _, deps_after_switch, _, _, _, _, _) = assert_preflight_modal(app);
    deps_after_switch.clone()
}
#[test]
/// What: Verify that conflicts are not overwritten when new packages are added to install list sequentially.
///
/// Inputs:
/// - pacsea-bin added first with conflicts (pacsea, pacsea-git)
/// - jujutsu-git added second with conflicts (jujutsu)
/// - Both packages may have overlapping dependencies
///
/// Output:
/// - pacsea-bin's conflicts remain present after jujutsu-git is added
/// - jujutsu-git's conflicts are also detected
/// - No conflicts are overwritten by dependency merging
///
/// Details:
/// - Tests the fix for conflict status preservation during dependency merging
/// - Verifies that conflicts take precedence over dependency statuses
/// - Ensures timing of package addition doesn't affect conflict detection
fn test_conflicts_not_overwritten_when_packages_added_sequentially() {
    // NOTE(review): `set_var` is unsafe because of concurrent env access; if
    // other tests read the environment in parallel this could race — consider
    // a serial test harness. TODO confirm.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    // Step 1: Add pacsea-bin first
    let pacsea_bin = create_test_package("pacsea-bin", "0.6.0", crate_root::state::Source::Aur);
    app.install_list_deps = create_pacsea_bin_dependencies();
    app.install_list = vec![pacsea_bin.clone()];
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    app.modal = create_preflight_modal(
        vec![pacsea_bin.clone()],
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Deps,
    );
    // Verify pacsea-bin's conflicts are detected
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Deps);
    let (_, _, _, dependency_info, _, _, _, _, _) = assert_preflight_modal(&app);
    verify_pacsea_bin_conflicts(dependency_info);
    // Step 2: Simulate that pacsea-bin's conflicts are now cached in install_list_deps
    // This simulates the real scenario where the first package's dependencies were resolved
    // and cached before the second package is added
    // Step 3: Add jujutsu-git (which might have dependencies that could overwrite conflicts)
    let jujutsu_git = create_test_package("jujutsu-git", "0.1.0", crate_root::state::Source::Aur);
    // CRITICAL TEST: Simulate the scenario where jujutsu-git's dependencies are resolved
    // and need to be merged with existing cached entries from pacsea-bin.
    // The key test is: when jujutsu-git depends on "pacsea" (which is already a conflict
    // from pacsea-bin), the merge should preserve the conflict status, not overwrite it.
    app.install_list_deps
        .extend(create_jujutsu_git_dependencies());
    app.install_list = vec![pacsea_bin.clone(), jujutsu_git.clone()];
    // Update modal to include both packages
    app.modal = create_preflight_modal(
        vec![pacsea_bin, jujutsu_git],
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Deps,
    );
    // Step 4: Verify conflicts are still present after adding jujutsu-git
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Deps);
    let (items, _, _, dependency_info, _, _, _, _, _) = assert_preflight_modal(&app);
    assert_eq!(items.len(), 2, "Should have 2 packages in install list");
    verify_pacsea_bin_conflicts_preserved(dependency_info);
    verify_jujutsu_git_conflicts(dependency_info);
    verify_total_conflicts(
        dependency_info,
        3,
        "Should have 3 total conflicts (2 from pacsea-bin, 1 from jujutsu-git)",
    );
    verify_common_dep_not_conflict(dependency_info);
    // Step 5: Verify conflicts persist through multiple tab switches
    let dependency_info_after_switch = verify_conflicts_after_tab_switches(&mut app);
    verify_total_conflicts(
        &dependency_info_after_switch,
        3,
        "Should still have 3 conflicts after tab switches",
    );
    // Verify pacsea-bin's conflicts are still intact
    let pacsea_conflicts_after_switch_count = dependency_info_after_switch
        .iter()
        .filter(|d| {
            matches!(
                d.status,
                crate_root::state::modal::DependencyStatus::Conflict { .. }
            ) && d.required_by.contains(&"pacsea-bin".to_string())
        })
        .count();
    assert_eq!(
        pacsea_conflicts_after_switch_count, 2,
        "pacsea-bin should still have 2 conflicts after tab switches"
    );
}
#[test]
/// What: Verify that cached conflicts are preserved when new dependencies are merged.
///
/// Inputs:
/// - A conflict entry already exists in `install_list_deps` (from cached first package)
/// - A new dependency entry for the same package is added to cache (from second package)
///
/// Output:
/// - The conflict status is preserved in the final merged result
///
/// Details:
/// - Tests the caching scenario where one package's conflicts are already cached
/// - Verifies that when the cache is displayed, conflicts are not overwritten
/// - This simulates the real-world scenario where packages are added at different times
fn test_cached_conflicts_preserved_in_cache_merge() {
    use crate_root::state::modal::DependencyStatus;
    // NOTE(review): `set_var` is unsafe because of concurrent env access;
    // parallel tests reading the environment could race — TODO confirm.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    // Simulate the scenario:
    // 1. pacsea-bin was added first, conflicts resolved and cached in install_list_deps
    // 2. jujutsu-git is added, and its dependencies are also cached
    // 3. When both are displayed, the cache merge should preserve conflicts
    // Step 1: Simulate pacsea-bin's conflict being cached (from first package addition)
    let mut app = crate_root::state::AppState {
        install_list_deps: vec![crate_root::state::modal::DependencyInfo {
            name: "pacsea".to_string(),
            version: String::new(),
            status: DependencyStatus::Conflict {
                reason: "conflicts with installed package pacsea".to_string(),
            },
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["pacsea-bin".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }],
        ..Default::default()
    };
    // Step 2: Simulate jujutsu-git being added and its dependency on "pacsea" being cached
    // In the real scenario, this would be resolved and merged via merge_dependency
    // The key test is: does the cached conflict get overwritten?
    app.install_list_deps
        .push(crate_root::state::modal::DependencyInfo {
            name: "pacsea".to_string(),
            version: String::new(),
            status: DependencyStatus::ToInstall, // This would try to overwrite, but merge_dependency should prevent it
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["jujutsu-git".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        });
    // Step 3: Simulate what happens when the UI displays dependencies
    // The cache has both entries, but they should be merged correctly
    // In the real code, merge_dependency would handle this, but for this test
    // we verify that the conflict entry exists and would be preserved
    // Verify both entries exist in cache (before merging)
    let pacsea_entries: Vec<_> = app
        .install_list_deps
        .iter()
        .filter(|d| d.name == "pacsea")
        .collect();
    assert_eq!(
        pacsea_entries.len(),
        2,
        "Should have 2 pacsea entries in cache (one conflict, one dependency)"
    );
    // Verify the conflict entry is present
    let conflict_entry = pacsea_entries
        .iter()
        .find(|d| matches!(d.status, DependencyStatus::Conflict { .. }))
        .expect("Conflict entry should be in cache");
    assert!(
        conflict_entry
            .required_by
            .contains(&"pacsea-bin".to_string()),
        "Conflict should be from pacsea-bin"
    );
    // Verify the dependency entry is present
    let dep_entry = pacsea_entries
        .iter()
        .find(|d| matches!(d.status, DependencyStatus::ToInstall))
        .expect("Dependency entry should be in cache");
    assert!(
        dep_entry.required_by.contains(&"jujutsu-git".to_string()),
        "Dependency should be from jujutsu-git"
    );
    // Step 4: The key test - when these are merged (via merge_dependency in real code),
    // the conflict should take precedence. Since we can't call merge_dependency directly,
    // we verify the scenario is set up correctly and document the expected behavior.
    // The actual merge_dependency logic (tested in the first test) ensures conflicts are preserved.
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/data_flow_complexity.rs | tests/other/data_flow_complexity.rs | //! Data flow complexity calculation test for the Pacsea project.
//!
//! This test analyzes all Rust source files in the project and calculates
//! data flow complexity metrics for functions and methods according to Dunsmore.
//!
//! Data flow complexity measures the complexity of data flow through a program by:
//! - Identifying variable definitions (defs) - where variables are assigned values
//! - Identifying variable uses (uses) - where variable values are accessed
//! - Counting Definition-Use (DU) pairs - paths from definitions to uses
//! - Measuring complexity based on the number of DU pairs and their nesting levels
//!
//! Higher complexity indicates more complex data dependencies and potentially harder-to-maintain code.
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
// ANSI color codes (harmonized with Makefile)
const COLOR_RESET: &str = "\x1b[0m"; // reset all terminal attributes
const COLOR_BOLD: &str = "\x1b[1m"; // bold/bright text
const COLOR_BLUE: &str = "\x1b[34m"; // blue foreground
const COLOR_YELLOW: &str = "\x1b[33m"; // yellow foreground
/// Represents data flow complexity metrics for a single function or method.
///
/// One instance is produced per analyzed function; `complexity` is the count
/// of definition-use (DU) pairs observed in its body.
#[derive(Debug, Clone)]
struct FunctionDataFlowComplexity {
    /// Name of the function/method
    name: String,
    /// File path where the function is defined
    file: PathBuf,
    /// Data flow complexity value (number of DU pairs)
    complexity: u32,
    /// Number of variable definitions
    definitions: u32,
    /// Number of variable uses
    uses: u32,
    /// Line number where the function starts
    line: usize,
}
/// Represents data flow complexity metrics for an entire file.
///
/// Aggregates the per-function results for one source file.
#[derive(Debug)]
struct FileDataFlowComplexity {
    /// Functions and their complexities
    functions: Vec<FunctionDataFlowComplexity>,
    /// Total complexity (sum of all function complexities)
    total_complexity: u32,
    /// Average complexity per function (total divided by function count)
    avg_complexity: f64,
}
/// Visitor that traverses the AST and calculates data flow complexity.
///
/// The `current_*` fields accumulate state for the function being visited
/// and are reset between functions.
struct DataFlowVisitor {
    /// Current function being analyzed
    current_function: Option<String>,
    /// Current file being analyzed
    current_file: PathBuf,
    /// Functions found and their complexities
    functions: Vec<FunctionDataFlowComplexity>,
    /// Current function's variable definitions
    current_definitions: HashSet<String>,
    /// Current function's variable uses
    current_uses: HashSet<String>,
    /// Current function's DU pairs (def-use pairs)
    current_du_pairs: HashSet<(String, String)>,
    /// Current line number
    current_line: usize,
    /// Current nesting level (for complexity weighting)
    nesting_level: u32,
}
impl DataFlowVisitor {
/// Creates a new visitor for a given file.
fn new(file: PathBuf) -> Self {
Self {
current_function: None,
current_file: file,
functions: Vec::new(),
current_definitions: HashSet::new(),
current_uses: HashSet::new(),
current_du_pairs: HashSet::new(),
current_line: 0,
nesting_level: 0,
}
}
/// Records a variable definition.
fn record_def(&mut self, var_name: &str) {
self.current_definitions.insert(var_name.to_string());
// Create DU pairs: this definition can reach all existing uses
for use_var in &self.current_uses {
if use_var == var_name {
self.current_du_pairs
.insert((var_name.to_string(), use_var.clone()));
}
}
}
/// Records a variable use.
fn record_use(&mut self, var_name: &str) {
self.current_uses.insert(var_name.to_string());
// Create DU pairs: this use can be reached by all existing definitions
for def_var in &self.current_definitions {
if def_var == var_name {
self.current_du_pairs
.insert((def_var.clone(), var_name.to_string()));
}
}
}
/// Calculates data flow complexity for a single expression.
fn visit_expr(&mut self, expr: &syn::Expr) {
match expr {
syn::Expr::Assign(assign) => {
// Left side is a definition
if let syn::Expr::Path(path) = &*assign.left
&& let Some(ident) = path.path.get_ident()
{
self.record_def(&ident.to_string());
}
// Right side is a use
self.visit_expr(&assign.right);
}
// Note: AssignOp (compound assignments like +=) are handled as Assign in syn 2.0
// We'll handle them in the Assign case by checking if the left side is also used
syn::Expr::Let(let_expr) => {
// Pattern binding creates definitions
self.visit_pat(&let_expr.pat);
self.visit_expr(&let_expr.expr);
}
syn::Expr::Path(path) => {
// Variable access is a use
if let Some(ident) = path.path.get_ident() {
self.record_use(&ident.to_string());
}
}
syn::Expr::Call(call) => {
// Function call arguments are uses
self.visit_expr(&call.func);
for arg in &call.args {
self.visit_expr(arg);
}
}
syn::Expr::MethodCall(mcall) => {
// Receiver and arguments are uses
self.visit_expr(&mcall.receiver);
for arg in &mcall.args {
self.visit_expr(arg);
}
}
syn::Expr::If(if_expr) => {
self.nesting_level += 1;
self.visit_expr(&if_expr.cond);
// Visit then branch
for stmt in &if_expr.then_branch.stmts {
self.visit_stmt(stmt);
}
if let Some((_, else_expr)) = &if_expr.else_branch {
self.visit_expr(else_expr);
}
self.nesting_level -= 1;
}
syn::Expr::While(while_expr) => {
self.nesting_level += 1;
self.visit_expr(&while_expr.cond);
for stmt in &while_expr.body.stmts {
self.visit_stmt(stmt);
}
self.nesting_level -= 1;
}
syn::Expr::ForLoop(for_loop) => {
self.nesting_level += 1;
// Loop variable is a definition
self.visit_pat(&for_loop.pat);
self.visit_expr(&for_loop.expr);
for stmt in &for_loop.body.stmts {
self.visit_stmt(stmt);
}
self.nesting_level -= 1;
}
syn::Expr::Loop(loop_expr) => {
self.nesting_level += 1;
for stmt in &loop_expr.body.stmts {
self.visit_stmt(stmt);
}
self.nesting_level -= 1;
}
syn::Expr::Match(match_expr) => {
self.nesting_level += 1;
self.visit_expr(&match_expr.expr);
for arm in &match_expr.arms {
self.visit_pat(&arm.pat);
if let Some((_, guard_expr)) = &arm.guard {
self.visit_expr(guard_expr);
}
self.visit_expr(&arm.body);
}
self.nesting_level -= 1;
}
syn::Expr::Block(block) => {
for stmt in &block.block.stmts {
self.visit_stmt(stmt);
}
}
syn::Expr::Binary(bin) => {
self.visit_expr(&bin.left);
self.visit_expr(&bin.right);
}
syn::Expr::Unary(unary) => {
self.visit_expr(&unary.expr);
}
syn::Expr::Paren(paren) => {
self.visit_expr(&paren.expr);
}
syn::Expr::Group(group) => {
self.visit_expr(&group.expr);
}
syn::Expr::Array(array) => {
for elem in &array.elems {
self.visit_expr(elem);
}
}
syn::Expr::Tuple(tuple) => {
for elem in &tuple.elems {
self.visit_expr(elem);
}
}
syn::Expr::Struct(struct_expr) => {
for field in &struct_expr.fields {
self.visit_expr(&field.expr);
}
}
syn::Expr::Repeat(repeat) => {
self.visit_expr(&repeat.expr);
}
syn::Expr::Closure(closure) => {
// Closure parameters are definitions
// In syn 2.0, closure.inputs is Vec<Pat>
for input in &closure.inputs {
self.visit_pat(input);
}
self.visit_expr(&closure.body);
}
syn::Expr::Async(async_expr) => {
for stmt in &async_expr.block.stmts {
self.visit_stmt(stmt);
}
}
syn::Expr::Await(await_expr) => {
self.visit_expr(&await_expr.base);
}
syn::Expr::Range(range) => {
if let Some(start) = &range.start {
self.visit_expr(start);
}
if let Some(end) = &range.end {
self.visit_expr(end);
}
}
syn::Expr::Index(index) => {
self.visit_expr(&index.expr);
self.visit_expr(&index.index);
}
syn::Expr::Field(field) => {
self.visit_expr(&field.base);
}
_ => {
// Leaf nodes and other expression types, no variable access
// For other expression types, we could add more specific handling
}
}
}
/// Calculates data flow complexity for a pattern (used in let bindings, match arms, etc.).
fn visit_pat(&mut self, pat: &syn::Pat) {
match pat {
syn::Pat::Ident(pat_ident) => {
self.record_def(&pat_ident.ident.to_string());
}
syn::Pat::Struct(struct_pat) => {
for field in &struct_pat.fields {
self.visit_pat(&field.pat);
}
}
syn::Pat::Tuple(tuple_pat) => {
for elem in &tuple_pat.elems {
self.visit_pat(elem);
}
}
syn::Pat::Slice(slice_pat) => {
for elem in &slice_pat.elems {
self.visit_pat(elem);
}
}
syn::Pat::Or(or_pat) => {
for pat in &or_pat.cases {
self.visit_pat(pat);
}
}
_ => {
// Other patterns don't create variable bindings we track
}
}
}
/// Calculates data flow complexity for a single statement.
fn visit_stmt(&mut self, stmt: &syn::Stmt) {
match stmt {
syn::Stmt::Local(local) => {
self.visit_pat(&local.pat);
if let Some(init) = &local.init {
self.visit_expr(&init.expr);
}
}
syn::Stmt::Expr(expr, _) => {
self.visit_expr(expr);
}
syn::Stmt::Item(_) | syn::Stmt::Macro(_) => {
// Items and macros don't add data flow complexity directly
// Macros are complex but hard to analyze statically
}
}
}
/// Visits a function and calculates its data flow complexity.
fn visit_item_fn(&mut self, item_fn: &syn::ItemFn, name: String, line: usize) {
let saved_definitions = self.current_definitions.clone();
let saved_uses = self.current_uses.clone();
let saved_du_pairs = self.current_du_pairs.clone();
let saved_function = self.current_function.clone();
let saved_nesting = self.nesting_level;
// Reset for new function
self.current_definitions.clear();
self.current_uses.clear();
self.current_du_pairs.clear();
self.current_function = Some(name.clone());
self.current_line = line;
self.nesting_level = 0;
// Function parameters are definitions
for input in &item_fn.sig.inputs {
match input {
syn::FnArg::Receiver(_) => {
// self is a use (we're using the receiver)
self.record_use("self");
}
syn::FnArg::Typed(typed) => {
self.visit_pat(&typed.pat);
}
}
}
// Visit the function body
for stmt in &item_fn.block.stmts {
self.visit_stmt(stmt);
}
// Calculate complexity: number of DU pairs, weighted by nesting level
#[allow(clippy::cast_possible_truncation)]
let base_complexity = self.current_du_pairs.len() as u32;
// Add complexity for nesting (more nested = more complex data flow)
let nesting_complexity = self.nesting_level * 2;
let total_complexity = base_complexity + nesting_complexity;
// Save the function complexity
#[allow(clippy::cast_possible_truncation)]
self.functions.push(FunctionDataFlowComplexity {
name,
file: self.current_file.clone(),
complexity: total_complexity,
definitions: self.current_definitions.len() as u32,
uses: self.current_uses.len() as u32,
line: self.current_line,
});
// Restore previous state
self.current_definitions = saved_definitions;
self.current_uses = saved_uses;
self.current_du_pairs = saved_du_pairs;
self.current_function = saved_function;
self.nesting_level = saved_nesting;
}
/// Visits an impl method and calculates its data flow complexity.
fn visit_impl_item_fn(&mut self, method: &syn::ImplItemFn, name: String, line: usize) {
let saved_definitions = self.current_definitions.clone();
let saved_uses = self.current_uses.clone();
let saved_du_pairs = self.current_du_pairs.clone();
let saved_function = self.current_function.clone();
let saved_nesting = self.nesting_level;
// Reset for new method
self.current_definitions.clear();
self.current_uses.clear();
self.current_du_pairs.clear();
self.current_function = Some(name.clone());
self.current_line = line;
self.nesting_level = 0;
// Method parameters are definitions
for input in &method.sig.inputs {
match input {
syn::FnArg::Receiver(_) => {
// self is a use (we're using the receiver)
self.record_use("self");
}
syn::FnArg::Typed(typed) => {
self.visit_pat(&typed.pat);
}
}
}
// Visit the method body
for stmt in &method.block.stmts {
self.visit_stmt(stmt);
}
// Calculate complexity: number of DU pairs, weighted by nesting level
#[allow(clippy::cast_possible_truncation)]
let base_complexity = self.current_du_pairs.len() as u32;
// Add complexity for nesting (more nested = more complex data flow)
let nesting_complexity = self.nesting_level * 2;
let total_complexity = base_complexity + nesting_complexity;
// Save the method complexity
#[allow(clippy::cast_possible_truncation)]
self.functions.push(FunctionDataFlowComplexity {
name,
file: self.current_file.clone(),
complexity: total_complexity,
definitions: self.current_definitions.len() as u32,
uses: self.current_uses.len() as u32,
line: self.current_line,
});
// Restore previous state
self.current_definitions = saved_definitions;
self.current_uses = saved_uses;
self.current_du_pairs = saved_du_pairs;
self.current_function = saved_function;
self.nesting_level = saved_nesting;
}
/// Visits an impl block to analyze methods.
fn visit_impl(&mut self, item_impl: &syn::ItemImpl) {
for item in &item_impl.items {
if let syn::ImplItem::Fn(method) = item {
let name = method.sig.ident.to_string();
// Line numbers are not easily accessible from syn spans in syn 2.0
// Using 0 as placeholder - could be enhanced with source file parsing
let line = 0;
self.visit_impl_item_fn(method, name, line);
}
}
}
/// Visits all items in a file.
fn visit_file(&mut self, file: &syn::File) {
for item in &file.items {
match item {
syn::Item::Fn(item_fn) => {
let name = item_fn.sig.ident.to_string();
// Line numbers are not easily accessible from syn spans in syn 2.0
// Using 0 as placeholder - could be enhanced with source file parsing
let line = 0;
self.visit_item_fn(item_fn, name, line);
}
syn::Item::Impl(item_impl) => {
self.visit_impl(item_impl);
}
syn::Item::Mod(_item_mod) => {
// Nested modules are handled separately
}
_ => {}
}
}
}
}
/// Analyzes a single Rust source file and returns its data flow complexity metrics.
///
/// Reads and parses the file with `syn`, runs the [`DataFlowVisitor`] over
/// the resulting AST, and aggregates per-function complexities into totals.
fn analyze_file(file_path: &Path) -> Result<FileDataFlowComplexity, Box<dyn std::error::Error>> {
    let source = fs::read_to_string(file_path)?;
    let ast = syn::parse_file(&source)?;
    let mut visitor = DataFlowVisitor::new(file_path.to_path_buf());
    visitor.visit_file(&ast);
    let functions = visitor.functions;
    let total_complexity: u32 = functions.iter().map(|f| f.complexity).sum();
    let count = functions.len();
    // Guard against division by zero for files that define no functions.
    #[allow(clippy::cast_precision_loss)]
    let avg_complexity = if count == 0 {
        0.0
    } else {
        f64::from(total_complexity) / count as f64
    };
    Ok(FileDataFlowComplexity {
        functions,
        total_complexity,
        avg_complexity,
    })
}
/// Recursively finds all Rust source files in a directory.
///
/// Returns an empty list when `dir` is not a directory. Directories named
/// `target` (build artifacts) are never descended into.
fn find_rust_files(dir: &Path) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
    let mut found = Vec::new();
    if !dir.is_dir() {
        return Ok(found);
    }
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            let is_target = path.file_name().and_then(|n| n.to_str()) == Some("target");
            if !is_target {
                found.extend(find_rust_files(&path)?);
            }
        } else if path.extension().and_then(|ext| ext.to_str()) == Some("rs") {
            found.push(path);
        }
    }
    Ok(found)
}
/// Calculates data flow complexity for the entire project.
///
/// Scans `src/` and `tests/` (when present) for Rust sources and maps each
/// file path to its [`FileDataFlowComplexity`]. Files that fail to parse are
/// reported as warnings and skipped rather than aborting the whole scan.
fn calculate_project_data_flow_complexity()
-> Result<HashMap<PathBuf, FileDataFlowComplexity>, Box<dyn std::error::Error>> {
    let mut results = HashMap::new();
    // Both roots receive identical handling; iterate instead of duplicating
    // the analysis loop per directory.
    for dir in [Path::new("src"), Path::new("tests")] {
        if !dir.exists() {
            continue;
        }
        for file in find_rust_files(dir)? {
            match analyze_file(&file) {
                Ok(complexity) => {
                    // `file` can be moved directly; the previous `clone()` was redundant.
                    results.insert(file, complexity);
                }
                Err(e) => {
                    eprintln!(
                        "{COLOR_YELLOW}Warning:{COLOR_RESET} Failed to analyze {}: {}",
                        file.display(),
                        e
                    );
                }
            }
        }
    }
    Ok(results)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Test that calculates and reports data flow complexity for the entire project.
    ///
    /// This test:
    /// - Analyzes all Rust source files in src/ and tests/
    /// - Calculates data flow complexity for each function/method according to Dunsmore
    /// - Tracks variable definitions, uses, and Definition-Use (DU) pairs
    /// - Reports statistics and identifies high-complexity functions
    /// - Optionally fails if complexity exceeds thresholds
    #[test]
    fn test_data_flow_complexity() {
        // Complexity thresholds (guidelines for data flow complexity).
        // The first three bucket functions into severity bands for reporting;
        // the last two are the enforced limits.
        const VERY_HIGH_COMPLEXITY: u32 = 50;
        const HIGH_COMPLEXITY: u32 = 25;
        const MODERATE_COMPLEXITY: u32 = 10;
        const MAX_AVERAGE_COMPLEXITY: f64 = 8.0;
        const MAX_FILE_AVG_COMPLEXITY: f64 = 40.0;
        let results = calculate_project_data_flow_complexity()
            .expect("Failed to calculate project data flow complexity");
        assert!(!results.is_empty(), "No Rust files found to analyze");
        // Collect all functions and aggregate project-wide totals in one pass.
        let mut all_functions = Vec::new();
        let mut total_project_complexity = 0u32;
        let mut total_functions = 0usize;
        let mut total_definitions = 0u32;
        let mut total_uses = 0u32;
        for file_complexity in results.values() {
            total_project_complexity += file_complexity.total_complexity;
            total_functions += file_complexity.functions.len();
            for func in &file_complexity.functions {
                total_definitions += func.definitions;
                total_uses += func.uses;
            }
            all_functions.extend(file_complexity.functions.clone());
        }
        // Sort functions by complexity (highest first)
        all_functions.sort_by(|a, b| b.complexity.cmp(&a.complexity));
        // Print summary
        println!("\n=== Data Flow Complexity Report (Dunsmore) ===");
        println!("Total files analyzed: {}", results.len());
        println!("Total functions/methods: {total_functions}");
        println!("Total project complexity: {total_project_complexity}");
        println!("Total variable definitions: {total_definitions}");
        println!("Total variable uses: {total_uses}");
        if total_functions > 0 {
            #[allow(clippy::cast_precision_loss)]
            let avg_complexity = f64::from(total_project_complexity) / total_functions as f64;
            println!("Average complexity per function: {avg_complexity:.2}");
        }
        // Report top 10 most complex functions
        println!("\n=== Top 10 Most Complex Functions ===");
        for (i, func) in all_functions.iter().take(10).enumerate() {
            println!(
                "{}. {} (complexity: {}, defs: {}, uses: {}) - {}:{}",
                i + 1,
                func.name,
                func.complexity,
                func.definitions,
                func.uses,
                func.file.display(),
                func.line
            );
        }
        // Report files with highest complexity
        println!("\n=== Files by Total Complexity ===");
        let mut file_complexities: Vec<_> = results.iter().collect();
        file_complexities.sort_by(|a, b| b.1.total_complexity.cmp(&a.1.total_complexity));
        for (file, file_comp) in file_complexities.iter().take(10) {
            println!(
                "{}: total={}, avg={:.2}, functions={}",
                file.display(),
                file_comp.total_complexity,
                file_comp.avg_complexity,
                file_comp.functions.len()
            );
        }
        // Count functions by complexity level (bands are disjoint by construction)
        let very_high = all_functions
            .iter()
            .filter(|f| f.complexity >= VERY_HIGH_COMPLEXITY)
            .count();
        let high = all_functions
            .iter()
            .filter(|f| f.complexity >= HIGH_COMPLEXITY && f.complexity < VERY_HIGH_COMPLEXITY)
            .count();
        let moderate = all_functions
            .iter()
            .filter(|f| f.complexity >= MODERATE_COMPLEXITY && f.complexity < HIGH_COMPLEXITY)
            .count();
        println!("\n{COLOR_BOLD}{COLOR_BLUE}=== Complexity Distribution ==={COLOR_RESET}");
        println!("Very High (≥{VERY_HIGH_COMPLEXITY}): {very_high}");
        println!(
            "High ({}..{}): {}",
            HIGH_COMPLEXITY,
            VERY_HIGH_COMPLEXITY - 1,
            high
        );
        println!(
            "Moderate ({}..{}): {}",
            MODERATE_COMPLEXITY,
            HIGH_COMPLEXITY - 1,
            moderate
        );
        // The three bands are disjoint subsets of all functions, so this
        // subtraction cannot underflow.
        println!(
            "Low (<{}): {}",
            MODERATE_COMPLEXITY,
            total_functions - very_high - high - moderate
        );
        // List functions with very high complexity
        if very_high > 0 {
            println!(
                "\n{COLOR_BOLD}{COLOR_YELLOW}=== Functions with Very High Complexity (≥{VERY_HIGH_COMPLEXITY}) ==={COLOR_RESET}"
            );
            for func in all_functions
                .iter()
                .filter(|f| f.complexity >= VERY_HIGH_COMPLEXITY)
            {
                println!(
                    " {} (complexity: {}, defs: {}, uses: {}) - {}:{}",
                    func.name,
                    func.complexity,
                    func.definitions,
                    func.uses,
                    func.file.display(),
                    func.line
                );
            }
        }
        // ====================================================================
        // ASSERTIONS - Choose appropriate strictness level for your project
        // ====================================================================
        //
        // Guidelines for data flow complexity (Dunsmore):
        // - Measures Definition-Use (DU) pairs and nesting levels
        // - Higher values indicate more complex data dependencies
        // - Functions with many variables and complex data flow paths score higher
        //
        // Note: Data flow complexity can be higher than cyclomatic complexity
        // because it tracks all variable interactions, not just control flow.
        // OPTION 1: AVERAGE COMPLEXITY - Ensure overall codebase stays maintainable
        // This project enforces MAX_AVERAGE_COMPLEXITY (above), which is
        // stricter than the commonly cited 20-30 guideline for this metric.
        #[allow(clippy::cast_precision_loss)]
        let avg_complexity = if total_functions > 0 {
            f64::from(total_project_complexity) / total_functions as f64
        } else {
            0.0
        };
        assert!(
            avg_complexity <= MAX_AVERAGE_COMPLEXITY,
            "Average data flow complexity too high: {avg_complexity:.2} (max allowed: {MAX_AVERAGE_COMPLEXITY:.2}). \
             Consider refactoring functions with complex data dependencies."
        );
        // OPTION 2: FILE-LEVEL - Prevent individual files from becoming too complex
        // Recommended: No single file should have average complexity > 40
        // Note: Only warn for files with multiple functions. For single-function files,
        // the individual function complexity check is more appropriate.
        for (file_path, file_comp) in &file_complexities {
            if file_comp.functions.len() > 1 && file_comp.avg_complexity > MAX_FILE_AVG_COMPLEXITY {
                eprintln!(
                    "{COLOR_YELLOW}Warning:{COLOR_RESET} File {} has high average data flow complexity: {:.2} ({} functions)",
                    file_path.display(),
                    file_comp.avg_complexity,
                    file_comp.functions.len()
                );
            }
        }
        // OPTION 3: MODERATE - Limit the number of very high complexity functions
        // Recommended: Allow 5-10% of functions to be very high complexity
        /*
        const MAX_VERY_HIGH_COMPLEXITY_FUNCTIONS: usize = 30;
        assert!(
            very_high <= MAX_VERY_HIGH_COMPLEXITY_FUNCTIONS,
            "Too many functions with very high data flow complexity (≥{}): {} (max allowed: {}). \
             Consider refactoring the most complex functions.",
            VERY_HIGH_COMPLEXITY,
            very_high,
            MAX_VERY_HIGH_COMPLEXITY_FUNCTIONS
        );
        */
        // OPTION 4: PROGRESSIVE - Prevent new extremely complex functions
        // This is more useful in CI/CD to prevent regression
        // Fail if any function exceeds a "hard limit" (e.g., 200+)
        /*
        const HARD_LIMIT_COMPLEXITY: u32 = 200;
        let extremely_complex: Vec<_> = all_functions
            .iter()
            .filter(|f| f.complexity >= HARD_LIMIT_COMPLEXITY)
            .collect();
        if !extremely_complex.is_empty() {
            let names: Vec<String> = extremely_complex
                .iter()
                .map(|f| format!("{} ({})", f.name, f.complexity))
                .collect();
            panic!(
                "Found {} function(s) exceeding hard data flow complexity limit (≥{}): {}. \
                 These MUST be refactored before merging.",
                extremely_complex.len(),
                HARD_LIMIT_COMPLEXITY,
                names.join(", ")
            );
        }
        */
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/runtime_smoke.rs | tests/other/runtime_smoke.rs | #![cfg(test)]
//! End-to-end runtime smoke test (headless)
//!
//! Tests cover:
//! - Application initialization without panicking
//! - Headless mode operation with `PACSEA_TEST_HEADLESS=1`
//! - Task cancellation handling
use std::io::Write;
use std::time::Duration;
#[tokio::test]
/// What: Test end-to-end runtime initialization and execution in headless mode.
///
/// Inputs:
/// - `PACSEA_TEST_HEADLESS=1` environment variable to bypass raw TTY setup/restore.
/// - `pacsea::app::run(true)` called with dry-run flag.
///
/// Output:
/// - Application initializes without panicking.
/// - Task either completes successfully or can be cleanly cancelled.
///
/// Details:
/// - Starts `pacsea::app::run(true)` in the background.
/// - Waits briefly (50ms) to allow initialization and a render cycle.
/// - If task finishes early, asserts it returned `Ok(())`.
/// - If still running, aborts the task and verifies clean cancellation.
/// - Clears screen output for `--nocapture` runs.
/// - In headless mode, slow operations (pacman calls, network) are skipped.
async fn runtime_smoke_headless_initializes_and_runs_without_panic() {
    // Ensure terminal raw mode/alternate screen are bypassed during this test
    // NOTE: `set_var` requires `unsafe` here because mutating the process
    // environment can race with concurrent reads from other threads.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    // Note: Mouse position reports (^[[<35;...]) may appear in test output when moving
    // the mouse over the terminal if mouse reporting was enabled elsewhere (e.g., by
    // Fish shell or the terminal emulator itself). The setup_terminal() function now
    // explicitly disables mouse reporting in headless mode to prevent this.
    // Spawn the runtime in the background. Use dry-run to avoid any real install actions.
    let handle = tokio::spawn(async { pacsea::app::run(true).await });
    // Allow a minimal window for initialization - just enough to verify it starts without panicking
    // In headless mode, we skip slow operations (pacman calls, network), so this should be fast
    tokio::time::sleep(Duration::from_millis(50)).await;
    // If it already finished, it must have returned Ok(()) and not panicked.
    if handle.is_finished() {
        match handle.await {
            Ok(run_result) => {
                if let Err(e) = run_result {
                    panic!("app::run returned error early: {e:?}");
                }
                // Returned Ok(()): good enough as a smoke test.
                // Clear the screen for --nocapture runs to avoid leaving the TUI in the output
                print!("\x1b[2J\x1b[H");
                let _ = std::io::stdout().flush();
                return;
            }
            Err(join_err) => {
                // If it finished with a panic, this will be a JoinError (not cancelled).
                panic!("app::run task panicked: {join_err}");
            }
        }
    }
    // Otherwise, abort it and ensure it did not panic (i.e., the join error is 'cancelled').
    handle.abort();
    match handle.await {
        Ok(run_result) => {
            // Rare race: the task may have completed right before abort. Require Ok(()).
            if let Err(e) = run_result {
                panic!("app::run completed with error on abort race: {e:?}");
            }
        }
        Err(join_err) => {
            assert!(
                join_err.is_cancelled(),
                "app::run join error should be cancellation, got: {join_err}"
            );
        }
    }
    // Clear the screen at end of test (useful with --nocapture)
    print!("\x1b[2J\x1b[H");
    let _ = std::io::stdout().flush();
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/mod.rs | tests/other/mod.rs | //! Miscellaneous tests (complexity, smoke tests, etc.).
mod conflict_preservation;
mod custom_command;
mod cyclomatic_complexity;
mod data_flow_complexity;
mod dry_run_mode;
mod edge_cases;
mod loading_modal;
mod network_error;
mod runtime_smoke;
mod terminals_args_shape;
mod ui_integration;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/custom_command.rs | tests/other/custom_command.rs | //! Integration tests for custom command handler.
//!
//! Tests cover:
//! - `ExecutorRequest::CustomCommand` creation
//! - Command with sudo password
//! - Command without sudo
//! - Dry-run command format
#![cfg(test)]
use pacsea::install::ExecutorRequest;
use pacsea::state::{AppState, Modal, PreflightAction, PreflightTab};
use pacsea::state::modal::{PasswordPurpose, PreflightHeaderChips};
#[test]
/// What: Verify an `ExecutorRequest::CustomCommand` can be constructed.
///
/// Inputs:
/// - A plain command string, no password, dry-run disabled.
///
/// Output:
/// - The variant round-trips with all fields intact.
///
/// Details:
/// - Destructures with `let-else` to confirm the expected variant.
fn integration_custom_command_creation() {
    let request = ExecutorRequest::CustomCommand {
        command: String::from("makepkg -si"),
        password: None,
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand {
        command,
        password,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert_eq!(command, "makepkg -si");
    assert!(password.is_none());
    assert!(!dry_run);
}
#[test]
/// What: Verify `ExecutorRequest::CustomCommand` carries a sudo password.
///
/// Inputs:
/// - A sudo command together with a password.
///
/// Output:
/// - The password survives in the request alongside the command.
///
/// Details:
/// - Sudo-requiring commands must ship their password with the request.
fn integration_custom_command_with_password() {
    let request = ExecutorRequest::CustomCommand {
        command: String::from("sudo pacman -Fy"),
        password: Some(String::from("testpassword")),
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand {
        command, password, ..
    } = request
    else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert!(command.contains("sudo"));
    assert_eq!(password, Some("testpassword".to_string()));
}
#[test]
/// What: Verify `ExecutorRequest::CustomCommand` works without a password.
///
/// Inputs:
/// - A non-sudo command and `password: None`.
///
/// Output:
/// - The request keeps `password` as `None`.
///
/// Details:
/// - Commands that do not elevate must not demand a password.
fn integration_custom_command_no_password() {
    let request = ExecutorRequest::CustomCommand {
        command: String::from("git clone https://aur.archlinux.org/yay.git"),
        password: None,
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand { password, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert!(password.is_none());
}
#[test]
/// What: Verify `ExecutorRequest::CustomCommand` honors dry-run mode.
///
/// Inputs:
/// - A custom command constructed with `dry_run: true`.
///
/// Output:
/// - The `dry_run` flag is preserved as `true`.
///
/// Details:
/// - Dry-run requests must never be silently downgraded to real execution.
fn integration_custom_command_dry_run() {
    let request = ExecutorRequest::CustomCommand {
        command: String::from("makepkg -si"),
        password: None,
        dry_run: true,
    };
    let ExecutorRequest::CustomCommand { dry_run, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert!(dry_run);
}
#[test]
/// What: Verify the shape of a paru-installation custom command.
///
/// Inputs:
/// - A paru build-and-install command with a sudo password.
///
/// Output:
/// - The command mentions `makepkg`, `-si`, and `paru`.
///
/// Details:
/// - Checks the structural pieces of the paru bootstrap command.
fn integration_custom_command_paru_install() {
    let command = String::from("cd /tmp/paru && makepkg -si --noconfirm");
    let request = ExecutorRequest::CustomCommand {
        command: command.clone(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand { command, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    for needle in ["makepkg", "-si", "paru"] {
        assert!(command.contains(needle));
    }
}
#[test]
/// What: Verify the shape of a yay-installation custom command.
///
/// Inputs:
/// - A yay build-and-install command with a sudo password.
///
/// Output:
/// - The command mentions `makepkg`, `-si`, and `yay`.
///
/// Details:
/// - Checks the structural pieces of the yay bootstrap command.
fn integration_custom_command_yay_install() {
    let command = String::from("cd /tmp/yay && makepkg -si --noconfirm");
    let request = ExecutorRequest::CustomCommand {
        command: command.clone(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand { command, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    for needle in ["makepkg", "-si", "yay"] {
        assert!(command.contains(needle));
    }
}
#[test]
/// What: Verify the file-database sync custom command.
///
/// Inputs:
/// - The `sudo pacman -Fy` sync command with a password.
///
/// Output:
/// - The command contains `pacman` and `-Fy`.
///
/// Details:
/// - Confirms the file-sync request is built around `pacman -Fy`.
fn integration_custom_command_file_sync() {
    let request = ExecutorRequest::CustomCommand {
        command: String::from("sudo pacman -Fy"),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand { command, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert!(command.contains("pacman"));
    assert!(command.contains("-Fy"));
}
#[test]
/// What: Verify a pending custom sudo command triggers the password prompt.
///
/// Inputs:
/// - App state carrying a pending `sudo pacman -Fy` command.
///
/// Output:
/// - The modal becomes a `PasswordPrompt` with the `FileSync` purpose.
///
/// Details:
/// - Simulates the prompt transition and inspects the resulting modal.
fn integration_custom_command_password_prompt() {
    let mut app = AppState {
        pending_custom_command: Some("sudo pacman -Fy".to_string()),
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Simulate the prompt being raised for the pending command.
    app.modal = Modal::PasswordPrompt {
        purpose: PasswordPurpose::FileSync,
        items: vec![],
        input: String::new(),
        cursor: 0,
        error: None,
    };
    let Modal::PasswordPrompt { purpose, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal")
    };
    assert_eq!(purpose, PasswordPurpose::FileSync);
}
#[test]
/// What: Verify the custom-command flow moves on to `PreflightExec`.
///
/// Inputs:
/// - App state whose `PasswordPrompt` modal already holds a typed password.
///
/// Output:
/// - The modal becomes `PreflightExec` and an executor request carrying the
///   original command plus the password is queued.
///
/// Details:
/// - Mirrors the post-password transition of the custom-command flow.
fn integration_custom_command_to_preflight_exec() {
    let mut app = AppState {
        pending_custom_command: Some("sudo pacman -Fy".to_string()),
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::FileSync,
            items: vec![],
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Extract the typed password; a blank input counts as "no password".
    let password = match app.modal {
        Modal::PasswordPrompt { ref input, .. } if !input.trim().is_empty() => {
            Some(input.clone())
        }
        _ => None,
    };
    let command = app.pending_custom_command.take();
    // Simulate the transition into the execution modal.
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items: vec![],
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    if let Some(cmd) = command {
        app.pending_executor_request = Some(ExecutorRequest::CustomCommand {
            command: cmd,
            password,
            dry_run: false,
        });
    }
    // Verify modal
    assert!(matches!(app.modal, Modal::PreflightExec { .. }));
    // Verify executor request
    let Some(ExecutorRequest::CustomCommand {
        command, password, ..
    }) = app.pending_executor_request
    else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert_eq!(command, "sudo pacman -Fy");
    assert_eq!(password, Some("testpassword".to_string()));
}
#[test]
/// What: Verify `ExecutorRequest::CustomCommand` tolerates an empty command.
///
/// Inputs:
/// - An empty command string.
///
/// Output:
/// - The stored command stays empty without errors.
///
/// Details:
/// - Edge case guarding against assumptions of a non-empty command.
fn integration_custom_command_empty() {
    let request = ExecutorRequest::CustomCommand {
        command: String::new(),
        password: None,
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand { command, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    assert!(command.is_empty());
}
#[test]
/// What: Verify special shell characters survive in a custom command.
///
/// Inputs:
/// - A command containing quotes, `&&`, and a pipe.
///
/// Output:
/// - All special tokens are preserved verbatim.
///
/// Details:
/// - The command string must be stored without escaping or mangling.
fn integration_custom_command_special_chars() {
    let raw = String::from("echo 'test with spaces' && ls -la | grep 'pattern'");
    let request = ExecutorRequest::CustomCommand {
        command: raw.clone(),
        password: None,
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand { command: cmd, .. } = request else {
        panic!("Expected ExecutorRequest::CustomCommand")
    };
    for needle in ["&&", "|", "grep"] {
        assert!(cmd.contains(needle));
    }
}
#[test]
/// What: Test custom command dry-run format.
///
/// Inputs:
/// - A custom command destined for a dry run.
///
/// Output:
/// - The formatted command carries the "DRY RUN:" prefix and the command.
///
/// Details:
/// - Pins the dry-run echo-command shape.
fn integration_custom_command_dry_run_format() {
    let command = "sudo pacman -Fy";
    let dry_run_cmd = format!("echo DRY RUN: {}", command);
    for needle in ["DRY RUN:", "pacman -Fy"] {
        assert!(dry_run_cmd.contains(needle));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/dry_run_mode.rs | tests/other/dry_run_mode.rs | //! Consolidated integration tests for dry-run mode.
//!
//! Tests cover:
//! - Install dry-run respects flag
//! - Remove dry-run respects flag
//! - Update dry-run respects flag
//! - Scan dry-run respects flag
//! - Custom command dry-run respects flag
//! - "DRY RUN:" prefix in commands
#![cfg(test)]
use pacsea::install::ExecutorRequest;
use pacsea::state::{AppState, PackageItem, Source, modal::CascadeMode};
/// What: Create a test package item.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Fixture helper: fixed version "1.0.0", no description, no AUR metadata.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        source,
        name: name.to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test `ExecutorRequest::Install` with dry_run=true.
///
/// Inputs:
/// - Install request built with the dry_run flag enabled.
///
/// Output:
/// - The stored dry_run flag reads back as true.
///
/// Details:
/// - Confirms install requests carry the dry-run flag through construction.
fn integration_dry_run_install() {
    let source = Source::Official {
        repo: "extra".into(),
        arch: "x86_64".into(),
    };
    let request = ExecutorRequest::Install {
        items: vec![create_test_package("test-pkg", source)],
        password: None,
        dry_run: true,
    };
    if let ExecutorRequest::Install { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::Install");
    }
}
#[test]
/// What: Test `ExecutorRequest::Remove` with dry_run=true.
///
/// Inputs:
/// - Remove request built with the dry_run flag enabled.
///
/// Output:
/// - The stored dry_run flag reads back as true.
///
/// Details:
/// - Confirms remove requests carry the dry-run flag through construction.
fn integration_dry_run_remove() {
    let request = ExecutorRequest::Remove {
        names: vec!["test-pkg".to_string()],
        password: None,
        cascade: CascadeMode::Basic,
        dry_run: true,
    };
    if let ExecutorRequest::Remove { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::Remove");
    }
}
#[test]
/// What: Test `ExecutorRequest::Update` with dry_run=true.
///
/// Inputs:
/// - Update request built with the dry_run flag enabled.
///
/// Output:
/// - The stored dry_run flag reads back as true.
///
/// Details:
/// - Confirms update requests carry the dry-run flag through construction.
fn integration_dry_run_update() {
    let request = ExecutorRequest::Update {
        commands: vec!["sudo pacman -Syu --noconfirm".to_string()],
        password: None,
        dry_run: true,
    };
    if let ExecutorRequest::Update { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::Update");
    }
}
#[test]
/// What: Test `ExecutorRequest::Scan` with dry_run=true.
///
/// Inputs:
/// - Scan request (ClamAV only) built with the dry_run flag enabled.
///
/// Output:
/// - The stored dry_run flag reads back as true.
///
/// Details:
/// - Confirms scan requests carry the dry-run flag through construction.
fn integration_dry_run_scan() {
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: true,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: true,
    };
    if let ExecutorRequest::Scan { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::Scan");
    }
}
#[test]
/// What: Test `ExecutorRequest::CustomCommand` with dry_run=true.
///
/// Inputs:
/// - Custom command request built with the dry_run flag enabled.
///
/// Output:
/// - The stored dry_run flag reads back as true.
///
/// Details:
/// - Confirms custom commands carry the dry-run flag through construction.
fn integration_dry_run_custom_command() {
    let request = ExecutorRequest::CustomCommand {
        command: "makepkg -si".to_string(),
        password: None,
        dry_run: true,
    };
    if let ExecutorRequest::CustomCommand { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::CustomCommand");
    }
}
#[test]
/// What: Test `ExecutorRequest::Downgrade` with dry_run=true.
///
/// Inputs:
/// - Downgrade request built with the dry_run flag enabled.
///
/// Output:
/// - The stored dry_run flag reads back as true.
///
/// Details:
/// - Confirms downgrade requests carry the dry-run flag through construction.
fn integration_dry_run_downgrade() {
    let request = ExecutorRequest::Downgrade {
        names: vec!["test-pkg".to_string()],
        password: None,
        dry_run: true,
    };
    if let ExecutorRequest::Downgrade { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::Downgrade");
    }
}
#[test]
/// What: Test `AppState` dry_run flag.
///
/// Inputs:
/// - An `AppState` whose dry_run flag is set to true at construction.
///
/// Output:
/// - The flag is readable and true.
///
/// Details:
/// - Confirms dry_run is stored as plain state on the app.
fn integration_app_state_dry_run_flag() {
    let app = AppState {
        dry_run: true,
        ..AppState::default()
    };
    assert!(app.dry_run);
}
#[test]
/// What: Test dry-run command format with "DRY RUN:" prefix.
///
/// Inputs:
/// - A package name interpolated into the dry-run install command.
///
/// Output:
/// - The command carries the "DRY RUN:" prefix, the pacman verb, and the name.
///
/// Details:
/// - Pins the dry-run install echo-command shape.
fn integration_dry_run_command_format_install() {
    let pkg_name = "test-pkg";
    let dry_run_cmd = format!("echo DRY RUN: sudo pacman -S {} --noconfirm", pkg_name);
    for needle in ["DRY RUN:", "pacman -S", pkg_name] {
        assert!(dry_run_cmd.contains(needle));
    }
}
#[test]
/// What: Test dry-run command format for remove.
///
/// Inputs:
/// - A package name interpolated into the dry-run remove command.
///
/// Output:
/// - The command carries the "DRY RUN:" prefix, the pacman verb, and the name.
///
/// Details:
/// - Pins the dry-run remove echo-command shape.
fn integration_dry_run_command_format_remove() {
    let pkg_name = "test-pkg";
    let dry_run_cmd = format!("echo DRY RUN: sudo pacman -R {} --noconfirm", pkg_name);
    for needle in ["DRY RUN:", "pacman -R", pkg_name] {
        assert!(dry_run_cmd.contains(needle));
    }
}
#[test]
/// What: Test dry-run command format for update.
///
/// Inputs:
/// - The fixed dry-run update command string.
///
/// Output:
/// - The command carries the "DRY RUN:" prefix and the pacman -Syu verb.
///
/// Details:
/// - Pins the dry-run update echo-command shape.
fn integration_dry_run_command_format_update() {
    let dry_run_cmd = "echo DRY RUN: sudo pacman -Syu --noconfirm";
    let expected_parts = ["DRY RUN:", "pacman -Syu"];
    assert!(expected_parts.iter().all(|part| dry_run_cmd.contains(part)));
}
#[test]
/// What: Test dry-run command format for downgrade.
///
/// Inputs:
/// - A package name interpolated into the dry-run downgrade command.
///
/// Output:
/// - The command carries the "DRY RUN:" prefix, the downgrade verb, and the name.
///
/// Details:
/// - Pins the dry-run downgrade echo-command shape.
fn integration_dry_run_command_format_downgrade() {
    let pkg_name = "test-pkg";
    let dry_run_cmd = format!("echo DRY RUN: sudo downgrade {}", pkg_name);
    for needle in ["DRY RUN:", "downgrade", pkg_name] {
        assert!(dry_run_cmd.contains(needle));
    }
}
#[test]
/// What: Test all executor requests respect dry_run=false.
///
/// Inputs:
/// - One request of every executor variant, each built with dry_run=false.
///
/// Output:
/// - Every request reads back a false dry_run flag.
///
/// Details:
/// - Confirms dry_run=false round-trips through each variant's construction.
fn integration_dry_run_all_false() {
    let install_req = ExecutorRequest::Install {
        items: vec![],
        password: None,
        dry_run: false,
    };
    let remove_req = ExecutorRequest::Remove {
        names: vec![],
        password: None,
        cascade: CascadeMode::Basic,
        dry_run: false,
    };
    let update_req = ExecutorRequest::Update {
        commands: vec![],
        password: None,
        dry_run: false,
    };
    let scan_req = ExecutorRequest::Scan {
        package: String::new(),
        do_clamav: false,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: false,
    };
    let custom_req = ExecutorRequest::CustomCommand {
        command: String::new(),
        password: None,
        dry_run: false,
    };
    let downgrade_req = ExecutorRequest::Downgrade {
        names: vec![],
        password: None,
        dry_run: false,
    };
    // One variant-specific check per request; each panics if the variant
    // does not match the one just constructed.
    if let ExecutorRequest::Install { dry_run, .. } = install_req {
        assert!(!dry_run);
    } else {
        panic!("Expected Install");
    }
    if let ExecutorRequest::Remove { dry_run, .. } = remove_req {
        assert!(!dry_run);
    } else {
        panic!("Expected Remove");
    }
    if let ExecutorRequest::Update { dry_run, .. } = update_req {
        assert!(!dry_run);
    } else {
        panic!("Expected Update");
    }
    if let ExecutorRequest::Scan { dry_run, .. } = scan_req {
        assert!(!dry_run);
    } else {
        panic!("Expected Scan");
    }
    if let ExecutorRequest::CustomCommand { dry_run, .. } = custom_req {
        assert!(!dry_run);
    } else {
        panic!("Expected CustomCommand");
    }
    if let ExecutorRequest::Downgrade { dry_run, .. } = downgrade_req {
        assert!(!dry_run);
    } else {
        panic!("Expected Downgrade");
    }
}
#[test]
/// What: Test dry-run with multiple packages.
///
/// Inputs:
/// - Several package names joined into one dry-run install command.
///
/// Output:
/// - Every package name appears in the command, plus the "DRY RUN:" prefix.
///
/// Details:
/// - Confirms batch dry-run commands include all packages.
fn integration_dry_run_multiple_packages() {
    let pkg_names = ["pkg1", "pkg2", "pkg3"];
    let dry_run_cmd = format!(
        "echo DRY RUN: sudo pacman -S {} --noconfirm",
        pkg_names.join(" ")
    );
    assert!(pkg_names.iter().all(|pkg| dry_run_cmd.contains(pkg)));
    assert!(dry_run_cmd.contains("DRY RUN:"));
}
#[test]
/// What: Test dry-run flag inheritance from `AppState`.
///
/// Inputs:
/// - `AppState` with dry_run=true, used to build an install request.
///
/// Output:
/// - The request carries the app's dry_run value.
///
/// Details:
/// - Confirms the flag flows from state into the executor request.
fn integration_dry_run_state_to_request() {
    let app = AppState {
        dry_run: true,
        ..Default::default()
    };
    // The request inherits whatever the app state says about dry-run.
    let request = ExecutorRequest::Install {
        items: vec![],
        password: None,
        dry_run: app.dry_run,
    };
    if let ExecutorRequest::Install { dry_run, .. } = request {
        assert!(dry_run);
    } else {
        panic!("Expected ExecutorRequest::Install");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/cyclomatic_complexity.rs | tests/other/cyclomatic_complexity.rs | //! Cyclomatic complexity calculation test for the Pacsea project.
//!
//! This test analyzes all Rust source files in the project and calculates
//! cyclomatic complexity metrics for functions and methods.
//!
//! Cyclomatic complexity measures the number of linearly independent paths
//! through a program's source code. It's calculated as:
//! - Base complexity: 1
//! - Add 1 for each: if, while, for, loop, match arm, &&, ||, ? operator, catch blocks
//!
//! Higher complexity indicates more decision points and potentially harder-to-maintain code.
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
// ANSI color codes (harmonized with Makefile)
const COLOR_RESET: &str = "\x1b[0m"; // restore default terminal attributes
const COLOR_BOLD: &str = "\x1b[1m"; // bold/bright text for headings
const COLOR_BLUE: &str = "\x1b[34m"; // section headings in the report
const COLOR_YELLOW: &str = "\x1b[33m"; // warnings on stderr
/// Represents complexity metrics for a single function or method.
#[derive(Debug, Clone)]
struct FunctionComplexity {
    /// Name of the function/method
    name: String,
    /// File path where the function is defined
    file: PathBuf,
    /// Cyclomatic complexity value
    complexity: u32,
    /// Line number where the function starts
    ///
    /// NOTE: currently always 0 — the visitor passes a placeholder because
    /// line numbers are not easily extracted from syn 2.0 spans (see
    /// `ComplexityVisitor::visit_file` / `visit_impl`).
    line: usize,
}
/// Represents complexity metrics for an entire file.
#[derive(Debug)]
struct FileComplexity {
    /// Functions and their complexities
    functions: Vec<FunctionComplexity>,
    /// Total complexity (sum of all function complexities)
    total_complexity: u32,
    /// Average complexity per function
    ///
    /// 0.0 when the file contains no analyzable functions (see `analyze_file`).
    avg_complexity: f64,
}
/// Visitor that traverses the AST and calculates cyclomatic complexity.
struct ComplexityVisitor {
    /// Current function being analyzed
    ///
    /// Only written (set, then saved/restored) while descending into a
    /// function; it is not otherwise read during traversal.
    current_function: Option<String>,
    /// Current file being analyzed
    current_file: PathBuf,
    /// Functions found and their complexities
    functions: Vec<FunctionComplexity>,
    /// Current complexity counter
    current_complexity: u32,
    /// Current line number
    current_line: usize,
}
impl ComplexityVisitor {
    /// Creates a new visitor for a given file.
    ///
    /// Starts at complexity 1 (the base path through a function); the per-
    /// function visit methods reset this before analyzing each body.
    const fn new(file: PathBuf) -> Self {
        Self {
            current_function: None,
            current_file: file,
            functions: Vec::new(),
            current_complexity: 1, // Base complexity
            current_line: 0,
        }
    }
    /// Calculates complexity for a single expression.
    ///
    /// Adds to `current_complexity` for each decision point and recurses into
    /// most child expressions.
    ///
    /// NOTE(review): several arms below do NOT recurse into all of their
    /// children (loop bodies, match scrutinee/arm bodies, binary operands),
    /// so nested decision points in those positions are undercounted. The
    /// thresholds in `test_cyclomatic_complexity` are calibrated against this
    /// behavior — confirm before "fixing" the recursion.
    fn visit_expr(&mut self, expr: &syn::Expr) {
        match expr {
            syn::Expr::While(_) | syn::Expr::ForLoop(_) | syn::Expr::Loop(_) => {
                // NOTE(review): the loop counts as one decision point, but its
                // body (and a while-loop's condition) is never visited, so
                // branching inside the loop is not counted.
                self.current_complexity += 1;
            }
            syn::Expr::Match(m) => {
                // Each match arm adds complexity
                self.current_complexity += u32::try_from(m.arms.len()).unwrap_or(u32::MAX);
                // Guards add additional complexity
                for arm in &m.arms {
                    if arm.guard.is_some() {
                        self.current_complexity += 1;
                    }
                }
                // NOTE(review): the scrutinee expression and the arm bodies
                // are not visited here.
            }
            syn::Expr::Binary(bin) => {
                // && and || operators add complexity
                match bin.op {
                    syn::BinOp::And(_) | syn::BinOp::Or(_) => {
                        self.current_complexity += 1;
                    }
                    _ => {}
                }
                // NOTE(review): operands are not visited, so a chain such as
                // `a && b && c` (nested Binary nodes) counts only the
                // top-level operator.
            }
            syn::Expr::Try(_) => {
                // ? operator adds complexity
                self.current_complexity += 1;
            }
            syn::Expr::Call(call) => {
                // Recursively visit nested expressions
                self.visit_expr(&call.func);
                for arg in &call.args {
                    self.visit_expr(arg);
                }
            }
            syn::Expr::MethodCall(mcall) => {
                // Only the arguments are visited; the receiver is skipped.
                for arg in &mcall.args {
                    self.visit_expr(arg);
                }
            }
            syn::Expr::Block(block) => {
                for stmt in &block.block.stmts {
                    self.visit_stmt(stmt);
                }
            }
            syn::Expr::If(if_expr) => {
                self.current_complexity += 1;
                self.visit_expr(&if_expr.cond);
                // Visit then branch as a block
                for stmt in &if_expr.then_branch.stmts {
                    self.visit_stmt(stmt);
                }
                if let Some((_, else_expr)) = &if_expr.else_branch {
                    self.visit_expr(else_expr);
                }
            }
            syn::Expr::Unary(unary) => {
                self.visit_expr(&unary.expr);
            }
            syn::Expr::Paren(paren) => {
                self.visit_expr(&paren.expr);
            }
            syn::Expr::Group(group) => {
                self.visit_expr(&group.expr);
            }
            syn::Expr::Array(array) => {
                for elem in &array.elems {
                    self.visit_expr(elem);
                }
            }
            syn::Expr::Tuple(tuple) => {
                for elem in &tuple.elems {
                    self.visit_expr(elem);
                }
            }
            syn::Expr::Struct(struct_expr) => {
                for field in &struct_expr.fields {
                    self.visit_expr(&field.expr);
                }
            }
            syn::Expr::Repeat(repeat) => {
                self.visit_expr(&repeat.expr);
            }
            syn::Expr::Closure(closure) => {
                self.visit_expr(&closure.body);
            }
            syn::Expr::Async(async_expr) => {
                for stmt in &async_expr.block.stmts {
                    self.visit_stmt(stmt);
                }
            }
            syn::Expr::Await(await_expr) => {
                self.visit_expr(&await_expr.base);
            }
            syn::Expr::Let(let_expr) => {
                self.visit_expr(&let_expr.expr);
            }
            syn::Expr::Assign(assign) => {
                // Only the right-hand side is visited.
                self.visit_expr(&assign.right);
            }
            syn::Expr::Range(range) => {
                if let Some(start) = &range.start {
                    self.visit_expr(start);
                }
                if let Some(end) = &range.end {
                    self.visit_expr(end);
                }
            }
            syn::Expr::Index(index) => {
                self.visit_expr(&index.expr);
                self.visit_expr(&index.index);
            }
            syn::Expr::Field(field) => {
                self.visit_expr(&field.base);
            }
            _ => {
                // Leaf nodes and other expression types, no additional complexity
                // For other expression types, we could add more specific handling
                // but for now, we'll skip them to avoid over-counting
            }
        }
    }
    /// Calculates complexity for a single statement.
    ///
    /// Delegates to `visit_expr` for locals' initializers and expression
    /// statements; nested items and macros are skipped.
    fn visit_stmt(&mut self, stmt: &syn::Stmt) {
        match stmt {
            syn::Stmt::Local(local) => {
                if let Some(init) = &local.init {
                    self.visit_expr(&init.expr);
                }
            }
            syn::Stmt::Expr(expr, _) => {
                self.visit_expr(expr);
            }
            syn::Stmt::Item(_) | syn::Stmt::Macro(_) => {
                // Items and macros don't add complexity directly
                // Macros are complex but hard to analyze statically
            }
        }
    }
    /// Visits a function and calculates its complexity.
    ///
    /// Saves and restores the running counters so nested analysis cannot
    /// leak complexity between functions.
    fn visit_item_fn(&mut self, item_fn: &syn::ItemFn, name: String, line: usize) {
        let saved_complexity = self.current_complexity;
        let saved_function = self.current_function.clone();
        self.current_complexity = 1; // Base complexity
        self.current_function = Some(name.clone());
        self.current_line = line;
        // Visit the function body
        for stmt in &item_fn.block.stmts {
            self.visit_stmt(stmt);
        }
        // Save the function complexity
        self.functions.push(FunctionComplexity {
            name,
            file: self.current_file.clone(),
            complexity: self.current_complexity,
            line: self.current_line,
        });
        // Restore previous state
        self.current_complexity = saved_complexity;
        self.current_function = saved_function;
    }
    /// Visits an impl method and calculates its complexity.
    ///
    /// NOTE(review): near-duplicate of `visit_item_fn` — the two differ only
    /// in the body type (`ItemFn` vs `ImplItemFn`); keep them in sync.
    fn visit_impl_item_fn(&mut self, method: &syn::ImplItemFn, name: String, line: usize) {
        let saved_complexity = self.current_complexity;
        let saved_function = self.current_function.clone();
        self.current_complexity = 1; // Base complexity
        self.current_function = Some(name.clone());
        self.current_line = line;
        // Visit the method body
        for stmt in &method.block.stmts {
            self.visit_stmt(stmt);
        }
        // Save the method complexity
        self.functions.push(FunctionComplexity {
            name,
            file: self.current_file.clone(),
            complexity: self.current_complexity,
            line: self.current_line,
        });
        // Restore previous state
        self.current_complexity = saved_complexity;
        self.current_function = saved_function;
    }
    /// Visits an impl block to analyze methods.
    fn visit_impl(&mut self, item_impl: &syn::ItemImpl) {
        for item in &item_impl.items {
            if let syn::ImplItem::Fn(method) = item {
                let name = method.sig.ident.to_string();
                // Line numbers are not easily accessible from syn spans in syn 2.0
                // Using 0 as placeholder - could be enhanced with source file parsing
                let line = 0;
                self.visit_impl_item_fn(method, name, line);
            }
        }
    }
    /// Visits all items in a file.
    ///
    /// Only free functions and impl blocks are analyzed; nested modules and
    /// other items are skipped.
    fn visit_file(&mut self, file: &syn::File) {
        for item in &file.items {
            match item {
                syn::Item::Fn(item_fn) => {
                    let name = item_fn.sig.ident.to_string();
                    // Line numbers are not easily accessible from syn spans in syn 2.0
                    // Using 0 as placeholder - could be enhanced with source file parsing
                    let line = 0;
                    self.visit_item_fn(item_fn, name, line);
                }
                syn::Item::Impl(item_impl) => {
                    self.visit_impl(item_impl);
                }
                syn::Item::Mod(_item_mod) => {
                    // Nested modules are handled separately
                }
                _ => {}
            }
        }
    }
}
/// Analyzes a single Rust source file and returns its complexity metrics.
///
/// Reads and parses the file with `syn`, walks every top-level function and
/// impl method via `ComplexityVisitor`, then aggregates the per-function
/// complexities into a total and an average (0.0 when no functions exist).
fn analyze_file(file_path: &Path) -> Result<FileComplexity, Box<dyn std::error::Error>> {
    let source = fs::read_to_string(file_path)?;
    let ast = syn::parse_file(&source)?;
    let mut visitor = ComplexityVisitor::new(file_path.to_path_buf());
    visitor.visit_file(&ast);
    let function_count = visitor.functions.len();
    let total_complexity: u32 = visitor.functions.iter().map(|f| f.complexity).sum();
    #[allow(clippy::cast_precision_loss)]
    let avg_complexity = if function_count == 0 {
        0.0
    } else {
        f64::from(total_complexity) / function_count as f64
    };
    Ok(FileComplexity {
        functions: visitor.functions,
        total_complexity,
        avg_complexity,
    })
}
/// Recursively finds all Rust source files in a directory.
///
/// Walks `dir` depth-first, collecting every `*.rs` path. Any directory
/// literally named `target` is skipped (build artifacts). A non-directory
/// `dir` yields an empty list.
fn find_rust_files(dir: &Path) -> Result<Vec<PathBuf>, Box<dyn std::error::Error>> {
    let mut rust_files = Vec::new();
    if !dir.is_dir() {
        return Ok(rust_files);
    }
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            let is_target = path.file_name().and_then(|n| n.to_str()) == Some("target");
            if !is_target {
                rust_files.extend(find_rust_files(&path)?);
            }
        } else if path.extension().and_then(|e| e.to_str()) == Some("rs") {
            rust_files.push(path);
        }
    }
    Ok(rust_files)
}
/// Calculates cyclomatic complexity for the entire project.
///
/// Analyzes every Rust source file under `src/` and `tests/` (when those
/// directories exist) and maps each file path to its complexity metrics.
/// Files that fail to parse are skipped with a warning on stderr rather
/// than aborting the whole run.
fn calculate_project_complexity()
-> Result<HashMap<PathBuf, FileComplexity>, Box<dyn std::error::Error>> {
    let mut results = HashMap::new();
    // Both trees share identical per-file analysis; keep it in one helper
    // so the warning format and skip behavior cannot drift apart.
    for dir in [Path::new("src"), Path::new("tests")] {
        analyze_dir_into(dir, &mut results)?;
    }
    Ok(results)
}

/// Analyzes every Rust file under `dir` (if it exists) into `results`.
///
/// Directory-walk errors propagate; per-file parse failures are reported as
/// warnings on stderr, matching the original behavior.
fn analyze_dir_into(
    dir: &Path,
    results: &mut HashMap<PathBuf, FileComplexity>,
) -> Result<(), Box<dyn std::error::Error>> {
    if !dir.exists() {
        return Ok(());
    }
    for file in find_rust_files(dir)? {
        match analyze_file(&file) {
            Ok(complexity) => {
                results.insert(file.clone(), complexity);
            }
            Err(e) => {
                eprintln!(
                    "{COLOR_YELLOW}Warning:{COLOR_RESET} Failed to analyze {}: {}",
                    file.display(),
                    e
                );
            }
        }
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Test that calculates and reports cyclomatic complexity for the entire project.
    ///
    /// This test:
    /// - Analyzes all Rust source files in src/ and tests/
    /// - Calculates complexity for each function/method
    /// - Reports statistics and identifies high-complexity functions
    /// - Optionally fails if complexity exceeds thresholds
    #[test]
    fn test_cyclomatic_complexity() {
        // Complexity thresholds (commonly used guidelines)
        // NOTE(review): these are calibrated against the visitor's current
        // counting, which does not descend into loop bodies or match arms
        // (see `ComplexityVisitor::visit_expr`); the reported numbers are a
        // lower bound — keep that in mind when tuning thresholds.
        const VERY_HIGH_COMPLEXITY: u32 = 20;
        const HIGH_COMPLEXITY: u32 = 10;
        const MODERATE_COMPLEXITY: u32 = 5;
        const MAX_AVERAGE_COMPLEXITY: f64 = 8.0;
        const MAX_FILE_AVG_COMPLEXITY: f64 = 15.0;
        let results =
            calculate_project_complexity().expect("Failed to calculate project complexity");
        assert!(!results.is_empty(), "No Rust files found to analyze");
        // Collect all functions
        let mut all_functions = Vec::new();
        let mut total_project_complexity = 0u32;
        let mut total_functions = 0usize;
        for file_complexity in results.values() {
            total_project_complexity += file_complexity.total_complexity;
            total_functions += file_complexity.functions.len();
            all_functions.extend(file_complexity.functions.clone());
        }
        // Sort functions by complexity (highest first)
        all_functions.sort_by(|a, b| b.complexity.cmp(&a.complexity));
        // Print summary
        println!("\n=== Cyclomatic Complexity Report ===");
        println!("Total files analyzed: {}", results.len());
        println!("Total functions/methods: {total_functions}");
        println!("Total project complexity: {total_project_complexity}");
        if total_functions > 0 {
            #[allow(clippy::cast_precision_loss)]
            let avg_complexity = f64::from(total_project_complexity) / total_functions as f64;
            println!("Average complexity per function: {avg_complexity:.2}");
        }
        // Report top 10 most complex functions
        // NOTE(review): `func.line` is currently always 0 — the visitor emits
        // a placeholder because syn 2.0 spans don't expose line numbers here.
        println!("\n=== Top 10 Most Complex Functions ===");
        for (i, func) in all_functions.iter().take(10).enumerate() {
            println!(
                "{}. {} (complexity: {}) - {}:{}",
                i + 1,
                func.name,
                func.complexity,
                func.file.display(),
                func.line
            );
        }
        // Report files with highest complexity
        println!("\n=== Files by Total Complexity ===");
        let mut file_complexities: Vec<_> = results.iter().collect();
        file_complexities.sort_by(|a, b| b.1.total_complexity.cmp(&a.1.total_complexity));
        for (file, file_comp) in file_complexities.iter().take(10) {
            println!(
                "{}: total={}, avg={:.2}, functions={}",
                file.display(),
                file_comp.total_complexity,
                file_comp.avg_complexity,
                file_comp.functions.len()
            );
        }
        // Count functions by complexity level
        let very_high = all_functions
            .iter()
            .filter(|f| f.complexity >= VERY_HIGH_COMPLEXITY)
            .count();
        let high = all_functions
            .iter()
            .filter(|f| f.complexity >= HIGH_COMPLEXITY && f.complexity < VERY_HIGH_COMPLEXITY)
            .count();
        let moderate = all_functions
            .iter()
            .filter(|f| f.complexity >= MODERATE_COMPLEXITY && f.complexity < HIGH_COMPLEXITY)
            .count();
        println!("\n{COLOR_BOLD}{COLOR_BLUE}=== Complexity Distribution ==={COLOR_RESET}");
        println!("Very High (≥{VERY_HIGH_COMPLEXITY}): {very_high}");
        println!(
            "High ({}..{}): {}",
            HIGH_COMPLEXITY,
            VERY_HIGH_COMPLEXITY - 1,
            high
        );
        println!(
            "Moderate ({}..{}): {}",
            MODERATE_COMPLEXITY,
            HIGH_COMPLEXITY - 1,
            moderate
        );
        println!(
            "Low (<{}): {}",
            MODERATE_COMPLEXITY,
            total_functions - very_high - high - moderate
        );
        // List functions with very high complexity
        if very_high > 0 {
            println!(
                "\n{COLOR_BOLD}{COLOR_YELLOW}=== Functions with Very High Complexity (≥{VERY_HIGH_COMPLEXITY}) ==={COLOR_RESET}"
            );
            for func in all_functions
                .iter()
                .filter(|f| f.complexity >= VERY_HIGH_COMPLEXITY)
            {
                println!(
                    "  {} (complexity: {}) - {}:{}",
                    func.name,
                    func.complexity,
                    func.file.display(),
                    func.line
                );
            }
        }
        // ====================================================================
        // ASSERTIONS - Choose appropriate strictness level for your project
        // ====================================================================
        //
        // Industry guidelines for cyclomatic complexity:
        // - 1-4: Simple (ideal)
        // - 5-9: Moderate (acceptable)
        // - 10-19: High (consider refactoring)
        // - 20+: Very High (should be refactored)
        //
        // Note: Rust's pattern matching naturally increases complexity scores,
        // so slightly higher thresholds may be acceptable compared to other languages.
        // OPTION 1: STRICT - Fail if any function exceeds a threshold
        // Recommended threshold: 30-50 for Rust (due to match statements)
        // This catches extremely complex functions that definitely need refactoring
        // NOTE: Your project currently has functions > 50, so adjust threshold or refactor first
        /*
        const MAX_FUNCTION_COMPLEXITY: u32 = 50;
        let max_complexity = all_functions
            .iter()
            .map(|f| f.complexity)
            .max()
            .unwrap_or(0);
        assert!(
            max_complexity <= MAX_FUNCTION_COMPLEXITY,
            "Found function with complexity {} (max allowed: {}). Consider refactoring.",
            max_complexity,
            MAX_FUNCTION_COMPLEXITY
        );
        */
        // OPTION 2: MODERATE - Limit the number of very high complexity functions
        // Recommended: Allow 5-10% of functions to be very high complexity
        // Current project has 21/415 = 5.1% which is reasonable
        // This assertion currently passes with your project (21 <= 25)
        /*
        const MAX_VERY_HIGH_COMPLEXITY_FUNCTIONS: usize = 25; // ~6% of 415 functions
        assert!(
            very_high <= MAX_VERY_HIGH_COMPLEXITY_FUNCTIONS,
            "Too many functions with very high complexity (≥{}): {} (max allowed: {}). \
            Consider refactoring the most complex functions.",
            VERY_HIGH_COMPLEXITY,
            very_high,
            MAX_VERY_HIGH_COMPLEXITY_FUNCTIONS
        );
        */
        // OPTION 3: AVERAGE COMPLEXITY - Ensure overall codebase stays maintainable
        // Recommended: Average should stay below 8-10
        // Current project average: 6.00 (excellent) - this assertion currently passes
        #[allow(clippy::cast_precision_loss)]
        let avg_complexity = if total_functions > 0 {
            f64::from(total_project_complexity) / total_functions as f64
        } else {
            0.0
        };
        assert!(
            avg_complexity <= MAX_AVERAGE_COMPLEXITY,
            "Average complexity too high: {avg_complexity:.2} (max allowed: {MAX_AVERAGE_COMPLEXITY:.2}). \
            Consider refactoring high-complexity functions."
        );
        // OPTION 4: PROGRESSIVE - Prevent new extremely complex functions
        // This is more useful in CI/CD to prevent regression
        // Fail if any function exceeds a "hard limit" (e.g., 100+)
        // NOTE: Your project currently has handle_mouse_event with complexity 220
        // Consider refactoring before enabling this, or set threshold higher (e.g., 250)
        /*
        const HARD_LIMIT_COMPLEXITY: u32 = 100;
        let extremely_complex: Vec<_> = all_functions
            .iter()
            .filter(|f| f.complexity >= HARD_LIMIT_COMPLEXITY)
            .collect();
        if !extremely_complex.is_empty() {
            let names: Vec<String> = extremely_complex
                .iter()
                .map(|f| format!("{} ({})", f.name, f.complexity))
                .collect();
            panic!(
                "Found {} function(s) exceeding hard complexity limit (≥{}): {}. \
                These MUST be refactored before merging.",
                extremely_complex.len(),
                HARD_LIMIT_COMPLEXITY,
                names.join(", ")
            );
        }
        */
        // OPTION 5: FILE-LEVEL - Prevent individual files from becoming too complex
        // Recommended: No single file should have average complexity > 15
        // This is a warning only (doesn't fail the test)
        // Note: Only warn for files with multiple functions. For single-function files,
        // the individual function complexity check is more appropriate.
        for (file_path, file_comp) in &file_complexities {
            if file_comp.functions.len() > 1 && file_comp.avg_complexity > MAX_FILE_AVG_COMPLEXITY {
                eprintln!(
                    "{COLOR_YELLOW}Warning:{COLOR_RESET} File {} has high average complexity: {:.2} ({} functions)",
                    file_path.display(),
                    file_comp.avg_complexity,
                    file_comp.functions.len()
                );
            }
        }
        // ====================================================================
        // RECOMMENDATIONS FOR YOUR PROJECT:
        // ====================================================================
        // Based on current metrics:
        // - Average complexity: 6.00 (excellent, well below threshold)
        // - Very high complexity functions: 21 (5.1% of total, reasonable)
        // - Most complex function: handle_mouse_event (220) - consider refactoring
        //
        // Suggested approach:
        // 1. Start with OPTION 3 (average complexity) - already passes, prevents regression
        // 2. Enable OPTION 2 (limit very high complexity) with threshold 25-30
        // 3. For new code, consider OPTION 4 with threshold 250+ to prevent new extreme cases
        // 4. Refactor handle_mouse_event (220) and other functions > 100 when possible
        //
        // Uncomment the assertions above that match your project's needs.
        // For a new project, start with OPTION 1 (strict) and OPTION 4 (hard limit).
        // For an existing project, OPTION 2 (moderate) and OPTION 3 (average) are more practical.
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/loading_modal.rs | tests/other/loading_modal.rs | //! Integration tests for loading modal.
//!
//! Tests cover:
//! - Loading modal state creation
//! - Modal transition from Loading to result modal
//! - Loading message display
#![cfg(test)]
use pacsea::state::{AppState, Modal};
#[test]
/// What: Test Loading modal state creation.
///
/// Inputs:
/// - A loading message string.
///
/// Output:
/// - The modal stores the message exactly as given.
///
/// Details:
/// - Verifies a Loading modal can be constructed with a message.
fn integration_loading_modal_creation() {
    let app = AppState {
        modal: Modal::Loading {
            message: "Computing preflight summary...".to_string(),
        },
        ..Default::default()
    };
    if let Modal::Loading { message } = app.modal {
        assert_eq!(message, "Computing preflight summary...");
    } else {
        panic!("Expected Loading modal");
    }
}
#[test]
/// What: Test Loading modal with different messages.
///
/// Inputs:
/// - A set of representative loading message strings.
///
/// Output:
/// - Each message round-trips through the modal unchanged.
///
/// Details:
/// - Exercises several realistic loading scenarios.
fn integration_loading_modal_various_messages() {
    let messages = [
        "Loading dependencies...",
        "Resolving files...",
        "Checking services...",
        "Fetching package info...",
        "Calculating risk score...",
    ];
    for expected in messages {
        let app = AppState {
            modal: Modal::Loading {
                message: expected.to_string(),
            },
            ..Default::default()
        };
        if let Modal::Loading { message } = app.modal {
            assert_eq!(message, expected);
        } else {
            panic!("Expected Loading modal");
        }
    }
}
#[test]
/// What: Test Loading modal transition to Alert on error.
///
/// Inputs:
/// - An active Loading modal.
///
/// Output:
/// - The modal becomes an Alert carrying the error text.
///
/// Details:
/// - Simulates a failed async computation replacing the loading state.
fn integration_loading_modal_to_alert_on_error() {
    let mut app = AppState {
        modal: Modal::Loading {
            message: "Loading...".to_string(),
        },
        ..Default::default()
    };
    // Simulate error during loading
    app.modal = Modal::Alert {
        message: "Failed to load data: network error".to_string(),
    };
    if let Modal::Alert { message } = app.modal {
        assert!(message.contains("Failed"));
        assert!(message.contains("network error"));
    } else {
        panic!("Expected Alert modal");
    }
}
#[test]
/// What: Test Loading modal transition to Preflight on success.
///
/// Inputs:
/// - Loading modal active during preflight computation.
///
/// Output:
/// - Modal transitions to Preflight on success.
///
/// Details:
/// - Simulates successful async preflight computation.
fn integration_loading_modal_to_preflight_on_success() {
    use pacsea::state::{PackageItem, PreflightAction, PreflightTab, Source};
    use pacsea::state::modal::{CascadeMode, PreflightHeaderChips};
    use std::collections::HashSet;
    // Start in the Loading state, as if a preflight computation is running.
    let mut app = AppState {
        modal: Modal::Loading {
            message: "Computing preflight summary...".to_string(),
        },
        ..Default::default()
    };
    // Minimal AUR package used as the single preflight item.
    let pkg = PackageItem {
        name: "test-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    // Simulate successful computation - transition to Preflight
    // (the full Modal::Preflight field set is spelled out with empty/default
    // per-tab state: summary, dependencies, files, services, sandbox).
    app.modal = Modal::Preflight {
        items: vec![pkg],
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: PreflightHeaderChips::default(),
        dependency_info: vec![],
        dep_selected: 0,
        dep_tree_expanded: HashSet::new(),
        deps_error: None,
        file_info: vec![],
        file_selected: 0,
        file_tree_expanded: HashSet::new(),
        files_error: None,
        service_info: vec![],
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: vec![],
        sandbox_selected: 0,
        sandbox_tree_expanded: HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
    // The resulting modal must carry the original item and action through.
    match app.modal {
        Modal::Preflight { items, action, .. } => {
            assert_eq!(items.len(), 1);
            assert_eq!(items[0].name, "test-pkg");
            assert_eq!(action, PreflightAction::Install);
        }
        _ => panic!("Expected Preflight modal"),
    }
}
#[test]
/// What: Test Loading modal transition to None on cancel.
///
/// Inputs:
/// - Loading modal active.
///
/// Output:
/// - Modal transitions to None on cancel.
///
/// Details:
/// - Simulates user cancelling loading operation.
fn integration_loading_modal_cancellation() {
    let mut app = AppState {
        modal: Modal::Loading {
            message: String::from("Loading..."),
        },
        ..Default::default()
    };
    // The user aborts: the UI drops straight back to "no modal".
    app.modal = Modal::None;
    assert!(matches!(app.modal, Modal::None));
}
#[test]
/// What: Test Loading modal with empty message.
///
/// Inputs:
/// - Loading modal with empty message.
///
/// Output:
/// - Modal handles empty message gracefully.
///
/// Details:
/// - Edge case for loading without specific message.
fn integration_loading_modal_empty_message() {
    let app = AppState {
        modal: Modal::Loading {
            message: String::new(),
        },
        ..Default::default()
    };
    let Modal::Loading { message } = app.modal else {
        panic!("Expected Loading modal")
    };
    assert!(message.is_empty());
}
#[test]
/// What: Test Loading modal with long message.
///
/// Inputs:
/// - Loading modal with very long message.
///
/// Output:
/// - Modal handles long message correctly.
///
/// Details:
/// - Edge case for verbose loading messages.
fn integration_loading_modal_long_message() {
    // 500 identical characters exercise storage of oversized messages.
    let long_message: String = std::iter::repeat('A').take(500).collect();
    let app = AppState {
        modal: Modal::Loading {
            message: long_message.clone(),
        },
        ..Default::default()
    };
    let Modal::Loading { message } = app.modal else {
        panic!("Expected Loading modal")
    };
    assert_eq!(message.len(), 500);
    assert_eq!(message, long_message);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other/terminals_args_shape.rs | tests/other/terminals_args_shape.rs | #![cfg(not(target_os = "windows"))]
use crossterm::event::{
Event as CEvent, KeyCode, KeyEvent, KeyModifiers, MouseButton, MouseEvent, MouseEventKind,
};
use pacsea as crate_root;
/// What: Materialize a fake terminal executable plus its argv capture file path.
///
/// Inputs:
/// - `term_name`: file name of the fake terminal binary to create.
/// - `dir`: directory that will hold both the script and `args.txt`.
///
/// Output:
/// - Tuple of (path to the executable script, path to the args capture file).
///
/// Details:
/// - The script truncates `$PACSEA_TEST_OUT`, then appends each argv entry on
///   its own line, so tests can assert on the exact argument shape.
/// - The script is marked 0o755 so it is runnable via PATH lookup.
fn write_fake(term_name: &str, dir: &std::path::Path) -> (std::path::PathBuf, std::path::PathBuf) {
    use std::os::unix::fs::PermissionsExt;
    let out_path = dir.join("args.txt");
    let term_path = dir.join(term_name);
    let script = "#!/bin/sh\n: > \"$PACSEA_TEST_OUT\"\nfor a in \"$@\"; do printf '%s\n' \"$a\" >> \"$PACSEA_TEST_OUT\"; done\n";
    std::fs::write(&term_path, script).expect("failed to write test terminal script");
    let mut perms = std::fs::metadata(&term_path)
        .expect("failed to read test terminal script metadata")
        .permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&term_path, perms)
        .expect("failed to set test terminal script permissions");
    (term_path, out_path)
}
#[test]
/// What: Test tilix terminal argument shape for system update command.
///
/// Inputs:
/// - Fake tilix executable on PATH.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First three args are `["--", "bash", "-lc"]`.
///
/// Details:
/// - Verifies tilix terminal emulator uses "--", "bash", "-lc" argument pattern.
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_tilix_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_tilix_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("tilix", &dir);
    let orig_path = std::env::var_os("PATH");
    // Prepend the scratch dir so the fake tilix wins terminal detection
    // while the rest of PATH stays usable.
    let combined_path = std::env::var("PATH").map_or_else(
        |_| dir.display().to_string(),
        |p| format!("{}:{}", dir.display(), p),
    );
    unsafe {
        std::env::set_var("PATH", combined_path);
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
    assert_eq!(lines[0], "--");
    assert_eq!(lines[1], "bash");
    assert_eq!(lines[2], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
#[test]
/// What: Test mate-terminal argument shape for system update command.
///
/// Inputs:
/// - Fake mate-terminal executable on PATH.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First three args are `["--", "bash", "-lc"]`.
///
/// Details:
/// - Verifies mate-terminal uses "--", "bash", "-lc" argument pattern.
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_mate_terminal_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_mate_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("mate-terminal", &dir);
    let orig_path = std::env::var_os("PATH");
    // Prepend the scratch dir so the fake mate-terminal wins detection.
    let combined_path = std::env::var("PATH").map_or_else(
        |_| dir.display().to_string(),
        |p| format!("{}:{}", dir.display(), p),
    );
    unsafe {
        std::env::set_var("PATH", combined_path);
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
    assert_eq!(lines[0], "--");
    assert_eq!(lines[1], "bash");
    assert_eq!(lines[2], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
#[test]
/// What: Test gnome-terminal argument shape for system update command.
///
/// Inputs:
/// - Fake gnome-terminal executable with PATH isolated.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First three args are `["--", "bash", "-lc"]`.
///
/// Details:
/// - Verifies gnome-terminal uses "--", "bash", "-lc" argument pattern.
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_gnome_terminal_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_gnome_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("gnome-terminal", &dir);
    let orig_path = std::env::var_os("PATH");
    unsafe {
        // PATH contains only the scratch dir so the fake gnome-terminal is
        // the sole terminal candidate; headless mode keeps the run quiet.
        std::env::set_var("PATH", dir.display().to_string());
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
    assert_eq!(lines[0], "--");
    assert_eq!(lines[1], "bash");
    assert_eq!(lines[2], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
#[test]
/// What: Test konsole terminal argument shape for system update command.
///
/// Inputs:
/// - Fake konsole executable with PATH isolated.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First three args are `["-e", "bash", "-lc"]`.
///
/// Details:
/// - Verifies konsole uses "-e", "bash", "-lc" argument pattern.
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_konsole_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_konsole_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("konsole", &dir);
    let orig_path = std::env::var_os("PATH");
    unsafe {
        // PATH contains only the scratch dir so the fake konsole is the
        // sole terminal candidate; headless mode keeps the run quiet.
        std::env::set_var("PATH", dir.display().to_string());
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
    assert_eq!(lines[0], "-e");
    assert_eq!(lines[1], "bash");
    assert_eq!(lines[2], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
#[test]
/// What: Test alacritty terminal argument shape for system update command.
///
/// Inputs:
/// - Fake alacritty executable with PATH isolated.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First three args are `["-e", "bash", "-lc"]`.
///
/// Details:
/// - Verifies alacritty uses "-e", "bash", "-lc" argument pattern.
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_alacritty_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_alacritty_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("alacritty", &dir);
    let orig_path = std::env::var_os("PATH");
    unsafe {
        // PATH contains only the scratch dir so the fake alacritty is the
        // sole terminal candidate; headless mode keeps the run quiet.
        std::env::set_var("PATH", dir.display().to_string());
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
    assert_eq!(lines[0], "-e");
    assert_eq!(lines[1], "bash");
    assert_eq!(lines[2], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
#[test]
/// What: Test kitty terminal argument shape for system update command.
///
/// Inputs:
/// - Fake kitty executable with PATH isolated.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First two args are `["bash", "-lc"]`.
///
/// Details:
/// - Verifies kitty uses "bash", "-lc" argument pattern (no prefix flags).
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_kitty_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_kitty_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("kitty", &dir);
    let orig_path = std::env::var_os("PATH");
    unsafe {
        // PATH contains only the scratch dir so the fake kitty is the sole
        // terminal candidate; headless mode keeps the run quiet.
        std::env::set_var("PATH", dir.display().to_string());
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 2, "expected at least 2 args, got: {body}");
    assert_eq!(lines[0], "bash");
    assert_eq!(lines[1], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
#[test]
/// What: Test xterm terminal argument shape for system update command.
///
/// Inputs:
/// - Fake xterm executable with PATH isolated.
/// - `SystemUpdate` Enter key event.
///
/// Output:
/// - First four args are `["-hold", "-e", "bash", "-lc"]`.
///
/// Details:
/// - Verifies xterm uses "-hold", "-e", "bash", "-lc" argument pattern.
/// - Tests terminal argument shape detection for system update operations.
/// - Restores `PATH` and removes both `PACSEA_TEST_OUT` and
///   `PACSEA_TEST_HEADLESS` on exit so the test does not leak env state.
fn ui_options_update_system_enter_triggers_xterm_args_shape() {
    use std::path::PathBuf;
    // Unique scratch directory: pid + nanos avoids collisions between runs.
    let mut dir: PathBuf = std::env::temp_dir();
    dir.push(format!(
        "pacsea_test_term_xterm_{}_{}",
        std::process::id(),
        std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("SystemTime before UNIX_EPOCH")
            .as_nanos()
    ));
    let _ = std::fs::create_dir_all(&dir);
    let (_term_path, out_path) = write_fake("xterm", &dir);
    let orig_path = std::env::var_os("PATH");
    unsafe {
        // PATH contains only the scratch dir so the fake xterm is the sole
        // terminal candidate; headless mode keeps the run quiet.
        std::env::set_var("PATH", dir.display().to_string());
        std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let (qtx, _qrx) = tokio::sync::mpsc::unbounded_channel();
    let (dtx, _drx) = tokio::sync::mpsc::unbounded_channel();
    let (ptx, _prx) = tokio::sync::mpsc::unbounded_channel();
    let (atx, _arx) = tokio::sync::mpsc::unbounded_channel();
    let (pkgb_tx, _pkgb_rx) = tokio::sync::mpsc::unbounded_channel();
    let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
    // Open the Options menu via a click inside its button rect.
    app.options_button_rect = Some((5, 5, 10, 1));
    let click_options = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 5,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_options,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Click the "update system" menu entry.
    app.options_menu_rect = Some((5, 6, 20, 3));
    let click_menu_update = CEvent::Mouse(MouseEvent {
        kind: MouseEventKind::Down(MouseButton::Left),
        column: 6,
        row: 7,
        modifiers: KeyModifiers::empty(),
    });
    let _ = crate_root::events::handle_event(
        &click_menu_update,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Confirm with Enter, which spawns the (fake) terminal.
    let enter = CEvent::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty()));
    let _ = crate_root::events::handle_event(
        &enter,
        &mut app,
        &qtx,
        &dtx,
        &ptx,
        &atx,
        &pkgb_tx,
        &comments_tx,
    );
    // Give the spawned script a moment to record its argv.
    std::thread::sleep(std::time::Duration::from_millis(50));
    let body = std::fs::read_to_string(&out_path).expect("fake terminal args file written");
    let lines: Vec<&str> = body.lines().collect();
    assert!(lines.len() >= 4, "expected at least 4 args, got: {body}");
    assert_eq!(lines[0], "-hold");
    assert_eq!(lines[1], "-e");
    assert_eq!(lines[2], "bash");
    assert_eq!(lines[3], "-lc");
    unsafe {
        // Restore PATH and remove every test-only variable, including
        // PACSEA_TEST_HEADLESS which was previously leaked.
        if let Some(v) = orig_path {
            std::env::set_var("PATH", v);
        } else {
            std::env::remove_var("PATH");
        }
        std::env::remove_var("PACSEA_TEST_OUT");
        std::env::remove_var("PACSEA_TEST_HEADLESS");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/install_integration.rs | tests/install/install_integration.rs | //! Integration tests for the install process.
//!
//! Tests cover:
//! - Full install flow from Enter key to `PreflightExec` modal
//! - Skip preflight flow
//! - Password prompt flow
//! - Executor request handling
//! - Modal transitions
#![cfg(test)]
use pacsea::install::{ExecutorOutput, ExecutorRequest};
use pacsea::state::{AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source};
/// What: Build a `PackageItem` fixture for install-process tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - All optional metadata is cleared and the version pinned to "1.0.0" so
///   tests stay deterministic.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    let version = "1.0.0".to_string();
    PackageItem {
        name: name.to_string(),
        version,
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test preflight modal state creation.
///
/// Inputs:
/// - Install list with packages.
///
/// Output:
/// - `Preflight` modal can be created with correct items and action.
///
/// Details:
/// - Verifies preflight modal state structure.
fn integration_preflight_modal_state() {
    use pacsea::state::modal::{CascadeMode, PreflightHeaderChips, RiskLevel};
    use std::collections::{HashMap, HashSet};
    let packages = vec![
        create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package("yay-bin", Source::Aur),
    ];
    // Header chips mirror the two-package list above (one AUR entry).
    let chips = PreflightHeaderChips {
        package_count: 2,
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 1,
        risk_score: 2,
        risk_level: RiskLevel::Medium,
    };
    let app = AppState {
        modal: Modal::Preflight {
            items: packages,
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            summary: None,
            summary_scroll: 0,
            header_chips: chips,
            dependency_info: Vec::new(),
            dep_selected: 0,
            dep_tree_expanded: HashSet::new(),
            deps_error: None,
            file_info: Vec::new(),
            file_selected: 0,
            file_tree_expanded: HashSet::new(),
            files_error: None,
            service_info: Vec::new(),
            service_selected: 0,
            services_loaded: false,
            services_error: None,
            sandbox_info: Vec::new(),
            sandbox_selected: 0,
            sandbox_tree_expanded: HashSet::new(),
            sandbox_loaded: false,
            sandbox_error: None,
            selected_optdepends: HashMap::new(),
            cascade_mode: CascadeMode::Basic,
            cached_reverse_deps_report: None,
        },
        ..Default::default()
    };
    match &app.modal {
        Modal::Preflight {
            items: modal_items,
            action,
            tab,
            ..
        } => {
            assert_eq!(modal_items.len(), 2);
            assert_eq!(*action, PreflightAction::Install);
            assert_eq!(*tab, PreflightTab::Summary);
        }
        other => panic!("Expected Preflight modal, got: {other:?}"),
    }
}
#[test]
/// What: Test executor request creation for install.
///
/// Inputs:
/// - Package items, password, `dry_run` flag.
///
/// Output:
/// - `ExecutorRequest::Install` with correct parameters.
///
/// Details:
/// - Verifies executor request is created correctly from install parameters.
fn integration_executor_request_creation() {
    let official = Source::Official {
        repo: "extra".into(),
        arch: "x86_64".into(),
    };
    let request = ExecutorRequest::Install {
        items: vec![
            create_test_package("ripgrep", official),
            create_test_package("yay-bin", Source::Aur),
        ],
        password: Some("testpass".to_string()),
        dry_run: false,
    };
    match request {
        ExecutorRequest::Install {
            items: req_items,
            password,
            dry_run,
        } => {
            assert_eq!(req_items.len(), 2);
            assert_eq!(password.as_deref(), Some("testpass"));
            assert!(!dry_run);
        }
        // Spell out all remaining variants so a new variant forces an update.
        ExecutorRequest::Remove { .. }
        | ExecutorRequest::Downgrade { .. }
        | ExecutorRequest::Update { .. }
        | ExecutorRequest::CustomCommand { .. }
        | ExecutorRequest::Scan { .. } => {
            panic!("Expected Install request")
        }
    }
}
#[test]
/// What: Test password prompt modal state.
///
/// Inputs:
/// - `PasswordPrompt` modal with password input.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies password prompt modal can be created.
fn integration_password_prompt_state() {
    let typed = "testpassword";
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: pacsea::state::modal::PasswordPurpose::Install,
            items: vec![create_test_package(
                "ripgrep",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            input: typed.to_string(),
            // Cursor sits at the end of the typed password.
            cursor: 12,
            error: None,
        },
        ..Default::default()
    };
    match &app.modal {
        Modal::PasswordPrompt {
            items,
            input,
            cursor,
            ..
        } => {
            assert_eq!(items.len(), 1);
            assert_eq!(input.as_str(), typed);
            assert_eq!(*cursor, 12);
        }
        _ => panic!("Expected PasswordPrompt modal"),
    }
}
#[test]
/// What: Test `PreflightExec` modal state transitions.
///
/// Inputs:
/// - `PreflightExec` modal with various states.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies `PreflightExec` modal can be created and accessed.
fn integration_preflight_exec_state() {
    use pacsea::state::modal::{PreflightHeaderChips, RiskLevel};
    // Chips describe a single low-risk official package.
    let chips = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 1000,
        install_delta_bytes: 500,
        aur_count: 0,
        risk_score: 0,
        risk_level: RiskLevel::Low,
    };
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package(
                "ripgrep",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec!["Test output".to_string()],
            abortable: true,
            header_chips: chips,
            success: None,
        },
        ..Default::default()
    };
    match &app.modal {
        Modal::PreflightExec {
            items: exec_items,
            action,
            verbose,
            abortable,
            ..
        } => {
            assert_eq!(exec_items.len(), 1);
            assert_eq!(*action, PreflightAction::Install);
            assert!(!*verbose);
            assert!(*abortable);
        }
        _ => panic!("Expected PreflightExec modal"),
    }
}
#[test]
/// What: Test executor output handling.
///
/// Inputs:
/// - Various `ExecutorOutput` messages.
///
/// Output:
/// - Output messages are correctly structured.
///
/// Details:
/// - Verifies `ExecutorOutput` enum variants work correctly.
fn integration_executor_output_handling() {
    // Line output
    let ExecutorOutput::Line(line) = ExecutorOutput::Line("Test line".to_string()) else {
        panic!("Expected Line variant")
    };
    assert_eq!(line, "Test line");
    // ReplaceLastLine output
    let ExecutorOutput::ReplaceLastLine(replaced) =
        ExecutorOutput::ReplaceLastLine("Updated line".to_string())
    else {
        panic!("Expected ReplaceLastLine variant")
    };
    assert_eq!(replaced, "Updated line");
    // Finished output
    let finished = ExecutorOutput::Finished {
        success: true,
        exit_code: Some(0),
        failed_command: None,
    };
    let ExecutorOutput::Finished {
        success, exit_code, ..
    } = finished
    else {
        panic!("Expected Finished variant")
    };
    assert!(success);
    assert_eq!(exit_code, Some(0));
    // Error output
    let ExecutorOutput::Error(msg) = ExecutorOutput::Error("Test error".to_string()) else {
        panic!("Expected Error variant")
    };
    assert_eq!(msg, "Test error");
}
#[test]
/// What: Test executor request with empty password.
///
/// Inputs:
/// - Executor request with None password.
///
/// Output:
/// - Request correctly stores None password.
///
/// Details:
/// - Empty password should result in None password in executor request.
fn integration_empty_password_handling() {
    let request = ExecutorRequest::Install {
        items: vec![create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        )],
        password: None,
        dry_run: false,
    };
    match request {
        ExecutorRequest::Install { password, .. } => {
            assert_eq!(password, None, "Empty password should result in None");
        }
        // Spell out all remaining variants so a new variant forces an update.
        ExecutorRequest::Remove { .. }
        | ExecutorRequest::Downgrade { .. }
        | ExecutorRequest::Update { .. }
        | ExecutorRequest::CustomCommand { .. }
        | ExecutorRequest::Scan { .. } => {
            panic!("Expected Install executor request")
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/install_ui.rs | tests/install/install_ui.rs | //! UI tests for install process modals.
//!
//! Tests cover:
//! - `PreflightExec` modal state structure
//! - Preflight modal state structure
//! - Modal state transitions
#![cfg(test)]
use pacsea::state::{
Modal, PackageItem, PreflightAction, PreflightTab, Source, modal::PreflightHeaderChips,
};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
/// What: Build a `PackageItem` for use in tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - A `PackageItem` at fixed version `1.0.0` with empty metadata
///
/// Details:
/// - Keeps every test package structurally identical apart from name/source
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: name.to_string(),
        version: String::from("1.0.0"),
        description: String::default(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Validate `PreflightExec` modal construction for an install.
///
/// Inputs:
/// - Two packages (one official, one AUR), header chips, and log lines.
///
/// Output:
/// - Every modal field reflects the constructor arguments.
///
/// Details:
/// - Destructures the modal with `let-else` and checks each stored field.
fn ui_preflight_exec_install_state() {
    let packages = vec![
        create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package("yay-bin", Source::Aur),
    ];
    let chips = PreflightHeaderChips {
        package_count: 2,
        download_bytes: 5000,
        install_delta_bytes: 2000,
        aur_count: 1,
        risk_score: 2,
        risk_level: pacsea::state::modal::RiskLevel::Medium,
    };
    let output = vec![
        "Resolving dependencies...".to_string(),
        "Downloading packages...".to_string(),
        "Installing packages...".to_string(),
    ];
    let modal = Modal::PreflightExec {
        items: packages,
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: output,
        abortable: true,
        header_chips: chips,
        success: None,
    };
    let Modal::PreflightExec {
        items,
        action,
        tab,
        verbose,
        log_lines,
        abortable,
        header_chips,
        ..
    } = modal
    else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(items.len(), 2);
    assert_eq!(action, PreflightAction::Install);
    assert_eq!(tab, PreflightTab::Summary);
    assert!(!verbose);
    assert_eq!(log_lines.len(), 3);
    assert!(abortable);
    assert_eq!(header_chips.package_count, 2);
    assert_eq!(header_chips.aur_count, 1);
}
#[test]
/// What: Validate `PreflightExec` modal construction for a removal.
///
/// Inputs:
/// - One package, remove action, negative install delta, not abortable.
///
/// Output:
/// - Action, abortable flag, and install delta are stored as given.
///
/// Details:
/// - Confirms the remove flow uses the same modal shape as install.
fn ui_preflight_exec_remove_state() {
    let packages = vec![create_test_package(
        "old-package",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let chips = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 0,
        install_delta_bytes: -1000,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    let modal = Modal::PreflightExec {
        items: packages,
        action: PreflightAction::Remove,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec!["Removing packages...".to_string()],
        abortable: false, // finished
        header_chips: chips,
        success: None,
    };
    let Modal::PreflightExec {
        action,
        abortable,
        header_chips,
        ..
    } = modal
    else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(action, PreflightAction::Remove);
    assert!(!abortable);
    assert_eq!(header_chips.install_delta_bytes, -1000);
}
#[test]
/// What: Ensure `PreflightExec` accepts an empty log buffer.
///
/// Inputs:
/// - Modal built with zero log lines.
///
/// Output:
/// - The stored log vector is empty.
///
/// Details:
/// - No output yet is a valid state right after execution starts.
fn ui_preflight_exec_empty_logs() {
    let packages = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let chips = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 1000,
        install_delta_bytes: 500,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    let modal = Modal::PreflightExec {
        items: packages,
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: Vec::new(),
        abortable: true,
        header_chips: chips,
        success: None,
    };
    let Modal::PreflightExec { log_lines, .. } = modal else {
        panic!("Expected PreflightExec modal");
    };
    assert!(log_lines.is_empty());
}
#[test]
/// What: Check the modal stores each `PreflightTab` selection.
///
/// Inputs:
/// - One modal built per tab variant.
///
/// Output:
/// - The stored tab equals the one supplied.
///
/// Details:
/// - Iterates over all tab variants and round-trips each one.
fn ui_preflight_exec_tabs() {
    let packages = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let chips = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 1000,
        install_delta_bytes: 500,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    let output = vec!["Test output".to_string()];
    let all_tabs = [
        PreflightTab::Summary,
        PreflightTab::Deps,
        PreflightTab::Files,
        PreflightTab::Services,
        PreflightTab::Sandbox,
    ];
    for selected in all_tabs {
        let modal = Modal::PreflightExec {
            items: packages.clone(),
            action: PreflightAction::Install,
            tab: selected,
            verbose: false,
            log_lines: output.clone(),
            abortable: true,
            header_chips: chips.clone(),
            success: None,
        };
        let Modal::PreflightExec { tab, .. } = modal else {
            panic!("Expected PreflightExec modal");
        };
        assert_eq!(tab, selected);
    }
}
#[test]
/// What: Check the verbose flag survives modal construction.
///
/// Inputs:
/// - Modal built with `verbose: true`.
///
/// Output:
/// - The stored flag is `true`.
///
/// Details:
/// - Verbose mode controls extra log output in the UI.
fn ui_preflight_exec_verbose() {
    let packages = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let chips = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 1000,
        install_delta_bytes: 500,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    let modal = Modal::PreflightExec {
        items: packages,
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: true,
        log_lines: vec!["Verbose output".to_string()],
        abortable: true,
        header_chips: chips,
        success: None,
    };
    let Modal::PreflightExec { verbose, .. } = modal else {
        panic!("Expected PreflightExec modal");
    };
    assert!(verbose);
}
#[test]
/// What: Check the finished (non-abortable) state is stored.
///
/// Inputs:
/// - Modal built with `abortable: false`.
///
/// Output:
/// - The stored flag is `false`.
///
/// Details:
/// - A finished run can no longer be aborted by the user.
fn ui_preflight_exec_finished() {
    let packages = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let chips = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 1000,
        install_delta_bytes: 500,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    let modal = Modal::PreflightExec {
        items: packages,
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec!["Installation complete".to_string()],
        abortable: false, // finished
        header_chips: chips,
        success: None,
    };
    let Modal::PreflightExec { abortable, .. } = modal else {
        panic!("Expected PreflightExec modal");
    };
    assert!(!abortable);
}
#[test]
/// What: Check the modal holds a large package list intact.
///
/// Inputs:
/// - 20 generated official packages.
///
/// Output:
/// - All 20 items and the matching chip count are stored.
///
/// Details:
/// - Guards against truncation of larger transactions.
fn ui_preflight_exec_many_packages() {
    // Generate 20 distinct packages.
    let packages: Vec<PackageItem> = (0..20)
        .map(|i| {
            create_test_package(
                &format!("package-{i}"),
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )
        })
        .collect();
    let chips = PreflightHeaderChips {
        package_count: 20,
        download_bytes: 20000,
        install_delta_bytes: 10000,
        aur_count: 0,
        risk_score: 0,
        risk_level: pacsea::state::modal::RiskLevel::Low,
    };
    let modal = Modal::PreflightExec {
        items: packages,
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec!["Installing packages...".to_string()],
        abortable: true,
        header_chips: chips,
        success: None,
    };
    let Modal::PreflightExec {
        items,
        header_chips,
        ..
    } = modal
    else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(items.len(), 20);
    assert_eq!(header_chips.package_count, 20);
}
#[test]
/// What: Test `PreflightExec` modal with different risk levels.
///
/// Inputs:
/// - `PreflightExec` modal with Low, Medium, and High risk levels.
///
/// Output:
/// - Modal state correctly stores risk level and score.
///
/// Details:
/// - Risk level and score are carried in the header chips.
fn ui_preflight_exec_risk_levels() {
    let items = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let log_lines = vec!["Test output".to_string()];
    // Exercise every risk level together with a representative score.
    for (risk_level, risk_score) in [
        (pacsea::state::modal::RiskLevel::Low, 0),
        (pacsea::state::modal::RiskLevel::Medium, 2),
        (pacsea::state::modal::RiskLevel::High, 5),
    ] {
        let header_chips = PreflightHeaderChips {
            package_count: 1,
            download_bytes: 1000,
            install_delta_bytes: 500,
            aur_count: 0,
            risk_score,
            risk_level,
        };
        let modal = Modal::PreflightExec {
            items: items.clone(),
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: log_lines.clone(),
            abortable: true,
            // `header_chips` is rebuilt each iteration and never used after
            // this point, so it is moved instead of cloned.
            header_chips,
            success: None,
        };
        match modal {
            Modal::PreflightExec {
                header_chips: ref chips,
                ..
            } => {
                assert_eq!(chips.risk_level, risk_level);
                assert_eq!(chips.risk_score, risk_score);
            }
            _ => panic!("Expected PreflightExec modal"),
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/password_prompt.rs | tests/install/password_prompt.rs | //! Integration tests for password prompt modal.
//!
//! Tests cover:
//! - Password prompt for each `PasswordPurpose` variant
//! - Incorrect password retry with error message
//! - Password prompt cancellation
//! - Password masking verification
//! - Password state transitions
#![cfg(test)]
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source,
modal::{PasswordPurpose, PreflightHeaderChips},
};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
/// What: Construct a minimal `PackageItem` for tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - A `PackageItem` at version `1.0.0` with no extra metadata
///
/// Details:
/// - Centralizes package construction so all tests share one shape
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        source,
        name: name.to_owned(),
        version: "1.0.0".to_string(),
        description: String::new(),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test password prompt for Install purpose.
///
/// Inputs:
/// - `PasswordPrompt` modal with `Install` purpose and one package.
///
/// Output:
/// - Modal state is correctly structured with Install purpose.
///
/// Details:
/// - Verifies password prompt can be created for install operations.
fn integration_password_prompt_install_purpose() {
    let items = vec![create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            // `items` is not used again below, so it is moved instead of cloned.
            items,
            input: String::new(),
            cursor: 0,
            error: None,
        },
        ..Default::default()
    };
    match app.modal {
        Modal::PasswordPrompt {
            purpose,
            items: modal_items,
            input,
            cursor,
            error,
        } => {
            assert_eq!(purpose, PasswordPurpose::Install);
            assert_eq!(modal_items.len(), 1);
            assert_eq!(modal_items[0].name, "test-pkg");
            assert!(input.is_empty());
            assert_eq!(cursor, 0);
            assert!(error.is_none());
        }
        _ => panic!("Expected PasswordPrompt modal"),
    }
}
#[test]
/// What: Verify the password prompt supports the Remove purpose.
///
/// Inputs:
/// - `PasswordPrompt` modal built with `PasswordPurpose::Remove`.
///
/// Output:
/// - The stored purpose is `Remove`.
///
/// Details:
/// - Removal operations also require sudo authentication.
fn integration_password_prompt_remove_purpose() {
    let packages = vec![create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Remove,
            items: packages,
            input: String::new(),
            cursor: 0,
            error: None,
        },
        ..Default::default()
    };
    let Modal::PasswordPrompt { purpose, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::Remove);
}
#[test]
/// What: Verify the password prompt supports the Update purpose.
///
/// Inputs:
/// - `PasswordPrompt` modal built with `PasswordPurpose::Update` and no items.
///
/// Output:
/// - The stored purpose is `Update` and the item list is empty.
///
/// Details:
/// - System updates operate on the whole system rather than a package list.
fn integration_password_prompt_update_purpose() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Update,
            items: Vec::new(),
            input: String::new(),
            cursor: 0,
            error: None,
        },
        ..Default::default()
    };
    let Modal::PasswordPrompt { purpose, items, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::Update);
    assert!(items.is_empty(), "Update purpose may have empty items");
}
#[test]
/// What: Verify the password prompt supports the Downgrade purpose.
///
/// Inputs:
/// - `PasswordPrompt` modal built with `PasswordPurpose::Downgrade`.
///
/// Output:
/// - The stored purpose is `Downgrade`.
///
/// Details:
/// - Downgrades require sudo just like installs.
fn integration_password_prompt_downgrade_purpose() {
    let packages = vec![create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Downgrade,
            items: packages,
            input: String::new(),
            cursor: 0,
            error: None,
        },
        ..Default::default()
    };
    let Modal::PasswordPrompt { purpose, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::Downgrade);
}
#[test]
/// What: Verify the password prompt supports the FileSync purpose.
///
/// Inputs:
/// - `PasswordPrompt` modal built with `PasswordPurpose::FileSync` and no items.
///
/// Output:
/// - The stored purpose is `FileSync` and the item list is empty.
///
/// Details:
/// - Syncing the file database is a system-wide action without a package list.
fn integration_password_prompt_filesync_purpose() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::FileSync,
            items: Vec::new(),
            input: String::new(),
            cursor: 0,
            error: None,
        },
        ..Default::default()
    };
    let Modal::PasswordPrompt { purpose, items, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::FileSync);
    assert!(items.is_empty(), "FileSync purpose should have empty items");
}
#[test]
/// What: Verify the prompt captures typed input and cursor position.
///
/// Inputs:
/// - `PasswordPrompt` modal with a 14-character password and cursor at 14.
///
/// Output:
/// - Input text and cursor are stored verbatim.
///
/// Details:
/// - Masking happens at render time; the state holds the raw text.
fn integration_password_prompt_input_capture() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: Vec::new(),
            input: "secretpassword".to_string(),
            cursor: 14,
            error: None,
        },
        ..Default::default()
    };
    let Modal::PasswordPrompt { input, cursor, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(input, "secretpassword");
    assert_eq!(cursor, 14);
}
#[test]
/// What: Verify the prompt stores an error after a failed attempt.
///
/// Inputs:
/// - `PasswordPrompt` modal carrying an error message and cleared input.
///
/// Output:
/// - The error text is accessible and the input is empty for retry.
///
/// Details:
/// - A wrong password leaves the modal open with an explanatory error.
fn integration_password_prompt_incorrect_password_error() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: Vec::new(),
            input: String::new(),
            cursor: 0,
            error: Some("Incorrect password. Please try again.".to_string()),
        },
        ..Default::default()
    };
    let Modal::PasswordPrompt { error, input, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert!(error.is_some());
    assert_eq!(
        error.as_ref().expect("error should be Some"),
        "Incorrect password. Please try again."
    );
    // Input must be cleared so the user can retype from scratch.
    assert!(input.is_empty());
}
#[test]
/// What: Verify cancelling the password prompt clears the modal.
///
/// Inputs:
/// - `PasswordPrompt` modal with partially typed input.
///
/// Output:
/// - Modal becomes `Modal::None`.
///
/// Details:
/// - Mirrors the user pressing Escape while the prompt is open.
fn integration_password_prompt_cancellation() {
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: vec![create_test_package(
                "test-pkg",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            input: "partial".to_string(),
            cursor: 7,
            error: None,
        },
        ..Default::default()
    };
    // User cancels: the modal is dismissed.
    app.modal = Modal::None;
    assert!(matches!(app.modal, Modal::None));
}
#[test]
/// What: Test password submission transitions to PreflightExec modal.
///
/// Inputs:
/// - `PasswordPrompt` modal with password entered.
///
/// Output:
/// - Modal transitions to `PreflightExec`.
/// - Password is captured for the executor request.
///
/// Details:
/// - Simulates user submitting password for an install operation.
fn integration_password_prompt_submission_to_preflight_exec() {
    let items = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: items.clone(),
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Extract password before transition (empty/whitespace maps to None).
    let password = if let Modal::PasswordPrompt { ref input, .. } = app.modal {
        if input.trim().is_empty() {
            None
        } else {
            Some(input.clone())
        }
    } else {
        None
    };
    // Simulate transition to PreflightExec.
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        // `items` is not needed after this point, so it is moved instead of cloned.
        items,
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    // Verify modal transition.
    match app.modal {
        Modal::PreflightExec {
            items: modal_items,
            action,
            ..
        } => {
            assert_eq!(modal_items.len(), 1);
            assert_eq!(modal_items[0].name, "ripgrep");
            assert_eq!(action, PreflightAction::Install);
        }
        _ => panic!("Expected PreflightExec modal"),
    }
    // Verify password was captured.
    assert_eq!(password, Some("testpassword".to_string()));
}
#[test]
/// What: Test password prompt with multiple packages.
///
/// Inputs:
/// - `PasswordPrompt` modal with three packages (two official, one AUR).
///
/// Output:
/// - All packages are stored in the modal, in order.
///
/// Details:
/// - Verifies batch operations preserve all package information.
fn integration_password_prompt_multiple_packages() {
    let items = vec![
        create_test_package(
            "pkg1",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package(
            "pkg2",
            Source::Official {
                repo: "core".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package("pkg3", Source::Aur),
    ];
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            // `items` is not used again below, so it is moved instead of cloned.
            items,
            input: String::new(),
            cursor: 0,
            error: None,
        },
        ..Default::default()
    };
    match app.modal {
        Modal::PasswordPrompt {
            items: modal_items,
            ..
        } => {
            assert_eq!(modal_items.len(), 3);
            assert_eq!(modal_items[0].name, "pkg1");
            assert_eq!(modal_items[1].name, "pkg2");
            assert_eq!(modal_items[2].name, "pkg3");
        }
        _ => panic!("Expected PasswordPrompt modal"),
    }
}
#[test]
/// What: Test password retry clears input but preserves items.
///
/// Inputs:
/// - `PasswordPrompt` modal after an incorrect password attempt.
///
/// Output:
/// - Input is cleared, cursor reset, error set, items preserved.
///
/// Details:
/// - Simulates the retry flow following a failed authentication.
fn integration_password_prompt_retry_preserves_items() {
    let items = vec![create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    // Initial state with a (wrong) password entered.
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            // `items` is not used again below, so it is moved instead of cloned.
            items,
            input: "wrongpassword".to_string(),
            cursor: 13,
            error: None,
        },
        ..Default::default()
    };
    // Simulate incorrect password - set error and clear input.
    if let Modal::PasswordPrompt {
        ref mut input,
        ref mut cursor,
        ref mut error,
        ..
    } = app.modal
    {
        *input = String::new();
        *cursor = 0;
        *error = Some("Authentication failed. Try again.".to_string());
    }
    // Verify retry state.
    match app.modal {
        Modal::PasswordPrompt {
            items: modal_items,
            input,
            cursor,
            error,
            ..
        } => {
            // Items preserved
            assert_eq!(modal_items.len(), 1);
            assert_eq!(modal_items[0].name, "test-pkg");
            // Input cleared for retry
            assert!(input.is_empty());
            assert_eq!(cursor, 0);
            // Error message set
            assert!(error.is_some());
        }
        _ => panic!("Expected PasswordPrompt modal"),
    }
}
#[test]
/// What: Verify whitespace-only input is treated as no password.
///
/// Inputs:
/// - `PasswordPrompt` modal whose input is three spaces.
///
/// Output:
/// - The extracted password is `None`.
///
/// Details:
/// - Operations that need no sudo may be submitted with a blank password.
fn integration_password_prompt_empty_password() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: Vec::new(),
            input: "   ".to_string(), // Whitespace only
            cursor: 3,
            error: None,
        },
        ..Default::default()
    };
    // Extract the password the same way the submit handler would.
    let password = match app.modal {
        Modal::PasswordPrompt { ref input, .. } if !input.trim().is_empty() => Some(input.clone()),
        _ => None,
    };
    assert!(password.is_none(), "Whitespace-only password should be None");
}
#[test]
/// What: Test password prompt modal timeout error state.
///
/// Inputs:
/// - `PasswordPrompt` modal that has timed out.
///
/// Output:
/// - Error message indicates timeout; modal can be cancelled afterwards.
///
/// Details:
/// - Verifies timeout error state in the password prompt modal.
/// - Tests cancellation after timeout.
fn integration_password_prompt_timeout_error() {
    let items = vec![create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    // Simulate password prompt with timeout error.
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            // `items` is not used again below, so it is moved instead of cloned.
            items,
            input: String::new(),
            cursor: 0,
            error: Some("Password prompt timed out. Please try again.".to_string()),
        },
        ..Default::default()
    };
    // Verify timeout error state.
    match app.modal {
        Modal::PasswordPrompt {
            items: modal_items,
            error,
            input,
            ..
        } => {
            assert_eq!(modal_items.len(), 1);
            assert!(error.is_some());
            assert!(error.as_ref().unwrap().contains("timed out"));
            assert!(input.is_empty()); // Input cleared after timeout
        }
        _ => panic!("Expected PasswordPrompt modal"),
    }
    // Simulate cancellation after timeout.
    app.modal = Modal::None;
    assert!(matches!(app.modal, Modal::None));
}
#[test]
/// What: Test password prompt timeout handling preserves operation context.
///
/// Inputs:
/// - `PasswordPrompt` modal with a timeout error.
///
/// Output:
/// - Items and purpose are preserved after the timeout so the user can retry.
///
/// Details:
/// - Verifies timeout doesn't lose the pending operation's context.
fn integration_password_prompt_timeout_preserves_context() {
    let items = vec![create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            // `items` is not used again below, so it is moved instead of cloned.
            items,
            input: String::new(),
            cursor: 0,
            error: Some("Password prompt timed out. Please try again.".to_string()),
        },
        ..Default::default()
    };
    // Verify context is preserved.
    match app.modal {
        Modal::PasswordPrompt {
            purpose,
            items: modal_items,
            error,
            ..
        } => {
            assert_eq!(purpose, PasswordPurpose::Install);
            assert_eq!(modal_items.len(), 1);
            assert_eq!(modal_items[0].name, "test-pkg");
            assert!(error.is_some());
            // User can retry with same context
        }
        _ => panic!("Expected PasswordPrompt modal"),
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/reinstall_confirmation.rs | tests/install/reinstall_confirmation.rs | //! Integration tests for reinstall confirmation modal.
//!
//! Tests cover:
//! - Single package reinstall confirmation
//! - Batch reinstall with mixed installed/new packages
//! - Direct install reinstall flow
//! - Cancel reinstall returns to previous state
//! - Header chips in reinstall confirmation
#![cfg(test)]
use pacsea::state::{
AppState, Modal, PackageItem, Source,
modal::PreflightHeaderChips,
};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
/// What: Produce a fixed-shape `PackageItem` for reinstall tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - A `PackageItem` at version `1.0.0` with empty optional fields
///
/// Details:
/// - Shared helper so every test package differs only in name/source
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: String::from(name),
        version: "1.0.0".to_string(),
        description: String::default(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Verify `ConfirmReinstall` modal creation for one package.
///
/// Inputs:
/// - A single installed package used for both `items` and `all_items`.
///
/// Output:
/// - Both lists hold the package; chips carry default values.
///
/// Details:
/// - Baseline structure check for the reinstall confirmation modal.
fn integration_reinstall_confirmation_single_package() {
    let installed_pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![installed_pkg.clone()],
            all_items: vec![installed_pkg],
            header_chips: PreflightHeaderChips::default(),
        },
        ..Default::default()
    };
    let Modal::ConfirmReinstall {
        items,
        all_items,
        header_chips,
    } = app.modal
    else {
        panic!("Expected ConfirmReinstall modal");
    };
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].name, "ripgrep");
    assert_eq!(all_items.len(), 1);
    assert_eq!(header_chips.package_count, 0); // Default
}
#[test]
/// What: Verify `ConfirmReinstall` separates installed from new packages.
///
/// Inputs:
/// - Two installed packages plus one new AUR package.
///
/// Output:
/// - `items` lists only installed packages; `all_items` lists everything.
///
/// Details:
/// - Header chips describe the full transaction, not just the reinstalls.
fn integration_reinstall_confirmation_mixed_packages() {
    let installed_pkg1 = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let installed_pkg2 = create_test_package(
        "fd",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let new_pkg = create_test_package("new-package", Source::Aur);
    // Only installed packages in items, all packages in all_items.
    let app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![installed_pkg1.clone(), installed_pkg2.clone()],
            all_items: vec![installed_pkg1, installed_pkg2, new_pkg],
            header_chips: PreflightHeaderChips {
                package_count: 3,
                download_bytes: 1024,
                install_delta_bytes: 2048,
                aur_count: 1,
                risk_score: 5,
                risk_level: pacsea::state::modal::RiskLevel::Low,
            },
        },
        ..Default::default()
    };
    let Modal::ConfirmReinstall {
        items,
        all_items,
        header_chips,
    } = app.modal
    else {
        panic!("Expected ConfirmReinstall modal");
    };
    // Only installed packages appear in the confirmation list.
    assert_eq!(items.len(), 2);
    assert_eq!(items[0].name, "ripgrep");
    assert_eq!(items[1].name, "fd");
    // Every package is kept for the actual installation.
    assert_eq!(all_items.len(), 3);
    assert!(all_items.iter().any(|p| p.name == "new-package"));
    // Chips describe the whole transaction.
    assert_eq!(header_chips.package_count, 3);
    assert_eq!(header_chips.aur_count, 1);
}
#[test]
/// What: Verify cancelling reinstall confirmation clears the modal.
///
/// Inputs:
/// - `ConfirmReinstall` modal that the user dismisses.
///
/// Output:
/// - Modal becomes `Modal::None`.
///
/// Details:
/// - Mirrors Escape or 'n' while the confirmation is shown.
fn integration_reinstall_confirmation_cancel() {
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![pkg.clone()],
            all_items: vec![pkg],
            header_chips: PreflightHeaderChips::default(),
        },
        ..Default::default()
    };
    // User declines: dismiss the confirmation.
    app.modal = Modal::None;
    assert!(matches!(app.modal, Modal::None));
}
#[test]
/// What: Verify confirming reinstall leads to the password prompt.
///
/// Inputs:
/// - `ConfirmReinstall` modal for an official package, then confirmation.
///
/// Output:
/// - Modal becomes `PasswordPrompt` with Install purpose and the package.
///
/// Details:
/// - Official packages require sudo, hence the password step.
fn integration_reinstall_confirmation_proceeds_to_password() {
    use pacsea::state::modal::PasswordPurpose;
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![pkg.clone()],
            all_items: vec![pkg.clone()],
            header_chips: PreflightHeaderChips::default(),
        },
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // User confirms: official packages go through the password prompt.
    app.modal = Modal::PasswordPrompt {
        purpose: PasswordPurpose::Install,
        items: vec![pkg],
        input: String::new(),
        cursor: 0,
        error: None,
    };
    let Modal::PasswordPrompt { purpose, items, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::Install);
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].name, "ripgrep");
}
#[test]
/// What: Verify AUR-only reinstalls go straight to execution.
///
/// Inputs:
/// - `ConfirmReinstall` modal containing one AUR package, then confirmation.
///
/// Output:
/// - Modal becomes `PreflightExec` without a password step.
///
/// Details:
/// - AUR builds run as the user, so no sudo password is collected.
fn integration_reinstall_confirmation_aur_skips_password() {
    use pacsea::state::{PreflightAction, PreflightTab};
    let pkg = create_test_package("yay-bin", Source::Aur);
    let mut app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![pkg.clone()],
            all_items: vec![pkg.clone()],
            header_chips: PreflightHeaderChips::default(),
        },
        pending_exec_header_chips: Some(PreflightHeaderChips::default()),
        ..Default::default()
    };
    // User confirms: AUR packages transition directly to PreflightExec.
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items: vec![pkg],
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    let Modal::PreflightExec { items, action, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].name, "yay-bin");
    assert_eq!(action, PreflightAction::Install);
}
#[test]
/// What: Test reinstall confirmation with header chips.
///
/// Inputs:
/// - ConfirmReinstall modal with populated header chips.
///
/// Output:
/// - Header chips are preserved and accessible.
///
/// Details:
/// - Verifies header chip data is available for display.
fn integration_reinstall_confirmation_header_chips() {
    use pacsea::state::modal::RiskLevel;
    let pkg = create_test_package(
        "large-package",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let expected_chips = PreflightHeaderChips {
        package_count: 5,
        download_bytes: 50 * 1024 * 1024,       // 50 MB
        install_delta_bytes: 100 * 1024 * 1024, // 100 MB
        aur_count: 2,
        risk_score: 15,
        risk_level: RiskLevel::Medium,
    };
    let app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![pkg.clone()],
            all_items: vec![pkg],
            header_chips: expected_chips.clone(),
        },
        ..Default::default()
    };
    let Modal::ConfirmReinstall { header_chips, .. } = app.modal else {
        panic!("Expected ConfirmReinstall modal");
    };
    assert_eq!(header_chips.package_count, 5);
    assert_eq!(header_chips.download_bytes, 50 * 1024 * 1024);
    assert_eq!(header_chips.install_delta_bytes, 100 * 1024 * 1024);
    assert_eq!(header_chips.aur_count, 2);
    assert_eq!(header_chips.risk_score, 15);
    assert_eq!(header_chips.risk_level, RiskLevel::Medium);
}
#[test]
/// What: Test reinstall confirmation with empty all_items is edge case.
///
/// Inputs:
/// - ConfirmReinstall modal with empty all_items.
///
/// Output:
/// - Modal handles empty case gracefully.
///
/// Details:
/// - Edge case where no packages would be installed.
fn integration_reinstall_confirmation_empty_all_items() {
    let app = AppState {
        modal: Modal::ConfirmReinstall {
            items: Vec::new(),
            all_items: Vec::new(),
            header_chips: PreflightHeaderChips::default(),
        },
        ..Default::default()
    };
    let Modal::ConfirmReinstall {
        items, all_items, ..
    } = app.modal
    else {
        panic!("Expected ConfirmReinstall modal");
    };
    assert!(items.is_empty());
    assert!(all_items.is_empty());
}
#[test]
/// What: Test ConfirmBatchUpdate modal state creation.
///
/// Inputs:
/// - Packages for batch update.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies batch update confirmation modal can be created.
fn integration_confirm_batch_update_modal() {
    let update_set = vec![
        create_test_package(
            "pkg1",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package("pkg2", Source::Aur),
    ];
    let app = AppState {
        modal: Modal::ConfirmBatchUpdate {
            items: update_set.clone(),
            dry_run: false,
        },
        ..Default::default()
    };
    let Modal::ConfirmBatchUpdate { items, dry_run } = app.modal else {
        panic!("Expected ConfirmBatchUpdate modal");
    };
    assert_eq!(items.len(), 2);
    assert!(!dry_run);
}
#[test]
/// What: Test ConfirmBatchUpdate with dry_run flag.
///
/// Inputs:
/// - Batch update with dry_run enabled.
///
/// Output:
/// - dry_run flag is correctly set.
///
/// Details:
/// - Verifies dry_run mode is preserved in modal state.
fn integration_confirm_batch_update_dry_run() {
    let only_pkg = create_test_package(
        "pkg1",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let app = AppState {
        modal: Modal::ConfirmBatchUpdate {
            items: vec![only_pkg],
            dry_run: true,
        },
        ..Default::default()
    };
    // A literal-bool pattern asserts both the variant and the flag at once.
    assert!(
        matches!(app.modal, Modal::ConfirmBatchUpdate { dry_run: true, .. }),
        "Expected ConfirmBatchUpdate modal with dry_run set"
    );
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/executor_output.rs | tests/install/executor_output.rs | //! Integration tests for executor output streaming.
//!
//! Tests cover:
//! - `ExecutorOutput::Line` message generation
//! - `ExecutorOutput::ReplaceLastLine` for progress bars
//! - `ExecutorOutput::Finished` with success/failure states
//! - `ExecutorOutput::Error` handling
//! - Large output handling in PreflightExec modal
#![cfg(test)]
use pacsea::install::{ExecutorOutput, ExecutorRequest};
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source,
modal::PreflightHeaderChips,
};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: String::from(name),
        version: String::from("1.0.0"),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test ExecutorOutput::Line variant creation.
///
/// Inputs:
/// - Line of output text.
///
/// Output:
/// - ExecutorOutput::Line with the text.
///
/// Details:
/// - Verifies Line variant can be created and matched.
fn integration_executor_output_line() {
    let msg = ExecutorOutput::Line("Downloading packages...".to_string());
    let ExecutorOutput::Line(contents) = msg else {
        panic!("Expected ExecutorOutput::Line");
    };
    assert_eq!(contents, "Downloading packages...");
}
#[test]
/// What: Test ExecutorOutput::ReplaceLastLine variant for progress bars.
///
/// Inputs:
/// - Progress bar text with carriage return semantics.
///
/// Output:
/// - ExecutorOutput::ReplaceLastLine with the text.
///
/// Details:
/// - Used for progress bars that overwrite the previous line.
fn integration_executor_output_replace_last_line() {
    let progress = ExecutorOutput::ReplaceLastLine("[####----] 50%".to_string());
    let ExecutorOutput::ReplaceLastLine(text) = progress else {
        panic!("Expected ExecutorOutput::ReplaceLastLine");
    };
    assert_eq!(text, "[####----] 50%");
}
#[test]
/// What: Test ExecutorOutput::Finished variant with success.
///
/// Inputs:
/// - Finished state with success=true and exit_code=0.
///
/// Output:
/// - ExecutorOutput::Finished with correct fields.
///
/// Details:
/// - Indicates successful command completion.
fn integration_executor_output_finished_success() {
    let outcome = ExecutorOutput::Finished {
        success: true,
        exit_code: Some(0),
        failed_command: None,
    };
    let ExecutorOutput::Finished {
        success, exit_code, ..
    } = outcome
    else {
        panic!("Expected ExecutorOutput::Finished");
    };
    assert!(success);
    assert_eq!(exit_code, Some(0));
}
#[test]
/// What: Test ExecutorOutput::Finished variant with failure.
///
/// Inputs:
/// - Finished state with success=false and exit_code=1.
///
/// Output:
/// - ExecutorOutput::Finished with failure state.
///
/// Details:
/// - Indicates failed command completion.
fn integration_executor_output_finished_failure() {
    let outcome = ExecutorOutput::Finished {
        success: false,
        exit_code: Some(1),
        failed_command: None,
    };
    let ExecutorOutput::Finished {
        success, exit_code, ..
    } = outcome
    else {
        panic!("Expected ExecutorOutput::Finished");
    };
    assert!(!success);
    assert_eq!(exit_code, Some(1));
}
#[test]
/// What: Test ExecutorOutput::Finished with no exit code.
///
/// Inputs:
/// - Finished state with exit_code=None.
///
/// Output:
/// - ExecutorOutput::Finished with None exit_code.
///
/// Details:
/// - Some processes may not provide an exit code.
fn integration_executor_output_finished_no_exit_code() {
    let outcome = ExecutorOutput::Finished {
        success: false,
        exit_code: None,
        failed_command: None,
    };
    let ExecutorOutput::Finished {
        success, exit_code, ..
    } = outcome
    else {
        panic!("Expected ExecutorOutput::Finished");
    };
    assert!(!success);
    assert!(exit_code.is_none());
}
#[test]
/// What: Test ExecutorOutput::Error variant.
///
/// Inputs:
/// - Error message string.
///
/// Output:
/// - ExecutorOutput::Error with the message.
///
/// Details:
/// - Used for PTY or command execution errors.
fn integration_executor_output_error() {
    let failure = ExecutorOutput::Error("Failed to create PTY".to_string());
    let ExecutorOutput::Error(reason) = failure else {
        panic!("Expected ExecutorOutput::Error");
    };
    assert_eq!(reason, "Failed to create PTY");
}
#[test]
/// What: Test ExecutorOutput::Error with network-specific error messages.
///
/// Inputs:
/// - Network-related error messages (e.g., "Failed to connect", "DNS resolution failed").
///
/// Output:
/// - ExecutorOutput::Error with network error message.
///
/// Details:
/// - Verifies network errors are properly represented as ExecutorOutput::Error.
/// - Tests error propagation from executor to UI.
fn integration_executor_output_network_error() {
    let cases = [
        "Failed to connect to host (network unreachable)",
        "Could not resolve host (DNS/network issue)",
        "Operation timeout",
        "HTTP error from server (code unknown)",
        "HTTP 404: Resource not found (package may not exist in repository)",
        "HTTP 503: Service temporarily unavailable",
    ];
    for case in cases {
        let ExecutorOutput::Error(msg) = ExecutorOutput::Error(case.to_string()) else {
            panic!("Expected ExecutorOutput::Error");
        };
        assert!(msg.contains("network") || msg.contains("timeout") || msg.contains("HTTP"));
        // Verify error can be propagated to UI
        assert!(!msg.is_empty());
    }
}
#[test]
/// What: Test network error propagation from executor to PreflightExec modal.
///
/// Inputs:
/// - ExecutorOutput::Error with network failure message.
///
/// Output:
/// - Error is properly handled and displayed in PreflightExec modal.
///
/// Details:
/// - Verifies error propagation mechanism works correctly for network failures.
fn integration_executor_output_network_error_propagation() {
    let error_output =
        ExecutorOutput::Error("Failed to connect to host (network unreachable)".to_string());
    // Simulate error being received and added to PreflightExec modal
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package(
                "test-pkg",
                Source::Official {
                    repo: "core".into(),
                    arch: "x86_64".into(),
                },
            )],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Unwrap the error first, then append the formatted message to the modal log.
    if let ExecutorOutput::Error(msg) = error_output {
        if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
            log_lines.push(format!("ERROR: {msg}"));
        }
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 1);
    assert!(log_lines[0].contains("ERROR:"));
    assert!(log_lines[0].contains("network unreachable"));
}
#[test]
/// What: Test PreflightExec modal log_lines append behavior.
///
/// Inputs:
/// - PreflightExec modal with log_lines.
///
/// Output:
/// - log_lines correctly stores output.
///
/// Details:
/// - Simulates output being appended to log panel.
fn integration_preflight_exec_log_lines_append() {
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Simulate receiving output lines
    let incoming = [
        ":: Synchronizing package databases...",
        " core is up to date",
        " extra is up to date",
    ];
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        for line in incoming {
            log_lines.push(line.to_string());
        }
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 3);
    assert_eq!(log_lines[0], ":: Synchronizing package databases...");
    assert_eq!(log_lines[1], " core is up to date");
    assert_eq!(log_lines[2], " extra is up to date");
}
#[test]
/// What: Test PreflightExec progress bar replacement in log_lines.
///
/// Inputs:
/// - PreflightExec modal with progress bar updates.
///
/// Output:
/// - Last line is replaced for progress bar updates.
///
/// Details:
/// - Simulates ReplaceLastLine behavior for progress bars.
fn integration_preflight_exec_progress_bar_replacement() {
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec!["Downloading ripgrep...".to_string()],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Simulate progress bar updates (ReplaceLastLine behavior): each frame
    // removes the previous last line before pushing the new one.
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        for frame in ["[###-----] 25%", "[######--] 75%", "[########] 100%"] {
            if !log_lines.is_empty() {
                log_lines.pop();
            }
            log_lines.push(frame.to_string());
        }
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    // Only the final progress bar should remain
    assert_eq!(log_lines.len(), 1);
    assert_eq!(log_lines[0], "[########] 100%");
}
#[test]
/// What: Test large output handling in PreflightExec.
///
/// Inputs:
/// - PreflightExec modal with many output lines.
///
/// Output:
/// - All lines are stored correctly.
///
/// Details:
/// - Verifies handling of large output from package operations.
fn integration_preflight_exec_large_output() {
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Simulate large output (e.g., verbose package list)
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        log_lines.extend(
            (0..1000).map(|i| format!("Processing file {i}/1000: /usr/lib/package/file{i}.so")),
        );
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 1000);
    assert!(log_lines[0].contains("file 0"));
    assert!(log_lines[999].contains("file 999"));
}
#[test]
/// What: Test ExecutorRequest::Install creation.
///
/// Inputs:
/// - Package items and optional password.
///
/// Output:
/// - ExecutorRequest::Install with correct fields.
///
/// Details:
/// - Verifies Install request can be created for executor.
fn integration_executor_request_install() {
    let selection = vec![
        create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package("yay-bin", Source::Aur),
    ];
    let request = ExecutorRequest::Install {
        items: selection.clone(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::Install {
        items,
        password,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::Install");
    };
    assert_eq!(items.len(), 2);
    assert_eq!(items[0].name, "ripgrep");
    assert_eq!(items[1].name, "yay-bin");
    assert_eq!(password, Some("testpassword".to_string()));
    assert!(!dry_run);
}
#[test]
/// What: Test ExecutorRequest::Remove creation.
///
/// Inputs:
/// - Package names, password, and cascade mode.
///
/// Output:
/// - ExecutorRequest::Remove with correct fields.
///
/// Details:
/// - Verifies Remove request can be created for executor.
fn integration_executor_request_remove() {
    use pacsea::state::modal::CascadeMode;
    let targets = vec!["pkg1".to_string(), "pkg2".to_string()];
    let request = ExecutorRequest::Remove {
        names: targets.clone(),
        password: Some("testpassword".to_string()),
        cascade: CascadeMode::Cascade,
        dry_run: false,
    };
    let ExecutorRequest::Remove {
        names,
        password,
        cascade,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::Remove");
    };
    assert_eq!(names.len(), 2);
    assert_eq!(names[0], "pkg1");
    assert_eq!(password, Some("testpassword".to_string()));
    assert_eq!(cascade, CascadeMode::Cascade);
    assert!(!dry_run);
}
#[test]
/// What: Test ExecutorRequest::Downgrade creation.
///
/// Inputs:
/// - Package names and optional password.
///
/// Output:
/// - ExecutorRequest::Downgrade with correct fields.
///
/// Details:
/// - Verifies Downgrade request can be created for executor.
fn integration_executor_request_downgrade() {
    let targets = vec!["pkg1".to_string()];
    let request = ExecutorRequest::Downgrade {
        names: targets.clone(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::Downgrade {
        names,
        password,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::Downgrade");
    };
    assert_eq!(names.len(), 1);
    assert_eq!(names[0], "pkg1");
    assert_eq!(password, Some("testpassword".to_string()));
    assert!(!dry_run);
}
#[test]
/// What: Test ExecutorRequest::CustomCommand creation.
///
/// Inputs:
/// - Command string and optional password.
///
/// Output:
/// - ExecutorRequest::CustomCommand with correct fields.
///
/// Details:
/// - Verifies CustomCommand request can be created for executor.
fn integration_executor_request_custom_command() {
    let request = ExecutorRequest::CustomCommand {
        command: "makepkg -si".to_string(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::CustomCommand {
        command,
        password,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::CustomCommand");
    };
    assert_eq!(command, "makepkg -si");
    assert_eq!(password, Some("testpassword".to_string()));
    assert!(!dry_run);
}
#[test]
/// What: Test ExecutorRequest::Update creation.
///
/// Inputs:
/// - Commands array and optional password.
///
/// Output:
/// - ExecutorRequest::Update with correct fields.
///
/// Details:
/// - Verifies Update request can be created for executor.
fn integration_executor_request_update() {
    let planned = vec![
        "sudo pacman -Syu --noconfirm".to_string(),
        "paru -Syu --noconfirm".to_string(),
    ];
    let request = ExecutorRequest::Update {
        commands: planned.clone(),
        password: Some("testpassword".to_string()),
        dry_run: false,
    };
    let ExecutorRequest::Update {
        commands,
        password,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::Update");
    };
    assert_eq!(commands.len(), 2);
    assert!(commands[0].contains("pacman"));
    assert!(commands[1].contains("paru"));
    assert_eq!(password, Some("testpassword".to_string()));
    assert!(!dry_run);
}
#[test]
/// What: Test ExecutorRequest::Scan creation.
///
/// Inputs:
/// - Package name and scanner flags.
///
/// Output:
/// - ExecutorRequest::Scan with correct fields.
///
/// Details:
/// - Verifies Scan request can be created for executor.
fn integration_executor_request_scan() {
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: true,
        do_virustotal: false,
        do_custom: false,
        dry_run: false,
    };
    let ExecutorRequest::Scan {
        package,
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        dry_run,
    } = request
    else {
        panic!("Expected ExecutorRequest::Scan");
    };
    assert_eq!(package, "test-pkg");
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(!do_semgrep);
    assert!(do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
    assert!(!dry_run);
}
#[test]
/// What: Test verbose mode in PreflightExec modal.
///
/// Inputs:
/// - PreflightExec modal with verbose=true.
///
/// Output:
/// - verbose flag is correctly set.
///
/// Details:
/// - Verifies verbose mode can be toggled for detailed output.
fn integration_preflight_exec_verbose_mode() {
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: true,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // A literal-bool pattern checks both the variant and the verbose flag.
    assert!(
        matches!(app.modal, Modal::PreflightExec { verbose: true, .. }),
        "Expected PreflightExec modal with verbose enabled"
    );
}
#[test]
/// What: Test abortable flag in PreflightExec modal.
///
/// Inputs:
/// - PreflightExec modal with abortable=true.
///
/// Output:
/// - abortable flag is correctly set.
///
/// Details:
/// - Verifies abort capability can be enabled for long operations.
fn integration_preflight_exec_abortable() {
    let pkg = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: true,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // A literal-bool pattern checks both the variant and the abortable flag.
    assert!(
        matches!(app.modal, Modal::PreflightExec { abortable: true, .. }),
        "Expected PreflightExec modal with abort enabled"
    );
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/optional_deps_integration.rs | tests/install/optional_deps_integration.rs | //! Integration tests for optional dependencies installation and setup.
//!
//! Tests cover:
//! - `OptionalDeps` modal state
//! - Optional dependency installation
//! - Setup flows (virustotal, aur-sleuth)
//!
//! Note: These tests are expected to fail initially as optional deps installation currently spawns terminals.
#![cfg(test)]
use pacsea::state::{AppState, Modal, types::OptionalDepRow};
/// What: Create a test optional dependency row.
///
/// Inputs:
/// - `package`: Package name
/// - `installed`: Whether package is installed
/// - `selectable`: Whether row is selectable
///
/// Output:
/// - `OptionalDepRow` ready for testing
///
/// Details:
/// - Helper to create test optional dependency rows
fn create_test_row(package: &str, installed: bool, selectable: bool) -> OptionalDepRow {
    OptionalDepRow {
        label: format!("Test: {package}"),
        package: package.to_string(),
        installed,
        selectable,
        note: None,
    }
}
#[test]
/// What: Test `OptionalDeps` modal state creation.
///
/// Inputs:
/// - `OptionalDeps` modal with dependency rows.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies optional dependencies modal can be created and accessed.
fn integration_optional_deps_modal_state() {
    let rows = vec![
        create_test_row("paru", false, true),
        create_test_row("yay", false, true),
        create_test_row("nvim", true, false),
    ];
    let app = AppState {
        modal: Modal::OptionalDeps { rows, selected: 0 },
        ..Default::default()
    };
    let Modal::OptionalDeps { rows, selected } = &app.modal else {
        panic!("Expected OptionalDeps modal");
    };
    assert_eq!(rows.len(), 3);
    assert_eq!(*selected, 0);
    let first = &rows[0];
    assert_eq!(first.package, "paru");
    assert!(!first.installed);
    assert!(first.selectable);
    let third = &rows[2];
    assert_eq!(third.package, "nvim");
    assert!(third.installed);
    assert!(!third.selectable);
}
#[test]
/// What: Test optional dependency installation command structure.
///
/// Inputs:
/// - Package name for installation.
///
/// Output:
/// - Command structure is correct.
///
/// Details:
/// - Verifies command format for different package types.
/// - Note: Actual execution spawns terminal, so this tests command structure only.
fn integration_optional_deps_command_structure() {
    // Official repo install must go through pacman with the safe flags.
    let official_cmd = "sudo pacman -S --needed --noconfirm test-pkg";
    for needle in ["pacman", "--needed", "--noconfirm"] {
        assert!(official_cmd.contains(needle));
    }
    // AUR helper bootstrap (paru/yay) clones into a temporary directory and builds with makepkg.
    let aur_cmd_paru = "tmp=$(mktemp -d) && cd \"$tmp\" && git clone https://aur.archlinux.org/paru.git && cd paru && makepkg -si";
    for needle in ["paru", "makepkg", "mktemp"] {
        assert!(aur_cmd_paru.contains(needle));
    }
    // Dry-run variant only echoes the command.
    let dry_run_cmd = "echo DRY RUN: sudo pacman -S --needed --noconfirm test-pkg";
    assert!(dry_run_cmd.contains("DRY RUN"));
}
#[test]
/// What: Test virustotal setup flow.
///
/// Inputs:
/// - `OptionalDeps` modal with virustotal-setup row.
///
/// Output:
/// - `VirusTotalSetup` modal is opened.
///
/// Details:
/// - Verifies that virustotal-setup opens the setup modal.
fn integration_optional_deps_virustotal_setup() {
    let rows = vec![create_test_row("virustotal-setup", false, true)];
    let mut app = AppState {
        modal: Modal::OptionalDeps { rows, selected: 0 },
        ..Default::default()
    };
    // The actual flow would call handle_optional_deps_enter, which should
    // open the VirusTotalSetup modal; emulate that transition here.
    app.modal = Modal::VirusTotalSetup {
        input: String::new(),
        cursor: 0,
    };
    assert!(
        matches!(app.modal, Modal::VirusTotalSetup { .. }),
        "Expected VirusTotalSetup modal"
    );
}
#[test]
/// What: Test aur-sleuth setup flow.
///
/// Inputs:
/// - `OptionalDeps` modal with aur-sleuth-setup row.
///
/// Output:
/// - Setup command is structured correctly.
///
/// Details:
/// - Verifies that aur-sleuth-setup creates appropriate command.
/// - Note: Actual execution spawns terminal.
fn integration_optional_deps_aur_sleuth_setup() {
    // Test that aur-sleuth setup command structure is correct
    // The command is a complex shell script, so we just verify it exists
    let setup_script = r#"(set -e
if ! command -v aur-sleuth >/dev/null 2>&1; then
echo "aur-sleuth not found."
else
echo "aur-sleuth already installed; continuing to setup"
fi"#;
    for needle in ["aur-sleuth", "command -v"] {
        assert!(setup_script.contains(needle));
    }
}
#[test]
/// What: Test optional dependency row filtering.
///
/// Inputs:
/// - `OptionalDeps` modal with mix of installed and uninstalled packages.
///
/// Output:
/// - Rows are correctly marked as installed/selectable.
///
/// Details:
/// - Verifies that row state reflects installation status.
fn integration_optional_deps_row_filtering() {
    // A plain array is enough for these checks; the previous `[..].to_vec()`
    // forced a needless heap allocation without changing what is asserted.
    let rows = [
        create_test_row("installed-pkg", true, false),
        create_test_row("uninstalled-pkg", false, true),
    ];
    assert!(rows[0].installed);
    assert!(!rows[0].selectable);
    assert!(!rows[1].installed);
    assert!(rows[1].selectable);
}
#[test]
/// What: Test that optional deps installation uses `ExecutorRequest` instead of spawning terminals.
///
/// Inputs:
/// - Optional dependency installation structure verification.
///
/// Output:
/// - Verifies that `ExecutorRequest` enum includes `CustomCommand` variant for special packages.
/// - Verifies that `PreflightExec` modal structure supports optional deps installation.
///
/// Details:
/// - Tests that the executor pattern infrastructure is in place for optional deps.
/// - This test verifies the code structure supports executor pattern usage.
fn integration_optional_deps_uses_executor_not_terminal() {
    use pacsea::install::ExecutorRequest;
    // Verify ExecutorRequest has CustomCommand variant for special packages
    let custom_cmd = ExecutorRequest::CustomCommand {
        command: "test command".to_string(),
        password: None,
        dry_run: false,
    };
    assert!(
        matches!(custom_cmd, ExecutorRequest::CustomCommand { .. }),
        "ExecutorRequest::CustomCommand variant should exist"
    );
    // Verify PreflightExec modal can be created for optional deps
    let item = pacsea::state::PackageItem {
        name: "test-pkg".to_string(),
        version: String::new(),
        description: String::new(),
        source: pacsea::state::Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    let modal = Modal::PreflightExec {
        items: vec![item],
        action: pacsea::state::PreflightAction::Install,
        tab: pacsea::state::PreflightTab::Summary,
        verbose: false,
        log_lines: Vec::new(),
        abortable: false,
        header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
        success: None,
    };
    assert!(
        matches!(modal, Modal::PreflightExec { .. }),
        "PreflightExec modal should be creatable for optional deps"
    );
    // Verify that AppState has pending_executor_request field
    let app = AppState {
        pending_executor_request: Some(ExecutorRequest::CustomCommand {
            command: "test command".to_string(),
            password: None,
            dry_run: false,
        }),
        ..Default::default()
    };
    assert!(
        app.pending_executor_request.is_some(),
        "AppState should support pending_executor_request for executor pattern"
    );
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/mod.rs | tests/install/mod.rs | //! Integration and UI tests for install operations.
// Direct (preflight-bypassing) install flows and their UI.
mod direct_install_integration;
mod direct_install_ui;
// Executor output streaming (Line/ReplaceLastLine/Finished/Error).
mod executor_output;
// Standard install flows and their UI.
mod install_integration;
mod install_ui;
// Optional-dependency installation/setup flows and their UI.
mod optional_deps_integration;
mod optional_deps_ui;
// Password prompt and reinstall confirmation modals.
mod password_prompt;
mod reinstall_confirmation;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/direct_install_integration.rs | tests/install/direct_install_integration.rs | //! Integration tests for direct install/remove operations (bypassing preflight).
//!
//! Tests cover:
//! - Direct install flow for single package
//! - Direct install flow for multiple packages
//! - Direct remove flow
//! - Password prompt handling
//! - Executor request handling
//! - Reinstall/batch update confirmation
#![cfg(test)]
use pacsea::install::ExecutorRequest;
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source, modal::CascadeMode,
modal::PasswordPurpose,
};
/// What: Create a test package item with specified source.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages with consistent structure
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        name: name.to_owned(),
        version: "1.0.0".to_owned(),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test direct install for single official package (requires password).
///
/// Inputs:
/// - Official package item
///
/// Output:
/// - Password prompt modal is shown
///
/// Details:
/// - Verifies that official packages trigger password prompt
fn integration_direct_install_single_official() {
    let mut app = AppState::default();
    let official = create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    pacsea::install::start_integrated_install(&mut app, &official, false);
    // Verify password prompt is shown (or Alert if user is locked out)
    match &app.modal {
        Modal::PasswordPrompt { purpose, items, .. } => {
            assert_eq!(*purpose, PasswordPurpose::Install);
            assert_eq!(items.len(), 1);
            assert_eq!(items[0].name, "ripgrep");
        }
        // User might be locked out - this is acceptable in test environment;
        // the important thing is that faillock status was checked.
        Modal::Alert { .. } => {}
        _ => panic!("Expected PasswordPrompt or Alert modal for official package"),
    }
}
#[test]
/// What: Test direct install for single AUR package (password prompt shown).
///
/// Inputs:
/// - AUR package item
///
/// Output:
/// - `PasswordPrompt` modal is shown
///
/// Details:
/// - AUR installs also require the password prompt (sudo credential caching)
fn integration_direct_install_single_aur() {
    let mut app = AppState::default();
    let pkg = create_test_package("yay-bin", Source::Aur);
    pacsea::install::start_integrated_install(&mut app, &pkg, false);
    // AUR packages now go through the same password flow as official ones.
    match &app.modal {
        Modal::PasswordPrompt { items, purpose, .. } => {
            assert_eq!(items.len(), 1);
            assert_eq!(items[0].name, "yay-bin");
            assert!(matches!(
                *purpose,
                pacsea::state::modal::PasswordPurpose::Install
            ));
        }
        _ => panic!("Expected PasswordPrompt modal for AUR package"),
    }
}
#[test]
/// What: Test direct install for multiple packages with official packages.
///
/// Inputs:
/// - Multiple package items including official packages
///
/// Output:
/// - Password prompt modal is shown
///
/// Details:
/// - Verifies that batch install with official packages triggers password prompt
fn integration_direct_install_multiple_with_official() {
    let mut app = AppState::default();
    let batch = vec![
        create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        ),
        create_test_package("yay-bin", Source::Aur),
    ];
    pacsea::install::start_integrated_install_all(&mut app, &batch, false);
    // A password prompt is expected; Alert is tolerated when the user is locked out.
    if let Modal::PasswordPrompt {
        purpose,
        items: modal_items,
        ..
    } = &app.modal
    {
        assert_eq!(*purpose, PasswordPurpose::Install);
        assert_eq!(modal_items.len(), 2);
    } else if matches!(app.modal, Modal::Alert { .. }) {
        // Lock-out is acceptable in a test environment; faillock status was still checked.
    } else {
        panic!("Expected PasswordPrompt or Alert modal for packages with official");
    }
}
#[test]
/// What: Test direct remove flow.
///
/// Inputs:
/// - Package names to remove
///
/// Output:
/// - Password prompt modal is shown
///
/// Details:
/// - Verifies that remove always shows password prompt and stores the cascade mode
fn integration_direct_remove() {
    let mut app = AppState::default();
    let names: Vec<String> = ["test-package-1", "test-package-2"]
        .into_iter()
        .map(str::to_string)
        .collect();
    pacsea::install::start_integrated_remove_all(&mut app, &names, false, CascadeMode::Basic);
    // A password prompt is expected; Alert is tolerated when the user is locked out.
    if let Modal::PasswordPrompt { purpose, items, .. } = &app.modal {
        assert_eq!(*purpose, PasswordPurpose::Remove);
        assert_eq!(items.len(), 2);
        assert_eq!(items[0].name, "test-package-1");
        assert_eq!(items[1].name, "test-package-2");
    } else if matches!(app.modal, Modal::Alert { .. }) {
        // Lock-out is acceptable in a test environment; faillock status was still checked.
    } else {
        panic!("Expected PasswordPrompt or Alert modal for remove");
    }
    // The chosen cascade mode must survive into app state.
    assert_eq!(app.remove_cascade_mode, CascadeMode::Basic);
}
#[test]
/// What: Test executor request creation after password submission for install.
///
/// Inputs:
/// - Password prompt modal with password entered
///
/// Output:
/// - ``ExecutorRequest::Install`` is created with password
///
/// Details:
/// - Verifies that the queued executor request carries the typed password
fn integration_direct_install_executor_request() {
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: vec![create_test_package(
                "ripgrep",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Simulate password submission: pull password and items out of the modal in one pass.
    let (password, items) = match app.modal {
        Modal::PasswordPrompt {
            ref input,
            ref items,
            ..
        } => {
            let pw = if input.trim().is_empty() {
                None
            } else {
                Some(input.clone())
            };
            (pw, items.clone())
        }
        _ => (None, Vec::new()),
    };
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items: items.clone(),
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    app.pending_executor_request = Some(ExecutorRequest::Install {
        items,
        password,
        dry_run: app.dry_run,
    });
    // Verify the queued executor request carries the package and password.
    if let Some(ExecutorRequest::Install {
        items,
        password: pwd,
        ..
    }) = app.pending_executor_request
    {
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].name, "ripgrep");
        assert_eq!(pwd, Some("testpassword".to_string()));
    } else {
        panic!("Expected Install executor request");
    }
}
#[test]
/// What: Test executor request creation after password submission for remove.
///
/// Inputs:
/// - Password prompt modal with password entered for remove
///
/// Output:
/// - ``ExecutorRequest::Remove`` is created with password and cascade mode
///
/// Details:
/// - Verifies that the queued executor request carries password and cascade mode
fn integration_direct_remove_executor_request() {
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Remove,
            items: vec![create_test_package(
                "test-package",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        remove_cascade_mode: CascadeMode::Cascade,
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Simulate password submission: pull password and items out of the modal in one pass.
    let (password, items) = match app.modal {
        Modal::PasswordPrompt {
            ref input,
            ref items,
            ..
        } => {
            let pw = if input.trim().is_empty() {
                None
            } else {
                Some(input.clone())
            };
            (pw, items.clone())
        }
        _ => (None, Vec::new()),
    };
    let names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
    let cascade = app.remove_cascade_mode;
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items,
        action: PreflightAction::Remove,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    app.pending_executor_request = Some(ExecutorRequest::Remove {
        names,
        password,
        cascade,
        dry_run: app.dry_run,
    });
    // Verify the queued executor request carries name, password and cascade mode.
    if let Some(ExecutorRequest::Remove {
        names,
        password: pwd,
        cascade,
        ..
    }) = app.pending_executor_request
    {
        assert_eq!(names.len(), 1);
        assert_eq!(names[0], "test-package");
        assert_eq!(pwd, Some("testpassword".to_string()));
        assert_eq!(cascade, CascadeMode::Cascade);
    } else {
        panic!("Expected Remove executor request");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/direct_install_ui.rs | tests/install/direct_install_ui.rs | //! UI tests for direct install/remove modals.
//!
//! Tests cover:
//! - ``PasswordPrompt`` modal structure for direct install/remove
//! - ``PreflightExec`` modal structure for direct operations
//! - Modal state transitions
#![cfg(test)]
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source, modal::CascadeMode,
modal::PasswordPurpose,
};
/// What: Build a `PackageItem` fixture for UI modal tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - `PackageItem` with a fixed `1.0.0` version and empty metadata
///
/// Details:
/// - Keeps test packages structurally identical across all UI test cases
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        source,
        name: name.to_string(),
        version: String::from("1.0.0"),
        description: String::new(),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Test ``PasswordPrompt`` modal structure for direct install.
///
/// Inputs:
/// - ``PasswordPrompt`` modal with Install purpose and packages.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies ``PasswordPrompt`` modal can be created for direct install.
fn ui_direct_install_password_prompt_structure() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: vec![create_test_package(
                "ripgrep",
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )],
            input: String::new(),
            cursor: 0,
            error: None,
        },
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Destructure the modal; any other variant is a test failure.
    let Modal::PasswordPrompt {
        purpose,
        items: modal_items,
        input,
        cursor,
        error,
    } = app.modal
    else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::Install);
    assert_eq!(modal_items.len(), 1);
    assert_eq!(modal_items[0].name, "ripgrep");
    assert!(input.is_empty());
    assert_eq!(cursor, 0);
    assert!(error.is_none());
}
#[test]
/// What: Test ``PreflightExec`` modal structure for direct install (AUR package).
///
/// Inputs:
/// - ``PreflightExec`` modal for direct install of AUR package.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies ``PreflightExec`` modal can be created for direct install.
fn ui_direct_install_preflight_exec_structure() {
    let items = vec![create_test_package("yay-bin", Source::Aur)];
    let app = AppState {
        modal: Modal::PreflightExec {
            items: items.clone(),
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
            success: None,
        },
        pending_executor_request: Some(pacsea::install::ExecutorRequest::Install {
            items,
            password: None,
            dry_run: false,
        }),
        ..Default::default()
    };
    // Destructure the modal; any other variant is a test failure.
    let Modal::PreflightExec {
        items: modal_items,
        action,
        tab,
        verbose,
        log_lines,
        abortable,
        ..
    } = app.modal
    else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(modal_items.len(), 1);
    assert_eq!(modal_items[0].name, "yay-bin");
    assert_eq!(action, PreflightAction::Install);
    assert_eq!(tab, PreflightTab::Summary);
    assert!(!verbose);
    assert!(log_lines.is_empty());
    assert!(!abortable);
}
#[test]
/// What: Test ``PasswordPrompt`` modal structure for direct remove.
///
/// Inputs:
/// - ``PasswordPrompt`` modal with Remove purpose and packages.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies ``PasswordPrompt`` modal can be created for direct remove.
fn ui_direct_remove_password_prompt_structure() {
    // Two official packages built from a shared template.
    let items: Vec<PackageItem> = ["test-package-1", "test-package-2"]
        .into_iter()
        .map(|name| {
            create_test_package(
                name,
                Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
            )
        })
        .collect();
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Remove,
            items,
            input: String::new(),
            cursor: 0,
            error: None,
        },
        remove_cascade_mode: CascadeMode::Basic,
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    if let Modal::PasswordPrompt {
        purpose,
        items: modal_items,
        ..
    } = &app.modal
    {
        assert_eq!(*purpose, PasswordPurpose::Remove);
        assert_eq!(modal_items.len(), 2);
    } else {
        panic!("Expected PasswordPrompt modal");
    }
    assert_eq!(app.remove_cascade_mode, CascadeMode::Basic);
}
#[test]
/// What: Test modal transition from ``PasswordPrompt`` to ``PreflightExec`` for direct install.
///
/// Inputs:
/// - ``PasswordPrompt`` modal with Install purpose and password entered.
///
/// Output:
/// - Modal transitions to ``PreflightExec``.
/// - Executor request is created.
///
/// Details:
/// - Verifies modal state transition flow for direct install.
fn ui_direct_install_modal_transition() {
    let items = vec![create_test_package(
        "ripgrep",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Install,
            items: items.clone(),
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Simulate password submission: a non-blank input becomes Some(password).
    let password = match app.modal {
        Modal::PasswordPrompt { ref input, .. } if !input.trim().is_empty() => Some(input.clone()),
        _ => None,
    };
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items: items.clone(),
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    app.pending_executor_request = Some(pacsea::install::ExecutorRequest::Install {
        items,
        password,
        dry_run: app.dry_run,
    });
    // The modal must now be PreflightExec and a request must be queued.
    assert!(matches!(app.modal, Modal::PreflightExec { .. }));
    assert!(app.pending_executor_request.is_some());
}
#[test]
/// What: Test modal transition from ``PasswordPrompt`` to ``PreflightExec`` for direct remove.
///
/// Inputs:
/// - ``PasswordPrompt`` modal with Remove purpose and password entered.
///
/// Output:
/// - Modal transitions to ``PreflightExec``.
/// - Executor request is created with cascade mode.
///
/// Details:
/// - Verifies modal state transition flow for direct remove.
fn ui_direct_remove_modal_transition() {
    let items = vec![create_test_package(
        "test-package",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    )];
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::Remove,
            items: items.clone(),
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        remove_cascade_mode: CascadeMode::Cascade,
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Simulate password submission: a non-blank input becomes Some(password).
    let password = match app.modal {
        Modal::PasswordPrompt { ref input, .. } if !input.trim().is_empty() => Some(input.clone()),
        _ => None,
    };
    let names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
    let cascade = app.remove_cascade_mode;
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    app.modal = Modal::PreflightExec {
        items,
        action: PreflightAction::Remove,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        header_chips,
        success: None,
    };
    app.pending_executor_request = Some(pacsea::install::ExecutorRequest::Remove {
        names,
        password,
        cascade,
        dry_run: app.dry_run,
    });
    // The modal must now be PreflightExec and the request must carry the cascade mode.
    assert!(matches!(app.modal, Modal::PreflightExec { .. }));
    let Some(pacsea::install::ExecutorRequest::Remove { cascade, .. }) =
        app.pending_executor_request
    else {
        panic!("Expected Remove executor request");
    };
    assert_eq!(cascade, CascadeMode::Cascade);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install/optional_deps_ui.rs | tests/install/optional_deps_ui.rs | //! UI tests for optional dependencies modal.
//!
//! Tests cover:
//! - `OptionalDeps` modal structure
//! - Optional dependency row structure
//!
//! Note: These tests verify modal state structure rather than actual rendering.
#![cfg(test)]
use pacsea::state::{AppState, Modal, types::OptionalDepRow};
/// What: Build an `OptionalDepRow` fixture for modal tests.
///
/// Inputs:
/// - `package`: Package name
/// - `installed`: Whether package is installed
/// - `selectable`: Whether row is selectable
///
/// Output:
/// - `OptionalDepRow` with a `Test: <package>` label and no note
///
/// Details:
/// - Keeps test rows structurally identical across cases
fn create_test_row(package: &str, installed: bool, selectable: bool) -> OptionalDepRow {
    let label = format!("Test: {package}");
    OptionalDepRow {
        label,
        package: package.to_string(),
        installed,
        selectable,
        note: None,
    }
}
#[test]
/// What: Test `OptionalDeps` modal structure.
///
/// Inputs:
/// - `OptionalDeps` modal with dependency rows.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Verifies `OptionalDeps` modal can be created.
fn ui_optional_deps_modal_structure() {
    // (package, installed, selectable) triples expanded into rows.
    let rows: Vec<OptionalDepRow> = [
        ("paru", false, true),
        ("yay", false, true),
        ("nvim", true, false),
        ("virustotal-setup", false, true),
    ]
    .into_iter()
    .map(|(pkg, installed, selectable)| create_test_row(pkg, installed, selectable))
    .collect();
    let app = AppState {
        modal: Modal::OptionalDeps { rows, selected: 1 },
        ..Default::default()
    };
    let Modal::OptionalDeps {
        rows: modal_rows,
        selected,
    } = app.modal
    else {
        panic!("Expected OptionalDeps modal");
    };
    assert_eq!(modal_rows.len(), 4);
    assert_eq!(selected, 1);
    assert_eq!(modal_rows[0].package, "paru");
    assert!(!modal_rows[0].installed);
    assert!(modal_rows[0].selectable);
    assert_eq!(modal_rows[2].package, "nvim");
    assert!(modal_rows[2].installed);
    assert!(!modal_rows[2].selectable);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/data_arrival.rs | tests/preflight_integration/data_arrival.rs | //! Tests for out-of-order data arrival and cancellation handling.
use pacsea as crate_root;
use tokio::sync::mpsc;
/// What: Creates the two official test packages used across preflight tests.
///
/// Inputs: None (uses hardcoded test data).
///
/// Output:
/// - Vector of `PackageItem` instances for testing
///
/// Details:
/// - Yields `test-package-1` (1.0.0, `core`) and `test-package-2` (2.0.0, `extra`)
/// - Both are `x86_64` official packages with empty optional metadata
fn create_test_packages() -> Vec<crate_root::state::PackageItem> {
    [
        ("test-package-1", "1.0.0", "core"),
        ("test-package-2", "2.0.0", "extra"),
    ]
    .into_iter()
    .map(|(name, version, repo)| crate_root::state::PackageItem {
        name: name.to_string(),
        version: version.to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: repo.to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    })
    .collect()
}
/// What: Sets up preflight modal state with all stages queued.
///
/// Inputs:
/// - `app`: `AppState` to configure
/// - `test_packages`: Packages to use for preflight
///
/// Output: None (modifies `app` in place).
///
/// Details:
/// - Initializes the preflight modal with all stages marked as resolving (summary, deps, files, services)
/// - Sets up empty data structures for all modal fields
/// - Resets cancellation flag to false
/// - Queues all resolution stages for parallel processing
fn setup_preflight_modal(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) {
    app.install_list = test_packages.to_vec();
    // Reset any prior cancellation so stage results will be applied.
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Queue all stages (simulating parallel kick-off)
    // Summary and deps carry the action alongside the package list; files and
    // services only need the package list itself.
    app.preflight_summary_items = Some((
        test_packages.to_vec(),
        crate_root::state::modal::PreflightAction::Install,
    ));
    app.preflight_summary_resolving = true;
    app.preflight_deps_items = Some((
        test_packages.to_vec(),
        crate_root::state::modal::PreflightAction::Install,
    ));
    app.preflight_deps_resolving = true;
    app.preflight_files_items = Some(test_packages.to_vec());
    app.preflight_files_resolving = true;
    app.preflight_services_items = Some(test_packages.to_vec());
    app.preflight_services_resolving = true;
    // Create modal state
    // Every data field starts empty/None; results arriving later fill them in.
    app.modal = crate_root::state::Modal::Preflight {
        items: test_packages.to_vec(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count: test_packages.len(),
            download_bytes: 0,
            install_delta_bytes: 0,
            aur_count: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: true, // No AUR packages, so loaded immediately
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
}
/// What: Processes files result and verifies it was loaded.
///
/// Inputs:
/// - `app`: `AppState` to update
/// - `files_res_tx`: Sender for files result
/// - `files_res_rx`: Receiver for files result
///
/// Output: None (modifies `app` in place and asserts on state).
///
/// Details:
/// - Sends a test files result through the channel and receives it
/// - Simulates the runtime handler behavior for processing files results
/// - Updates app state if not cancelled, clearing resolving flags
/// - Verifies files are properly loaded in the preflight modal
async fn process_and_verify_files_result(
    app: &mut crate_root::state::AppState,
    files_res_tx: &mpsc::UnboundedSender<Vec<crate_root::state::modal::PackageFileInfo>>,
    files_res_rx: &mut mpsc::UnboundedReceiver<Vec<crate_root::state::modal::PackageFileInfo>>,
) {
    // Minimal fixture: one package with zeroed file counters.
    let files_result = vec![crate_root::state::modal::PackageFileInfo {
        name: "test-package-1".to_string(),
        files: vec![],
        total_count: 0,
        new_count: 0,
        changed_count: 0,
        removed_count: 0,
        config_count: 0,
        pacnew_candidates: 0usize,
        pacsave_candidates: 0usize,
    }];
    // Send on an unbounded channel; the result is immediately available to recv().
    let _ = files_res_tx.send(files_result.clone());
    // Process files result (simulate runtime.rs handler)
    if let Some(files) = files_res_rx.recv().await {
        // Capture whether this result belongs to a preflight stage BEFORE clearing the flag.
        let was_preflight = app.preflight_files_resolving;
        // Resolving flags are cleared unconditionally; data is applied only when not cancelled.
        app.files_resolving = false;
        app.preflight_files_resolving = false;
        if !app
            .preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed)
        {
            app.install_list_files = files.clone();
            if let crate_root::state::Modal::Preflight { file_info, .. } = &mut app.modal {
                *file_info = files;
            }
            // Drain the queued items so the stage is not re-run.
            if was_preflight {
                app.preflight_files_items = None;
            }
        }
    }
    // Verify files are loaded
    if let crate_root::state::Modal::Preflight { file_info, .. } = &app.modal {
        assert!(!file_info.is_empty(), "Files should be loaded");
    }
    assert!(
        !app.preflight_files_resolving,
        "Files resolving flag should be cleared"
    );
}
/// What: Processes services result and verifies it was loaded.
///
/// Inputs:
/// - `app`: `AppState` to update
/// - `services_res_tx`: Sender for services result
/// - `services_res_rx`: Receiver for services result
///
/// Output: None (modifies `app` in place and asserts on state).
///
/// Details:
/// - Sends a test services result through the channel and receives it
/// - Simulates the runtime handler behavior for processing services results
/// - Updates app state if not cancelled, marking services as loaded and clearing resolving flags
/// - Verifies services are properly loaded in the preflight modal
async fn process_and_verify_services_result(
    app: &mut crate_root::state::AppState,
    services_res_tx: &mpsc::UnboundedSender<Vec<crate_root::state::modal::ServiceImpact>>,
    services_res_rx: &mut mpsc::UnboundedReceiver<Vec<crate_root::state::modal::ServiceImpact>>,
) {
    // Minimal fixture: one active unit whose provider needs a restart.
    let services_result = vec![crate_root::state::modal::ServiceImpact {
        unit_name: "test.service".to_string(),
        providers: vec!["test-package-1".to_string()],
        is_active: true,
        needs_restart: true,
        recommended_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
        restart_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
    }];
    // Send on an unbounded channel; the result is immediately available to recv().
    let _ = services_res_tx.send(services_result.clone());
    // Process services result
    if let Some(services) = services_res_rx.recv().await {
        // Capture whether this result belongs to a preflight stage BEFORE clearing the flag.
        let was_preflight = app.preflight_services_resolving;
        // Resolving flags are cleared unconditionally; data is applied only when not cancelled.
        app.services_resolving = false;
        app.preflight_services_resolving = false;
        if !app
            .preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed)
        {
            app.install_list_services = services.clone();
            // Services additionally flip `services_loaded`, unlike files/deps.
            if let crate_root::state::Modal::Preflight {
                service_info,
                services_loaded,
                ..
            } = &mut app.modal
            {
                *service_info = services;
                *services_loaded = true;
            }
            // Drain the queued items so the stage is not re-run.
            if was_preflight {
                app.preflight_services_items = None;
            }
        }
    }
    // Verify services are loaded
    if let crate_root::state::Modal::Preflight {
        service_info,
        services_loaded,
        ..
    } = &app.modal
    {
        assert!(*services_loaded, "Services should be marked as loaded");
        assert!(!service_info.is_empty(), "Services should be loaded");
    }
    assert!(
        !app.preflight_services_resolving,
        "Services resolving flag should be cleared"
    );
}
/// What: Processes deps result and verifies it was loaded.
///
/// Inputs:
/// - `app`: `AppState` to update
/// - `deps_res_tx`: Sender for deps result
/// - `deps_res_rx`: Receiver for deps result
/// - `test_packages`: Packages to filter dependencies
///
/// Output: None (modifies `app` in place and asserts on state).
///
/// Details:
/// - Sends a test dependencies result through the channel and receives it
/// - Simulates the runtime handler behavior for processing dependencies results
/// - Filters dependencies to only include those required by the test packages
/// - Updates app state if not cancelled, clearing resolving flags
/// - Verifies dependencies are properly loaded in the preflight modal
async fn process_and_verify_deps_result(
    app: &mut crate_root::state::AppState,
    deps_res_tx: &mpsc::UnboundedSender<Vec<crate_root::state::modal::DependencyInfo>>,
    deps_res_rx: &mut mpsc::UnboundedReceiver<Vec<crate_root::state::modal::DependencyInfo>>,
    test_packages: &[crate_root::state::PackageItem],
) {
    // Minimal fixture: one to-install official dep required by test-package-1.
    let deps_result = vec![crate_root::state::modal::DependencyInfo {
        name: "test-dep".to_string(),
        version: "1.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "core".to_string(),
        },
        required_by: vec!["test-package-1".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    }];
    // Send on an unbounded channel; the result is immediately available to recv().
    let _ = deps_res_tx.send(deps_result.clone());
    // Process deps result
    if let Some(deps) = deps_res_rx.recv().await {
        // Capture whether this result belongs to a preflight stage BEFORE clearing the flag.
        let was_preflight = app.preflight_deps_resolving;
        // Resolving flags are cleared unconditionally; data is applied only when not cancelled.
        app.deps_resolving = false;
        app.preflight_deps_resolving = false;
        if !app
            .preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed)
        {
            // The full dep list is stored on the app; the modal only shows deps
            // directly required by one of the displayed packages.
            app.install_list_deps = deps.clone();
            if let crate_root::state::Modal::Preflight {
                dependency_info, ..
            } = &mut app.modal
            {
                let item_names: std::collections::HashSet<String> =
                    test_packages.iter().map(|i| i.name.clone()).collect();
                let filtered_deps: Vec<_> = deps
                    .iter()
                    .filter(|dep| {
                        dep.required_by
                            .iter()
                            .any(|req_by| item_names.contains(req_by))
                    })
                    .cloned()
                    .collect();
                *dependency_info = filtered_deps;
            }
            // Drain the queued items so the stage is not re-run.
            if was_preflight {
                app.preflight_deps_items = None;
            }
        }
    }
    // Verify deps are loaded
    if let crate_root::state::Modal::Preflight {
        dependency_info, ..
    } = &app.modal
    {
        assert!(!dependency_info.is_empty(), "Dependencies should be loaded");
    }
    assert!(
        !app.preflight_deps_resolving,
        "Deps resolving flag should be cleared"
    );
}
/// What: Processes summary result and verifies it was loaded.
///
/// Inputs:
/// - `app`: `AppState` to update
/// - `summary_res_tx`: Sender for summary result
/// - `summary_res_rx`: Receiver for summary result
/// - `test_packages`: Packages used for preflight
///
/// Output: None (modifies `app` in place and asserts on state).
///
/// Details:
/// - Sends a test summary result with package count and header chips through the channel and receives it
/// - Simulates the runtime handler behavior for processing summary results
/// - Updates app state if not cancelled, clearing resolving flags
/// - Verifies summary is properly loaded in the preflight modal
async fn process_and_verify_summary_result(
    app: &mut crate_root::state::AppState,
    summary_res_tx: &mpsc::UnboundedSender<crate_root::logic::preflight::PreflightSummaryOutcome>,
    summary_res_rx: &mut mpsc::UnboundedReceiver<
        crate_root::logic::preflight::PreflightSummaryOutcome,
    >,
    test_packages: &[crate_root::state::PackageItem],
) {
    // Fixture: low-risk summary with fixed download/install sizes and matching header chips.
    let summary_result = crate_root::logic::preflight::PreflightSummaryOutcome {
        summary: crate_root::state::modal::PreflightSummaryData {
            packages: vec![],
            package_count: test_packages.len(),
            aur_count: 0,
            download_bytes: 1000,
            install_delta_bytes: 2000,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
            risk_reasons: vec![],
            major_bump_packages: vec![],
            core_system_updates: vec![],
            pacnew_candidates: 0usize,
            pacsave_candidates: 0usize,
            config_warning_packages: vec![],
            service_restart_units: vec![],
            summary_warnings: vec![],
            summary_notes: vec![],
        },
        header: crate_root::state::modal::PreflightHeaderChips {
            package_count: test_packages.len(),
            download_bytes: 1000,
            install_delta_bytes: 2000,
            aur_count: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        reverse_deps_report: None,
    };
    // Send on an unbounded channel; the result is immediately available to recv().
    let _ = summary_res_tx.send(summary_result.clone());
    // Process summary result
    if let Some(summary_outcome) = summary_res_rx.recv().await {
        // Unlike the other stages, the summary handler applies data via a
        // combined `if !cancelled && let Preflight` check, then clears the
        // flag and queue unconditionally afterwards.
        if !app
            .preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed)
            && let crate_root::state::Modal::Preflight {
                summary,
                header_chips,
                ..
            } = &mut app.modal
        {
            // Summary is boxed on the modal to keep the enum variant small.
            *summary = Some(Box::new(summary_outcome.summary));
            *header_chips = summary_outcome.header;
        }
        app.preflight_summary_resolving = false;
        app.preflight_summary_items = None;
    }
    // Verify summary is loaded
    if let crate_root::state::Modal::Preflight { summary, .. } = &app.modal {
        assert!(summary.is_some(), "Summary should be loaded");
    }
    assert!(
        !app.preflight_summary_resolving,
        "Summary resolving flag should be cleared"
    );
}
/// What: Verifies that all preflight stages are complete.
///
/// Inputs:
/// - `app`: `AppState` to verify
///
/// Output: None (panics on assertion failure).
///
/// Details:
/// - Verifies that all four resolution stages (summary, deps, files, services) have their resolving flags set to false
/// - Ensures all item queues are cleared
/// - This confirms the preflight modal has finished all background work
fn verify_all_stages_complete(app: &crate_root::state::AppState) {
    // Resolving flags must all be cleared once results have been processed.
    // Messages match the style of the per-stage assertions in this file so a
    // failure names the offending stage.
    assert!(
        !app.preflight_summary_resolving,
        "Summary resolving flag should be cleared"
    );
    assert!(
        !app.preflight_deps_resolving,
        "Deps resolving flag should be cleared"
    );
    assert!(
        !app.preflight_files_resolving,
        "Files resolving flag should be cleared"
    );
    assert!(
        !app.preflight_services_resolving,
        "Services resolving flag should be cleared"
    );
    // Queued item lists must be drained so no stage re-runs.
    assert!(
        app.preflight_summary_items.is_none(),
        "Summary items queue should be drained"
    );
    assert!(
        app.preflight_deps_items.is_none(),
        "Deps items queue should be drained"
    );
    assert!(
        app.preflight_files_items.is_none(),
        "Files items queue should be drained"
    );
    assert!(
        app.preflight_services_items.is_none(),
        "Services items queue should be drained"
    );
}
#[tokio::test]
/// What: Verify that preflight modal handles out-of-order data arrival correctly.
///
/// Inputs:
/// - Preflight modal opened with multiple packages
/// - Background resolution stages complete in non-sequential order (e.g., Files before Deps)
///
/// Output:
/// - Modal state correctly reflects data as it arrives, regardless of order
/// - All stages eventually show as complete
async fn preflight_handles_out_of_order_data_arrival() {
    // NOTE(review): `set_var` is unsafe under Rust 2024; this assumes no other
    // thread reads/writes the environment at this point — confirm against the
    // test-harness threading model.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let test_packages = create_test_packages();
    // Set up channels for runtime (simulating result channels)
    // Only the result senders/receivers are exercised; the request channels
    // (prefixed `_`) exist to mirror the real runtime wiring.
    let (_deps_req_tx, _deps_req_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::PackageItem>>();
    let (deps_res_tx, mut deps_res_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::modal::DependencyInfo>>();
    let (_files_req_tx, _files_req_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::PackageItem>>();
    let (files_res_tx, mut files_res_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::modal::PackageFileInfo>>();
    let (_services_req_tx, _services_req_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::PackageItem>>();
    let (services_res_tx, mut services_res_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::modal::ServiceImpact>>();
    let (_sandbox_req_tx, _sandbox_req_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::PackageItem>>();
    let (_sandbox_res_tx, _sandbox_res_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::logic::sandbox::SandboxInfo>>();
    let (_summary_req_tx, _summary_req_rx) = mpsc::unbounded_channel::<(
        Vec<crate_root::state::PackageItem>,
        crate_root::state::modal::PreflightAction,
    )>();
    let (summary_res_tx, mut summary_res_rx) =
        mpsc::unbounded_channel::<crate_root::logic::preflight::PreflightSummaryOutcome>();
    // Open preflight modal and queue all stages
    setup_preflight_modal(&mut app, &test_packages);
    // Verify all stages are queued
    assert!(app.preflight_summary_resolving);
    assert!(app.preflight_deps_resolving);
    assert!(app.preflight_files_resolving);
    assert!(app.preflight_services_resolving);
    // Simulate out-of-order completion:
    // 1. Files completes first (fastest)
    // 2. Services completes second
    // 3. Deps completes third
    // 4. Summary completes last (slowest)
    process_and_verify_files_result(&mut app, &files_res_tx, &mut files_res_rx).await;
    process_and_verify_services_result(&mut app, &services_res_tx, &mut services_res_rx).await;
    process_and_verify_deps_result(&mut app, &deps_res_tx, &mut deps_res_rx, &test_packages).await;
    process_and_verify_summary_result(
        &mut app,
        &summary_res_tx,
        &mut summary_res_rx,
        &test_packages,
    )
    .await;
    // Final verification: all stages should be complete
    verify_all_stages_complete(&app);
}
#[tokio::test]
/// What: Verify that preflight cancellation aborts in-flight work correctly.
///
/// Inputs:
/// - Preflight modal opened with packages
/// - Background resolution stages started
/// - Modal closed (cancellation triggered)
///
/// Output:
/// - Cancellation flag is set
/// - Queued work items are cleared
/// - Results arriving after cancellation are ignored (caches stay untouched)
async fn preflight_cancellation_aborts_in_flight_work() {
    // Headless flag keeps the UI layer inert during the test.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let test_packages = vec![crate_root::state::PackageItem {
        name: "test-package".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    // Set up result channels (request halves are irrelevant to this test).
    let (deps_res_tx, mut deps_res_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::modal::DependencyInfo>>();
    let (files_res_tx, mut files_res_rx) =
        mpsc::unbounded_channel::<Vec<crate_root::state::modal::PackageFileInfo>>();
    // Open preflight modal and queue work
    app.install_list = test_packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    app.preflight_deps_items = Some((
        test_packages.clone(),
        crate_root::state::modal::PreflightAction::Install,
    ));
    app.preflight_deps_resolving = true;
    app.preflight_files_items = Some(test_packages.clone());
    app.preflight_files_resolving = true;
    app.modal = crate_root::state::Modal::Preflight {
        items: test_packages.clone(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count: 1,
            download_bytes: 0,
            install_delta_bytes: 0,
            aur_count: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: true,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
    // Verify work is queued
    assert!(app.preflight_deps_resolving);
    assert!(app.preflight_files_resolving);
    assert!(app.preflight_deps_items.is_some());
    assert!(app.preflight_files_items.is_some());
    // Cancel preflight (simulate modal closing)
    app.preflight_cancelled
        .store(true, std::sync::atomic::Ordering::Relaxed);
    app.preflight_deps_items = None;
    app.preflight_files_items = None;
    app.modal = crate_root::state::Modal::None;
    // Verify cancellation flag is set
    assert!(
        app.preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed)
    );
    // Verify queues are cleared
    assert!(app.preflight_deps_items.is_none());
    assert!(app.preflight_files_items.is_none());
    // Simulate results arriving after cancellation
    let deps_result = vec![crate_root::state::modal::DependencyInfo {
        name: "test-dep".to_string(),
        version: "1.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "core".to_string(),
        },
        required_by: vec!["test-package".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    }];
    let _ = deps_res_tx.send(deps_result.clone());
    // Process result (simulate runtime.rs handler checking cancellation)
    if let Some(deps) = deps_res_rx.recv().await {
        let cancelled = app
            .preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed);
        let was_preflight = app.preflight_deps_resolving;
        app.deps_resolving = false;
        app.preflight_deps_resolving = false;
        if !cancelled {
            app.install_list_deps = deps;
            if was_preflight {
                app.preflight_deps_items = None;
            }
        } else if was_preflight {
            // Result should be ignored when cancelled
            app.preflight_deps_items = None;
        }
    }
    // Cancellation must have prevented the deps cache from being updated.
    // (Previously this was only stated in a comment; assert it explicitly.)
    assert!(
        app.install_list_deps.is_empty(),
        "Deps result should be ignored after cancellation"
    );
    assert!(!app.preflight_deps_resolving);
    assert!(app.preflight_deps_items.is_none());
    // Send files result after cancellation
    let files_result = vec![crate_root::state::modal::PackageFileInfo {
        name: "test-package".to_string(),
        files: vec![],
        total_count: 0,
        new_count: 0,
        changed_count: 0,
        removed_count: 0,
        config_count: 0,
        pacnew_candidates: 0,
        pacsave_candidates: 0,
    }];
    let _ = files_res_tx.send(files_result.clone());
    // Process files result
    if let Some(files) = files_res_rx.recv().await {
        let cancelled = app
            .preflight_cancelled
            .load(std::sync::atomic::Ordering::Relaxed);
        let was_preflight = app.preflight_files_resolving;
        app.files_resolving = false;
        app.preflight_files_resolving = false;
        if !cancelled {
            app.install_list_files = files;
            if was_preflight {
                app.preflight_files_items = None;
            }
        } else if was_preflight {
            app.preflight_files_items = None;
        }
    }
    // Cancellation must also have prevented the files cache from being updated.
    assert!(
        app.install_list_files.is_empty(),
        "Files result should be ignored after cancellation"
    );
    // Verify flags are cleared
    assert!(!app.preflight_files_resolving);
    assert!(app.preflight_files_items.is_none());
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/edge_cases.rs | tests/preflight_integration/edge_cases.rs | //! Tests for edge cases.
use pacsea as crate_root;
/// What: Switch to Deps tab and sync empty dependencies.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Updates modal to Deps tab with empty dependency info
///
/// Details:
/// - Simulates `sync_dependencies` logic for empty results
fn sync_deps_tab_empty(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Deps;
    // Only the install flow filters the cached deps by the modal's items.
    if let crate_root::state::PreflightAction::Install = *action {
        let names: std::collections::HashSet<&str> =
            items.iter().map(|item| item.name.as_str()).collect();
        *dependency_info = app
            .install_list_deps
            .iter()
            .filter(|dep| dep.required_by.iter().any(|r| names.contains(r.as_str())))
            .cloned()
            .collect();
        *dep_selected = 0;
    }
}
/// What: Verify Deps tab has empty state without errors.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Asserts Deps tab is active with empty dependencies and no errors
///
/// Details:
/// - Verifies graceful handling of empty dependency results
fn verify_deps_tab_empty(app: &crate_root::state::AppState) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            tab,
            dependency_info,
            deps_error,
            ..
        } => {
            assert_eq!(
                *tab,
                crate_root::state::PreflightTab::Deps,
                "Should be on Deps tab"
            );
            assert!(dependency_info.is_empty(), "Dependencies should be empty");
            assert!(
                deps_error.is_none(),
                "Should not have error for empty results"
            );
        }
        _ => panic!("Expected Preflight modal"),
    }
}
/// What: Switch to Files tab and sync empty files.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Updates modal to Files tab with empty file info
///
/// Details:
/// - Simulates `sync_files` logic for empty results
fn sync_files_tab_empty(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Files;
    // Pull any cached file entries belonging to the modal's packages.
    let names: std::collections::HashSet<&str> =
        items.iter().map(|item| item.name.as_str()).collect();
    *file_info = app
        .install_list_files
        .iter()
        .filter(|info| names.contains(info.name.as_str()))
        .cloned()
        .collect();
    *file_selected = 0;
}
/// What: Verify Files tab has empty state without errors.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Asserts Files tab is active with empty files and no errors
///
/// Details:
/// - Verifies graceful handling of empty file results
fn verify_files_tab_empty(app: &crate_root::state::AppState) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            tab,
            file_info,
            files_error,
            ..
        } => {
            assert_eq!(
                *tab,
                crate_root::state::PreflightTab::Files,
                "Should be on Files tab"
            );
            assert!(file_info.is_empty(), "Files should be empty");
            assert!(
                files_error.is_none(),
                "Should not have error for empty results"
            );
        }
        _ => panic!("Expected Preflight modal"),
    }
}
/// What: Switch to Services tab and sync empty services.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Updates modal to Services tab with empty service info
///
/// Details:
/// - Simulates `sync_services` logic for empty results
fn sync_services_tab_empty(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Services;
    if let crate_root::state::PreflightAction::Install = *action {
        let names: std::collections::HashSet<&str> =
            items.iter().map(|item| item.name.as_str()).collect();
        let cached: Vec<_> = app
            .install_list_services
            .iter()
            .filter(|s| s.providers.iter().any(|p| names.contains(p.as_str())))
            .cloned()
            .collect();
        // Existing info is kept when the cache yields nothing for these items.
        if !cached.is_empty() {
            *service_info = cached;
        }
        *services_loaded = true;
        *service_selected = 0;
    }
}
/// What: Verify Services tab has empty state without errors.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Asserts Services tab is active with empty services and no errors
///
/// Details:
/// - Verifies graceful handling of empty service results
fn verify_services_tab_empty(app: &crate_root::state::AppState) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            tab,
            service_info,
            services_loaded,
            services_error,
            ..
        } => {
            assert_eq!(
                *tab,
                crate_root::state::PreflightTab::Services,
                "Should be on Services tab"
            );
            assert!(service_info.is_empty(), "Services should be empty");
            assert!(*services_loaded, "Services should be marked as loaded");
            assert!(
                services_error.is_none(),
                "Should not have error for empty results"
            );
        }
        _ => panic!("Expected Preflight modal"),
    }
}
/// What: Switch to Sandbox tab and sync empty sandbox.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Updates modal to Sandbox tab with empty sandbox info
///
/// Details:
/// - Simulates `sync_sandbox` logic for empty results
/// - Note: Sandbox only applies to AUR packages, so empty is expected for official packages
fn sync_sandbox_tab_empty(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        sandbox_info,
        sandbox_loaded,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Sandbox;
    if let crate_root::state::PreflightAction::Install = *action {
        let names: std::collections::HashSet<&str> =
            items.iter().map(|item| item.name.as_str()).collect();
        let cached: Vec<_> = app
            .install_list_sandbox
            .iter()
            .filter(|s| names.contains(s.package_name.as_str()))
            .cloned()
            .collect();
        // Existing info is kept when the cache yields nothing for these items.
        if !cached.is_empty() {
            *sandbox_info = cached;
        }
        *sandbox_loaded = true;
    }
}
/// What: Verify Sandbox tab has empty state without errors.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Asserts Sandbox tab is active with empty sandbox and no errors
///
/// Details:
/// - Verifies graceful handling of empty sandbox results
fn verify_sandbox_tab_empty(app: &crate_root::state::AppState) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            tab,
            sandbox_info,
            sandbox_loaded,
            sandbox_error,
            ..
        } => {
            assert_eq!(
                *tab,
                crate_root::state::PreflightTab::Sandbox,
                "Should be on Sandbox tab"
            );
            assert!(sandbox_info.is_empty(), "Sandbox should be empty");
            assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
            assert!(
                sandbox_error.is_none(),
                "Should not have error for empty results"
            );
        }
        _ => panic!("Expected Preflight modal"),
    }
}
/// What: Verify all tabs have empty state without errors.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Asserts all tabs are empty but without errors
///
/// Details:
/// - Final verification that all tabs handle empty results gracefully
fn verify_all_tabs_empty(app: &crate_root::state::AppState) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            dependency_info,
            file_info,
            service_info,
            sandbox_info,
            deps_error,
            files_error,
            services_error,
            sandbox_error,
            services_loaded,
            sandbox_loaded,
            ..
        } => {
            assert!(dependency_info.is_empty(), "Deps should be empty");
            assert!(deps_error.is_none(), "Deps should not have error");
            assert!(file_info.is_empty(), "Files should be empty");
            assert!(files_error.is_none(), "Files should not have error");
            assert!(service_info.is_empty(), "Services should be empty");
            assert!(*services_loaded, "Services should be marked as loaded");
            assert!(services_error.is_none(), "Services should not have error");
            assert!(sandbox_info.is_empty(), "Sandbox should be empty");
            assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
            assert!(sandbox_error.is_none(), "Sandbox should not have error");
        }
        _ => panic!("Expected Preflight modal"),
    }
}
#[test]
/// What: Verify that preflight modal handles empty results gracefully across all tabs.
///
/// Inputs:
/// - Packages in `install_list`
/// - All resolution stages return empty results (no deps, files, services, sandbox)
/// - User switches between all tabs
///
/// Output:
/// - All tabs display appropriate empty state messages
/// - No panics or errors occur
/// - UI remains functional
///
/// Details:
/// - Tests edge case where packages have no dependencies, files, services, or sandbox data
/// - Verifies graceful handling of empty results
/// - Ensures UI doesn't break with empty data
fn preflight_handles_empty_results_gracefully() {
    // Headless flag keeps the UI layer inert during the test.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    // Single official (non-AUR) package with no associated data in any cache.
    let test_packages = vec![crate_root::state::PackageItem {
        name: "test-package-empty".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    // All caches are empty (no dependencies, files, services, sandbox)
    app.install_list_deps = vec![];
    app.install_list_files = vec![];
    app.install_list_services = vec![];
    app.install_list_sandbox = vec![];
    // Set packages in install list
    app.install_list = test_packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Open preflight modal
    app.modal = crate_root::state::Modal::Preflight {
        items: test_packages.clone(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count: test_packages.len(),
            download_bytes: 0,
            install_delta_bytes: 0,
            aur_count: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        // Sandbox data only applies to AUR packages, so it starts as loaded here.
        sandbox_loaded: true,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
    // Test 1: Switch to Deps tab - should handle empty results
    sync_deps_tab_empty(&mut app);
    verify_deps_tab_empty(&app);
    // Test 2: Switch to Files tab - should handle empty results
    sync_files_tab_empty(&mut app);
    verify_files_tab_empty(&app);
    // Test 3: Switch to Services tab - should handle empty results
    sync_services_tab_empty(&mut app);
    verify_services_tab_empty(&app);
    // Test 4: Switch to Sandbox tab - should handle empty results
    sync_sandbox_tab_empty(&mut app);
    verify_sandbox_tab_empty(&app);
    // Test 5: Switch back to Deps tab - should still handle empty gracefully
    sync_deps_tab_empty(&mut app);
    verify_deps_tab_empty(&app);
    // Final verification: All tabs handle empty results gracefully
    verify_all_tabs_empty(&app);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/package_operations.rs | tests/preflight_integration/package_operations.rs | //! Tests for package operations and management.
use pacsea as crate_root;
/// Sets up first package with cached dependencies, files, and services.
///
/// What: Creates a first package and pre-populates app state with its cached data
/// including dependencies, files, and services to simulate a package that was already
/// added and resolved.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` to populate with cached data
///
/// Output:
/// - Returns the created `PackageItem` for "first-package"
/// - Updates `app.install_list_deps` with 2 dependencies (first-dep-1, first-dep-2)
/// - Updates `app.install_list_files` with file info (2 files, 1 config, 1 pacnew candidate)
/// - Updates `app.install_list_services` with service info (first-service.service)
/// - Sets `app.install_list` to contain the first package
/// - Sets `app.preflight_cancelled` to false
///
/// Details:
/// - Creates a package from "core" repo with version "1.0.0"
/// - Sets up dependencies from both "core" and "extra" repos
/// - Includes one config file that will generate a pacnew file
/// - Service is active and requires restart
fn setup_first_package_with_cache(
    app: &mut crate_root::state::AppState,
) -> crate_root::state::PackageItem {
    // The package under test: official, core repo, x86_64.
    let first_package = crate_root::state::PackageItem {
        name: "first-package".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    // Two fresh dependencies, one from "core" and one from "extra".
    app.install_list_deps = vec![
        crate_root::state::modal::DependencyInfo {
            name: "first-dep-1".to_string(),
            version: "1.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["first-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
        crate_root::state::modal::DependencyInfo {
            name: "first-dep-2".to_string(),
            version: "2.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Official {
                repo: "extra".to_string(),
            },
            required_by: vec!["first-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
    ];
    // One binary plus one config file; the config is a .pacnew candidate.
    app.install_list_files = vec![crate_root::state::modal::PackageFileInfo {
        name: "first-package".to_string(),
        files: vec![
            crate_root::state::modal::FileChange {
                path: "/usr/bin/first".to_string(),
                change_type: crate_root::state::modal::FileChangeType::New,
                package: "first-package".to_string(),
                is_config: false,
                predicted_pacnew: false,
                predicted_pacsave: false,
            },
            crate_root::state::modal::FileChange {
                path: "/etc/first.conf".to_string(),
                change_type: crate_root::state::modal::FileChangeType::New,
                package: "first-package".to_string(),
                is_config: true,
                predicted_pacnew: true,
                predicted_pacsave: false,
            },
        ],
        total_count: 2,
        new_count: 2,
        changed_count: 0,
        removed_count: 0,
        config_count: 1,
        pacnew_candidates: 1,
        pacsave_candidates: 0,
    }];
    // Active service that the upgrade would require restarting.
    app.install_list_services = vec![crate_root::state::modal::ServiceImpact {
        unit_name: "first-service.service".to_string(),
        providers: vec!["first-package".to_string()],
        is_active: true,
        needs_restart: true,
        recommended_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
        restart_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
    }];
    // Start with only this package queued and cancellation cleared.
    app.install_list = vec![first_package.clone()];
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    first_package
}
/// Adds second package with conflict to app state.
///
/// What: Creates a second package and adds its cached data to app state, including
/// a dependency conflict with the first package's dependencies.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` to append cached data to
///
/// Output:
/// - Returns the created `PackageItem` for "second-package"
/// - Appends to `app.install_list_deps`:
///   - second-dep-1 (`ToInstall` status)
///   - first-dep-1 version 2.0.0 (Conflict status, conflicts with first package)
/// - Appends to `app.install_list_files` with file info (1 file, no config)
/// - Appends to `app.install_list_services` with service info (second-service.service)
///
/// Details:
/// - Creates a package from "extra" repo with version "2.0.0"
/// - Introduces a conflict: requires first-dep-1 version 2.0.0 while first package
///   requires first-dep-1 version 1.0.0
/// - Service is inactive and does not need restart
fn setup_second_package_with_conflict(
    app: &mut crate_root::state::AppState,
) -> crate_root::state::PackageItem {
    // The conflicting package: official, extra repo.
    let second_package = crate_root::state::PackageItem {
        name: "second-package".to_string(),
        version: "2.0.0".to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: "extra".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    // A plain new dependency unique to the second package.
    app.install_list_deps
        .push(crate_root::state::modal::DependencyInfo {
            name: "second-dep-1".to_string(),
            version: "1.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["second-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        });
    // The deliberate version conflict: same dep name, incompatible version.
    app.install_list_deps
        .push(crate_root::state::modal::DependencyInfo {
            name: "first-dep-1".to_string(),
            version: "2.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::Conflict {
                reason: "Conflicts with first-package's dependency first-dep-1 (1.0.0)".to_string(),
            },
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["second-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        });
    // Single non-config binary; no pacnew/pacsave candidates.
    app.install_list_files
        .push(crate_root::state::modal::PackageFileInfo {
            name: "second-package".to_string(),
            files: vec![crate_root::state::modal::FileChange {
                path: "/usr/bin/second".to_string(),
                change_type: crate_root::state::modal::FileChangeType::New,
                package: "second-package".to_string(),
                is_config: false,
                predicted_pacnew: false,
                predicted_pacsave: false,
            }],
            total_count: 1,
            new_count: 1,
            changed_count: 0,
            removed_count: 0,
            config_count: 0,
            pacnew_candidates: 0,
            pacsave_candidates: 0,
        });
    // Inactive service: restart is deferred rather than required.
    app.install_list_services
        .push(crate_root::state::modal::ServiceImpact {
            unit_name: "second-service.service".to_string(),
            providers: vec!["second-package".to_string()],
            is_active: false,
            needs_restart: false,
            recommended_decision: crate_root::state::modal::ServiceRestartDecision::Defer,
            restart_decision: crate_root::state::modal::ServiceRestartDecision::Defer,
        });
    second_package
}
/// Creates and opens preflight modal with both packages.
///
/// What: Initializes and opens a Preflight modal containing both packages for
/// installation, setting up the modal state with default values.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` to update
/// - `first_package`: First package to include in the modal
/// - `second_package`: Second package to include in the modal
///
/// Output:
/// - Updates `app.install_list` to contain both packages
/// - Sets `app.modal` to Preflight variant with:
///   - Both packages in items list
///   - Install action
///   - Summary tab as initial tab
///   - Empty `dependency_info`, `file_info`, `service_info` (to be populated by tabs)
///   - Default header chips with `package_count=2`
///
/// Details:
/// - Modal starts on Summary tab
/// - All tab-specific data structures are initialized as empty
/// - Header chips indicate 2 packages, 0 download bytes, low risk level
fn open_preflight_modal(
    app: &mut crate_root::state::AppState,
    first_package: crate_root::state::PackageItem,
    second_package: crate_root::state::PackageItem,
) {
    app.install_list = vec![first_package.clone(), second_package.clone()];
    // Per-tab data is left empty here; each tab's sync step fills it from the
    // caches populated by the setup helpers when the tab is visited.
    app.modal = crate_root::state::Modal::Preflight {
        items: vec![first_package, second_package],
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count: 2,
            download_bytes: 0,
            install_delta_bytes: 0,
            aur_count: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: true,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
}
/// Syncs dependencies tab and verifies both packages' dependencies and conflicts.
///
/// What: Switches to Deps tab, syncs dependencies from cache, and verifies that
/// both packages' dependencies are correctly loaded and conflicts are detected.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` containing preflight modal and cached dependencies
///
/// Output:
/// - Switches modal tab to Deps
/// - Populates `dependency_info` with filtered dependencies from `app.install_list_deps`
/// - Resets `dep_selected` to 0
/// - Asserts that dependencies are loaded correctly
/// - Asserts that conflicts are detected
///
/// Details:
/// - Filters dependencies by checking if any `required_by` package is in the modal's items
/// - Verifies first package has 2 dependencies (first-dep-1, first-dep-2)
/// - Verifies second package has 2 dependencies (second-dep-1, conflict entry for first-dep-1)
/// - Verifies exactly 1 conflict exists (first-dep-1 version conflict)
/// - Verifies first package's first-dep-1 remains `ToInstall` with version 1.0.0
/// - Panics immediately (fail-fast) if the modal is not a Preflight modal, instead
///   of silently skipping the sync and failing later with a confusing assertion
fn test_deps_tab(app: &mut crate_root::state::AppState) {
    // Phase 1: switch to the Deps tab and sync from the cached dependency list.
    // Fail fast if the modal is wrong, rather than no-op and fail downstream.
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Deps;
    if matches!(*action, crate_root::state::PreflightAction::Install) {
        let item_names: std::collections::HashSet<String> =
            items.iter().map(|i| i.name.clone()).collect();
        let filtered: Vec<_> = app
            .install_list_deps
            .iter()
            .filter(|dep| {
                dep.required_by
                    .iter()
                    .any(|req_by| item_names.contains(req_by))
            })
            .cloned()
            .collect();
        if !filtered.is_empty() {
            *dependency_info = filtered;
            *dep_selected = 0;
        }
    }
    // Phase 2: verify the synced dependency data.
    let crate_root::state::Modal::Preflight {
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Deps,
        "Should be on Deps tab"
    );
    assert!(!dependency_info.is_empty(), "Dependencies should be loaded");
    let first_deps: Vec<_> = dependency_info
        .iter()
        .filter(|d| d.required_by.contains(&"first-package".to_string()))
        .collect();
    assert!(
        !first_deps.is_empty(),
        "First package's dependencies should be present"
    );
    assert_eq!(
        first_deps.len(),
        2,
        "First package should have 2 dependencies"
    );
    let second_deps: Vec<_> = dependency_info
        .iter()
        .filter(|d| d.required_by.contains(&"second-package".to_string()))
        .collect();
    assert!(
        !second_deps.is_empty(),
        "Second package's dependencies should be present"
    );
    assert_eq!(
        second_deps.len(),
        2,
        "Second package should have 2 dependencies (one is conflict)"
    );
    let conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| {
            matches!(
                d.status,
                crate_root::state::modal::DependencyStatus::Conflict { .. }
            )
        })
        .collect();
    assert!(!conflicts.is_empty(), "Conflict should be detected");
    assert_eq!(conflicts.len(), 1, "Should have 1 conflict");
    let conflict = conflicts[0];
    assert_eq!(conflict.name, "first-dep-1");
    assert!(conflict.required_by.contains(&"second-package".to_string()));
    let first_dep_1 = dependency_info
        .iter()
        .find(|d| {
            d.name == "first-dep-1"
                && d.required_by.contains(&"first-package".to_string())
                && matches!(
                    d.status,
                    crate_root::state::modal::DependencyStatus::ToInstall
                )
        })
        .expect("First package's first-dep-1 should still be ToInstall");
    assert_eq!(first_dep_1.version, "1.0.0");
    assert_eq!(*dep_selected, 0, "Selection should be reset to 0");
}
/// Syncs files tab and verifies both packages' files are preserved.
///
/// What: Switches to Files tab, syncs file information from cache, and verifies
/// that both packages' file data is correctly loaded and preserved.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` containing preflight modal and cached file info
///
/// Output:
/// - Switches modal tab to Files
/// - Populates `file_info` with filtered file info from `app.install_list_files`
/// - Resets `file_selected` to 0
/// - Asserts that files are loaded correctly for both packages
///
/// Details:
/// - Filters file info by checking if package name is in the modal's items
/// - Verifies first package has 2 files (1 config file with pacnew candidate)
/// - Verifies second package has 1 file (no config files)
/// - Verifies file counts, config counts, and pacnew candidates are correct
fn test_files_tab(app: &mut crate_root::state::AppState) {
    // Phase 1: mutate the modal — switch to the Files tab and sync cached
    // file info from `app.install_list_files` into the modal, mirroring what
    // the real tab-switch handler does.
    if let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Files;
        // Only file entries whose package name appears in the modal's item
        // set are copied over; unrelated cache entries are ignored.
        let item_names: std::collections::HashSet<String> =
            items.iter().map(|i| i.name.clone()).collect();
        let cached_files: Vec<_> = app
            .install_list_files
            .iter()
            .filter(|file_info| item_names.contains(&file_info.name))
            .cloned()
            .collect();
        // An empty cache leaves the modal untouched (no overwrite with nothing).
        if !cached_files.is_empty() {
            *file_info = cached_files;
            *file_selected = 0; // reset the cursor after repopulating the list
        }
    }
    // Phase 2: re-borrow immutably and verify both packages' file data survived.
    if let crate_root::state::Modal::Preflight {
        tab,
        file_info,
        file_selected,
        ..
    } = &app.modal
    {
        assert_eq!(
            *tab,
            crate_root::state::PreflightTab::Files,
            "Should be on Files tab"
        );
        assert!(!file_info.is_empty(), "Files should be loaded");
        assert_eq!(file_info.len(), 2, "Should have 2 file entries");
        // First package: 2 files, one of which is a config file predicted to
        // produce a .pacnew.
        let first_files = file_info
            .iter()
            .find(|f| f.name == "first-package")
            .expect("first-package should be found in file_info");
        assert_eq!(
            first_files.files.len(),
            2,
            "First package should have 2 files"
        );
        assert_eq!(first_files.total_count, 2);
        assert_eq!(first_files.new_count, 2);
        assert_eq!(first_files.config_count, 1);
        assert_eq!(first_files.pacnew_candidates, 1);
        // Second package: a single non-config file.
        let second_files = file_info
            .iter()
            .find(|f| f.name == "second-package")
            .expect("second-package should be found in file_info");
        assert_eq!(
            second_files.files.len(),
            1,
            "Second package should have 1 file"
        );
        assert_eq!(second_files.total_count, 1);
        assert_eq!(second_files.new_count, 1);
        assert_eq!(*file_selected, 0, "Selection should be reset to 0");
    } else {
        panic!("Expected Preflight modal");
    }
}
/// Syncs services tab and verifies both packages' services are preserved.
///
/// What: Switches to Services tab, syncs service information from cache, and
/// verifies that both packages' service data is correctly loaded and preserved.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` containing preflight modal and cached service info
///
/// Output:
/// - Switches modal tab to Services
/// - Populates `service_info` with filtered services from `app.install_list_services`
/// - Sets `services_loaded` to true
/// - Resets `service_selected` to 0
/// - Asserts that services are loaded correctly for both packages
///
/// Details:
/// - Filters services by checking if any provider is in the modal's items
/// - Verifies first package's service (first-service.service) is active and needs restart
/// - Verifies second package's service (second-service.service) is inactive and defers restart
/// - Verifies service restart decisions match expected values
fn test_services_tab(app: &mut crate_root::state::AppState) {
    // Phase 1: switch to the Services tab and sync cached service impacts
    // into the modal, as the real tab-switch handler would.
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Services;
        // Service impacts are only synced for installs; other actions skip it.
        if matches!(*action, crate_root::state::PreflightAction::Install) {
            let item_names: std::collections::HashSet<String> =
                items.iter().map(|i| i.name.clone()).collect();
            // Keep a service when at least one of its providers is among the
            // packages shown in the modal.
            let cached_services: Vec<_> = app
                .install_list_services
                .iter()
                .filter(|s| s.providers.iter().any(|p| item_names.contains(p)))
                .cloned()
                .collect();
            // An empty cache leaves the modal untouched.
            if !cached_services.is_empty() {
                *service_info = cached_services;
                *services_loaded = true;
                *service_selected = 0; // reset the cursor after repopulating
            }
        }
    }
    // Phase 2: re-borrow immutably and verify both packages' services.
    if let crate_root::state::Modal::Preflight {
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &app.modal
    {
        assert_eq!(
            *tab,
            crate_root::state::PreflightTab::Services,
            "Should be on Services tab"
        );
        assert!(*services_loaded, "Services should be marked as loaded");
        assert!(!service_info.is_empty(), "Services should be loaded");
        assert_eq!(service_info.len(), 2, "Should have 2 services");
        // First package's service: active unit that requires a restart.
        let first_svc = service_info
            .iter()
            .find(|s| s.unit_name == "first-service.service")
            .expect("first-service.service should be found in service_info");
        assert!(first_svc.is_active);
        assert!(first_svc.needs_restart);
        assert_eq!(
            first_svc.restart_decision,
            crate_root::state::modal::ServiceRestartDecision::Restart
        );
        assert!(first_svc.providers.contains(&"first-package".to_string()));
        // Second package's service: inactive unit whose restart is deferred.
        let second_svc = service_info
            .iter()
            .find(|s| s.unit_name == "second-service.service")
            .expect("second-service.service should be found in service_info");
        assert!(!second_svc.is_active);
        assert!(!second_svc.needs_restart);
        assert_eq!(
            second_svc.restart_decision,
            crate_root::state::modal::ServiceRestartDecision::Defer
        );
        assert!(second_svc.providers.contains(&"second-package".to_string()));
        assert_eq!(*service_selected, 0, "Selection should be reset to 0");
    } else {
        panic!("Expected Preflight modal");
    }
}
/// What: Final cross-tab check that both packages' cached data survived.
///
/// Inputs:
/// - `app`: Immutable reference to `AppState` holding the preflight modal
///
/// Output:
/// - Panics if dependencies, files, or services are missing for either package
///
/// Details:
/// - Runs after all tab-specific tests to prove no data was lost while
///   switching and syncing tabs (deps, files, and services all checked)
fn verify_all_data_present(app: &crate_root::state::AppState) {
    // Bail out loudly unless the preflight modal is open.
    let crate_root::state::Modal::Preflight {
        dependency_info,
        file_info,
        service_info,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    // Owned names used for `contains` lookups against Vec<String> fields.
    let first = "first-package".to_string();
    let second = "second-package".to_string();
    assert!(
        dependency_info
            .iter()
            .any(|d| d.required_by.contains(&first)),
        "First package should have dependencies"
    );
    assert!(
        dependency_info
            .iter()
            .any(|d| d.required_by.contains(&second)),
        "Second package should have dependencies"
    );
    assert!(
        file_info.iter().any(|f| f.name == first),
        "First package should have files"
    );
    assert!(
        file_info.iter().any(|f| f.name == second),
        "Second package should have files"
    );
    assert!(
        service_info.iter().any(|s| s.providers.contains(&first)),
        "First package should have services"
    );
    assert!(
        service_info.iter().any(|s| s.providers.contains(&second)),
        "Second package should have services"
    );
}
#[test]
/// What: Verify that adding a second package to the install list preserves the
/// first package's cached data.
///
/// Inputs:
/// - First package already cached, second (conflicting) package added afterwards
/// - Preflight modal opened over both packages
///
/// Output:
/// - Every tab shows both packages' data and the inter-package conflict is detected
///
/// Details:
/// - Covers the edge case where the install list grows after initial caching
/// - Walks the Deps, Files, and Services tabs, then cross-checks all data
fn preflight_preserves_first_package_when_second_package_added() {
    // Headless mode keeps the TUI from touching a real terminal.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let first_pkg = setup_first_package_with_cache(&mut app);
    let second_pkg = setup_second_package_with_conflict(&mut app);
    open_preflight_modal(&mut app, first_pkg, second_pkg);
    // Exercise each tab in turn, then confirm nothing was lost along the way.
    test_deps_tab(&mut app);
    test_files_tab(&mut app);
    test_services_tab(&mut app);
    verify_all_data_present(&app);
}
/// Helper: Set up test data for independent loading test.
///
/// Sets up test data for independent loading scenario.
///
/// What: Creates first and second packages with their dependencies, files, and services,
/// simulating a scenario where the first package is partially loaded when the second
/// package is added.
///
/// Inputs:
/// - `app`: Mutable reference to `AppState` to populate with test data
///
/// Output:
/// - Returns tuple of (`first_package`, `second_package`)
/// - Updates `app.install_list` to contain both packages
/// - Sets `app.preflight_deps_resolving` to true (simulating ongoing resolution)
/// - Sets `app.preflight_deps_items` to first package (simulating in-progress resolution)
/// - Populates `app.install_list_deps` with:
/// - First package: 1 dependency (first-dep-1)
/// - Second package: 2 dependencies (second-dep-1, conflict entry for first-dep-1)
/// - Populates `app.install_list_files` with file info for both packages
/// - Populates `app.install_list_services` with service info for second package only
///
/// Details:
/// - First package is partially loaded: dependencies partially loaded (1 of potentially more),
/// files loaded (1 file), services not loaded yet (empty)
/// - Second package is fully loaded independently: all dependencies, files, and services
/// - Includes a conflict: second package requires first-dep-1 version 2.0.0 while
/// first package requires version 1.0.0
fn setup_independent_loading_test_data(
app: &mut crate_root::state::AppState,
) -> (
crate_root::state::PackageItem,
crate_root::state::PackageItem,
) {
// First package
let first_package = crate_root::state::PackageItem {
name: "first-package".to_string(),
version: "1.0.0".to_string(),
description: String::new(),
source: crate_root::state::Source::Official {
repo: "core".to_string(),
arch: "x86_64".to_string(),
},
popularity: None,
out_of_date: None,
orphaned: false,
};
// Simulate first package being added and starting to load
app.install_list = vec![first_package.clone()];
app.preflight_cancelled
.store(false, std::sync::atomic::Ordering::Relaxed);
// First package's dependencies (partially loaded)
app.install_list_deps = vec![crate_root::state::modal::DependencyInfo {
name: "first-dep-1".to_string(),
version: "1.0.0".to_string(),
status: crate_root::state::modal::DependencyStatus::ToInstall,
source: crate_root::state::modal::DependencySource::Official {
repo: "core".to_string(),
},
required_by: vec!["first-package".to_string()],
depends_on: Vec::new(),
is_core: false,
is_system: false,
}];
// First package's files (loaded)
app.install_list_files = vec![crate_root::state::modal::PackageFileInfo {
name: "first-package".to_string(),
files: vec![crate_root::state::modal::FileChange {
path: "/usr/bin/first".to_string(),
change_type: crate_root::state::modal::FileChangeType::New,
package: "first-package".to_string(),
is_config: false,
predicted_pacnew: false,
predicted_pacsave: false,
}],
total_count: 1,
new_count: 1,
changed_count: 0,
removed_count: 0,
config_count: 0,
pacnew_candidates: 0,
pacsave_candidates: 0,
}];
// First package's services (still loading - not in cache yet)
app.install_list_services = vec![];
// Simulate first package's dependency resolution still in progress
app.preflight_deps_resolving = true;
app.preflight_deps_items = Some((
vec![first_package.clone()],
crate_root::state::modal::PreflightAction::Install,
));
// Second package
let second_package = crate_root::state::PackageItem {
name: "second-package".to_string(),
version: "2.0.0".to_string(),
description: String::new(),
source: crate_root::state::Source::Official {
repo: "extra".to_string(),
arch: "x86_64".to_string(),
},
popularity: None,
out_of_date: None,
orphaned: false,
};
// Update install list to include both packages
app.install_list = vec![first_package.clone(), second_package.clone()];
// Add second package's data to cache (independent of first package)
app.install_list_deps
.push(crate_root::state::modal::DependencyInfo {
name: "second-dep-1".to_string(),
version: "1.0.0".to_string(),
status: crate_root::state::modal::DependencyStatus::ToInstall,
source: crate_root::state::modal::DependencySource::Official {
repo: "core".to_string(),
},
required_by: vec!["second-package".to_string()],
depends_on: Vec::new(),
is_core: false,
is_system: false,
});
// Add a conflict: second package requires a different version of first-dep-1
app.install_list_deps
.push(crate_root::state::modal::DependencyInfo {
name: "first-dep-1".to_string(),
version: "2.0.0".to_string(),
status: crate_root::state::modal::DependencyStatus::Conflict {
reason: "Conflicts with first-package's dependency first-dep-1 (1.0.0)".to_string(),
},
source: crate_root::state::modal::DependencySource::Official {
repo: "core".to_string(),
},
required_by: vec!["second-package".to_string()],
depends_on: Vec::new(),
is_core: false,
is_system: false,
});
// Second package's files (loaded independently)
app.install_list_files
.push(crate_root::state::modal::PackageFileInfo {
name: "second-package".to_string(),
files: vec![
crate_root::state::modal::FileChange {
path: "/usr/bin/second".to_string(),
change_type: crate_root::state::modal::FileChangeType::New,
package: "second-package".to_string(),
is_config: false,
predicted_pacnew: false,
predicted_pacsave: false,
},
crate_root::state::modal::FileChange {
path: "/etc/second.conf".to_string(),
change_type: crate_root::state::modal::FileChangeType::New,
package: "second-package".to_string(),
is_config: true,
predicted_pacnew: false,
predicted_pacsave: false,
},
],
total_count: 2,
new_count: 2,
changed_count: 0,
removed_count: 0,
config_count: 1,
pacnew_candidates: 0,
pacsave_candidates: 0,
});
// Second package's services (loaded independently)
app.install_list_services
.push(crate_root::state::modal::ServiceImpact {
unit_name: "second-service.service".to_string(),
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | true |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/tab_switching.rs | tests/preflight_integration/tab_switching.rs | //! Tests for tab switching behavior.
use super::helpers;
use pacsea as crate_root;
/// What: Create test packages for mixed completion state testing.
///
/// Inputs:
/// - None (uses hardcoded test data)
///
/// Output:
/// - Two packages: one from an official repo (core) and one from the AUR
///
/// Details:
/// - Built via `helpers::create_test_package`; used to exercise mixed
///   package sources in the tests below
fn create_test_packages() -> Vec<crate_root::state::PackageItem> {
    let official_pkg = helpers::create_test_package(
        "test-package-1",
        "1.0.0",
        crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
    );
    let aur_pkg =
        helpers::create_test_package("test-aur-package", "2.0.0", crate_root::state::Source::Aur);
    vec![official_pkg, aur_pkg]
}
/// What: Setup app state with mixed completion states.
///
/// Inputs:
/// - `test_packages`: Packages to use in the test
///
/// Output:
/// - `AppState` with pre-populated cache (deps, files) and resolving flags (services, sandbox)
///
/// Details:
/// - Pre-populates cache with dependencies and files (loaded state)
/// - Sets services and sandbox to resolving state (not yet loaded)
/// - Configures install list and cancellation flag
#[allow(clippy::field_reassign_with_default)]
fn setup_mixed_completion_state(
    test_packages: &[crate_root::state::PackageItem],
) -> crate_root::state::AppState {
    let mut app = crate_root::state::AppState::default();
    // Pre-populate cache with dependencies (loaded)
    app.install_list_deps = vec![crate_root::state::modal::DependencyInfo {
        name: "test-dep-1".to_string(),
        version: "1.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "core".to_string(),
        },
        required_by: vec!["test-package-1".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    }];
    // Pre-populate cache with files (loaded)
    app.install_list_files = vec![crate_root::state::modal::PackageFileInfo {
        name: "test-package-1".to_string(),
        files: vec![crate_root::state::modal::FileChange {
            path: "/usr/bin/test1".to_string(),
            change_type: crate_root::state::modal::FileChangeType::New,
            package: "test-package-1".to_string(),
            is_config: false,
            predicted_pacnew: false,
            predicted_pacsave: false,
        }],
        total_count: 1,
        new_count: 1,
        changed_count: 0,
        removed_count: 0,
        config_count: 0,
        pacnew_candidates: 0,
        pacsave_candidates: 0,
    }];
    // Services are still resolving (not in cache yet)
    app.install_list_services = vec![];
    app.preflight_services_resolving = true;
    app.preflight_services_items = Some(test_packages.to_vec());
    // Sandbox is still resolving (not in cache yet)
    app.install_list_sandbox = vec![];
    app.preflight_sandbox_resolving = true;
    // Sandbox analysis only queues AUR packages, so official ones are filtered out.
    let aur_items: Vec<_> = test_packages
        .iter()
        .filter(|p| matches!(p.source, crate_root::state::Source::Aur))
        .cloned()
        .collect();
    app.preflight_sandbox_items = Some(aur_items);
    // Set packages in install list
    app.install_list = test_packages.to_vec();
    // Clear the shared cancellation flag so background resolution would proceed.
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    app
}
/// What: Initialize preflight modal with test packages.
///
/// Inputs:
/// - `app`: Application state to modify
/// - `test_packages`: Packages to include in modal
///
/// Output:
/// - `app.modal` is replaced with a fresh Preflight modal
///
/// Details:
/// - Builds the modal via the shared helper with the Install action,
///   starting on the Summary tab
fn initialize_preflight_modal(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) {
    let modal = helpers::create_preflight_modal(
        test_packages.to_vec(),
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Summary,
    );
    app.modal = modal;
}
/// What: Test switching to Deps tab with loaded data.
///
/// Inputs:
/// - `app`: Application state with pre-populated dependencies
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Switches to the Deps tab and asserts the cached dependency is shown
///   immediately (no resolving state)
fn test_switch_to_deps_tab(app: &mut crate_root::state::AppState) {
    helpers::switch_preflight_tab(app, crate_root::state::PreflightTab::Deps);
    // Tuple positions: .2 = tab, .3 = dependency_info.
    let snapshot = helpers::assert_preflight_modal(app);
    let (tab, dependency_info) = (snapshot.2, snapshot.3);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Deps,
        "Should be on Deps tab"
    );
    assert!(!dependency_info.is_empty(), "Dependencies should be loaded");
    assert_eq!(dependency_info.len(), 1, "Should have 1 dependency");
}
/// What: Test switching to Files tab with loaded data.
///
/// Inputs:
/// - `app`: Application state with pre-populated files
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Switches to the Files tab and asserts the cached file entry is shown
///   immediately (no resolving state)
fn test_switch_to_files_tab(app: &mut crate_root::state::AppState) {
    helpers::switch_preflight_tab(app, crate_root::state::PreflightTab::Files);
    // Tuple positions: .2 = tab, .4 = file_info.
    let snapshot = helpers::assert_preflight_modal(app);
    let (tab, file_info) = (snapshot.2, snapshot.4);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be on Files tab"
    );
    assert!(!file_info.is_empty(), "Files should be loaded");
    assert_eq!(file_info.len(), 1, "Should have 1 file entry");
}
/// What: Test switching to Services tab while still resolving.
///
/// Inputs:
/// - `app`: Application state with services still resolving
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Switches to the Services tab and asserts the loading state: no service
///   entries, `services_loaded` false, and the resolving flag still set
fn test_switch_to_services_tab_resolving(app: &mut crate_root::state::AppState) {
    helpers::switch_preflight_tab(app, crate_root::state::PreflightTab::Services);
    // Tuple positions: .2 = tab, .5 = service_info, .7 = services_loaded.
    let snapshot = helpers::assert_preflight_modal(app);
    let (tab, service_info, services_loaded) = (snapshot.2, snapshot.5, snapshot.7);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Services,
        "Should be on Services tab"
    );
    assert!(
        service_info.is_empty(),
        "Services should be empty (still resolving)"
    );
    assert!(
        !*services_loaded,
        "Services should not be marked as loaded (still resolving)"
    );
    assert!(
        app.preflight_services_resolving,
        "Services should still be resolving"
    );
}
/// What: Test switching to Sandbox tab while still resolving.
///
/// Inputs:
/// - `app`: Application state with sandbox still resolving
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Switches to the Sandbox tab and asserts the loading state: no sandbox
///   entries, `sandbox_loaded` false, and the resolving flag still set
fn test_switch_to_sandbox_tab_resolving(app: &mut crate_root::state::AppState) {
    helpers::switch_preflight_tab(app, crate_root::state::PreflightTab::Sandbox);
    // Tuple positions: .2 = tab, .6 = sandbox_info, .8 = sandbox_loaded.
    let snapshot = helpers::assert_preflight_modal(app);
    let (tab, sandbox_info, sandbox_loaded) = (snapshot.2, snapshot.6, snapshot.8);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Sandbox,
        "Should be on Sandbox tab"
    );
    assert!(
        sandbox_info.is_empty(),
        "Sandbox should be empty (still resolving)"
    );
    assert!(
        !*sandbox_loaded,
        "Sandbox should not be marked as loaded (still resolving)"
    );
    assert!(
        app.preflight_sandbox_resolving,
        "Sandbox should still be resolving"
    );
}
/// What: Verify dependencies persist when switching back to Deps tab.
///
/// Inputs:
/// - `app`: Application state with previously loaded dependencies
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Asserts the dependency data is still present after the tab has been
///   switched away from and back
fn test_deps_persistence(app: &crate_root::state::AppState) {
    // Tuple position .3 = dependency_info.
    let snapshot = helpers::assert_preflight_modal(app);
    let dependency_info = snapshot.3;
    assert!(
        !dependency_info.is_empty(),
        "Dependencies should still be loaded when switching back"
    );
}
/// What: Verify files persist when switching back to Files tab.
///
/// Inputs:
/// - `app`: Application state with previously loaded files
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Switches back to the Files tab and asserts the file data survived the
///   round trip through other tabs
fn test_files_persistence(app: &mut crate_root::state::AppState) {
    helpers::switch_preflight_tab(app, crate_root::state::PreflightTab::Files);
    // Tuple positions: .2 = tab, .4 = file_info.
    let snapshot = helpers::assert_preflight_modal(app);
    let (tab, file_info) = (snapshot.2, snapshot.4);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be back on Files tab"
    );
    assert!(
        !file_info.is_empty(),
        "Files should still be loaded when switching back"
    );
}
/// What: Verify mixed state is maintained correctly across all tabs.
///
/// Inputs:
/// - `app`: Application state with mixed completion states
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Loaded tabs (Deps, Files) must have data; resolving tabs (Services,
///   Sandbox) must be empty and unloaded — no corruption or mixing between them
fn verify_mixed_state(app: &crate_root::state::AppState) {
    // Tuple positions: .3 = deps, .4 = files, .5 = services, .6 = sandbox,
    // .7 = services_loaded, .8 = sandbox_loaded.
    let snapshot = helpers::assert_preflight_modal(app);
    let (dependency_info, file_info) = (snapshot.3, snapshot.4);
    let (service_info, sandbox_info) = (snapshot.5, snapshot.6);
    let (services_loaded, sandbox_loaded) = (snapshot.7, snapshot.8);
    // Tabs with data should have data
    assert!(!dependency_info.is_empty(), "Dependencies should have data");
    assert!(!file_info.is_empty(), "Files should have data");
    // Tabs still resolving should be empty
    assert!(
        service_info.is_empty(),
        "Services should be empty (still resolving)"
    );
    assert!(!*services_loaded, "Services should not be loaded");
    assert!(
        sandbox_info.is_empty(),
        "Sandbox should be empty (still resolving)"
    );
    assert!(!*sandbox_loaded, "Sandbox should not be loaded");
}
#[test]
/// What: Verify that the preflight modal handles mixed completion states
/// correctly when switching tabs.
///
/// Inputs:
/// - Packages in `install_list`; Deps and Files data cached, Services and
///   Sandbox still resolving
///
/// Output:
/// - Loaded tabs display data; resolving tabs show an empty loading state;
///   no data corruption or mixing between tabs
///
/// Details:
/// - Covers the edge case where background resolution completes at different
///   times for different tabs
fn preflight_handles_mixed_completion_states_when_switching_tabs() {
    // Headless mode keeps the TUI from touching a real terminal.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let packages = create_test_packages();
    let mut app = setup_mixed_completion_state(&packages);
    initialize_preflight_modal(&mut app, &packages);
    // Loaded tabs surface their cached data immediately.
    test_switch_to_deps_tab(&mut app);
    test_switch_to_files_tab(&mut app);
    // Resolving tabs present an empty loading state instead.
    test_switch_to_services_tab_resolving(&mut app);
    test_switch_to_sandbox_tab_resolving(&mut app);
    // Revisiting previously loaded tabs must not lose their data.
    test_deps_persistence(&app);
    test_files_persistence(&mut app);
    // Final check: the mixed loaded/resolving state is intact everywhere.
    verify_mixed_state(&app);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/tab_variations.rs | tests/preflight_integration/tab_variations.rs | //! Tests for tab switching variations.
use super::helpers::*;
use pacsea as crate_root;
#[test]
/// What: Verify that preflight modal loads data correctly regardless of tab switching order.
///
/// Inputs:
/// - Packages in `install_list` with all data cached
/// - User switches tabs in different orders (e.g., Summary → Sandbox → Deps → Files → Services)
///
/// Output:
/// - Each tab loads its data correctly when accessed
/// - Data persists when switching back to previously visited tabs
/// - No data corruption regardless of switching order
///
/// Details:
/// - Tests that tab switching order doesn't affect data loading
/// - Verifies data persistence across tab switches
/// - Ensures no race conditions or data loss
fn preflight_tab_switching_order_variations() {
    // Headless mode keeps the TUI from touching a real terminal during tests.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let test_packages = vec![
        create_test_package(
            "test-package-1",
            "1.0.0",
            crate_root::state::Source::Official {
                repo: "core".to_string(),
                arch: "x86_64".to_string(),
            },
        ),
        create_test_package("test-aur-package", "2.0.0", crate_root::state::Source::Aur),
    ];
    // Pre-populate cache with all data so every tab can load instantly.
    app.install_list_deps = vec![crate_root::state::modal::DependencyInfo {
        name: "test-dep-1".to_string(),
        version: "1.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "core".to_string(),
        },
        required_by: vec!["test-package-1".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    }];
    app.install_list_files = vec![crate_root::state::modal::PackageFileInfo {
        name: "test-package-1".to_string(),
        files: vec![crate_root::state::modal::FileChange {
            path: "/usr/bin/test1".to_string(),
            change_type: crate_root::state::modal::FileChangeType::New,
            package: "test-package-1".to_string(),
            is_config: false,
            predicted_pacnew: false,
            predicted_pacsave: false,
        }],
        total_count: 1,
        new_count: 1,
        changed_count: 0,
        removed_count: 0,
        config_count: 0,
        pacnew_candidates: 0,
        pacsave_candidates: 0,
    }];
    app.install_list_services = vec![crate_root::state::modal::ServiceImpact {
        unit_name: "test-service.service".to_string(),
        providers: vec!["test-package-1".to_string()],
        is_active: true,
        needs_restart: true,
        recommended_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
        restart_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
    }];
    // Sandbox data exists only for the AUR package.
    app.install_list_sandbox = vec![crate_root::logic::sandbox::SandboxInfo {
        package_name: "test-aur-package".to_string(),
        depends: vec![],
        makedepends: vec![],
        checkdepends: vec![],
        optdepends: vec![],
    }];
    // Set packages in install list
    app.install_list = test_packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Open preflight modal
    app.modal = create_preflight_modal(
        test_packages,
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Summary,
    );
    // Test different tab switching orders
    // Order 1: Summary → Sandbox → Deps → Files → Services
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Sandbox);
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Deps);
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Files);
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Services);
    // Verify all tabs have data after Order 1
    let (
        _,
        _,
        _,
        dependency_info,
        file_info,
        service_info,
        sandbox_info,
        services_loaded,
        sandbox_loaded,
    ) = assert_preflight_modal(&app);
    assert!(!dependency_info.is_empty(), "Deps should have data");
    assert!(!file_info.is_empty(), "Files should have data");
    assert!(!service_info.is_empty(), "Services should have data");
    assert!(*services_loaded, "Services should be loaded");
    assert!(!sandbox_info.is_empty(), "Sandbox should have data");
    assert!(*sandbox_loaded, "Sandbox should be loaded");
    // Order 2 (reverse direction, starting from Services): Files → Deps → Sandbox
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Files);
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Deps);
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Sandbox);
    // Verify all tabs still have data after Order 2
    let (
        _,
        _,
        _,
        dependency_info,
        file_info,
        service_info,
        sandbox_info,
        services_loaded,
        sandbox_loaded,
    ) = assert_preflight_modal(&app);
    assert!(!dependency_info.is_empty(), "Deps should still have data");
    assert!(!file_info.is_empty(), "Files should still have data");
    assert!(!service_info.is_empty(), "Services should still have data");
    assert!(*services_loaded, "Services should still be loaded");
    assert!(!sandbox_info.is_empty(), "Sandbox should still have data");
    assert!(*sandbox_loaded, "Sandbox should still be loaded");
    // Order 3: Sandbox → Deps (back to first tab)
    switch_preflight_tab(&mut app, crate_root::state::PreflightTab::Deps);
    // Final verification: All data persists regardless of switching order
    let (
        _,
        _,
        _,
        dependency_info,
        file_info,
        service_info,
        sandbox_info,
        services_loaded,
        sandbox_loaded,
    ) = assert_preflight_modal(&app);
    assert_eq!(dependency_info.len(), 1, "Deps should have 1 dependency");
    assert_eq!(file_info.len(), 1, "Files should have 1 file entry");
    assert_eq!(service_info.len(), 1, "Services should have 1 service");
    assert_eq!(sandbox_info.len(), 1, "Sandbox should have 1 entry");
    assert!(*services_loaded, "Services should be loaded");
    assert!(*sandbox_loaded, "Sandbox should be loaded");
    // Verify data integrity
    assert_eq!(
        dependency_info[0].name, "test-dep-1",
        "Dependency name should match"
    );
    assert_eq!(
        file_info[0].name, "test-package-1",
        "File package name should match"
    );
    assert_eq!(
        service_info[0].unit_name, "test-service.service",
        "Service unit name should match"
    );
    assert_eq!(
        sandbox_info[0].package_name, "test-aur-package",
        "Sandbox package name should match"
    );
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/caching.rs | tests/preflight_integration/caching.rs | //! Tests for cached data loading.
use pacsea as crate_root;
/// Helper: Create test packages for the test.
///
/// Builds two official-repo packages (core, extra) plus one AUR package.
/// A local constructor closure stamps out the shared official-package fields.
fn create_test_packages() -> Vec<crate_root::state::PackageItem> {
    // Closure producing an official-repo package with the common defaults.
    let official = |name: &str, version: &str, repo: &str| crate_root::state::PackageItem {
        name: name.to_string(),
        version: version.to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: repo.to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    let aur_pkg = crate_root::state::PackageItem {
        name: "test-aur-package".to_string(),
        version: "3.0.0".to_string(),
        description: String::new(),
        source: crate_root::state::Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    vec![
        official("test-package-1", "1.0.0", "core"),
        official("test-package-2", "2.0.0", "extra"),
        aur_pkg,
    ]
}
/// Helper: Seed the app cache with three dependencies covering the
/// ToInstall, Conflict, and ToUpgrade states.
fn populate_test_dependencies(app: &mut crate_root::state::AppState) {
    // Factory for a dependency from an official repo; the status varies per entry.
    let dep = |name: &str,
               version: &str,
               status: crate_root::state::modal::DependencyStatus,
               repo: &str,
               required_by: &[&str]| {
        crate_root::state::modal::DependencyInfo {
            name: name.to_string(),
            version: version.to_string(),
            status,
            source: crate_root::state::modal::DependencySource::Official {
                repo: repo.to_string(),
            },
            required_by: required_by.iter().map(|s| (*s).to_string()).collect(),
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }
    };
    app.install_list_deps = vec![
        dep(
            "test-dep-1",
            "1.0.0",
            crate_root::state::modal::DependencyStatus::ToInstall,
            "core",
            &["test-package-1"],
        ),
        dep(
            "test-conflict",
            "2.0.0",
            crate_root::state::modal::DependencyStatus::Conflict {
                reason: "Conflicts with existing-package (1.0.0)".to_string(),
            },
            "extra",
            &["test-package-2"],
        ),
        dep(
            "test-dep-2",
            "3.0.0",
            crate_root::state::modal::DependencyStatus::ToUpgrade {
                current: "2.0.0".to_string(),
                required: "3.0.0".to_string(),
            },
            "core",
            &["test-package-1", "test-package-2"],
        ),
    ];
}
/// Helper: Seed the app cache with file information for both official packages.
///
/// Package 1 carries two new files (one a config predicted to become a
/// .pacnew); package 2 carries a single changed binary.
fn populate_test_files(app: &mut crate_root::state::AppState) {
    // Factory for a single file change; predicted_pacsave is always false here.
    let change = |path: &str,
                  change_type: crate_root::state::modal::FileChangeType,
                  package: &str,
                  is_config: bool,
                  predicted_pacnew: bool| {
        crate_root::state::modal::FileChange {
            path: path.to_string(),
            change_type,
            package: package.to_string(),
            is_config,
            predicted_pacnew,
            predicted_pacsave: false,
        }
    };
    app.install_list_files = vec![
        crate_root::state::modal::PackageFileInfo {
            name: "test-package-1".to_string(),
            files: vec![
                change(
                    "/usr/bin/test1",
                    crate_root::state::modal::FileChangeType::New,
                    "test-package-1",
                    false,
                    false,
                ),
                change(
                    "/etc/test1.conf",
                    crate_root::state::modal::FileChangeType::New,
                    "test-package-1",
                    true,
                    true,
                ),
            ],
            total_count: 2,
            new_count: 2,
            changed_count: 0,
            removed_count: 0,
            config_count: 1,
            pacnew_candidates: 1,
            pacsave_candidates: 0,
        },
        crate_root::state::modal::PackageFileInfo {
            name: "test-package-2".to_string(),
            files: vec![change(
                "/usr/bin/test2",
                crate_root::state::modal::FileChangeType::Changed,
                "test-package-2",
                false,
                false,
            )],
            total_count: 1,
            new_count: 0,
            changed_count: 1,
            removed_count: 0,
            config_count: 0,
            pacnew_candidates: 0,
            pacsave_candidates: 0,
        },
    ];
}
/// Helper: Seed the app cache with two service impacts: one active service
/// needing a restart and one inactive service whose restart is deferred.
fn populate_test_services(app: &mut crate_root::state::AppState) {
    // Factory for a single-provider service impact; the recommended and the
    // actual restart decision start out identical, as in production.
    let impact = |unit: &str,
                  provider: &str,
                  is_active: bool,
                  needs_restart: bool,
                  decision: crate_root::state::modal::ServiceRestartDecision| {
        crate_root::state::modal::ServiceImpact {
            unit_name: unit.to_string(),
            providers: vec![provider.to_string()],
            is_active,
            needs_restart,
            recommended_decision: decision.clone(),
            restart_decision: decision,
        }
    };
    app.install_list_services = vec![
        impact(
            "test-service-1.service",
            "test-package-1",
            true,
            true,
            crate_root::state::modal::ServiceRestartDecision::Restart,
        ),
        impact(
            "test-service-2.service",
            "test-package-2",
            false,
            false,
            crate_root::state::modal::ServiceRestartDecision::Defer,
        ),
    ];
}
/// Helper: Seed the app cache with sandbox info for the AUR fixture package.
///
/// Every dependency delta in this fixture follows the same rule: it is
/// installed (and version-satisfied) exactly when an installed version exists.
fn populate_test_sandbox(app: &mut crate_root::state::AppState) {
    // Factory deriving is_installed/version_satisfied from the version option.
    let delta = |name: &str, installed_version: Option<&str>| {
        crate_root::logic::sandbox::DependencyDelta {
            name: name.to_string(),
            is_installed: installed_version.is_some(),
            installed_version: installed_version.map(str::to_string),
            version_satisfied: installed_version.is_some(),
        }
    };
    app.install_list_sandbox = vec![crate_root::logic::sandbox::SandboxInfo {
        package_name: "test-aur-package".to_string(),
        depends: vec![delta("dep1", Some("1.0.0")), delta("dep2", None)],
        makedepends: vec![delta("make-dep", Some("2.0.0"))],
        checkdepends: Vec::new(),
        optdepends: vec![delta("opt-dep", None)],
    }];
}
/// Helper: Construct the initial preflight modal for the given packages.
///
/// All per-tab collections start empty and all loaded flags start false so
/// the tests can observe each tab syncing from the app-level caches.
fn create_preflight_modal(
    test_packages: &[crate_root::state::PackageItem],
) -> crate_root::state::Modal {
    // Header chips mirror what the production modal computes up front;
    // the fixture list contains exactly one AUR package.
    let header_chips = crate_root::state::modal::PreflightHeaderChips {
        package_count: test_packages.len(),
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 1,
        risk_score: 0,
        risk_level: crate_root::state::modal::RiskLevel::Low,
    };
    crate_root::state::Modal::Preflight {
        items: test_packages.to_vec(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips,
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    }
}
/// Helper: Assert that a freshly opened preflight modal has no tab data yet.
///
/// Panics if the current modal is not the Preflight variant.
fn verify_initial_state(app: &crate_root::state::AppState) {
    match &app.modal {
        crate_root::state::Modal::Preflight {
            dependency_info,
            file_info,
            service_info,
            sandbox_info,
            ..
        } => {
            assert!(
                dependency_info.is_empty(),
                "Dependencies should be empty initially"
            );
            assert!(file_info.is_empty(), "Files should be empty initially");
            assert!(
                service_info.is_empty(),
                "Services should be empty initially"
            );
            assert!(sandbox_info.is_empty(), "Sandbox should be empty initially");
        }
        _ => panic!("Expected Preflight modal"),
    }
}
/// Helper: Collect the names of the given packages into a set for fast lookup.
fn get_item_names(items: &[crate_root::state::PackageItem]) -> std::collections::HashSet<String> {
    let mut names = std::collections::HashSet::new();
    for item in items {
        names.insert(item.name.clone());
    }
    names
}
/// Helper: Switch the preflight modal to the Deps tab, sync dependencies
/// from the app cache, and verify the result.
///
/// Inputs:
/// - `app`: App state whose modal must be `Modal::Preflight` and whose
///   `install_list_deps` cache was pre-populated
///
/// Details:
/// - Mirrors the production sync_dependencies logic: only the Install action
///   syncs, and only dependencies required by a package in the modal's item
///   list are copied over
/// - Asserts all three fixture statuses (ToInstall, Conflict, ToUpgrade)
///   survived the sync and that the selection index was reset
fn test_deps_tab(app: &mut crate_root::state::AppState) {
    // Switch to Deps tab and sync data
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Deps;
    // Simulate sync_dependencies logic: copy only deps required by modal items
    if matches!(*action, crate_root::state::PreflightAction::Install) {
        let item_names = get_item_names(items);
        let filtered: Vec<_> = app
            .install_list_deps
            .iter()
            .filter(|dep| {
                dep.required_by
                    .iter()
                    .any(|req_by| item_names.contains(req_by))
            })
            .cloned()
            .collect();
        // An empty filter result leaves existing modal data untouched
        if !filtered.is_empty() {
            *dependency_info = filtered;
            *dep_selected = 0;
        }
    }
    // Verify dependencies are loaded (re-borrow immutably for the assertions)
    let crate_root::state::Modal::Preflight {
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Deps,
        "Should be on Deps tab"
    );
    assert!(!dependency_info.is_empty(), "Dependencies should be loaded");
    assert_eq!(dependency_info.len(), 3, "Should have 3 dependencies");
    // Verify dependency types are present (one fixture per status variant)
    let has_to_install = dependency_info.iter().any(|d| {
        matches!(
            d.status,
            crate_root::state::modal::DependencyStatus::ToInstall
        )
    });
    let has_conflict = dependency_info.iter().any(|d| {
        matches!(
            d.status,
            crate_root::state::modal::DependencyStatus::Conflict { .. }
        )
    });
    let has_upgrade = dependency_info.iter().any(|d| {
        matches!(
            d.status,
            crate_root::state::modal::DependencyStatus::ToUpgrade { .. }
        )
    });
    assert!(has_to_install, "Should have ToInstall dependency");
    assert!(has_conflict, "Should have Conflict dependency");
    assert!(has_upgrade, "Should have ToUpgrade dependency");
    assert_eq!(*dep_selected, 0, "Selection should be reset to 0");
}
/// Helper: Switch the preflight modal to the Files tab, sync file data
/// from the app cache, and verify the result.
///
/// Inputs:
/// - `app`: App state whose modal must be `Modal::Preflight` and whose
///   `install_list_files` cache was pre-populated
///
/// Details:
/// - Mirrors the production sync_files logic; unlike deps/services, the
///   files sync here is not gated on the Install action
/// - Verifies the per-package file counters (total/new/changed/config/pacnew)
///   match the fixture data
fn test_files_tab(app: &mut crate_root::state::AppState) {
    // Switch to Files tab and sync data
    let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Files;
    // Simulate sync_files logic: keep only cache entries for modal items
    let item_names = get_item_names(items);
    let cached_files: Vec<_> = app
        .install_list_files
        .iter()
        .filter(|file_info| item_names.contains(&file_info.name))
        .cloned()
        .collect();
    // An empty cache result leaves existing modal data untouched
    if !cached_files.is_empty() {
        *file_info = cached_files;
        *file_selected = 0;
    }
    // Verify files are loaded (re-borrow immutably for the assertions)
    let crate_root::state::Modal::Preflight {
        tab,
        file_info,
        file_selected,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be on Files tab"
    );
    assert!(!file_info.is_empty(), "Files should be loaded");
    assert_eq!(file_info.len(), 2, "Should have 2 file entries");
    // Verify file data for package 1 (two new files, one config/pacnew)
    let pkg1_files = file_info
        .iter()
        .find(|f| f.name == "test-package-1")
        .expect("test-package-1 should be found in file_info");
    assert_eq!(pkg1_files.files.len(), 2, "Package 1 should have 2 files");
    assert_eq!(pkg1_files.total_count, 2);
    assert_eq!(pkg1_files.new_count, 2);
    assert_eq!(pkg1_files.config_count, 1);
    assert_eq!(pkg1_files.pacnew_candidates, 1);
    // Verify file data for package 2 (single changed file)
    let pkg2_files = file_info
        .iter()
        .find(|f| f.name == "test-package-2")
        .expect("test-package-2 should be found in file_info");
    assert_eq!(pkg2_files.files.len(), 1, "Package 2 should have 1 file");
    assert_eq!(pkg2_files.total_count, 1);
    assert_eq!(pkg2_files.changed_count, 1);
    assert_eq!(*file_selected, 0, "Selection should be reset to 0");
}
/// Helper: Switch the preflight modal to the Services tab, sync service
/// impacts from the app cache, and verify the result.
///
/// Inputs:
/// - `app`: App state whose modal must be `Modal::Preflight` and whose
///   `install_list_services` cache was pre-populated
///
/// Details:
/// - Mirrors the production sync_services logic: only the Install action
///   syncs, and a service is kept when any of its providers is in the
///   modal's item list
/// - Verifies both fixture services (active/restart and inactive/defer)
///   and that the loaded flag and selection index were set
fn test_services_tab(app: &mut crate_root::state::AppState) {
    // Switch to Services tab and sync data
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Services;
    // Simulate sync_services logic: keep services whose provider is a modal item
    if matches!(*action, crate_root::state::PreflightAction::Install) {
        let item_names = get_item_names(items);
        let cached_services: Vec<_> = app
            .install_list_services
            .iter()
            .filter(|s| s.providers.iter().any(|p| item_names.contains(p)))
            .cloned()
            .collect();
        // An empty cache result leaves existing modal data untouched
        if !cached_services.is_empty() {
            *service_info = cached_services;
            *services_loaded = true;
            *service_selected = 0;
        }
    }
    // Verify services are loaded (re-borrow immutably for the assertions)
    let crate_root::state::Modal::Preflight {
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Services,
        "Should be on Services tab"
    );
    assert!(*services_loaded, "Services should be marked as loaded");
    assert!(!service_info.is_empty(), "Services should be loaded");
    assert_eq!(service_info.len(), 2, "Should have 2 services");
    // Verify service data: fixture 1 is active and should be restarted
    let svc1 = service_info
        .iter()
        .find(|s| s.unit_name == "test-service-1.service")
        .expect("test-service-1.service should be found in service_info");
    assert!(svc1.is_active);
    assert!(svc1.needs_restart);
    assert_eq!(
        svc1.restart_decision,
        crate_root::state::modal::ServiceRestartDecision::Restart
    );
    // Fixture 2 is inactive and its restart is deferred
    let svc2 = service_info
        .iter()
        .find(|s| s.unit_name == "test-service-2.service")
        .expect("test-service-2.service should be found in service_info");
    assert!(!svc2.is_active);
    assert!(!svc2.needs_restart);
    assert_eq!(
        svc2.restart_decision,
        crate_root::state::modal::ServiceRestartDecision::Defer
    );
    assert_eq!(*service_selected, 0, "Selection should be reset to 0");
}
/// Helper: Switch the preflight modal to the Sandbox tab, sync AUR sandbox
/// info from the app cache, and verify the result.
///
/// Inputs:
/// - `app`: App state whose modal must be `Modal::Preflight` and whose
///   `install_list_sandbox` cache was pre-populated
///
/// Details:
/// - Mirrors the production sync_sandbox logic: only the Install action
///   syncs, and an entry is kept when its package name is a modal item
/// - Verifies the fixture's depends/makedepends/checkdepends/optdepends
///   counts and the per-dependency installed state
fn test_sandbox_tab(app: &mut crate_root::state::AppState) {
    // Switch to Sandbox tab and sync data
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        sandbox_info,
        sandbox_loaded,
        sandbox_selected,
        ..
    } = &mut app.modal
    else {
        panic!("Expected Preflight modal");
    };
    *tab = crate_root::state::PreflightTab::Sandbox;
    // Simulate sync_sandbox logic: keep entries whose package is a modal item
    if matches!(*action, crate_root::state::PreflightAction::Install) {
        let item_names = get_item_names(items);
        let cached_sandbox: Vec<_> = app
            .install_list_sandbox
            .iter()
            .filter(|s| item_names.contains(&s.package_name))
            .cloned()
            .collect();
        // An empty cache result leaves existing modal data untouched
        if !cached_sandbox.is_empty() {
            *sandbox_info = cached_sandbox;
            *sandbox_loaded = true;
            *sandbox_selected = 0;
        }
    }
    // Verify sandbox info is loaded (re-borrow immutably for the assertions)
    let crate_root::state::Modal::Preflight {
        tab,
        sandbox_info,
        sandbox_selected,
        sandbox_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Sandbox,
        "Should be on Sandbox tab"
    );
    assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
    assert!(!sandbox_info.is_empty(), "Sandbox info should be loaded");
    assert_eq!(sandbox_info.len(), 1, "Should have 1 sandbox entry");
    // Verify sandbox data for the single AUR fixture package
    let sandbox = sandbox_info
        .iter()
        .find(|s| s.package_name == "test-aur-package")
        .expect("test-aur-package should be found in sandbox_info");
    assert_eq!(sandbox.depends.len(), 2, "Should have 2 depends");
    assert_eq!(sandbox.makedepends.len(), 1, "Should have 1 makedepends");
    assert_eq!(sandbox.checkdepends.len(), 0, "Should have 0 checkdepends");
    assert_eq!(sandbox.optdepends.len(), 1, "Should have 1 optdepends");
    // Verify dependency details: dep1 installed, dep2 missing
    let dep1 = sandbox
        .depends
        .iter()
        .find(|d| d.name == "dep1")
        .expect("dep1 should be found in sandbox.depends");
    assert!(dep1.is_installed);
    assert_eq!(dep1.installed_version, Some("1.0.0".to_string()));
    let dep2 = sandbox
        .depends
        .iter()
        .find(|d| d.name == "dep2")
        .expect("dep2 should be found in sandbox.depends");
    assert!(!dep2.is_installed);
    assert_eq!(*sandbox_selected, 0, "Selection should be reset to 0");
}
/// Helper: Switch back to the Deps tab and assert that every tab's data is
/// still present (tab switching must not drop synced data).
fn verify_data_persistence(app: &mut crate_root::state::AppState) {
    // Return to the first tab before checking persistence.
    if let crate_root::state::Modal::Preflight { tab, .. } = &mut app.modal {
        *tab = crate_root::state::PreflightTab::Deps;
    } else {
        panic!("Expected Preflight modal");
    }
    match &app.modal {
        crate_root::state::Modal::Preflight {
            tab,
            dependency_info,
            file_info,
            service_info,
            sandbox_info,
            ..
        } => {
            assert_eq!(
                *tab,
                crate_root::state::PreflightTab::Deps,
                "Should be back on Deps tab"
            );
            assert!(
                !dependency_info.is_empty(),
                "Dependencies should still be loaded"
            );
            assert!(!file_info.is_empty(), "Files should still be loaded");
            assert!(!service_info.is_empty(), "Services should still be loaded");
            assert!(!sandbox_info.is_empty(), "Sandbox should still be loaded");
        }
        _ => panic!("Expected Preflight modal"),
    }
}
/// What: Verify that the preflight modal picks up cached data for packages
/// already present in the install list.
///
/// Inputs:
/// - Install list containing the three fixture packages
/// - Caches pre-populated with dependencies (including a conflict), files,
///   services, and sandbox data before the modal is opened
///
/// Output:
/// - Each tab (Deps, Files, Services, Sandbox) syncs and exposes its cached data
///
/// Details:
/// - Exercises the edge case where all data is cached before preflight starts
/// - Also confirms the synced data persists when switching back to a tab
#[test]
fn preflight_loads_cached_data_when_packages_already_in_install_list() {
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let packages = create_test_packages();
    // Seed every per-tab cache before the modal exists.
    populate_test_dependencies(&mut app);
    populate_test_files(&mut app);
    populate_test_services(&mut app);
    populate_test_sandbox(&mut app);
    app.install_list = packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Open the modal; all per-tab data starts empty until a sync runs.
    app.modal = create_preflight_modal(&packages);
    verify_initial_state(&app);
    // Walk through each tab and check it loads from the cache.
    test_deps_tab(&mut app);
    test_files_tab(&mut app);
    test_services_tab(&mut app);
    test_sandbox_tab(&mut app);
    // Finally, the synced data must survive switching tabs.
    verify_data_persistence(&mut app);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/helpers.rs | tests/preflight_integration/helpers.rs | //! Helper functions for preflight integration tests.
use pacsea as crate_root;
use std::collections::HashMap;
/// What: Merge dependencies with the same name into a single entry.
///
/// Inputs:
/// - `deps`: Vector of dependencies that may contain duplicates
///
/// Output:
/// - Vector of merged dependencies (one per unique name), sorted by
///   severity first and then by name
///
/// Details:
/// - Combines `required_by` lists from duplicate dependencies (unique values only)
/// - Keeps the most severe status; an entry already marked as `Conflict`
///   is never overwritten
/// - Fills in a version requirement only when the merged entry has none yet;
///   it does NOT compare two non-empty versions to pick the more restrictive one
fn merge_dependencies(
    deps: Vec<crate_root::state::modal::DependencyInfo>,
) -> Vec<crate_root::state::modal::DependencyInfo> {
    let mut merged: HashMap<String, crate_root::state::modal::DependencyInfo> = HashMap::new();
    for dep in deps {
        let dep_name = dep.name.clone();
        // First occurrence of a name seeds the merged entry with its own data
        // (depends_on is intentionally left empty in the merged view).
        let entry = merged.entry(dep_name.clone()).or_insert_with(|| {
            crate_root::state::modal::DependencyInfo {
                name: dep_name.clone(),
                version: dep.version.clone(),
                status: dep.status.clone(),
                source: dep.source.clone(),
                required_by: dep.required_by.clone(),
                depends_on: Vec::new(),
                is_core: dep.is_core,
                is_system: dep.is_system,
            }
        });
        // Merge required_by lists (combine unique values)
        for req_by in dep.required_by {
            if !entry.required_by.contains(&req_by) {
                entry.required_by.push(req_by);
            }
        }
        // Merge status (keep worst - a lower priority number means more severe)
        // Conflicts take precedence and are never replaced
        if !matches!(
            entry.status,
            crate_root::state::modal::DependencyStatus::Conflict { .. }
        ) {
            let existing_priority = dependency_priority(&entry.status);
            let new_priority = dependency_priority(&dep.status);
            if new_priority < existing_priority {
                entry.status = dep.status.clone();
            }
        }
        // Adopt a version requirement only when the entry still has none,
        // and never touch an entry already marked as a Conflict
        if !dep.version.is_empty()
            && dep.version != entry.version
            && !matches!(
                entry.status,
                crate_root::state::modal::DependencyStatus::Conflict { .. }
            )
            && entry.version.is_empty()
        {
            entry.version = dep.version;
        }
    }
    let mut result: Vec<_> = merged.into_values().collect();
    // Sort dependencies: conflicts first, then missing, then to-install, then installed
    result.sort_by(|a, b| {
        let priority_a = dependency_priority(&a.status);
        let priority_b = dependency_priority(&b.status);
        priority_a
            .cmp(&priority_b)
            .then_with(|| a.name.cmp(&b.name))
    });
    result
}
/// What: Map a dependency status onto a numeric sort priority.
///
/// Inputs:
/// - `status`: Dependency status variant subject to sorting.
///
/// Output:
/// - Returns a numeric rank where lower values sort first (higher urgency).
///
/// Details:
/// - Matches the UI ordering expectation: conflicts first, installed last.
const fn dependency_priority(status: &crate_root::state::modal::DependencyStatus) -> u8 {
    use crate_root::state::modal::DependencyStatus as Status;
    match status {
        Status::Conflict { .. } => 0,
        Status::Missing => 1,
        Status::ToInstall => 2,
        Status::ToUpgrade { .. } => 3,
        Status::Installed { .. } => 4,
    }
}
/// What: Create a test package item with the specified properties.
///
/// Inputs:
/// - `name`: Package name (anything convertible into `String`)
/// - `version`: Package version (anything convertible into `String`)
/// - `source`: Package source (Official or AUR)
///
/// Output:
/// - A `PackageItem` carrying the given identity with all optional
///   metadata cleared
///
/// Details:
/// - Produces the minimal package fixture the integration tests need
pub fn create_test_package(
    name: impl Into<String>,
    version: impl Into<String>,
    source: crate_root::state::Source,
) -> crate_root::state::PackageItem {
    let name = name.into();
    let version = version.into();
    crate_root::state::PackageItem {
        name,
        version,
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
/// What: Create a default preflight modal state for testing.
///
/// Inputs:
/// - `packages`: Vector of packages to include
/// - `action`: Preflight action (Install or Remove)
/// - `initial_tab`: Initial tab to show
///
/// Output:
/// - A `Modal::Preflight` variant whose per-tab data is empty
///
/// Details:
/// - Header chips are derived from the package list (total count and AUR share)
/// - All collections start empty and the loaded flags start false
pub fn create_preflight_modal(
    packages: Vec<crate_root::state::PackageItem>,
    action: crate_root::state::PreflightAction,
    initial_tab: crate_root::state::PreflightTab,
) -> crate_root::state::Modal {
    // Count AUR packages; everything else is an official-repo entry.
    let aur_count = packages.iter().fold(0, |acc, p| {
        if matches!(p.source, crate_root::state::Source::Aur) {
            acc + 1
        } else {
            acc
        }
    });
    let header_chips = crate_root::state::modal::PreflightHeaderChips {
        package_count: packages.len(),
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count,
        risk_score: 0,
        risk_level: crate_root::state::modal::RiskLevel::Low,
    };
    crate_root::state::Modal::Preflight {
        items: packages,
        action,
        tab: initial_tab,
        summary: None,
        summary_scroll: 0,
        header_chips,
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    }
}
/// What: Switch to a preflight tab and sync data from app cache.
///
/// Inputs:
/// - `app`: Application state with cached data
/// - `tab`: Tab to switch to
///
/// Output:
/// - Updates modal state with synced data from cache
///
/// Details:
/// - Mirrors the sync logic from `src/ui/modals/preflight/helpers/sync.rs`
/// - Only syncs data relevant to the target tab; if the modal is not the
///   Preflight variant this is a no-op
/// - Dependencies additionally sync on the Summary tab or whenever the
///   modal's dependency list is still empty
pub fn switch_preflight_tab(
    app: &mut crate_root::state::AppState,
    tab: crate_root::state::PreflightTab,
) {
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab: current_tab,
        dependency_info,
        dep_selected,
        file_info,
        file_selected,
        service_info,
        service_selected,
        services_loaded,
        sandbox_info,
        sandbox_loaded,
        ..
    } = &mut app.modal
    {
        *current_tab = tab;
        let item_names: std::collections::HashSet<String> =
            items.iter().map(|i| i.name.clone()).collect();
        // Sync dependencies (Install only; Deps/Summary tab or empty dep list)
        if matches!(*action, crate_root::state::PreflightAction::Install)
            && (matches!(tab, crate_root::state::PreflightTab::Deps)
                || matches!(tab, crate_root::state::PreflightTab::Summary)
                || dependency_info.is_empty())
        {
            // Keep only deps required by a package shown in the modal
            let filtered: Vec<_> = app
                .install_list_deps
                .iter()
                .filter(|dep| {
                    dep.required_by
                        .iter()
                        .any(|req_by| item_names.contains(req_by))
                })
                .cloned()
                .collect();
            if !filtered.is_empty() {
                let was_empty = dependency_info.is_empty();
                // Merge dependencies with the same name
                *dependency_info = merge_dependencies(filtered);
                // Only reset the cursor on the first population, so a user's
                // selection survives re-syncs
                if was_empty {
                    *dep_selected = 0;
                }
            }
        }
        // Sync files (not gated on the Install action)
        if matches!(tab, crate_root::state::PreflightTab::Files) {
            let cached_files: Vec<_> = app
                .install_list_files
                .iter()
                .filter(|file_info| item_names.contains(&file_info.name))
                .cloned()
                .collect();
            if !cached_files.is_empty() {
                *file_info = cached_files;
                *file_selected = 0;
            }
        }
        // Sync services (Install only)
        if matches!(*action, crate_root::state::PreflightAction::Install)
            && matches!(tab, crate_root::state::PreflightTab::Services)
        {
            let cached_services: Vec<_> = app
                .install_list_services
                .iter()
                .filter(|s| s.providers.iter().any(|p| item_names.contains(p)))
                .cloned()
                .collect();
            if !cached_services.is_empty() {
                *service_info = cached_services;
                *services_loaded = true;
                *service_selected = 0;
            }
        }
        // Sync sandbox (Install only)
        // NOTE(review): unlike the services branch, this does not reset a
        // sandbox selection index — presumably intentional to mirror
        // production sync behavior; confirm against sync.rs
        if matches!(*action, crate_root::state::PreflightAction::Install)
            && matches!(tab, crate_root::state::PreflightTab::Sandbox)
        {
            let cached_sandbox: Vec<_> = app
                .install_list_sandbox
                .iter()
                .filter(|s| item_names.contains(&s.package_name))
                .cloned()
                .collect();
            if !cached_sandbox.is_empty() {
                *sandbox_info = cached_sandbox;
                *sandbox_loaded = true;
            }
        }
    }
}
/// What: Assert that the modal is a Preflight variant and return its fields.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Tuple of references to the Preflight modal fields (items, action, tab,
///   dependency/file/service/sandbox data, and the two loaded flags)
///
/// Details:
/// - Panics if the current modal is not the Preflight variant
/// - Convenience accessor used by verification helpers across the tests
#[allow(clippy::type_complexity)]
pub fn assert_preflight_modal(
    app: &crate_root::state::AppState,
) -> (
    &Vec<crate_root::state::PackageItem>,
    &crate_root::state::PreflightAction,
    &crate_root::state::PreflightTab,
    &Vec<crate_root::state::modal::DependencyInfo>,
    &Vec<crate_root::state::modal::PackageFileInfo>,
    &Vec<crate_root::state::modal::ServiceImpact>,
    &Vec<crate_root::logic::sandbox::SandboxInfo>,
    &bool,
    &bool,
) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        file_info,
        service_info,
        sandbox_info,
        services_loaded,
        sandbox_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    (
        items,
        action,
        tab,
        dependency_info,
        file_info,
        service_info,
        sandbox_info,
        services_loaded,
        sandbox_loaded,
    )
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/skip_preflight.rs | tests/preflight_integration/skip_preflight.rs | //! Tests for `skip_preflight` setting functionality.
//!
//! Tests cover:
//! - Preflight modal is opened when `skip_preflight` = false (default)
//! - `open_preflight_modal` function is callable and handles packages correctly
use pacsea as crate_root;
/// What: Ensure `open_preflight_modal` completes without panicking for a
/// single package.
///
/// Details:
/// - Builds a default app state and one AUR package
/// - Invokes `open_preflight_modal` with `use_cache = true`
/// - The resulting modal state depends on the `skip_preflight` setting on
///   disk, so the test only asserts that the call returns
#[test]
fn test_open_preflight_modal_callable() {
    let mut state = crate_root::state::AppState::default();
    let pkg = crate_root::state::PackageItem {
        name: "test-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: "Test package".to_string(),
        source: crate_root::state::Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    // Must not panic regardless of the configured skip_preflight value.
    crate_root::events::open_preflight_modal(&mut state, vec![pkg], true);
}
/// What: Ensure `open_preflight_modal` handles a mixed AUR/official package
/// list without panicking.
///
/// Details:
/// - Builds one AUR and one official (core) package
/// - Invokes `open_preflight_modal` with `use_cache = true`
/// - Actual modal state depends on the skip_preflight setting on disk
#[test]
fn test_open_preflight_modal_multiple_packages() {
    let mut state = crate_root::state::AppState::default();
    let mut packages = Vec::with_capacity(2);
    packages.push(crate_root::state::PackageItem {
        name: "pkg1".to_string(),
        version: "1.0.0".to_string(),
        description: "Test package 1".to_string(),
        source: crate_root::state::Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    });
    packages.push(crate_root::state::PackageItem {
        name: "pkg2".to_string(),
        version: "2.0.0".to_string(),
        description: "Test package 2".to_string(),
        source: crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    });
    // Must not panic with more than one package in the batch.
    crate_root::events::open_preflight_modal(&mut state, packages, true);
}
/// What: Ensure `open_preflight_modal` works with caching disabled.
///
/// Details:
/// - Builds a default app state and one AUR package
/// - Invokes `open_preflight_modal` with `use_cache = false`
/// - Actual modal state depends on the skip_preflight setting on disk
#[test]
fn test_open_preflight_modal_no_cache() {
    let mut state = crate_root::state::AppState::default();
    let pkg = crate_root::state::PackageItem {
        name: "test-pkg".to_string(),
        version: "1.0.0".to_string(),
        description: "Test package".to_string(),
        source: crate_root::state::Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    // Must not panic when the cache is bypassed.
    crate_root::events::open_preflight_modal(&mut state, vec![pkg], false);
}
/// What: Verify that the Updates modal stores its entries, scroll offset,
/// and selection as provided.
///
/// Details:
/// - Installs an Updates modal with two (name, old, new) version tuples
/// - Destructures the modal back out and checks every field round-tripped
#[test]
fn test_updates_modal_initialization() {
    let mut app = crate_root::state::AppState::default();
    let entries = vec![
        (
            "test-pkg".to_string(),
            "1.0.0".to_string(),
            "1.1.0".to_string(),
        ),
        (
            "another-pkg".to_string(),
            "2.0.0".to_string(),
            "2.1.0".to_string(),
        ),
    ];
    app.modal = crate_root::state::Modal::Updates {
        entries,
        scroll: 0,
        selected: 0,
    };
    // Read the modal back and confirm every field survived.
    let crate_root::state::Modal::Updates {
        entries: stored,
        scroll,
        selected,
    } = &app.modal
    else {
        panic!("Expected Updates modal");
    };
    assert_eq!(stored.len(), 2);
    assert_eq!(stored[0].0, "test-pkg");
    assert_eq!(stored[1].0, "another-pkg");
    assert_eq!(*scroll, 0);
    assert_eq!(*selected, 0);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/persistence.rs | tests/preflight_integration/persistence.rs | //! Tests for persistence across tabs.
use pacsea as crate_root;
/// Helper: Create test packages for service restart decision tests.
///
/// Output:
/// - Vector of test `PackageItem` instances
fn create_test_packages() -> Vec<crate_root::state::PackageItem> {
vec![
crate_root::state::PackageItem {
name: "test-package-1".to_string(),
version: "1.0.0".to_string(),
description: String::new(),
source: crate_root::state::Source::Official {
repo: "core".to_string(),
arch: "x86_64".to_string(),
},
popularity: None,
out_of_date: None,
orphaned: false,
},
crate_root::state::PackageItem {
name: "test-package-2".to_string(),
version: "2.0.0".to_string(),
description: String::new(),
source: crate_root::state::Source::Official {
repo: "extra".to_string(),
arch: "x86_64".to_string(),
},
popularity: None,
out_of_date: None,
orphaned: false,
},
]
}
/// Helper: Create test services for service restart decision tests.
///
/// Output:
/// - Vector of test `ServiceImpact` instances
fn create_test_services() -> Vec<crate_root::state::modal::ServiceImpact> {
    use crate_root::state::modal::ServiceRestartDecision;
    // Local constructor: an active unit that needs a restart defaults to
    // Restart; an inactive one that does not defaults to Defer.
    let service = |unit: &str, provider: &str, restart: bool| {
        let decision = if restart {
            ServiceRestartDecision::Restart
        } else {
            ServiceRestartDecision::Defer
        };
        crate_root::state::modal::ServiceImpact {
            unit_name: unit.to_string(),
            providers: vec![provider.to_string()],
            is_active: restart,
            needs_restart: restart,
            recommended_decision: decision,
            restart_decision: decision,
        }
    };
    vec![
        service("service-1.service", "test-package-1", true),
        service("service-2.service", "test-package-1", false),
        service("service-3.service", "test-package-2", true),
    ]
}
/// Helper: Create a preflight modal with default settings.
///
/// Inputs:
/// - `test_packages`: Vector of packages to include in modal
///
/// Output:
/// - Preflight modal instance
fn create_preflight_modal(
    test_packages: &[crate_root::state::PackageItem],
) -> crate_root::state::Modal {
    // Header chips reflect only the package count; sizes and risk are zeroed.
    let header_chips = crate_root::state::modal::PreflightHeaderChips {
        package_count: test_packages.len(),
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 0,
        risk_score: 0,
        risk_level: crate_root::state::modal::RiskLevel::Low,
    };
    crate_root::state::Modal::Preflight {
        items: test_packages.to_vec(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips,
        // Deps tab starts empty with nothing expanded.
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        // Files tab starts empty.
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        // Services tab starts unloaded so the first switch performs a sync.
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        // Sandbox is pre-marked loaded; these tests never sync it.
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: true,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    }
}
/// Helper: Switch to Services tab and sync services from cache.
///
/// Inputs:
/// - `app`: Application state with `install_list_services` populated
///
/// Details:
/// - Switches modal to Services tab
/// - Syncs services from `install_list_services` cache
fn switch_to_services_tab_and_sync(app: &mut crate_root::state::AppState) {
    // Bail out unless a Preflight modal is open.
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Services;
    // Only install-mode preflights read the install-list service cache.
    if !matches!(*action, crate_root::state::PreflightAction::Install) {
        return;
    }
    let item_names: std::collections::HashSet<String> =
        items.iter().map(|i| i.name.clone()).collect();
    // Keep only cached services provided by a package shown in the modal.
    let cached_services: Vec<_> = app
        .install_list_services
        .iter()
        .filter(|s| s.providers.iter().any(|p| item_names.contains(p)))
        .cloned()
        .collect();
    if !cached_services.is_empty() {
        *service_info = cached_services;
        *services_loaded = true;
        *service_selected = 0;
    }
}
/// Helper: Re-sync services while preserving existing decisions.
///
/// Inputs:
/// - `app`: Application state with modified service decisions
///
/// Details:
/// - Re-syncs services from cache but preserves user-modified decisions
fn resync_services_preserving_decisions(app: &mut crate_root::state::AppState) {
if let crate_root::state::Modal::Preflight {
items,
action,
service_info,
service_selected,
services_loaded,
..
} = &mut app.modal
&& matches!(*action, crate_root::state::PreflightAction::Install)
&& !items.is_empty()
{
let item_names: std::collections::HashSet<String> =
items.iter().map(|i| i.name.clone()).collect();
let cached_services: Vec<_> = app
.install_list_services
.iter()
.filter(|s| s.providers.iter().any(|p| item_names.contains(p)))
.cloned()
.collect();
if !cached_services.is_empty() {
let existing_decisions: std::collections::HashMap<String, _> = service_info
.iter()
.map(|s| (s.unit_name.clone(), s.restart_decision))
.collect();
*service_info = cached_services;
for service in service_info.iter_mut() {
if let Some(&decision) = existing_decisions.get(&service.unit_name) {
service.restart_decision = decision;
}
}
*services_loaded = true;
*service_selected = 0;
}
}
}
/// Helper: Switch to Deps tab and sync dependencies.
///
/// Inputs:
/// - `app`: Application state
fn switch_to_deps_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Deps;
    // Only install-mode preflights read the install-list dependency cache.
    if !matches!(*action, crate_root::state::PreflightAction::Install) {
        return;
    }
    let item_names: std::collections::HashSet<String> =
        items.iter().map(|i| i.name.clone()).collect();
    // A cached dependency is relevant when any of its reverse requirements
    // is one of the packages shown in the modal.
    let filtered: Vec<_> = app
        .install_list_deps
        .iter()
        .filter(|dep| dep.required_by.iter().any(|r| item_names.contains(r)))
        .cloned()
        .collect();
    if !filtered.is_empty() {
        *dependency_info = filtered;
        *dep_selected = 0;
    }
}
/// Helper: Switch to Files tab and sync files.
///
/// Inputs:
/// - `app`: Application state
///
/// Details:
/// - Switches the open Preflight modal to the Files tab, then replaces
///   `file_info` with entries from `install_list_files` belonging to the
///   packages shown in the modal (only when the cache has matches).
fn switch_to_files_tab(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Files;
        let item_names: std::collections::HashSet<String> =
            items.iter().map(|i| i.name.clone()).collect();
        // The closure parameter is named `entry` (not `file_info`) to avoid
        // shadowing the modal's `file_info` field destructured above.
        let cached_files: Vec<_> = app
            .install_list_files
            .iter()
            .filter(|entry| item_names.contains(&entry.name))
            .cloned()
            .collect();
        if !cached_files.is_empty() {
            *file_info = cached_files;
            *file_selected = 0;
        }
    }
}
/// Helper: Verify service restart decisions match expected values.
///
/// Inputs:
/// - `app`: Application state
/// - `expected_decisions`: `HashMap` mapping service unit names to expected decisions
/// - `context`: Context string for error messages
fn verify_service_decisions(
    app: &crate_root::state::AppState,
    expected_decisions: &std::collections::HashMap<
        String,
        crate_root::state::modal::ServiceRestartDecision,
    >,
    context: &str,
) {
    let crate_root::state::Modal::Preflight { service_info, .. } = &app.modal else {
        panic!("Expected Preflight modal");
    };
    for (unit_name, expected_decision) in expected_decisions {
        // Each expected unit must exist in the modal's service list.
        let service = service_info
            .iter()
            .find(|s| s.unit_name == *unit_name)
            .unwrap_or_else(|| panic!("{unit_name} should be found in service_info"));
        assert_eq!(
            service.restart_decision, *expected_decision,
            "{unit_name} should be {expected_decision:?} {context}"
        );
    }
}
/// Helper: Modify service restart decisions (simulating user toggles).
///
/// Inputs:
/// - `app`: Application state
/// - `modifications`: Vector of (`unit_name`, `new_decision`) tuples
fn modify_service_decisions(
    app: &mut crate_root::state::AppState,
    modifications: &[(&str, crate_root::state::modal::ServiceRestartDecision)],
) {
    // Silently do nothing when no Preflight modal is open.
    let crate_root::state::Modal::Preflight { service_info, .. } = &mut app.modal else {
        return;
    };
    for &(unit_name, new_decision) in modifications {
        // Unknown unit names are ignored, matching a no-op toggle.
        if let Some(service) = service_info.iter_mut().find(|s| s.unit_name == unit_name) {
            service.restart_decision = new_decision;
        }
    }
}
/// Helper: Verify initial service decisions after loading.
///
/// Inputs:
/// - `app`: Application state
fn verify_initial_service_decisions(app: &crate_root::state::AppState) {
    use crate_root::state::modal::ServiceRestartDecision as D;
    let crate_root::state::Modal::Preflight { service_info, .. } = &app.modal else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(service_info.len(), 3, "Should have 3 services");
    // The freshly-loaded decisions mirror the recommendations from the cache.
    let expected = [D::Restart, D::Defer, D::Restart];
    for (service, want) in service_info.iter().zip(expected) {
        assert_eq!(service.restart_decision, want);
    }
}
/// Helper: Verify all service decisions after final sync.
///
/// Inputs:
/// - `app`: Application state
fn verify_final_service_decisions(app: &crate_root::state::AppState) {
    use crate_root::state::modal::ServiceRestartDecision as D;
    let crate_root::state::Modal::Preflight {
        service_info,
        services_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert!(*services_loaded, "Services should be marked as loaded");
    assert_eq!(service_info.len(), 3, "Should have 3 services");
    // service-1 was toggled to Defer, service-2 to Restart; service-3 untouched.
    let expected: std::collections::HashMap<String, D> = [
        ("service-1.service", D::Defer),
        ("service-2.service", D::Restart),
        ("service-3.service", D::Restart),
    ]
    .into_iter()
    .map(|(unit, d)| (unit.to_string(), d))
    .collect();
    verify_service_decisions(app, &expected, "after switching back to Services");
}
#[test]
/// What: Verify that service restart decisions persist when switching tabs.
///
/// Inputs:
/// - Packages in `install_list` with services
/// - Preflight modal opened with services loaded
/// - User changes service restart decisions in Services tab
/// - User switches to other tabs and back
///
/// Output:
/// - Service restart decisions remain unchanged when switching tabs
/// - Modified decisions persist across tab switches
/// - All services maintain their `restart_decision` values
///
/// Details:
/// - Tests that user choices for service restart decisions are preserved
/// - Verifies modal state correctly maintains service decisions
/// - Ensures no data loss when switching tabs
fn preflight_persists_service_restart_decisions_across_tabs() {
    use crate_root::state::modal::ServiceRestartDecision as D;
    // Small helper to build an expected unit-name -> decision map.
    let expect = |pairs: &[(&str, D)]| -> std::collections::HashMap<String, D> {
        pairs.iter().map(|&(u, d)| (u.to_string(), d)).collect()
    };
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let test_packages = create_test_packages();
    // Pre-populate the cache with services and packages.
    app.install_list_services = create_test_services();
    app.install_list = test_packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    app.modal = create_preflight_modal(&test_packages);
    // Test 1: Switch to Services tab and load services.
    switch_to_services_tab_and_sync(&mut app);
    verify_initial_service_decisions(&app);
    // Test 2: Modify service restart decisions (simulating user toggles).
    modify_service_decisions(
        &mut app,
        &[
            ("service-1.service", D::Defer),
            ("service-2.service", D::Restart),
        ],
    );
    // Verify the modified decisions took effect.
    let expected_after_modify = expect(&[
        ("service-1.service", D::Defer),
        ("service-2.service", D::Restart),
        ("service-3.service", D::Restart),
    ]);
    verify_service_decisions(&app, &expected_after_modify, "after toggle");
    // Test 3: Switch to Deps tab - decisions should persist.
    switch_to_deps_tab(&mut app);
    let expected_after_deps = expect(&[
        ("service-1.service", D::Defer),
        ("service-2.service", D::Restart),
    ]);
    verify_service_decisions(&app, &expected_after_deps, "after switching to Deps");
    // Test 4: Switch to Files tab - decisions should persist.
    switch_to_files_tab(&mut app);
    verify_service_decisions(&app, &expected_after_deps, "after switching to Files");
    // Test 5: Switch back to Services tab - decisions should still persist.
    if let crate_root::state::Modal::Preflight { tab, .. } = &mut app.modal {
        *tab = crate_root::state::PreflightTab::Services;
    }
    resync_services_preserving_decisions(&mut app);
    verify_final_service_decisions(&app);
    // Final verification: all decisions are preserved.
    let final_expected = expect(&[
        ("service-1.service", D::Defer),
        ("service-2.service", D::Restart),
        ("service-3.service", D::Restart),
    ]);
    verify_service_decisions(&app, &final_expected, "");
}
/// Helper function to create test AUR packages for optdepends tests.
///
/// What: Creates a vector of test AUR packages.
///
/// Output:
/// - Vector of two test AUR packages
fn create_test_aur_packages() -> Vec<crate_root::state::PackageItem> {
    // Both packages are AUR-sourced and differ only in name and version.
    [("test-aur-pkg-1", "1.0.0"), ("test-aur-pkg-2", "2.0.0")]
        .iter()
        .map(|&(name, version)| crate_root::state::PackageItem {
            name: name.to_string(),
            version: version.to_string(),
            description: String::new(),
            source: crate_root::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        })
        .collect()
}
/// Helper function to create sandbox info with optdepends.
///
/// What: Creates sandbox info for test packages with optional dependencies.
///
/// Output:
/// - Vector of `SandboxInfo` with optdepends
fn create_sandbox_info() -> Vec<crate_root::logic::sandbox::SandboxInfo> {
    // Local constructor for an uninstalled optional dependency entry.
    let optdep = |name: &str| crate_root::logic::sandbox::DependencyDelta {
        name: name.to_string(),
        is_installed: false,
        installed_version: None,
        version_satisfied: false,
    };
    // Local constructor for a sandbox record with only optdepends populated.
    let sandbox = |pkg: &str, optdepends: Vec<crate_root::logic::sandbox::DependencyDelta>| {
        crate_root::logic::sandbox::SandboxInfo {
            package_name: pkg.to_string(),
            depends: vec![],
            makedepends: vec![],
            checkdepends: vec![],
            optdepends,
        }
    };
    vec![
        sandbox(
            "test-aur-pkg-1",
            vec![
                optdep("optdep-1>=1.0.0"),
                optdep("optdep-2"),
                optdep("optdep-3: description"),
            ],
        ),
        sandbox("test-aur-pkg-2", vec![optdep("optdep-4")]),
    ]
}
/// Helper function to setup preflight modal for optdepends tests.
///
/// What: Creates and configures a preflight modal with test packages.
///
/// Inputs:
/// - `test_packages`: Vector of test packages
///
/// Output:
/// - Configured Preflight modal
fn setup_preflight_modal(
    test_packages: &[crate_root::state::PackageItem],
) -> crate_root::state::Modal {
    // Two AUR packages are expected by these tests; sizes and risk stay zeroed.
    let header_chips = crate_root::state::modal::PreflightHeaderChips {
        package_count: test_packages.len(),
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 2,
        risk_score: 0,
        risk_level: crate_root::state::modal::RiskLevel::Low,
    };
    crate_root::state::Modal::Preflight {
        items: test_packages.to_vec(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips,
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        // Sandbox starts unloaded so the Sandbox tab performs a sync.
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    }
}
/// Helper function to switch to Sandbox tab and sync sandbox info.
///
/// What: Switches modal to Sandbox tab and loads sandbox info from cache.
///
/// Inputs:
/// - app: Mutable reference to `AppState`
fn switch_to_sandbox_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        sandbox_info,
        sandbox_loaded,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Sandbox;
    // Only install-mode preflights read the install-list sandbox cache.
    if !matches!(*action, crate_root::state::PreflightAction::Install) {
        return;
    }
    let item_names: std::collections::HashSet<String> =
        items.iter().map(|i| i.name.clone()).collect();
    // Only sandbox records belonging to packages shown in the modal apply.
    let cached_sandbox: Vec<_> = app
        .install_list_sandbox
        .iter()
        .filter(|s| item_names.contains(&s.package_name))
        .cloned()
        .collect();
    if !cached_sandbox.is_empty() {
        *sandbox_info = cached_sandbox;
        *sandbox_loaded = true;
    }
}
/// Helper function to select optional dependencies.
///
/// What: Simulates user selecting optdepends for test packages.
///
/// Inputs:
/// - app: Mutable reference to `AppState`
///
/// Details:
/// - Marks `optdep-1>=1.0.0` and `optdep-2` for `test-aur-pkg-1`, and
///   `optdep-4` for `test-aur-pkg-2`.
/// - No-op when no Preflight modal is open.
fn select_optional_dependencies(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight {
        selected_optdepends,
        ..
    } = &mut app.modal
    {
        // Use `or_default()` and a single entry lookup per package instead of
        // repeating `entry(..).or_insert_with(HashSet::new)` for every optdep.
        let pkg1 = selected_optdepends
            .entry("test-aur-pkg-1".to_string())
            .or_default();
        pkg1.insert("optdep-1>=1.0.0".to_string());
        pkg1.insert("optdep-2".to_string());
        selected_optdepends
            .entry("test-aur-pkg-2".to_string())
            .or_default()
            .insert("optdep-4".to_string());
    }
}
/// Helper function to verify initial selections.
///
/// What: Verifies that optdepends selections are correct after initial selection.
///
/// Inputs:
/// - app: Reference to `AppState`
fn verify_initial_selections(app: &crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        selected_optdepends,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        selected_optdepends.len(),
        2,
        "Should have selections for 2 packages"
    );
    for pkg in ["test-aur-pkg-1", "test-aur-pkg-2"] {
        assert!(
            selected_optdepends.contains_key(pkg),
            "Should have selections for {pkg}"
        );
    }
    // test-aur-pkg-1 carries exactly two selected optdepends.
    let pkg1_selections = selected_optdepends
        .get("test-aur-pkg-1")
        .expect("test-aur-pkg-1 should be in selected_optdepends");
    assert_eq!(
        pkg1_selections.len(),
        2,
        "test-aur-pkg-1 should have 2 selections"
    );
    for dep in ["optdep-1>=1.0.0", "optdep-2"] {
        assert!(pkg1_selections.contains(dep), "Should have {dep} selected");
    }
    // test-aur-pkg-2 carries a single selected optdepend.
    let pkg2_selections = selected_optdepends
        .get("test-aur-pkg-2")
        .expect("test-aur-pkg-2 should be in selected_optdepends");
    assert_eq!(
        pkg2_selections.len(),
        1,
        "test-aur-pkg-2 should have 1 selection"
    );
    assert!(
        pkg2_selections.contains("optdep-4"),
        "Should have optdep-4 selected"
    );
}
/// Helper function to switch to a tab and verify selections persist.
///
/// What: Switches to a different tab and verifies optdepends selections persist.
///
/// Inputs:
/// - app: Mutable reference to `AppState`
/// - tab: The tab to switch to
///
/// Details:
/// - The Deps/Files/Services arms mirror the per-tab sync logic: set the
///   active tab, then refresh the modal's list from the matching
///   `install_list_*` cache, filtered to the packages shown in the modal.
/// - Any other tab falls through without syncing.
/// - Afterwards asserts that `selected_optdepends` still holds the selections
///   made on the Sandbox tab (2 deps for pkg-1, 1 dep for pkg-2).
fn switch_tab_and_verify_persistence(
    app: &mut crate_root::state::AppState,
    tab: crate_root::state::PreflightTab,
) {
    match tab {
        crate_root::state::PreflightTab::Deps => {
            if let crate_root::state::Modal::Preflight {
                items,
                action,
                tab: current_tab,
                dependency_info,
                dep_selected,
                ..
            } = &mut app.modal
            {
                *current_tab = crate_root::state::PreflightTab::Deps;
                // Deps are only synced for install-mode preflights.
                if matches!(*action, crate_root::state::PreflightAction::Install) {
                    let item_names: std::collections::HashSet<String> =
                        items.iter().map(|i| i.name.clone()).collect();
                    // Keep cached deps required by a package in the modal.
                    let filtered: Vec<_> = app
                        .install_list_deps
                        .iter()
                        .filter(|dep| {
                            dep.required_by
                                .iter()
                                .any(|req_by| item_names.contains(req_by))
                        })
                        .cloned()
                        .collect();
                    if !filtered.is_empty() {
                        *dependency_info = filtered;
                        *dep_selected = 0;
                    }
                }
            }
        }
        crate_root::state::PreflightTab::Files => {
            if let crate_root::state::Modal::Preflight {
                items,
                tab: current_tab,
                file_info,
                file_selected,
                ..
            } = &mut app.modal
            {
                *current_tab = crate_root::state::PreflightTab::Files;
                let item_names: std::collections::HashSet<String> =
                    items.iter().map(|i| i.name.clone()).collect();
                // Keep cached file reports for packages shown in the modal.
                // (The closure parameter shadows the modal's `file_info`
                // binding destructured above.)
                let cached_files: Vec<_> = app
                    .install_list_files
                    .iter()
                    .filter(|file_info| item_names.contains(&file_info.name))
                    .cloned()
                    .collect();
                if !cached_files.is_empty() {
                    *file_info = cached_files;
                    *file_selected = 0;
                }
            }
        }
        crate_root::state::PreflightTab::Services => {
            if let crate_root::state::Modal::Preflight {
                items,
                action,
                tab: current_tab,
                service_info,
                service_selected,
                services_loaded,
                ..
            } = &mut app.modal
            {
                *current_tab = crate_root::state::PreflightTab::Services;
                // Services are only synced for install-mode preflights.
                if matches!(*action, crate_root::state::PreflightAction::Install) {
                    let item_names: std::collections::HashSet<String> =
                        items.iter().map(|i| i.name.clone()).collect();
                    // Keep cached services provided by a package in the modal.
                    let cached_services: Vec<_> = app
                        .install_list_services
                        .iter()
                        .filter(|s| s.providers.iter().any(|p| item_names.contains(p)))
                        .cloned()
                        .collect();
                    if !cached_services.is_empty() {
                        *service_info = cached_services;
                        *services_loaded = true;
                        *service_selected = 0;
                    }
                }
            }
        }
        _ => {}
    }
    // Verify selections persist
    if let crate_root::state::Modal::Preflight {
        selected_optdepends,
        ..
    } = &app.modal
    {
        assert_eq!(
            selected_optdepends.len(),
            2,
            "Should still have selections for 2 packages after switching tabs"
        );
        let pkg1_selections = selected_optdepends
            .get("test-aur-pkg-1")
            .expect("test-aur-pkg-1 should be in selected_optdepends");
        assert_eq!(
            pkg1_selections.len(),
            2,
            "test-aur-pkg-1 should still have 2 selections"
        );
        let pkg2_selections = selected_optdepends
            .get("test-aur-pkg-2")
            .expect("test-aur-pkg-2 should be in selected_optdepends");
        assert_eq!(
            pkg2_selections.len(),
            1,
            "test-aur-pkg-2 should still have 1 selection"
        );
    } else {
        panic!("Expected Preflight modal");
    }
}
/// Helper function to verify final selections after switching back to Sandbox.
///
/// What: Verifies all optdepends selections are preserved after tab switches.
///
/// Inputs:
/// - app: Reference to `AppState`
fn verify_final_selections(app: &crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        selected_optdepends,
        sandbox_info,
        sandbox_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
    assert_eq!(sandbox_info.len(), 2, "Should have 2 sandbox entries");
    assert_eq!(
        selected_optdepends.len(),
        2,
        "Should still have selections for 2 packages after switching back to Sandbox"
    );
    let pkg1_selections = selected_optdepends
        .get("test-aur-pkg-1")
        .expect("test-aur-pkg-1 should be in selected_optdepends");
    assert_eq!(
        pkg1_selections.len(),
        2,
        "test-aur-pkg-1 should still have 2 selections"
    );
    for dep in ["optdep-1>=1.0.0", "optdep-2"] {
        assert!(
            pkg1_selections.contains(dep),
            "{dep} should still be selected"
        );
    }
    // The third optdep was deliberately never chosen.
    assert!(
        !pkg1_selections.contains("optdep-3: description"),
        "optdep-3 should NOT be selected"
    );
    let pkg2_selections = selected_optdepends
        .get("test-aur-pkg-2")
        .expect("test-aur-pkg-2 should be in selected_optdepends");
    assert_eq!(
        pkg2_selections.len(),
        1,
        "test-aur-pkg-2 should still have 1 selection"
    );
    assert!(
        pkg2_selections.contains("optdep-4"),
        "optdep-4 should still be selected"
    );
}
/// Helper function to verify `HashMap` structure is correct.
///
/// What: Verifies that `selected_optdepends` `HashMap` has correct structure and values.
///
/// Inputs:
/// - app: Reference to `AppState`
/// - `test_packages`: Reference to test packages
fn verify_hashmap_structure(
app: &crate_root::state::AppState,
test_packages: &[crate_root::state::PackageItem],
) {
if let crate_root::state::Modal::Preflight {
selected_optdepends,
..
} = &app.modal
{
for (pkg_name, optdeps) in selected_optdepends {
assert!(
!optdeps.is_empty(),
"Package {pkg_name} should have at least one selected optdep"
);
assert!(
test_packages.iter().any(|p| p.name == *pkg_name),
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | true |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/cache_sync.rs | tests/preflight_integration/cache_sync.rs | //! Tests for cache synchronization.
use pacsea as crate_root;
/// Creates a test package item.
///
/// Inputs:
/// - `name`: Package name
/// - `version`: Package version
///
/// Output:
/// - A `PackageItem` with the specified name and version
///
/// Details:
/// - Creates a package with default values for other fields
fn create_test_package(name: &str, version: &str) -> crate_root::state::PackageItem {
    // Every test package comes from core/x86_64 with empty metadata.
    let source = crate_root::state::Source::Official {
        repo: "core".to_string(),
        arch: "x86_64".to_string(),
    };
    crate_root::state::PackageItem {
        name: name.to_owned(),
        version: version.to_owned(),
        description: String::new(),
        source,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
/// Sets up initial app state with test packages and partial cache.
///
/// Inputs:
/// - `test_packages`: Vector of test packages
///
/// Output:
/// - `AppState` with initial cache state (deps cached, files/services resolving)
///
/// Details:
/// - Creates app state with dependencies already cached
/// - Files and services are marked as still resolving
fn setup_initial_app_state(
    test_packages: &[crate_root::state::PackageItem],
) -> crate_root::state::AppState {
    // The only pre-cached item: one official dependency of test-package-1.
    let cached_dep = crate_root::state::modal::DependencyInfo {
        name: "test-dep-1".to_string(),
        version: "1.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "core".to_string(),
        },
        required_by: vec!["test-package-1".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    };
    crate_root::state::AppState {
        // Initially, only dependencies are cached.
        install_list_deps: vec![cached_dep],
        // Files are not cached yet (still resolving).
        install_list_files: vec![],
        preflight_files_resolving: true,
        preflight_files_items: Some(test_packages.to_vec()),
        // Services are not cached yet (still resolving).
        install_list_services: vec![],
        preflight_services_resolving: true,
        preflight_services_items: Some(test_packages.to_vec()),
        // The packages under test sit in the install list.
        install_list: test_packages.to_vec(),
        ..Default::default()
    }
}
/// Opens the preflight modal with test packages.
///
/// Inputs:
/// - `app`: Reference to mutable `AppState`
/// - `test_packages`: Vector of test packages
///
/// Output:
/// - None (modifies app state)
///
/// Details:
/// - Creates a preflight modal in Install mode with Summary tab active
fn open_preflight_modal(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) {
    // Header chips carry only the package count; sizes and risk are zeroed.
    let header_chips = crate_root::state::modal::PreflightHeaderChips {
        package_count: test_packages.len(),
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 0,
        risk_score: 0,
        risk_level: crate_root::state::modal::RiskLevel::Low,
    };
    app.modal = crate_root::state::Modal::Preflight {
        items: test_packages.to_vec(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips,
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        // Sandbox is pre-marked loaded; these tests never sync it.
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: true,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
}
/// Syncs dependencies from cache to modal.
///
/// Inputs:
/// - `app`: Reference to mutable `AppState`
///
/// Output:
/// - None (modifies app state)
///
/// Details:
/// - Switches to Deps tab and syncs dependencies from cache
fn sync_dependencies_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Deps;
    // Simulate sync_dependencies logic: only install mode reads the cache.
    if !matches!(*action, crate_root::state::PreflightAction::Install) {
        return;
    }
    let item_names: std::collections::HashSet<String> =
        items.iter().map(|i| i.name.clone()).collect();
    // A cached dependency applies when a modal package requires it.
    let filtered: Vec<_> = app
        .install_list_deps
        .iter()
        .filter(|dep| dep.required_by.iter().any(|r| item_names.contains(r)))
        .cloned()
        .collect();
    if !filtered.is_empty() {
        *dependency_info = filtered;
        *dep_selected = 0;
    }
}
/// Verifies dependencies in the modal match expected state.
///
/// Inputs:
/// - `app`: Reference to `AppState`
/// - `expected_count`: Expected number of dependencies
/// - `expected_names`: Expected dependency names
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Asserts that dependency count and names match expectations
fn verify_dependencies(
    app: &crate_root::state::AppState,
    expected_count: usize,
    expected_names: &[&str],
) {
    let crate_root::state::Modal::Preflight {
        dependency_info, ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        dependency_info.len(),
        expected_count,
        "Should have {expected_count} dependencies"
    );
    // Every expected name must appear somewhere in the dependency list.
    for &expected_name in expected_names {
        assert!(
            dependency_info.iter().any(|d| d.name == expected_name),
            "Should have dependency {expected_name}"
        );
    }
}
/// Simulates background resolution completing and updating cache.
///
/// Inputs:
/// - `app`: Reference to mutable `AppState`
///
/// Output:
/// - None (modifies app state)
///
/// Details:
/// - Adds new dependency, files, and services to cache
/// - Clears resolving flags
fn simulate_background_resolution_complete(app: &mut crate_root::state::AppState) {
    // A second dependency shows up once background dep resolution finishes.
    let new_dep = crate_root::state::modal::DependencyInfo {
        name: "test-dep-2".to_string(),
        version: "2.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "extra".to_string(),
        },
        required_by: vec!["test-package-1".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    };
    app.install_list_deps.push(new_dep);
    // File resolution finishes: one brand-new binary for test-package-1.
    let file_entry = crate_root::state::modal::PackageFileInfo {
        name: "test-package-1".to_string(),
        files: vec![crate_root::state::modal::FileChange {
            path: "/usr/bin/test1".to_string(),
            change_type: crate_root::state::modal::FileChangeType::New,
            package: "test-package-1".to_string(),
            is_config: false,
            predicted_pacnew: false,
            predicted_pacsave: false,
        }],
        total_count: 1,
        new_count: 1,
        changed_count: 0,
        removed_count: 0,
        config_count: 0,
        pacnew_candidates: 0,
        pacsave_candidates: 0,
    };
    app.install_list_files = vec![file_entry];
    app.preflight_files_resolving = false;
    app.preflight_files_items = None;
    // Service resolution finishes: one active unit that needs a restart.
    let service = crate_root::state::modal::ServiceImpact {
        unit_name: "test-service.service".to_string(),
        providers: vec!["test-package-1".to_string()],
        is_active: true,
        needs_restart: true,
        recommended_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
        restart_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
    };
    app.install_list_services = vec![service];
    app.preflight_services_resolving = false;
    app.preflight_services_items = None;
}
/// Switches the preflight modal to the Files tab and pulls cached file data.
///
/// Inputs:
/// - `app`: Mutable application state holding the modal and caches
///
/// Output:
/// - None (mutates the modal in place)
///
/// Details:
/// - Mirrors the sync_files logic: only cache entries whose package name
///   matches a modal item are copied into the modal
/// - Does nothing when no Preflight modal is open
fn sync_files_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Files;
    // Names of the packages currently shown in the modal.
    let names: std::collections::HashSet<String> =
        items.iter().map(|item| item.name.clone()).collect();
    // Keep only cached file entries that belong to those packages.
    let matching: Vec<_> = app
        .install_list_files
        .iter()
        .filter(|entry| names.contains(&entry.name))
        .cloned()
        .collect();
    if !matching.is_empty() {
        *file_info = matching;
        *file_selected = 0;
    }
}
/// Checks that the Files tab reflects the expected cached file data.
///
/// Inputs:
/// - `app`: Application state to inspect
/// - `expected_package_name`: Package name the single file entry must carry
///
/// Output:
/// - None (panics when any expectation is violated)
///
/// Details:
/// - Requires the modal to be on the Files tab with exactly one file entry
fn verify_files(app: &crate_root::state::AppState, expected_package_name: &str) {
    let crate_root::state::Modal::Preflight { tab, file_info, .. } = &app.modal else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be on Files tab"
    );
    assert!(
        !file_info.is_empty(),
        "Files should be loaded from updated cache"
    );
    assert_eq!(file_info.len(), 1, "Should have 1 file entry");
    assert_eq!(
        file_info[0].name, expected_package_name,
        "File package name should match"
    );
}
/// Switches the preflight modal to the Services tab and pulls cached services.
///
/// Inputs:
/// - `app`: Mutable application state holding the modal and caches
///
/// Output:
/// - None (mutates the modal in place)
///
/// Details:
/// - Mirrors the sync_services logic: services are only synced for the
///   Install action, and only units provided by a modal item are copied
/// - Does nothing when no Preflight modal is open
fn sync_services_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Services;
    // Only install actions sync services.
    if !matches!(*action, crate_root::state::PreflightAction::Install) {
        return;
    }
    let names: std::collections::HashSet<String> =
        items.iter().map(|item| item.name.clone()).collect();
    // Keep only cached services provided by one of the modal's packages.
    let matching: Vec<_> = app
        .install_list_services
        .iter()
        .filter(|svc| svc.providers.iter().any(|p| names.contains(p)))
        .cloned()
        .collect();
    if !matching.is_empty() {
        *service_info = matching;
        *services_loaded = true;
        *service_selected = 0;
    }
}
/// Checks that the Services tab reflects the expected cached service data.
///
/// Inputs:
/// - `app`: Application state to inspect
/// - `expected_unit_name`: Unit name the single service entry must carry
///
/// Output:
/// - None (panics when any expectation is violated)
///
/// Details:
/// - Requires the Services tab with exactly one loaded service
fn verify_services(app: &crate_root::state::AppState, expected_unit_name: &str) {
    let crate_root::state::Modal::Preflight {
        tab,
        service_info,
        services_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Services,
        "Should be on Services tab"
    );
    assert!(
        !service_info.is_empty(),
        "Services should be loaded from updated cache"
    );
    assert!(*services_loaded, "Services should be marked as loaded");
    assert_eq!(service_info.len(), 1, "Should have 1 service");
    assert_eq!(
        service_info[0].unit_name, expected_unit_name,
        "Service unit name should match"
    );
}
/// Checks that background resolving state has been fully cleared.
///
/// Inputs:
/// - `app`: Reference to `AppState`
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Covers both the in-flight flags and the pending item lists for
///   files and services
fn verify_resolving_flags_cleared(app: &crate_root::state::AppState) {
    // Each entry pairs a condition with the message to emit if it fails.
    let checks = [
        (
            !app.preflight_files_resolving,
            "Files resolving flag should be cleared",
        ),
        (
            app.preflight_files_items.is_none(),
            "Files items should be cleared",
        ),
        (
            !app.preflight_services_resolving,
            "Services resolving flag should be cleared",
        ),
        (
            app.preflight_services_items.is_none(),
            "Services items should be cleared",
        ),
    ];
    for (ok, msg) in checks {
        assert!(ok, "{msg}");
    }
}
/// Checks the final state across all tabs after cache updates.
///
/// Inputs:
/// - `app`: Reference to `AppState`
/// - `expected_dep_count`: Expected number of dependencies
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Confirms deps, files, and services all carry post-update data
fn verify_final_state(app: &crate_root::state::AppState, expected_dep_count: usize) {
    let crate_root::state::Modal::Preflight {
        dependency_info,
        file_info,
        service_info,
        services_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        dependency_info.len(),
        expected_dep_count,
        "Should have {expected_dep_count} dependencies after cache update"
    );
    assert!(!file_info.is_empty(), "Files should be loaded");
    assert!(!service_info.is_empty(), "Services should be loaded");
    assert!(*services_loaded, "Services should be marked as loaded");
}
#[test]
/// What: Verify that the preflight modal syncs updated cache data when
/// background resolution completes while the modal is open.
///
/// Inputs:
/// - One package placed in `install_list`
/// - Preflight modal opened before all data is resolved
/// - Caches rewritten by a simulated background completion
///
/// Output:
/// - Each tab shows the refreshed cache contents after switching to it
/// - Resolving flags end up cleared
///
/// Details:
/// - Exercises the sync path for the Deps, Files, and Services tabs
/// - Confirms stale modal data is replaced once the cache changes
fn preflight_syncs_cache_updates_during_modal_open() {
    // Headless mode prevents any terminal interaction during the test.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let packages = vec![create_test_package("test-package-1", "1.0.0")];
    let mut app = setup_initial_app_state(&packages);
    open_preflight_modal(&mut app, &packages);
    // Step 1: the first visit to the Deps tab picks up the pre-seeded cache.
    sync_dependencies_tab(&mut app);
    verify_dependencies(&app, 1, &["test-dep-1"]);
    // Background work finishes and rewrites the caches behind the modal.
    simulate_background_resolution_complete(&mut app);
    // Step 2: revisiting Deps must now show the second dependency.
    sync_dependencies_tab(&mut app);
    verify_dependencies(&app, 2, &["test-dep-1", "test-dep-2"]);
    // Step 3: the Files tab picks up the freshly cached file data.
    sync_files_tab(&mut app);
    verify_files(&app, "test-package-1");
    // Step 4: the Services tab picks up the freshly cached service data.
    sync_services_tab(&mut app);
    verify_services(&app, "test-service.service");
    // Step 5: background completion must also have cleared resolving flags.
    verify_resolving_flags_cleared(&app);
    // Final sweep across all tabs.
    verify_final_state(&app, 2);
}
//! Tests for conflict resolution.
use super::helpers::*;
use pacsea as crate_root;
/// What: Build the dependency fixtures used by the conflict tests.
///
/// Output:
/// - Returns vector of dependency info covering package-1, package-2, and
///   aur-package
///
/// Details:
/// - package-2's common-dep entry is marked as conflicting with package-1's
fn create_test_dependencies() -> Vec<crate_root::state::modal::DependencyInfo> {
    // Local builder for official-repo dependencies to cut repetition.
    let official = |name: &str,
                    version: &str,
                    status: crate_root::state::modal::DependencyStatus,
                    repo: &str,
                    required_by: &str| {
        crate_root::state::modal::DependencyInfo {
            name: name.to_string(),
            version: version.to_string(),
            status,
            source: crate_root::state::modal::DependencySource::Official {
                repo: repo.to_string(),
            },
            required_by: vec![required_by.to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }
    };
    let conflict = crate_root::state::modal::DependencyStatus::Conflict {
        reason: "Conflicts with package-1's dependency common-dep (1.0.0)".to_string(),
    };
    vec![
        // Package 1 dependencies.
        official(
            "common-dep",
            "1.0.0",
            crate_root::state::modal::DependencyStatus::ToInstall,
            "core",
            "package-1",
        ),
        official(
            "pkg1-dep",
            "1.0.0",
            crate_root::state::modal::DependencyStatus::ToInstall,
            "core",
            "package-1",
        ),
        // Package 2 pulls in a conflicting duplicate of common-dep.
        official("common-dep", "2.0.0", conflict, "core", "package-2"),
        official(
            "pkg2-dep",
            "2.0.0",
            crate_root::state::modal::DependencyStatus::ToInstall,
            "extra",
            "package-2",
        ),
        // AUR-sourced dependency cannot use the official builder.
        crate_root::state::modal::DependencyInfo {
            name: "aur-dep".to_string(),
            version: "1.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Aur,
            required_by: vec!["aur-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
    ]
}
/// What: Build the file-change fixtures for all test packages.
///
/// Output:
/// - Returns vector of package file info
///
/// Details:
/// - Covers package-1, package-2, and aur-package with their file changes
fn create_test_files() -> Vec<crate_root::state::modal::PackageFileInfo> {
    // Local builder for one file-change record.
    let change = |path: &str,
                  change_type: crate_root::state::modal::FileChangeType,
                  package: &str,
                  is_config: bool,
                  pacnew: bool,
                  pacsave: bool| {
        crate_root::state::modal::FileChange {
            path: path.to_string(),
            change_type,
            package: package.to_string(),
            is_config,
            predicted_pacnew: pacnew,
            predicted_pacsave: pacsave,
        }
    };
    vec![
        crate_root::state::modal::PackageFileInfo {
            name: "package-1".to_string(),
            files: vec![
                change(
                    "/usr/bin/pkg1",
                    crate_root::state::modal::FileChangeType::New,
                    "package-1",
                    false,
                    false,
                    false,
                ),
                // Config file expected to produce a .pacnew.
                change(
                    "/etc/pkg1.conf",
                    crate_root::state::modal::FileChangeType::New,
                    "package-1",
                    true,
                    true,
                    false,
                ),
            ],
            total_count: 2,
            new_count: 2,
            changed_count: 0,
            removed_count: 0,
            config_count: 1,
            pacnew_candidates: 1,
            pacsave_candidates: 0,
        },
        crate_root::state::modal::PackageFileInfo {
            name: "package-2".to_string(),
            files: vec![
                change(
                    "/usr/bin/pkg2",
                    crate_root::state::modal::FileChangeType::New,
                    "package-2",
                    false,
                    false,
                    false,
                ),
                // Modified config expected to produce a .pacsave.
                change(
                    "/etc/pkg2.conf",
                    crate_root::state::modal::FileChangeType::Changed,
                    "package-2",
                    true,
                    false,
                    true,
                ),
            ],
            total_count: 2,
            new_count: 1,
            changed_count: 1,
            removed_count: 0,
            config_count: 1,
            pacnew_candidates: 0,
            pacsave_candidates: 1,
        },
        crate_root::state::modal::PackageFileInfo {
            name: "aur-package".to_string(),
            files: vec![change(
                "/usr/bin/aur",
                crate_root::state::modal::FileChangeType::New,
                "aur-package",
                false,
                false,
                false,
            )],
            total_count: 1,
            new_count: 1,
            changed_count: 0,
            removed_count: 0,
            config_count: 0,
            pacnew_candidates: 0,
            pacsave_candidates: 0,
        },
    ]
}
/// What: Build the service-impact fixtures for all test packages.
///
/// Output:
/// - Returns vector of service impact info
///
/// Details:
/// - One unit per package; pkg2's unit is inactive with a deferred restart
fn create_test_services() -> Vec<crate_root::state::modal::ServiceImpact> {
    // Local builder: recommended and actual decisions start out equal.
    let impact = |unit: &str,
                  provider: &str,
                  active: bool,
                  restart: bool,
                  decision: crate_root::state::modal::ServiceRestartDecision| {
        crate_root::state::modal::ServiceImpact {
            unit_name: unit.to_string(),
            providers: vec![provider.to_string()],
            is_active: active,
            needs_restart: restart,
            recommended_decision: decision.clone(),
            restart_decision: decision,
        }
    };
    vec![
        impact(
            "pkg1.service",
            "package-1",
            true,
            true,
            crate_root::state::modal::ServiceRestartDecision::Restart,
        ),
        impact(
            "pkg2.service",
            "package-2",
            false,
            false,
            crate_root::state::modal::ServiceRestartDecision::Defer,
        ),
        impact(
            "aur.service",
            "aur-package",
            true,
            true,
            crate_root::state::modal::ServiceRestartDecision::Restart,
        ),
    ]
}
/// What: Build the sandbox fixture for the AUR package.
///
/// Output:
/// - Returns vector of sandbox info
///
/// Details:
/// - aur-package carries one runtime dep, one make dep, and one optional dep
fn create_test_sandbox() -> Vec<crate_root::logic::sandbox::SandboxInfo> {
    // Local builder for a dependency delta entry.
    let delta = |name: &str, installed: bool, version: Option<&str>, satisfied: bool| {
        crate_root::logic::sandbox::DependencyDelta {
            name: name.to_string(),
            is_installed: installed,
            installed_version: version.map(str::to_string),
            version_satisfied: satisfied,
        }
    };
    vec![crate_root::logic::sandbox::SandboxInfo {
        package_name: "aur-package".to_string(),
        depends: vec![delta("aur-dep", false, None, false)],
        makedepends: vec![delta("make-dep", true, Some("1.0.0"), true)],
        checkdepends: vec![],
        optdepends: vec![delta("optdep", false, None, false)],
    }]
}
/// What: Build the package fixtures for conflict testing.
///
/// Output:
/// - Returns vector of test packages
///
/// Details:
/// - package-1 and package-2 come from official repos; aur-package from AUR
fn create_test_packages() -> Vec<crate_root::state::PackageItem> {
    // Local builder for official-repo packages.
    let official = |name: &str, version: &str, repo: &str| crate_root::state::PackageItem {
        name: name.to_string(),
        version: version.to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: repo.to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    vec![
        official("package-1", "1.0.0", "core"),
        official("package-2", "2.0.0", "extra"),
        crate_root::state::PackageItem {
            name: "aur-package".to_string(),
            version: "3.0.0".to_string(),
            description: String::new(),
            source: crate_root::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        },
    ]
}
/// What: Seed the app state with conflict-bearing data for every tab.
///
/// Inputs:
/// - `app`: Application state to populate
///
/// Output:
/// - Returns the test packages vector
///
/// Details:
/// - Fills the deps (with conflicts), files, services, and sandbox caches
/// - Puts the packages in the install list and opens the Summary tab
fn setup_test_data_with_conflicts(
    app: &mut crate_root::state::AppState,
) -> Vec<crate_root::state::PackageItem> {
    let packages = create_test_packages();
    // Seed every preflight cache up front.
    app.install_list_deps = create_test_dependencies();
    app.install_list_files = create_test_files();
    app.install_list_services = create_test_services();
    app.install_list_sandbox = create_test_sandbox();
    app.install_list = packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Open the modal on the Summary tab for an install action.
    app.modal = create_preflight_modal(
        packages.clone(),
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Summary,
    );
    packages
}
/// What: Verify Deps tab shows conflicts correctly.
///
/// Inputs:
/// - `app`: Application state with preflight modal
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Re-implements the sync step inline (deliberately without merging) so
///   the cached entries land in the modal unmodified
/// - Verifies conflicts are detected and displayed
/// - Verifies non-conflicting dependencies are present
/// - Verifies package-specific dependencies
fn verify_deps_tab(app: &mut crate_root::state::AppState) {
    // Manually sync dependencies without merging (like original test)
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Deps;
        // Simulate sync_dependencies logic without merging
        if matches!(*action, crate_root::state::PreflightAction::Install) {
            let item_names: std::collections::HashSet<String> =
                items.iter().map(|i| i.name.clone()).collect();
            // Keep only cache entries required by a package shown in the modal.
            let filtered: Vec<_> = app
                .install_list_deps
                .iter()
                .filter(|dep| {
                    dep.required_by
                        .iter()
                        .any(|req_by| item_names.contains(req_by))
                })
                .cloned()
                .collect();
            // Only overwrite modal state when the cache produced matches.
            if !filtered.is_empty() {
                *dependency_info = filtered;
                *dep_selected = 0;
            }
        }
    }
    let (_, _, tab, dependency_info, _, _, _, _, _) = assert_preflight_modal(app);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Deps,
        "Should be on Deps tab"
    );
    assert!(!dependency_info.is_empty(), "Dependencies should be loaded");
    // 5 cache entries: 2 for package-1, 2 for package-2, 1 for aur-package.
    assert_eq!(dependency_info.len(), 5, "Should have 5 dependencies");
    // Verify conflicts are detected
    let conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| {
            matches!(
                d.status,
                crate_root::state::modal::DependencyStatus::Conflict { .. }
            )
        })
        .collect();
    assert!(!conflicts.is_empty(), "Conflicts should be detected");
    assert_eq!(conflicts.len(), 1, "Should have 1 conflict");
    // The only conflict is package-2's duplicate of common-dep.
    assert_eq!(conflicts[0].name, "common-dep");
    assert!(conflicts[0].required_by.contains(&"package-2".to_string()));
    // Verify non-conflicting dependencies are present
    assert_eq!(
        dependency_info
            .iter()
            .filter(|d| {
                matches!(
                    d.status,
                    crate_root::state::modal::DependencyStatus::ToInstall
                )
            })
            .count(),
        4,
        "Should have 4 ToInstall dependencies"
    );
    // Verify package-1's dependencies
    let pkg1_deps: Vec<_> = dependency_info
        .iter()
        .filter(|d| d.required_by.contains(&"package-1".to_string()))
        .collect();
    assert_eq!(pkg1_deps.len(), 2, "Package-1 should have 2 dependencies");
    assert!(pkg1_deps.iter().any(|d| d.name == "common-dep"));
    assert!(pkg1_deps.iter().any(|d| d.name == "pkg1-dep"));
    // Verify package-2's dependencies (including conflict)
    let pkg2_deps: Vec<_> = dependency_info
        .iter()
        .filter(|d| d.required_by.contains(&"package-2".to_string()))
        .collect();
    assert_eq!(pkg2_deps.len(), 2, "Package-2 should have 2 dependencies");
    assert!(pkg2_deps.iter().any(|d| d.name == "common-dep"));
    assert!(pkg2_deps.iter().any(|d| d.name == "pkg2-dep"));
}
/// What: Verify the Files tab loads correctly despite conflicts.
///
/// Inputs:
/// - `app`: Application state with preflight modal
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Verifies all package files are loaded correctly
/// - Verifies per-package counts and pacnew/pacsave predictions
fn verify_files_tab(app: &mut crate_root::state::AppState) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Files);
    let (_, _, tab, _, file_info, _, _, _, _) = assert_preflight_modal(app);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be on Files tab"
    );
    assert!(!file_info.is_empty(), "Files should be loaded");
    assert_eq!(file_info.len(), 3, "Should have 3 file entries");
    // Lookup helper keyed on package name.
    let entry = |name: &str| {
        file_info
            .iter()
            .find(|f| f.name == name)
            .unwrap_or_else(|| panic!("{name} should be found in file_info"))
    };
    // package-1: two new files, one config predicted to pacnew.
    let pkg1 = entry("package-1");
    assert_eq!(pkg1.files.len(), 2, "Package-1 should have 2 files");
    assert_eq!(pkg1.total_count, 2);
    assert_eq!(pkg1.new_count, 2);
    assert_eq!(pkg1.changed_count, 0);
    assert_eq!(pkg1.config_count, 1);
    assert_eq!(pkg1.pacnew_candidates, 1);
    assert_eq!(pkg1.pacsave_candidates, 0);
    // package-2: one new, one changed config predicted to pacsave.
    let pkg2 = entry("package-2");
    assert_eq!(pkg2.files.len(), 2, "Package-2 should have 2 files");
    assert_eq!(pkg2.total_count, 2);
    assert_eq!(pkg2.new_count, 1);
    assert_eq!(pkg2.changed_count, 1);
    assert_eq!(pkg2.config_count, 1);
    assert_eq!(pkg2.pacnew_candidates, 0);
    assert_eq!(pkg2.pacsave_candidates, 1);
    // aur-package: a single new binary.
    let aur = entry("aur-package");
    assert_eq!(aur.files.len(), 1, "AUR package should have 1 file");
    assert_eq!(aur.total_count, 1);
    assert_eq!(aur.new_count, 1);
}
/// What: Verify the Services tab loads correctly despite conflicts.
///
/// Inputs:
/// - `app`: Application state with preflight modal
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Verifies all package services are loaded correctly
/// - Verifies activity, restart need, decision, and provider per unit
fn verify_services_tab(app: &mut crate_root::state::AppState) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Services);
    let (_, _, tab, _, _, service_info, _, services_loaded, _) = assert_preflight_modal(app);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Services,
        "Should be on Services tab"
    );
    assert!(*services_loaded, "Services should be marked as loaded");
    assert!(!service_info.is_empty(), "Services should be loaded");
    assert_eq!(service_info.len(), 3, "Should have 3 services");
    // Lookup helper keyed on unit name.
    let unit = |name: &str| {
        service_info
            .iter()
            .find(|s| s.unit_name == name)
            .unwrap_or_else(|| panic!("{name} should be found in service_info"))
    };
    // pkg1.service: active, needs restart, restart decided.
    let pkg1 = unit("pkg1.service");
    assert!(pkg1.is_active);
    assert!(pkg1.needs_restart);
    assert_eq!(
        pkg1.restart_decision,
        crate_root::state::modal::ServiceRestartDecision::Restart
    );
    assert!(pkg1.providers.contains(&"package-1".to_string()));
    // pkg2.service: inactive, no restart needed, deferred.
    let pkg2 = unit("pkg2.service");
    assert!(!pkg2.is_active);
    assert!(!pkg2.needs_restart);
    assert_eq!(
        pkg2.restart_decision,
        crate_root::state::modal::ServiceRestartDecision::Defer
    );
    assert!(pkg2.providers.contains(&"package-2".to_string()));
    // aur.service: active, needs restart, restart decided.
    let aur = unit("aur.service");
    assert!(aur.is_active);
    assert!(aur.needs_restart);
    assert_eq!(
        aur.restart_decision,
        crate_root::state::modal::ServiceRestartDecision::Restart
    );
    assert!(aur.providers.contains(&"aur-package".to_string()));
}
/// What: Verify the Sandbox tab loads correctly despite conflicts.
///
/// Inputs:
/// - `app`: Application state with preflight modal
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Verifies the AUR package's sandbox info is loaded correctly
/// - Verifies the install state of its runtime and make dependencies
fn verify_sandbox_tab(app: &mut crate_root::state::AppState) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Sandbox);
    let (_, _, tab, _, _, _, sandbox_info, _, sandbox_loaded) = assert_preflight_modal(app);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Sandbox,
        "Should be on Sandbox tab"
    );
    assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
    assert!(!sandbox_info.is_empty(), "Sandbox info should be loaded");
    assert_eq!(sandbox_info.len(), 1, "Should have 1 sandbox entry");
    let sandbox = sandbox_info
        .iter()
        .find(|s| s.package_name == "aur-package")
        .expect("aur-package should be found in sandbox_info");
    // Section sizes: one runtime dep, one make dep, one optional dep.
    assert_eq!(sandbox.depends.len(), 1, "Should have 1 depends");
    assert_eq!(sandbox.makedepends.len(), 1, "Should have 1 makedepends");
    assert_eq!(sandbox.checkdepends.len(), 0, "Should have 0 checkdepends");
    assert_eq!(sandbox.optdepends.len(), 1, "Should have 1 optdepends");
    // aur-dep is not yet installed...
    let dep = sandbox
        .depends
        .iter()
        .find(|d| d.name == "aur-dep")
        .expect("aur-dep should be found in sandbox.depends");
    assert!(!dep.is_installed);
    assert_eq!(dep.installed_version, None);
    // ...while make-dep already is, at a satisfying version.
    let makedep = sandbox
        .makedepends
        .iter()
        .find(|d| d.name == "make-dep")
        .expect("make-dep should be found in sandbox.makedepends");
    assert!(makedep.is_installed);
    assert_eq!(makedep.installed_version, Some("1.0.0".to_string()));
}
/// What: Build pacsea-bin's dependency fixtures, including its conflicts.
///
/// Output:
/// - Returns vector of dependency info for pacsea-bin
///
/// Details:
/// - pacsea-bin conflicts with both pacsea and pacsea-git
/// - Also carries a regular dependency (common-dep) to verify that
///   conflict entries survive alongside ordinary deps
fn create_pacsea_bin_dependencies() -> Vec<crate_root::state::modal::DependencyInfo> {
    // Builder for a conflict entry attributed to pacsea-bin.
    let conflict = |name: &str, source: crate_root::state::modal::DependencySource| {
        crate_root::state::modal::DependencyInfo {
            name: name.to_string(),
            version: String::new(),
            status: crate_root::state::modal::DependencyStatus::Conflict {
                reason: format!("conflicts with installed package {name}"),
            },
            source,
            required_by: vec!["pacsea-bin".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }
    };
    vec![
        conflict(
            "pacsea",
            crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
        ),
        conflict(
            "pacsea-git",
            crate_root::state::modal::DependencySource::Aur,
        ),
        // Regular dependency: verifies conflicts are not overwritten by deps.
        crate_root::state::modal::DependencyInfo {
            name: "common-dep".to_string(),
            version: "1.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["pacsea-bin".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
    ]
}
/// What: Build jujutsu-git's dependency fixtures, including its conflict.
///
/// Output:
/// - Returns vector of dependency info for jujutsu-git
///
/// Details:
/// - jujutsu-git conflicts with jujutsu
/// - Shares common-dep with pacsea-bin to prove conflict entries survive
///   overlapping dependencies
/// - Adds one dependency unique to jujutsu-git
fn create_jujutsu_git_dependencies() -> Vec<crate_root::state::modal::DependencyInfo> {
    // Builder for an official-repo dependency required by jujutsu-git.
    let to_install = |name: &str, version: &str, repo: &str| {
        crate_root::state::modal::DependencyInfo {
            name: name.to_string(),
            version: version.to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Official {
                repo: repo.to_string(),
            },
            required_by: vec!["jujutsu-git".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        }
    };
    vec![
        // The conflict with the packaged jujutsu.
        crate_root::state::modal::DependencyInfo {
            name: "jujutsu".to_string(),
            version: String::new(),
            status: crate_root::state::modal::DependencyStatus::Conflict {
                reason: "conflicts with installed package jujutsu".to_string(),
            },
            source: crate_root::state::modal::DependencySource::Official {
                repo: "community".to_string(),
            },
            required_by: vec!["jujutsu-git".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
        // Shared with pacsea-bin to prove conflicts are not overwritten.
        to_install("common-dep", "1.0.0", "core"),
        // Unique to jujutsu-git.
        to_install("jujutsu-dep", "2.0.0", "extra"),
    ]
}
/// What: Seed the app state with pacsea-bin and its conflicts.
///
/// Inputs:
/// - `app`: Application state to populate
///
/// Output:
/// - Returns the pacsea-bin package item
///
/// Details:
/// - Seeds the dependency cache with pacsea-bin's conflict entries
/// - Puts pacsea-bin in the install list and opens the Deps tab
fn setup_pacsea_bin(app: &mut crate_root::state::AppState) -> crate_root::state::PackageItem {
    let pkg = create_test_package("pacsea-bin", "0.6.0", crate_root::state::Source::Aur);
    app.install_list_deps = create_pacsea_bin_dependencies();
    app.install_list = vec![pkg.clone()];
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    app.modal = create_preflight_modal(
        vec![pkg.clone()],
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Deps,
    );
    pkg
}
/// What: Verify pacsea-bin's conflicts are detected correctly.
///
/// Inputs:
/// - `app`: Application state with preflight modal
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Expects exactly two conflicts (pacsea and pacsea-git), both
///   attributed to pacsea-bin
fn verify_pacsea_bin_conflicts(app: &mut crate_root::state::AppState) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Deps);
    let (_, _, _, dependency_info, _, _, _, _, _) = assert_preflight_modal(app);
    // Collect every dependency flagged as a conflict.
    let conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| {
            matches!(
                d.status,
                crate_root::state::modal::DependencyStatus::Conflict { .. }
            )
        })
        .collect();
    // A conflict counts for pacsea-bin only if it names it in required_by.
    let has = |name: &str| {
        conflicts
            .iter()
            .any(|c| c.name == name && c.required_by.contains(&"pacsea-bin".to_string()))
    };
    assert_eq!(
        conflicts.len(),
        2,
        "Should have 2 conflicts after adding pacsea-bin"
    );
    assert!(has("pacsea"), "pacsea-bin should conflict with pacsea");
    assert!(has("pacsea-git"), "pacsea-bin should conflict with pacsea-git");
}
/// What: Add jujutsu-git to the app state and reopen the modal.
///
/// Inputs:
/// - `app`: Application state to update
/// - `pacsea_bin`: Existing pacsea-bin package
///
/// Output:
/// - Returns the jujutsu-git package item
///
/// Details:
/// - Extends the dependency cache with jujutsu-git's entries (pacsea-bin's
///   entries stay untouched)
/// - Rebuilds the install list and modal with both packages
fn add_jujutsu_git(
    app: &mut crate_root::state::AppState,
    pacsea_bin: &crate_root::state::PackageItem,
) -> crate_root::state::PackageItem {
    let jujutsu_git = create_test_package("jujutsu-git", "0.1.0", crate_root::state::Source::Aur);
    app.install_list_deps
        .extend(create_jujutsu_git_dependencies());
    let both = vec![pacsea_bin.clone(), jujutsu_git.clone()];
    app.install_list = both.clone();
    app.modal = create_preflight_modal(
        both,
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Deps,
    );
    jujutsu_git
}
/// What: Verify all conflicts after both packages are added.
///
/// Inputs:
/// - `app`: Application state with both packages
///
/// Output:
/// - Panics if assertions fail
///
/// Details:
/// - Confirms pacsea-bin's two conflicts survived adding jujutsu-git
/// - Confirms jujutsu-git's own conflict is detected
/// - Confirms the overall conflict total
/// - Confirms the shared `common-dep` stays a regular `ToInstall` entry
fn verify_all_conflicts_after_both_packages(app: &mut crate_root::state::AppState) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Deps);
    let (items, _, _, dependency_info, _, _, _, _, _) = assert_preflight_modal(app);
    assert_eq!(items.len(), 2, "Should have 2 packages in install list");
    // Shared predicate for every conflict query below.
    let is_conflict = |d: &crate_root::state::modal::DependencyInfo| {
        matches!(
            d.status,
            crate_root::state::modal::DependencyStatus::Conflict { .. }
        )
    };
    // pacsea-bin's conflicts must still be present (not overwritten).
    let pacsea_bin_conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| is_conflict(d) && d.required_by.contains(&"pacsea-bin".to_string()))
        .collect();
    assert_eq!(
        pacsea_bin_conflicts.len(),
        2,
        "pacsea-bin should still have 2 conflicts after adding jujutsu-git"
    );
    assert!(
        pacsea_bin_conflicts.iter().any(|c| c.name == "pacsea"),
        "pacsea-bin should still conflict with pacsea"
    );
    assert!(
        pacsea_bin_conflicts.iter().any(|c| c.name == "pacsea-git"),
        "pacsea-bin should still conflict with pacsea-git"
    );
    // jujutsu-git's own conflict must be detected as well.
    let jujutsu_git_conflicts: Vec<_> = dependency_info
        .iter()
        .filter(|d| is_conflict(d) && d.required_by.contains(&"jujutsu-git".to_string()))
        .collect();
    assert_eq!(
        jujutsu_git_conflicts.len(),
        1,
        "jujutsu-git should have 1 conflict"
    );
    assert!(
        jujutsu_git_conflicts.iter().any(|c| c.name == "jujutsu"),
        "jujutsu-git should conflict with jujutsu"
    );
    // Overall conflict total across both packages.
    let total_conflicts = dependency_info.iter().filter(|d| is_conflict(d)).count();
    assert_eq!(
        total_conflicts, 3,
        "Should have 3 total conflicts (2 from pacsea-bin, 1 from jujutsu-git)"
    );
    // common-dep is shared by both packages but must not be a conflict.
    let common_dep = dependency_info
        .iter()
        .find(|d| d.name == "common-dep")
        .expect("common-dep should be present");
    assert!(
        matches!(
            common_dep.status,
            crate_root::state::modal::DependencyStatus::ToInstall
        ),
        "common-dep should be ToInstall, not Conflict"
    );
    assert!(
        common_dep.required_by.contains(&"pacsea-bin".to_string()),
        "common-dep should be required by pacsea-bin"
    );
    assert!(
        common_dep.required_by.contains(&"jujutsu-git".to_string()),
        "common-dep should be required by jujutsu-git"
    );
}
/// What: Verify conflicts persist through tab switches.
///
/// Inputs:
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | true |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/mod.rs | tests/preflight_integration/mod.rs | //! Integration tests for preflight modal optimization features.
//!
//! Tests cover:
//! - Out-of-order data arrival (stages completing in different orders)
//! - Cancellation support (aborting work when modal closes)
//! - Caching and data synchronization
//! - Package operations and management
//! - Tab switching and state management
//! - Error handling and edge cases
//! - AUR and official package mixing
//! - Large datasets and performance
//! - Persistence across tabs
//! - Conflict resolution
//! - Skip preflight setting functionality
//! - Auto-scrolling logs in `PreflightExec`
//! - Risk calculation and scoring
#![cfg(test)]
mod aur_mix;
mod auto_scroll;
mod cache_sync;
mod caching;
mod conflicts;
mod data_arrival;
mod edge_cases;
mod error_handling;
mod helpers;
mod large_datasets;
mod package_operations;
mod persistence;
mod remove_operations;
mod risk_calculation;
mod skip_preflight;
mod tab_switching;
mod tab_variations;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/aur_mix.rs | tests/preflight_integration/aur_mix.rs | //! Tests for AUR and official package mixing.
use pacsea as crate_root;
/// What: Create test packages (official and AUR) for testing.
///
/// Inputs: None
///
/// Output:
/// - Vector containing one official and one AUR test package
///
/// Details:
/// - Both packages share the same empty metadata; only name,
///   version, and source differ
fn create_test_packages() -> Vec<crate_root::state::PackageItem> {
    // Local constructor keeps the shared boilerplate in one place.
    let build = |name: &str, version: &str, source: crate_root::state::Source| {
        crate_root::state::PackageItem {
            name: name.to_string(),
            version: version.to_string(),
            description: String::new(),
            source,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    };
    vec![
        build(
            "test-official-package",
            "1.0.0",
            crate_root::state::Source::Official {
                repo: "core".to_string(),
                arch: "x86_64".to_string(),
            },
        ),
        build("test-aur-package", "2.0.0", crate_root::state::Source::Aur),
    ]
}
/// What: Setup test app state with pre-populated cache data.
///
/// Inputs:
/// - `app`: Application state to populate
/// - `test_packages`: Test packages to use
///
/// Output:
/// - App state with all cache data populated
///
/// Details:
/// - Pre-populates dependencies, files, services, and sandbox info
/// - Also resets the preflight cancellation flag so tab-switch logic
///   treats the modal as live
fn setup_test_app_state(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) {
    // Pre-populate cache with dependencies for both packages
    // (one official-sourced, one AUR-sourced; each is linked to its
    // parent package through `required_by`).
    app.install_list_deps = vec![
        crate_root::state::modal::DependencyInfo {
            name: "official-dep-1".to_string(),
            version: "1.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Official {
                repo: "core".to_string(),
            },
            required_by: vec!["test-official-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
        crate_root::state::modal::DependencyInfo {
            name: "aur-dep-1".to_string(),
            version: "1.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::ToInstall,
            source: crate_root::state::modal::DependencySource::Aur,
            required_by: vec!["test-aur-package".to_string()],
            depends_on: Vec::new(),
            is_core: false,
            is_system: false,
        },
    ];
    // Pre-populate cache with files for both packages
    // (each package contributes exactly one New, non-config file).
    app.install_list_files = vec![
        crate_root::state::modal::PackageFileInfo {
            name: "test-official-package".to_string(),
            files: vec![crate_root::state::modal::FileChange {
                path: "/usr/bin/official".to_string(),
                change_type: crate_root::state::modal::FileChangeType::New,
                package: "test-official-package".to_string(),
                is_config: false,
                predicted_pacnew: false,
                predicted_pacsave: false,
            }],
            total_count: 1,
            new_count: 1,
            changed_count: 0,
            removed_count: 0,
            config_count: 0,
            pacnew_candidates: 0,
            pacsave_candidates: 0,
        },
        crate_root::state::modal::PackageFileInfo {
            name: "test-aur-package".to_string(),
            files: vec![crate_root::state::modal::FileChange {
                path: "/usr/bin/aur".to_string(),
                change_type: crate_root::state::modal::FileChangeType::New,
                package: "test-aur-package".to_string(),
                is_config: false,
                predicted_pacnew: false,
                predicted_pacsave: false,
            }],
            total_count: 1,
            new_count: 1,
            changed_count: 0,
            removed_count: 0,
            config_count: 0,
            pacnew_candidates: 0,
            pacsave_candidates: 0,
        },
    ];
    // Pre-populate cache with services for both packages
    // (the official service is active and should restart; the AUR
    // service is inactive and should be deferred).
    app.install_list_services = vec![
        crate_root::state::modal::ServiceImpact {
            unit_name: "official-service.service".to_string(),
            providers: vec!["test-official-package".to_string()],
            is_active: true,
            needs_restart: true,
            recommended_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
            restart_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
        },
        crate_root::state::modal::ServiceImpact {
            unit_name: "aur-service.service".to_string(),
            providers: vec!["test-aur-package".to_string()],
            is_active: false,
            needs_restart: false,
            recommended_decision: crate_root::state::modal::ServiceRestartDecision::Defer,
            restart_decision: crate_root::state::modal::ServiceRestartDecision::Defer,
        },
    ];
    // Pre-populate cache with sandbox info (only for AUR package)
    // — official packages are never built in the sandbox, which the
    // sandbox-tab test relies on.
    app.install_list_sandbox = vec![crate_root::logic::sandbox::SandboxInfo {
        package_name: "test-aur-package".to_string(),
        depends: vec![crate_root::logic::sandbox::DependencyDelta {
            name: "aur-dep-1".to_string(),
            is_installed: false,
            installed_version: None,
            version_satisfied: false,
        }],
        makedepends: vec![],
        checkdepends: vec![],
        optdepends: vec![],
    }];
    // Make the packages the active install list and clear any stale
    // cancellation flag from a previous run.
    app.install_list = test_packages.to_vec();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
}
/// What: Initialize preflight modal with test packages.
///
/// Inputs:
/// - `test_packages`: Test packages to include
///
/// Output:
/// - Preflight modal state initialized with test packages
///
/// Details:
/// - Creates modal with Install action and Summary tab
/// - All per-tab caches (deps, files, services, sandbox) start empty;
///   tests fill them by switching tabs
fn create_test_preflight_modal(
    test_packages: &[crate_root::state::PackageItem],
) -> crate_root::state::Modal {
    crate_root::state::Modal::Preflight {
        items: test_packages.to_vec(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count: test_packages.len(),
            // Hard-coded to match the single AUR item produced by
            // `create_test_packages`.
            aur_count: 1,
            download_bytes: 0,
            install_delta_bytes: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        services_error: None,
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: false,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    }
}
/// What: Test and verify dependencies tab shows both packages.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Verifies dependencies are correctly displayed
///
/// Details:
/// - Switches to the Deps tab and syncs cached dependencies into
///   the modal, then asserts both packages contributed one each
fn test_deps_tab(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Deps;
        if matches!(*action, crate_root::state::PreflightAction::Install) {
            let names: std::collections::HashSet<String> =
                items.iter().map(|i| i.name.clone()).collect();
            let scoped: Vec<_> = app
                .install_list_deps
                .iter()
                .filter(|dep| dep.required_by.iter().any(|r| names.contains(r)))
                .cloned()
                .collect();
            if !scoped.is_empty() {
                *dependency_info = scoped;
                *dep_selected = 0;
            }
        }
    }
    let crate_root::state::Modal::Preflight {
        dependency_info, ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        dependency_info.len(),
        2,
        "Should have 2 dependencies (one for each package)"
    );
    let has_dep_for = |pkg: &str| {
        dependency_info
            .iter()
            .any(|d| d.required_by.contains(&pkg.to_string()))
    };
    assert!(
        has_dep_for("test-official-package"),
        "Should have dependency for official package"
    );
    assert!(
        has_dep_for("test-aur-package"),
        "Should have dependency for AUR package"
    );
}
/// What: Test and verify files tab shows both packages.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Verifies files are correctly displayed
///
/// Details:
/// - Switches to the Files tab and syncs cached file info into
///   the modal, then asserts both packages have an entry
fn test_files_tab(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Files;
        let names: std::collections::HashSet<String> =
            items.iter().map(|i| i.name.clone()).collect();
        let from_cache: Vec<_> = app
            .install_list_files
            .iter()
            .filter(|f| names.contains(&f.name))
            .cloned()
            .collect();
        if !from_cache.is_empty() {
            *file_info = from_cache;
            *file_selected = 0;
        }
    }
    let crate_root::state::Modal::Preflight { file_info, .. } = &app.modal else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        file_info.len(),
        2,
        "Should have 2 file entries (one for each package)"
    );
    let has_entry = |pkg: &str| file_info.iter().any(|f| f.name == pkg);
    assert!(
        has_entry("test-official-package"),
        "Should have files for official package"
    );
    assert!(
        has_entry("test-aur-package"),
        "Should have files for AUR package"
    );
}
/// What: Test and verify services tab shows both packages.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Verifies services are correctly displayed
///
/// Details:
/// - Switches to the Services tab and syncs cached service impacts
///   into the modal, then asserts one service per package and that
///   the loaded flag is set
fn test_services_tab(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        service_info,
        service_selected,
        services_loaded,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Services;
        if matches!(*action, crate_root::state::PreflightAction::Install) {
            let names: std::collections::HashSet<String> =
                items.iter().map(|i| i.name.clone()).collect();
            let from_cache: Vec<_> = app
                .install_list_services
                .iter()
                .filter(|s| s.providers.iter().any(|p| names.contains(p)))
                .cloned()
                .collect();
            if !from_cache.is_empty() {
                *service_info = from_cache;
                *services_loaded = true;
                *service_selected = 0;
            }
        }
    }
    let crate_root::state::Modal::Preflight {
        service_info,
        services_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        service_info.len(),
        2,
        "Should have 2 services (one for each package)"
    );
    assert!(*services_loaded, "Services should be marked as loaded");
    let provided_by = |pkg: &str| {
        service_info
            .iter()
            .any(|s| s.providers.contains(&pkg.to_string()))
    };
    assert!(
        provided_by("test-official-package"),
        "Should have service for official package"
    );
    assert!(
        provided_by("test-aur-package"),
        "Should have service for AUR package"
    );
}
/// What: Test and verify sandbox tab shows only AUR package.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Verifies sandbox only contains AUR package
///
/// Details:
/// - Switches to the Sandbox tab, syncing only sandbox entries whose
///   package is AUR-sourced, then asserts the official package is
///   excluded
fn test_sandbox_tab(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        sandbox_info,
        sandbox_loaded,
        ..
    } = &mut app.modal
    {
        *tab = crate_root::state::PreflightTab::Sandbox;
        if matches!(*action, crate_root::state::PreflightAction::Install) {
            let aur_names: Vec<String> = items
                .iter()
                .filter(|p| matches!(p.source, crate_root::state::Source::Aur))
                .map(|p| p.name.clone())
                .collect();
            let from_cache: Vec<_> = app
                .install_list_sandbox
                .iter()
                .filter(|s| aur_names.contains(&s.package_name))
                .cloned()
                .collect();
            if !from_cache.is_empty() {
                *sandbox_info = from_cache;
                *sandbox_loaded = true;
            }
        }
    }
    let crate_root::state::Modal::Preflight {
        tab,
        sandbox_info,
        sandbox_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Sandbox,
        "Should be on Sandbox tab"
    );
    assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
    assert_eq!(
        sandbox_info.len(),
        1,
        "Should have 1 sandbox entry (only AUR package)"
    );
    assert_eq!(
        sandbox_info[0].package_name, "test-aur-package",
        "Sandbox should only contain AUR package"
    );
    assert!(
        !sandbox_info
            .iter()
            .any(|s| s.package_name == "test-official-package"),
        "Official package should NOT be in sandbox"
    );
}
/// What: Verify all tabs show correct data.
///
/// Inputs:
/// - `app`: Application state with modal
///
/// Output:
/// - Verifies final state of all tabs
///
/// Details:
/// - Deps, files, and services hold one entry per package;
///   the sandbox holds only the AUR package
fn verify_all_tabs(app: &crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        dependency_info,
        file_info,
        service_info,
        sandbox_info,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(dependency_info.len(), 2, "Deps should show both packages");
    assert_eq!(file_info.len(), 2, "Files should show both packages");
    assert_eq!(service_info.len(), 2, "Services should show both packages");
    assert_eq!(
        sandbox_info.len(),
        1,
        "Sandbox should only show AUR package"
    );
    assert_eq!(
        sandbox_info[0].package_name, "test-aur-package",
        "Sandbox should contain AUR package"
    );
}
#[test]
/// What: Verify that preflight modal handles mix of AUR and official packages correctly.
///
/// Inputs:
/// - Mix of AUR and official packages in `install_list`
/// - Different loading characteristics for each type
/// - Preflight modal opened with both types
///
/// Output:
/// - Sandbox tab only shows AUR packages
/// - Other tabs (Deps, Files, Services) show all packages
/// - AUR-specific features (sandbox) work correctly
/// - Official packages are excluded from sandbox
///
/// Details:
/// - Tests that filtering works correctly for AUR vs official packages
/// - Verifies sandbox tab only displays AUR packages
/// - Ensures other tabs display all packages regardless of source
fn preflight_handles_aur_and_official_package_mix() {
    // NOTE(review): `std::env::set_var` is unsafe because environment
    // mutation races with concurrent env reads in other threads; this
    // assumes no other thread touches the environment while this test
    // runs — confirm the test harness configuration.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    let test_packages = create_test_packages();
    setup_test_app_state(&mut app, &test_packages);
    app.modal = create_test_preflight_modal(&test_packages);
    // Exercise each tab in turn, then verify the combined end state.
    test_deps_tab(&mut app);
    test_files_tab(&mut app);
    test_services_tab(&mut app);
    test_sandbox_tab(&mut app);
    verify_all_tabs(&app);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/auto_scroll.rs | tests/preflight_integration/auto_scroll.rs | //! Integration tests for auto-scrolling logs in `PreflightExec` modal.
//!
//! Tests cover:
//! - `PreflightExec` modal `log_lines` append behavior
//! - Log panel state after multiple line additions
//! - Progress bar line replacement in `log_lines`
#![cfg(test)]
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source,
modal::PreflightHeaderChips,
};
/// What: Create a test package item.
///
/// Inputs:
/// - `name`: Package name
///
/// Output:
/// - `PackageItem` ready for testing
///
/// Details:
/// - Helper to create test packages
fn create_test_package(name: &str) -> PackageItem {
PackageItem {
name: name.into(),
version: "1.0.0".into(),
description: String::new(),
source: Source::Aur,
popularity: None,
out_of_date: None,
orphaned: false,
}
}
#[test]
/// What: Test `PreflightExec` modal initial state with empty `log_lines`.
///
/// Inputs:
/// - `PreflightExec` modal with empty `log_lines`.
///
/// Output:
/// - `log_lines` is empty.
///
/// Details:
/// - Verifies initial state before output starts.
fn integration_preflight_exec_empty_log_lines() {
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert!(log_lines.is_empty());
}
#[test]
/// What: Test `log_lines` append for single line.
///
/// Inputs:
/// - Single output line added to `log_lines`.
///
/// Output:
/// - `log_lines` contains the line.
///
/// Details:
/// - Simulates receiving one output line.
fn integration_preflight_exec_append_single_line() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        log_lines.push(":: Synchronizing package databases...".to_string());
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 1);
    assert_eq!(log_lines[0], ":: Synchronizing package databases...");
}
#[test]
/// What: Test `log_lines` append for multiple lines.
///
/// Inputs:
/// - Multiple output lines added sequentially.
///
/// Output:
/// - All lines are stored in order.
///
/// Details:
/// - Simulates receiving multiple output lines.
fn integration_preflight_exec_append_multiple_lines() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Fixed literal list: an array avoids the needless heap allocation
    // that `vec![...]` incurred here (clippy::useless_vec).
    let lines = [
        ":: Synchronizing package databases...",
        " core is up to date",
        " extra is up to date",
        ":: Starting full system upgrade...",
        "resolving dependencies...",
        "looking for conflicting packages...",
    ];
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        // `extend` reserves capacity once instead of growing per push.
        log_lines.extend(lines.iter().map(|l| (*l).to_string()));
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 6);
    // Order must match the order in which the lines arrived.
    for (got, expected) in log_lines.iter().zip(lines.iter()) {
        assert_eq!(got, expected);
    }
}
#[test]
/// What: Test progress bar line replacement.
///
/// Inputs:
/// - Progress bar updates replacing the last line.
///
/// Output:
/// - Last line is replaced, not appended.
///
/// Details:
/// - Simulates `ReplaceLastLine` behavior for progress bars.
fn integration_preflight_exec_progress_bar_replace() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec!["Downloading package...".to_string()],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    // Fixed frame list: an array avoids the needless heap allocation
    // that `vec![...]` incurred here (clippy::useless_vec).
    let progress_updates = [
        "[#-------] 10%",
        "[##------] 25%",
        "[####----] 50%",
        "[######--] 75%",
        "[########] 100%",
    ];
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        for progress in progress_updates {
            // Replace last line (progress bar behavior).
            if !log_lines.is_empty() {
                log_lines.pop();
            }
            log_lines.push(progress.to_string());
        }
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    // Only the final progress frame should remain.
    assert_eq!(log_lines.len(), 1);
    assert_eq!(log_lines[0], "[########] 100%");
}
#[test]
/// What: Test that progress bars with carriage returns are properly rendered.
///
/// Inputs:
/// - Progress bar updates using `ReplaceLastLine` behavior.
///
/// Output:
/// - Progress bar updates don't create duplicate lines.
/// - Only the latest progress bar state is displayed.
///
/// Details:
/// - Verifies progress bar rendering handles carriage return semantics correctly.
fn integration_preflight_exec_progress_bar_carriage_return() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        // Initial status line precedes any progress frames.
        log_lines.push("Downloading packages...".to_string());
        // Each frame replaces the previous one when the tail is a bar.
        for frame in [
            "[#-------] 10%",
            "[##------] 25%",
            "[####----] 50%",
            "[######--] 75%",
            "[########] 100%",
        ] {
            if log_lines.last().is_some_and(|l| l.starts_with('[')) {
                log_lines.pop();
            }
            log_lines.push(frame.to_string());
        }
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    // Only the initial message and the final frame survive.
    assert_eq!(log_lines.len(), 2);
    assert_eq!(log_lines[0], "Downloading packages...");
    assert_eq!(log_lines[1], "[########] 100%");
    // No duplicate progress bars remain in the log.
    let progress_bar_count = log_lines.iter().filter(|l| l.starts_with('[')).count();
    assert_eq!(
        progress_bar_count, 1,
        "Should have only one progress bar line"
    );
}
#[test]
/// What: Test mixed regular lines and progress bar updates.
///
/// Inputs:
/// - Regular output lines followed by progress bar updates.
///
/// Output:
/// - Regular lines preserved, progress bar replaces its line.
///
/// Details:
/// - Simulates realistic pacman output pattern.
fn integration_preflight_exec_mixed_output() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        // Regular output line.
        log_lines.push(":: Retrieving packages...".to_string());
        // Progress bar: first frame appended, later frames replace it.
        log_lines.push("[#-------] 10%".to_string());
        for frame in ["[####----] 50%", "[########] 100%"] {
            log_lines.pop();
            log_lines.push(frame.to_string());
        }
        // Regular output resumes after the bar completes.
        log_lines.push("downloading ripgrep-14.0.0...".to_string());
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 3);
    assert_eq!(log_lines[0], ":: Retrieving packages...");
    assert_eq!(log_lines[1], "[########] 100%");
    assert_eq!(log_lines[2], "downloading ripgrep-14.0.0...");
}
#[test]
/// What: Test large log output handling.
///
/// Inputs:
/// - Many output lines (simulating verbose package operation).
///
/// Output:
/// - All lines are stored correctly.
///
/// Details:
/// - Appends 500 lines in one pass and checks both ends.
fn integration_preflight_exec_large_log_output() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: true,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        log_lines.extend(
            (0..500).map(|i| format!("Installing file {i}/500: /usr/lib/pkg/file{i}.so")),
        );
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 500);
    assert!(log_lines[0].contains("file 0"));
    assert!(log_lines[499].contains("file 499"));
}
#[test]
/// What: Test `log_lines` with ANSI color codes.
///
/// Inputs:
/// - Output lines containing ANSI escape sequences.
///
/// Output:
/// - Lines are stored as-is (rendering strips ANSI).
///
/// Details:
/// - Verifies colored output is preserved in state.
fn integration_preflight_exec_ansi_color_output() {
    let mut app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    if let Modal::PreflightExec { log_lines, .. } = &mut app.modal {
        // Lines carrying raw ANSI escape sequences.
        log_lines.push("\x1b[1;32m::\x1b[0m Synchronizing package databases...".to_string());
        log_lines.push("\x1b[1;34m core\x1b[0m is up to date".to_string());
    }
    let Modal::PreflightExec { log_lines, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(log_lines.len(), 2);
    // Escape sequences must survive storage untouched.
    for line in &log_lines {
        assert!(line.contains("\x1b["));
    }
}
#[test]
/// What: Test verbose mode flag in `PreflightExec`.
///
/// Inputs:
/// - `PreflightExec` modal with verbose=true.
///
/// Output:
/// - verbose flag is correctly set.
///
/// Details:
/// - Verifies verbose mode can be enabled.
fn integration_preflight_exec_verbose_flag() {
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: true,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    let Modal::PreflightExec { verbose, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert!(verbose);
}
#[test]
/// What: Test abortable flag in `PreflightExec`.
///
/// Inputs:
/// - `PreflightExec` modal with abortable=true.
///
/// Output:
/// - abortable flag is correctly set.
///
/// Details:
/// - Verifies abort capability can be enabled.
fn integration_preflight_exec_abortable_flag() {
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: true,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    let Modal::PreflightExec { abortable, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert!(abortable);
}
#[test]
/// What: Test `PreflightExec` with Remove action.
///
/// Inputs:
/// - `PreflightExec` modal for remove operation.
///
/// Output:
/// - action is correctly set to Remove.
///
/// Details:
/// - Verifies removal operations work with `PreflightExec`.
fn integration_preflight_exec_remove_action() {
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Remove,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    let Modal::PreflightExec { action, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(action, PreflightAction::Remove);
}
#[test]
/// What: Test `PreflightExec` with Downgrade action.
///
/// Inputs:
/// - `PreflightExec` modal for downgrade operation.
///
/// Output:
/// - action is correctly set to Downgrade.
///
/// Details:
/// - Verifies downgrade operations work with `PreflightExec`.
fn integration_preflight_exec_downgrade_action() {
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![create_test_package("test-pkg")],
            action: PreflightAction::Downgrade,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: PreflightHeaderChips::default(),
            success: None,
        },
        ..Default::default()
    };
    let Modal::PreflightExec { action, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(action, PreflightAction::Downgrade);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/remove_operations.rs | tests/preflight_integration/remove_operations.rs | //! Tests for remove operations.
use super::helpers;
use pacsea as crate_root;
/// What: Create test reverse dependencies for remove operations.
///
/// Inputs:
/// - None
///
/// Output:
/// - Vector of `DependencyInfo` representing reverse dependencies
///
/// Details:
/// - Creates two test reverse dependencies that depend on test-package-1
/// - Both are marked as installed so they model packages that would
///   break if test-package-1 were removed
fn create_test_reverse_deps() -> Vec<crate_root::state::modal::DependencyInfo> {
    vec![
        // First dependent: installed from the `extra` repo.
        crate_root::state::modal::DependencyInfo {
            name: "dependent-package-1".to_string(),
            version: "2.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::Installed {
                version: "2.0.0".to_string(),
            },
            source: crate_root::state::modal::DependencySource::Official {
                repo: "extra".to_string(),
            },
            required_by: vec!["test-package-1".to_string()],
            depends_on: vec!["test-package-1".to_string()],
            is_core: false,
            is_system: false,
        },
        // Second dependent: installed from the `community` repo.
        crate_root::state::modal::DependencyInfo {
            name: "dependent-package-2".to_string(),
            version: "3.0.0".to_string(),
            status: crate_root::state::modal::DependencyStatus::Installed {
                version: "3.0.0".to_string(),
            },
            source: crate_root::state::modal::DependencySource::Official {
                repo: "community".to_string(),
            },
            required_by: vec!["test-package-1".to_string()],
            depends_on: vec!["test-package-1".to_string()],
            is_core: false,
            is_system: false,
        },
    ]
}
/// What: Build an app state prepared for remove-operation testing.
///
/// Inputs:
/// - `test_packages`: Packages that are queued for removal
///
/// Output:
/// - `AppState` with `remove_list`, `remove_preflight_summary`, and a
///   preflight modal opened with the Remove action on the Summary tab
///
/// Details:
/// - The modal is built from a clone so the original vector can seed
///   `remove_list` without an extra copy.
fn setup_test_app_with_reverse_deps(
    test_packages: Vec<crate_root::state::PackageItem>,
) -> crate_root::state::AppState {
    let modal = helpers::create_preflight_modal(
        test_packages.clone(),
        crate_root::state::PreflightAction::Remove,
        crate_root::state::PreflightTab::Summary,
    );
    // Summary describing the dependents of the single removal root.
    let summary = crate_root::state::modal::ReverseRootSummary {
        package: "test-package-1".to_string(),
        direct_dependents: 2,
        transitive_dependents: 0,
        total_dependents: 2,
    };
    crate_root::state::AppState {
        remove_list: test_packages,
        preflight_cancelled: std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)),
        remove_preflight_summary: vec![summary],
        modal,
        ..Default::default()
    }
}
/// What: Move the preflight modal to the Deps tab and inject reverse deps.
///
/// Inputs:
/// - `app`: Application state holding a preflight modal
/// - `reverse_deps`: Reverse dependencies to copy into the modal
///
/// Output:
/// - Modal ends up on the Deps tab; for Remove actions the dependency list
///   is replaced and the selection reset
///
/// Details:
/// - Simulates the reverse-dependency resolution step of a Remove preflight.
/// - No-op when the current modal is not a preflight modal.
fn switch_to_deps_tab_and_load_reverse_deps(
    app: &mut crate_root::state::AppState,
    reverse_deps: &[crate_root::state::modal::DependencyInfo],
) {
    let crate_root::state::Modal::Preflight {
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Deps;
    // Reverse deps only apply to removal preflights.
    if matches!(*action, crate_root::state::PreflightAction::Remove) {
        *dependency_info = reverse_deps.to_vec();
        *dep_selected = 0;
    }
}
/// What: Assert the modal shows the expected reverse dependencies.
///
/// Inputs:
/// - `app`: Application state with a preflight modal on the Deps tab
///
/// Output:
/// - Panics when tab, action, count, or any dependency detail is wrong
///
/// Details:
/// - Checks both known dependents by name/version and verifies each one
///   references test-package-1 in `depends_on` and `required_by`.
fn verify_reverse_dependencies(app: &crate_root::state::AppState) {
    let (_, action, tab, dependency_info, _, _, _, _, _) = helpers::assert_preflight_modal(app);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Deps,
        "Should be on Deps tab"
    );
    assert_eq!(
        *action,
        crate_root::state::PreflightAction::Remove,
        "Should be Remove action"
    );
    assert!(
        !dependency_info.is_empty(),
        "Reverse dependencies should be loaded"
    );
    assert_eq!(
        dependency_info.len(),
        2,
        "Should have 2 reverse dependencies"
    );
    let root = "test-package-1".to_string();
    for (name, version) in [("dependent-package-1", "2.0.0"), ("dependent-package-2", "3.0.0")] {
        let dep = dependency_info
            .iter()
            .find(|d| d.name == name)
            .unwrap_or_else(|| panic!("{name} should be found in dependency_info"));
        assert_eq!(dep.version, version);
        assert!(dep.depends_on.contains(&root));
        assert!(dep.required_by.contains(&root));
    }
}
/// What: Assert `remove_preflight_summary` carries the expected data.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Panics when the summary is empty or its fields differ from expectations
///
/// Details:
/// - Checks the first summary entry: package name plus direct and total
///   dependent counts.
fn verify_remove_preflight_summary(app: &crate_root::state::AppState) {
    assert!(
        !app.remove_preflight_summary.is_empty(),
        "Remove preflight summary should be populated"
    );
    let first = &app.remove_preflight_summary[0];
    assert_eq!(first.package, "test-package-1");
    assert_eq!(first.direct_dependents, 2);
    assert_eq!(first.total_dependents, 2);
}
/// What: Move to the Files tab and confirm the Remove action is retained.
///
/// Inputs:
/// - `app`: Application state holding a preflight modal
///
/// Output:
/// - Modal ends up on the Files tab with the Remove action intact
///
/// Details:
/// - Only the tab is mutated; the action must remain untouched.
fn verify_files_tab(app: &mut crate_root::state::AppState) {
    if let crate_root::state::Modal::Preflight { tab, .. } = &mut app.modal {
        *tab = crate_root::state::PreflightTab::Files;
    }
    let (_, action, tab, _, _, _, _, _, _) = helpers::assert_preflight_modal(app);
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be on Files tab"
    );
    assert_eq!(
        *action,
        crate_root::state::PreflightAction::Remove,
        "Should still be Remove action"
    );
}
/// What: Assert reverse dependencies survive tab switching.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Panics when the dependency list has been cleared
///
/// Details:
/// - Dependency data must not be dropped by navigating between tabs.
fn verify_deps_persistence(app: &crate_root::state::AppState) {
    let deps = helpers::assert_preflight_modal(app).3;
    assert!(
        !deps.is_empty(),
        "Reverse dependencies should persist when switching back"
    );
}
/// What: Final check of remove-specific modal data.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Panics when the action is not Remove, the dependency list is empty, or
///   any dependency does not reference test-package-1
///
/// Details:
/// - Every reverse dependency must list the removal root in `depends_on`.
fn verify_final_remove_action(app: &crate_root::state::AppState) {
    let (_, action, _, dependency_info, _, _, _, _, _) = helpers::assert_preflight_modal(app);
    assert_eq!(
        *action,
        crate_root::state::PreflightAction::Remove,
        "Should be Remove action"
    );
    assert!(
        !dependency_info.is_empty(),
        "Reverse dependencies should be present"
    );
    let root = "test-package-1".to_string();
    assert!(
        dependency_info
            .iter()
            .all(|dep| dep.depends_on.contains(&root)),
        "All reverse dependencies should depend on test-package-1"
    );
}
#[test]
/// What: End-to-end check of the preflight modal for remove operations.
///
/// Inputs:
/// - One package in `remove_list`
/// - Preflight modal opened with the Remove action
/// - Simulated reverse-dependency resolution
///
/// Output:
/// - Deps tab lists reverse dependencies; other tabs keep the Remove action
/// - Dependency data persists across tab switches
///
/// Details:
/// - Drives the modal through Deps and Files tabs and re-validates state at
///   every step, including the `remove_preflight_summary`.
fn preflight_remove_action_with_reverse_dependencies() {
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let packages = vec![helpers::create_test_package(
        "test-package-1",
        "1.0.0",
        crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
    )];
    let reverse_deps = create_test_reverse_deps();
    let mut app = setup_test_app_with_reverse_deps(packages);
    // Deps tab: reverse dependencies become visible.
    switch_to_deps_tab_and_load_reverse_deps(&mut app, &reverse_deps);
    verify_reverse_dependencies(&app);
    // Summary data for the removal roots is available.
    verify_remove_preflight_summary(&app);
    // Files tab: the Remove action carries over.
    verify_files_tab(&mut app);
    // Returning to Deps keeps the resolved reverse dependencies.
    verify_deps_persistence(&app);
    // Every reverse dependency points back at the package being removed.
    verify_final_remove_action(&app);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/error_handling.rs | tests/preflight_integration/error_handling.rs | //! Tests for error handling and partial failures.
use pacsea as crate_root;
/// What: Set up test app state with pre-populated cache data.
///
/// Inputs:
/// - None (uses hardcoded test data)
///
/// Output:
/// - `AppState` with test packages, dependencies, files, and services error
///
/// Details:
/// - Creates test package and pre-populates cache with successful deps/files
/// - Sets up services failure state
/// - The modal is opened on the Summary tab with `services_error` pre-set,
///   so Deps/Files succeed while Services is in a failed state.
fn setup_test_app_state() -> crate_root::state::AppState {
    let mut app = crate_root::state::AppState::default();
    // Single official package that drives the whole scenario.
    let test_packages = vec![crate_root::state::PackageItem {
        name: "test-package-1".to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        source: crate_root::state::Source::Official {
            repo: "core".to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }];
    // Pre-populate cache with dependencies (successful)
    app.install_list_deps = vec![crate_root::state::modal::DependencyInfo {
        name: "test-dep-1".to_string(),
        version: "1.0.0".to_string(),
        status: crate_root::state::modal::DependencyStatus::ToInstall,
        source: crate_root::state::modal::DependencySource::Official {
            repo: "core".to_string(),
        },
        required_by: vec!["test-package-1".to_string()],
        depends_on: Vec::new(),
        is_core: false,
        is_system: false,
    }];
    // Pre-populate cache with files (successful)
    app.install_list_files = vec![crate_root::state::modal::PackageFileInfo {
        name: "test-package-1".to_string(),
        files: vec![crate_root::state::modal::FileChange {
            path: "/usr/bin/test1".to_string(),
            change_type: crate_root::state::modal::FileChangeType::New,
            package: "test-package-1".to_string(),
            is_config: false,
            predicted_pacnew: false,
            predicted_pacsave: false,
        }],
        total_count: 1,
        new_count: 1,
        changed_count: 0,
        removed_count: 0,
        config_count: 0,
        pacnew_candidates: 0,
        pacsave_candidates: 0,
    }];
    // Services failed (error in cache)
    app.install_list_services = vec![];
    app.preflight_services_resolving = false;
    app.preflight_services_items = None;
    // Set packages in install list
    app.install_list = test_packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Open preflight modal with services error
    app.modal = crate_root::state::Modal::Preflight {
        items: test_packages.clone(),
        action: crate_root::state::PreflightAction::Install,
        tab: crate_root::state::PreflightTab::Summary,
        summary: None,
        summary_scroll: 0,
        header_chips: crate_root::state::modal::PreflightHeaderChips {
            package_count: test_packages.len(),
            download_bytes: 0,
            install_delta_bytes: 0,
            aur_count: 0,
            risk_score: 0,
            risk_level: crate_root::state::modal::RiskLevel::Low,
        },
        dependency_info: Vec::new(),
        dep_selected: 0,
        dep_tree_expanded: std::collections::HashSet::new(),
        deps_error: None,
        file_info: Vec::new(),
        file_selected: 0,
        file_tree_expanded: std::collections::HashSet::new(),
        files_error: None,
        service_info: Vec::new(),
        service_selected: 0,
        services_loaded: false,
        // The Services tab is the one failing tab in this scenario.
        services_error: Some("Failed to resolve services: systemd not available".to_string()),
        sandbox_info: Vec::new(),
        sandbox_selected: 0,
        sandbox_tree_expanded: std::collections::HashSet::new(),
        sandbox_loaded: true,
        sandbox_error: None,
        selected_optdepends: std::collections::HashMap::new(),
        cascade_mode: crate_root::state::modal::CascadeMode::Basic,
        cached_reverse_deps_report: None,
    };
    app
}
/// What: Switch to the Deps tab and pull cached dependency data.
///
/// Inputs:
/// - `app`: Application state with `install_list_deps` cached
///
/// Output:
/// - Modal on the Deps tab; for Install actions, `dependency_info` is filled
///   from the cache and the selection reset
///
/// Details:
/// - Mirrors the production `sync_dependencies` logic: only dependencies
///   required by a package shown in the modal are copied over, and an empty
///   filter result leaves the existing data untouched.
fn sync_dependencies_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        action,
        tab,
        dependency_info,
        dep_selected,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Deps;
    if !matches!(*action, crate_root::state::PreflightAction::Install) {
        return;
    }
    let wanted: std::collections::HashSet<String> =
        items.iter().map(|i| i.name.clone()).collect();
    let filtered: Vec<_> = app
        .install_list_deps
        .iter()
        .filter(|dep| dep.required_by.iter().any(|req_by| wanted.contains(req_by)))
        .cloned()
        .collect();
    if !filtered.is_empty() {
        *dependency_info = filtered;
        *dep_selected = 0;
    }
}
/// What: Switch to the Files tab and pull cached file data.
///
/// Inputs:
/// - `app`: Application state with `install_list_files` cached
///
/// Output:
/// - Modal on the Files tab; `file_info` filled from the cache when entries
///   match the modal's packages
///
/// Details:
/// - Mirrors the production `sync_files` logic; an empty filter result
///   leaves the existing file data untouched.
fn sync_files_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        items,
        tab,
        file_info,
        file_selected,
        ..
    } = &mut app.modal
    else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Files;
    let wanted: std::collections::HashSet<String> =
        items.iter().map(|i| i.name.clone()).collect();
    let cached: Vec<_> = app
        .install_list_files
        .iter()
        .filter(|info| wanted.contains(&info.name))
        .cloned()
        .collect();
    if !cached.is_empty() {
        *file_info = cached;
        *file_selected = 0;
    }
}
/// What: Move the preflight modal onto the Services tab.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Modal tab set to Services (no data sync is performed)
///
/// Details:
/// - No-op when the current modal is not a preflight modal.
fn switch_to_services_tab(app: &mut crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight { tab, .. } = &mut app.modal else {
        return;
    };
    *tab = crate_root::state::PreflightTab::Services;
}
/// What: Assert the Deps tab synced successfully.
///
/// Inputs:
/// - `app`: Application state
/// - `expected_message`: Message used for the no-error assertion
///
/// Output:
/// - Panics when the tab is wrong, data is missing, or an error is present
///
/// Details:
/// - Verifies tab position, loaded dependencies, and absence of `deps_error`.
fn verify_deps_tab(app: &crate_root::state::AppState, expected_message: &str) {
    let crate_root::state::Modal::Preflight {
        tab,
        dependency_info,
        deps_error,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Deps,
        "Should be on Deps tab"
    );
    assert!(!dependency_info.is_empty(), "Dependencies should be loaded");
    assert!(deps_error.is_none(), "{expected_message}");
}
/// What: Assert the Files tab synced successfully.
///
/// Inputs:
/// - `app`: Application state
/// - `expected_message`: Message used for the no-error assertion
///
/// Output:
/// - Panics when the tab is wrong, data is missing, or an error is present
///
/// Details:
/// - Verifies tab position, loaded files, and absence of `files_error`.
fn verify_files_tab(app: &crate_root::state::AppState, expected_message: &str) {
    let crate_root::state::Modal::Preflight {
        tab,
        file_info,
        files_error,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Files,
        "Should be on Files tab"
    );
    assert!(!file_info.is_empty(), "Files should be loaded");
    assert!(files_error.is_none(), "{expected_message}");
}
/// What: Assert the Services tab reflects its failure state.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Panics when the tab is wrong, services loaded, or the error is missing
///   or has unexpected text
///
/// Details:
/// - Services must be empty, unloaded, and carry the exact seeded error.
fn verify_services_tab_error(app: &crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        tab,
        service_info,
        services_loaded,
        services_error,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    assert_eq!(
        *tab,
        crate_root::state::PreflightTab::Services,
        "Should be on Services tab"
    );
    assert!(service_info.is_empty(), "Services should be empty (failed)");
    assert!(!*services_loaded, "Services should not be marked as loaded");
    assert!(
        services_error.is_some(),
        "Services should have error message"
    );
    let err = services_error
        .as_ref()
        .expect("services_error should be Some");
    assert_eq!(
        err, "Failed to resolve services: systemd not available",
        "Error message should match"
    );
}
/// What: Final check that the Services failure did not leak into other tabs.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - Panics when a healthy tab lost data/gained an error, or the failed tab
///   lost its error state
///
/// Details:
/// - Deps and Files keep their data and stay error-free; Services stays
///   empty, unloaded, and in error.
fn verify_final_state(app: &crate_root::state::AppState) {
    let crate_root::state::Modal::Preflight {
        dependency_info,
        file_info,
        service_info,
        deps_error,
        files_error,
        services_error,
        services_loaded,
        ..
    } = &app.modal
    else {
        panic!("Expected Preflight modal");
    };
    // Successful tabs should have data and no errors
    assert!(!dependency_info.is_empty(), "Dependencies should have data");
    assert!(deps_error.is_none(), "Deps should not have error");
    assert!(!file_info.is_empty(), "Files should have data");
    assert!(files_error.is_none(), "Files should not have error");
    // Failed tab should have error and no data
    assert!(service_info.is_empty(), "Services should be empty (failed)");
    assert!(!*services_loaded, "Services should not be loaded");
    assert!(
        services_error.is_some(),
        "Services should have error message"
    );
}
#[test]
/// What: Verify the preflight modal tolerates a single failing tab.
///
/// Inputs:
/// - Packages in `install_list` with cached deps/files and a seeded
///   Services error
/// - Tab switches across Deps, Files, and Services
///
/// Output:
/// - Healthy tabs show their data; the failing tab shows its error
/// - Revisiting healthy tabs after the failure still works
///
/// Details:
/// - Edge case: resolution fails for exactly one tab (Services) while the
///   others succeed; the failure must stay isolated.
fn preflight_handles_partial_failures_correctly() {
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = setup_test_app_state();
    // Deps resolved successfully: data loads, no error.
    sync_dependencies_tab(&mut app);
    verify_deps_tab(&app, "Deps should not have error");
    // Files resolved successfully: data loads, no error.
    sync_files_tab(&mut app);
    verify_files_tab(&app, "Files should not have error");
    // Services failed: the tab surfaces the stored error.
    switch_to_services_tab(&mut app);
    verify_services_tab_error(&app);
    // Revisit the healthy tabs; the Services failure must not leak into them.
    sync_dependencies_tab(&mut app);
    verify_deps_tab(
        &app,
        "Deps should not have error (unaffected by Services failure)",
    );
    sync_files_tab(&mut app);
    verify_files_tab(
        &app,
        "Files should not have error (unaffected by Services failure)",
    );
    // Final verification: successful tabs unaffected by the failure.
    verify_final_state(&app);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/risk_calculation.rs | tests/preflight_integration/risk_calculation.rs | //! Integration tests for enhanced preflight risk calculation.
//!
//! Tests cover:
//! - Risk score calculation with dependents (+2 per dependent)
//! - Risk level thresholds (Low/Medium/High)
//! - Header chips update with risk info
//! - Multiple dependents accumulation
#![cfg(test)]
use pacsea::state::{
AppState, Modal, PackageItem, PreflightAction, PreflightTab, Source,
modal::{CascadeMode, PreflightHeaderChips, RiskLevel},
};
use std::collections::HashSet;
/// What: Build a minimal `PackageItem` for tests.
///
/// Inputs:
/// - `name`: Package name
/// - `source`: Package source
///
/// Output:
/// - `PackageItem` at version 1.0.0 with all optional metadata cleared
///
/// Details:
/// - Shared helper for the risk-calculation tests below.
fn create_test_package(name: &str, source: Source) -> PackageItem {
    PackageItem {
        source,
        name: name.to_string(),
        version: "1.0.0".to_string(),
        description: String::new(),
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Check the default value of `RiskLevel`.
///
/// Inputs:
/// - `RiskLevel::default()`.
///
/// Output:
/// - Equals `RiskLevel::Low`.
///
/// Details:
/// - Guards against the default risk level drifting away from Low.
fn integration_risk_level_default_is_low() {
    assert_eq!(RiskLevel::default(), RiskLevel::Low);
}
#[test]
/// What: Check every field of the default `PreflightHeaderChips`.
///
/// Inputs:
/// - `PreflightHeaderChips::default()`.
///
/// Output:
/// - All counters are zero and the risk level is Low.
///
/// Details:
/// - Destructures the struct so a newly added field breaks this test.
fn integration_header_chips_default() {
    let PreflightHeaderChips {
        package_count,
        download_bytes,
        install_delta_bytes,
        aur_count,
        risk_score,
        risk_level,
    } = PreflightHeaderChips::default();
    assert_eq!(package_count, 0);
    assert_eq!(download_bytes, 0);
    assert_eq!(install_delta_bytes, 0);
    assert_eq!(aur_count, 0);
    assert_eq!(risk_score, 0);
    assert_eq!(risk_level, RiskLevel::Low);
}
#[test]
/// What: Header chips with a score below 10 stay at Low risk.
///
/// Inputs:
/// - `risk_score` of 5.
///
/// Output:
/// - Risk level is Low.
///
/// Details:
/// - Exercises the low end of the risk threshold scale.
fn integration_header_chips_low_risk() {
    let header = PreflightHeaderChips {
        risk_score: 5,
        risk_level: RiskLevel::Low,
        package_count: 1,
        download_bytes: 1024,
        install_delta_bytes: 2048,
        aur_count: 0,
    };
    assert_eq!(header.risk_score, 5);
    assert_eq!(header.risk_level, RiskLevel::Low);
}
#[test]
/// What: Header chips with a score in [10, 30) are Medium risk.
///
/// Inputs:
/// - `risk_score` of 20.
///
/// Output:
/// - Risk level is Medium.
///
/// Details:
/// - Exercises the middle band of the risk threshold scale.
fn integration_header_chips_medium_risk() {
    let header = PreflightHeaderChips {
        risk_score: 20,
        risk_level: RiskLevel::Medium,
        package_count: 5,
        download_bytes: 50 * 1024 * 1024,
        install_delta_bytes: 100 * 1024 * 1024,
        aur_count: 2,
    };
    assert_eq!(header.risk_score, 20);
    assert_eq!(header.risk_level, RiskLevel::Medium);
}
#[test]
/// What: Header chips with a score of 30 or more are High risk.
///
/// Inputs:
/// - `risk_score` of 45.
///
/// Output:
/// - Risk level is High.
///
/// Details:
/// - Exercises the top band of the risk threshold scale.
fn integration_header_chips_high_risk() {
    let header = PreflightHeaderChips {
        risk_score: 45,
        risk_level: RiskLevel::High,
        package_count: 10,
        download_bytes: 200 * 1024 * 1024,
        install_delta_bytes: 500 * 1024 * 1024,
        aur_count: 5,
    };
    assert_eq!(header.risk_score, 45);
    assert_eq!(header.risk_level, RiskLevel::High);
}
#[test]
/// What: Dependent packages add +2 each to the risk score.
///
/// Inputs:
/// - Base score 5 plus 3 dependents.
///
/// Output:
/// - Score of 11, Medium risk.
///
/// Details:
/// - Confirms the +2-per-dependent contribution tips a Low score into Medium.
fn integration_risk_score_with_dependents() {
    // Base risk plus two points for each dependent package.
    let total = 5 + 3 * 2;
    let level = match total {
        0..=9 => RiskLevel::Low,
        10..=29 => RiskLevel::Medium,
        _ => RiskLevel::High,
    };
    let header = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 0,
        risk_score: total,
        risk_level: level,
    };
    assert_eq!(header.risk_score, 11); // 5 + (3 * 2)
    assert_eq!(header.risk_level, RiskLevel::Medium);
}
#[test]
/// What: Many dependents push the risk level to High.
///
/// Inputs:
/// - Base score 10 plus 15 dependents.
///
/// Output:
/// - Score of 40, High risk.
///
/// Details:
/// - Confirms the dependent contribution alone can cross the High threshold.
fn integration_risk_score_many_dependents() {
    let base: u8 = 10;
    let dependents: u8 = 15;
    let total = base + dependents * 2;
    let level = match total {
        0..=9 => RiskLevel::Low,
        10..=29 => RiskLevel::Medium,
        _ => RiskLevel::High,
    };
    let header = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 0,
        risk_score: total,
        risk_level: level,
    };
    assert_eq!(header.risk_score, 40); // 10 + (15 * 2)
    assert_eq!(header.risk_level, RiskLevel::High);
}
#[test]
/// What: AUR packages raise the risk score.
///
/// Inputs:
/// - Three AUR packages at 5 points each.
///
/// Output:
/// - Score of 15, Medium risk.
///
/// Details:
/// - Confirms the AUR contribution alone reaches the Medium band.
fn integration_risk_score_aur_contribution() {
    let aur_count: usize = 3;
    let aur_risk: u8 = 15; // 3 AUR packages * 5 points each
    let level = match aur_risk {
        0..=9 => RiskLevel::Low,
        10..=29 => RiskLevel::Medium,
        _ => RiskLevel::High,
    };
    let header = PreflightHeaderChips {
        package_count: 5,
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count,
        risk_score: aur_risk,
        risk_level: level,
    };
    assert_eq!(header.aur_count, 3);
    assert_eq!(header.risk_score, 15);
    assert_eq!(header.risk_level, RiskLevel::Medium);
}
#[test]
/// What: Test Preflight modal with header chips.
///
/// Inputs:
/// - Preflight modal with populated header chips.
///
/// Output:
/// - Header chips are accessible.
///
/// Details:
/// - Verifies header chips are stored in Preflight modal.
/// - The modal literal spells out every field so compilation breaks if the
///   `Preflight` variant gains or loses a field.
fn integration_preflight_modal_header_chips() {
    let chips = PreflightHeaderChips {
        package_count: 3,
        download_bytes: 10 * 1024 * 1024,
        install_delta_bytes: 25 * 1024 * 1024,
        aur_count: 1,
        risk_score: 12,
        risk_level: RiskLevel::Medium,
    };
    let app = AppState {
        modal: Modal::Preflight {
            items: vec![create_test_package("test-pkg", Source::Aur)],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            summary: None,
            summary_scroll: 0,
            header_chips: chips,
            dependency_info: vec![],
            dep_selected: 0,
            dep_tree_expanded: HashSet::new(),
            deps_error: None,
            file_info: vec![],
            file_selected: 0,
            file_tree_expanded: HashSet::new(),
            files_error: None,
            service_info: vec![],
            service_selected: 0,
            services_loaded: false,
            services_error: None,
            sandbox_info: vec![],
            sandbox_selected: 0,
            sandbox_tree_expanded: HashSet::new(),
            sandbox_loaded: false,
            sandbox_error: None,
            selected_optdepends: std::collections::HashMap::new(),
            cascade_mode: CascadeMode::Basic,
            cached_reverse_deps_report: None,
        },
        ..Default::default()
    };
    match app.modal {
        Modal::Preflight { header_chips, .. } => {
            assert_eq!(header_chips.package_count, 3);
            assert_eq!(header_chips.aur_count, 1);
            assert_eq!(header_chips.risk_score, 12);
            assert_eq!(header_chips.risk_level, RiskLevel::Medium);
        }
        _ => panic!("Expected Preflight modal"),
    }
}
#[test]
/// What: `PreflightExec` keeps the header chips it was constructed with.
///
/// Inputs:
/// - `PreflightExec` modal carrying populated header chips.
///
/// Output:
/// - The chips read back unchanged.
///
/// Details:
/// - Covers the chips surviving the transition into the exec modal.
fn integration_preflight_exec_header_chips() {
    let header = PreflightHeaderChips {
        package_count: 2,
        download_bytes: 5 * 1024 * 1024,
        install_delta_bytes: 15 * 1024 * 1024,
        aur_count: 0,
        risk_score: 3,
        risk_level: RiskLevel::Low,
    };
    let pkg = create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            success: None,
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: header,
        },
        ..Default::default()
    };
    let Modal::PreflightExec { header_chips, .. } = app.modal else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(header_chips.package_count, 2);
    assert_eq!(header_chips.risk_score, 3);
    assert_eq!(header_chips.risk_level, RiskLevel::Low);
}
#[test]
/// What: `ConfirmReinstall` keeps the header chips it was constructed with.
///
/// Inputs:
/// - `ConfirmReinstall` modal carrying header chips.
///
/// Output:
/// - The chips read back unchanged.
///
/// Details:
/// - Covers the chips being available in the reinstall confirmation modal.
fn integration_confirm_reinstall_header_chips() {
    let header = PreflightHeaderChips {
        package_count: 1,
        download_bytes: 1024,
        install_delta_bytes: 2048,
        aur_count: 0,
        risk_score: 0,
        risk_level: RiskLevel::Low,
    };
    let pkg = create_test_package(
        "test-pkg",
        Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
    );
    let app = AppState {
        modal: Modal::ConfirmReinstall {
            items: vec![pkg.clone()],
            all_items: vec![pkg],
            header_chips: header,
        },
        ..Default::default()
    };
    let Modal::ConfirmReinstall { header_chips, .. } = app.modal else {
        panic!("Expected ConfirmReinstall modal");
    };
    assert_eq!(header_chips.package_count, 1);
    assert_eq!(header_chips.risk_level, RiskLevel::Low);
}
#[test]
/// What: A score of exactly 10 falls on the Medium side of the boundary.
///
/// Inputs:
/// - `risk_score` of 10.
///
/// Output:
/// - Risk level is Medium.
///
/// Details:
/// - Boundary condition between the Low and Medium bands.
fn integration_risk_level_boundary_10() {
    let header = PreflightHeaderChips {
        risk_score: 10,
        risk_level: RiskLevel::Medium,
        package_count: 1,
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 0,
    };
    assert_eq!(header.risk_score, 10);
    assert_eq!(header.risk_level, RiskLevel::Medium);
}
#[test]
/// What: A score of exactly 30 falls on the High side of the boundary.
///
/// Inputs:
/// - `risk_score` of 30.
///
/// Output:
/// - Risk level is High.
///
/// Details:
/// - Boundary condition between the Medium and High bands.
fn integration_risk_level_boundary_30() {
    let header = PreflightHeaderChips {
        risk_score: 30,
        risk_level: RiskLevel::High,
        package_count: 1,
        download_bytes: 0,
        install_delta_bytes: 0,
        aur_count: 0,
    };
    assert_eq!(header.risk_score, 30);
    assert_eq!(header.risk_level, RiskLevel::High);
}
#[test]
/// What: Combined risk factors sum into one score.
///
/// Inputs:
/// - Base risk, two AUR packages, and four dependents.
///
/// Output:
/// - Score of 23, Medium risk.
///
/// Details:
/// - Base 5 + AUR (2 * 5) + dependents (4 * 2) = 23.
fn integration_combined_risk_calculation() {
    let total: u8 = 5 + 2 * 5 + 4 * 2; // base + AUR + dependents
    let level = match total {
        0..=9 => RiskLevel::Low,
        10..=29 => RiskLevel::Medium,
        _ => RiskLevel::High,
    };
    let header = PreflightHeaderChips {
        package_count: 5,
        download_bytes: 50 * 1024 * 1024,
        install_delta_bytes: 100 * 1024 * 1024,
        aur_count: 2,
        risk_score: total,
        risk_level: level,
    };
    assert_eq!(header.risk_score, 23); // 5 + 10 + 8
    assert_eq!(header.risk_level, RiskLevel::Medium);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration/large_datasets.rs | tests/preflight_integration/large_datasets.rs | //! Tests for large datasets handling.
use super::helpers::*;
use pacsea as crate_root;
/// What: Build a larger mixed set of test packages.
///
/// Inputs:
/// - None
///
/// Output:
/// - Vector of 12 packages: 8 official (alternating core/extra) then 4 AUR
///
/// Details:
/// - Uses iterator chains; ordering matches the original loop-based builder.
fn create_large_test_packages() -> Vec<crate_root::state::PackageItem> {
    let official = (1..=8).map(|i| crate_root::state::PackageItem {
        name: format!("test-official-pkg-{i}"),
        version: format!("{i}.0.0"),
        description: String::new(),
        source: crate_root::state::Source::Official {
            // Even indices land in extra, odd ones in core.
            repo: (if i % 2 == 0 { "extra" } else { "core" }).to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    });
    let aur = (1..=4).map(|i| crate_root::state::PackageItem {
        name: format!("test-aur-pkg-{i}"),
        version: format!("{i}.0.0"),
        description: String::new(),
        source: crate_root::state::Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    });
    official.chain(aur).collect()
}
/// What: Populate app cache with dependencies for test packages.
///
/// Inputs:
/// - `app`: Application state to populate
/// - `test_packages`: Packages to create dependencies for
///
/// Output:
/// - Total count of dependencies created
///
/// Details:
/// - Creates 4 dependencies per official package and 3 per AUR package
///   (the original doc said "3-5", which did not match the code)
fn populate_dependencies(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) -> usize {
    let mut expected_dep_count = 0;
    for pkg in test_packages {
        // Official packages get one more dependency than AUR packages so the
        // per-package counts can be distinguished in assertions.
        let dep_count = if pkg.name.contains("official") { 4 } else { 3 };
        for j in 1..=dep_count {
            app.install_list_deps
                .push(crate_root::state::modal::DependencyInfo {
                    name: format!("{}-dep-{}", pkg.name, j),
                    version: "1.0.0".to_string(),
                    status: crate_root::state::modal::DependencyStatus::ToInstall,
                    source: if pkg.name.contains("aur") {
                        crate_root::state::modal::DependencySource::Aur
                    } else {
                        crate_root::state::modal::DependencySource::Official {
                            repo: "core".to_string(),
                        }
                    },
                    required_by: vec![pkg.name.clone()],
                    depends_on: Vec::new(),
                    is_core: false,
                    is_system: false,
                });
            expected_dep_count += 1;
        }
    }
    expected_dep_count
}
/// What: Populate app cache with files for test packages.
///
/// Inputs:
/// - `app`: Application state to populate
/// - `test_packages`: Packages to create files for
///
/// Output:
/// - Total count of files created
///
/// Details:
/// - Creates 3 files per official package and 2 per AUR package
/// - The last file of each package is flagged as a config file
fn populate_files(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) -> usize {
    let mut expected_file_count = 0;
    for pkg in test_packages {
        let file_count = if pkg.name.contains("official") { 3 } else { 2 };
        let mut files = Vec::with_capacity(file_count);
        for j in 1..=file_count {
            files.push(crate_root::state::modal::FileChange {
                path: format!("/usr/bin/{}-file-{}", pkg.name, j),
                change_type: crate_root::state::modal::FileChangeType::New,
                package: pkg.name.clone(),
                // Only the final file of each package counts as a config file.
                is_config: j == file_count,
                predicted_pacnew: false,
                predicted_pacsave: false,
            });
        }
        app.install_list_files
            .push(crate_root::state::modal::PackageFileInfo {
                name: pkg.name.clone(),
                // Move `files` instead of the original redundant `files.clone()`:
                // the vector is not used after this push.
                files,
                total_count: file_count,
                new_count: file_count,
                changed_count: 0,
                removed_count: 0,
                config_count: 1,
                pacnew_candidates: 0,
                pacsave_candidates: 0,
            });
        expected_file_count += file_count;
    }
    expected_file_count
}
/// What: Populate app cache with services for test packages.
///
/// Inputs:
/// - `app`: Application state to populate
/// - `test_packages`: Packages to create services for
///
/// Output:
/// - Total count of services created
///
/// Details:
/// - Creates 2 services per official package and 1 per AUR package
fn populate_services(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) -> usize {
    let mut total = 0usize;
    for pkg in test_packages {
        let per_pkg = if pkg.name.contains("official") { 2 } else { 1 };
        for idx in 1..=per_pkg {
            // Only the first service of each package is active / needs restart.
            let first = idx == 1;
            app.install_list_services
                .push(crate_root::state::modal::ServiceImpact {
                    unit_name: format!("{}-service-{}.service", pkg.name, idx),
                    providers: vec![pkg.name.clone()],
                    is_active: first,
                    needs_restart: first,
                    recommended_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
                    restart_decision: crate_root::state::modal::ServiceRestartDecision::Restart,
                });
            total += 1;
        }
    }
    total
}
/// What: Populate app cache with sandbox info for AUR packages.
///
/// Inputs:
/// - `app`: Application state to populate
/// - `test_packages`: Packages to create sandbox info for
///
/// Output:
/// - Total count of sandbox entries created
///
/// Details:
/// - Only creates sandbox info for AUR packages
fn populate_sandbox(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
) -> usize {
    // Official packages are skipped entirely; only AUR sources get an entry.
    let aur_packages = test_packages
        .iter()
        .filter(|pkg| matches!(pkg.source, crate_root::state::Source::Aur));
    let mut created = 0usize;
    for pkg in aur_packages {
        app.install_list_sandbox
            .push(crate_root::logic::sandbox::SandboxInfo {
                package_name: pkg.name.clone(),
                // A single uninstalled runtime dependency per AUR package.
                depends: vec![crate_root::logic::sandbox::DependencyDelta {
                    name: format!("{}-sandbox-dep", pkg.name),
                    is_installed: false,
                    installed_version: None,
                    version_satisfied: false,
                }],
                makedepends: vec![],
                checkdepends: vec![],
                optdepends: vec![],
            });
        created += 1;
    }
    created
}
/// What: Test that the Deps tab loads and displays all dependencies correctly.
///
/// Inputs:
/// - `app`: Application state
/// - `test_packages`: Test packages to verify
/// - `expected_dep_count`: Expected total dependency count
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Verifies dependency count and that each package has correct dependencies
fn test_deps_tab(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
    expected_dep_count: usize,
) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Deps);
    let (_, _, _, dependency_info, _, _, _, _, _) = assert_preflight_modal(app);
    assert_eq!(
        dependency_info.len(),
        expected_dep_count,
        "Should have all dependencies loaded"
    );
    for pkg in test_packages {
        // Official packages were seeded with 4 deps, AUR packages with 3.
        let expected = if pkg.name.contains("official") { 4 } else { 3 };
        let actual = dependency_info
            .iter()
            .filter(|dep| dep.required_by.contains(&pkg.name))
            .count();
        assert_eq!(
            actual, expected,
            "Package {} should have {} dependencies",
            pkg.name, expected
        );
    }
}
/// What: Test that the Files tab loads and displays all files correctly.
///
/// Inputs:
/// - `app`: Application state
/// - `test_packages`: Test packages to verify
/// - `expected_file_count`: Expected total file count
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Verifies file info count and total file count
fn test_files_tab(
    app: &mut crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
    expected_file_count: usize,
) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Files);
    let (_, _, _, _, file_info, _, _, _, _) = assert_preflight_modal(app);
    assert_eq!(
        file_info.len(),
        test_packages.len(),
        "Should have file info for all packages"
    );
    // Sum the per-package file lists and compare against the seeded total.
    let counted: usize = file_info.iter().map(|info| info.files.len()).sum();
    assert_eq!(
        counted, expected_file_count,
        "Should have all files loaded"
    );
}
/// What: Test that the Services tab loads and displays all services correctly.
///
/// Inputs:
/// - `app`: Application state
/// - `expected_service_count`: Expected total service count
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Verifies service count and that services are marked as loaded
fn test_services_tab(app: &mut crate_root::state::AppState, expected_service_count: usize) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Services);
    // Pull the two fields we need out of the modal tuple by position.
    let modal_state = assert_preflight_modal(app);
    let service_info = modal_state.5;
    let services_loaded = modal_state.7;
    assert_eq!(
        service_info.len(),
        expected_service_count,
        "Should have all services loaded"
    );
    assert!(*services_loaded, "Services should be marked as loaded");
}
/// What: Test that the Sandbox tab loads and displays sandbox info correctly.
///
/// Inputs:
/// - `app`: Application state
/// - `expected_sandbox_count`: Expected total sandbox count
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Verifies sandbox count and that sandbox is marked as loaded
fn test_sandbox_tab(app: &mut crate_root::state::AppState, expected_sandbox_count: usize) {
    switch_preflight_tab(app, crate_root::state::PreflightTab::Sandbox);
    // Pull the two fields we need out of the modal tuple by position.
    let modal_state = assert_preflight_modal(app);
    let sandbox_info = modal_state.6;
    let sandbox_loaded = modal_state.8;
    assert_eq!(
        sandbox_info.len(),
        expected_sandbox_count,
        "Should have sandbox info for all AUR packages"
    );
    assert!(*sandbox_loaded, "Sandbox should be marked as loaded");
}
/// What: Test that navigation works correctly with selection indices.
///
/// Inputs:
/// - `app`: Application state
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Verifies that selection indices remain within bounds
/// - First moves each selection to its last valid position via a mutable
///   borrow, then re-reads the modal immutably and checks each index
fn test_navigation(app: &mut crate_root::state::AppState) {
    // Mutably position each selection at the last element of its list
    // (only lists that are non-empty are touched).
    if let crate_root::state::Modal::Preflight {
        dependency_info,
        file_info,
        service_info,
        dep_selected,
        file_selected,
        service_selected,
        ..
    } = &mut app.modal
    {
        if !dependency_info.is_empty() {
            *dep_selected = dependency_info.len().saturating_sub(1);
        }
        if !file_info.is_empty() {
            *file_selected = file_info.len().saturating_sub(1);
        }
        if !service_info.is_empty() {
            *service_selected = service_info.len().saturating_sub(1);
        }
    }
    // Re-borrow immutably and verify each selection stayed within bounds.
    let (_, _, _, dependency_info, file_info, service_info, _, _, _) = assert_preflight_modal(app);
    if !dependency_info.is_empty()
        && let crate_root::state::Modal::Preflight { dep_selected, .. } = &app.modal
    {
        assert!(
            *dep_selected < dependency_info.len(),
            "Dependency selection should be within bounds"
        );
    }
    if !file_info.is_empty()
        && let crate_root::state::Modal::Preflight { file_selected, .. } = &app.modal
    {
        assert!(
            *file_selected < file_info.len(),
            "File selection should be within bounds"
        );
    }
    if !service_info.is_empty()
        && let crate_root::state::Modal::Preflight {
            service_selected, ..
        } = &app.modal
    {
        assert!(
            *service_selected < service_info.len(),
            "Service selection should be within bounds"
        );
    }
}
/// What: Verify data integrity - all packages should have their data.
///
/// Inputs:
/// - `app`: Application state
/// - `test_packages`: Test packages to verify
/// - `expected_dep_count`: Expected dependency count
/// - `expected_service_count`: Expected service count
/// - `expected_sandbox_count`: Expected sandbox count
///
/// Output:
/// - None (panics on failure)
///
/// Details:
/// - Verifies all counts match expected values
/// - Verifies each package has its dependencies, files, services, and sandbox info
fn verify_data_integrity(
    app: &crate_root::state::AppState,
    test_packages: &[crate_root::state::PackageItem],
    expected_dep_count: usize,
    expected_service_count: usize,
    expected_sandbox_count: usize,
) {
    let (_, _, _, dependency_info, file_info, service_info, sandbox_info, _, _) =
        assert_preflight_modal(app);
    // Aggregate counts must match what the populate_* helpers reported.
    assert_eq!(
        dependency_info.len(),
        expected_dep_count,
        "Dependency count should match expected"
    );
    assert_eq!(
        file_info.len(),
        test_packages.len(),
        "File info count should match package count"
    );
    assert_eq!(
        service_info.len(),
        expected_service_count,
        "Service count should match expected"
    );
    assert_eq!(
        sandbox_info.len(),
        expected_sandbox_count,
        "Sandbox count should match expected"
    );
    // Per-package checks: every package must appear in every data set.
    for pkg in test_packages {
        assert!(
            dependency_info
                .iter()
                .any(|d| d.required_by.contains(&pkg.name)),
            "Package {} should have dependencies",
            pkg.name
        );
        assert!(
            file_info.iter().any(|f| f.name == pkg.name),
            "Package {} should have file info",
            pkg.name
        );
        assert!(
            service_info.iter().any(|s| s.providers.contains(&pkg.name)),
            "Package {} should have services",
            pkg.name
        );
        // Sandbox info is only generated for AUR packages.
        if matches!(pkg.source, crate_root::state::Source::Aur) {
            assert!(
                sandbox_info.iter().any(|s| s.package_name == pkg.name),
                "AUR package {} should have sandbox info",
                pkg.name
            );
        }
    }
}
#[test]
/// What: Verify that preflight modal handles large datasets correctly.
///
/// Inputs:
/// - 10+ packages in `install_list` (mix of official and AUR)
/// - Each package has 3-4 dependencies
/// - Each package has 2-3 files
/// - Each package has 1-2 services
/// - AUR packages have sandbox info
/// - User switches between all tabs
///
/// Output:
/// - All tabs load and display correctly with large datasets
/// - Navigation works correctly (selection indices, tree expansion)
/// - Data integrity is maintained (correct counts, no corruption)
///
/// Details:
/// - Tests performance and correctness with large datasets
/// - Verifies that many packages don't cause data corruption
/// - Ensures navigation remains functional with many items
fn preflight_handles_large_datasets_correctly() {
    // SAFETY: `set_var` mutates process-global state; this runs at the start
    // of the test, presumably before any thread reads the variable —
    // TODO confirm no concurrent env access in the test harness.
    unsafe {
        std::env::set_var("PACSEA_TEST_HEADLESS", "1");
    }
    let mut app = crate_root::state::AppState::default();
    // Seed the app caches and remember the expected totals for later checks.
    let test_packages = create_large_test_packages();
    let expected_dep_count = populate_dependencies(&mut app, &test_packages);
    let expected_file_count = populate_files(&mut app, &test_packages);
    let expected_service_count = populate_services(&mut app, &test_packages);
    let expected_sandbox_count = populate_sandbox(&mut app, &test_packages);
    app.install_list = test_packages.clone();
    app.preflight_cancelled
        .store(false, std::sync::atomic::Ordering::Relaxed);
    // Open the preflight modal on the Summary tab, then walk every tab.
    app.modal = create_preflight_modal(
        test_packages.clone(),
        crate_root::state::PreflightAction::Install,
        crate_root::state::PreflightTab::Summary,
    );
    test_deps_tab(&mut app, &test_packages, expected_dep_count);
    test_files_tab(&mut app, &test_packages, expected_file_count);
    test_services_tab(&mut app, expected_service_count);
    test_sandbox_tab(&mut app, expected_sandbox_count);
    test_navigation(&mut app);
    verify_data_integrity(
        &app,
        &test_packages,
        expected_dep_count,
        expected_service_count,
        expected_sandbox_count,
    );
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/lib.rs | crates/paimon/src/lib.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
mod error;
// Re-export the crate-level error and result aliases at the crate root.
pub use error::Error;
pub use error::Result;
/// File index format reading and writing.
pub mod file_index;
/// File I/O abstractions.
pub mod io;
/// Table specification types.
pub mod spec;
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/error.rs | crates/paimon/src/error.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use snafu::prelude::*;
/// Result type used in paimon.
pub type Result<T, E = Error> = std::result::Result<T, E>;
/// Error type for paimon.
#[derive(Debug, Snafu)]
pub enum Error {
    /// Stored data is malformed or inconsistent; wraps an ad-hoc snafu error.
    #[snafu(display("Paimon data invalid for {}: {:?}", message, source))]
    DataInvalid {
        message: String,
        #[snafu(backtrace)]
        source: snafu::Whatever,
    },
    /// A data type definition could not be interpreted.
    #[snafu(
        visibility(pub(crate)),
        display("Paimon data type invalid for {}", message)
    )]
    DataTypeInvalid { message: String },
    /// Unexpected failure from the underlying storage layer (OpenDAL).
    #[snafu(
        visibility(pub(crate)),
        display("Paimon hitting unexpected error {}: {:?}", message, source)
    )]
    IoUnexpected {
        message: String,
        source: opendal::Error,
    },
    /// The requested I/O operation is not supported.
    #[snafu(
        visibility(pub(crate)),
        display("Paimon hitting unsupported io error {}", message)
    )]
    IoUnsupported { message: String },
    /// Invalid or inconsistent configuration values.
    #[snafu(
        visibility(pub(crate)),
        display("Paimon hitting invalid config: {}", message)
    )]
    ConfigInvalid { message: String },
    /// Unexpected failure from the Avro library.
    #[snafu(
        visibility(pub(crate)),
        display("Paimon hitting unexpected avro error {}: {:?}", message, source)
    )]
    DataUnexpected {
        message: String,
        source: apache_avro::Error,
    },
    /// A file index blob violates the expected on-disk format.
    #[snafu(
        visibility(pub(crate)),
        display("Paimon hitting invalid file index format: {}", message)
    )]
    FileIndexFormatInvalid { message: String },
}
// Map all storage-layer (OpenDAL) failures onto `Error::IoUnexpected` so `?`
// works directly on opendal results throughout the crate.
impl From<opendal::Error> for Error {
    fn from(source: opendal::Error) -> Self {
        // TODO: Simple use IoUnexpected for now
        Error::IoUnexpected {
            message: "IO operation failed on underlying storage".to_string(),
            source,
        }
    }
}
// Map Avro library failures onto `Error::DataUnexpected` so `?` works
// directly on apache_avro results throughout the crate.
impl From<apache_avro::Error> for Error {
    fn from(source: apache_avro::Error) -> Self {
        // Use a descriptive message instead of the previous empty string so the
        // Display output ("Paimon hitting unexpected avro error {message}: ...")
        // stays readable.
        Error::DataUnexpected {
            message: "Avro serialization/deserialization failed".to_string(),
            source,
        }
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/file_index/file_index_format.rs | crates/paimon/src/file_index/file_index_format.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::collections::HashMap;
use bytes::{Buf, BufMut, Bytes, BytesMut};
use crate::{
io::{FileIO, FileRead, FileStatus, InputFile, OutputFile},
Error,
};
/// Default 1MB block size for the initial header read.
const READ_BLOCK_SIZE: u64 = 1024 * 1024;
/// Quoted from the Java implementation of the structure,
/// `MAGIC` is used to mark the beginning of a FileFormat structure.
pub const MAGIC: u64 = 1493475289347502;
/// Sentinel start position used to mark an empty index (no body bytes).
pub const EMPTY_INDEX_FLAG: i64 = -1;
/// Location of one serialized index inside the file.
#[derive(Debug)]
struct IndexInfo {
    // Start offset in bytes (body-relative while writing; absolute once read
    // back from the header), or EMPTY_INDEX_FLAG for an empty index.
    start_pos: i64,
    // Number of bytes occupied by the index data.
    length: i64,
}
/// On-disk format version tag stored in the header as a 4-byte integer.
#[repr(i32)]
#[derive(Debug, PartialEq, Eq)]
enum Version {
    // First (and currently only) version of the layout.
    V1,
}
/// File index file format. All columns and offsets are stored in the header.
///
/// ```text
/// _____________________________________ _____________________
/// | magic |version|head length |
/// |-------------------------------------|
/// | column number |
/// |-------------------------------------|
/// | column 1 | index number |
/// |-------------------------------------|
/// | index name 1 |start pos |length |
/// |-------------------------------------|
/// | index name 2 |start pos |length |
/// |-------------------------------------|
/// | index name 3 |start pos |length |
/// |-------------------------------------| HEADER
/// | column 2 | index number |
/// |-------------------------------------|
/// | index name 1 |start pos |length |
/// |-------------------------------------|
/// | index name 2 |start pos |length |
/// |-------------------------------------|
/// | index name 3 |start pos |length |
/// |-------------------------------------|
/// | ... |
/// |-------------------------------------|
/// | ... |
/// |-------------------------------------|
/// | redundant length |redundant bytes |
/// |-------------------------------------| ---------------------
/// | BODY |
/// | BODY |
/// | BODY | BODY
/// | BODY |
/// |_____________________________________| _____________________
///
/// - `magic`: 8 bytes long
/// - `version`: 4-byte integer
/// - `head length`: 4-byte integer
/// - `column number`: 4-byte integer
/// - `column x`: variable-length UTF-8 string (2-byte length + bytes)
/// - `index number`: 4-byte integer (number of index items below)
/// - `index name x`: variable-length UTF-8 string (2-byte length + bytes)
/// - `start pos`: 8-byte integer (absolute file offset, or -1 for an empty index)
/// - `length`: 8-byte integer
/// - `redundant length`: 4-byte integer (for compatibility with future versions; content is zero in this version)
/// - `redundant bytes`: variable-length bytes (for compatibility with future versions; empty in this version)
/// - `BODY`: sequence of index data (concatenated index data for each column)
/// ```
///
/// All integers are written little-endian; `start pos` and `length` are 8-byte
/// values (the previous doc incorrectly described them as 4-byte).
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fileindex/FileIndexFormat.java>
pub async fn write_column_indexes(
    path: &str,
    indexes: HashMap<String, HashMap<String, Bytes>>,
) -> crate::Result<OutputFile> {
    let file_io = FileIO::from_url(path)?.build()?;
    let output = file_io.new_output(path)?;
    let mut writer = output.writer().await?;
    let mut body_info: HashMap<String, HashMap<String, IndexInfo>> = HashMap::new();
    let mut total_data_size = 0;
    // Pre-compute the body size so it can be built without reallocation.
    for bytes_map in indexes.values() {
        for data in bytes_map.values() {
            if !data.is_empty() {
                total_data_size += data.len();
            }
        }
    }
    let mut body = BytesMut::with_capacity(total_data_size);
    for (column_name, bytes_map) in indexes.into_iter() {
        let inner_map = body_info.entry(column_name.clone()).or_default();
        for (index_name, data) in bytes_map {
            let start_position = body.len() as i64;
            if data.is_empty() {
                // Empty indexes carry no body bytes; flag them in the header.
                inner_map.insert(
                    index_name,
                    IndexInfo {
                        start_pos: EMPTY_INDEX_FLAG,
                        length: 0,
                    },
                );
            } else {
                body.extend_from_slice(&data);
                inner_map.insert(
                    index_name,
                    IndexInfo {
                        start_pos: start_position,
                        length: body.len() as i64 - start_position,
                    },
                );
            }
        }
    }
    let head_length = calculate_head_length(&body_info)?;
    let mut head_buffer = BytesMut::with_capacity(head_length);
    // Magic
    head_buffer.put_u64_le(MAGIC);
    // Version
    head_buffer.put_i32_le(Version::V1 as i32);
    // HeadLength
    head_buffer.put_i32_le(head_length as i32);
    // ColumnSize
    head_buffer.put_i32_le(body_info.len() as i32);
    for (column_name, index_info) in body_info {
        // ColumnName
        head_buffer.put_u16_le(column_name.len() as u16);
        head_buffer.put_slice(column_name.as_bytes());
        // IndexTypeSize
        head_buffer.put_i32_le(index_info.len() as i32);
        // Body offsets were recorded relative to the body start; shift them by
        // the header length so the stored positions are absolute file offsets.
        for (index_name, IndexInfo { start_pos, length }) in index_info {
            head_buffer.put_u16_le(index_name.len() as u16);
            head_buffer.put_slice(index_name.as_bytes());
            let adjusted_start = if start_pos == EMPTY_INDEX_FLAG {
                EMPTY_INDEX_FLAG
            } else {
                start_pos + head_length as i64
            };
            head_buffer.put_i64_le(adjusted_start);
            head_buffer.put_i64_le(length);
        }
    }
    // Redundant length for future compatibility
    head_buffer.put_i32_le(0);
    // Write header then body, then flush/close the writer.
    writer.write(head_buffer.freeze()).await?;
    writer.write(body.freeze()).await?;
    writer.close().await?;
    Ok(output)
}
/// Compute the total header length in bytes for the given body layout.
fn calculate_head_length(
    body_info: &HashMap<String, HashMap<String, IndexInfo>>,
) -> crate::Result<usize> {
    // Fixed-size prefix: Magic (8) + Version (4) + HeadLength (4)
    // + ColumnNumber (4) + RedundantLength (4).
    let fixed: usize = 8 + 4 + 4 + 4 + 4;
    let per_columns: usize = body_info
        .iter()
        .map(|(column_name, index_info)| {
            // 2-byte name length + name bytes + 4-byte index count.
            let column_part = 2 + column_name.len() + 4;
            // Per index: 2-byte name length + name bytes
            // + start_pos (8 bytes) + length (8 bytes).
            let index_part: usize = index_info
                .keys()
                .map(|index_name| 2 + index_name.len() + 16)
                .sum();
            column_part + index_part
        })
        .sum();
    Ok(fixed + per_columns)
}
/// A parsed file index: the decoded header plus a reader used to fetch
/// index bytes from the file body on demand.
pub struct FileIndex {
    reader: Box<dyn FileRead>,
    // column name -> (index name -> location in the file).
    header: HashMap<String, HashMap<String, IndexInfo>>,
}
impl FileIndex {
    /// Fetch all indexes of a single column as a map of index name -> bytes.
    ///
    /// Returns `Error::FileIndexFormatInvalid` if the column is absent from
    /// the header.
    pub async fn get_column_index(
        &self,
        column_name: &str,
    ) -> crate::Result<HashMap<String, Bytes>> {
        if let Some(index_info) = self.header.get(column_name) {
            let mut result = HashMap::new();
            for (index_name, info) in index_info {
                let bytes = self.get_bytes_with_start_and_length(info).await?;
                result.insert(index_name.clone(), bytes);
            }
            Ok(result)
        } else {
            Err(Error::FileIndexFormatInvalid {
                message: format!("Column '{}' not found in header", column_name),
            })
        }
    }
    /// Fetch every index for every column listed in the header.
    pub async fn get_index(&self) -> crate::Result<HashMap<String, HashMap<String, Bytes>>> {
        let mut result = HashMap::new();
        for (column_name, index_info) in self.header.iter() {
            let mut column_index = HashMap::new();
            for (index_name, info) in index_info {
                let bytes = self.get_bytes_with_start_and_length(info).await?;
                column_index.insert(index_name.clone(), bytes);
            }
            result.insert(column_name.clone(), column_index);
        }
        Ok(result)
    }
    /// Read `length` bytes starting at `start_pos` from the underlying file.
    // NOTE(review): empty indexes are stored with start_pos == EMPTY_INDEX_FLAG
    // (-1); casting that to u64 yields a degenerate empty range at u64::MAX —
    // TODO confirm all FileRead implementations tolerate such a range.
    async fn get_bytes_with_start_and_length(
        &self,
        index_info: &IndexInfo,
    ) -> crate::Result<Bytes> {
        let data_bytes = self
            .reader
            .read(index_info.start_pos as u64..(index_info.start_pos + index_info.length) as u64)
            .await?;
        Ok(data_bytes)
    }
}
pub struct FileIndexFormatReader {
reader: Box<dyn FileRead>,
stat: FileStatus,
}
impl FileIndexFormatReader {
pub async fn get_file_index(input_file: InputFile) -> crate::Result<FileIndex> {
let reader = input_file.reader().await?;
let mut file_reader = Self {
reader: Box::new(reader),
stat: input_file.metadata().await?,
};
let header = file_reader.read_header().await?;
Ok(FileIndex {
header,
reader: file_reader.reader,
})
}
async fn read_header(&mut self) -> crate::Result<HashMap<String, HashMap<String, IndexInfo>>> {
let read_size = if self.stat.size < READ_BLOCK_SIZE {
self.stat.size
} else {
READ_BLOCK_SIZE
};
let mut buffer = self.reader.read(0..read_size).await?;
// Magic (8 bytes)
let magic = buffer.get_u64_le();
if magic != MAGIC {
return Err(Error::FileIndexFormatInvalid {
message: format!("Expected MAGIC: {}, but found: {}", MAGIC, magic),
});
}
// Version (4 bytes)
let version = buffer.get_i32_le();
if version != Version::V1 as i32 {
return Err(Error::FileIndexFormatInvalid {
message: format!(
"Unsupported file index version: expected {}, but found: {}",
Version::V1 as i32,
version
),
});
}
// Head Length (4 bytes)
let head_length = buffer.get_i32_le() as usize;
// Ensure the header is fully contained in the buffer
if buffer.len() < head_length {
let remaining = head_length - buffer.len();
let mut remaining_head_buffer = BytesMut::with_capacity(remaining);
let additional_data = self
.reader
.read(buffer.len() as u64..buffer.len() as u64 + remaining as u64)
.await?;
remaining_head_buffer.extend_from_slice(&additional_data);
buffer = Bytes::from(
[buffer.slice(0..), remaining_head_buffer.freeze().slice(0..)].concat(),
);
}
// Column Number (4 bytes)
let column_number = buffer.get_i32_le();
let mut current_offset = 20;
let mut header = HashMap::new();
for _ in 0..column_number {
// Column Name Length (2 bytes)
let column_name_len = buffer.get_u16_le();
current_offset += 2;
// Column Name (variable-length UTF-8 string)
let column_name = String::from_utf8(buffer.split_to(column_name_len as usize).to_vec())
.map_err(|e| Error::FileIndexFormatInvalid {
message: format!("Invalid UTF-8 sequence in column name: {}", e),
})?;
current_offset += column_name_len as u64;
// Index Number (4 bytes)
let index_number = buffer.get_i32_le();
current_offset += 4;
let mut index_info_map = HashMap::new();
for _ in 0..index_number {
// Index Name Length (2 bytes)
let index_name_len = buffer.get_u16_le();
current_offset += 2;
// Index Name (variable-length UTF-8 string)
let index_name =
String::from_utf8(buffer.split_to(index_name_len as usize).to_vec()).unwrap();
current_offset += index_name_len as u64;
// Start Pos (8 bytes)
let start_pos = buffer.get_i64_le();
current_offset += 4;
// Length (8 bytes)
let length = buffer.get_i64_le();
current_offset += 4;
index_info_map.insert(index_name, IndexInfo { start_pos, length });
}
header.insert(column_name, index_info_map);
}
let redundant_length = buffer.get_i32_le() as u64;
current_offset += 4;
if redundant_length > 0 {
let redundant_bytes = buffer.split_to(redundant_length as usize);
if redundant_bytes.len() as u64 != redundant_length {
return Err(Error::FileIndexFormatInvalid {
message: format!(
"Expected to read {} redundant bytes, but found only {}, on offset {}",
redundant_length,
redundant_bytes.len(),
current_offset
),
});
}
}
Ok(header)
}
}
#[cfg(test)]
mod file_index_format_tests {
    use super::*;
    use bytes::Bytes;
    use std::collections::HashMap;
    /// Round-trips a single column carrying a single index entry.
    #[tokio::test]
    async fn test_single_column_single_index() -> crate::Result<()> {
        let path = "memory:/tmp/test_single_column_single_index";
        let mut indexes = HashMap::new();
        let mut index_map = HashMap::new();
        index_map.insert("index1".to_string(), Bytes::from("sample_data"));
        indexes.insert("column111".to_string(), index_map);
        let output = write_column_indexes(path, indexes.clone()).await?;
        let input = output.to_input_file();
        let reader = FileIndexFormatReader::get_file_index(input).await?;
        let column_data = reader.get_column_index("column111").await?;
        assert_eq!(
            column_data.get("index1").unwrap(),
            &Bytes::from("sample_data")
        );
        Ok(())
    }
    /// Round-trips several columns, each carrying several indexes.
    #[tokio::test]
    async fn test_multiple_columns_multiple_indexes() -> crate::Result<()> {
        let path = "memory:/tmp/test_multiple_columns_multiple_indexes";
        let mut indexes = HashMap::new();
        for col_num in 1..5 {
            let column_name = format!("column{}", col_num);
            let mut index_map = HashMap::new();
            for idx_num in 1..5 {
                index_map.insert(
                    format!("index{}", idx_num),
                    random_bytes(100 + col_num * idx_num),
                );
            }
            indexes.insert(column_name, index_map);
        }
        let output = write_column_indexes(path, indexes.clone()).await?;
        let input = output.to_input_file();
        let reader = FileIndexFormatReader::get_file_index(input).await?;
        for (column, index_map) in indexes {
            let column_data = reader.get_column_index(&column).await?;
            for (index_name, expected_data) in index_map {
                assert_eq!(column_data.get(&index_name).unwrap(), &expected_data);
            }
        }
        Ok(())
    }
    /// Empty index payloads must survive the round trip as empty bytes.
    #[tokio::test]
    async fn test_empty_file_index() -> crate::Result<()> {
        let path = "memory:/tmp/test_empty_file_index";
        let mut indexes = HashMap::new();
        let mut a_index = HashMap::new();
        a_index.insert("b".to_string(), Bytes::new());
        a_index.insert("c".to_string(), Bytes::new());
        indexes.insert("a".to_string(), a_index);
        let output = write_column_indexes(path, indexes.clone()).await?;
        let input = output.to_input_file();
        let reader = FileIndexFormatReader::get_file_index(input).await?;
        let column_indexes = reader.get_column_index("a").await?;
        assert_eq!(column_indexes.len(), 2);
        assert_eq!(column_indexes.get("b").unwrap(), &Bytes::new());
        assert_eq!(column_indexes.get("c").unwrap(), &Bytes::new());
        Ok(())
    }
    /// Round-trips a large payload to exercise reads beyond a single block.
    #[tokio::test]
    async fn test_large_data_set() -> crate::Result<()> {
        let path = "memory:/tmp/test_large_data_set";
        let mut indexes = HashMap::new();
        let mut large_data = HashMap::new();
        large_data.insert("large_index".to_string(), random_bytes(100_000_000)); // 100MB data
        indexes.insert("large_column".to_string(), large_data);
        // Write exactly once: the previous duplicate call merely overwrote
        // the same path and discarded its result.
        let output = write_column_indexes(path, indexes.clone()).await?;
        let input = output.to_input_file();
        let reader = FileIndexFormatReader::get_file_index(input).await?;
        let column_data = reader.get_column_index("large_column").await?;
        assert_eq!(
            column_data.get("large_index").unwrap(),
            &indexes
                .get("large_column")
                .unwrap()
                .get("large_index")
                .unwrap()
        );
        Ok(())
    }
    /// Generate `len` random bytes for use as index payloads.
    fn random_bytes(len: usize) -> Bytes {
        use rand::RngCore;
        let mut rng = rand::thread_rng();
        let mut bytes = vec![0u8; len];
        rng.fill_bytes(&mut bytes);
        Bytes::from(bytes)
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/file_index/mod.rs | crates/paimon/src/file_index/mod.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
mod file_index_format;
pub use file_index_format::*;
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/stats.rs | crates/paimon/src/spec/stats.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
/// The statistics for columns, supports the following stats.
///
/// All statistics are stored in the form of a Binary, which can significantly reduce its memory consumption, but the cost is that the column type needs to be known when getting.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/stats/FieldStatsArraySerializer.java#L111>
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)]
pub struct BinaryTableStats {
    /// the minimum values of the columns
    // `serde_bytes` serializes the Vec<u8> as a compact byte string rather
    // than a sequence of integers.
    #[serde(rename = "_MIN_VALUES", with = "serde_bytes")]
    min_values: Vec<u8>,
    /// the maximum values of the columns
    #[serde(rename = "_MAX_VALUES", with = "serde_bytes")]
    max_values: Vec<u8>,
    /// the number of nulls of the columns
    // Plain i64 list — no byte-string encoding needed here.
    #[serde(rename = "_NULL_COUNTS")]
    null_counts: Vec<i64>,
}
impl BinaryTableStats {
    /// Build column statistics from already-serialized min/max buffers and
    /// per-column null counts.
    pub fn new(
        min_values: Vec<u8>,
        max_values: Vec<u8>,
        null_counts: Vec<i64>,
    ) -> BinaryTableStats {
        Self {
            min_values,
            max_values,
            null_counts,
        }
    }
    /// Serialized minimum values of the columns.
    #[inline]
    pub fn min_values(&self) -> &[u8] {
        self.min_values.as_slice()
    }
    /// Serialized maximum values of the columns.
    #[inline]
    pub fn max_values(&self) -> &[u8] {
        self.max_values.as_slice()
    }
    /// Number of nulls per column.
    #[inline]
    pub fn null_counts(&self) -> &Vec<i64> {
        &self.null_counts
    }
}
impl Display for BinaryTableStats {
    /// Human-readable rendering of the serialized column statistics.
    ///
    /// The previous body was `todo!()`, which panicked whenever the stats were
    /// formatted with `{}` (e.g. from a log message or an error context).
    /// Render the raw serialized buffers via `Debug`, mirroring how
    /// `ManifestFileMeta`'s `Display` prints its embedded `partition_stats`
    /// with `{:?}`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{{min_values: {:?}, max_values: {:?}, null_counts: {:?}}}",
            self.min_values, self.max_values, self.null_counts
        )
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/objects_file.rs | crates/paimon/src/spec/objects_file.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::Error;
use apache_avro::types::Value;
use apache_avro::{from_value, Reader};
use serde::de::DeserializeOwned;
#[allow(dead_code)]
pub fn from_avro_bytes<T: DeserializeOwned>(bytes: &[u8]) -> crate::Result<Vec<T>> {
let reader = Reader::new(bytes).map_err(Error::from)?;
let records = reader
.collect::<Result<Vec<Value>, _>>()
.map_err(Error::from)?;
let values = Value::Array(records);
from_value::<Vec<T>>(&values).map_err(Error::from)
}
#[cfg(test)]
mod tests {
    use crate::spec::manifest_common::FileKind;
    use crate::spec::manifest_entry::ManifestEntry;
    use crate::spec::objects_file::from_avro_bytes;
    use crate::spec::stats::BinaryTableStats;
    use crate::spec::{DataFileMeta, ManifestFileMeta};
    use chrono::{DateTime, Utc};
    // Decode a pre-generated Avro manifest-list fixture and compare against
    // hand-built expected values.
    // NOTE(review): `#[tokio::test]` is used but nothing in the body is
    // awaited; a plain `#[test]` would presumably suffice — confirm.
    #[tokio::test]
    async fn test_read_manifest_list() {
        let workdir =
            std::env::current_dir().unwrap_or_else(|err| panic!("current_dir must exist: {err}"));
        // Fixture path is relative to the crate root.
        let path = workdir
            .join("tests/fixtures/manifest/manifest-list-5c7399a0-46ae-4a5e-9c13-3ab07212cdb6-0");
        let v = std::fs::read(path.to_str().unwrap()).unwrap();
        let res = from_avro_bytes::<ManifestFileMeta>(&v).unwrap();
        // Serialized min/max stats blob shared by both expected entries.
        let value_bytes = vec![
            0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 129,
        ];
        assert_eq!(
            res,
            vec![
                ManifestFileMeta::new(
                    "manifest-19d138df-233f-46f7-beb6-fadaf4741c0e".to_string(),
                    10,
                    10,
                    10,
                    BinaryTableStats::new(value_bytes.clone(), value_bytes.clone(), vec![1, 2]),
                    1
                ),
                ManifestFileMeta::new(
                    "manifest-a703ee48-c411-413e-b84e-c03bdb179631".to_string(),
                    11,
                    0,
                    10,
                    BinaryTableStats::new(value_bytes.clone(), value_bytes.clone(), vec![1, 2]),
                    2
                )
            ],
        );
    }
    // Decode an Avro manifest-file fixture into `ManifestEntry` records and
    // compare against fully spelled-out expected entries: one Delete and one
    // Add, each wrapping a complete `DataFileMeta`.
    #[tokio::test]
    async fn test_read_manifest_entry() {
        let workdir =
            std::env::current_dir().unwrap_or_else(|err| panic!("current_dir must exist: {err}"));
        let path =
            workdir.join("tests/fixtures/manifest/manifest-8ded1f09-fcda-489e-9167-582ac0f9f846-0");
        let v = std::fs::read(path.to_str().unwrap()).unwrap();
        let res = from_avro_bytes::<ManifestEntry>(&v).unwrap();
        // Stats blob and partition/key blob reused across both expected entries.
        let value_bytes = vec![
            0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 129, 1, 0, 0, 0, 0, 0, 0, 0,
        ];
        let single_value = vec![0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0];
        assert_eq!(
            res,
            vec![
                ManifestEntry::new(
                    FileKind::Delete,
                    single_value.clone(),
                    1,
                    10,
                    DataFileMeta {
                        file_name: "f1.parquet".to_string(),
                        file_size: 10,
                        row_count: 100,
                        min_key: single_value.clone(),
                        max_key: single_value.clone(),
                        key_stats: BinaryTableStats::new(
                            value_bytes.clone(),
                            value_bytes.clone(),
                            vec![1, 2]
                        ),
                        value_stats: BinaryTableStats::new(
                            value_bytes.clone(),
                            value_bytes.clone(),
                            vec![1, 2]
                        ),
                        min_sequence_number: 1,
                        max_sequence_number: 100,
                        schema_id: 0,
                        level: 1,
                        extra_files: vec![],
                        creation_time: "2024-09-06T07:45:55.039+00:00"
                            .parse::<DateTime<Utc>>()
                            .unwrap(),
                        delete_row_count: Some(0),
                        embedded_index: None,
                    },
                    2
                ),
                ManifestEntry::new(
                    FileKind::Add,
                    single_value.clone(),
                    2,
                    10,
                    DataFileMeta {
                        file_name: "f2.parquet".to_string(),
                        file_size: 10,
                        row_count: 100,
                        min_key: single_value.clone(),
                        max_key: single_value.clone(),
                        key_stats: BinaryTableStats::new(
                            value_bytes.clone(),
                            value_bytes.clone(),
                            vec![1, 2]
                        ),
                        value_stats: BinaryTableStats::new(
                            value_bytes.clone(),
                            value_bytes.clone(),
                            vec![1, 2]
                        ),
                        min_sequence_number: 1,
                        max_sequence_number: 100,
                        schema_id: 0,
                        level: 1,
                        extra_files: vec![],
                        creation_time: "2024-09-06T07:45:55.039+00:00"
                            .parse::<DateTime<Utc>>()
                            .unwrap(),
                        delete_row_count: Some(1),
                        embedded_index: None,
                    },
                    2
                ),
            ]
        )
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/manifest_entry.rs | crates/paimon/src/spec/manifest_entry.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::spec::manifest_common::FileKind;
use crate::spec::DataFileMeta;
use serde::Deserialize;
use serde_with::serde_derive::Serialize;
/// The same {@link Identifier} indicates that the {@link ManifestEntry} refers to the same data file.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/manifest/FileEntry.java#L58>
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Identifier {
    /// Serialized partition the data file belongs to.
    pub partition: Vec<u8>,
    /// Bucket index within the partition.
    pub bucket: i32,
    /// LSM level of the data file.
    pub level: i32,
    /// Name of the data file this identifier refers to.
    pub file_name: String,
}
/// Entry of a manifest file, representing an addition / deletion of a data file.
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/manifest/ManifestEntry.java>
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ManifestEntry {
    /// Whether this entry adds or deletes the data file.
    #[serde(rename = "_KIND")]
    kind: FileKind,
    /// Serialized partition value; stored as an Avro byte string.
    #[serde(rename = "_PARTITION", with = "serde_bytes")]
    partition: Vec<u8>,
    /// Bucket the data file belongs to.
    #[serde(rename = "_BUCKET")]
    bucket: i32,
    /// Total bucket count at the time this entry was written.
    #[serde(rename = "_TOTAL_BUCKETS")]
    total_buckets: i32,
    /// Metadata of the referenced data file.
    #[serde(rename = "_FILE")]
    file: DataFileMeta,
    /// Entry format version.
    #[serde(rename = "_VERSION")]
    version: i32,
}
#[allow(dead_code)]
impl ManifestEntry {
    /// Build a manifest entry describing one add/delete of a data file.
    pub fn new(
        kind: FileKind,
        partition: Vec<u8>,
        bucket: i32,
        total_buckets: i32,
        file: DataFileMeta,
        version: i32,
    ) -> Self {
        Self {
            kind,
            partition,
            bucket,
            total_buckets,
            file,
            version,
        }
    }
    /// Whether this entry adds or deletes its data file.
    fn kind(&self) -> &FileKind {
        &self.kind
    }
    /// Serialized partition value.
    fn partition(&self) -> &Vec<u8> {
        &self.partition
    }
    /// Bucket the data file lives in.
    fn bucket(&self) -> i32 {
        self.bucket
    }
    /// LSM level, taken from the embedded file metadata.
    fn level(&self) -> i32 {
        self.file.level
    }
    /// Name of the referenced data file.
    fn file_name(&self) -> &str {
        &self.file.file_name
    }
    /// Minimum key stored in the data file.
    fn min_key(&self) -> &Vec<u8> {
        &self.file.min_key
    }
    /// Maximum key stored in the data file.
    fn max_key(&self) -> &Vec<u8> {
        &self.file.max_key
    }
    /// Identity of the referenced data file; equal identifiers mean equal files.
    fn identifier(&self) -> Identifier {
        Identifier {
            partition: self.partition.clone(),
            bucket: self.bucket,
            level: self.file.level,
            file_name: self.file.file_name.clone(),
        }
    }
    /// Total bucket count at write time.
    pub fn total_buckets(&self) -> i32 {
        self.total_buckets
    }
    /// Metadata of the referenced data file.
    pub fn file(&self) -> &DataFileMeta {
        &self.file
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/manifest_file_meta.rs | crates/paimon/src/spec/manifest_file_meta.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::spec::stats::BinaryTableStats;
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
/// Metadata of a manifest file.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/manifest/ManifestFileMeta.java>
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)]
pub struct ManifestFileMeta {
    /// format version of this metadata record (the `new` constructor writes 2)
    #[serde(rename = "_VERSION")]
    version: i32,
    /// manifest file name
    #[serde(rename = "_FILE_NAME")]
    file_name: String,
    /// manifest file size.
    #[serde(rename = "_FILE_SIZE")]
    file_size: i64,
    /// number added files in manifest.
    #[serde(rename = "_NUM_ADDED_FILES")]
    num_added_files: i64,
    /// number deleted files in manifest.
    #[serde(rename = "_NUM_DELETED_FILES")]
    num_deleted_files: i64,
    /// partition stats, the minimum and maximum values of partition fields in this manifest are beneficial for skipping certain manifest files during queries, it is a SimpleStats.
    #[serde(rename = "_PARTITION_STATS")]
    partition_stats: BinaryTableStats,
    /// schema id when writing this manifest file.
    #[serde(rename = "_SCHEMA_ID")]
    schema_id: i64,
}
impl ManifestFileMeta {
    /// Get the manifest file name
    #[inline]
    pub fn file_name(&self) -> &str {
        self.file_name.as_str()
    }
    /// Get the manifest file size.
    #[inline]
    pub fn file_size(&self) -> i64 {
        self.file_size
    }
    /// Get the number added files in manifest.
    #[inline]
    pub fn num_added_files(&self) -> i64 {
        self.num_added_files
    }
    /// Get the number deleted files in manifest.
    #[inline]
    pub fn num_deleted_files(&self) -> i64 {
        self.num_deleted_files
    }
    /// Get the partition stats
    // `#[inline]` added for consistency: every other trivial accessor in this
    // impl carries it.
    #[inline]
    pub fn partition_stats(&self) -> &BinaryTableStats {
        &self.partition_stats
    }
    /// Get the schema id when writing this manifest file.
    #[inline]
    pub fn schema_id(&self) -> i64 {
        self.schema_id
    }
    /// Get the version of this manifest file
    #[inline]
    pub fn version(&self) -> i32 {
        self.version
    }
    /// Build metadata for a freshly written manifest file.
    ///
    /// The format `version` is fixed at 2, the current manifest meta version.
    #[inline]
    pub fn new(
        file_name: String,
        file_size: i64,
        num_added_files: i64,
        num_deleted_files: i64,
        partition_stats: BinaryTableStats,
        schema_id: i64,
    ) -> ManifestFileMeta {
        Self {
            version: 2,
            file_name,
            file_size,
            num_added_files,
            num_deleted_files,
            partition_stats,
            schema_id,
        }
    }
}
impl Display for ManifestFileMeta {
    /// Renders `{file_name, file_size, num_added, num_deleted, stats:?, schema_id}`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Destructure once so the format call reads field-for-field.
        let Self {
            file_name,
            file_size,
            num_added_files,
            num_deleted_files,
            partition_stats,
            schema_id,
            ..
        } = self;
        write!(
            f,
            "{{{}, {}, {}, {}, {:?}, {}}}",
            file_name, file_size, num_added_files, num_deleted_files, partition_stats, schema_id
        )
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/index_manifest.rs | crates/paimon/src/spec/index_manifest.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::spec::manifest_common::FileKind;
use crate::spec::IndexFileMeta;
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
/// Manifest entry for index file.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/manifest/IndexManifestEntry.java>
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct IndexManifestEntry {
    /// Whether this entry adds or deletes the index file.
    #[serde(rename = "_KIND")]
    pub kind: FileKind,
    /// Serialized partition value; stored as an Avro byte string.
    #[serde(rename = "_PARTITION", with = "serde_bytes")]
    pub partition: Vec<u8>,
    /// Bucket the index file belongs to.
    #[serde(rename = "_BUCKET")]
    pub bucket: i32,
    /// Index file metadata; `flatten` inlines its fields into this record's
    /// serialized form instead of nesting them.
    #[serde(flatten)]
    pub index_file: IndexFileMeta,
    /// Entry format version.
    #[serde(rename = "_VERSION")]
    pub version: i32,
}
impl Display for IndexManifestEntry {
    /// Diagnostic rendering of the entry.
    ///
    /// NOTE(review): `version` is omitted from the output, matching the
    /// original formatting — confirm that is intentional before adding it.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let Self {
            kind,
            partition,
            bucket,
            index_file,
            ..
        } = self;
        write!(
            f,
            "IndexManifestEntry{{kind={:?}, partition={:?}, bucket={}, index_file={}}}",
            kind, partition, bucket, index_file,
        )
    }
}
#[cfg(test)]
mod tests {
    use indexmap::IndexMap;
    use super::*;
    // Decode an index-manifest Avro fixture with `serde_avro_fast` and compare
    // against the expected entries: one HASH index and one DELETION_VECTORS
    // index carrying a per-file ranges map.
    #[test]
    fn test_read_index_manifest_file() {
        let workdir =
            std::env::current_dir().unwrap_or_else(|err| panic!("current_dir must exist: {err}"));
        // Fixture path is relative to the crate root.
        let path = workdir
            .join("tests/fixtures/manifest/index-manifest-85cc6729-f5af-431a-a1c3-ef45319328fb-0");
        let source = std::fs::read(path.to_str().unwrap()).unwrap();
        let mut reader =
            serde_avro_fast::object_container_file_encoding::Reader::from_slice(source.as_slice())
                .unwrap();
        let res: Vec<_> = reader
            .deserialize::<IndexManifestEntry>()
            .collect::<Result<_, _>>()
            .unwrap();
        assert_eq!(
            res,
            vec![
                IndexManifestEntry {
                    version: 1,
                    kind: FileKind::Add,
                    partition: vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                    bucket: 0,
                    index_file: IndexFileMeta {
                        index_type: "HASH".into(),
                        file_name: "index-a984b43a-c3fb-40b4-ad29-536343c239a6-0".into(),
                        file_size: 16,
                        row_count: 4,
                        deletion_vectors_ranges: None,
                    }
                },
                IndexManifestEntry {
                    version: 1,
                    kind: FileKind::Add,
                    partition: vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                    bucket: 0,
                    index_file: IndexFileMeta {
                        index_type: "DELETION_VECTORS".into(),
                        file_name: "index-3f0986c5-4398-449b-be82-95f019d7a748-0".into(),
                        file_size: 33,
                        row_count: 1,
                        deletion_vectors_ranges: Some(IndexMap::from([(
                            "data-9b76122c-6bb5-4952-a946-b5bce29694a1-0.orc".into(),
                            (1, 24)
                        )])),
                    }
                }
            ]
        );
    }
    // Round-trip one `IndexManifestEntry` through Avro single-object encoding
    // against an inline schema mirroring the Java writer's layout (fields in
    // `_NAME` order, deletion-vector ranges as an array of f0/f1/f2 records).
    #[test]
    fn test_single_object_serde() {
        let sample = IndexManifestEntry {
            version: 1,
            kind: FileKind::Delete,
            partition: vec![0, 1, 0, 2, 0, 3, 0, 4, 0, 5, 0, 6],
            bucket: 0,
            index_file: IndexFileMeta {
                index_type: "DELETION_VECTORS".into(),
                file_name: "test1".into(),
                file_size: 33,
                row_count: 1,
                deletion_vectors_ranges: Some(IndexMap::from([("test1".into(), (1, 24))])),
            },
        };
        let schema: serde_avro_fast::Schema = r#"["null", {
"type": "record",
"name": "org.apache.paimon.avro.generated.record",
"fields": [
{"name": "_VERSION", "type": "int"},
{"name": "_KIND", "type": "int"},
{"name": "_PARTITION", "type": "bytes"},
{"name": "_BUCKET", "type": "int"},
{"name": "_INDEX_TYPE", "type": "string"},
{"name": "_FILE_NAME", "type": "string"},
{"name": "_FILE_SIZE", "type": "long"},
{"name": "_ROW_COUNT", "type": "long"},
{
"default": null,
"name": "_DELETIONS_VECTORS_RANGES",
"type": ["null", {
"type": "array",
"items": ["null", {
"type": "record",
"name": "org.apache.paimon.avro.generated.record__DELETIONS_VECTORS_RANGES",
"fields": [
{"name": "f0", "type": "string"},
{"name": "f1", "type": "int"},
{"name": "f2", "type": "int"}
]
}]
}]
}
]
}]"#
        .parse().unwrap();
        let serializer_config = &mut serde_avro_fast::ser::SerializerConfig::new(&schema);
        let encoded = serde_avro_fast::to_single_object_vec(&sample, serializer_config).unwrap();
        let decoded: IndexManifestEntry =
            serde_avro_fast::from_single_object_slice(encoded.as_slice(), &schema).unwrap();
        // Decoding what we just encoded must reproduce the sample exactly.
        assert_eq!(sample, decoded);
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/types.rs | crates/paimon/src/spec/types.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::error::*;
use crate::spec::DataField;
use bitflags::bitflags;
use serde::{Deserialize, Serialize};
use serde_utils::DataTypeName;
use serde_with::{serde_as, DeserializeFromStr, FromInto, SerializeDisplay};
use std::{
fmt::{Debug, Display, Formatter},
str::FromStr,
};
bitflags! {
    /// An enumeration of Data type families for clustering {@link DataTypeRoot}s into categories.
    ///
    /// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/DataTypeFamily.java>
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub struct DataTypeFamily: u32 {
        // Top-level SQL classification: predefined scalar vs constructed
        // (ARRAY/MAP/MULTISET/ROW) types.
        const PREDEFINED = 1 << 0;
        const CONSTRUCTED = 1 << 1;
        // String families.
        const CHARACTER_STRING = 1 << 2;
        const BINARY_STRING = 1 << 3;
        // Numeric families; a type combines several (e.g. BIGINT is
        // NUMERIC | INTEGER_NUMERIC | EXACT_NUMERIC, see `BigIntType::family`).
        const NUMERIC = 1 << 4;
        const INTEGER_NUMERIC = 1 << 5;
        const EXACT_NUMERIC = 1 << 6;
        const APPROXIMATE_NUMERIC = 1 << 7;
        // Date/time families.
        const DATETIME = 1 << 8;
        const TIME = 1 << 9;
        const TIMESTAMP = 1 << 10;
        // Collections and extension point.
        const COLLECTION = 1 << 11;
        const EXTENSION = 1 << 12;
    }
}
/// Data type for paimon table.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/DataType.java#L45>
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
// `untagged`: each variant's own (de)serialization decides which one matches;
// there is no explicit discriminant in the wire form.
#[serde(untagged)]
pub enum DataType {
    /// Data type of a boolean with a (possibly) three-valued logic of `TRUE`, `FALSE`, `UNKNOWN`.
    Boolean(BooleanType),
    /// Data type of a 1-byte (2^8) signed integer with values from -128 to 127.
    TinyInt(TinyIntType),
    /// Data type of a 2-byte (2^16) signed integer with values from -32,768 to 32,767.
    SmallInt(SmallIntType),
    /// Data type of a 4-byte (2^32) signed integer with values from -2,147,483,648 to 2,147,483,647.
    Int(IntType),
    /// Data type of an 8-byte (2^64) signed integer with values from -9,223,372,036,854,775,808 to 9,223,372,036,854,775,807.
    BigInt(BigIntType),
    /// Data type of a decimal number with fixed precision and scale.
    Decimal(DecimalType),
    /// Data type of an 8-byte double precision floating point number.
    Double(DoubleType),
    /// Data type of a 4-byte single precision floating point number.
    Float(FloatType),
    /// Data type of a fixed-length binary string (=a sequence of bytes).
    Binary(BinaryType),
    /// Data type of a variable-length binary string (=a sequence of bytes).
    VarBinary(VarBinaryType),
    /// Data type of a fixed-length character string.
    Char(CharType),
    /// Data type of a variable-length character string.
    VarChar(VarCharType),
    /// Data type of a date consisting of `year-month-day` with values ranging from `0000-01-01` to `9999-12-31`
    Date(DateType),
    /// Data type of a timestamp WITH LOCAL time zone consisting of `year-month-day hour:minute:second[.fractional] zone`.
    LocalZonedTimestamp(LocalZonedTimestampType),
    /// Data type of a time WITHOUT time zone consisting of `hour:minute:second[.fractional]` with
    /// up to nanosecond precision and values ranging from `00:00:00.000000000` to `23:59:59.999999999`.
    Time(TimeType),
    /// Data type of a timestamp WITHOUT time zone consisting of `year-month-day hour:minute:second[.fractional]` with up to nanosecond precision and values ranging from `0000-01-01 00:00:00.000000000` to `9999-12-31 23:59:59.999999999`.
    Timestamp(TimestampType),
    /// Data type of an array of elements with same subtype.
    Array(ArrayType),
    /// Data type of an associative array that maps keys (including `NULL`) to values (including `NULL`).
    Map(MapType),
    /// Data type of a multiset (=bag). Unlike a set, it allows for multiple instances for each of its
    /// elements with a common subtype.
    Multiset(MultisetType),
    /// Data type of a sequence of fields. A field consists of a field name, field type, and an optional
    /// description.
    Row(RowType),
}
#[allow(dead_code)]
impl DataType {
    /// Whether values of this type may be `NULL`.
    ///
    /// The match is deliberately exhaustive (no `_` arm) so that adding a new
    /// `DataType` variant forces this accessor to be updated at compile time.
    fn is_nullable(&self) -> bool {
        match self {
            DataType::Boolean(v) => v.nullable,
            DataType::TinyInt(v) => v.nullable,
            DataType::SmallInt(v) => v.nullable,
            DataType::Int(v) => v.nullable,
            DataType::BigInt(v) => v.nullable,
            DataType::Decimal(v) => v.nullable,
            DataType::Double(v) => v.nullable,
            DataType::Float(v) => v.nullable,
            DataType::Binary(v) => v.nullable,
            DataType::VarBinary(v) => v.nullable,
            DataType::Char(v) => v.nullable,
            DataType::VarChar(v) => v.nullable,
            DataType::Date(v) => v.nullable,
            DataType::LocalZonedTimestamp(v) => v.nullable,
            DataType::Time(v) => v.nullable,
            DataType::Timestamp(v) => v.nullable,
            DataType::Array(v) => v.nullable,
            DataType::Map(v) => v.nullable,
            DataType::Multiset(v) => v.nullable,
            DataType::Row(v) => v.nullable,
        }
    }
}
/// ArrayType for paimon.
///
/// Data type of an array of elements with same subtype.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/ArrayType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ArrayType {
    /// Nullability, round-tripped through the serialized type tag.
    // NOTE(review): the exact wire form is defined by
    // `serde_utils::NullableType<ARRAY>` — confirm there.
    #[serde(rename = "type")]
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::ARRAY>>")]
    nullable: bool,
    /// Element type; boxed because `DataType` is recursive.
    #[serde(rename = "element")]
    element_type: Box<DataType>,
}
impl ArrayType {
    /// Nullable array of `element_type` elements.
    pub fn new(element_type: DataType) -> Self {
        Self::with_nullable(true, element_type)
    }
    /// Array type with explicit nullability.
    pub fn with_nullable(nullable: bool, element_type: DataType) -> Self {
        let element_type = Box::new(element_type);
        Self {
            nullable,
            element_type,
        }
    }
    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::COLLECTION | DataTypeFamily::CONSTRUCTED
    }
}
/// BigIntType for paimon.
///
/// Data type of an 8-byte (2^64) signed integer with values from -9,223,372,036,854,775,808 to 9,223,372,036,854,775,807.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/BigIntType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Hash)]
#[serde(transparent)]
pub struct BigIntType {
    /// Nullability, round-tripped through the serialized type tag via
    /// `NullableType<BIGINT>`.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::BIGINT>>")]
    nullable: bool,
}
impl Default for BigIntType {
    /// Defaults to a nullable BIGINT, same as `BigIntType::new()`.
    fn default() -> Self {
        Self::new()
    }
}
impl BigIntType {
    /// Nullable BIGINT.
    pub fn new() -> Self {
        Self::with_nullable(true)
    }
    /// BIGINT with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        Self { nullable }
    }
    /// Type families: predefined, exact integer numeric.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED
            | DataTypeFamily::NUMERIC
            | DataTypeFamily::INTEGER_NUMERIC
            | DataTypeFamily::EXACT_NUMERIC
    }
}
/// BinaryType for paimon.
///
/// Data type of a fixed-length binary string (=a sequence of bytes).
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/BinaryType.java>.
#[serde_as]
// Serialized through Display/FromStr (the textual `BINARY(n)` form), hence
// `SerializeDisplay`/`DeserializeFromStr` rather than derived serde.
#[derive(Debug, Clone, PartialEq, Eq, SerializeDisplay, DeserializeFromStr, Hash)]
pub struct BinaryType {
    nullable: bool,
    length: usize,
}
impl Display for BinaryType {
    /// Renders as `BINARY(<len>)`, appending ` NOT NULL` when non-nullable.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "BINARY({})", self.length)?;
        if !self.nullable {
            write!(f, " NOT NULL")?;
        }
        Ok(())
    }
}
impl Default for BinaryType {
    /// Nullable `BINARY(1)`.
    fn default() -> Self {
        // DEFAULT_LENGTH satisfies MIN_LENGTH, so `new` cannot fail here.
        Self::new(Self::DEFAULT_LENGTH).unwrap()
    }
}
impl FromStr for BinaryType {
    type Err = Error;
    /// Parse the textual form `BINARY(<len>)`, optionally followed by
    /// `NOT NULL`, as produced by this type's `Display` impl.
    ///
    /// # Errors
    /// Returns `Error::DataTypeInvalid` when the prefix is not `BINARY`, the
    /// brackets are malformed, the length is not a `usize`, or the length
    /// violates the `MIN_LENGTH` invariant.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if !s.starts_with(serde_utils::BINARY::NAME) {
            return DataTypeInvalidSnafu {
                message: "Invalid BINARY type. Expected string to start with 'BINARY'.",
            }
            .fail();
        }
        let (open_bracket, close_bracket) = serde_utils::extract_brackets_pos(s, "BinaryType")?;
        let length_str = &s[open_bracket + 1..close_bracket];
        let length = length_str
            .trim()
            .parse::<usize>()
            .map_err(|_| Error::DataTypeInvalid {
                message: "Invalid BINARY length. Unable to parse length as a usize.".to_string(),
            })?;
        let nullable = !s[close_bracket..].contains("NOT NULL");
        // Route through `with_nullable` so the MIN_LENGTH invariant is
        // enforced: constructing the struct directly (as before) accepted
        // "BINARY(0)" even though `new`/`with_nullable` reject length 0.
        Self::with_nullable(nullable, length)
    }
}
impl BinaryType {
    pub const MIN_LENGTH: usize = 1;
    pub const MAX_LENGTH: usize = usize::MAX;
    pub const DEFAULT_LENGTH: usize = 1;
    /// Nullable fixed-length binary string type.
    pub fn new(length: usize) -> Result<Self, Error> {
        Self::with_nullable(true, length)
    }
    /// Fixed-length binary string type with explicit nullability.
    ///
    /// Rejects lengths below [`Self::MIN_LENGTH`].
    pub fn with_nullable(nullable: bool, length: usize) -> Result<Self, Error> {
        if length >= Self::MIN_LENGTH {
            Ok(Self { nullable, length })
        } else {
            DataTypeInvalidSnafu {
                message: "Binary string length must be at least 1.".to_string(),
            }
            .fail()
        }
    }
    /// Declared byte length.
    pub fn length(&self) -> usize {
        self.length
    }
    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::BINARY_STRING | DataTypeFamily::PREDEFINED
    }
}
/// BooleanType for paimon.
///
/// Data type of a boolean with a (possibly) three-valued logic of `TRUE`, `FALSE`, `UNKNOWN`.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/master/paimon-common/src/release-0.8.2/java/org/apache/paimon/types/BooleanType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct BooleanType {
    /// Nullability, round-tripped through the serialized type tag via
    /// `NullableType<BOOLEAN>`.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::BOOLEAN>>")]
    nullable: bool,
}
impl Default for BooleanType {
    /// Defaults to a nullable BOOLEAN, same as `BooleanType::new()`.
    fn default() -> Self {
        Self::new()
    }
}
impl BooleanType {
    /// Nullable BOOLEAN.
    pub fn new() -> Self {
        Self::with_nullable(true)
    }
    /// BOOLEAN with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        Self { nullable }
    }
    /// Type families: predefined only.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED
    }
}
/// CharType for paimon.
///
/// Data type of a fixed-length character string.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/CharType.java>.
#[serde_as]
// Serialized through Display/FromStr (the textual `CHAR(n)` form), hence
// `SerializeDisplay`/`DeserializeFromStr` rather than derived serde.
#[derive(Debug, Clone, PartialEq, Hash, Eq, SerializeDisplay, DeserializeFromStr)]
pub struct CharType {
    nullable: bool,
    length: usize,
}
impl Display for CharType {
    /// Renders as `CHAR(<len>)`, appending ` NOT NULL` when non-nullable.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "CHAR({})", self.length)?;
        if !self.nullable {
            write!(f, " NOT NULL")?;
        }
        Ok(())
    }
}
impl Default for CharType {
    /// Nullable `CHAR(1)`.
    fn default() -> Self {
        // DEFAULT_LENGTH lies inside MIN_LENGTH..=MAX_LENGTH, so `new` cannot fail.
        Self::new(Self::DEFAULT_LENGTH).unwrap()
    }
}
impl FromStr for CharType {
    type Err = Error;
    /// Parse the textual form `CHAR(<len>)`, optionally followed by
    /// `NOT NULL`, as produced by this type's `Display` impl.
    ///
    /// # Errors
    /// Returns `Error::DataTypeInvalid` when the prefix is not `CHAR`, the
    /// brackets are malformed, the length is not a `usize`, or the length
    /// falls outside `MIN_LENGTH..=MAX_LENGTH`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if !s.starts_with(serde_utils::CHAR::NAME) {
            return DataTypeInvalidSnafu {
                message: "Invalid CHAR type. Expected string to start with 'CHAR'.",
            }
            .fail();
        }
        let (open_bracket, close_bracket) = serde_utils::extract_brackets_pos(s, "CharType")?;
        let length_str = &s[open_bracket + 1..close_bracket];
        let length = length_str
            .trim()
            .parse::<usize>()
            .map_err(|_| Error::DataTypeInvalid {
                message: "Invalid CHAR length. Unable to parse length as a usize.".to_string(),
            })?;
        let nullable = !s[close_bracket..].contains("NOT NULL");
        // Route through `with_nullable` so the 1..=255 length invariant is
        // enforced: constructing the struct directly (as before) accepted
        // "CHAR(0)" and "CHAR(300)" even though `new`/`with_nullable` reject
        // lengths outside that range.
        Self::with_nullable(nullable, length)
    }
}
impl CharType {
    pub const DEFAULT_LENGTH: usize = 1;
    pub const MIN_LENGTH: usize = 1;
    pub const MAX_LENGTH: usize = 255;
    /// Nullable fixed-length character string type.
    pub fn new(length: usize) -> Result<Self, Error> {
        Self::with_nullable(true, length)
    }
    /// Fixed-length character string type with explicit nullability.
    ///
    /// Rejects lengths outside [`Self::MIN_LENGTH`]..=[`Self::MAX_LENGTH`].
    pub fn with_nullable(nullable: bool, length: usize) -> Result<Self, Error> {
        match length {
            Self::MIN_LENGTH..=Self::MAX_LENGTH => Ok(Self { nullable, length }),
            _ => DataTypeInvalidSnafu {
                message: "Char string length must be between 1 and 255.".to_string(),
            }
            .fail(),
        }
    }
    /// Declared character length.
    pub fn length(&self) -> usize {
        self.length
    }
    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::CHARACTER_STRING | DataTypeFamily::PREDEFINED
    }
}
/// DateType for paimon.
///
/// Data type of a date consisting of `year-month-day` with values ranging from `0000-01-01` to `9999-12-31`
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/DateType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct DateType {
    /// Nullability, round-tripped through the serialized type tag via
    /// `NullableType<DATE>`.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::DATE>>")]
    nullable: bool,
}
impl Default for DateType {
    /// Defaults to a nullable DATE, same as `DateType::new()`.
    fn default() -> Self {
        Self::new()
    }
}
impl DateType {
    /// Nullable DATE.
    pub fn new() -> Self {
        Self::with_nullable(true)
    }
    /// DATE with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        Self { nullable }
    }
    /// Type families: predefined date/time.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED | DataTypeFamily::DATETIME
    }
}
/// DecimalType for paimon.
///
/// Data type of a decimal number with fixed precision and scale.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/DecimalType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromStr, SerializeDisplay, Hash)]
pub struct DecimalType {
    nullable: bool,
    precision: u32,
    scale: u32,
}

impl Display for DecimalType {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Serialized form, e.g. `DECIMAL(10, 2)` or `DECIMAL(10, 2) NOT NULL`.
        write!(f, "DECIMAL({}, {})", self.precision, self.scale)?;
        if self.nullable {
            Ok(())
        } else {
            write!(f, " NOT NULL")
        }
    }
}

impl Default for DecimalType {
    fn default() -> Self {
        // DEFAULT_PRECISION / DEFAULT_SCALE are always in range, so this
        // constructor cannot fail.
        Self::new(Self::DEFAULT_PRECISION, Self::DEFAULT_SCALE).unwrap()
    }
}
impl FromStr for DecimalType {
    type Err = Error;

    /// Parses strings like `DECIMAL(10, 2)` or `DECIMAL(10)` (scale defaults
    /// to [`DecimalType::DEFAULT_SCALE`]), optionally followed by `NOT NULL`.
    ///
    /// # Errors
    /// Fails when the prefix/brackets are malformed, a number cannot be
    /// parsed, or precision/scale violate the constructor's invariants.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if !s.starts_with(serde_utils::DECIMAL::NAME) {
            return DataTypeInvalidSnafu {
                message: "Invalid DECIMAL type. Expected string to start with 'DECIMAL'.",
            }
            .fail();
        }
        let (open_bracket, close_bracket) = serde_utils::extract_brackets_pos(s, "DecimalType")?;
        let precision_scale_str = &s[open_bracket + 1..close_bracket];
        // `DECIMAL(p, s)` carries both numbers; `DECIMAL(p)` defaults the scale.
        let (precision_str, scale_str) = match precision_scale_str.split_once(',') {
            Some((p, sc)) => (p, Some(sc)),
            None => (precision_scale_str, None),
        };
        let precision = precision_str
            .trim()
            .parse::<u32>()
            .map_err(|_| Error::DataTypeInvalid {
                message: "Invalid DECIMAL precision. Unable to parse precision as a u32."
                    .to_string(),
            })?;
        let scale = match scale_str {
            Some(sc) => sc
                .trim()
                .parse::<u32>()
                .map_err(|_| Error::DataTypeInvalid {
                    message: "Invalid DECIMAL scale. Unable to parse scale as a u32.".to_string(),
                })?,
            None => Self::DEFAULT_SCALE,
        };
        let nullable = !s[close_bracket..].contains("NOT NULL");
        // Route through the validating constructor so out-of-range values
        // (e.g. `DECIMAL(0)` or scale > precision) are rejected during
        // parsing, instead of building an invalid DecimalType directly.
        Self::with_nullable(nullable, precision, scale)
    }
}
impl DecimalType {
    /// Valid precision range and defaults for `DECIMAL`.
    pub const MIN_PRECISION: u32 = 1;
    pub const MAX_PRECISION: u32 = 38;
    pub const DEFAULT_PRECISION: u32 = 10;
    pub const MIN_SCALE: u32 = 0;
    pub const DEFAULT_SCALE: u32 = 0;

    /// Creates a nullable `DECIMAL(precision, scale)`.
    pub fn new(precision: u32, scale: u32) -> Result<Self, Error> {
        Self::with_nullable(true, precision, scale)
    }

    /// Creates a `DECIMAL` with explicit nullability.
    ///
    /// # Errors
    /// Fails when `precision` is outside `[MIN_PRECISION, MAX_PRECISION]`
    /// or `scale` is outside `[MIN_SCALE, precision]`.
    pub fn with_nullable(nullable: bool, precision: u32, scale: u32) -> Result<Self, Error> {
        if !(Self::MIN_PRECISION..=Self::MAX_PRECISION).contains(&precision) {
            return DataTypeInvalidSnafu {
                message: format!(
                    "Decimal precision must be between {} and {} (both inclusive).",
                    Self::MIN_PRECISION,
                    Self::MAX_PRECISION
                ),
            }
            .fail();
        }
        if !(Self::MIN_SCALE..=precision).contains(&scale) {
            return DataTypeInvalidSnafu {
                message: format!(
                    "Decimal scale must be between {} and {} (both inclusive).",
                    Self::MIN_SCALE,
                    precision
                ),
            }
            .fail();
        }
        Ok(Self {
            nullable,
            precision,
            scale,
        })
    }

    /// Total number of digits.
    pub fn precision(&self) -> u32 {
        self.precision
    }

    /// Number of digits to the right of the decimal point.
    pub fn scale(&self) -> u32 {
        self.scale
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED | DataTypeFamily::NUMERIC | DataTypeFamily::EXACT_NUMERIC
    }
}
/// DoubleType for paimon.
///
/// Data type of an 8-byte double precision floating point number.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/DoubleType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Hash)]
#[serde(transparent)]
pub struct DoubleType {
    /// Whether NULL values are permitted for this column.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::DOUBLE>>")]
    nullable: bool,
}

impl Display for DoubleType {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // `NOT NULL` is appended only for non-nullable columns.
        let suffix = if self.nullable { "" } else { " NOT NULL" };
        write!(f, "DOUBLE{suffix}")
    }
}

impl Default for DoubleType {
    fn default() -> Self {
        DoubleType::new()
    }
}

impl DoubleType {
    /// Creates a nullable `DOUBLE`.
    pub fn new() -> Self {
        Self { nullable: true }
    }

    /// Creates a `DOUBLE` with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        DoubleType { nullable }
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED | DataTypeFamily::NUMERIC | DataTypeFamily::APPROXIMATE_NUMERIC
    }
}
/// FloatType for paimon.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/FloatType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Hash)]
#[serde(transparent)]
pub struct FloatType {
    /// Whether NULL values are permitted for this column.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::FLOAT>>")]
    nullable: bool,
}

impl Default for FloatType {
    fn default() -> Self {
        FloatType::new()
    }
}

impl FloatType {
    /// Creates a nullable `FLOAT`.
    pub fn new() -> Self {
        Self { nullable: true }
    }

    /// Creates a `FLOAT` with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        FloatType { nullable }
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED | DataTypeFamily::NUMERIC | DataTypeFamily::APPROXIMATE_NUMERIC
    }
}
/// IntType for paimon.
///
/// Data type of a 4-byte (2^32) signed integer with values from -2,147,483,648 to 2,147,483,647.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/IntType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct IntType {
    /// Whether NULL values are permitted for this column.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::INT>>")]
    nullable: bool,
}

impl Default for IntType {
    fn default() -> Self {
        IntType::new()
    }
}

impl IntType {
    /// Creates a nullable `INT`.
    pub fn new() -> Self {
        Self { nullable: true }
    }

    /// Creates an `INT` with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        IntType { nullable }
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED
            | DataTypeFamily::NUMERIC
            | DataTypeFamily::INTEGER_NUMERIC
            | DataTypeFamily::EXACT_NUMERIC
    }
}
/// LocalZonedTimestampType for paimon.
///
/// Data type of a timestamp WITH LOCAL time zone consisting of `year-month-day hour:minute:second[.fractional] zone` with up to nanosecond precision and values ranging from `0000-01-01 00:00:00.000000000 +14:59` to `9999-12-31 23:59:59.999999999 -14:59`. Leap seconds (23:59:60 and 23:59:61) are not supported as the semantics are closer to a point in time than a wall-clock time.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/TimestampType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromStr, SerializeDisplay, Hash)]
pub struct LocalZonedTimestampType {
    nullable: bool,
    precision: u32,
}

impl Display for LocalZonedTimestampType {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Serialized form, e.g. `TIMESTAMP(6) WITH LOCAL TIME ZONE [NOT NULL]`.
        write!(f, "TIMESTAMP({}) WITH LOCAL TIME ZONE", self.precision)?;
        if self.nullable {
            Ok(())
        } else {
            write!(f, " NOT NULL")
        }
    }
}

impl Default for LocalZonedTimestampType {
    fn default() -> Self {
        // DEFAULT_PRECISION is always within range, so this cannot fail.
        Self::new(Self::DEFAULT_PRECISION).unwrap()
    }
}
impl FromStr for LocalZonedTimestampType {
    type Err = Error;

    /// Parses strings like `TIMESTAMP(6) WITH LOCAL TIME ZONE`, optionally
    /// followed by `NOT NULL`.
    ///
    /// # Errors
    /// Fails on a bad prefix/brackets, an unparsable precision, or a
    /// precision outside the range enforced by `with_nullable`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The "WITH" token distinguishes this type from the plain
        // `TIMESTAMP(p)` form handled by `TimestampType::from_str`.
        if !s.starts_with(serde_utils::LocalZonedTimestamp::NAME) || !s.contains("WITH") {
            return DataTypeInvalidSnafu {
                message:
                    "Invalid LocalZonedTimestamp type. Expected string to start with 'TIMESTAMP'.",
            }
            .fail();
        }
        let (open_bracket, close_bracket) =
            serde_utils::extract_brackets_pos(s, "LocalZonedTimestampType")?;
        let precision_str = &s[open_bracket + 1..close_bracket];
        let precision =
            precision_str
                .trim()
                .parse::<u32>()
                .map_err(|_| Error::DataTypeInvalid {
                    message: "Invalid LocalZonedTimestamp length. Unable to parse length as a u32."
                        .to_string(),
                })?;
        let nullable = !s[close_bracket..].contains("NOT NULL");
        // Route through the validating constructor so out-of-range precisions
        // (e.g. `TIMESTAMP(99) WITH LOCAL TIME ZONE`) are rejected during
        // parsing, matching the invariants of `new`/`with_nullable`.
        Self::with_nullable(nullable, precision)
    }
}
impl LocalZonedTimestampType {
    /// Precision bounds are shared with the plain `TimestampType`.
    pub const MIN_PRECISION: u32 = TimestampType::MIN_PRECISION;
    pub const MAX_PRECISION: u32 = TimestampType::MAX_PRECISION;
    pub const DEFAULT_PRECISION: u32 = TimestampType::DEFAULT_PRECISION;

    /// Creates a nullable `TIMESTAMP(precision) WITH LOCAL TIME ZONE`.
    pub fn new(precision: u32) -> Result<Self, Error> {
        Self::with_nullable(true, precision)
    }

    /// Creates the type with explicit nullability.
    ///
    /// # Errors
    /// Fails when `precision` is outside `[MIN_PRECISION, MAX_PRECISION]`.
    pub fn with_nullable(nullable: bool, precision: u32) -> Result<Self, Error> {
        if !(Self::MIN_PRECISION..=Self::MAX_PRECISION).contains(&precision) {
            return DataTypeInvalidSnafu {
                message: format!(
                    "LocalZonedTimestamp precision must be between {} and {} (both inclusive).",
                    Self::MIN_PRECISION,
                    Self::MAX_PRECISION
                ),
            }
            .fail();
        }
        Ok(Self {
            nullable,
            precision,
        })
    }

    /// Fractional-second precision.
    pub fn precision(&self) -> u32 {
        self.precision
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED
            | DataTypeFamily::DATETIME
            | DataTypeFamily::TIMESTAMP
            | DataTypeFamily::EXTENSION
    }
}
/// SmallIntType for paimon.
///
/// Data type of a 2-byte (2^16) signed integer with values from -32,768 to 32,767.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/SmallIntType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Hash)]
#[serde(transparent)]
pub struct SmallIntType {
    /// Whether NULL values are permitted for this column.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::SMALLINT>>")]
    nullable: bool,
}

impl Default for SmallIntType {
    fn default() -> Self {
        SmallIntType::new()
    }
}

impl SmallIntType {
    /// Creates a nullable `SMALLINT`.
    pub fn new() -> Self {
        Self { nullable: true }
    }

    /// Creates a `SMALLINT` with explicit nullability.
    pub fn with_nullable(nullable: bool) -> Self {
        SmallIntType { nullable }
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED
            | DataTypeFamily::NUMERIC
            | DataTypeFamily::INTEGER_NUMERIC
            | DataTypeFamily::EXACT_NUMERIC
    }
}
/// TimeType for paimon.
///
/// Data type of a time WITHOUT time zone consisting of `hour:minute:second[.fractional]` with
/// up to nanosecond precision and values ranging from `00:00:00.000000000` to `23:59:59.999999999`.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/TimeType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromStr, SerializeDisplay, Hash)]
pub struct TimeType {
    nullable: bool,
    precision: u32,
}

impl Display for TimeType {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Serialized form, e.g. `TIME(3)` or `TIME(3) NOT NULL`.
        write!(f, "TIME({})", self.precision)?;
        if self.nullable {
            Ok(())
        } else {
            write!(f, " NOT NULL")
        }
    }
}

impl Default for TimeType {
    fn default() -> Self {
        // DEFAULT_PRECISION is always within range, so this cannot fail.
        Self::new(TimeType::DEFAULT_PRECISION).unwrap()
    }
}
impl FromStr for TimeType {
    type Err = Error;

    /// Parses strings like `TIME(3)`, optionally followed by `NOT NULL`.
    ///
    /// # Errors
    /// Fails on a bad prefix/brackets, an unparsable precision, or a
    /// precision outside the range enforced by `with_nullable`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The "STAMP" guard keeps `TIMESTAMP(...)` strings from being
        // misread as `TIME(...)`, since both share the `TIME` prefix.
        if !s.starts_with(serde_utils::TIME::NAME) || s.contains("STAMP") {
            return DataTypeInvalidSnafu {
                message: "Invalid TIME type. Expected string to start with 'TIME'.",
            }
            .fail();
        }
        let (open_bracket, close_bracket) = serde_utils::extract_brackets_pos(s, "TimeType")?;
        let precision_str = &s[open_bracket + 1..close_bracket];
        let precision = precision_str
            .trim()
            .parse::<u32>()
            .map_err(|_| Error::DataTypeInvalid {
                message: "Invalid TIME length. Unable to parse length as a u32.".to_string(),
            })?;
        let nullable = !s[close_bracket..].contains("NOT NULL");
        // Route through the validating constructor so `TIME(99)` is rejected
        // during parsing, matching the invariants of `new`/`with_nullable`.
        Self::with_nullable(nullable, precision)
    }
}
impl TimeType {
    /// Valid precision range and default for `TIME`.
    pub const MIN_PRECISION: u32 = 0;
    pub const MAX_PRECISION: u32 = 9;
    pub const DEFAULT_PRECISION: u32 = 0;

    /// Creates a nullable `TIME(precision)`.
    pub fn new(precision: u32) -> Result<Self, Error> {
        Self::with_nullable(true, precision)
    }

    /// Creates a `TIME` with explicit nullability.
    ///
    /// # Errors
    /// Fails when `precision` is outside `[MIN_PRECISION, MAX_PRECISION]`.
    pub fn with_nullable(nullable: bool, precision: u32) -> Result<Self, Error> {
        if !(Self::MIN_PRECISION..=Self::MAX_PRECISION).contains(&precision) {
            return DataTypeInvalidSnafu {
                message: format!(
                    "Time precision must be between {} and {} (both inclusive).",
                    Self::MIN_PRECISION,
                    Self::MAX_PRECISION
                ),
            }
            .fail();
        }
        Ok(Self {
            nullable,
            precision,
        })
    }

    /// Fractional-second precision.
    pub fn precision(&self) -> u32 {
        self.precision
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED | DataTypeFamily::DATETIME | DataTypeFamily::TIME
    }
}
/// TimestampType for paimon.
///
/// Data type of a timestamp WITHOUT time zone consisting of `year-month-day hour:minute:second[.fractional]` with up to nanosecond precision and values ranging from `0000-01-01 00:00:00.000000000` to `9999-12-31 23:59:59.999999999`.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/TimestampType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromStr, SerializeDisplay, Hash)]
pub struct TimestampType {
    nullable: bool,
    precision: u32,
}

impl Display for TimestampType {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Serialized form, e.g. `TIMESTAMP(6)` or `TIMESTAMP(6) NOT NULL`.
        write!(f, "TIMESTAMP({})", self.precision)?;
        if self.nullable {
            Ok(())
        } else {
            write!(f, " NOT NULL")
        }
    }
}

impl Default for TimestampType {
    fn default() -> Self {
        // DEFAULT_PRECISION is always within range, so this cannot fail.
        Self::new(Self::DEFAULT_PRECISION).unwrap()
    }
}
impl FromStr for TimestampType {
    type Err = Error;

    /// Parses strings like `TIMESTAMP(6)`, optionally followed by `NOT NULL`.
    ///
    /// # Errors
    /// Fails on a bad prefix/brackets, an unparsable precision, or a
    /// precision outside the range enforced by `with_nullable`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Reject `TIMESTAMP(..) WITH LOCAL TIME ZONE`: that form belongs to
        // `LocalZonedTimestampType` (whose parser requires "WITH").  This
        // mirrors how `TimeType::from_str` rejects strings containing
        // "STAMP", keeping the two parsers' accepted inputs disjoint.
        if !s.starts_with(serde_utils::TIMESTAMP::NAME) || s.contains("WITH") {
            return DataTypeInvalidSnafu {
                message: "Invalid TIMESTAMP type. Expected string to start with 'TIMESTAMP'.",
            }
            .fail();
        }
        let (open_bracket, close_bracket) = serde_utils::extract_brackets_pos(s, "TimestampType")?;
        let precision_str = &s[open_bracket + 1..close_bracket];
        let precision = precision_str
            .trim()
            .parse::<u32>()
            .map_err(|_| Error::DataTypeInvalid {
                message: "Invalid TIMESTAMP precision. Unable to parse precision as a u32."
                    .to_string(),
            })?;
        let nullable = !s[close_bracket..].contains("NOT NULL");
        // Route through the validating constructor so out-of-range precisions
        // (e.g. `TIMESTAMP(99)`) are rejected during parsing.
        Self::with_nullable(nullable, precision)
    }
}
impl TimestampType {
    /// Valid precision range and default for `TIMESTAMP`.
    pub const MIN_PRECISION: u32 = 0;
    pub const MAX_PRECISION: u32 = 9;
    pub const DEFAULT_PRECISION: u32 = 6;

    /// Creates a nullable `TIMESTAMP(precision)`.
    pub fn new(precision: u32) -> Result<Self, Error> {
        Self::with_nullable(true, precision)
    }

    /// Creates a `TIMESTAMP` with explicit nullability.
    ///
    /// # Errors
    /// Fails when `precision` is outside `[MIN_PRECISION, MAX_PRECISION]`.
    pub fn with_nullable(nullable: bool, precision: u32) -> Result<Self, Error> {
        if !(Self::MIN_PRECISION..=Self::MAX_PRECISION).contains(&precision) {
            return DataTypeInvalidSnafu {
                message: format!(
                    "Timestamp precision must be between {} and {} (both inclusive).",
                    Self::MIN_PRECISION,
                    Self::MAX_PRECISION
                ),
            }
            .fail();
        }
        Ok(Self {
            nullable,
            precision,
        })
    }

    /// Fractional-second precision.
    pub fn precision(&self) -> u32 {
        self.precision
    }

    /// Type families this type belongs to.
    pub fn family(&self) -> DataTypeFamily {
        DataTypeFamily::PREDEFINED | DataTypeFamily::DATETIME | DataTypeFamily::TIMESTAMP
    }
}
/// TinyIntType for paimon.
///
/// Data type of a 1-byte signed integer with values from -128 to 127.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/TinyIntType.java>.
#[serde_as]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Hash)]
#[serde(transparent)]
pub struct TinyIntType {
    // Whether NULL values are permitted; (de)serialized through the shared
    // NullableType helper, like the other fixed-width integer types here.
    #[serde_as(as = "FromInto<serde_utils::NullableType<serde_utils::TINYINT>>")]
    nullable: bool,
}
// Default is a nullable TINYINT, matching `new`.
impl Default for TinyIntType {
    fn default() -> Self {
        Self::new()
    }
}
impl TinyIntType {
pub fn new() -> Self {
Self::with_nullable(true)
}
pub fn with_nullable(nullable: bool) -> Self {
Self { nullable }
}
pub fn family(&self) -> DataTypeFamily {
DataTypeFamily::PREDEFINED
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | true |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/schema.rs | crates/paimon/src/spec/schema.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::spec::types::DataType;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::collections::HashMap;
/// The table schema for paimon table.
///
/// Impl References: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/TableSchema.java#L47>
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TableSchema {
    /// version of schema for paimon
    version: i32,
    // Schema id — presumably increments on schema evolution; confirm
    // against the Java reference implementation.
    id: i64,
    // Ordered list of fields (columns) in this schema.
    fields: Vec<DataField>,
    // Highest field id seen so far — presumably used to allocate ids for
    // newly added fields; confirm against the writer.
    highest_field_id: i32,
    // Names of the partition columns.
    partition_keys: Vec<String>,
    // Names of the primary-key columns (empty when there is no primary key).
    primary_keys: Vec<String>,
    // Free-form table options (key/value configuration).
    options: HashMap<String, String>,
    // Optional table comment.
    comment: Option<String>,
    // Timestamp in milliseconds — NOTE(review): epoch base/zone not shown
    // here; confirm it is Unix epoch millis like Snapshot::time_millis.
    time_millis: i64,
}
/// Data field for paimon table.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/types/DataField.java#L40>
#[serde_as]
#[derive(Debug, Clone, PartialEq, Hash, Eq, Deserialize, Serialize)]
pub struct DataField {
    // Numeric field id — presumably unique within a schema (see
    // TableSchema::highest_field_id); confirm against the writer.
    id: i32,
    // Field (column) name.
    name: String,
    // Field data type; serialized under the JSON key "type" because `type`
    // is a Rust keyword.
    #[serde(rename = "type")]
    typ: DataType,
    // Optional human-readable description; omitted from JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
}
impl DataField {
pub fn new(id: i32, name: String, typ: DataType) -> Self {
Self {
id,
name,
typ,
description: None,
}
}
pub fn id(&self) -> i32 {
self.id
}
pub fn name(&self) -> &str {
&self.name
}
pub fn data_type(&self) -> &DataType {
&self.typ
}
pub fn description(&self) -> Option<&str> {
self.description.as_deref()
}
pub fn with_id(mut self, new_id: i32) -> Self {
self.id = new_id;
self
}
pub fn with_name(mut self, new_name: String) -> Self {
self.name = new_name;
self
}
pub fn with_description(mut self, new_description: Option<String>) -> Self {
self.description = new_description;
self
}
}
/// Escapes an SQL identifier by doubling every embedded double quote.
pub fn escape_identifier(identifier: &str) -> String {
    let mut escaped = String::with_capacity(identifier.len());
    for ch in identifier.chars() {
        escaped.push(ch);
        if ch == '"' {
            // SQL escaping: `"` becomes `""`.
            escaped.push('"');
        }
    }
    escaped
}
/// Escapes an SQL string literal body by doubling every single quote.
pub fn escape_single_quotes(text: &str) -> String {
    let mut escaped = String::with_capacity(text.len());
    for ch in text.chars() {
        escaped.push(ch);
        if ch == '\'' {
            // SQL escaping: `'` becomes `''`.
            escaped.push('\'');
        }
    }
    escaped
}
#[cfg(test)]
mod tests {
    use crate::spec::IntType;
    use super::*;
    // Verifies the three-argument constructor plus `with_description`, and
    // that every accessor reflects the stored values.
    #[test]
    fn test_create_data_field() {
        let id = 1;
        let name = "field1".to_string();
        let typ = DataType::Int(IntType::new());
        let description = "test description".to_string();
        let data_field = DataField::new(id, name.clone(), typ.clone())
            .with_description(Some(description.clone()));
        assert_eq!(data_field.id(), id);
        assert_eq!(data_field.name(), name);
        assert_eq!(data_field.data_type(), &typ);
        assert_eq!(data_field.description(), Some(description).as_deref());
    }
    // `with_id` replaces only the id; other fields are untouched.
    #[test]
    fn test_new_id() {
        let d_type = DataType::Int(IntType::new());
        let new_data_field = DataField::new(1, "field1".to_string(), d_type.clone()).with_id(2);
        assert_eq!(new_data_field.id(), 2);
        assert_eq!(new_data_field.name(), "field1");
        assert_eq!(new_data_field.data_type(), &d_type);
        assert_eq!(new_data_field.description(), None);
    }
    // `with_name` replaces only the name; other fields are untouched.
    #[test]
    fn test_new_name() {
        let d_type = DataType::Int(IntType::new());
        let new_data_field =
            DataField::new(1, "field1".to_string(), d_type.clone()).with_name("field2".to_string());
        assert_eq!(new_data_field.id(), 1);
        assert_eq!(new_data_field.name(), "field2");
        assert_eq!(new_data_field.data_type(), &d_type);
        assert_eq!(new_data_field.description(), None);
    }
    // `with_description` replaces only the description.
    #[test]
    fn test_new_description() {
        let d_type = DataType::Int(IntType::new());
        let new_data_field = DataField::new(1, "field1".to_string(), d_type.clone())
            .with_description(Some("new description".to_string()));
        assert_eq!(new_data_field.id(), 1);
        assert_eq!(new_data_field.name(), "field1");
        assert_eq!(new_data_field.data_type(), &d_type);
        assert_eq!(new_data_field.description(), Some("new description"));
    }
    // SQL escaping doubles the quote character rather than backslash-escaping.
    #[test]
    fn test_escape_identifier() {
        let escaped_identifier = escape_identifier("\"identifier\"");
        assert_eq!(escaped_identifier, "\"\"identifier\"\"");
    }
    #[test]
    fn test_escape_single_quotes() {
        let escaped_text = escape_single_quotes("text with 'single' quotes");
        assert_eq!(escaped_text, "text with ''single'' quotes");
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/mod.rs | crates/paimon/src/spec/mod.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Spec module for paimon.
//!
//! All paimon specs types are defined here.
// Public spec modules, each re-exported flat so callers can write
// `crate::spec::TableSchema`, `crate::spec::Snapshot`, etc.
mod data_file;
pub use data_file::*;
mod schema;
pub use schema::*;
mod schema_change;
pub use schema_change::*;
mod snapshot;
pub use snapshot::*;
mod manifest_file_meta;
pub use manifest_file_meta::*;
mod index_file_meta;
pub use index_file_meta::*;
// Crate-internal modules: declared but not re-exported.
mod index_manifest;
mod manifest_common;
mod manifest_entry;
mod objects_file;
mod stats;
// Data type definitions, re-exported like the public modules above.
mod types;
pub use types::*;
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/index_file_meta.rs | crates/paimon/src/spec/index_file_meta.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
use indexmap::IndexMap;
/// Metadata of index file.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/index/IndexFileMeta.java>
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct IndexFileMeta {
    // Kind of index stored in the file; serialized as `_INDEX_TYPE`.
    #[serde(rename = "_INDEX_TYPE")]
    pub index_type: String,
    // Name of the index file.
    #[serde(rename = "_FILE_NAME")]
    pub file_name: String,
    // File size — NOTE(review): `i32` caps this at ~2 GiB; confirm the
    // upstream format also uses a 32-bit integer here.
    #[serde(rename = "_FILE_SIZE")]
    pub file_size: i32,
    // Row count covered by this index file.
    #[serde(rename = "_ROW_COUNT")]
    pub row_count: i32,
    // use Indexmap to ensure the order of deletion_vectors_ranges is consistent.
    // On input both spellings are accepted (`alias` covers the singular
    // `_DELETION_VECTORS_RANGES`); on output the `rename` spelling is written.
    // `default` makes the field optional when absent from the payload.
    #[serde(
        default,
        with = "map_serde",
        rename = "_DELETIONS_VECTORS_RANGES",
        alias = "_DELETION_VECTORS_RANGES"
    )]
    pub deletion_vectors_ranges: Option<IndexMap<String, (i32, i32)>>,
}
impl Display for IndexFileMeta {
    // Human-readable rendering — presumably mirrors the Java `toString`.
    // NOTE(review): the emitted text mixes camelCase ("fileName") and
    // snake_case ("deletion_vectors_ranges"); kept byte-for-byte because it
    // is a runtime string that downstream tooling may match on.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "IndexFileMeta{{index_type={}, fileName={}, fileSize={}, rowCount={}, deletion_vectors_ranges={:?}}}",
            self.index_type,
            self.file_name,
            self.file_size,
            self.row_count,
            self.deletion_vectors_ranges,
        )
    }
}
/// Custom (de)serialization for `deletion_vectors_ranges`.
///
/// The wire format is a sequence of positional `{f0, f1, f2}` records
/// (`f0` is the map key; the meaning of the `(f1, f2)` pair is not shown
/// here — presumably a byte range; confirm against the Java reference).
/// In memory an `IndexMap` keeps iteration order stable.
mod map_serde {
    use indexmap::IndexMap;
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    /// One wire-format record: key plus the two integers of the value pair.
    #[derive(Deserialize, Serialize)]
    struct Temp {
        f0: String,
        f1: i32,
        f2: i32,
    }

    /// Serializes `None` as null and `Some(map)` as a sequence of records.
    // Note: the parameter was previously (misleadingly) named `date` — it is
    // the optional ranges map, not a date.
    pub fn serialize<S>(
        ranges: &Option<IndexMap<String, (i32, i32)>>,
        s: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match ranges {
            None => s.serialize_none(),
            Some(map) => s.collect_seq(map.iter().map(|(key, (v1, v2))| Temp {
                f0: key.clone(),
                f1: *v1,
                f2: *v2,
            })),
        }
    }

    /// Deserializes null to `None` and a record sequence to `Some(map)`.
    #[allow(clippy::type_complexity)]
    pub fn deserialize<'de, D>(
        deserializer: D,
    ) -> Result<Option<IndexMap<String, (i32, i32)>>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let records: Option<Vec<Temp>> = Option::deserialize(deserializer)?;
        Ok(records.map(|rs| {
            rs.into_iter()
                .map(|t| (t.f0, (t.f1, t.f2)))
                .collect::<IndexMap<_, _>>()
        }))
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/snapshot.rs | crates/paimon/src/spec/snapshot.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use typed_builder::TypedBuilder;
/// Type of changes in this snapshot.
///
/// Unit variants (de)serialize as their upper-case names via the serde
/// derives, matching the JSON written by the reference implementation.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub enum CommitKind {
    /// Changes flushed from the mem table.
    APPEND,
    /// Changes by compacting existing data files.
    COMPACT,
    /// Changes that clear up the whole partition and then add new records.
    OVERWRITE,
    /// Collect statistics.
    ANALYZE,
}
/// Snapshot for paimon.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/Snapshot.java#L68>.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, TypedBuilder)]
#[serde(rename_all = "camelCase")]
pub struct Snapshot {
    /// version of snapshot
    version: i32,
    // id of this snapshot
    id: i64,
    // id of the table schema in effect for this snapshot — presumably
    // references `TableSchema::id`; confirm against the writer.
    schema_id: i64,
    /// a manifest list recording all changes from the previous snapshots
    base_manifest_list: String,
    /// a manifest list recording all new changes occurred in this snapshot
    delta_manifest_list: String,
    /// a manifest list recording all changelog produced in this snapshot
    #[builder(default = None)]
    #[serde(skip_serializing_if = "Option::is_none")]
    changelog_manifest_list: Option<String>,
    /// a manifest recording all index files of this table
    #[builder(default = None)]
    #[serde(skip_serializing_if = "Option::is_none")]
    index_manifest: Option<String>,
    /// user who committed this snapshot
    commit_user: String,
    /// Mainly for snapshot deduplication.
    ///
    /// If multiple snapshots have the same commitIdentifier, reading from any of these snapshots
    /// must produce the same table.
    ///
    /// If snapshot A has a smaller commitIdentifier than snapshot B, then snapshot A must be
    /// committed before snapshot B, and thus snapshot A must contain older records than snapshot B.
    commit_identifier: i64,
    // kind of change captured by this snapshot (append/compact/overwrite/analyze)
    commit_kind: CommitKind,
    /// timestamp of this snapshot
    time_millis: u64,
    /// log offsets of all changes occurred in this snapshot
    #[builder(default = None)]
    #[serde(skip_serializing_if = "Option::is_none")]
    log_offsets: Option<HashMap<i32, i64>>,
    /// record count of all changes occurred in this snapshot
    // NOTE(review): unlike the other Option fields, this field and
    // `delta_record_count` carry no `skip_serializing_if`, so `None`
    // serializes as JSON null — confirm the asymmetry is intentional.
    #[builder(default = None)]
    total_record_count: Option<i64>,
    /// record count of all new changes occurred in this snapshot
    #[builder(default = None)]
    delta_record_count: Option<i64>,
    /// record count of all changelog produced in this snapshot
    #[builder(default = None)]
    #[serde(skip_serializing_if = "Option::is_none")]
    changelog_record_count: Option<i64>,
    /// watermark for input records
    #[builder(default = None)]
    #[serde(skip_serializing_if = "Option::is_none")]
    watermark: Option<i64>,
    /// stats file name for statistics of this table
    #[builder(default = None)]
    #[serde(skip_serializing_if = "Option::is_none")]
    statistics: Option<String>,
}
impl Snapshot {
    /// Snapshot format version.
    #[inline]
    pub fn version(&self) -> i32 {
        self.version
    }

    /// Snapshot id.
    #[inline]
    pub fn id(&self) -> i64 {
        self.id
    }

    /// Id of the schema this snapshot was written with.
    #[inline]
    pub fn schema_id(&self) -> i64 {
        self.schema_id
    }

    /// Manifest list recording all changes from previous snapshots.
    #[inline]
    pub fn base_manifest_list(&self) -> &str {
        self.base_manifest_list.as_str()
    }

    /// Manifest list recording the new changes in this snapshot.
    #[inline]
    pub fn delta_manifest_list(&self) -> &str {
        self.delta_manifest_list.as_str()
    }

    /// Manifest list of changelog produced in this snapshot, if any.
    #[inline]
    pub fn changelog_manifest_list(&self) -> Option<&str> {
        self.changelog_manifest_list.as_deref()
    }

    /// Manifest recording the table's index files, if any.
    #[inline]
    pub fn index_manifest(&self) -> Option<&str> {
        self.index_manifest.as_deref()
    }

    /// User who committed this snapshot.
    #[inline]
    pub fn commit_user(&self) -> &str {
        self.commit_user.as_str()
    }

    /// Commit timestamp in milliseconds.
    #[inline]
    pub fn time_millis(&self) -> u64 {
        self.time_millis
    }

    /// Commit identifier (see the field docs for dedup semantics).
    #[inline]
    pub fn commit_identifier(&self) -> i64 {
        self.commit_identifier
    }

    /// Log offsets of all changes in this snapshot, if recorded.
    #[inline]
    pub fn log_offsets(&self) -> Option<&HashMap<i32, i64>> {
        self.log_offsets.as_ref()
    }

    /// Total record count across all changes, if recorded.
    #[inline]
    pub fn total_record_count(&self) -> Option<i64> {
        self.total_record_count
    }

    /// Record count of the new changes in this snapshot, if recorded.
    #[inline]
    pub fn delta_record_count(&self) -> Option<i64> {
        self.delta_record_count
    }

    /// Record count of the changelog produced, if recorded.
    #[inline]
    pub fn changelog_record_count(&self) -> Option<i64> {
        self.changelog_record_count
    }

    /// Watermark for input records, if recorded.
    #[inline]
    pub fn watermark(&self) -> Option<i64> {
        self.watermark
    }

    /// Statistics file name, if recorded.
    #[inline]
    pub fn statistics(&self) -> Option<&str> {
        self.statistics.as_deref()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use serde_json;
    use std::env::current_dir;
    // Reads tests/fixtures/snapshot/<name>.json relative to the crate root,
    // panicking with context on any I/O or encoding failure.
    fn load_fixture(name: &str) -> String {
        let path = current_dir()
            .unwrap_or_else(|err| panic!("current_dir must exist: {err}"))
            .join(format!("tests/fixtures/snapshot/{name}.json"));
        let bytes = std::fs::read(&path)
            .unwrap_or_else(|err| panic!("fixtures {path:?} load failed: {err}"));
        String::from_utf8(bytes).expect("fixtures content must be valid utf8")
    }
    // Expected in-memory values, one per fixture file on disk.
    fn test_cases() -> Vec<(&'static str, Snapshot)> {
        vec![
            (
                "snapshot-v3",
                Snapshot::builder()
                    .version(3)
                    .id(2)
                    .schema_id(0)
                    .base_manifest_list(
                        "manifest-list-ea4b892d-edc8-4ee7-9eee-7068b83a947b-0".to_string(),
                    )
                    .delta_manifest_list(
                        "manifest-list-ea4b892d-edc8-4ee7-9eee-7068b83a947b-1".to_string(),
                    )
                    .commit_user("abbaac9e-4a17-43e3-b135-2269da263e3a".to_string())
                    .commit_identifier(9223372036854775807)
                    .changelog_manifest_list(Some(
                        "manifest-list-ea4b892d-edc8-4ee7-9eee-7068b83a947b-2".to_string(),
                    ))
                    .commit_kind(CommitKind::APPEND)
                    .time_millis(1724509030368)
                    .log_offsets(Some(HashMap::default()))
                    .total_record_count(Some(4))
                    .delta_record_count(Some(2))
                    .changelog_record_count(Some(2))
                    .statistics(Some("statistics_string".to_string()))
                    .build(),
            ),
            // Same snapshot, but with the optional fields left unset to
            // exercise skip_serializing_if / missing-field handling.
            (
                "snapshot-v3-none-field",
                Snapshot::builder()
                    .version(3)
                    .id(2)
                    .schema_id(0)
                    .base_manifest_list(
                        "manifest-list-ea4b892d-edc8-4ee7-9eee-7068b83a947b-0".to_string(),
                    )
                    .delta_manifest_list(
                        "manifest-list-ea4b892d-edc8-4ee7-9eee-7068b83a947b-1".to_string(),
                    )
                    .commit_user("abbaac9e-4a17-43e3-b135-2269da263e3a".to_string())
                    .commit_identifier(9223372036854775807)
                    .changelog_manifest_list(None)
                    .commit_kind(CommitKind::APPEND)
                    .time_millis(1724509030368)
                    .log_offsets(Some(HashMap::default()))
                    .total_record_count(Some(4))
                    .delta_record_count(Some(2))
                    .changelog_record_count(Some(2))
                    .build(),
            ),
        ]
    }
    // Round-trip: fixture JSON -> Snapshot -> JSON -> Snapshot must be lossless.
    #[test]
    fn test_snapshot_serialization_deserialization() {
        for (name, expect) in test_cases() {
            let content = load_fixture(name);
            let snapshot: Snapshot =
                serde_json::from_str(content.as_str()).expect("Failed to deserialize Snapshot");
            assert_eq!(snapshot, expect);
            let serialized =
                serde_json::to_string(&snapshot).expect("Failed to serialize Snapshot");
            let deserialized: Snapshot = serde_json::from_str(&serialized)
                .expect("Failed to deserialize serialized Snapshot");
            assert_eq!(snapshot, deserialized);
        }
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/data_file.rs | crates/paimon/src/spec/data_file.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::spec::stats::BinaryTableStats;
use chrono::serde::ts_milliseconds::deserialize as from_millis;
use chrono::serde::ts_milliseconds::serialize as to_millis;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
/// A `BinaryRow` with zero fields.
pub const EMPTY_BINARY_ROW: BinaryRow = BinaryRow::new(0);
/// An implementation of InternalRow.
///
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/data/BinaryRow.java>
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BinaryRow {
    /// Number of fields in the row.
    arity: i32,
    /// Bytes reserved for the null bitmap: one bit per field, rounded up.
    null_bits_size_in_bytes: i32,
}
impl BinaryRow {
    /// Fixed header length, in bytes, at the start of every binary row.
    pub const HEADER_SIZE_IN_BYTES: i32 = 8;

    /// Build a row descriptor for `arity` fields.
    pub const fn new(arity: i32) -> Self {
        // One null bit per field, rounded up to whole bytes.
        let null_bits = (arity + 7) / 8;
        Self {
            arity,
            null_bits_size_in_bytes: null_bits,
        }
    }

    /// Width of the null bit-set region, rounded up to 8-byte words.
    ///
    /// NOTE(review): the header constant is added as though it were a bit
    /// count (upstream Java uses a HEADER_SIZE_IN_BITS = 8 here) — the
    /// arithmetic matches upstream, but the constant's name is misleading.
    pub const fn cal_bit_set_width_in_bytes(arity: i32) -> i32 {
        let words = (arity + 63 + Self::HEADER_SIZE_IN_BYTES) / 64;
        words * 8
    }

    /// Size of the fixed-length part: bit-set region plus 8 bytes per field.
    pub const fn cal_fix_part_size_in_bytes(arity: i32) -> i32 {
        Self::cal_bit_set_width_in_bytes(arity) + 8 * arity
    }
}
/// Metadata of a data file.
///
/// Impl References: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/io/DataFileMeta.java>
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DataFileMeta {
    /// File name of the data file.
    #[serde(rename = "_FILE_NAME")]
    pub file_name: String,
    /// File size in bytes.
    #[serde(rename = "_FILE_SIZE")]
    pub file_size: i64,
    // row_count tells the total number of rows (including add & delete) in this file.
    #[serde(rename = "_ROW_COUNT")]
    pub row_count: i64,
    /// Serialized minimum key contained in this file.
    #[serde(rename = "_MIN_KEY", with = "serde_bytes")]
    pub min_key: Vec<u8>,
    /// Serialized maximum key contained in this file.
    #[serde(rename = "_MAX_KEY", with = "serde_bytes")]
    pub max_key: Vec<u8>,
    /// Statistics over the key columns.
    #[serde(rename = "_KEY_STATS")]
    pub key_stats: BinaryTableStats,
    /// Statistics over the value columns.
    #[serde(rename = "_VALUE_STATS")]
    pub value_stats: BinaryTableStats,
    /// Smallest sequence number of any record in this file.
    #[serde(rename = "_MIN_SEQUENCE_NUMBER")]
    pub min_sequence_number: i64,
    /// Largest sequence number of any record in this file.
    #[serde(rename = "_MAX_SEQUENCE_NUMBER")]
    pub max_sequence_number: i64,
    /// Id of the table schema this file was written with.
    #[serde(rename = "_SCHEMA_ID")]
    pub schema_id: i64,
    /// LSM level the file belongs to.
    #[serde(rename = "_LEVEL")]
    pub level: i32,
    /// Paths of auxiliary files associated with this data file.
    #[serde(rename = "_EXTRA_FILES")]
    pub extra_files: Vec<String>,
    /// Creation timestamp, (de)serialized as epoch milliseconds.
    #[serde(
        rename = "_CREATION_TIME",
        serialize_with = "to_millis",
        deserialize_with = "from_millis"
    )]
    pub creation_time: DateTime<Utc>,
    #[serde(rename = "_DELETE_ROW_COUNT")]
    // rowCount = add_row_count + delete_row_count.
    pub delete_row_count: Option<i64>,
    // file index filter bytes, if it is small, store in data file meta
    #[serde(rename = "_EMBEDDED_FILE_INDEX", with = "serde_bytes")]
    pub embedded_index: Option<Vec<u8>>,
}
impl Display for DataFileMeta {
    // NOTE(review): not implemented yet — calling this panics via `todo!()`.
    fn fmt(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
        todo!()
    }
}
// Placeholder impl block for future `DataFileMeta` methods; `dead_code` is
// allowed so the empty block does not trip lints.
#[allow(dead_code)]
impl DataFileMeta {}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/manifest_common.rs | crates/paimon/src/spec/manifest_common.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use serde_repr::{Deserialize_repr, Serialize_repr};
/// Kind of a file.
/// Impl Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/manifest/FileKind.java>
// Serialized as its integer discriminant via `serde_repr`.
#[derive(PartialEq, Eq, Debug, Clone, Serialize_repr, Deserialize_repr)]
#[repr(u8)]
pub enum FileKind {
    /// The entry adds a file.
    Add = 0,
    /// The entry deletes a file.
    Delete = 1,
}
/// The Source of a file.
/// Impl References: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/manifest/FileSource.java>
// Serialized as its integer discriminant via `serde_repr`.
#[derive(PartialEq, Eq, Debug, Clone, Serialize_repr, Deserialize_repr)]
#[repr(u8)]
pub enum FileSource {
    /// File produced by an append write.
    Append = 0,
    /// File produced by a compaction.
    Compact = 1,
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/spec/schema_change.rs | crates/paimon/src/spec/schema_change.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::spec::DataType;
use serde::{Deserialize, Serialize};
/// Schema change to table.
///
/// Serialized with serde's externally-tagged representation, so each variant
/// appears in JSON as `{"variantName": { ...fields... }}` in camelCase.
///
/// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L36>
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SchemaChange {
    /// A SchemaChange to set a table option.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L95>
    SetOption { key: String, value: String },
    /// A SchemaChange to remove a table option.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L134>
    RemoveOption { key: String },
    /// A SchemaChange to update a table comment.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L167>
    UpdateComment { comment: Option<String> },
    /// A SchemaChange to add a new field.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L201>
    #[serde(rename_all = "camelCase")]
    AddColumn {
        field_name: String,
        data_type: DataType,
        description: Option<String>,
        // Renamed because `move` is a reserved word in Rust.
        #[serde(rename = "move")]
        column_move: Option<ColumnMove>,
    },
    /// A SchemaChange to rename a field.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L260>
    #[serde(rename_all = "camelCase")]
    RenameColumn {
        field_name: String,
        new_name: String,
    },
    /// A SchemaChange to drop a field.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L302>
    #[serde(rename_all = "camelCase")]
    DropColumn { field_name: String },
    /// A SchemaChange to update the field's type.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L335>
    #[serde(rename_all = "camelCase")]
    UpdateColumnType {
        field_name: String,
        data_type: DataType,
    },
    /// A SchemaChange to update the field's position.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L377>
    #[serde(rename_all = "camelCase")]
    UpdateColumnPosition {
        // Renamed because `move` is a reserved word in Rust.
        #[serde(rename = "move")]
        column_move: ColumnMove,
    },
    /// A SchemaChange to update the field's nullability.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L470>
    #[serde(rename_all = "camelCase")]
    UpdateColumnNullability {
        // A path of (possibly nested) field names identifying the column.
        field_name: Vec<String>,
        nullable: bool,
    },
    /// A SchemaChange to update the (nested) field's comment.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L512>
    #[serde(rename_all = "camelCase")]
    UpdateColumnComment {
        // A path of (possibly nested) field names identifying the column.
        field_names: Vec<String>,
        new_description: String,
    },
}
impl SchemaChange {
/// impl the `set_option`.
pub fn set_option(key: String, value: String) -> Self {
SchemaChange::SetOption { key, value }
}
/// impl the `remove_option`.
pub fn remove_option(key: String) -> Self {
SchemaChange::RemoveOption { key }
}
/// impl the `update_comment`.
pub fn update_comment(comment: Option<String>) -> Self {
SchemaChange::UpdateComment { comment }
}
/// impl the `add_column`.
pub fn add_column(field_name: String, data_type: DataType) -> Self {
SchemaChange::AddColumn {
field_name,
data_type,
description: None,
column_move: None,
}
}
/// impl the `add_column_with_description`.
pub fn add_column_with_description(
field_name: String,
data_type: DataType,
description: String,
) -> Self {
SchemaChange::AddColumn {
field_name,
data_type,
description: Some(description),
column_move: None,
}
}
/// impl the `add_column_with_description_and_column_move`.
pub fn add_column_with_description_and_column_move(
field_name: String,
data_type: DataType,
description: String,
column_move: ColumnMove,
) -> Self {
SchemaChange::AddColumn {
field_name,
data_type,
description: Some(description),
column_move: Some(column_move),
}
}
/// impl the `rename_column`.
pub fn rename_column(field_name: String, new_name: String) -> Self {
SchemaChange::RenameColumn {
field_name,
new_name,
}
}
/// impl the `drop_column`.
pub fn drop_column(field_name: String) -> Self {
SchemaChange::DropColumn { field_name }
}
/// impl the `update_column_type`.
pub fn update_column_type(field_name: String, new_data_type: DataType) -> Self {
SchemaChange::UpdateColumnType {
field_name,
data_type: new_data_type,
}
}
/// impl the `update_column_position`.
pub fn update_column_position(column_move: ColumnMove) -> Self {
SchemaChange::UpdateColumnPosition { column_move }
}
/// impl the `update_column_position`.
pub fn update_column_nullability(field_name: String, new_nullability: bool) -> Self {
SchemaChange::UpdateColumnNullability {
field_name: vec![field_name],
nullable: new_nullability,
}
}
/// impl the `update_columns_nullability`.
pub fn update_columns_nullability(field_names: Vec<String>, new_nullability: bool) -> Self {
SchemaChange::UpdateColumnNullability {
field_name: field_names,
nullable: new_nullability,
}
}
/// impl the `update_column_comment`.
pub fn update_column_comment(field_name: String, comment: String) -> Self {
SchemaChange::UpdateColumnComment {
field_names: vec![field_name],
new_description: comment,
}
}
/// impl the `update_columns_comment`.
pub fn update_columns_comment(field_names: Vec<String>, comment: String) -> Self {
SchemaChange::UpdateColumnComment {
field_names,
new_description: comment,
}
}
}
/// The type of move.
///
/// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L412>
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub enum ColumnMoveType {
    /// Move the column to the first position.
    FIRST,
    /// Move the column directly after a referenced column.
    AFTER,
}
/// Represents a requested column move in a struct.
///
/// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-core/src/main/java/org/apache/paimon/schema/SchemaChange.java#L410>
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ColumnMove {
    /// Name of the column being moved.
    pub field_name: String,
    /// Column the move is relative to; `None` for a `FIRST` move.
    pub referenced_field_name: Option<String>,
    // Renamed because `type` is a reserved word in Rust.
    #[serde(rename = "type")]
    pub move_type: ColumnMoveType,
}
impl ColumnMove {
    /// Name of the column being moved.
    pub fn field_name(&self) -> &str {
        self.field_name.as_str()
    }

    /// Column the move is relative to, if any.
    pub fn referenced_field_name(&self) -> Option<&str> {
        self.referenced_field_name.as_deref()
    }

    /// How the column is being repositioned.
    pub fn move_type(&self) -> &ColumnMoveType {
        &self.move_type
    }

    /// Move `field_name` to the first position.
    pub fn move_first(field_name: String) -> Self {
        Self {
            field_name,
            referenced_field_name: None,
            move_type: ColumnMoveType::FIRST,
        }
    }

    /// Move `field_name` to sit directly after `referenced_field_name`.
    pub fn move_after(field_name: String, referenced_field_name: String) -> Self {
        Self {
            field_name,
            referenced_field_name: Some(referenced_field_name),
            move_type: ColumnMoveType::AFTER,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::spec::{DoubleType, IntType};
    // Deserializes a JSON array covering every SchemaChange variant and
    // compares the result against hand-built values.
    #[test]
    fn test_schema_change_serialize_deserialize() {
        let json_data = r#"
        [
            {
                "setOption": {
                    "key": "snapshot.time-retained",
                    "value": "2h"
                }
            },
            {
                "removeOption": {
                    "key": "compaction.max.file-num"
                }
            },
            {
                "updateComment": {
                    "comment": "table.comment"
                }
            },
            {
                "addColumn": {
                    "fieldName": "col1",
                    "dataType": "INT",
                    "description": "col1_description",
                    "move": {
                        "fieldName": "col1_first",
                        "referencedFieldName": null,
                        "type": "FIRST"
                    }
                }
            },
            {
                "renameColumn": {
                    "fieldName": "col3",
                    "newName": "col3_new_name"
                }
            },
            {
                "dropColumn": {
                    "fieldName": "col1"
                }
            },
            {
                "updateColumnType": {
                    "fieldName": "col14",
                    "dataType": "DOUBLE"
                }
            },
            {
                "updateColumnPosition": {
                    "move": {
                        "fieldName": "col4_first",
                        "referencedFieldName": null,
                        "type": "FIRST"
                    }
                }
            },
            {
                "updateColumnNullability": {
                    "fieldName": [
                        "col5",
                        "f2"
                    ],
                    "nullable": false
                }
            },
            {
                "updateColumnComment": {
                    "fieldNames": [
                        "col5",
                        "f1"
                    ],
                    "newDescription": "col5 f1 field"
                }
            }
        ]"#;
        let schema_changes: Vec<SchemaChange> =
            serde_json::from_str(json_data).expect("Failed to deserialize SchemaChange.");
        assert_eq!(
            schema_changes,
            vec![
                SchemaChange::SetOption {
                    key: "snapshot.time-retained".to_string(),
                    value: "2h".to_string(),
                },
                SchemaChange::RemoveOption {
                    key: "compaction.max.file-num".to_string(),
                },
                SchemaChange::UpdateComment {
                    comment: Some("table.comment".to_string()),
                },
                SchemaChange::AddColumn {
                    field_name: "col1".to_string(),
                    data_type: DataType::Int(IntType::new()),
                    description: Some("col1_description".to_string()),
                    column_move: Some(ColumnMove {
                        field_name: "col1_first".to_string(),
                        referenced_field_name: None,
                        move_type: ColumnMoveType::FIRST,
                    }),
                },
                SchemaChange::RenameColumn {
                    field_name: "col3".to_string(),
                    new_name: "col3_new_name".to_string(),
                },
                SchemaChange::DropColumn {
                    field_name: "col1".to_string(),
                },
                SchemaChange::UpdateColumnType {
                    field_name: "col14".to_string(),
                    data_type: DataType::Double(DoubleType::new()),
                },
                SchemaChange::UpdateColumnPosition {
                    column_move: ColumnMove {
                        field_name: "col4_first".to_string(),
                        referenced_field_name: None,
                        move_type: ColumnMoveType::FIRST,
                    },
                },
                SchemaChange::UpdateColumnNullability {
                    field_name: vec!["col5".to_string(), "f2".to_string()],
                    nullable: false,
                },
                SchemaChange::UpdateColumnComment {
                    field_names: vec!["col5".to_string(), "f1".to_string()],
                    new_description: "col5 f1 field".to_string(),
                },
            ]
        );
    }
    // Checks that the two ColumnMove constructors match the JSON wire form.
    #[test]
    fn test_column_move_serialize_deserialize() {
        let json_data = r#"
        [
            {
                "fieldName": "col1",
                "referencedFieldName": null,
                "type": "FIRST"
            },
            {
                "fieldName": "col2_after",
                "referencedFieldName": "col2",
                "type": "AFTER"
            }
        ]"#;
        let column_moves: Vec<ColumnMove> = serde_json::from_str(json_data).unwrap();
        assert_eq!(
            column_moves,
            vec![
                ColumnMove::move_first("col1".to_string()),
                ColumnMove::move_after("col2_after".to_string(), "col2".to_string()),
            ]
        );
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/io/storage_memory.rs | crates/paimon/src/io/storage_memory.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use opendal::services::MemoryConfig;
use opendal::Operator;
use crate::Result;
/// Construct an OpenDAL `Operator` backed by the in-memory service.
pub(crate) fn memory_config_build() -> Result<Operator> {
    // The default memory configuration needs no extra options.
    let builder = Operator::from_config(MemoryConfig::default())?;
    Ok(builder.finish())
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/io/file_io.rs | crates/paimon/src/io/file_io.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use crate::error::*;
use std::collections::HashMap;
use std::ops::Range;
use std::sync::Arc;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use opendal::Operator;
use snafu::ResultExt;
use url::Url;
use super::Storage;
/// Entry point for file operations, backed by a shared [`Storage`] backend.
#[derive(Clone, Debug)]
pub struct FileIO {
    // Shared so clones of `FileIO` reuse the same backend resolution.
    storage: Arc<Storage>,
}
impl FileIO {
    /// Try to infer file io scheme from path.
    ///
    /// The input HashMap is paimon-java's [`Options`](https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/options/Options.java#L60)
    pub fn from_url(path: &str) -> crate::Result<FileIOBuilder> {
        let url = Url::parse(path).map_err(|_| Error::ConfigInvalid {
            message: format!("Invalid URL: {}", path),
        })?;
        Ok(FileIOBuilder::new(url.scheme()))
    }
    /// Create a new input file to read data.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L76>
    pub fn new_input(&self, path: &str) -> crate::Result<InputFile> {
        let (op, relative_path) = self.storage.create(path)?;
        let path = path.to_string();
        // Offset where the backend-relative part of `path` begins.
        let relative_path_pos = path.len() - relative_path.len();
        Ok(InputFile {
            op,
            path,
            relative_path_pos,
        })
    }
    /// Create a new output file to write data.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L87>
    pub fn new_output(&self, path: &str) -> Result<OutputFile> {
        let (op, relative_path) = self.storage.create(path)?;
        let path = path.to_string();
        // Offset where the backend-relative part of `path` begins.
        let relative_path_pos = path.len() - relative_path.len();
        Ok(OutputFile {
            op,
            path,
            relative_path_pos,
        })
    }
    /// Return a file status object that represents the path.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L97>
    pub async fn get_status(&self, path: &str) -> Result<FileStatus> {
        let (op, relative_path) = self.storage.create(path)?;
        let meta = op.stat(relative_path).await.context(IoUnexpectedSnafu {
            message: format!("Failed to get file status for '{}'", path),
        })?;
        Ok(FileStatus {
            size: meta.content_length(),
            is_dir: meta.is_dir(),
            last_modified: meta.last_modified(),
            path: path.to_string(),
        })
    }
    /// List the statuses of the files/directories in the given path if the path is a directory.
    ///
    /// References: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L105>
    ///
    /// FIXME: how to handle large dir? Better to return a stream instead?
    pub async fn list_status(&self, path: &str) -> Result<Vec<FileStatus>> {
        let (op, relative_path) = self.storage.create(path)?;
        let entries = op.list(relative_path).await.context(IoUnexpectedSnafu {
            message: format!("Failed to list files in '{}'", path),
        })?;
        // Scheme/authority prefix of the listed path; re-attached to each
        // entry so every returned status carries its own full path.
        // (Previously every status carried the *directory's* path.)
        // Assumes `entry.path()` is root-relative in the same form as
        // `relative_path` — TODO confirm against the Storage implementation.
        let prefix = &path[..path.len() - relative_path.len()];
        let mut statuses = Vec::with_capacity(entries.len());
        for entry in entries {
            let meta = entry.metadata();
            statuses.push(FileStatus {
                size: meta.content_length(),
                is_dir: meta.is_dir(),
                path: format!("{}{}", prefix, entry.path()),
                last_modified: meta.last_modified(),
            });
        }
        Ok(statuses)
    }
    /// Check if exists.
    ///
    /// References: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L128>
    pub async fn exists(&self, path: &str) -> Result<bool> {
        let (op, relative_path) = self.storage.create(path)?;
        op.is_exist(relative_path).await.context(IoUnexpectedSnafu {
            message: format!("Failed to check existence of '{}'", path),
        })
    }
    /// Delete a file.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L139>
    pub async fn delete_file(&self, path: &str) -> Result<()> {
        let (op, relative_path) = self.storage.create(path)?;
        op.delete(relative_path).await.context(IoUnexpectedSnafu {
            message: format!("Failed to delete file '{}'", path),
        })?;
        Ok(())
    }
    /// Delete a dir recursively.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L139>
    pub async fn delete_dir(&self, path: &str) -> Result<()> {
        let (op, relative_path) = self.storage.create(path)?;
        op.remove_all(relative_path)
            .await
            .context(IoUnexpectedSnafu {
                message: format!("Failed to delete directory '{}'", path),
            })?;
        Ok(())
    }
    /// Make the given file and all non-existent parents into directories.
    ///
    /// Has the semantics of Unix 'mkdir -p'. Existence of the directory hierarchy is not an error.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L150>
    pub async fn mkdirs(&self, path: &str) -> Result<()> {
        let (op, relative_path) = self.storage.create(path)?;
        op.create_dir(relative_path)
            .await
            .context(IoUnexpectedSnafu {
                message: format!("Failed to create directory '{}'", path),
            })?;
        Ok(())
    }
    /// Renames the file/directory src to dst.
    ///
    /// Reference: <https://github.com/apache/paimon/blob/release-0.8.2/paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java#L159>
    pub async fn rename(&self, src: &str, dst: &str) -> Result<()> {
        // Both paths are resolved against the same storage; the rename is
        // executed on the operator derived from `src`.
        let (op_src, relative_path_src) = self.storage.create(src)?;
        let (_, relative_path_dst) = self.storage.create(dst)?;
        op_src
            .rename(relative_path_src, relative_path_dst)
            .await
            .context(IoUnexpectedSnafu {
                message: format!("Failed to rename '{}' to '{}'", src, dst),
            })?;
        Ok(())
    }
}
/// Builder collecting a storage scheme and string properties for a [`FileIO`].
#[derive(Debug)]
pub struct FileIOBuilder {
    /// Storage scheme (e.g. "file", "memory"); `None` means unspecified.
    scheme_str: Option<String>,
    /// Backend-specific configuration properties.
    props: HashMap<String, String>,
}
impl FileIOBuilder {
    /// Start a builder for the given storage scheme.
    pub fn new(scheme_str: impl ToString) -> Self {
        Self {
            scheme_str: Some(scheme_str.to_string()),
            props: HashMap::default(),
        }
    }

    /// Deconstruct into `(scheme, properties)`; a missing scheme becomes `""`.
    pub(crate) fn into_parts(self) -> (String, HashMap<String, String>) {
        let scheme = self.scheme_str.unwrap_or_default();
        (scheme, self.props)
    }

    /// Add a single configuration property.
    pub fn with_prop(mut self, key: impl ToString, value: impl ToString) -> Self {
        self.props.insert(key.to_string(), value.to_string());
        self
    }

    /// Add every `(key, value)` pair from `args`.
    pub fn with_props(
        mut self,
        args: impl IntoIterator<Item = (impl ToString, impl ToString)>,
    ) -> Self {
        for (key, value) in args {
            self.props.insert(key.to_string(), value.to_string());
        }
        self
    }

    /// Resolve the storage backend and produce a ready-to-use [`FileIO`].
    pub fn build(self) -> crate::Result<FileIO> {
        let storage = Storage::build(self)?;
        Ok(FileIO {
            storage: Arc::new(storage),
        })
    }
}
/// Abstraction over ranged reads from a stored file.
#[async_trait::async_trait]
pub trait FileRead: Send + Unpin + 'static {
    /// Read the bytes in `range` (byte offsets, end exclusive).
    async fn read(&self, range: Range<u64>) -> crate::Result<Bytes>;
}
#[async_trait::async_trait]
impl FileRead for opendal::Reader {
    async fn read(&self, range: Range<u64>) -> crate::Result<Bytes> {
        // Delegate to the inherent `Reader::read` and flatten into `Bytes`.
        Ok(opendal::Reader::read(self, range).await?.to_bytes())
    }
}
/// Abstraction over streaming writes to a stored file.
#[async_trait::async_trait]
pub trait FileWrite: Send + Unpin + 'static {
    /// Append `bs` to the file being written.
    async fn write(&mut self, bs: Bytes) -> crate::Result<()>;
    /// Finish the write and flush/commit the file.
    async fn close(&mut self) -> crate::Result<()>;
}
#[async_trait::async_trait]
impl FileWrite for opendal::Writer {
    async fn write(&mut self, bs: Bytes) -> crate::Result<()> {
        // Delegate to the inherent `Writer::write`.
        Ok(opendal::Writer::write(self, bs).await?)
    }
    async fn close(&mut self) -> crate::Result<()> {
        // Delegate to the inherent `Writer::close`.
        Ok(opendal::Writer::close(self).await?)
    }
}
/// Size / kind / modification-time snapshot of a stored file or directory.
#[derive(Clone, Debug)]
pub struct FileStatus {
    /// Content length in bytes.
    pub size: u64,
    /// Whether the entry is a directory.
    pub is_dir: bool,
    /// Full path of the entry, including any scheme prefix.
    pub path: String,
    /// Last modification time, if the backend reports one.
    pub last_modified: Option<DateTime<Utc>>,
}
/// A readable handle to a file resolved against a storage backend.
#[derive(Debug)]
pub struct InputFile {
    op: Operator,
    /// Full path as supplied by the caller, scheme prefix included.
    path: String,
    /// Byte index where the backend-relative part of `path` begins.
    relative_path_pos: usize,
}
impl InputFile {
    /// Full location of this file, including the scheme prefix.
    pub fn location(&self) -> &str {
        &self.path
    }

    /// Path relative to the operator root (scheme prefix stripped).
    fn relative_path(&self) -> &str {
        &self.path[self.relative_path_pos..]
    }

    /// Whether the file currently exists in the underlying storage.
    pub async fn exists(&self) -> crate::Result<bool> {
        let found = self.op.is_exist(self.relative_path()).await?;
        Ok(found)
    }

    /// Fetch size / kind / mtime metadata for this file.
    pub async fn metadata(&self) -> crate::Result<FileStatus> {
        let meta = self.op.stat(self.relative_path()).await?;
        Ok(FileStatus {
            size: meta.content_length(),
            is_dir: meta.is_dir(),
            path: self.path.clone(),
            last_modified: meta.last_modified(),
        })
    }

    /// Read the entire file into memory.
    pub async fn read(&self) -> crate::Result<Bytes> {
        let buffer = self.op.read(self.relative_path()).await?;
        Ok(buffer.to_bytes())
    }

    /// Open a lazy reader for ranged access.
    pub async fn reader(&self) -> crate::Result<impl FileRead> {
        Ok(self.op.reader(self.relative_path()).await?)
    }
}
/// A writable handle to a file resolved against a storage backend.
#[derive(Debug, Clone)]
pub struct OutputFile {
    op: Operator,
    /// Full path as supplied by the caller, scheme prefix included.
    path: String,
    /// Byte index where the backend-relative part of `path` begins.
    relative_path_pos: usize,
}
impl OutputFile {
pub fn location(&self) -> &str {
&self.path
}
pub async fn exists(&self) -> crate::Result<bool> {
Ok(self
.op
.is_exist(&self.path[self.relative_path_pos..])
.await?)
}
pub fn to_input_file(self) -> InputFile {
InputFile {
op: self.op,
path: self.path,
relative_path_pos: self.relative_path_pos,
}
}
pub async fn write(&self, bs: Bytes) -> crate::Result<()> {
let mut writer = self.writer().await?;
writer.write(bs).await?;
writer.close().await
}
pub async fn writer(&self) -> crate::Result<Box<dyn FileWrite>> {
Ok(Box::new(
self.op.writer(&self.path[self.relative_path_pos..]).await?,
))
}
}
#[cfg(test)]
mod file_action_test {
    use std::fs;

    use super::*;
    use bytes::Bytes;
    // Build a FileIO over the in-memory backend.
    fn setup_memory_file_io() -> FileIO {
        let storage = Storage::Memory;
        FileIO {
            storage: Arc::new(storage),
        }
    }
    // Build a FileIO over the local filesystem backend.
    fn setup_fs_file_io() -> FileIO {
        let storage = Storage::LocalFs;
        FileIO {
            storage: Arc::new(storage),
        }
    }
    // Write a known payload, then check get_status reports its size.
    async fn common_test_get_status(file_io: &FileIO, path: &str) {
        let output = file_io.new_output(path).unwrap();
        let mut writer = output.writer().await.unwrap();
        writer.write(Bytes::from("hello world")).await.unwrap();
        writer.close().await.unwrap();
        let status = file_io.get_status(path).await.unwrap();
        // "hello world" is 11 bytes.
        assert_eq!(status.size, 11);
        file_io.delete_file(path).await.unwrap();
    }
    // Write a file, then check exists() sees it.
    async fn common_test_exists(file_io: &FileIO, path: &str) {
        let output = file_io.new_output(path).unwrap();
        let mut writer = output.writer().await.unwrap();
        writer.write(Bytes::from("hello world")).await.unwrap();
        writer.close().await.unwrap();
        let exists = file_io.exists(path).await.unwrap();
        assert!(exists);
        file_io.delete_file(path).await.unwrap();
    }
    // Write a file, delete it, then check it is gone.
    async fn common_test_delete_file(file_io: &FileIO, path: &str) {
        let output = file_io.new_output(path).unwrap();
        let mut writer = output.writer().await.unwrap();
        writer.write(Bytes::from("hello world")).await.unwrap();
        writer.close().await.unwrap();
        file_io.delete_file(path).await.unwrap();
        let exists = file_io.exists(path).await.unwrap();
        assert!(!exists);
    }
    // Create a directory hierarchy, verify it, then clean up directly on
    // the local filesystem (the "file:/" prefix is stripped for cleanup).
    async fn common_test_mkdirs(file_io: &FileIO, dir_path: &str) {
        file_io.mkdirs(dir_path).await.unwrap();
        let exists = file_io.exists(dir_path).await.unwrap();
        assert!(exists);
        let _ = fs::remove_dir_all(dir_path.strip_prefix("file:/").unwrap());
    }
    // Write src, rename to dst, then verify only dst remains.
    async fn common_test_rename(file_io: &FileIO, src: &str, dst: &str) {
        let output = file_io.new_output(src).unwrap();
        let mut writer = output.writer().await.unwrap();
        writer.write(Bytes::from("hello world")).await.unwrap();
        writer.close().await.unwrap();
        file_io.rename(src, dst).await.unwrap();
        let exists_old = file_io.exists(src).await.unwrap();
        let exists_new = file_io.exists(dst).await.unwrap();
        assert!(!exists_old);
        assert!(exists_new);
        file_io.delete_file(dst).await.unwrap();
    }
    #[tokio::test]
    async fn test_delete_file_memory() {
        let file_io = setup_memory_file_io();
        common_test_delete_file(&file_io, "memory:/test_file_delete_mem").await;
    }
    #[tokio::test]
    async fn test_get_status_fs() {
        let file_io = setup_fs_file_io();
        common_test_get_status(&file_io, "file:/tmp/test_file_get_status_fs").await;
    }
    #[tokio::test]
    async fn test_exists_fs() {
        let file_io = setup_fs_file_io();
        common_test_exists(&file_io, "file:/tmp/test_file_exists_fs").await;
    }
    #[tokio::test]
    async fn test_delete_file_fs() {
        let file_io = setup_fs_file_io();
        common_test_delete_file(&file_io, "file:/tmp/test_file_delete_fs").await;
    }
    #[tokio::test]
    async fn test_mkdirs_fs() {
        let file_io = setup_fs_file_io();
        common_test_mkdirs(&file_io, "file:/tmp/test_fs_dir/").await;
    }
    #[tokio::test]
    async fn test_rename_fs() {
        let file_io = setup_fs_file_io();
        common_test_rename(
            &file_io,
            "file:/tmp/test_file_fs_z",
            "file:/tmp/new_test_file_fs_o",
        )
        .await;
    }
}
#[cfg(test)]
mod input_output_test {
    use super::*;
    use bytes::Bytes;

    /// Build a `FileIO` over the in-memory storage backend.
    fn setup_memory_file_io() -> FileIO {
        FileIO {
            storage: Arc::new(Storage::Memory),
        }
    }

    /// Build a `FileIO` over the local-filesystem storage backend.
    fn setup_fs_file_io() -> FileIO {
        FileIO {
            storage: Arc::new(Storage::LocalFs),
        }
    }

    /// Write "hello world" through an output file, then read it back in full.
    async fn common_test_output_file_write_and_read(file_io: &FileIO, path: &str) {
        let out = file_io.new_output(path).unwrap();
        let mut sink = out.writer().await.unwrap();
        sink.write(Bytes::from("hello world")).await.unwrap();
        sink.close().await.unwrap();

        let read_back = out.to_input_file().read().await.unwrap();
        assert_eq!(&read_back[..], b"hello world");

        file_io.delete_file(path).await.unwrap();
    }

    /// An output file reports itself as existing after data is written.
    async fn common_test_output_file_exists(file_io: &FileIO, path: &str) {
        let out = file_io.new_output(path).unwrap();
        let mut sink = out.writer().await.unwrap();
        sink.write(Bytes::from("hello world")).await.unwrap();
        sink.close().await.unwrap();

        assert!(out.exists().await.unwrap());

        file_io.delete_file(path).await.unwrap();
    }

    /// Metadata size matches the number of bytes written ("hello world" = 11).
    async fn common_test_input_file_metadata(file_io: &FileIO, path: &str) {
        let out = file_io.new_output(path).unwrap();
        let mut sink = out.writer().await.unwrap();
        sink.write(Bytes::from("hello world")).await.unwrap();
        sink.close().await.unwrap();

        let metadata = out.to_input_file().metadata().await.unwrap();
        assert_eq!(metadata.size, 11);

        file_io.delete_file(path).await.unwrap();
    }

    /// A ranged read returns only the requested byte span.
    async fn common_test_input_file_partial_read(file_io: &FileIO, path: &str) {
        let out = file_io.new_output(path).unwrap();
        let mut sink = out.writer().await.unwrap();
        sink.write(Bytes::from("hello world")).await.unwrap();
        sink.close().await.unwrap();

        let input = out.to_input_file();
        let head = input.reader().await.unwrap().read(0..5).await.unwrap(); // read "hello"
        assert_eq!(&head[..], b"hello");

        file_io.delete_file(path).await.unwrap();
    }

    #[tokio::test]
    async fn test_output_file_write_and_read_memory() {
        common_test_output_file_write_and_read(&setup_memory_file_io(), "memory:/test_file_rw_mem")
            .await;
    }

    #[tokio::test]
    async fn test_output_file_exists_memory() {
        common_test_output_file_exists(&setup_memory_file_io(), "memory:/test_file_exist_mem").await;
    }

    #[tokio::test]
    async fn test_input_file_metadata_memory() {
        common_test_input_file_metadata(&setup_memory_file_io(), "memory:/test_file_meta_mem").await;
    }

    #[tokio::test]
    async fn test_input_file_partial_read_memory() {
        common_test_input_file_partial_read(&setup_memory_file_io(), "memory:/test_file_part_read_mem")
            .await;
    }

    #[tokio::test]
    async fn test_output_file_write_and_read_fs() {
        common_test_output_file_write_and_read(&setup_fs_file_io(), "file:/tmp/test_file_fs_rw").await;
    }

    #[tokio::test]
    async fn test_output_file_exists_fs() {
        common_test_output_file_exists(&setup_fs_file_io(), "file:/tmp/test_file_exists").await;
    }

    #[tokio::test]
    async fn test_input_file_metadata_fs() {
        common_test_input_file_metadata(&setup_fs_file_io(), "file:/tmp/test_file_meta").await;
    }

    #[tokio::test]
    async fn test_input_file_partial_read_fs() {
        common_test_input_file_partial_read(&setup_fs_file_io(), "file:/tmp/test_file_read_fs").await;
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/io/storage.rs | crates/paimon/src/io/storage.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use opendal::{Operator, Scheme};
use crate::error;
use super::FileIOBuilder;
/// The storage carries all supported storage services in paimon.
///
/// Each variant is gated behind the cargo feature that enables its
/// opendal backend.
#[derive(Debug)]
pub enum Storage {
    /// Transient in-memory storage (feature `storage-memory`).
    #[cfg(feature = "storage-memory")]
    Memory,
    /// Local filesystem storage rooted at `/` (feature `storage-fs`).
    #[cfg(feature = "storage-fs")]
    LocalFs,
}
impl Storage {
    /// Resolve a [`Storage`] variant from the scheme carried by the builder.
    ///
    /// # Errors
    /// Returns `IoUnsupported` when the scheme maps to a service that was
    /// not enabled through a cargo feature.
    pub(crate) fn build(file_io_builder: FileIOBuilder) -> crate::Result<Self> {
        let (scheme_str, _) = file_io_builder.into_parts();
        match Self::parse_scheme(&scheme_str)? {
            #[cfg(feature = "storage-memory")]
            Scheme::Memory => Ok(Self::Memory),
            #[cfg(feature = "storage-fs")]
            Scheme::Fs => Ok(Self::LocalFs),
            _ => Err(error::Error::IoUnsupported {
                message: "Unsupported storage feature".to_string(),
            }),
        }
    }

    /// Create an opendal [`Operator`] for `path`, returning it together
    /// with the path made relative to the operator root (scheme prefix or
    /// leading slash stripped).
    pub(crate) fn create<'a>(&self, path: &'a str) -> crate::Result<(Operator, &'a str)> {
        match self {
            #[cfg(feature = "storage-memory")]
            Storage::Memory => {
                let op = super::memory_config_build()?;
                // Drop the scheme prefix when present, otherwise drop the
                // leading slash so the path is relative to the root.
                let relative = match path.strip_prefix("memory:/") {
                    Some(rest) => rest,
                    None => &path[1..],
                };
                Ok((op, relative))
            }
            #[cfg(feature = "storage-fs")]
            Storage::LocalFs => {
                let op = super::fs_config_build()?;
                let relative = match path.strip_prefix("file:/") {
                    Some(rest) => rest,
                    None => &path[1..],
                };
                Ok((op, relative))
            }
        }
    }

    /// Map a scheme string to an opendal [`Scheme`], treating the empty
    /// string as the local filesystem.
    fn parse_scheme(scheme: &str) -> crate::Result<Scheme> {
        match scheme {
            "memory" => Ok(Scheme::Memory),
            "file" | "" => Ok(Scheme::Fs),
            s => Ok(s.parse::<Scheme>()?),
        }
    }
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/io/mod.rs | crates/paimon/src/io/mod.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
mod file_io;
pub use file_io::*;
mod storage;
pub use storage::*;
#[cfg(feature = "storage-fs")]
mod storage_fs;
#[cfg(feature = "storage-fs")]
use storage_fs::*;
#[cfg(feature = "storage-memory")]
mod storage_memory;
#[cfg(feature = "storage-memory")]
use storage_memory::*;
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
apache/paimon-rust | https://github.com/apache/paimon-rust/blob/1f69c4a6c24f5f7fa858cad57be81fc135c1da71/crates/paimon/src/io/storage_fs.rs | crates/paimon/src/io/storage_fs.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use opendal::services::FsConfig;
use opendal::Operator;
use crate::Result;
/// Build a new opendal [`Operator`] over the local filesystem, rooted at `/`.
///
/// # Errors
/// Returns an error if the operator cannot be constructed from the config.
pub(crate) fn fs_config_build() -> Result<Operator> {
    let mut cfg = FsConfig::default();
    // Anchor the operator at the filesystem root so callers can pass
    // absolute paths (with the leading slash stripped by `Storage::create`).
    cfg.root = Some("/".to_string());
    Ok(Operator::from_config(cfg)?.finish())
}
| rust | Apache-2.0 | 1f69c4a6c24f5f7fa858cad57be81fc135c1da71 | 2026-01-04T20:18:00.429093Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/result_recorder.rs | src/result_recorder.rs | use ahash::AHashMap;
use crossbeam_channel::{Receiver, Sender};
use indoc::formatdoc;
use std::fmt::Write;
use std::thread::JoinHandle;
use std::{mem, thread};
use crate::parser::gc_record::{FieldType, GcRecord};
use crate::parser::record::Record::{
AllocationSites, ControlSettings, CpuSamples, EndThread, GcSegment, HeapDumpEnd, HeapDumpStart,
HeapSummary, LoadClass, StackFrame, StackTrace, StartThread, UnloadClass, Utf8String,
};
use crate::parser::record::{LoadClassData, Record, StackFrameData, StackTraceData};
use crate::rendered_result::{ClassAllocationStats, RenderedResult};
/// Static, per-class data captured from a `ClassDump` record: the parent
/// class id and the shallow size of one instance.
#[derive(Debug, Copy, Clone)]
pub struct ClassInfo {
    super_class_object_id: u64,
    instance_size: u32,
}

impl ClassInfo {
    /// Bundle a class's super-class object id and shallow instance size.
    const fn new(super_class_object_id: u64, instance_size: u32) -> Self {
        Self {
            instance_size,
            super_class_object_id,
        }
    }
}
/// Running tally of how many instances of a given class were seen.
#[derive(Debug, Copy, Clone)]
pub struct ClassInstanceCounter {
    number_of_instances: u64,
}

impl ClassInstanceCounter {
    /// A counter starting at zero.
    pub const fn empty() -> Self {
        Self {
            number_of_instances: 0,
        }
    }

    /// Record one more instance of the class.
    pub const fn add_instance(&mut self) {
        self.number_of_instances += 1;
    }
}
/// Aggregated statistics over all arrays sharing one element type/class:
/// how many arrays, the total element count, and the largest array seen.
#[derive(Debug, Copy, Clone)]
pub struct ArrayCounter {
    number_of_arrays: u64,
    max_size_seen: u32,
    total_number_of_elements: u64,
}

impl ArrayCounter {
    /// A counter with no arrays recorded yet.
    pub const fn empty() -> Self {
        Self {
            number_of_arrays: 0,
            total_number_of_elements: 0,
            max_size_seen: 0,
        }
    }

    /// Record one array holding `elements` items, updating the maximum.
    pub fn add_elements_from_array(&mut self, elements: u32) {
        self.number_of_arrays += 1;
        self.total_number_of_elements += u64::from(elements);
        self.max_size_seen = self.max_size_seen.max(elements);
    }
}
/// Accumulates statistics over the stream of parsed hprof records and
/// renders them into the final report once the stream ends.
pub struct ResultRecorder {
    // Recorder's params
    // Size in bytes of object identifiers in the dump (4 or 8).
    id_size: u32,
    // Whether to render the full list of captured strings.
    list_strings: bool,
    // Tag counters (one per top-level record tag)
    classes_unloaded: u32,
    stack_frames: u32,
    stack_traces: u32,
    start_threads: u32,
    end_threads: u32,
    heap_summaries: u32,
    heap_dumps: u32,
    allocation_sites: u32,
    control_settings: u32,
    cpu_samples: u32,
    // GC tag counters (one per heap-dump sub-record kind)
    heap_dump_segments_all_sub_records: u32,
    heap_dump_segments_gc_root_unknown: u32,
    heap_dump_segments_gc_root_thread_object: u32,
    heap_dump_segments_gc_root_jni_global: u32,
    heap_dump_segments_gc_root_jni_local: u32,
    heap_dump_segments_gc_root_java_frame: u32,
    heap_dump_segments_gc_root_native_stack: u32,
    heap_dump_segments_gc_root_sticky_class: u32,
    heap_dump_segments_gc_root_thread_block: u32,
    heap_dump_segments_gc_root_monitor_used: u32,
    heap_dump_segments_gc_object_array_dump: u32,
    heap_dump_segments_gc_instance_dump: u32,
    heap_dump_segments_gc_primitive_array_dump: u32,
    heap_dump_segments_gc_class_dump: u32,
    // Captured state
    // Resolution chain: "object_id" -> "class_id" -> "class_name_id" -> "utf8_string"
    utf8_strings_by_id: AHashMap<u64, Box<str>>,
    class_data: Vec<LoadClassData>, // holds class_data
    class_data_by_id: AHashMap<u64, usize>, // value is index into class_data
    class_data_by_serial_number: AHashMap<u32, usize>, // value is index into class_data
    classes_single_instance_size_by_id: AHashMap<u64, ClassInfo>,
    classes_all_instance_total_size_by_id: AHashMap<u64, ClassInstanceCounter>,
    primitive_array_counters: AHashMap<FieldType, ArrayCounter>,
    object_array_counters: AHashMap<u64, ArrayCounter>,
    stack_trace_by_serial_number: AHashMap<u32, StackTraceData>,
    stack_frame_by_id: AHashMap<u64, StackFrameData>,
}
impl ResultRecorder {
/// Create an empty recorder.
///
/// `id_size` is the pointer size of the dump in bytes (4 or 8);
/// `list_strings` enables the full string listing in the final result.
pub fn new(id_size: u32, list_strings: bool) -> Self {
    Self {
        id_size,
        list_strings,
        // All counters start at zero; they are bumped in `record_records`.
        classes_unloaded: 0,
        stack_frames: 0,
        stack_traces: 0,
        start_threads: 0,
        end_threads: 0,
        heap_summaries: 0,
        heap_dumps: 0,
        allocation_sites: 0,
        control_settings: 0,
        cpu_samples: 0,
        heap_dump_segments_all_sub_records: 0,
        heap_dump_segments_gc_root_unknown: 0,
        heap_dump_segments_gc_root_thread_object: 0,
        heap_dump_segments_gc_root_jni_global: 0,
        heap_dump_segments_gc_root_jni_local: 0,
        heap_dump_segments_gc_root_java_frame: 0,
        heap_dump_segments_gc_root_native_stack: 0,
        heap_dump_segments_gc_root_sticky_class: 0,
        heap_dump_segments_gc_root_thread_block: 0,
        heap_dump_segments_gc_root_monitor_used: 0,
        heap_dump_segments_gc_object_array_dump: 0,
        heap_dump_segments_gc_primitive_array_dump: 0,
        heap_dump_segments_gc_instance_dump: 0,
        heap_dump_segments_gc_class_dump: 0,
        utf8_strings_by_id: AHashMap::new(),
        class_data: vec![],
        class_data_by_id: AHashMap::new(),
        class_data_by_serial_number: AHashMap::default(),
        classes_single_instance_size_by_id: AHashMap::new(),
        classes_all_instance_total_size_by_id: AHashMap::new(),
        primitive_array_counters: AHashMap::new(),
        object_array_counters: AHashMap::new(),
        stack_trace_by_serial_number: AHashMap::default(),
        stack_frame_by_id: AHashMap::default(),
    }
}
/// Resolve the printable name of a class, converting the JVM-internal
/// '/' package separators to '.'.
///
/// # Panics
/// Panics when the class id has no loaded class or no UTF-8 name string.
fn get_class_name_string(&self, class_id: u64) -> String {
    let raw_name = self
        .class_data_by_id
        .get(&class_id)
        .and_then(|idx| self.class_data.get(*idx))
        .and_then(|data| self.utf8_strings_by_id.get(&data.class_name_id))
        .expect("class_id must have an UTF-8 string representation available");
    raw_name.replace('/', ".")
}
/// Consume batches of parsed records on a dedicated OS thread.
///
/// Runs until the sending side of `receive_records` is dropped, then
/// renders the accumulated state into a `RenderedResult` and sends it on
/// `send_result`. Drained `Vec<Record>` buffers are recycled back to the
/// producer through `send_pooled_vec`.
///
/// # Errors
/// Returns an error if the OS thread cannot be spawned.
pub fn start(
    mut self,
    receive_records: Receiver<Vec<Record>>,
    send_result: Sender<RenderedResult>,
    send_pooled_vec: Sender<Vec<Record>>,
) -> std::io::Result<JoinHandle<()>> {
    thread::Builder::new()
        .name("hprof-recorder".to_string())
        .spawn(move || {
            loop {
                if let Ok(mut records) = receive_records.recv() {
                    self.record_records(&mut records);
                    // clear values but retain underlying storage
                    records.clear();
                    // send back pooled vec (swallow errors as it is possible the receiver was already dropped)
                    send_pooled_vec.send(records).unwrap_or_default();
                } else {
                    // recv() failed => all senders dropped:
                    // no more Record to pull, generate and send back results
                    let rendered_result = RenderedResult {
                        summary: self.render_summary(),
                        thread_info: self.render_thread_info(),
                        memory_usage: self.aggregate_memory_usage(),
                        duplicated_strings: self.render_duplicated_strings(),
                        captured_strings: if self.list_strings {
                            Some(self.render_captured_strings())
                        } else {
                            None
                        },
                    };
                    send_result
                        .send(rendered_result)
                        .expect("channel should not be closed");
                    break;
                }
            }
        })
}
/// Fold one batch of parsed records into the recorder's counters and
/// captured state.
///
/// Heap payloads (strings, class data, stack data) are moved out of the
/// records with `mem::take` instead of cloned; the caller clears and
/// recycles the vec afterwards.
fn record_records(&mut self, records: &mut [Record]) {
    for record in records.iter_mut() {
        match record {
            Utf8String { id, str } => {
                self.utf8_strings_by_id.insert(*id, mem::take(str));
            }
            LoadClass(load_class_data) => {
                let class_object_id = load_class_data.class_object_id;
                let class_serial_number = load_class_data.serial_number;
                self.class_data.push(mem::take(load_class_data));
                // Index the stored class data by both object id and serial number.
                let data_index = self.class_data.len() - 1;
                self.class_data_by_id.insert(class_object_id, data_index);
                self.class_data_by_serial_number
                    .insert(class_serial_number, data_index);
            }
            UnloadClass { .. } => self.classes_unloaded += 1,
            StackFrame(stack_frame_data) => {
                self.stack_frames += 1;
                self.stack_frame_by_id
                    .insert(stack_frame_data.stack_frame_id, mem::take(stack_frame_data));
            }
            StackTrace(stack_trace_data) => {
                self.stack_traces += 1;
                self.stack_trace_by_serial_number
                    .insert(stack_trace_data.serial_number, mem::take(stack_trace_data));
            }
            StartThread { .. } => self.start_threads += 1,
            EndThread { .. } => self.end_threads += 1,
            AllocationSites { .. } => self.allocation_sites += 1,
            HeapSummary { .. } => self.heap_summaries += 1,
            ControlSettings { .. } => self.control_settings += 1,
            CpuSamples { .. } => self.cpu_samples += 1,
            HeapDumpEnd { .. } => (),
            HeapDumpStart { .. } => self.heap_dumps += 1,
            GcSegment(gc_record) => {
                self.heap_dump_segments_all_sub_records += 1;
                match gc_record {
                    // GC roots are only counted, not retained.
                    GcRecord::RootUnknown { .. } => {
                        self.heap_dump_segments_gc_root_unknown += 1;
                    }
                    GcRecord::RootThreadObject { .. } => {
                        self.heap_dump_segments_gc_root_thread_object += 1;
                    }
                    GcRecord::RootJniGlobal { .. } => {
                        self.heap_dump_segments_gc_root_jni_global += 1;
                    }
                    GcRecord::RootJniLocal { .. } => {
                        self.heap_dump_segments_gc_root_jni_local += 1;
                    }
                    GcRecord::RootJavaFrame { .. } => {
                        self.heap_dump_segments_gc_root_java_frame += 1;
                    }
                    GcRecord::RootNativeStack { .. } => {
                        self.heap_dump_segments_gc_root_native_stack += 1;
                    }
                    GcRecord::RootStickyClass { .. } => {
                        self.heap_dump_segments_gc_root_sticky_class += 1;
                    }
                    GcRecord::RootThreadBlock { .. } => {
                        self.heap_dump_segments_gc_root_thread_block += 1;
                    }
                    GcRecord::RootMonitorUsed { .. } => {
                        self.heap_dump_segments_gc_root_monitor_used += 1;
                    }
                    // Instance dumps feed the per-class instance counters.
                    GcRecord::InstanceDump {
                        class_object_id, ..
                    } => {
                        self.classes_all_instance_total_size_by_id
                            .entry(*class_object_id)
                            .or_insert_with(ClassInstanceCounter::empty)
                            .add_instance();
                        self.heap_dump_segments_gc_instance_dump += 1;
                    }
                    // Array dumps feed the per-class / per-type array counters.
                    GcRecord::ObjectArrayDump {
                        number_of_elements,
                        array_class_id,
                        ..
                    } => {
                        self.object_array_counters
                            .entry(*array_class_id)
                            .or_insert_with(ArrayCounter::empty)
                            .add_elements_from_array(*number_of_elements);
                        self.heap_dump_segments_gc_object_array_dump += 1;
                    }
                    GcRecord::PrimitiveArrayDump {
                        number_of_elements,
                        element_type,
                        ..
                    } => {
                        self.primitive_array_counters
                            .entry(*element_type)
                            .or_insert_with(ArrayCounter::empty)
                            .add_elements_from_array(*number_of_elements);
                        self.heap_dump_segments_gc_primitive_array_dump += 1;
                    }
                    // Class dumps record the shallow instance size once per class.
                    GcRecord::ClassDump(class_dump_fields) => {
                        let class_object_id = class_dump_fields.class_object_id;
                        self.classes_single_instance_size_by_id
                            .entry(class_object_id)
                            .or_insert_with(|| {
                                let instance_size = class_dump_fields.instance_size;
                                let super_class_object_id =
                                    class_dump_fields.super_class_object_id;
                                ClassInfo::new(super_class_object_id, instance_size)
                            });
                        self.heap_dump_segments_gc_class_dump += 1;
                    }
                }
            }
        }
    }
}
/// Render every captured UTF-8 string, sorted, one per line, under a
/// "List of Strings" header.
fn render_captured_strings(&self) -> String {
    let mut sorted: Vec<_> = self.utf8_strings_by_id.values().collect();
    sorted.sort_unstable();
    sorted
        .into_iter()
        .fold(String::from("\nList of Strings\n"), |mut acc, s| {
            acc.push_str(s);
            acc.push('\n');
            acc
        })
}
/// Report how many duplicated UTF-8 strings exist in the dump.
///
/// Returns `None` when every captured string is unique. `all_len` is the
/// total number of captured strings and `dedup_len` the unique count;
/// the previous message mislabeled the total as "unique strings".
fn render_duplicated_strings(&self) -> Option<String> {
    let mut strings: Vec<_> = self.utf8_strings_by_id.values().collect();
    strings.sort_unstable();
    let all_len = strings.len();
    // dedup only removes adjacent duplicates, hence the sort above.
    strings.dedup();
    let dedup_len = strings.len();
    if all_len == dedup_len {
        None
    } else {
        Some(format!(
            "\nFound {} duplicated strings out of {} total strings\n",
            all_len - dedup_len,
            all_len
        ))
    }
}
/// Render one section per thread that has a non-empty stack trace,
/// resolving class, method, file names and line numbers per frame.
///
/// # Panics
/// Panics if a referenced stack frame or class cannot be resolved.
fn render_thread_info(&self) -> String {
    let mut thread_info = String::new();
    // for each stacktrace
    let mut stack_traces: Vec<_> = self
        .stack_trace_by_serial_number
        .iter()
        .filter(|(_, stack)| !stack.stack_frame_ids.is_empty()) // omit empty stacktraces
        .collect();
    // sort for deterministic output across runs
    stack_traces.sort_by_key(|(serial_number, _)| **serial_number);
    writeln!(
        thread_info,
        "\nFound {} threads with stacktraces:",
        stack_traces.len()
    )
    .expect("Could not write to thread info");
    for (index, (_id, stack_data)) in stack_traces.iter().enumerate() {
        write!(thread_info, "\nThread {}\n", index + 1)
            .expect("Could not write to thread info");
        // for each stack frames
        for stack_frame_id in &stack_data.stack_frame_ids {
            let stack_frame = self.stack_frame_by_id.get(stack_frame_id).unwrap();
            // frame -> class serial number -> class data -> class object id
            let class_object_id = self
                .class_data_by_serial_number
                .get(&stack_frame.class_serial_number)
                .and_then(|index| self.class_data.get(*index))
                .expect("Class not found")
                .class_object_id;
            let class_name = self.get_class_name_string(class_object_id);
            let method_name = self
                .utf8_strings_by_id
                .get(&stack_frame.method_name_id)
                .map_or("unknown method name", |b| &**b);
            let file_name = self
                .utf8_strings_by_id
                .get(&stack_frame.source_file_name_id)
                .map_or("unknown source file", |b| &**b);
            // line_number encoding per hprof spec:
            // >0: normal
            // -1: unknown
            // -2: compiled method
            // -3: native method
            let pretty_line_number = match stack_frame.line_number {
                -1 => "unknown line number".to_string(),
                -2 => "compiled method".to_string(),
                -3 => "native method".to_string(),
                number => format!("{number}"),
            };
            // pretty frame output
            writeln!(
                thread_info,
                "    at {class_name}.{method_name} ({file_name}:{pretty_line_number})"
            )
            .expect("Could not write to thread info");
        }
    }
    thread_info
}
/// Estimate per-class memory usage from the captured counters: class
/// instances (shallow size including superclasses + header + padding),
/// primitive arrays (header + packed elements), and object arrays
/// (header + element references).
fn aggregate_memory_usage(&self) -> Vec<ClassAllocationStats> {
    // https://www.baeldung.com/java-memory-layout
    // total_size = object_header + data
    // on a 64-bit arch.
    // object_header = mark(ref_size) + klass(4) + padding_gap(4) = 16 bytes
    // data = instance_size + padding_next(??)
    let object_header = self.id_size + 4 + 4;
    let mut classes_dump_vec: Vec<_> = self
        .classes_all_instance_total_size_by_id
        .iter()
        .map(|(class_id, v)| {
            let class_name = self.get_class_name_string(*class_id);
            let mut size = 0;
            let ClassInfo {
                super_class_object_id,
                instance_size,
            } = self
                .classes_single_instance_size_by_id
                .get(class_id)
                .unwrap();
            let mut parent_class_id = *super_class_object_id;
            size += instance_size;
            // recursively add sizes from parent classes
            while parent_class_id != 0 {
                let ClassInfo {
                    super_class_object_id,
                    instance_size,
                } = self
                    .classes_single_instance_size_by_id
                    .get(&parent_class_id)
                    .unwrap();
                size += instance_size;
                parent_class_id = *super_class_object_id;
            }
            // add object header
            size += object_header;
            // add extra padding if any
            // NOTE(review): `size.rem_euclid(8)` adds `size % 8`, which only
            // equals true 8-byte-alignment padding when `size % 8` is 0 or 4 —
            // confirm this is the intended estimate.
            size += size.rem_euclid(8);
            let total_size = u64::from(size) * v.number_of_instances;
            ClassAllocationStats::new(
                class_name,
                v.number_of_instances,
                u64::from(size), // all instances have the same size
                total_size,
            )
        })
        .collect();
    // https://www.baeldung.com/java-memory-layout
    // the array's `elements` size is already accounted for via `GcInstanceDump` for objects
    // unlike primitives which are packed in the array itself
    // array headers already aligned for 64-bit arch - no need for padding
    // array_header = mark(ref_size) + klass(4) + array_length(4) = 16 bytes
    // data_primitive = primitive_size * length + padding(??)
    // data_object = ref_size * length (no padding because the ref size is already aligned!)
    let ref_size = u64::from(self.id_size);
    let array_header_size = ref_size + 4 + 4;
    let array_primitives_dump_vec =
        self.primitive_array_counters
            .iter()
            .map(|(field_type, &ac)| {
                // e.g. FieldType::Int -> "int[]"
                let primitive_type = format!("{field_type:?}").to_lowercase();
                let primitive_array_label = format!("{primitive_type}[]");
                let primitive_size = primitive_byte_size(*field_type);
                let cost_of_all_array_headers = array_header_size * ac.number_of_arrays;
                let cost_of_all_values = primitive_size * ac.total_number_of_elements;
                // info lost at this point to compute the real padding for each array
                // assume mid-value of 4 bytes per array for an estimation
                let estimated_cost_of_all_padding = ac.number_of_arrays * 4;
                let cost_data_largest_array = primitive_size * u64::from(ac.max_size_seen);
                // NOTE(review): rem_euclid(8) here is `size % 8`, not
                // `(8 - size % 8) % 8` — same caveat as instance padding above.
                let cost_padding_largest_array =
                    (array_header_size + cost_data_largest_array).rem_euclid(8);
                ClassAllocationStats::new(
                    primitive_array_label,
                    ac.number_of_arrays,
                    array_header_size + cost_data_largest_array + cost_padding_largest_array,
                    cost_of_all_array_headers
                        + cost_of_all_values
                        + estimated_cost_of_all_padding,
                )
            });
    // For array of objects we are interested in the total size of the array headers and outgoing elements references
    let array_objects_dump_vec = self.object_array_counters.iter().map(|(class_id, &ac)| {
        let raw_class_name = self.get_class_name_string(*class_id);
        let cleaned_class_name: String = if raw_class_name.starts_with("[L") {
            // remove '[L' prefix and ';' suffix
            raw_class_name
                .chars()
                .skip(2)
                .take(raw_class_name.chars().count() - 3)
                .collect()
        } else if raw_class_name.starts_with("[[L") {
            // NOTE(review): unreachable as written — "[[L" also starts with
            // "[L", so the first branch always wins; confirm intended order.
            // remove '[[L' prefix and ';' suffix
            raw_class_name
                .chars()
                .skip(3)
                .take(raw_class_name.chars().count() - 4)
                .collect()
        } else {
            // TODO: what are those ([[C, [[D, [[B, [[S ...)? boxed primitives are already present
            raw_class_name
        };
        let object_array_label = format!("{cleaned_class_name}[]");
        let cost_of_all_refs = ref_size * ac.total_number_of_elements;
        let cost_of_all_array_headers = array_header_size * ac.number_of_arrays;
        let cost_of_largest_array_refs = ref_size * u64::from(ac.max_size_seen);
        ClassAllocationStats::new(
            object_array_label,
            ac.number_of_arrays,
            array_header_size + cost_of_largest_array_refs,
            cost_of_all_array_headers + cost_of_all_refs,
        )
    });
    // Merge results
    classes_dump_vec.extend(array_primitives_dump_vec);
    classes_dump_vec.extend(array_objects_dump_vec);
    // Sort by class name first for stability in test results :s
    classes_dump_vec.sort_unstable_by(|a, b| b.class_name.cmp(&a.class_name));
    classes_dump_vec
}
/// Render the "File content summary" section: top-level record tag
/// counters followed by the heap-dump segment breakdown.
pub fn render_summary(&self) -> String {
    let top_summary = formatdoc!(
        "\nFile content summary:\n
        UTF-8 Strings: {}
        Classes loaded: {}
        Classes unloaded: {}
        Stack traces: {}
        Stack frames: {}
        Start threads: {}
        Allocation sites: {}
        End threads: {}
        Control settings: {}
        CPU samples: {}",
        self.utf8_strings_by_id.len(),
        self.class_data_by_id.len(),
        self.classes_unloaded,
        self.stack_traces,
        self.stack_frames,
        self.start_threads,
        self.allocation_sites,
        self.end_threads,
        self.control_settings,
        self.cpu_samples
    );
    let heap_summary = formatdoc!(
        "Heap summaries: {}
        {} heap dumps containing in total {} segments:
        ..GC root unknown: {}
        ..GC root thread objects: {}
        ..GC root JNI global: {}
        ..GC root JNI local: {}
        ..GC root Java frame: {}
        ..GC root native stack: {}
        ..GC root sticky class: {}
        ..GC root thread block: {}
        ..GC root monitor used: {}
        ..GC primitive array dump: {}
        ..GC object array dump: {}
        ..GC class dump: {}
        ..GC instance dump: {}",
        self.heap_summaries,
        self.heap_dumps,
        self.heap_dump_segments_all_sub_records,
        self.heap_dump_segments_gc_root_unknown,
        self.heap_dump_segments_gc_root_thread_object,
        self.heap_dump_segments_gc_root_jni_global,
        self.heap_dump_segments_gc_root_jni_local,
        self.heap_dump_segments_gc_root_java_frame,
        self.heap_dump_segments_gc_root_native_stack,
        self.heap_dump_segments_gc_root_sticky_class,
        self.heap_dump_segments_gc_root_thread_block,
        self.heap_dump_segments_gc_root_monitor_used,
        self.heap_dump_segments_gc_primitive_array_dump,
        self.heap_dump_segments_gc_object_array_dump,
        self.heap_dump_segments_gc_class_dump,
        self.heap_dump_segments_gc_instance_dump,
    );
    format!("{top_summary}\n{heap_summary}")
}
}
/// Size in bytes of one element of a primitive JVM field type.
///
/// # Panics
/// Panics on `FieldType::Object`, which is not a primitive and must be
/// handled by the object-array accounting instead.
fn primitive_byte_size(field_type: FieldType) -> u64 {
    match field_type {
        FieldType::Bool | FieldType::Byte => 1,
        FieldType::Short | FieldType::Char => 2,
        FieldType::Int | FieldType::Float => 4,
        FieldType::Long | FieldType::Double => 8,
        FieldType::Object => panic!("object type in primitive array"),
    }
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/errors.rs | src/errors.rs | use std::any::Any;
use thiserror::Error;
/// All error conditions surfaced by hprof-slurp.
#[derive(Error, Debug)]
pub enum HprofSlurpError {
    /// The input file path given on the command line does not exist.
    #[error("input file `{name:?}` not found")]
    InputFileNotFound { name: String },
    /// The `top` argument must be strictly positive.
    #[error("invalid argument `top` - the value should be strictly positive")]
    InvalidTopPositiveInt,
    /// The dump declares a pointer size other than 4 or 8 bytes.
    #[error("invalid pointer size - the value should be either `4` or `8`")]
    InvalidIdSize,
    /// Unexpected bytes directly after the hprof header.
    #[error("invalid content after header")]
    InvalidHeaderSize,
    /// The file does not parse as a valid hprof dump.
    #[error("invalid Hprof file - {message:?}")]
    InvalidHprofFile { message: String },
    /// The pointer size is valid hprof but not supported by this tool.
    #[error("unsupported pointer size - {message:?}")]
    UnsupportedIdSize { message: String },
    /// Wrapped command-line parsing failure.
    #[error("CLI argument error ({e})")]
    ClapError { e: clap::Error },
    /// Wrapped standard I/O failure.
    #[error("standard I/O error ({e})")]
    StdIoError { e: std::io::Error },
    /// Payload recovered from a panicked worker thread join.
    #[error("standard thread error ({e:?})")]
    StdThreadError { e: Box<dyn Any + Send + 'static> },
    /// Wrapped JSON serialization failure.
    #[error("serialization error ({e})")]
    SerdeError { e: serde_json::Error },
}
// Conversions that let `?` lift common error types into `HprofSlurpError`.

impl From<std::io::Error> for HprofSlurpError {
    fn from(e: std::io::Error) -> Self {
        Self::StdIoError { e }
    }
}

impl From<clap::Error> for HprofSlurpError {
    fn from(e: clap::Error) -> Self {
        Self::ClapError { e }
    }
}

impl From<serde_json::Error> for HprofSlurpError {
    fn from(e: serde_json::Error) -> Self {
        Self::SerdeError { e }
    }
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/rendered_result.rs | src/rendered_result.rs | use std::{fmt::Write, fs::File, io::BufWriter};
use chrono::Utc;
use serde::Serialize;
use crate::{errors::HprofSlurpError, utils::pretty_bytes_size};
/// Per-class allocation statistics rendered in the report and JSON output.
#[derive(Serialize, Clone)]
pub struct ClassAllocationStats {
    // Display name of the class (or array label such as "int[]").
    pub class_name: String,
    // Number of instances (or arrays) observed.
    pub instance_count: u64,
    // Size in bytes of the largest single allocation seen.
    pub largest_allocation_bytes: u64,
    // Total bytes attributed to this class across all instances.
    pub allocation_size_bytes: u64,
}

impl ClassAllocationStats {
    /// Bundle the four statistics into one record.
    pub const fn new(
        class_name: String,
        instance_count: u64,
        largest_allocation_bytes: u64,
        allocation_size_bytes: u64,
    ) -> Self {
        Self {
            class_name,
            instance_count,
            largest_allocation_bytes,
            allocation_size_bytes,
        }
    }
}
/// Shape of the JSON output file: the top-N classes under two rankings.
#[derive(Serialize)]
pub struct JsonResult {
    // Ranked by total allocated bytes, descending.
    top_allocated_classes: Vec<ClassAllocationStats>,
    // Ranked by largest single instance, descending.
    top_largest_instances: Vec<ClassAllocationStats>,
}
impl JsonResult {
    /// Build the JSON payload: the `top` classes by total allocated bytes
    /// and the `top` classes by largest single instance.
    ///
    /// Note: sorts `memory_usage` in place (twice) as a side effect.
    pub fn new(memory_usage: &mut [ClassAllocationStats], top: usize) -> Self {
        // top allocated
        memory_usage.sort_by(|a, b| b.allocation_size_bytes.cmp(&a.allocation_size_bytes));
        let top_allocated_classes = memory_usage.iter().take(top).cloned().collect();
        // Top largest instances
        memory_usage.sort_by(|a, b| b.largest_allocation_bytes.cmp(&a.largest_allocation_bytes));
        let top_largest_instances = memory_usage.iter().take(top).cloned().collect();
        Self {
            top_allocated_classes,
            top_largest_instances,
        }
    }

    /// Write the result as `hprof-slurp-<millis>.json` in the current
    /// working directory and print the chosen file name.
    ///
    /// # Errors
    /// Propagates file-creation and JSON serialization failures.
    pub fn save_as_file(&self) -> Result<(), HprofSlurpError> {
        let file_path = format!("hprof-slurp-{}.json", Utc::now().timestamp_millis());
        let file = File::create(&file_path)?;
        let writer = BufWriter::new(file);
        // Serialize the struct directly to the file via the writer
        serde_json::to_writer(writer, &self)?;
        println!("Output JSON result file {file_path}");
        Ok(())
    }
}
/// Fully rendered analysis produced by the recorder thread.
pub struct RenderedResult {
    // "File content summary" section.
    pub summary: String,
    // Per-thread stack trace listing.
    pub thread_info: String,
    // Per-class allocation statistics, rendered/sorted by the consumer.
    pub memory_usage: Vec<ClassAllocationStats>,
    // Duplicated-string report, present only when duplicates were found.
    pub duplicated_strings: Option<String>,
    // Full string listing, present only when it was requested.
    pub captured_strings: Option<String>,
}
impl RenderedResult {
pub fn serialize(self, top: usize) -> String {
let Self {
summary,
thread_info,
mut memory_usage,
duplicated_strings,
captured_strings,
} = self;
let memory = Self::render_memory_usage(&mut memory_usage, top);
let mut result = format!("{summary}\n{thread_info}\n{memory}");
if let Some(duplicated_strings) = duplicated_strings {
writeln!(result, "{duplicated_strings}").expect("write should not fail");
}
if let Some(list_strings) = captured_strings {
write!(result, "{list_strings}").expect("write should not fail");
}
result
}
/// Render the heap-total banner plus the two top-N tables (by total
/// allocated bytes and by largest instance). Sorts `memory_usage` in
/// place for each ranking.
fn render_memory_usage(memory_usage: &mut Vec<ClassAllocationStats>, top: usize) -> String {
    let mut analysis = String::new();

    // Banner with the grand total of allocated bytes.
    let total_size = memory_usage
        .iter()
        .map(|stats| stats.allocation_size_bytes)
        .sum();
    let display_total_size = pretty_bytes_size(total_size);
    writeln!(
        analysis,
        "Found a total of {display_total_size} of instances allocated on the heap."
    )
    .expect("Could not write to analysis");

    // Top allocated classes analysis
    writeln!(analysis, "\nTop {top} allocated classes:\n")
        .expect("Could not write to analysis");
    memory_usage.sort_by(|a, b| b.allocation_size_bytes.cmp(&a.allocation_size_bytes));
    Self::render_table(top, &mut analysis, memory_usage.as_slice());

    // Top largest instances analysis
    writeln!(analysis, "\nTop {top} largest instances:\n")
        .expect("Could not write to analysis");
    memory_usage.sort_by(|a, b| b.largest_allocation_bytes.cmp(&a.largest_allocation_bytes));
    Self::render_table(top, &mut analysis, memory_usage.as_slice());

    analysis
}
    // Renders an ASCII table of allocation statistics into `analysis`.
    // Columns: total size, instance count, largest single allocation, class name.
    // Only the first `top` rows of `rows` are rendered.
    fn render_table(top: usize, analysis: &mut String, rows: &[ClassAllocationStats]) {
        // Pre-format each row as (total size, count, largest allocation, class name)
        // so column widths can be computed from the rendered strings.
        let rows_formatted: Vec<_> = rows
            .iter()
            .take(top)
            .map(|class_allocation_stats| {
                let display_allocation =
                    pretty_bytes_size(class_allocation_stats.allocation_size_bytes);
                let largest_display_allocation =
                    pretty_bytes_size(class_allocation_stats.largest_allocation_bytes);
                (
                    display_allocation,
                    class_allocation_stats.instance_count,
                    largest_display_allocation,
                    &class_allocation_stats.class_name,
                )
            })
            .collect();
        // For each column: compute the padding that widens the header to the
        // widest cell, then record the final column width in characters.
        let total_size_header = "Total size";
        let total_size_header_padding = Self::padding_for_header(
            rows_formatted.as_slice(),
            |r| r.0.clone(),
            total_size_header,
        );
        let total_size_len =
            total_size_header.chars().count() + total_size_header_padding.chars().count();
        let instance_count_header = "Instances";
        let instance_count_header_padding = Self::padding_for_header(
            rows_formatted.as_slice(),
            |r| r.1.to_string(),
            instance_count_header,
        );
        let instance_len =
            instance_count_header.chars().count() + instance_count_header_padding.chars().count();
        let largest_instance_header = "Largest";
        let largest_instance_padding = Self::padding_for_header(
            rows_formatted.as_slice(),
            |r| r.2.clone(),
            largest_instance_header,
        );
        let largest_len =
            largest_instance_header.chars().count() + largest_instance_padding.chars().count();
        let class_name_header = "Class name";
        let class_name_padding = Self::padding_for_header(
            rows_formatted.as_slice(),
            |r| r.3.clone(),
            class_name_header,
        );
        let class_name_len = class_name_header.chars().count() + class_name_padding.chars().count();
        // Headers with padding applied (numeric columns right-aligned,
        // class name left-aligned).
        let total_size_header = format!(" {total_size_header_padding}{total_size_header} ");
        let instance_count_header =
            format!(" {instance_count_header_padding}{instance_count_header} ");
        let largest_instance_header =
            format!(" {largest_instance_padding}{largest_instance_header} ",);
        let class_name_header = format!(" {class_name_header}{class_name_padding} ");
        // render line before header
        Self::render_table_vertical_line(
            analysis,
            &total_size_header,
            &instance_count_header,
            &largest_instance_header,
            &class_name_header,
        );
        // render header
        writeln!(analysis, "|{total_size_header}|{instance_count_header}|{largest_instance_header}|{class_name_header}|").expect("Could not write to analysis");
        // render line after header
        Self::render_table_vertical_line(
            analysis,
            &total_size_header,
            &instance_count_header,
            &largest_instance_header,
            &class_name_header,
        );
        // Render data rows, padding each cell to its column width.
        for (allocation_size, count, largest_allocation_size, class_name) in rows_formatted {
            let padding_size_str = Self::column_padding(&allocation_size, total_size_len);
            let padding_count_str = Self::column_padding(&count.to_string(), instance_len);
            let padding_largest_size_str =
                Self::column_padding(&largest_allocation_size, largest_len);
            let padding_largest_class_name_str = Self::column_padding(class_name, class_name_len);
            writeln!(analysis, "| {padding_size_str}{allocation_size} | {padding_count_str}{count} | {padding_largest_size_str}{largest_allocation_size} | {class_name}{padding_largest_class_name_str} |").expect("Could not write to analysis");
        }
        // render line after rows
        Self::render_table_vertical_line(
            analysis,
            &total_size_header,
            &instance_count_header,
            &largest_instance_header,
            &class_name_header,
        );
    }
pub fn render_table_vertical_line(
analysis: &mut String,
total_size_header: &str,
instance_count_header: &str,
largest_instance_header: &str,
class_name_header: &str,
) {
analysis.push('+');
analysis.push_str(&("-".repeat(total_size_header.chars().count())));
analysis.push('+');
analysis.push_str(&("-".repeat(instance_count_header.chars().count())));
analysis.push('+');
analysis.push_str(&("-".repeat(largest_instance_header.chars().count())));
analysis.push('+');
analysis.push_str(&("-".repeat(class_name_header.chars().count())));
analysis.push('+');
analysis.push('\n');
}
fn padding_for_header<F>(
rows: &[(String, u64, String, &String)],
field_selector: F,
header_label: &str,
) -> String
where
F: Fn(&(String, u64, String, &String)) -> String,
{
let max_elem_size = rows
.iter()
.map(|d| field_selector(d).chars().count())
.max_by(std::cmp::Ord::cmp)
.expect("Results can't be empty");
Self::column_padding(header_label, max_elem_size)
}
fn column_padding(column_name: &str, max_item_length: usize) -> String {
let column_label_len = column_name.chars().count();
let padding_size = max_item_length.saturating_sub(column_label_len);
" ".repeat(padding_size)
}
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/args.rs | src/args.rs | use crate::errors::HprofSlurpError;
use crate::errors::HprofSlurpError::{InputFileNotFound, InvalidTopPositiveInt};
use clap::{Arg, Command};
use clap::{crate_authors, crate_description, crate_name, crate_version};
use std::path::Path;
/// Builds the clap `Command` describing the CLI: required `--inputFile`,
/// optional `--top` (default 20), and the `--debug`, `--listStrings` and
/// `--json` flags.
fn command() -> Command {
    Command::new(crate_name!())
        .version(crate_version!())
        .author(crate_authors!("\n"))
        .about(crate_description!())
        .arg(
            Arg::new("inputFile")
                .help("binary hprof input file")
                .long("inputFile")
                .short('i')
                .num_args(1)
                .required(true),
        )
        .arg(
            Arg::new("top")
                .help("the top results to display")
                .long("top")
                .short('t')
                .num_args(1)
                .default_value("20")
                // parsed directly as usize so `get_args` can read it typed
                .value_parser(clap::value_parser!(usize))
                .required(false),
        )
        .arg(
            Arg::new("debug")
                .help("debug info")
                .long("debug")
                .short('d')
                .action(clap::ArgAction::SetTrue),
        )
        .arg(
            Arg::new("listStrings")
                .help("list all Strings found")
                .long("listStrings")
                .short('l')
                .action(clap::ArgAction::SetTrue),
        )
        .arg(
            Arg::new("json")
                .help("additional JSON output in file")
                .long("json")
                .action(clap::ArgAction::SetTrue),
        )
}
/// Parses and validates the commandline arguments.
///
/// # Errors
/// - `InputFileNotFound` when the given input path is not an existing file
/// - `InvalidTopPositiveInt` when `--top` is zero
pub fn get_args() -> Result<Args, HprofSlurpError> {
    let matches = command().get_matches();
    // clap guarantees presence of required/defaulted values, hence the expects.
    let input_path = matches
        .get_one::<String>("inputFile")
        .expect("impossible")
        .trim();
    if !Path::new(&input_path).is_file() {
        return Err(InputFileNotFound {
            name: input_path.to_string(),
        });
    }
    let top = *matches.get_one::<usize>("top").expect("impossible");
    if top == 0 {
        return Err(InvalidTopPositiveInt);
    }
    Ok(Args {
        file_path: input_path.to_string(),
        top,
        debug: matches.get_flag("debug"),
        list_strings: matches.get_flag("listStrings"),
        json_output: matches.get_flag("json"),
    })
}
/// Validated commandline arguments.
pub struct Args {
    // path to the binary hprof input file (verified to exist)
    pub file_path: String,
    // number of entries shown in the top-N tables (validated > 0)
    pub top: usize,
    // print each record tag while parsing
    pub debug: bool,
    // list all Strings found in the dump
    pub list_strings: bool,
    // additionally write the memory-usage result as a JSON file
    pub json_output: bool,
}
#[cfg(test)]
mod args_tests {
    use crate::args::command;
    /// clap self-check: panics if the command definition is internally
    /// inconsistent (duplicate ids, bad defaults, ...).
    #[test]
    fn verify_command() {
        command().debug_assert();
    }
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/utils.rs | src/utils.rs | const KILOBYTE: f64 = 1024.0;
const MEGABYTE: f64 = KILOBYTE * KILOBYTE;
const GIGABYTE: f64 = KILOBYTE * MEGABYTE;
pub fn pretty_bytes_size(len: u64) -> String {
let float_len = len as f64;
let (unit, value) = if float_len > GIGABYTE {
("GiB", float_len / GIGABYTE)
} else if float_len > MEGABYTE {
("MiB", float_len / MEGABYTE)
} else if float_len > KILOBYTE {
("KiB", float_len / KILOBYTE)
} else {
("bytes", float_len)
};
format!("{value:.2}{unit}")
}
#[cfg(test)]
mod tests {
    use super::pretty_bytes_size;
    // Checks GiB formatting with two decimals.
    #[test]
    fn pretty_size_gb() {
        let size: u64 = 1_200_000_000;
        assert_eq!(pretty_bytes_size(size), "1.12GiB");
    }
    // Checks MiB formatting with two decimals.
    #[test]
    fn pretty_size_mb() {
        let size: u64 = 1_200_000;
        assert_eq!(pretty_bytes_size(size), "1.14MiB");
    }
    // Checks KiB formatting with two decimals.
    #[test]
    fn pretty_size_kb() {
        let size: u64 = 1_200;
        assert_eq!(pretty_bytes_size(size), "1.17KiB");
    }
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/main.rs | src/main.rs | mod args;
mod errors;
mod parser;
mod prefetch_reader;
mod rendered_result;
mod result_recorder;
mod slurp;
mod utils;
use std::time::Instant;
use rendered_result::JsonResult;
use crate::args::Args;
use crate::args::get_args;
use crate::errors::HprofSlurpError;
use crate::slurp::slurp_file;
/// Entry point: runs the analysis and maps the outcome onto a process
/// exit code (0 on success, 1 on error with a message on stderr).
fn main() {
    let exit_code = match main_result() {
        Ok(()) => 0,
        Err(err) => {
            eprintln!("error: {err}");
            1
        }
    };
    std::process::exit(exit_code);
}
/// Parses CLI arguments, processes the hprof file, optionally writes the
/// JSON report, and prints the rendered analysis plus total elapsed time.
///
/// # Errors
/// Propagates argument-validation and file-processing failures.
fn main_result() -> Result<(), HprofSlurpError> {
    let now = Instant::now();
    let Args {
        file_path,
        top,
        debug,
        list_strings,
        json_output,
    } = get_args()?;
    let mut rendered_result = slurp_file(file_path, debug, list_strings)?;
    if json_output {
        // only memory usage rendered for now
        let json_result = JsonResult::new(&mut rendered_result.memory_usage, top);
        json_result.save_as_file()?;
    }
    print!("{}", rendered_result.serialize(top));
    println!("File successfully processed in {:?}", now.elapsed());
    Ok(())
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/prefetch_reader.rs | src/prefetch_reader.rs | use crossbeam_channel::{Receiver, Sender};
use std::fs::File;
use std::io::{BufReader, Read};
use std::thread;
use std::thread::JoinHandle;
/// Reads the input file ahead of the parser on a dedicated thread,
/// recycling chunk buffers through a channel-based pool.
pub struct PrefetchReader {
    // buffered reader, positioned just after the file header
    reader: BufReader<File>,
    // total file length in bytes
    file_len: usize,
    // number of bytes already consumed/handed out
    processed_len: usize,
    // size of each chunk read from disk
    read_size: usize,
}
impl PrefetchReader {
    /// Creates a pre-fetcher over `reader`.
    ///
    /// `file_len` is the total file size, `processed_len` the bytes already
    /// consumed (the file header), `read_size` the chunk size per read.
    pub const fn new(
        reader: BufReader<File>,
        file_len: usize,
        processed_len: usize,
        read_size: usize,
    ) -> Self {
        Self {
            reader,
            file_len,
            processed_len,
            read_size,
        }
    }
    /// Spawns the pre-fetch thread.
    ///
    /// The thread repeatedly takes a pooled buffer from `receive_pooled_data`,
    /// fills it with the next chunk of the file and sends it on `send_data`,
    /// until `file_len` bytes have been forwarded.
    ///
    /// # Errors
    /// Returns an error when the OS fails to spawn the thread.
    ///
    /// # Panics
    /// The spawned thread panics if a channel is closed prematurely or the
    /// file ends before `file_len` bytes could be read.
    pub fn start(
        mut self,
        send_data: Sender<Vec<u8>>,
        receive_pooled_data: Receiver<Vec<u8>>,
    ) -> std::io::Result<JoinHandle<()>> {
        thread::Builder::new()
            .name("hprof-prefetch".to_string())
            .spawn(move || {
                while self.processed_len != self.file_len {
                    let remaining = self.file_len - self.processed_len;
                    // the last chunk may be shorter than `read_size`
                    let next_size = if remaining > self.read_size {
                        self.read_size
                    } else {
                        remaining
                    };
                    // reuse a pooled buffer to avoid per-chunk allocations
                    let mut pooled_buffer = receive_pooled_data
                        .recv()
                        .expect("channel should not be closed");
                    pooled_buffer.resize(next_size, 0);
                    self.reader
                        .read_exact(&mut pooled_buffer)
                        .unwrap_or_else(|e| {
                            panic!(
                                "Fail to read buffer for incomplete input:\n
                                error->{}\n
                                next->{}\n
                                processed->{}\n
                                file_len->{}\n
                                remaining->{}",
                                e,
                                next_size,
                                self.processed_len,
                                self.file_len,
                                self.file_len - self.processed_len
                            )
                        });
                    send_data
                        .send(pooled_buffer)
                        .expect("Channel should not be closed");
                    self.processed_len += next_size;
                }
            })
    }
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/slurp.rs | src/slurp.rs | use std::fs::File;
use std::io::{BufReader, Read};
use indicatif::{ProgressBar, ProgressStyle};
use crossbeam_channel::{Receiver, Sender};
use crate::errors::HprofSlurpError;
use crate::errors::HprofSlurpError::{
InvalidHeaderSize, InvalidHprofFile, InvalidIdSize, StdThreadError, UnsupportedIdSize,
};
use crate::parser::file_header_parser::{FileHeader, parse_file_header};
use crate::parser::record::Record;
use crate::parser::record_stream_parser::HprofRecordStreamParser;
use crate::prefetch_reader::PrefetchReader;
use crate::rendered_result::RenderedResult;
use crate::result_recorder::ResultRecorder;
use crate::utils::pretty_bytes_size;
// the exact size of the hprof file header (31 bytes), skipped before streaming records
const FILE_HEADER_LENGTH: usize = 31;
// 64 MB buffer performs nicely (higher is faster but increases the memory consumption)
pub const READ_BUFFER_SIZE: usize = 64 * 1024 * 1024;
/// Processes the hprof file at `file_path` and returns the rendered analysis.
///
/// Pipeline: a pre-fetch thread reads raw chunks from disk, a parser thread
/// turns them into records, and a recorder thread aggregates the results;
/// byte buffers and record vectors are pooled and recycled between threads to
/// limit allocations. The calling thread drives a progress bar until the
/// parser drops its progress channel.
///
/// # Errors
/// Fails on I/O errors, an invalid/unsupported file header, or when a worker
/// thread cannot be spawned or joined.
pub fn slurp_file(
    file_path: String,
    debug_mode: bool,
    list_strings: bool,
) -> Result<RenderedResult, HprofSlurpError> {
    let file = File::open(file_path)?;
    let file_len = file.metadata()?.len() as usize;
    let mut reader = BufReader::new(file);
    // Parse file header
    let header = slurp_header(&mut reader)?;
    let id_size = header.size_pointers;
    println!(
        "Processing {} binary hprof file in '{}' format.",
        pretty_bytes_size(file_len as u64),
        header.format
    );
    // Communication channel from pre-fetcher to parser
    let (send_data, receive_data): (Sender<Vec<u8>>, Receiver<Vec<u8>>) =
        crossbeam_channel::unbounded();
    // Communication channel from parser to pre-fetcher (pooled input buffers)
    let (send_pooled_data, receive_pooled_data): (Sender<Vec<u8>>, Receiver<Vec<u8>>) =
        crossbeam_channel::unbounded();
    // Init pooled binary data with more than 1 element to enable the reader to make progress interdependently
    for _ in 0..2 {
        send_pooled_data
            .send(Vec::with_capacity(READ_BUFFER_SIZE))
            .expect("pre-fetcher channel should be alive");
    }
    // Communication channel from parser to recorder
    let (send_records, receive_records): (Sender<Vec<Record>>, Receiver<Vec<Record>>) =
        crossbeam_channel::unbounded();
    // Communication channel from recorder to parser (pooled record buffers)
    let (send_pooled_vec, receive_pooled_vec): (Sender<Vec<Record>>, Receiver<Vec<Record>>) =
        crossbeam_channel::unbounded();
    // Communication channel from recorder to main
    let (send_result, receive_result): (Sender<RenderedResult>, Receiver<RenderedResult>) =
        crossbeam_channel::unbounded();
    // Communication channel from parser to main
    let (send_progress, receive_progress): (Sender<usize>, Receiver<usize>) =
        crossbeam_channel::unbounded();
    // Init pre-fetcher
    let prefetcher = PrefetchReader::new(reader, file_len, FILE_HEADER_LENGTH, READ_BUFFER_SIZE);
    let prefetch_thread = prefetcher.start(send_data, receive_pooled_data)?;
    // Init pooled result vec
    send_pooled_vec
        .send(Vec::new())
        .expect("recorder channel should be alive");
    // Init stream parser
    let initial_loop_buffer = Vec::with_capacity(READ_BUFFER_SIZE); // will be added to the data pool after the first chunk
    let stream_parser = HprofRecordStreamParser::new(
        debug_mode,
        file_len,
        FILE_HEADER_LENGTH,
        initial_loop_buffer,
    );
    // Start stream parser
    let parser_thread = stream_parser.start(
        receive_data,
        send_pooled_data,
        send_progress,
        receive_pooled_vec,
        send_records,
    )?;
    // Init result recorder
    let result_recorder = ResultRecorder::new(id_size, list_strings);
    let recorder_thread = result_recorder.start(receive_records, send_result, send_pooled_vec)?;
    // Init progress bar
    let pb = ProgressBar::new(file_len as u64);
    pb.set_style(ProgressStyle::default_bar()
        .template("[{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} (speed:{bytes_per_sec}) (eta:{eta})")
        .expect("templating should never fail")
        .progress_chars("#>-"));
    // Feed progress bar until the parser drops its progress sender
    while let Ok(processed) = receive_progress.recv() {
        pb.set_position(processed as u64);
    }
    // Finish and remove progress bar
    pb.finish_and_clear();
    // Wait for final result
    let rendered_result = receive_result
        .recv()
        .expect("result channel should be alive");
    // Blocks until pre-fetcher is done
    prefetch_thread.join().map_err(|e| StdThreadError { e })?;
    // Blocks until parser is done
    parser_thread.join().map_err(|e| StdThreadError { e })?;
    // Blocks until recorder is done
    recorder_thread.join().map_err(|e| StdThreadError { e })?;
    Ok(rendered_result)
}
/// Reads and validates the fixed-size hprof file header.
///
/// # Errors
/// - `InvalidHprofFile` when the header bytes cannot be parsed
/// - `UnsupportedIdSize` for 32-bit (4-byte identifier) heap dumps
/// - `InvalidIdSize` when the identifier size is neither 4 nor 8 bytes
/// - `InvalidHeaderSize` when parsing left unconsumed header bytes
pub fn slurp_header(reader: &mut BufReader<File>) -> Result<FileHeader, HprofSlurpError> {
    let mut header_buffer = vec![0; FILE_HEADER_LENGTH];
    reader.read_exact(&mut header_buffer)?;
    let (rest, header) = parse_file_header(&header_buffer).map_err(|e| InvalidHprofFile {
        message: format!("{e:?}"),
    })?;
    // Enforce invariants: only 8-byte identifiers are supported.
    match header.size_pointers {
        8 => {}
        4 => {
            return Err(UnsupportedIdSize {
                message: "32 bits heap dumps are not supported yet".to_string(),
            });
        }
        _ => return Err(InvalidIdSize),
    }
    if !rest.is_empty() {
        return Err(InvalidHeaderSize);
    }
    Ok(header)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    const FILE_PATH_32: &str = "test-heap-dumps/hprof-32.bin";
    const FILE_PATH_64: &str = "test-heap-dumps/hprof-64.bin";
    const FILE_PATH_RESULT_64: &str = "test-heap-dumps/hprof-64-result.txt";
    // Compares the rendered result line-by-line against a gold file,
    // printing the first mismatching pair before failing.
    fn validate_gold_rendered_result(render_result: RenderedResult, gold_path: &str) {
        let gold = fs::read_to_string(gold_path).expect("gold file not found!");
        // top 20 hardcoded
        let expected = render_result.serialize(20);
        let mut expected_lines = expected.lines();
        for (i1, l1) in gold.lines().enumerate() {
            let l2 = expected_lines.next().unwrap();
            if l1.trim_end() != l2.trim_end() {
                println!("## GOLD line {} ##", i1 + 1);
                println!("{}", l1.trim_end());
                println!("## ACTUAL ##");
                println!("{}", l2.trim_end());
                println!("#####");
                assert_eq!(l1, l2);
            }
        }
    }
    // 32-bit dumps are rejected by the header invariant check.
    #[test]
    fn unsupported_32_bits() {
        let file_path = FILE_PATH_32.to_string();
        let result = slurp_file(file_path, false, false);
        assert!(result.is_err());
    }
    // Full end-to-end run on a 64-bit dump, checked against the gold output.
    #[test]
    fn supported_64_bits() {
        let file_path = FILE_PATH_64.to_string();
        let result = slurp_file(file_path, false, false);
        assert!(result.is_ok());
        validate_gold_rendered_result(result.unwrap(), FILE_PATH_RESULT_64);
    }
    // Header parsing alone rejects the 32-bit dump.
    #[test]
    fn file_header_32_bits() {
        let file_path = FILE_PATH_32.to_string();
        let file = File::open(file_path).unwrap();
        let mut reader = BufReader::new(file);
        let result = slurp_header(&mut reader);
        assert!(result.is_err());
    }
    // Header parsing accepts the 64-bit dump and exposes its format string.
    #[test]
    fn file_header_64_bits() {
        let file_path = FILE_PATH_64.to_string();
        let file = File::open(file_path).unwrap();
        let mut reader = BufReader::new(file);
        let file_header = slurp_header(&mut reader).unwrap();
        assert_eq!(file_header.size_pointers, 8);
        assert_eq!(file_header.format, "JAVA PROFILE 1.0.1".to_string());
    }
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/primitive_parsers.rs | src/parser/primitive_parsers.rs | use nom::Parser;
use nom::sequence::terminated;
use nom::{IResult, bytes, number};
/// Parses a NUL-terminated byte string, returning the bytes before the
/// terminator and consuming the terminator itself (streaming: returns
/// `Incomplete` until a NUL is seen).
pub fn parse_c_string(i: &[u8]) -> IResult<&[u8], &[u8]> {
    terminated(
        bytes::streaming::take_until("\0"),
        bytes::streaming::tag("\0"),
    )
    .parse(i)
}
// Thin wrappers over nom's big-endian streaming number parsers.
// All return `Incomplete` when the input is shorter than the number's width.
pub fn parse_i8(i: &[u8]) -> IResult<&[u8], i8> {
    number::streaming::be_i8(i)
}
pub fn parse_i16(i: &[u8]) -> IResult<&[u8], i16> {
    number::streaming::be_i16(i)
}
pub fn parse_i32(i: &[u8]) -> IResult<&[u8], i32> {
    number::streaming::be_i32(i)
}
pub fn parse_i64(i: &[u8]) -> IResult<&[u8], i64> {
    number::streaming::be_i64(i)
}
pub fn parse_u16(i: &[u8]) -> IResult<&[u8], u16> {
    number::streaming::be_u16(i)
}
pub fn parse_u32(i: &[u8]) -> IResult<&[u8], u32> {
    number::streaming::be_u32(i)
}
pub fn parse_u64(i: &[u8]) -> IResult<&[u8], u64> {
    number::streaming::be_u64(i)
}
pub fn parse_f32(i: &[u8]) -> IResult<&[u8], f32> {
    number::streaming::be_f32(i)
}
pub fn parse_f64(i: &[u8]) -> IResult<&[u8], f64> {
    number::streaming::be_f64(i)
}
pub fn parse_u8(i: &[u8]) -> IResult<&[u8], u8> {
    number::streaming::be_u8(i)
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/record_parser.rs | src/parser/record_parser.rs | extern crate nom;
use crate::parser::gc_record::{
ArrayValue, ClassDumpFields, ConstFieldInfo, FieldInfo, FieldType, FieldValue, GcRecord,
};
use crate::parser::primitive_parsers::{
parse_f32, parse_f64, parse_i8, parse_i16, parse_i32, parse_i64, parse_u8, parse_u16,
parse_u32, parse_u64,
};
use crate::parser::record::{
AllocationSite, CpuSample, LoadClassData, Record, RecordHeader, StackFrameData, StackTraceData,
};
use crate::parser::record_parser::GcRecord::{
ClassDump, InstanceDump, ObjectArrayDump, PrimitiveArrayDump, RootJavaFrame, RootJniGlobal,
RootJniLocal, RootMonitorUsed, RootNativeStack, RootStickyClass, RootThreadBlock,
RootThreadObject, RootUnknown,
};
use crate::parser::record_parser::Record::{
AllocationSites, ControlSettings, CpuSamples, EndThread, GcSegment, HeapDumpEnd, HeapDumpStart,
HeapSummary, LoadClass, StackFrame, StackTrace, StartThread, UnloadClass, Utf8String,
};
use nom::Parser;
use nom::combinator::{flat_map, map};
use nom::error::{ErrorKind, ParseError};
use nom::multi::count;
use nom::sequence::preceded;
use nom::{IResult, bytes};
// Top-level hprof record tags (first byte of each record).
const TAG_STRING: u8 = 0x01;
const TAG_LOAD_CLASS: u8 = 0x02;
const TAG_UNLOAD_CLASS: u8 = 0x03;
const TAG_STACK_FRAME: u8 = 0x04;
const TAG_STACK_TRACE: u8 = 0x05;
const TAG_ALLOC_SITES: u8 = 0x06;
const TAG_HEAP_SUMMARY: u8 = 0x07;
const TAG_START_THREAD: u8 = 0x0A;
const TAG_END_THREAD: u8 = 0x0B;
const TAG_HEAP_DUMP: u8 = 0x0C;
const TAG_HEAP_DUMP_SEGMENT: u8 = 0x1C;
const TAG_HEAP_DUMP_END: u8 = 0x2C;
const TAG_CONTROL_SETTING: u8 = 0x0E;
const TAG_CPU_SAMPLES: u8 = 0x0D;
// GC sub-record tags, valid only inside heap dump (segment) records.
// Note: values overlap with the top-level tags above; the parser's mode
// (heap_dump_remaining_len) decides which table applies.
const TAG_GC_ROOT_UNKNOWN: u8 = 0xFF;
const TAG_GC_ROOT_JNI_GLOBAL: u8 = 0x01;
const TAG_GC_ROOT_JNI_LOCAL: u8 = 0x02;
const TAG_GC_ROOT_JAVA_FRAME: u8 = 0x03;
const TAG_GC_ROOT_NATIVE_STACK: u8 = 0x04;
const TAG_GC_ROOT_STICKY_CLASS: u8 = 0x05;
const TAG_GC_ROOT_THREAD_BLOCK: u8 = 0x06;
const TAG_GC_ROOT_MONITOR_USED: u8 = 0x07;
const TAG_GC_ROOT_THREAD_OBJ: u8 = 0x08;
const TAG_GC_CLASS_DUMP: u8 = 0x20;
const TAG_GC_INSTANCE_DUMP: u8 = 0x21;
const TAG_GC_OBJ_ARRAY_DUMP: u8 = 0x22;
const TAG_GC_PRIM_ARRAY_DUMP: u8 = 0x23;
// TODO currently defaults to 64 bits - use instead caller's `id_size` to change impl. of parse_id to support 32 bits dump.
const ID_SIZE: u32 = 8;
/// Stateful hprof record parser: tracks whether the stream position is inside
/// a heap-dump segment and, if so, how many segment bytes remain.
pub struct HprofRecordParser {
    // print each record tag while parsing
    debug_mode: bool,
    // bytes left in the current heap dump segment; 0 means top-level mode
    heap_dump_remaining_len: u32,
}
impl HprofRecordParser {
    /// Creates a parser in top-level mode; `debug_mode` prints every tag seen.
    pub const fn new(debug_mode: bool) -> Self {
        Self {
            debug_mode,
            heap_dump_remaining_len: 0,
        }
    }
    // TODO use nom combinators (instead of Result's)
    /// Returns a stateful closure parsing a single record.
    ///
    /// Outside a heap dump it dispatches on the tag byte; a HEAP_DUMP(_SEGMENT)
    /// header switches into GC mode by recording the segment length. Inside a
    /// segment it parses GC sub-records and counts down the remaining bytes.
    ///
    /// # Panics
    /// Panics on an unknown record tag.
    pub fn parse_hprof_record(&mut self) -> impl FnMut(&[u8]) -> IResult<&[u8], Record> + '_ {
        |i| {
            if self.heap_dump_remaining_len == 0 {
                parse_u8(i).and_then(|(r1, tag)| {
                    if self.debug_mode {
                        println!("Found record tag:{} remaining bytes:{}", tag, i.len());
                    }
                    match tag {
                        TAG_STRING => parse_utf8_string(r1),
                        TAG_LOAD_CLASS => parse_load_class(r1),
                        TAG_UNLOAD_CLASS => parse_unload_class(r1),
                        TAG_STACK_FRAME => parse_stack_frame(r1),
                        TAG_STACK_TRACE => parse_stack_trace(r1),
                        TAG_ALLOC_SITES => parse_allocation_sites(r1),
                        TAG_HEAP_SUMMARY => parse_heap_summary(r1),
                        TAG_START_THREAD => parse_start_thread(r1),
                        TAG_END_THREAD => parse_end_thread(r1),
                        TAG_CONTROL_SETTING => parse_control_settings(r1),
                        TAG_CPU_SAMPLES => parse_cpu_samples(r1),
                        TAG_HEAP_DUMP_END => parse_heap_dump_end(r1),
                        TAG_HEAP_DUMP | TAG_HEAP_DUMP_SEGMENT => {
                            map(parse_header_record, |hr| {
                                // record expected GC segments length
                                self.heap_dump_remaining_len = hr.length;
                                HeapDumpStart { length: hr.length }
                            })
                            .parse(r1)
                        }
                        x => panic!("{}", format!("unhandled record tag {x}")),
                    }
                })
            } else {
                // GC record mode
                parse_gc_record(i).map(|(r1, gc_sub)| {
                    // consumed bytes = input length before minus after
                    let gc_sub_len = i.len() - r1.len();
                    self.heap_dump_remaining_len -= gc_sub_len as u32;
                    (r1, GcSegment(gc_sub))
                })
            }
        }
    }
    /// Parses as many records as possible from `i` into `pooled_vec`,
    /// stopping gracefully on `Incomplete` input (see `lazy_many1`).
    pub fn parse_streaming<'a>(
        &mut self,
        i: &'a [u8],
        pooled_vec: &mut Vec<Record>,
    ) -> IResult<&'a [u8], ()> {
        lazy_many1(self.parse_hprof_record(), pooled_vec)(i)
    }
}
// TODO change to u32 depending on id_size in header
/// Parses an 8-byte big-endian object/class identifier.
fn parse_id(i: &[u8]) -> IResult<&[u8], u64> {
    parse_u64(i)
}
// copy of nom's many1 but
// - returns values accumulated so far on `nom::Err::Incomplete(_)` if any
// - take a `&mut vector` as input to enable pooling at the call site
/// Applies `f` repeatedly, pushing each result into `pooled_vec`.
///
/// Unlike `many1`, an `Incomplete` after at least one successful parse is
/// treated as a normal stop (the caller will feed more input later); an
/// `Incomplete` on the very first element is propagated so the caller knows
/// no progress was made.
pub fn lazy_many1<'a, I, O, E, F>(
    mut f: F,
    pooled_vec: &'a mut Vec<O>,
) -> impl FnMut(I) -> IResult<I, (), E> + 'a
where
    I: Clone + PartialEq,
    F: Parser<I, Output = O, Error = E> + 'a,
    E: ParseError<I>,
{
    move |mut i: I| match f.parse(i.clone()) {
        Err(nom::Err::Error(err)) => Err(nom::Err::Error(E::append(i, ErrorKind::Many1, err))),
        Err(e) => Err(e),
        Ok((i1, o)) => {
            pooled_vec.push(o);
            i = i1;
            loop {
                match f.parse(i.clone()) {
                    Err(nom::Err::Error(_)) => return Ok((i, ())),
                    // magic line here!
                    // return Ok(acc) if we have seen at least one element, otherwise fail
                    Err(nom::Err::Incomplete(_)) => return Ok((i, ())),
                    Err(e) => return Err(e),
                    Ok((i1, o)) => {
                        // guard against parsers that consume nothing (infinite loop)
                        if i1 == i {
                            return Err(nom::Err::Error(E::from_error_kind(i, ErrorKind::Many1)));
                        }
                        i = i1;
                        pooled_vec.push(o);
                    }
                }
            }
        }
    }
}
/// Parses one GC sub-record by dispatching on its tag byte.
///
/// # Panics
/// Panics on an unknown GC record tag.
fn parse_gc_record(i: &[u8]) -> IResult<&[u8], GcRecord> {
    flat_map(parse_u8, |tag| match tag {
        TAG_GC_ROOT_UNKNOWN => parse_gc_root_unknown,
        TAG_GC_ROOT_JNI_GLOBAL => parse_gc_root_jni_global,
        TAG_GC_ROOT_JNI_LOCAL => parse_gc_root_jni_local,
        TAG_GC_ROOT_JAVA_FRAME => parse_gc_root_java_frame,
        TAG_GC_ROOT_NATIVE_STACK => parse_gc_root_native_stack,
        TAG_GC_ROOT_STICKY_CLASS => parse_gc_root_sticky_class,
        TAG_GC_ROOT_THREAD_BLOCK => parse_gc_root_thread_block,
        TAG_GC_ROOT_MONITOR_USED => parse_gc_root_monitor_used,
        TAG_GC_ROOT_THREAD_OBJ => parse_gc_root_thread_object,
        TAG_GC_CLASS_DUMP => parse_gc_class_dump,
        TAG_GC_INSTANCE_DUMP => parse_gc_instance_dump,
        TAG_GC_OBJ_ARRAY_DUMP => parse_gc_object_array_dump,
        TAG_GC_PRIM_ARRAY_DUMP => parse_gc_primitive_array_dump,
        x => panic!("{}", format!("unhandled gc record tag {x}")),
    })
    .parse(i)
}
// GC root sub-record parsers: each reads the fixed fields of one root kind.
/// Root of unknown origin: just an object id.
fn parse_gc_root_unknown(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(parse_id, |object_id| RootUnknown { object_id }).parse(i)
}
/// Thread object root: thread object id + thread/stack sequence numbers.
fn parse_gc_root_thread_object(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(
        (parse_id, parse_u32, parse_u32),
        |(thread_object_id, thread_sequence_number, stack_sequence_number)| RootThreadObject {
            thread_object_id,
            thread_sequence_number,
            stack_sequence_number,
        },
    )
    .parse(i)
}
/// JNI global reference root: object id + JNI global ref id.
fn parse_gc_root_jni_global(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map((parse_id, parse_id), |(object_id, jni_global_ref_id)| {
        RootJniGlobal {
            object_id,
            jni_global_ref_id,
        }
    })
    .parse(i)
}
/// JNI local reference root: object id + owning thread + frame index.
fn parse_gc_root_jni_local(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(
        (parse_id, parse_u32, parse_u32),
        |(object_id, thread_serial_number, frame_number_in_stack_trace)| RootJniLocal {
            object_id,
            thread_serial_number,
            frame_number_in_stack_trace,
        },
    )
    .parse(i)
}
/// Java stack frame root: object id + owning thread + frame index.
fn parse_gc_root_java_frame(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(
        (parse_id, parse_u32, parse_u32),
        |(object_id, thread_serial_number, frame_number_in_stack_trace)| RootJavaFrame {
            object_id,
            thread_serial_number,
            frame_number_in_stack_trace,
        },
    )
    .parse(i)
}
/// Native stack root: object id + owning thread.
fn parse_gc_root_native_stack(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(
        (parse_id, parse_u32),
        |(object_id, thread_serial_number)| RootNativeStack {
            object_id,
            thread_serial_number,
        },
    )
    .parse(i)
}
/// Sticky class root: just the class object id.
fn parse_gc_root_sticky_class(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(parse_id, |object_id| RootStickyClass { object_id }).parse(i)
}
/// Thread block root: object id + owning thread.
fn parse_gc_root_thread_block(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(
        (parse_id, parse_u32),
        |(object_id, thread_serial_number)| RootThreadBlock {
            object_id,
            thread_serial_number,
        },
    )
    .parse(i)
}
/// Busy-monitor root: just the object id.
fn parse_gc_root_monitor_used(i: &[u8]) -> IResult<&[u8], GcRecord> {
    map(parse_id, |object_id| RootMonitorUsed { object_id }).parse(i)
}
/// Returns a parser for a single field value of the given type
/// (object ids are 8 bytes; primitives use their natural big-endian width).
fn parse_field_value(ty: FieldType) -> impl Fn(&[u8]) -> IResult<&[u8], FieldValue> {
    move |i| match ty {
        FieldType::Object => map(parse_id, FieldValue::Object).parse(i),
        FieldType::Bool => map(parse_u8, |bu8| FieldValue::Bool(bu8 != 0)).parse(i),
        FieldType::Char => map(parse_u16, FieldValue::Char).parse(i),
        FieldType::Float => map(parse_f32, FieldValue::Float).parse(i),
        FieldType::Double => map(parse_f64, FieldValue::Double).parse(i),
        FieldType::Byte => map(parse_i8, FieldValue::Byte).parse(i),
        FieldType::Short => map(parse_i16, FieldValue::Short).parse(i),
        FieldType::Int => map(parse_i32, FieldValue::Int).parse(i),
        FieldType::Long => map(parse_i64, FieldValue::Long).parse(i),
    }
}
#[allow(dead_code)]
// could be used in the future to analyze content of largest arrays
/// Returns a parser materializing all `number_of_elements` values of a
/// primitive array of `element_type`.
///
/// # Panics
/// Panics if `element_type` is `Object` (not a primitive).
fn parse_array_value(
    element_type: FieldType,
    number_of_elements: u32,
) -> impl Fn(&[u8]) -> IResult<&[u8], ArrayValue> {
    move |i| match element_type {
        FieldType::Object => panic!("object type in primitive array"),
        FieldType::Bool => map(count(parse_u8, number_of_elements as usize), |res| {
            ArrayValue::Bool(res.iter().map(|b| *b != 0).collect())
        })
        .parse(i),
        FieldType::Char => map(count(parse_u16, number_of_elements as usize), |res| {
            ArrayValue::Char(res)
        })
        .parse(i),
        FieldType::Float => map(count(parse_f32, number_of_elements as usize), |res| {
            ArrayValue::Float(res)
        })
        .parse(i),
        FieldType::Double => map(count(parse_f64, number_of_elements as usize), |res| {
            ArrayValue::Double(res)
        })
        .parse(i),
        FieldType::Byte => map(count(parse_i8, number_of_elements as usize), |res| {
            ArrayValue::Byte(res)
        })
        .parse(i),
        FieldType::Short => map(count(parse_i16, number_of_elements as usize), |res| {
            ArrayValue::Short(res)
        })
        .parse(i),
        FieldType::Int => map(count(parse_i32, number_of_elements as usize), |res| {
            ArrayValue::Int(res)
        })
        .parse(i),
        FieldType::Long => map(count(parse_i64, number_of_elements as usize), |res| {
            ArrayValue::Long(res)
        })
        .parse(i),
    }
}
/// Returns a parser that skips the raw bytes of a primitive array without
/// materializing the values.
///
/// The element count is widened to `usize` before the byte-length multiply so
/// that very large arrays (e.g. more than 2^29 `i64` elements) cannot
/// overflow the former `u32` arithmetic.
///
/// # Panics
/// Panics if `element_type` is `Object` (not a primitive).
fn skip_array_value(
    element_type: FieldType,
    number_of_elements: u32,
) -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> {
    move |i| {
        // Byte width of a single element of `element_type`.
        let element_width: usize = match element_type {
            FieldType::Object => panic!("object type in primitive array"),
            FieldType::Bool | FieldType::Byte => 1,
            FieldType::Char | FieldType::Short => 2,
            FieldType::Float | FieldType::Int => 4,
            FieldType::Double | FieldType::Long => 8,
        };
        bytes::streaming::take(number_of_elements as usize * element_width)(i)
    }
}
/// Parses one byte and converts it to a `FieldType`.
fn parse_field_type(i: &[u8]) -> IResult<&[u8], FieldType> {
    map(parse_i8, FieldType::from_value).parse(i)
}
/// Parses one constant-pool entry: pool index, value type, then the value.
fn parse_const_pool_item(i: &[u8]) -> IResult<&[u8], (ConstFieldInfo, FieldValue)> {
    flat_map(
        (parse_u16, parse_field_type),
        |(const_pool_idx, const_type)| {
            map(parse_field_value(const_type), move |fv| {
                let const_field_info = ConstFieldInfo {
                    const_pool_idx,
                    const_type,
                };
                (const_field_info, fv)
            })
        },
    )
    .parse(i)
}
/// Parses one static field: name id, type, then the value.
fn parse_static_field_item(i: &[u8]) -> IResult<&[u8], (FieldInfo, FieldValue)> {
    flat_map((parse_id, parse_field_type), |(name_id, field_type)| {
        map(parse_field_value(field_type), move |fv| {
            let field_info = FieldInfo {
                name_id,
                field_type,
            };
            (field_info, fv)
        })
    })
    .parse(i)
}
/// Parses one instance field descriptor: name id and type (no value —
/// instance values live in the instance dump records).
fn parse_instance_field_item(i: &[u8]) -> IResult<&[u8], FieldInfo> {
    map((parse_id, parse_field_type), |(name_id, field_type)| {
        FieldInfo {
            name_id,
            field_type,
        }
    })
    .parse(i)
}
// TODO use nom combinators (instead of Result's)
/// Parses a class dump sub-record: fixed header fields, then the constant
/// pool, static fields and instance field descriptors (each a u16 count
/// followed by that many items).
fn parse_gc_class_dump(i: &[u8]) -> IResult<&[u8], GcRecord> {
    let (
        r1,
        (
            class_object_id,
            stack_trace_serial_number,
            super_class_object_id,
            _class_loader_object_id,
            _signers_object_id,
            _protection_domain_object_id,
            _reserved_1,
            _reserved_2,
            instance_size,
            constant_pool_size,
        ),
    ) = (
        parse_id, parse_u32, parse_id, parse_id, parse_id, parse_id, parse_id, parse_id, parse_u32,
        parse_u16,
    )
        .parse(i)?;
    count(parse_const_pool_item, constant_pool_size as usize)
        .parse(r1)
        .and_then(|(r2, const_fields)| {
            parse_u16(r2).and_then(|(r3, static_fields_number)| {
                count(parse_static_field_item, static_fields_number as usize)
                    .parse(r3)
                    .and_then(|(r4, static_fields)| {
                        parse_u16(r4).and_then(|(r5, instance_field_number)| {
                            count(parse_instance_field_item, instance_field_number as usize)
                                .parse(r5)
                                .map(|(r6, instance_fields)| {
                                    let class_dump_fields = ClassDumpFields::new(
                                        class_object_id,
                                        stack_trace_serial_number,
                                        super_class_object_id,
                                        instance_size,
                                        const_fields,
                                        static_fields,
                                        instance_fields,
                                    );
                                    // boxed: ClassDumpFields is large relative to other variants
                                    let gcd = ClassDump(Box::new(class_dump_fields));
                                    (r6, gcd)
                                })
                        })
                    })
            })
        })
}
/// Parses an instance dump sub-record, skipping over the raw field bytes.
fn parse_gc_instance_dump(i: &[u8]) -> IResult<&[u8], GcRecord> {
    flat_map(
        (parse_id, parse_u32, parse_id, parse_u32),
        |(object_id, stack_trace_serial_number, class_object_id, data_size)| {
            map(bytes::streaming::take(data_size), move |_bytes_segment| {
                // Important: The actual content of the instance cannot be analyzed at this point because we miss the class information!
                // Given that instances are found before the class info in the dump file, it would require two passes on the
                // dump file with the additional storage of intermediary results on the disk to fully analyze the instances.
                // hprof-slurp performs a single pass and makes no assumptions on the memory or storage available.
                InstanceDump {
                    object_id,
                    stack_trace_serial_number,
                    class_object_id,
                    data_size,
                }
            })
        },
    )
    .parse(i)
}
/// Parses an object-array dump sub-record, skipping the element references.
///
/// The byte length is computed in `usize` so that arrays with more than
/// 2^29 elements cannot overflow the former `u32` multiplication
/// (`number_of_elements * ID_SIZE`).
fn parse_gc_object_array_dump(i: &[u8]) -> IResult<&[u8], GcRecord> {
    flat_map(
        (parse_id, parse_u32, parse_u32, parse_id),
        |(object_id, stack_trace_serial_number, number_of_elements, array_class_id)| {
            map(
                // widened multiply: element count times 8-byte id size
                bytes::streaming::take(number_of_elements as usize * ID_SIZE as usize),
                move |_byte_array_elements| {
                    // Do not parse the array of object references as it is not needed for any analyses so far.
                    // see `count(parse_id, number_of_elements as usize)(byte_array_elements)`
                    ObjectArrayDump {
                        object_id,
                        stack_trace_serial_number,
                        number_of_elements,
                        array_class_id,
                    }
                },
            )
        },
    )
    .parse(i)
}
/// Parses a primitive-array dump sub-record, skipping the element bytes.
fn parse_gc_primitive_array_dump(i: &[u8]) -> IResult<&[u8], GcRecord> {
    flat_map(
        (parse_id, parse_u32, parse_u32, parse_field_type),
        |(object_id, stack_trace_serial_number, number_of_elements, element_type)| {
            // Do not parse the array of primitives as it is not needed for any analyses so far.
            // see `parse_array_value(element_type, number_of_elements)`
            map(
                skip_array_value(element_type, number_of_elements),
                move |_data_array_elements| PrimitiveArrayDump {
                    object_id,
                    stack_trace_serial_number,
                    number_of_elements,
                    element_type,
                },
            )
        },
    )
    .parse(i)
}
fn parse_header_record(i: &[u8]) -> IResult<&[u8], RecordHeader> {
map((parse_u32, parse_u32), |(timestamp, length)| RecordHeader {
timestamp,
length,
})
.parse(i)
}
fn parse_utf8_string(i: &[u8]) -> IResult<&[u8], Record> {
flat_map(parse_header_record, |header_record| {
map(
(
parse_id,
bytes::streaming::take(header_record.length - ID_SIZE),
),
|(id, b)| {
let str = String::from_utf8_lossy(b).into();
Utf8String { id, str }
},
)
})
.parse(i)
}
fn parse_load_class(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map(
(parse_u32, parse_id, parse_u32, parse_id),
|(serial_number, class_object_id, stack_trace_serial_number, class_name_id)| {
LoadClass(LoadClassData {
serial_number,
class_object_id,
stack_trace_serial_number,
class_name_id,
})
},
),
)
.parse(i)
}
fn parse_unload_class(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map(parse_u32, |serial_number| UnloadClass { serial_number }),
)
.parse(i)
}
fn parse_stack_frame(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map(
(parse_id, parse_id, parse_id, parse_id, parse_u32, parse_i32),
|(
stack_frame_id,
method_name_id,
method_signature_id,
source_file_name_id,
class_serial_number,
line_number,
)| {
StackFrame(StackFrameData {
stack_frame_id,
method_name_id,
method_signature_id,
source_file_name_id,
class_serial_number,
line_number,
})
},
),
)
.parse(i)
}
fn parse_stack_trace(i: &[u8]) -> IResult<&[u8], Record> {
flat_map(parse_header_record, |header_record| {
// (header_record.length - (3 * parse_u32)) / id_size = (header_record.length - 12) / 8
let stack_frame_ids_len = (header_record.length - 12) / ID_SIZE;
map(
(
parse_u32,
parse_u32,
parse_u32,
count(parse_id, stack_frame_ids_len as usize),
),
|(serial_number, thread_serial_number, number_of_frames, stack_frame_ids)| {
StackTrace(StackTraceData {
serial_number,
thread_serial_number,
number_of_frames,
stack_frame_ids,
})
},
)
})
.parse(i)
}
fn parse_start_thread(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map(
(parse_u32, parse_id, parse_u32, parse_id, parse_id, parse_id),
|(
thread_serial_number,
thread_object_id,
stack_trace_serial_number,
thread_name_id,
thread_group_name_id,
thread_group_parent_name_id,
)| StartThread {
thread_serial_number,
thread_object_id,
stack_trace_serial_number,
thread_name_id,
thread_group_name_id,
thread_group_parent_name_id,
},
),
)
.parse(i)
}
fn parse_heap_summary(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map(
(parse_u32, parse_u32, parse_u64, parse_u64),
|(
total_live_bytes,
total_live_instances,
total_bytes_allocated,
total_instances_allocated,
)| HeapSummary {
total_live_bytes,
total_live_instances,
total_bytes_allocated,
total_instances_allocated,
},
),
)
.parse(i)
}
fn parse_end_thread(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map(parse_u32, |thread_serial_number| EndThread {
thread_serial_number,
}),
)
.parse(i)
}
fn parse_allocation_site(i: &[u8]) -> IResult<&[u8], AllocationSite> {
map(
(
parse_u8, parse_u32, parse_u32, parse_u32, parse_u32, parse_u32, parse_u32,
),
|(
is_array,
class_serial_number,
stack_trace_serial_number,
bytes_alive,
instances_alive,
bytes_allocated,
instances_allocated,
)| {
AllocationSite {
is_array,
class_serial_number,
stack_trace_serial_number,
bytes_alive,
instances_alive,
bytes_allocated,
instances_allocated,
}
},
)
.parse(i)
}
fn parse_allocation_sites(i: &[u8]) -> IResult<&[u8], Record> {
flat_map(
preceded(
parse_header_record,
(
parse_u16, parse_u32, parse_u32, parse_u32, parse_u64, parse_u64, parse_u32,
),
),
|(
flags,
cutoff_ratio,
total_live_bytes,
total_live_instances,
total_bytes_allocated,
total_instances_allocated,
number_of_sites,
)| {
map(
count(parse_allocation_site, number_of_sites as usize),
move |allocation_sites| AllocationSites {
flags,
cutoff_ratio,
total_live_bytes,
total_live_instances,
total_bytes_allocated,
total_instances_allocated,
number_of_sites,
allocation_sites: Box::new(allocation_sites),
},
)
},
)
.parse(i)
}
fn parse_heap_dump_end(i: &[u8]) -> IResult<&[u8], Record> {
map(parse_header_record, |rb| HeapDumpEnd { length: rb.length }).parse(i)
}
fn parse_control_settings(i: &[u8]) -> IResult<&[u8], Record> {
preceded(
parse_header_record,
map((parse_u32, parse_u16), |(flags, stack_trace_depth)| {
ControlSettings {
flags,
stack_trace_depth,
}
}),
)
.parse(i)
}
fn parse_cpu_sample(i: &[u8]) -> IResult<&[u8], CpuSample> {
map(
(parse_u32, parse_u32),
|(number_of_samples, stack_trace_serial_number)| CpuSample {
number_of_samples,
stack_trace_serial_number,
},
)
.parse(i)
}
fn parse_cpu_samples(i: &[u8]) -> IResult<&[u8], Record> {
flat_map(
preceded(parse_header_record, (parse_u32, parse_u32)),
|(total_number_of_samples, number_of_traces)| {
map(
count(parse_cpu_sample, total_number_of_samples as usize),
move |cpu_samples| CpuSamples {
total_number_of_samples,
number_of_traces,
cpu_samples,
},
)
},
)
.parse(i)
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/record.rs | src/parser/record.rs | use crate::parser::gc_record::GcRecord;
#[derive(Debug, PartialEq, Eq)]
pub struct RecordHeader {
pub timestamp: u32,
pub length: u32,
}
#[derive(Debug)]
pub struct AllocationSite {
pub is_array: u8,
pub class_serial_number: u32,
pub stack_trace_serial_number: u32,
pub bytes_alive: u32,
pub instances_alive: u32,
pub bytes_allocated: u32,
pub instances_allocated: u32,
}
#[derive(Debug)]
pub struct CpuSample {
pub number_of_samples: u32,
pub stack_trace_serial_number: u32,
}
#[derive(Debug, Default)]
pub struct StackFrameData {
pub stack_frame_id: u64,
pub method_name_id: u64,
pub method_signature_id: u64,
pub source_file_name_id: u64,
pub class_serial_number: u32,
pub line_number: i32,
}
#[derive(Debug, Default)]
pub struct StackTraceData {
pub serial_number: u32,
pub thread_serial_number: u32,
pub number_of_frames: u32,
pub stack_frame_ids: Vec<u64>,
}
#[derive(Debug, Default)]
pub struct LoadClassData {
pub serial_number: u32,
pub class_object_id: u64,
pub stack_trace_serial_number: u32,
pub class_name_id: u64,
}
#[derive(Debug)]
#[allow(clippy::box_collection)]
pub enum Record {
Utf8String {
id: u64,
str: Box<str>,
},
LoadClass(LoadClassData),
UnloadClass {
serial_number: u32,
},
StackFrame(StackFrameData),
StackTrace(StackTraceData),
AllocationSites {
flags: u16,
cutoff_ratio: u32,
total_live_bytes: u32,
total_live_instances: u32,
total_bytes_allocated: u64,
total_instances_allocated: u64,
number_of_sites: u32,
allocation_sites: Box<Vec<AllocationSite>>,
},
StartThread {
thread_serial_number: u32,
thread_object_id: u64,
stack_trace_serial_number: u32,
thread_name_id: u64,
thread_group_name_id: u64,
thread_group_parent_name_id: u64,
},
EndThread {
thread_serial_number: u32,
},
HeapSummary {
total_live_bytes: u32,
total_live_instances: u32,
total_bytes_allocated: u64,
total_instances_allocated: u64,
},
HeapDumpStart {
length: u32,
},
HeapDumpEnd {
length: u32,
},
ControlSettings {
flags: u32,
stack_trace_depth: u16,
},
CpuSamples {
total_number_of_samples: u32,
number_of_traces: u32,
cpu_samples: Vec<CpuSample>,
},
GcSegment(GcRecord),
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/mod.rs | src/parser/mod.rs | pub mod file_header_parser;
#[allow(dead_code)]
pub mod gc_record;
mod primitive_parsers;
#[allow(dead_code)]
pub mod record;
pub mod record_parser;
pub mod record_stream_parser;
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/file_header_parser.rs | src/parser/file_header_parser.rs | use crate::parser::primitive_parsers::{parse_c_string, parse_u32, parse_u64};
use nom::IResult;
use nom::Parser;
use nom::combinator::map;
#[derive(Debug, PartialEq, Eq)]
pub struct FileHeader {
pub format: String,
pub size_pointers: u32,
pub timestamp: u64,
}
impl FileHeader {
fn from_bytes(format_b: &[u8], size_pointers: u32, timestamp: u64) -> Self {
Self {
format: String::from_utf8_lossy(format_b).to_string(),
size_pointers,
timestamp,
}
}
}
pub fn parse_file_header(i: &[u8]) -> IResult<&[u8], FileHeader> {
map(
(parse_c_string, parse_u32, parse_u64),
|(format, size_pointers, timestamp)| {
FileHeader::from_bytes(format, size_pointers, timestamp)
},
)
.parse(i)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_well_formed_header() {
let binary: [u8; 31] = [
74, 65, 86, 65, 32, 80, 82, 79, 70, 73, 76, 69, 32, 49, 46, 48, 46, 50, 0, 0, 0, 0, 8,
0, 0, 1, 118, 111, 186, 173, 167,
];
let expected = FileHeader {
format: "JAVA PROFILE 1.0.2".to_string(),
size_pointers: 8,
timestamp: 1_608_192_273_831,
};
let (rest, header) = parse_file_header(&binary).unwrap();
assert_eq!(header, expected);
assert!(rest.is_empty());
}
#[test]
fn parse_header_too_short() {
let binary: [u8; 30] = [
74, 65, 86, 65, 32, 80, 82, 79, 70, 73, 76, 69, 32, 49, 46, 48, 46, 50, 0, 0, 0, 0, 8,
0, 0, 1, 118, 111, 186, 173,
];
assert!(matches!(
parse_file_header(&binary),
Err(nom::Err::Incomplete(_))
));
}
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/gc_record.rs | src/parser/gc_record.rs | #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum FieldType {
Object = 2,
Bool = 4,
Char = 5,
Float = 6,
Double = 7,
Byte = 8,
Short = 9,
Int = 10,
Long = 11,
}
impl FieldType {
pub fn from_value(v: i8) -> Self {
match v {
2 => Self::Object,
4 => Self::Bool,
5 => Self::Char,
6 => Self::Float,
7 => Self::Double,
8 => Self::Byte,
9 => Self::Short,
10 => Self::Int,
11 => Self::Long,
x => panic!("{}", format!("FieldType {x} not found")),
}
}
}
#[derive(Debug)]
pub struct ConstFieldInfo {
pub const_pool_idx: u16,
pub const_type: FieldType,
}
#[derive(Debug)]
pub struct FieldInfo {
pub name_id: u64,
pub field_type: FieldType,
}
#[derive(Debug)]
pub enum FieldValue {
Bool(bool),
Byte(i8),
Char(u16),
Short(i16),
Int(i32),
Long(i64),
Float(f32),
Double(f64),
Object(u64),
}
#[derive(Debug)]
pub enum ArrayValue {
Bool(Vec<bool>),
Byte(Vec<i8>),
Char(Vec<u16>),
Short(Vec<i16>),
Int(Vec<i32>),
Long(Vec<i64>),
Float(Vec<f32>),
Double(Vec<f64>),
//Object(Vec<u64>),
}
#[derive(Debug)]
pub enum GcRecord {
RootUnknown {
object_id: u64,
},
RootThreadObject {
thread_object_id: u64,
thread_sequence_number: u32,
stack_sequence_number: u32,
},
RootJniGlobal {
object_id: u64,
jni_global_ref_id: u64,
},
RootJniLocal {
object_id: u64,
thread_serial_number: u32,
frame_number_in_stack_trace: u32,
},
RootJavaFrame {
object_id: u64,
thread_serial_number: u32,
frame_number_in_stack_trace: u32,
},
RootNativeStack {
object_id: u64,
thread_serial_number: u32,
},
RootStickyClass {
object_id: u64,
},
RootThreadBlock {
object_id: u64,
thread_serial_number: u32,
},
RootMonitorUsed {
object_id: u64,
},
InstanceDump {
object_id: u64,
stack_trace_serial_number: u32,
class_object_id: u64,
data_size: u32,
},
ObjectArrayDump {
object_id: u64,
stack_trace_serial_number: u32,
number_of_elements: u32,
array_class_id: u64,
},
PrimitiveArrayDump {
object_id: u64,
stack_trace_serial_number: u32,
number_of_elements: u32,
element_type: FieldType,
},
ClassDump(Box<ClassDumpFields>), // rare enough to be boxed to avoid large variant cost
}
#[derive(Debug)]
pub struct ClassDumpFields {
pub class_object_id: u64,
pub stack_trace_serial_number: u32,
pub super_class_object_id: u64,
pub instance_size: u32,
pub const_fields: Vec<(ConstFieldInfo, FieldValue)>,
pub static_fields: Vec<(FieldInfo, FieldValue)>,
pub instance_fields: Vec<FieldInfo>,
}
impl ClassDumpFields {
pub const fn new(
class_object_id: u64,
stack_trace_serial_number: u32,
super_class_object_id: u64,
instance_size: u32,
const_fields: Vec<(ConstFieldInfo, FieldValue)>,
static_fields: Vec<(FieldInfo, FieldValue)>,
instance_fields: Vec<FieldInfo>,
) -> Self {
Self {
class_object_id,
stack_trace_serial_number,
super_class_object_id,
instance_size,
const_fields,
static_fields,
instance_fields,
}
}
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
agourlay/hprof-slurp | https://github.com/agourlay/hprof-slurp/blob/7f07e9f13af1583e7780937e5c2a2bb547a80808/src/parser/record_stream_parser.rs | src/parser/record_stream_parser.rs | use crate::parser::record::Record;
use crate::parser::record_parser::HprofRecordParser;
use nom::Err;
use nom::Needed::Size;
use nom::Needed::Unknown;
use crate::slurp::READ_BUFFER_SIZE;
use crossbeam_channel::{Receiver, Sender};
use std::thread;
use std::thread::JoinHandle;
pub struct HprofRecordStreamParser {
parser: HprofRecordParser,
debug_mode: bool,
file_len: usize,
processed_len: usize,
loop_buffer: Vec<u8>,
pooled_vec: Vec<Record>,
needed: usize,
}
impl HprofRecordStreamParser {
pub const fn new(
debug_mode: bool,
file_len: usize,
processed_len: usize,
initial_loop_buffer: Vec<u8>,
) -> Self {
let parser = HprofRecordParser::new(debug_mode);
Self {
parser,
debug_mode,
file_len,
processed_len,
loop_buffer: initial_loop_buffer,
pooled_vec: Vec::new(),
needed: 0,
}
}
pub fn start(
mut self,
receive_data: Receiver<Vec<u8>>,
send_pooled_data: Sender<Vec<u8>>,
send_progress: Sender<usize>,
receive_pooled_vec: Receiver<Vec<Record>>,
send_records: Sender<Vec<Record>>,
) -> std::io::Result<JoinHandle<()>> {
thread::Builder::new()
.name("hprof-parser".to_string())
.spawn(move || {
loop {
match receive_data.recv() {
Err(_) => break,
Ok(mut pooled_buffer) => {
// Move input buffer into working buffer
self.loop_buffer.append(&mut pooled_buffer);
// Send back empty pooled_buffer with storage
send_pooled_data.send(pooled_buffer).unwrap_or_default();
if self.needed > self.loop_buffer.len() {
// need more data for the ongoing object
continue;
}
let iteration_res = self
.parser
.parse_streaming(&self.loop_buffer, &mut self.pooled_vec);
match iteration_res {
Ok((rest, ())) => {
let rest_len = rest.len();
let iteration_processed = self.loop_buffer.len() - rest_len;
self.processed_len += iteration_processed;
self.loop_buffer.drain(0..iteration_processed);
assert!(
self.processed_len <= self.file_len,
"Can't process more than the file length (processed:{} vs file:{})",
self.processed_len,
self.file_len
);
send_progress
.send(self.processed_len)
.expect("channel should not be closed");
let mut next_pooled_vec = receive_pooled_vec
.recv()
.expect("channel should not be closed");
// next_pooled_vec contains the records result after the swap
std::mem::swap(&mut next_pooled_vec, &mut self.pooled_vec);
send_records
.send(next_pooled_vec)
.expect("channel should not be closed");
if self.needed > 0 {
// Multi-buffer object successfully parsed
// Do not hold on too much working memory
self.loop_buffer.shrink_to(READ_BUFFER_SIZE * 2);
// Reset extra data needed flag
self.needed = 0;
}
}
Err(Err::Incomplete(Size(n))) => {
if self.debug_mode {
println!("Incomplete: {} bytes required to finish parsing object & current buffer len {}", n.get(), self.loop_buffer.len());
}
// capture needed data (missing + existing)
self.needed = n.get() + self.loop_buffer.len();
}
Err(Err::Incomplete(Unknown)) => {
panic!("Unexpected Incomplete with unknown size")
}
Err(Err::Failure(e)) => {
panic!("parsing failed with {e:?}")
}
Err(Err::Error(e)) => {
panic!("parsing failed with {e:?}")
}
}
}
}
}
})
}
}
| rust | Apache-2.0 | 7f07e9f13af1583e7780937e5c2a2bb547a80808 | 2026-01-04T20:18:02.056003Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/headers.rs | tpnote-html2md/src/headers.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use markup5ever_rcdom::{Handle, NodeData};
#[derive(Default)]
pub struct HeaderHandler {
header_type: String,
}
impl TagHandler for HeaderHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
self.header_type = match tag.data {
NodeData::Element { ref name, .. } => name.local.to_string(),
_ => String::new(),
};
printer.insert_newline();
printer.insert_newline();
match self.header_type.as_ref() {
"h1" => printer.append_str("# "),
"h2" => printer.append_str("## "),
"h3" => printer.append_str("### "),
"h4" => printer.append_str("#### "),
"h5" => printer.append_str("##### "),
"h6" => printer.append_str("###### "),
_ => {}
}
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
printer.insert_newline();
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/lib.rs | tpnote-html2md/src/lib.rs | use std::boxed::Box;
use std::collections::HashMap;
use std::sync::LazyLock;
use std::ffi::{CStr, CString};
use std::os::raw::c_char;
use regex::Regex;
use html5ever::driver::ParseOpts;
use html5ever::parse_document;
use html5ever::tendril::TendrilSink;
pub use markup5ever_rcdom::{Handle, NodeData, RcDom};
pub mod anchors;
pub mod codes;
pub mod common;
pub mod containers;
pub mod dummy;
pub mod headers;
pub mod iframes;
pub mod images;
pub mod lists;
pub mod markup5ever_rcdom;
pub mod paragraphs;
pub mod quotes;
pub mod styles;
pub mod tables;
use crate::anchors::AnchorHandler;
use crate::codes::CodeHandler;
use crate::containers::ContainerHandler;
use crate::dummy::DummyHandler;
use crate::dummy::HtmlCherryPickHandler;
use crate::dummy::IdentityHandler;
use crate::headers::HeaderHandler;
use crate::iframes::IframeHandler;
use crate::images::ImgHandler;
use crate::lists::ListHandler;
use crate::lists::ListItemHandler;
use crate::paragraphs::ParagraphHandler;
use crate::quotes::QuoteHandler;
use crate::styles::StyleHandler;
use crate::tables::TableHandler;
static EXCESSIVE_WHITESPACE_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new("\\s{2,}").unwrap()); // for HTML on-the-fly cleanup
static EMPTY_LINE_PATTERN: LazyLock<Regex> = LazyLock::new(|| Regex::new("(?m)^ +$").unwrap()); // for Markdown post-processing
static EXCESSIVE_NEWLINE_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new("\\n{3,}").unwrap()); // for Markdown post-processing
static TRAILING_SPACE_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new("(?m)(\\S) $").unwrap()); // for Markdown post-processing
static LEADING_NEWLINES_PATTERN: LazyLock<Regex> = LazyLock::new(|| Regex::new("^\\n+").unwrap()); // for Markdown post-processing
static LAST_WHITESPACE_PATTERN: LazyLock<Regex> = LazyLock::new(|| Regex::new("\\s+$").unwrap()); // for Markdown post-processing
static START_OF_LINE_PATTERN: LazyLock<Regex> = LazyLock::new(|| Regex::new("(^|\\n) *$").unwrap()); // for Markdown escaping
static MARKDOWN_STARTONLY_KEYCHARS: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^(\s*)([=>+\-#])").unwrap()); // for Markdown escaping
static MARKDOWN_MIDDLE_KEYCHARS: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"[<>*\\_~]").unwrap()); // for Markdown escaping
/// Custom variant of main function. Allows to pass custom tag<->tag factory pairs
/// in order to register custom tag handler for tags you want.
///
/// You can also override standard tag handlers this way
/// # Arguments
/// `html` is source HTML as `String`
/// `custom` is custom tag handler producers for tags you want, can be empty
pub fn parse_html_custom(
html: &str,
custom: &HashMap<String, Box<dyn TagHandlerFactory>>,
) -> String {
let dom = parse_document(RcDom::default(), ParseOpts::default())
.from_utf8()
.read_from(&mut html.as_bytes())
.unwrap();
let mut result = StructuredPrinter::default();
walk(&dom.document, &mut result, custom);
clean_markdown(&result.data)
}
/// Main function of this library. Parses incoming HTML, converts it into Markdown
/// and returns converted string.
/// # Arguments
/// `html` is source HTML as `String`
pub fn parse_html(html: &str) -> String {
parse_html_custom(html, &HashMap::default())
}
/// Same as `parse_html` but retains all "span" html elements intact
/// Markdown parsers usually strip them down when rendering but they
/// may be useful for later processing
pub fn parse_html_extended(html: &str) -> String {
struct SpanAsIsTagFactory;
impl TagHandlerFactory for SpanAsIsTagFactory {
fn instantiate(&self) -> Box<dyn TagHandler> {
Box::new(HtmlCherryPickHandler::default())
}
}
let mut tag_factory: HashMap<String, Box<dyn TagHandlerFactory>> = HashMap::new();
tag_factory.insert(String::from("span"), Box::new(SpanAsIsTagFactory {}));
parse_html_custom(html, &tag_factory)
}
/// Recursively walk through all DOM tree and handle all elements according to
/// HTML tag -> Markdown syntax mapping. Text content is trimmed to one whitespace according to HTML5 rules.
///
/// # Arguments
/// `input` is DOM tree or its subtree
/// `result` is output holder with position and context tracking
/// `custom` is custom tag hadler producers for tags you want, can be empty
fn walk(
input: &Handle,
result: &mut StructuredPrinter,
custom: &HashMap<String, Box<dyn TagHandlerFactory>>,
) {
let mut handler: Box<dyn TagHandler> = Box::new(DummyHandler);
let mut tag_name = String::default();
match input.data {
NodeData::Document | NodeData::Doctype { .. } | NodeData::ProcessingInstruction { .. } => {}
NodeData::Text { ref contents } => {
let mut text = contents.borrow().to_string();
let inside_pre = result.parent_chain.iter().any(|tag| tag == "pre");
if inside_pre {
// this is preformatted text, insert as it is
result.append_str(&text);
} else if !(text.trim().is_empty()
&& (result.data.ends_with('\n') || result.data.ends_with(' ')))
{
// in case it's not just a whitespace after the newline or another whitespace
// regular text, collapse whitespace and newlines in text
let inside_code = result.parent_chain.iter().any(|tag| tag == "code");
if !inside_code {
text = escape_markdown(result, &text);
}
let minified_text = EXCESSIVE_WHITESPACE_PATTERN.replace_all(&text, " ");
let minified_text = minified_text.trim_matches(|ch: char| ch == '\n' || ch == '\r');
result.append_str(minified_text);
}
}
NodeData::Comment { .. } => {} // ignore comments
NodeData::Element { ref name, .. } => {
tag_name = name.local.to_string();
let inside_pre = result.parent_chain.iter().any(|tag| tag == "pre");
if inside_pre {
// don't add any html tags inside the pre section
handler = Box::new(DummyHandler);
} else if custom.contains_key(&tag_name) {
// have user-supplied factory, instantiate a handler for this tag
let factory = custom.get(&tag_name).unwrap();
handler = factory.instantiate();
} else {
// no user-supplied factory, take one of built-in ones
handler = match tag_name.as_ref() {
// containers
"div" | "section" | "header" | "footer" => Box::new(ContainerHandler),
// pagination, breaks
"p" | "br" | "hr" => Box::new(ParagraphHandler::default()),
"q" | "cite" | "blockquote" => Box::new(QuoteHandler::default()),
// spoiler tag
"details" | "summary" => Box::new(HtmlCherryPickHandler::default()),
// formatting
"b" | "i" | "s" | "strong" | "em" | "del" => Box::new(StyleHandler::default()),
"h1" | "h2" | "h3" | "h4" | "h5" | "h6" => Box::new(HeaderHandler::default()),
"pre" | "code" => Box::new(CodeHandler::default()),
// images, links
"img" => Box::new(ImgHandler::default()),
"a" => Box::new(AnchorHandler::default()),
// lists
"ol" | "ul" | "menu" => Box::new(ListHandler),
"li" => Box::new(ListItemHandler::default()),
// as-is
"sub" | "sup" => Box::new(IdentityHandler),
// tables, handled fully internally as markdown can't have nested content in tables
// supports only single tables as of now
"table" => Box::new(TableHandler),
"iframe" => Box::new(IframeHandler),
// other
"html" | "head" | "body" => Box::new(DummyHandler),
_ => Box::new(DummyHandler),
};
}
}
}
// handle this tag, while it's not in parent chain
// and doesn't have child siblings
handler.handle(input, result);
// save this tag name as parent for child nodes
result.parent_chain.push(tag_name.to_string()); // e.g. it was ["body"] and now it's ["body", "p"]
let current_depth = result.parent_chain.len(); // e.g. it was 1 and now it's 2
// create space for siblings of next level
result.siblings.insert(current_depth, vec![]);
for child in input.children.borrow().iter() {
if handler.skip_descendants() {
continue;
}
walk(child, result, custom);
if let NodeData::Element { ref name, .. } = child.data {
result
.siblings
.get_mut(¤t_depth)
.unwrap()
.push(name.local.to_string())
};
}
// clear siblings of next level
result.siblings.remove(¤t_depth);
// release parent tag
result.parent_chain.pop();
// finish handling of tag - parent chain now doesn't contain this tag itself again
handler.after_handle(result);
}
/// This conversion should only be applied to text tags
///
/// Escapes text inside HTML tags so it won't be recognized as Markdown control sequence
/// like list start or bold text style
fn escape_markdown(result: &StructuredPrinter, text: &str) -> String {
// always escape bold/italic/strikethrough
let mut data = MARKDOWN_MIDDLE_KEYCHARS
.replace_all(text, "\\$0")
.to_string();
// if we're at the start of the line we need to escape list- and quote-starting sequences
if START_OF_LINE_PATTERN.is_match(&result.data) {
data = MARKDOWN_STARTONLY_KEYCHARS
.replace(&data, "$1\\$2")
.to_string();
}
// no handling of more complicated cases such as
// ![] or []() ones, for now this will suffice
data
}
/// Called after all processing has been finished
///
/// Clears excessive punctuation that would be trimmed by renderer anyway
fn clean_markdown(text: &str) -> String {
// remove redundant newlines
let intermediate = EMPTY_LINE_PATTERN.replace_all(text, ""); // empty line with trailing spaces, replace with just newline
let intermediate = EXCESSIVE_NEWLINE_PATTERN.replace_all(&intermediate, "\n\n"); // > 3 newlines - not handled by markdown anyway
let intermediate = TRAILING_SPACE_PATTERN.replace_all(&intermediate, "$1"); // trim space if it's just one
let intermediate = LEADING_NEWLINES_PATTERN.replace_all(&intermediate, ""); // trim leading newlines
let intermediate = LAST_WHITESPACE_PATTERN.replace_all(&intermediate, ""); // trim last newlines
intermediate.into_owned()
}
/// Intermediate result of HTML -> Markdown conversion.
///
/// Holds context in the form of parent tags and siblings chain
/// and resulting string of markup content with the current position.
#[derive(Debug, Default)]
pub struct StructuredPrinter {
/// Chain of parents leading to upmost <html> tag
pub parent_chain: Vec<String>,
/// Siblings of currently processed tag in order where they're appearing in html
pub siblings: HashMap<usize, Vec<String>>,
/// resulting markdown document
pub data: String,
}
impl StructuredPrinter {
/// Inserts newline
pub fn insert_newline(&mut self) {
self.append_str("\n");
}
/// Append string to the end of the printer
pub fn append_str(&mut self, it: &str) {
self.data.push_str(it);
}
/// Insert string at specified position of printer, adjust position to the end of inserted string
pub fn insert_str(&mut self, pos: usize, it: &str) {
self.data.insert_str(pos, it);
}
}
/// Tag handler factory. This class is required in providing proper
/// custom tag parsing capabilities to users of this library.
///
/// The problem with directly providing tag handlers is that they're not stateless.
/// Once tag handler is parsing some tag, it holds data, such as start position, indent etc.
/// The only way to create fresh tag handler for each tag is to provide a factory like this one.
///
pub trait TagHandlerFactory {
fn instantiate(&self) -> Box<dyn TagHandler>;
}
/// Trait interface describing abstract handler of arbitrary HTML tag.
pub trait TagHandler {
/// Handle tag encountered when walking HTML tree.
/// This is executed before the children processing
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter);
/// Executed after all children of this tag have been processed
fn after_handle(&mut self, printer: &mut StructuredPrinter);
fn skip_descendants(&self) -> bool {
false
}
}
/// FFI variant for HTML -> Markdown conversion for calling from other languages
#[unsafe(no_mangle)]
#[allow(clippy::not_unsafe_ptr_arg_deref)]
pub extern "C" fn parse(html: *const c_char) -> *const c_char {
let in_html = unsafe { CStr::from_ptr(html) };
let out_md = parse_html(&in_html.to_string_lossy());
CString::new(out_md).unwrap().into_raw()
}
/// Expose the JNI interface for android below
#[cfg(target_os = "android")]
#[allow(non_snake_case)]
pub mod android {
extern crate jni;
use super::parse_html;
use super::parse_html_extended;
use self::jni::JNIEnv;
use self::jni::objects::{JClass, JString};
use self::jni::sys::jstring;
#[no_mangle]
pub unsafe extern "C" fn Java_com_kanedias_html2md_Html2Markdown_parse(
env: JNIEnv,
_clazz: JClass,
html: JString,
) -> jstring {
let html_java: String = env
.get_string(html)
.expect("Couldn't get java string!")
.into();
let markdown = parse_html(&html_java);
let output = env
.new_string(markdown)
.expect("Couldn't create java string!");
output.into_inner()
}
#[no_mangle]
pub unsafe extern "C" fn Java_com_kanedias_html2md_Html2Markdown_parseExtended(
env: JNIEnv,
_clazz: JClass,
html: JString,
) -> jstring {
let html_java: String = env
.get_string(html)
.expect("Couldn't get java string!")
.into();
let markdown = parse_html_extended(&html_java);
let output = env
.new_string(markdown)
.expect("Couldn't create java string!");
output.into_inner()
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/anchors.rs | tpnote-html2md/src/anchors.rs | use crate::common::get_tag_attr;
use crate::dummy::IdentityHandler;
use crate::markup5ever_rcdom;
use percent_encoding::percent_decode_str;
use super::StructuredPrinter;
use super::TagHandler;
use markup5ever_rcdom::{Handle, NodeData};
#[derive(Default)]
pub struct AnchorHandler {
start_pos: usize,
url: String,
emit_unchanged: bool,
}
impl TagHandler for AnchorHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
// Check for a `name` attribute. If it exists, we can't support this
// in markdown, so we must emit this tag unchanged.
if get_tag_attr(tag, "name").is_some() {
let mut identity = IdentityHandler;
identity.handle(tag, printer);
self.emit_unchanged = true;
}
self.start_pos = printer.data.len();
// try to extract a hyperlink
self.url = match tag.data {
NodeData::Element { ref attrs, .. } => {
let attrs = attrs.borrow();
let href = attrs
.iter()
.find(|attr| attr.name.local.to_string() == "href");
match href {
Some(link) => {
let link = &*link.value;
let link = percent_decode_str(link).decode_utf8().unwrap_or_default();
if link.contains(|c: char| c.is_ascii_whitespace()) {
format!("<{}>", link)
} else {
link.to_string()
}
}
None => String::new(),
}
}
_ => String::new(),
};
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
if !self.emit_unchanged {
// add braces around already present text, put an url afterwards
printer.insert_str(self.start_pos, "[");
printer.append_str(&format!("]({})", self.url))
}
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/styles.rs | tpnote-html2md/src/styles.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use markup5ever_rcdom::{Handle, NodeData};
#[derive(Default)]
pub struct StyleHandler {
start_pos: usize,
style_type: String,
}
/// Applies givem `mark` at both start and end indices, updates printer position to the end of text
fn apply_at_bounds(printer: &mut StructuredPrinter, start: usize, end: usize, mark: &str) {
printer.data.insert_str(end, mark);
printer.data.insert_str(start, mark);
}
impl TagHandler for StyleHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
self.start_pos = printer.data.len();
self.style_type = match tag.data {
NodeData::Element { ref name, .. } => name.local.to_string(),
_ => String::new(),
};
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
let non_space_offset = printer.data[self.start_pos..].find(|ch: char| !ch.is_whitespace());
if non_space_offset.is_none() {
// only spaces or no text at all
return;
}
let first_non_space_pos = self.start_pos + non_space_offset.unwrap();
let last_non_space_pos = printer
.data
.trim_end_matches(|ch: char| ch.is_whitespace())
.len();
// finishing markup
match self.style_type.as_ref() {
"b" | "strong" => {
apply_at_bounds(printer, first_non_space_pos, last_non_space_pos, "**")
}
"i" | "em" => apply_at_bounds(printer, first_non_space_pos, last_non_space_pos, "*"),
"s" | "del" => apply_at_bounds(printer, first_non_space_pos, last_non_space_pos, "~~"),
"u" | "ins" => apply_at_bounds(printer, first_non_space_pos, last_non_space_pos, "__"),
_ => {}
}
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/markup5ever_rcdom.rs | tpnote-html2md/src/markup5ever_rcdom.rs | // This file is part of commit 8415d50 and a copy of:
// [html5ever/rcdom/lib.rs at main · servo/html5ever · GitHub](https://github.com/servo/html5ever/blob/main/rcdom/lib.rs)
//
// See also:
// [`RcDom` issues in `html2html` example · Issue #555 · servo/html5ever](https://github.com/servo/html5ever/issues/555)
//
//
// Copyright 2014-2017 The html5ever Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A simple reference-counted DOM.
//!
//! This is sufficient as a static parse tree, but don't build a
//! web browser using it. :)
//!
//! A DOM is a [tree structure] with ordered children that can be represented in an XML-like
//! format. For example, the following graph
//!
//! ```text
//! div
//! +- "text node"
//! +- span
//! ```
//! in HTML would be serialized as
//!
//! ```html
//! <div>text node<span></span></div>
//! ```
//!
//! See the [document object model article on wikipedia][dom wiki] for more information.
//!
//! This implementation stores the information associated with each node once, and then hands out
//! refs to children. The nodes themselves are reference-counted to avoid copying - you can create
//! a new ref and then a node will outlive the document. Nodes own their children, but only have
//! weak references to their parents.
//!
//! [tree structure]: https://en.wikipedia.org/wiki/Tree_(data_structure)
//! [dom wiki]: https://en.wikipedia.org/wiki/Document_Object_Model
extern crate markup5ever;
extern crate tendril;
use std::borrow::Cow;
use std::cell::{Cell, RefCell};
use std::collections::{HashSet, VecDeque};
use std::default::Default;
use std::fmt;
use std::io;
use std::mem;
use std::rc::{Rc, Weak};
use tendril::StrTendril;
use markup5ever::interface::tree_builder;
use markup5ever::interface::tree_builder::{ElementFlags, NodeOrText, QuirksMode, TreeSink};
use markup5ever::serialize::TraversalScope;
use markup5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode};
use markup5ever::serialize::{Serialize, Serializer};
use markup5ever::Attribute;
use markup5ever::ExpandedName;
use markup5ever::QualName;
/// The different kinds of nodes in the DOM.
#[derive(Debug)]
pub enum NodeData {
/// The `Document` itself - the root node of a HTML document.
Document,
/// A `DOCTYPE` with name, public id, and system id. See
/// [document type declaration on wikipedia][dtd wiki].
///
/// [dtd wiki]: https://en.wikipedia.org/wiki/Document_type_declaration
Doctype {
name: StrTendril,
public_id: StrTendril,
system_id: StrTendril,
},
/// A text node.
Text { contents: RefCell<StrTendril> },
/// A comment.
Comment { contents: StrTendril },
/// An element with attributes.
Element {
name: QualName,
attrs: RefCell<Vec<Attribute>>,
/// For HTML \<template\> elements, the [template contents].
///
/// [template contents]: https://html.spec.whatwg.org/multipage/#template-contents
template_contents: RefCell<Option<Handle>>,
/// Whether the node is a [HTML integration point].
///
/// [HTML integration point]: https://html.spec.whatwg.org/multipage/#html-integration-point
mathml_annotation_xml_integration_point: bool,
},
/// A Processing instruction.
ProcessingInstruction {
target: StrTendril,
contents: StrTendril,
},
}
/// A DOM node.
pub struct Node {
/// Parent node.
pub parent: Cell<Option<WeakHandle>>,
/// Child nodes of this node.
pub children: RefCell<Vec<Handle>>,
/// Represents this node's data.
pub data: NodeData,
}
impl Node {
/// Create a new node from its contents
pub fn new(data: NodeData) -> Rc<Self> {
Rc::new(Node {
data,
parent: Cell::new(None),
children: RefCell::new(Vec::new()),
})
}
}
impl Drop for Node {
fn drop(&mut self) {
let mut nodes = mem::take(&mut *self.children.borrow_mut());
while let Some(node) = nodes.pop() {
let children = mem::take(&mut *node.children.borrow_mut());
nodes.extend(children.into_iter());
if let NodeData::Element {
ref template_contents,
..
} = node.data
{
if let Some(template_contents) = template_contents.borrow_mut().take() {
nodes.push(template_contents);
}
}
}
}
}
impl fmt::Debug for Node {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("Node")
.field("data", &self.data)
.field("children", &self.children)
.finish()
}
}
/// Reference to a DOM node.
pub type Handle = Rc<Node>;
/// Weak reference to a DOM node, used for parent pointers.
pub type WeakHandle = Weak<Node>;
/// Append a parentless node to another nodes' children
fn append(new_parent: &Handle, child: Handle) {
let previous_parent = child.parent.replace(Some(Rc::downgrade(new_parent)));
// Invariant: child cannot have existing parent
assert!(previous_parent.is_none());
new_parent.children.borrow_mut().push(child);
}
/// If the node has a parent, get it and this node's position in its children
fn get_parent_and_index(target: &Handle) -> Option<(Handle, usize)> {
match target.parent.take() { Some(weak) => {
let parent = weak.upgrade().expect("dangling weak pointer");
target.parent.set(Some(weak));
let i = match parent
.children
.borrow()
.iter()
.enumerate()
.find(|&(_, child)| Rc::ptr_eq(child, target))
{
Some((i, _)) => i,
None => panic!("have parent but couldn't find in parent's children!"),
};
Some((parent, i))
} _ => {
None
}}
}
fn append_to_existing_text(prev: &Handle, text: &str) -> bool {
match prev.data {
NodeData::Text { ref contents } => {
contents.borrow_mut().push_slice(text);
true
}
_ => false,
}
}
fn remove_from_parent(target: &Handle) {
if let Some((parent, i)) = get_parent_and_index(target) {
parent.children.borrow_mut().remove(i);
target.parent.set(None);
}
}
/// The DOM itself; the result of parsing.
pub struct RcDom {
/// The `Document` itself.
pub document: Handle,
/// Errors that occurred during parsing.
pub errors: RefCell<Vec<Cow<'static, str>>>,
/// The document's quirks mode.
pub quirks_mode: Cell<QuirksMode>,
}
impl TreeSink for RcDom {
type Output = Self;
fn finish(self) -> Self {
self
}
type Handle = Handle;
type ElemName<'a>
= ExpandedName<'a>
where
Self: 'a;
fn parse_error(&self, msg: Cow<'static, str>) {
self.errors.borrow_mut().push(msg);
}
fn get_document(&self) -> Handle {
self.document.clone()
}
fn get_template_contents(&self, target: &Handle) -> Handle {
if let NodeData::Element {
ref template_contents,
..
} = target.data
{
template_contents
.borrow()
.as_ref()
.expect("not a template element!")
.clone()
} else {
panic!("not a template element!")
}
}
fn set_quirks_mode(&self, mode: QuirksMode) {
self.quirks_mode.set(mode);
}
fn same_node(&self, x: &Handle, y: &Handle) -> bool {
Rc::ptr_eq(x, y)
}
fn elem_name<'a>(&self, target: &'a Handle) -> ExpandedName<'a> {
match target.data {
NodeData::Element { ref name, .. } => name.expanded(),
_ => panic!("not an element!"),
}
}
fn create_element(&self, name: QualName, attrs: Vec<Attribute>, flags: ElementFlags) -> Handle {
Node::new(NodeData::Element {
name,
attrs: RefCell::new(attrs),
template_contents: RefCell::new(if flags.template {
Some(Node::new(NodeData::Document))
} else {
None
}),
mathml_annotation_xml_integration_point: flags.mathml_annotation_xml_integration_point,
})
}
fn create_comment(&self, text: StrTendril) -> Handle {
Node::new(NodeData::Comment { contents: text })
}
fn create_pi(&self, target: StrTendril, data: StrTendril) -> Handle {
Node::new(NodeData::ProcessingInstruction {
target,
contents: data,
})
}
fn append(&self, parent: &Handle, child: NodeOrText<Handle>) {
// Append to an existing Text node if we have one.
if let NodeOrText::AppendText(text) = &child {
if let Some(h) = parent.children.borrow().last() {
if append_to_existing_text(h, text) {
return;
}
}
}
append(
parent,
match child {
NodeOrText::AppendText(text) => Node::new(NodeData::Text {
contents: RefCell::new(text),
}),
NodeOrText::AppendNode(node) => node,
},
);
}
fn append_before_sibling(&self, sibling: &Handle, child: NodeOrText<Handle>) {
let (parent, i) = get_parent_and_index(sibling)
.expect("append_before_sibling called on node without parent");
let child = match (child, i) {
// No previous node.
(NodeOrText::AppendText(text), 0) => Node::new(NodeData::Text {
contents: RefCell::new(text),
}),
// Look for a text node before the insertion point.
(NodeOrText::AppendText(text), i) => {
let children = parent.children.borrow();
let prev = &children[i - 1];
if append_to_existing_text(prev, &text) {
return;
}
Node::new(NodeData::Text {
contents: RefCell::new(text),
})
}
// The tree builder promises we won't have a text node after
// the insertion point.
// Any other kind of node.
(NodeOrText::AppendNode(node), _) => node,
};
remove_from_parent(&child);
child.parent.set(Some(Rc::downgrade(&parent)));
parent.children.borrow_mut().insert(i, child);
}
fn append_based_on_parent_node(
&self,
element: &Self::Handle,
prev_element: &Self::Handle,
child: NodeOrText<Self::Handle>,
) {
let parent = element.parent.take();
let has_parent = parent.is_some();
element.parent.set(parent);
if has_parent {
self.append_before_sibling(element, child);
} else {
self.append(prev_element, child);
}
}
fn append_doctype_to_document(
&self,
name: StrTendril,
public_id: StrTendril,
system_id: StrTendril,
) {
append(
&self.document,
Node::new(NodeData::Doctype {
name,
public_id,
system_id,
}),
);
}
fn add_attrs_if_missing(&self, target: &Handle, attrs: Vec<Attribute>) {
let mut existing = if let NodeData::Element { ref attrs, .. } = target.data {
attrs.borrow_mut()
} else {
panic!("not an element")
};
let existing_names = existing
.iter()
.map(|e| e.name.clone())
.collect::<HashSet<_>>();
existing.extend(
attrs
.into_iter()
.filter(|attr| !existing_names.contains(&attr.name)),
);
}
fn remove_from_parent(&self, target: &Handle) {
remove_from_parent(target);
}
fn reparent_children(&self, node: &Handle, new_parent: &Handle) {
let mut children = node.children.borrow_mut();
let mut new_children = new_parent.children.borrow_mut();
for child in children.iter() {
let previous_parent = child.parent.replace(Some(Rc::downgrade(new_parent)));
assert!(Rc::ptr_eq(
node,
&previous_parent.unwrap().upgrade().expect("dangling weak")
))
}
new_children.extend(mem::take(&mut *children));
}
fn is_mathml_annotation_xml_integration_point(&self, target: &Handle) -> bool {
if let NodeData::Element {
mathml_annotation_xml_integration_point,
..
} = target.data
{
mathml_annotation_xml_integration_point
} else {
panic!("not an element!")
}
}
}
impl Default for RcDom {
fn default() -> RcDom {
RcDom {
document: Node::new(NodeData::Document),
errors: Default::default(),
quirks_mode: Cell::new(tree_builder::NoQuirks),
}
}
}
enum SerializeOp {
Open(Handle),
Close(QualName),
}
pub struct SerializableHandle(Handle);
impl From<Handle> for SerializableHandle {
fn from(h: Handle) -> SerializableHandle {
SerializableHandle(h)
}
}
impl Serialize for SerializableHandle {
fn serialize<S>(&self, serializer: &mut S, traversal_scope: TraversalScope) -> io::Result<()>
where
S: Serializer,
{
let mut ops = VecDeque::new();
match traversal_scope {
IncludeNode => ops.push_back(SerializeOp::Open(self.0.clone())),
ChildrenOnly(_) => ops.extend(
self.0
.children
.borrow()
.iter()
.map(|h| SerializeOp::Open(h.clone())),
),
}
while let Some(op) = ops.pop_front() {
match op {
SerializeOp::Open(handle) => match handle.data {
NodeData::Element {
ref name,
ref attrs,
..
} => {
serializer.start_elem(
name.clone(),
attrs.borrow().iter().map(|at| (&at.name, &at.value[..])),
)?;
ops.reserve(1 + handle.children.borrow().len());
ops.push_front(SerializeOp::Close(name.clone()));
for child in handle.children.borrow().iter().rev() {
ops.push_front(SerializeOp::Open(child.clone()));
}
}
NodeData::Doctype { ref name, .. } => serializer.write_doctype(name)?,
NodeData::Text { ref contents } => serializer.write_text(&contents.borrow())?,
NodeData::Comment { ref contents } => serializer.write_comment(contents)?,
NodeData::ProcessingInstruction {
ref target,
ref contents,
} => serializer.write_processing_instruction(target, contents)?,
NodeData::Document => panic!("Can't serialize Document node itself"),
},
SerializeOp::Close(name) => {
serializer.end_elem(name)?;
}
}
}
Ok(())
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/quotes.rs | tpnote-html2md/src/quotes.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use markup5ever_rcdom::Handle;
#[derive(Default)]
pub struct QuoteHandler {
start_pos: usize,
}
impl TagHandler for QuoteHandler {
fn handle(&mut self, _tag: &Handle, printer: &mut StructuredPrinter) {
self.start_pos = printer.data.len();
printer.insert_newline();
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
// replace all newlines with newline + >
let quote = "> ";
let mut index = printer.data.len();
while index > self.start_pos {
if printer.data.as_bytes().get(index) == Some(&b'\n') {
printer.insert_str(index + 1, quote);
}
index -= 1;
}
printer.insert_str(self.start_pos + 1, quote);
printer.insert_newline();
printer.insert_newline();
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/iframes.rs | tpnote-html2md/src/iframes.rs | use std::sync::LazyLock;
use super::StructuredPrinter;
use super::TagHandler;
use crate::common::get_tag_attr;
use crate::dummy::IdentityHandler;
use crate::markup5ever_rcdom;
use markup5ever_rcdom::Handle;
use regex::Regex;
/// Pattern that detects iframes with Youtube embedded videos<br/>
/// Examples:
/// * `https://www.youtube.com/embed/zE-dmXZp3nU?wmode=opaque`
/// * `https://www.youtube-nocookie.com/embed/5yo6exIypkY`
/// * `https://www.youtube.com/embed/TXm6IXrbQuM`
static YOUTUBE_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"www\.youtube(?:-nocookie)?\.com/embed/([-\w]+)").unwrap());
/// Pattern that detects iframes with Instagram embedded photos<br/>
/// Examples:
/// * `https://www.instagram.com/p/B1BKr9Wo8YX/embed/`
/// * `https://www.instagram.com/p/BpKjlo-B4uI/embed/`
static INSTAGRAM_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"www\.instagram\.com/p/([-\w]+)/embed").unwrap());
/// Patter that detects iframes with VKontakte embedded videos<br/>
/// Examples:
/// * `https://vk.com/video_ext.php?oid=-49423435&id=456245092&hash=e1611aefe899c4f8`
/// * `https://vk.com/video_ext.php?oid=-76477496&id=456239454&hash=ebfdc2d386617b97`
static VK_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(r"vk\.com/video_ext\.php\?oid=(-?\d+)&id=(\d+)&hash=(.*)").unwrap()
});
#[allow(dead_code)]
static YANDEX_MUSIC_TRACK_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"https://music.yandex.ru/iframe/#track/(\d+)/(\d+)").unwrap());
#[allow(dead_code)]
static YANDEX_MUSIC_ALBUM_PATTERN: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"https://music.yandex.ru/iframe/#album/(\d+)").unwrap());
#[derive(Default)]
pub struct IframeHandler;
impl TagHandler for IframeHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
printer.insert_newline();
printer.insert_newline();
let src = get_tag_attr(tag, "src");
//let width = get_tag_attr(tag, "width");
//let height = get_tag_attr(tag, "height");
if src.is_none() {
return;
}
let src = src.unwrap();
if let Some(capture) = YOUTUBE_PATTERN.captures(&src) {
let media_id = capture.get(1).map_or("", |m| m.as_str());
printer.append_str(&format!("[](https://www.youtube.com/watch?v={mid})", mid = media_id));
return;
}
if let Some(capture) = INSTAGRAM_PATTERN.captures(&src) {
let media_id = capture.get(1).map_or("", |m| m.as_str());
printer.append_str(&format!("[](https://www.instagram.com/p/{mid}/embed/)", mid = media_id));
return;
}
if let Some(capture) = VK_PATTERN.captures(&src) {
let owner_id = capture.get(1).map_or("", |m| m.as_str());
let video_id = capture.get(2).map_or("", |m| m.as_str());
let _hash = capture.get(3).map_or("", |m| m.as_str());
printer.append_str(&format!("[](https://vk.com/video{oid}_{vid})", oid = owner_id, vid = video_id));
return;
}
// not found, use generic implementation
let mut identity = IdentityHandler;
identity.handle(tag, printer);
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
printer.insert_newline();
printer.insert_newline();
}
fn skip_descendants(&self) -> bool {
true
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/lists.rs | tpnote-html2md/src/lists.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use markup5ever_rcdom::Handle;
/// gets all list elements registered by a `StructuredPrinter` in reverse order
fn list_hierarchy(printer: &mut StructuredPrinter) -> Vec<&String> {
printer
.parent_chain
.iter()
.rev()
.filter(|&tag| tag == "ul" || tag == "ol" || tag == "menu")
.collect()
}
#[derive(Default)]
pub struct ListHandler;
impl TagHandler for ListHandler {
/// we're entering "ul" or "ol" tag, no "li" handling here
fn handle(&mut self, _tag: &Handle, printer: &mut StructuredPrinter) {
printer.insert_newline();
// insert an extra newline for non-nested lists
if list_hierarchy(printer).is_empty() {
printer.insert_newline();
}
}
/// indent now-ready list
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
printer.insert_newline();
printer.insert_newline();
}
}
#[derive(Default)]
pub struct ListItemHandler {
start_pos: usize,
list_type: String,
}
impl TagHandler for ListItemHandler {
fn handle(&mut self, _tag: &Handle, printer: &mut StructuredPrinter) {
{
let parent_lists = list_hierarchy(printer);
let nearest_parent_list = parent_lists.first();
if nearest_parent_list.is_none() {
// no parent list
// should not happen - html5ever cleans html input when parsing
return;
}
self.list_type = nearest_parent_list.unwrap().to_string();
}
if !printer.data.ends_with('\n') {
// insert newline when declaring a list item only in case there isn't any newline at the end of text
printer.insert_newline();
}
let current_depth = printer.parent_chain.len();
let order = printer.siblings[¤t_depth].len() + 1;
match self.list_type.as_ref() {
"ul" | "menu" => printer.append_str("* "), // unordered list: *, *, *
"ol" => printer.append_str(&(order.to_string() + ". ")), // ordered list: 1, 2, 3
_ => {} // never happens
}
self.start_pos = printer.data.len();
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
let padding = match self.list_type.as_ref() {
"ul" => 2,
"ol" => 3,
_ => 4,
};
// need to clean up leading newlines, <p> inside <li> should produce
// valid list element, not an empty line
let index = self.start_pos;
while index < printer.data.len() {
if printer.data.as_bytes().get(index) == Some(&b'\n')
|| printer.data.as_bytes().get(index) == Some(&b' ')
{
printer.data.remove(index);
} else {
break;
}
}
// non-nested indentation (padding). Markdown requires that all
// paragraphs in the list item except first should be indented with at
// least 1 space
let mut index = printer.data.len();
while index > self.start_pos {
if printer.data.as_bytes().get(index) == Some(&b'\n') {
printer.insert_str(index + 1, &" ".repeat(padding));
}
index -= 1;
}
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/codes.rs | tpnote-html2md/src/codes.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use markup5ever_rcdom::{Handle, NodeData};
#[derive(Default)]
pub struct CodeHandler {
code_type: String,
}
impl CodeHandler {
/// Used in both starting and finishing handling
fn do_handle(&mut self, printer: &mut StructuredPrinter, start: bool) {
let immediate_parent = printer.parent_chain.last().unwrap().to_owned();
if self.code_type == "code" && immediate_parent == "pre" {
// We are already in "code" mode.
return;
}
match self.code_type.as_ref() {
"pre" => {
// Code blocks should have its own paragraph
if start {
printer.insert_newline();
}
printer.append_str("\n```\n");
if !start {
printer.insert_newline();
}
}
"code" | "samp" => printer.append_str("`"),
_ => {}
}
}
}
impl TagHandler for CodeHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
self.code_type = match tag.data {
NodeData::Element { ref name, .. } => name.local.to_string(),
_ => String::new(),
};
self.do_handle(printer, true);
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
self.do_handle(printer, false);
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/dummy.rs | tpnote-html2md/src/dummy.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use html5ever::serialize;
use html5ever::serialize::{SerializeOpts, TraversalScope};
use markup5ever_rcdom::{Handle, NodeData, SerializableHandle};
#[derive(Default)]
pub struct DummyHandler;
impl TagHandler for DummyHandler {
fn handle(&mut self, _tag: &Handle, _printer: &mut StructuredPrinter) {}
fn after_handle(&mut self, _printer: &mut StructuredPrinter) {}
}
/// Handler that completely copies tag to printer as HTML with all descendants
#[derive(Default)]
pub struct IdentityHandler;
impl TagHandler for IdentityHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
let mut buffer = vec![];
let options = SerializeOpts {
traversal_scope: TraversalScope::IncludeNode,
..Default::default()
};
let to_be_serialized = SerializableHandle::from(tag.clone());
let result = serialize(&mut buffer, &to_be_serialized, options);
if result.is_err() {
// couldn't serialize the tag
return;
}
let conv = String::from_utf8(buffer);
if conv.is_err() {
// Is non UTF-8 string possible in html5ever?
return;
}
printer.append_str(&conv.unwrap());
}
fn skip_descendants(&self) -> bool {
true
}
fn after_handle(&mut self, _printer: &mut StructuredPrinter) {}
}
/// Handler that copies just one tag and doesn't skip descendants
#[derive(Default)]
pub struct HtmlCherryPickHandler {
tag_name: String,
}
impl TagHandler for HtmlCherryPickHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
if let NodeData::Element {
ref name,
ref attrs,
..
} = tag.data
{
let attrs = attrs.borrow();
self.tag_name = name.local.to_string();
printer.append_str(&format!("<{}", self.tag_name));
for attr in attrs.iter() {
printer.append_str(&format!(" {}=\"{}\"", attr.name.local, attr.value));
}
printer.append_str(">");
}
}
fn skip_descendants(&self) -> bool {
false
}
fn after_handle(&mut self, printer: &mut StructuredPrinter) {
printer.append_str(&format!("</{}>", self.tag_name));
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/common.rs | tpnote-html2md/src/common.rs | use crate::markup5ever_rcdom;
use markup5ever_rcdom::{Handle, NodeData};
pub fn get_tag_attr(tag: &Handle, attr_name: &str) -> Option<String> {
match tag.data {
NodeData::Element { ref attrs, .. } => {
let attrs = attrs.borrow();
let requested_attr = attrs
.iter()
.find(|attr| attr.name.local.to_string() == attr_name);
requested_attr.map(|attr| attr.value.to_string())
}
_ => None,
}
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
getreu/tp-note | https://github.com/getreu/tp-note/blob/4a373fcf860c8d7a8c3da02f4ab23441f91738ae/tpnote-html2md/src/tables.rs | tpnote-html2md/src/tables.rs | use crate::markup5ever_rcdom;
use super::StructuredPrinter;
use super::TagHandler;
use super::{clean_markdown, walk};
use std::{cmp, collections::HashMap};
use markup5ever_rcdom::{Handle, NodeData};
#[derive(Default)]
pub struct TableHandler;
impl TagHandler for TableHandler {
fn handle(&mut self, tag: &Handle, printer: &mut StructuredPrinter) {
let mut table_markup = String::new();
let any_matcher = |cell: &Handle| {
let name = tag_name(cell);
name == "td" || name == "th"
};
// detect cell width, counts
let column_count: usize;
let mut column_widths: Vec<usize>;
let rows = find_children(tag, "tr");
{
// detect row count
let most_big_row = rows.iter().max_by(|left, right| {
collect_children(left, any_matcher)
.len()
.cmp(&collect_children(right, any_matcher).len())
});
if most_big_row.is_none() {
// we don't have rows with content at all
return;
}
// have rows with content, set column count
column_count = collect_children(most_big_row.unwrap(), any_matcher).len();
column_widths = vec![3; column_count];
// detect max column width
for row in &rows {
let cells = collect_children(row, any_matcher);
#[allow(clippy::needless_range_loop)]
for index in 0..column_count {
// from regular rows
if let Some(cell) = cells.get(index) {
let text = to_text(cell);
column_widths[index] = cmp::max(column_widths[index], text.chars().count());
}
}
}
}
// header row must always be present
for (idx, row) in rows.iter().enumerate() {
table_markup.push('|');
let cells = collect_children(row, any_matcher);
#[allow(clippy::needless_range_loop)]
for index in 0..column_count {
// we need to fill all cells in a column, even if some rows don't have enough
let padded_cell_text = pad_cell_text(&cells.get(index), column_widths[index]);
table_markup.push_str(&padded_cell_text);
table_markup.push('|');
}
table_markup.push('\n');
if idx == 0 {
// first row is a header row
// add header-body divider row
table_markup.push('|');
#[allow(clippy::needless_range_loop)]
for index in 0..column_count {
let width = column_widths[index];
if width < 3 {
// no point in aligning, just post as-is
table_markup.push_str(&"-".repeat(width));
table_markup.push('|');
continue;
}
// try to detect alignment
let mut alignment = String::new();
if let Some(header_cell) = cells.get(index) {
// we have a header, try to extract alignment from it
alignment = match header_cell.data {
NodeData::Element { ref attrs, .. } => {
let attrs = attrs.borrow();
let align_attr = attrs
.iter()
.find(|attr| attr.name.local.to_string() == "align");
align_attr
.map(|attr| attr.value.to_string())
.unwrap_or_default()
}
_ => String::new(),
};
}
// push lines according to alignment, fallback to default behaviour
match alignment.as_ref() {
"left" => {
table_markup.push(':');
table_markup.push_str(&"-".repeat(width - 1));
}
"center" => {
table_markup.push(':');
table_markup.push_str(&"-".repeat(width - 2));
table_markup.push(':');
}
"right" => {
table_markup.push_str(&"-".repeat(width - 1));
table_markup.push(':');
}
_ => table_markup.push_str(&"-".repeat(width)),
}
table_markup.push('|');
}
table_markup.push('\n');
}
}
printer.insert_newline();
printer.insert_newline();
printer.append_str(&table_markup);
}
fn after_handle(&mut self, _printer: &mut StructuredPrinter) {}
fn skip_descendants(&self) -> bool {
true
}
}
/// Pads cell text from right and left so it looks centered inside the table cell
/// ### Arguments
/// `tag` - optional reference to currently processed handle, text is extracted from here
///
/// `column_width` - precomputed column width to compute padding length from
fn pad_cell_text(tag: &Option<&Handle>, column_width: usize) -> String {
let mut result = String::new();
if let Some(cell) = tag {
// have header at specified position
let text = to_text(cell);
// compute difference between width and text length
let len_diff = column_width - text.chars().count();
if len_diff > 0 {
// should pad
if len_diff > 1 {
// should pad from both sides
let pad_len = len_diff / 2;
let remainder = len_diff % 2;
result.push_str(&" ".repeat(pad_len));
result.push_str(&text);
result.push_str(&" ".repeat(pad_len + remainder));
} else {
// it's just one space, add at the end
result.push_str(&text);
result.push(' ');
}
} else {
// shouldn't pad, text fills whole cell
result.push_str(&text);
}
} else {
// no text in this cell, fill cell with spaces
let pad_len = column_width;
result.push_str(&" ".repeat(pad_len));
}
result
}
/// Extracts tag name from passed tag
/// Returns empty string if it's not an html element
fn tag_name(tag: &Handle) -> String {
match tag.data {
NodeData::Element { ref name, .. } => name.local.to_string(),
_ => String::new(),
}
}
/// Find descendants of this tag with tag name `name`
/// This includes both direct children and descendants
fn find_children(tag: &Handle, name: &str) -> Vec<Handle> {
let mut result: Vec<Handle> = vec![];
let children = tag.children.borrow();
for child in children.iter() {
if tag_name(child) == name {
result.push(child.clone());
}
let mut descendants = find_children(child, name);
result.append(&mut descendants);
}
result
}
/// Collect direct children that satisfy the predicate
/// This doesn't include descendants
fn collect_children<P>(tag: &Handle, predicate: P) -> Vec<Handle>
where
P: Fn(&Handle) -> bool,
{
let mut result: Vec<Handle> = vec![];
let children = tag.children.borrow();
for child in children.iter() {
let candidate = child.clone();
if predicate(&candidate) {
result.push(candidate);
}
}
result
}
/// Convert html tag to text. This collects all tag children in correct order where they're observed
/// and concatenates their text, recursively.
fn to_text(tag: &Handle) -> String {
let mut printer = StructuredPrinter::default();
walk(tag, &mut printer, &HashMap::default());
let result = clean_markdown(&printer.data);
result.replace("\n", "<br/>")
}
| rust | Apache-2.0 | 4a373fcf860c8d7a8c3da02f4ab23441f91738ae | 2026-01-04T20:18:01.333543Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.