repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/ui/details/pkgbuild.rs | src/ui/details/pkgbuild.rs | use ratatui::{
Frame,
prelude::Rect,
style::{Modifier, Style},
text::{Line, Span},
widgets::{Block, BorderType, Borders, Paragraph, Wrap},
};
use unicode_width::UnicodeWidthStr;
use crate::i18n;
use crate::state::AppState;
use crate::theme::theme;
use super::pkgbuild_highlight;
/// What: Render the PKGBUILD viewer pane with scroll support and action buttons.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Mutable application state (PKGBUILD text, scroll, cached rects)
/// - `pkgb_area`: Rect assigned to the PKGBUILD pane
///
/// Output:
/// - Draws PKGBUILD text and updates button rectangles for copy/reload interactions.
///
/// Details:
/// - Applies scroll offset, records the scrollable inner region, and toggles presence of the reload
/// button when the cached PKGBUILD belongs to a different package.
pub fn render_pkgbuild(f: &mut Frame, app: &mut AppState, pkgb_area: Rect) {
    let th = theme();
    // Placeholder text shown until the PKGBUILD has been fetched.
    let loading_text = i18n::t(app, "app.details.loading_pkgb");
    let pkgb_text = app.pkgb_text.as_deref().unwrap_or(&loading_text);
    // Remember PKGBUILD rect for mouse interactions (scrolling).
    // Stored as the inner region with the one-cell border excluded.
    app.pkgb_rect = Some((
        pkgb_area.x + 1,
        pkgb_area.y + 1,
        pkgb_area.width.saturating_sub(2),
        pkgb_area.height.saturating_sub(2),
    ));
    // Apply vertical scroll offset by trimming top lines
    // First, get all lines (highlighted or plain)
    let all_lines = if pkgb_text == loading_text {
        // For loading text, use plain text (no highlighting). `loading_text`
        // is moved here; the borrow held by `pkgb_text` ends at the
        // comparison above, so this branch is accepted by the borrow checker.
        vec![Line::from(loading_text)]
    } else {
        // Apply syntax highlighting
        pkgbuild_highlight::highlight_pkgbuild(pkgb_text, &th)
    };
    // Apply scroll offset
    let visible_lines: Vec<Line> = all_lines
        .into_iter()
        .skip(app.pkgb_scroll as usize)
        .collect();
    // Title with clickable "Copy PKGBUILD" button and optional "Reload PKGBUILD" button
    let check_button_label = i18n::t(app, "app.details.copy_pkgbuild");
    let pkgb_title_text = i18n::t(app, "app.titles.pkgb");
    let mut pkgb_title_spans: Vec<Span> = vec![Span::styled(
        pkgb_title_text.clone(),
        Style::default().fg(th.overlay1),
    )];
    pkgb_title_spans.push(Span::raw(" "));
    let check_btn_style = Style::default()
        .fg(th.mauve)
        .bg(th.surface2)
        .add_modifier(Modifier::BOLD);
    pkgb_title_spans.push(Span::styled(check_button_label.clone(), check_btn_style));
    // Check if PKGBUILD is for a different package than currently selected;
    // the reload button is only offered when some other package's PKGBUILD is cached.
    let current_package = app.results.get(app.selected).map(|i| i.name.as_str());
    let needs_reload =
        app.pkgb_package_name.as_deref() != current_package && app.pkgb_package_name.is_some();
    // Record clickable rect for the "Copy PKGBUILD" button on the top border row
    // Use Unicode display width, not byte length, to handle wide characters
    let btn_y = pkgb_area.y;
    let btn_x = pkgb_area
        .x
        .saturating_add(1)
        .saturating_add(u16::try_from(pkgb_title_text.width()).unwrap_or(u16::MAX))
        .saturating_add(2);
    let btn_w = u16::try_from(check_button_label.width()).unwrap_or(u16::MAX);
    app.pkgb_check_button_rect = Some((btn_x, btn_y, btn_w, 1));
    // Add "Reload PKGBUILD" button if needed; clear the rect first so a stale
    // rect never survives a frame where the button is not drawn.
    app.pkgb_reload_button_rect = None;
    if needs_reload {
        pkgb_title_spans.push(Span::raw(" "));
        let reload_button_label = i18n::t(app, "app.details.reload_pkgbuild");
        let reload_btn_style = Style::default()
            .fg(th.mauve)
            .bg(th.surface2)
            .add_modifier(Modifier::BOLD);
        pkgb_title_spans.push(Span::styled(reload_button_label.clone(), reload_btn_style));
        // Record clickable rect for the reload button (to the right of Copy, 2-col gap)
        let reload_btn_x = btn_x.saturating_add(btn_w).saturating_add(2);
        let reload_btn_w = u16::try_from(reload_button_label.width()).unwrap_or(u16::MAX);
        app.pkgb_reload_button_rect = Some((reload_btn_x, btn_y, reload_btn_w, 1));
    }
    let pkgb = Paragraph::new(visible_lines)
        .style(Style::default().fg(th.text).bg(th.base))
        .wrap(Wrap { trim: false })
        .block(
            Block::default()
                .title(Line::from(pkgb_title_spans))
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .border_style(Style::default().fg(th.surface2)),
        );
    f.render_widget(pkgb, pkgb_area);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/ui/middle/search.rs | src/ui/middle/search.rs | use ratatui::{
Frame,
prelude::{Position, Rect},
style::{Modifier, Style},
text::{Line, Span},
widgets::{Block, BorderType, Borders, Paragraph},
};
use crate::i18n;
use crate::state::AppState;
use crate::state::types::AppMode;
use crate::theme::theme;
/// What: Build input line spans with optional selection highlighting.
///
/// Inputs:
/// - `app`: Application state (input, caret, selection, focus, normal mode)
/// - `search_focused`: Whether search pane is focused
/// - `th`: Theme
///
/// Output:
/// - Vector of spans for the input line
///
/// Details:
/// - Shows "> " prefix; in normal mode, highlights selected text with lavender background.
/// - Uses `news_search_input`/`news_search_caret` in News mode, otherwise uses `app.input`/`app.search_caret`.
fn build_input_spans<'a>(
    app: &AppState,
    search_focused: bool,
    th: &'a crate::theme::Theme,
) -> Vec<Span<'a>> {
    // Prompt prefix; sapphire when the search pane has focus.
    let mut input_spans: Vec<Span> = Vec::new();
    input_spans.push(Span::styled(
        "> ",
        Style::default().fg(if search_focused {
            th.sapphire
        } else {
            th.overlay1
        }),
    ));
    // Determine which input field and caret to use based on mode
    let (input_text, caret_ci, select_anchor) = if matches!(app.app_mode, AppMode::News) {
        (
            &app.news_search_input,
            app.news_search_caret,
            app.news_search_select_anchor,
        )
    } else {
        (&app.input, app.search_caret, app.search_select_anchor)
    };
    // Foreground for the non-selected portions of the input text.
    let text_fg = if search_focused { th.text } else { th.subtext0 };
    if search_focused && app.search_normal_mode {
        // Selection runs from anchor to caret in either order; with no anchor
        // it collapses to the caret (empty selection).
        let (sel_from_ci, sel_to_ci) = select_anchor.map_or((caret_ci, caret_ci), |anchor| {
            (anchor.min(caret_ci), anchor.max(caret_ci))
        });
        // Clamp char indices to the actual character count.
        let cc = input_text.chars().count();
        let sel_from_ci = sel_from_ci.min(cc);
        let sel_to_ci = sel_to_ci.min(cc);
        // Convert a character index to a byte offset for slicing; an index at
        // or past the end maps to `len()`. This replaces two copies of the
        // same `char_indices().nth(..)` walk.
        let byte_at = |ci: usize| {
            input_text
                .char_indices()
                .nth(ci)
                .map_or(input_text.len(), |(b, _)| b)
        };
        let from_b = byte_at(sel_from_ci);
        let to_b = byte_at(sel_to_ci);
        // Slicing on these offsets is safe: `byte_at` always returns a char
        // boundary (or the string end).
        let pre = &input_text[..from_b];
        let sel = &input_text[from_b..to_b];
        let post = &input_text[to_b..];
        if !pre.is_empty() {
            input_spans.push(Span::styled(pre.to_string(), Style::default().fg(text_fg)));
        }
        if sel_from_ci != sel_to_ci {
            // Highlight the selected range: crust on lavender, bold.
            input_spans.push(Span::styled(
                sel.to_string(),
                Style::default()
                    .fg(th.crust)
                    .bg(th.lavender)
                    .add_modifier(Modifier::BOLD),
            ));
        }
        if !post.is_empty() {
            input_spans.push(Span::styled(post.to_string(), Style::default().fg(text_fg)));
        }
    } else {
        // No selection highlighting: render the whole input as a single span.
        input_spans.push(Span::styled(
            input_text.as_str().to_string(),
            Style::default().fg(text_fg),
        ));
    }
    input_spans
}
/// What: Build title line with fuzzy/normal mode indicator.
///
/// Inputs:
/// - `app`: Application state
/// - `search_focused`: Whether search pane is focused
/// - `th`: Theme
///
/// Output:
/// - Tuple containing: title line with mode indicator, base title length, and mode text
///
/// Details:
/// - Returns base title length for rectangle calculation
/// - Returns mode text to avoid duplicate i18n lookups
fn build_title_line<'a>(
    app: &AppState,
    search_focused: bool,
    th: &'a crate::theme::Theme,
) -> (Line<'a>, usize, String) {
    // Pick the base title: fixed label in News mode, otherwise the
    // focused/unfocused i18n variant.
    let base = if matches!(app.app_mode, AppMode::News) {
        "News search".to_string()
    } else if search_focused {
        i18n::t(app, "app.titles.search_focused")
    } else {
        i18n::t(app, "app.titles.search")
    };
    // Character count of the base title; callers need it for rect math,
    // so compute it before `base` is moved into the span below.
    let base_title_len = base.chars().count();
    let base_color = if search_focused { th.mauve } else { th.overlay1 };
    // Fuzzy/normal search-mode indicator appended after the title,
    // resolved once to avoid duplicate i18n lookups.
    let (mode_text, mode_color) = if app.fuzzy_search_enabled {
        (i18n::t(app, "app.search_mode_fuzzy"), th.sapphire)
    } else {
        (i18n::t(app, "app.search_mode_normal"), th.subtext0)
    };
    let line = Line::from(vec![
        Span::styled(base, Style::default().fg(base_color)),
        Span::styled(format!(" [{mode_text}]"), Style::default().fg(mode_color)),
    ]);
    (line, base_title_len, mode_text)
}
/// What: Calculate and store fuzzy indicator rectangle.
///
/// Inputs:
/// - `app`: Mutable application state
/// - `area`: Search input area
/// - `base_title_len`: Length of base title text
/// - `mode_text`: Mode indicator text
///
/// Output:
/// - None (modifies app state)
///
/// Details:
/// - Stores clickable rectangle for mouse interaction
fn store_fuzzy_indicator_rect(
    app: &mut AppState,
    area: Rect,
    base_title_len: usize,
    mode_text: &str,
) {
    // " [" + text + "]" adds three characters around the mode text.
    let indicator_len = mode_text.chars().count() + 3;
    // Cap the clickable width at 20 columns.
    let indicator_w = u16::try_from(indicator_len.min(20)).unwrap_or(u16::MAX);
    // Width remaining for the title once the indicator is reserved.
    let available = area.width.saturating_sub(indicator_w);
    // X where the title ends (left border + title), with the title length
    // clamped to the space left over for the indicator.
    let clamped_title_len = base_title_len
        .min(available as usize)
        .min(u16::MAX as usize);
    let title_end_x = area
        .x
        .saturating_add(1)
        .saturating_add(u16::try_from(clamped_title_len).unwrap_or(u16::MAX));
    // Single-row clickable rect on the top border, right of the title.
    app.fuzzy_indicator_rect = Some((title_end_x, area.y, indicator_w, 1));
}
/// What: Render the search input widget in the center of the middle row.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Mutable application state (input, caret, selection, focus, fuzzy indicator rect)
/// - `area`: Target rectangle for the search input
///
/// Output:
/// - Draws the search input with optional text selection highlighting and sets cursor position.
///
/// Details:
/// - Shows "> " prefix; in normal mode, highlights selected text with lavender background.
/// - Cursor position is calculated based on caret index and character width.
/// - Records fuzzy indicator rectangle for mouse click detection.
pub fn render_search(f: &mut Frame, app: &mut AppState, area: Rect) {
    let th = theme();
    let search_focused = matches!(app.focus, crate::state::Focus::Search);
    // Build input line with optional selection highlight in Search normal mode
    let input_spans = build_input_spans(app, search_focused, &th);
    let input_line = Line::from(input_spans);
    // Build title with fuzzy/normal indicator
    let (search_title, base_title_len, mode_text) = build_title_line(app, search_focused, &th);
    let input = Paragraph::new(input_line)
        .style(
            Style::default()
                .fg(if search_focused { th.text } else { th.subtext0 })
                .bg(th.base),
        )
        .block(
            Block::default()
                .title(search_title)
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .border_style(Style::default().fg(if search_focused {
                    th.mauve
                } else {
                    th.surface1
                })),
        );
    f.render_widget(input, area);
    // Store clickable rectangle for fuzzy indicator (right side of title)
    store_fuzzy_indicator_rect(app, area, base_title_len, &mode_text);
    // Cursor in input: clamp to the column just inside the right border.
    let right = area.x + area.width.saturating_sub(1);
    // Cursor x: align to caret in characters from start (prefix "> ")
    // Use news_search_input/news_search_caret in News mode, otherwise use app.input/app.search_caret
    let (input_text, caret_pos) = if matches!(app.app_mode, AppMode::News) {
        (&app.news_search_input, app.news_search_caret)
    } else {
        (&app.input, app.search_caret)
    };
    let caret_cols: u16 = if search_focused {
        // Walk characters up to the caret so the caret index is effectively
        // clamped to the actual character count of the input.
        let mut ci: u16 = 0;
        let mut it = input_text.chars();
        for _ in 0..caret_pos {
            if it.next().is_some() {
                ci = ci.saturating_add(1);
            } else {
                break;
            }
        }
        ci
    } else {
        // NOTE(review): this uses byte length, not character count; with
        // multi-byte input the unfocused caret column may overshoot — confirm
        // whether this is intended (the cursor is not shown when unfocused).
        u16::try_from(input_text.len()).unwrap_or(u16::MAX)
    };
    // +1 for the left border, +2 for the "> " prompt prefix.
    let x = std::cmp::min(area.x + 1 + 2 + caret_cols, right);
    let y = area.y + 1;
    f.set_cursor_position(Position::new(x, y));
}
#[cfg(test)]
mod tests {
    use super::*;
    use ratatui::{Terminal, backend::TestBackend};
    /// What: Initialize minimal English translations for search tests.
    ///
    /// Inputs:
    /// - `app`: `AppState` to populate with translations
    ///
    /// Output:
    /// - Populates `app.translations` and `app.translations_fallback` with search-related translations
    ///
    /// Details:
    /// - Sets up only the translations needed for search rendering tests.
    fn init_test_translations(app: &mut crate::state::AppState) {
        use std::collections::HashMap;
        let mut translations = HashMap::new();
        translations.insert("app.titles.search".to_string(), "Search".to_string());
        translations.insert(
            "app.titles.search_focused".to_string(),
            "Search".to_string(),
        );
        // Fallback mirrors the primary map so missing keys resolve identically.
        app.translations = translations.clone();
        app.translations_fallback = translations;
    }
    /// What: Verify search input renders and sets cursor position correctly when focused.
    ///
    /// Inputs:
    /// - Search input with text "hello" and caret at position 3
    ///
    /// Output:
    /// - Search input renders without panic, cursor position is set correctly.
    ///
    /// Details:
    /// - Tests that cursor position calculation accounts for the "> " prefix and character width.
    #[test]
    fn search_renders_and_sets_cursor_when_focused() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.focus = crate::state::Focus::Search;
        app.input = "hello".into();
        app.search_caret = 3;
        term.draw(|f| {
            let area = f.area();
            render_search(f, &mut app, area);
        })
        .expect("Failed to render search pane");
        // Cursor position is set by set_cursor_position - verify rendering succeeded
        // TestBackend doesn't expose cursor position directly, but rendering
        // completing without panic verifies the function works correctly
    }
    /// What: Verify search input renders without selection highlighting when not in normal mode.
    ///
    /// Inputs:
    /// - Search input with text, focused but not in normal mode
    ///
    /// Output:
    /// - Search input renders without selection spans.
    ///
    /// Details:
    /// - Tests that selection highlighting only appears when both focused and in normal mode.
    #[test]
    fn search_renders_without_selection_when_not_normal_mode() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.focus = crate::state::Focus::Search;
        app.input = "test".into();
        // Anchor is set but normal mode is off — no highlight must be produced.
        app.search_normal_mode = false;
        app.search_caret = 2;
        app.search_select_anchor = Some(1);
        term.draw(|f| {
            let area = f.area();
            render_search(f, &mut app, area);
        })
        .expect("Failed to render search pane without selection");
        // Should render without panic even with selection anchor set but not in normal mode
    }
    /// What: Verify search input renders with text selection highlighting in normal mode.
    ///
    /// Inputs:
    /// - Search input with text "hello", caret at 3, anchor at 1, in normal mode
    ///
    /// Output:
    /// - Search input renders with selection highlighting between anchor and caret.
    ///
    /// Details:
    /// - Tests that selected text (characters 1-3) is highlighted with lavender background.
    #[test]
    fn search_renders_with_selection_in_normal_mode() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.focus = crate::state::Focus::Search;
        app.input = "hello".into();
        app.search_normal_mode = true;
        app.search_caret = 3;
        app.search_select_anchor = Some(1);
        term.draw(|f| {
            let area = f.area();
            render_search(f, &mut app, area);
        })
        .expect("Failed to render search pane with selection");
        // Should render with selection highlighting
    }
    /// What: Verify search input renders correctly when not focused.
    ///
    /// Inputs:
    /// - Search input with text, but focus is on another pane
    ///
    /// Output:
    /// - Search input renders with unfocused styling.
    ///
    /// Details:
    /// - Tests that unfocused search uses different colors and cursor position calculation.
    #[test]
    fn search_renders_when_unfocused() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        // Focus a different pane to exercise the unfocused code path.
        app.focus = crate::state::Focus::Recent;
        app.input = "test".into();
        app.search_caret = 2;
        term.draw(|f| {
            let area = f.area();
            render_search(f, &mut app, area);
        })
        .expect("Failed to render unfocused search pane");
        // Should render without panic with unfocused styling
    }
    /// What: Verify cursor position calculation handles empty input correctly.
    ///
    /// Inputs:
    /// - Empty search input with caret at 0
    ///
    /// Output:
    /// - Cursor position is set after the "> " prefix.
    ///
    /// Details:
    /// - Tests edge case where input is empty and caret is at start.
    #[test]
    fn search_handles_empty_input() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("failed to create test terminal");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.focus = crate::state::Focus::Search;
        app.input = String::new();
        app.search_caret = 0;
        term.draw(|f| {
            let area = f.area();
            render_search(f, &mut app, area);
        })
        .expect("failed to draw test terminal");
        // Should handle empty input without panic
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/ui/middle/install.rs | src/ui/middle/install.rs | use ratatui::{
Frame,
prelude::Rect,
style::{Modifier, Style},
text::{Line, Span},
widgets::{Block, BorderType, Borders, List, ListItem, Paragraph},
};
use unicode_width::UnicodeWidthStr;
use crate::i18n;
use crate::state::{AppState, PackageItem, Source};
use crate::theme::theme;
/// What: Build list items for a package list with selection and loading indicators.
///
/// Inputs:
/// - `indices`: Display indices into the package list
/// - `packages`: Full package list
/// - `selected_idx`: Currently selected display index (`None` when nothing is selected)
/// - `focused`: Whether the pane is focused
/// - `is_loading`: Predicate reporting whether a package (by name) is currently being processed
///
/// Output:
/// - Vector of `ListItem` widgets ready for rendering
///
/// Details:
/// - Adds selection indicator, loading indicator, popularity, source, name, and version.
/// - Indices out of bounds for `packages` are silently skipped (`filter_map`).
pub(super) fn build_package_list_items<'a, F>(
    indices: &[usize],
    packages: &'a [PackageItem],
    selected_idx: Option<usize>,
    focused: bool,
    is_loading: F,
) -> Vec<ListItem<'a>>
where
    F: Fn(&str) -> bool,
{
    let th = theme();
    indices
        .iter()
        .enumerate()
        .filter_map(|(display_idx, &i)| packages.get(i).map(|p| (display_idx, p)))
        .map(|(display_idx, p)| {
            // Source label and color: repo name (green) or "AUR" (yellow).
            let (src, color) = match &p.source {
                Source::Official { repo, .. } => (repo.clone(), th.green),
                Source::Aur => ("AUR".to_string(), th.yellow),
            };
            let mut segs: Vec<Span> = Vec::new();
            // Add selection indicator manually if this item is selected
            let is_selected = selected_idx == Some(display_idx);
            if is_selected {
                segs.push(Span::styled(
                    "▶ ",
                    Style::default()
                        .fg(if focused { th.text } else { th.subtext0 })
                        .bg(if focused { th.surface2 } else { th.base }),
                ));
            } else {
                // Add spacing to align with selected items
                segs.push(Span::raw("  "));
            }
            // Add loading indicator if package is being processed
            if is_loading(&p.name) {
                segs.push(Span::styled(
                    "⟳ ",
                    Style::default()
                        .fg(th.sapphire)
                        .bg(if is_selected && focused {
                            th.surface2
                        } else {
                            th.base
                        })
                        .add_modifier(Modifier::BOLD),
                ));
            } else {
                // Add spacing when not loading to maintain alignment
                segs.push(Span::raw("  "));
            }
            // Popularity is optional; omitted entirely when absent.
            if let Some(pop) = p.popularity {
                segs.push(Span::styled(
                    format!("Pop: {pop:.2} "),
                    Style::default().fg(th.overlay1),
                ));
            }
            segs.push(Span::styled(format!("{src} "), Style::default().fg(color)));
            segs.push(Span::styled(
                p.name.clone(),
                Style::default()
                    .fg(if focused { th.text } else { th.subtext0 })
                    .add_modifier(Modifier::BOLD),
            ));
            segs.push(Span::styled(
                format!(" {}", p.version),
                Style::default().fg(if focused { th.overlay1 } else { th.surface2 }),
            ));
            ListItem::new(Line::from(segs))
        })
        .collect()
}
/// What: Render the normal Install list (single right pane) with Import/Export buttons.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (install list, focus, selection)
/// - `area`: Target rectangle for the install pane
///
/// Output:
/// - Draws the install list and Import/Export buttons, records inner rect for mouse hit-testing.
///
/// Details:
/// - Shows filtered install list items with selection indicators and loading indicators.
/// - Import/Export buttons are rendered at the bottom border.
pub fn render_install(f: &mut Frame, app: &mut AppState, area: Rect) {
    let th = theme();
    let focused = matches!(app.focus, crate::state::Focus::Install);
    // Visible entries after filtering, plus the current selection index.
    let display_indices: Vec<usize> = crate::ui::helpers::filtered_install_indices(app);
    let selection = app.install_state.selected();
    let items = build_package_list_items(
        &display_indices,
        &app.install_list,
        selection,
        focused,
        |name| crate::ui::helpers::is_package_loading_preflight(app, name),
    );
    // Title and colors reflect focus state.
    let title = if focused {
        i18n::t(app, "app.titles.install_list_focused")
    } else {
        i18n::t(app, "app.titles.install_list")
    };
    let title_color = if focused { th.mauve } else { th.overlay1 };
    let border_color = if focused { th.mauve } else { th.surface1 };
    let block = Block::default()
        .title(Line::from(vec![Span::styled(
            title,
            Style::default().fg(title_color),
        )]))
        .borders(Borders::ALL)
        .border_type(BorderType::Rounded)
        .border_style(Style::default().fg(border_color));
    let list = List::new(items)
        .style(
            Style::default()
                .fg(if focused { th.text } else { th.subtext0 })
                .bg(th.base),
        )
        .block(block)
        .highlight_style(Style::default().fg(th.text).bg(th.surface2))
        .highlight_symbol(""); // indicator is drawn manually per item
    f.render_stateful_widget(list, area, &mut app.install_state);
    // Record the inner rect (borders excluded) for mouse hit-testing.
    app.install_rect = Some((
        area.x + 1,
        area.y + 1,
        area.width.saturating_sub(2),
        area.height.saturating_sub(2),
    ));
    // Bottom border action buttons: Export (left) and Import (right)
    render_buttons(f, app, area, focused);
}
/// What: Render Import/Export buttons at the bottom border of the install pane.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (for i18n and storing button rects)
/// - `area`: Target rectangle for the install pane
/// - `install_focused`: Whether the install pane is focused
///
/// Output:
/// - Draws Import and Export buttons, records their rects for mouse hit-testing.
///
/// Details:
/// - Import button is on the far right, Export is to its left with a 2-space gap.
/// - First character of each label is underlined to indicate keyboard shortcut.
fn render_buttons(f: &mut Frame, app: &mut AppState, area: Rect, install_focused: bool) {
let th = theme();
let import_label = i18n::t(app, "app.actions.import");
let export_label = i18n::t(app, "app.actions.export");
// Style similar to other title buttons
let btn_style_active = Style::default()
.fg(th.crust)
.bg(th.green)
.add_modifier(Modifier::BOLD);
let btn_style_inactive = Style::default()
.fg(th.mauve)
.bg(th.surface2)
.add_modifier(Modifier::BOLD);
let style = if install_focused {
btn_style_active
} else {
btn_style_inactive
};
let inner_w = area.width.saturating_sub(2);
let sy = area.y + area.height.saturating_sub(1);
// Import button on the far right
// Use Unicode display width, not byte length, to handle wide characters
let import_w = u16::try_from(import_label.width()).unwrap_or(u16::MAX);
let import_sx = area.x + 1 + inner_w.saturating_sub(import_w);
let import_rect = Rect {
x: import_sx,
y: sy,
width: import_w.min(inner_w),
height: 1,
};
// Split label for styling: first character underlined, rest normal
let import_first_char = import_label
.chars()
.next()
.map(|c| c.to_string())
.unwrap_or_default();
let import_suffix = import_label.chars().skip(1).collect::<String>();
let import_line = Paragraph::new(Line::from(vec![
Span::styled(import_first_char, style.add_modifier(Modifier::UNDERLINED)),
Span::styled(import_suffix, style),
]));
app.install_import_rect = Some((
import_rect.x,
import_rect.y,
import_rect.width,
import_rect.height,
));
f.render_widget(import_line, import_rect);
// Export button to the left of Import with 2 spaces gap
let gap: u16 = 2;
let export_w = u16::try_from(export_label.width()).unwrap_or(u16::MAX);
let export_max_w = inner_w;
let export_right = import_rect.x.saturating_sub(gap);
let export_sx = if export_w > export_right.saturating_sub(area.x + 1) {
area.x + 1
} else {
export_right.saturating_sub(export_w)
};
let export_rect = Rect {
x: export_sx,
y: sy,
width: export_w.min(export_max_w),
height: 1,
};
// Split label for styling: first character underlined, rest normal
let export_first_char = export_label
.chars()
.next()
.map(|c| c.to_string())
.unwrap_or_default();
let export_suffix = export_label.chars().skip(1).collect::<String>();
let export_line = Paragraph::new(Line::from(vec![
Span::styled(export_first_char, style.add_modifier(Modifier::UNDERLINED)),
Span::styled(export_suffix, style),
]));
app.install_export_rect = Some((
export_rect.x,
export_rect.y,
export_rect.width,
export_rect.height,
));
f.render_widget(export_line, export_rect);
}
#[cfg(test)]
mod tests {
    use super::*;
    use ratatui::{Terminal, backend::TestBackend};
    /// What: Initialize minimal English translations for install tests.
    ///
    /// Inputs:
    /// - `app`: `AppState` to populate with translations
    ///
    /// Output:
    /// - Populates `app.translations` and `app.translations_fallback` with install-related translations
    ///
    /// Details:
    /// - Sets up only the translations needed for install rendering tests.
    fn init_test_translations(app: &mut crate::state::AppState) {
        use std::collections::HashMap;
        let mut translations = HashMap::new();
        translations.insert("app.titles.install_list".to_string(), "Install".to_string());
        translations.insert(
            "app.titles.install_list_focused".to_string(),
            "Install".to_string(),
        );
        translations.insert("app.actions.import".to_string(), "Import".to_string());
        translations.insert("app.actions.export".to_string(), "Export".to_string());
        // Fallback mirrors the primary map so missing keys resolve identically.
        app.translations = translations.clone();
        app.translations_fallback = translations;
    }
    /// What: Verify install list renders and records rect when visible.
    ///
    /// Inputs:
    /// - Install pane is visible with some packages in install list
    ///
    /// Output:
    /// - Install list renders and `app.install_rect` is set to inner rectangle.
    ///
    /// Details:
    /// - Tests that rect is recorded with borders excluded.
    #[test]
    fn install_renders_and_records_rect() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.install_list.push(crate::state::PackageItem {
            name: "test-package".to_string(),
            version: "1.0.0".to_string(),
            description: String::new(),
            source: crate::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        });
        term.draw(|f| {
            let area = f.area();
            render_install(f, &mut app, area);
        })
        .expect("Failed to render install pane");
        // All three hit-testing rects must be recorded by a single render.
        assert!(app.install_rect.is_some());
        assert!(app.install_import_rect.is_some());
        assert!(app.install_export_rect.is_some());
    }
    /// What: Verify install list renders buttons with correct rects.
    ///
    /// Inputs:
    /// - Install pane with Import/Export buttons
    ///
    /// Output:
    /// - Import and Export button rects are recorded correctly.
    ///
    /// Details:
    /// - Tests that Import button is on the right, Export is to its left with gap.
    #[test]
    fn install_renders_buttons_with_correct_rects() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        term.draw(|f| {
            let area = f.area();
            render_install(f, &mut app, area);
        })
        .expect("Failed to render install pane buttons");
        let import_rect = app
            .install_import_rect
            .expect("install_import_rect should be set after rendering");
        let export_rect = app
            .install_export_rect
            .expect("install_export_rect should be set after rendering");
        // Import should be to the right of Export
        assert!(import_rect.0 > export_rect.0);
        // Both should be on the same row (bottom border)
        assert_eq!(import_rect.1, export_rect.1);
        // Import should be at or near the far right (accounting for borders)
        // The area width is 100, so with borders (x+1, width-2), the right edge is at 99
        assert!(import_rect.0 + import_rect.2 >= 99);
    }
    /// What: Verify install list renders with selection indicators.
    ///
    /// Inputs:
    /// - Install list with packages and a selected item
    ///
    /// Output:
    /// - Selected item displays selection indicator "▶ ".
    ///
    /// Details:
    /// - Tests that selection state affects rendering.
    #[test]
    fn install_renders_with_selection() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.install_list.push(crate::state::PackageItem {
            name: "package1".to_string(),
            version: "1.0.0".to_string(),
            description: String::new(),
            source: crate::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        });
        app.install_list.push(crate::state::PackageItem {
            name: "package2".to_string(),
            version: "2.0.0".to_string(),
            description: String::new(),
            source: crate::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        });
        // Select the first display entry.
        app.install_state.select(Some(0));
        term.draw(|f| {
            let area = f.area();
            render_install(f, &mut app, area);
        })
        .expect("Failed to render install pane with selection");
        // Should render without panic with selection
        assert!(app.install_rect.is_some());
    }
    /// What: Verify install list renders differently when focused vs unfocused.
    ///
    /// Inputs:
    /// - Install list rendered first unfocused, then focused
    ///
    /// Output:
    /// - Button styles change based on focus state.
    ///
    /// Details:
    /// - Tests that focus affects button styling (active vs inactive).
    #[test]
    fn install_renders_with_focus_styling() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        // Render unfocused
        app.focus = crate::state::Focus::Search;
        term.draw(|f| {
            let area = f.area();
            render_install(f, &mut app, area);
        })
        .expect("Failed to render unfocused install pane");
        let unfocused_import_rect = app.install_import_rect;
        // Render focused
        app.focus = crate::state::Focus::Install;
        term.draw(|f| {
            let area = f.area();
            render_install(f, &mut app, area);
        })
        .expect("Failed to render focused install pane");
        let focused_import_rect = app.install_import_rect;
        // Rects should be the same position, but styling differs
        assert_eq!(unfocused_import_rect, focused_import_rect);
    }
    /// What: Verify install list handles empty list correctly.
    ///
    /// Inputs:
    /// - Install list with no packages
    ///
    /// Output:
    /// - Install list renders without panic.
    ///
    /// Details:
    /// - Tests edge case where install list is empty.
    #[test]
    fn install_handles_empty_list() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.install_list.clear();
        term.draw(|f| {
            let area = f.area();
            render_install(f, &mut app, area);
        })
        .expect("Failed to render empty install pane");
        // Should render empty list without panic; rects are still recorded.
        assert!(app.install_rect.is_some());
        assert!(app.install_import_rect.is_some());
        assert!(app.install_export_rect.is_some());
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/ui/middle/mod.rs | src/ui/middle/mod.rs | use ratatui::{
Frame,
layout::{Constraint, Direction, Layout},
prelude::Rect,
style::Style,
};
use crate::i18n;
use crate::state::types::AppMode;
use crate::state::{AppState, Focus};
use crate::theme::theme;
/// Install pane rendering module.
mod install;
/// Installed-only pane rendering module.
mod installed_only;
/// Recent packages pane rendering module.
mod recent;
/// Search input pane rendering module.
mod search;
/// What: Render the middle row: Recent (left), Search input (center), Install list (right).
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Mutable application state (focus, rects, lists, input)
/// - `area`: Target rectangle for the middle row
///
/// Output:
/// - Draws middle panes and updates rects for mouse hit-testing and cursor position.
///
/// Details:
/// - Titles and colors reflect focus; when installed-only mode is active the right column shows
///   Downgrade and Remove subpanes side-by-side (and is widened by 1.5x).
/// - Records inner rects for Recent/Install/Downgrade and sets the caret position for the Search input.
/// - Hidden side panes contribute a 0% column, so their share flows to the center pane.
pub fn render_middle(f: &mut Frame, app: &mut AppState, area: Rect) {
    // News mode swaps in an entirely different middle-row layout.
    if matches!(app.app_mode, AppMode::News) {
        render_middle_news(f, app, area);
        return;
    }
    // Column percentages: hidden panes collapse to 0 and donate their share
    // to the center Search pane.
    let left_pct: u16 = if app.show_recent_pane {
        app.layout_left_pct.min(100)
    } else {
        0
    };
    let mut right_pct: u16 = if app.show_install_pane {
        app.layout_right_pct.min(100)
    } else {
        0
    };
    // Installed-only mode hosts two lists on the right, so widen that column
    // by 1.5x, clamped to whatever the left pane leaves available.
    if app.installed_only_mode && right_pct > 0 {
        let available = 100u16.saturating_sub(left_pct);
        let scaled: u16 = ((u32::from(right_pct) * 3) / 2)
            .min(u32::from(u16::MAX))
            .try_into()
            .unwrap_or(u16::MAX);
        right_pct = scaled.min(available);
    }
    let center_pct = 100u16
        .saturating_sub(left_pct)
        .saturating_sub(right_pct)
        .min(100);
    let columns = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([
            Constraint::Percentage(left_pct),
            Constraint::Percentage(center_pct),
            Constraint::Percentage(right_pct),
        ])
        .split(area);
    // Center: search input (also positions the caret).
    search::render_search(f, app, columns[1]);
    // Left: recent searches (no-op internally when hidden or zero-width).
    recent::render_recent(f, app, columns[0]);
    // Right: install/remove lists, or cleanup when the pane is unavailable.
    if app.show_install_pane && columns[2].width > 0 {
        if app.installed_only_mode {
            installed_only::render_installed_only(f, app, columns[2]);
        } else {
            install::render_install(f, app, columns[2]);
        }
    } else {
        app.install_rect = None;
        // A hidden pane must not keep keyboard focus; hand it back to Search.
        if matches!(app.focus, Focus::Install) {
            app.focus = Focus::Search;
        }
    }
}
/// What: Render the middle row when news modal is active.
///
/// Inputs:
/// - `f`: Frame to render into.
/// - `app`: Application state.
/// - `area`: Area to render within.
///
/// Output: Renders the middle row with news content.
///
/// Details: Renders a different layout when the news modal is active.
/// - Side panes use a fixed 25% width when visible; hidden panes donate their
///   share to the center search pane.
/// - The right pane lists news bookmarks and reuses `install_state`/`install_rect`
///   for selection and mouse hit-testing while news mode is active.
fn render_middle_news(f: &mut Frame, app: &mut AppState, area: Rect) {
    // Fixed 25% side columns when visible; 0 collapses the column entirely.
    let left_pct = if app.show_news_history_pane { 25 } else { 0 };
    let right_pct = if app.show_news_bookmarks_pane { 25 } else { 0 };
    let center_pct = 100u16
        .saturating_sub(left_pct)
        .saturating_sub(right_pct)
        .min(100);
    let middle = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([
            Constraint::Percentage(left_pct),
            Constraint::Percentage(center_pct),
            Constraint::Percentage(right_pct),
        ])
        .split(area);
    // Center search reuses existing search rendering/cursor
    search::render_search(f, app, middle[1]);
    // Left: news search history
    recent::render_news_recent(f, app, middle[0]);
    if app.show_news_bookmarks_pane && middle[2].width > 0 {
        let th = theme();
        // Bookmarks occupy the Install focus slot while news mode is active.
        let bookmarks_focused = matches!(app.focus, Focus::Install);
        let items: Vec<ratatui::widgets::ListItem> = app
            .news_bookmarks
            .iter()
            .map(|b| {
                ratatui::widgets::ListItem::new(ratatui::text::Span::styled(
                    b.item.title.clone(),
                    Style::default().fg(if bookmarks_focused {
                        th.text
                    } else {
                        th.subtext0
                    }),
                ))
            })
            .collect();
        // Default the selection to the first bookmark when none is chosen yet.
        if app.install_state.selected().is_none() && !app.news_bookmarks.is_empty() {
            app.install_state.select(Some(0));
        }
        let title = if bookmarks_focused {
            i18n::t(app, "app.titles.news_bookmarks_focused")
        } else {
            i18n::t(app, "app.titles.news_bookmarks")
        };
        let list = ratatui::widgets::List::new(items)
            .block(
                ratatui::widgets::Block::default()
                    .borders(ratatui::widgets::Borders::ALL)
                    .border_type(ratatui::widgets::BorderType::Rounded)
                    .title(title)
                    .border_style(Style::default().fg(if bookmarks_focused {
                        th.mauve
                    } else {
                        th.surface1
                    })),
            )
            .style(
                Style::default()
                    .fg(if bookmarks_focused {
                        th.text
                    } else {
                        th.subtext0
                    })
                    .bg(th.base),
            )
            .highlight_style(Style::default().fg(th.text).bg(th.surface2))
            .highlight_symbol("▶ ");
        f.render_stateful_widget(list, middle[2], &mut app.install_state);
        // NOTE(review): unlike other panes this rect includes the borders
        // (no +1 inset) — presumably intentional for news mode; confirm
        // against the mouse hit-testing handlers.
        app.install_rect = Some((middle[2].x, middle[2].y, middle[2].width, middle[2].height));
    } else {
        app.install_rect = None;
        // A hidden pane must not keep keyboard focus; hand it back to Search.
        if matches!(app.focus, Focus::Install) {
            app.focus = Focus::Search;
        }
    }
}
#[cfg(test)]
mod tests {
    /// What: Verify middle-pane rendering captures layout rectangles and realigns focus when the install pane hides.
    ///
    /// Inputs:
    /// - Initial render with recent and install panes visible, followed by a second pass hiding the install pane while focused there.
    ///
    /// Output:
    /// - Rectangles recorded for both panes initially, and focus reverts to `Search` once the install pane is hidden.
    ///
    /// Details:
    /// - Uses a `TestBackend` to drive rendering without interactive user input.
    #[test]
    fn middle_sets_rects_and_cursor_positions() {
        use ratatui::{Terminal, backend::TestBackend};
        let backend = TestBackend::new(120, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState {
            show_recent_pane: true,
            show_install_pane: true,
            focus: crate::state::Focus::Search,
            input: "hello".into(),
            ..Default::default()
        };
        term.draw(|f| {
            let area = f.area();
            super::render_middle(f, &mut app, area);
        })
        .expect("Failed to render middle pane");
        // Both side panes were visible, so both rects must have been recorded.
        assert!(app.recent_rect.is_some());
        assert!(app.install_rect.is_some());
        // Move focus to Install and re-render; ensure focus fix-up when hidden
        app.focus = crate::state::Focus::Install;
        app.show_install_pane = false;
        term.draw(|f| {
            let area = f.area();
            super::render_middle(f, &mut app, area);
        })
        .expect("Failed to render middle pane");
        assert!(matches!(app.focus, crate::state::Focus::Search));
    }
    /// What: Verify layout calculation handles installed-only mode enlargement.
    ///
    /// Inputs:
    /// - Installed-only mode with right pane percentage set
    ///
    /// Output:
    /// - Right pane is enlarged to 1.5x its original size (up to maximum available).
    ///
    /// Details:
    /// - Tests that installed-only mode widens the right pane to accommodate two lists.
    #[test]
    fn middle_enlarges_right_pane_in_installed_only_mode() {
        use ratatui::{Terminal, backend::TestBackend};
        let backend = TestBackend::new(120, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState {
            show_recent_pane: true,
            show_install_pane: true,
            installed_only_mode: true,
            layout_left_pct: 20,
            layout_right_pct: 30, // Should become 45 (30 * 1.5) if space allows
            ..Default::default()
        };
        term.draw(|f| {
            let area = f.area();
            super::render_middle(f, &mut app, area);
        })
        .expect("Failed to render middle pane");
        // Should render without panic with enlarged right pane; installed-only
        // mode records both the Remove (install_rect) and Downgrade rects.
        assert!(app.install_rect.is_some());
        assert!(app.downgrade_rect.is_some());
    }
    /// What: Verify layout calculation reassigns space when panes are hidden.
    ///
    /// Inputs:
    /// - Recent pane hidden, install pane hidden
    ///
    /// Output:
    /// - Center pane (search) gets all available space.
    ///
    /// Details:
    /// - Tests that hidden panes don't take up space.
    #[test]
    fn middle_reassigns_space_when_panes_hidden() {
        use ratatui::{Terminal, backend::TestBackend};
        let backend = TestBackend::new(120, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState {
            show_recent_pane: false,
            show_install_pane: false,
            focus: crate::state::Focus::Search,
            input: "test".into(),
            ..Default::default()
        };
        term.draw(|f| {
            let area = f.area();
            super::render_middle(f, &mut app, area);
        })
        .expect("Failed to render middle pane");
        // Recent and install rects should be None
        assert!(app.recent_rect.is_none());
        assert!(app.install_rect.is_none());
    }
    /// What: Verify layout calculation handles zero-width areas gracefully.
    ///
    /// Inputs:
    /// - Area with zero width
    ///
    /// Output:
    /// - Rendering completes without panic.
    ///
    /// Details:
    /// - Tests edge case where terminal area is too small.
    #[test]
    fn middle_handles_zero_width_area() {
        use ratatui::{Terminal, backend::TestBackend};
        // A 0-wide backend produces a 0-wide frame area for the render call.
        let backend = TestBackend::new(0, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState {
            show_recent_pane: true,
            show_install_pane: true,
            ..Default::default()
        };
        term.draw(|f| {
            let area = f.area();
            super::render_middle(f, &mut app, area);
        })
        .expect("Failed to render middle pane");
        // Should handle zero width without panic
    }
    /// What: Verify focus switching when install pane is hidden while focused.
    ///
    /// Inputs:
    /// - Install pane is focused, then hidden
    ///
    /// Output:
    /// - Focus switches to Search.
    ///
    /// Details:
    /// - Tests that focus is automatically corrected when focused pane is hidden.
    #[test]
    fn middle_switches_focus_when_install_hidden() {
        use ratatui::{Terminal, backend::TestBackend};
        let backend = TestBackend::new(120, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState {
            show_install_pane: true,
            focus: crate::state::Focus::Install,
            ..Default::default()
        };
        // Hide install pane
        app.show_install_pane = false;
        term.draw(|f| {
            let area = f.area();
            super::render_middle(f, &mut app, area);
        })
        .expect("Failed to render middle pane");
        assert!(matches!(app.focus, crate::state::Focus::Search));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/ui/middle/installed_only.rs | src/ui/middle/installed_only.rs | use ratatui::{
Frame,
layout::{Constraint, Direction, Layout},
prelude::Rect,
style::Style,
text::{Line, Span},
widgets::{Block, BorderType, Borders, List},
};
use crate::i18n;
use crate::state::AppState;
use crate::theme::theme;
/// What: Render Downgrade and Remove lists side-by-side in installed-only mode.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (downgrade/remove lists, focus, selection)
/// - `area`: Target rectangle for the right pane (will be split 50/50)
///
/// Output:
/// - Draws Downgrade (left) and Remove (right) lists, records rects for mouse hit-testing.
///
/// Details:
/// - Splits the area horizontally into two equal panes.
/// - Maps `install_rect` onto the Remove list interior for mouse hit-testing.
/// - Import/Export buttons are not shown in installed-only mode.
pub fn render_installed_only(f: &mut Frame, app: &mut AppState, area: Rect) {
    let install_focused = matches!(app.focus, crate::state::Focus::Install);
    // Split the right pane into two equal halves: Downgrade | Remove.
    let halves = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
        .split(area);
    let downgrade_area = halves[0];
    let remove_area = halves[1];
    render_downgrade_list(f, app, downgrade_area, install_focused);
    render_remove_list(f, app, remove_area, install_focused);
    // Mouse hit-testing maps the Install rect onto the Remove list interior
    // (inset by one cell on every side for the borders).
    app.install_rect = Some((
        remove_area.x + 1,
        remove_area.y + 1,
        remove_area.width.saturating_sub(2),
        remove_area.height.saturating_sub(2),
    ));
    // Import/Export buttons are unavailable in installed-only mode.
    app.install_import_rect = None;
    app.install_export_rect = None;
}
/// What: Render the Downgrade list in the left half of the installed-only pane.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (downgrade list, focus, selection)
/// - `area`: Target rectangle for the downgrade list
/// - `install_focused`: Whether the install pane is focused
///
/// Output:
/// - Draws the downgrade list and records inner rect for mouse hit-testing.
///
/// Details:
/// - The focus predicate (install pane focused AND right-pane focus on Downgrade)
///   is computed once and reused for both item styling and the block border, so
///   the two can never disagree.
fn render_downgrade_list(f: &mut Frame, app: &mut AppState, area: Rect, install_focused: bool) {
    // All entries are shown (no filtering in installed-only mode).
    let dg_indices: Vec<usize> = (0..app.downgrade_list.len()).collect();
    let downgrade_selected_idx = app.downgrade_state.selected();
    // Single source of truth for the focused state of this sub-pane.
    let downgrade_is_focused = install_focused
        && matches!(
            app.right_pane_focus,
            crate::state::RightPaneFocus::Downgrade
        );
    let downgrade_items = crate::ui::middle::install::build_package_list_items(
        &dg_indices,
        &app.downgrade_list,
        downgrade_selected_idx,
        downgrade_is_focused,
        |name| crate::ui::helpers::is_package_loading_preflight(app, name),
    );
    let inner_rect = render_package_list_widget(
        f,
        downgrade_items,
        area,
        i18n::t(app, "app.titles.downgrade_list_focused"),
        i18n::t(app, "app.titles.downgrade_list"),
        downgrade_is_focused,
        &mut app.downgrade_state,
    );
    // Record the interior rect (borders excluded) for mouse hit-testing.
    app.downgrade_rect = Some((
        inner_rect.x,
        inner_rect.y,
        inner_rect.width,
        inner_rect.height,
    ));
}
/// What: Build a styled block for a package list widget.
///
/// Inputs:
/// - `title`: Block title text
/// - `is_focused`: Whether the list is focused
///
/// Output:
/// - Styled `Block` widget ready for use in a list.
///
/// Details:
/// - Focused panes get mauve title and border; unfocused ones use the muted
///   overlay/surface palette entries.
fn build_package_list_block(title: String, is_focused: bool) -> Block<'static> {
    let th = theme();
    let title_color = if is_focused { th.mauve } else { th.overlay1 };
    let border_color = if is_focused { th.mauve } else { th.surface1 };
    Block::default()
        .title(Line::from(vec![Span::styled(
            title,
            Style::default().fg(title_color),
        )]))
        .borders(Borders::ALL)
        .border_type(BorderType::Rounded)
        .border_style(Style::default().fg(border_color))
}
/// What: Build and render a package list widget with title and styling.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `items`: List items to render
/// - `area`: Target rectangle
/// - `title_focused`: Title for focused state
/// - `title_unfocused`: Title for unfocused state
/// - `is_focused`: Whether the list is focused
/// - `state`: List state for rendering
///
/// Output:
/// - Renders the list widget and returns the inner rect (borders excluded).
///
/// Details:
/// - Picks the title matching the focus state, wraps the list in a styled
///   block, renders it statefully, and reports the interior rectangle.
fn render_package_list_widget(
    f: &mut Frame,
    items: Vec<ratatui::widgets::ListItem<'_>>,
    area: Rect,
    title_focused: String,
    title_unfocused: String,
    is_focused: bool,
    state: &mut ratatui::widgets::ListState,
) -> Rect {
    let th = theme();
    let title = if is_focused {
        title_focused
    } else {
        title_unfocused
    };
    let text_color = if is_focused { th.text } else { th.subtext0 };
    let list = List::new(items)
        .style(Style::default().fg(text_color).bg(th.base))
        .block(build_package_list_block(title, is_focused))
        .highlight_style(Style::default().fg(th.text).bg(th.surface2))
        .highlight_symbol("");
    f.render_stateful_widget(list, area, state);
    // Interior rect: inset by one cell on each side for the rounded border.
    Rect {
        x: area.x + 1,
        y: area.y + 1,
        width: area.width.saturating_sub(2),
        height: area.height.saturating_sub(2),
    }
}
/// What: Render the Remove list in the right half of the installed-only pane.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (remove list, focus, selection)
/// - `area`: Target rectangle for the remove list
/// - `install_focused`: Whether the install pane is focused
///
/// Output:
/// - Draws the remove list.
///
/// Details:
/// - The focus predicate (install pane focused AND right-pane focus on Remove)
///   is computed once and reused for both item styling and the block border, so
///   the two can never disagree.
fn render_remove_list(f: &mut Frame, app: &mut AppState, area: Rect, install_focused: bool) {
    // All entries are shown (no filtering in installed-only mode).
    let rm_indices: Vec<usize> = (0..app.remove_list.len()).collect();
    let remove_selected_idx = app.remove_state.selected();
    // Single source of truth for the focused state of this sub-pane.
    let remove_is_focused =
        install_focused && matches!(app.right_pane_focus, crate::state::RightPaneFocus::Remove);
    let remove_items = crate::ui::middle::install::build_package_list_items(
        &rm_indices,
        &app.remove_list,
        remove_selected_idx,
        remove_is_focused,
        |name| crate::ui::helpers::is_package_loading_preflight(app, name),
    );
    render_package_list_widget(
        f,
        remove_items,
        area,
        i18n::t(app, "app.titles.remove_list_focused"),
        i18n::t(app, "app.titles.remove_list"),
        remove_is_focused,
        &mut app.remove_state,
    );
}
#[cfg(test)]
mod tests {
    use super::*;
    use ratatui::{Terminal, backend::TestBackend};
    /// What: Initialize minimal English translations for installed-only tests.
    ///
    /// Inputs:
    /// - `app`: `AppState` to populate with translations
    ///
    /// Output:
    /// - Populates `app.translations` and `app.translations_fallback` with installed-only translations
    ///
    /// Details:
    /// - Sets up only the translations needed for installed-only rendering tests.
    fn init_test_translations(app: &mut crate::state::AppState) {
        use std::collections::HashMap;
        let mut translations = HashMap::new();
        translations.insert(
            "app.titles.downgrade_list".to_string(),
            "Downgrade".to_string(),
        );
        translations.insert(
            "app.titles.downgrade_list_focused".to_string(),
            "Downgrade".to_string(),
        );
        translations.insert("app.titles.remove_list".to_string(), "Remove".to_string());
        translations.insert(
            "app.titles.remove_list_focused".to_string(),
            "Remove".to_string(),
        );
        // Primary and fallback maps get identical content for the tests.
        app.translations = translations.clone();
        app.translations_fallback = translations;
    }
    /// What: Verify installed-only mode renders both downgrade and remove lists.
    ///
    /// Inputs:
    /// - Installed-only mode with packages in both downgrade and remove lists
    ///
    /// Output:
    /// - Both lists render and `app.install_rect` maps to Remove list area.
    ///
    /// Details:
    /// - Tests that area is split 50/50 and rects are recorded correctly.
    #[test]
    fn installed_only_renders_both_lists() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        // One entry per list so both sub-panes have content to draw.
        app.downgrade_list.push(crate::state::PackageItem {
            name: "downgrade-pkg".to_string(),
            version: "1.0.0".to_string(),
            description: String::new(),
            source: crate::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        });
        app.remove_list.push(crate::state::PackageItem {
            name: "remove-pkg".to_string(),
            version: "2.0.0".to_string(),
            description: String::new(),
            source: crate::state::Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        });
        term.draw(|f| {
            let area = f.area();
            render_installed_only(f, &mut app, area);
        })
        .expect("Failed to render installed-only pane");
        assert!(app.install_rect.is_some());
        assert!(app.downgrade_rect.is_some());
        // Import/Export buttons should be hidden
        assert!(app.install_import_rect.is_none());
        assert!(app.install_export_rect.is_none());
    }
    /// What: Verify installed-only mode clears button rects.
    ///
    /// Inputs:
    /// - Installed-only mode activated
    ///
    /// Output:
    /// - `app.install_import_rect` and `app.install_export_rect` are set to `None`.
    ///
    /// Details:
    /// - Tests that Import/Export buttons are not shown in installed-only mode.
    #[test]
    fn installed_only_hides_buttons() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        // Set initial button rects so the test proves they get cleared.
        app.install_import_rect = Some((10, 10, 10, 1));
        app.install_export_rect = Some((20, 10, 10, 1));
        term.draw(|f| {
            let area = f.area();
            render_installed_only(f, &mut app, area);
        })
        .expect("Failed to render installed-only pane without buttons");
        assert!(app.install_import_rect.is_none());
        assert!(app.install_export_rect.is_none());
    }
    /// What: Verify installed-only mode splits area correctly.
    ///
    /// Inputs:
    /// - Area of width 100 split into two lists
    ///
    /// Output:
    /// - Downgrade and Remove lists each get approximately 50% width.
    ///
    /// Details:
    /// - Tests that layout splitting produces two equal panes.
    #[test]
    fn installed_only_splits_area_correctly() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        term.draw(|f| {
            let area = f.area();
            render_installed_only(f, &mut app, area);
        })
        .expect("Failed to render installed-only pane with split area");
        let downgrade_rect = app
            .downgrade_rect
            .expect("downgrade_rect should be set after rendering");
        let install_rect = app
            .install_rect
            .expect("install_rect should be set after rendering"); // Maps to Remove list
        // Both should have similar widths (accounting for borders)
        let downgrade_width = downgrade_rect.2;
        let remove_width = install_rect.2;
        // Widths should be approximately equal (within one terminal cell,
        // since percentage splitting may round oddly-sized areas)
        assert!((i32::from(downgrade_width) - i32::from(remove_width)).abs() <= 1);
    }
    /// What: Verify installed-only mode records downgrade rect.
    ///
    /// Inputs:
    /// - Installed-only mode with downgrade list
    ///
    /// Output:
    /// - `app.downgrade_rect` is set to inner rectangle of downgrade list.
    ///
    /// Details:
    /// - Tests that downgrade rect excludes borders.
    #[test]
    fn installed_only_records_downgrade_rect() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        term.draw(|f| {
            let area = f.area();
            render_installed_only(f, &mut app, area);
        })
        .expect("Failed to render installed-only pane to record downgrade rect");
        assert!(app.downgrade_rect.is_some());
        let (x, y, w, h) = app
            .downgrade_rect
            .expect("downgrade_rect should be Some after is_some() check");
        // Rect should exclude borders (one-cell inset from the pane origin)
        assert_eq!(x, 1);
        assert_eq!(y, 1);
        assert!(w > 0);
        assert!(h > 0);
    }
    /// What: Verify installed-only mode handles empty lists.
    ///
    /// Inputs:
    /// - Installed-only mode with empty downgrade and remove lists
    ///
    /// Output:
    /// - Both lists render without panic.
    ///
    /// Details:
    /// - Tests edge case where lists are empty.
    #[test]
    fn installed_only_handles_empty_lists() {
        let backend = TestBackend::new(100, 30);
        let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
        let mut app = crate::state::AppState::default();
        init_test_translations(&mut app);
        app.downgrade_list.clear();
        app.remove_list.clear();
        term.draw(|f| {
            let area = f.area();
            render_installed_only(f, &mut app, area);
        })
        .expect("Failed to render installed-only pane with empty lists");
        // Should render empty lists without panic
        assert!(app.install_rect.is_some());
        assert!(app.downgrade_rect.is_some());
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/ui/middle/recent.rs | src/ui/middle/recent.rs | use ratatui::{
Frame,
prelude::Rect,
style::Style,
text::{Line, Span},
widgets::{Block, BorderType, Borders, List, ListItem},
};
use crate::i18n;
use crate::state::AppState;
use crate::theme::theme;
/// What: Render the Recent searches list in the left pane of the middle row.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (recent list, focus, pane find filter)
/// - `area`: Target rectangle for the recent pane
///
/// Output:
/// - Draws the recent searches list and records inner rect for mouse hit-testing.
///
/// Details:
/// - Shows filtered recent searches; title includes "/pattern" when pane find is active.
/// - Updates `app.recent_rect` with inner rectangle coordinates (excluding borders).
pub fn render_recent(f: &mut Frame, app: &mut AppState, area: Rect) {
    // Hidden or zero-width pane: clear the hit-test rect and bail out.
    if !app.show_recent_pane || area.width == 0 {
        app.recent_rect = None;
        return;
    }
    let th = theme();
    let focused = matches!(app.focus, crate::state::Focus::Recent);
    let item_color = if focused { th.text } else { th.subtext0 };
    // Build the (possibly filtered) recent-search entries.
    let recents = app.recent_values();
    let rec_inds = crate::ui::helpers::filtered_recent_indices(app);
    let rec_items: Vec<ListItem> = rec_inds
        .iter()
        .filter_map(|&i| recents.get(i))
        .map(|entry| ListItem::new(Span::styled(entry.clone(), Style::default().fg(item_color))))
        .collect();
    // Title, plus an optional bold "/pattern" indicator while pane find is active.
    let title_key = if focused {
        "app.titles.recent_focused"
    } else {
        "app.titles.recent"
    };
    let mut title_spans: Vec<Span> = vec![Span::styled(
        i18n::t(app, title_key),
        Style::default().fg(if focused { th.mauve } else { th.overlay1 }),
    )];
    if focused && let Some(pat) = &app.pane_find {
        title_spans.push(Span::raw(" "));
        title_spans.push(Span::styled(
            "/",
            Style::default()
                .fg(th.sapphire)
                .add_modifier(ratatui::style::Modifier::BOLD),
        ));
        title_spans.push(Span::styled(pat.clone(), Style::default().fg(th.text)));
    }
    let block = Block::default()
        .title(Line::from(title_spans))
        .borders(Borders::ALL)
        .border_type(BorderType::Rounded)
        .border_style(Style::default().fg(if focused { th.mauve } else { th.surface1 }));
    let list = List::new(rec_items)
        .style(Style::default().fg(item_color).bg(th.base))
        .block(block)
        .highlight_style(Style::default().fg(th.text).bg(th.surface2))
        .highlight_symbol("▶ ");
    f.render_stateful_widget(list, area, &mut app.history_state);
    // Record the interior (border-excluded) rect for mouse hit-testing.
    app.recent_rect = Some((
        area.x + 1,
        area.y + 1,
        area.width.saturating_sub(2),
        area.height.saturating_sub(2),
    ));
}
/// What: Render the News search history list in the left pane of the news middle row.
///
/// Inputs:
/// - `f`: Frame to render into
/// - `app`: Application state (news recent list, focus, pane find filter)
/// - `area`: Target rectangle for the history pane
///
/// Output:
/// - Draws the news search history and records inner rect for mouse hit-testing.
///
/// Details:
/// - Uses the shared `history_state` for selection; hides when the pane is toggled off.
/// - Unlike `render_recent`, also hands focus back to Search when the pane is
///   hidden while focused.
pub fn render_news_recent(f: &mut Frame, app: &mut AppState, area: Rect) {
    if !app.show_news_history_pane || area.width == 0 {
        app.recent_rect = None;
        // A hidden pane must not keep keyboard focus; fall back to Search.
        if matches!(app.focus, crate::state::Focus::Recent) {
            app.focus = crate::state::Focus::Search;
        }
        return;
    }
    let th = theme();
    let recent_focused = matches!(app.focus, crate::state::Focus::Recent);
    // Build the (possibly filtered) news-history entries.
    let recents = app.news_recent_values();
    let rec_inds = crate::ui::helpers::filtered_recent_indices(app);
    let rec_items: Vec<ListItem> = rec_inds
        .iter()
        .filter_map(|&i| recents.get(i))
        .map(|s| {
            ListItem::new(Span::styled(
                s.clone(),
                Style::default().fg(if recent_focused { th.text } else { th.subtext0 }),
            ))
        })
        .collect();
    let recent_title = if recent_focused {
        i18n::t(app, "app.titles.news_recent_focused")
    } else {
        i18n::t(app, "app.titles.news_recent")
    };
    let mut recent_title_spans: Vec<Span> = vec![Span::styled(
        recent_title,
        Style::default().fg(if recent_focused {
            th.mauve
        } else {
            th.overlay1
        }),
    )];
    // Append a bold "/pattern" indicator while pane find is active and focused.
    if recent_focused && let Some(pat) = &app.pane_find {
        recent_title_spans.push(Span::raw(" "));
        recent_title_spans.push(Span::styled(
            "/",
            Style::default()
                .fg(th.sapphire)
                .add_modifier(ratatui::style::Modifier::BOLD),
        ));
        recent_title_spans.push(Span::styled(pat.clone(), Style::default().fg(th.text)));
    }
    let rec_block = Block::default()
        .title(Line::from(recent_title_spans))
        .borders(Borders::ALL)
        .border_type(BorderType::Rounded)
        .border_style(Style::default().fg(if recent_focused {
            th.mauve
        } else {
            th.surface1
        }));
    let rec_list = List::new(rec_items)
        .style(
            Style::default()
                .fg(if recent_focused { th.text } else { th.subtext0 })
                .bg(th.base),
        )
        .block(rec_block)
        .highlight_style(Style::default().fg(th.text).bg(th.surface2))
        .highlight_symbol("▶ ");
    f.render_stateful_widget(rec_list, area, &mut app.history_state);
    // Record inner rect for mouse hit-testing (inside borders)
    app.recent_rect = Some((
        area.x + 1,
        area.y + 1,
        area.width.saturating_sub(2),
        area.height.saturating_sub(2),
    ));
}
#[cfg(test)]
mod tests {
use super::*;
use ratatui::{Terminal, backend::TestBackend};
/// What: Initialize minimal English translations for recent tests.
///
/// Inputs:
/// - `app`: `AppState` to populate with translations
///
/// Output:
/// - Populates `app.translations` and `app.translations_fallback` with recent-related translations
///
/// Details:
/// - Sets up only the translations needed for recent rendering tests.
fn init_test_translations(app: &mut crate::state::AppState) {
use std::collections::HashMap;
let mut translations = HashMap::new();
translations.insert(
"app.titles.recent".to_string(),
"Search history".to_string(),
);
translations.insert(
"app.titles.recent_focused".to_string(),
"Search history".to_string(),
);
app.translations = translations.clone();
app.translations_fallback = translations;
}
/// What: Verify recent pane renders and records rect when visible.
///
/// Inputs:
/// - Recent pane is visible with some recent searches
///
/// Output:
/// - Recent pane renders and `app.recent_rect` is set to inner rectangle.
///
/// Details:
/// - Tests that rect is recorded with borders excluded (x+1, y+1, width-2, height-2).
#[test]
fn recent_renders_and_records_rect_when_visible() {
let backend = TestBackend::new(100, 30);
let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
let mut app = crate::state::AppState::default();
init_test_translations(&mut app);
app.show_recent_pane = true;
app.load_recent_items(&["package1".to_string(), "package2".to_string()]);
term.draw(|f| {
let area = f.area();
render_recent(f, &mut app, area);
})
.expect("Failed to render recent pane");
assert!(app.recent_rect.is_some());
let (x, y, w, h) = app
.recent_rect
.expect("recent_rect should be Some after is_some() check");
// Rect should exclude borders
assert_eq!(x, 1);
assert_eq!(y, 1);
assert_eq!(w, 98); // 100 - 2
assert_eq!(h, 28); // 30 - 2
}
/// What: Verify recent pane does not render and clears rect when hidden.
///
/// Inputs:
/// - Recent pane is hidden (`show_recent_pane = false`)
///
/// Output:
/// - `app.recent_rect` is set to `None`.
///
/// Details:
/// - Tests that hidden panes don't record rects.
#[test]
fn recent_clears_rect_when_hidden() {
let backend = TestBackend::new(100, 30);
let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
let mut app = crate::state::AppState::default();
init_test_translations(&mut app);
app.show_recent_pane = false;
app.recent_rect = Some((10, 10, 20, 20)); // Set initial value
term.draw(|f| {
let area = f.area();
render_recent(f, &mut app, area);
})
.expect("Failed to render hidden recent pane");
assert!(app.recent_rect.is_none());
}
/// What: Verify recent pane clears rect when area has zero width.
///
/// Inputs:
/// - Recent pane is visible but area has width 0
///
/// Output:
/// - `app.recent_rect` is set to `None`.
///
/// Details:
/// - Tests edge case where pane is visible but has no space to render.
#[test]
fn recent_clears_rect_when_area_zero_width() {
    let backend = TestBackend::new(100, 30);
    let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
    let mut app = crate::state::AppState::default();
    init_test_translations(&mut app);
    app.show_recent_pane = true;
    term.draw(|f| {
        // Degenerate area: zero width leaves no room to render the pane.
        let area = ratatui::prelude::Rect {
            x: 0,
            y: 0,
            width: 0,
            height: 10,
        };
        render_recent(f, &mut app, area);
    })
    .expect("Failed to render recent pane with zero width");
    // Even a visible pane must not record a rect when it has no drawable space.
    assert!(app.recent_rect.is_none());
}
/// What: Verify recent pane displays pane find filter when active and focused.
///
/// Inputs:
/// - Recent pane is focused with `pane_find` set to "test"
///
/// Output:
/// - Recent pane renders with "/test" in the title.
///
/// Details:
/// - Tests that pane find pattern appears in title when pane is focused.
#[test]
fn recent_displays_pane_find_when_focused() {
    let backend = TestBackend::new(100, 30);
    let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
    let mut app = crate::state::AppState::default();
    init_test_translations(&mut app);
    app.show_recent_pane = true;
    // Focus the pane and activate an in-pane find pattern (shown as "/test" in the title).
    app.focus = crate::state::Focus::Recent;
    app.pane_find = Some("test".to_string());
    term.draw(|f| {
        let area = f.area();
        render_recent(f, &mut app, area);
    })
    .expect("Failed to render recent pane with pane find");
    // Should render without panic with pane find in title
    assert!(app.recent_rect.is_some());
}
/// What: Verify recent pane does not display pane find when unfocused.
///
/// Inputs:
/// - Recent pane is not focused but `pane_find` is set
///
/// Output:
/// - Recent pane renders without pane find in title.
///
/// Details:
/// - Tests that pane find only appears when pane is focused.
#[test]
fn recent_hides_pane_find_when_unfocused() {
    let backend = TestBackend::new(100, 30);
    let mut term = Terminal::new(backend).expect("Failed to create terminal for test");
    let mut app = crate::state::AppState::default();
    init_test_translations(&mut app);
    app.show_recent_pane = true;
    // The find pattern is set, but focus is elsewhere, so it must not show in the title.
    app.focus = crate::state::Focus::Search;
    app.pane_find = Some("test".to_string());
    term.draw(|f| {
        let area = f.area();
        render_recent(f, &mut app, area);
    })
    .expect("Failed to render unfocused recent pane");
    // Should render without pane find in title
    assert!(app.recent_rect.is_some());
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/faillock.rs | src/logic/faillock.rs | //! Faillock status checking and configuration parsing.
use std::process::Command;
/// What: Faillock status information for a user.
///
/// Inputs: None (constructed from faillock command output).
///
/// Output: Status information about failed login attempts.
///
/// Details:
/// - Contains the number of failed attempts, maximum allowed attempts,
/// - whether the account is locked, and the lockout duration in minutes.
#[derive(Debug, Clone)]
pub struct FaillockStatus {
    /// Number of failed attempts currently recorded by `faillock` for the user.
    pub attempts_used: u32,
    /// Maximum number of failed attempts before lockout (the `deny` setting).
    pub max_attempts: u32,
    /// Whether the account is currently considered locked (attempts reached the
    /// limit and the lockout window has not yet elapsed).
    pub is_locked: bool,
    /// Lockout duration in minutes (taken from the parsed config).
    pub lockout_duration_minutes: u32,
    /// Timestamp of the most recent failed attempt, if one could be parsed
    /// from the `faillock` output.
    pub last_failed_timestamp: Option<std::time::SystemTime>,
}
/// What: Faillock configuration values.
///
/// Inputs: None (parsed from `/etc/security/faillock.conf`).
///
/// Output: Configuration values for faillock behavior.
///
/// Details:
/// - Contains the deny count (max attempts) and fail interval (lockout duration).
#[derive(Debug, Clone)]
pub struct FaillockConfig {
    /// Maximum number of failed attempts before lockout (`deny` setting).
    pub deny: u32,
    /// Lockout duration in minutes (`fail_interval` setting).
    // NOTE(review): pam_faillock documents `fail_interval` in seconds; this code
    // interprets it as minutes — confirm the intended semantics.
    pub fail_interval: u32,
}
/// What: Check faillock status for a user.
///
/// Inputs:
/// - `username`: Username to check faillock status for.
///
/// Output:
/// - `Ok(FaillockStatus)` with status information, or `Err(String)` on error.
///
/// # Errors
///
/// - Returns `Err` if the `faillock` command cannot be executed.
///
/// Details:
/// - Executes `faillock --user <username>` command.
/// - Parses output to count lines with "V" (valid attempts).
/// - Compares with max attempts from config to determine if locked.
/// - Returns status with attempt count, max attempts, lock status, and lockout duration.
pub fn check_faillock_status(username: &str) -> Result<FaillockStatus, String> {
    // Get config first to determine max attempts and lockout duration
    let config = parse_faillock_config().unwrap_or(FaillockConfig {
        deny: 3,
        fail_interval: 15,
    });
    // Execute faillock command
    let output = Command::new("faillock")
        .args(["--user", username])
        .output()
        .map_err(|e| format!("Failed to execute faillock command: {e}"))?;
    if !output.status.success() {
        // If command fails, assume no lockout (might not be configured)
        return Ok(FaillockStatus {
            attempts_used: 0,
            max_attempts: config.deny,
            is_locked: false,
            lockout_duration_minutes: config.fail_interval,
            last_failed_timestamp: None,
        });
    }
    let output_str = String::from_utf8_lossy(&output.stdout);
    let lines: Vec<&str> = output_str.lines().collect();
    // Count lines with "V" (valid attempts) - skip header line
    // Also track the most recent failed attempt timestamp (the one that triggered lockout)
    let mut attempts_used = 0u32;
    let mut in_user_section = false;
    let mut seen_header = false;
    let mut most_recent_timestamp: Option<std::time::SystemTime> = None;
    for line in lines {
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        // Check if this is the username header line (format: "username:")
        if trimmed.ends_with(':') && trimmed.trim_end_matches(':') == username {
            in_user_section = true;
            seen_header = false; // Reset header flag for this user section
            continue;
        }
        // If we're in the user section, look for lines with "V"
        if in_user_section {
            // Skip the header line that contains "When", "Type", "Source", "Valid"
            // This line also contains "V" but is not an attempt
            if !seen_header
                && trimmed.contains("When")
                && trimmed.contains("Type")
                && trimmed.contains("Source")
                && trimmed.contains("Valid")
            {
                seen_header = true;
                continue;
            }
            // Check if line contains "V" (valid attempt marker)
            // Format is typically: "YYYY-MM-DD HH:MM:SS TTY /dev/pts/X V"
            // Must be a date-like line (starts with YYYY-MM-DD format)
            if (trimmed.contains(" V") || trimmed.ends_with('V'))
                && trimmed.chars().take(4).all(|c| c.is_ascii_digit())
            {
                attempts_used += 1;
                // Parse timestamp from the line (format: "YYYY-MM-DD HH:MM:SS")
                // Extract first 19 characters which should be "YYYY-MM-DD HH:MM:SS"
                if trimmed.len() >= 19 {
                    let timestamp_str = &trimmed[0..19];
                    if let Ok(dt) =
                        chrono::NaiveDateTime::parse_from_str(timestamp_str, "%Y-%m-%d %H:%M:%S")
                    {
                        // Faillock timestamps are in local time, so we need to convert them properly
                        // First, assume the timestamp is in local timezone
                        let local_dt = dt.and_local_timezone(chrono::Local);
                        // Get the single valid timezone conversion (or use UTC as fallback)
                        // NOTE(review): `single()` yields None for ambiguous/nonexistent local
                        // times around DST transitions; the UTC fallback then shifts the instant
                        // by the UTC offset — confirm this is acceptable for lockout math.
                        let dt_utc = local_dt.single().map_or_else(
                            || dt.and_utc(),
                            |dt_local| dt_local.with_timezone(&chrono::Utc),
                        );
                        // Convert chrono DateTime to SystemTime
                        let unix_timestamp = dt_utc.timestamp();
                        if unix_timestamp >= 0
                            && let Some(st) = std::time::SystemTime::UNIX_EPOCH.checked_add(
                                std::time::Duration::from_secs(
                                    u64::try_from(unix_timestamp).unwrap_or(0),
                                ),
                            )
                        {
                            // Keep the most recent timestamp (faillock shows oldest first, so last one is newest)
                            most_recent_timestamp = Some(st);
                        }
                    }
                }
            }
        }
    }
    // Check if user should be locked based on attempts
    let should_be_locked = attempts_used >= config.deny;
    // If locked, check if lockout has expired based on timestamp
    let is_locked = if should_be_locked {
        most_recent_timestamp.map_or(should_be_locked, |last_timestamp| {
            // Check if lockout duration has passed since last failed attempt
            let now = std::time::SystemTime::now();
            // duration_since errs when the timestamp is in the future (clock skew);
            // in that case stay locked (map_or's `true` default).
            now.duration_since(last_timestamp).map_or(true, |elapsed| {
                let lockout_seconds = u64::from(config.fail_interval) * 60;
                // If elapsed time is less than lockout duration, user is still locked
                elapsed.as_secs() < lockout_seconds
            })
        })
    } else {
        false
    };
    Ok(FaillockStatus {
        attempts_used,
        max_attempts: config.deny,
        is_locked,
        lockout_duration_minutes: config.fail_interval,
        last_failed_timestamp: most_recent_timestamp,
    })
}
/// What: Parse faillock configuration from `/etc/security/faillock.conf`.
///
/// Inputs: None (reads from system config file).
///
/// Output:
/// - `Ok(FaillockConfig)` with parsed values, or `Err(String)` on error.
///
/// # Errors
///
/// - Returns `Err` if the config file cannot be read (though defaults are used in practice).
///
/// Details:
/// - Reads `/etc/security/faillock.conf`.
/// - Parses `deny` setting (default 3 if commented out).
/// - Parses `fail_interval` setting (default 15 minutes if commented out).
/// - Handles comments (lines starting with `#`) and whitespace.
pub fn parse_faillock_config() -> Result<FaillockConfig, String> {
    use std::fs;
    let config_path = "/etc/security/faillock.conf";
    let Ok(contents) = fs::read_to_string(config_path) else {
        // File doesn't exist or can't be read: fall back to defaults.
        return Ok(FaillockConfig {
            deny: 3,
            fail_interval: 15,
        });
    };
    let mut deny = 3u32; // Default when `deny` is absent or commented out.
    let mut fail_interval = 15u32; // Default in minutes when `fail_interval` is absent.
    // NOTE(review): pam_faillock(8) defines `fail_interval` in seconds and uses
    // `unlock_time` for the lockout duration; this parser treats `fail_interval`
    // as the lockout duration in minutes — confirm the intended semantics.
    for line in contents.lines() {
        let trimmed = line.trim();
        // Skip empty lines and full-line comments.
        if trimmed.is_empty() || trimmed.starts_with('#') {
            continue;
        }
        // Strip any trailing inline comment.
        let line_without_comment = trimmed.split('#').next().unwrap_or("").trim();
        // Require an exact `key = value` pair. The previous prefix match
        // (`starts_with("deny")`) would also accept unrelated keys that merely
        // share the prefix; matching the trimmed key exactly avoids that.
        let Some((key, value)) = line_without_comment.split_once('=') else {
            continue;
        };
        match key.trim() {
            "deny" => {
                if let Ok(v) = value.trim().parse::<u32>() {
                    deny = v;
                }
            }
            "fail_interval" => {
                if let Ok(v) = value.trim().parse::<u32>() {
                    fail_interval = v;
                }
            }
            _ => {}
        }
    }
    Ok(FaillockConfig {
        deny,
        fail_interval,
    })
}
/// What: Check if user is locked out and return lockout message if so.
///
/// Inputs:
/// - `username`: Username to check.
/// - `app`: Application state for translations.
///
/// Output:
/// - `Some(message)` if user is locked out, `None` otherwise.
///
/// Details:
/// - Checks faillock status and returns formatted lockout message if locked.
/// - Returns `None` if not locked or if check fails.
/// - Uses translations from `AppState`.
#[must_use]
pub fn get_lockout_message_if_locked(
    username: &str,
    app: &crate::state::AppState,
) -> Option<String> {
    // Best-effort: a failed faillock query is treated the same as "not locked".
    let status = check_faillock_status(username).ok()?;
    if !status.is_locked {
        return None;
    }
    // Build the translated lockout message with the username and duration.
    Some(crate::i18n::t_fmt(
        app,
        "app.modals.alert.account_locked_with_time",
        &[
            &username as &dyn std::fmt::Display,
            &status.lockout_duration_minutes,
        ],
    ))
}
/// What: Calculate remaining lockout time in minutes based on last failed attempt timestamp.
///
/// Inputs:
/// - `last_timestamp`: Timestamp of the last failed attempt.
/// - `lockout_duration_minutes`: Total lockout duration in minutes.
///
/// Output:
/// - `Some(minutes)` if still locked out, `None` if lockout has expired.
///
/// Details:
/// - Calculates time elapsed since last failed attempt.
/// - Returns remaining minutes if lockout is still active, `None` if expired.
#[must_use]
pub fn calculate_remaining_lockout_minutes(
    last_timestamp: &std::time::SystemTime,
    lockout_duration_minutes: u32,
) -> Option<u32> {
    let lockout_seconds = u64::from(lockout_duration_minutes) * 60;
    // A timestamp in the future (clock skew) makes duration_since fail;
    // report the full lockout window in that case.
    let Ok(elapsed) = std::time::SystemTime::now().duration_since(*last_timestamp) else {
        return Some(lockout_duration_minutes);
    };
    if elapsed.as_secs() >= lockout_seconds {
        // Lockout expired.
        return None;
    }
    // Remaining time rounded up to whole minutes.
    let remaining_minutes = (lockout_seconds - elapsed.as_secs()).div_ceil(60);
    Some(u32::try_from(remaining_minutes).unwrap_or(u32::MAX))
}
/// What: Check faillock status and calculate lockout information for display.
///
/// Inputs:
/// - `username`: Username to check.
///
/// Output:
/// - Tuple of `(is_locked, lockout_until, remaining_minutes)`.
///
/// Details:
/// - Checks faillock status and calculates remaining lockout time if locked.
/// - Returns lockout information for UI display.
#[must_use]
pub fn get_lockout_info(username: &str) -> (bool, Option<std::time::SystemTime>, Option<u32>) {
if let Ok(status) = check_faillock_status(username)
&& status.is_locked
{
if let Some(last_timestamp) = status.last_failed_timestamp {
let remaining = calculate_remaining_lockout_minutes(
&last_timestamp,
status.lockout_duration_minutes,
);
// Calculate lockout_until timestamp
let lockout_until = last_timestamp
+ std::time::Duration::from_secs(u64::from(status.lockout_duration_minutes) * 60);
// Return remaining time - if None, it means lockout expired, show 0
// But if timestamp is in future (timezone issue), remaining should be Some
return (true, Some(lockout_until), remaining);
}
// Locked but no timestamp - still show as locked but no time remaining
return (true, None, Some(0));
}
(false, None, None)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::io::Write;

    // NOTE: these are smoke tests — they run against the live system's faillock
    // configuration, so they only assert properties expected to hold anywhere.

    #[test]
    /// What: Test parsing faillock config with defaults.
    ///
    /// Inputs:
    /// - Config file that doesn't exist or has commented settings.
    ///
    /// Output:
    /// - Returns default values (deny=3, `fail_interval=15`).
    ///
    /// Details:
    /// - Verifies default values are used when config is missing or commented.
    fn test_parse_faillock_config_defaults() {
        // This test may fail if the file exists, but that's okay
        // The function should handle missing files gracefully
        let _config = parse_faillock_config();
        // Just verify it doesn't panic
    }

    #[test]
    /// What: Test parsing faillock config with custom values.
    ///
    /// Inputs:
    /// - Temporary config file with custom deny and `fail_interval` values.
    ///
    /// Output:
    /// - Returns parsed values from config file.
    ///
    /// Details:
    /// - Creates a temporary config file and verifies parsing works correctly.
    fn test_parse_faillock_config_custom_values() {
        use std::env::temp_dir;
        let temp_file = temp_dir().join("test_faillock.conf");
        let content = "deny = 5\nfail_interval = 30\n";
        if let Ok(mut file) = fs::File::create(&temp_file) {
            let _ = file.write_all(content.as_bytes());
            // Note: We can't easily test this without mocking file reading
            // (parse_faillock_config reads a hard-coded system path).
            // Just verify the function doesn't panic
            let _config = parse_faillock_config();
            let _ = fs::remove_file(&temp_file);
        }
    }

    #[test]
    /// What: Test parsing faillock config with comments.
    ///
    /// Inputs:
    /// - Config file with commented lines and inline comments.
    ///
    /// Output:
    /// - Parses values correctly, ignoring comments.
    ///
    /// Details:
    /// - Verifies that comments (both full-line and inline) are handled correctly.
    fn test_parse_faillock_config_with_comments() {
        // The function should handle comments correctly
        // Since we can't easily mock file reading, just verify it doesn't panic
        let _config = parse_faillock_config();
    }

    #[test]
    /// What: Test faillock status checking handles errors gracefully.
    ///
    /// Inputs:
    /// - Username that may or may not have faillock entries.
    ///
    /// Output:
    /// - Returns status without panicking.
    ///
    /// Details:
    /// - Verifies the function handles various error cases.
    fn test_check_faillock_status_handles_errors() {
        let username = std::env::var("USER").unwrap_or_else(|_| "testuser".to_string());
        let result = check_faillock_status(&username);
        // Should return Ok or handle errors gracefully
        if let Ok(status) = result {
            // Verify status has reasonable values
            assert!(status.max_attempts > 0);
            assert!(status.lockout_duration_minutes > 0);
        } else {
            // Error is acceptable (e.g., faillock not configured)
        }
    }

    #[test]
    /// What: Test faillock status structure.
    ///
    /// Inputs:
    /// - Username.
    ///
    /// Output:
    /// - Returns status with all fields populated.
    ///
    /// Details:
    /// - Verifies that the status struct contains all expected fields.
    fn test_faillock_status_structure() {
        let username = std::env::var("USER").unwrap_or_else(|_| "testuser".to_string());
        if let Ok(status) = check_faillock_status(&username) {
            // Verify all fields are present
            let _ = status.attempts_used;
            let _ = status.max_attempts;
            let _ = status.is_locked;
            let _ = status.lockout_duration_minutes;
            // Just verify the struct can be accessed
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/password.rs | src/logic/password.rs | //! Sudo password validation utilities.
use std::process::Command;
/// What: Validate a sudo password without executing any command.
///
/// Inputs:
/// - `password`: Password to validate.
///
/// Output:
/// - `Ok(true)` if password is valid, `Ok(false)` if invalid, or `Err(String)` on error.
///
/// # Errors
///
/// - Returns `Err` if the validation command cannot be executed (e.g., sudo not available).
///
/// Details:
/// - First invalidates cached sudo credentials with `sudo -k` to ensure fresh validation.
/// - Then executes `printf '%s\n' '<password>' | sudo -S -v` to test password validity.
/// - Uses `printf` instead of `echo` for more reliable password handling.
/// - Uses `sudo -v` which validates credentials without executing a command.
/// - Returns `Ok(true)` if password is valid, `Ok(false)` if invalid.
/// - Handles errors appropriately (e.g., if sudo is not available).
pub fn validate_sudo_password(password: &str) -> Result<bool, String> {
    use crate::install::shell_single_quote;
    // Escape the password so it survives shell interpolation untouched.
    let escaped_password = shell_single_quote(password);
    // Build command: sudo -k ; printf '%s\n' '<password>' | sudo -S -v
    // `sudo -k` first invalidates cached credentials; without it, a stale cache
    // could make validation succeed even with a wrong password. `printf` (rather
    // than `echo`) feeds the password to `sudo -S -v`, which validates
    // credentials without executing any command.
    let cmd = format!("sudo -k ; printf '%s\\n' {escaped_password} | sudo -S -v 2>&1");
    // Execute command
    let output = Command::new("sh")
        .arg("-c")
        .arg(&cmd)
        .output()
        .map_err(|e| format!("Failed to execute sudo validation: {e}"))?;
    // Exit code 0 means the password was accepted; any other code means it was
    // rejected (or sudo failed for another reason). Relying on exit codes rather
    // than error-message text keeps this locale-independent. The previous
    // `if … { Ok(true) } else { Ok(false) }` is collapsed into the expression.
    Ok(output.status.success())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// What: Check if passwordless sudo is configured.
    ///
    /// Inputs:
    /// - None.
    ///
    /// Output:
    /// - `true` if passwordless sudo is available, `false` otherwise.
    ///
    /// Details:
    /// - Uses `sudo -n true` to check if sudo can run without a password.
    /// - Returns `false` if sudo is not available or requires a password.
    fn is_passwordless_sudo() -> bool {
        Command::new("sudo")
            .args(["-n", "true"])
            .stdin(std::process::Stdio::null())
            .stdout(std::process::Stdio::null())
            .stderr(std::process::Stdio::null())
            .status()
            .is_ok_and(|s| s.success())
    }

    #[test]
    #[ignore = "Uses sudo with wrong password - may lock user out. Run with --ignored"]
    /// What: Test password validation handles invalid passwords.
    ///
    /// Inputs:
    /// - Invalid password string.
    ///
    /// Output:
    /// - Returns `Ok(false)` for invalid password.
    ///
    /// Details:
    /// - Verifies the function correctly identifies invalid passwords.
    /// - Skips assertion if passwordless sudo is configured (common in CI).
    /// - Marked as ignored to prevent user lockout from failed sudo attempts.
    fn test_validate_sudo_password_invalid() {
        // Skip test if passwordless sudo is configured (common in CI environments)
        if is_passwordless_sudo() {
            return;
        }
        // This test uses an obviously wrong password
        // It should return Ok(false) without panicking
        let result = validate_sudo_password("definitely_wrong_password_12345");
        // Result may be Ok(false) or Err depending on system configuration
        if let Ok(valid) = result {
            // Should be false for invalid password
            assert!(!valid);
        } else {
            // Error is acceptable (e.g., sudo not available)
        }
    }

    #[test]
    #[ignore = "Uses sudo with wrong password - may lock user out. Run with --ignored"]
    /// What: Test password validation handles empty passwords.
    ///
    /// Inputs:
    /// - Empty password string.
    ///
    /// Output:
    /// - Returns `Ok(false)` for empty password.
    ///
    /// Details:
    /// - Verifies the function correctly handles empty passwords.
    /// - Skips assertion if passwordless sudo is configured (common in CI).
    /// - Marked as ignored to prevent user lockout from failed sudo attempts.
    fn test_validate_sudo_password_empty() {
        // Skip test if passwordless sudo is configured (common in CI environments)
        if is_passwordless_sudo() {
            return;
        }
        let result = validate_sudo_password("");
        // Empty password should be invalid
        if let Ok(valid) = result {
            assert!(!valid);
        } else {
            // Error is acceptable
        }
    }

    #[test]
    #[ignore = "Uses sudo with wrong password - may lock user out. Run with --ignored"]
    /// What: Test password validation handles special characters.
    ///
    /// Inputs:
    /// - Password with special characters that need escaping.
    ///
    /// Output:
    /// - Handles special characters without panicking.
    ///
    /// Details:
    /// - Verifies the function correctly escapes special characters in passwords.
    /// - Marked as ignored to prevent user lockout from failed sudo attempts.
    fn test_validate_sudo_password_special_chars() {
        // Test with password containing special shell characters
        // (quotes, dollar, backtick, backslash) to exercise the escaping path.
        let passwords = vec![
            "pass'word",
            "pass\"word",
            "pass$word",
            "pass`word",
            "pass\\word",
        ];
        for pass in passwords {
            let result = validate_sudo_password(pass);
            // Just verify it doesn't panic
            let _ = result;
        }
    }

    #[test]
    #[ignore = "Uses sudo with wrong password - may lock user out. Run with --ignored"]
    /// What: Test password validation function signature.
    ///
    /// Inputs:
    /// - Various password strings.
    ///
    /// Output:
    /// - Returns Result<bool, String> as expected.
    ///
    /// Details:
    /// - Verifies the function returns the correct type.
    /// - Marked as ignored to prevent user lockout from failed sudo attempts.
    fn test_validate_sudo_password_signature() {
        let result: Result<bool, String> = validate_sudo_password("test");
        // Verify it returns the correct type
        let _ = result;
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/prefetch.rs | src/logic/prefetch.rs | //! Proactive detail fetching for packages near the current selection.
use tokio::sync::mpsc;
use crate::state::{AppState, PackageItem};
/// What: Prefetch details for items near the current selection (alternating above/below).
///
/// Inputs:
/// - `app`: Mutable application state (`results`, `selected`, `details_cache`)
/// - `details_tx`: Channel to enqueue detail requests
///
/// Output:
/// - Enqueues requests for allowed, uncached neighbors within a fixed radius; no return value.
///
/// Details:
/// - Respects `logic::is_allowed` and skips names present in the cache; designed to be cheap.
pub fn ring_prefetch_from_selected(
    app: &mut AppState,
    details_tx: &mpsc::UnboundedSender<PackageItem>,
) {
    let total = app.results.len();
    if total == 0 {
        return;
    }
    // Walk outward from the selection, one row above and one below per step,
    // up to a fixed radius of 30 entries in each direction.
    for step in 1..=30usize {
        let mut any_in_bounds = false;
        // Neighbor above the selection, if the index doesn't underflow.
        if let Some(above) = app.selected.checked_sub(step) {
            any_in_bounds = true;
            if let Some(item) = app.results.get(above).cloned()
                && crate::logic::is_allowed(&item.name)
                && !app.details_cache.contains_key(&item.name)
            {
                let _ = details_tx.send(item);
            }
        }
        // Neighbor below the selection, if still within the list.
        let below = app.selected + step;
        if below < total {
            any_in_bounds = true;
            if let Some(item) = app.results.get(below).cloned()
                && crate::logic::is_allowed(&item.name)
                && !app.details_cache.contains_key(&item.name)
            {
                let _ = details_tx.send(item);
            }
        }
        // Stop once both directions have run past the ends of the list.
        if !any_in_bounds {
            break;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Test fixture: build an official-repo `PackageItem` with fixed version/arch.
    fn item_official(name: &str, repo: &str) -> PackageItem {
        PackageItem {
            name: name.to_string(),
            version: "1.0".to_string(),
            description: format!("{name} desc"),
            source: crate::state::Source::Official {
                repo: repo.to_string(),
                arch: "x86_64".to_string(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }

    #[tokio::test]
    #[allow(clippy::await_holding_lock)]
    /// What: Ensure prefetching emits no requests when results are empty.
    ///
    /// Inputs:
    /// - Application state with zero search results.
    ///
    /// Output:
    /// - No messages received on the details channel within the timeout window.
    ///
    /// Details:
    /// - Uses a short timeout to confirm no unexpected sends occur during the async loop.
    async fn prefetch_noop_on_empty_results() {
        let _guard = crate::global_test_mutex_lock();
        let mut app = AppState::default();
        let (tx, mut rx) = mpsc::unbounded_channel();
        ring_prefetch_from_selected(&mut app, &tx);
        let none = tokio::time::timeout(std::time::Duration::from_millis(30), rx.recv())
            .await
            .ok()
            .flatten();
        assert!(none.is_none());
    }

    #[tokio::test]
    #[allow(clippy::await_holding_lock)]
    /// What: Verify prefetch honours allowed gating and avoids cached entries.
    ///
    /// Inputs:
    /// - Results list of three packages with varying allowed states and cache contents.
    ///
    /// Output:
    /// - No requests when only the selected item is allowed; afterwards only uncached, allowed neighbor is dispatched.
    ///
    /// Details:
    /// - Toggles `set_allowed_only_selected` and `set_allowed_ring`, updating the cache between passes to target specific neighbours.
    async fn prefetch_respects_allowed_and_cache() {
        let _guard = crate::global_test_mutex_lock();
        // Three results with "b" selected in the middle, so "a" and "c" are neighbors.
        let mut app = AppState {
            results: vec![
                item_official("a", "core"),
                item_official("b", "extra"),
                item_official("c", "extra"),
            ],
            selected: 1,
            ..Default::default()
        };
        // Disallow b/c except selected, and cache one neighbor
        crate::logic::set_allowed_only_selected(&app);
        app.details_cache.insert(
            "c".into(),
            crate::state::PackageDetails {
                name: "c".into(),
                ..Default::default()
            },
        );
        let (tx, mut rx) = mpsc::unbounded_channel();
        ring_prefetch_from_selected(&mut app, &tx);
        // With only-selected allowed, neighbors shouldn't be sent
        let none = tokio::time::timeout(std::time::Duration::from_millis(60), rx.recv())
            .await
            .ok()
            .flatten();
        assert!(none.is_none());
        // Now allow ring and clear cache for b, keep c cached
        app.details_cache.clear();
        app.details_cache.insert(
            "c".into(),
            crate::state::PackageDetails {
                name: "c".into(),
                ..Default::default()
            },
        );
        crate::logic::set_allowed_ring(&app, 1);
        ring_prefetch_from_selected(&mut app, &tx);
        // Expect only 'a' (above neighbor) to be sent; 'c' is cached
        let sent = tokio::time::timeout(std::time::Duration::from_millis(200), rx.recv())
            .await
            .ok()
            .flatten()
            .expect("one sent");
        assert_eq!(sent.name, "a");
        let none2 = tokio::time::timeout(std::time::Duration::from_millis(60), rx.recv())
            .await
            .ok()
            .flatten();
        assert!(none2.is_none());
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/filter.rs | src/logic/filter.rs | //! Package filtering logic for repository and AUR results.
use crate::state::{AppState, PackageItem, Source};
#[inline]
/// What: Conditionally push a `PackageItem` into the filtered results buffer.
///
/// Inputs:
/// - `cond`: When `true`, the item is appended to `out`.
/// - `it`: Candidate package record (moved into the collection when included).
/// - `out`: Destination vector accumulating filtered results.
///
/// Output:
/// - Extends `out` with `it` when `cond` evaluates to `true`; leaves `out` untouched otherwise.
///
/// Details:
/// - Keeps the filtering loop concise by encapsulating the conditional push logic.
fn return_if_true(cond: bool, it: PackageItem, out: &mut Vec<PackageItem>) {
    // Guard form: bail out early when the item is excluded.
    if !cond {
        return;
    }
    out.push(it);
}
/// What: Apply current repo/AUR filters to `app.all_results`, write into `app.results`, then sort.
///
/// Inputs:
/// - `app`: Mutable application state containing `all_results`, filter toggles, and selection
///
/// Output:
/// - Updates `app.results`, applies sorting, and preserves selection when possible.
///
/// Details:
/// - Unknown official repos are included only when all official filters are enabled.
/// - Selection is restored by name when present; otherwise clamped or cleared if list is empty.
pub fn apply_filters_and_sort_preserve_selection(app: &mut AppState) {
    // Remember the currently selected package name so the selection can be
    // restored after the results list is rebuilt.
    let prev_name = app.results.get(app.selected).map(|p| p.name.clone());
    // Filter from all_results into results based on the active toggles.
    let mut filtered: Vec<PackageItem> = Vec::with_capacity(app.all_results.len());
    for it in app.all_results.iter().cloned() {
        let include = match &it.source {
            Source::Aur => app.results_filter_show_aur,
            Source::Official { repo, .. } => {
                // Unified Manjaro detection: name prefix or owner contains
                // "manjaro" when available. Prefer the cached details owner;
                // borrow it instead of cloning a String for every item.
                let owner = app
                    .details_cache
                    .get(&it.name)
                    .map_or("", |d| d.owner.as_str());
                if crate::index::is_manjaro_name_or_owner(&it.name, owner) {
                    return_if_true(app.results_filter_show_manjaro, it, &mut filtered);
                    continue;
                }
                crate::logic::distro::repo_toggle_for(repo, app)
            }
        };
        return_if_true(include, it, &mut filtered);
    }
    app.results = filtered;
    // Invalidate sort caches since results changed
    crate::logic::invalidate_sort_caches(app);
    // Apply existing sort policy and preserve selection
    crate::logic::sort_results_preserve_selection(app);
    // Restore the previous selection by name when possible; otherwise clamp the
    // index into range, or clear it entirely when the list is empty. (The
    // clamp/clear fallbacks are identical whether or not a name was remembered,
    // so the branches are merged.)
    let restored = prev_name.and_then(|name| app.results.iter().position(|p| p.name == name));
    if let Some(pos) = restored {
        app.selected = pos;
        app.list_state.select(Some(pos));
    } else if app.results.is_empty() {
        app.selected = 0;
        app.list_state.select(None);
    } else {
        app.selected = app.selected.min(app.results.len() - 1);
        app.list_state.select(Some(app.selected));
    }
}
#[cfg(test)]
mod tests {
use super::*;
/// Test fixture: build an official-repo `PackageItem` with fixed version/arch.
fn item_official(name: &str, repo: &str) -> PackageItem {
    PackageItem {
        name: name.to_string(),
        version: "1.0".to_string(),
        description: format!("{name} desc"),
        source: Source::Official {
            repo: repo.to_string(),
            arch: "x86_64".to_string(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    }
}
#[test]
/// What: Ensure repo/AUR filters include only enabled repositories while keeping selection stable.
///
/// Inputs:
/// - `app`: `AppState` seeded with mixed official/AUR results and selective filter toggles.
///
/// Output:
/// - Results contain solely core packages after filtering; selection index remains valid.
///
/// Details:
/// - Disables AUR/extra/multilib toggles to confirm `apply_filters_and_sort_preserve_selection`
///   respects flags and prunes disabled repositories.
fn apply_filters_and_preserve_selection() {
    // Mixed sources: one AUR entry plus core/extra/community official entries.
    let mut app = AppState {
        all_results: vec![
            PackageItem {
                name: "aur1".into(),
                version: "1".into(),
                description: String::new(),
                source: Source::Aur,
                popularity: Some(1.0),
                out_of_date: None,
                orphaned: false,
            },
            item_official("core1", "core"),
            item_official("extra1", "extra"),
            item_official("other1", "community"),
        ],
        results_filter_show_aur: false,
        results_filter_show_core: true,
        results_filter_show_extra: false,
        results_filter_show_multilib: false,
        ..Default::default()
    };
    apply_filters_and_sort_preserve_selection(&mut app);
    // Only the core repository is enabled, so every survivor must be from core.
    assert!(app.results.iter().all(
        |p| matches!(&p.source, Source::Official{repo, ..} if repo.eq_ignore_ascii_case("core"))
    ));
}
#[test]
/// What: Verify `CachyOS` and `EOS` toggles act independently when filtering official repos.
///
/// Inputs:
/// - `app`: `AppState` containing `CachyOS` and `EndeavourOS` entries with toggle combinations.
///
/// Output:
/// - `CachyOS` packages persist while `EOS` entries are removed per toggle state.
///
/// Details:
/// - Confirms `CachyOS` inclusion does not implicitly re-enable `EOS` repositories.
fn apply_filters_cachyos_and_eos_interaction() {
    // CachyOS enabled, EndeavourOS disabled; all standard official repos enabled.
    let mut app = AppState {
        all_results: vec![
            PackageItem {
                name: "cx".into(),
                version: "1".into(),
                description: String::new(),
                source: Source::Official {
                    repo: "cachyos-core".into(),
                    arch: "x86_64".into(),
                },
                popularity: None,
                out_of_date: None,
                orphaned: false,
            },
            PackageItem {
                name: "ey".into(),
                version: "1".into(),
                description: String::new(),
                source: Source::Official {
                    repo: "endeavouros".into(),
                    arch: "x86_64".into(),
                },
                popularity: None,
                out_of_date: None,
                orphaned: false,
            },
            item_official("core1", "core"),
        ],
        results_filter_show_core: true,
        results_filter_show_extra: true,
        results_filter_show_multilib: true,
        results_filter_show_eos: false,
        results_filter_show_cachyos: true,
        ..Default::default()
    };
    apply_filters_and_sort_preserve_selection(&mut app);
    // At least one cachyos-prefixed repo entry survives.
    assert!(app.results.iter().any(|p| match &p.source {
        Source::Official { repo, .. } => repo.to_lowercase().starts_with("cachyos"),
        Source::Aur => false,
    }));
    // No endeavouros entry remains.
    assert!(app.results.iter().all(|p| match &p.source {
        Source::Official { repo, .. } => !repo.eq_ignore_ascii_case("endeavouros"),
        Source::Aur => true,
    }));
}
#[test]
/// What: Validate inclusion rules for unknown official repositories relative to toggle coverage.
///
/// Inputs:
/// - `app`: `AppState` with an unfamiliar official repo plus standard core entry.
///
/// Output:
/// - Unknown repo excluded when any official toggle is off, then included once all are enabled.
///
/// Details:
/// - Demonstrates that enabling the remaining official toggle (`multilib`) widens acceptance to
/// previously filtered repos.
fn logic_filter_unknown_official_inclusion_policy() {
let mut app = AppState {
all_results: vec![
PackageItem {
name: "x1".into(),
version: "1".into(),
description: String::new(),
source: Source::Official {
repo: "weirdrepo".into(),
arch: "x86_64".into(),
},
popularity: None,
out_of_date: None,
orphaned: false,
},
item_official("core1", "core"),
],
results_filter_show_aur: true,
results_filter_show_core: true,
results_filter_show_extra: true,
results_filter_show_multilib: false,
results_filter_show_eos: true,
results_filter_show_cachyos: true,
..Default::default()
};
apply_filters_and_sort_preserve_selection(&mut app);
assert!(app.results.iter().all(|p| match &p.source {
Source::Official { repo, .. } => repo.eq_ignore_ascii_case("core"),
Source::Aur => false,
}));
app.results_filter_show_multilib = true;
apply_filters_and_sort_preserve_selection(&mut app);
assert!(app.results.iter().any(|p| match &p.source {
Source::Official { repo, .. } => repo.eq_ignore_ascii_case("weirdrepo"),
Source::Aur => false,
}));
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sort.rs | src/logic/sort.rs | //! Result sorting with selection preservation across sort modes.
//!
//! Implements cache-based O(n) reordering for sort mode switching between cacheable modes.
//! `BestMatches` mode is query-dependent and always performs full O(n log n) sort.
use crate::state::{AppState, PackageItem, SortMode, Source};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
#[cfg(test)]
use std::sync::atomic::{AtomicUsize, Ordering};
#[cfg(test)]
static COMPUTE_REPO_INDICES_CALLS: AtomicUsize = AtomicUsize::new(0);
#[cfg(test)]
static COMPUTE_AUR_INDICES_CALLS: AtomicUsize = AtomicUsize::new(0);
/// What: Compute a signature hash for the results list to validate cache validity.
///
/// Inputs:
/// - `results`: Slice of package items to compute signature for.
///
/// Output:
/// - Returns an order-insensitive `u64` hash based on package names.
///
/// Details:
/// - Used to detect when results have changed, invalidating cached sort orders.
/// - Order-insensitive so mode switches do not invalidate caches.
/// - Per-name hashes are combined with wrapping addition rather than XOR: XOR lets
///   duplicated names cancel pairwise (the same name can appear for both an official
///   repo and the AUR), which could make two different result sets collide and
///   falsely validate a stale cache.
fn compute_results_signature(results: &[PackageItem]) -> u64 {
    // Collect and canonicalize names to be order-insensitive.
    let mut names: Vec<&str> = results.iter().map(|p| p.name.as_str()).collect();
    names.sort_unstable();
    let mut hasher = DefaultHasher::new();
    names.len().hash(&mut hasher);
    // Mix first/last to avoid hashing full list twice.
    if let Some(first) = names.first() {
        first.hash(&mut hasher);
    }
    if let Some(last) = names.last() {
        last.hash(&mut hasher);
    }
    // Aggregate individual name hashes in an order-insensitive way.
    // Wrapping addition is commutative (so permutations hash equal) but, unlike
    // XOR, duplicate entries do not cancel each other out.
    let mut aggregate: u64 = 0;
    for name in names {
        let mut nh = DefaultHasher::new();
        name.hash(&mut nh);
        aggregate = aggregate.wrapping_add(nh.finish());
    }
    aggregate.hash(&mut hasher);
    hasher.finish()
}
/// What: Reorder results vector using cached indices.
///
/// Inputs:
/// - `results`: Mutable reference to results vector.
/// - `indices`: Slice of indices representing the desired sort order.
///
/// Output:
/// - Reorders `results` in-place according to `indices`.
///
/// Details:
/// - Performs O(n) reordering instead of O(n log n) sorting.
/// - Moves items into place instead of cloning them; a `PackageItem` owns several
///   `String`s, so cloning every entry on each reorder was needlessly expensive.
/// - Out-of-range indices are skipped safely. A duplicated index yields its item
///   only once (cached index lists are permutations, so this never occurs in practice).
fn reorder_from_indices(results: &mut Vec<PackageItem>, indices: &[usize]) {
    // Move the old entries into optional slots so each can be taken exactly once.
    let mut slots: Vec<Option<PackageItem>> =
        std::mem::take(results).into_iter().map(Some).collect();
    let mut reordered = Vec::with_capacity(indices.len());
    for &i in indices {
        // Invalid or already-consumed indices are filtered out defensively.
        if let Some(item) = slots.get_mut(i).and_then(|slot| slot.take()) {
            reordered.push(item);
        }
    }
    *results = reordered;
}
/// What: Sort results by how well each name matches the active query.
///
/// Inputs:
/// - `results`: Mutable reference to results vector.
/// - `query`: Search query string for match ranking.
///
/// Output:
/// - Sorts results in-place by match rank (lower is better), tie-broken by pacman
///   repo order and finally case-insensitive name.
///
/// Details:
/// - Backs the `BestMatches` sort mode. The ordering depends on the query, so the
///   result cannot be cached.
fn sort_best_matches(results: &mut [PackageItem], query: &str) {
    let needle = query.trim().to_lowercase();
    results.sort_by(|lhs, rhs| {
        crate::util::match_rank(&lhs.name, &needle)
            .cmp(&crate::util::match_rank(&rhs.name, &needle))
            // Tiebreak: keep pacman repo order first to keep layout familiar
            .then_with(|| {
                crate::util::repo_order(&lhs.source).cmp(&crate::util::repo_order(&rhs.source))
            })
            .then_with(|| lhs.name.to_lowercase().cmp(&rhs.name.to_lowercase()))
    });
}
/// What: Compute sort order indices for repo-then-name sorting.
///
/// Inputs:
/// - `results`: Slice of package items.
///
/// Output:
/// - Returns vector of indices representing sorted order.
///
/// Details:
/// - Used to populate cache without modifying the original results.
/// - Uses `sort_by_cached_key` so the `(repo order, lowercased name)` key is built
///   once per item instead of re-lowercasing names on every comparison inside a
///   comparator (O(n) allocations instead of O(n log n)). Both sorts are stable,
///   so the resulting order is identical.
fn compute_repo_then_name_indices(results: &[PackageItem]) -> Vec<usize> {
    #[cfg(test)]
    COMPUTE_REPO_INDICES_CALLS.fetch_add(1, Ordering::Relaxed);
    let mut indices: Vec<usize> = (0..results.len()).collect();
    indices.sort_by_cached_key(|&i| {
        let p = &results[i];
        // Tuple ordering: pacman repo precedence first, then case-insensitive name.
        (crate::util::repo_order(&p.source), p.name.to_lowercase())
    });
    indices
}
/// What: Compute sort order indices placing AUR packages ahead of official ones.
///
/// Inputs:
/// - `results`: Slice of package items.
///
/// Output:
/// - Returns vector of indices representing sorted order.
///
/// Details:
/// - AUR entries come first, ordered by descending popularity; official entries
///   follow in pacman repo order. Popularity ties (within `f64::EPSILON`) and all
///   remaining pairs fall back to case-insensitive name order.
/// - Used to populate cache without modifying the original results.
fn compute_aur_popularity_then_official_indices(results: &[PackageItem]) -> Vec<usize> {
    #[cfg(test)]
    COMPUTE_AUR_INDICES_CALLS.fetch_add(1, Ordering::Relaxed);
    let mut order: Vec<usize> = (0..results.len()).collect();
    order.sort_by(|&lhs, &rhs| {
        let (a, b) = (&results[lhs], &results[rhs]);
        let a_is_aur = matches!(a.source, Source::Aur);
        let b_is_aur = matches!(b.source, Source::Aur);
        // AUR entries sort strictly ahead of official ones.
        if a_is_aur != b_is_aur {
            return b_is_aur.cmp(&a_is_aur);
        }
        if a_is_aur {
            // Both AUR: higher popularity first.
            let pa = a.popularity.unwrap_or(0.0);
            let pb = b.popularity.unwrap_or(0.0);
            if (pa - pb).abs() > f64::EPSILON {
                return pb.partial_cmp(&pa).unwrap_or(std::cmp::Ordering::Equal);
            }
        } else {
            // Both official: keep pacman repo precedence.
            let oa = crate::util::repo_order(&a.source);
            let ob = crate::util::repo_order(&b.source);
            if oa != ob {
                return oa.cmp(&ob);
            }
        }
        // Final tiebreak: case-insensitive name.
        a.name.to_lowercase().cmp(&b.name.to_lowercase())
    });
    order
}
/// What: Apply the currently selected sorting mode to `app.results` in-place.
///
/// Inputs:
/// - `app`: Mutable application state (`results`, `selected`, `input`, `sort_mode`)
///
/// Output:
/// - Sorts `app.results` and preserves selection by name when possible; otherwise clamps index.
///
/// Details:
/// - Uses cache-based O(n) reordering when switching between cacheable modes (`RepoThenName` and `AurPopularityThenOfficial`).
/// - Performs full O(n log n) sort when cache is invalid or for `BestMatches` mode.
/// - Populates both cache orders eagerly after full sort to enable instant mode switching.
pub fn sort_results_preserve_selection(app: &mut AppState) {
if app.results.is_empty() {
return;
}
let prev_name = app.results.get(app.selected).map(|p| p.name.clone());
// Compute current signature to check cache validity
let current_sig = compute_results_signature(&app.results);
// Check if cache is valid and we can use O(n) reordering
let cache_valid = app.sort_cache_signature == Some(current_sig);
match app.sort_mode {
SortMode::RepoThenName => {
if cache_valid {
if let Some(ref indices) = app.sort_cache_repo_name {
// Cache hit: O(n) reorder
reorder_from_indices(&mut app.results, indices);
} else {
// Cache miss: compute indices from current state, then reorder
let indices = compute_repo_then_name_indices(&app.results);
reorder_from_indices(&mut app.results, &indices);
}
} else {
// Cache invalid: compute indices from current state, then reorder
let indices = compute_repo_then_name_indices(&app.results);
reorder_from_indices(&mut app.results, &indices);
}
// Re-anchor caches to current order to keep future switches correct.
app.sort_cache_repo_name = Some((0..app.results.len()).collect());
app.sort_cache_aur_popularity =
Some(compute_aur_popularity_then_official_indices(&app.results));
app.sort_cache_signature = Some(current_sig);
}
SortMode::AurPopularityThenOfficial => {
if cache_valid {
if let Some(ref indices) = app.sort_cache_aur_popularity {
// Cache hit: O(n) reorder
reorder_from_indices(&mut app.results, indices);
} else {
// Cache miss: compute indices from current state, then reorder
let indices = compute_aur_popularity_then_official_indices(&app.results);
reorder_from_indices(&mut app.results, &indices);
}
} else {
// Cache invalid: compute indices from current state, then reorder
let indices = compute_aur_popularity_then_official_indices(&app.results);
reorder_from_indices(&mut app.results, &indices);
}
// Re-anchor caches to current order to keep future switches correct.
app.sort_cache_repo_name = Some(compute_repo_then_name_indices(&app.results));
app.sort_cache_aur_popularity = Some((0..app.results.len()).collect());
app.sort_cache_signature = Some(current_sig);
}
SortMode::BestMatches => {
// BestMatches is query-dependent, always do full sort and don't cache
sort_best_matches(&mut app.results, &app.input);
// Clear mode-specific caches since BestMatches can't use them
app.sort_cache_repo_name = None;
app.sort_cache_aur_popularity = None;
app.sort_cache_signature = None;
}
}
// Restore selection by name
if let Some(name) = prev_name {
if let Some(pos) = app.results.iter().position(|p| p.name == name) {
app.selected = pos;
app.list_state.select(Some(pos));
} else {
app.selected = app.selected.min(app.results.len().saturating_sub(1));
app.list_state.select(Some(app.selected));
}
}
}
/// What: Drop every cached sort order together with its validity signature.
///
/// Inputs:
/// - `app`: Mutable application state.
///
/// Output:
/// - All three sort-cache fields are reset to `None`.
///
/// Details:
/// - Call whenever the underlying results change (new search, filter change, etc.)
///   so a stale index order can never be replayed against fresh results.
pub fn invalidate_sort_caches(app: &mut AppState) {
    app.sort_cache_signature = None;
    app.sort_cache_repo_name = None;
    app.sort_cache_aur_popularity = None;
}
// Unit tests for sort-mode application, selection preservation, and the
// signature-validated sort-order caches.
#[cfg(test)]
mod tests {
    use super::*;

    #[cfg(test)]
    /// What: Reset compute index call counters used for instrumentation in tests.
    ///
    /// Inputs:
    /// - None.
    ///
    /// Output:
    /// - Clears the atomic counters to zero.
    ///
    /// Details:
    /// - Keeps tests isolated by removing cross-test coupling from shared state.
    fn reset_compute_counters() {
        COMPUTE_REPO_INDICES_CALLS.store(0, Ordering::SeqCst);
        COMPUTE_AUR_INDICES_CALLS.store(0, Ordering::SeqCst);
    }

    /// Build an official-repo `PackageItem` fixture with a fixed version and arch.
    fn item_official(name: &str, repo: &str) -> crate::state::PackageItem {
        crate::state::PackageItem {
            name: name.to_string(),
            version: "1.0".to_string(),
            description: format!("{name} desc"),
            source: crate::state::Source::Official {
                repo: repo.to_string(),
                arch: "x86_64".to_string(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }

    /// Build an AUR `PackageItem` fixture with the given popularity score.
    fn item_aur(name: &str, pop: Option<f64>) -> crate::state::PackageItem {
        crate::state::PackageItem {
            name: name.to_string(),
            version: "1.0".to_string(),
            description: format!("{name} desc"),
            source: crate::state::Source::Aur,
            popularity: pop,
            out_of_date: None,
            orphaned: false,
        }
    }

    #[test]
    /// What: Confirm sorting preserves the selected index while adjusting order across modes, including relevance matching.
    ///
    /// Inputs:
    /// - Mixed list of official and AUR results.
    /// - Sort mode toggled through `RepoThenName`, `AurPopularityThenOfficial`, and `BestMatches` with input `"bb"`.
    ///
    /// Output:
    /// - Selection remains on the prior package and ordering reflects repo priority, popularity preference, and match rank, respectively.
    ///
    /// Details:
    /// - Ensures the UI behaviour stays predictable when users toggle sort modes after highlighting a result.
    fn sort_preserve_selection_and_best_matches() {
        let mut app = AppState {
            results: vec![
                item_aur("zzz", Some(1.0)),
                item_official("aaa", "core"),
                item_official("bbb", "extra"),
                item_aur("ccc", Some(10.0)),
            ],
            selected: 2,
            sort_mode: SortMode::RepoThenName,
            ..Default::default()
        };
        app.list_state.select(Some(2));
        sort_results_preserve_selection(&mut app);
        assert_eq!(
            app.results
                .iter()
                .filter(|p| matches!(p.source, Source::Official { .. }))
                .count(),
            2
        );
        // Selection must still point at the previously highlighted package.
        assert_eq!(app.results[app.selected].name, "bbb");
        app.sort_mode = SortMode::AurPopularityThenOfficial;
        sort_results_preserve_selection(&mut app);
        let aur_first = &app.results[0];
        assert!(matches!(aur_first.source, Source::Aur));
        app.input = "bb".into();
        app.sort_mode = SortMode::BestMatches;
        sort_results_preserve_selection(&mut app);
        assert!(
            app.results
                .iter()
                .position(|p| p.name.contains("bb"))
                .expect("should find package containing 'bb' in test data")
                <= 1
        );
    }

    #[test]
    /// What: Validate `BestMatches` tiebreakers prioritise repo order before lexicographic name sorting.
    ///
    /// Inputs:
    /// - Three official packages whose names share the `alpha` prefix across `core` and `extra` repos.
    ///
    /// Output:
    /// - Sorted list begins with the `core` repo entry, followed by `extra` items in name order.
    ///
    /// Details:
    /// - Captures the layered tiebreak logic to catch regressions if repo precedence changes.
    fn sort_bestmatches_tiebreak_repo_then_name() {
        let mut app = AppState {
            results: vec![
                item_official("alpha2", "extra"),
                item_official("alpha1", "extra"),
                item_official("alpha_core", "core"),
            ],
            input: "alpha".into(),
            sort_mode: SortMode::BestMatches,
            ..Default::default()
        };
        sort_results_preserve_selection(&mut app);
        let names: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        assert_eq!(names, vec!["alpha_core", "alpha1", "alpha2"]);
    }

    #[test]
    /// What: Ensure results signature is order-insensitive but content-sensitive.
    ///
    /// Inputs:
    /// - Same set of packages in different orders.
    /// - A variant with an extra package.
    ///
    /// Output:
    /// - Signatures match for permutations and differ when content changes.
    ///
    /// Details:
    /// - Guards cache reuse when switching sort modes without masking real result changes.
    fn results_signature_is_order_insensitive() {
        let base = vec![
            item_official("aaa", "core"),
            item_official("bbb", "extra"),
            item_official("ccc", "community"),
        ];
        let permuted = vec![
            item_official("ccc", "community"),
            item_official("aaa", "core"),
            item_official("bbb", "extra"),
        ];
        let mut extended = permuted.clone();
        extended.push(item_official("ddd", "community"));
        let sig_base = compute_results_signature(&base);
        let sig_permuted = compute_results_signature(&permuted);
        let sig_extended = compute_results_signature(&extended);
        assert_eq!(sig_base, sig_permuted);
        assert_ne!(sig_base, sig_extended);
    }

    #[test]
    /// What: Ensure the AUR popularity sort orders helpers by descending popularity with deterministic tie-breaks.
    ///
    /// Inputs:
    /// - AUR items sharing the same popularity value and official entries from different repos.
    ///
    /// Output:
    /// - AUR items sorted by name when popularity ties, followed by official packages prioritising `core` before `extra`.
    ///
    /// Details:
    /// - Verifies the composite comparator remains stable for UI diffs and regression detection.
    fn sort_aur_popularity_and_official_tiebreaks() {
        let mut app = AppState {
            results: vec![
                item_aur("aurB", Some(1.0)),
                item_aur("aurA", Some(1.0)),
                item_official("z_off", "core"),
                item_official("a_off", "extra"),
            ],
            sort_mode: SortMode::AurPopularityThenOfficial,
            ..Default::default()
        };
        sort_results_preserve_selection(&mut app);
        let names: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        assert_eq!(names, vec!["aurA", "aurB", "z_off", "a_off"]);
    }

    #[test]
    /// What: Verify cache invalidation clears all sort cache fields.
    ///
    /// Inputs:
    /// - `AppState` with manually set cache fields.
    ///
    /// Output:
    /// - All cache fields are `None` after invalidation.
    ///
    /// Details:
    /// - Tests that `invalidate_sort_caches` properly clears all cache state.
    fn sort_cache_invalidation() {
        let mut app = AppState {
            results: vec![
                item_official("pkg1", "core"),
                item_official("pkg2", "extra"),
            ],
            sort_mode: SortMode::RepoThenName,
            sort_cache_signature: Some(12345),
            sort_cache_repo_name: Some(vec![0, 1]),
            sort_cache_aur_popularity: Some(vec![1, 0]),
            ..Default::default()
        };
        // Invalidate cache
        invalidate_sort_caches(&mut app);
        assert!(app.sort_cache_signature.is_none());
        assert!(app.sort_cache_repo_name.is_none());
        assert!(app.sort_cache_aur_popularity.is_none());
    }

    #[test]
    /// What: Verify `BestMatches` mode does not populate mode-specific caches.
    ///
    /// Inputs:
    /// - Results list sorted with `BestMatches` mode.
    ///
    /// Output:
    /// - Mode-specific caches remain `None` for `BestMatches`.
    ///
    /// Details:
    /// - `BestMatches` depends on the query and should not cache mode-specific indices.
    fn sort_bestmatches_no_mode_cache() {
        let mut app = AppState {
            results: vec![
                item_official("alpha", "core"),
                item_official("beta", "extra"),
            ],
            input: "alph".into(),
            sort_mode: SortMode::BestMatches,
            ..Default::default()
        };
        sort_results_preserve_selection(&mut app);
        // BestMatches should not populate mode-specific caches
        assert!(app.sort_cache_repo_name.is_none());
        assert!(app.sort_cache_aur_popularity.is_none());
    }

    #[test]
    /// What: Verify cache hit path uses O(n) reordering when cache is valid.
    ///
    /// Inputs:
    /// - Results with valid cache signature and cached indices for `RepoThenName`.
    ///
    /// Output:
    /// - Results are reordered using cached indices without full sort.
    ///
    /// Details:
    /// - Tests that cache-based optimization works correctly.
    fn sort_cache_hit_repo_then_name() {
        let mut app = AppState {
            results: vec![
                item_official("zzz", "extra"),
                item_official("aaa", "core"),
                item_official("bbb", "core"),
            ],
            sort_mode: SortMode::RepoThenName,
            ..Default::default()
        };
        // First sort to populate cache
        sort_results_preserve_selection(&mut app);
        let first_sort_order: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        let cached_sig = app.sort_cache_signature;
        // Change to different order
        app.sort_mode = SortMode::AurPopularityThenOfficial;
        sort_results_preserve_selection(&mut app);
        // Switch back - should use cache
        app.sort_mode = SortMode::RepoThenName;
        sort_results_preserve_selection(&mut app);
        let second_sort_order: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        // Should match first sort order
        assert_eq!(first_sort_order, second_sort_order);
        assert_eq!(app.sort_cache_signature, cached_sig);
    }

    #[test]
    /// What: Verify cache miss path performs full sort when results change.
    ///
    /// Inputs:
    /// - Results with cached signature that doesn't match current results.
    ///
    /// Output:
    /// - Full sort is performed and cache is repopulated.
    ///
    /// Details:
    /// - Tests that cache invalidation works correctly.
    fn sort_cache_miss_on_results_change() {
        let mut app = AppState {
            results: vec![item_official("aaa", "core"), item_official("bbb", "extra")],
            sort_mode: SortMode::RepoThenName,
            ..Default::default()
        };
        // First sort to populate cache
        sort_results_preserve_selection(&mut app);
        let old_sig = app.sort_cache_signature;
        // Change results (simulating new search)
        app.results = vec![item_official("ccc", "core"), item_official("ddd", "extra")];
        // Sort again - should detect cache miss and repopulate
        sort_results_preserve_selection(&mut app);
        let new_sig = app.sort_cache_signature;
        // Signature should be different
        assert_ne!(old_sig, new_sig);
        assert!(app.sort_cache_repo_name.is_some());
        assert!(app.sort_cache_aur_popularity.is_some());
    }

    #[test]
    /// What: Ensure cache invalidation only computes current-mode indices once while rebuilding caches.
    ///
    /// Inputs:
    /// - Results with a deliberately mismatched cache signature to force invalidation.
    ///
    /// Output:
    /// - Current-mode index computation runs once; cross-mode cache computation still occurs once after reorder.
    ///
    /// Details:
    /// - Guards against redundant index work when cache signatures are stale.
    fn sort_cache_invalid_computes_indices_once() {
        reset_compute_counters();
        let mut app = AppState {
            results: vec![item_official("bbb", "extra"), item_official("aaa", "core")],
            sort_mode: SortMode::RepoThenName,
            ..Default::default()
        };
        // Force signature mismatch to hit invalidation path.
        let sig = compute_results_signature(&app.results);
        app.sort_cache_signature = Some(sig.wrapping_add(1));
        sort_results_preserve_selection(&mut app);
        assert_eq!(
            COMPUTE_REPO_INDICES_CALLS.load(Ordering::SeqCst),
            1,
            "repo indices should be computed exactly once on cache invalidation"
        );
        assert_eq!(
            COMPUTE_AUR_INDICES_CALLS.load(Ordering::SeqCst),
            1,
            "aur indices should be recomputed once to re-anchor caches"
        );
        let names: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        assert_eq!(names, vec!["aaa", "bbb"]);
    }

    #[test]
    /// What: Verify switching between cacheable modes uses cached indices.
    ///
    /// Inputs:
    /// - Results sorted in `RepoThenName` mode with populated caches.
    ///
    /// Output:
    /// - Switching to `AurPopularityThenOfficial` uses cached indices for O(n) reordering.
    ///
    /// Details:
    /// - Tests the main optimization: instant mode switching via cache.
    fn sort_cache_mode_switching() {
        let mut app = AppState {
            results: vec![
                item_aur("low_pop", Some(1.0)),
                item_official("core_pkg", "core"),
                item_aur("high_pop", Some(10.0)),
                item_official("extra_pkg", "extra"),
            ],
            sort_mode: SortMode::RepoThenName,
            ..Default::default()
        };
        // Initial sort - populates both caches
        sort_results_preserve_selection(&mut app);
        assert!(app.sort_cache_repo_name.is_some());
        assert!(app.sort_cache_aur_popularity.is_some());
        let repo_order: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        // Switch to AUR popularity - should use cache
        app.sort_mode = SortMode::AurPopularityThenOfficial;
        sort_results_preserve_selection(&mut app);
        let _aur_order: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        // AUR packages should be first
        assert!(matches!(app.results[0].source, Source::Aur));
        // Switch back to repo - should use cache
        app.sort_mode = SortMode::RepoThenName;
        sort_results_preserve_selection(&mut app);
        let repo_order_again: Vec<String> = app.results.iter().map(|p| p.name.clone()).collect();
        assert_eq!(repo_order, repo_order_again);
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/lists.rs | src/logic/lists.rs | //! Management of install, remove, and downgrade package lists.
use crate::state::{AppState, PackageItem};
use tracing::debug;
/// What: Insert `item` at the front of `list` unless its name is already tracked.
///
/// Inputs:
/// - `list`: Target package list.
/// - `names`: Lowercased-name `HashSet` mirroring `list` membership.
/// - `item`: Package to add (consumed on success and on dedup alike).
///
/// Output:
/// - Returns `true` when the item was inserted, `false` on a duplicate (no-op).
///
/// Details:
/// - Shared by the install/remove/downgrade helpers so the case-insensitive dedup
///   and front-insertion semantics stay identical across all three lists.
/// - The `HashSet` gives O(1) membership checking instead of a linear scan.
fn insert_front_unique(
    list: &mut Vec<PackageItem>,
    names: &mut std::collections::HashSet<String>,
    item: PackageItem,
) -> bool {
    if !names.insert(item.name.to_lowercase()) {
        return false;
    }
    list.insert(0, item);
    true
}

/// What: Add a `PackageItem` to the install list if it is not already present.
///
/// Inputs:
/// - `app`: Mutable application state (`install_list` and selection)
/// - `item`: Package to add
///
/// Output:
/// - Inserts at the front on success, marks list dirty, and selects index 0; no-op on dedup.
///
/// Details:
/// - Updates `last_install_change` to support UI throttling of follow-up actions.
/// - Dedup/insertion are delegated to `insert_front_unique`, which keeps the
///   `HashSet` name index in sync for O(1) membership checks.
pub fn add_to_install_list(app: &mut AppState, item: PackageItem) {
    let prev_len = app.install_list.len();
    if !insert_front_unique(&mut app.install_list, &mut app.install_list_names, item) {
        return;
    }
    app.install_dirty = true;
    app.last_install_change = Some(std::time::Instant::now());
    // Always keep cursor on top after adding
    app.install_state.select(Some(0));
    debug!(
        new_len = app.install_list.len(),
        previous_len = prev_len,
        first = ?app.install_list.first().map(|p| &p.name),
        "[State] Added package to install list"
    );
}

/// What: Add a `PackageItem` to the remove list if it is not already present.
///
/// Inputs:
/// - `app`: Mutable application state (`remove_list` and selection)
/// - `item`: Package to add
///
/// Output:
/// - Inserts at the front and selects index 0; no-op on dedup.
///
/// Details:
/// - Leaves `remove_list` order deterministic by always pushing new entries to the head.
/// - Dedup/insertion are delegated to `insert_front_unique`, which keeps the
///   `HashSet` name index in sync for O(1) membership checks.
pub fn add_to_remove_list(app: &mut AppState, item: PackageItem) {
    let prev_len = app.remove_list.len();
    if !insert_front_unique(&mut app.remove_list, &mut app.remove_list_names, item) {
        return;
    }
    app.remove_state.select(Some(0));
    debug!(
        new_len = app.remove_list.len(),
        previous_len = prev_len,
        first = ?app.remove_list.first().map(|p| &p.name),
        "[State] Added package to remove list"
    );
}

/// What: Add a `PackageItem` to the downgrade list if it is not already present.
///
/// Inputs:
/// - `app`: Mutable application state (`downgrade_list` and selection)
/// - `item`: Package to add
///
/// Output:
/// - Inserts at the front and selects index 0; no-op on dedup.
///
/// Details:
/// - Ensures repeated requests for the same package keep the cursor anchored at the newest item.
/// - Dedup/insertion are delegated to `insert_front_unique`, which keeps the
///   `HashSet` name index in sync for O(1) membership checks.
pub fn add_to_downgrade_list(app: &mut AppState, item: PackageItem) {
    let prev_len = app.downgrade_list.len();
    if !insert_front_unique(&mut app.downgrade_list, &mut app.downgrade_list_names, item) {
        return;
    }
    app.downgrade_state.select(Some(0));
    debug!(
        new_len = app.downgrade_list.len(),
        previous_len = prev_len,
        first = ?app.downgrade_list.first().map(|p| &p.name),
        "[State] Added package to downgrade list"
    );
}
// Unit tests for install/remove/downgrade list insertion, dedup, and the
// name-index `HashSet` synchronization.
#[cfg(test)]
mod tests {
    use super::*;

    /// Build an official-repo `PackageItem` fixture with a fixed version and arch.
    fn item_official(name: &str, repo: &str) -> PackageItem {
        PackageItem {
            name: name.to_string(),
            version: "1.0".to_string(),
            description: format!("{name} desc"),
            source: crate::state::Source::Official {
                repo: repo.to_string(),
                arch: "x86_64".to_string(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }

    #[test]
    /// What: Ensure the install list deduplicates entries case-insensitively and updates selection state.
    ///
    /// Inputs:
    /// - Two package items whose names differ only by casing.
    ///
    /// Output:
    /// - Install list contains a single entry, marked dirty, with the selection pointing at index `0`.
    ///
    /// Details:
    /// - Exercises the guard path preventing duplicate installs and verifies the UI selection remains anchored on insert.
    fn add_to_install_list_behavior() {
        let mut app = AppState::default();
        add_to_install_list(&mut app, item_official("pkg1", "core"));
        add_to_install_list(&mut app, item_official("Pkg1", "core"));
        assert_eq!(app.install_list.len(), 1);
        assert!(app.install_dirty);
        assert_eq!(app.install_state.selected(), Some(0));
    }

    #[test]
    /// What: Confirm the remove list enforces case-insensitive uniqueness and selection updates.
    ///
    /// Inputs:
    /// - Two package items whose names differ only by casing.
    ///
    /// Output:
    /// - Remove list retains a single item and its selection index becomes `0`.
    ///
    /// Details:
    /// - Protects against regressions where duplicates might shift the selection or leak into the list.
    fn add_to_remove_list_behavior() {
        let mut app = AppState::default();
        add_to_remove_list(&mut app, item_official("pkg1", "extra"));
        add_to_remove_list(&mut app, item_official("Pkg1", "extra"));
        assert_eq!(app.remove_list.len(), 1);
        assert_eq!(app.remove_state.selected(), Some(0));
    }

    #[test]
    /// What: Verify the downgrade list rejects duplicate names regardless of case and updates selection.
    ///
    /// Inputs:
    /// - Two package items whose names differ only by casing.
    ///
    /// Output:
    /// - Downgrade list contains one item and the selection index resolves to `0`.
    ///
    /// Details:
    /// - Ensures repeated downgrade requests do not reorder the cursor unexpectedly.
    fn add_to_downgrade_list_behavior() {
        let mut app = AppState::default();
        add_to_downgrade_list(&mut app, item_official("PkgX", "extra"));
        add_to_downgrade_list(&mut app, item_official("pkgx", "extra"));
        assert_eq!(app.downgrade_list.len(), 1);
        assert_eq!(app.downgrade_state.selected(), Some(0));
    }

    #[test]
    /// What: Verify `HashSet` synchronization after adding and removing items from install list.
    ///
    /// Inputs:
    /// - Add items to install list, then remove them.
    ///
    /// Output:
    /// - `HashSet` contains names only when items are in the list.
    ///
    /// Details:
    /// - Ensures `HashSet` stays synchronized with the `Vec` for O(1) membership checking.
    fn install_list_hashset_synchronization() {
        let mut app = AppState::default();
        add_to_install_list(&mut app, item_official("pkg1", "core"));
        add_to_install_list(&mut app, item_official("pkg2", "extra"));
        assert!(app.install_list_names.contains("pkg1"));
        assert!(app.install_list_names.contains("pkg2"));
        assert_eq!(app.install_list_names.len(), 2);
        // Remove first item (pkg2 is at index 0 since it was added last)
        // Items are inserted at index 0, so order is: [pkg2, pkg1]
        let removed_name = app.install_list[0].name.to_lowercase();
        app.install_list_names.remove(&removed_name);
        app.install_list.remove(0);
        // After removing pkg2, pkg1 should remain
        assert!(app.install_list_names.contains("pkg1"));
        assert!(!app.install_list_names.contains("pkg2"));
        assert_eq!(app.install_list_names.len(), 1);
    }

    #[test]
    /// What: Verify `HashSet` synchronization after clearing install list.
    ///
    /// Inputs:
    /// - Add items to install list, then clear it.
    ///
    /// Output:
    /// - `HashSet` is empty after clearing.
    ///
    /// Details:
    /// - Ensures `HashSet` is cleared when list is cleared.
    fn install_list_hashset_clear_synchronization() {
        let mut app = AppState::default();
        add_to_install_list(&mut app, item_official("pkg1", "core"));
        add_to_install_list(&mut app, item_official("pkg2", "extra"));
        assert_eq!(app.install_list_names.len(), 2);
        app.install_list.clear();
        app.install_list_names.clear();
        assert!(app.install_list_names.is_empty());
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps.rs | src/logic/deps.rs | //! Dependency resolution and analysis for preflight checks.
mod aur;
mod parse;
mod query;
mod resolve;
mod reverse;
mod source;
mod srcinfo;
mod status;
mod utils;
use crate::state::modal::{DependencyInfo, DependencyStatus};
use crate::state::types::{PackageItem, Source};
use parse::parse_dep_spec;
use resolve::{batch_fetch_official_deps, fetch_package_conflicts, resolve_package_deps};
use source::{determine_dependency_source, is_system_package};
use status::determine_status;
use std::collections::{HashMap, HashSet};
use utils::dependency_priority;
pub use query::{
get_installed_packages, get_provided_packages, get_upgradable_packages,
is_package_installed_or_provided,
};
pub use reverse::{
ReverseDependencyReport, get_installed_required_by, has_installed_required_by,
resolve_reverse_dependencies,
};
pub use status::{get_installed_version, version_satisfies};
/// What: Check and process conflicts for a package.
///
/// Inputs:
/// - `item`: Package item to check conflicts for.
/// - `root_names`: Set of root package names in the install list.
/// - `installed`: Set of installed package names.
/// - `provided`: Set of package names provided by installed packages.
/// - `deps`: Mutable reference to the dependency map to update.
///
/// Output:
/// - Updates the `deps` map with conflict entries.
///
/// Details:
/// - Checks conflicts against installed packages and packages in the install list.
/// - Creates conflict entries for both the conflicting package and the current package if needed.
fn process_conflicts(
    item: &PackageItem,
    root_names: &HashSet<String>,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    deps: &mut HashMap<String, DependencyInfo>,
) {
    let conflicts = fetch_package_conflicts(&item.name, &item.source);
    if conflicts.is_empty() {
        return;
    }
    tracing::debug!("Package {} conflicts with: {:?}", item.name, conflicts);
    for conflict_name in conflicts {
        // Skip self-conflicts (package conflicting with itself)
        if conflict_name.eq_ignore_ascii_case(&item.name) {
            tracing::debug!(
                "Skipping self-conflict: {} conflicts with itself",
                item.name
            );
            continue;
        }
        // Check if conflict is installed or provided by any installed package
        let is_installed = crate::logic::deps::query::is_package_installed_or_provided(
            &conflict_name,
            installed,
            provided,
        );
        // Check if conflict is in the install list
        // NOTE(review): this lookup is case-sensitive while the self-conflict
        // check above is ASCII-case-insensitive — confirm whether mixed-case
        // names can reach this point.
        let is_in_install_list = root_names.contains(&conflict_name);
        if !is_installed && !is_in_install_list {
            continue;
        }
        // Human-readable explanation shown in the UI for this conflict.
        let reason = if is_installed && is_in_install_list {
            format!("conflicts with {conflict_name} (installed and in install list)")
        } else if is_installed {
            format!("conflicts with installed package {conflict_name}")
        } else {
            format!("conflicts with package {conflict_name} in install list")
        };
        // Add or update conflict entry for the conflicting package
        let entry = deps.entry(conflict_name.clone()).or_insert_with(|| {
            // Determine source for conflicting package
            let (source, is_core) =
                crate::logic::deps::source::determine_dependency_source(&conflict_name, installed);
            let is_system =
                is_core || crate::logic::deps::source::is_system_package(&conflict_name);
            DependencyInfo {
                name: conflict_name.clone(),
                version: String::new(),
                status: DependencyStatus::Conflict {
                    reason: reason.clone(),
                },
                source,
                required_by: vec![item.name.clone()],
                depends_on: Vec::new(),
                is_core,
                is_system,
            }
        });
        // Update status to Conflict if not already
        if !matches!(entry.status, DependencyStatus::Conflict { .. }) {
            entry.status = DependencyStatus::Conflict { reason };
        }
        // Add to required_by if not present
        if !entry.required_by.contains(&item.name) {
            entry.required_by.push(item.name.clone());
        }
        // If the conflict is with another package in the install list, also create
        // a conflict entry for the current package being checked, so it shows up
        // in the UI as having a conflict
        if is_in_install_list {
            let reverse_reason = format!("conflicts with package {conflict_name} in install list");
            let current_entry = deps.entry(item.name.clone()).or_insert_with(|| {
                // Determine source for current package
                let (dep_source, is_core) =
                    crate::logic::deps::source::determine_dependency_source(&item.name, installed);
                let is_system =
                    is_core || crate::logic::deps::source::is_system_package(&item.name);
                DependencyInfo {
                    name: item.name.clone(),
                    version: String::new(),
                    status: DependencyStatus::Conflict {
                        reason: reverse_reason.clone(),
                    },
                    source: dep_source,
                    required_by: vec![conflict_name.clone()],
                    depends_on: Vec::new(),
                    is_core,
                    is_system,
                }
            });
            // Update status to Conflict if not already
            if !matches!(current_entry.status, DependencyStatus::Conflict { .. }) {
                current_entry.status = DependencyStatus::Conflict {
                    reason: reverse_reason,
                };
            }
            // Add to required_by if not present
            if !current_entry.required_by.contains(&conflict_name) {
                current_entry.required_by.push(conflict_name.clone());
            }
        }
    }
}
/// What: Process batched dependencies for an official package.
///
/// Inputs:
/// - `name`: Package name whose dependency specs were batch-fetched.
/// - `dep_names`: Dependency specification strings (possibly versioned).
/// - `installed`: Set of installed package names.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of upgradable package names.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records, one per surviving spec.
///
/// Details:
/// - Parses each spec and drops self-references as well as shared-library
///   specs (`*.so`, `*.so.N`, `*.so=N`), which are not real packages.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
fn process_batched_dependencies(
    name: &str,
    dep_names: Vec<String>,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Vec<DependencyInfo> {
    dep_names
        .into_iter()
        .filter_map(|dep_spec| {
            let (pkg_name, version_req) = parse_dep_spec(&dep_spec);
            // Never list a package as its own dependency.
            if pkg_name == name {
                return None;
            }
            // Shared-object specs are library sonames, not package names.
            let lowered = pkg_name.to_lowercase();
            if lowered.ends_with(".so") || lowered.contains(".so.") || lowered.contains(".so=") {
                return None;
            }
            let status = determine_status(&pkg_name, &version_req, installed, provided, upgradable);
            let (dep_source, is_core) = determine_dependency_source(&pkg_name, installed);
            let is_system = is_core || is_system_package(&pkg_name);
            Some(DependencyInfo {
                name: pkg_name,
                version: version_req,
                status,
                source: dep_source,
                required_by: vec![name.to_string()],
                depends_on: Vec::new(),
                is_core,
                is_system,
            })
        })
        .collect()
}
/// What: Merge a dependency into the dependency map.
///
/// Inputs:
/// - `dep`: Dependency to merge.
/// - `parent_name`: Name of the package that requires this dependency.
/// - `installed`: Set of installed package names.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of upgradable package names.
/// - `deps`: Mutable reference to the dependency map to update.
///
/// Output:
/// - Updates the `deps` map with the merged dependency.
///
/// Details:
/// - Merges status (keeps worst), version requirements (keeps more restrictive), and `required_by` lists.
/// - Newly created entries already list `parent_name`, so the parent is only
///   appended for pre-existing entries that lack it (previously the parent was
///   pushed unconditionally for new entries, producing a duplicate).
fn merge_dependency(
    dep: &DependencyInfo,
    parent_name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
    deps: &mut HashMap<String, DependencyInfo>,
) {
    let dep_name = dep.name.clone();
    // Check membership up-front: `entry` below holds a mutable borrow of `deps`.
    // Only an EXISTING entry missing the parent needs a push; a fresh entry is
    // created with `required_by: vec![parent_name]` already populated.
    let needs_required_by_update = deps
        .get(&dep_name)
        .is_some_and(|e| !e.required_by.iter().any(|r| r == parent_name));
    // Update or create dependency entry
    let entry = deps
        .entry(dep_name.clone())
        .or_insert_with(|| DependencyInfo {
            name: dep_name.clone(),
            version: dep.version.clone(),
            status: dep.status.clone(),
            source: dep.source.clone(),
            required_by: vec![parent_name.to_string()],
            depends_on: Vec::new(),
            is_core: dep.is_core,
            is_system: dep.is_system,
        });
    // Update required_by (add the parent if not already present)
    if needs_required_by_update {
        entry.required_by.push(parent_name.to_string());
    }
    // Merge status (keep worst)
    // But never overwrite a Conflict status - conflicts take precedence
    if !matches!(entry.status, DependencyStatus::Conflict { .. }) {
        let existing_priority = dependency_priority(&entry.status);
        let new_priority = dependency_priority(&dep.status);
        if new_priority < existing_priority {
            entry.status = dep.status.clone();
        }
    }
    // Merge version requirements (keep more restrictive)
    // But never overwrite a Conflict status - conflicts take precedence
    if !dep.version.is_empty() && dep.version != entry.version {
        // If entry is already a conflict, don't overwrite it with dependency status
        if matches!(entry.status, DependencyStatus::Conflict { .. }) {
            // Still update version if needed, but keep conflict status
            if entry.version.is_empty() {
                entry.version.clone_from(&dep.version);
            }
            return;
        }
        if entry.version.is_empty() {
            entry.version.clone_from(&dep.version);
        } else {
            // Check which version requirement is more restrictive
            let existing_status =
                determine_status(&entry.name, &entry.version, installed, provided, upgradable);
            let new_status =
                determine_status(&entry.name, &dep.version, installed, provided, upgradable);
            let existing_req_priority = dependency_priority(&existing_status);
            let new_req_priority = dependency_priority(&new_status);
            // Keep the requirement whose status is more severe (lower priority value).
            if new_req_priority < existing_req_priority {
                entry.version.clone_from(&dep.version);
                entry.status = new_status;
            }
        }
    }
}
/// What: Resolve dependencies for a single package.
///
/// Inputs:
/// - `item`: Package item to resolve dependencies for.
/// - `batched_deps_cache`: Optional cache of batched dependencies for official packages.
/// - `installed`: Set of installed package names.
/// - `provided`: Map of provided packages.
/// - `upgradable`: Set of upgradable package names.
///
/// Output:
/// - Returns a result containing a vector of `DependencyInfo` records or an error.
///
/// Details:
/// - Uses batched cache if available for official packages, otherwise calls `resolve_package_deps`.
fn resolve_single_package_deps(
item: &PackageItem,
batched_deps_cache: &HashMap<String, Vec<String>>,
installed: &HashSet<String>,
provided: &HashSet<String>,
upgradable: &HashSet<String>,
) -> Result<Vec<DependencyInfo>, String> {
let name = &item.name;
let source = &item.source;
tracing::debug!(
"Resolving direct dependencies for {} (source: {:?})",
name,
source
);
// Check if we have batched results for this official package
let use_batched = matches!(source, Source::Official { repo, .. } if repo != "local")
&& batched_deps_cache.contains_key(name.as_str());
if use_batched {
// Use batched dependency list
let dep_names = batched_deps_cache
.get(name.as_str())
.cloned()
.unwrap_or_default();
let deps = process_batched_dependencies(name, dep_names, installed, provided, upgradable);
Ok(deps)
} else {
resolve_package_deps(name, source, installed, provided, upgradable)
}
}
/// What: Resolve dependencies for the requested install set while consolidating duplicates.
///
/// Inputs:
/// - `items`: Ordered slice of packages that should be analysed for dependency coverage.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records summarising dependency status and provenance.
///
/// Details:
/// - Resolves ONLY direct dependencies (non-recursive) for each package in the list.
/// - Merges duplicates by name, retaining the most severe status across all requesters.
/// - Populates `depends_on` and `required_by` relationships to reflect dependency relationships.
/// - Result is sorted by severity (conflicts first), ties broken by name.
pub fn resolve_dependencies(items: &[PackageItem]) -> Vec<DependencyInfo> {
    let _span = tracing::info_span!(
        "resolve_dependencies",
        stage = "dependencies",
        item_count = items.len()
    )
    .entered();
    let start_time = std::time::Instant::now();
    // Only warn if called from UI thread (not from background workers)
    // Background workers use spawn_blocking which is fine and expected
    // NOTE(review): Backtrace::force_capture walks the stack on every call and
    // is comparatively expensive; it exists solely to detect mis-use from the
    // UI thread — confirm this cost is acceptable per resolution.
    let backtrace = std::backtrace::Backtrace::force_capture();
    let backtrace_str = format!("{backtrace:?}");
    // Only warn if NOT in a blocking task (i.e., called from UI thread/event handlers)
    // Check for various indicators that we're in a blocking thread pool
    let is_blocking_task = backtrace_str.contains("blocking::task")
        || backtrace_str.contains("blocking::pool")
        || backtrace_str.contains("spawn_blocking");
    if !is_blocking_task {
        tracing::warn!(
            "[Deps] resolve_dependencies called synchronously from UI thread! This will block! Backtrace:\n{}",
            backtrace_str
        );
    }
    if items.is_empty() {
        tracing::warn!("No packages provided for dependency resolution");
        return Vec::new();
    }
    // Accumulator keyed by dependency name; duplicates are merged below.
    let mut deps: HashMap<String, DependencyInfo> = HashMap::new();
    // Get installed packages set
    tracing::info!("Fetching list of installed packages...");
    let installed = get_installed_packages();
    tracing::info!("Found {} installed packages", installed.len());
    // Get all provided packages (e.g., rustup provides rust)
    // Note: Provides are checked lazily on-demand for performance, not built upfront
    tracing::debug!(
        "Provides will be checked lazily on-demand (not building full set for performance)"
    );
    let provided = get_provided_packages(&installed);
    // Get list of upgradable packages to detect if dependencies need upgrades
    let upgradable = get_upgradable_packages();
    tracing::info!("Found {} upgradable packages", upgradable.len());
    // Initialize set of root packages (for tracking)
    let root_names: HashSet<String> = items.iter().map(|i| i.name.clone()).collect();
    // Check conflicts for packages being installed
    // 1. Check conflicts against installed packages
    // 2. Check conflicts between packages in the install list
    tracing::info!("Checking conflicts for {} package(s)", items.len());
    for item in items {
        process_conflicts(item, &root_names, &installed, &provided, &mut deps);
    }
    // Note: Reverse conflict checking (checking all installed packages for conflicts with install list)
    // has been removed for performance reasons. Checking 2000+ installed packages would require
    // 2000+ calls to pacman -Si / yay -Si, which is extremely slow.
    //
    // The forward check above is sufficient and fast:
    // - For each package in install list, fetch its conflicts once (1-10 calls total)
    // - Check if those conflict names are in the installed package set (O(1) HashSet lookup)
    // - This catches all conflicts where install list packages conflict with installed packages
    //
    // Conflicts are typically symmetric (if A conflicts with B, then B conflicts with A),
    // so the forward check should catch most cases. If an installed package declares a conflict
    // with a package in the install list, it will be detected when we check the install list
    // package's conflicts against the installed package set.
    // Batch fetch official package dependencies to reduce pacman command overhead
    let official_packages: Vec<&str> = items
        .iter()
        .filter_map(|item| {
            if let Source::Official { repo, .. } = &item.source {
                if *repo == "local" {
                    None
                } else {
                    Some(item.name.as_str())
                }
            } else {
                None
            }
        })
        .collect();
    let batched_deps_cache = if official_packages.is_empty() {
        std::collections::HashMap::new()
    } else {
        batch_fetch_official_deps(&official_packages)
    };
    // Resolve ONLY direct dependencies (non-recursive)
    // This is faster and avoids resolving transitive dependencies which can be slow and error-prone
    for item in items {
        match resolve_single_package_deps(
            item,
            &batched_deps_cache,
            &installed,
            &provided,
            &upgradable,
        ) {
            Ok(resolved_deps) => {
                tracing::debug!(
                    " Found {} dependencies for {}",
                    resolved_deps.len(),
                    item.name
                );
                for dep in resolved_deps {
                    merge_dependency(
                        &dep,
                        &item.name,
                        &installed,
                        &provided,
                        &upgradable,
                        &mut deps,
                    );
                    // DON'T recursively resolve dependencies - only show direct dependencies
                    // This prevents resolving transitive dependencies which can be slow and error-prone
                }
            }
            Err(e) => {
                // Best-effort: a failed package does not abort the whole resolution.
                tracing::warn!(" Failed to resolve dependencies for {}: {}", item.name, e);
            }
        }
    }
    let mut result: Vec<DependencyInfo> = deps.into_values().collect();
    tracing::info!("Total unique dependencies found: {}", result.len());
    // Sort dependencies: conflicts first, then missing, then to-install, then installed
    result.sort_by(|a, b| {
        let priority_a = dependency_priority(&a.status);
        let priority_b = dependency_priority(&b.status);
        priority_a
            .cmp(&priority_b)
            .then_with(|| a.name.cmp(&b.name))
    });
    let elapsed = start_time.elapsed();
    let duration_ms = u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX);
    tracing::info!(
        stage = "dependencies",
        item_count = items.len(),
        result_count = result.len(),
        duration_ms = duration_ms,
        "Dependency resolution complete"
    );
    result
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/summary.rs | src/logic/summary.rs | //! Post-transaction summary computation for file changes and service impacts.
use crate::state::PackageItem;
/// What: Minimal data required to populate the `PostSummary` modal.
///
/// Inputs:
/// - Populated by `compute_post_summary` after pacman inspections.
///
/// Output:
/// - Supplies boolean outcome, counts, and auxiliary labels for post-transaction display.
///
/// Details:
/// - Contains information about what changed during the package operation.
/// - All counts are best-effort snapshots taken without modifying the system.
/// - Designed to be serializable/clonable so the UI can render snapshots outside the logic module.
#[derive(Debug, Clone)]
pub struct PostSummaryData {
    /// Whether the operation succeeded (`false` when the outcome is unknown).
    pub success: bool,
    /// Number of files that were changed.
    pub changed_files: usize,
    /// Number of .pacnew files created.
    pub pacnew_count: usize,
    /// Number of .pacsave files created.
    pub pacsave_count: usize,
    /// List of service names that need to be restarted.
    /// Entries are systemd unit file names (e.g. `foo.service`), sorted and deduplicated.
    pub services_pending: Vec<String>,
    /// Optional snapshot label if a snapshot was created.
    pub snapshot_label: Option<String>,
}
/// What: Count changed files and collect affected systemd services for given packages.
///
/// Inputs:
/// - `names`: Package names whose remote file lists should be inspected.
///
/// Output:
/// - Returns a tuple with the number of file entries and a sorted list of service unit filenames.
///
/// Details:
/// - Queries `pacman -Fl` per package, ignoring directory entries, and extracts `.service` paths.
/// - `pacman -Fl` prints repository-relative paths without a leading slash
///   (e.g. `usr/lib/systemd/system/foo.service`), so paths are normalized
///   before the systemd prefix check; the previous absolute-path prefix check
///   (`/usr/lib/systemd/system/`) could never match such output. Absolute
///   paths are still tolerated for robustness.
fn count_changed_files_and_services(names: &[String]) -> (usize, Vec<String>) {
    let mut total_files: usize = 0;
    // BTreeSet keeps the collected unit names deduplicated and sorted.
    let mut services: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    for name in names {
        // Best-effort: skip packages whose file list cannot be queried.
        let Ok(body) = crate::util::pacman::run_pacman(&["-Fl", name]) else {
            continue;
        };
        for line in body.lines() {
            // pacman -Fl format: "<pkg> <path>"
            let Some((_pkg, path)) = line.split_once(' ') else {
                continue;
            };
            // Directory entries end with '/'; only count file entries.
            if !path.ends_with('/') {
                total_files += 1;
            }
            // Normalize away any leading slash before matching the systemd dir.
            let rel = path.trim_start_matches('/');
            if rel.starts_with("usr/lib/systemd/system/") && rel.ends_with(".service") {
                // Keep only the unit file name (e.g. "foo.service").
                if let Some(stem) = std::path::Path::new(rel)
                    .file_name()
                    .and_then(|s| s.to_str())
                {
                    services.insert(stem.to_string());
                }
            }
        }
    }
    (total_files, services.into_iter().collect())
}
/// What: Scan `/etc` for outstanding `.pacnew` and `.pacsave` files.
///
/// Inputs:
/// - (none): Walks the filesystem directly with a depth guard.
///
/// Output:
/// - Returns counts of `.pacnew` and `.pacsave` files found beneath `/etc`.
///
/// Details:
/// - Ignores very deep directory structures to avoid pathological traversal scenarios.
/// - Symlinked directories are not followed (the directory test uses the entry's
///   own file type), so symlink cycles cannot cause unbounded recursion; the
///   previous `is_dir()` check followed symlinks despite claiming otherwise.
fn count_pac_conflicts_in_etc() -> (usize, usize) {
    // Recursive helper: tallies `.pacnew`/`.pacsave` file names under `dir`.
    fn walk(dir: &std::path::Path, pacnew: &mut usize, pacsave: &mut usize) {
        if let Ok(rd) = std::fs::read_dir(dir) {
            for entry in rd.flatten() {
                let p = entry.path();
                // `DirEntry::file_type` does not traverse symlinks, so a
                // symlink to a directory is treated as a plain entry here.
                let is_dir = entry.file_type().is_ok_and(|t| t.is_dir());
                if is_dir {
                    // Limit to reasonable depth to avoid pathological trees
                    if p.strip_prefix("/etc")
                        .is_ok_and(|stripped| stripped.components().count() > 12)
                    {
                        continue;
                    }
                    walk(&p, pacnew, pacsave);
                } else if let Some(name) = p.file_name().and_then(|s| s.to_str()) {
                    if name.ends_with(".pacnew") {
                        *pacnew += 1;
                    }
                    if name.ends_with(".pacsave") {
                        *pacsave += 1;
                    }
                }
            }
        }
    }
    let mut pn = 0usize;
    let mut ps = 0usize;
    walk(std::path::Path::new("/etc"), &mut pn, &mut ps);
    (pn, ps)
}
/// What: Produce a best-effort summary of potential post-transaction tasks.
///
/// Inputs:
/// - `items`: Packages that were part of the transaction and should inform the summary.
/// - `success`: Execution result: `Some(true)` for success, `Some(false)` for failure, `None` if unknown.
///
/// Output:
/// - Returns a `PostSummaryData` structure with file counts, service hints, and conflict tallies.
///
/// Details:
/// - Combines sync database lookups with an `/etc` scan without performing system modifications.
/// - An unknown outcome (`None`) is reported conservatively as a failure.
#[must_use]
pub fn compute_post_summary(items: &[PackageItem], success: Option<bool>) -> PostSummaryData {
    // Collect the package names once; both inspections below are read-only.
    let package_names: Vec<String> = items.iter().map(|pkg| pkg.name.clone()).collect();
    let (pacnew_count, pacsave_count) = count_pac_conflicts_in_etc();
    let (changed_files, services_pending) = count_changed_files_and_services(&package_names);
    PostSummaryData {
        success: success.unwrap_or(false),
        changed_files,
        pacnew_count,
        pacsave_count,
        services_pending,
        snapshot_label: None,
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/mod.rs | src/logic/mod.rs | //! Core non-UI logic split into modular submodules.
pub mod deps;
pub mod distro;
pub mod faillock;
pub mod files;
pub mod filter;
pub mod gating;
pub mod lists;
pub mod password;
pub mod prefetch;
pub mod preflight;
pub mod query;
pub mod sandbox;
pub mod selection;
pub mod services;
pub mod sort;
pub mod summary;
// Re-export public APIs to preserve existing import paths (crate::logic::...)
pub use filter::apply_filters_and_sort_preserve_selection;
pub use gating::{is_allowed, set_allowed_only_selected, set_allowed_ring};
pub use lists::{add_to_downgrade_list, add_to_install_list, add_to_remove_list};
pub use prefetch::ring_prefetch_from_selected;
pub use query::send_query;
pub use selection::move_sel_cached;
pub use services::resolve_service_impacts;
pub use sort::{invalidate_sort_caches, sort_results_preserve_selection};
pub use summary::compute_post_summary;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/selection.rs | src/logic/selection.rs | //! Selection movement and detail coordination during navigation.
use tokio::sync::mpsc;
use crate::state::{AppState, PackageItem};
/// What: Move the selection by `delta` and coordinate detail loading policies.
///
/// Inputs:
/// - `app`: Mutable application state (results, selection, caches, scroll heuristics).
/// - `delta`: Signed offset to apply to the current selection index.
/// - `details_tx`: Channel used to request lazy loading of package details.
/// - `comments_tx`: Channel used to request AUR package comments.
///
/// Output:
/// - Updates selection-related state, potentially sends detail requests, and adjusts gating flags.
///
/// # Panics
/// - The `u32` conversion of the scroll delta is clamped with `.min(u32::MAX as usize)`
///   before the `expect`, so that `expect` documents an invariant rather than a
///   reachable failure; no panic occurs in practice.
///
/// Details:
/// - Clamps the selection to valid bounds, refreshes placeholder metadata, and reuses cached entries.
/// - Schedules PKGBUILD reloads when necessary and tracks scroll velocity to throttle prefetching.
/// - Updates comments when package changes and comments are visible (only for AUR packages).
/// - Switches between selected-only gating during fast scrolls and wide ring prefetch for slower navigation.
pub fn move_sel_cached(
    app: &mut AppState,
    delta: isize,
    details_tx: &mpsc::UnboundedSender<PackageItem>,
    comments_tx: &mpsc::UnboundedSender<String>,
) {
    if app.results.is_empty() {
        return;
    }
    // Clamp the new index into [0, len - 1].
    let len = isize::try_from(app.results.len()).unwrap_or(isize::MAX);
    let mut idx = isize::try_from(app.selected).unwrap_or(0) + delta;
    if idx < 0 {
        idx = 0;
    }
    if idx >= len {
        idx = len - 1;
    }
    app.selected = usize::try_from(idx).unwrap_or(0);
    app.list_state.select(Some(app.selected));
    if let Some(item) = app.results.get(app.selected).cloned() {
        // Focus details on the currently selected item only
        app.details_focus = Some(item.name.clone());
        // Update details pane immediately with a placeholder reflecting the selection
        app.details.name.clone_from(&item.name);
        app.details.version.clone_from(&item.version);
        app.details.description.clear();
        match &item.source {
            crate::state::Source::Official { repo, arch } => {
                app.details.repository.clone_from(repo);
                app.details.architecture.clone_from(arch);
            }
            crate::state::Source::Aur => {
                app.details.repository = "AUR".to_string();
                app.details.architecture = "any".to_string();
            }
        }
        // Cache hit replaces the placeholder immediately; otherwise request a fetch.
        if let Some(cached) = app.details_cache.get(&item.name).cloned() {
            app.details = cached;
        } else {
            let _ = details_tx.send(item.clone());
        }
        // Auto-reload PKGBUILD if visible and for a different package (with debounce)
        if app.pkgb_visible {
            let needs_reload = app.pkgb_package_name.as_deref() != Some(item.name.as_str());
            if needs_reload {
                // Instead of immediately loading, schedule a debounced reload
                app.pkgb_reload_requested_at = Some(std::time::Instant::now());
                app.pkgb_reload_requested_for = Some(item.name.clone());
                app.pkgb_text = None; // Clear old PKGBUILD while loading
            }
        }
        // Auto-update comments if visible and for a different package (only for AUR packages)
        if app.comments_visible && matches!(item.source, crate::state::Source::Aur) {
            let needs_update = app
                .comments_package_name
                .as_deref()
                .is_none_or(|cached_name| cached_name != item.name.as_str());
            if needs_update {
                // Check if we have cached comments for this package
                if app
                    .comments_package_name
                    .as_ref()
                    .is_some_and(|cached_name| {
                        cached_name == &item.name && !app.comments.is_empty()
                    })
                {
                    // Use cached comments, just reset scroll
                    app.comments_scroll = 0;
                } else {
                    // Request new comments
                    app.comments.clear();
                    app.comments_package_name = None;
                    app.comments_fetched_at = None;
                    app.comments_scroll = 0;
                    app.comments_loading = true;
                    app.comments_error = None;
                    let _ = comments_tx.send(item.name.clone());
                }
            }
        }
    }
    // Debounce ring prefetch when scrolling fast (>5 items cumulatively)
    // NOTE(review): negating `isize::MIN` would overflow in debug builds —
    // callers pass small deltas; confirm no caller forwards isize::MIN.
    let abs_delta_usize: usize = if delta < 0 {
        usize::try_from(-delta).unwrap_or(0)
    } else {
        usize::try_from(delta).unwrap_or(0)
    };
    if abs_delta_usize > 0 {
        // Clamped before conversion, so the expect below cannot trip.
        let add = u32::try_from(abs_delta_usize.min(u32::MAX as usize))
            .expect("value is bounded by u32::MAX")
        ;
        app.scroll_moves = app.scroll_moves.saturating_add(add);
    }
    if app.need_ring_prefetch {
        // tighten allowed set to only current selection during fast scroll
        crate::logic::set_allowed_only_selected(app);
        app.ring_resume_at =
            Some(std::time::Instant::now() + std::time::Duration::from_millis(200));
        return;
    }
    if app.scroll_moves > 5 {
        app.need_ring_prefetch = true;
        crate::logic::set_allowed_only_selected(app);
        app.ring_resume_at =
            Some(std::time::Instant::now() + std::time::Duration::from_millis(200));
        return;
    }
    // For small/slow scrolls, allow ring and prefetch immediately
    crate::logic::set_allowed_ring(app, 30);
    crate::logic::ring_prefetch_from_selected(app, details_tx);
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Build a minimal official-repo `PackageItem` fixture for the tests below.
    fn item_official(name: &str, repo: &str) -> crate::state::PackageItem {
        crate::state::PackageItem {
            name: name.to_string(),
            version: "1.0".to_string(),
            description: format!("{name} desc"),
            source: crate::state::Source::Official {
                repo: repo.to_string(),
                arch: "x86_64".to_string(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }
    #[tokio::test]
    /// What: Move selection with bounds, placeholder details, and request flow.
    ///
    /// Inputs:
    /// - `app`: Results list seeded with one AUR and one official package, initial selection at index 0.
    /// - `tx`: Unbounded channel capturing detail fetch requests while deltas of +1, -100, and 0 are applied.
    ///
    /// Output:
    /// - Mutates `app` so indices clamp within bounds, details placeholders reflect the active selection, and a fetch request emits when switching to the official entry.
    ///
    /// Details:
    /// - Uses a timeout on the receiver to assert the async request is produced and verifies placeholder data resets when returning to the AUR result.
    async fn move_sel_cached_clamps_and_requests_details() {
        let mut app = crate::state::AppState {
            results: vec![
                crate::state::PackageItem {
                    name: "aur1".into(),
                    version: "1".into(),
                    description: String::new(),
                    source: crate::state::Source::Aur,
                    popularity: None,
                    out_of_date: None,
                    orphaned: false,
                },
                item_official("pkg2", "core"),
            ],
            selected: 0,
            ..Default::default()
        };
        let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
        let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
        move_sel_cached(&mut app, 1, &tx, &comments_tx);
        assert_eq!(app.selected, 1);
        assert_eq!(app.details.repository.to_lowercase(), "core");
        assert_eq!(app.details.architecture.to_lowercase(), "x86_64");
        // The official package is not cached, so a detail request must be emitted.
        let got = tokio::time::timeout(std::time::Duration::from_millis(50), rx.recv())
            .await
            .ok()
            .flatten();
        assert!(got.is_some());
        // A large negative delta clamps to index 0.
        move_sel_cached(&mut app, -100, &tx, &comments_tx);
        assert_eq!(app.selected, 0);
        // Zero delta re-renders the AUR placeholder for the current selection.
        move_sel_cached(&mut app, 0, &tx, &comments_tx);
        assert_eq!(app.details.repository, "AUR");
        assert_eq!(app.details.architecture, "any");
    }
    #[tokio::test]
    /// What: Ensure cached details suppress additional fetch requests.
    ///
    /// Inputs:
    /// - Results containing the cached package and an existing entry in `details_cache`.
    ///
    /// Output:
    /// - No message emitted on the channel and `app.details` populated from the cache.
    ///
    /// Details:
    /// - Confirms `move_sel_cached` short-circuits when cache contains the selected package.
    async fn move_sel_cached_uses_details_cache() {
        let mut app = crate::state::AppState::default();
        let pkg = item_official("pkg", "core");
        app.results = vec![pkg.clone()];
        app.details_cache.insert(
            pkg.name.clone(),
            crate::state::PackageDetails {
                repository: "core".into(),
                name: pkg.name.clone(),
                version: pkg.version.clone(),
                architecture: "x86_64".into(),
                ..Default::default()
            },
        );
        let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
        let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
        move_sel_cached(&mut app, 0, &tx, &comments_tx);
        // A cache hit must not produce a fetch request on the channel.
        let none = tokio::time::timeout(std::time::Duration::from_millis(30), rx.recv())
            .await
            .ok()
            .flatten();
        assert!(none.is_none());
        assert_eq!(app.details.name, "pkg");
    }
    #[test]
    /// What: Verify fast-scroll gating requests ring prefetch and locks selection.
    ///
    /// Inputs:
    /// - `app`: Populated results list with selection moved near the end to trigger fast-scroll logic.
    ///
    /// Output:
    /// - `need_ring_prefetch` flag set, `ring_resume_at` populated, and allowed set restricted to the
    ///   selected package.
    ///
    /// Details:
    /// - Simulates a large positive index jump and ensures gating functions mark the correct state and
    ///   enforce selection-only access.
    fn fast_scroll_sets_gating_and_defers_ring() {
        let mut app = crate::state::AppState {
            results: vec![
                item_official("a", "core"),
                item_official("b", "extra"),
                item_official("c", "extra"),
                item_official("d", "extra"),
                item_official("e", "extra"),
                item_official("f", "extra"),
                item_official("g", "extra"),
            ],
            ..Default::default()
        };
        let (tx, _rx) = tokio::sync::mpsc::unbounded_channel::<crate::state::PackageItem>();
        let (comments_tx, _comments_rx) = tokio::sync::mpsc::unbounded_channel();
        // A jump of 6 exceeds the fast-scroll threshold (>5 cumulative moves).
        move_sel_cached(&mut app, 6, &tx, &comments_tx);
        assert!(app.need_ring_prefetch);
        assert!(app.ring_resume_at.is_some());
        crate::logic::set_allowed_only_selected(&app);
        assert!(crate::logic::is_allowed(&app.results[app.selected].name));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/gating.rs | src/logic/gating.rs | //! Access control for package detail loading to optimize performance.
use std::collections::HashSet;
use std::sync::{OnceLock, RwLock};
use crate::state::AppState;
/// What: Lazily construct and return the global set of package names permitted for detail fetching.
///
/// Inputs:
/// - (none): The backing `RwLock<HashSet<String>>` is created on first access.
///
/// Output:
/// - Returns a reference to the lock guarding the allowed-name set.
///
/// Details:
/// - A `OnceLock` guarantees exactly-once, race-free initialization while keeping
///   subsequent accesses cheap.
fn allowed_set() -> &'static RwLock<HashSet<String>> {
    static ALLOWED_NAMES: OnceLock<RwLock<HashSet<String>>> = OnceLock::new();
    ALLOWED_NAMES.get_or_init(RwLock::default)
}
/// What: Test whether detail loading is currently permitted for a package name.
///
/// Inputs:
/// - `name`: Package name to look up in the allowed set.
///
/// Output:
/// - `true` when the name is in the allowed set, or when the lock is poisoned; `false` otherwise.
///
/// Details:
/// - A poisoned read lock must not freeze UI interactions, so that case fails open.
#[must_use]
pub fn is_allowed(name: &str) -> bool {
    match allowed_set().read() {
        Ok(set) => set.contains(name),
        // Fail open: a poisoned lock should never block the UI.
        Err(_) => true,
    }
}
/// What: Narrow detail loading down to the currently selected package only.
///
/// Inputs:
/// - `app`: Application state supplying `results` and the `selected` index.
///
/// Output:
/// - Replaces the allowed set with a single entry for the selection; no-op when nothing is selected.
///
/// Details:
/// - Dropping all previously allowed names keeps rapid navigation responsive, because
///   background fetches for stale neighbours are immediately rejected.
pub fn set_allowed_only_selected(app: &AppState) {
    let Some(sel) = app.results.get(app.selected) else {
        return;
    };
    if let Ok(mut guard) = allowed_set().write() {
        guard.clear();
        guard.insert(sel.name.clone());
    }
}
/// What: Permit detail loading for the selection plus `radius` neighbours on each side.
///
/// Inputs:
/// - `app`: Application state supplying `results` and the `selected` index.
/// - `radius`: Count of neighbours above and below the selection to admit.
///
/// Output:
/// - Replaces the internal allowed set with the computed ring of package names.
///
/// Details:
/// - The selected entry itself is always part of the ring; out-of-bounds neighbours are skipped.
/// - When the write lock is poisoned the previous set is left untouched.
pub fn set_allowed_ring(app: &AppState, radius: usize) {
    let mut ring: HashSet<String> = HashSet::new();
    if let Some(sel) = app.results.get(app.selected) {
        ring.insert(sel.name.clone());
    }
    for step in 1..=radius {
        // Neighbour above the selection; `checked_sub` guards the top boundary.
        if let Some(item) = app
            .selected
            .checked_sub(step)
            .and_then(|idx| app.results.get(idx))
        {
            ring.insert(item.name.clone());
        }
        // Neighbour below the selection; `get` handles the bottom boundary.
        if let Some(item) = app.results.get(app.selected + step) {
            ring.insert(item.name.clone());
        }
    }
    if let Ok(mut guard) = allowed_set().write() {
        *guard = ring;
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// What: Build a minimal official-repo `PackageItem` for gating tests.
    ///
    /// Inputs:
    /// - `name`: Package name; also used to derive the description.
    /// - `repo`: Official repository the synthetic package claims to come from.
    ///
    /// Output:
    /// - A `PackageItem` with fixed version/arch and no AUR-specific metadata.
    fn item_official(name: &str, repo: &str) -> crate::state::PackageItem {
        crate::state::PackageItem {
            name: name.to_string(),
            version: "1.0".to_string(),
            description: format!("{name} desc"),
            source: crate::state::Source::Official {
                repo: repo.to_string(),
                arch: "x86_64".to_string(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }
    #[test]
    /// What: Check allowed-set helpers toggle between single selection and ring modes.
    ///
    /// Inputs:
    /// - Results array with four packages and selected index set to one.
    ///
    /// Output:
    /// - Only the selected package allowed initially; after calling `set_allowed_ring`, adjacent packages become allowed.
    ///
    /// Details:
    /// - Validates transition between restrictive and radius-based gating policies.
    /// - NOTE(review): the allowed set is a process-wide static, so tests touching it can
    ///   interfere when run in parallel — confirm isolation before adding more such tests.
    fn allowed_only_selected_and_ring() {
        let app = crate::state::AppState {
            results: vec![
                item_official("a", "core"),
                item_official("b", "extra"),
                item_official("c", "extra"),
                item_official("d", "other"),
            ],
            selected: 1,
            ..Default::default()
        };
        set_allowed_only_selected(&app);
        // Selection-only mode: "b" must pass and at least one neighbour must be blocked.
        assert!(is_allowed("b"));
        assert!(!is_allowed("a") || !is_allowed("c") || !is_allowed("d"));
        set_allowed_ring(&app, 1);
        // Radius 1 admits at least one direct neighbour of "b".
        assert!(is_allowed("a") || is_allowed("c"));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/query.rs | src/logic/query.rs | //! Search query dispatch and ID management for result correlation.
use tokio::sync::mpsc;
use crate::state::AppState;
/// What: Dispatch the current input text on the search channel under a fresh query id.
///
/// Inputs:
/// - `app`: Mutable application state; `next_query_id` and `latest_query_id` are advanced.
/// - `query_tx`: Channel over which the `QueryInput` is sent.
///
/// Output:
/// - Emits a `QueryInput` carrying the new id, the current text, and the fuzzy flag.
///
/// Details:
/// - The id lets the UI correlate responses and drop results from superseded queries.
/// - Cache checking happens in `handle_search_results` to avoid architectural changes.
pub fn send_query(app: &mut AppState, query_tx: &mpsc::UnboundedSender<crate::state::QueryInput>) {
    let id = app.next_query_id;
    app.latest_query_id = id;
    app.next_query_id = id + 1;
    let message = crate::state::QueryInput {
        id,
        text: app.input.clone(),
        fuzzy: app.fuzzy_search_enabled,
    };
    // The receiver may already be gone during shutdown; dropping the message is fine.
    let _ = query_tx.send(message);
}
#[cfg(test)]
mod tests {
    use super::*;
    #[tokio::test]
    /// What: Ensure `send_query` increments identifiers and forwards the current input text.
    ///
    /// Inputs:
    /// - `AppState` whose `input` is set to `"hello"`.
    ///
    /// Output:
    /// - `latest_query_id` advances to `1` and the channel receives a matching `QueryInput`.
    ///
    /// Details:
    /// - Uses a short timeout to guarantee the send occurs asynchronously.
    async fn send_query_increments_and_sends() {
        let mut app = AppState {
            input: "hello".into(),
            ..Default::default()
        };
        let (tx, mut rx) = mpsc::unbounded_channel();
        send_query(&mut app, &tx);
        // The first dispatched id equals the initial `next_query_id`, observed here as 1.
        assert_eq!(app.latest_query_id, 1);
        // Bounded wait so a missing send fails the test quickly instead of hanging.
        let q = tokio::time::timeout(std::time::Duration::from_millis(50), rx.recv())
            .await
            .ok()
            .flatten()
            .expect("query sent");
        assert_eq!(q.id, app.latest_query_id);
        assert_eq!(q.text, "hello");
        assert!(!q.fuzzy); // Default is false
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/distro.rs | src/logic/distro.rs | //! Distro-related logic helpers (filtering and labels).
/// What: Decide whether results from a repository are visible under the current filter toggles.
///
/// Inputs:
/// - `repo`: Name of the repository associated with a package result.
/// - `app`: Application state providing the filter toggles for official repos.
///
/// Output:
/// - `true` when the repository passes the active filters; otherwise `false`.
///
/// Details:
/// - Lowercases the name, resolves the three canonical repos directly, then consults the
///   `crate::index` classifiers for the EOS/CachyOS/Artix families.
/// - A repository no classifier recognises is shown only while every official filter is enabled.
#[must_use]
pub fn repo_toggle_for(repo: &str, app: &crate::state::AppState) -> bool {
    let r = repo.to_lowercase();
    match r.as_str() {
        "core" => app.results_filter_show_core,
        "extra" => app.results_filter_show_extra,
        "multilib" => app.results_filter_show_multilib,
        other if crate::index::is_eos_repo(other) => app.results_filter_show_eos,
        other if crate::index::is_cachyos_repo(other) => app.results_filter_show_cachyos,
        other if crate::index::is_artix_omniverse(other) => {
            app.results_filter_show_artix_omniverse
        }
        other if crate::index::is_artix_universe(other) => app.results_filter_show_artix_universe,
        other if crate::index::is_artix_lib32(other) => app.results_filter_show_artix_lib32,
        other if crate::index::is_artix_galaxy(other) => app.results_filter_show_artix_galaxy,
        other if crate::index::is_artix_world(other) => app.results_filter_show_artix_world,
        other if crate::index::is_artix_system(other) => app.results_filter_show_artix_system,
        // Fallback for any other Artix repo (shouldn't happen, but safe).
        other if crate::index::is_artix_repo(other) => app.results_filter_show_artix,
        // Unknown official repo: include only when all official filters are enabled.
        _ => {
            app.results_filter_show_core
                && app.results_filter_show_extra
                && app.results_filter_show_multilib
                && app.results_filter_show_eos
                && app.results_filter_show_cachyos
                && app.results_filter_show_artix
                && app.results_filter_show_artix_omniverse
                && app.results_filter_show_artix_universe
                && app.results_filter_show_artix_lib32
                && app.results_filter_show_artix_galaxy
                && app.results_filter_show_artix_world
                && app.results_filter_show_artix_system
        }
    }
}
/// What: Produce a human-friendly label for an official package entry.
///
/// Inputs:
/// - `repo`: Repository reported by the package source.
/// - `name`: Package name used to detect Manjaro naming conventions.
/// - `owner`: Optional upstream owner string available from package metadata.
///
/// Output:
/// - Display label describing the ecosystem the package belongs to.
///
/// Details:
/// - Walks the repo classifiers in priority order: `EOS`, `CachyOS`, then the Artix repos
///   (`OMNI`, `UNI`, `LIB32`, `GALAXY`, `WORLD`, `SYSTEM`, generic `Artix`).
/// - Only when no repo classifier matches is the Manjaro name/owner heuristic applied.
/// - Falls back to the raw repository string when nothing special matches.
#[must_use]
pub fn label_for_official(repo: &str, name: &str, owner: &str) -> String {
    let r = repo.to_lowercase();
    // Classifier table, checked top to bottom; order mirrors the previous if-chain.
    let classifiers: [(fn(&str) -> bool, &str); 9] = [
        (crate::index::is_eos_repo, "EOS"),
        (crate::index::is_cachyos_repo, "CachyOS"),
        (crate::index::is_artix_omniverse, "OMNI"),
        (crate::index::is_artix_universe, "UNI"),
        (crate::index::is_artix_lib32, "LIB32"),
        (crate::index::is_artix_galaxy, "GALAXY"),
        (crate::index::is_artix_world, "WORLD"),
        (crate::index::is_artix_system, "SYSTEM"),
        // Generic fallback for any other Artix repo (shouldn't happen, but safe).
        (crate::index::is_artix_repo, "Artix"),
    ];
    for (matches_repo, label) in classifiers {
        if matches_repo(&r) {
            return label.to_string();
        }
    }
    if crate::index::is_manjaro_name_or_owner(name, owner) {
        "Manjaro".to_string()
    } else {
        repo.to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::AppState;
    #[test]
    /// What: Validate canonical repository toggles deny disabled repositories while permitting enabled ones.
    ///
    /// Inputs:
    /// - `app`: Application state with `core` enabled and other official toggles disabled.
    ///
    /// Output:
    /// - `repo_toggle_for` allows `core` entries but rejects `extra` and `multilib`.
    ///
    /// Details:
    /// - Ensures the per-repository gate respects the individual boolean flags.
    fn repo_toggle_respects_individual_flags() {
        let app = AppState {
            results_filter_show_core: true,
            results_filter_show_extra: false,
            results_filter_show_multilib: false,
            results_filter_show_eos: false,
            results_filter_show_cachyos: false,
            results_filter_show_artix: false,
            results_filter_show_artix_omniverse: false,
            results_filter_show_artix_universe: false,
            results_filter_show_artix_lib32: false,
            results_filter_show_artix_galaxy: false,
            results_filter_show_artix_world: false,
            results_filter_show_artix_system: false,
            ..Default::default()
        };
        assert!(repo_toggle_for("core", &app));
        assert!(!repo_toggle_for("extra", &app));
        assert!(!repo_toggle_for("multilib", &app));
    }
    #[test]
    /// What: Ensure unknown official repositories require every official toggle to be enabled.
    ///
    /// Inputs:
    /// - `app`: Application state with all official flags on, then one flag disabled.
    ///
    /// Output:
    /// - Unknown repository accepted when fully enabled and rejected once any flag is turned off.
    ///
    /// Details:
    /// - Exercises the fallback clause guarding unfamiliar repositories.
    fn repo_toggle_unknown_only_with_full_whitelist() {
        let mut app = AppState {
            results_filter_show_core: true,
            results_filter_show_extra: true,
            results_filter_show_multilib: true,
            results_filter_show_eos: true,
            results_filter_show_cachyos: true,
            results_filter_show_artix: true,
            results_filter_show_artix_omniverse: true,
            results_filter_show_artix_universe: true,
            results_filter_show_artix_lib32: true,
            results_filter_show_artix_galaxy: true,
            results_filter_show_artix_world: true,
            results_filter_show_artix_system: true,
            ..Default::default()
        };
        assert!(repo_toggle_for("unlisted", &app));
        // A single disabled flag is enough to hide an unknown repository again.
        app.results_filter_show_multilib = false;
        assert!(!repo_toggle_for("unlisted", &app));
    }
    #[test]
    /// What: Confirm label helper emits ecosystem-specific aliases for recognised repositories.
    ///
    /// Inputs:
    /// - Repository/name permutations covering `EndeavourOS`, `CachyOS`, `Artix Linux` (with specific repo labels), `Manjaro`, and a generic repo.
    ///
    /// Output:
    /// - Labels reduce to `EOS`, `CachyOS`, `OMNI`, `UNI` (for specific Artix repos), `Manjaro`, and the original repo name respectively.
    ///
    /// Details:
    /// - Validates the Manjaro heuristic via package name and the repo classification helpers.
    /// - Confirms specific Artix repos return their specific labels (OMNI, UNI, etc.) rather than the generic "Artix" label.
    fn label_for_official_prefers_special_cases() {
        assert_eq!(label_for_official("endeavouros", "pkg", ""), "EOS");
        assert_eq!(label_for_official("cachyos-extra", "pkg", ""), "CachyOS");
        assert_eq!(label_for_official("omniverse", "pkg", ""), "OMNI");
        assert_eq!(label_for_official("universe", "pkg", ""), "UNI");
        assert_eq!(label_for_official("extra", "manjaro-kernel", ""), "Manjaro");
        // No classifier and no Manjaro heuristic: the raw repo string falls through.
        assert_eq!(label_for_official("core", "glibc", ""), "core");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/services/binaries.rs | src/logic/services/binaries.rs | //! Binary collection and parsing for service impact detection.
use std::collections::HashSet;
use std::path::Path;
use std::process::Command;
use crate::state::types::Source;
use super::command::run_command;
/// What: Collect binary paths shipped by a specific package.
///
/// Inputs:
/// - `package`: Package name for which to inspect the remote file list.
/// - `source`: Source descriptor to determine how to fetch binaries (Official vs AUR).
///
/// Output:
/// - Vector of binary paths (e.g., `/usr/bin/foo`, `/usr/sbin/bar`).
///
/// Details:
/// - For official packages: Executes `pacman -Fl <package>` and filters paths under standard binary directories.
/// - For AUR packages: Uses installed files, paru/yay -Fl, or PKGBUILD parsing as fallback.
/// - Includes executables from `/usr/bin`, `/usr/sbin`, `/bin`, `/sbin`, and `/usr/local/bin`.
/// - NOTE(review): stock `pacman -Fl` prints paths without a leading slash; the absolute
///   `/usr/...` prefix checks downstream assume normalized absolute paths — confirm.
pub(super) fn collect_binaries_for_package(
    package: &str,
    source: &Source,
) -> Result<Vec<String>, String> {
    match source {
        Source::Official { .. } => {
            // Use pacman -Fl for official packages
            let output = run_command(
                "pacman",
                &["-Fl", package],
                &format!("pacman -Fl {package}"),
            )?;
            let binaries = extract_binaries_from_file_list(&output, package);
            Ok(binaries)
        }
        Source::Aur => {
            // For AUR packages, use the same fallback chain as file lists
            // Fallback 1: check if package is already installed locally.
            if let Ok(installed_files) = crate::logic::files::get_installed_file_list(package)
                && !installed_files.is_empty()
            {
                // Re-shape installed paths into `pacman -Fl` style "<pkg> <path>" lines
                // so the shared extractor can be reused.
                let binaries = extract_binaries_from_file_list(
                    &installed_files
                        .iter()
                        .map(|f| format!("{package} {f}"))
                        .collect::<Vec<_>>()
                        .join("\n"),
                    package,
                );
                if !binaries.is_empty() {
                    tracing::debug!(
                        "Found {} binaries from installed AUR package {}",
                        binaries.len(),
                        package
                    );
                    return Ok(binaries);
                }
            }
            // Fallback 2: use paru/yay -Fl if available (works for cached AUR packages)
            let has_paru = Command::new("paru").args(["--version"]).output().is_ok();
            let has_yay = Command::new("yay").args(["--version"]).output().is_ok();
            if has_paru {
                tracing::debug!("Trying paru -Fl {} for AUR package binaries", package);
                // LC_ALL/LANG pinned to C so the output format is locale-independent.
                if let Ok(output) = Command::new("paru")
                    .args(["-Fl", package])
                    .env("LC_ALL", "C")
                    .env("LANG", "C")
                    .output()
                    && output.status.success()
                {
                    let text = String::from_utf8_lossy(&output.stdout);
                    let binaries = extract_binaries_from_file_list(&text, package);
                    if !binaries.is_empty() {
                        tracing::debug!(
                            "Found {} binaries from paru -Fl for {}",
                            binaries.len(),
                            package
                        );
                        return Ok(binaries);
                    }
                }
            }
            if has_yay {
                tracing::debug!("Trying yay -Fl {} for AUR package binaries", package);
                if let Ok(output) = Command::new("yay")
                    .args(["-Fl", package])
                    .env("LC_ALL", "C")
                    .env("LANG", "C")
                    .output()
                    && output.status.success()
                {
                    let text = String::from_utf8_lossy(&output.stdout);
                    let binaries = extract_binaries_from_file_list(&text, package);
                    if !binaries.is_empty() {
                        tracing::debug!(
                            "Found {} binaries from yay -Fl for {}",
                            binaries.len(),
                            package
                        );
                        return Ok(binaries);
                    }
                }
            }
            // Fallback 3: try to parse PKGBUILD to extract install paths
            match crate::logic::files::fetch_pkgbuild_sync(package) {
                Ok(pkgbuild) => {
                    let entry = crate::logic::files::parse_pkgbuild_cached(
                        package,
                        None,
                        crate::logic::files::PkgbuildSourceKind::Aur,
                        &pkgbuild,
                    );
                    let files = entry.install_paths;
                    // Keep only paths under the standard binary directories.
                    let binaries: Vec<String> = files
                        .into_iter()
                        .filter(|f| {
                            f.starts_with("/usr/bin/")
                                || f.starts_with("/usr/sbin/")
                                || f.starts_with("/bin/")
                                || f.starts_with("/sbin/")
                                || f.starts_with("/usr/local/bin/")
                        })
                        .collect();
                    if !binaries.is_empty() {
                        tracing::debug!(
                            "Found {} binaries from PKGBUILD parsing for {}",
                            binaries.len(),
                            package
                        );
                        return Ok(binaries);
                    }
                }
                Err(e) => {
                    tracing::debug!("Failed to fetch PKGBUILD for {}: {}", package, e);
                }
            }
            // No binaries available; an empty result is not an error for AUR packages.
            Ok(Vec::new())
        }
    }
}
/// What: Extract binary paths from `pacman -Fl` output.
///
/// Inputs:
/// - `file_list`: Raw `pacman -Fl` stdout (`<pkg> <path>` per line; directories end with `/`).
/// - `package`: Package name used to filter unrelated entries.
///
/// Output:
/// - Vector of binary paths (and their bare file names) in discovery order, deduplicated.
///
/// Details:
/// - Recognises executables under standard binary directories.
/// - Skips directory entries (trailing `/`); the previous implementation stripped the slash
///   and then wrongly counted directories such as `/usr/bin/helpers/` as binaries.
/// - Emits both the full path (exact matching) and the bare file name (flexible matching).
pub(super) fn extract_binaries_from_file_list(file_list: &str, package: &str) -> Vec<String> {
    const BINARY_PREFIXES: [&str; 5] = [
        "/usr/bin/",
        "/usr/sbin/",
        "/bin/",
        "/sbin/",
        "/usr/local/bin/",
    ];
    let mut seen = HashSet::new();
    let mut binaries = Vec::new();
    for line in file_list.lines() {
        let Some((pkg, path)) = line.split_once(' ') else {
            continue;
        };
        if pkg != package {
            continue;
        }
        // Directory entries carry a trailing slash in `pacman -Fl` output; they are never
        // executables, so skip them instead of stripping the slash and keeping them.
        if path.ends_with('/') {
            continue;
        }
        // Check if path is under a binary directory
        let is_binary = BINARY_PREFIXES
            .iter()
            .any(|prefix| path.starts_with(prefix));
        if is_binary {
            // Extract the binary name for matching
            if let Some(binary_name) = Path::new(path)
                .file_name()
                .and_then(|name| name.to_str())
                .map(ToString::to_string)
            {
                // Store full path for exact matching
                if seen.insert(path.to_string()) {
                    binaries.push(path.to_string());
                }
                // Also store binary name for flexible matching (if not already added)
                if seen.insert(binary_name.clone()) {
                    binaries.push(binary_name);
                }
            }
        }
    }
    binaries
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/services/command.rs | src/logic/services/command.rs | //! Command execution utilities for service resolution.
use std::process::Command;
use tracing::{debug, warn};
/// What: Execute a command and capture stdout as UTF-8.
///
/// Inputs:
/// - `program`: Binary to execute.
/// - `args`: Command-line arguments.
/// - `display_label`: Human-friendly command description for logging.
///
/// Output:
/// - Stdout as a `String` on success; error description otherwise.
///
/// Details:
/// - Annotates errors with the supplied `display` string for easier debugging.
/// - Three failure modes, each logged via `tracing` before being converted into the
///   returned `String` error: spawn failure, non-zero exit status, non-UTF-8 stdout.
pub(super) fn run_command(
    program: &str,
    args: &[&str],
    display_label: &str,
) -> Result<String, String> {
    debug!(
        command = program,
        args = ?args,
        display = display_label,
        "executing service command"
    );
    let output = Command::new(program).args(args).output().map_err(|err| {
        warn!(
            command = program,
            args = ?args,
            display = display_label,
            error = %err,
            "failed to spawn command"
        );
        format!("failed to spawn `{display_label}`: {err}")
    })?;
    // Captured up front so both the success and failure log lines share the same fields.
    let status_code = output.status.code();
    let stdout_len = output.stdout.len();
    let stderr_len = output.stderr.len();
    if !output.status.success() {
        // Non-zero exit is treated as failure even when stdout carries partial content.
        warn!(
            command = program,
            args = ?args,
            display = display_label,
            status = ?output.status,
            status_code,
            stdout_len,
            stderr_len,
            "command exited with non-zero status"
        );
        return Err(format!(
            "`{display_label}` exited with status {}",
            output.status
        ));
    }
    debug!(
        command = program,
        args = ?args,
        display = display_label,
        status = ?output.status,
        status_code,
        stdout_len,
        stderr_len,
        "command completed successfully"
    );
    // Strict UTF-8 decoding: invalid bytes are reported rather than lossily replaced.
    String::from_utf8(output.stdout).map_err(|err| {
        warn!(
            command = program,
            args = ?args,
            display = display_label,
            error = %err,
            "command produced invalid UTF-8"
        );
        format!("`{display_label}` produced invalid UTF-8: {err}")
    })
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/services/tests.rs | src/logic/services/tests.rs | //! Integration tests for service resolution.
// Note: Most unit tests are in their respective module files.
// This file can be used for integration tests if needed in the future.
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/services/units.rs | src/logic/services/units.rs | //! Service unit file collection and parsing.
use std::collections::HashSet;
use std::path::Path;
use std::process::Command;
use crate::state::types::Source;
use super::command::run_command;
/// What: Collect service unit filenames shipped by a specific package.
///
/// Inputs:
/// - `package`: Package name for which to inspect the remote file list.
/// - `source`: Source descriptor to determine how to fetch units (Official vs AUR).
///
/// Output:
/// - Vector of unit filenames (e.g., `sshd.service`). Empty when the package
///   ships no systemd units.
///
/// Details:
/// - Executes `pacman -Fl <package>` and filters paths under the standard
///   systemd directories.
/// - For AUR packages, uses fallback methods (installed files, paru/yay -Fl).
/// - NOTE(review): stock `pacman -Fl` prints paths without a leading slash, while
///   `is_service_path` checks absolute `/usr/lib/...` prefixes — confirm paths are
///   normalized upstream.
pub(super) fn collect_service_units_for_package(
    package: &str,
    source: &Source,
) -> Result<Vec<String>, String> {
    match source {
        Source::Official { .. } => {
            // Use pacman -Fl for official packages
            let output = run_command(
                "pacman",
                &["-Fl", package],
                &format!("pacman -Fl {package}"),
            )?;
            let units = extract_service_units_from_file_list(&output, package);
            Ok(units)
        }
        Source::Aur => {
            // For AUR packages, try fallback methods similar to collect_binaries_for_package
            // Fallback 1: check if package is already installed locally.
            if let Ok(installed_files) = crate::logic::files::get_installed_file_list(package)
                && !installed_files.is_empty()
            {
                // Re-shape installed paths into "<pkg> <path>" lines for the shared extractor.
                let file_list = installed_files
                    .iter()
                    .map(|f| format!("{package} {f}"))
                    .collect::<Vec<_>>()
                    .join("\n");
                let units = extract_service_units_from_file_list(&file_list, package);
                if !units.is_empty() {
                    tracing::debug!(
                        "Found {} service units from installed AUR package {}",
                        units.len(),
                        package
                    );
                    return Ok(units);
                }
            }
            // Fallback 2: use paru/yay -Fl if available (works for cached AUR packages)
            let has_paru = Command::new("paru").args(["--version"]).output().is_ok();
            let has_yay = Command::new("yay").args(["--version"]).output().is_ok();
            if has_paru {
                tracing::debug!("Trying paru -Fl {} for AUR package service units", package);
                // LC_ALL/LANG pinned to C so the output format is locale-independent.
                if let Ok(output) = Command::new("paru")
                    .args(["-Fl", package])
                    .env("LC_ALL", "C")
                    .env("LANG", "C")
                    .output()
                    && output.status.success()
                {
                    let text = String::from_utf8_lossy(&output.stdout);
                    let units = extract_service_units_from_file_list(&text, package);
                    if !units.is_empty() {
                        tracing::debug!(
                            "Found {} service units from paru -Fl for {}",
                            units.len(),
                            package
                        );
                        return Ok(units);
                    }
                }
            }
            if has_yay {
                tracing::debug!("Trying yay -Fl {} for AUR package service units", package);
                if let Ok(output) = Command::new("yay")
                    .args(["-Fl", package])
                    .env("LC_ALL", "C")
                    .env("LANG", "C")
                    .output()
                    && output.status.success()
                {
                    let text = String::from_utf8_lossy(&output.stdout);
                    let units = extract_service_units_from_file_list(&text, package);
                    if !units.is_empty() {
                        tracing::debug!(
                            "Found {} service units from yay -Fl for {}",
                            units.len(),
                            package
                        );
                        return Ok(units);
                    }
                }
            }
            // For AUR packages without file lists available, return empty (not an error)
            // The binary-based detection will still work
            tracing::debug!(
                "No file list available for AUR package {} (will use binary-based detection)",
                package
            );
            Ok(Vec::new())
        }
    }
}
/// What: Extract unit filenames from `pacman -Fl` output.
///
/// Inputs:
/// - `file_list`: Raw `pacman -Fl` stdout (`<pkg> <path>` per line).
/// - `package`: Package name used to keep only matching entries.
///
/// Output:
/// - Unit filenames (e.g. `sshd.service`) in discovery order, without duplicates.
///
/// Details:
/// - Accepts units under `/usr/lib/systemd/system/` and the legacy `/lib/systemd/system/`.
/// - A trailing `/` (directory marker) is stripped before classification, so a directory
///   named like a unit contributes the same filename as the unit file itself.
pub(super) fn extract_service_units_from_file_list(file_list: &str, package: &str) -> Vec<String> {
    let mut seen = HashSet::new();
    file_list
        .lines()
        .filter_map(|line| line.split_once(' '))
        .filter(|(pkg, _)| *pkg == package)
        .filter_map(|(_, raw_path)| {
            let path = raw_path.strip_suffix('/').unwrap_or(raw_path);
            if !is_service_path(path) {
                return None;
            }
            Path::new(path)
                .file_name()
                .and_then(|name| name.to_str())
                .map(ToString::to_string)
        })
        // First occurrence wins; later duplicates are dropped.
        .filter(|name| seen.insert(name.clone()))
        .collect()
}
/// What: Decide whether a path points at a systemd service unit file.
///
/// Inputs:
/// - `path`: File path extracted from `pacman -Fl`.
///
/// Output:
/// - `true` only for `.service` files under a known systemd unit directory.
///
/// Details:
/// - Both `/usr/lib/systemd/system` and the legacy `/lib/systemd/system` roots count.
pub(super) fn is_service_path(path: &str) -> bool {
    path.ends_with(".service")
        && (path.starts_with("/usr/lib/systemd/system/")
            || path.starts_with("/lib/systemd/system/"))
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    /// What: Ensure unit extraction recognises service files and ignores others.
    ///
    /// Inputs:
    /// - Synthetic `pacman -Fl` output containing service files, directories, and
    ///   irrelevant paths.
    ///
    /// Output:
    /// - Confirms only valid `.service` entries are returned.
    ///
    /// Details:
    /// - Verifies both `/usr/lib/systemd/system/` and `/lib/systemd/system/` paths.
    /// - The string-literal lines start at column 0 on purpose: `split_once(' ')`
    ///   would misparse indented lines.
    fn extract_service_units_from_file_list_filters_correctly() {
        let output = "\
mockpkg /usr/lib/systemd/system/example.service
mockpkg /usr/lib/systemd/system/example.service/
mockpkg /usr/lib/systemd/system/example.timer
mockpkg /lib/systemd/system/legacy.service
mockpkg /usr/bin/mock
otherpkg /usr/lib/systemd/system/other.service
";
        let units = extract_service_units_from_file_list(output, "mockpkg");
        // The directory variant of example.service is deduplicated against the file entry.
        assert_eq!(
            units,
            vec!["example.service".to_string(), "legacy.service".to_string()]
        );
    }
    #[test]
    /// What: Ensure duplicate `.service` listings are deduplicated without disturbing order.
    ///
    /// Inputs:
    /// - Synthetic `pacman -Fl` output containing repeated entries for the same units.
    ///
    /// Output:
    /// - Confirms the resulting list contains each unit once in discovery order.
    ///
    /// Details:
    /// - Validates that later duplicates are ignored and first occurrences are retained.
    fn extract_service_units_from_file_list_deduplicates_preserving_order() {
        let output = "\
mockpkg /usr/lib/systemd/system/alpha.service
mockpkg /usr/lib/systemd/system/beta.service
mockpkg /usr/lib/systemd/system/alpha.service/
mockpkg /usr/lib/systemd/system/gamma.service
mockpkg /usr/lib/systemd/system/beta.service/
";
        let units = extract_service_units_from_file_list(output, "mockpkg");
        assert_eq!(
            units,
            vec![
                "alpha.service".to_string(),
                "beta.service".to_string(),
                "gamma.service".to_string()
            ]
        );
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/services/mod.rs | src/logic/services/mod.rs | //! Service impact resolution for the preflight "Services" tab.
mod binaries;
mod command;
mod systemd;
mod units;
use std::collections::BTreeMap;
use std::path::Path;
use crate::state::modal::{ServiceImpact, ServiceRestartDecision};
use crate::state::{PackageItem, PreflightAction};
use binaries::collect_binaries_for_package;
use systemd::{fetch_active_service_binaries, fetch_active_units};
use units::collect_service_units_for_package;
/// What: Resolve systemd service impacts for the selected transaction items.
///
/// Inputs:
/// - `items`: Packages being installed or removed.
/// - `action`: Preflight action (install/update vs. remove).
///
/// Output:
/// - Vector of `ServiceImpact` entries representing impacted systemd units.
///
/// Details:
/// - Inspects `pacman -Fl` output for each package to find shipped unit files.
/// - Determines which units are currently active via `systemctl list-units`.
/// - Heuristically detects binaries that impact active units, even without unit files.
/// - Computes a recommended restart decision; defaults to defer when the unit
///   is inactive or the action is a removal.
pub fn resolve_service_impacts(
    items: &[PackageItem],
    action: PreflightAction,
) -> Vec<ServiceImpact> {
    // Span + timer so the whole resolution shows up as one timed stage in the logs.
    let _span = tracing::info_span!(
        "resolve_service_impacts",
        stage = "services",
        item_count = items.len()
    )
    .entered();
    let start_time = std::time::Instant::now();
    // unit name -> package names providing (or impacting) that unit.
    let mut unit_to_providers: BTreeMap<String, Vec<String>> = BTreeMap::new();
    // First pass: collect units shipped by packages
    for item in items {
        match collect_service_units_for_package(&item.name, &item.source) {
            Ok(units) => {
                for unit in units {
                    let providers = unit_to_providers.entry(unit).or_default();
                    if !providers.iter().any(|name| name == &item.name) {
                        providers.push(item.name.clone());
                    }
                }
            }
            Err(err) => {
                // Only warn for official packages - AUR packages are expected to fail with pacman -Fl
                if matches!(item.source, crate::state::types::Source::Official { .. }) {
                    tracing::warn!(
                        "Failed to resolve service units for package {}: {}",
                        item.name,
                        err
                    );
                } else {
                    tracing::debug!(
                        "Could not resolve service units for AUR package {} (expected): {}",
                        item.name,
                        err
                    );
                }
            }
        }
    }
    // Failure to query systemd degrades gracefully to "nothing active".
    let active_units = fetch_active_units().unwrap_or_else(|err| {
        tracing::warn!("Unable to query active services: {}", err);
        std::collections::BTreeSet::new()
    });
    // Second pass: detect binaries that impact active units (heuristic enhancement)
    // For both Install and Remove: detect which active services use binaries from packages
    if !active_units.is_empty() {
        // Get ExecStart paths for all active services
        let active_service_binaries = fetch_active_service_binaries(&active_units);
        // For each package, check if any of its binaries match active service binaries
        for item in items {
            match collect_binaries_for_package(&item.name, &item.source) {
                Ok(binaries) => {
                    for binary in binaries {
                        // Check if this binary is used by any active service
                        for (unit_name, service_binaries) in &active_service_binaries {
                            // NOTE(review): the `ends_with` heuristics below can cross-match
                            // very short binary names — confirm the false-positive rate is
                            // acceptable for the preflight warning UI.
                            if service_binaries.iter().any(|sb| {
                                // Match exact path, or match binary name
                                // Handle cases like: service uses "/usr/bin/foo", package provides "/usr/bin/foo"
                                // or service uses "/usr/bin/foo", package provides "foo"
                                sb == &binary
                                    || binary.ends_with(sb)
                                    || sb.ends_with(&binary)
                                    || (binary.contains('/')
                                        && sb.contains('/')
                                        && Path::new(sb).file_name()
                                            == Path::new(&binary).file_name())
                            }) {
                                let providers =
                                    unit_to_providers.entry(unit_name.clone()).or_default();
                                if !providers.iter().any(|name| name == &item.name) {
                                    providers.push(item.name.clone());
                                    let action_desc = if matches!(action, PreflightAction::Install)
                                    {
                                        "installing"
                                    } else {
                                        "removing"
                                    };
                                    tracing::debug!(
                                        "Detected binary impact: {} package {} provides {} used by active service {}",
                                        action_desc,
                                        item.name,
                                        binary,
                                        unit_name
                                    );
                                }
                            }
                        }
                    }
                }
                Err(err) => {
                    tracing::debug!(
                        "Failed to collect binaries for package {}: {}",
                        item.name,
                        err
                    );
                }
            }
        }
    }
    // BTreeMap iteration yields units in sorted order; providers are sorted per unit.
    let results: Vec<ServiceImpact> = unit_to_providers
        .into_iter()
        .map(|(unit_name, mut providers)| {
            providers.sort();
            let is_active = active_units.contains(&unit_name);
            // For Install: services need restart after installing new packages
            // For Remove: services will break if active (warn user, but no restart decision needed)
            let needs_restart = matches!(action, PreflightAction::Install) && is_active;
            let recommended_decision = if needs_restart {
                ServiceRestartDecision::Restart
            } else {
                ServiceRestartDecision::Defer
            };
            ServiceImpact {
                unit_name,
                providers,
                is_active,
                needs_restart,
                recommended_decision,
                restart_decision: recommended_decision,
            }
        })
        .collect();
    let elapsed = start_time.elapsed();
    // Saturate rather than panic if the duration ever exceeds u64 milliseconds.
    let duration_ms = u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX);
    tracing::info!(
        stage = "services",
        item_count = items.len(),
        result_count = results.len(),
        duration_ms = duration_ms,
        "Service resolution complete"
    );
    results
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::modal::ServiceRestartDecision;

    #[test]
    /// What: Verify the recommended decision logic defaults to defer when inactive.
    ///
    /// Inputs:
    /// - Crafted service impacts simulating inactive units.
    ///
    /// Output:
    /// - Ensures `resolve_service_impacts` would compute `Defer` when `needs_restart` is false.
    ///
    /// Details:
    /// - Uses direct struct construction to avoid spawning commands in the test.
    fn recommended_decision_default_is_defer_when_inactive() {
        let inactive_impact = ServiceImpact {
            unit_name: String::from("example.service"),
            providers: vec![String::from("pkg")],
            is_active: false,
            needs_restart: false,
            recommended_decision: ServiceRestartDecision::Defer,
            restart_decision: ServiceRestartDecision::Defer,
        };
        assert_eq!(
            inactive_impact.recommended_decision,
            ServiceRestartDecision::Defer
        );
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/services/systemd.rs | src/logic/services/systemd.rs | //! Systemd querying and parsing functions.
use std::collections::{BTreeMap, BTreeSet};
use super::command::run_command;
/// What: Query systemd for the set of services that are currently active.
///
/// Inputs:
/// - None.
///
/// Output:
/// - `Ok(BTreeSet<String>)` of unit names such as `sshd.service`; `Err` with a
///   message when the `systemctl` invocation fails.
///
/// Details:
/// - Invokes `systemctl list-units --type=service --no-legend --state=active`
///   and delegates line parsing to [`parse_active_units`].
pub(super) fn fetch_active_units() -> Result<BTreeSet<String>, String> {
    let args = [
        "list-units",
        "--type=service",
        "--no-legend",
        "--state=active",
    ];
    run_command("systemctl", &args, "systemctl list-units --type=service")
        .map(|stdout| parse_active_units(&stdout))
}
/// What: Extract active `.service` unit names from `systemctl list-units` output.
///
/// Inputs:
/// - `systemctl_output`: Raw stdout captured from the `systemctl` command.
///
/// Output:
/// - Sorted set (`BTreeSet<String>`) of unit names ending in `.service`.
///
/// Details:
/// - Takes the first whitespace-separated field of each line and keeps it only
///   when it carries the `.service` suffix; blank lines and non-service units
///   (sockets, timers, ...) are dropped.
pub(super) fn parse_active_units(systemctl_output: &str) -> BTreeSet<String> {
    let mut units = BTreeSet::new();
    for line in systemctl_output.lines() {
        match line.split_whitespace().next() {
            Some(unit) if unit.ends_with(".service") => {
                units.insert(unit.to_string());
            }
            _ => {}
        }
    }
    units
}
/// What: Collect `ExecStart` binary paths for every active systemd service.
///
/// Inputs:
/// - `active_units`: Set of active unit names to inspect.
///
/// Output:
/// - Map from unit name to the binary paths referenced by that unit.
///
/// Details:
/// - Runs `systemctl show <unit> -p ExecStart` per unit and parses the result
///   with [`parse_execstart_paths`].
/// - Units whose query fails are skipped with a debug log; units with no
///   parsed binaries are omitted from the map.
pub(super) fn fetch_active_service_binaries(
    active_units: &BTreeSet<String>,
) -> BTreeMap<String, Vec<String>> {
    let mut unit_to_binaries = BTreeMap::new();
    for unit in active_units {
        let description = format!("systemctl show {unit} -p ExecStart");
        let output = match run_command("systemctl", &["show", unit, "-p", "ExecStart"], &description)
        {
            Ok(stdout) => stdout,
            Err(err) => {
                tracing::debug!("Failed to get ExecStart for {}: {}", unit, err);
                continue;
            }
        };
        let binaries = parse_execstart_paths(&output);
        if !binaries.is_empty() {
            unit_to_binaries.insert(unit.clone(), binaries);
        }
    }
    unit_to_binaries
}
/// What: Parse `ExecStart` binary paths from `systemctl show` output.
///
/// Inputs:
/// - `systemctl_output`: Raw stdout from `systemctl show -p ExecStart`.
///
/// Output:
/// - Vector of binary paths extracted from `ExecStart`-style properties.
///
/// Details:
/// - Handles the plain format `ExecStart=/usr/bin/binary --args` by taking the
///   first token (stripping surrounding quotes).
/// - Also handles the structured format emitted by `systemctl show`:
///   `ExecStart={ path=/usr/bin/binary ; argv[]=/usr/bin/binary --args ; ... }`,
///   extracting the `path=` entry. The previous implementation returned the
///   literal `{` token for this format, which could never match a real binary.
/// - `ExecStartPre=`/`ExecStartPost=` lines are parsed the same way.
/// - Tokens that are empty or begin with `-` (option-looking) are discarded.
pub(super) fn parse_execstart_paths(systemctl_output: &str) -> Vec<String> {
    let mut binaries = Vec::new();
    for line in systemctl_output.lines() {
        // ExecStart= via strip_prefix; Pre/Post variants via split_once so the
        // exact property name does not matter beyond the prefix check.
        let exec_value = if let Some(value) = line.strip_prefix("ExecStart=") {
            Some(value)
        } else if line.starts_with("ExecStartPre=") || line.starts_with("ExecStartPost=") {
            line.split_once('=').map(|(_, value)| value)
        } else {
            None
        };
        if let Some(value) = exec_value {
            if let Some(path) = extract_exec_binary(value) {
                binaries.push(path);
            }
        }
    }
    binaries
}

/// What: Extract a single binary path from one `Exec*` property value.
///
/// Inputs:
/// - `exec_value`: Text after the `=` of an `ExecStart`-style property.
///
/// Output:
/// - `Some(path)` when a plausible binary path is found; `None` otherwise.
///
/// Details:
/// - For the structured `{ path=... ; argv[]=... }` form, returns the `path=`
///   segment; for the plain form, returns the first whitespace token with any
///   surrounding quotes removed.
fn extract_exec_binary(exec_value: &str) -> Option<String> {
    let trimmed = exec_value.trim();
    if trimmed.starts_with('{') {
        // Structured `systemctl show` form: pull the `path=` segment.
        let inner = trimmed.trim_start_matches('{').trim_end_matches('}');
        for segment in inner.split(';') {
            if let Some(path) = segment.trim().strip_prefix("path=") {
                let path = path.trim().trim_matches('"').trim_matches('\'');
                if !path.is_empty() && !path.starts_with('-') {
                    return Some(path.to_string());
                }
                return None;
            }
        }
        return None;
    }
    let path = trimmed
        .split_whitespace()
        .next()
        .unwrap_or(trimmed)
        .trim_matches('"')
        .trim_matches('\'');
    if path.is_empty() || path.starts_with('-') {
        None
    } else {
        Some(path.to_string())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    /// What: Confirm parsing of active units handles typical `systemctl` output.
    ///
    /// Inputs:
    /// - Representative `systemctl list-units` snippet with multiple columns.
    ///
    /// Output:
    /// - Validates only `.service` units are captured in a sorted set.
    ///
    /// Details:
    /// - Ensures secondary tokens (loaded/active/running) do not impact parsing.
    fn parse_active_units_extracts_first_column() {
        let output = "\
sshd.service loaded active running OpenSSH Daemon
cups.service loaded active running CUPS Scheduler
dbus.socket loaded active running D-Bus Socket
";
        let parsed = parse_active_units(output);
        let mut expected = BTreeSet::new();
        expected.insert("sshd.service".to_string());
        expected.insert("cups.service".to_string());
        assert_eq!(parsed, expected);
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/preflight/command.rs | src/logic/preflight/command.rs | //! Command execution abstraction for preflight operations.
//!
//! This module provides the [`CommandRunner`] trait and implementations for
//! executing system commands, enabling testability through dependency injection.
use std::fmt;
/// What: Abstract command execution interface used for spawning helper
/// binaries such as `pacman`.
///
/// Inputs:
/// - `program`: Executable name to run (for example, `"pacman"`).
/// - `args`: Slice of positional arguments passed to the executable.
///
/// Output:
/// - `Ok(String)` containing UTF-8 stdout on success.
/// - `Err(CommandError)` when the invocation fails or stdout is not valid UTF-8.
///
/// # Errors
/// - Returns `Err(CommandError::Io)` when command spawning or execution fails
/// - Returns `Err(CommandError::Utf8)` when stdout cannot be decoded as UTF-8
/// - Returns `Err(CommandError::Failed)` when the command exits with a non-zero status
///
/// Details:
/// - Implementations may stub command results to enable deterministic unit
/// testing.
/// - Production code relies on [`SystemCommandRunner`].
pub trait CommandRunner {
    /// What: Run `program` with `args` and capture its stdout as UTF-8 text.
    ///
    /// # Errors
    /// - Returns `Err(CommandError::Io)` when command spawning or execution fails
    /// - Returns `Err(CommandError::Utf8)` when stdout cannot be decoded as UTF-8
    /// - Returns `Err(CommandError::Failed)` when the command exits with a non-zero status
    fn run(&self, program: &str, args: &[&str]) -> Result<String, CommandError>;
}
/// What: Production [`CommandRunner`] backed by `std::process::Command`.
///
/// Inputs: Satisfies the [`CommandRunner`] trait without additional parameters.
///
/// Output:
/// - Executes commands on the host system and captures stdout.
///
/// # Errors
/// - Returns `Err(CommandError::Io)` when command spawning or execution fails
/// - Returns `Err(CommandError::Utf8)` when stdout cannot be decoded as UTF-8
/// - Returns `Err(CommandError::Failed)` when the command exits with a non-zero status
///
/// Details:
/// - Spawn/read failures from `std::process::Command::output` surface as
///   [`CommandError::Io`] via the `From` conversion.
#[derive(Default)]
pub struct SystemCommandRunner;

impl CommandRunner for SystemCommandRunner {
    fn run(&self, program: &str, args: &[&str]) -> Result<String, CommandError> {
        let output = std::process::Command::new(program).args(args).output()?;
        if output.status.success() {
            // Non-UTF-8 stdout is converted into CommandError::Utf8 by `?`.
            Ok(String::from_utf8(output.stdout)?)
        } else {
            Err(CommandError::Failed {
                program: program.to_owned(),
                args: args.iter().map(|arg| (*arg).to_owned()).collect(),
                status: output.status,
            })
        }
    }
}
/// What: Error type capturing command spawning, execution, and decoding
/// failures.
///
/// Inputs: Generated internally by helper routines.
///
/// Output: Implements `Display`/`Error` for ergonomic propagation.
///
/// Details:
/// - Represents various failure modes when executing system commands.
/// - Wraps I/O errors, UTF-8 conversion failures, parsing issues, and
///   non-success exit statuses.
#[derive(Debug)]
pub enum CommandError {
    /// I/O error occurred (command spawn or output capture failed).
    Io(std::io::Error),
    /// UTF-8 decoding error occurred while converting captured stdout.
    Utf8(std::string::FromUtf8Error),
    /// Command ran but exited with a non-zero status.
    Failed {
        /// Program name that failed.
        program: String,
        /// Command arguments (owned copies kept for diagnostics).
        args: Vec<String>,
        /// Exit status of the failed command.
        status: std::process::ExitStatus,
    },
    /// Parse error when processing command output.
    Parse {
        /// Program name that produced invalid output.
        program: String,
        /// Field name that failed to parse.
        field: String,
    },
}
impl fmt::Display for CommandError {
    /// Render a human-readable description for each failure mode.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Io(err) => write!(f, "I/O error: {err}"),
            Self::Utf8(err) => write!(f, "UTF-8 decoding error: {err}"),
            Self::Failed {
                program,
                args,
                status,
            } => write!(f, "{program:?} {args:?} exited with status {status}"),
            Self::Parse { program, field } => write!(
                f,
                "{program} output did not contain expected field \"{field}\""
            ),
        }
    }
}
impl std::error::Error for CommandError {
    /// Expose the wrapped error (if any) as the source for error chains.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Io(err) => Some(err),
            Self::Utf8(err) => Some(err),
            // Failed/Parse carry no nested error value.
            Self::Failed { .. } => None,
            Self::Parse { .. } => None,
        }
    }
}
/// Convert spawn/read failures into [`CommandError::Io`] so `?` works in `run`.
impl From<std::io::Error> for CommandError {
    fn from(value: std::io::Error) -> Self {
        Self::Io(value)
    }
}
/// Convert stdout decoding failures into [`CommandError::Utf8`] so `?` works in `run`.
impl From<std::string::FromUtf8Error> for CommandError {
    fn from(value: std::string::FromUtf8Error) -> Self {
        Self::Utf8(value)
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/preflight/version.rs | src/logic/preflight/version.rs | //! Version comparison utilities for preflight analysis.
//!
//! This module provides functions to compare version strings and detect
//! major version bumps.
use std::cmp::Ordering;
/// What: Compare dotted version strings numerically.
///
/// Inputs:
/// - `a`: Left-hand version.
/// - `b`: Right-hand version.
///
/// Output:
/// - `Ordering` indicating which version is greater.
///
/// Details:
/// - Tokenizes on `.` and `-`; missing segments are treated as `"0"`, so
///   `1.2` compares equal to `1.2.0`.
/// - Segments that both parse as integers compare numerically; otherwise the
///   comparison falls back to plain string ordering.
pub(super) fn compare_versions(a: &str, b: &str) -> Ordering {
    let left: Vec<&str> = a.split(['.', '-']).collect();
    let right: Vec<&str> = b.split(['.', '-']).collect();
    let segment_count = left.len().max(right.len());
    for idx in 0..segment_count {
        let x = left.get(idx).copied().unwrap_or("0");
        let y = right.get(idx).copied().unwrap_or("0");
        let ord = match (x.parse::<i64>(), y.parse::<i64>()) {
            (Ok(xn), Ok(yn)) => xn.cmp(&yn),
            _ => x.cmp(y),
        };
        if ord != Ordering::Equal {
            return ord;
        }
    }
    Ordering::Equal
}
/// What: Determine whether `new` constitutes a major version bump relative to
/// `old`.
///
/// Inputs:
/// - `old`: Currently installed version.
/// - `new`: Target version.
///
/// Output:
/// - `true` when the major component increased; `false` otherwise (including
///   when either major component is not numeric).
///
/// Details:
/// - Only the leading numeric segment (before the first `.`/`-`) participates.
pub(super) fn is_major_version_bump(old: &str, new: &str) -> bool {
    let (Some(old_major), Some(new_major)) =
        (extract_major_component(old), extract_major_component(new))
    else {
        return false;
    };
    new_major > old_major
}

/// What: Extract the leading numeric component from a version string.
///
/// Inputs:
/// - `version`: Version string to parse.
///
/// Output:
/// - `Some(u64)` for the first numeric segment; `None` when it is absent or
///   not a base-10 integer.
///
/// Details:
/// - Tokenizes on `.` and `-` and parses only the first token.
fn extract_major_component(version: &str) -> Option<u64> {
    version
        .split(['.', '-'])
        .next()
        .and_then(|token| token.parse::<u64>().ok())
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/preflight/tests.rs | src/logic/preflight/tests.rs | //! Unit tests for preflight summary computation.
use super::*;
use crate::state::types::Source;
use std::collections::HashMap;
use std::os::unix::process::ExitStatusExt;
use std::sync::Mutex;
// Key identifying a mocked invocation: (program, argument list).
type MockCommandKey = (String, Vec<String>);
// Canned result returned for one mocked invocation.
type MockCommandResult = Result<String, CommandError>;
// One-shot response table; entries are consumed as commands execute.
type MockResponseMap = HashMap<MockCommandKey, MockCommandResult>;
#[derive(Default)]
struct MockRunner {
    // Mutex gives `run(&self, ..)` interior mutability to pop responses.
    responses: Mutex<MockResponseMap>,
}
impl MockRunner {
    #[allow(clippy::missing_const_for_fn)]
    // Build a runner pre-loaded with canned responses.
    fn with(responses: MockResponseMap) -> Self {
        Self {
            responses: Mutex::new(responses),
        }
    }
}
impl CommandRunner for MockRunner {
    /// Pop the canned response for `(program, args)`; any unexpected command
    /// yields a generic non-zero-exit `CommandError::Failed`.
    fn run(&self, program: &str, args: &[&str]) -> Result<String, CommandError> {
        let owned_args: Vec<String> = args.iter().map(ToString::to_string).collect();
        let key = (program.to_string(), owned_args.clone());
        let mut responses = self.responses.lock().expect("poisoned responses mutex");
        match responses.remove(&key) {
            Some(result) => result,
            None => Err(CommandError::Failed {
                program: program.to_string(),
                args: owned_args,
                status: std::process::ExitStatus::from_raw(1),
            }),
        }
    }
}
#[test]
/// What: Ensure core package major bumps elevate risk and populate notes.
///
/// Inputs:
/// - Single core package (`systemd`) transitioning from `1.0.0` to `2.0.0`.
///
/// Output:
/// - Risk score escalates to the "High" bucket with appropriate notes and chip totals.
fn summary_identifies_core_major_bump() {
    // Canned pacman outputs for the three queries the summary path issues:
    // -Q (installed version), -Qi (installed size), -Si (repo metadata).
    let mut responses = HashMap::new();
    responses.insert(
        ("pacman".into(), vec!["-Q".into(), "systemd".into()]),
        Ok("systemd 1.0.0\n".to_string()),
    );
    responses.insert(
        ("pacman".into(), vec!["-Qi".into(), "systemd".into()]),
        Ok("Name : systemd\nInstalled Size : 4.00 MiB\n".to_string()),
    );
    responses.insert(
        ("pacman".into(), vec!["-Si".into(), "extra/systemd".into()]),
        Ok("Repository : extra\nName : systemd\nVersion : 2.0.0\nDownload Size : 2.00 MiB\nInstalled Size : 5.00 MiB\n".to_string()),
    );
    let runner = MockRunner::with(responses);
    let item = PackageItem {
        name: "systemd".into(),
        version: "2.0.0".into(),
        description: "system init".into(),
        source: Source::Official {
            repo: "extra".into(),
            arch: "x86_64".into(),
        },
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    let outcome = compute_preflight_summary_with_runner(&[item], PreflightAction::Install, &runner);
    assert_eq!(outcome.summary.package_count, 1);
    assert_eq!(outcome.summary.aur_count, 0);
    // Core package + major version bump should land in the "High" risk bucket.
    assert_eq!(outcome.summary.risk_score, 5);
    assert_eq!(outcome.summary.risk_level, RiskLevel::High);
    assert!(
        outcome
            .summary
            .major_bump_packages
            .iter()
            .any(|name| name == "systemd")
    );
    assert!(
        outcome
            .summary
            .core_system_updates
            .iter()
            .any(|name| name == "systemd")
    );
    assert_eq!(
        outcome.summary.download_bytes,
        2 * 1024 * 1024,
        "Download bytes should match pacman -Si output"
    );
    assert_eq!(
        outcome.summary.install_delta_bytes,
        i64::from(5 * 1024 * 1024) - i64::from(4 * 1024 * 1024),
        "Install delta should reflect target minus current size"
    );
    assert!(
        outcome
            .summary
            .summary_warnings
            .iter()
            .any(|reason| reason.contains("Core/system"))
    );
    // Header chips must mirror the summary totals.
    assert_eq!(outcome.header.risk_score, 5);
    assert_eq!(outcome.header.package_count, 1);
    assert_eq!(
        outcome.summary.packages[0].install_delta_bytes,
        Some(i64::from(5 * 1024 * 1024) - i64::from(4 * 1024 * 1024))
    );
}
#[test]
/// What: Confirm AUR-only transactions contribute to risk heuristics even without metadata.
///
/// Inputs:
/// - Single AUR package with no pacman metadata responses configured.
///
/// Output:
/// - Risk score increments by the AUR heuristic and remains within the "Medium" bucket.
fn summary_handles_aur_without_metadata() {
    // No canned responses: every pacman query fails, exercising the
    // metadata-unavailable path.
    let runner = MockRunner::default();
    let aur_item = PackageItem {
        name: "my-aur-tool".into(),
        version: "1.4.0".into(),
        description: "AUR utility".into(),
        source: Source::Aur,
        popularity: Some(42.0),
        out_of_date: None,
        orphaned: false,
    };
    let outcome =
        compute_preflight_summary_with_runner(&[aur_item], PreflightAction::Install, &runner);
    let summary = &outcome.summary;
    assert_eq!(summary.package_count, 1);
    assert_eq!(summary.aur_count, 1);
    assert_eq!(summary.risk_score, 2);
    assert_eq!(summary.risk_level, RiskLevel::Medium);
    assert!(
        summary
            .risk_reasons
            .iter()
            .any(|reason| reason.contains("AUR"))
    );
    assert_eq!(outcome.header.aur_count, 1);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/preflight/batch.rs | src/logic/preflight/batch.rs | //! Batch fetching utilities for package metadata.
//!
//! This module provides functions to efficiently fetch installed versions and
//! sizes for multiple packages in batches.
use super::command::{CommandError, CommandRunner};
use super::metadata::{
fetch_installed_size, fetch_installed_version, parse_pacman_key_values, parse_size_to_bytes,
};
use crate::state::types::PackageItem;
/// What: Batch fetch installed versions for multiple packages using `pacman -Q`.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `items`: Packages to query.
///
/// Output:
/// - Vector of results aligned with `items` (Ok(version) or Err).
///
/// Details:
/// - Issues one `pacman -Q name1 name2 ...` per chunk of 50 packages to stay
///   clear of command-line length limits; each output line is `name version`.
/// - When a whole batch fails, degrades to one `pacman -Q` per package.
pub(super) fn batch_fetch_installed_versions<R: CommandRunner>(
    runner: &R,
    items: &[PackageItem],
) -> Vec<Result<String, CommandError>> {
    const BATCH_SIZE: usize = 50;
    let mut results = Vec::with_capacity(items.len());
    for chunk in items.chunks(BATCH_SIZE) {
        let mut args: Vec<&str> = Vec::with_capacity(chunk.len() + 1);
        args.push("-Q");
        args.extend(chunk.iter().map(|item| item.name.as_str()));
        match runner.run("pacman", &args) {
            Ok(output) => {
                // Index the batch output by package name (first token → last token).
                let mut version_map = std::collections::HashMap::new();
                for line in output.lines() {
                    let mut tokens = line.split_whitespace();
                    if let (Some(name), Some(version)) = (tokens.next(), tokens.next_back()) {
                        version_map.insert(name, version.to_string());
                    }
                }
                // Emit results in the caller's original order.
                for item in chunk {
                    match version_map.get(item.name.as_str()) {
                        Some(version) => results.push(Ok(version.clone())),
                        None => results.push(Err(CommandError::Parse {
                            program: "pacman -Q".to_string(),
                            field: format!("version for {}", item.name),
                        })),
                    }
                }
            }
            Err(_) => {
                // Batch failure: degrade gracefully to per-package queries.
                for item in chunk {
                    results.push(fetch_installed_version(runner, &item.name));
                }
            }
        }
    }
    results
}
/// What: Batch fetch installed sizes for multiple packages using `pacman -Qi`.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `items`: Packages to query.
///
/// Output:
/// - Vector of results aligned with `items` (`Ok(size_bytes)` or `Err`).
///
/// Details:
/// - Issues one `pacman -Qi name1 name2 ...` per chunk of 50; the combined
///   output separates packages with blank lines.
/// - When a whole batch fails, degrades to one query per package.
pub(super) fn batch_fetch_installed_sizes<R: CommandRunner>(
    runner: &R,
    items: &[PackageItem],
) -> Vec<Result<u64, CommandError>> {
    const BATCH_SIZE: usize = 50;
    let mut results = Vec::with_capacity(items.len());
    for chunk in items.chunks(BATCH_SIZE) {
        let mut args: Vec<&str> = Vec::with_capacity(chunk.len() + 1);
        args.push("-Qi");
        args.extend(chunk.iter().map(|item| item.name.as_str()));
        match runner.run("pacman", &args) {
            Ok(output) => {
                // Split the combined output into one block per package
                // (blank or whitespace-only lines act as separators).
                let mut package_blocks: Vec<String> = Vec::new();
                let mut current_block = String::new();
                for line in output.lines() {
                    if line.trim().is_empty() {
                        if !current_block.is_empty() {
                            package_blocks.push(std::mem::take(&mut current_block));
                        }
                    } else {
                        current_block.push_str(line);
                        current_block.push('\n');
                    }
                }
                if !current_block.is_empty() {
                    package_blocks.push(current_block);
                }
                // Index each block's "Installed Size" by its "Name" field.
                let mut size_map = std::collections::HashMap::new();
                for block in package_blocks {
                    let fields = parse_pacman_key_values(&block);
                    let name = fields.get("Name").map(|value| value.trim());
                    let size_str = fields.get("Installed Size").map(|value| value.trim());
                    if let (Some(name), Some(size_str)) = (name, size_str) {
                        if let Some(size_bytes) = parse_size_to_bytes(size_str) {
                            size_map.insert(name.to_string(), size_bytes);
                        }
                    }
                }
                // Emit results in the caller's original order.
                for item in chunk {
                    match size_map.get(&item.name) {
                        Some(size) => results.push(Ok(*size)),
                        None => results.push(Err(CommandError::Parse {
                            program: "pacman -Qi".to_string(),
                            field: format!("Installed Size for {}", item.name),
                        })),
                    }
                }
            }
            Err(_) => {
                // Batch failure: degrade gracefully to per-package queries.
                for item in chunk {
                    results.push(fetch_installed_size(runner, &item.name));
                }
            }
        }
    }
    results
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/preflight/mod.rs | src/logic/preflight/mod.rs | //! Preflight summary computation helpers.
//!
//! The routines in this module gather package metadata, estimate download and
//! install deltas, and derive risk heuristics used to populate the preflight
//! modal. All command execution is abstracted behind [`CommandRunner`] so the
//! logic can be exercised in isolation.
mod batch;
mod command;
mod metadata;
mod version;
use crate::state::modal::{
PreflightAction, PreflightHeaderChips, PreflightPackageSummary, PreflightSummaryData, RiskLevel,
};
use crate::state::types::{PackageItem, Source};
use std::cmp::Ordering;
pub use command::{CommandError, CommandRunner, SystemCommandRunner};
use batch::{batch_fetch_installed_sizes, batch_fetch_installed_versions};
use version::{compare_versions, is_major_version_bump};
/// Packages that contribute additional risk when present in a transaction.
///
/// NOTE(review): entries look like exact pacman package names; presumably
/// compared verbatim against `PackageItem::name` — confirm in
/// `check_core_package`.
const CORE_CRITICAL_PACKAGES: &[&str] = &[
    "linux",
    "linux-lts",
    "linux-zen",
    "systemd",
    "glibc",
    "openssl",
    "pacman",
    "bash",
    "util-linux",
    "filesystem",
];
/// What: Outcome of preflight summary computation.
///
/// Inputs: Produced by the summary computation helpers from package items and dependencies.
///
/// Output:
/// - `summary`: Structured data powering the Summary tab.
/// - `header`: Condensed metrics displayed in the modal header and execution sidebar.
/// - `reverse_deps_report`: Optional reverse dependency report for Remove actions,
///   cached to avoid redundant resolution when switching to the Deps tab.
///
/// Details:
/// - Bundled together so downstream code can reuse the derived chip data without recomputation.
/// - Contains the preflight summary data along with header metrics and optional reverse dependency information.
/// - For Remove actions, the reverse dependency report is computed during summary
///   computation and cached here to avoid recomputation when the user switches tabs.
/// - `Clone` lets callers duplicate the cached outcome — presumably for modal
///   re-renders; confirm at call sites.
#[derive(Debug, Clone)]
pub struct PreflightSummaryOutcome {
    /// Preflight summary data (drives the Summary tab).
    pub summary: PreflightSummaryData,
    /// Header chip metrics (risk score, package/AUR counts).
    pub header: PreflightHeaderChips,
    /// Cached reverse dependency report for Remove actions (None for Install actions).
    pub reverse_deps_report: Option<crate::logic::deps::ReverseDependencyReport>,
}
/// What: Compute preflight summary data using the system command runner.
///
/// Inputs:
/// - `items`: Packages scheduled for install/update/remove.
/// - `action`: Active operation (install vs. remove) shaping the analysis.
///
/// Output:
/// - [`PreflightSummaryOutcome`] combining Summary tab data and header chips.
///
/// Details:
/// - Thin wrapper that forwards to [`compute_preflight_summary_with_runner`]
///   with a [`SystemCommandRunner`].
/// - Metadata lookups that fail are logged and treated as best-effort by the
///   delegate.
#[must_use]
pub fn compute_preflight_summary(
    items: &[PackageItem],
    action: PreflightAction,
) -> PreflightSummaryOutcome {
    compute_preflight_summary_with_runner(items, action, &SystemCommandRunner)
}
/// What: Intermediate state accumulated during package processing.
///
/// Inputs: Built incrementally while iterating packages.
///
/// Output: Used to construct the final summary and risk calculations.
///
/// Details: Groups related mutable state to reduce parameter passing.
struct ProcessingState {
    /// Packages being processed for preflight.
    packages: Vec<PreflightPackageSummary>,
    /// Count of AUR packages.
    aur_count: usize,
    /// Total download size in bytes.
    total_download_bytes: u64,
    /// Total install size delta in bytes (can be negative).
    total_install_delta_bytes: i64,
    /// Packages with major version bumps.
    major_bump_packages: Vec<String>,
    /// Core system packages being updated.
    core_system_updates: Vec<String>,
    /// Whether any package has a major version bump (mirrors `major_bump_packages`
    /// for O(1) checks).
    any_major_bump: bool,
    /// Whether any core system package is being updated (mirrors `core_system_updates`).
    any_core_update: bool,
    /// Whether any AUR package is included in the transaction.
    any_aur: bool,
}
impl ProcessingState {
    /// What: Create a new processing state with specified capacity.
    ///
    /// Inputs:
    /// - `capacity`: Initial capacity for the packages vector.
    ///
    /// Output: New `ProcessingState` with empty collections.
    ///
    /// Details: Initializes all fields to default/empty values with the specified capacity.
    fn new(capacity: usize) -> Self {
        Self {
            packages: Vec::with_capacity(capacity),
            aur_count: 0,
            total_download_bytes: 0,
            total_install_delta_bytes: 0,
            major_bump_packages: Vec::new(),
            core_system_updates: Vec::new(),
            any_major_bump: false,
            any_core_update: false,
            any_aur: false,
        }
    }
}
/// What: Process a single package item and update processing state.
///
/// Inputs:
/// - `item`: Package to process.
/// - `action`: Install vs. remove context.
/// - `runner`: Command execution abstraction.
/// - `installed_version`: Previously fetched installed version (if any).
/// - `installed_size`: Previously fetched installed size (if any).
/// - `state`: Mutable state accumulator.
///
/// Output: Updates `state` in place.
///
/// Details:
/// - Fetches metadata for official packages.
/// - Computes version comparisons and notes.
/// - Detects core packages and major version bumps.
fn process_package_item<R: CommandRunner>(
    item: &PackageItem,
    action: PreflightAction,
    runner: &R,
    installed_version: Option<String>,
    installed_size: Option<u64>,
    state: &mut ProcessingState,
) {
    // Tally AUR membership up front; risk heuristics read these flags later.
    if matches!(item.source, Source::Aur) {
        state.aur_count += 1;
        state.any_aur = true;
    }
    // Missing metadata is best-effort: log and continue with None values.
    if installed_version.is_none() {
        tracing::debug!(
            "Preflight summary: failed to fetch installed version for {}",
            item.name
        );
    }
    if installed_size.is_none() {
        tracing::debug!(
            "Preflight summary: failed to fetch installed size for {}",
            item.name
        );
    }
    let (download_bytes, install_size_target) = fetch_package_metadata(runner, item);
    let install_delta_bytes = calculate_install_delta(action, install_size_target, installed_size);
    // Saturating arithmetic keeps the running totals sane for extreme sizes.
    if let Some(bytes) = download_bytes {
        state.total_download_bytes = state.total_download_bytes.saturating_add(bytes);
    }
    if let Some(delta) = install_delta_bytes {
        state.total_install_delta_bytes = state.total_install_delta_bytes.saturating_add(delta);
    }
    let (notes, is_major_bump, is_downgrade) = analyze_version_changes(
        installed_version.as_ref(),
        &item.version,
        action,
        item.name.clone(),
        &mut state.major_bump_packages,
        &mut state.any_major_bump,
    );
    let core_note = check_core_package(
        item,
        action,
        &mut state.core_system_updates,
        &mut state.any_core_update,
    );
    let mut all_notes = notes;
    if let Some(note) = core_note {
        all_notes.push(note);
    }
    // For Install actions, add note about installed packages that depend on this package
    if matches!(action, PreflightAction::Install) && installed_version.is_some() {
        let dependents = crate::logic::deps::get_installed_required_by(&item.name);
        if !dependents.is_empty() {
            // Keep the note short: list at most three dependents explicitly.
            let dependents_list = if dependents.len() <= 3 {
                dependents.join(", ")
            } else {
                format!(
                    "{} (and {} more)",
                    dependents[..3].join(", "),
                    dependents.len() - 3
                )
            };
            all_notes.push(format!("Required by installed packages: {dependents_list}"));
        }
    }
    state.packages.push(PreflightPackageSummary {
        name: item.name.clone(),
        source: item.source.clone(),
        installed_version,
        target_version: item.version.clone(),
        is_downgrade,
        is_major_bump,
        download_bytes,
        install_delta_bytes,
        notes: all_notes,
    });
}
/// What: Fetch metadata for official and AUR packages.
///
/// Inputs:
/// - `runner`: Command execution abstraction.
/// - `item`: Package item to fetch metadata for.
///
/// Output: Tuple of (`download_bytes`, `install_size_target`), both `Option`.
///
/// Details:
/// - Official packages: queried via `metadata::fetch_official_metadata`
///   (`pacman -Si`); failures are logged and yield `(None, None)`.
/// - AUR packages: sizes come from local caches via
///   `metadata::fetch_aur_metadata`, logged when anything was found.
fn fetch_package_metadata<R: CommandRunner>(
    runner: &R,
    item: &PackageItem,
) -> (Option<u64>, Option<u64>) {
    match &item.source {
        Source::Official { repo, .. } => {
            let fetched =
                metadata::fetch_official_metadata(runner, repo, &item.name, item.version.as_str());
            match fetched {
                Ok(meta) => (meta.download_size, meta.install_size),
                Err(err) => {
                    tracing::debug!(
                        "Preflight summary: failed to fetch metadata for {repo}/{pkg}: {err}",
                        pkg = item.name
                    );
                    (None, None)
                }
            }
        }
        Source::Aur => {
            let meta =
                metadata::fetch_aur_metadata(runner, &item.name, Some(item.version.as_str()));
            let found_any_size = meta.download_size.is_some() || meta.install_size.is_some();
            if found_any_size {
                tracing::debug!(
                    "Preflight summary: found AUR package sizes for {}: DL={:?}, Install={:?}",
                    item.name,
                    meta.download_size,
                    meta.install_size
                );
            }
            (meta.download_size, meta.install_size)
        }
    }
}
/// What: Calculate install size delta based on action type.
///
/// Inputs:
/// - `action`: Install vs. downgrade vs. remove context.
/// - `install_size_target`: Target install size (for installs/downgrades).
/// - `installed_size`: Current installed size.
///
/// Output: Delta in bytes (positive when disk usage grows, negative when it
/// shrinks); `None` when the required metadata is unavailable or does not fit
/// in `i64`.
///
/// Details:
/// - Install and Downgrade both replace the on-disk payload, so they share the
///   same `target - current` computation (previously duplicated in two
///   identical match arms).
/// - Remove frees the entire installed size.
fn calculate_install_delta(
    action: PreflightAction,
    install_size_target: Option<u64>,
    installed_size: Option<u64>,
) -> Option<i64> {
    match action {
        PreflightAction::Install | PreflightAction::Downgrade => {
            install_size_target.and_then(|target| {
                // Treat "not installed" as zero bytes currently on disk.
                let current = installed_size.unwrap_or(0);
                let target_i64 = i64::try_from(target).ok()?;
                let current_i64 = i64::try_from(current).ok()?;
                Some(target_i64 - current_i64)
            })
        }
        PreflightAction::Remove => {
            installed_size.and_then(|size| i64::try_from(size).ok().map(|s| -s))
        }
    }
}
/// What: Analyze version changes and generate notes.
///
/// Inputs:
/// - `installed_version`: Current installed version (if any).
/// - `target_version`: Target version.
/// - `action`: Install vs. remove context.
/// - `package_name`: Name of the package.
/// - `major_bump_packages`: Mutable list to append to if major bump detected.
/// - `any_major_bump`: Mutable flag to set if major bump detected.
///
/// Output: Tuple of (`notes`, `is_major_bump`, `is_downgrade`).
///
/// Details: Detects downgrades, major version bumps, and new installations.
fn analyze_version_changes(
    installed_version: Option<&String>,
    target_version: &str,
    action: PreflightAction,
    package_name: String,
    major_bump_packages: &mut Vec<String>,
    any_major_bump: &mut bool,
) -> (Vec<String>, bool, bool) {
    // Nothing installed yet: only an install produces a note.
    let Some(current) = installed_version else {
        let notes = if matches!(action, PreflightAction::Install) {
            vec!["New installation".to_string()]
        } else {
            Vec::new()
        };
        return (notes, false, false);
    };
    let mut notes = Vec::new();
    let mut is_major_bump = false;
    let mut is_downgrade = false;
    match compare_versions(current, target_version) {
        // Installed version is newer than the target: flag downgrades on installs.
        Ordering::Greater if matches!(action, PreflightAction::Install) => {
            is_downgrade = true;
            notes.push(format!("Downgrade detected: {current} → {target_version}"));
        }
        // Target is newer: record major version bumps for risk accounting.
        Ordering::Less if is_major_version_bump(current, target_version) => {
            is_major_bump = true;
            *any_major_bump = true;
            major_bump_packages.push(package_name);
            notes.push(format!("Major version bump: {current} → {target_version}"));
        }
        _ => {}
    }
    (notes, is_major_bump, is_downgrade)
}
/// What: Check if package is a core/system package and generate note.
///
/// Inputs:
/// - `item`: Package item to check.
/// - `action`: Install vs. remove context.
/// - `core_system_updates`: Mutable list to append to if core package.
/// - `any_core_update`: Mutable flag to set if core package.
///
/// Output: Optional note string if core package detected.
///
/// Details: Normalizes package name for comparison against critical packages list.
fn check_core_package(
    item: &PackageItem,
    action: PreflightAction,
    core_system_updates: &mut Vec<String>,
    any_core_update: &mut bool,
) -> Option<String> {
    // Compare case-insensitively against the critical package list.
    let lowered = item.name.to_ascii_lowercase();
    let is_critical = CORE_CRITICAL_PACKAGES.iter().any(|c| lowered == *c);
    if !is_critical {
        return None;
    }
    *any_core_update = true;
    core_system_updates.push(item.name.clone());
    let note = if matches!(action, PreflightAction::Remove) {
        "Removing core/system package".to_string()
    } else {
        "Core/system package update".to_string()
    };
    Some(note)
}
/// What: Calculate risk reasons and score from processing state.
///
/// Inputs:
/// - `state`: Processing state with accumulated flags.
/// - `pacnew_candidates`: Count of packages that may produce .pacnew files.
/// - `service_restart_units`: List of services that need restart.
/// - `action`: Preflight action (Install vs Remove).
/// - `dependent_count`: Number of packages that depend on packages being removed (for Remove actions).
///
/// Output: Tuple of (`risk_reasons`, `risk_score`, `risk_level`).
///
/// Details: Applies the risk heuristic scoring system; every triggered
/// heuristic appends a human-readable reason and raises the `u8` score via
/// `saturating_add` so it can never wrap.
fn calculate_risk_metrics(
    state: &ProcessingState,
    pacnew_candidates: usize,
    service_restart_units: &[String],
    action: PreflightAction,
    dependent_count: usize,
) -> (Vec<String>, u8, RiskLevel) {
    let mut risk_reasons = Vec::new();
    let mut risk_score: u8 = 0;
    if state.any_core_update {
        risk_reasons.push("Core/system packages involved (+3)".to_string());
        risk_score = risk_score.saturating_add(3);
    }
    if state.any_major_bump {
        risk_reasons.push("Major version bump detected (+2)".to_string());
        risk_score = risk_score.saturating_add(2);
    }
    if state.any_aur {
        risk_reasons.push("AUR packages included (+2)".to_string());
        risk_score = risk_score.saturating_add(2);
    }
    if pacnew_candidates > 0 {
        risk_reasons.push("Configuration files may produce .pacnew (+1)".to_string());
        risk_score = risk_score.saturating_add(1);
    }
    if !service_restart_units.is_empty() {
        risk_reasons.push("Services likely require restart (+1)".to_string());
        risk_score = risk_score.saturating_add(1);
    }
    // For Remove actions, add risk when removing packages with dependencies
    if matches!(action, PreflightAction::Remove) && dependent_count > 0 {
        let risk_points = if dependent_count >= 5 {
            3 // High risk for many dependencies
        } else if dependent_count >= 2 {
            2 // Medium risk for multiple dependencies
        } else {
            1 // Low risk for single dependency
        };
        risk_reasons.push(format!(
            "Removing packages with {dependent_count} dependent package(s) (+{risk_points})"
        ));
        risk_score = risk_score.saturating_add(risk_points);
    }
    // For Install actions, add +2 per installed package that depends on a
    // package being updated. `try_from` + `unwrap_or` saturates the points at
    // u8::MAX; the previous `.min(255)` pre-cap made the fallback dead code
    // and has been removed.
    if matches!(action, PreflightAction::Install) && dependent_count > 0 {
        let risk_points = u8::try_from(dependent_count.saturating_mul(2)).unwrap_or(u8::MAX);
        risk_reasons.push(format!(
            "{dependent_count} installed package(s) depend on packages being updated (+{risk_points})"
        ));
        risk_score = risk_score.saturating_add(risk_points);
    }
    let risk_level = match risk_score {
        0 => RiskLevel::Low,
        1..=4 => RiskLevel::Medium,
        _ => RiskLevel::High,
    };
    (risk_reasons, risk_score, risk_level)
}
/// What: Build summary notes from processing state.
///
/// Inputs:
/// - `state`: Processing state with accumulated flags.
///
/// Output: Vector of summary note strings.
///
/// Details: Generates informational notes for the summary tab.
fn build_summary_notes(state: &ProcessingState) -> Vec<String> {
    // Pair each accumulated flag with its message and keep only the
    // messages whose flag is set, preserving declaration order.
    let candidates = [
        (state.any_core_update, "Core/system packages will be modified."),
        (
            state.any_major_bump,
            "Major version changes detected; review changelogs.",
        ),
        (state.any_aur, "AUR packages present; build steps may vary."),
    ];
    candidates
        .into_iter()
        .filter(|(flag, _)| *flag)
        .map(|(_, msg)| msg.to_string())
        .collect()
}
/// What: Process all package items and populate processing state.
///
/// Inputs:
/// - `items`: Packages to process.
/// - `action`: Install vs. remove context.
/// - `runner`: Command execution abstraction.
/// - `state`: Mutable state accumulator.
///
/// Output: Updates `state` in place.
///
/// Details: Batch fetches installed versions/sizes and processes each package.
fn process_all_packages<R: CommandRunner>(
items: &[PackageItem],
action: PreflightAction,
runner: &R,
state: &mut ProcessingState,
) {
let installed_versions = batch_fetch_installed_versions(runner, items);
let installed_sizes = batch_fetch_installed_sizes(runner, items);
for (idx, item) in items.iter().enumerate() {
let installed_version = installed_versions
.get(idx)
.and_then(|v| v.as_ref().ok())
.cloned();
let installed_size = installed_sizes
.get(idx)
.and_then(|s| s.as_ref().ok())
.copied();
process_package_item(
item,
action,
runner,
installed_version,
installed_size,
state,
);
}
}
/// What: Resolve reverse dependencies for Remove actions and count installed dependents for Install actions.
///
/// Inputs:
/// - `items`: Packages being removed or installed/updated.
/// - `action`: Preflight action (Install vs Remove).
///
/// Output: Tuple of (`dependent_count`, `reverse_deps_report`).
///
/// Details:
/// - For Remove actions: resolves and counts all dependent packages.
/// - For Install actions: counts the total number of installed packages that depend on packages being updated.
fn resolve_reverse_deps(
    items: &[PackageItem],
    action: PreflightAction,
) -> (usize, Option<crate::logic::deps::ReverseDependencyReport>) {
    match action {
        PreflightAction::Remove => {
            // Removals get a full reverse-dependency report; its entry count
            // doubles as the dependent count.
            let report = crate::logic::deps::resolve_reverse_dependencies(items);
            (report.dependencies.len(), Some(report))
        }
        _ => {
            // Non-removal actions: sum the installed packages that depend on
            // each item that is already installed (updates/reinstalls only).
            let total_dependents: usize = items
                .iter()
                .filter(|item| crate::index::is_installed(&item.name))
                .map(|item| crate::logic::deps::get_installed_required_by(&item.name).len())
                .sum();
            (total_dependents, None)
        }
    }
}
/// What: Build summary data structure from processing state and risk metrics.
///
/// Inputs:
/// - `state`: Processing state with accumulated data.
/// - `items`: Original package items (for count).
/// - `risk_reasons`: Risk reason strings.
/// - `risk_score`: Calculated risk score.
/// - `risk_level`: Calculated risk level.
///
/// Output: [`PreflightSummaryData`] structure.
///
/// Details: Constructs the complete summary data structure; the pacnew /
/// pacsave / config / service fields are initialised to zero/empty here.
fn build_summary_data(
    state: ProcessingState,
    items: &[PackageItem],
    risk_reasons: &[String],
    risk_score: u8,
    risk_level: RiskLevel,
) -> PreflightSummaryData {
    let summary_notes = build_summary_notes(&state);
    // The warnings list always mirrors the risk reasons; the previous
    // `is_empty()` guard on a freshly created Vec was a no-op and was removed.
    let summary_warnings = risk_reasons.to_vec();
    PreflightSummaryData {
        packages: state.packages,
        package_count: items.len(),
        aur_count: state.aur_count,
        download_bytes: state.total_download_bytes,
        install_delta_bytes: state.total_install_delta_bytes,
        risk_score,
        risk_level,
        risk_reasons: risk_reasons.to_vec(),
        major_bump_packages: state.major_bump_packages,
        core_system_updates: state.core_system_updates,
        pacnew_candidates: 0,
        pacsave_candidates: 0,
        config_warning_packages: Vec::new(),
        service_restart_units: Vec::new(),
        summary_warnings,
        summary_notes,
    }
}
/// What: Build header chips from extracted state values and risk metrics.
///
/// Inputs:
/// - `package_count`: Number of packages.
/// - `download_bytes`: Total download size in bytes.
/// - `install_delta_bytes`: Total install size delta in bytes.
/// - `aur_count`: Number of AUR packages.
/// - `risk_score`: Calculated risk score.
/// - `risk_level`: Calculated risk level.
///
/// Output: [`PreflightHeaderChips`] structure.
///
/// Details: Constructs the header chip metrics.
const fn build_header_chips(
    package_count: usize,
    download_bytes: u64,
    install_delta_bytes: i64,
    aur_count: usize,
    risk_score: u8,
    risk_level: RiskLevel,
) -> PreflightHeaderChips {
    // Pure field-for-field construction; `const fn` so it can be evaluated at
    // compile time when all inputs are constants.
    PreflightHeaderChips {
        package_count,
        download_bytes,
        install_delta_bytes,
        aur_count,
        risk_score,
        risk_level,
    }
}
/// What: Compute preflight summary data using a custom command runner.
///
/// Inputs:
/// - `items`: Packages to analyse.
/// - `action`: Install vs. remove context.
/// - `runner`: Command execution abstraction (mockable).
///
/// Output:
/// - [`PreflightSummaryOutcome`] with fully materialised Summary data and
///   header chip metrics.
///
/// Details:
/// - Fetches installed versions/sizes via `pacman` when possible.
/// - Applies the initial risk heuristic outlined in the specification.
/// - Gracefully degrades metrics when metadata is unavailable.
pub fn compute_preflight_summary_with_runner<R: CommandRunner>(
    items: &[PackageItem],
    action: PreflightAction,
    runner: &R,
) -> PreflightSummaryOutcome {
    let _span = tracing::info_span!(
        "compute_preflight_summary",
        stage = "summary",
        item_count = items.len()
    )
    .entered();
    let started = std::time::Instant::now();
    // Accumulate per-package data first, then derive dependents and risk.
    let mut state = ProcessingState::new(items.len());
    process_all_packages(items, action, runner, &mut state);
    let (dependent_count, reverse_deps_report) = resolve_reverse_deps(items, action);
    let (risk_reasons, risk_score, risk_level) =
        calculate_risk_metrics(&state, 0, &[], action, dependent_count);
    // Header chips need copies of the totals before `state` is moved below.
    let header = build_header_chips(
        items.len(),
        state.total_download_bytes,
        state.total_install_delta_bytes,
        state.aur_count,
        risk_score,
        risk_level,
    );
    let summary = build_summary_data(state, items, &risk_reasons, risk_score, risk_level);
    let duration_ms = u64::try_from(started.elapsed().as_millis()).unwrap_or(u64::MAX);
    tracing::info!(
        stage = "summary",
        item_count = items.len(),
        duration_ms = duration_ms,
        "Preflight summary computation complete"
    );
    PreflightSummaryOutcome {
        summary,
        header,
        reverse_deps_report,
    }
}
#[cfg(all(test, unix))]
mod tests;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/preflight/metadata.rs | src/logic/preflight/metadata.rs | //! Package metadata fetching and parsing utilities.
//!
//! This module provides functions to fetch package metadata from pacman and
//! parse the output into structured data.
use super::command::{CommandError, CommandRunner};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
/// What: Extract remote download/install sizes for an official package via
/// `pacman -Si`.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `repo`: Repository name (e.g., `"core"`).
/// - `name`: Package identifier.
/// - `expected_version`: Version string to cross-check.
///
/// Output:
/// - `Ok(OfficialMetadata)` containing optional size metrics.
/// - `Err(CommandError)` when the command fails.
///
/// Details:
/// - Performs best-effort verification of the returned version, logging
///   mismatches for diagnostics.
pub(super) fn fetch_official_metadata<R: CommandRunner>(
    runner: &R,
    repo: &str,
    name: &str,
    expected_version: &str,
) -> Result<OfficialMetadata, CommandError> {
    let spec = format!("{repo}/{name}");
    let raw_output = runner.run("pacman", &["-Si", &spec])?;
    let fields = parse_pacman_key_values(&raw_output);
    // The version check is advisory only: a mismatch is logged, never an error.
    if let Some(version) = fields.get("Version")
        && version.trim() != expected_version
    {
        tracing::debug!(
            "Preflight summary: pacman -Si reported version {} for {} (expected {})",
            version.trim(),
            spec,
            expected_version
        );
    }
    // Both size fields are optional; parsing failures degrade to None.
    let size_of = |key: &str| fields.get(key).and_then(|v| parse_size_to_bytes(v));
    Ok(OfficialMetadata {
        download_size: size_of("Download Size"),
        install_size: size_of("Installed Size"),
    })
}
/// What: Retrieve installed package version via `pacman -Q`.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `name`: Package identifier.
///
/// Output:
/// - `Ok(String)` containing the installed version.
/// - `Err(CommandError)` when fetch fails.
///
/// Details:
/// - Output shape is `<name> <version>`: the first token is discarded and the
///   last remaining token is returned.
pub(super) fn fetch_installed_version<R: CommandRunner>(
    runner: &R,
    name: &str,
) -> Result<String, CommandError> {
    let output = runner.run("pacman", &["-Q", name])?;
    // Skip the package-name column, then keep the final token as the version.
    match output.split_whitespace().skip(1).last() {
        Some(version) => Ok(version.to_string()),
        None => Err(CommandError::Parse {
            program: "pacman -Q".to_string(),
            field: "version".to_string(),
        }),
    }
}
/// What: Retrieve the installed size of a package via `pacman -Qi`.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `name`: Package identifier.
///
/// Output:
/// - `Ok(u64)` representing bytes installed.
/// - `Err(CommandError)` when parsing fails.
///
/// Details:
/// - Parses the `Installed Size` field using [`parse_size_to_bytes`].
pub(super) fn fetch_installed_size<R: CommandRunner>(
    runner: &R,
    name: &str,
) -> Result<u64, CommandError> {
    let output = runner.run("pacman", &["-Qi", name])?;
    // Parse the key/value listing and extract the human-readable size field.
    let fields = parse_pacman_key_values(&output);
    match fields.get("Installed Size").and_then(|v| parse_size_to_bytes(v)) {
        Some(bytes) => Ok(bytes),
        None => Err(CommandError::Parse {
            program: "pacman -Qi".to_string(),
            field: "Installed Size".to_string(),
        }),
    }
}
/// What: Metadata extracted from `pacman -Si` to inform download/install
/// calculations.
///
/// Inputs: Populated by [`fetch_official_metadata`].
///
/// Output: Holds optional download and install sizes in bytes.
///
/// Details:
/// - Values are `None` when the upstream output omits a field.
/// - The `Default` derive yields both fields as `None`.
#[derive(Default, Debug)]
pub struct OfficialMetadata {
    /// Download size in bytes, if available.
    pub(crate) download_size: Option<u64>,
    /// Install size in bytes, if available.
    pub(crate) install_size: Option<u64>,
}
/// What: Transform pacman key-value output into a `HashMap`.
///
/// Inputs:
/// - `output`: Raw stdout from `pacman` invocations.
///
/// Output:
/// - `HashMap<String, String>` mapping field names to raw string values.
///
/// Details:
/// - Continuation lines (prefixed with a space) are appended to the previous
/// key's value.
pub(super) fn parse_pacman_key_values(output: &str) -> HashMap<String, String> {
let mut map = HashMap::new();
let mut last_key: Option<String> = None;
for line in output.lines() {
if line.trim().is_empty() {
continue;
}
if let Some((key, value)) = line.split_once(':') {
let key = key.trim().to_string();
let val = value.trim().to_string();
map.insert(key.clone(), val);
last_key = Some(key);
} else if line.starts_with(' ')
&& let Some(key) = &last_key
{
map.entry(key.clone())
.and_modify(|existing| {
if !existing.ends_with(' ') {
existing.push(' ');
}
existing.push_str(line.trim());
})
.or_insert_with(|| line.trim().to_string());
}
}
map
}
/// What: Convert human-readable pacman size strings to bytes.
///
/// Inputs:
/// - `raw`: String such as `"1.5 MiB"` or `"512 KiB"`.
///
/// Output:
/// - `Some(u64)` with byte representation on success.
/// - `None` when parsing fails.
///
/// Details:
/// - Supports B, KiB, MiB, GiB, and TiB units; any other unit (or a missing
///   unit) is treated as plain bytes.
pub(super) fn parse_size_to_bytes(raw: &str) -> Option<u64> {
    // Approximation of u64::MAX in f64; larger results cannot be represented
    // and are rejected below.
    const MAX_U64_AS_F64: f64 = 18_446_744_073_709_551_615.0;
    let mut tokens = raw.split_whitespace();
    // Strip thousands separators before parsing the numeric part.
    let numeric = tokens.next()?.replace(',', "");
    let amount = numeric.parse::<f64>().ok()?;
    // Binary prefixes scale by powers of 1024 (exact in f64 up to TiB).
    let exponent = match tokens.next().unwrap_or("B") {
        "KiB" => 1,
        "MiB" => 2,
        "GiB" => 3,
        "TiB" => 4,
        _ => 0,
    };
    let scaled = amount * 1024f64.powi(exponent);
    // Reject negative or overflowing results before converting.
    if scaled < 0.0 || scaled > MAX_U64_AS_F64 {
        return None;
    }
    #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
    Some(scaled.round() as u64)
}
/// What: Find AUR package file in pacman cache or AUR helper caches.
///
/// Inputs:
/// - `name`: Package name to search for.
/// - `version`: Package version (optional, for matching).
///
/// Output:
/// - `Some(PathBuf)` pointing to the package file if found.
/// - `None` if no package file is found in any cache.
///
/// Details:
/// - Checks pacman cache (`/var/cache/pacman/pkg/`).
/// - Checks AUR helper caches (paru/yay build directories).
/// - Matches files named `<name>-…​.pkg.tar.{zst,xz}`; requiring the `-`
///   separator after the name prevents `foo` from matching `foo-bin` files.
fn find_aur_package_file(name: &str, version: Option<&str>) -> Option<PathBuf> {
    // Built packages are named `<name>-<pkgver>-<pkgrel>-<arch>.pkg.tar.<ext>`,
    // so a legitimate match always has `-` directly after the package name.
    let prefix = format!("{name}-");
    let matches_pkg = |file_name: &str| -> bool {
        if !file_name.starts_with(&prefix) {
            return false;
        }
        if !(file_name.ends_with(".pkg.tar.zst") || file_name.ends_with(".pkg.tar.xz")) {
            return false;
        }
        // When a version is given, require it to appear in the file name.
        version.is_none_or(|ver| file_name.contains(ver))
    };
    // Try pacman cache first (fastest, most reliable).
    if let Ok(pacman_cache) = Path::new("/var/cache/pacman/pkg").read_dir() {
        for entry in pacman_cache.flatten() {
            let path = entry.path();
            if path
                .file_name()
                .and_then(|n| n.to_str())
                .is_some_and(|n| matches_pkg(n))
            {
                return Some(path);
            }
        }
    }
    // Fall back to AUR helper build caches under $HOME.
    if let Ok(home) = std::env::var("HOME") {
        let cache_paths = [
            format!("{home}/.cache/paru/clone/{name}"),
            format!("{home}/.cache/yay/{name}"),
        ];
        for cache_base in cache_paths {
            let Ok(entries) = fs::read_dir(Path::new(&cache_base)) else {
                continue;
            };
            for entry in entries.flatten() {
                let path = entry.path();
                if path.is_file()
                    && path
                        .file_name()
                        .and_then(|n| n.to_str())
                        .is_some_and(|n| matches_pkg(n))
                {
                    return Some(path);
                }
            }
        }
    }
    None
}
/// What: Extract download and install sizes from an AUR package file.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `pkg_path`: Path to the package file.
///
/// Output:
/// - `OfficialMetadata` with `download_size` (file size) and `install_size` (from package metadata).
///
/// Details:
/// - Download size is the actual file size on disk.
/// - Install size is extracted via `pacman -Qp` command.
/// - Errors are handled gracefully by returning None values.
fn extract_aur_package_sizes<R: CommandRunner>(runner: &R, pkg_path: &Path) -> OfficialMetadata {
    // The archive size on disk doubles as the "download" size.
    let download_size = fs::metadata(pkg_path).ok().map(|meta| meta.len());
    // Ask pacman to read the archive metadata for the installed size; a
    // non-UTF-8 path or any command/parse failure degrades to None.
    let install_size = match pkg_path.to_str() {
        Some(pkg_str) => runner
            .run("pacman", &["-Qp", pkg_str])
            .ok()
            .and_then(|output| {
                let fields = parse_pacman_key_values(&output);
                fields
                    .get("Installed Size")
                    .and_then(|raw| parse_size_to_bytes(raw))
            }),
        None => None,
    };
    OfficialMetadata {
        download_size,
        install_size,
    }
}
/// What: Fetch metadata for AUR packages by checking local caches.
///
/// Inputs:
/// - `runner`: Command executor.
/// - `name`: Package name.
/// - `version`: Package version (optional, for matching).
///
/// Output:
/// - `OfficialMetadata` with sizes if package file found in cache.
///
/// Details:
/// - Checks pacman cache and AUR helper caches for built package files.
/// - Extracts sizes from found package files.
/// - Returns None values if package file is not found (graceful degradation).
/// - Errors are handled gracefully by returning None values.
pub(super) fn fetch_aur_metadata<R: CommandRunner>(
runner: &R,
name: &str,
version: Option<&str>,
) -> OfficialMetadata {
find_aur_package_file(name, version).map_or(
// Package file not found in cache - return None values (graceful degradation)
OfficialMetadata {
download_size: None,
install_size: None,
},
|pkg_path| extract_aur_package_sizes(runner, &pkg_path),
)
}
#[cfg(not(windows))]
#[cfg(test)]
mod tests {
    use super::*;
    use crate::logic::preflight::command::{CommandError, CommandRunner};
    use std::collections::HashMap;
    use std::os::unix::process::ExitStatusExt;
    use std::sync::Mutex;
    // Scripted command lookup: (program, argv) maps to one canned result.
    type MockCommandKey = (String, Vec<String>);
    type MockCommandResult = Result<String, CommandError>;
    type MockResponseMap = HashMap<MockCommandKey, MockCommandResult>;
    // Test double for `CommandRunner` that replays pre-registered responses.
    #[derive(Default)]
    struct MockRunner {
        // Mutex gives `run(&self, ...)` interior mutability to consume entries.
        responses: Mutex<MockResponseMap>,
    }
    impl MockRunner {
        // Build a runner seeded with the given command-to-result map.
        fn with(responses: MockResponseMap) -> Self {
            Self {
                responses: Mutex::new(responses),
            }
        }
    }
    impl CommandRunner for MockRunner {
        // Look up and consume the scripted response for (program, args);
        // any unregistered command fails with a generic exit status of 1.
        fn run(&self, program: &str, args: &[&str]) -> Result<String, CommandError> {
            let key = (
                program.to_string(),
                args.iter().map(ToString::to_string).collect::<Vec<_>>(),
            );
            let mut guard = self.responses.lock().expect("poisoned responses mutex");
            guard.remove(&key).unwrap_or_else(|| {
                Err(CommandError::Failed {
                    program: program.to_string(),
                    args: args.iter().map(ToString::to_string).collect(),
                    status: std::process::ExitStatus::from_raw(1),
                })
            })
        }
    }
    #[test]
    /// What: Ensure `parse_size_to_bytes` correctly converts various size formats.
    ///
    /// Inputs:
    /// - Various size strings with different units (B, KiB, MiB, GiB, TiB).
    ///
    /// Output:
    /// - Returns correct byte counts for valid inputs.
    ///
    /// Details:
    /// - Tests edge cases like decimal values and comma separators.
    fn test_parse_size_to_bytes() {
        assert_eq!(parse_size_to_bytes("10 B"), Some(10));
        assert_eq!(parse_size_to_bytes("1 KiB"), Some(1024));
        assert_eq!(parse_size_to_bytes("2.5 MiB"), Some(2_621_440));
        assert_eq!(parse_size_to_bytes("1.5 GiB"), Some(1_610_612_736));
        assert_eq!(parse_size_to_bytes("1,234.5 MiB"), Some(1_294_467_072));
        assert_eq!(parse_size_to_bytes("invalid"), None);
        assert_eq!(parse_size_to_bytes(""), None);
    }
    #[test]
    /// What: Ensure AUR metadata fetching returns None when package file is not found.
    ///
    /// Inputs:
    /// - AUR package name that doesn't exist in any cache.
    ///
    /// Output:
    /// - Returns `Ok(OfficialMetadata)` with `None` values for both sizes.
    ///
    /// Details:
    /// - Tests graceful degradation when package file is not available.
    fn test_fetch_aur_metadata_not_found() {
        // No responses registered: the mock is never consulted on this path.
        let runner = MockRunner::default();
        let meta = fetch_aur_metadata(&runner, "nonexistent-package", Some("1.0.0"));
        assert_eq!(meta.download_size, None);
        assert_eq!(meta.install_size, None);
    }
    #[test]
    /// What: Ensure AUR metadata fetching extracts sizes from package file when found.
    ///
    /// Inputs:
    /// - Mock package file path and `pacman -Qp` output with install size.
    ///
    /// Output:
    /// - Returns `Ok(OfficialMetadata)` with extracted sizes.
    ///
    /// Details:
    /// - Tests size extraction from package metadata via `pacman -Qp`.
    fn test_extract_aur_package_sizes() {
        // Create a temporary file for testing
        let temp_dir = std::env::temp_dir().join(format!("pacsea_test_{}", std::process::id()));
        std::fs::create_dir_all(&temp_dir).expect("failed to create test temp directory");
        let pkg_path = temp_dir.join("test-1.0.0-1-x86_64.pkg.tar.zst");
        std::fs::write(&pkg_path, b"fake package data").expect("failed to write test package file");
        // Set up mock response using the actual temp file path
        let mut responses = HashMap::new();
        responses.insert(
            (
                "pacman".into(),
                vec!["-Qp".into(), pkg_path.to_string_lossy().to_string()],
            ),
            Ok("Name : test\nInstalled Size : 5.00 MiB\n".to_string()),
        );
        let runner = MockRunner::with(responses);
        let meta = extract_aur_package_sizes(&runner, &pkg_path);
        // Download size should be the file size (17 bytes in this case)
        assert_eq!(meta.download_size, Some(17));
        // Install size should be parsed from pacman -Qp output
        assert_eq!(meta.install_size, Some(5 * 1024 * 1024));
        // Cleanup
        let _ = std::fs::remove_dir_all(&temp_dir);
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/db_sync.rs | src/logic/files/db_sync.rs | //! Database synchronization functions for pacman file database.
use std::path::Path;
use std::process::Command;
use std::time::SystemTime;
/// What: Retrieve the most recent modification timestamp of the pacman sync database.
///
/// Inputs:
/// - (none): Reads metadata from `/var/lib/pacman/sync` on the local filesystem.
///
/// Output:
/// - Returns the latest `SystemTime` seen among `.files` databases, or `None` if unavailable.
///
/// Details:
/// - Inspects only files ending with the `.files` extension to match pacman's file list databases.
pub fn get_file_db_sync_timestamp() -> Option<SystemTime> {
    // Pacman keeps its sync databases in /var/lib/pacman/sync; the file-list
    // databases carry a `.files` extension (e.g. core.files, extra.files).
    let sync_dir = Path::new("/var/lib/pacman/sync");
    if !sync_dir.exists() {
        tracing::debug!("Pacman sync directory does not exist");
        return None;
    }
    let entries = std::fs::read_dir(sync_dir).ok()?;
    // Keep the newest modification time across all readable .files databases.
    entries
        .flatten()
        .map(|entry| entry.path())
        .filter(|path| path.extension().and_then(|s| s.to_str()) == Some("files"))
        .filter_map(|path| std::fs::metadata(&path).ok())
        .filter_map(|metadata| metadata.modified().ok())
        .max()
}
/// What: Summarize sync database staleness with age, formatted date, and UI color bucket.
///
/// Inputs:
/// - (none): Uses `get_file_db_sync_timestamp` to determine the last sync.
///
/// Output:
/// - Returns `(age_days, formatted_date, color_category)` or `None` when the timestamp cannot be read.
///
/// Details:
/// - Buckets age into three categories: green (<7 days), yellow (<30 days), red (>=30 days).
#[must_use]
pub fn get_file_db_sync_info() -> Option<(u64, String, u8)> {
    let sync_time = get_file_db_sync_timestamp()?;
    let age = SystemTime::now().duration_since(sync_time).ok()?;
    // Whole days elapsed since the last sync.
    let age_days = age.as_secs() / 86400;
    // Render the sync time as a date string for display.
    let secs_since_epoch = sync_time
        .duration_since(SystemTime::UNIX_EPOCH)
        .ok()
        .and_then(|d| i64::try_from(d.as_secs()).ok());
    let date_str = crate::util::ts_to_date(secs_since_epoch);
    // Bucket for UI colouring: 0 = green (<7d), 1 = yellow (<30d), 2 = red.
    let color_category = match age_days {
        0..=6 => 0,
        7..=29 => 1,
        _ => 2,
    };
    Some((age_days, date_str, color_category))
}
/// What: Check if the pacman file database is stale and needs syncing.
///
/// Inputs:
/// - `max_age_days`: Maximum age in days before considering the database stale.
///
/// Output:
/// - Returns `Some(true)` if stale, `Some(false)` if fresh, `None` if timestamp cannot be determined.
///
/// Details:
/// - Uses `get_file_db_sync_timestamp()` to check the last sync time.
#[must_use]
pub fn is_file_db_stale(max_age_days: u64) -> Option<bool> {
    let last_sync = get_file_db_sync_timestamp()?;
    let elapsed = SystemTime::now().duration_since(last_sync).ok()?;
    // Compare whole days elapsed against the staleness threshold.
    Some(elapsed.as_secs() / 86400 >= max_age_days)
}
/// What: Attempt a best-effort synchronization of the pacman file database.
///
/// Inputs:
/// - `force`: If true, sync regardless of timestamp. If false, only sync if stale.
/// - `max_age_days`: Maximum age in days before considering the database stale (default: 7).
///
/// Output:
/// - Returns `Ok(true)` if sync was performed, `Ok(false)` if sync was skipped (fresh DB), `Err` if sync failed.
///
/// # Errors
/// - Returns `Err` when `pacman -Fy` command execution fails (I/O error)
/// - Returns `Err` when `pacman -Fy` exits with non-zero status
///
/// Details:
/// - Checks timestamp first if `force` is false, only syncing when stale.
/// - Intended to reduce false negatives when later querying remote file lists.
pub fn ensure_file_db_synced(force: bool, max_age_days: u64) -> Result<bool, String> {
    // Decide whether a sync is warranted before spawning pacman.
    if force {
        tracing::debug!("Force syncing pacman file database...");
    } else {
        match is_file_db_stale(max_age_days) {
            Some(false) => {
                tracing::debug!("File database is fresh, skipping sync");
                return Ok(false);
            }
            Some(true) => {
                tracing::debug!(
                    "File database is stale (older than {} days), syncing...",
                    max_age_days
                );
            }
            None => {
                // Timestamp unreadable: err on the side of syncing.
                tracing::debug!("Cannot determine file database timestamp, attempting sync...");
            }
        }
    }
    // LC_ALL/LANG pinned to C so pacman's output stays locale-independent.
    let output = Command::new("pacman")
        .args(["-Fy"])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .output()
        .map_err(|e| format!("Failed to execute pacman -Fy: {e}"))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        let error_msg = format!("File database sync failed: {stderr}");
        tracing::warn!("{}", error_msg);
        return Err(error_msg);
    }
    tracing::debug!("File database sync successful");
    Ok(true)
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/tests.rs | src/logic/files/tests.rs | //! Tests for file resolution and parsing functions.
use super::pkgbuild_parse::{
parse_backup_array_content, parse_backup_from_pkgbuild, parse_backup_from_srcinfo,
};
use super::resolution::{resolve_install_files, resolve_remove_files};
use crate::state::modal::FileChangeType;
use crate::state::types::Source;
use std::fs;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use tempfile::tempdir;
// RAII-style helper that remembers the PATH value in effect before a test
// modified it, so it can be restored when the guard is dropped.
struct PathGuard {
    // Value of PATH before this guard modified it; None when PATH was unset.
    original: Option<String>,
}
impl PathGuard {
    /// Prepend `dir` to PATH and return a guard that restores the previous
    /// value (or a sane default) when dropped.
    fn push(dir: &std::path::Path) -> Self {
        let original = std::env::var("PATH").ok();
        // Fall back to a default system PATH when the variable is unset or empty.
        let base_path = match original.as_deref() {
            Some(p) if !p.is_empty() => p,
            _ => "/usr/bin:/bin:/usr/local/bin",
        };
        let combined = format!("{}:{}", dir.display(), base_path);
        // `set_var` mutates process-global state, hence the unsafe block.
        unsafe {
            std::env::set_var("PATH", &combined);
        }
        Self { original }
    }
}
impl Drop for PathGuard {
fn drop(&mut self) {
if let Some(ref orig) = self.original {
// Only restore if the original PATH was valid (not empty)
if orig.is_empty() {
// If original was empty, restore to a default system PATH
unsafe {
std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin");
}
} else {
unsafe {
std::env::set_var("PATH", orig);
}
}
} else {
// If PATH was missing, set a default system PATH
unsafe {
std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin");
}
}
}
}
/// Create an executable (mode 0755) stub named `name` in `dir` containing `body`.
fn write_executable(dir: &std::path::Path, name: &str, body: &str) {
    let path = dir.join(name);
    // Write the full contents in one shot.
    fs::write(&path, body).expect("write stub");
    // Mark the stub executable so PATH lookup can invoke it.
    fs::set_permissions(&path, fs::Permissions::from_mode(0o755)).expect("chmod stub");
}
#[test]
/// A single-line `backup=(...)` array yields every quoted entry.
fn test_parse_backup_from_pkgbuild_single_line() {
    let pkgbuild = r"
pkgname=test
pkgver=1.0
backup=('/etc/config' '/etc/other.conf')
";
    let backup_files = parse_backup_from_pkgbuild(pkgbuild);
    // Both quoted paths must be extracted, order-independent.
    assert_eq!(backup_files.len(), 2);
    assert!(backup_files.contains(&"/etc/config".to_string()));
    assert!(backup_files.contains(&"/etc/other.conf".to_string()));
}
#[test]
/// A multi-line `backup=(...)` array is parsed up to the closing paren.
fn test_parse_backup_from_pkgbuild_multi_line() {
    let pkgbuild = r"
pkgname=test
pkgver=1.0
backup=(
'/etc/config'
'/etc/other.conf'
'/etc/more.conf'
)
";
    let backup_files = parse_backup_from_pkgbuild(pkgbuild);
    // All three entries spread over separate lines must be collected.
    assert_eq!(backup_files.len(), 3);
    assert!(backup_files.contains(&"/etc/config".to_string()));
    assert!(backup_files.contains(&"/etc/other.conf".to_string()));
    assert!(backup_files.contains(&"/etc/more.conf".to_string()));
}
#[test]
/// `.SRCINFO`-style `backup = <path>` lines are collected one per line.
fn test_parse_backup_from_srcinfo() {
    let srcinfo = r"
pkgbase = test-package
pkgname = test-package
pkgver = 1.0.0
backup = /etc/config
backup = /etc/other.conf
backup = /etc/more.conf
";
    let backup_files = parse_backup_from_srcinfo(srcinfo);
    assert_eq!(backup_files.len(), 3);
    assert!(backup_files.contains(&"/etc/config".to_string()));
    assert!(backup_files.contains(&"/etc/other.conf".to_string()));
    assert!(backup_files.contains(&"/etc/more.conf".to_string()));
}
#[test]
/// The array-content helper appends each quoted path into the output vector.
fn test_parse_backup_array_content() {
    let content = "'/etc/config' '/etc/other.conf'";
    let mut backup_files = Vec::new();
    parse_backup_array_content(content, &mut backup_files);
    assert_eq!(backup_files.len(), 2);
    assert!(backup_files.contains(&"/etc/config".to_string()));
    assert!(backup_files.contains(&"/etc/other.conf".to_string()));
}
#[test]
/// What: Resolve install file information using stubbed pacman output while verifying pacnew detection.
///
/// Inputs:
/// - Stub `pacman` script returning canned `-Fl`, `-Ql`, and `-Qii` outputs for package `pkg`.
///
/// Output:
/// - `resolve_install_files` reports one changed config file and one new regular file with pacnew prediction.
///
/// Details:
/// - Uses a temporary PATH override and the global test mutex to isolate command stubbing from other tests.
fn resolve_install_files_marks_changed_and_new_entries() {
    let _test_guard = crate::global_test_mutex_lock();
    // Ensure PATH is in a clean state before modifying it
    if std::env::var("PATH").is_err() {
        unsafe { std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin") };
    }
    let dir = tempdir().expect("tempdir");
    let _path_guard = PathGuard::push(dir.path());
    // Small delay to ensure PATH is propagated to child processes
    std::thread::sleep(std::time::Duration::from_millis(10));
    // Stub pacman: `-Fl` lists the remote payload (including a directory entry
    // that resolution must skip), `-Ql` reports the one file already installed,
    // and `-Qii` flags `/etc/app.conf` as a backup file (pacnew candidate).
    write_executable(
        dir.path(),
        "pacman",
        r#"#!/bin/sh
if [ "$1" = "--version" ]; then
exit 0
fi
if [ "$1" = "-Fl" ]; then
cat <<'EOF'
pkg /etc/app.conf
pkg /usr/share/doc/
pkg /usr/bin/newtool
EOF
exit 0
fi
if [ "$1" = "-Ql" ]; then
cat <<'EOF'
pkg /etc/app.conf
EOF
exit 0
fi
if [ "$1" = "-Qii" ]; then
cat <<'EOF'
Backup Files : /etc/app.conf
EOF
exit 0
fi
if [ "$1" = "-Fy" ]; then
exit 0
fi
exit 1
"#,
    );
    let source = Source::Official {
        repo: "core".into(),
        arch: "x86_64".into(),
    };
    let info = resolve_install_files("pkg", &source).expect("install resolution");
    // The directory entry is skipped, leaving one changed + one new file.
    assert_eq!(info.total_count, 2);
    assert_eq!(info.new_count, 1);
    assert_eq!(info.changed_count, 1);
    assert_eq!(info.config_count, 1);
    assert_eq!(info.pacnew_candidates, 1);
    let mut paths: Vec<&str> = info.files.iter().map(|f| f.path.as_str()).collect();
    paths.sort_unstable();
    assert_eq!(paths, vec!["/etc/app.conf", "/usr/bin/newtool"]);
    let config_entry = info
        .files
        .iter()
        .find(|f| f.path == "/etc/app.conf")
        .expect("config entry");
    // Installed + in backup array + under /etc -> changed and pacnew-predicted.
    assert!(matches!(config_entry.change_type, FileChangeType::Changed));
    assert!(config_entry.predicted_pacnew);
    assert!(!config_entry.predicted_pacsave);
    let new_entry = info
        .files
        .iter()
        .find(|f| f.path == "/usr/bin/newtool")
        .expect("new entry");
    assert!(matches!(new_entry.change_type, FileChangeType::New));
    assert!(!new_entry.predicted_pacnew);
}
#[test]
/// What: Resolve removal file information with stubbed pacman output to confirm pacsave predictions.
///
/// Inputs:
/// - Stub `pacman` script returning canned `-Ql` and `-Qii` outputs listing a config and regular file.
///
/// Output:
/// - `resolve_remove_files` reports both files as removed while flagging the config as a pacsave candidate.
///
/// Details:
/// - Shares the PATH guard helper to ensure the stubbed command remains isolated per test.
fn resolve_remove_files_marks_pacsave_candidates() {
    let _test_guard = crate::global_test_mutex_lock();
    // Ensure PATH is in a clean state before modifying it
    if std::env::var("PATH").is_err() {
        unsafe { std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin") };
    }
    let dir = tempdir().expect("tempdir");
    let _path_guard = PathGuard::push(dir.path());
    // Small delay to ensure PATH is propagated to child processes
    std::thread::sleep(std::time::Duration::from_millis(10));
    // Stub pacman: `-Ql` lists installed files; `-Qii` marks the config as a
    // backup file so removal predicts a `.pacsave`.
    write_executable(
        dir.path(),
        "pacman",
        r#"#!/bin/sh
if [ "$1" = "--version" ]; then
exit 0
fi
if [ "$1" = "-Ql" ]; then
cat <<'EOF'
pkg /etc/app.conf
pkg /usr/bin/newtool
EOF
exit 0
fi
if [ "$1" = "-Qii" ]; then
cat <<'EOF'
Backup Files : /etc/app.conf
EOF
exit 0
fi
if [ "$1" = "-Fy" ] || [ "$1" = "-Fl" ]; then
exit 0
fi
exit 1
"#,
    );
    let info = resolve_remove_files("pkg").expect("remove resolution");
    assert_eq!(info.removed_count, 2);
    assert_eq!(info.config_count, 1);
    assert_eq!(info.pacsave_candidates, 1);
    let config_entry = info
        .files
        .iter()
        .find(|f| f.path == "/etc/app.conf")
        .expect("config entry");
    // Backup-listed config under /etc -> pacsave predicted on removal.
    assert!(config_entry.is_config);
    assert!(config_entry.predicted_pacsave);
    assert!(!config_entry.predicted_pacnew);
    let regular_entry = info
        .files
        .iter()
        .find(|f| f.path == "/usr/bin/newtool")
        .expect("regular entry");
    assert!(!regular_entry.is_config);
    assert!(!regular_entry.predicted_pacsave);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/pkgbuild_cache.rs | src/logic/files/pkgbuild_cache.rs | //! Disk-persisted LRU cache for parsed PKGBUILD data.
use crate::logic::files::pkgbuild_parse::{
parse_backup_from_pkgbuild, parse_install_paths_from_pkgbuild,
};
use crate::state::Source;
use lru::LruCache;
use serde::{Deserialize, Serialize};
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::num::NonZeroUsize;
use std::path::PathBuf;
#[cfg(test)]
use std::sync::Arc;
use std::sync::{Mutex, OnceLock};
#[cfg(test)]
use std::thread::ThreadId;
/// Maximum number of PKGBUILD entries retained in the LRU cache; the oldest
/// entries are evicted beyond this bound.
const CACHE_CAPACITY: usize = 200;
/// Environment variable that overrides the default PKGBUILD cache file path.
const CACHE_PATH_ENV: &str = "PACSEA_PKGBUILD_CACHE_PATH";
/// What: Origin of a PKGBUILD file.
///
/// Inputs: Derived from a package [`Source`] via `From`, or chosen directly by callers.
///
/// Output: Enum indicating where the PKGBUILD came from.
///
/// Details: Included in the cache key, so AUR and official PKGBUILDs of the
/// same name/version are cached independently.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum PkgbuildSourceKind {
    /// PKGBUILD came from AUR.
    Aur,
    /// PKGBUILD came from official repositories.
    Official,
    /// Source could not be determined.
    Unknown,
}
impl From<&Source> for PkgbuildSourceKind {
    /// Classify a package [`Source`] as the matching PKGBUILD origin kind.
    ///
    /// `Unknown` is never produced here; this mapping only yields `Aur` or
    /// `Official`.
    fn from(src: &Source) -> Self {
        match src {
            Source::Aur => Self::Aur,
            Source::Official { .. } => Self::Official,
        }
    }
}
/// What: Cached result of parsing a single PKGBUILD.
///
/// Inputs: Produced by `parse_pkgbuild_cached` from raw PKGBUILD text.
///
/// Output: Structured PKGBUILD metadata, serialized to disk as part of the cache.
///
/// Details: `pkgbuild_signature` is a hash of the raw text and is used to
/// detect stale entries even when name/version are unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PkgbuildParseEntry {
    /// Package name.
    pub name: String,
    /// Package version (`"unknown"` when the caller supplied none).
    pub version: String,
    /// Source kind (AUR, Official, or Unknown).
    pub source: PkgbuildSourceKind,
    /// Hash of the raw PKGBUILD text used for staleness detection.
    pub pkgbuild_signature: u64,
    /// List of backup files specified in the PKGBUILD.
    pub backup_files: Vec<String>,
    /// List of install paths specified in the PKGBUILD.
    pub install_paths: Vec<String>,
}
/// What: On-disk serialization envelope for PKGBUILD cache entries.
///
/// Inputs: Loaded from and written to the cache JSON file.
///
/// Output: Serialized cache data.
///
/// Details: Entries are written most-recently-used first; loading re-inserts
/// them in reverse so the in-memory LRU order is reproduced.
#[derive(Debug, Serialize, Deserialize)]
struct PkgbuildCacheDisk {
    /// Cached PKGBUILD parse entries.
    entries: Vec<PkgbuildParseEntry>,
}
/// What: In-memory cache state for PKGBUILD entries.
///
/// Inputs: Initialized with a cache path via [`PkgbuildCacheState::new`].
///
/// Output: Manages the LRU cache and the dirty flag.
///
/// Details: Tracks cache state including the LRU cache, the on-disk file path,
/// and whether in-memory changes still need to be persisted.
#[derive(Debug)]
struct PkgbuildCacheState {
    /// LRU cache of PKGBUILD entries, keyed by `cache_key`.
    lru: LruCache<String, PkgbuildParseEntry>,
    /// Path to the cache file on disk.
    path: PathBuf,
    /// Whether the cache has been modified since the last successful flush.
    dirty: bool,
}
impl PkgbuildCacheState {
    /// What: Create a new PKGBUILD cache state.
    ///
    /// Inputs:
    /// - `path`: Path to the cache file on disk.
    ///
    /// Output: New cache state with an empty LRU cache.
    ///
    /// Details: `CACHE_CAPACITY` is a non-zero constant, so the fallback to a
    /// capacity of 1 only exists to avoid unwrapping a const expression.
    fn new(path: PathBuf) -> Self {
        Self {
            lru: LruCache::new(
                NonZeroUsize::new(CACHE_CAPACITY)
                    .unwrap_or_else(|| NonZeroUsize::new(1).expect("non-zero capacity")),
            ),
            path,
            dirty: false,
        }
    }

    /// What: Load cache entries from disk.
    ///
    /// Inputs: None (uses `self.path`).
    ///
    /// Output: Populates the LRU cache with entries from disk.
    ///
    /// Details: Reads the cache file and fills the in-memory cache. A missing
    /// file is expected on first run and is not logged; read or parse failures
    /// are logged and leave the cache empty (the next flush overwrites the file).
    fn load_from_disk(&mut self) {
        let raw = match fs::read_to_string(&self.path) {
            Ok(raw) => raw,
            Err(e) => {
                // NotFound is the normal first-run case; anything else is noteworthy.
                if e.kind() != std::io::ErrorKind::NotFound {
                    tracing::warn!(
                        path = %self.path.display(),
                        error = %e,
                        "[PKGBUILD cache] Failed to read cache file"
                    );
                }
                return;
            }
        };
        let parsed: PkgbuildCacheDisk = match serde_json::from_str(&raw) {
            Ok(cache) => cache,
            Err(e) => {
                tracing::warn!(
                    path = %self.path.display(),
                    error = %e,
                    "[PKGBUILD cache] Failed to parse cache file"
                );
                return;
            }
        };
        // Insert from least-recent to most-recent to preserve order when iterating.
        for entry in parsed.entries.into_iter().rev() {
            let key = cache_key(&entry.name, &entry.version, entry.source);
            let _ = self.lru.put(key, entry);
        }
        tracing::info!(
            path = %self.path.display(),
            count = self.lru.len(),
            "[PKGBUILD cache] Loaded cache entries"
        );
    }

    /// What: Write cache to disk if it has been modified.
    ///
    /// Inputs: None (uses self state).
    ///
    /// Output: Writes cache to disk if the dirty flag is set.
    ///
    /// Details: Serializes the cache entries and writes them to disk. The
    /// dirty flag is cleared only on a successful write, so a failed flush is
    /// retried on the next call.
    fn flush_if_dirty(&mut self) {
        if !self.dirty {
            return;
        }
        let payload = PkgbuildCacheDisk {
            // Iteration is most-recently-used first; `load_from_disk` reverses
            // on read to restore this order.
            entries: self.lru.iter().map(|(_, v)| v.clone()).collect(),
        };
        let Ok(serialized) = serde_json::to_string(&payload) else {
            tracing::warn!("[PKGBUILD cache] Failed to serialize cache payload");
            return;
        };
        if let Some(parent) = self.path.parent()
            && let Err(e) = fs::create_dir_all(parent)
        {
            tracing::warn!(
                path = %self.path.display(),
                error = %e,
                "[PKGBUILD cache] Failed to create parent directory"
            );
            return;
        }
        match fs::write(&self.path, serialized) {
            Ok(()) => {
                tracing::debug!(
                    path = %self.path.display(),
                    entries = self.lru.len(),
                    "[PKGBUILD cache] Persisted cache to disk"
                );
                self.dirty = false;
            }
            Err(e) => {
                tracing::warn!(
                    path = %self.path.display(),
                    error = %e,
                    "[PKGBUILD cache] Failed to write cache to disk"
                );
            }
        }
    }
}
/// What: Resolve the on-disk location of the PKGBUILD parse cache.
///
/// Inputs: None (reads the `PACSEA_PKGBUILD_CACHE_PATH` environment variable).
///
/// Output: Path to the cache file.
///
/// Details: An explicit environment override wins; otherwise the cache lives
/// alongside the other list files in the lists directory.
fn cache_path() -> PathBuf {
    std::env::var(CACHE_PATH_ENV).map_or_else(
        |_| crate::theme::lists_dir().join("pkgbuild_parse_cache.json"),
        PathBuf::from,
    )
}
/// What: Get the global cache state singleton.
///
/// Inputs: None.
///
/// Output: Reference to the global cache state mutex.
///
/// Details: Lazily initialized via `OnceLock` on first access; the on-disk
/// cache is loaded exactly once, before the mutex is published.
fn cache_state() -> &'static Mutex<PkgbuildCacheState> {
    static STATE: OnceLock<Mutex<PkgbuildCacheState>> = OnceLock::new();
    STATE.get_or_init(|| {
        let path = cache_path();
        let mut state = PkgbuildCacheState::new(path);
        state.load_from_disk();
        Mutex::new(state)
    })
}
/// What: Compute a stable 64-bit signature for PKGBUILD contents.
///
/// Inputs:
/// - `contents`: Raw PKGBUILD text.
///
/// Output: 64-bit hash used for cache invalidation.
///
/// Details: Hashes the full text with [`DefaultHasher`]. Signatures are only
/// compared against other signatures produced by this function within the
/// same process lifetime semantics, so the exact algorithm is irrelevant as
/// long as it is deterministic.
fn compute_signature(contents: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    Hash::hash(contents, &mut hasher);
    hasher.finish()
}
/// What: Build the LRU key identifying one parsed PKGBUILD.
///
/// Inputs:
/// - `name`: Package name.
/// - `version`: Package version.
/// - `source`: Origin kind, included so AUR and official entries never collide.
///
/// Output: Key string of the form `"<name>::<version>::<SourceKindDebug>"`.
fn cache_key(name: &str, version: &str, source: PkgbuildSourceKind) -> String {
    let mut key = String::with_capacity(name.len() + version.len() + 16);
    key.push_str(name);
    key.push_str("::");
    key.push_str(version);
    key.push_str("::");
    key.push_str(&format!("{source:?}"));
    key
}
#[cfg(test)]
/// Points inside cache operations where a registered test hook is invoked.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(in crate::logic::files) enum CacheTestHookPoint {
    /// Fired after the first cache lookup in `parse_pkgbuild_cached`, before
    /// the fresh parse and re-insertion (used to stage race conditions).
    AfterLookup,
}

#[cfg(test)]
/// Callback type invoked at a [`CacheTestHookPoint`].
pub(in crate::logic::files) type CacheTestHook = dyn Fn(CacheTestHookPoint) + Send + Sync + 'static;

#[cfg(test)]
/// A registered hook together with the thread it should fire on.
#[derive(Clone)]
struct CacheTestHookEntry {
    /// The callback to run.
    hook: Arc<CacheTestHook>,
    /// Only invocations from this thread trigger the hook.
    thread_id: ThreadId,
}

#[cfg(test)]
/// Global slot holding the currently registered test hook, if any.
fn cache_test_hook_slot() -> &'static Mutex<Option<CacheTestHookEntry>> {
    static HOOK: OnceLock<Mutex<Option<CacheTestHookEntry>>> = OnceLock::new();
    HOOK.get_or_init(|| Mutex::new(None))
}

#[cfg(test)]
/// What: Temporarily register a cache test hook for synchronization.
///
/// Inputs:
/// - `hook`: Callback executed when a cache hook point is reached.
/// - `thread_id`: Thread id to match before invoking the hook.
///
/// Output:
/// - Guard that clears the hook on drop to restore default behavior.
///
/// Details:
/// - Only compiled in tests; the hook is global and not re-entrant.
pub fn set_cache_test_hook(hook: Arc<CacheTestHook>, thread_id: ThreadId) -> CacheTestHookGuard {
    if let Ok(mut slot) = cache_test_hook_slot().lock() {
        *slot = Some(CacheTestHookEntry { hook, thread_id });
    }
    CacheTestHookGuard
}

#[cfg(test)]
/// What: RAII guard that removes the active cache test hook on drop.
///
/// Inputs: None.
///
/// Output:
/// - Clears any registered test hook when dropped.
///
/// Details:
/// - Scope the guard to the duration the hook should stay active.
pub struct CacheTestHookGuard;

#[cfg(test)]
impl Drop for CacheTestHookGuard {
    fn drop(&mut self) {
        if let Ok(mut slot) = cache_test_hook_slot().lock() {
            slot.take();
        }
    }
}

#[cfg(test)]
/// Invoke the registered hook when called from the thread it was armed for.
fn invoke_cache_test_hook(point: CacheTestHookPoint) {
    // Clone hook entry and release slot mutex before invoking so that other threads
    // can still check the hook slot while this thread is blocked inside the callback.
    let entry = cache_test_hook_slot()
        .lock()
        .ok()
        .and_then(|slot| slot.clone());
    if let Some(hook) = entry
        && std::thread::current().id() == hook.thread_id
    {
        (hook.hook)(point);
    }
}
/// What: Parse PKGBUILD data while leveraging a disk-backed LRU cache.
///
/// Inputs:
/// - `name`: Package name used for keying and install path inference.
/// - `version`: Package version (falls back to `"unknown"` when `None` or empty).
/// - `source`: Source kind for keying (Aur/Official/Unknown).
/// - `pkgbuild`: Raw PKGBUILD text to parse.
///
/// Output:
/// - Parsed entry containing backup files and install paths. On cache hit with matching
///   signature, returns the cached entry. On cache miss or signature mismatch, parses
///   fresh data, updates the cache, and returns the new entry.
///
/// Details:
/// - Uses a signature of the PKGBUILD text to detect staleness even when version is unchanged.
/// - The lock is not held across parsing; a second lookup afterwards arbitrates
///   races with concurrent callers (see inline comments).
/// - Cache is bounded to 200 entries and persists to disk via `flush_pkgbuild_cache()`.
pub fn parse_pkgbuild_cached(
    name: &str,
    version: Option<&str>,
    source: PkgbuildSourceKind,
    pkgbuild: &str,
) -> PkgbuildParseEntry {
    let normalized_version = version
        .filter(|v| !v.is_empty())
        .map_or_else(|| "unknown".to_string(), ToString::to_string);
    let signature = compute_signature(pkgbuild);
    let key = cache_key(name, &normalized_version, source);
    // First lookup: fast path for an exact hit; otherwise remember the stale
    // signature we observed so the second lookup can detect concurrent writers.
    let prior_signature = if let Ok(mut guard) = cache_state().lock()
        && let Some(entry) = guard.lru.get(&key)
    {
        if entry.pkgbuild_signature == signature {
            return entry.clone();
        }
        Some(entry.pkgbuild_signature)
    } else {
        None
    };
    #[cfg(test)]
    invoke_cache_test_hook(CacheTestHookPoint::AfterLookup);
    // Parse outside the lock so other threads are not blocked while we work.
    let parsed = PkgbuildParseEntry {
        name: name.to_string(),
        version: normalized_version,
        source,
        pkgbuild_signature: signature,
        backup_files: parse_backup_from_pkgbuild(pkgbuild),
        install_paths: parse_install_paths_from_pkgbuild(pkgbuild, name),
    };
    let mut guard = match cache_state().lock() {
        Ok(guard) => guard,
        Err(poisoned) => {
            tracing::warn!(
                "[PKGBUILD cache] Cache mutex poisoned; continuing with recovered state"
            );
            poisoned.into_inner()
        }
    };
    // Second lookup: decide whether our parse may be stored.
    if let Some(entry) = guard.lru.get(&key) {
        if entry.pkgbuild_signature == signature {
            // Another thread already stored an identical parse; reuse it.
            return entry.clone();
        }
        if prior_signature.is_some() && prior_signature == Some(entry.pkgbuild_signature) {
            // Entry is unchanged since our first lookup: replace the stale data.
            let _ = guard.lru.put(key, parsed.clone());
            guard.dirty = true;
            return parsed;
        }
        // A concurrent caller replaced the entry while we parsed; return their
        // (newer) result instead of clobbering it with ours.
        return entry.clone();
    }
    let _ = guard.lru.put(key, parsed.clone());
    guard.dirty = true;
    parsed
}
/// What: Persist the PKGBUILD parse cache to disk when dirty.
///
/// Inputs: None.
///
/// Output:
/// - Best-effort disk write of the cache file; clears the dirty flag on success.
///
/// Details:
/// - Cheap to call repeatedly: a clean cache returns without touching disk,
///   and a poisoned mutex is treated as "nothing to flush".
pub fn flush_pkgbuild_cache() {
    let Ok(mut guard) = cache_state().lock() else {
        return;
    };
    guard.flush_if_dirty();
}
#[cfg(test)]
/// What: Replace the global cache singleton with fresh state backed by `path`.
///
/// Inputs:
/// - `path`: Cache file to load (a missing file yields an empty cache).
///
/// Output:
/// - Global cache state swapped in place; prior entries are discarded.
///
/// Details:
/// - Test-only helper; also used to simulate a process restart by reloading
///   from a previously flushed file.
pub fn reset_cache_for_tests(path: PathBuf) {
    if let Ok(mut guard) = cache_state().lock() {
        let mut state = PkgbuildCacheState::new(path);
        state.load_from_disk();
        *guard = state;
    }
}
#[cfg(test)]
/// What: Inspect a cache entry without going through the parse path.
///
/// Inputs:
/// - `name` / `version` / `source`: Key components of the entry to look up.
///
/// Output:
/// - Clone of the cached entry, or `None` when absent.
///
/// Details:
/// - Uses `LruCache::get`, so a successful peek also marks the entry as
///   recently used.
pub fn peek_cache_entry_for_tests(
    name: &str,
    version: &str,
    source: PkgbuildSourceKind,
) -> Option<PkgbuildParseEntry> {
    let key = cache_key(name, version, source);
    cache_state()
        .lock()
        .ok()
        .and_then(|mut guard| guard.lru.get(&key).cloned())
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::{Arc, Barrier, mpsc};
    use std::time::Duration;

    /// Minimal PKGBUILD with a backup array and two `install` destinations.
    fn sample_pkgbuild() -> String {
        r#"
pkgname=sample
pkgver=1.2.3
pkgrel=1
backup=('etc/sample.conf' '/etc/sample.d/more.conf')
package() {
install -Dm755 "$srcdir/sample" "$pkgdir/usr/bin/sample"
install -Dm644 "$srcdir/sample.conf" "$pkgdir/etc/sample.conf"
}
"#
        .to_string()
    }

    /// Unique per-invocation temp file path so tests never share cache state.
    fn temp_cache_path(label: &str) -> PathBuf {
        let mut path = std::env::temp_dir();
        path.push(format!(
            "pacsea_pkgb_cache_{label}_{}_{}.json",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("system time ok")
                .as_nanos()
        ));
        path
    }

    #[test]
    /// A second parse of identical text is served from the cache.
    fn cache_hit_returns_same_signature_entry() {
        let path = temp_cache_path("hit");
        reset_cache_for_tests(path);
        let text = sample_pkgbuild();
        let entry = parse_pkgbuild_cached("sample", Some("1.2.3"), PkgbuildSourceKind::Aur, &text);
        assert!(entry.backup_files.contains(&"etc/sample.conf".to_string()));
        assert!(entry.install_paths.contains(&"/usr/bin/sample".to_string()));
        let hit = parse_pkgbuild_cached("sample", Some("1.2.3"), PkgbuildSourceKind::Aur, &text);
        assert_eq!(hit.pkgbuild_signature, entry.pkgbuild_signature);
        assert_eq!(hit.install_paths, entry.install_paths);
    }

    #[test]
    /// Changed PKGBUILD text under the same name/version triggers a re-parse.
    fn cache_miss_on_signature_change_reparses() {
        let path = temp_cache_path("miss");
        reset_cache_for_tests(path);
        let text = sample_pkgbuild();
        let _ = parse_pkgbuild_cached("sample", Some("1.2.3"), PkgbuildSourceKind::Official, &text);
        let modified = format!("{text}\n# change");
        let updated = parse_pkgbuild_cached(
            "sample",
            Some("1.2.3"),
            PkgbuildSourceKind::Official,
            &modified,
        );
        assert!(updated.pkgbuild_signature != compute_signature(&text));
    }

    #[test]
    /// Flushing and re-loading the cache file round-trips entries intact.
    fn flush_and_reload_persists_entries() {
        let path = temp_cache_path("persist");
        reset_cache_for_tests(path.clone());
        let text = sample_pkgbuild();
        let entry = parse_pkgbuild_cached("sample", Some("1.2.3"), PkgbuildSourceKind::Aur, &text);
        flush_pkgbuild_cache();
        // Re-initializing from the same path simulates a process restart.
        reset_cache_for_tests(path);
        let cached = peek_cache_entry_for_tests("sample", "1.2.3", PkgbuildSourceKind::Aur)
            .expect("entry should reload");
        assert_eq!(cached.pkgbuild_signature, entry.pkgbuild_signature);
        assert_eq!(cached.backup_files, entry.backup_files);
    }

    #[test]
    /// Inserting past `CACHE_CAPACITY` evicts the least-recently-used entry.
    fn cache_evicts_oldest_when_capacity_exceeded() {
        let path = temp_cache_path("evict");
        reset_cache_for_tests(path);
        let text = sample_pkgbuild();
        for i in 0..(CACHE_CAPACITY + 5) {
            let name = format!("pkg{i}");
            parse_pkgbuild_cached(&name, Some("1"), PkgbuildSourceKind::Unknown, &text);
        }
        assert!(
            peek_cache_entry_for_tests("pkg0", "1", PkgbuildSourceKind::Unknown).is_none(),
            "oldest entry should be evicted past capacity"
        );
    }

    #[test]
    /// Uses the test hook to pause a "stale" parse after its first lookup,
    /// lets a "newer" parse win the race, then verifies the stale thread does
    /// not clobber the newer cache entry and instead returns it.
    fn concurrent_parse_does_not_overwrite_newer_entry() {
        let path = temp_cache_path("concurrent");
        reset_cache_for_tests(path);
        let name = "racepkg";
        let stale_pkgbuild = sample_pkgbuild();
        let newer_pkgbuild = r#"
pkgname=sample
pkgver=9.9.9
pkgrel=1
backup=('etc/sample.conf')
package() {
install -Dm755 "$srcdir/sample" "$pkgdir/usr/bin/sample"
install -Dm644 "$srcdir/sample.conf" "$pkgdir/etc/sample.conf"
}
"#
        .to_string();
        let (reached_tx, reached_rx) = mpsc::channel();
        let (resume_tx, resume_rx) = mpsc::channel();
        let resume_rx = Arc::new(Mutex::new(resume_rx));
        let hook_consumed = Arc::new(AtomicBool::new(false));
        let hook_flag = Arc::clone(&hook_consumed);
        let hook_resume = Arc::clone(&resume_rx);
        // The hook fires once: it signals the main thread, then blocks the
        // stale thread until the newer parse has completed.
        let hook = Arc::new(move |point: CacheTestHookPoint| {
            if point == CacheTestHookPoint::AfterLookup
                && hook_flag
                    .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
                    .is_ok()
            {
                let _ = reached_tx.send(());
                hook_resume
                    .lock()
                    .expect("resume_rx lock poisoned")
                    .recv()
                    .expect("resume signal should arrive");
            }
        });
        let start_barrier = Arc::new(Barrier::new(2));
        let stale_pkgbuild_for_thread = stale_pkgbuild.clone();
        let stale_start = Arc::clone(&start_barrier);
        let stale_handle = std::thread::spawn(move || {
            stale_start.wait();
            parse_pkgbuild_cached(
                name,
                Some("1.2.3"),
                PkgbuildSourceKind::Aur,
                &stale_pkgbuild_for_thread,
            )
        });
        let stale_thread_id = stale_handle.thread().id();
        let _guard = set_cache_test_hook(hook, stale_thread_id);
        start_barrier.wait();
        reached_rx
            .recv_timeout(Duration::from_secs(2))
            .expect("stale thread should reach hook before proceeding");
        let newer_pkgbuild_for_thread = newer_pkgbuild.clone();
        let new_handle = std::thread::spawn(move || {
            parse_pkgbuild_cached(
                name,
                Some("1.2.3"),
                PkgbuildSourceKind::Aur,
                &newer_pkgbuild_for_thread,
            )
        });
        let new_entry = new_handle
            .join()
            .expect("new parsing thread should finish without panic");
        resume_tx
            .send(())
            .expect("should release stale thread after new parse completes");
        let stale_entry = stale_handle
            .join()
            .expect("stale parsing thread should finish without panic");
        let cached = peek_cache_entry_for_tests(name, "1.2.3", PkgbuildSourceKind::Aur)
            .expect("cache entry should exist after concurrent parses");
        let stale_signature = compute_signature(&stale_pkgbuild);
        let new_signature = compute_signature(&newer_pkgbuild);
        assert_eq!(
            cached.pkgbuild_signature, new_signature,
            "newer entry must remain in cache"
        );
        assert_eq!(
            cached.pkgbuild_signature, new_entry.pkgbuild_signature,
            "cache entry should match result of newer parse"
        );
        assert_ne!(
            cached.pkgbuild_signature, stale_signature,
            "stale parse must not overwrite newer cache entry"
        );
        // When the stale thread loses the race, it should return the cached (newer)
        // entry rather than its own stale parse result.
        assert_eq!(
            stale_entry.pkgbuild_signature, new_entry.pkgbuild_signature,
            "stale thread should return cached newer entry after losing race"
        );
        assert_ne!(
            stale_signature, new_signature,
            "test setup should use distinct PKGBUILD contents"
        );
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/resolution.rs | src/logic/files/resolution.rs | //! File resolution functions for install and remove operations.
use super::backup::get_backup_files;
use super::lists::{get_installed_file_list, get_remote_file_list};
use crate::state::modal::{FileChange, FileChangeType, PackageFileInfo};
use crate::state::types::Source;
use std::collections::{HashMap, HashSet};
use std::process::Command;
/// What: Batch fetch remote file lists for multiple official packages using `pacman -Fl`.
///
/// Inputs:
/// - `packages`: Slice of (`package_name`, source) tuples; non-official (AUR) entries are ignored.
///
/// Output:
/// - `HashMap` mapping package name to its remote file list.
///
/// Details:
/// - Batches queries into chunks of 50 to avoid command-line length limits.
/// - Parses multi-package `pacman -Fl` output (format: "<pkg> <path>" per line).
#[must_use]
pub fn batch_get_remote_file_lists(packages: &[(&str, &Source)]) -> HashMap<String, Vec<String>> {
    const BATCH_SIZE: usize = 50;
    let mut result_map = HashMap::new();
    // Group packages by repo to batch them together
    let mut repo_groups: HashMap<String, Vec<&str>> = HashMap::new();
    for (name, source) in packages {
        if let Source::Official { repo, .. } = source {
            let repo_key = if repo.is_empty() {
                String::new()
            } else {
                repo.clone()
            };
            repo_groups.entry(repo_key).or_default().push(name);
        }
    }
    for (repo, names) in repo_groups {
        for chunk in names.chunks(BATCH_SIZE) {
            // Qualify each name as "repo/name" when the repo is known so pacman
            // resolves it unambiguously.
            let specs: Vec<String> = chunk
                .iter()
                .map(|name| {
                    if repo.is_empty() {
                        (*name).to_string()
                    } else {
                        format!("{repo}/{name}")
                    }
                })
                .collect();
            let mut args = vec!["-Fl"];
            args.extend(specs.iter().map(String::as_str));
            // Force the C locale so the output format stays machine-parsable.
            match Command::new("pacman")
                .args(&args)
                .env("LC_ALL", "C")
                .env("LANG", "C")
                .output()
            {
                Ok(output) if output.status.success() => {
                    let text = String::from_utf8_lossy(&output.stdout);
                    // Parse pacman -Fl output: format is "<pkg> <path>"
                    // Group by package name
                    let mut pkg_files: HashMap<String, Vec<String>> = HashMap::new();
                    for line in text.lines() {
                        if let Some((pkg, path)) = line.split_once(' ') {
                            // Extract package name (remove repo prefix if present)
                            let pkg_name = if let Some((_, name)) = pkg.split_once('/') {
                                name
                            } else {
                                pkg
                            };
                            pkg_files
                                .entry(pkg_name.to_string())
                                .or_default()
                                .push(path.to_string());
                        }
                    }
                    result_map.extend(pkg_files);
                }
                _ => {
                    // If batch fails, fall back to individual queries (but don't do it here to avoid recursion)
                    // The caller will handle individual queries
                    break;
                }
            }
        }
    }
    result_map
}
/// What: Dispatch to the correct file resolution routine based on preflight action.
///
/// Inputs:
/// - `name`: Package name being evaluated.
/// - `source`: Package source needed for install and downgrade lookups.
/// - `action`: Whether the package is being installed, removed, or downgraded.
///
/// Output:
/// - Returns a `PackageFileInfo` on success or an error message.
///
/// # Errors
/// - Returns `Err` when file resolution fails for the selected operation (see
///   `resolve_install_files`, `resolve_remove_files`, and `resolve_downgrade_files`)
///
/// Details:
/// - Thin dispatcher; all real work happens in the per-action resolvers.
pub fn resolve_package_files(
    name: &str,
    source: &Source,
    action: crate::state::modal::PreflightAction,
) -> Result<PackageFileInfo, String> {
    use crate::state::modal::PreflightAction as Action;
    match action {
        Action::Install => resolve_install_files(name, source),
        Action::Remove => resolve_remove_files(name),
        Action::Downgrade => resolve_downgrade_files(name, source),
    }
}
/// What: Determine new and changed files introduced by installing or upgrading a package.
///
/// Inputs:
/// - `name`: Package name examined.
/// - `source`: Source repository information for remote lookups.
///
/// Output:
/// - Returns a populated `PackageFileInfo` or an error when file lists cannot be retrieved.
///
/// # Errors
/// - Returns `Err` when remote file list retrieval fails (see `get_remote_file_list`)
/// - Returns `Err` when installed file list retrieval fails (see `get_installed_file_list`)
///
/// Details:
/// - Fetches the remote listing, then delegates the diffing and pacnew
///   prediction to `resolve_install_files_with_remote_list`.
pub fn resolve_install_files(name: &str, source: &Source) -> Result<PackageFileInfo, String> {
    get_remote_file_list(name, source)
        .and_then(|remote_files| resolve_install_files_with_remote_list(name, source, remote_files))
}
/// What: Determine new and changed files using a pre-fetched remote file list.
///
/// Inputs:
/// - `name`: Package name examined.
/// - `source`: Source repository information (for backup file lookup).
/// - `remote_files`: Pre-fetched remote file list.
///
/// Output:
/// - Returns a populated `PackageFileInfo`.
///
/// # Errors
/// - Returns `Err` when installed file list retrieval fails (see `get_installed_file_list`)
/// - Returns `Err` when backup files retrieval fails (see `get_backup_files`)
///
/// Details:
/// - Compares remote file listings with locally installed files and predicts potential `.pacnew` creations.
pub fn resolve_install_files_with_remote_list(
    name: &str,
    source: &Source,
    remote_files: Vec<String>,
) -> Result<PackageFileInfo, String> {
    // Get installed file list (if package is already installed)
    let installed_files = get_installed_file_list(name).unwrap_or_default();
    let installed_set: HashSet<&str> = installed_files.iter().map(String::as_str).collect();
    let mut file_changes = Vec::new();
    let mut new_count = 0;
    let mut changed_count = 0;
    let mut config_count = 0;
    let mut pacnew_candidates = 0;
    // Get backup files for this package (for pacnew/pacsave prediction)
    let backup_files = get_backup_files(name, source).unwrap_or_default();
    let backup_set: HashSet<&str> = backup_files.iter().map(String::as_str).collect();
    for path in remote_files {
        // Config detection is path-based: anything under /etc/ counts.
        let is_config = path.starts_with("/etc/");
        let is_dir = path.ends_with('/');
        // Skip directories for now (we can add them later if needed)
        if is_dir {
            continue;
        }
        // A path already on disk counts as changed; otherwise it is new.
        let change_type = if installed_set.contains(path.as_str()) {
            changed_count += 1;
            FileChangeType::Changed
        } else {
            new_count += 1;
            FileChangeType::New
        };
        if is_config {
            config_count += 1;
        }
        // Predict pacnew: file is in backup array and exists (will be changed)
        let predicted_pacnew = backup_set.contains(path.as_str())
            && installed_set.contains(path.as_str())
            && is_config;
        if predicted_pacnew {
            pacnew_candidates += 1;
        }
        file_changes.push(FileChange {
            path,
            change_type,
            package: name.to_string(),
            is_config,
            predicted_pacnew,
            predicted_pacsave: false, // Only for remove operations
        });
    }
    // Sort files by path for consistent display
    file_changes.sort_by(|a, b| a.path.cmp(&b.path));
    Ok(PackageFileInfo {
        name: name.to_string(),
        files: file_changes,
        total_count: new_count + changed_count,
        new_count,
        changed_count,
        removed_count: 0,
        config_count,
        pacnew_candidates,
        pacsave_candidates: 0,
    })
}
/// What: Enumerate files that would be removed when uninstalling a package.
///
/// Inputs:
/// - `name`: Package scheduled for removal.
///
/// Output:
/// - Returns a `PackageFileInfo` capturing removed files and predicted `.pacsave` candidates.
///
/// # Errors
/// - Returns `Err` when installed file list retrieval fails (see `get_installed_file_list`)
/// - Returns `Err` when backup files retrieval fails (see `get_backup_files`)
///
/// Details:
/// - Reads installed file lists and backup arrays to flag configuration files requiring user attention.
pub fn resolve_remove_files(name: &str) -> Result<PackageFileInfo, String> {
    let installed_files = get_installed_file_list(name)?;
    // Backup lookup for removals has no meaningful repo/arch; pass empty ones.
    let backup_files = get_backup_files(
        name,
        &Source::Official {
            repo: String::new(),
            arch: String::new(),
        },
    )
    .unwrap_or_default();
    let backup_set: HashSet<&str> = backup_files.iter().map(String::as_str).collect();
    let mut file_changes = Vec::with_capacity(installed_files.len());
    let mut config_count = 0;
    let mut pacsave_candidates = 0;
    for path in installed_files {
        // Directory entries are skipped; only regular files are reported.
        if path.ends_with('/') {
            continue;
        }
        let is_config = path.starts_with("/etc/");
        if is_config {
            config_count += 1;
        }
        // Removing a backup-array config file leaves a `.pacsave` behind.
        let predicted_pacsave = is_config && backup_set.contains(path.as_str());
        if predicted_pacsave {
            pacsave_candidates += 1;
        }
        file_changes.push(FileChange {
            path,
            change_type: FileChangeType::Removed,
            package: name.to_string(),
            is_config,
            predicted_pacnew: false,
            predicted_pacsave,
        });
    }
    // Stable ordering for display.
    file_changes.sort_by(|a, b| a.path.cmp(&b.path));
    let removed_count = file_changes.len();
    Ok(PackageFileInfo {
        name: name.to_string(),
        files: file_changes,
        total_count: removed_count,
        new_count: 0,
        changed_count: 0,
        removed_count,
        config_count,
        pacnew_candidates: 0,
        pacsave_candidates,
    })
}
/// What: Enumerate files that would be changed when downgrading a package.
///
/// Inputs:
/// - `name`: Package scheduled for downgrade.
/// - `source`: Source repository information for remote lookups.
///
/// Output:
/// - Returns a `PackageFileInfo` capturing changed files (downgrade replaces files with older versions).
///
/// # Errors
/// - Returns `Err` when remote file list retrieval fails (see `get_remote_file_list`)
/// - Returns `Err` when installed file list retrieval fails (see `get_installed_file_list`)
///
/// Details:
/// - For downgrade, files that exist in both current and target versions are marked as "Changed".
/// - Files are compared between installed and remote (older) versions.
pub fn resolve_downgrade_files(name: &str, source: &Source) -> Result<PackageFileInfo, String> {
// Get remote file list (older version - what we're downgrading TO)
let remote_files = get_remote_file_list(name, source)?;
// Get installed file list (current version - what we're downgrading FROM)
let installed_files = get_installed_file_list(name)?;
// Normalize paths (remove trailing slashes for comparison)
let normalize_path = |p: &str| p.trim_end_matches('/').to_string();
let installed_set: HashSet<String> =
installed_files.iter().map(|p| normalize_path(p)).collect();
let remote_set: HashSet<String> = remote_files.iter().map(|p| normalize_path(p)).collect();
// Get backup files for this package (for pacnew prediction)
let backup_files = get_backup_files(name, source).unwrap_or_default();
let backup_set: HashSet<String> = backup_files.iter().map(|p| normalize_path(p)).collect();
let mut file_changes = Vec::new();
let mut changed_count = 0;
let mut new_count = 0;
let mut config_count = 0;
let mut pacnew_candidates = 0;
// Iterate over installed files to find files that will be changed
// Files that exist in both versions are "Changed" (being replaced with older version)
for path in installed_files {
let normalized_path = normalize_path(&path);
let is_config = path.starts_with("/etc/");
let is_dir = path.ends_with('/');
// Skip directories for now
if is_dir {
continue;
}
if is_config {
config_count += 1;
}
// If file exists in remote (older) version, it's being changed (downgraded)
if remote_set.contains(&normalized_path) {
changed_count += 1;
// Predict pacnew: file is in backup array and exists (will be changed to older version)
let predicted_pacnew = backup_set.contains(&normalized_path) && is_config;
if predicted_pacnew {
pacnew_candidates += 1;
}
file_changes.push(FileChange {
path,
change_type: FileChangeType::Changed,
package: name.to_string(),
is_config,
predicted_pacnew,
predicted_pacsave: false,
});
}
// Files that exist only in installed (newer) version but not in remote (older) version are "Removed"
else {
file_changes.push(FileChange {
path,
change_type: FileChangeType::Removed,
package: name.to_string(),
is_config,
predicted_pacnew: false,
predicted_pacsave: backup_set.contains(&normalized_path) && is_config,
});
}
}
// Also check for files that exist only in remote (older) version but not installed (newer) version - these are "New"
for path in remote_files {
let normalized_path = normalize_path(&path);
let is_config = path.starts_with("/etc/");
let is_dir = path.ends_with('/');
// Skip directories for now
if is_dir {
continue;
}
// If file doesn't exist in installed version, it's "New" (will be added back)
if !installed_set.contains(&normalized_path) {
new_count += 1;
file_changes.push(FileChange {
path,
change_type: FileChangeType::New,
package: name.to_string(),
is_config,
predicted_pacnew: false,
predicted_pacsave: false,
});
}
}
// Sort files by path for consistent display
file_changes.sort_by(|a, b| a.path.cmp(&b.path));
let removed_count = file_changes
.iter()
.filter(|f| matches!(f.change_type, FileChangeType::Removed))
.count();
Ok(PackageFileInfo {
name: name.to_string(),
files: file_changes,
total_count: changed_count + new_count + removed_count,
new_count,
changed_count,
removed_count,
config_count,
pacnew_candidates,
pacsave_candidates: 0,
})
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/lists.rs | src/logic/files/lists.rs | //! File list retrieval functions for remote and installed packages.
use super::pkgbuild_cache::{PkgbuildSourceKind, parse_pkgbuild_cached};
use super::pkgbuild_fetch::fetch_pkgbuild_sync;
use crate::state::types::Source;
use std::process::Command;
/// What: Parse file list from pacman/paru/yay command output.
///
/// Inputs:
/// - `output`: Command output containing file list in format "<pkg> <path>".
///
/// Output:
/// - Returns vector of file paths extracted from the output.
///
/// Details:
/// - Parses lines in format "<pkg> <path>" and extracts the path component.
/// Extract the path column from "<pkg> <path>" lines produced by
/// pacman/paru/yay file-list queries; lines without a space are dropped.
fn parse_file_list_from_output(output: &[u8]) -> Vec<String> {
    String::from_utf8_lossy(output)
        .lines()
        .filter_map(|line| {
            // Keep only the part after the first space (the path component).
            let (_pkg, path) = line.split_once(' ')?;
            Some(path.to_string())
        })
        .collect()
}
/// What: Try to get file list using an AUR helper command (paru or yay).
///
/// Inputs:
/// - `helper`: Name of the helper command ("paru" or "yay").
/// - `name`: Package name to query.
///
/// Output:
/// - Returns Some(Vec<String>) if successful, None otherwise.
///
/// Details:
/// - Executes helper -Fl command and parses the output.
/// - Returns None if command fails or produces no files.
fn try_aur_helper_file_list(helper: &str, name: &str) -> Option<Vec<String>> {
    tracing::debug!("Trying {} -Fl {} for AUR package file list", helper, name);
    // Force the C locale so the output format is stable for parsing.
    let output = Command::new(helper)
        .args(["-Fl", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .output()
        .ok()?;
    // A failing exit status means the helper has no file list for this package.
    if !output.status.success() {
        return None;
    }
    // Treat an empty result the same as a failure so callers can fall back.
    match parse_file_list_from_output(&output.stdout) {
        files if files.is_empty() => None,
        files => {
            tracing::debug!(
                "Found {} files from {} -Fl for {}",
                files.len(),
                helper,
                name
            );
            Some(files)
        }
    }
}
/// What: Get file list for AUR package using multiple fallback strategies.
///
/// Inputs:
/// - `name`: Package name to query.
///
/// Output:
/// - Returns file list if found, empty vector if no sources available.
///
/// Details:
/// - Tries installed files, then paru/yay, then PKGBUILD parsing.
fn get_aur_file_list(name: &str) -> Vec<String> {
    // 1. If the package is installed, pacman already knows its files.
    if let Ok(installed_files) = get_installed_file_list(name)
        && !installed_files.is_empty()
    {
        tracing::debug!(
            "Found {} files from installed AUR package {}",
            installed_files.len(),
            name
        );
        return installed_files;
    }
    // 2. Ask an available AUR helper (works for cached AUR packages).
    let has_paru = Command::new("paru").args(["--version"]).output().is_ok();
    let has_yay = Command::new("yay").args(["--version"]).output().is_ok();
    if has_paru && let Some(files) = try_aur_helper_file_list("paru", name) {
        return files;
    }
    if has_yay && let Some(files) = try_aur_helper_file_list("yay", name) {
        return files;
    }
    // 3. Last resort: derive install paths from the PKGBUILD itself.
    match fetch_pkgbuild_sync(name) {
        Ok(pkgbuild) => {
            let entry = parse_pkgbuild_cached(name, None, PkgbuildSourceKind::Aur, &pkgbuild);
            let files = entry.install_paths;
            if !files.is_empty() {
                tracing::debug!(
                    "Found {} files from PKGBUILD parsing for {}",
                    files.len(),
                    name
                );
                return files;
            }
        }
        Err(_) => {
            tracing::debug!("Failed to fetch PKGBUILD for {}", name);
        }
    }
    // No file list available
    tracing::debug!(
        "AUR package {}: file list not available (not installed, not cached, PKGBUILD parsing failed)",
        name
    );
    Vec::new()
}
/// What: Get file list for official repository package.
///
/// Inputs:
/// - `name`: Package name to query.
/// - `repo`: Repository name (empty string if not specified).
///
/// Output:
/// - Returns file list or error if command fails.
///
/// Details:
/// - Uses pacman -Fl command. Returns empty list if file database is not synced.
fn get_official_file_list(name: &str, repo: &str) -> Result<Vec<String>, String> {
    tracing::debug!("Running: pacman -Fl {}", name);
    // Qualify the package with its repository when one was provided.
    let spec = if repo.is_empty() {
        name.to_string()
    } else {
        format!("{repo}/{name}")
    };
    let output = Command::new("pacman")
        .args(["-Fl", &spec])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .output()
        .map_err(|e| {
            tracing::error!("Failed to execute pacman -Fl {}: {}", spec, e);
            format!("pacman -Fl failed: {e}")
        })?;
    if output.status.success() {
        let files = parse_file_list_from_output(&output.stdout);
        tracing::debug!("Found {} files in remote package {}", files.len(), name);
        return Ok(files);
    }
    let stderr = String::from_utf8_lossy(&output.stderr);
    // A missing file database is not fatal: syncing it needs root, so degrade
    // gracefully by reporting no files instead of an error.
    if stderr.contains("database file") && stderr.contains("does not exist") {
        tracing::warn!(
            "File database not synced for {} (pacman -Fy requires root). Skipping file list.",
            name
        );
        return Ok(Vec::new());
    }
    tracing::error!(
        "pacman -Fl {} failed with status {:?}: {}",
        spec,
        output.status.code(),
        stderr
    );
    Err(format!("pacman -Fl failed for {spec}: {stderr}"))
}
/// What: Fetch the list of files published in repositories for a given package.
///
/// Inputs:
/// - `name`: Package name in question.
/// - `source`: Source descriptor differentiating official repositories from AUR packages.
///
/// Output:
/// - Returns the list of file paths or an error when retrieval fails.
///
/// # Errors
/// - Returns `Err` when `pacman -Fl` command execution fails for official packages
/// - Returns `Err` when file database is not synced and command fails
///
/// Details:
/// - Uses `pacman -Fl` for official packages and currently returns an empty list for AUR entries.
pub fn get_remote_file_list(name: &str, source: &Source) -> Result<Vec<String>, String> {
    // Official repositories go through pacman's file database; AUR packages
    // use best-effort heuristics that never hard-fail.
    if let Source::Official { repo, .. } = source {
        get_official_file_list(name, repo)
    } else {
        Ok(get_aur_file_list(name))
    }
}
/// What: Retrieve the list of files currently installed for a package.
///
/// Inputs:
/// - `name`: Package name queried via `pacman -Ql`.
///
/// Output:
/// - Returns file paths owned by the package or an empty list when it is not installed.
///
/// # Errors
/// - Returns `Err` when `pacman -Ql` command execution fails (I/O error)
/// - Returns `Err` when `pacman -Ql` exits with non-zero status for reasons other than package not found
///
/// Details:
/// - Logs errors if the command fails for reasons other than the package being absent.
pub fn get_installed_file_list(name: &str) -> Result<Vec<String>, String> {
    tracing::debug!("Running: pacman -Ql {}", name);
    // C locale keeps the error/output text stable for the checks below.
    let output = Command::new("pacman")
        .args(["-Ql", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .output()
        .map_err(|e| {
            tracing::error!("Failed to execute pacman -Ql {}: {}", name, e);
            format!("pacman -Ql failed: {e}")
        })?;
    if output.status.success() {
        let files = parse_file_list_from_output(&output.stdout);
        tracing::debug!("Found {} files in installed package {}", files.len(), name);
        return Ok(files);
    }
    let stderr = String::from_utf8_lossy(&output.stderr);
    // "was not found" just means the package is not installed — a normal
    // situation for install preflights, so report an empty list.
    if stderr.contains("was not found") {
        tracing::debug!("Package {} is not installed", name);
        return Ok(Vec::new());
    }
    tracing::error!(
        "pacman -Ql {} failed with status {:?}: {}",
        name,
        output.status.code(),
        stderr
    );
    Err(format!("pacman -Ql failed for {name}: {stderr}"))
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/pkgbuild_fetch.rs | src/logic/files/pkgbuild_fetch.rs | //! PKGBUILD fetching functions.
use crate::util::{curl_args, percent_encode};
use std::process::Command;
use std::sync::Mutex;
use std::time::{Duration, Instant};
/// Rate limiter for PKGBUILD requests to avoid overwhelming AUR servers.
///
/// Tracks the timestamp of the last PKGBUILD request to enforce minimum intervals.
static PKGBUILD_RATE_LIMITER: Mutex<Option<Instant>> = Mutex::new(None);
/// Minimum interval between PKGBUILD requests in milliseconds.
///
/// Prevents overwhelming AUR servers with too many rapid requests.
const PKGBUILD_MIN_INTERVAL_MS: u64 = 500;
/// What: Try to find PKGBUILD in a directory structure.
///
/// Inputs:
/// - `base_dir`: Base directory to search in.
/// - `name`: Package name for logging.
/// - `helper_name`: Helper name for logging (e.g., "paru" or "yay").
///
/// Output:
/// - Returns PKGBUILD content if found, or None.
///
/// Details:
/// - First checks `base_dir`/`PKGBUILD`, then searches subdirectories.
fn find_pkgbuild_in_dir(
base_dir: &std::path::Path,
name: &str,
helper_name: &str,
) -> Option<String> {
// Try direct path first
let pkgbuild_path = base_dir.join("PKGBUILD");
if let Ok(text) = std::fs::read_to_string(&pkgbuild_path)
&& text.contains("pkgname")
{
tracing::debug!("Found PKGBUILD for {} via {} -G", name, helper_name);
return Some(text);
}
// Search in subdirectories
let Ok(entries) = std::fs::read_dir(base_dir) else {
return None;
};
for entry in entries.flatten() {
if !entry.path().is_dir() {
continue;
}
let pkgbuild_path = entry.path().join("PKGBUILD");
if let Ok(text) = std::fs::read_to_string(&pkgbuild_path)
&& text.contains("pkgname")
{
tracing::debug!(
"Found PKGBUILD for {} via {} -G (in subdir)",
name,
helper_name
);
return Some(text);
}
}
None
}
/// What: Try to get PKGBUILD using a helper command (paru -G or yay -G).
///
/// Inputs:
/// - `helper`: Helper command name ("paru" or "yay").
/// - `name`: Package name.
///
/// Output:
/// - Returns PKGBUILD content if found, or None.
///
/// Details:
/// - Executes helper -G command in a temp directory and searches for PKGBUILD.
/// What: Try to get PKGBUILD by running `<helper> -G <name>` in a temp directory.
///
/// Inputs:
/// - `helper`: Helper command name ("paru" or "yay").
/// - `name`: Package name.
///
/// Output:
/// - Returns PKGBUILD content if found, or None.
///
/// Details:
/// - The temp directory is removed on every exit path, including when the
///   helper binary cannot be spawned (the previous version early-returned
///   before cleanup in that case, leaking the directory).
fn try_helper_command(helper: &str, name: &str) -> Option<String> {
    let temp_dir = std::env::temp_dir().join(format!("pacsea_pkgbuild_{name}"));
    let _ = std::fs::create_dir_all(&temp_dir);
    // Spawn failure or a non-zero exit both yield None here.
    let result = Command::new(helper)
        .args(["-G", name])
        .current_dir(&temp_dir)
        .output()
        .ok()
        .filter(|output| output.status.success())
        .and_then(|_| find_pkgbuild_in_dir(&temp_dir.join(name), name, helper));
    // Best-effort cleanup on all paths.
    let _ = std::fs::remove_dir_all(&temp_dir);
    result
}
/// What: Try to read PKGBUILD directly from known cache paths.
///
/// Inputs:
/// - `name`: Package name.
/// - `home`: Home directory path.
///
/// Output:
/// - Returns PKGBUILD content if found, or None.
///
/// Details:
/// - Checks standard cache locations for paru and yay.
fn try_direct_cache_paths(name: &str, home: &str) -> Option<String> {
let cache_paths = [
format!("{home}/.cache/paru/clone/{name}/PKGBUILD"),
format!("{home}/.cache/yay/{name}/PKGBUILD"),
];
for path_str in cache_paths {
if let Ok(text) = std::fs::read_to_string(&path_str)
&& text.contains("pkgname")
{
tracing::debug!("Found PKGBUILD for {} in cache: {}", name, path_str);
return Some(text);
}
}
None
}
/// What: Try to find PKGBUILD in cache subdirectories.
///
/// Inputs:
/// - `name`: Package name.
/// - `home`: Home directory path.
///
/// Output:
/// - Returns PKGBUILD content if found, or None.
///
/// Details:
/// - Searches cache directories for packages that might be in subdirectories.
fn try_cache_subdirectories(name: &str, home: &str) -> Option<String> {
let cache_bases = [
format!("{home}/.cache/paru/clone"),
format!("{home}/.cache/yay"),
];
for cache_base in cache_bases {
let Ok(entries) = std::fs::read_dir(&cache_base) else {
continue;
};
for entry in entries.flatten() {
let path = entry.path();
if !path.is_dir() {
continue;
}
let matches_name = path
.file_name()
.and_then(|n| n.to_str())
.is_some_and(|n| n.contains(name));
if !matches_name {
continue;
}
// Check direct PKGBUILD
let pkgbuild_path = path.join("PKGBUILD");
if let Ok(text) = std::fs::read_to_string(&pkgbuild_path)
&& text.contains("pkgname")
{
tracing::debug!(
"Found PKGBUILD for {} in cache subdirectory: {:?}",
name,
pkgbuild_path
);
return Some(text);
}
// Check subdirectories
let Ok(sub_entries) = std::fs::read_dir(&path) else {
continue;
};
for sub_entry in sub_entries.flatten() {
if !sub_entry.path().is_dir() {
continue;
}
let pkgbuild_path = sub_entry.path().join("PKGBUILD");
if let Ok(text) = std::fs::read_to_string(&pkgbuild_path)
&& text.contains("pkgname")
{
tracing::debug!(
"Found PKGBUILD for {} in cache subdirectory: {:?}",
name,
pkgbuild_path
);
return Some(text);
}
}
}
}
None
}
/// What: Get PKGBUILD from yay/paru cache (offline method).
///
/// Inputs:
/// - `name`: Package name.
///
/// Output:
/// - Returns PKGBUILD content if found in cache, or None.
///
/// Details:
/// - Checks yay cache (~/.cache/yay) and paru cache (~/.cache/paru).
/// - Also tries using `yay -G` or `paru -G` commands.
#[must_use]
pub fn get_pkgbuild_from_cache(name: &str) -> Option<String> {
    // Prefer the helpers' own `-G` download (fastest, uses their cache)…
    for helper in ["paru", "yay"] {
        if let Some(text) = try_helper_command(helper, name) {
            return Some(text);
        }
    }
    // …then fall back to reading the clone caches directly, first at the
    // well-known paths, then by scanning cache subdirectories.
    let home = std::env::var("HOME").ok()?;
    try_direct_cache_paths(name, &home).or_else(|| try_cache_subdirectories(name, &home))
}
/// What: Fetch PKGBUILD content synchronously (blocking).
///
/// Inputs:
/// - `name`: Package name.
///
/// Output:
/// - Returns PKGBUILD content as a string, or an error if fetch fails.
///
/// # Errors
/// - Returns `Err` when network request fails (curl execution error)
/// - Returns `Err` when PKGBUILD cannot be fetched from AUR or official repositories
/// - Returns `Err` when rate limiting mutex is poisoned
///
/// # Panics
/// - Panics if the rate limiting mutex is poisoned
///
/// Details:
/// - First tries offline methods (yay/paru cache, yay -G, paru -G).
/// - Then tries AUR with rate limiting (500ms between requests).
/// - Falls back to official GitLab repos for official packages.
/// - Uses curl to fetch PKGBUILD from AUR or official GitLab repos.
pub fn fetch_pkgbuild_sync(name: &str) -> Result<String, String> {
    // 1. Try offline methods first (yay/paru cache)
    if let Some(cached) = get_pkgbuild_from_cache(name) {
        tracing::debug!("Using cached PKGBUILD for {} (offline)", name);
        return Ok(cached);
    }
    // 2. Rate limiting: ensure minimum interval between requests
    {
        let mut last_request = PKGBUILD_RATE_LIMITER
            .lock()
            .expect("PKGBUILD rate limiter mutex poisoned");
        if let Some(last) = *last_request {
            let elapsed = last.elapsed();
            if elapsed < Duration::from_millis(PKGBUILD_MIN_INTERVAL_MS) {
                // saturating_sub removes the needless panic path of
                // checked_sub().expect(); the guard above already proves the
                // subtraction cannot underflow.
                let delay =
                    Duration::from_millis(PKGBUILD_MIN_INTERVAL_MS).saturating_sub(elapsed);
                tracing::debug!(
                    "Rate limiting PKGBUILD request for {}: waiting {:?}",
                    name,
                    delay
                );
                std::thread::sleep(delay);
            }
        }
        *last_request = Some(Instant::now());
    }
    // 3. Try AUR first (works for both AUR and official packages via AUR mirror)
    let url_aur = format!(
        "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h={}",
        percent_encode(name)
    );
    tracing::debug!("Fetching PKGBUILD from AUR: {}", url_aur);
    let args = curl_args(&url_aur, &[]);
    let output = Command::new("curl").args(&args).output();
    let aur_failed_http_error = match &output {
        Ok(output) if output.status.success() => {
            let text = String::from_utf8_lossy(&output.stdout).to_string();
            if !text.trim().is_empty() && text.contains("pkgname") {
                return Ok(text);
            }
            false
        }
        Ok(output) => {
            // curl with -f flag returns exit code 22 for HTTP errors like 502
            // If AUR returns 502 (Bad Gateway), don't try GitLab fallback
            // GitLab should only be used for official packages, not AUR packages
            // AUR 502 indicates a temporary AUR server issue, not that the package doesn't exist in AUR
            output.status.code() == Some(22)
        }
        _ => false,
    };
    if aur_failed_http_error {
        tracing::debug!(
            "AUR returned HTTP error (likely 502) for {} - skipping GitLab fallback (likely AUR package or temporary AUR issue)",
            name
        );
        return Err("AUR returned HTTP error (likely 502 Bad Gateway)".to_string());
    }
    // Fallback to official GitLab repos (only for official packages, not AUR)
    let url_main = format!(
        "https://gitlab.archlinux.org/archlinux/packaging/packages/{}/-/raw/main/PKGBUILD",
        percent_encode(name)
    );
    tracing::debug!("Fetching PKGBUILD from GitLab main: {}", url_main);
    let args = curl_args(&url_main, &[]);
    let output = Command::new("curl").args(&args).output();
    match output {
        Ok(output) if output.status.success() => {
            let text = String::from_utf8_lossy(&output.stdout).to_string();
            // Validate that we got a PKGBUILD, not HTML (e.g., login page)
            if !text.trim().is_empty()
                && (text.contains("pkgname") || text.contains("pkgver") || text.contains("pkgdesc"))
                && !text.trim_start().starts_with("<!DOCTYPE")
                && !text.trim_start().starts_with("<html")
            {
                return Ok(text);
            }
            tracing::warn!(
                "GitLab main returned invalid PKGBUILD (likely HTML): first 200 chars: {:?}",
                text.chars().take(200).collect::<String>()
            );
        }
        _ => {}
    }
    // Try master branch as fallback
    let url_master = format!(
        "https://gitlab.archlinux.org/archlinux/packaging/packages/{}/-/raw/master/PKGBUILD",
        percent_encode(name)
    );
    tracing::debug!("Fetching PKGBUILD from GitLab master: {}", url_master);
    let args = curl_args(&url_master, &[]);
    let output = Command::new("curl")
        .args(&args)
        .output()
        .map_err(|e| format!("curl failed: {e}"))?;
    if !output.status.success() {
        return Err(format!(
            "curl failed with status: {:?}",
            output.status.code()
        ));
    }
    let text = String::from_utf8_lossy(&output.stdout).to_string();
    if text.trim().is_empty() {
        return Err("Empty PKGBUILD content".to_string());
    }
    // Validate that we got a PKGBUILD, not HTML (e.g., login page)
    if text.trim_start().starts_with("<!DOCTYPE") || text.trim_start().starts_with("<html") {
        tracing::warn!(
            "GitLab master returned HTML instead of PKGBUILD: first 200 chars: {:?}",
            text.chars().take(200).collect::<String>()
        );
        return Err("GitLab returned HTML page instead of PKGBUILD".to_string());
    }
    if !text.contains("pkgname") && !text.contains("pkgver") && !text.contains("pkgdesc") {
        tracing::warn!(
            "GitLab master returned content that doesn't look like PKGBUILD: first 200 chars: {:?}",
            text.chars().take(200).collect::<String>()
        );
        return Err("Response doesn't appear to be a valid PKGBUILD".to_string());
    }
    Ok(text)
}
/// What: Fetch .SRCINFO content synchronously (blocking).
///
/// Inputs:
/// - `name`: AUR package name.
///
/// Output:
/// - Returns .SRCINFO content as a string, or an error if fetch fails.
///
/// # Errors
/// - Returns `Err` when network request fails (curl execution error)
/// - Returns `Err` when .SRCINFO cannot be fetched from AUR
///
/// Details:
/// - Downloads .SRCINFO from AUR cgit repository.
pub fn fetch_srcinfo_sync(name: &str) -> Result<String, String> {
    // Thin delegating wrapper: the shared util builds the AUR cgit URL and
    // runs curl; `None` means no explicit base commit/branch override.
    crate::util::srcinfo::fetch_srcinfo(name, None)
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/mod.rs | src/logic/files/mod.rs | //! File list resolution and diff computation for preflight checks.
mod backup;
mod db_sync;
mod lists;
mod pkgbuild_cache;
mod pkgbuild_fetch;
mod pkgbuild_parse;
mod resolution;
pub use backup::{get_backup_files, get_backup_files_from_installed};
pub use db_sync::{
ensure_file_db_synced, get_file_db_sync_info, get_file_db_sync_timestamp, is_file_db_stale,
};
pub use lists::{get_installed_file_list, get_remote_file_list};
pub use pkgbuild_cache::{PkgbuildSourceKind, flush_pkgbuild_cache, parse_pkgbuild_cached};
pub use pkgbuild_fetch::{fetch_pkgbuild_sync, fetch_srcinfo_sync, get_pkgbuild_from_cache};
pub use pkgbuild_parse::{
parse_backup_array_content, parse_backup_from_pkgbuild, parse_backup_from_srcinfo,
parse_install_paths_from_pkgbuild,
};
pub use resolution::{
batch_get_remote_file_lists, resolve_install_files, resolve_install_files_with_remote_list,
resolve_package_files, resolve_remove_files,
};
use crate::state::modal::PackageFileInfo;
use crate::state::types::PackageItem;
/// What: Determine file-level changes for a set of packages under a specific preflight action.
///
/// Inputs:
/// - `items`: Package descriptors under consideration.
/// - `action`: Preflight action (install or remove) influencing the comparison strategy.
///
/// Output:
/// - Returns a vector of `PackageFileInfo` entries describing per-package file deltas.
///
/// Details:
/// - Invokes pacman commands to compare remote and installed file lists while preserving package order.
#[allow(clippy::missing_const_for_fn)]
pub fn resolve_file_changes(
    items: &[PackageItem],
    action: crate::state::modal::PreflightAction,
) -> Vec<PackageFileInfo> {
    // Check if file database is stale, but don't force sync (let user decide)
    // Only sync if database doesn't exist or is very old (>30 days)
    const MAX_AUTO_SYNC_AGE_DAYS: u64 = 30;
    let _span = tracing::info_span!(
        "resolve_file_changes",
        stage = "files",
        item_count = items.len()
    )
    .entered();
    let start_time = std::time::Instant::now();
    if items.is_empty() {
        tracing::warn!("No packages provided for file resolution");
        return Vec::new();
    }
    match ensure_file_db_synced(false, MAX_AUTO_SYNC_AGE_DAYS) {
        Ok(synced) => {
            if synced {
                tracing::info!("File database was synced automatically (was very stale)");
            } else {
                tracing::debug!("File database is fresh, no sync needed");
            }
        }
        Err(e) => {
            // Sync failed (likely requires root), but continue anyway
            tracing::warn!("File database sync failed: {} (continuing without sync)", e);
        }
    }
    // Batch fetch remote file lists for all official packages to reduce pacman command overhead
    let official_packages: Vec<(&str, &crate::state::types::Source)> = items
        .iter()
        .filter_map(|item| {
            if matches!(item.source, crate::state::types::Source::Official { .. }) {
                Some((item.name.as_str(), &item.source))
            } else {
                None
            }
        })
        .collect();
    let is_install = matches!(action, crate::state::modal::PreflightAction::Install);
    let batched_remote_files_cache = if !official_packages.is_empty() && is_install {
        resolution::batch_get_remote_file_lists(&official_packages)
    } else {
        std::collections::HashMap::new()
    };
    let mut results = Vec::with_capacity(items.len());
    for (idx, item) in items.iter().enumerate() {
        tracing::info!(
            "[{}/{}] Resolving files for package: {} ({:?})",
            idx + 1,
            items.len(),
            item.name,
            item.source
        );
        // Single map lookup per item (previously `contains_key` followed by
        // `get` hashed the key twice); `Some` means a batched list exists.
        let batched_remote_files = if is_install
            && matches!(item.source, crate::state::types::Source::Official { .. })
        {
            batched_remote_files_cache.get(item.name.as_str()).cloned()
        } else {
            None
        };
        let resolved = if let Some(remote_files) = batched_remote_files {
            // Use batched file list
            resolution::resolve_install_files_with_remote_list(
                &item.name,
                &item.source,
                remote_files,
            )
        } else {
            resolution::resolve_package_files(&item.name, &item.source, action)
        };
        match resolved {
            Ok(file_info) => {
                tracing::info!(
                    "  Found {} files for {} ({} new, {} changed, {} removed)",
                    file_info.total_count,
                    item.name,
                    file_info.new_count,
                    file_info.changed_count,
                    file_info.removed_count
                );
                results.push(file_info);
            }
            Err(e) => {
                tracing::warn!("  Failed to resolve files for {}: {}", item.name, e);
                // Create empty entry to maintain package order
                results.push(PackageFileInfo {
                    name: item.name.clone(),
                    files: Vec::new(),
                    total_count: 0,
                    new_count: 0,
                    changed_count: 0,
                    removed_count: 0,
                    config_count: 0,
                    pacnew_candidates: 0,
                    pacsave_candidates: 0,
                });
            }
        }
    }
    let elapsed = start_time.elapsed();
    let duration_ms = u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX);
    tracing::info!(
        stage = "files",
        item_count = items.len(),
        result_count = results.len(),
        duration_ms = duration_ms,
        "File resolution complete"
    );
    results
}
#[cfg(all(test, unix))]
mod tests;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/backup.rs | src/logic/files/backup.rs | //! Backup file detection and retrieval functions.
use super::pkgbuild_cache::{PkgbuildSourceKind, parse_pkgbuild_cached};
use super::pkgbuild_fetch::{fetch_pkgbuild_sync, fetch_srcinfo_sync};
use super::pkgbuild_parse::parse_backup_from_srcinfo;
use crate::state::types::Source;
use std::process::Command;
/// What: Identify files marked for backup handling during install or removal operations.
///
/// Inputs:
/// - `name`: Package whose backup array should be inspected.
/// - `source`: Source descriptor to decide how to gather backup information.
///
/// Output:
/// - Returns a list of backup file paths or an empty list when the data cannot be retrieved.
///
/// # Errors
/// - Returns `Err` when `pacman -Qii` command execution fails for installed packages
/// - Returns `Err` when PKGBUILD or .SRCINFO fetch fails and no fallback is available
///
/// Details:
/// - Prefers querying the installed package via `pacman -Qii`; falls back to best-effort heuristics.
pub fn get_backup_files(name: &str, source: &Source) -> Result<Vec<String>, String> {
// First try: if package is installed, use pacman -Qii
if let Ok(backup_files) = get_backup_files_from_installed(name)
&& !backup_files.is_empty()
{
tracing::debug!(
"Found {} backup files from installed package {}",
backup_files.len(),
name
);
return Ok(backup_files);
}
// Second try: parse from PKGBUILD/.SRCINFO (best-effort, may fail)
match source {
Source::Official { .. } => {
// Try to fetch PKGBUILD and parse backup array
match fetch_pkgbuild_sync(name) {
Ok(pkgbuild) => {
let entry =
parse_pkgbuild_cached(name, None, PkgbuildSourceKind::Official, &pkgbuild);
let backup_files = entry.backup_files;
if !backup_files.is_empty() {
tracing::debug!(
"Found {} backup files from PKGBUILD for {}",
backup_files.len(),
name
);
return Ok(backup_files);
}
}
Err(e) => {
tracing::debug!("Failed to fetch PKGBUILD for {}: {}", name, e);
}
}
Ok(Vec::new())
}
Source::Aur => {
// Try to fetch .SRCINFO first (more reliable for AUR)
match fetch_srcinfo_sync(name) {
Ok(srcinfo) => {
let backup_files = parse_backup_from_srcinfo(&srcinfo);
if !backup_files.is_empty() {
tracing::debug!(
"Found {} backup files from .SRCINFO for {}",
backup_files.len(),
name
);
return Ok(backup_files);
}
}
Err(e) => {
tracing::debug!("Failed to fetch .SRCINFO for {}: {}", name, e);
}
}
// Fallback to PKGBUILD if .SRCINFO failed
match fetch_pkgbuild_sync(name) {
Ok(pkgbuild) => {
let entry =
parse_pkgbuild_cached(name, None, PkgbuildSourceKind::Aur, &pkgbuild);
let backup_files = entry.backup_files;
if !backup_files.is_empty() {
tracing::debug!(
"Found {} backup files from PKGBUILD for {}",
backup_files.len(),
name
);
return Ok(backup_files);
}
}
Err(e) => {
tracing::debug!("Failed to fetch PKGBUILD for {}: {}", name, e);
}
}
Ok(Vec::new())
}
}
}
/// What: Collect backup file entries for an installed package through `pacman -Qii`.
///
/// Inputs:
/// - `name`: Installed package identifier.
///
/// Output:
/// - Returns the backup array as a vector of file paths or an empty list when not installed.
///
/// # Errors
/// - Returns `Err` when `pacman -Qii` command execution fails (I/O error)
/// - Returns `Err` when `pacman -Qii` exits with non-zero status for reasons other than package not found
///
/// Details:
/// - Parses the `Backup Files` section, handling wrapped lines to ensure complete coverage.
pub fn get_backup_files_from_installed(name: &str) -> Result<Vec<String>, String> {
    tracing::debug!("Running: pacman -Qii {}", name);
    // Force the C locale so field labels ("Backup Files") and error text
    // ("was not found") are in English and the string matching below stays stable.
    let output = Command::new("pacman")
        .args(["-Qii", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .output()
        .map_err(|e| {
            tracing::error!("Failed to execute pacman -Qii {}: {}", name, e);
            format!("pacman -Qii failed: {e}")
        })?;
    if !output.status.success() {
        // Package not installed - this is OK
        let stderr = String::from_utf8_lossy(&output.stderr);
        if stderr.contains("was not found") {
            tracing::debug!("Package {} is not installed", name);
            return Ok(Vec::new());
        }
        // Any other non-zero exit is a real error and is surfaced to the caller.
        tracing::error!(
            "pacman -Qii {} failed with status {:?}: {}",
            name,
            output.status.code(),
            stderr
        );
        return Err(format!("pacman -Qii failed for {name}: {stderr}"));
    }
    let text = String::from_utf8_lossy(&output.stdout);
    let mut backup_files = Vec::new();
    // True while we are inside the "Backup Files" section of the output.
    let mut in_backup_section = false;
    // Parse pacman -Qii output: look for "Backup Files" field
    for line in text.lines() {
        if line.starts_with("Backup Files") {
            in_backup_section = true;
            // Extract files from the same line if present
            if let Some(colon_pos) = line.find(':') {
                let files_str = line[colon_pos + 1..].trim();
                if !files_str.is_empty() && files_str != "None" {
                    // NOTE(review): every whitespace token is kept as a path; pacman may
                    // also print modification markers on these lines — confirm against
                    // actual `pacman -Qii` output.
                    for file in files_str.split_whitespace() {
                        backup_files.push(file.to_string());
                    }
                }
            }
        } else if in_backup_section {
            // Continuation lines (indented)
            if line.starts_with("  ") || line.starts_with('\t') {
                for file in line.split_whitespace() {
                    backup_files.push(file.to_string());
                }
            } else {
                // End of backup section
                break;
            }
        }
    }
    tracing::debug!(
        "Found {} backup files for installed package {}",
        backup_files.len(),
        name
    );
    Ok(backup_files)
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/files/pkgbuild_parse.rs | src/logic/files/pkgbuild_parse.rs | //! PKGBUILD parsing functions.
/// What: Parse backup array from PKGBUILD content.
///
/// Inputs:
/// - `pkgbuild`: Raw PKGBUILD file content.
///
/// Output:
/// - Returns a vector of backup file paths.
///
/// Details:
/// - Parses bash array syntax: `backup=('file1' 'file2' '/etc/config')`
/// - Handles single-line and multi-line array definitions.
#[must_use]
pub fn parse_backup_from_pkgbuild(pkgbuild: &str) -> Vec<String> {
let mut backup_files = Vec::new();
let mut in_backup_array = false;
for line in pkgbuild.lines() {
let line = line.trim();
// Skip comments and empty lines
if line.is_empty() || line.starts_with('#') {
continue;
}
// Look for backup= array declaration
if line.starts_with("backup=") || line.starts_with("backup =") {
in_backup_array = true;
// Check if array is on single line: backup=('file1' 'file2')
if let Some(start) = line.find('(')
&& let Some(end) = line.rfind(')')
{
let array_content = &line[start + 1..end];
parse_backup_array_content(array_content, &mut backup_files);
in_backup_array = false;
} else if line.contains('(') {
// Multi-line array starting
if let Some(start) = line.find('(') {
let array_content = &line[start + 1..];
parse_backup_array_content(array_content, &mut backup_files);
}
}
} else if in_backup_array {
// Continuation of multi-line array
// Check if array ends
if line.contains(')') {
if let Some(end) = line.rfind(')') {
let remaining = &line[..end];
parse_backup_array_content(remaining, &mut backup_files);
}
in_backup_array = false;
} else {
// Still in array, parse this line
parse_backup_array_content(line, &mut backup_files);
}
}
}
backup_files
}
/// What: Parse backup array content (handles quoted strings).
///
/// Inputs:
/// - `content`: String content containing quoted file paths.
/// - `backup_files`: Vector to append parsed file paths to.
///
/// Details:
/// - Collects single- or double-quoted strings; a quote of the other kind inside
///   an open string is kept as literal text.
/// - An unterminated quote at end of input still yields its accumulated text.
pub fn parse_backup_array_content(content: &str, backup_files: &mut Vec<String>) {
    // `Some(q)` means we are inside a string opened with quote character `q`.
    let mut open_quote: Option<char> = None;
    let mut pending = String::new();
    for ch in content.chars() {
        match open_quote {
            Some(q) if ch == q => {
                // Matching closing quote: emit the accumulated path (if any).
                if !pending.is_empty() {
                    backup_files.push(std::mem::take(&mut pending));
                }
                open_quote = None;
            }
            // The other quote kind inside an open string is literal content.
            Some(_) => pending.push(ch),
            None => {
                if ch == '\'' || ch == '"' {
                    open_quote = Some(ch);
                }
                // Whitespace and other characters outside quotes are ignored.
            }
        }
    }
    // Handle unclosed quote (edge case): keep what was collected.
    if open_quote.is_some() && !pending.is_empty() {
        backup_files.push(pending);
    }
}
/// What: Parse backup array from .SRCINFO content.
///
/// Inputs:
/// - `srcinfo`: Raw .SRCINFO file content.
///
/// Output:
/// - Returns a vector of backup file paths.
///
/// Details:
/// - Collects every non-empty `backup = <path>` key-value pair, preserving order.
/// - Blank lines and `#` comments are ignored.
#[must_use]
pub fn parse_backup_from_srcinfo(srcinfo: &str) -> Vec<String> {
    srcinfo
        .lines()
        .map(str::trim)
        .filter(|l| !l.is_empty() && !l.starts_with('#'))
        .filter_map(|l| l.split_once('='))
        .filter_map(|(key, value)| {
            let value = value.trim();
            (key.trim() == "backup" && !value.is_empty()).then(|| value.to_string())
        })
        .collect()
}
/// What: Parse install paths from PKGBUILD content.
///
/// Inputs:
/// - `pkgbuild`: Raw PKGBUILD file content.
/// - `pkgname`: Package name (used for default install paths).
///
/// Output:
/// - Returns a sorted, deduplicated vector of file paths that would be installed.
///
/// Details:
/// - Scans `package()` / `package_*()` functions for `install` and `cp` commands
///   targeting `$pkgdir` and extracts the destination path after it.
/// - This is a best-effort heuristic and may not capture all files; when nothing
///   is found, common defaults (`/usr/bin/<pkg>`, `/usr/share/<pkg>`) are used.
/// - NOTE(review): the `${pkgdir}` brace form is not matched by the `$pkgdir`
///   search and its braces also perturb the depth tracking — confirm whether that
///   form needs support.
#[must_use]
pub fn parse_install_paths_from_pkgbuild(pkgbuild: &str, pkgname: &str) -> Vec<String> {
    let mut files = Vec::new();
    let mut in_package_function = false;
    let mut package_function_depth = 0usize;
    for line in pkgbuild.lines() {
        let trimmed = line.trim();
        // Skip comments and empty lines
        if trimmed.is_empty() || trimmed.starts_with('#') {
            continue;
        }
        // Detect package() / package_<split>() function start
        if trimmed.starts_with("package()") || trimmed.starts_with("package_") {
            in_package_function = true;
            package_function_depth = 0;
            continue;
        }
        if !in_package_function {
            continue;
        }
        // Track brace depth so we know when the function body ends.
        package_function_depth += trimmed.matches('{').count();
        if trimmed.contains('}') {
            package_function_depth =
                package_function_depth.saturating_sub(trimmed.matches('}').count());
            if package_function_depth == 0 {
                in_package_function = false;
                continue;
            }
        }
        // Common patterns:
        //   install -Dm755 "$srcdir/binary" "$pkgdir/usr/bin/binary"
        //   cp -r "$srcdir/data" "$pkgdir/usr/share/app"
        // Destination extraction is identical for both commands, so it is shared.
        if (trimmed.contains("install") || trimmed.contains("cp"))
            && trimmed.contains("$pkgdir")
        {
            if let Some(path) = extract_pkgdir_dest(trimmed) {
                files.push(path);
            }
        }
    }
    // Remove duplicates and sort
    files.sort();
    files.dedup();
    // If we didn't find any files, try to infer common paths based on the package name.
    if files.is_empty() {
        files.push(format!("/usr/bin/{pkgname}"));
        files.push(format!("/usr/share/{pkgname}"));
    }
    files
}

/// What: Extract the `$pkgdir`-relative destination path from a shell command line.
///
/// Inputs:
/// - `line`: Trimmed PKGBUILD line known to contain `$pkgdir`.
///
/// Output:
/// - Returns the destination path with exactly one leading `/`, or `None` when no
///   path follows `$pkgdir`.
///
/// Details:
/// - Uses byte offsets from `char_indices` for slicing; the previous implementation
///   used `chars().position()` (a character count) as a byte index, which could
///   panic or mis-slice on multibyte characters.
fn extract_pkgdir_dest(line: &str) -> Option<String> {
    let pkgdir_pos = line.find("$pkgdir")?;
    let after = &line[pkgdir_pos + "$pkgdir".len()..];
    // Skip separators and the leading slash/quote directly after $pkgdir.
    let start = after
        .char_indices()
        .find(|&(_, c)| c != ' ' && c != '/' && c != '"' && c != '\'')
        .map_or(after.len(), |(i, _)| i);
    let rest = &after[start..];
    // The path ends at the next space, quote, or semicolon.
    let end = rest
        .char_indices()
        .find(|&(_, c)| c == ' ' || c == '"' || c == '\'' || c == ';')
        .map_or(rest.len(), |(i, _)| i);
    // Normalize to a single leading slash.
    let path = rest[..end].strip_prefix('/').unwrap_or(&rest[..end]);
    if path.is_empty() {
        None
    } else {
        Some(format!("/{path}"))
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/srcinfo.rs | src/logic/deps/srcinfo.rs | //! Parser for AUR .SRCINFO files.
// Re-export for backward compatibility
pub use crate::util::srcinfo::fetch_srcinfo;
/// What: Parse dependencies from .SRCINFO content.
///
/// Inputs:
/// - `srcinfo`: Raw .SRCINFO file content.
///
/// Output:
/// - Returns a tuple of (depends, makedepends, checkdepends, optdepends) vectors.
///
/// Details:
/// - Reads `key = value` pairs; repeated keys accumulate into the same bucket.
/// - Virtual library entries (`.so` suffixes/markers) are filtered out.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
pub(super) fn parse_srcinfo_deps(
    srcinfo: &str,
) -> (Vec<String>, Vec<String>, Vec<String>, Vec<String>) {
    let mut buckets: (Vec<String>, Vec<String>, Vec<String>, Vec<String>) = Default::default();
    for raw in srcinfo.lines() {
        let stmt = raw.trim();
        if stmt.is_empty() || stmt.starts_with('#') {
            continue;
        }
        // .SRCINFO format: key = value
        let Some((key, value)) = stmt.split_once('=') else {
            continue;
        };
        let (key, value) = (key.trim(), value.trim());
        // Filter out virtual packages such as libfoo.so or libfoo.so=1-64.
        let lowered = value.to_lowercase();
        if lowered.ends_with(".so") || lowered.contains(".so.") || lowered.contains(".so=") {
            continue;
        }
        let bucket = match key {
            "depends" => &mut buckets.0,
            "makedepends" => &mut buckets.1,
            "checkdepends" => &mut buckets.2,
            "optdepends" => &mut buckets.3,
            _ => continue,
        };
        bucket.push(value.to_string());
    }
    buckets
}
/// What: Parse conflicts from .SRCINFO content.
///
/// Inputs:
/// - `srcinfo`: Raw .SRCINFO file content.
///
/// Output:
/// - Returns a vector of conflicting package names.
///
/// Details:
/// - Reads repeated `conflicts = <spec>` key-value pairs.
/// - Virtual library entries (`.so` markers) are skipped and version constraints
///   are stripped via `parse_dep_spec`.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
pub(super) fn parse_srcinfo_conflicts(srcinfo: &str) -> Vec<String> {
    use super::parse::parse_dep_spec;
    let mut names = Vec::new();
    for raw in srcinfo.lines() {
        let stmt = raw.trim();
        if stmt.is_empty() || stmt.starts_with('#') {
            continue;
        }
        // .SRCINFO format: key = value
        let Some((key, value)) = stmt.split_once('=') else {
            continue;
        };
        if key.trim() != "conflicts" {
            continue;
        }
        let value = value.trim();
        // Filter out virtual packages (.so files).
        let lowered = value.to_lowercase();
        if lowered.ends_with(".so") || lowered.contains(".so.") || lowered.contains(".so=") {
            continue;
        }
        // Keep only the package name, dropping any version constraint.
        let (pkg_name, _) = parse_dep_spec(value);
        if !pkg_name.is_empty() {
            names.push(pkg_name);
        }
    }
    names
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    /// What: Confirm dependency parsing buckets fields and filters virtual packages.
    ///
    /// Inputs:
    /// - Sample .SRCINFO content with all four dependency kinds and a `.so` entry.
    ///
    /// Output:
    /// - Asserts each bucket receives the expected entries.
    ///
    /// Details:
    /// - The `libfoo.so=1-64` entry must be filtered out of `depends`.
    fn test_parse_srcinfo_deps() {
        let srcinfo = r"
pkgbase = test-package
pkgname = test-package
pkgver = 1.0.0
pkgrel = 1
depends = foo
depends = bar>=1.2.3
makedepends = make
makedepends = gcc
checkdepends = check
optdepends = optional: optional-package
depends = libfoo.so=1-64
";
        let (depends, makedepends, checkdepends, optdepends) = parse_srcinfo_deps(srcinfo);
        // Should have 2 depends (foo and bar>=1.2.3), libfoo.so should be filtered
        assert_eq!(depends.len(), 2);
        assert!(depends.contains(&"foo".to_string()));
        assert!(depends.contains(&"bar>=1.2.3".to_string()));
        // Should have 2 makedepends
        assert_eq!(makedepends.len(), 2);
        assert!(makedepends.contains(&"make".to_string()));
        assert!(makedepends.contains(&"gcc".to_string()));
        // Should have 1 checkdepends
        assert_eq!(checkdepends.len(), 1);
        assert!(checkdepends.contains(&"check".to_string()));
        // Should have 1 optdepends (with "optional:" prefix)
        assert_eq!(optdepends.len(), 1);
        assert!(optdepends.contains(&"optional: optional-package".to_string()));
    }

    #[test]
    /// What: Confirm conflicts parsing extracts package names from .SRCINFO.
    ///
    /// Inputs:
    /// - Sample .SRCINFO content with conflicts field.
    ///
    /// Output:
    /// - Returns vector of conflicting package names.
    ///
    /// Details:
    /// - Validates parsing logic handles multiple conflict entries.
    fn test_parse_srcinfo_conflicts() {
        let srcinfo = r"
pkgbase = test-package
pkgname = test-package
pkgver = 1.0.0
pkgrel = 1
conflicts = conflicting-pkg1
conflicts = conflicting-pkg2>=2.0
conflicts = libfoo.so=1-64
";
        let conflicts = parse_srcinfo_conflicts(srcinfo);
        // Should have 2 conflicts (conflicting-pkg1 and conflicting-pkg2), libfoo.so should be filtered
        assert_eq!(conflicts.len(), 2);
        assert!(conflicts.contains(&"conflicting-pkg1".to_string()));
        assert!(conflicts.contains(&"conflicting-pkg2".to_string()));
    }

    #[test]
    /// What: Ensure conflicts parsing handles empty .SRCINFO correctly.
    ///
    /// Inputs:
    /// - .SRCINFO content without conflicts field.
    ///
    /// Output:
    /// - Returns empty vector.
    ///
    /// Details:
    /// - Confirms graceful handling of missing conflicts.
    fn test_parse_srcinfo_conflicts_empty() {
        let srcinfo = r"
pkgbase = test-package
pkgname = test-package
pkgver = 1.0.0
";
        let conflicts = parse_srcinfo_conflicts(srcinfo);
        assert!(conflicts.is_empty());
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/reverse.rs | src/logic/deps/reverse.rs | //! Reverse dependency analysis for removal preflight checks.
use crate::state::modal::{DependencyInfo, DependencySource, DependencyStatus, ReverseRootSummary};
use crate::state::types::PackageItem;
use std::collections::{BTreeMap, HashMap, HashSet, VecDeque, hash_map::Entry};
use std::process::{Command, Stdio};
/// What: Aggregate data produced by the reverse dependency walk for removal checks.
///
/// Inputs:
/// - Populated internally by `resolve_reverse_dependencies`; external callers supply removal targets only.
///
/// Output:
/// - Provides flattened dependency records and per-root summaries for UI consumption.
///
/// Details:
/// - Serves as the transfer structure between the resolution logic and the preflight modal renderer.
#[derive(Debug, Clone, Default)]
pub struct ReverseDependencyReport {
    /// Flattened dependency info reused by the Preflight modal UI (sorted by name).
    pub dependencies: Vec<DependencyInfo>,
    /// Per-root summary statistics for the Summary tab (sorted by package name).
    pub summaries: Vec<ReverseRootSummary>,
}
/// What: Internal working state used while traversing reverse dependencies.
///
/// Inputs:
/// - Constructed from user-selected removal targets and lazily populated with pacman metadata.
///
/// Output:
/// - Retains cached package information, aggregation maps, and bookkeeping sets during traversal.
///
/// Details:
/// - Encapsulates shared collections so helper methods can mutate state without leaking implementation details.
struct ReverseResolverState {
    /// Aggregated reverse dependency entries by package name.
    aggregated: HashMap<String, AggregatedEntry>,
    /// Cache of package information by package name (memoized `pacman -Qi` hits).
    cache: HashMap<String, PkgInfo>,
    /// Names whose `pacman -Qi` lookup failed; cached so they are not re-queried.
    missing: HashSet<String>,
    /// Set of target package names for reverse dependency resolution.
    target_names: HashSet<String>,
}
impl ReverseResolverState {
    /// What: Initialize traversal state for the provided removal targets.
    ///
    /// Inputs:
    /// - `targets`: Packages selected for removal.
    ///
    /// Output:
    /// - Returns a state object preloaded with target name bookkeeping.
    ///
    /// Details:
    /// - Prepares aggregation maps and caches so subsequent queries can avoid redundant pacman calls.
    fn new(targets: &[PackageItem]) -> Self {
        let target_names = targets.iter().map(|pkg| pkg.name.clone()).collect();
        Self {
            aggregated: HashMap::new(),
            cache: HashMap::new(),
            missing: HashSet::new(),
            target_names,
        }
    }

    /// What: Fetch and cache package information for a given name.
    ///
    /// Inputs:
    /// - `name`: Package whose metadata should be retrieved via `pacman -Qi`.
    ///
    /// Output:
    /// - Returns package info when available; otherwise caches the miss and yields `None`.
    ///
    /// Details:
    /// - Avoids repeated command executions by memoizing both hits and misses across the traversal.
    fn pkg_info(&mut self, name: &str) -> Option<PkgInfo> {
        if let Some(info) = self.cache.get(name) {
            return Some(info.clone());
        }
        if self.missing.contains(name) {
            return None;
        }
        match fetch_pkg_info(name) {
            Ok(info) => {
                self.cache.insert(name.to_string(), info.clone());
                Some(info)
            }
            Err(err) => {
                tracing::warn!("Failed to query pacman -Qi {}: {}", name, err);
                self.missing.insert(name.to_string());
                None
            }
        }
    }

    /// What: Update aggregation records to reflect a discovered reverse dependency relationship.
    ///
    /// Inputs:
    /// - `dependent`: Package that depends on the current node.
    /// - `parent`: Immediate package causing the dependency (may be empty).
    /// - `root`: Root removal target currently being explored.
    /// - `depth`: Distance from the root in the traversal.
    ///
    /// Output:
    /// - Mutates internal maps to capture per-root relationships and selection flags.
    ///
    /// Details:
    /// - Consolidates metadata per dependent package while preserving shortest depth and parent sets per root.
    /// - Uses a single entry-API path; the previous implementation duplicated this
    ///   logic across Occupied/Vacant arms.
    fn update_entry(&mut self, dependent: &str, parent: &str, root: &str, depth: usize) {
        // A root never counts as its own reverse dependency.
        if dependent.eq_ignore_ascii_case(root) {
            return;
        }
        let Some(info) = self.pkg_info(dependent) else {
            return;
        };
        let selected = self.target_names.contains(dependent);
        let data = self
            .aggregated
            .entry(dependent.to_owned())
            .or_insert_with(|| AggregatedEntry {
                info: info.clone(),
                per_root: HashMap::new(),
                selected_for_removal: false,
            });
        // Refresh metadata on every sighting; the selection flag is sticky.
        data.info = info;
        if selected {
            data.selected_for_removal = true;
        }
        data.per_root
            .entry(root.to_string())
            .or_insert_with(RootRelation::new)
            .record(parent, depth);
    }
}
/// What: Snapshot of metadata retrieved from pacman's local database for traversal decisions.
///
/// Inputs:
/// - Filled by `fetch_pkg_info`, capturing fields relevant to reverse dependency aggregation.
///
/// Output:
/// - Provides reusable package details to avoid multiple CLI invocations.
///
/// Details:
/// - Stores only the subset of fields necessary for summarising conflicts and dependencies.
#[derive(Clone, Debug)]
struct PkgInfo {
    /// Package name (falls back to the queried name when pacman omits the field).
    name: String,
    /// Package version.
    version: String,
    /// Repository name (None for AUR packages).
    repo: Option<String>,
    /// Package groups (e.g. base/base-devel, used for system-package detection).
    groups: Vec<String>,
    /// Packages that require this package (pacman's "Required By" field).
    required_by: Vec<String>,
    /// Whether the "Install Reason" marks the package as explicitly installed.
    explicit: bool,
}
/// What: Aggregated view of a dependent package across all removal roots.
///
/// Inputs:
/// - Populated incrementally as `update_entry` discovers new relationships.
///
/// Output:
/// - Captures per-root metadata along with selection status for downstream conversion.
///
/// Details:
/// - Maintains deduplicated parent sets for each root to explain conflict chains clearly.
#[derive(Clone, Debug)]
struct AggregatedEntry {
    /// Latest package information (refreshed on every traversal sighting).
    info: PkgInfo,
    /// Relationship information per removal root, keyed by root package name.
    per_root: HashMap<String, RootRelation>,
    /// Whether this package is itself among the user-selected removal targets.
    selected_for_removal: bool,
}
/// What: Relationship summary between a dependent package and a particular removal root.
///
/// Inputs:
/// - Updated as traversal discovers parents contributing to the dependency.
///
/// Output:
/// - Tracks unique parent names and the minimum depth from the root.
///
/// Details:
/// - Used to distinguish direct versus transitive dependents in the final summary.
#[derive(Clone, Debug)]
struct RootRelation {
    /// Set of parent package names that contribute to this dependency.
    parents: HashSet<String>,
    /// Minimum depth from the removal root (`usize::MAX` until first recorded).
    min_depth: usize,
}
impl RootRelation {
    /// What: Construct an empty relation ready to collect parent metadata.
    ///
    /// Inputs:
    /// - (none)
    ///
    /// Output:
    /// - Returns a relation with no parents and a `usize::MAX` sentinel depth.
    ///
    /// Details:
    /// - The sentinel guarantees the first recorded depth becomes the minimum.
    fn new() -> Self {
        Self {
            parents: HashSet::new(),
            min_depth: usize::MAX,
        }
    }

    /// What: Record a traversal parent contributing to the dependency chain.
    ///
    /// Inputs:
    /// - `parent`: Package one level closer to the root; empty names are ignored.
    /// - `depth`: Current distance from the root target.
    ///
    /// Output:
    /// - Updates the parent set and keeps the shallowest depth observed.
    ///
    /// Details:
    /// - Depth tracking lets callers distinguish direct from transitive dependents.
    fn record(&mut self, parent: &str, depth: usize) {
        if !parent.is_empty() {
            self.parents.insert(parent.to_owned());
        }
        self.min_depth = self.min_depth.min(depth);
    }

    /// What: Report the closest recorded distance from this dependent to the root.
    ///
    /// Inputs:
    /// - (none)
    ///
    /// Output:
    /// - Returns the smallest depth stored during traversal (`usize::MAX` if none).
    ///
    /// Details:
    /// - A value of one marks a direct dependent of the root.
    const fn min_depth(&self) -> usize {
        self.min_depth
    }
}
/// What: Resolve reverse dependency impact for the packages selected for removal.
///
/// Inputs:
/// - `targets`: Packages the user intends to uninstall.
///
/// Output:
/// - Returns a `ReverseDependencyReport` describing affected packages and summary statistics.
///
/// Details:
/// - Performs a breadth-first search using `pacman -Qi` metadata, aggregating per-root relationships.
/// - Fixed: `&current` had been corrupted into the mojibake `¤t` (an HTML
///   `&curren;` entity artifact) at two call sites, which does not compile.
pub fn resolve_reverse_dependencies(targets: &[PackageItem]) -> ReverseDependencyReport {
    tracing::info!(
        "Starting reverse dependency resolution for {} target(s)",
        targets.len()
    );
    if targets.is_empty() {
        return ReverseDependencyReport::default();
    }
    let mut state = ReverseResolverState::new(targets);
    for target in targets {
        let root = target.name.trim();
        if root.is_empty() {
            continue;
        }
        // Roots that are not installed cannot have local reverse dependencies.
        if state.pkg_info(root).is_none() {
            tracing::warn!(
                "Skipping reverse dependency walk for {} (not installed)",
                root
            );
            continue;
        }
        // Breadth-first walk over the "Required By" graph starting at this root.
        let mut visited: HashSet<String> = HashSet::new();
        visited.insert(root.to_string());
        let mut queue: VecDeque<(String, usize)> = VecDeque::new();
        queue.push_back((root.to_string(), 0));
        while let Some((current, depth)) = queue.pop_front() {
            let Some(info) = state.pkg_info(&current) else {
                continue;
            };
            for dependent in info.required_by.iter().filter(|name| !name.is_empty()) {
                state.update_entry(dependent, &current, root, depth + 1);
                if visited.insert(dependent.clone()) {
                    queue.push_back((dependent.clone(), depth + 1));
                }
            }
        }
    }
    let ReverseResolverState { aggregated, .. } = state;
    // Build per-root summary statistics from the aggregated relations.
    let mut summary_map: HashMap<String, ReverseRootSummary> = HashMap::new();
    for entry in aggregated.values() {
        for (root, relation) in &entry.per_root {
            let summary = summary_map
                .entry(root.clone())
                .or_insert_with(|| ReverseRootSummary {
                    package: root.clone(),
                    ..Default::default()
                });
            // Depth one (or a parent that is the root itself) marks a direct dependent.
            if relation.parents.contains(root) || relation.min_depth() == 1 {
                summary.direct_dependents += 1;
            } else {
                summary.transitive_dependents += 1;
            }
            summary.total_dependents = summary.direct_dependents + summary.transitive_dependents;
        }
    }
    // Every target gets a summary row, even when nothing depends on it.
    for target in targets {
        summary_map
            .entry(target.name.clone())
            .or_insert_with(|| ReverseRootSummary {
                package: target.name.clone(),
                ..Default::default()
            });
    }
    let mut summaries: Vec<ReverseRootSummary> = summary_map.into_values().collect();
    summaries.sort_by(|a, b| a.package.cmp(&b.package));
    let mut dependencies: Vec<DependencyInfo> = aggregated
        .into_iter()
        .map(|(name, entry)| convert_entry(name, entry))
        .collect();
    dependencies.sort_by(|a, b| a.name.cmp(&b.name));
    tracing::info!(
        "Reverse dependency resolution complete ({} impacted packages)",
        dependencies.len()
    );
    ReverseDependencyReport {
        dependencies,
        summaries,
    }
}
/// What: Convert an aggregated reverse dependency entry into UI-facing metadata.
///
/// Inputs:
/// - `name`: Canonical dependent package name.
/// - `entry`: Aggregated structure containing metadata and per-root relations.
///
/// Output:
/// - Returns a `DependencyInfo` tailored for preflight summaries with conflict reasoning.
///
/// Details:
/// - Merges parent sets, sorts presentation fields, and infers system/core flags for display.
fn convert_entry(name: String, entry: AggregatedEntry) -> DependencyInfo {
    // Destructure to take ownership of all fields without cloning.
    let AggregatedEntry {
        info,
        per_root,
        selected_for_removal,
    } = entry;
    let PkgInfo {
        name: pkg_name,
        version,
        repo,
        groups,
        required_by: _,
        explicit,
    } = info;
    // Here `required_by` lists the removal roots this package blocks, sorted for stable display.
    let mut required_by: Vec<String> = per_root.keys().cloned().collect();
    required_by.sort();
    // Union of traversal parents across all roots explains the dependency chain.
    let mut all_parents: HashSet<String> = HashSet::new();
    for relation in per_root.values() {
        all_parents.extend(relation.parents.iter().cloned());
    }
    let mut depends_on: Vec<String> = all_parents.into_iter().collect();
    depends_on.sort();
    // Build one human-readable reason fragment per root.
    let mut reason_parts: Vec<String> = Vec::new();
    for (root, relation) in &per_root {
        let depth = relation.min_depth();
        let mut parents: Vec<String> = relation.parents.iter().cloned().collect();
        parents.sort();
        if depth <= 1 {
            // Direct dependent of the root.
            reason_parts.push(format!("requires {root}"));
        } else {
            // Transitive dependent: name the intermediate packages when known.
            let via = if parents.is_empty() {
                "unknown".to_string()
            } else {
                parents.join(", ")
            };
            reason_parts.push(format!("blocks {root} (depth {depth} via {via})"));
        }
    }
    if selected_for_removal {
        reason_parts.push("already selected for removal".to_string());
    }
    if explicit {
        reason_parts.push("explicitly installed".to_string());
    }
    // Sorting keeps the assembled reason deterministic despite HashMap iteration order.
    reason_parts.sort();
    let reason = if reason_parts.is_empty() {
        "required by removal targets".to_string()
    } else {
        reason_parts.join("; ")
    };
    // Empty or "local" repository names indicate a locally installed (non-repo) package.
    let source = match repo.as_deref() {
        Some(repo) if repo.eq_ignore_ascii_case("local") || repo.is_empty() => {
            DependencySource::Local
        }
        Some(repo) => DependencySource::Official {
            repo: repo.to_string(),
        },
        None => DependencySource::Local,
    };
    let is_core = repo
        .as_deref()
        .is_some_and(|r| r.eq_ignore_ascii_case("core"));
    // Membership in base/base-devel marks the package as part of the base system.
    let is_system = groups
        .iter()
        .any(|g| matches!(g.as_str(), "base" | "base-devel"));
    // Prefer pacman's reported name; fall back to the aggregation key.
    let display_name = if pkg_name.is_empty() { name } else { pkg_name };
    DependencyInfo {
        name: display_name,
        version,
        status: DependencyStatus::Conflict { reason },
        source,
        required_by,
        depends_on,
        is_core,
        is_system,
    }
}
/// What: Check if a package has any installed packages in its "Required By" field.
///
/// Inputs:
/// - `name`: Package name to check.
///
/// Output:
/// - Returns `true` when at least one entry of the package's "Required By" list is installed.
///
/// Details:
/// - Queries `pacman -Qi` via `fetch_pkg_info` and tests each dependent against the
///   installed-package index.
/// - Query failures (e.g. package not installed) are logged and reported as `false`.
#[must_use]
pub fn has_installed_required_by(name: &str) -> bool {
    fetch_pkg_info(name).map_or_else(
        |err| {
            tracing::debug!("Failed to query pacman -Qi {}: {}", name, err);
            false
        },
        |info| {
            info.required_by
                .iter()
                .any(|pkg| crate::index::is_installed(pkg))
        },
    )
}
/// What: Get the list of installed packages that depend on a package.
///
/// Inputs:
/// - `name`: Package name to check.
///
/// Output:
/// - Returns the names of installed packages that depend on `name`, or an empty
///   vector on failure.
///
/// Details:
/// - Queries `pacman -Qi` via `fetch_pkg_info` and keeps only "Required By"
///   entries present in the installed-package index.
/// - Query failures (e.g. package not installed) are logged and yield an empty vector.
#[must_use]
pub fn get_installed_required_by(name: &str) -> Vec<String> {
    let info = match fetch_pkg_info(name) {
        Ok(info) => info,
        Err(err) => {
            tracing::debug!("Failed to query pacman -Qi {}: {}", name, err);
            return Vec::new();
        }
    };
    info.required_by
        .into_iter()
        .filter(|pkg| crate::index::is_installed(pkg))
        .collect()
}
/// What: Query pacman for detailed information about an installed package.
///
/// Inputs:
/// - `name`: Package name passed to `pacman -Qi`.
///
/// Output:
/// - Returns a `PkgInfo` snapshot or an error string if the query fails.
///
/// Details:
/// - Parses key-value fields such as repository, groups, and required-by lists for downstream processing.
fn fetch_pkg_info(name: &str) -> Result<PkgInfo, String> {
    tracing::debug!("Running: pacman -Qi {}", name);
    // C locale keeps field labels in English so the key-based parsing below is stable.
    let output = Command::new("pacman")
        .args(["-Qi", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .map_err(|e| format!("pacman -Qi {name} failed: {e}"))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!(
            "pacman -Qi {} exited with {:?}: {}",
            name, output.status, stderr
        ));
    }
    let text = String::from_utf8_lossy(&output.stdout);
    // The field map handles wrapped continuation lines; list-valued fields are
    // whitespace-separated strings that are split below.
    let map = parse_key_value_output(&text);
    let required_by = split_ws_or_none(map.get("Required By"));
    let groups = split_ws_or_none(map.get("Groups"));
    let version = map.get("Version").cloned().unwrap_or_default();
    let repo = map.get("Repository").cloned();
    // NOTE(review): assumes the reason text contains "explicit" for explicitly
    // installed packages — confirm against pacman's "Install Reason" wording.
    let install_reason = map
        .get("Install Reason")
        .cloned()
        .unwrap_or_default()
        .to_lowercase();
    let explicit = install_reason.contains("explicit");
    Ok(PkgInfo {
        // Fall back to the queried name if pacman's output omits the Name field.
        name: map.get("Name").cloned().unwrap_or_else(|| name.to_string()),
        version,
        repo,
        groups,
        required_by,
        explicit,
    })
}
/// What: Parse pacman key-value output into a searchable map.
///
/// Inputs:
/// - `text`: Multi-line output containing colon-separated fields with optional wrapped lines.
///
/// Output:
/// - Returns a `BTreeMap` mapping field names to their consolidated string values.
///
/// Details:
/// - Handles indented continuation lines by appending them to the most recently parsed key.
/// - The continuation check runs BEFORE the colon split: wrapped values that
///   themselves contain ':' (e.g. "Optional Deps" entries like "pkg: description")
///   would otherwise be misread as brand-new keys.
fn parse_key_value_output(text: &str) -> BTreeMap<String, String> {
    let mut map: BTreeMap<String, String> = BTreeMap::new();
    let mut last_key: Option<String> = None;
    for line in text.lines() {
        if line.trim().is_empty() {
            continue;
        }
        // Indented line => continuation of the previous field's value.
        // pacman prints keys at column 0, so leading whitespace is unambiguous.
        if (line.starts_with(' ') || line.starts_with('\t'))
            && let Some(key) = &last_key
        {
            let entry = map.entry(key.clone()).or_default();
            // Join wrapped fragments with a single space.
            if !entry.ends_with(' ') {
                entry.push(' ');
            }
            entry.push_str(line.trim());
            continue;
        }
        if let Some((k, v)) = line.split_once(':') {
            let key = k.trim().to_string();
            let val = v.trim().to_string();
            last_key = Some(key.clone());
            map.insert(key, val);
        }
    }
    map
}
/// What: Break a whitespace-separated field into individual tokens, ignoring sentinel values.
///
/// Inputs:
/// - `field`: Optional string obtained from pacman metadata.
///
/// Output:
/// - Returns a vector of tokens or an empty vector when the field is missing or marked as "None".
///
/// Details:
/// - Trims surrounding whitespace before evaluating the contents to avoid spurious blank entries.
fn split_ws_or_none(field: Option<&String>) -> Vec<String> {
    let Some(value) = field else {
        return Vec::new();
    };
    let trimmed = value.trim();
    // pacman prints the literal sentinel "None" (any case) for empty fields.
    if trimmed.is_empty() || trimmed.eq_ignore_ascii_case("none") {
        return Vec::new();
    }
    trimmed.split_whitespace().map(str::to_string).collect()
}
#[cfg(test)]
mod tests {
    //! Unit tests for reverse-dependency aggregation and the pacman `-Qi`
    //! output-parsing helpers defined in this module.
    use super::*;
    use crate::state::types::{PackageItem, Source};
    use std::collections::HashMap;

    /// Build a minimal official-repository `PackageItem` fixture for resolver tests.
    fn pkg_item(name: &str) -> PackageItem {
        PackageItem {
            name: name.into(),
            version: "1.0".into(),
            description: "test".into(),
            source: Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }

    /// Build a `PkgInfo` fixture with empty relations, used to seed the resolver cache.
    fn pkg_info_stub(name: &str) -> PkgInfo {
        PkgInfo {
            name: name.into(),
            version: "2.0".into(),
            repo: Some("extra".into()),
            groups: Vec::new(),
            required_by: Vec::new(),
            explicit: false,
        }
    }

    #[test]
    /// What: Verify `update_entry` marks target packages and records per-root relations correctly.
    ///
    /// Inputs:
    /// - `targets`: Root and dependent package items forming the resolver seed.
    /// - `state`: Fresh `ReverseResolverState` with cached info for the dependent package.
    ///
    /// Output:
    /// - Aggregated entry reflects selection, contains relation for the root, and tracks parents.
    ///
    /// Details:
    /// - Ensures depth calculation and parent recording occur when updating the entry for a target
    ///   package linked to a specified root.
    fn update_entry_tracks_root_relations_and_selection() {
        let targets = vec![pkg_item("root"), pkg_item("app")];
        let mut state = ReverseResolverState::new(&targets);
        state.cache.insert("app".into(), pkg_info_stub("app"));
        state.update_entry("app", "root", "root", 1);
        let entry = state
            .aggregated
            .get("app")
            .expect("aggregated entry populated");
        assert!(entry.selected_for_removal, "target membership flagged");
        assert_eq!(entry.info.name, "app");
        let relation = entry
            .per_root
            .get("root")
            .expect("relation stored for root");
        assert_eq!(relation.min_depth(), 1);
        assert!(relation.parents.contains("root"));
    }

    #[test]
    /// What: Confirm `convert_entry` surfaces conflict reasons, metadata, and flags accurately.
    ///
    /// Inputs:
    /// - `entry`: Aggregated dependency entry with multiple root relations and metadata toggles.
    ///
    /// Output:
    /// - Resulting `DependencyInfo` carries conflict status, sorted relations, and flag booleans.
    ///
    /// Details:
    /// - Validates that reasons mention blocking roots, selection state, explicit install, and core/system
    ///   classification while preserving alias names and parent ordering.
    fn convert_entry_produces_conflict_reason_and_flags() {
        let mut relation_a = RootRelation::new();
        relation_a.record("root", 1);
        let mut relation_b = RootRelation::new();
        relation_b.record("parent_x", 2);
        relation_b.record("parent_y", 2);
        let entry = AggregatedEntry {
            info: PkgInfo {
                name: "dep_alias".into(),
                version: "3.1".into(),
                repo: Some("core".into()),
                groups: vec!["base".into()],
                required_by: Vec::new(),
                explicit: true,
            },
            per_root: HashMap::from([("root".into(), relation_a), ("other".into(), relation_b)]),
            selected_for_removal: true,
        };
        let info = convert_entry("dep".into(), entry);
        let DependencyStatus::Conflict { reason } = &info.status else {
            panic!("expected conflict status");
        };
        assert!(reason.contains("requires root"));
        assert!(reason.contains("blocks other"));
        assert!(reason.contains("already selected for removal"));
        assert!(reason.contains("explicitly installed"));
        // Relations are expected sorted; alias name from the entry wins over the key.
        assert_eq!(info.required_by, vec!["other", "root"]);
        assert_eq!(info.depends_on, vec!["parent_x", "parent_y", "root"]);
        assert!(info.is_core);
        assert!(info.is_system);
        assert_eq!(info.name, "dep_alias");
    }

    #[test]
    /// What: Ensure pacman-style key/value parsing merges wrapped descriptions.
    ///
    /// Inputs:
    /// - `sample`: Multi-line text where description continues on the next indented line.
    ///
    /// Output:
    /// - Parsed map flattens wrapped lines and retains other keys verbatim.
    ///
    /// Details:
    /// - Simulates `pacman -Qi` output to verify `parse_key_value_output` concatenates continuation
    ///   lines into a single value.
    fn parse_key_value_output_merges_wrapped_lines() {
        let sample = "Name : pkg\nDescription : Short desc\n continuation line\nRequired By : foo bar\nInstall Reason : Explicitly installed\n";
        let map = parse_key_value_output(sample);
        assert_eq!(map.get("Name"), Some(&"pkg".to_string()));
        assert_eq!(
            map.get("Description"),
            Some(&"Short desc continuation line".to_string())
        );
        assert_eq!(map.get("Required By"), Some(&"foo bar".to_string()));
    }

    #[test]
    /// What: Validate whitespace splitting helper ignores empty and "none" values.
    ///
    /// Inputs:
    /// - `field`: Optional strings containing "None", whitespace, words, or `None`.
    ///
    /// Output:
    /// - Returns empty vector for none-like inputs and splits valid whitespace-separated tokens.
    ///
    /// Details:
    /// - Covers uppercase "None", blank strings, regular word lists, and the absence of a value.
    fn split_ws_or_none_handles_none_and_empty() {
        assert!(split_ws_or_none(Some(&"None".to_string())).is_empty());
        assert!(split_ws_or_none(Some(&" ".to_string())).is_empty());
        let list = split_ws_or_none(Some(&"foo bar".to_string()));
        assert_eq!(list, vec!["foo", "bar"]);
        assert!(split_ws_or_none(None).is_empty());
    }

    // pacman is unavailable on Windows, so this system-facing test is gated out there.
    #[cfg(not(target_os = "windows"))]
    #[test]
    /// What: Verify `has_installed_required_by` correctly identifies packages with installed dependents.
    ///
    /// Inputs:
    /// - Package name that may or may not be installed.
    ///
    /// Output:
    /// - Returns `false` for non-existent packages, `true` if package has installed packages in "Required By".
    ///
    /// Details:
    /// - Tests the function with a non-existent package (should return false).
    /// - Note: Testing with real packages requires system state and is better suited for integration tests.
    fn has_installed_required_by_returns_false_for_nonexistent_package() {
        // Test with a package that definitely doesn't exist
        let result = has_installed_required_by("this-package-definitely-does-not-exist-12345");
        assert!(!result, "should return false for non-existent package");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/source.rs | src/logic/deps/source.rs | //! Dependency source determination utilities.
use crate::state::modal::DependencySource;
use std::collections::HashSet;
use std::process::{Command, Stdio};
/// What: Parse the lowercased `Repository` field out of pacman `-Si`/`-Qi` output.
///
/// Inputs:
/// - `text`: Raw pacman stdout scanned line by line.
///
/// Output:
/// - Returns `Some(repo)` (lowercased, trimmed) when a `Repository : ...` line is
///   present; otherwise `None`.
fn repository_field(text: &str) -> Option<String> {
    text.lines().find_map(|line| {
        if line.starts_with("Repository")
            && let Some(colon_pos) = line.find(':')
        {
            Some(line[colon_pos + 1..].trim().to_lowercase())
        } else {
            None
        }
    })
}

/// What: Infer the origin repository for a dependency currently under analysis.
///
/// Inputs:
/// - `name`: Candidate dependency package name.
/// - `installed`: Set of locally installed package names used to detect presence.
///
/// Output:
/// - Returns a tuple with the determined `DependencySource` and a flag indicating core membership.
///
/// Details:
/// - Prefers inspecting `pacman -Qi` metadata when the package is installed; otherwise defaults to heuristics.
/// - Downgrades gracefully to official classifications when the repository field cannot be read.
pub(super) fn determine_dependency_source(
    name: &str,
    installed: &HashSet<String>,
) -> (DependencySource, bool) {
    if !installed.contains(name) {
        // Not installed - check if it exists in official repos first
        // Only default to AUR if it's not found in official repos
        let output = Command::new("pacman")
            .args(["-Si", name])
            .env("LC_ALL", "C")
            .env("LANG", "C")
            .stdin(Stdio::null())
            // Fix: stdout must be piped (it was Stdio::null()), otherwise
            // `output.stdout` below is always empty and the repository can
            // never be determined for uninstalled packages.
            .stdout(Stdio::piped())
            .stderr(Stdio::null())
            .output();
        if let Ok(output) = output
            && output.status.success()
        {
            // Package exists in official repos - determine which repo
            let text = String::from_utf8_lossy(&output.stdout);
            if let Some(repo) = repository_field(&text) {
                let is_core = repo == "core";
                return (DependencySource::Official { repo }, is_core);
            }
            // Found in official repos but couldn't determine repo - assume extra
            return (
                DependencySource::Official {
                    repo: "extra".to_string(),
                },
                false,
            );
        }
        // Not found in official repos - this could be:
        // 1. A binary/script provided by a package (not a package itself) - should be Missing
        // 2. A virtual package (.so file) - should be filtered out earlier
        // 3. A real AUR package - but we can't distinguish without checking AUR
        //
        // IMPORTANT: We don't try AUR here because:
        // - Most dependencies are from official repos or are binaries/scripts
        // - Trying AUR for every unknown dependency causes unnecessary API calls
        // - Real AUR packages should be explicitly specified by the user, not discovered as dependencies
        // - If it's truly an AUR dependency, it will be marked as Missing and the user can handle it
        tracing::debug!(
            "Package {} not found in official repos and not installed - will be marked as Missing (skipping AUR check)",
            name
        );
        // Return AUR; resolve logic is expected to verify existence before hitting the API.
        return (DependencySource::Aur, false);
    }
    // Package is installed - check which repository it came from
    let output = Command::new("pacman")
        .args(["-Qi", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output();
    match output {
        Ok(output) if output.status.success() => {
            let text = String::from_utf8_lossy(&output.stdout);
            if let Some(repo) = repository_field(&text) {
                // Locally built packages report "local" (or an empty field).
                if repo == "local" || repo.is_empty() {
                    return (DependencySource::Local, false);
                }
                let is_core = repo == "core";
                return (DependencySource::Official { repo }, is_core);
            }
        }
        _ => {
            // Fallback: if we can't determine repo, assume it's from an official repo
            tracing::debug!(
                "Could not determine repository for {}, assuming official",
                name
            );
        }
    }
    // Default: assume official repository (most installed packages are)
    let is_core = is_system_package(name);
    (
        DependencySource::Official {
            repo: if is_core {
                "core".to_string()
            } else {
                "extra".to_string()
            },
        },
        is_core,
    )
}
/// What: Identify whether a dependency belongs to a curated list of critical system packages.
///
/// Inputs:
/// - `name`: Package name to compare against the predefined system set.
///
/// Output:
/// - `true` when the package is considered a core system component; otherwise `false`.
///
/// Details:
/// - Used to highlight packages whose removal or downgrade should be discouraged.
pub(super) fn is_system_package(name: &str) -> bool {
    // Curated base-system packages; membership is an exact name match.
    const SYSTEM_PACKAGES: [&str; 13] = [
        "glibc",
        "linux",
        "systemd",
        "pacman",
        "bash",
        "coreutils",
        "gcc",
        "binutils",
        "filesystem",
        "util-linux",
        "shadow",
        "sed",
        "grep",
    ];
    SYSTEM_PACKAGES.iter().any(|candidate| *candidate == name)
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    /// What: Confirm `is_system_package` recognizes curated critical packages.
    ///
    /// Inputs:
    /// - `names`: Sample package names including system and non-system entries.
    ///
    /// Output:
    /// - Returns `true` for known core packages and `false` for unrelated software.
    ///
    /// Details:
    /// - Exercises both positive (glibc, linux) and negative (firefox) cases to validate membership.
    fn is_system_package_detects_core() {
        for core_pkg in ["glibc", "linux"] {
            assert!(is_system_package(core_pkg));
        }
        assert!(!is_system_package("firefox"));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/parse.rs | src/logic/deps/parse.rs | //! Parsing utilities for dependency specifications.
use std::collections::HashSet;
use std::sync::OnceLock;
/// What: Load a set of localized labels for one pacman field from every locale file.
///
/// Inputs:
/// - `plural_key`: Translation key whose value is a YAML array of labels.
/// - `single_key`: Legacy translation key holding a single label (fallback when
///   `plural_key` is missing or does not parse as a sequence).
/// - `fallbacks`: Hardcoded defaults that are always included so parsing still
///   works when locale files cannot be loaded.
///
/// Output:
/// - `HashSet` of all labels discovered across locales plus the fallbacks.
///
/// Details:
/// - Shared implementation behind `get_depends_labels` and `get_none_labels`,
///   which previously duplicated this entire loading loop.
fn load_label_set(plural_key: &str, single_key: &str, fallbacks: &[&str]) -> HashSet<String> {
    let mut labels = HashSet::new();
    let locales_dir = crate::i18n::find_locales_dir().unwrap_or_else(|| {
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
            .join("config")
            .join("locales")
    });
    if let Ok(entries) = std::fs::read_dir(&locales_dir) {
        for entry in entries.flatten() {
            if let Some(file_name) = entry.file_name().to_str()
                && file_name.to_lowercase().ends_with(".yml")
            {
                let locale = file_name.strip_suffix(".yml").unwrap_or(file_name);
                if let Ok(translations) = crate::i18n::load_locale_file(locale, &locales_dir) {
                    // Preferred format: YAML array under `plural_key`.
                    if let Some(labels_str) = translations.get(plural_key)
                        && let Ok(yaml_value) =
                            serde_norway::from_str::<serde_norway::Value>(labels_str)
                        && let Some(seq) = yaml_value.as_sequence()
                    {
                        for item in seq {
                            if let Some(label) = item.as_str() {
                                labels.insert(label.to_string());
                            }
                        }
                    } else if let Some(label) = translations.get(single_key) {
                        // Fallback to single-label format.
                        labels.insert(label.clone());
                    }
                }
            }
        }
    }
    // Fallback: always include common labels in case loading failed or was incomplete.
    labels.extend(fallbacks.iter().copied().map(str::to_string));
    labels
}

/// What: Get all possible localized labels for "Depends On" field from pacman/yay/paru output.
///
/// Output:
/// - `HashSet` of all possible labels across all locales
///
/// Details:
/// - Loads labels from locale files at runtime
/// - Falls back to hardcoded list if locale files can't be loaded
/// - Cached on first access for performance
fn get_depends_labels() -> &'static HashSet<String> {
    static LABELS: OnceLock<HashSet<String>> = OnceLock::new();
    LABELS.get_or_init(|| {
        load_label_set(
            "app.parsing.pacman_depends_labels",
            "app.parsing.pacman_depends_label",
            &[
                "Depends On",
                "Ist abhängig von",
                "Dépend de",
                "Depende de",
                "Dipende da",
                "Zależy od",
                "Зависит от",
                "依存",
            ],
        )
    })
}

/// What: Get all possible localized "None" equivalents.
///
/// Output:
/// - `HashSet` of all possible "None" labels across all locales
fn get_none_labels() -> &'static HashSet<String> {
    static LABELS: OnceLock<HashSet<String>> = OnceLock::new();
    LABELS.get_or_init(|| {
        load_label_set(
            "app.parsing.pacman_none_labels",
            "app.parsing.pacman_none_label",
            &["None", "Keine", "Aucune", "Ninguna", "Nessuna"],
        )
    })
}
/// What: Decide whether a whitespace-separated token plausibly names a package.
///
/// Inputs:
/// - `s`: Candidate token taken from a pacman field value.
///
/// Output:
/// - `true` when the token looks like a package specification; `false` for
///   virtual `.so` entries, common English words, punctuation-only tokens,
///   single characters, or tokens ending in ':'.
///
/// Details:
/// - Shared by `parse_pacman_si_deps` and `parse_pacman_si_conflicts` so both
///   fields are filtered with identical rules (previously duplicated inline).
#[allow(clippy::case_sensitive_file_extension_comparisons)]
fn is_valid_package_token(s: &str) -> bool {
    // Package names are typically at least 2 characters.
    if s.is_empty() || s.len() < 2 {
        return false;
    }
    let lower = s.to_lowercase();
    // Virtual shared-library deps: "libedit.so=0-64", "libgit2.so", "libfoo.so.1".
    if lower.ends_with(".so") || lower.contains(".so.") || lower.contains(".so=") {
        return false;
    }
    // Words that appear in descriptions or malformed output, never as package names.
    const COMMON_WORDS: [&str; 26] = [
        "for", "to", "with", "is", "that", "using", "usually", "bundled", "bindings",
        "tooling", "the", "and", "or", "in", "on", "at", "by", "from", "as", "if", "when",
        "where", "which", "what", "how", "why",
    ];
    if COMMON_WORDS.contains(&lower.as_str()) {
        return false;
    }
    // Valid package names start with alphanumeric, '-' or '_'.
    let first_char = s.chars().next().unwrap_or(' ');
    if !first_char.is_alphanumeric() && first_char != '-' && first_char != '_' {
        return false;
    }
    // Tokens ending with ':' likely come from error messages or malformed output.
    if s.ends_with(':') {
        return false;
    }
    // Must contain at least one alphanumeric character.
    s.chars().any(char::is_alphanumeric)
}

/// What: Extract dependency specifications from the `pacman -Si` "Depends On" field.
///
/// Inputs:
/// - `text`: Raw stdout emitted by `pacman -Si` for a package.
///
/// Output:
/// - Returns package specification strings without virtual shared-library entries.
///
/// Details:
/// - Scans the "Depends On" line, split on whitespace, and removes `.so` patterns that represent virtual deps.
/// - Validates that tokens look like valid package names via `is_valid_package_token`.
pub(super) fn parse_pacman_si_deps(text: &str) -> Vec<String> {
    let depends_labels = get_depends_labels();
    let none_labels = get_none_labels();
    for line in text.lines() {
        // Check if line starts with any known "Depends On" label (any locale).
        let is_depends_line = depends_labels.iter().any(|label| line.starts_with(label))
            || (line.contains("Depends") && line.contains("On"));
        if is_depends_line && let Some(colon_pos) = line.find(':') {
            let deps_str = line[colon_pos + 1..].trim();
            // A localized "None" (or an empty field) means no dependencies.
            if deps_str.is_empty()
                || none_labels
                    .iter()
                    .any(|label| deps_str.eq_ignore_ascii_case(label))
            {
                return Vec::new();
            }
            return deps_str
                .split_whitespace()
                .filter(|token| is_valid_package_token(token))
                .map(ToString::to_string)
                .collect();
        }
    }
    Vec::new()
}

/// What: Extract conflict specifications from the `pacman -Si` "Conflicts With" field.
///
/// Inputs:
/// - `text`: Raw stdout emitted by `pacman -Si` for a package.
///
/// Output:
/// - Returns package names that conflict with this package.
///
/// Details:
/// - Scans the "Conflicts With" line, splits on whitespace, and filters out invalid entries.
/// - Uses the same token filter as `parse_pacman_si_deps`, then strips version
///   constraints via `parse_dep_spec`.
pub(super) fn parse_pacman_si_conflicts(text: &str) -> Vec<String> {
    let none_labels = get_none_labels();
    for line in text.lines() {
        // Check if line starts with "Conflicts With" (or localized variants).
        let is_conflicts_line = line.starts_with("Conflicts With")
            || line.starts_with("Konflikt mit")
            || (line.contains("Conflicts") && line.contains("With"));
        if is_conflicts_line && let Some(colon_pos) = line.find(':') {
            let conflicts_str = line[colon_pos + 1..].trim();
            // A localized "None" (or an empty field) means no conflicts.
            if conflicts_str.is_empty()
                || none_labels
                    .iter()
                    .any(|label| conflicts_str.eq_ignore_ascii_case(label))
            {
                return Vec::new();
            }
            return conflicts_str
                .split_whitespace()
                .filter(|token| is_valid_package_token(token))
                .map(|token| parse_dep_spec(token).0)
                .collect();
        }
    }
    Vec::new()
}
/// What: Split a dependency specification into name and version requirement components.
///
/// Inputs:
/// - `spec`: Dependency string from pacman helpers (e.g., `python>=3.12`).
///
/// Output:
/// - Returns a tuple `(name, version_constraint)` with an empty constraint when none is present.
///
/// Details:
/// - Two-character operators are checked before single-character ones so that
///   e.g. `>=` is not mis-split at the `>`.
pub(super) fn parse_dep_spec(spec: &str) -> (String, String) {
    const OPERATORS: [&str; 5] = ["<=", ">=", "=", "<", ">"];
    for op in OPERATORS {
        if let Some(pos) = spec.find(op) {
            return (
                spec[..pos].trim().to_string(),
                spec[pos..].trim().to_string(),
            );
        }
    }
    (spec.trim().to_string(), String::new())
}
#[cfg(test)]
mod tests {
    //! Unit tests for dependency-spec splitting and `pacman -Si` conflicts parsing.
    use super::*;

    #[test]
    /// What: Confirm dependency specs without operators return empty version constraints.
    ///
    /// Inputs:
    /// - Spec string `"glibc"` with no comparison operator.
    ///
    /// Output:
    /// - Tuple of name `"glibc"` and empty version string.
    ///
    /// Details:
    /// - Guards the default branch where no recognised operator exists.
    fn parse_dep_spec_basic() {
        let (name, version) = parse_dep_spec("glibc");
        assert_eq!(name, "glibc");
        assert_eq!(version, "");
    }

    #[test]
    /// What: Ensure specs containing `>=` split into name and constraint correctly.
    ///
    /// Inputs:
    /// - Spec string `"python>=3.12"`.
    ///
    /// Output:
    /// - Returns name `"python"` and version `">=3.12"`.
    ///
    /// Details:
    /// - Exercises multi-character operator detection order.
    fn parse_dep_spec_with_version() {
        let (name, version) = parse_dep_spec("python>=3.12");
        assert_eq!(name, "python");
        assert_eq!(version, ">=3.12");
    }

    #[test]
    /// What: Verify equality constraints are detected and returned verbatim.
    ///
    /// Inputs:
    /// - Spec string `"firefox=121.0"`.
    ///
    /// Output:
    /// - Produces name `"firefox"` and version `"=121.0"`.
    ///
    /// Details:
    /// - Confirms the operator precedence loop catches single-character `=` after multi-character checks.
    fn parse_dep_spec_equals() {
        let (name, version) = parse_dep_spec("firefox=121.0");
        assert_eq!(name, "firefox");
        assert_eq!(version, "=121.0");
    }

    #[test]
    /// What: Confirm conflicts parsing extracts package names from pacman output.
    ///
    /// Inputs:
    /// - Sample pacman -Si output with "Conflicts With" field.
    ///
    /// Output:
    /// - Returns vector of conflicting package names.
    ///
    /// Details:
    /// - Validates parsing logic handles whitespace-separated conflict lists.
    fn parse_pacman_si_conflicts_basic() {
        let text =
            "Name : test-package\nConflicts With : conflicting-pkg1 conflicting-pkg2\n";
        let conflicts = parse_pacman_si_conflicts(text);
        assert_eq!(conflicts.len(), 2);
        assert!(conflicts.contains(&"conflicting-pkg1".to_string()));
        assert!(conflicts.contains(&"conflicting-pkg2".to_string()));
    }

    #[test]
    /// What: Ensure conflicts parsing handles version constraints correctly.
    ///
    /// Inputs:
    /// - Pacman output with conflicts containing version constraints.
    ///
    /// Output:
    /// - Returns package names without version constraints.
    ///
    /// Details:
    /// - Confirms version operators are stripped from conflict names.
    fn parse_pacman_si_conflicts_with_versions() {
        let text = "Name : test-package\nConflicts With : old-pkg<2.0 new-pkg>=3.0\n";
        let conflicts = parse_pacman_si_conflicts(text);
        assert_eq!(conflicts.len(), 2);
        assert!(conflicts.contains(&"old-pkg".to_string()));
        assert!(conflicts.contains(&"new-pkg".to_string()));
    }

    #[test]
    /// What: Validate conflicts parsing handles "None" correctly.
    ///
    /// Inputs:
    /// - Pacman output with "Conflicts With: None".
    ///
    /// Output:
    /// - Returns empty vector.
    ///
    /// Details:
    /// - Ensures "None" label is recognized and filtered out.
    fn parse_pacman_si_conflicts_none() {
        let text = "Name : test-package\nConflicts With : None\n";
        let conflicts = parse_pacman_si_conflicts(text);
        assert!(conflicts.is_empty());
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/status.rs | src/logic/deps/status.rs | //! Dependency status determination and version checking.
use crate::logic::deps::is_package_installed_or_provided;
use crate::state::modal::DependencyStatus;
use std::collections::HashSet;
use std::process::{Command, Stdio};
/// What: Evaluate a dependency's installation status relative to required versions.
///
/// Inputs:
/// - `name`: Dependency package identifier.
/// - `version_req`: Optional version constraint string (e.g., `>=1.2`); empty means unconstrained.
/// - `installed`: Set of names currently installed on the system.
/// - `provided`: Names satisfied indirectly — presumably `provides` entries of installed
///   packages; forwarded to `is_package_installed_or_provided` alongside `installed`
///   (TODO confirm exact semantics at the call site building this set).
/// - `upgradable`: Set of names pacman reports as upgradable.
///
/// Output:
/// - Returns a `DependencyStatus` describing whether installation, upgrade, or no action is needed.
///
/// Details:
/// - Combines local database queries with helper functions to capture upgrade requirements and conflicts.
pub(super) fn determine_status(
    name: &str,
    version_req: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> DependencyStatus {
    // Check if package is installed or provided by an installed package
    if !is_package_installed_or_provided(name, installed, provided) {
        return DependencyStatus::ToInstall;
    }
    // Check if package is upgradable (even without version requirement)
    let is_upgradable = upgradable.contains(name);
    // If version requirement is specified, check if it matches
    if !version_req.is_empty() {
        // Try to get installed version; on lookup failure we fall through to
        // the unversioned handling below rather than erroring out.
        if let Ok(installed_version) = get_installed_version(name) {
            // Simple version comparison (basic implementation)
            if !version_satisfies(&installed_version, version_req) {
                return DependencyStatus::ToUpgrade {
                    current: installed_version,
                    required: version_req.to_string(),
                };
            }
            // Version requirement satisfied, but check if package is upgradable anyway
            if is_upgradable {
                // Get available version from pacman -Si if possible
                let available_version =
                    get_available_version(name).unwrap_or_else(|| "newer".to_string());
                return DependencyStatus::ToUpgrade {
                    current: installed_version,
                    required: available_version,
                };
            }
            return DependencyStatus::Installed {
                version: installed_version,
            };
        }
    }
    // Installed but no version check needed - check if upgradable
    if is_upgradable {
        match get_installed_version(name) {
            Ok(current_version) => {
                let available_version =
                    get_available_version(name).unwrap_or_else(|| "newer".to_string());
                return DependencyStatus::ToUpgrade {
                    current: current_version,
                    required: available_version,
                };
            }
            Err(_) => {
                // Version lookup failed; report generic placeholder strings.
                return DependencyStatus::ToUpgrade {
                    current: "installed".to_string(),
                    required: "newer".to_string(),
                };
            }
        }
    }
    // Installed and up-to-date - get actual version
    get_installed_version(name).map_or_else(
        |_| DependencyStatus::Installed {
            version: "installed".to_string(),
        },
        |version| DependencyStatus::Installed { version },
    )
}
/// What: Query the repositories for the latest available version of a package.
///
/// Inputs:
/// - `name`: Package name looked up via `pacman -Si`.
///
/// Output:
/// - Returns the version string advertised in the repositories, or `None` on failure.
///
/// Details:
/// - Strips revision suffixes (e.g., `-1`) so comparisons focus on the base semantic version.
pub(super) fn get_available_version(name: &str) -> Option<String> {
let output = Command::new("pacman")
.args(["-Si", name])
.env("LC_ALL", "C")
.env("LANG", "C")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
.ok()?;
if !output.status.success() {
return None;
}
let text = String::from_utf8_lossy(&output.stdout);
for line in text.lines() {
if line.starts_with("Version")
&& let Some(colon_pos) = line.find(':')
{
let version = line[colon_pos + 1..].trim();
// Remove revision suffix if present
let version = version.split('-').next().unwrap_or(version);
return Some(version.to_string());
}
}
None
}
/// What: Retrieve the locally installed version of a package.
///
/// Inputs:
/// - `name`: Package to query via `pacman -Q`.
///
/// Output:
/// - Returns the installed version string on success; otherwise an error message.
///
/// # Errors
/// - Returns `Err` when `pacman -Q` command execution fails (I/O error)
/// - Returns `Err` when the package is not found or not installed
/// - Returns `Err` when the version string cannot be parsed from command output
///
/// Details:
/// - Normalizes versions by removing revision suffixes to facilitate requirement comparisons.
pub fn get_installed_version(name: &str) -> Result<String, String> {
    let output = Command::new("pacman")
        .args(["-Q", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .map_err(|e| format!("pacman -Q failed: {e}"))?;
    if !output.status.success() {
        return Err("Package not found".to_string());
    }
    let text = String::from_utf8_lossy(&output.stdout);
    // Output format: "name version" or "name version-revision".
    let first_line = text.lines().next().unwrap_or("");
    match first_line.split_once(' ') {
        Some((_, rest)) => {
            let version = rest.trim();
            // Remove revision suffix if present (e.g., "1.2.3-1" -> "1.2.3").
            let version = version.split('-').next().unwrap_or(version);
            Ok(version.to_string())
        }
        None => Err("Could not parse version".to_string()),
    }
}
/// What: Perform a simplified comparison between an installed version and a requirement expression.
///
/// Inputs:
/// - `installed`: Version string currently present on the system.
/// - `requirement`: Comparison expression such as `>=1.2` or `=2.0`.
///
/// Output:
/// - `true` when the expression evaluates in favor of the installed version; otherwise `false`.
///
/// Details:
/// - Uses straightforward lexicographic string comparisons rather than full semantic version
///   parsing, matching pacman's textual format.
/// - Operators are tested longest-first (`>=`/`<=` before `>`/`<`) so compound operators are
///   never misread as their single-character forms.
/// - A requirement with no recognized operator prefix imposes no constraint and yields `true`.
#[must_use]
pub fn version_satisfies(installed: &str, requirement: &str) -> bool {
    // NOTE: simplified checker; a real version comparator (e.g. pacman's
    // vercmp ordering) would be needed for fully correct numeric comparisons.
    if let Some(req_ver) = requirement.strip_prefix(">=") {
        installed >= req_ver
    } else if let Some(req_ver) = requirement.strip_prefix("<=") {
        installed <= req_ver
    } else if let Some(req_ver) = requirement.strip_prefix('=') {
        installed == req_ver
    } else if let Some(req_ver) = requirement.strip_prefix('>') {
        installed > req_ver
    } else if let Some(req_ver) = requirement.strip_prefix('<') {
        installed < req_ver
    } else {
        // No version requirement, assume satisfied.
        true
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    /// What: Ensure relational comparison operators behave according to the simplified string checks.
    ///
    /// Inputs:
    /// - `>=`, `<=`, `>`, `<`, and `=` requirements evaluated against representative version strings.
    ///
    /// Output:
    /// - Verifies truthiness for matching cases and falseness for mismatched comparisons.
    ///
    /// Details:
    /// - Confirms the helper remains stable for the ordering relied upon by dependency diagnostics.
    fn version_satisfies_relational_operators() {
        assert!(version_satisfies("2.0", ">=1.5"));
        assert!(!version_satisfies("1.0", ">=1.5"));
        assert!(version_satisfies("1.5", "<=1.5"));
        assert!(version_satisfies("1.6", ">1.5"));
        assert!(!version_satisfies("1.4", ">1.5"));
        assert!(version_satisfies("1.5", "=1.5"));
        // Lexicographic comparison: "1.6" is not below "1.5".
        assert!(!version_satisfies("1.6", "<1.5"));
    }
    #[test]
    /// What: Confirm the helper defaults to success when no requirement string is provided.
    ///
    /// Inputs:
    /// - Empty and non-operator requirement strings.
    ///
    /// Output:
    /// - Returns `true`, indicating no additional comparison is enforced.
    ///
    /// Details:
    /// - Guards the fallback branch used by callers that lack explicit version constraints.
    fn version_satisfies_defaults_to_true_without_constraint() {
        assert!(version_satisfies("2.0", ""));
        // "n/a" has no leading operator, so it imposes no constraint.
        assert!(version_satisfies("2.0", "n/a"));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/utils.rs | src/logic/deps/utils.rs | //! Utility functions for dependency resolution.
use crate::state::modal::DependencyStatus;
/// What: Provide a numeric priority used to order dependency statuses.
///
/// Inputs:
/// - `status`: Dependency status variant subject to sorting.
///
/// Output:
/// - Returns a numeric priority where lower numbers represent higher urgency.
///
/// Details:
/// - Mirrors the ordering the UI expects: conflicts sort first, already
///   installed packages sort last.
pub(super) const fn dependency_priority(status: &DependencyStatus) -> u8 {
    // Lower value == more urgent; keep in sync with UI sorting expectations.
    match status {
        DependencyStatus::Installed { .. } => 4,
        DependencyStatus::ToUpgrade { .. } => 3,
        DependencyStatus::ToInstall => 2,
        DependencyStatus::Missing => 1,
        DependencyStatus::Conflict { .. } => 0,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    /// What: Validate dependency priorities rank conflict states ahead of installed cases.
    ///
    /// Inputs:
    /// - Constructs each `DependencyStatus` variant with lightweight sample payloads.
    ///
    /// Output:
    /// - Asserts the assigned numeric priorities ascend from conflict through installed statuses.
    ///
    /// Details:
    /// - Guards the ordering relied upon by sorting logic so that regression changes surface quickly.
    fn dependency_priority_orders_by_severity() {
        let conflict = dependency_priority(&DependencyStatus::Conflict {
            reason: String::new(),
        });
        let missing = dependency_priority(&DependencyStatus::Missing);
        let install = dependency_priority(&DependencyStatus::ToInstall);
        let upgrade = dependency_priority(&DependencyStatus::ToUpgrade {
            current: "1".into(),
            required: "2".into(),
        });
        let installed = dependency_priority(&DependencyStatus::Installed {
            version: "1".into(),
        });
        // Strictly ascending severity: conflict < missing < install < upgrade < installed.
        assert!(conflict < missing);
        assert!(missing < install);
        assert!(install < upgrade);
        assert!(upgrade < installed);
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/resolve.rs | src/logic/deps/resolve.rs | //! Core dependency resolution logic for individual packages.
use super::parse::{parse_dep_spec, parse_pacman_si_conflicts, parse_pacman_si_deps};
use super::source::{determine_dependency_source, is_system_package};
use super::srcinfo::{fetch_srcinfo, parse_srcinfo_conflicts, parse_srcinfo_deps};
use super::status::determine_status;
use crate::logic::files::get_pkgbuild_from_cache;
use crate::logic::sandbox::{parse_pkgbuild_conflicts, parse_pkgbuild_deps};
use crate::state::modal::DependencyInfo;
use crate::state::types::Source;
use std::collections::{HashMap, HashSet};
use std::process::{Command, Stdio};
/// What: Batch fetch dependency lists for multiple official packages using `pacman -Si`.
///
/// Inputs:
/// - `names`: Package names to query (must be official packages, not local).
///
/// Output:
/// - `HashMap` mapping package name to its dependency list (`Vec<String>`).
///
/// Details:
/// - Batches queries into chunks of 50 to avoid command-line length limits.
/// - Parses multi-package `pacman -Si` output (packages separated by blank lines).
pub(super) fn batch_fetch_official_deps(names: &[&str]) -> HashMap<String, Vec<String>> {
    // Chunk size chosen to stay well under command-line length limits.
    const BATCH_SIZE: usize = 50;
    let mut result_map = HashMap::new();
    for chunk in names.chunks(BATCH_SIZE) {
        let mut args = vec!["-Si"];
        args.extend(chunk.iter().copied());
        // C locale keeps field labels ("Name", "Depends On") untranslated for the parser.
        match Command::new("pacman")
            .args(&args)
            .env("LC_ALL", "C")
            .env("LANG", "C")
            .stdin(Stdio::null())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
        {
            Ok(output) if output.status.success() => {
                let text = String::from_utf8_lossy(&output.stdout);
                // Parse multi-package output: packages are separated by blank lines
                let mut package_blocks = Vec::new();
                let mut current_block = String::new();
                for line in text.lines() {
                    if line.trim().is_empty() {
                        if !current_block.is_empty() {
                            package_blocks.push(current_block.clone());
                            current_block.clear();
                        }
                    } else {
                        current_block.push_str(line);
                        current_block.push('\n');
                    }
                }
                // Flush the trailing block (output may not end with a blank line).
                if !current_block.is_empty() {
                    package_blocks.push(current_block);
                }
                // Parse each block to extract package name and dependencies
                for block in package_blocks {
                    let dep_names = parse_pacman_si_deps(&block);
                    // Extract package name from block
                    if let Some(name_line) =
                        block.lines().find(|l| l.trim_start().starts_with("Name"))
                        && let Some((_, name)) = name_line.split_once(':')
                    {
                        let pkg_name = name.trim().to_string();
                        result_map.insert(pkg_name, dep_names);
                    }
                }
            }
            _ => {
                // If batch fails, fall back to individual queries (but don't do it here to avoid recursion)
                // The caller will handle individual queries
                break;
            }
        }
    }
    result_map
}
/// What: Check if a command is available in PATH.
///
/// Inputs:
/// - `cmd`: Command name to check.
///
/// Output:
/// - Returns true if the command exists and can be executed.
///
/// Details:
/// - Spawns `<cmd> --version` with all stdio silenced; availability is judged
///   solely by whether the process could be launched at all.
fn is_command_available(cmd: &str) -> bool {
    let probe = Command::new(cmd)
        .args(["--version"])
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .output();
    probe.is_ok()
}
/// What: Check if a package name should be filtered out (virtual package or self-reference).
///
/// Inputs:
/// - `pkg_name`: Package name to check.
/// - `parent_name`: Name of the parent package (to detect self-references).
///
/// Output:
/// - Returns true if the package should be filtered out.
///
/// Details:
/// - Drops shared-library style virtual names (anything ending in `.so` or
///   containing `.so.` / `.so=`, case-insensitively) and dependencies on the
///   parent package itself.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
fn should_filter_dependency(pkg_name: &str, parent_name: &str) -> bool {
    if pkg_name == parent_name {
        return true;
    }
    let lowered = pkg_name.to_lowercase();
    lowered.ends_with(".so") || lowered.contains(".so.") || lowered.contains(".so=")
}
/// What: Convert a dependency spec into a `DependencyInfo` record.
///
/// Inputs:
/// - `dep_spec`: Dependency specification string (may include version requirements).
/// - `parent_name`: Name of the package that requires this dependency.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns Some(DependencyInfo) if the dependency should be included, None if filtered.
///
/// Details:
/// - Splits the spec into name and version requirement, drops virtual packages
///   and self-references, then fills in status, source, and system flags.
fn process_dependency_spec(
    dep_spec: &str,
    parent_name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Option<DependencyInfo> {
    let (pkg_name, version_req) = parse_dep_spec(dep_spec);
    // Drop self-references and shared-library style virtual names up front.
    if should_filter_dependency(&pkg_name, parent_name) {
        if pkg_name == parent_name {
            tracing::debug!("Skipping self-reference: {} == {}", pkg_name, parent_name);
        } else {
            tracing::debug!("Filtering out virtual package: {}", pkg_name);
        }
        return None;
    }
    let status = determine_status(&pkg_name, &version_req, installed, provided, upgradable);
    let (source, is_core) = determine_dependency_source(&pkg_name, installed);
    let is_system = is_core || is_system_package(&pkg_name);
    let info = DependencyInfo {
        name: pkg_name,
        version: version_req,
        status,
        source,
        required_by: vec![parent_name.to_string()],
        depends_on: Vec::new(),
        is_core,
        is_system,
    };
    Some(info)
}
/// What: Process a list of dependency specs into `DependencyInfo` records.
///
/// Inputs:
/// - `dep_specs`: Vector of dependency specification strings.
/// - `parent_name`: Name of the package that requires these dependencies.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records (filtered).
///
/// Details:
/// - Delegates each spec to `process_dependency_spec` and keeps only the
///   entries that survive filtering.
fn process_dependency_specs(
    dep_specs: Vec<String>,
    parent_name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Vec<DependencyInfo> {
    let mut resolved = Vec::with_capacity(dep_specs.len());
    for spec in dep_specs {
        if let Some(info) =
            process_dependency_spec(&spec, parent_name, installed, provided, upgradable)
        {
            resolved.push(info);
        }
    }
    resolved
}
/// What: Resolve dependencies for a local package using pacman -Qi.
///
/// Inputs:
/// - `name`: Package name.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records or an error string.
///
/// Details:
/// - Uses pacman -Qi to get dependency information for locally installed packages.
/// - A non-zero pacman exit is treated as "no dependencies" (`Ok(empty)`), not an error.
fn resolve_local_package_deps(
    name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Result<Vec<DependencyInfo>, String> {
    tracing::debug!("Running: pacman -Qi {} (local package)", name);
    // C locale keeps field labels stable for the parser.
    let output = Command::new("pacman")
        .args(["-Qi", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .map_err(|e| {
            tracing::error!("Failed to execute pacman -Qi {}: {}", name, e);
            format!("pacman -Qi failed: {e}")
        })?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        tracing::warn!(
            "pacman -Qi {} failed with status {:?}: {}",
            name,
            output.status.code(),
            stderr
        );
        // Soft failure: the package may simply be unknown to the local database.
        return Ok(Vec::new());
    }
    let text = String::from_utf8_lossy(&output.stdout);
    tracing::debug!("pacman -Qi {} output ({} bytes)", name, text.len());
    // Reuse the -Si parser on -Qi output (same "Depends On" field is read).
    let dep_names = parse_pacman_si_deps(&text);
    tracing::debug!(
        "Parsed {} dependency names from pacman -Qi output",
        dep_names.len()
    );
    Ok(process_dependency_specs(
        dep_names, name, installed, provided, upgradable,
    ))
}
/// What: Resolve dependencies for an official package using pacman -Si.
///
/// Inputs:
/// - `name`: Package name.
/// - `repo`: Repository name (for logging).
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records or an error string.
///
/// Details:
/// - Uses pacman -Si to get dependency information for official packages.
/// - Unlike the local-package path, a failed `pacman -Si` is a hard error here.
fn resolve_official_package_deps(
    name: &str,
    repo: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Result<Vec<DependencyInfo>, String> {
    tracing::debug!("Running: pacman -Si {} (repo: {})", name, repo);
    // C locale keeps field labels stable for the parser.
    let output = Command::new("pacman")
        .args(["-Si", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .map_err(|e| {
            tracing::error!("Failed to execute pacman -Si {}: {}", name, e);
            format!("pacman -Si failed: {e}")
        })?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        tracing::error!(
            "pacman -Si {} failed with status {:?}: {}",
            name,
            output.status.code(),
            stderr
        );
        return Err(format!("pacman -Si failed for {name}: {stderr}"));
    }
    let text = String::from_utf8_lossy(&output.stdout);
    tracing::debug!("pacman -Si {} output ({} bytes)", name, text.len());
    let dep_names = parse_pacman_si_deps(&text);
    tracing::debug!(
        "Parsed {} dependency names from pacman -Si output",
        dep_names.len()
    );
    Ok(process_dependency_specs(
        dep_names, name, installed, provided, upgradable,
    ))
}
/// What: Try to resolve dependencies using an AUR helper (paru or yay).
///
/// Inputs:
/// - `helper`: Helper command name ("paru" or "yay").
/// - `name`: Package name.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns Some(Vec<DependencyInfo>) if successful, None otherwise.
///
/// Details:
/// - Executes helper -Si command and parses the output for dependencies.
/// - An empty parsed dependency list is treated as a miss (`None`) so the
///   caller can fall through to the next resolution strategy.
fn try_helper_resolution(
    helper: &str,
    name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Option<Vec<DependencyInfo>> {
    tracing::debug!("Trying {} -Si {} for dependency resolution", helper, name);
    // C locale keeps the helper's field labels untranslated for the parser.
    let output = Command::new(helper)
        .args(["-Si", name])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()
        .ok()?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        tracing::debug!(
            "{} -Si {} failed (will try other methods): {}",
            helper,
            name,
            stderr.trim()
        );
        return None;
    }
    let text = String::from_utf8_lossy(&output.stdout);
    tracing::debug!("{} -Si {} output ({} bytes)", helper, name, text.len());
    // Helper -Si output is parsed with the same routine as pacman -Si output.
    let dep_names = parse_pacman_si_deps(&text);
    if dep_names.is_empty() {
        return None;
    }
    tracing::info!(
        "Using {} to resolve runtime dependencies for {} (will fetch .SRCINFO for build-time deps)",
        helper,
        name
    );
    let deps = process_dependency_specs(dep_names, name, installed, provided, upgradable);
    Some(deps)
}
/// What: Enhance dependency list with .SRCINFO data.
///
/// Inputs:
/// - `name`: Package name.
/// - `deps`: Existing dependency list to enhance.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns the enhanced dependency list.
///
/// Details:
/// - Fetches and parses .SRCINFO to add missing depends entries.
fn enhance_with_srcinfo(
name: &str,
mut deps: Vec<DependencyInfo>,
installed: &HashSet<String>,
provided: &HashSet<String>,
upgradable: &HashSet<String>,
) -> Vec<DependencyInfo> {
let srcinfo_text = match fetch_srcinfo(name, Some(10)) {
Ok(text) => text,
Err(e) => {
tracing::warn!(
"Could not fetch .SRCINFO for {}: {} (build-time dependencies will be missing)",
name,
e
);
return deps;
}
};
tracing::debug!("Successfully fetched .SRCINFO for {}", name);
let (srcinfo_depends, srcinfo_makedepends, srcinfo_checkdepends, srcinfo_optdepends) =
parse_srcinfo_deps(&srcinfo_text);
tracing::debug!(
"Parsed .SRCINFO: {} depends, {} makedepends, {} checkdepends, {} optdepends",
srcinfo_depends.len(),
srcinfo_makedepends.len(),
srcinfo_checkdepends.len(),
srcinfo_optdepends.len()
);
let existing_dep_names: HashSet<String> = deps.iter().map(|d| d.name.clone()).collect();
deps.extend(
srcinfo_depends
.into_iter()
.filter_map(|dep_spec| {
process_dependency_spec(&dep_spec, name, installed, provided, upgradable)
})
.filter(|dep_info| !existing_dep_names.contains(&dep_info.name)),
);
tracing::info!(
"Enhanced dependency list with .SRCINFO data: total {} dependencies",
deps.len()
);
deps
}
/// What: Fallback to cached PKGBUILD for dependency resolution.
///
/// Inputs:
/// - `name`: Package name.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records if `PKGBUILD` is found, empty vector otherwise.
///
/// Details:
/// - Last-resort offline path, consulted when .SRCINFO is unavailable and no
///   AUR helper produced usable results.
fn fallback_to_pkgbuild(
    name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Vec<DependencyInfo> {
    match get_pkgbuild_from_cache(name) {
        None => {
            tracing::debug!(
                "No cached PKGBUILD available for {} (offline, no dependencies resolved)",
                name
            );
            Vec::new()
        }
        Some(pkgbuild_text) => {
            tracing::info!(
                "Using cached PKGBUILD for {} to resolve dependencies (offline fallback)",
                name
            );
            // Only the runtime depends array is used from the PKGBUILD here.
            let (pkgbuild_depends, _, _, _) = parse_pkgbuild_deps(&pkgbuild_text);
            let resolved =
                process_dependency_specs(pkgbuild_depends, name, installed, provided, upgradable);
            tracing::info!(
                "Resolved {} dependencies from cached PKGBUILD for {}",
                resolved.len(),
                name
            );
            resolved
        }
    }
}
/// What: Resolve dependencies for an AUR package.
///
/// Inputs:
/// - `name`: Package name.
/// - `installed`: Set of locally installed packages.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records.
///
/// Details:
/// - Tries paru/yay first, then falls back to .SRCINFO and cached PKGBUILD.
fn resolve_aur_package_deps(
    name: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Vec<DependencyInfo> {
    tracing::debug!(
        "Attempting to resolve AUR package: {} (will skip if not found)",
        name
    );
    // Try paru first
    let (mut deps, mut used_helper) = if is_command_available("paru")
        && let Some(helper_deps) =
            try_helper_resolution("paru", name, installed, provided, upgradable)
    {
        (helper_deps, true)
    } else {
        (Vec::new(), false)
    };
    // Try yay if paru didn't work
    if !used_helper
        && is_command_available("yay")
        && let Some(helper_deps) =
            try_helper_resolution("yay", name, installed, provided, upgradable)
    {
        deps = helper_deps;
        used_helper = true;
    }
    if !used_helper {
        tracing::debug!(
            "Skipping AUR API for {} - paru/yay failed or not available (likely not a real package)",
            name
        );
    }
    // Always try to enhance with .SRCINFO
    // (helpers report runtime deps only; .SRCINFO supplies build-time entries).
    deps = enhance_with_srcinfo(name, deps, installed, provided, upgradable);
    // Fallback to PKGBUILD if no dependencies were found
    // (offline case: neither a helper nor .SRCINFO produced anything).
    if !used_helper && deps.is_empty() {
        deps = fallback_to_pkgbuild(name, installed, provided, upgradable);
    }
    deps
}
/// What: Resolve direct dependency metadata for a single package.
///
/// Inputs:
/// - `name`: Package identifier whose dependencies should be enumerated.
/// - `source`: Source enum describing whether the package is official or AUR.
/// - `installed`: Set of locally installed packages for status determination.
/// - `provided`: Set of package names provided by installed packages.
/// - `upgradable`: Set of packages flagged for upgrades, used to detect stale dependencies.
///
/// Output:
/// - Returns a vector of `DependencyInfo` records or an error string when resolution fails.
///
/// Details:
/// - Dispatches to the local, official-repo, or AUR resolver depending on the
///   source; the delegates filter out virtual entries and self references.
pub(super) fn resolve_package_deps(
    name: &str,
    source: &Source,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
    upgradable: &HashSet<String>,
) -> Result<Vec<DependencyInfo>, String> {
    let deps = match source {
        Source::Aur => resolve_aur_package_deps(name, installed, provided, upgradable),
        // Packages only present in the local database need -Qi, not -Si.
        Source::Official { repo, .. } if repo == "local" => {
            resolve_local_package_deps(name, installed, provided, upgradable)?
        }
        Source::Official { repo, .. } => {
            resolve_official_package_deps(name, repo, installed, provided, upgradable)?
        }
    };
    tracing::debug!("Resolved {} dependencies for package {}", deps.len(), name);
    Ok(deps)
}
/// What: Fetch conflicts for a package from pacman or AUR sources.
///
/// Inputs:
/// - `name`: Package identifier.
/// - `source`: Source enum describing whether the package is official or AUR.
///
/// Output:
/// - Returns a vector of conflicting package names, or empty vector on error.
///
/// Details:
/// - For official packages, uses `pacman -Si` to get conflicts.
/// - For AUR packages, tries paru/yay first, then falls back to .SRCINFO.
pub(super) fn fetch_package_conflicts(name: &str, source: &Source) -> Vec<String> {
match source {
Source::Official { repo, .. } => {
// Handle local packages specially - use pacman -Qi instead of -Si
if repo == "local" {
tracing::debug!("Running: pacman -Qi {} (local package, conflicts)", name);
if let Ok(output) = Command::new("pacman")
.args(["-Qi", name])
.env("LC_ALL", "C")
.env("LANG", "C")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
&& output.status.success()
{
let text = String::from_utf8_lossy(&output.stdout);
return parse_pacman_si_conflicts(&text);
}
return Vec::new();
}
// Use pacman -Si to get conflicts
tracing::debug!("Running: pacman -Si {} (conflicts)", name);
if let Ok(output) = Command::new("pacman")
.args(["-Si", name])
.env("LC_ALL", "C")
.env("LANG", "C")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
&& output.status.success()
{
let text = String::from_utf8_lossy(&output.stdout);
return parse_pacman_si_conflicts(&text);
}
Vec::new()
}
Source::Aur => {
// Try paru/yay first
let has_paru = Command::new("paru")
.args(["--version"])
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.output()
.is_ok();
let has_yay = Command::new("yay")
.args(["--version"])
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.output()
.is_ok();
if has_paru {
tracing::debug!("Trying paru -Si {} for conflicts", name);
if let Ok(output) = Command::new("paru")
.args(["-Si", name])
.env("LC_ALL", "C")
.env("LANG", "C")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
&& output.status.success()
{
let text = String::from_utf8_lossy(&output.stdout);
let conflicts = parse_pacman_si_conflicts(&text);
if !conflicts.is_empty() {
return conflicts;
}
}
}
if has_yay {
tracing::debug!("Trying yay -Si {} for conflicts", name);
if let Ok(output) = Command::new("yay")
.args(["-Si", name])
.env("LC_ALL", "C")
.env("LANG", "C")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
&& output.status.success()
{
let text = String::from_utf8_lossy(&output.stdout);
let conflicts = parse_pacman_si_conflicts(&text);
if !conflicts.is_empty() {
return conflicts;
}
}
}
// Fall back to .SRCINFO
if let Ok(srcinfo_text) = fetch_srcinfo(name, Some(10)) {
tracing::debug!("Using .SRCINFO for conflicts of {}", name);
let conflicts = parse_srcinfo_conflicts(&srcinfo_text);
if !conflicts.is_empty() {
return conflicts;
}
}
// Fall back to cached PKGBUILD if .SRCINFO didn't have conflicts or wasn't available
if let Some(pkgbuild_text) = get_pkgbuild_from_cache(name) {
tracing::debug!("Using cached PKGBUILD for conflicts of {}", name);
let conflicts = parse_pkgbuild_conflicts(&pkgbuild_text);
if !conflicts.is_empty() {
tracing::info!(
"Found {} conflicts from PKGBUILD for {}",
conflicts.len(),
name
);
return conflicts;
}
}
Vec::new()
}
}
}
#[cfg(all(test, unix))]
mod tests {
    use super::*;
    use std::fs;
    use std::io::Write;
    use std::os::unix::fs::PermissionsExt;
    use tempfile::tempdir;
    // RAII guard that prepends a directory to PATH and restores the previous
    // value (or a sane default) when dropped.
    struct PathGuard {
        original: Option<String>,
    }
    impl PathGuard {
        fn push(dir: &std::path::Path) -> Self {
            let original = std::env::var("PATH").ok();
            // If PATH is missing or empty, use a default system PATH
            let base_path = original
                .as_ref()
                .filter(|p| !p.is_empty())
                .map_or("/usr/bin:/bin:/usr/local/bin", String::as_str);
            let mut new_path = dir.display().to_string();
            new_path.push(':');
            new_path.push_str(base_path);
            unsafe {
                std::env::set_var("PATH", &new_path);
            }
            Self { original }
        }
    }
    impl Drop for PathGuard {
        fn drop(&mut self) {
            if let Some(ref orig) = self.original {
                // Only restore if the original PATH was valid (not empty)
                if orig.is_empty() {
                    // If original was empty, restore to a default system PATH
                    unsafe {
                        std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin");
                    }
                } else {
                    unsafe {
                        std::env::set_var("PATH", orig);
                    }
                }
            } else {
                // If PATH was missing, set a default system PATH
                unsafe {
                    std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin");
                }
            }
        }
    }
    // Write an executable shell stub named `name` into `dir` with mode 0755.
    fn write_executable(dir: &std::path::Path, name: &str, body: &str) {
        let path = dir.join(name);
        let mut file = fs::File::create(&path).expect("create stub");
        file.write_all(body.as_bytes()).expect("write stub");
        let mut perms = fs::metadata(&path).expect("meta").permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&path, perms).expect("chmod stub");
    }
    #[test]
    /// What: Confirm official dependency resolution consumes the pacman stub output and filters virtual entries.
    ///
    /// Inputs:
    /// - Staged `pacman` shell script that prints a crafted `-Si` response including `.so` and versioned dependencies.
    ///
    /// Output:
    /// - Dependency vector contains only the real packages with preserved version requirements and `required_by` set.
    ///
    /// Details:
    /// - Guards against regressions in parsing logic for the pacman path while isolating the function from system binaries via PATH overrides.
    fn resolve_official_uses_pacman_si_stub() {
        let dir = tempdir().expect("tempdir");
        let _test_guard = crate::global_test_mutex_lock();
        // Ensure PATH is in a clean state before modifying it
        if std::env::var("PATH").is_err() {
            unsafe { std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin") };
        }
        let _guard = PathGuard::push(dir.path());
        // Small delay to ensure PATH is propagated to child processes
        std::thread::sleep(std::time::Duration::from_millis(10));
        write_executable(
            dir.path(),
            "pacman",
            r#"#!/bin/sh
if [ "$1" = "--version" ]; then
exit 0
fi
if [ "$1" = "-Si" ]; then
cat <<'EOF'
Name : pkg
Depends On : dep1 libplaceholder.so other>=1.2
EOF
exit 0
fi
exit 1
"#,
        );
        let installed = HashSet::new();
        let upgradable = HashSet::new();
        let provided = HashSet::new();
        let deps = resolve_package_deps(
            "pkg",
            &Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
            &installed,
            &provided,
            &upgradable,
        )
        .expect("resolve succeeds");
        // libplaceholder.so must be filtered out as a virtual package.
        assert_eq!(deps.len(), 2);
        let mut names: Vec<&str> = deps.iter().map(|d| d.name.as_str()).collect();
        names.sort_unstable();
        assert_eq!(names, vec!["dep1", "other"]);
        let other = deps
            .iter()
            .find(|d| d.name == "other")
            .expect("other present");
        assert_eq!(other.version, ">=1.2");
        assert_eq!(other.required_by, vec!["pkg".to_string()]);
    }
    #[test]
    /// What: Verify the AUR branch leverages the helper stub output and skips self-referential dependencies.
    ///
    /// Inputs:
    /// - PATH-injected `paru` script responding to `--version` and `-Si`, plus inert stubs for `yay` and `pacman`.
    ///
    /// Output:
    /// - Dependency list reflects helper-derived entries while omitting the package itself.
    ///
    /// Details:
    /// - Ensures helper discovery short-circuits the API fallback and that parsing behaves consistently for AUR responses.
    fn resolve_aur_prefers_paru_stub_and_skips_self() {
        let dir = tempdir().expect("tempdir");
        let _test_guard = crate::global_test_mutex_lock();
        // Ensure PATH is in a clean state before modifying it
        if std::env::var("PATH").is_err() {
            unsafe { std::env::set_var("PATH", "/usr/bin:/bin:/usr/local/bin") };
        }
        let _guard = PathGuard::push(dir.path());
        // Enable curl PATH lookup override so our fake curl is used instead of /usr/bin/curl
        unsafe { std::env::set_var("PACSEA_CURL_PATH", "1") };
        // Small delay to ensure PATH is propagated to child processes
        std::thread::sleep(std::time::Duration::from_millis(10));
        write_executable(
            dir.path(),
            "paru",
            r#"#!/bin/sh
if [ "$1" = "--version" ]; then
exit 0
fi
if [ "$1" = "-Si" ]; then
cat <<'EOF'
Name : pkg
Depends On : pkg helper extra>=2.0
EOF
exit 0
fi
exit 1
"#,
        );
        write_executable(
            dir.path(),
            "yay",
            "#!/bin/sh\nif [ \"$1\" = \"--version\" ]; then exit 0; fi\nexit 1\n",
        );
        write_executable(
            dir.path(),
            "pacman",
            "#!/bin/sh\nif [ \"$1\" = \"--version\" ]; then exit 0; fi\nexit 1\n",
        );
        write_executable(
            dir.path(),
            "curl",
            "#!/bin/sh\nif [ \"$1\" = \"--version\" ]; then exit 0; fi\nexit 1\n",
        );
        let installed = HashSet::new();
        let upgradable = HashSet::new();
        let provided = HashSet::new();
        let deps = resolve_package_deps("pkg", &Source::Aur, &installed, &provided, &upgradable)
            .expect("resolve succeeds");
        // Clean up env var
        unsafe { std::env::remove_var("PACSEA_CURL_PATH") };
        // The self-reference "pkg" must be dropped; only the real deps remain.
        assert_eq!(deps.len(), 2);
        let mut names: Vec<&str> = deps.iter().map(|d| d.name.as_str()).collect();
        names.sort_unstable();
        assert_eq!(names, vec!["extra", "helper"]);
        let extra = deps
            .iter()
            .find(|d| d.name == "extra")
            .expect("extra present");
        assert_eq!(extra.version, ">=2.0");
        assert_eq!(extra.required_by, vec!["pkg".to_string()]);
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/query.rs | src/logic/deps/query.rs | //! Package querying functions for dependency resolution.
use std::collections::HashSet;
use std::hash::BuildHasher;
use std::process::{Command, Stdio};
/// What: Collect names of packages that have upgrades available via pacman.
///
/// Inputs:
/// - (none): Reads upgrade information by invoking `pacman -Qu`.
///
/// Output:
/// - Returns a set containing package names that pacman reports as upgradable.
///
/// Details:
/// - Each output line looks like "name old-version -> new-version" (or just
///   "name" for foreign packages); only the leading name token is kept.
/// - Command failures degrade to an empty set so dependency checks never block.
pub fn get_upgradable_packages() -> HashSet<String> {
    tracing::debug!("Running: pacman -Qu");
    let run = Command::new("pacman")
        .args(["-Qu"])
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output();
    let output = match run {
        Ok(o) => o,
        Err(e) => {
            tracing::debug!("Failed to execute pacman -Qu: {} (assuming no upgrades)", e);
            return HashSet::new();
        }
    };
    if !output.status.success() {
        // No upgradable packages or error - return empty set
        return HashSet::new();
    }
    let text = String::from_utf8_lossy(&output.stdout);
    // pacman -Qu outputs "name old-version -> new-version" or just "name" for AUR packages
    let mut packages = HashSet::new();
    for raw_line in text.lines() {
        let line = raw_line.trim();
        if line.is_empty() {
            continue;
        }
        // Keep only the package name: everything before the first space.
        let name = line.split(' ').next().unwrap_or(line).trim();
        packages.insert(name.to_string());
    }
    tracing::debug!(
        "Successfully retrieved {} upgradable packages",
        packages.len()
    );
    packages
}
/// What: Enumerate all currently installed packages on the system.
///
/// Inputs:
/// - (none): Invokes `pacman -Qq` to query the local database.
///
/// Output:
/// - Returns a set of package names installed on the machine; empty on failure.
///
/// Details:
/// - Uses pacman's quiet format (`-q`) so each output line is exactly one
///   package name; names are trimmed before being collected.
/// - Failures are logged via tracing and collapse to an empty set.
pub fn get_installed_packages() -> HashSet<String> {
    tracing::debug!("Running: pacman -Qq");
    let result = Command::new("pacman")
        .args(["-Qq"])
        // Force the C locale so output is stable regardless of user settings.
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output();
    let output = match result {
        Ok(o) => o,
        Err(e) => {
            tracing::error!("Failed to execute pacman -Qq: {}", e);
            return HashSet::new();
        }
    };
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        tracing::error!(
            "pacman -Qq failed with status {:?}: {}",
            output.status.code(),
            stderr
        );
        return HashSet::new();
    }
    let names: HashSet<String> = String::from_utf8_lossy(&output.stdout)
        .lines()
        .map(|line| line.trim().to_string())
        .collect();
    tracing::debug!(
        "Successfully retrieved {} installed packages",
        names.len()
    );
    names
}
/// What: Check whether `name` is supplied by an installed package (lazy check).
///
/// Inputs:
/// - `name`: Package name to check.
/// - `_installed`: Unused; retained so call sites keep their current signature.
///
/// Output:
/// - `Some(package_name)` naming the providing package, `None` otherwise.
///
/// Details:
/// - Shells out to `pacman -Qqo <name>` and takes the first line of output,
///   avoiding an upfront query of every installed package.
/// - NOTE(review): `pacman -Qo` resolves *file ownership* (falling back to a
///   PATH search for bare names), which is not the same as pacman's Provides
///   metadata — confirm this actually covers cases like `rustup` providing
///   `rust` on the targeted pacman version.
fn check_if_provided<S: BuildHasher + Default>(
    name: &str,
    _installed: &HashSet<String, S>,
) -> Option<String> {
    let result = Command::new("pacman")
        .args(["-Qqo", name])
        // Force the C locale so output is stable regardless of user settings.
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output();
    let output = match result {
        Ok(o) if o.status.success() => o,
        // Command failure or non-zero exit: nothing provides this name.
        _ => return None,
    };
    let text = String::from_utf8_lossy(&output.stdout);
    let providing_pkg = text.lines().next().map(|line| line.trim().to_string());
    if let Some(pkg) = providing_pkg.as_deref() {
        tracing::debug!("{} is provided by {}", name, pkg);
    }
    providing_pkg
}
/// What: Build an empty provides set (for API compatibility).
///
/// Inputs:
/// - `_installed`: Set of installed package names (unused, kept for API compatibility).
///
/// Output:
/// - Always an empty set.
///
/// Details:
/// - Historically this eagerly enumerated every provided name, which was very
///   slow; provides are now resolved on demand via `check_if_provided()`, so
///   callers get an empty set and the signature survives unchanged.
#[must_use]
pub fn get_provided_packages<S: BuildHasher + Default>(
    _installed: &HashSet<String, S>,
) -> HashSet<String> {
    HashSet::new()
}
/// What: Check if a package is installed or provided by an installed package.
///
/// Inputs:
/// - `name`: Package name to check.
/// - `installed`: Set of directly installed package names.
/// - `_provided`: Unused; retained for API compatibility with older callers.
///
/// Output:
/// - `true` when the package is directly installed, or when an installed
///   package provides it.
///
/// Details:
/// - The direct set-membership test runs first because it is free; only on a
///   miss do we pay for the lazy `pacman` lookup in `check_if_provided`.
#[must_use]
pub fn is_package_installed_or_provided<S: BuildHasher + Default>(
    name: &str,
    installed: &HashSet<String, S>,
    _provided: &HashSet<String, S>,
) -> bool {
    // Short-circuits: the subprocess-backed provider check is skipped for
    // anything already present in the installed set.
    installed.contains(name) || check_if_provided(name, installed).is_some()
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/deps/aur.rs | src/logic/deps/aur.rs | //! AUR-specific dependency resolution.
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sandbox/analyze.rs | src/logic/sandbox/analyze.rs | //! Analysis functions for comparing dependencies against host environment.
use crate::logic::sandbox::parse::parse_pkgbuild_deps;
use crate::logic::sandbox::parse::parse_srcinfo_deps;
use crate::logic::sandbox::types::{DependencyDelta, SandboxInfo};
use std::collections::HashSet;
use std::process::{Command, Stdio};
/// What: Analyze package dependencies from .SRCINFO content.
///
/// Inputs:
/// - `package_name`: AUR package name.
/// - `srcinfo_text`: .SRCINFO content.
/// - `installed`: Set of installed package names.
/// - `provided`: Set of package names provided by installed packages.
///
/// Output:
/// - `SandboxInfo` with one dependency delta per dependency class.
pub(super) fn analyze_package_from_srcinfo(
    package_name: &str,
    srcinfo_text: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
) -> SandboxInfo {
    // Split the four dependency classes out of the .SRCINFO text, then diff
    // each class against the host environment.
    let (depends, makedepends, checkdepends, optdepends) = parse_srcinfo_deps(srcinfo_text);
    SandboxInfo {
        package_name: package_name.to_string(),
        depends: analyze_dependencies(&depends, installed, provided),
        makedepends: analyze_dependencies(&makedepends, installed, provided),
        checkdepends: analyze_dependencies(&checkdepends, installed, provided),
        optdepends: analyze_dependencies(&optdepends, installed, provided),
    }
}
/// What: Analyze package dependencies from PKGBUILD content.
///
/// Inputs:
/// - `package_name`: AUR package name.
/// - `pkgbuild_text`: PKGBUILD content.
/// - `installed`: Set of installed package names.
/// - `provided`: Set of package names provided by installed packages.
///
/// Output:
/// - `SandboxInfo` with one dependency delta per dependency class.
pub(super) fn analyze_package_from_pkgbuild(
    package_name: &str,
    pkgbuild_text: &str,
    installed: &HashSet<String>,
    provided: &HashSet<String>,
) -> SandboxInfo {
    // Split the four dependency classes out of the PKGBUILD text, then diff
    // each class against the host environment.
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild_text);
    SandboxInfo {
        package_name: package_name.to_string(),
        depends: analyze_dependencies(&depends, installed, provided),
        makedepends: analyze_dependencies(&makedepends, installed, provided),
        checkdepends: analyze_dependencies(&checkdepends, installed, provided),
        optdepends: analyze_dependencies(&optdepends, installed, provided),
    }
}
/// What: Analyze dependencies against the host environment.
///
/// Inputs:
/// - `deps`: Vector of dependency specifications (may carry version requirements).
/// - `installed`: Set of installed package names.
/// - `provided`: Set of package names provided by installed packages.
///
/// Output:
/// - Vector of `DependencyDelta` entries showing status of each dependency.
///
/// Details:
/// - Installed local packages are skipped entirely; they are not relevant for
///   sandbox analysis.
pub(super) fn analyze_dependencies(
    deps: &[String],
    installed: &HashSet<String>,
    provided: &HashSet<String>,
) -> Vec<DependencyDelta> {
    let mut deltas = Vec::with_capacity(deps.len());
    for dep_spec in deps {
        // The spec may include a version requirement or description; strip it
        // down to the bare name for lookups.
        let pkg_name = extract_package_name(dep_spec);
        let is_installed = crate::logic::deps::is_package_installed_or_provided(
            &pkg_name, installed, provided,
        );
        // Skip local packages - they're not relevant for sandbox analysis.
        if is_installed && is_local_package(&pkg_name) {
            continue;
        }
        // The installed version is only queried when we know the package exists.
        let installed_version = if is_installed {
            crate::logic::deps::get_installed_version(&pkg_name).ok()
        } else {
            None
        };
        let version_satisfied = installed_version
            .as_ref()
            .is_some_and(|version| crate::logic::deps::version_satisfies(version, dep_spec));
        deltas.push(DependencyDelta {
            name: dep_spec.clone(),
            is_installed,
            installed_version,
            version_satisfied,
        });
    }
    deltas
}
/// What: Extract package name from a dependency specification.
///
/// Inputs:
/// - `dep_spec`: Dependency specification (e.g., "foo>=1.2", "bar", "baz: description").
///
/// Output:
/// - Package name without version requirements or description.
///
/// Details:
/// - optdepends entries use "package: description"; everything from the first
///   colon onward is dropped before version handling, which also disposes of
///   epoch colons inside version strings (e.g. "foo>=1:2.0").
/// - The name is then cut at the first comparison character (`>`, `<`, or `=`),
///   matching the approach used by `parse_pkgbuild_conflicts`. This covers
///   `>=`, `<=`, `==`, `>`, `<`, and plain `=` pins — the previous chained-split
///   implementation missed bare `=` (so "foo=1.2" was returned verbatim).
#[must_use]
pub fn extract_package_name(dep_spec: &str) -> String {
    // Handle optdepends format: "package: description"
    let name = dep_spec
        .split(':')
        .next()
        .unwrap_or(dep_spec)
        .trim();
    // Cut at the first version-operator character to drop any constraint.
    name.find(['>', '<', '='])
        .map_or(name, |pos| name[..pos].trim_end())
        .to_string()
}
/// What: Check if a package is a local package.
///
/// Inputs:
/// - `name`: Package name to check.
///
/// Output:
/// - `true` if the package is local, `false` otherwise.
///
/// Details:
/// - Shells out to `pacman -Qi <name>` and scans the output for a line starting
///   with "Repository"; a value of "local" (or an empty value) counts as local.
/// - Any pacman failure, or absence of a "Repository" line, yields `false`.
/// - NOTE(review): `pacman -Qi` output may not include a "Repository" field at
///   all (that field is typically part of `pacman -Si` output); if so, the loop
///   below never matches and this function always returns `false` for installed
///   packages — confirm against the pacman version being targeted.
fn is_local_package(name: &str) -> bool {
    let output = Command::new("pacman")
        .args(["-Qi", name])
        // Force the C locale so the "Repository" field name is not translated.
        .env("LC_ALL", "C")
        .env("LANG", "C")
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output();
    match output {
        Ok(output) if output.status.success() => {
            let text = String::from_utf8_lossy(&output.stdout);
            // Look for "Repository" field in pacman -Qi output
            for line in text.lines() {
                if line.starts_with("Repository")
                    && let Some(colon_pos) = line.find(':')
                {
                    let repo = line[colon_pos + 1..].trim().to_lowercase();
                    return repo == "local" || repo.is_empty();
                }
            }
        }
        _ => {
            // If we can't determine, assume it's not local
            return false;
        }
    }
    // Command succeeded but no "Repository" line matched: treat as not local.
    false
}
/// What: Get the set of installed packages.
///
/// Inputs:
/// - None.
///
/// Output:
/// - Set of installed package names.
///
/// Details:
/// - Thin delegation to [`crate::logic::deps::get_installed_packages`], kept
///   here so sandbox code has a module-local entry point instead of depending
///   on the deps module path directly.
pub(super) fn get_installed_packages() -> std::collections::HashSet<String> {
    crate::logic::deps::get_installed_packages()
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sandbox/parse.rs | src/logic/sandbox/parse.rs | //! Parsing functions for .SRCINFO and PKGBUILD dependency extraction.
/// What: Parse dependencies from .SRCINFO content.
///
/// Inputs:
/// - `srcinfo`: Raw .SRCINFO file content.
///
/// Output:
/// - Returns a tuple of (depends, makedepends, checkdepends, optdepends) vectors.
///
/// Details:
/// - .SRCINFO is a flat "key = value" format; lines that are blank, comments,
///   or not key/value pairs are skipped.
/// - Shared-object names (".so" suffixes/infixes) are virtual packages and are
///   filtered out.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
pub(super) fn parse_srcinfo_deps(
    srcinfo: &str,
) -> (Vec<String>, Vec<String>, Vec<String>, Vec<String>) {
    let mut depends = Vec::new();
    let mut makedepends = Vec::new();
    let mut checkdepends = Vec::new();
    let mut optdepends = Vec::new();
    for raw in srcinfo.lines() {
        let entry = raw.trim();
        if entry.is_empty() || entry.starts_with('#') {
            continue;
        }
        // .SRCINFO format: key = value
        let Some((key, value)) = entry.split_once('=') else {
            continue;
        };
        let (key, value) = (key.trim(), value.trim());
        // Filter out virtual packages (.so files).
        let lowered = value.to_lowercase();
        if lowered.ends_with(".so") || lowered.contains(".so.") || lowered.contains(".so=") {
            continue;
        }
        // Route the value to the vector matching its dependency class.
        let bucket = match key {
            "depends" => &mut depends,
            "makedepends" => &mut makedepends,
            "checkdepends" => &mut checkdepends,
            "optdepends" => &mut optdepends,
            _ => continue,
        };
        bucket.push(value.to_string());
    }
    (depends, makedepends, checkdepends, optdepends)
}
/// What: Parse dependencies from PKGBUILD content.
///
/// Inputs:
/// - `pkgbuild`: Raw PKGBUILD file content.
///
/// Output:
/// - Returns a tuple of (depends, makedepends, checkdepends, optdepends) vectors.
///
/// Details:
/// - Parses bash array syntax: `depends=('foo' 'bar>=1.2')` (single-line)
/// - Also handles `depends+=` patterns used in functions like `package()`
/// - Handles both quoted and unquoted dependencies
/// - Also handles multi-line arrays:
/// ```text
/// depends=(
///     'foo'
///     'bar>=1.2'
/// )
/// ```
/// - Filters out .so files (virtual packages) and invalid package names
/// - Only parses specific dependency fields (depends, makedepends, checkdepends, optdepends)
/// - NOTE(review): this is a line-oriented heuristic, not a bash parser —
///   arrays built dynamically (variables, command substitution) and
///   arch-specific fields such as `depends_x86_64` are not recognized; confirm
///   that is acceptable for callers.
pub fn parse_pkgbuild_deps(pkgbuild: &str) -> (Vec<String>, Vec<String>, Vec<String>, Vec<String>) {
    tracing::debug!(
        "parse_pkgbuild_deps: Starting parse, PKGBUILD length={}, first 500 chars: {:?}",
        pkgbuild.len(),
        pkgbuild.chars().take(500).collect::<String>()
    );
    let mut depends = Vec::new();
    let mut makedepends = Vec::new();
    let mut checkdepends = Vec::new();
    let mut optdepends = Vec::new();
    let lines: Vec<&str> = pkgbuild.lines().collect();
    tracing::debug!(
        "parse_pkgbuild_deps: Total lines in PKGBUILD: {}",
        lines.len()
    );
    // Manual index walk: the multi-line-array branch below consumes additional
    // lines by advancing `i` past the array body, so a plain `for` won't do.
    let mut i = 0;
    while i < lines.len() {
        let line = lines[i].trim();
        i += 1;
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        // Parse array declarations: depends=('foo' 'bar') or depends=( or depends+=('foo' 'bar')
        if let Some((key, value)) = line.split_once('=') {
            let key = key.trim();
            let value = value.trim();
            // Handle both depends= and depends+= patterns
            // (for `depends+=(...)`, split_once('=') leaves the '+' on the key).
            let base_key = key.strip_suffix('+').map_or(key, |stripped| stripped);
            // Only parse specific dependency fields, ignore other PKGBUILD fields
            if !matches!(
                base_key,
                "depends" | "makedepends" | "checkdepends" | "optdepends"
            ) {
                continue;
            }
            tracing::debug!(
                "parse_pkgbuild_deps: Found key-value pair: key='{}', base_key='{}', value='{}'",
                key,
                base_key,
                value.chars().take(100).collect::<String>()
            );
            // Check if this is an array declaration
            if value.starts_with('(') {
                tracing::debug!(
                    "parse_pkgbuild_deps: Detected array declaration for key='{}'",
                    key
                );
                // None from find_matching_closing_paren means the array is not
                // closed on this line, so we take the multi-line branch.
                let deps = find_matching_closing_paren(value).map_or_else(
                    || {
                        // Multi-line array: depends=(
                        //     'foo'
                        //     'bar'
                        // )
                        tracing::debug!("Parsing multi-line {} array", key);
                        let mut array_lines = Vec::new();
                        // Collect lines until we find the closing parenthesis
                        while i < lines.len() {
                            let next_line = lines[i].trim();
                            i += 1;
                            // Skip empty lines and comments
                            if next_line.is_empty() || next_line.starts_with('#') {
                                continue;
                            }
                            // Check if this line closes the array
                            if next_line == ")" {
                                break;
                            }
                            // Check if this line contains a closing parenthesis (may be on same line as content)
                            if let Some(paren_pos) = next_line.find(')') {
                                // Extract content before the closing paren
                                let content_before_paren = &next_line[..paren_pos].trim();
                                if !content_before_paren.is_empty() {
                                    array_lines.push((*content_before_paren).to_string());
                                }
                                break;
                            }
                            // Add this line to the array content
                            array_lines.push(next_line.to_string());
                        }
                        // Parse all collected lines as array content
                        // Ensure proper spacing between items (each line should be a separate item)
                        let array_content = array_lines
                            .iter()
                            .map(|s| s.trim())
                            .filter(|s| !s.is_empty())
                            .collect::<Vec<_>>()
                            .join(" ");
                        tracing::debug!(
                            "Collected {} lines for multi-line {} array: {}",
                            array_lines.len(),
                            key,
                            array_content
                        );
                        let parsed = parse_array_content(&array_content);
                        tracing::debug!("Parsed array content: {:?}", parsed);
                        parsed
                    },
                    |closing_paren_pos| {
                        // Single-line array (may have content after closing paren): depends=('foo' 'bar') or depends+=('foo' 'bar') other_code
                        let array_content = &value[1..closing_paren_pos];
                        tracing::debug!("Parsing single-line {} array: {}", key, array_content);
                        let parsed = parse_array_content(array_content);
                        tracing::debug!("Parsed array content: {:?}", parsed);
                        parsed
                    },
                );
                // Filter out invalid dependencies (.so files, invalid names, etc.)
                let filtered_deps: Vec<String> = deps
                    .into_iter()
                    .filter_map(|dep| {
                        let dep_trimmed = dep.trim();
                        if dep_trimmed.is_empty() {
                            return None;
                        }
                        // Filter out .so files (virtual packages)
                        let dep_lower = dep_trimmed.to_lowercase();
                        if std::path::Path::new(&dep_lower)
                            .extension()
                            .is_some_and(|ext| ext.eq_ignore_ascii_case("so"))
                            || dep_lower.contains(".so.")
                            || dep_lower.contains(".so=")
                        {
                            return None;
                        }
                        // Filter out names ending with ) - this is a parsing error
                        // But first check if it's actually a valid name with version constraint ending in )
                        // like "package>=1.0)" which would be a parsing error
                        if dep_trimmed.ends_with(')') {
                            // Check if it might be a valid version constraint that accidentally ends with )
                            // If it contains version operators before the ), it's likely a parsing error
                            if dep_trimmed.contains(">=")
                                || dep_trimmed.contains("<=")
                                || dep_trimmed.contains("==")
                            {
                                // This looks like "package>=1.0)" which is invalid
                                return None;
                            }
                            // Otherwise, it might be "package)" which is also invalid
                            return None;
                        }
                        // Filter out names that don't look like package names
                        // Package names should start with alphanumeric or underscore
                        let first_char = dep_trimmed.chars().next().unwrap_or(' ');
                        if !first_char.is_alphanumeric() && first_char != '_' {
                            return None;
                        }
                        // Filter out names that are too short
                        if dep_trimmed.len() < 2 {
                            return None;
                        }
                        // Filter out names containing invalid characters (but allow version operators)
                        // Allow: alphanumeric, dash, underscore, and version operators (>=, <=, ==, >, <)
                        let has_valid_chars = dep_trimmed
                            .chars()
                            .any(|c| c.is_alphanumeric() || c == '-' || c == '_');
                        if !has_valid_chars {
                            return None;
                        }
                        // Note: unlike parse_pkgbuild_conflicts, the version
                        // requirement is kept attached to the name here.
                        Some(dep_trimmed.to_string())
                    })
                    .collect();
                // Add dependencies to the appropriate vector (using base_key to handle both = and +=)
                match base_key {
                    "depends" => depends.extend(filtered_deps),
                    "makedepends" => makedepends.extend(filtered_deps),
                    "checkdepends" => checkdepends.extend(filtered_deps),
                    "optdepends" => optdepends.extend(filtered_deps),
                    _ => {}
                }
            }
        }
    }
    (depends, makedepends, checkdepends, optdepends)
}
/// What: Find the position of the matching closing parenthesis in a string.
///
/// Inputs:
/// - `s`: String starting with an opening parenthesis.
///
/// Output:
/// - `Some(position)` if a matching closing parenthesis is found, `None` otherwise.
///
/// Details:
/// - Tracks nesting depth and ignores parentheses inside single- or
///   double-quoted runs.
fn find_matching_closing_paren(s: &str) -> Option<usize> {
    let mut open_count: i32 = 0;
    // Some(q) while inside a quoted run opened by `q`.
    let mut active_quote: Option<char> = None;
    for (idx, ch) in s.char_indices() {
        if let Some(q) = active_quote {
            // Only the matching quote character ends the quoted run.
            if ch == q {
                active_quote = None;
            }
            continue;
        }
        match ch {
            '\'' | '"' => active_quote = Some(ch),
            '(' => open_count += 1,
            ')' => {
                open_count -= 1;
                if open_count == 0 {
                    return Some(idx);
                }
            }
            _ => {}
        }
    }
    None
}
/// What: Parse quoted and unquoted strings from bash array content.
///
/// Inputs:
/// - `content`: Array content string (e.g., "'foo' 'bar>=1.2'" or "libcairo.so libdbus-1.so").
///
/// Output:
/// - Vector of dependency strings.
///
/// Details:
/// - Handles both quoted ('foo') and unquoted (foo) values; unquoted values are
///   terminated by whitespace, quoted values by the matching quote character.
/// - The non-matching quote character inside a quoted run is kept literally.
fn parse_array_content(content: &str) -> Vec<String> {
    let mut items = Vec::new();
    let mut buf = String::new();
    // Some(q) while inside a quoted run opened by `q`.
    let mut active_quote: Option<char> = None;
    for ch in content.chars() {
        match active_quote {
            Some(q) => {
                if ch == q {
                    // Closing quote ends the current item (empty quotes yield nothing).
                    if !buf.is_empty() {
                        items.push(std::mem::take(&mut buf));
                    }
                    active_quote = None;
                } else {
                    buf.push(ch);
                }
            }
            None => {
                if ch == '\'' || ch == '"' {
                    active_quote = Some(ch);
                } else if ch.is_whitespace() {
                    // Whitespace outside quotes terminates an unquoted value.
                    if !buf.is_empty() {
                        items.push(std::mem::take(&mut buf));
                    }
                } else {
                    buf.push(ch);
                }
            }
        }
    }
    // Flush an unclosed quote or a trailing unquoted value.
    if !buf.is_empty() {
        items.push(buf);
    }
    items
}
/// What: Parse conflicts from PKGBUILD content.
///
/// Inputs:
/// - `pkgbuild`: Raw PKGBUILD file content.
///
/// Output:
/// - Returns a vector of conflicting package names.
///
/// Details:
/// - Parses bash array syntax: `conflicts=('foo' 'bar')` (single-line)
/// - Also handles `conflicts+=` patterns used in functions like `package()`
/// - Handles both quoted and unquoted conflicts
/// - Also handles multi-line arrays:
/// ```text
/// conflicts=(
///     'foo'
///     'bar'
/// )
/// ```
/// - Filters out .so files (virtual packages) and invalid package names
/// - Extracts package names from version constraints (e.g., "jujutsu-git>=1.0" -> "jujutsu-git");
///   this differs from `parse_pkgbuild_deps`, which keeps the constraint attached.
/// - NOTE(review): the line walker below duplicates the one in
///   `parse_pkgbuild_deps`; a shared helper would keep the two from drifting.
pub fn parse_pkgbuild_conflicts(pkgbuild: &str) -> Vec<String> {
    tracing::debug!(
        "parse_pkgbuild_conflicts: Starting parse, PKGBUILD length={}",
        pkgbuild.len()
    );
    let mut conflicts = Vec::new();
    let lines: Vec<&str> = pkgbuild.lines().collect();
    // Manual index walk: the multi-line-array branch advances `i` past the
    // array body, so a plain `for` won't do.
    let mut i = 0;
    while i < lines.len() {
        let line = lines[i].trim();
        i += 1;
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        // Parse array declarations: conflicts=('foo' 'bar') or conflicts=( or conflicts+=('foo' 'bar')
        if let Some((key, value)) = line.split_once('=') {
            let key = key.trim();
            let value = value.trim();
            // Handle both conflicts= and conflicts+= patterns
            let base_key = key.strip_suffix('+').map_or(key, |stripped| stripped);
            // Only parse conflicts field
            if base_key != "conflicts" {
                continue;
            }
            tracing::debug!(
                "parse_pkgbuild_conflicts: Found key-value pair: key='{}', base_key='{}', value='{}'",
                key,
                base_key,
                value.chars().take(100).collect::<String>()
            );
            // Check if this is an array declaration
            if value.starts_with('(') {
                tracing::debug!(
                    "parse_pkgbuild_conflicts: Detected array declaration for key='{}'",
                    key
                );
                // None from find_matching_closing_paren means the array is not
                // closed on this line, so we take the multi-line branch.
                let conflict_deps = find_matching_closing_paren(value).map_or_else(
                    || {
                        // Multi-line array: conflicts=(
                        //     'foo'
                        //     'bar'
                        // )
                        tracing::debug!("Parsing multi-line {} array", key);
                        let mut array_lines = Vec::new();
                        // Collect lines until we find the closing parenthesis
                        while i < lines.len() {
                            let next_line = lines[i].trim();
                            i += 1;
                            // Skip empty lines and comments
                            if next_line.is_empty() || next_line.starts_with('#') {
                                continue;
                            }
                            // Check if this line closes the array
                            if next_line == ")" {
                                break;
                            }
                            // Check if this line contains a closing parenthesis (may be on same line as content)
                            if let Some(paren_pos) = next_line.find(')') {
                                // Extract content before the closing paren
                                let content_before_paren = &next_line[..paren_pos].trim();
                                if !content_before_paren.is_empty() {
                                    array_lines.push((*content_before_paren).to_string());
                                }
                                break;
                            }
                            // Add this line to the array content
                            array_lines.push(next_line.to_string());
                        }
                        // Parse all collected lines as array content
                        let array_content = array_lines
                            .iter()
                            .map(|s| s.trim())
                            .filter(|s| !s.is_empty())
                            .collect::<Vec<_>>()
                            .join(" ");
                        tracing::debug!(
                            "Collected {} lines for multi-line {} array: {}",
                            array_lines.len(),
                            key,
                            array_content
                        );
                        let parsed = parse_array_content(&array_content);
                        tracing::debug!("Parsed array content: {:?}", parsed);
                        parsed
                    },
                    |closing_paren_pos| {
                        // Single-line array (may have content after closing paren): conflicts=('foo' 'bar') or conflicts+=('foo' 'bar') other_code
                        let array_content = &value[1..closing_paren_pos];
                        tracing::debug!("Parsing single-line {} array: {}", key, array_content);
                        let parsed = parse_array_content(array_content);
                        tracing::debug!("Parsed array content: {:?}", parsed);
                        parsed
                    },
                );
                // Filter out invalid conflicts (.so files, invalid names, etc.)
                let filtered_conflicts: Vec<String> = conflict_deps
                    .into_iter()
                    .filter_map(|conflict| {
                        let conflict_trimmed = conflict.trim();
                        if conflict_trimmed.is_empty() {
                            return None;
                        }
                        // Filter out .so files (virtual packages)
                        let conflict_lower = conflict_trimmed.to_lowercase();
                        if std::path::Path::new(&conflict_lower)
                            .extension()
                            .is_some_and(|ext| ext.eq_ignore_ascii_case("so"))
                            || conflict_lower.contains(".so.")
                            || conflict_lower.contains(".so=")
                        {
                            return None;
                        }
                        // Filter out names ending with ) - this is a parsing error
                        if conflict_trimmed.ends_with(')') {
                            return None;
                        }
                        // Filter out names that don't look like package names
                        let first_char = conflict_trimmed.chars().next().unwrap_or(' ');
                        if !first_char.is_alphanumeric() && first_char != '_' {
                            return None;
                        }
                        // Filter out names that are too short
                        if conflict_trimmed.len() < 2 {
                            return None;
                        }
                        // Filter out names containing invalid characters (but allow version operators)
                        let has_valid_chars = conflict_trimmed
                            .chars()
                            .any(|c| c.is_alphanumeric() || c == '-' || c == '_');
                        if !has_valid_chars {
                            return None;
                        }
                        // Extract package name (remove version constraints if present)
                        // Use a simple approach: split on version operators
                        let pkg_name = conflict_trimmed.find(['>', '<', '=']).map_or_else(
                            || conflict_trimmed.to_string(),
                            |pos| conflict_trimmed[..pos].trim().to_string(),
                        );
                        if pkg_name.is_empty() {
                            None
                        } else {
                            Some(pkg_name)
                        }
                    })
                    .collect();
                // Add conflicts to the vector (using base_key to handle both = and +=)
                conflicts.extend(filtered_conflicts);
            }
        }
    }
    conflicts
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sandbox/tests.rs | src/logic/sandbox/tests.rs | //! Unit tests for sandbox parsing functions.
use crate::logic::sandbox::parse::{parse_pkgbuild_conflicts, parse_pkgbuild_deps};
#[test]
/// What: Test parsing dependencies from PKGBUILD with depends= syntax.
///
/// Inputs:
/// - PKGBUILD with standard depends= array.
///
/// Output:
/// - Correctly parsed dependencies.
///
/// Details:
/// - Validates basic dependency parsing works.
fn test_parse_pkgbuild_deps_basic() {
    let pkgbuild = r"
pkgname=test-package
pkgver=1.0.0
depends=('foo' 'bar>=1.2')
makedepends=('make' 'gcc')
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // Membership (not order) is asserted; version constraints stay attached.
    assert_eq!(depends.len(), 2);
    assert!(depends.contains(&"foo".to_string()));
    assert!(depends.contains(&"bar>=1.2".to_string()));
    assert_eq!(makedepends.len(), 2);
    assert!(makedepends.contains(&"make".to_string()));
    assert!(makedepends.contains(&"gcc".to_string()));
    // The remaining dependency classes must stay empty.
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
#[test]
/// What: Test parsing dependencies with depends+= syntax in `package()` function.
///
/// Inputs:
/// - PKGBUILD with depends+= inside `package()` function.
///
/// Output:
/// - Correctly parsed dependencies from depends+=, filtering out .so files.
///
/// Details:
/// - Validates that depends+= patterns are detected and parsed.
/// - Validates that .so files (virtual packages) are filtered out.
fn test_parse_pkgbuild_deps_append() {
    let pkgbuild = r#"
pkgname=test-package
pkgver=1.0.0
package() {
depends+=(foo bar)
cd $_pkgname
make DESTDIR="$pkgdir" PREFIX=/usr install
}
"#;
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // The += form inside a function body is picked up like a top-level array.
    assert_eq!(depends.len(), 2);
    assert!(depends.contains(&"foo".to_string()));
    assert!(depends.contains(&"bar".to_string()));
    assert_eq!(makedepends.len(), 0);
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
#[test]
/// What: Test parsing unquoted dependencies and filtering .so files.
///
/// Inputs:
/// - PKGBUILD with unquoted dependencies including .so files.
///
/// Output:
/// - Correctly parsed unquoted dependencies, with .so files filtered out.
///
/// Details:
/// - Validates that unquoted dependencies are parsed correctly.
/// - Validates that .so files (virtual packages) are filtered out.
fn test_parse_pkgbuild_deps_unquoted() {
    let pkgbuild = r"
pkgname=test-package
depends=(foo bar libcairo.so libdbus-1.so)
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // .so files should be filtered out
    assert_eq!(depends.len(), 2);
    assert!(depends.contains(&"foo".to_string()));
    assert!(depends.contains(&"bar".to_string()));
    assert_eq!(makedepends.len(), 0);
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
#[test]
/// What: Test parsing multi-line dependency arrays.
///
/// Inputs:
/// - PKGBUILD with multi-line depends array.
///
/// Output:
/// - Correctly parsed dependencies from multi-line array.
///
/// Details:
/// - Validates multi-line array parsing works correctly.
fn test_parse_pkgbuild_deps_multiline() {
    let pkgbuild = r"
pkgname=test-package
depends=(
'foo'
'bar>=1.2'
'baz'
)
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // All three entries between '(' and the lone ')' line must be collected.
    assert_eq!(depends.len(), 3);
    assert!(depends.contains(&"foo".to_string()));
    assert!(depends.contains(&"bar>=1.2".to_string()));
    assert!(depends.contains(&"baz".to_string()));
    assert_eq!(makedepends.len(), 0);
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
#[test]
/// What: Test parsing makedepends+= syntax.
///
/// Inputs:
/// - PKGBUILD with makedepends+= pattern.
///
/// Output:
/// - Correctly parsed makedepends from += pattern.
///
/// Details:
/// - Validates that makedepends+= is also handled.
fn test_parse_pkgbuild_deps_makedepends_append() {
    let pkgbuild = r"
pkgname=test-package
build() {
makedepends+=(cmake ninja)
cmake -B build
}
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // The += form applies to makedepends the same way as to depends.
    assert_eq!(makedepends.len(), 2);
    assert!(makedepends.contains(&"cmake".to_string()));
    assert!(makedepends.contains(&"ninja".to_string()));
    assert_eq!(depends.len(), 0);
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
#[test]
/// What: Test parsing jujutsu-git package scenario with various edge cases.
///
/// Inputs:
/// - PKGBUILD similar to jujutsu-git with multi-line arrays, .so files, and other fields.
///
/// Output:
/// - Correctly parsed dependencies, filtering out .so files and other PKGBUILD fields.
///
/// Details:
/// - Validates that other PKGBUILD fields (arch, pkgdesc, url, license, source) are ignored.
/// - Validates that .so files are filtered out.
/// - Validates that multi-line arrays are parsed correctly.
/// - The `libssh2.so)` line closes the makedepends array mid-line, so the
///   following `openssh`, `git)`, and `cargo` lines fall outside it.
fn test_parse_pkgbuild_deps_jujutsu_git_scenario() {
    let pkgbuild = r"
pkgname=jujutsu-git
pkgver=0.1.0
pkgdesc=Git-compatible VCS that is both simple and powerful
url=https://github.com/martinvonz/jj
license=(Apache-2.0)
arch=(i686 x86_64 armv6h armv7h)
depends=(
glibc
libc.so
libm.so
)
makedepends=(
libgit2
libgit2.so
libssh2
libssh2.so)
openssh
git)
cargo
checkdepends=()
optdepends=()
source=($pkgname::git+$url)
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // depends should only contain glibc, .so files filtered out
    assert_eq!(depends.len(), 1);
    assert!(depends.contains(&"glibc".to_string()));
    // makedepends should contain libgit2, libssh2
    // .so files are filtered out
    // Note: openssh, git), and cargo are after the array closes, so they're not part of makedepends
    assert_eq!(makedepends.len(), 2);
    assert!(makedepends.contains(&"libgit2".to_string()));
    assert!(makedepends.contains(&"libssh2".to_string()));
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
#[test]
/// What: Test that other PKGBUILD fields are ignored.
///
/// Inputs:
/// - PKGBUILD with various non-dependency fields.
///
/// Output:
/// - Only dependency fields are parsed, other fields are ignored.
///
/// Details:
/// - Validates that fields like arch, pkgdesc, url, license, source are not parsed as dependencies.
fn test_parse_pkgbuild_deps_ignore_other_fields() {
    let pkgbuild = r"
pkgname=test-package
pkgver=1.0.0
pkgdesc=Test package description
url=https://example.com
license=(MIT)
arch=(x86_64)
source=($pkgname-$pkgver.tar.gz)
depends=(foo bar)
makedepends=(make)
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // Only depends and makedepends should be parsed
    assert_eq!(depends.len(), 2);
    assert!(depends.contains(&"foo".to_string()));
    assert!(depends.contains(&"bar".to_string()));
    assert_eq!(makedepends.len(), 1);
    assert!(makedepends.contains(&"make".to_string()));
    assert_eq!(checkdepends.len(), 0);
    assert_eq!(optdepends.len(), 0);
}
/// What: Ensure malformed dependency names are filtered out.
///
/// Inputs:
/// - Single-line depends array mixing valid and invalid entries.
///
/// Output:
/// - Only well-formed names (version constraints allowed) survive.
///
/// Details:
/// - Entries ending with `)`, starting with `=`, or that are too short are dropped.
#[test]
fn test_parse_pkgbuild_deps_filter_invalid_names() {
    // Single-line array form keeps the fixture unambiguous.
    let pkgbuild = r"
depends=('valid-package' 'invalid)' '=invalid' 'a' 'valid>=1.0')
";
    let (depends, makedepends, checkdepends, optdepends) = parse_pkgbuild_deps(pkgbuild);
    // 'invalid)' (trailing paren), '=invalid' (leading =) and 'a' (too short)
    // must all be rejected; the versioned entry is kept as-is.
    let mut kept = depends.clone();
    kept.sort();
    assert_eq!(kept, ["valid-package", "valid>=1.0"]);
    assert!(makedepends.is_empty());
    assert!(checkdepends.is_empty());
    assert!(optdepends.is_empty());
}
/// What: Parse a simple `conflicts=` declaration.
///
/// Inputs:
/// - PKGBUILD with a standard one-element conflicts array.
///
/// Output:
/// - The single declared conflict.
///
/// Details:
/// - Mirrors the real-world jujutsu / jujutsu-git conflict pair.
#[test]
fn test_parse_pkgbuild_conflicts_basic() {
    let pkgbuild = r"
pkgname=jujutsu-git
pkgver=0.1.0
conflicts=('jujutsu')
";
    let conflicts = parse_pkgbuild_conflicts(pkgbuild);
    assert_eq!(conflicts, ["jujutsu"]);
}
/// What: Parse conflicts spread over a multi-line array.
///
/// Inputs:
/// - PKGBUILD whose conflicts array spans several lines.
///
/// Output:
/// - Both declared conflicts.
///
/// Details:
/// - Confirms the multi-line array handling also applies to `conflicts=`.
#[test]
fn test_parse_pkgbuild_conflicts_multiline() {
    let pkgbuild = r"
pkgname=pacsea-git
pkgver=0.1.0
conflicts=(
'pacsea'
'pacsea-bin'
)
";
    let mut conflicts = parse_pkgbuild_conflicts(pkgbuild);
    conflicts.sort();
    assert_eq!(conflicts, ["pacsea", "pacsea-bin"]);
}
/// What: Parse conflicts that carry version constraints.
///
/// Inputs:
/// - PKGBUILD whose conflicts include `<` / `>=` version bounds.
///
/// Output:
/// - Bare package names with the version constraints stripped.
///
/// Details:
/// - Only the name portion before the comparison operator is retained.
#[test]
fn test_parse_pkgbuild_conflicts_with_versions() {
    let pkgbuild = r"
pkgname=test-package
conflicts=('old-pkg<2.0' 'new-pkg>=3.0')
";
    let mut conflicts = parse_pkgbuild_conflicts(pkgbuild);
    conflicts.sort();
    assert_eq!(conflicts, ["new-pkg", "old-pkg"]);
}
/// What: Ensure virtual library (.so) entries are dropped from conflicts.
///
/// Inputs:
/// - PKGBUILD whose conflicts mix regular names with .so entries.
///
/// Output:
/// - Only the non-virtual package names.
///
/// Details:
/// - Covers both a plain `.so` name and one with a soname version suffix.
#[test]
fn test_parse_pkgbuild_conflicts_filter_so() {
    let pkgbuild = r"
pkgname=test-package
conflicts=('foo' 'libcairo.so' 'bar' 'libdbus-1.so=1-64')
";
    let mut conflicts = parse_pkgbuild_conflicts(pkgbuild);
    conflicts.sort();
    // Both .so entries are virtual packages and must be filtered out.
    assert_eq!(conflicts, ["bar", "foo"]);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sandbox/fetch.rs | src/logic/sandbox/fetch.rs | //! Fetching functions for AUR package metadata.
// Re-export async fetch function for convenience
pub use crate::util::srcinfo::fetch_srcinfo_async;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sandbox/types.rs | src/logic/sandbox/types.rs | //! Type definitions for sandbox analysis.
/// What: Information about a dependency's status in the host environment.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DependencyDelta {
    /// Package name as declared in the PKGBUILD/.SRCINFO
    /// (may include a version requirement, e.g. `foo>=1.2`).
    pub name: String,
    /// Whether this dependency is installed on the host.
    pub is_installed: bool,
    /// Installed version (if available).
    pub installed_version: Option<String>,
    /// Whether the installed version satisfies the requirement.
    /// NOTE(review): the value when the package is not installed is decided by
    /// the analyzer that fills this struct — confirm against `analyze.rs`.
    pub version_satisfied: bool,
}
/// What: Sandbox analysis result for an AUR package.
///
/// Each list mirrors one dependency array from the package's PKGBUILD/.SRCINFO,
/// with every entry annotated against the host environment.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct SandboxInfo {
    /// Package name the analysis was run for.
    pub package_name: String,
    /// Runtime dependencies (`depends`).
    pub depends: Vec<DependencyDelta>,
    /// Build-time dependencies (`makedepends`).
    pub makedepends: Vec<DependencyDelta>,
    /// Test dependencies (`checkdepends`).
    pub checkdepends: Vec<DependencyDelta>,
    /// Optional dependencies (`optdepends`).
    pub optdepends: Vec<DependencyDelta>,
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/logic/sandbox/mod.rs | src/logic/sandbox/mod.rs | //! AUR sandbox preflight checks for build dependencies.
mod analyze;
mod fetch;
mod parse;
mod types;
#[cfg(test)]
mod tests;
pub use analyze::extract_package_name;
pub use parse::{parse_pkgbuild_conflicts, parse_pkgbuild_deps};
pub use types::{DependencyDelta, SandboxInfo};
use crate::logic::sandbox::analyze::{
analyze_package_from_pkgbuild, analyze_package_from_srcinfo, get_installed_packages,
};
use crate::logic::sandbox::fetch::fetch_srcinfo_async;
use crate::state::types::PackageItem;
use futures::stream::{FuturesUnordered, StreamExt};
/// What: Build a placeholder `SandboxInfo` carrying only the package name.
///
/// Inputs:
/// - `name`: Package name to record.
///
/// Output:
/// - `SandboxInfo` whose dependency lists are all empty.
///
/// Details:
/// - Serves as the fallback when neither .SRCINFO nor PKGBUILD could be
///   analyzed, so the package still appears in the results.
const fn create_empty_sandbox_info(name: String) -> SandboxInfo {
    SandboxInfo {
        depends: Vec::new(),
        makedepends: Vec::new(),
        checkdepends: Vec::new(),
        optdepends: Vec::new(),
        package_name: name,
    }
}
/// What: Handle .SRCINFO analysis for a package.
///
/// Inputs:
/// - `name`: Package name.
/// - `srcinfo_text`: .SRCINFO file content.
/// - `installed`: Installed packages set.
/// - `provided`: Provided packages set.
///
/// Output:
/// - `SandboxInfo` describing the package's dependency deltas.
///
/// Details:
/// - Thin, infallible wrapper over `analyze_package_from_srcinfo`; error
///   handling for the fetch happens in the caller, not here.
fn handle_srcinfo_analysis(
    name: &str,
    srcinfo_text: &str,
    installed: &std::collections::HashSet<String>,
    provided: &std::collections::HashSet<String>,
) -> SandboxInfo {
    analyze_package_from_srcinfo(name, srcinfo_text, installed, provided)
}
/// What: Analyze a package's dependencies from raw PKGBUILD text.
///
/// Inputs:
/// - `name`: Package name.
/// - `pkgbuild_text`: PKGBUILD file content.
/// - `installed`: Installed packages set.
/// - `provided`: Provided packages set.
///
/// Output:
/// - `SandboxInfo` built from the parsed PKGBUILD.
///
/// Details:
/// - Fallback path used when .SRCINFO is unavailable; logs how many
///   dependencies were found in each category.
fn handle_pkgbuild_analysis(
    name: &str,
    pkgbuild_text: &str,
    installed: &std::collections::HashSet<String>,
    provided: &std::collections::HashSet<String>,
) -> SandboxInfo {
    let info = analyze_package_from_pkgbuild(name, pkgbuild_text, installed, provided);
    // Per-category counts, summed for the log line below.
    let counts = [
        info.depends.len(),
        info.makedepends.len(),
        info.checkdepends.len(),
        info.optdepends.len(),
    ];
    tracing::info!(
        "Parsed PKGBUILD for {}: {} total dependencies (depends={}, makedepends={}, checkdepends={}, optdepends={})",
        name,
        counts.iter().sum::<usize>(),
        counts[0],
        counts[1],
        counts[2],
        counts[3]
    );
    info
}
/// What: Resolve sandbox information for one AUR package.
///
/// Inputs:
/// - `name`: Package name.
/// - `client`: HTTP client used for the .SRCINFO request.
/// - `installed`: Installed packages set.
/// - `provided`: Provided packages set.
///
/// Output:
/// - `Some(SandboxInfo)`; an empty info is returned when every fetch fails.
///
/// Details:
/// - Prefers .SRCINFO; on failure falls back to fetching the PKGBUILD on a
///   blocking task, and finally to an empty result so the package is kept.
async fn process_sandbox_package(
    name: String,
    client: reqwest::Client,
    installed: std::collections::HashSet<String>,
    provided: std::collections::HashSet<String>,
) -> Option<SandboxInfo> {
    // Fast path: .SRCINFO is authoritative when it can be fetched.
    let srcinfo_err = match fetch_srcinfo_async(&client, &name).await {
        Ok(text) => {
            return Some(handle_srcinfo_analysis(&name, &text, &installed, &provided));
        }
        Err(e) => e,
    };
    tracing::debug!(
        "Failed to fetch .SRCINFO for {}: {}, trying PKGBUILD",
        name,
        srcinfo_err
    );
    // The PKGBUILD fetch is blocking I/O, so keep it off the async executor.
    let fetch_name = name.clone();
    let fetched =
        tokio::task::spawn_blocking(move || crate::logic::files::fetch_pkgbuild_sync(&fetch_name))
            .await;
    match fetched {
        Ok(Ok(pkgbuild_text)) => {
            tracing::debug!(
                "Successfully fetched PKGBUILD for {}, parsing dependencies",
                name
            );
            Some(handle_pkgbuild_analysis(
                &name,
                &pkgbuild_text,
                &installed,
                &provided,
            ))
        }
        Ok(Err(e)) => {
            tracing::warn!("Failed to fetch PKGBUILD for {}: {}", name, e);
            tracing::info!(
                "Creating empty sandbox info for {} (both .SRCINFO and PKGBUILD fetch failed)",
                name
            );
            Some(create_empty_sandbox_info(name))
        }
        Err(e) => {
            tracing::warn!(
                "Failed to spawn blocking task for PKGBUILD fetch for {}: {}",
                name,
                e
            );
            tracing::info!(
                "Creating empty sandbox info for {} (spawn task failed)",
                name
            );
            Some(create_empty_sandbox_info(name))
        }
    }
}
/// What: Resolve sandbox information for AUR packages using async HTTP.
///
/// Inputs:
/// - `items`: AUR packages to analyze.
///
/// Output:
/// - Vector of `SandboxInfo` entries, one per AUR package.
///
/// Details:
/// - Fetches `.SRCINFO` for each AUR package in parallel using async HTTP.
/// - Parses dependencies and compares against host environment.
/// - Returns empty vector if no AUR packages are present.
pub async fn resolve_sandbox_info_async(items: &[PackageItem]) -> Vec<SandboxInfo> {
let aur_items: Vec<_> = items
.iter()
.filter(|i| matches!(i.source, crate::state::Source::Aur))
.collect();
let span = tracing::info_span!(
"resolve_sandbox_info",
stage = "sandbox",
item_count = aur_items.len()
);
let _guard = span.enter();
let start_time = std::time::Instant::now();
let installed = get_installed_packages();
let provided = crate::logic::deps::get_provided_packages(&installed);
// Fetch all .SRCINFO files in parallel
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(10))
.build()
.unwrap_or_else(|_| reqwest::Client::new());
let mut fetch_futures = FuturesUnordered::new();
for item in items {
if matches!(item.source, crate::state::Source::Aur) {
let name = item.name.clone();
let installed_clone = installed.clone();
let provided_clone = provided.clone();
let client_clone = client.clone();
fetch_futures.push(process_sandbox_package(
name,
client_clone,
installed_clone,
provided_clone,
));
}
}
// Collect all results as they complete
let mut results = Vec::new();
while let Some(result) = fetch_futures.next().await {
if let Some(info) = result {
results.push(info);
}
}
let elapsed = start_time.elapsed();
let duration_ms = u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX);
tracing::info!(
stage = "sandbox",
item_count = aur_items.len(),
result_count = results.len(),
duration_ms = duration_ms,
"Sandbox resolution complete"
);
results
}
/// What: Synchronous wrapper around [`resolve_sandbox_info_async`].
///
/// Inputs:
/// - `items`: AUR packages to analyze.
///
/// Output:
/// - Vector of `SandboxInfo` entries, one per AUR package.
///
/// # Panics
/// - Panics if no tokio runtime handle is available and a new runtime cannot
///   be created.
///
/// Details:
/// - Blocks on the async implementation so callers in sync contexts can use it.
#[must_use]
pub fn resolve_sandbox_info(items: &[PackageItem]) -> Vec<SandboxInfo> {
    match tokio::runtime::Handle::try_current() {
        // Reuse the ambient runtime when one exists.
        Ok(handle) => handle.block_on(resolve_sandbox_info_async(items)),
        Err(_) => {
            // No ambient runtime: build a temporary one just for this call.
            let rt = tokio::runtime::Runtime::new().unwrap_or_else(|e| {
                tracing::error!(
                    "Failed to create tokio runtime for sandbox resolution: {}",
                    e
                );
                panic!("Cannot resolve sandbox info without tokio runtime");
            });
            rt.block_on(resolve_sandbox_info_async(items))
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/direct.rs | src/install/direct.rs | //! Direct install/remove operations using integrated processes (bypassing preflight).
use crate::state::{AppState, PackageItem, modal::CascadeMode};
/// What: Start the integrated install flow for a single package (no preflight).
///
/// Inputs:
/// - `app`: Mutable application state
/// - `item`: Package to install
/// - `dry_run`: Whether to run in dry-run mode
///
/// Output:
/// - Transitions to `PasswordPrompt`, or to `Alert` when the account is locked out.
///
/// Details:
/// - Official installs (`sudo pacman`) and AUR installs (paru/yay invoke sudo
///   for the final step) both need a password, so the prompt is always shown.
/// - Execution later goes through `ExecutorRequest::Install`.
pub fn start_integrated_install(app: &mut AppState, item: &PackageItem, dry_run: bool) {
    use crate::state::modal::PreflightHeaderChips;
    app.dry_run = dry_run;
    // Faillock check first: a locked-out account must not be asked for a password.
    let username = std::env::var("USER").unwrap_or_else(|_| "user".to_string());
    match crate::logic::faillock::get_lockout_message_if_locked(&username, app) {
        Some(message) => {
            // Locked out: surface the warning instead of the password prompt.
            app.modal = crate::state::Modal::Alert { message };
        }
        None => {
            app.modal = crate::state::Modal::PasswordPrompt {
                purpose: crate::state::modal::PasswordPurpose::Install,
                items: vec![item.clone()],
                input: String::new(),
                cursor: 0,
                error: None,
            };
            app.pending_exec_header_chips = Some(PreflightHeaderChips::default());
        }
    }
}
/// What: Start the integrated install flow for multiple packages (no preflight).
///
/// Inputs:
/// - `app`: Mutable application state
/// - `items`: Packages to install
/// - `dry_run`: Whether to run in dry-run mode
///
/// Output:
/// - Transitions to `PasswordPrompt`, or to `Alert` when the account is locked out.
///
/// Details:
/// - Official installs (`sudo pacman`) and AUR installs (paru/yay invoke sudo
///   for the final step) both need a password, so the prompt is always shown.
/// - Execution later goes through `ExecutorRequest::Install`.
pub fn start_integrated_install_all(app: &mut AppState, items: &[PackageItem], dry_run: bool) {
    use crate::state::modal::PreflightHeaderChips;
    app.dry_run = dry_run;
    // Faillock check first: a locked-out account must not be asked for a password.
    let username = std::env::var("USER").unwrap_or_else(|_| "user".to_string());
    match crate::logic::faillock::get_lockout_message_if_locked(&username, app) {
        Some(message) => {
            // Locked out: surface the warning instead of the password prompt.
            app.modal = crate::state::Modal::Alert { message };
        }
        None => {
            app.modal = crate::state::Modal::PasswordPrompt {
                purpose: crate::state::modal::PasswordPurpose::Install,
                items: items.to_vec(),
                input: String::new(),
                cursor: 0,
                error: None,
            };
            app.pending_exec_header_chips = Some(PreflightHeaderChips::default());
        }
    }
}
/// What: Start the integrated remove flow (no preflight).
///
/// Inputs:
/// - `app`: Mutable application state
/// - `names`: Package names to remove
/// - `dry_run`: Whether to run in dry-run mode
/// - `cascade_mode`: Cascade removal mode
///
/// Output:
/// - Transitions to `PasswordPrompt`, or to `Alert` when the account is locked out.
///
/// Details:
/// - `pacman -R` always requires sudo, so the prompt is always shown; users
///   with passwordless sudo can simply press Enter.
/// - Execution later goes through `ExecutorRequest::Remove`.
pub fn start_integrated_remove_all(
    app: &mut AppState,
    names: &[String],
    dry_run: bool,
    cascade_mode: CascadeMode,
) {
    use crate::state::modal::PreflightHeaderChips;
    app.dry_run = dry_run;
    app.remove_cascade_mode = cascade_mode;
    // The prompt only needs names; build minimal placeholder PackageItems.
    let items: Vec<PackageItem> = names
        .iter()
        .map(|name| PackageItem {
            name: name.clone(),
            version: String::new(),
            description: String::new(),
            source: crate::state::Source::Official {
                repo: String::new(),
                arch: String::new(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        })
        .collect();
    // Faillock check first: a locked-out account must not be asked for a password.
    let username = std::env::var("USER").unwrap_or_else(|_| "user".to_string());
    match crate::logic::faillock::get_lockout_message_if_locked(&username, app) {
        Some(message) => {
            // Locked out: surface the warning instead of the password prompt.
            app.modal = crate::state::Modal::Alert { message };
        }
        None => {
            app.modal = crate::state::Modal::PasswordPrompt {
                purpose: crate::state::modal::PasswordPurpose::Remove,
                items,
                input: String::new(),
                cursor: 0,
                error: None,
            };
            app.pending_exec_header_chips = Some(PreflightHeaderChips::default());
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/command.rs | src/install/command.rs | //! Builds shell commands for installing packages via pacman or AUR helpers.
use crate::state::{PackageItem, Source};
use super::utils::shell_single_quote;
/// What: Compose the shell snippet that installs AUR packages via `paru`,
/// falling back to `yay`.
///
/// Input:
/// - `flags`: Flag string forwarded to the helper (e.g. `-S --needed`).
/// - `n`: Space-separated package names to install.
///
/// Output:
/// - Parenthesised `(if ... fi)` snippet without the trailing hold suffix.
///
/// Details:
/// - `paru` wins when both helpers exist; a plain error message is printed
///   when neither helper is on PATH.
#[must_use]
pub fn aur_install_body(flags: &str, n: &str) -> String {
    // Build each helper invocation up front, then splice into the chain.
    let paru = format!("paru {flags} {n}");
    let yay = format!("yay {flags} {n}");
    format!(
        "(if command -v paru >/dev/null 2>&1; then {paru}; elif command -v yay >/dev/null 2>&1; then {yay}; else echo 'No AUR helper (paru/yay) found.'; fi)"
    )
}
/// What: Build a shell command to install `item` and indicate whether `sudo` is used.
///
/// Input:
/// - `item`: Package to install (official via pacman, AUR via helper).
/// - `password`: Optional sudo password; when present, wires `sudo -S` with a pipe.
/// - `dry_run`: When `true`, prints the command instead of executing.
///
/// Output:
/// - Tuple `(command_string, uses_sudo)` with a shell-ready command and whether it requires sudo.
///
/// Details:
/// - Uses `--needed` flag for new installs, omits it for reinstalls.
/// - Adds a hold tail so spawned terminals remain open after completion.
/// - AUR commands return `uses_sudo = false` because the helper invokes sudo itself.
#[must_use]
pub fn build_install_command(
    item: &PackageItem,
    password: Option<&str>,
    dry_run: bool,
) -> (String, bool) {
    match &item.source {
        Source::Official { .. } => {
            // Reinstalls drop `--needed` so pacman actually reinstalls the package.
            let reinstall = crate::index::is_installed(&item.name);
            let base_cmd = if reinstall {
                format!("pacman -S --noconfirm {}", item.name)
            } else {
                format!("pacman -S --needed --noconfirm {}", item.name)
            };
            // Keeps a spawned terminal window open until the user acknowledges.
            let hold_tail = "; echo; echo 'Finished.'; echo 'Press any key to close...'; read -rn1 -s _ || (echo; echo 'Press Ctrl+C to close'; sleep infinity)";
            if dry_run {
                // Quote the full command so the echoed preview is shell-safe.
                let cmd = format!("sudo {base_cmd}{hold_tail}");
                let quoted = shell_single_quote(&cmd);
                let bash = format!("echo DRY RUN: {quoted}");
                return (bash, true);
            }
            let pass = password.unwrap_or("");
            if pass.is_empty() {
                // Interactive sudo: let it prompt on the terminal.
                let bash = format!("sudo {base_cmd}{hold_tail}");
                (bash, true)
            } else {
                // Non-interactive sudo: pipe the single-quoted password into `sudo -S`.
                let escaped = shell_single_quote(pass);
                let pipe = format!("echo {escaped} | ");
                let bash = format!("{pipe}sudo -S {base_cmd}{hold_tail}");
                (bash, true)
            }
        }
        Source::Aur => {
            // AUR hold tail has no 'Finished.' line (helper prints its own summary).
            let hold_tail = "; echo; echo 'Press any key to close...'; read -rn1 -s _ || (echo; echo 'Press Ctrl+C to close'; sleep infinity)";
            let reinstall = crate::index::is_installed(&item.name);
            let flags = if reinstall {
                "-S --noconfirm"
            } else {
                "-S --needed --noconfirm"
            };
            let aur_cmd = if dry_run {
                // Dry run echoes a simplified `paru || yay` fallback chain.
                let cmd = format!(
                    "paru {flags} {n} || yay {flags} {n}{hold}",
                    n = item.name,
                    hold = hold_tail,
                    flags = flags
                );
                let quoted = shell_single_quote(&cmd);
                format!("echo DRY RUN: {quoted}")
            } else {
                // Real run uses the shared helper-detection snippet.
                format!(
                    "{body}{hold}",
                    body = aur_install_body(flags, &item.name),
                    hold = hold_tail
                )
            };
            // The helper runs sudo internally, so the caller must not wrap with sudo.
            (aur_cmd, false)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// What: Exercise the official-package command builder across the
    /// no-password, password, and dry-run variants.
    ///
    /// Inputs:
    /// - Official package metadata.
    /// - Optional password string.
    /// - Dry-run flag toggled between `false` and `true`.
    ///
    /// Output:
    /// - Commands containing the expected pacman flags, the optional
    ///   `sudo -S` echo pipe, and the dry-run prefix.
    ///
    /// Details:
    /// - Also checks the hold-tail prompt survives in every variant.
    #[test]
    fn install_build_install_command_official_variants() {
        let item = PackageItem {
            name: "ripgrep".into(),
            version: "14".into(),
            description: String::new(),
            source: Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        };
        // No password: plain `sudo pacman` plus hold tail.
        let (plain_cmd, plain_sudo) = build_install_command(&item, None, false);
        assert!(plain_sudo);
        assert!(plain_cmd.contains("sudo pacman -S --needed --noconfirm ripgrep"));
        assert!(plain_cmd.contains("Press any key to close"));
        // With password: piped through `sudo -S`.
        let (piped_cmd, piped_sudo) = build_install_command(&item, Some("pa's"), false);
        assert!(piped_sudo);
        assert!(piped_cmd.contains("echo "));
        assert!(piped_cmd.contains("sudo -S pacman -S --needed --noconfirm ripgrep"));
        // Dry run: single-quoted echo of the would-be command.
        let (dry_cmd, dry_sudo) = build_install_command(&item, None, true);
        assert!(dry_sudo);
        assert!(dry_cmd.starts_with("echo DRY RUN: '"));
        assert!(dry_cmd.contains("sudo pacman -S --needed --noconfirm ripgrep"));
    }

    /// What: Verify AUR command construction prefers `paru`, falls back to
    /// `yay`, and honours dry-run output.
    ///
    /// Inputs:
    /// - AUR package metadata.
    /// - Dry-run flag toggled between `false` and `true`.
    ///
    /// Output:
    /// - Scripts that probe `paru` first, then `yay`, and emit a dry-run echo
    ///   when requested.
    ///
    /// Details:
    /// - The hold-tail prompt and the missing-helper warning must stay present.
    #[test]
    fn install_build_install_command_aur_variants() {
        let item = PackageItem {
            name: "yay-bin".into(),
            version: "1".into(),
            description: String::new(),
            source: Source::Aur,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        };
        let (live_cmd, live_sudo) = build_install_command(&item, None, false);
        assert!(!live_sudo);
        assert!(live_cmd.contains("command -v paru"));
        assert!(live_cmd.contains("paru -S --needed --noconfirm yay-bin"));
        assert!(live_cmd.contains("elif command -v yay"));
        assert!(live_cmd.contains("No AUR helper"));
        assert!(live_cmd.contains("Press any key to close"));
        let (dry_cmd, dry_sudo) = build_install_command(&item, None, true);
        assert!(!dry_sudo);
        // Dry-run commands are single-quoted to avoid shell syntax errors.
        assert!(dry_cmd.starts_with("echo DRY RUN: '"));
        assert!(dry_cmd.contains("paru -S --needed --noconfirm yay-bin"));
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/batch.rs | src/install/batch.rs | #[cfg(not(target_os = "windows"))]
use crate::state::Source;
#[allow(unused_imports)]
use std::process::Command;
use crate::state::PackageItem;
#[cfg(not(target_os = "windows"))]
use super::command::aur_install_body;
#[cfg(not(target_os = "windows"))]
use super::logging::log_installed;
#[cfg(not(target_os = "windows"))]
use super::utils::{choose_terminal_index_prefer_path, command_on_path, shell_single_quote};
#[cfg(not(target_os = "windows"))]
/// What: Build the shell command string for batch package installation.
///
/// Input:
/// - `items`: Packages to install
/// - `official`: Names of official packages
/// - `aur`: Names of AUR packages
/// - `dry_run`: When `true`, prints commands instead of executing
///
/// Output:
/// - Shell command string with hold tail appended
///
/// Details:
/// - Official packages are grouped into a single `pacman` invocation
/// - AUR packages are installed via `paru`/`yay` (prompts to install a helper if missing)
/// - Appends a "hold" tail so the terminal remains open after command completion
fn build_batch_install_command(
items: &[PackageItem],
official: &[String],
aur: &[String],
dry_run: bool,
) -> String {
let hold_tail = "; echo; echo 'Finished.'; echo 'Press any key to close...'; read -rn1 -s _ || (echo; echo 'Press Ctrl+C to close'; sleep infinity)";
if dry_run {
if !aur.is_empty() {
let all: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
let cmd = format!(
"(paru -S --needed --noconfirm {n} || yay -S --needed --noconfirm {n}){hold}",
n = all.join(" "),
hold = hold_tail
);
let quoted = shell_single_quote(&cmd);
format!("echo DRY RUN: {quoted}")
} else if !official.is_empty() {
let cmd = format!(
"sudo pacman -S --needed --noconfirm {n}{hold}",
n = official.join(" "),
hold = hold_tail
);
let quoted = shell_single_quote(&cmd);
format!("echo DRY RUN: {quoted}")
} else {
format!("echo DRY RUN: nothing to install{hold_tail}")
}
} else if !aur.is_empty() {
let all: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
let n = all.join(" ");
format!(
"{body}{hold}",
body = aur_install_body("-S --needed --noconfirm", &n),
hold = hold_tail
)
} else if !official.is_empty() {
// Check if any packages have version info (coming from updates window)
let has_versions = items
.iter()
.any(|item| matches!(item.source, Source::Official { .. }) && !item.version.is_empty());
let reinstall_any = items.iter().any(|item| {
matches!(item.source, Source::Official { .. }) && crate::index::is_installed(&item.name)
});
if has_versions && reinstall_any {
// Coming from updates window - sync database first, then install
format!(
"sudo bash -c 'pacman -Sy --noconfirm && pacman -S --noconfirm {n}'{hold}",
n = official.join(" "),
hold = hold_tail
)
} else {
format!(
"sudo pacman -S --needed --noconfirm {n}{hold}",
n = official.join(" "),
hold = hold_tail
)
}
} else {
format!("echo nothing to install{hold_tail}")
}
}
#[cfg(not(target_os = "windows"))]
/// What: Attempt to spawn a terminal with the given command string.
///
/// Input:
/// - `term`: Terminal executable name
/// - `args`: Arguments for the terminal
/// - `needs_xfce_command`: Whether this terminal needs special xfce4-terminal command handling
/// - `cmd_str`: Command string to execute in the terminal
///
/// Output:
/// - `Ok(())` if the terminal was successfully spawned, `Err(())` otherwise
///
/// Details:
/// - Handles special cases for `konsole` (`Wayland`), `gnome-console`/`kgx` (rendering), and `xfce4-terminal` (command format)
/// - Sets up `PACSEA_TEST_OUT` environment variable if present
fn try_spawn_terminal(
term: &str,
args: &[&str],
needs_xfce_command: bool,
cmd_str: &str,
) -> Result<(), ()> {
let mut cmd = Command::new(term);
if needs_xfce_command && term == "xfce4-terminal" {
let quoted = shell_single_quote(cmd_str);
cmd.arg("--command").arg(format!("bash -lc {quoted}"));
} else {
cmd.args(args.iter().copied()).arg(cmd_str);
}
if let Ok(p) = std::env::var("PACSEA_TEST_OUT") {
if let Some(parent) = std::path::Path::new(&p).parent() {
let _ = std::fs::create_dir_all(parent);
}
cmd.env("PACSEA_TEST_OUT", p);
}
if term == "konsole" && std::env::var_os("WAYLAND_DISPLAY").is_some() {
cmd.env("QT_LOGGING_RULES", "qt.qpa.wayland.textinput=false");
}
if term == "gnome-console" || term == "kgx" {
cmd.env("GSK_RENDERER", "cairo");
cmd.env("LIBGL_ALWAYS_SOFTWARE", "1");
}
cmd.spawn().map(|_| ()).map_err(|_| ())
}
#[cfg(not(target_os = "windows"))]
/// What: Spawn a terminal to install a batch of packages.
///
/// Input:
/// - `items`: Packages to install
/// - `dry_run`: When `true`, prints commands instead of executing
///
/// Output:
/// - Launches a terminal (or falls back to `bash`) running the composed install commands.
///
/// Details:
/// - Official packages are grouped into a single `pacman` invocation
/// - AUR packages are installed via `paru`/`yay` (prompts to install a helper if missing)
/// - Prefers common terminals (GNOME Console/Terminal, kitty, alacritty, xterm, xfce4-terminal, etc.); falls back to `bash`
/// - Appends a "hold" tail so the terminal remains open after command completion
/// - During tests, this is a no-op to avoid opening real terminal windows.
pub fn spawn_install_all(items: &[PackageItem], dry_run: bool) {
// Skip actual spawning during tests unless PACSEA_TEST_OUT is set (indicates a test with fake terminal)
#[cfg(test)]
if std::env::var("PACSEA_TEST_OUT").is_err() {
return;
}
let mut official: Vec<String> = Vec::new();
let mut aur: Vec<String> = Vec::new();
for it in items {
match it.source {
Source::Official { .. } => official.push(it.name.clone()),
Source::Aur => aur.push(it.name.clone()),
}
}
let names_vec: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
tracing::info!(
total = items.len(),
aur_count = aur.len(),
official_count = official.len(),
dry_run = dry_run,
names = %names_vec.join(" "),
"spawning install"
);
let cmd_str = build_batch_install_command(items, &official, &aur, dry_run);
// Prefer GNOME Terminal when running under GNOME desktop
let is_gnome = std::env::var("XDG_CURRENT_DESKTOP")
.ok()
.is_some_and(|v| v.to_uppercase().contains("GNOME"));
let terms_gnome_first: &[(&str, &[&str], bool)] = &[
("gnome-terminal", &["--", "bash", "-lc"], false),
("gnome-console", &["--", "bash", "-lc"], false),
("kgx", &["--", "bash", "-lc"], false),
("alacritty", &["-e", "bash", "-lc"], false),
("kitty", &["bash", "-lc"], false),
("xterm", &["-hold", "-e", "bash", "-lc"], false),
("konsole", &["-e", "bash", "-lc"], false),
("xfce4-terminal", &[], true),
("tilix", &["--", "bash", "-lc"], false),
("mate-terminal", &["--", "bash", "-lc"], false),
];
let terms_default: &[(&str, &[&str], bool)] = &[
("alacritty", &["-e", "bash", "-lc"], false),
("kitty", &["bash", "-lc"], false),
("xterm", &["-hold", "-e", "bash", "-lc"], false),
("gnome-terminal", &["--", "bash", "-lc"], false),
("gnome-console", &["--", "bash", "-lc"], false),
("kgx", &["--", "bash", "-lc"], false),
("konsole", &["-e", "bash", "-lc"], false),
("xfce4-terminal", &[], true),
("tilix", &["--", "bash", "-lc"], false),
("mate-terminal", &["--", "bash", "-lc"], false),
];
let terms = if is_gnome {
terms_gnome_first
} else {
terms_default
};
let mut launched = false;
if let Some(idx) = choose_terminal_index_prefer_path(terms) {
let (term, args, needs_xfce_command) = terms[idx];
match try_spawn_terminal(term, args, needs_xfce_command, &cmd_str) {
Ok(()) => {
tracing::info!(terminal = %term, total = items.len(), aur_count = aur.len(), official_count = official.len(), dry_run = dry_run, names = %names_vec.join(" "), "launched terminal for install");
launched = true;
}
Err(()) => {
tracing::warn!(terminal = %term, names = %names_vec.join(" "), "failed to spawn terminal, trying next");
}
}
}
if !launched {
for (term, args, needs_xfce_command) in terms {
if command_on_path(term) {
match try_spawn_terminal(term, args, *needs_xfce_command, &cmd_str) {
Ok(()) => {
tracing::info!(terminal = %term, total = items.len(), aur_count = aur.len(), official_count = official.len(), dry_run = dry_run, names = %names_vec.join(" "), "launched terminal for install");
launched = true;
break;
}
Err(()) => {
tracing::warn!(terminal = %term, names = %names_vec.join(" "), "failed to spawn terminal, trying next");
}
}
}
}
}
if !launched {
let res = Command::new("bash").args(["-lc", &cmd_str]).spawn();
if let Err(e) = res {
tracing::error!(error = %e, names = %names_vec.join(" "), "failed to spawn bash to run install command");
} else {
tracing::info!(total = items.len(), aur_count = aur.len(), official_count = official.len(), dry_run = dry_run, names = %names_vec.join(" "), "launched bash for install");
}
}
if !dry_run {
let names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
if !names.is_empty()
&& let Err(e) = log_installed(&names)
{
tracing::warn!(error = %e, count = names.len(), "failed to write install audit log");
}
}
}
#[cfg(all(test, not(target_os = "windows")))]
mod tests {
    #[test]
    /// What: Confirm batch installs launch gnome-terminal with the expected separator arguments.
    ///
    /// Inputs:
    /// - Shim `gnome-terminal` scripted to capture argv via `PACSEA_TEST_OUT`.
    /// - `spawn_install_all` invoked with two official packages in dry-run mode.
    ///
    /// Output:
    /// - Captured argument list starts with `--`, `bash`, `-lc`, validating safe command invocation.
    ///
    /// Details:
    /// - Overrides `PATH` and environment variables, then restores them to avoid leaking state across tests.
    fn install_batch_uses_gnome_terminal_double_dash() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use std::path::PathBuf;
        // Unique per-PID/per-nanosecond directory keeps concurrent test runs isolated.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_inst_batch_gnome_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&dir);
        // args.txt receives one captured argv entry per line from the shim below.
        let mut out_path = dir.clone();
        out_path.push("args.txt");
        let mut term_path = dir.clone();
        term_path.push("gnome-terminal");
        // Fake gnome-terminal: truncates the capture file, then appends each argument.
        let script = "#!/bin/sh\n: > \"$PACSEA_TEST_OUT\"\nfor a in \"$@\"; do printf '%s\n' \"$a\" >> \"$PACSEA_TEST_OUT\"; done\n";
        fs::write(&term_path, script.as_bytes()).expect("Failed to write test terminal script");
        let mut perms = fs::metadata(&term_path)
            .expect("Failed to read test terminal script metadata")
            .permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&term_path, perms)
            .expect("Failed to set test terminal script permissions");
        // Restrict PATH to the shim dir so only the fake terminal resolves;
        // PACSEA_TEST_OUT also opts spawn_install_all out of its test no-op guard.
        let orig_path = std::env::var_os("PATH");
        unsafe {
            std::env::set_var("PATH", dir.display().to_string());
            std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        }
        // Two official packages; dry_run=true so no real package manager is invoked.
        let items = vec![
            crate::state::PackageItem {
                name: "rg".into(),
                version: "1".into(),
                description: String::new(),
                source: crate::state::Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
                popularity: None,
                out_of_date: None,
                orphaned: false,
            },
            crate::state::PackageItem {
                name: "fd".into(),
                version: "1".into(),
                description: String::new(),
                source: crate::state::Source::Official {
                    repo: "extra".into(),
                    arch: "x86_64".into(),
                },
                popularity: None,
                out_of_date: None,
                orphaned: false,
            },
        ];
        super::spawn_install_all(&items, true);
        // Brief wait for the detached shim process to flush its capture file.
        std::thread::sleep(std::time::Duration::from_millis(50));
        let body = fs::read_to_string(&out_path).expect("fake terminal args file written");
        let lines: Vec<&str> = body.lines().collect();
        // The batch path must pass `--` before the bash invocation.
        assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
        assert_eq!(lines[0], "--");
        assert_eq!(lines[1], "bash");
        assert_eq!(lines[2], "-lc");
        // Restore mutated environment to avoid leaking state into other tests.
        unsafe {
            if let Some(v) = orig_path {
                std::env::set_var("PATH", v);
            } else {
                std::env::remove_var("PATH");
            }
            std::env::remove_var("PACSEA_TEST_OUT");
        }
    }
}
#[cfg(target_os = "windows")]
/// What: Present an informational install message on Windows where package management is unsupported.
///
/// Input:
/// - `items`: Packages the user attempted to install.
/// - `dry_run`: When `true`, uses `PowerShell` to simulate the install operation.
///
/// Output:
/// - Launches a detached `PowerShell` window (if available) for dry-run simulation, or `cmd` window otherwise.
///
/// Details:
/// - When `dry_run` is true and `PowerShell` is available, uses `PowerShell` to simulate the batch install with Write-Host.
/// - Always logs install attempts when not in `dry_run` to remain consistent with Unix behaviour.
/// - During tests, this is a no-op to avoid opening real terminal windows.
#[allow(unused_variables, clippy::missing_const_for_fn)]
pub fn spawn_install_all(items: &[PackageItem], dry_run: bool) {
    // The entire body is compiled out under tests so no real windows open.
    #[cfg(not(test))]
    {
        let mut names: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
        // Fall back to a placeholder so the spawned window always echoes something.
        if names.is_empty() {
            names.push("nothing".into());
        }
        let names_str = names.join(" ");
        if dry_run && super::utils::is_powershell_available() {
            // Use PowerShell to simulate the batch install operation
            // (single quotes in names are doubled for PowerShell quoting).
            let powershell_cmd = format!(
                "Write-Host 'DRY RUN: Simulating batch install of {}' -ForegroundColor Yellow; Write-Host 'Packages: {}' -ForegroundColor Cyan; Write-Host ''; Write-Host 'Press any key to close...'; $null = $Host.UI.RawUI.ReadKey('NoEcho,IncludeKeyDown')",
                names.len(),
                names_str.replace('\'', "''")
            );
            let _ = Command::new("powershell.exe")
                .args(["-NoProfile", "-Command", &powershell_cmd])
                .spawn();
        } else {
            let msg = if dry_run {
                format!("DRY RUN: install {names_str}")
            } else {
                format!("Install {names_str} (not supported on Windows)")
            };
            // `start` detaches a new cmd window; /K keeps it open after the echo.
            let _ = Command::new("cmd")
                .args([
                    "/C",
                    "start",
                    "Pacsea Install",
                    "cmd",
                    "/K",
                    &format!("echo {msg}"),
                ])
                .spawn();
        }
        // Mirror Unix behaviour: record real installs in the audit log.
        if !dry_run {
            let _ = super::logging::log_installed(&names);
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/shell.rs | src/install/shell.rs | use std::process::Command;
#[cfg(not(target_os = "windows"))]
use super::utils::{choose_terminal_index_prefer_path, command_on_path, shell_single_quote};
#[cfg(not(target_os = "windows"))]
/// What: Spawn a terminal to run a `&&`-joined series of shell commands with a hold tail.
///
/// Input:
/// - `cmds`: Ordered list of shell snippets to execute.
///
/// Output:
/// - Starts the preferred terminal (or `bash`) running the composed command sequence.
///
/// Details:
/// - Thin wrapper over `spawn_shell_commands_in_terminal_with_hold` with the hold tail enabled.
/// - During tests, this is a no-op to avoid opening real terminal windows, unless `PACSEA_TEST_OUT` is set.
pub fn spawn_shell_commands_in_terminal(cmds: &[String]) {
    // Tests only spawn when PACSEA_TEST_OUT points at a fake-terminal capture file.
    #[cfg(test)]
    {
        if std::env::var("PACSEA_TEST_OUT").is_err() {
            return;
        }
    }
    // Keep the terminal open after the commands finish.
    spawn_shell_commands_in_terminal_with_hold(cmds, true);
}
#[cfg(not(target_os = "windows"))]
/// What: Append a message to the `terminal.log` diagnostics file.
///
/// Input:
/// - `message`: Text to append verbatim (callers supply any trailing newline).
///
/// Output:
/// - Appends to `terminal.log` inside the logs directory, creating parents as needed.
///
/// Details:
/// - Best-effort: all I/O errors are deliberately ignored so diagnostics can
///   never break a spawn attempt.
fn log_to_terminal_log(message: &str) {
    let log_path = {
        let mut p = crate::theme::logs_dir();
        p.push("terminal.log");
        p
    };
    if let Some(dir) = log_path.parent() {
        let _ = std::fs::create_dir_all(dir);
    }
    let opened = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path);
    if let Ok(mut file) = opened {
        let _ = std::io::Write::write_all(&mut file, message.as_bytes());
    }
}
#[cfg(not(target_os = "windows"))]
/// What: Apply terminal-specific environment tweaks to a spawn command.
///
/// Input:
/// - `cmd`: The `Command` being prepared for spawning.
/// - `term`: Terminal binary name the command will launch.
/// - `is_wayland`: Whether the session runs under Wayland.
///
/// Output:
/// - Mutates `cmd` in place with the required environment variables.
///
/// Details:
/// - Forwards `PACSEA_TEST_OUT` (creating its parent directory) so test shims can capture argv.
/// - Silences Konsole's Wayland text-input warnings.
/// - Forces software rendering for GNOME Console and `kgx`.
fn configure_terminal_env(cmd: &mut Command, term: &str, is_wayland: bool) {
    if let Ok(capture) = std::env::var("PACSEA_TEST_OUT") {
        if let Some(dir) = std::path::Path::new(&capture).parent() {
            let _ = std::fs::create_dir_all(dir);
        }
        cmd.env("PACSEA_TEST_OUT", capture);
    }
    match term {
        "konsole" if is_wayland => {
            cmd.env("QT_LOGGING_RULES", "qt.qpa.wayland.textinput=false");
        }
        "gnome-console" | "kgx" => {
            cmd.env("GSK_RENDERER", "cairo");
            cmd.env("LIBGL_ALWAYS_SOFTWARE", "1");
        }
        _ => {}
    }
}
#[cfg(not(target_os = "windows"))]
/// What: Build and spawn a terminal command with logging.
///
/// Input:
/// - `term`: Terminal binary name.
/// - `args`: Arguments placed before the script execution string.
/// - `needs_xfce_command`: Whether to use the xfce4-terminal `--command` format.
/// - `script_exec`: The script execution command string.
/// - `cmd_str`: The full command string (used only for log output).
/// - `is_wayland`: Whether running under Wayland.
/// - `detach_stdio`: Whether to detach stdio streams from the child.
///
/// Output:
/// - `Ok(true)` when the spawn succeeded; `Err` with the I/O error otherwise.
///
/// Details:
/// - Logs both the attempt and its outcome to terminal.log.
/// - Applies terminal-specific environment tweaks before spawning.
fn try_spawn_terminal(
    term: &str,
    args: &[&str],
    needs_xfce_command: bool,
    script_exec: &str,
    cmd_str: &str,
    is_wayland: bool,
    detach_stdio: bool,
) -> Result<bool, std::io::Error> {
    let mut cmd = Command::new(term);
    // xfce4-terminal wants the whole invocation packed into one --command value.
    if needs_xfce_command && term == "xfce4-terminal" {
        let quoted = shell_single_quote(script_exec);
        cmd.arg("--command").arg(format!("bash -lc {quoted}"));
    } else {
        cmd.args(args.iter().copied()).arg(script_exec);
    }
    configure_terminal_env(&mut cmd, term, is_wayland);
    let cmd_len = cmd_str.len();
    log_to_terminal_log(&format!(
        "spawn term={term} args={args:?} xfce_mode={needs_xfce_command} cmd_len={cmd_len}\n"
    ));
    if detach_stdio {
        cmd.stdin(std::process::Stdio::null())
            .stdout(std::process::Stdio::null())
            .stderr(std::process::Stdio::null());
    }
    match cmd.spawn() {
        Ok(child) => {
            log_to_terminal_log(&format!("spawn result: ok pid={}\n", child.id()));
            Ok(true)
        }
        Err(e) => {
            log_to_terminal_log(&format!("spawn result: err error={e}\n"));
            Err(e)
        }
    }
}
#[cfg(not(target_os = "windows"))]
/// What: Create a temporary script file with the command string.
///
/// Input:
/// - `cmd_str`: The command string to write to the script.
///
/// Output:
/// - Path to the created temporary script file.
///
/// Details:
/// - The file name embeds the PID and a nanosecond timestamp to avoid collisions.
/// - On Unix the file is created with mode `0o700` up front, so the script
///   contents are never observable with wider permissions (the previous
///   chmod-after-write approach left a brief world-readable window).
/// - Write errors are deliberately ignored; the subsequent spawn surfaces any
///   failure when the script cannot be executed.
fn create_temp_script(cmd_str: &str) -> std::path::PathBuf {
    let mut p = std::env::temp_dir();
    let ts = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_nanos())
        .unwrap_or(0);
    p.push(format!("pacsea_scan_{}_{}.sh", std::process::id(), ts));
    let body = format!("#!/bin/bash\n{cmd_str}\n");
    #[cfg(unix)]
    {
        use std::io::Write;
        use std::os::unix::fs::OpenOptionsExt;
        // Owner-only from creation; `mode` applies when the file is created.
        if let Ok(mut f) = std::fs::OpenOptions::new()
            .write(true)
            .create(true)
            .truncate(true)
            .mode(0o700)
            .open(&p)
        {
            let _ = f.write_all(body.as_bytes());
        }
    }
    #[cfg(not(unix))]
    {
        let _ = std::fs::write(&p, body);
    }
    p
}
#[cfg(not(target_os = "windows"))]
/// What: Persist the command string to a log file for debugging.
///
/// Input:
/// - `cmd_str`: The command string to log.
///
/// Output:
/// - None (overwrites `last_terminal_cmd.log` in the logs directory; errors ignored).
fn persist_command_to_log(cmd_str: &str) {
    let log_file = crate::theme::logs_dir().join("last_terminal_cmd.log");
    if let Some(parent) = log_file.parent() {
        let _ = std::fs::create_dir_all(parent);
    }
    let _ = std::fs::write(&log_file, format!("{cmd_str}\n"));
}
#[cfg(not(target_os = "windows"))]
/// What: Build the list of terminal candidates with preference ordering.
///
/// Input:
/// - `is_gnome`: Whether running under GNOME desktop.
///
/// Output:
/// - Vector of terminal candidates with (`name`, `args`, `needs_xfce_command`) tuples.
///
/// Details:
/// - Prioritizes GNOME terminals when under GNOME, otherwise uses default order.
/// - Moves user-preferred terminal to the front if configured.
fn build_terminal_candidates(is_gnome: bool) -> Vec<(&'static str, &'static [&'static str], bool)> {
let terms_gnome_first: &[(&str, &[&str], bool)] = &[
("gnome-terminal", &["--", "bash", "-lc"], false),
("gnome-console", &["--", "bash", "-lc"], false),
("kgx", &["--", "bash", "-lc"], false),
("alacritty", &["-e", "bash", "-lc"], false),
("ghostty", &["-e", "bash", "-lc"], false),
("kitty", &["bash", "-lc"], false),
("xterm", &["-hold", "-e", "bash", "-lc"], false),
("konsole", &["-e", "bash", "-lc"], false),
("xfce4-terminal", &[], true),
("tilix", &["--", "bash", "-lc"], false),
("mate-terminal", &["--", "bash", "-lc"], false),
];
let terms_default: &[(&str, &[&str], bool)] = &[
("alacritty", &["-e", "bash", "-lc"], false),
("ghostty", &["-e", "bash", "-lc"], false),
("kitty", &["bash", "-lc"], false),
("xterm", &["-hold", "-e", "bash", "-lc"], false),
("gnome-terminal", &["--", "bash", "-lc"], false),
("gnome-console", &["--", "bash", "-lc"], false),
("kgx", &["--", "bash", "-lc"], false),
("konsole", &["-e", "bash", "-lc"], false),
("xfce4-terminal", &[], true),
("tilix", &["--", "bash", "-lc"], false),
("mate-terminal", &["--", "bash", "-lc"], false),
];
let mut terms_owned: Vec<(&str, &[&str], bool)> = if is_gnome {
terms_gnome_first.to_vec()
} else {
terms_default.to_vec()
};
let preferred = crate::theme::settings()
.preferred_terminal
.trim()
.to_string();
if !preferred.is_empty()
&& let Some(pos) = terms_owned
.iter()
.position(|(name, _, _)| *name == preferred)
{
let entry = terms_owned.remove(pos);
terms_owned.insert(0, entry);
}
terms_owned
}
#[cfg(not(target_os = "windows"))]
/// What: Attempt to spawn a terminal from the candidates list.
///
/// Input:
/// - `terms_owned`: Candidate terminals in preference order.
/// - `script_exec`: Script execution command string.
/// - `cmd_str`: Full command string (for logging only).
/// - `is_wayland`: Whether running under Wayland.
///
/// Output:
/// - `true` if a terminal was successfully spawned, `false` otherwise.
///
/// Details:
/// - The candidate found earliest on `PATH` is tried first (with stdio detached);
///   its result is final. Only when no candidate is found that way does the
///   fallback walk every on-PATH candidate in list order without detaching.
fn attempt_terminal_spawn(
    terms_owned: &[(&str, &[&str], bool)],
    script_exec: &str,
    cmd_str: &str,
    is_wayland: bool,
) -> bool {
    // Preferred candidate: earliest match on PATH, spawned with detached stdio.
    if let Some(idx) = choose_terminal_index_prefer_path(terms_owned) {
        let (term, args, xfce_mode) = terms_owned[idx];
        return try_spawn_terminal(term, args, xfce_mode, script_exec, cmd_str, is_wayland, true)
            .unwrap_or(false);
    }
    // Fallback: spawn the first binary on PATH, in list order.
    terms_owned.iter().copied().any(|(term, args, xfce_mode)| {
        command_on_path(term)
            && try_spawn_terminal(
                term,
                args,
                xfce_mode,
                script_exec,
                cmd_str,
                is_wayland,
                false,
            )
            .unwrap_or(false)
    })
}
#[cfg(not(target_os = "windows"))]
/// What: Spawn a terminal to execute shell commands and optionally append a hold tail.
///
/// Input:
/// - `cmds`: Ordered list of shell snippets to execute.
/// - `hold`: When `true`, keeps the terminal open after command completion.
///
/// Output:
/// - Launches a terminal (or `bash`) running a temporary script that encapsulates the commands.
///
/// Details:
/// - Persists the command to a temp script to avoid argument-length issues.
/// - Prefers user-configured terminals, applies desktop-specific environment tweaks, and logs spawn attempts.
/// - During tests, this is a no-op to avoid opening real terminal windows.
pub fn spawn_shell_commands_in_terminal_with_hold(cmds: &[String], hold: bool) {
    // Skip actual spawning during tests unless PACSEA_TEST_OUT is set (indicates a test with fake terminal)
    #[cfg(test)]
    if std::env::var("PACSEA_TEST_OUT").is_err() {
        return;
    }
    if cmds.is_empty() {
        return;
    }
    // Tail keeps the window open: wait for a keypress, or sleep forever when stdin is unusable.
    let hold_tail = "; echo; echo 'Finished.'; echo 'Press any key to close...'; read -rn1 -s _ || (echo; echo 'Press Ctrl+C to close'; sleep infinity)";
    // `&&` chaining: a failing command stops the rest of the sequence.
    let joined = cmds.join(" && ");
    let cmd_str = if hold {
        format!("{joined}{hold_tail}")
    } else {
        joined
    };
    // Run via a temp script so long command strings never hit argv limits.
    let script_path = create_temp_script(&cmd_str);
    let script_path_str = script_path.to_string_lossy().to_string();
    let script_exec = format!("bash {}", shell_single_quote(&script_path_str));
    persist_command_to_log(&cmd_str);
    // Desktop/display detection drives candidate ordering and env tweaks.
    let desktop_env = std::env::var("XDG_CURRENT_DESKTOP").unwrap_or_default();
    let is_gnome = desktop_env.to_uppercase().contains("GNOME");
    let is_wayland = std::env::var_os("WAYLAND_DISPLAY").is_some();
    let terms_owned = build_terminal_candidates(is_gnome);
    log_to_terminal_log(&format!(
        "env desktop={} wayland={} script={} cmd_len={}\n",
        desktop_env,
        is_wayland,
        script_path_str,
        cmd_str.len()
    ));
    let launched = attempt_terminal_spawn(&terms_owned, &script_exec, &cmd_str, is_wayland);
    // Last resort: run the script headless through bash (no visible terminal).
    if !launched {
        log_to_terminal_log(&format!(
            "spawn term=bash args={:?} cmd_len={}\n",
            ["-lc"],
            cmd_str.len()
        ));
        let res = Command::new("bash").args(["-lc", &script_exec]).spawn();
        match &res {
            Ok(child) => {
                log_to_terminal_log(&format!("spawn result: ok pid={}\n", child.id()));
            }
            Err(e) => {
                log_to_terminal_log(&format!("spawn result: err error={e}\n"));
            }
        }
    }
}
#[cfg(all(test, not(target_os = "windows")))]
mod tests {
    #[test]
    /// What: Ensure `spawn_shell_commands_in_terminal` invokes GNOME Terminal with a double-dash separator.
    ///
    /// Inputs:
    /// - `cmds`: Single echo command executed via a temporary mock `gnome-terminal` script.
    ///
    /// Output:
    /// - Captured argv begins with `--`, `bash`, `-lc`, confirming safe argument ordering.
    ///
    /// Details:
    /// - Rewrites `PATH` to point at a fake executable that records arguments, then restores env vars
    ///   after spawning the terminal command.
    fn shell_uses_gnome_terminal_double_dash() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use std::path::PathBuf;
        // Unique per-PID/per-nanosecond directory keeps concurrent test runs isolated.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_shell_gnome_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        fs::create_dir_all(&dir).expect("create test directory");
        // args.txt receives one captured argv entry per line from the shim below.
        let mut out_path = dir.clone();
        out_path.push("args.txt");
        let mut term_path = dir.clone();
        term_path.push("gnome-terminal");
        // Fake gnome-terminal: truncates the capture file, then appends each argument.
        let script = "#!/bin/sh\n: > \"$PACSEA_TEST_OUT\"\nfor a in \"$@\"; do printf '%s\n' \"$a\" >> \"$PACSEA_TEST_OUT\"; done\n";
        fs::write(&term_path, script.as_bytes()).expect("failed to write test terminal script");
        let mut perms = fs::metadata(&term_path)
            .expect("failed to read test terminal script metadata")
            .permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&term_path, perms)
            .expect("failed to set test terminal script permissions");
        // Restrict PATH to the shim dir so only the fake terminal resolves;
        // PACSEA_TEST_OUT also opts the spawn path out of its test no-op guard.
        let orig_path = std::env::var_os("PATH");
        unsafe {
            std::env::set_var("PATH", dir.display().to_string());
            std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        }
        let cmds = vec!["echo hi".to_string()];
        super::spawn_shell_commands_in_terminal(&cmds);
        // Wait for file to be created with retries
        let mut attempts = 0;
        while !out_path.exists() && attempts < 50 {
            std::thread::sleep(std::time::Duration::from_millis(10));
            attempts += 1;
        }
        let body = fs::read_to_string(&out_path).expect("fake terminal args file written");
        let lines: Vec<&str> = body.lines().collect();
        // The wrapper must pass `--` before the bash invocation.
        assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
        assert_eq!(lines[0], "--");
        assert_eq!(lines[1], "bash");
        assert_eq!(lines[2], "-lc");
        // Restore mutated environment to avoid leaking state into other tests.
        unsafe {
            if let Some(v) = orig_path {
                std::env::set_var("PATH", v);
            } else {
                std::env::remove_var("PATH");
            }
            std::env::remove_var("PACSEA_TEST_OUT");
        }
    }
}
#[cfg(target_os = "windows")]
/// What: Display the intended shell command sequence on Windows where execution is unsupported.
///
/// Input:
/// - `cmds`: Command fragments to present to the user.
///
/// Output:
/// - Launches a `PowerShell` window (if available and command contains "DRY RUN") for dry-run simulation, or `cmd` window otherwise.
///
/// Details:
/// - When commands contain "DRY RUN" and `PowerShell` is available, uses `PowerShell` to simulate the operation.
/// - Joins commands with `&&` for readability and uses `start` to detach the window.
pub fn spawn_shell_commands_in_terminal(cmds: &[String]) {
    let msg = if cmds.is_empty() {
        "Nothing to run".to_string()
    } else {
        cmds.join(" && ")
    };
    // Check if this is a dry-run operation (for downgrade, etc.)
    let is_dry_run = msg.contains("DRY RUN");
    if is_dry_run && super::utils::is_powershell_available() {
        // Use PowerShell to simulate the operation
        // (single quotes are doubled to stay inside the single-quoted string).
        let escaped_msg = msg.replace('\'', "''");
        let powershell_cmd = format!(
            "Write-Host '{escaped_msg}' -ForegroundColor Yellow; Write-Host ''; Write-Host 'Press any key to close...'; $null = $Host.UI.RawUI.ReadKey('NoEcho,IncludeKeyDown')"
        );
        let _ = Command::new("powershell.exe")
            .args(["-NoProfile", "-Command", &powershell_cmd])
            .spawn();
    } else {
        // `start` detaches a new cmd window; /K keeps it open after the echo.
        let _ = Command::new("cmd")
            .args([
                "/C",
                "start",
                "Pacsea Update",
                "cmd",
                "/K",
                &format!("echo {msg}"),
            ])
            .spawn();
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/utils.rs | src/install/utils.rs | #[cfg(target_os = "windows")]
/// What: Determine whether a command is available on the Windows `PATH`.
///
/// Input:
/// - `cmd`: Executable name to probe.
///
/// Output:
/// - `true` when the command resolves via the `which` crate; otherwise `false`.
///
/// Details:
/// - Leverages `which::which`, inheriting its support for PATHEXT resolution.
#[must_use]
pub fn command_on_path(cmd: &str) -> bool {
    // `which` performs the PATH search and PATHEXT (.exe/.bat/...) expansion.
    which::which(cmd).is_ok()
}
#[cfg(target_os = "windows")]
/// What: Check if `PowerShell` is available on Windows.
///
/// Output:
/// - `true` when `PowerShell` can be found on PATH; otherwise `false`.
///
/// Details:
/// - Checks for `powershell.exe` or `pwsh.exe` (`PowerShell` Core) on the system.
/// - `#[must_use]` added for consistency with the other pure predicates in this module.
#[must_use]
pub fn is_powershell_available() -> bool {
    command_on_path("powershell.exe") || command_on_path("pwsh.exe")
}
#[cfg(not(target_os = "windows"))]
/// What: Determine whether a command is available on the Unix `PATH`.
///
/// Input:
/// - `cmd`: Program name, or an explicit path containing a separator.
///
/// Output:
/// - `true` when an executable regular file is found.
///
/// Details:
/// - Explicit paths (containing the platform separator) are checked directly.
/// - Otherwise every `PATH` entry is scanned in order; on Unix the execute
///   permission bits are honoured, and Windows builds also consult `PATHEXT`.
#[must_use]
pub fn command_on_path(cmd: &str) -> bool {
    use std::path::Path;
    /// True when `p` is a regular file with at least one execute bit set.
    fn is_exec(p: &Path) -> bool {
        if !p.is_file() {
            return false;
        }
        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            std::fs::metadata(p)
                .map(|meta| meta.permissions().mode() & 0o111 != 0)
                .unwrap_or(false)
        }
        #[cfg(not(unix))]
        {
            true
        }
    }
    // Explicit paths are probed directly rather than searched on PATH.
    if cmd.contains(std::path::MAIN_SEPARATOR) {
        return is_exec(Path::new(cmd));
    }
    let Some(paths) = std::env::var_os("PATH") else {
        return false;
    };
    std::env::split_paths(&paths).any(|dir| {
        if is_exec(&dir.join(cmd)) {
            return true;
        }
        #[cfg(windows)]
        {
            if let Some(pathext) = std::env::var_os("PATHEXT") {
                for ext in pathext.to_string_lossy().split(';') {
                    if dir.join(format!("{cmd}{ext}")).is_file() {
                        return true;
                    }
                }
            }
        }
        false
    })
}
#[cfg(not(target_os = "windows"))]
/// What: Locate the first available terminal executable from a preference list.
///
/// Input:
/// - `terms`: Tuples of `(binary, args, needs_xfce_command)` ordered by preference.
///
/// Output:
/// - `Some(index)` into `terms` for the first executable found; `None` when nothing matches.
///
/// Details:
/// - Walks `PATH` directories in order; within each directory the `terms`
///   ordering breaks ties, so earlier `PATH` entries win overall.
pub fn choose_terminal_index_prefer_path(terms: &[(&str, &[&str], bool)]) -> Option<usize> {
    use std::os::unix::fs::PermissionsExt;
    let paths = std::env::var_os("PATH")?;
    for dir in std::env::split_paths(&paths) {
        for (idx, (name, _args, _xfce)) in terms.iter().enumerate() {
            let candidate = dir.join(name);
            if !candidate.is_file() {
                continue;
            }
            if let Ok(meta) = std::fs::metadata(&candidate) {
                if meta.permissions().mode() & 0o111 != 0 {
                    return Some(idx);
                }
            }
        }
    }
    None
}
/// What: Safely single-quote an arbitrary string for POSIX shells.
///
/// Input:
/// - `s`: Text to quote.
///
/// Output:
/// - The input wrapped in single quotes, with every embedded `'` escaped via
///   the standard `'"'"'` sequence.
///
/// Details:
/// - Returns `''` for empty input so the shell still sees one empty argument.
#[must_use]
pub fn shell_single_quote(s: &str) -> String {
    if s.is_empty() {
        return "''".to_string();
    }
    // Close the quote, emit a double-quoted apostrophe, reopen the quote.
    format!("'{}'", s.replace('\'', "'\"'\"'"))
}
#[cfg(all(test, not(target_os = "windows")))]
mod tests {
    #[test]
    /// What: Validate that `command_on_path` recognises executables present on the customised `PATH`.
    ///
    /// Inputs:
    /// - Temporary directory containing a shim `mycmd` script made executable.
    /// - Environment `PATH` overridden to reference only the temp directory.
    ///
    /// Output:
    /// - Returns `true` for `mycmd` and `false` for a missing binary, confirming detection logic.
    ///
    /// Details:
    /// - Restores the original `PATH` and cleans up the temporary directory after assertions.
    fn utils_command_on_path_detects_executable() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use std::path::PathBuf;
        // Unique per-PID/per-nanosecond directory keeps concurrent test runs isolated.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_utils_path_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&dir);
        // Shim executable the function should find once PATH points at `dir`.
        let mut cmd_path = dir.clone();
        cmd_path.push("mycmd");
        fs::write(&cmd_path, b"#!/bin/sh\nexit 0\n").expect("Failed to write test command script");
        let mut perms = fs::metadata(&cmd_path)
            .expect("Failed to read test command script metadata")
            .permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&cmd_path, perms)
            .expect("Failed to set test command script permissions");
        // Restrict PATH to the temp dir so lookups are fully deterministic.
        let orig_path = std::env::var_os("PATH");
        unsafe { std::env::set_var("PATH", dir.display().to_string()) };
        assert!(super::command_on_path("mycmd"));
        assert!(!super::command_on_path("notexist"));
        // Restore PATH to avoid leaking state into other tests.
        unsafe {
            if let Some(v) = orig_path {
                std::env::set_var("PATH", v);
            } else {
                std::env::remove_var("PATH");
            }
        }
        let _ = fs::remove_dir_all(&dir);
    }
    #[test]
    /// What: Ensure `choose_terminal_index_prefer_path` honours the preference ordering when multiple terminals exist.
    ///
    /// Inputs:
    /// - Temporary directory with an executable `kitty` shim placed on `PATH`.
    /// - Preference list where `gnome-terminal` precedes `kitty` but is absent.
    ///
    /// Output:
    /// - Function returns index `1`, selecting `kitty`, the first available terminal in the list.
    ///
    /// Details:
    /// - Saves and restores the `PATH` environment variable while ensuring the temp directory is removed.
    fn utils_choose_terminal_index_prefers_first_present_in_terms_order() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use std::path::PathBuf;
        // Unique per-PID/per-nanosecond directory keeps concurrent test runs isolated.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_utils_terms_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&dir);
        // Only `kitty` exists on disk; `gnome-terminal` stays absent on purpose.
        let mut kitty = dir.clone();
        kitty.push("kitty");
        fs::write(&kitty, b"#!/bin/sh\nexit 0\n").expect("Failed to write test kitty script");
        let mut perms = fs::metadata(&kitty)
            .expect("Failed to read test kitty script metadata")
            .permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&kitty, perms).expect("Failed to set test kitty script permissions");
        let terms: &[(&str, &[&str], bool)] =
            &[("gnome-terminal", &[], false), ("kitty", &[], false)];
        let orig_path = std::env::var_os("PATH");
        unsafe { std::env::set_var("PATH", dir.display().to_string()) };
        // Index 1 = kitty: the first candidate actually present on PATH.
        let idx = super::choose_terminal_index_prefer_path(terms).expect("index");
        assert_eq!(idx, 1);
        // Restore PATH to avoid leaking state into other tests.
        unsafe {
            if let Some(v) = orig_path {
                std::env::set_var("PATH", v);
            } else {
                std::env::remove_var("PATH");
            }
        }
        let _ = fs::remove_dir_all(&dir);
    }
    #[test]
    /// What: Check that `shell_single_quote` escapes edge cases safely.
    ///
    /// Inputs:
    /// - Three sample strings: empty, plain ASCII, and text containing a single quote.
    ///
    /// Output:
    /// - Returns properly quoted strings, using `''` for empty and the standard POSIX escape for embedded quotes.
    ///
    /// Details:
    /// - Covers representative cases without filesystem interaction to guard future regressions.
    fn utils_shell_single_quote_handles_edges() {
        assert_eq!(super::shell_single_quote(""), "''");
        assert_eq!(super::shell_single_quote("abc"), "'abc'");
        assert_eq!(super::shell_single_quote("a'b"), "'a'\"'\"'b'");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/executor.rs | src/install/executor.rs | //! PTY-based command executor for in-TUI execution.
use crate::state::{PackageItem, modal::CascadeMode};
/// What: Request types for command execution.
///
/// Inputs:
/// - Various operation types (Install, Remove, etc.) with their parameters.
///
/// Output:
/// - Sent to executor worker to trigger command execution.
///
/// Details:
/// - Each variant contains all necessary information to build and execute the command.
/// - Every variant carries a `dry_run` flag that simulates the operation without applying changes.
#[derive(Debug, Clone)]
pub enum ExecutorRequest {
    /// Install packages.
    Install {
        /// Packages to install.
        items: Vec<PackageItem>,
        /// Optional sudo password for official packages.
        password: Option<String>,
        /// Whether to run in dry-run mode.
        dry_run: bool,
    },
    /// Remove packages.
    Remove {
        /// Package names to remove.
        names: Vec<String>,
        /// Optional sudo password.
        password: Option<String>,
        /// Cascade removal mode.
        cascade: CascadeMode,
        /// Whether to run in dry-run mode.
        dry_run: bool,
    },
    /// Downgrade packages.
    Downgrade {
        /// Package names to downgrade.
        names: Vec<String>,
        /// Optional sudo password.
        password: Option<String>,
        /// Whether to run in dry-run mode.
        dry_run: bool,
    },
    /// Custom command execution (for special cases like paru/yay installation).
    CustomCommand {
        /// Command string to execute.
        command: String,
        /// Optional sudo password for commands that need sudo (e.g., makepkg -si).
        password: Option<String>,
        /// Whether to run in dry-run mode.
        dry_run: bool,
    },
    /// System update (mirrors, pacman, AUR, cache).
    Update {
        /// Commands to execute in sequence.
        commands: Vec<String>,
        /// Optional sudo password for commands that need sudo.
        password: Option<String>,
        /// Whether to run in dry-run mode.
        dry_run: bool,
    },
    /// Security scan for AUR package (excluding aur-sleuth).
    Scan {
        /// Package name to scan.
        package: String,
        /// `ClamAV` scan flag.
        do_clamav: bool,
        /// Trivy scan flag.
        do_trivy: bool,
        /// Semgrep scan flag.
        do_semgrep: bool,
        /// `ShellCheck` scan flag.
        do_shellcheck: bool,
        /// `VirusTotal` scan flag.
        do_virustotal: bool,
        /// Custom pattern scan flag.
        do_custom: bool,
        /// Whether to run in dry-run mode.
        dry_run: bool,
    },
}
/// What: Output messages from command execution.
///
/// Inputs:
/// - Generated by executor worker during command execution.
///
/// Output:
/// - Sent to main event loop for display in `PreflightExec` modal.
///
/// Details:
/// - Line messages contain output from the `PTY`, Finished indicates completion.
#[derive(Debug, Clone)]
pub enum ExecutorOutput {
    /// Output line from command execution.
    Line(String),
    /// Replace the last line (used for progress bars with carriage return).
    ReplaceLastLine(String),
    /// Command execution finished.
    Finished {
        /// Whether execution succeeded.
        success: bool,
        /// Exit code if available.
        exit_code: Option<i32>,
        /// Name of the failed command (if execution failed and this is an update operation).
        failed_command: Option<String>,
    },
    /// Error occurred during execution.
    // NOTE(review): presumably non-fatal and still followed by `Finished` — confirm in the worker implementation.
    Error(String),
}
/// What: Check whether any of the given package names is already installed or provided.
///
/// Inputs:
/// - `names`: Package names to check.
///
/// Output:
/// - `true` if at least one name is installed, or provided by an installed package.
///
/// Details:
/// - Queries the installed and provided sets once and reuses them for every name,
///   replacing the reinstall detection previously duplicated at each call site.
fn any_installed(names: &[String]) -> bool {
    let installed_set = crate::logic::deps::get_installed_packages();
    let provided_set = crate::logic::deps::get_provided_packages(&installed_set);
    names.iter().any(|name| {
        crate::logic::deps::is_package_installed_or_provided(name, &installed_set, &provided_set)
    })
}
/// What: Build install command string without hold tail for `PTY` execution.
///
/// Inputs:
/// - `items`: Packages to install.
/// - `password`: Optional sudo password; when present it is piped to `sudo -S` for official installs.
/// - `dry_run`: Whether to run in dry-run mode.
///
/// Output:
/// - Command string ready for `PTY` execution (no hold tail).
///
/// Details:
/// - Groups official and `AUR` packages separately; any `AUR` package routes the whole batch
///   through an `AUR` helper (paru/yay), which can install official dependencies too.
/// - Uses `--noconfirm` for non-interactive execution and adds `--needed` only when no package
///   is already installed (`--needed` would silently skip a requested reinstall).
/// - Always uses `sudo -S` for official packages; without a password, sudo prompts on the `PTY`.
/// - Removes hold tail since we're not spawning a terminal.
#[must_use]
pub fn build_install_command_for_executor(
    items: &[PackageItem],
    password: Option<&str>,
    dry_run: bool,
) -> String {
    use super::command::aur_install_body;
    use super::utils::shell_single_quote;
    use crate::state::Source;
    // Split the batch by source so we can choose pacman vs an AUR helper.
    let mut official: Vec<String> = Vec::new();
    let mut aur: Vec<String> = Vec::new();
    for item in items {
        match item.source {
            Source::Official { .. } => official.push(item.name.clone()),
            Source::Aur => aur.push(item.name.clone()),
        }
    }
    if !aur.is_empty() {
        // AUR helpers can resolve official dependencies as well, so pass every name.
        let all: Vec<String> = items.iter().map(|p| p.name.clone()).collect();
        let flags = if any_installed(&all) {
            "--noconfirm"
        } else {
            "--needed --noconfirm"
        };
        let n = all.join(" ");
        if dry_run {
            let cmd = format!("(paru -S {flags} {n} || yay -S {flags} {n})");
            let quoted = shell_single_quote(&cmd);
            format!("echo DRY RUN: {quoted}")
        } else {
            aur_install_body(&format!("-S {flags}"), &n)
        }
    } else if !official.is_empty() {
        let flags = if any_installed(&official) {
            "--noconfirm"
        } else {
            "--needed --noconfirm"
        };
        let n = official.join(" ");
        if dry_run {
            let cmd = format!("sudo pacman -S {flags} {n}");
            let quoted = shell_single_quote(&cmd);
            format!("echo DRY RUN: {quoted}")
        } else {
            // Sync the database first (pacman -Sy) so the latest versions are available,
            // then install the packages.
            let install_cmd = format!("pacman -S {flags} {n}");
            // Use printf to pipe the password to sudo -S (more reliable than echo).
            password.map_or_else(
                || format!("sudo pacman -Sy && sudo {install_cmd}"),
                |pass| {
                    let escaped = shell_single_quote(pass);
                    // Sync first, then install - a single password covers both sudo calls.
                    format!("printf '%s\\n' {escaped} | sudo -S pacman -Sy && printf '%s\\n' {escaped} | sudo -S {install_cmd}")
                },
            )
        }
    } else if dry_run {
        "echo DRY RUN: nothing to install".to_string()
    } else {
        "echo nothing to install".to_string()
    }
}
/// What: Build remove command string without hold tail for `PTY` execution.
///
/// Inputs:
/// - `names`: Package names to remove.
/// - `password`: Optional sudo password; when present it is piped to `sudo -S`.
/// - `cascade`: Cascade removal mode selecting `-R`, `-Rs`, or `-Rns`.
/// - `dry_run`: Whether to run in dry-run mode.
///
/// Output:
/// - Command string ready for `PTY` execution (no hold tail).
///
/// Details:
/// - Uses `--noconfirm` for non-interactive execution.
/// - An empty `names` list produces a harmless `echo` no-op command.
/// - No hold tail is appended since we're not spawning a terminal.
#[must_use]
pub fn build_remove_command_for_executor(
    names: &[String],
    password: Option<&str>,
    cascade: crate::state::modal::CascadeMode,
    dry_run: bool,
) -> String {
    use super::utils::shell_single_quote;
    if names.is_empty() {
        let msg = if dry_run {
            "echo DRY RUN: nothing to remove"
        } else {
            "echo nothing to remove"
        };
        return msg.to_string();
    }
    let flag = cascade.flag();
    let joined = names.join(" ");
    if dry_run {
        // Show the exact command that would run, safely quoted.
        let preview = shell_single_quote(&format!("sudo pacman {flag} --noconfirm {joined}"));
        return format!("echo DRY RUN: {preview}");
    }
    let base_cmd = format!("pacman {flag} --noconfirm {joined}");
    match password {
        Some(pass) => {
            // Pipe the password to sudo -S via printf (more reliable than echo).
            let escaped = shell_single_quote(pass);
            format!("printf '%s\\n' {escaped} | sudo -S {base_cmd}")
        }
        None => format!("sudo {base_cmd}"),
    }
}
/// What: Build downgrade command string without hold tail for `PTY` execution.
///
/// Inputs:
/// - `names`: Package names to downgrade.
/// - `_password`: Optional sudo password (unused - password is written to PTY stdin when sudo prompts).
/// - `dry_run`: Whether to run in dry-run mode.
///
/// Output:
/// - Command string ready for `PTY` execution (no hold tail).
///
/// Details:
/// - Uses the `downgrade` tool to downgrade packages, after verifying it is available.
/// - Password is written to PTY stdin when sudo prompts, so nothing is piped here.
/// - No hold tail is appended since we're not spawning a terminal.
#[must_use]
pub fn build_downgrade_command_for_executor(
    names: &[String],
    _password: Option<&str>,
    dry_run: bool,
) -> String {
    use super::utils::shell_single_quote;
    if names.is_empty() {
        let msg = if dry_run {
            "echo DRY RUN: nothing to downgrade"
        } else {
            "echo nothing to downgrade"
        };
        return msg.to_string();
    }
    let joined = names.join(" ");
    if dry_run {
        // Preview the would-be command, safely quoted for display.
        let preview = shell_single_quote(&format!("sudo downgrade {joined}"));
        format!("echo DRY RUN: {preview}")
    } else {
        // Guard on the downgrade tool being present before invoking it.
        // NOTE(review): the availability check uses sudo; the password is supplied
        // via PTY stdin when sudo prompts.
        format!(
            "if (command -v downgrade >/dev/null 2>&1) || sudo pacman -Qi downgrade >/dev/null 2>&1; then sudo downgrade {joined}; else echo 'downgrade tool not found. Install \"downgrade\" package.'; fi"
        )
    }
}
/// What: Build system update command string by chaining multiple commands.
///
/// Inputs:
/// - `commands`: List of commands to execute in sequence.
/// - `password`: Optional sudo password for commands that need sudo.
/// - `dry_run`: Whether to run in dry-run mode.
///
/// Output:
/// - Command string ready for `PTY` execution (commands chained with `&&`).
///
/// Details:
/// - Chains commands with `&&` so execution stops on first failure.
/// - For commands starting with `sudo`, pipes password if provided.
/// - In dry-run mode, wraps each command in `echo DRY RUN:`.
/// - Removes hold tail since we're not spawning a terminal.
#[must_use]
pub fn build_update_command_for_executor(
commands: &[String],
password: Option<&str>,
dry_run: bool,
) -> String {
use super::utils::shell_single_quote;
if commands.is_empty() {
return if dry_run {
"echo DRY RUN: nothing to update".to_string()
} else {
"echo nothing to update".to_string()
};
}
let processed_commands: Vec<String> = if dry_run {
// Check if we should simulate failure for testing (first command only, if it's pacman)
let simulate_failure = std::env::var("PACSEA_TEST_SIMULATE_PACMAN_FAILURE").is_ok()
&& !commands.is_empty()
&& commands[0].contains("pacman");
if simulate_failure {
tracing::info!(
"[DRY-RUN] Simulating pacman failure for testing - first command will fail with exit code 1"
);
}
commands
.iter()
.enumerate()
.map(|(idx, c)| {
// Properly quote the command to avoid syntax errors with complex shell constructs
let quoted = shell_single_quote(c);
if simulate_failure && idx == 0 {
// Simulate pacman failure for testing confirmation popup
// Use false to ensure the command fails with exit code 1
// The && will prevent subsequent commands from running
format!("echo DRY RUN: {quoted} && false")
} else {
format!("echo DRY RUN: {quoted}")
}
})
.collect()
} else {
commands
.iter()
.map(|cmd| {
// Check if command needs sudo and has password
password.map_or_else(
|| cmd.clone(),
|pass| {
if cmd.starts_with("sudo ") {
// Extract the command after "sudo "
let base_cmd = cmd.strip_prefix("sudo ").unwrap_or(cmd);
let escaped = shell_single_quote(pass);
format!("printf '%s\\n' {escaped} | sudo -S {base_cmd}")
} else {
// Command doesn't need password or already has it handled
cmd.clone()
}
},
)
})
.collect()
};
let joined = processed_commands.join(" && ");
// If password is provided and any command contains "sudo", cache credentials first
// This handles cases where sudo is called inside shell scripts (like mirror update)
// that don't start with "sudo " but contain sudo calls internally
if let Some(pass) = password {
let has_sudo_anywhere = commands.iter().any(|c| c.contains("sudo"));
if has_sudo_anywhere {
let escaped = shell_single_quote(pass);
// Cache sudo credentials first using sudo -v, then run the commands
// Using `;` ensures commands run even if credential caching has issues
return format!("printf '%s\\n' {escaped} | sudo -S -v 2>/dev/null ; {joined}");
}
}
joined
}
/// What: Build scan command string for `PTY` execution (excluding aur-sleuth).
///
/// Inputs:
/// - `package`: Package name to scan.
/// - `do_clamav`/`do_trivy`/`do_semgrep`/`do_shellcheck`/`do_virustotal`/`do_custom`: Scan configuration flags.
/// - `dry_run`: Whether to run in dry-run mode.
///
/// Output:
/// - Command string ready for `PTY` execution (no hold tail, excludes aur-sleuth).
///
/// Details:
/// - Exports `PACSEA_SCAN_DO_*` toggles and the default `PACSEA_PATTERNS_*` sets
///   before appending the scan pipeline (aur-sleuth runs separately in a terminal).
/// - No hold tail is appended since we're not spawning a terminal.
#[cfg(not(target_os = "windows"))]
#[must_use]
#[allow(clippy::fn_params_excessive_bools, clippy::too_many_arguments)]
pub fn build_scan_command_for_executor(
    package: &str,
    do_clamav: bool,
    do_trivy: bool,
    do_semgrep: bool,
    do_shellcheck: bool,
    do_virustotal: bool,
    do_custom: bool,
    dry_run: bool,
) -> String {
    use super::utils::shell_single_quote;
    use crate::install::scan::pkg::build_scan_cmds_for_pkg_without_sleuth;
    // Stage toggles are exported first so subsequent pipeline steps honor the selection.
    let toggles = [
        ("CLAMAV", do_clamav),
        ("TRIVY", do_trivy),
        ("SEMGREP", do_semgrep),
        ("SHELLCHECK", do_shellcheck),
        ("VIRUSTOTAL", do_virustotal),
        ("CUSTOM", do_custom),
    ];
    let mut cmds: Vec<String> = Vec::new();
    for (suffix, enabled) in toggles {
        let value = if enabled { "1" } else { "0" };
        cmds.push(format!("export PACSEA_SCAN_DO_{suffix}={value}"));
    }
    // Export default pattern sets (ERE fragments consumed by grep -E in the pipeline).
    cmds.push("export PACSEA_PATTERNS_CRIT='/dev/(tcp|udp)/|bash -i *>& *[^ ]*/dev/(tcp|udp)/[0-9]+|exec [0-9]{2,}<>/dev/(tcp|udp)/|rm -rf[[:space:]]+/|dd if=/dev/zero of=/dev/sd[a-z]|[>]{1,2}[[:space:]]*/dev/sd[a-z]|: *\\(\\) *\\{ *: *\\| *: *& *\\};:|/etc/sudoers([[:space:]>]|$)|echo .*[>]{2}.*(/etc/sudoers|/root/.ssh/authorized_keys)|/etc/ld\\.so\\.preload|LD_PRELOAD=|authorized_keys.*[>]{2}|ssh-rsa [A-Za-z0-9+/=]+.*[>]{2}.*authorized_keys|curl .*(169\\.254\\.169\\.254)'".to_string());
    cmds.push("export PACSEA_PATTERNS_HIGH='eval|base64 -d|wget .*(sh|bash|dash|ksh|zsh)([^A-Za-z]|$)|curl .*(sh|bash|dash|ksh|zsh)([^A-Za-z]|$)|sudo[[:space:]]|chattr[[:space:]]|useradd|adduser|groupadd|systemctl|service[[:space:]]|crontab|/etc/cron\\.|[>]{2}.*(\\.bashrc|\\.bash_profile|/etc/profile|\\.zshrc)|cat[[:space:]]+/etc/shadow|cat[[:space:]]+~/.ssh/id_rsa|cat[[:space:]]+~/.bash_history|systemctl stop (auditd|rsyslog)|service (auditd|rsyslog) stop|scp .*@|curl -F|nc[[:space:]].*<|tar -czv?f|zip -r'".to_string());
    cmds.push("export PACSEA_PATTERNS_MEDIUM='whoami|uname -a|hostname|id|groups|nmap|netstat -anp|ss -anp|ifconfig|ip addr|arp -a|grep -ri .*secret|find .*-name.*(password|\\.key)|env[[:space:]]*\\|[[:space:]]*grep -i pass|wget https?://|curl https?://'".to_string());
    cmds.push("export PACSEA_PATTERNS_LOW='http_proxy=|https_proxy=|ALL_PROXY=|yes[[:space:]]+> */dev/null *&|ulimit -n [0-9]{5,}'".to_string());
    // Append the scan pipeline commands (excluding aur-sleuth).
    cmds.extend(build_scan_cmds_for_pkg_without_sleuth(package));
    let full_cmd = cmds.join(" && ");
    if dry_run {
        let quoted = shell_single_quote(&full_cmd);
        format!("echo DRY RUN: {quoted}")
    } else {
        full_cmd
    }
}
/// What: Unit tests for the executor command builders in this module.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::Source;

    /// What: Create a test package item with specified source.
    ///
    /// Inputs:
    /// - `name`: Package name
    /// - `source`: Package source (Official or AUR)
    ///
    /// Output:
    /// - `PackageItem` ready for testing
    ///
    /// Details:
    /// - Helper to create test packages with consistent structure
    fn create_test_package(name: &str, source: Source) -> PackageItem {
        PackageItem {
            name: name.into(),
            version: "1.0.0".into(),
            description: String::new(),
            source,
            popularity: None,
            out_of_date: None,
            orphaned: false,
        }
    }

    #[test]
    /// What: Verify executor command builder creates correct commands without hold tail.
    ///
    /// Inputs:
    /// - Official and AUR packages.
    /// - Optional password.
    /// - Dry-run flag.
    ///
    /// Output:
    /// - Commands without hold tail, suitable for PTY execution.
    ///
    /// Details:
    /// - Ensures commands are properly formatted and don't include terminal hold prompts.
    /// - NOTE(review): the expected flags branch on the host's installed packages,
    ///   so this test is environment-dependent by design.
    fn executor_build_install_command_variants() {
        let official_pkg = create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        );
        let aur_pkg = create_test_package("yay-bin", Source::Aur);
        // Official package without password
        // Check if package is installed to determine expected flags
        let installed_set = crate::logic::deps::get_installed_packages();
        let provided_set = crate::logic::deps::get_provided_packages(&installed_set);
        let is_installed = crate::logic::deps::is_package_installed_or_provided(
            "ripgrep",
            &installed_set,
            &provided_set,
        );
        let cmd1 =
            build_install_command_for_executor(std::slice::from_ref(&official_pkg), None, false);
        if is_installed {
            // If installed, should use only --noconfirm
            assert!(cmd1.contains("sudo pacman -S --noconfirm ripgrep"));
            assert!(!cmd1.contains("--needed"));
        } else {
            // If not installed, should use --needed --noconfirm
            assert!(cmd1.contains("sudo pacman -S --needed --noconfirm ripgrep"));
        }
        assert!(!cmd1.contains("Press any key to close"));
        // Official package with password
        let cmd2 = build_install_command_for_executor(
            std::slice::from_ref(&official_pkg),
            Some("pass"),
            false,
        );
        assert!(cmd2.contains("printf "));
        if is_installed {
            // If installed, should use only --noconfirm
            assert!(cmd2.contains("sudo -S pacman -S --noconfirm ripgrep"));
            assert!(!cmd2.contains("--needed"));
        } else {
            // If not installed, should use --needed --noconfirm
            assert!(cmd2.contains("sudo -S pacman -S --needed --noconfirm ripgrep"));
        }
        // AUR package
        let cmd3 = build_install_command_for_executor(std::slice::from_ref(&aur_pkg), None, false);
        assert!(cmd3.contains("command -v paru"));
        assert!(!cmd3.contains("Press any key to close"));
        // Dry run
        let cmd4 = build_install_command_for_executor(&[official_pkg], None, true);
        assert!(cmd4.starts_with("echo DRY RUN:"));
    }

    #[test]
    /// What: Verify command builder handles mixed official and AUR packages.
    ///
    /// Inputs:
    /// - Mixed list of official and AUR packages.
    ///
    /// Output:
    /// - Command that installs all packages using appropriate tool.
    ///
    /// Details:
    /// - When AUR packages are present, command should use AUR helper for all packages.
    fn executor_build_mixed_packages() {
        let official_pkg = create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        );
        let aur_pkg = create_test_package("yay-bin", Source::Aur);
        let cmd = build_install_command_for_executor(&[official_pkg, aur_pkg], None, false);
        // When AUR packages are present, should use AUR helper
        assert!(cmd.contains("command -v paru") || cmd.contains("command -v yay"));
    }

    #[test]
    /// What: Verify command builder handles empty package list.
    ///
    /// Inputs:
    /// - Empty package list.
    ///
    /// Output:
    /// - Command that indicates nothing to install.
    ///
    /// Details:
    /// - Empty list should produce a safe no-op command.
    fn executor_build_empty_list() {
        let cmd = build_install_command_for_executor(&[], None, false);
        assert!(cmd.contains("nothing to install") || cmd.is_empty());
    }

    #[test]
    /// What: Verify command builder handles multiple official packages.
    ///
    /// Inputs:
    /// - Multiple official packages.
    ///
    /// Output:
    /// - Command that installs all packages via pacman.
    ///
    /// Details:
    /// - Multiple packages should be space-separated in the command.
    /// - NOTE(review): expected flags depend on whether either package is
    ///   installed on the host running the tests.
    fn executor_build_multiple_official() {
        let pkg1 = create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        );
        let pkg2 = create_test_package(
            "fd",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        );
        // Check if packages are installed to determine expected flags
        let installed_set = crate::logic::deps::get_installed_packages();
        let provided_set = crate::logic::deps::get_provided_packages(&installed_set);
        let ripgrep_installed = crate::logic::deps::is_package_installed_or_provided(
            "ripgrep",
            &installed_set,
            &provided_set,
        );
        let fd_installed = crate::logic::deps::is_package_installed_or_provided(
            "fd",
            &installed_set,
            &provided_set,
        );
        let has_reinstall = ripgrep_installed || fd_installed;
        let cmd = build_install_command_for_executor(&[pkg1, pkg2], None, false);
        assert!(cmd.contains("ripgrep"));
        assert!(cmd.contains("fd"));
        if has_reinstall {
            // If any package is installed, should use only --noconfirm
            assert!(cmd.contains("pacman -S --noconfirm"));
            assert!(!cmd.contains("--needed"));
        } else {
            // If no packages are installed, should use --needed --noconfirm
            assert!(cmd.contains("pacman -S --needed --noconfirm"));
        }
    }

    #[test]
    /// What: Verify dry-run mode produces echo commands.
    ///
    /// Inputs:
    /// - Package list with `dry_run=true`.
    ///
    /// Output:
    /// - Command that starts with "echo DRY RUN:".
    ///
    /// Details:
    /// - Dry-run should never execute actual install commands.
    fn executor_build_dry_run() {
        let pkg = create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        );
        let cmd = build_install_command_for_executor(&[pkg], None, true);
        assert!(cmd.starts_with("echo DRY RUN:"));
        // In dry-run mode, the command is wrapped in echo, so it may contain the original command text
        // The important thing is that it starts with "echo DRY RUN:" which prevents execution
    }

    #[test]
    /// What: Verify password is properly escaped in command.
    ///
    /// Inputs:
    /// - Official package with password containing special characters.
    ///
    /// Output:
    /// - Command with properly escaped password.
    ///
    /// Details:
    /// - Password should be single-quoted to prevent shell injection.
    fn executor_build_password_escaping() {
        let pkg = create_test_package(
            "ripgrep",
            Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
        );
        // Password with special characters
        let password = "pass'word\"with$special";
        let cmd = build_install_command_for_executor(&[pkg], Some(password), false);
        assert!(cmd.contains("printf"));
        assert!(cmd.contains("sudo -S"));
        // Password should be properly quoted
        assert!(cmd.contains('\'') || cmd.contains('"'));
    }

    #[test]
    /// What: Verify remove command builder creates correct commands without hold tail.
    ///
    /// Inputs:
    /// - Package names, cascade mode, optional password, dry-run flag.
    ///
    /// Output:
    /// - Commands without hold tail, suitable for PTY execution.
    ///
    /// Details:
    /// - Ensures commands are properly formatted and don't include terminal hold prompts.
    fn executor_build_remove_command_variants() {
        use crate::state::modal::CascadeMode;
        let names = vec!["test-pkg1".to_string(), "test-pkg2".to_string()];
        // Basic mode without password
        let cmd1 = build_remove_command_for_executor(&names, None, CascadeMode::Basic, false);
        assert!(cmd1.contains("sudo pacman -R --noconfirm"));
        assert!(cmd1.contains("test-pkg1"));
        assert!(cmd1.contains("test-pkg2"));
        assert!(!cmd1.contains("Press any key to close"));
        // Cascade mode with password
        let cmd2 =
            build_remove_command_for_executor(&names, Some("pass"), CascadeMode::Cascade, false);
        assert!(cmd2.contains("printf "));
        assert!(cmd2.contains("sudo -S pacman -Rs --noconfirm"));
        // CascadeWithConfigs mode
        let cmd3 =
            build_remove_command_for_executor(&names, None, CascadeMode::CascadeWithConfigs, false);
        assert!(cmd3.contains("sudo pacman -Rns --noconfirm"));
        // Dry run
        let cmd4 = build_remove_command_for_executor(&names, None, CascadeMode::Basic, true);
        assert!(cmd4.starts_with("echo DRY RUN:"));
        assert!(cmd4.contains("pacman -R --noconfirm"));
        // Empty list
        let cmd5 = build_remove_command_for_executor(&[], None, CascadeMode::Basic, false);
        assert_eq!(cmd5, "echo nothing to remove");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/mod.rs | src/install/mod.rs | //! Modular install subsystem.
//!
//! This module splits the previous monolithic `install.rs` into focused
//! submodules. Public API is preserved via re-exports.
/// Batch installation operations.
mod batch;
pub mod command;
/// Direct installation operations.
mod direct;
/// Executor for package operations.
mod executor;
/// Logging utilities for install operations.
mod logging;
/// Package removal operations.
mod remove;
/// Security scanning operations.
mod scan;
/// Shell command execution.
mod shell;
/// Single package installation.
mod single;
/// Utility functions for install operations.
mod utils;
pub use batch::spawn_install_all;
pub use logging::log_removed;
mod patterns;
pub use remove::{check_config_directories, spawn_remove_all};
#[cfg(not(target_os = "windows"))]
pub use scan::spawn::build_sleuth_command_for_terminal;
#[cfg(not(target_os = "windows"))]
#[allow(clippy::too_many_arguments)]
/// What: Load user-configured suspicious patterns and launch the AUR scan pipeline.
///
/// Input:
/// - `pkg`: Package name passed to the scanner.
/// - `do_clamav`/`do_trivy`/`do_semgrep`/`do_shellcheck`/`do_virustotal`/`do_custom`/`do_sleuth`: Toggles for optional scan stages.
///
/// Output:
/// - Spawns a terminal executing the scan workflow defined in `scan::spawn_aur_scan_for_with_config`.
///
/// Details:
/// - Loads `pattern.conf`, publishes each severity regex via a `PACSEA_PATTERNS_*`
///   environment variable, and lets the scan module honour them.
/// - Environment overrides take precedence so UI toggles and config-driven patterns cooperate.
#[allow(clippy::fn_params_excessive_bools)]
pub fn spawn_aur_scan_for_with_config(
    pkg: &str,
    do_clamav: bool,
    do_trivy: bool,
    do_semgrep: bool,
    do_shellcheck: bool,
    do_virustotal: bool,
    do_custom: bool,
    do_sleuth: bool,
) {
    // Load configurable suspicious patterns (pattern.conf); defaults fill any gaps.
    let sets = crate::install::patterns::load();
    let exports = [
        ("PACSEA_PATTERNS_CRIT", &sets.critical),
        ("PACSEA_PATTERNS_HIGH", &sets.high),
        ("PACSEA_PATTERNS_MEDIUM", &sets.medium),
        ("PACSEA_PATTERNS_LOW", &sets.low),
    ];
    for (key, value) in exports {
        // SAFETY: mutating the process environment is not thread-safe; this mirrors
        // the existing call pattern used before the scan spawns.
        // NOTE(review): confirm no other threads read/write env vars concurrently.
        unsafe {
            std::env::set_var(key, value);
        }
    }
    // Forward to the scanner; our env vars take precedence over its built-in defaults.
    scan::spawn_aur_scan_for_with_config(
        pkg,
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        do_sleuth,
    );
}
pub use direct::{
start_integrated_install, start_integrated_install_all, start_integrated_remove_all,
};
#[cfg(not(target_os = "windows"))]
pub use executor::build_scan_command_for_executor;
pub use executor::{
ExecutorOutput, ExecutorRequest, build_downgrade_command_for_executor,
build_install_command_for_executor, build_remove_command_for_executor,
build_update_command_for_executor,
};
pub use shell::spawn_shell_commands_in_terminal;
pub use single::spawn_install;
pub use utils::command_on_path;
pub use utils::shell_single_quote;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/patterns.rs | src/install/patterns.rs | /*!
Pattern configuration loader for the custom suspicious-patterns scan.
Purpose:
- Allow users to tune suspicious pattern categories via a simple config file:
`$XDG_CONFIG_HOME`/pacsea/pattern.conf (or `$HOME`/.config/pacsea/pattern.conf)
Format:
- INI-like sections: [critical], [high], [medium], [low]
- Each non-empty, non-comment line within a section is treated as a raw ERE (Extended Regex)
fragment (compatible with `grep -E`). At runtime, all lines in a section are joined with `|`.
- Comments start with '#', '//' or ';'. Empty lines are ignored.
Example pattern.conf:
```ini
# Customize suspicious patterns (ERE fragments)
[critical]
/dev/(tcp|udp)/
rm -rf[[:space:]]+/
: *\(\) *\{ *: *\| *: *& *\};:
/etc/sudoers([[:space:]>]|$)
[high]
eval
base64 -d
wget .*(sh|bash)([^A-Za-z]|$)
curl .*(sh|bash)([^A-Za-z]|$)
[medium]
whoami
uname -a
grep -ri .*secret
[low]
http_proxy=
https_proxy=
```
Notes:
- This loader returns joined strings for each category. The scanner shells them into `grep -Eo`.
- Defaults are chosen to mirror built-in patterns used by the scan pipeline.
*/
#[cfg(not(target_os = "windows"))]
use std::fs;
#[cfg(not(target_os = "windows"))]
use std::path::PathBuf;
#[cfg(not(target_os = "windows"))]
/// Grouped suspicious pattern sets (ERE fragments joined by `|`).
///
/// Each field is a single string suitable for `grep -E`, built either from the
/// built-in defaults or from the lines of the matching `pattern.conf` section.
#[derive(Clone, Debug)]
pub struct PatternSets {
    /// Critical-severity indicators. High-confidence red flags.
    pub critical: String,
    /// High-severity indicators. Strong suspicious behaviors.
    pub high: String,
    /// Medium-severity indicators. Recon/sensitive searches and downloads.
    pub medium: String,
    /// Low-severity indicators. Environment hints/noise.
    pub low: String,
}
#[cfg(not(target_os = "windows"))]
impl Default for PatternSets {
    /// Built-in pattern sets used when `pattern.conf` is absent or a section is empty.
    fn default() -> Self {
        // Defaults intentionally mirror the scanner's built-in bash ERE sets.
        // These are intended for grep -E (ERE) within bash, not Rust regex compilation.
        // Critical: reverse shells, disk wipes, fork bombs, sudoers/ssh-key tampering, preload hijacks.
        let critical = r"(/dev/(tcp|udp)/|bash -i *>& *[^ ]*/dev/(tcp|udp)/[0-9]+|exec [0-9]{2,}<>/dev/(tcp|udp)/|rm -rf[[:space:]]+/|dd if=/dev/zero of=/dev/sd[a-z]|[>]{1,2}[[:space:]]*/dev/sd[a-z]|: *\(\) *\{ *: *\| *: *& *\};:|/etc/sudoers([[:space:]>]|$)|echo .*[>]{2}.*(/etc/sudoers|/root/.ssh/authorized_keys)|/etc/ld\.so\.preload|LD_PRELOAD=|authorized_keys.*[>]{2}|ssh-rsa [A-Za-z0-9+/=]+.*[>]{2}.*authorized_keys|curl .*(169\.254\.169\.254))".to_string();
        // High: piping downloads to shells, privilege/account changes, log tampering, exfiltration.
        let high = r"(eval|base64 -d|wget .*(sh|bash|dash|ksh|zsh)([^A-Za-z]|$)|curl .*(sh|bash|dash|ksh|zsh)([^A-Za-z]|$)|sudo[[:space:]]|chattr[[:space:]]|useradd|adduser|groupadd|systemctl|service[[:space:]]|crontab|/etc/cron\.|[>]{2}.*(\.bashrc|\.bash_profile|/etc/profile|\.zshrc)|cat[[:space:]]+/etc/shadow|cat[[:space:]]+~/.ssh/id_rsa|cat[[:space:]]+~/.bash_history|systemctl stop (auditd|rsyslog)|service (auditd|rsyslog) stop|scp .*@|curl -F|nc[[:space:]].*<|tar -czv?f|zip -r)".to_string();
        // Medium: host/network recon and secret searches.
        let medium = r"(whoami|uname -a|hostname|id|groups|nmap|netstat -anp|ss -anp|ifconfig|ip addr|arp -a|grep -ri .*secret|find .*-name.*(password|\.key)|env[[:space:]]*\|[[:space:]]*grep -i pass|wget https?://|curl https?://)".to_string();
        // Low: environment hints and noise.
        let low = r"(http_proxy=|https_proxy=|ALL_PROXY=|yes[[:space:]]+> */dev/null *&|ulimit -n [0-9]{5,})".to_string();
        Self {
            critical,
            high,
            medium,
            low,
        }
    }
}
/// What: Security section priority levels.
///
/// Inputs: Used for categorizing security patterns.
///
/// Output: Priority level enum.
///
/// Details: Represents different priority levels for security pattern matching.
/// `pattern.conf` section headers (`[critical]`, `[high]`, `[medium]`, `[low]`
/// and their aliases) map onto these variants during parsing.
#[cfg(not(target_os = "windows"))]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Section {
    /// Critical priority section.
    Critical,
    /// High priority section.
    High,
    /// Medium priority section.
    Medium,
    /// Low priority section.
    Low,
}
#[cfg(not(target_os = "windows"))]
/// What: Load suspicious pattern sets from the user's `pattern.conf`.
///
/// Input:
/// - Reads `$XDG_CONFIG_HOME/pacsea/pattern.conf` (falling back to `$HOME/.config/pacsea/pattern.conf`).
///
/// Output:
/// - `PatternSets` containing joined regex fragments for each severity bucket.
///
/// Details:
/// - Falls back to built-in defaults when the file is missing or unreadable.
/// - Uses simple INI-style parsing, ignoring unknown sections and comments.
pub fn load() -> PatternSets {
    let defaults = PatternSets::default();
    // A missing or unreadable config keeps the built-in defaults untouched.
    match fs::read_to_string(config_path()) {
        Ok(content) => parse(&content, &defaults),
        Err(_) => defaults,
    }
}
#[cfg(not(target_os = "windows"))]
/// What: Resolve the canonical location of `pattern.conf` in the Pacsea config directory.
///
/// Input:
/// - None (derives the path from Pacsea's configuration base).
///
/// Output:
/// - Absolute `PathBuf` pointing to `pattern.conf`.
///
/// Details:
/// - Relies on `crate::theme::config_dir()` to honour XDG overrides.
fn config_path() -> PathBuf {
    let mut path = crate::theme::config_dir();
    path.push("pattern.conf");
    path
}
#[cfg(not(target_os = "windows"))]
/// What: Parse raw `pattern.conf` content into severity buckets.
///
/// Input:
/// - `content`: File body to parse.
/// - `defaults`: Existing `PatternSets` used when a section is absent or empty.
///
/// Output:
/// - `PatternSets` with each section's lines joined by `|`.
///
/// Details:
/// - Treats lines beginning with `#`, `//`, or `;` as comments; blank lines are skipped.
/// - Recognises `[critical]`, `[high]`, `[medium]`, and `[low]` sections
///   (lowercase aliases `crit`/`hi`/`med` allowed).
/// - Unrecognised sections are ignored without error; their lines are dropped.
fn parse(content: &str, defaults: &PatternSets) -> PatternSets {
    use Section::{Critical, High, Low, Medium};
    // One bucket of raw ERE fragments per severity: [critical, high, medium, low].
    let mut buckets: [Vec<String>; 4] = [Vec::new(), Vec::new(), Vec::new(), Vec::new()];
    let mut current: Option<Section> = None;
    for raw in content.lines() {
        let line = raw.trim();
        let is_comment =
            line.starts_with('#') || line.starts_with("//") || line.starts_with(';');
        if line.is_empty() || is_comment {
            continue;
        }
        // Section header: switch the active bucket (None for unknown names).
        if line.starts_with('[')
            && let Some(end) = line.find(']')
        {
            current = match line[1..end].to_ascii_lowercase().as_str() {
                "critical" | "crit" => Some(Critical),
                "high" | "hi" => Some(High),
                "medium" | "med" => Some(Medium),
                "low" => Some(Low),
                _ => None,
            };
            continue;
        }
        // Ordinary line: store the raw ERE fragment for a later `|` join.
        if let Some(section) = current {
            let idx = match section {
                Critical => 0,
                High => 1,
                Medium => 2,
                Low => 3,
            };
            buckets[idx].push(line.to_string());
        }
    }
    // Empty buckets fall back to the corresponding default pattern string.
    let join_or = |items: &Vec<String>, fallback: &str| {
        if items.is_empty() {
            fallback.to_string()
        } else {
            items.join("|")
        }
    };
    PatternSets {
        critical: join_or(&buckets[0], &defaults.critical),
        high: join_or(&buckets[1], &defaults.high),
        medium: join_or(&buckets[2], &defaults.medium),
        low: join_or(&buckets[3], &defaults.low),
    }
}
#[cfg(all(test, not(target_os = "windows")))]
mod tests {
    use super::*;

    #[test]
    /// What: Ensure `load` falls back to defaults when no pattern configuration file exists.
    ///
    /// Input:
    /// - Temporary HOME without an accompanying `pattern.conf`.
    ///
    /// Output:
    /// - Loaded pattern sets match `PatternSets::default`.
    ///
    /// Details:
    /// - Redirects `HOME`, guards with the theme mutex, and removes the temp directory after assertions.
    fn load_returns_defaults_when_config_missing() {
        use std::fs;
        use std::path::PathBuf;
        // Env mutation is process-global; the theme mutex serializes the tests
        // that touch HOME / XDG_CONFIG_HOME.
        let _guard = crate::theme::test_mutex()
            .lock()
            .expect("Test mutex poisoned");
        // Unique temp dir per run (pid + nanos) so parallel invocations cannot collide.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_patterns_load_missing_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&dir);
        // Remember originals so the environment can be restored afterwards.
        let orig_home = std::env::var_os("HOME");
        let orig_xdg = std::env::var_os("XDG_CONFIG_HOME");
        unsafe {
            std::env::set_var("HOME", dir.display().to_string());
            std::env::remove_var("XDG_CONFIG_HOME");
        }
        let defaults = PatternSets::default();
        let loaded = super::load();
        assert_eq!(loaded.critical, defaults.critical);
        assert_eq!(loaded.high, defaults.high);
        assert_eq!(loaded.medium, defaults.medium);
        assert_eq!(loaded.low, defaults.low);
        // Restore HOME / XDG_CONFIG_HOME exactly as they were found.
        unsafe {
            if let Some(v) = orig_home {
                std::env::set_var("HOME", v);
            } else {
                std::env::remove_var("HOME");
            }
            if let Some(v) = orig_xdg {
                std::env::set_var("XDG_CONFIG_HOME", v);
            } else {
                std::env::remove_var("XDG_CONFIG_HOME");
            }
        }
        let _ = fs::remove_dir_all(&dir);
    }

    #[test]
    /// What: Ensure `load` honours pattern definitions from an on-disk configuration file.
    ///
    /// Input:
    /// - Temporary HOME containing a handwritten `pattern.conf` with custom sections.
    ///
    /// Output:
    /// - Loaded pattern sets reflect the configured critical/high/medium/low regexes.
    ///
    /// Details:
    /// - Writes `pattern.conf` under Pacsea's config directory, then restores environment variables and removes artifacts.
    fn load_reads_pattern_conf_overrides() {
        use std::fs;
        use std::path::PathBuf;
        // Same serialization + unique-temp-dir discipline as the test above.
        let _guard = crate::theme::test_mutex()
            .lock()
            .expect("Test mutex poisoned");
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_patterns_load_conf_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&dir);
        let orig_home = std::env::var_os("HOME");
        let orig_xdg = std::env::var_os("XDG_CONFIG_HOME");
        unsafe {
            std::env::set_var("HOME", dir.display().to_string());
            std::env::remove_var("XDG_CONFIG_HOME");
        }
        // config_dir() now resolves inside the temp HOME, so the written file is
        // exactly what load() will pick up.
        let config_dir = crate::theme::config_dir();
        let pattern_path = config_dir.join("pattern.conf");
        let body = "[critical]\nfoo\n\n[high]\nbar\n\n[medium]\nmid\n\n[low]\nlo\n";
        fs::write(&pattern_path, body).expect("failed to write test pattern config file");
        let loaded = super::load();
        assert_eq!(loaded.critical, "foo");
        assert_eq!(loaded.high, "bar");
        assert_eq!(loaded.medium, "mid");
        assert_eq!(loaded.low, "lo");
        unsafe {
            if let Some(v) = orig_home {
                std::env::set_var("HOME", v);
            } else {
                std::env::remove_var("HOME");
            }
            if let Some(v) = orig_xdg {
                std::env::set_var("XDG_CONFIG_HOME", v);
            } else {
                std::env::remove_var("XDG_CONFIG_HOME");
            }
        }
        let _ = fs::remove_dir_all(&dir);
    }

    #[test]
    /// What: Confirm `parse` falls back to default regex sets when the config snippet is empty.
    ///
    /// Inputs:
    /// - Blank configuration string.
    /// - `PatternSets::default()` as the baseline values.
    ///
    /// Output:
    /// - Returns a `PatternSets` identical to the defaults.
    ///
    /// Details:
    /// - Exercises the early-return path that clones defaults for each severity bucket.
    fn parse_uses_defaults_when_empty() {
        let d = PatternSets::default();
        let p = parse("", &d);
        assert_eq!(p.critical, d.critical);
        assert_eq!(p.high, d.high);
        assert_eq!(p.medium, d.medium);
        assert_eq!(p.low, d.low);
    }

    #[test]
    /// What: Ensure `parse` concatenates multi-line sections with `|` to form extended regexes.
    ///
    /// Inputs:
    /// - Config snippet containing multiple severities with repeated entries.
    ///
    /// Output:
    /// - Generated pattern strings join entries with `|` while preserving singleton sections.
    ///
    /// Details:
    /// - Verifies each severity bucket independently to catch regressions in join order.
    fn parse_joins_lines_with_or() {
        let d = PatternSets::default();
        let cfg = r"
[critical]
a
b
c
[high]
foo
bar
[medium]
x
[low]
l1
l2
";
        let p = parse(cfg, &d);
        assert_eq!(p.critical, "a|b|c");
        assert_eq!(p.high, "foo|bar");
        assert_eq!(p.medium, "x");
        assert_eq!(p.low, "l1|l2");
    }

    #[test]
    /// What: Verify `parse` ignores comments, unknown sections, and insignificant whitespace.
    ///
    /// Inputs:
    /// - Config snippet with comment prefixes (`#`, `;`, `//`), extra indentation, and an unknown header.
    ///
    /// Output:
    /// - Patterns exclude commented lines, skip the unknown section, and trim whitespace in recognised sections.
    ///
    /// Details:
    /// - Confirms default fallback remains for untouched severities while demonstrating indentation trimming for `low`.
    fn parse_handles_comments_and_whitespace() {
        let d = PatternSets::default();
        let cfg = r"
# comment
; also comment
// yet another
[critical]
a
#ignored
b
[unknown] # ignored section (no effect)
[high]
foo
[low]
l1
";
        let p = parse(cfg, &d);
        assert_eq!(p.critical, "a|b");
        assert_eq!(p.high, "foo");
        // medium had no section in the snippet, so the default must survive.
        assert_eq!(p.medium, d.medium);
        assert_eq!(p.low, "l1");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/single.rs | src/install/single.rs | #[allow(unused_imports)]
use std::process::Command;
use crate::state::PackageItem;
#[cfg(not(target_os = "windows"))]
use crate::state::Source;
#[cfg(not(target_os = "windows"))]
use super::command::build_install_command;
#[cfg(all(target_os = "windows", not(test)))]
use super::command::build_install_command;
#[cfg(not(target_os = "windows"))]
use super::logging::log_installed;
#[cfg(not(target_os = "windows"))]
use super::utils::{choose_terminal_index_prefer_path, command_on_path, shell_single_quote};
#[cfg(not(target_os = "windows"))]
/// What: Attempt to spawn a terminal with the given command.
///
/// Input:
/// - `term`: Terminal binary name
/// - `args`: Arguments inserted before the command string
/// - `needs_xfce_command`: Whether this terminal needs special xfce4-terminal command handling
/// - `cmd_str`: The install command to execute
/// - `item_name`: Package name for logging
/// - `src`: Source type ("official" or "aur") for logging
/// - `dry_run`: Whether this is a dry run
///
/// Output:
/// - `true` if the terminal was successfully spawned, `false` otherwise
///
/// Details:
/// - Handles `xfce4-terminal` special command format and forwards the
///   `PACSEA_TEST_OUT` environment variable (creating its parent directory)
///   so test shims can capture arguments.
fn try_spawn_terminal(
    term: &str,
    args: &[&str],
    needs_xfce_command: bool,
    cmd_str: &str,
    item_name: &str,
    src: &str,
    dry_run: bool,
) -> bool {
    let mut cmd = Command::new(term);
    if needs_xfce_command && term == "xfce4-terminal" {
        // xfce4-terminal wants the entire shell invocation behind --command.
        cmd.arg("--command")
            .arg(format!("bash -lc {}", shell_single_quote(cmd_str)));
    } else {
        for a in args {
            cmd.arg(a);
        }
        cmd.arg(cmd_str);
    }
    // Propagate the test capture file so shim terminals can record their argv.
    if let Ok(out) = std::env::var("PACSEA_TEST_OUT") {
        if let Some(dir) = std::path::Path::new(&out).parent() {
            let _ = std::fs::create_dir_all(dir);
        }
        cmd.env("PACSEA_TEST_OUT", out);
    }
    if let Err(e) = cmd.spawn() {
        tracing::warn!(
            terminal = %term,
            error = %e,
            names = %item_name,
            "failed to spawn terminal, trying next"
        );
        return false;
    }
    tracing::info!(
        terminal = %term,
        names = %item_name,
        total = 1,
        aur_count = usize::from(src == "aur"),
        official_count = usize::from(src == "official"),
        dry_run,
        "launched terminal for install"
    );
    true
}
#[cfg(not(target_os = "windows"))]
/// What: Get the terminal preference list based on desktop environment.
///
/// Input:
/// - None (reads `XDG_CURRENT_DESKTOP` environment variable)
///
/// Output:
/// - Slice of terminal tuples `(name, args, needs_xfce_command)` ordered by preference
///
/// Details:
/// - Prefers GNOME terminals when running under GNOME desktop, otherwise uses default ordering.
fn get_terminal_preferences() -> &'static [(&'static str, &'static [&'static str], bool)] {
    // Both tables list the same ten terminals; only the preference order differs.
    const GNOME_FIRST: &[(&str, &[&str], bool)] = &[
        ("gnome-terminal", &["--", "bash", "-lc"], false),
        ("gnome-console", &["--", "bash", "-lc"], false),
        ("kgx", &["--", "bash", "-lc"], false),
        ("alacritty", &["-e", "bash", "-lc"], false),
        ("kitty", &["bash", "-lc"], false),
        ("xterm", &["-hold", "-e", "bash", "-lc"], false),
        ("konsole", &["-e", "bash", "-lc"], false),
        ("xfce4-terminal", &[], true),
        ("tilix", &["--", "bash", "-lc"], false),
        ("mate-terminal", &["--", "bash", "-lc"], false),
    ];
    const DEFAULT_ORDER: &[(&str, &[&str], bool)] = &[
        ("alacritty", &["-e", "bash", "-lc"], false),
        ("kitty", &["bash", "-lc"], false),
        ("xterm", &["-hold", "-e", "bash", "-lc"], false),
        ("gnome-terminal", &["--", "bash", "-lc"], false),
        ("gnome-console", &["--", "bash", "-lc"], false),
        ("kgx", &["--", "bash", "-lc"], false),
        ("konsole", &["-e", "bash", "-lc"], false),
        ("xfce4-terminal", &[], true),
        ("tilix", &["--", "bash", "-lc"], false),
        ("mate-terminal", &["--", "bash", "-lc"], false),
    ];
    let gnome = matches!(
        std::env::var("XDG_CURRENT_DESKTOP"),
        Ok(v) if v.to_uppercase().contains("GNOME")
    );
    if gnome { GNOME_FIRST } else { DEFAULT_ORDER }
}
#[cfg(not(target_os = "windows"))]
/// What: Spawn a terminal to install a single package.
///
/// Input:
/// - item to install; password for sudo on official installs (optional); `dry_run` to print instead of execute
///
/// Output:
/// - Launches a terminal (or `bash`) running `pacman`/`paru`/`yay` to perform the install
///
/// Details:
/// - Prefers common terminals (`GNOME Console`/`Terminal`, `kitty`, `alacritty`, `xterm`, `xfce4-terminal`, etc.), falling back to `bash`. Uses `pacman` for official packages and `paru`/`yay` for AUR; appends a hold tail to keep the window open; logs installed names when not in `dry_run`.
/// - During tests, this is a no-op to avoid opening real terminal windows.
pub fn spawn_install(item: &PackageItem, password: Option<&str>, dry_run: bool) {
    // Skip actual spawning during tests unless PACSEA_TEST_OUT is set (indicates a test with fake terminal)
    #[cfg(test)]
    if std::env::var("PACSEA_TEST_OUT").is_err() {
        return;
    }
    let (cmd_str, uses_sudo) = build_install_command(item, password, dry_run);
    // The source tag feeds the aur/official counters in the structured log fields below.
    let src = match item.source {
        Source::Official { .. } => "official",
        Source::Aur => "aur",
    };
    tracing::info!(
        names = %item.name,
        total = 1,
        aur_count = usize::from(src == "aur"),
        official_count = usize::from(src == "official"),
        dry_run = dry_run,
        uses_sudo,
        "spawning install"
    );
    let terms = get_terminal_preferences();
    // Try preferred path-based selection first
    let mut launched = choose_terminal_index_prefer_path(terms).is_some_and(|idx| {
        let (term, args, needs_xfce_command) = terms[idx];
        try_spawn_terminal(
            term,
            args,
            needs_xfce_command,
            &cmd_str,
            &item.name,
            src,
            dry_run,
        )
    });
    // Fallback: try each terminal in order
    if !launched {
        for (term, args, needs_xfce_command) in terms {
            if command_on_path(term) {
                launched = try_spawn_terminal(
                    term,
                    args,
                    *needs_xfce_command,
                    &cmd_str,
                    &item.name,
                    src,
                    dry_run,
                );
                if launched {
                    break;
                }
            }
        }
    }
    // Final fallback: use bash directly
    if !launched {
        let res = Command::new("bash").args(["-lc", &cmd_str]).spawn();
        if let Err(e) = res {
            tracing::error!(error = %e, names = %item.name, "failed to spawn bash to run install command");
        } else {
            tracing::info!(
                names = %item.name,
                total = 1,
                aur_count = usize::from(src == "aur"),
                official_count = usize::from(src == "official"),
                dry_run = dry_run,
                "launched bash for install"
            );
        }
    }
    // NOTE(review): the audit log is written even when no launcher succeeded —
    // presumably intentional best-effort auditing; confirm.
    if !dry_run && let Err(e) = log_installed(std::slice::from_ref(&item.name)) {
        tracing::warn!(error = %e, names = %item.name, "failed to write install audit log");
    }
}
#[cfg(all(test, not(target_os = "windows")))]
mod tests {
    #[test]
    /// What: Confirm the single-install helper launches gnome-terminal with the expected separator arguments.
    ///
    /// Inputs:
    /// - Shim `gnome-terminal` placed first on `PATH` capturing its argv.
    /// - `spawn_install` invoked in dry-run mode for an official package.
    ///
    /// Output:
    /// - Captured arguments begin with `--`, `bash`, `-lc`, matching the safe invocation contract.
    ///
    /// Details:
    /// - Creates temporary directory to host the shim binary, exports `PACSEA_TEST_OUT`, then restores environment variables afterward.
    fn install_single_uses_gnome_terminal_double_dash() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use std::path::PathBuf;
        // Unique temp dir (pid + nanos) hosts both the shim terminal and its output file.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_inst_single_gnome_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        fs::create_dir_all(&dir).expect("Failed to create test directory");
        let mut out_path = dir.clone();
        out_path.push("args.txt");
        let mut term_path = dir.clone();
        term_path.push("gnome-terminal");
        // Shim script truncates the capture file, then appends one argv entry per line.
        let script = "#!/bin/sh\n: > \"$PACSEA_TEST_OUT\"\nfor a in \"$@\"; do printf '%s\n' \"$a\" >> \"$PACSEA_TEST_OUT\"; done\n";
        fs::write(&term_path, script.as_bytes()).expect("Failed to write test terminal script");
        // Mark the shim executable so PATH lookup treats it as a real terminal.
        let mut perms = fs::metadata(&term_path)
            .expect("Failed to read test terminal script metadata")
            .permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&term_path, perms)
            .expect("Failed to set test terminal script permissions");
        let orig_path = std::env::var_os("PATH");
        unsafe {
            std::env::set_var("PATH", dir.display().to_string());
            std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        }
        let pkg = crate::state::PackageItem {
            name: "ripgrep".into(),
            version: "1".into(),
            description: String::new(),
            source: crate::state::Source::Official {
                repo: "extra".into(),
                arch: "x86_64".into(),
            },
            popularity: None,
            out_of_date: None,
            orphaned: false,
        };
        super::spawn_install(&pkg, None, true);
        // Give the spawned shim a moment to write its capture file.
        std::thread::sleep(std::time::Duration::from_millis(50));
        let body = fs::read_to_string(&out_path).expect("fake terminal args file written");
        let lines: Vec<&str> = body.lines().collect();
        assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
        assert_eq!(lines[0], "--");
        assert_eq!(lines[1], "bash");
        assert_eq!(lines[2], "-lc");
        // Restore PATH and drop the capture variable so other tests stay unaffected.
        unsafe {
            if let Some(v) = orig_path {
                std::env::set_var("PATH", v);
            } else {
                std::env::remove_var("PATH");
            }
            std::env::remove_var("PACSEA_TEST_OUT");
        }
    }
}
#[cfg(target_os = "windows")]
/// What: Present a placeholder install message on Windows where pacman/AUR helpers are unavailable.
///
/// Input:
/// - `item`: Package metadata used to build the informational command.
/// - `password`: Ignored; included for API parity.
/// - `dry_run`: When `true`, uses `PowerShell` to simulate the install operation.
///
/// Output:
/// - Launches a detached `PowerShell` window (if available) for dry-run simulation, or `cmd` window otherwise.
///
/// Details:
/// - When `dry_run` is true and `PowerShell` is available, uses `PowerShell` to simulate the install with Write-Host.
/// - Logs the install attempt when not a dry run to keep audit behaviour consistent with Unix platforms.
/// - During tests, this is a no-op to avoid opening real terminal windows.
#[allow(unused_variables, clippy::missing_const_for_fn)]
pub fn spawn_install(item: &PackageItem, password: Option<&str>, dry_run: bool) {
    #[cfg(not(test))]
    {
        let (cmd_str, _uses_sudo) = build_install_command(item, password, dry_run);
        if dry_run && super::utils::is_powershell_available() {
            // Use PowerShell to simulate the install operation
            // (single quotes doubled per PowerShell's escaping rules).
            let powershell_cmd = format!(
                "Write-Host 'DRY RUN: Simulating install of {}' -ForegroundColor Yellow; Write-Host 'Command: {}' -ForegroundColor Cyan; Write-Host ''; Write-Host 'Press any key to close...'; $null = $Host.UI.RawUI.ReadKey('NoEcho,IncludeKeyDown')",
                item.name,
                cmd_str.replace('\'', "''")
            );
            let _ = Command::new("powershell.exe")
                .args(["-NoProfile", "-Command", &powershell_cmd])
                .spawn();
        } else {
            // `start` detaches the window; `/K` keeps it open after the command runs.
            let _ = Command::new("cmd")
                .args(["/C", "start", "Pacsea Install", "cmd", "/K", &cmd_str])
                .spawn();
        }
        if !dry_run {
            let _ = super::logging::log_installed(std::slice::from_ref(&item.name));
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/logging.rs | src/install/logging.rs | use std::io::Write;
/// What: Append installed package names to an audit log under the logs directory.
///
/// Input: `names` slice of package names to log; each line is timestamped.
///
/// Output: `Ok(())` on success; otherwise an I/O error.
///
/// Details: Writes to `logs_dir/install_log.log`, prefixing each name with a UTC timestamp.
pub fn log_installed(names: &[String]) -> std::io::Result<()> {
    let log_path = crate::theme::logs_dir().join("install_log.log");
    let mut file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(log_path)?;
    // Seconds since the epoch as i64; None when the clock is before the epoch
    // or the value does not fit, which ts_to_date renders as a fallback.
    let secs = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .ok()
        .and_then(|d| i64::try_from(d.as_secs()).ok());
    let stamp = crate::util::ts_to_date(secs);
    for name in names {
        writeln!(file, "{stamp} {name}")?;
    }
    Ok(())
}
/// What: Append removed package names to an audit log under the logs directory.
///
/// Input:
/// - `names` slice of package names to append (one per line).
///
/// Output:
/// - `Ok(())` on success; otherwise an I/O error.
///
/// # Errors
/// - Returns `Err` when the logs directory cannot be accessed or created
/// - Returns `Err` when the log file cannot be opened or written to
///
/// Details:
/// - Appends to `logs_dir/remove_log.log` without timestamps.
pub fn log_removed(names: &[String]) -> std::io::Result<()> {
    let log_path = crate::theme::logs_dir().join("remove_log.log");
    let mut file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(log_path)?;
    // One bare name per line; the first write error short-circuits.
    names.iter().try_for_each(|name| writeln!(file, "{name}"))
}
#[cfg(test)]
mod tests {
    #[test]
    /// What: Ensure install/remove logging helpers write files beneath the configured logs directory.
    ///
    /// Inputs:
    /// - `names`: Sample package list written to both install and remove logs with HOME redirected.
    ///
    /// Output:
    /// - Generated log files contain the package names (with timestamp for installs) under `logs_dir`.
    ///
    /// Details:
    /// - Temporarily overrides `HOME`, calls both logging functions, then verifies file contents before
    ///   restoring the environment.
    fn logging_writes_install_and_remove_logs_under_logs_dir() {
        use std::fs;
        use std::path::PathBuf;
        // Shim HOME to temp so logs_dir resolves within it
        let orig_home = std::env::var_os("HOME");
        let mut home: PathBuf = std::env::temp_dir();
        home.push(format!(
            "pacsea_test_logs_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&home);
        unsafe { std::env::set_var("HOME", home.display().to_string()) };
        // Write install log
        let names = vec!["a".to_string(), "b".to_string()];
        super::log_installed(&names).expect("Failed to write install log in test");
        let mut p = crate::theme::logs_dir();
        p.push("install_log.log");
        let body = fs::read_to_string(&p).expect("Failed to read install log in test");
        // Installed entries are "<timestamp> <name>", hence the leading-space match.
        assert!(body.contains(" a\n") || body.contains(" a\r\n"));
        // Write remove log
        super::log_removed(&names).expect("Failed to write remove log in test");
        let mut pr = crate::theme::logs_dir();
        pr.push("remove_log.log");
        let body_r = fs::read_to_string(&pr).expect("Failed to read remove log in test");
        assert!(body_r.contains("a\n") || body_r.contains("a\r\n"));
        // Cleanup env; not removing files so test artifacts may remain in tmp
        unsafe {
            if let Some(v) = orig_home {
                std::env::set_var("HOME", v);
            } else {
                std::env::remove_var("HOME");
            }
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/remove.rs | src/install/remove.rs | #[allow(unused_imports)]
use std::process::Command;
use crate::state::modal::CascadeMode;
/// What: Check for configuration directories in `$HOME/PACKAGE_NAME` and `$HOME/.config/PACKAGE_NAME`.
///
/// Inputs:
/// - `package_name`: Name of the package to check for config directories.
/// - `home`: Home directory path.
///
/// Output:
/// - Vector of found config directory paths, in fixed order: the `$HOME`
///   candidate first, then the `$HOME/.config` candidate.
///
/// Details:
/// - Only directories that actually exist are returned (`is_dir` follows
///   symlinks and implies existence).
#[must_use]
pub fn check_config_directories(package_name: &str, home: &str) -> Vec<std::path::PathBuf> {
    use std::path::Path;
    let base = Path::new(home);
    let candidates = [
        base.join(package_name),
        base.join(".config").join(package_name),
    ];
    candidates.into_iter().filter(|p| p.is_dir()).collect()
}
#[cfg(not(target_os = "windows"))]
use super::utils::{choose_terminal_index_prefer_path, command_on_path, shell_single_quote};
#[cfg(not(target_os = "windows"))]
/// What: Configure terminal-specific environment variables for a command.
///
/// Input:
/// - `cmd`: Command builder to configure.
/// - `term`: Terminal name to check for special handling.
///
/// Output:
/// - Modifies `cmd` with environment variables for konsole, gnome-console, or kgx if needed.
///
/// Details:
/// - konsole under Wayland gets a logging rule that disables its text-input warnings.
/// - gnome-console/kgx are forced onto the cairo renderer with software GL for compatibility.
fn configure_terminal_env(cmd: &mut Command, term: &str) {
    match term {
        "konsole" => {
            // Only relevant when running inside a Wayland session.
            if std::env::var_os("WAYLAND_DISPLAY").is_some() {
                cmd.env("QT_LOGGING_RULES", "qt.qpa.wayland.textinput=false");
            }
        }
        "gnome-console" | "kgx" => {
            cmd.env("GSK_RENDERER", "cairo");
            cmd.env("LIBGL_ALWAYS_SOFTWARE", "1");
        }
        _ => {}
    }
}
#[cfg(not(target_os = "windows"))]
/// What: Configure test output environment variable for a command.
///
/// Input:
/// - `cmd`: Command builder to configure.
///
/// Output:
/// - Sets `PACSEA_TEST_OUT` environment variable if present, creating parent directory if needed.
///
/// Details:
/// - No-op when `PACSEA_TEST_OUT` is not set in the environment.
fn configure_test_env(cmd: &mut Command) {
    let Ok(target) = std::env::var("PACSEA_TEST_OUT") else {
        return;
    };
    // Make sure the capture file's directory exists before the child writes to it.
    if let Some(dir) = std::path::Path::new(&target).parent() {
        let _ = std::fs::create_dir_all(dir);
    }
    cmd.env("PACSEA_TEST_OUT", target);
}
#[cfg(not(target_os = "windows"))]
/// What: Logging context for terminal spawning operations.
///
/// Details:
/// - Groups related logging parameters to reduce function argument count.
/// - Borrows the joined name list instead of owning it so the caller keeps its `String`.
struct SpawnContext<'a> {
    /// Space-separated package names string (produced via `names.join(" ")` by the caller).
    names_str: &'a str,
    /// Number of package names.
    names_len: usize,
    /// Whether this is a dry-run operation.
    dry_run: bool,
    /// Cascade removal mode.
    cascade_mode: CascadeMode,
}
#[cfg(not(target_os = "windows"))]
/// What: Attempt to spawn a terminal with the given configuration.
///
/// Input:
/// - `term`: Terminal executable name.
/// - `args`: Arguments to pass before the command string.
/// - `needs_xfce_command`: Whether this terminal needs special xfce4-terminal command format.
/// - `cmd_str`: The command string to execute.
/// - `ctx`: Logging context for the operation.
///
/// Output:
/// - `true` if the terminal was successfully spawned, `false` otherwise.
///
/// Details:
/// - Builds the argv according to the terminal type, applies the test and
///   terminal-specific environment tweaks, then spawns and logs the outcome.
fn try_spawn_terminal(
    term: &str,
    args: &[&str],
    needs_xfce_command: bool,
    cmd_str: &str,
    ctx: &SpawnContext<'_>,
) -> bool {
    let mut cmd = Command::new(term);
    if needs_xfce_command && term == "xfce4-terminal" {
        // xfce4-terminal takes the whole shell invocation behind --command.
        cmd.arg("--command")
            .arg(format!("bash -lc {}", shell_single_quote(cmd_str)));
    } else {
        for a in args {
            cmd.arg(a);
        }
        cmd.arg(cmd_str);
    }
    configure_test_env(&mut cmd);
    configure_terminal_env(&mut cmd, term);
    if let Err(e) = cmd.spawn() {
        tracing::warn!(
            terminal = %term,
            error = %e,
            names = %ctx.names_str,
            "failed to spawn terminal, trying next"
        );
        return false;
    }
    tracing::info!(
        terminal = %term,
        names = %ctx.names_str,
        total = ctx.names_len,
        dry_run = ctx.dry_run,
        mode = ?ctx.cascade_mode,
        "launched terminal for removal"
    );
    true
}
#[cfg(not(target_os = "windows"))]
/// What: Spawn a terminal to remove all given packages with pacman.
///
/// Input:
/// - `names`: Package names to remove.
/// - `dry_run`: When `true`, prints the removal command instead of executing it.
/// - `cascade_mode`: Selects the pacman removal flag via `CascadeMode::flag()`.
///
/// Output:
/// - Launches a terminal (or bash) to run `sudo pacman <flag>` for the provided names.
///
/// Details:
/// - Prefers common terminals (GNOME Console/Terminal, kitty, alacritty, xterm, xfce4-terminal, etc.); falls back to bash. Appends a hold tail so the window remains open after command completion.
/// - During tests, this is a no-op to avoid opening real terminal windows.
pub fn spawn_remove_all(names: &[String], dry_run: bool, cascade_mode: CascadeMode) {
    // Skip actual spawning during tests unless PACSEA_TEST_OUT is set (indicates a test with fake terminal)
    #[cfg(test)]
    if std::env::var("PACSEA_TEST_OUT").is_err() {
        return;
    }
    let names_str = names.join(" ");
    tracing::info!(
        names = %names_str,
        total = names.len(),
        dry_run = dry_run,
        mode = ?cascade_mode,
        "spawning removal"
    );
    let flag = cascade_mode.flag();
    let hold_tail = "; echo; echo 'Finished.'; echo 'Press any key to close...'; read -rn1 -s _ || (echo; echo 'Press Ctrl+C to close'; sleep infinity)";
    // Build the pacman invocation once, reusing the already-joined name list
    // (the previous version duplicated the template and re-joined `names` in
    // each branch).
    let base_cmd = format!("sudo pacman {flag} --noconfirm {names_str}{hold_tail}");
    let cmd_str = if dry_run {
        // Dry runs echo the single-quoted command instead of executing it.
        format!("echo DRY RUN: {}", shell_single_quote(&base_cmd))
    } else {
        base_cmd
    };
    let terms_gnome_first: &[(&str, &[&str], bool)] = &[
        ("gnome-terminal", &["--", "bash", "-lc"], false),
        ("gnome-console", &["--", "bash", "-lc"], false),
        ("kgx", &["--", "bash", "-lc"], false),
        ("alacritty", &["-e", "bash", "-lc"], false),
        ("ghostty", &["-e", "bash", "-lc"], false),
        ("kitty", &["bash", "-lc"], false),
        ("xterm", &["-hold", "-e", "bash", "-lc"], false),
        ("konsole", &["-e", "bash", "-lc"], false),
        ("xfce4-terminal", &[], true),
        ("tilix", &["--", "bash", "-lc"], false),
        ("mate-terminal", &["--", "bash", "-lc"], false),
    ];
    let terms_default: &[(&str, &[&str], bool)] = &[
        ("alacritty", &["-e", "bash", "-lc"], false),
        ("ghostty", &["-e", "bash", "-lc"], false),
        ("kitty", &["bash", "-lc"], false),
        ("xterm", &["-hold", "-e", "bash", "-lc"], false),
        ("gnome-terminal", &["--", "bash", "-lc"], false),
        ("gnome-console", &["--", "bash", "-lc"], false),
        ("kgx", &["--", "bash", "-lc"], false),
        ("konsole", &["-e", "bash", "-lc"], false),
        ("xfce4-terminal", &[], true),
        ("tilix", &["--", "bash", "-lc"], false),
        ("mate-terminal", &["--", "bash", "-lc"], false),
    ];
    // GNOME sessions get the GNOME-first ordering.
    let is_gnome = std::env::var("XDG_CURRENT_DESKTOP")
        .ok()
        .is_some_and(|v| v.to_uppercase().contains("GNOME"));
    let terms = if is_gnome {
        terms_gnome_first
    } else {
        terms_default
    };
    let ctx = SpawnContext {
        names_str: &names_str,
        names_len: names.len(),
        dry_run,
        cascade_mode,
    };
    // First choice: the PATH-preferred terminal, if any.
    let mut launched = choose_terminal_index_prefer_path(terms).is_some_and(|idx| {
        let (term, args, needs_xfce_command) = terms[idx];
        try_spawn_terminal(term, args, needs_xfce_command, &cmd_str, &ctx)
    });
    // Fallback: walk the preference list in order.
    if !launched {
        for (term, args, needs_xfce_command) in terms {
            if command_on_path(term) {
                launched = try_spawn_terminal(term, args, *needs_xfce_command, &cmd_str, &ctx);
                if launched {
                    break;
                }
            }
        }
    }
    // Final fallback: run the command through bash directly (no terminal window).
    if !launched {
        let res = Command::new("bash").args(["-lc", &cmd_str]).spawn();
        if let Err(e) = res {
            tracing::error!(error = %e, names = %names_str, "failed to spawn bash to run removal command");
        } else {
            tracing::info!(
                names = %names_str,
                total = names.len(),
                dry_run = dry_run,
                mode = ?cascade_mode,
                "launched bash for removal"
            );
        }
    }
}
#[cfg(target_os = "windows")]
/// What: Present a placeholder removal message on Windows where pacman is unavailable.
///
/// Input:
/// - `names`: Packages the user requested to remove.
/// - `dry_run`: When `true`, uses `PowerShell` to simulate the removal operation.
/// - `cascade_mode`: Removal mode used for display consistency.
///
/// Output:
/// - Launches a detached `PowerShell` window (if available) for dry-run simulation, or `cmd` window otherwise.
///
/// Details:
/// - When `dry_run` is true and `PowerShell` is available, uses `PowerShell` to simulate the removal with Write-Host.
/// - Mirrors Unix logging by emitting an info trace, but performs no package operations.
/// - During tests, this is a no-op to avoid opening real terminal windows.
#[allow(unused_variables, clippy::missing_const_for_fn)]
pub fn spawn_remove_all(names: &[String], dry_run: bool, cascade_mode: CascadeMode) {
    #[cfg(not(test))]
    {
        // Shadowed owned copy so an empty request still renders a message.
        let mut names = names.to_vec();
        if names.is_empty() {
            names.push("nothing".into());
        }
        let names_str = names.join(" ");
        let msg = if dry_run {
            format!("DRY RUN: Would remove packages: {names_str}")
        } else {
            format!("Cannot remove packages on Windows: {names_str}")
        };
        // Check if this is a dry-run operation
        if dry_run && super::utils::is_powershell_available() {
            // Use PowerShell to simulate the operation
            // (single quotes doubled per PowerShell's escaping rules).
            let escaped_msg = msg.replace('\'', "''");
            let powershell_cmd = format!(
                "Write-Host '{escaped_msg}' -ForegroundColor Yellow; Write-Host ''; Write-Host 'Press any key to close...'; $null = $Host.UI.RawUI.ReadKey('NoEcho,IncludeKeyDown')"
            );
            let _ = std::process::Command::new("powershell.exe")
                .args(["-NoProfile", "-Command", &powershell_cmd])
                .spawn();
        } else {
            // `start` detaches the window; `/K` keeps it open after echoing.
            let _ = std::process::Command::new("cmd")
                .args([
                    "/C",
                    "start",
                    "Pacsea Remove",
                    "cmd",
                    "/K",
                    &format!("echo {msg}"),
                ])
                .spawn();
        }
    }
}
#[cfg(test)]
mod tests {
    #[test]
    #[cfg(unix)]
    /// What: Verify the removal helper prefers gnome-terminal and passes the expected dash handling.
    ///
    /// Inputs:
    /// - Fake `gnome-terminal` script injected into `PATH`.
    /// - `spawn_remove_all` invoked in dry-run cascade mode with two package names.
    ///
    /// Output:
    /// - Captured invocation arguments start with `--`, `bash`, `-lc` to ensure safe command parsing.
    ///
    /// Details:
    /// - Redirects `PACSEA_TEST_OUT` so the shim terminal records arguments, then restores environment variables.
    fn remove_all_uses_gnome_terminal_double_dash() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use std::path::PathBuf;
        // Unique temp dir (pid + nanos) hosts the shim terminal and its capture file.
        let mut dir: PathBuf = std::env::temp_dir();
        dir.push(format!(
            "pacsea_test_remove_gnome_{}_{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("System time is before UNIX epoch")
                .as_nanos()
        ));
        let _ = fs::create_dir_all(&dir);
        let mut out_path = dir.clone();
        out_path.push("args.txt");
        let mut term_path = dir.clone();
        term_path.push("gnome-terminal");
        // Shim truncates the capture file, then appends one argv entry per line.
        let script = "#!/bin/sh\n: > \"$PACSEA_TEST_OUT\"\nfor a in \"$@\"; do printf '%s\n' \"$a\" >> \"$PACSEA_TEST_OUT\"; done\n";
        fs::write(&term_path, script.as_bytes()).expect("failed to write test terminal script");
        // Executable bit so the PATH lookup accepts the shim as a terminal.
        let mut perms = fs::metadata(&term_path)
            .expect("failed to read test terminal script metadata")
            .permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&term_path, perms)
            .expect("failed to set test terminal script permissions");
        let orig_path = std::env::var_os("PATH");
        unsafe {
            std::env::set_var("PATH", dir.display().to_string());
            std::env::set_var("PACSEA_TEST_OUT", out_path.display().to_string());
        }
        let names = vec!["ripgrep".to_string(), "fd".to_string()];
        super::spawn_remove_all(
            &names,
            true,
            crate::state::modal::CascadeMode::CascadeWithConfigs,
        );
        // Give the spawned shim a moment to flush its capture file.
        std::thread::sleep(std::time::Duration::from_millis(50));
        let body = fs::read_to_string(&out_path).expect("fake terminal args file written");
        let lines: Vec<&str> = body.lines().collect();
        assert!(lines.len() >= 3, "expected at least 3 args, got: {body}");
        assert_eq!(lines[0], "--");
        assert_eq!(lines[1], "bash");
        assert_eq!(lines[2], "-lc");
        // Restore PATH and drop the capture variable for subsequent tests.
        unsafe {
            if let Some(v) = orig_path {
                std::env::set_var("PATH", v);
            } else {
                std::env::remove_var("PATH");
            }
            std::env::remove_var("PACSEA_TEST_OUT");
        }
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/scan/spawn.rs | src/install/scan/spawn.rs | /*!
What: Scan spawn launcher
Input:
- Package name and scan configuration flags
Output:
- Uses integrated process for scans (excluding aur-sleuth)
- Spawns terminal for aur-sleuth if enabled
Details:
- Configures environment variables and launches scans via executor
- aur-sleuth runs in separate terminal simultaneously
*/
/// What: Build aur-sleuth command for terminal execution.
///
/// Input:
/// - `pkg`: AUR package identifier to analyse.
///
/// Output:
/// - Command string for terminal execution.
///
/// Details:
/// - Sets up working directory, finds aur-sleuth binary, loads proxy settings, and runs aur-sleuth.
#[cfg(not(target_os = "windows"))]
#[must_use]
pub fn build_sleuth_command_for_terminal(pkg: &str) -> String {
// This command will be run in a separate terminal
// It sets up the working directory, finds aur-sleuth, loads config, and runs the scan
format!(
r#"pkg='{pkg}'; work=$(mktemp -d -t pacsea_scan_XXXXXXXX); cd "$work" && \
(if command -v paru >/dev/null 2>&1; then paru -G "$pkg"; elif command -v yay >/dev/null 2>&1; then yay -G "$pkg"; else git clone --depth 1 "https://aur.archlinux.org/${{pkg}}.git" || exit 1; fi) && \
if [ -f "$pkg/PKGBUILD" ]; then cd "$pkg"; else f=$(find "$pkg" -maxdepth 3 -type f -name PKGBUILD 2>/dev/null | head -n1); if [ -n "$f" ]; then cd "$(dirname "$f")"; elif [ -d "$pkg" ]; then cd "$pkg"; fi; fi && \
A_SLEUTH="$(command -v aur-sleuth 2>/dev/null || true)"; \
if [ -z "$A_SLEUTH" ] && [ -x "$HOME/.local/bin/aur-sleuth" ]; then A_SLEUTH="$HOME/.local/bin/aur-sleuth"; fi; \
if [ -z "$A_SLEUTH" ] && [ -x "/usr/local/bin/aur-sleuth" ]; then A_SLEUTH="/usr/local/bin/aur-sleuth"; fi; \
if [ -z "$A_SLEUTH" ] && [ -x "/usr/bin/aur-sleuth" ]; then A_SLEUTH="/usr/bin/aur-sleuth"; fi; \
if [ -n "$A_SLEUTH" ]; then \
cfg="${{XDG_CONFIG_HOME:-$HOME/.config}}/pacsea/settings.conf"; \
if [ -f "$cfg" ]; then \
get_key() {{ awk -F= -v k="$1" 'tolower($0) ~ "^[[:space:]]*"k"[[:space:]]*=" {{ sub(/#.*/,"",$2); gsub(/^[[:space:]]+|[[:space:]]+$/,"",$2); print $2; exit }}' "$cfg"; }}; \
HP=$(get_key http_proxy); [ -n "$HP" ] && export http_proxy="$HP"; \
XP=$(get_key https_proxy); [ -n "$XP" ] && export https_proxy="$XP"; \
AP=$(get_key all_proxy); [ -n "$AP" ] && export ALL_PROXY="$AP"; \
NP=$(get_key no_proxy); [ -n "$NP" ] && export NO_PROXY="$NP"; \
CAB=$(get_key requests_ca_bundle); [ -n "$CAB" ] && export REQUESTS_CA_BUNDLE="$CAB"; \
SCF=$(get_key ssl_cert_file); [ -n "$SCF" ] && export SSL_CERT_FILE="$SCF"; \
CCB=$(get_key curl_ca_bundle); [ -n "$CCB" ] && export CURL_CA_BUNDLE="$CCB"; \
PIPIDX=$(get_key pip_index_url); [ -n "$PIPIDX" ] && export PIP_INDEX_URL="$PIPIDX"; \
PIPEX=$(get_key pip_extra_index_url); [ -n "$PIPEX" ] && export PIP_EXTRA_INDEX_URL="$PIPEX"; \
PIPTH=$(get_key pip_trusted_host); [ -n "$PIPTH" ] && export PIP_TRUSTED_HOST="$PIPTH"; \
UVCA=$(get_key uv_http_ca_certs); [ -n "$UVCA" ] && export UV_HTTP_CA_CERTS="$UVCA"; \
fi; \
WORK_DIR=$(pwd); \
SLEUTH_OUTPUT_FILE="./.pacsea_sleuth.txt"; \
if command -v script >/dev/null 2>&1; then \
SLEUTH_CMD="cd $(printf '%q' "$WORK_DIR") && script -f -q $(printf '%q' "$SLEUTH_OUTPUT_FILE") -c \"$(printf '%q' "$A_SLEUTH") --pkgdir .\"; echo ''; echo 'Press Enter to close this window...'; read -r _;"; \
else \
SLEUTH_CMD="cd $(printf '%q' "$WORK_DIR") && $(printf '%q' "$A_SLEUTH") --pkgdir .; echo ''; echo 'Press Enter to close this window...'; read -r _;"; \
fi; \
TERM_FOUND=false; \
if command -v gnome-terminal >/dev/null 2>&1; then \
gnome-terminal -- bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v alacritty >/dev/null 2>&1; then \
alacritty -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v kitty >/dev/null 2>&1; then \
kitty bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v xterm >/dev/null 2>&1; then \
xterm -hold -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v konsole >/dev/null 2>&1; then \
konsole -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v tilix >/dev/null 2>&1; then \
tilix -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v mate-terminal >/dev/null 2>&1; then \
mate-terminal -- bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true; \
elif command -v xfce4-terminal >/dev/null 2>&1; then \
SLEUTH_CMD_QUOTED=$(printf '%q' "$SLEUTH_CMD"); \
xfce4-terminal --command "bash -lc $SLEUTH_CMD_QUOTED" 2>&1 && TERM_FOUND=true; \
fi; \
if [ "$TERM_FOUND" = "true" ]; then \
echo "aur-sleuth launched in separate terminal window."; \
echo "The scan will continue in the background. You can close the terminal when done."; \
else \
echo "No suitable terminal found. Running aur-sleuth in current terminal..."; \
("$A_SLEUTH" --pkgdir . 2>&1 | tee ./.pacsea_sleuth.txt) || echo 'aur-sleuth failed; see output above'; \
fi; \
else \
echo 'aur-sleuth not found (checked PATH, ~/.local/bin, /usr/local/bin, /usr/bin)'; \
fi"#
)
}
/// What: Launch integrated scan process for AUR package (excluding aur-sleuth).
///
/// Input:
/// - `pkg`: AUR package identifier to analyse.
/// - `_do_clamav`/`_do_trivy`/`_do_semgrep`/`_do_shellcheck`/`_do_virustotal`/`_do_custom`/`do_sleuth`: Toggles enabling optional scan stages.
///
/// Output:
/// - Uses integrated process for scans (excluding aur-sleuth).
/// - Spawns terminal for aur-sleuth if enabled (runs simultaneously).
///
/// Details:
/// - Clones `https://aur.archlinux.org/<pkg>.git` and runs `makepkg -o` (download sources only).
/// - Optionally runs `ClamAV`, `Trivy` filesystem, and `Semgrep` scans via integrated process.
/// - Performs `VirusTotal` hash lookups for `PKGBUILD`/`src` files when `VT_API_KEY` is provided.
/// - aur-sleuth runs in separate terminal simultaneously if enabled.
/// - Note: This function is kept for backward compatibility; actual execution should use `ExecutorRequest::Scan`.
/// - `_do_clamav`, `_do_trivy`, `_do_semgrep`, `_do_shellcheck`, `_do_virustotal`, and `_do_custom` parameters are kept for API consistency but unused in this function.
/// - The actual scan configuration is handled via `ExecutorRequest::Scan` which reads from the application state.
/// - The underscore prefix suppresses Rust/clippy warnings for intentionally unused parameters.
#[cfg(not(target_os = "windows"))]
#[allow(
clippy::too_many_arguments,
clippy::fn_params_excessive_bools,
clippy::must_use_candidate
)]
pub fn spawn_aur_scan_for_with_config(
pkg: &str,
_do_clamav: bool,
_do_trivy: bool,
_do_semgrep: bool,
_do_shellcheck: bool,
_do_virustotal: bool,
_do_custom: bool,
do_sleuth: bool,
) {
// Note: _do_clamav, _do_trivy, _do_semgrep, _do_shellcheck, _do_virustotal, and _do_custom
// are unused in this function. They are kept for API consistency, but the actual scan
// configuration is handled via ExecutorRequest::Scan which reads from application state.
// The underscore prefix suppresses Rust/clippy warnings for intentionally unused parameters.
// If sleuth is enabled, spawn it in a separate terminal
if do_sleuth {
let sleuth_cmd = build_sleuth_command_for_terminal(pkg);
super::super::shell::spawn_shell_commands_in_terminal(&[sleuth_cmd]);
}
// Note: The integrated scan process is triggered via ExecutorRequest::Scan
// This function is kept for backward compatibility but the actual execution
// should be done through the executor pattern (see events/modals/scan.rs)
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/scan/pkg.rs | src/install/scan/pkg.rs | /*!
What: Package scan command builder
Input:
- Package name to scan
Output:
- Vector of shell commands for scanning an AUR package
Details:
- Handles repository fetching, makepkg, and all scan stages
*/
#[cfg(not(target_os = "windows"))]
use super::{common, summary};
#[cfg(not(target_os = "windows"))]
/// What: Assemble the shell command sequence used to scan an AUR package in a temporary workspace.
///
/// Input:
/// - `pkg`: AUR package name to clone and analyse.
///
/// Output:
/// - Ordered vector of shell fragments executed sequentially in a spawned terminal.
///
/// Details:
/// - Handles repository fetching, `makepkg -o`, optional scanners, and summary reporting.
/// - Ensures each step logs progress and tolerates partial failures where possible.
/// - Note: Kept for backward compatibility; new code should use `build_scan_cmds_for_pkg_without_sleuth`.
#[allow(dead_code)] // Kept for backward compatibility
pub fn build_scan_cmds_for_pkg(pkg: &str) -> Vec<String> {
// All commands are joined with " && " and run in a single bash -lc invocation in a terminal.
// Keep each step resilient so later steps still run where possible.
let mut cmds: Vec<String> = Vec::new();
add_setup_commands(&mut cmds, pkg);
add_fetch_commands(&mut cmds, pkg);
add_makepkg_commands(&mut cmds, pkg);
add_all_scans(&mut cmds);
add_summary_commands(&mut cmds, pkg);
cmds
}
/// What: Assemble the shell command sequence for scanning (excluding aur-sleuth).
///
/// Input:
/// - `pkg`: AUR package name to clone and analyse.
///
/// Output:
/// - Ordered vector of shell fragments executed sequentially via PTY.
///
/// Details:
/// - Handles repository fetching, `makepkg -o`, optional scanners (excluding sleuth), and summary reporting.
/// - Used for integrated process execution (aur-sleuth runs separately in terminal).
#[cfg(not(target_os = "windows"))]
pub fn build_scan_cmds_for_pkg_without_sleuth(pkg: &str) -> Vec<String> {
// All commands are joined with " && " and run in a single bash -lc invocation via PTY.
// Keep each step resilient so later steps still run where possible.
let mut cmds: Vec<String> = Vec::new();
add_setup_commands(&mut cmds, pkg);
add_fetch_commands(&mut cmds, pkg);
add_makepkg_commands(&mut cmds, pkg);
add_scans_without_sleuth(&mut cmds);
add_summary_commands_without_sleuth(&mut cmds, pkg);
cmds
}
/// What: Add setup commands (working directory, logging) to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
/// - `pkg`: Package name.
///
/// Output:
/// - Appends setup commands to the vector.
#[cfg(not(target_os = "windows"))]
fn add_setup_commands(cmds: &mut Vec<String>, pkg: &str) {
// 0) Create and enter working directory; remember it for later messages
cmds.push(format!("pkg='{pkg}'"));
cmds.push("echo \"[PACSEA] scan_start pkg='$pkg' ts=$(date -Ins) shell=$SHELL term=$TERM display=$DISPLAY\"".to_string());
cmds.push("work=$(mktemp -d -t pacsea_scan_XXXXXXXX)".to_string());
cmds.push("echo \"Pacsea: scanning AUR package '$pkg'\"".to_string());
cmds.push("echo \"Working directory: $work\"".to_string());
cmds.push("cd \"$work\" && { export PACSEA_DEBUG_LOG=\"$(pwd)/.pacsea_debug.log\"; exec > >(tee -a \"$PACSEA_DEBUG_LOG\") 2>&1; exec 9>>\"$PACSEA_DEBUG_LOG\"; export BASH_XTRACEFD=9; set -x; echo \"Pacsea debug: $(date) start scan for '$pkg' in $PWD\"; trap 'code=$?; echo; echo \"Pacsea debug: exit code=$code\"; echo \"Log: $PACSEA_DEBUG_LOG\"; echo \"Press any key to close...\"; read -rn1 -s _' EXIT; }".to_string());
cmds.push("if command -v git >/dev/null 2>&1 || sudo pacman -Qi git >/dev/null 2>&1; then :; else echo 'git not found. Cannot clone AUR repo.'; false; fi".to_string());
}
/// What: Add repository fetching commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
/// - `_pkg`: Package name (unused, shell variable $pkg is set earlier).
///
/// Output:
/// - Appends fetch commands to the vector.
///
/// Details:
/// - `_pkg` parameter is kept for API consistency but unused in Rust code.
/// - The shell variable `$pkg` is set in `add_setup_commands()` earlier in the command chain.
/// - The underscore prefix suppresses Rust/clippy warnings for intentionally unused parameters.
#[cfg(not(target_os = "windows"))]
fn add_fetch_commands(cmds: &mut Vec<String>, _pkg: &str) {
// Note: _pkg is unused in Rust code; shell variable $pkg is used in command strings.
// Parameter kept for API consistency with other command builder functions.
// 1) Fetch PKGBUILD via AUR helper first; fallback to git clone
cmds.push("echo 'Fetching PKGBUILD via AUR helper (-G)…'".to_string());
cmds.push("echo \"[PACSEA] phase=fetch_helper ts=$(date -Ins)\"".to_string());
cmds.push("(if command -v paru >/dev/null 2>&1; then paru -G \"$pkg\"; elif command -v yay >/dev/null 2>&1; then yay -G \"$pkg\"; else echo 'No AUR helper (paru/yay) found for -G'; false; fi) || (echo 'Falling back to git clone…'; git clone --depth 1 \"https://aur.archlinux.org/${pkg}.git\" || { echo 'Clone failed'; false; })".to_string());
cmds.push("if [ -f \"$pkg/PKGBUILD\" ]; then cd \"$pkg\"; else f=$(find \"$pkg\" -maxdepth 3 -type f -name PKGBUILD 2>/dev/null | head -n1); if [ -n \"$f\" ]; then cd \"$(dirname \"$f\")\"; elif [ -d \"$pkg\" ]; then cd \"$pkg\"; fi; fi".to_string());
cmds.push("echo \"PKGBUILD path: $(pwd)/PKGBUILD\"".to_string());
}
/// What: Add makepkg commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
/// - `_pkg`: Package name (unused, shell variable $pkg is set earlier).
///
/// Output:
/// - Appends makepkg commands to the vector.
///
/// Details:
/// - Handles PKGBUILD location fallbacks and helper cache population.
/// - `_pkg` parameter is kept for API consistency but unused in Rust code.
/// - The shell variable `$pkg` is set in `add_setup_commands()` earlier in the command chain.
/// - The underscore prefix suppresses Rust/clippy warnings for intentionally unused parameters.
#[cfg(not(target_os = "windows"))]
fn add_makepkg_commands(cmds: &mut Vec<String>, _pkg: &str) {
// Note: _pkg is unused in Rust code; shell variable $pkg is used in command strings.
// Parameter kept for API consistency with other command builder functions.
// 2) Download sources only
cmds.push("echo 'Running makepkg -o (download sources only)…'".to_string());
cmds.push("echo \"[PACSEA] phase=makepkg_download ts=$(date -Ins)\"".to_string());
// Do not abort the whole chain if makepkg fails (e.g., missing base-devel). Continue scanning.
cmds.push("({ \
if [ ! -f PKGBUILD ]; then \
echo 'PKGBUILD not found; fallback: re-clone via git…'; \
cd .. || true; \
rm -rf \"$pkg\" 2>/dev/null || true; \
git clone --depth 1 \"https://aur.archlinux.org/${pkg}.git\" || true; \
if [ -f \"$pkg/PKGBUILD\" ]; then \
cd \"$pkg\"; \
else \
f=$(find \"$pkg\" -maxdepth 3 -type f -name PKGBUILD 2>/dev/null | head -n1); \
if [ -n \"$f\" ]; then cd \"$(dirname \"$f\")\" || true; else echo 'PKGBUILD still missing after git fallback'; fi; \
fi; \
fi; \
if [ ! -f PKGBUILD ]; then \
echo 'Trying helper -S to populate cache and copy build files…'; \
cdir=''; \
if command -v paru >/dev/null 2>&1; then \
echo 'Detecting paru buildDir…'; \
bdir=$(paru -Pg 2>/dev/null | grep -m1 -o '\"buildDir\": *\"[^\"]*\"' | cut -d '\"' -f4); \
bdir=${bdir:-\"$HOME/.cache/paru\"}; \
echo \"Paru buildDir: $bdir\"; \
echo 'Cleaning existing cached package directory…'; \
find \"$bdir\" -maxdepth 5 -type d -name \"$pkg\" -exec rm -rf {} + 2>/dev/null || true; \
echo 'Populating paru cache with -S (auto-abort, 20s timeout)…'; \
timeout 20s bash -lc 'yes n | paru -S \"$pkg\"' >/dev/null 2>&1 || true; \
cdir=$(find \"$bdir\" -maxdepth 6 -type f -name PKGBUILD -path \"*/$pkg/*\" 2>/dev/null | head -n1); \
if [ -z \"$cdir\" ]; then cdir=$(find \"$bdir\" -maxdepth 6 -type f -name PKGBUILD 2>/dev/null | head -n1); fi; \
elif command -v yay >/dev/null 2>&1; then \
echo 'Detecting yay buildDir…'; \
bdir=$(yay -Pg 2>/dev/null | grep -m1 -o '\"buildDir\": *\"[^\"]*\"' | cut -d '\"' -f4); \
bdir=${bdir:-\"$HOME/.cache/yay\"}; \
echo \"Yay buildDir: $bdir\"; \
echo 'Cleaning existing cached package directory…'; \
find \"$bdir\" -maxdepth 5 -type d -name \"$pkg\" -exec rm -rf {} + 2>/dev/null || true; \
echo 'Populating yay cache with -S (auto-abort, 20s timeout)…'; \
timeout 20s bash -lc 'yes n | yay -S --noconfirm \"$pkg\"' >/dev/null 2>&1 || true; \
cdir=$(find \"$bdir\" -maxdepth 6 -type f -name PKGBUILD -path \"*/$pkg/*\" 2>/dev/null | head -n1); \
if [ -z \"$cdir\" ]; then cdir=$(find \"$bdir\" -maxdepth 6 -type f -name PKGBUILD 2>/dev/null | head -n1); fi; \
fi; \
if [ -n \"$cdir\" ]; then \
cd \"$(dirname \"$cdir\")\" || true; \
else \
echo 'Could not locate PKGBUILD in helper cache.'; \
fi; \
fi; \
echo \"PKGBUILD path: $(pwd)/PKGBUILD\"; \
if [ -f PKGBUILD ]; then \
makepkg -o --noconfirm && echo 'makepkg -o: sources downloaded.'; \
else \
echo 'Skipping makepkg -o: PKGBUILD still missing.'; \
fi; \
}) || echo 'makepkg -o failed or partially completed; continuing'".to_string());
}
/// What: Add all scan commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
///
/// Output:
/// - Appends all scan commands to the vector.
/// - Note: Kept for backward compatibility; new code should use `add_scans_without_sleuth`.
#[cfg(not(target_os = "windows"))]
#[allow(dead_code)] // Kept for backward compatibility
fn add_all_scans(cmds: &mut Vec<String>) {
common::add_pattern_exports(cmds);
common::add_clamav_scan(cmds);
common::add_trivy_scan(cmds);
common::add_semgrep_scan(cmds);
common::add_sleuth_scan(cmds);
common::add_shellcheck_scan(cmds);
common::add_shellcheck_risk_eval(cmds);
common::add_custom_pattern_scan(cmds);
common::add_virustotal_scan(cmds);
}
/// What: Add scan commands to command vector (excluding aur-sleuth).
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
///
/// Output:
/// - Appends scan commands (excluding sleuth) to the vector.
///
/// Details:
/// - Used for integrated process execution (aur-sleuth runs separately in terminal).
#[cfg(not(target_os = "windows"))]
pub fn add_scans_without_sleuth(cmds: &mut Vec<String>) {
common::add_pattern_exports(cmds);
common::add_clamav_scan(cmds);
common::add_trivy_scan(cmds);
common::add_semgrep_scan(cmds);
// Skip sleuth - runs in separate terminal
common::add_shellcheck_scan(cmds);
common::add_shellcheck_risk_eval(cmds);
common::add_custom_pattern_scan(cmds);
common::add_virustotal_scan(cmds);
}
/// What: Add summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
/// - `_pkg`: Package name (unused, shell variable $pkg is set earlier).
///
/// Output:
/// - Appends summary commands to the vector.
/// - Note: Kept for backward compatibility; new code should use `add_summary_commands_without_sleuth`.
///
/// Details:
/// - `_pkg` parameter is kept for API consistency but unused in Rust code.
/// - The shell variable `$pkg` is set in `add_setup_commands()` earlier in the command chain.
/// - The underscore prefix suppresses Rust/clippy warnings for intentionally unused parameters.
#[cfg(not(target_os = "windows"))]
#[allow(dead_code)] // Kept for backward compatibility
fn add_summary_commands(cmds: &mut Vec<String>, _pkg: &str) {
// Note: _pkg is unused in Rust code; shell variable $pkg is used in command strings.
// Parameter kept for API consistency with other command builder functions.
// Final note with working directory for manual inspection
cmds.push("echo".to_string());
cmds.push("echo '--- Summary ---'".to_string());
cmds.push("echo -e '\\033[1;36m[📊] Summary\\033[0m'".to_string());
summary::add_overall_risk_calc(cmds);
summary::add_clamav_summary(cmds);
summary::add_trivy_summary(cmds);
summary::add_semgrep_summary(cmds);
summary::add_shellcheck_summary(cmds);
summary::add_shellcheck_risk_summary(cmds);
summary::add_sleuth_summary(cmds);
summary::add_custom_and_vt_summary(cmds);
cmds.push("echo".to_string());
cmds.push("echo \"Pacsea: scan finished. Working directory preserved: $work\"".to_string());
cmds.push("echo -e \"\\033[1;32m[✔] Pacsea: scan finished.\\033[0m Working directory preserved: $work\"".to_string());
}
/// What: Add summary commands to command vector (excluding aur-sleuth summary).
///
/// Input:
/// - `cmds`: Mutable reference to command vector.
/// - `_pkg`: Package name (unused, shell variable $pkg is set earlier).
///
/// Output:
/// - Appends summary commands (excluding sleuth) to the vector.
///
/// Details:
/// - Used for integrated process execution (aur-sleuth runs separately in terminal).
/// - `_pkg` parameter is kept for API consistency but unused in Rust code.
/// - The shell variable `$pkg` is set in `add_setup_commands()` earlier in the command chain.
/// - The underscore prefix suppresses Rust/clippy warnings for intentionally unused parameters.
#[cfg(not(target_os = "windows"))]
pub fn add_summary_commands_without_sleuth(cmds: &mut Vec<String>, _pkg: &str) {
// Note: _pkg is unused in Rust code; shell variable $pkg is used in command strings.
// Parameter kept for API consistency with other command builder functions.
// Final note with working directory for manual inspection
cmds.push("echo".to_string());
cmds.push("echo '--- Summary ---'".to_string());
cmds.push("echo -e '\\033[1;36m[📊] Summary\\033[0m'".to_string());
summary::add_overall_risk_calc(cmds);
summary::add_clamav_summary(cmds);
summary::add_trivy_summary(cmds);
summary::add_semgrep_summary(cmds);
summary::add_shellcheck_summary(cmds);
summary::add_shellcheck_risk_summary(cmds);
// Skip sleuth summary - runs in separate terminal
summary::add_custom_and_vt_summary(cmds);
cmds.push("echo".to_string());
cmds.push("echo \"Pacsea: scan finished. Working directory preserved: $work\"".to_string());
cmds.push("echo -e \"\\033[1;32m[✔] Pacsea: scan finished.\\033[0m Working directory preserved: $work\"".to_string());
}
#[cfg(all(test, not(target_os = "windows")))]
mod tests {
use super::*;
#[test]
/// What: Ensure scan command generation for AUR packages exports expected steps and annotations.
///
/// Inputs:
/// - Package name `foobar` supplied to `build_scan_cmds_for_pkg`.
///
/// Output:
/// - Command list includes environment exports, git clone, makepkg fetch, optional scan sections, and summary note.
///
/// Details:
/// - Joins the command list to assert presence of key substrings, catching regressions in the scripted pipeline.
fn build_scan_cmds_for_pkg_has_core_steps() {
let cmds = build_scan_cmds_for_pkg("foobar");
let joined = cmds.join("\n");
assert!(
joined.contains("pkg='foobar'"),
"should export pkg variable with provided name"
);
assert!(
joined.contains("git clone --depth 1 \"https://aur.archlinux.org/${pkg}.git\""),
"should clone the AUR repo using the pkg variable"
);
assert!(
joined.contains("makepkg -o --noconfirm"),
"should attempt to download sources with makepkg -o"
);
assert!(
joined.contains("--- ClamAV scan (optional) ---"),
"should include ClamAV scan section"
);
assert!(
joined.contains("--- Trivy filesystem scan (optional) ---"),
"should include Trivy FS scan section"
);
assert!(
joined.contains("--- Semgrep static analysis (optional) ---"),
"should include Semgrep scan section"
);
assert!(
joined.contains("--- VirusTotal hash lookups (requires VT_API_KEY env var) ---"),
"should include VirusTotal lookup section"
);
assert!(
joined.contains("echo '--- Summary ---'"),
"should include final summary section"
);
assert!(
joined.contains("Pacsea: scan finished. Working directory preserved: $work"),
"should print final working directory note"
);
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/scan/summary.rs | src/install/scan/summary.rs | /*!
What: Summary command builders for scan results
Input:
- Command vector to append to
Output:
- Appends summary command strings to the provided vector
Details:
- Provides functions to generate overall risk assessment and per-scan summaries
*/
/// What: Add overall risk calculation commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends overall risk calculation commands to the vector.
///
/// Details:
/// - Aggregates scores from `ShellCheck`, `ClamAV`, `Trivy`, `Semgrep`, `VirusTotal`, and Custom scans.
/// - Calculates overall percentage and tier (LOW/MEDIUM/HIGH/CRITICAL).
#[cfg(not(target_os = "windows"))]
#[allow(clippy::all, clippy::literal_string_with_formatting_args)] // Shell variable syntax ${VAR:-default} in raw strings - false positive
pub fn add_overall_risk_calc(cmds: &mut Vec<String>) {
cmds.push(
r#"(
overall_score=0; overall_max=0;
rf=./.pacsea_shellcheck_risk.txt;
if [ -f "$rf" ]; then
RS=$(grep -E '^RISK_SCORE=' "$rf" | cut -d= -f2); RS=${RS:-0};
if [ "$RS" -gt 100 ]; then RS=100; fi;
overall_score=$((overall_score+RS)); overall_max=$((overall_max+100));
fi;
if [ -f ./.pacsea_scan_clamav.txt ]; then
INF=$(grep -E 'Infected files:[[:space:]]*[0-9]+' ./.pacsea_scan_clamav.txt | tail -n1 | awk -F: '{print $2}' | xargs);
INF=${INF:-0};
CV=$([ "$INF" -gt 0 ] && echo 100 || echo 0);
overall_score=$((overall_score+CV)); overall_max=$((overall_max+100));
fi;
TRI=0;
if [ -f ./.pacsea_scan_trivy.json ]; then
C=$(grep -o '"Severity":"CRITICAL"' ./.pacsea_scan_trivy.json | wc -l);
H=$(grep -o '"Severity":"HIGH"' ./.pacsea_scan_trivy.json | wc -l);
M=$(grep -o '"Severity":"MEDIUM"' ./.pacsea_scan_trivy.json | wc -l);
L=$(grep -o '"Severity":"LOW"' ./.pacsea_scan_trivy.json | wc -l);
TRI=$((C*10 + H*5 + M*2 + L));
elif [ -f ./.pacsea_scan_trivy.txt ]; then
C=$(grep -oi 'CRITICAL' ./.pacsea_scan_trivy.txt | wc -l);
H=$(grep -oi 'HIGH' ./.pacsea_scan_trivy.txt | wc -l);
M=$(grep -oi 'MEDIUM' ./.pacsea_scan_trivy.txt | wc -l);
L=$(grep -oi 'LOW' ./.pacsea_scan_trivy.txt | wc -l);
TRI=$((C*10 + H*5 + M*2 + L));
fi;
if [ -f ./.pacsea_scan_trivy.json ] || [ -f ./.pacsea_scan_trivy.txt ]; then
if [ "$TRI" -gt 100 ]; then TRI=100; fi;
overall_score=$((overall_score+TRI)); overall_max=$((overall_max+100));
fi;
SG=0;
if [ -f ./.pacsea_scan_semgrep.json ]; then
SG=$(grep -o '"check_id"' ./.pacsea_scan_semgrep.json | wc -l);
elif [ -f ./.pacsea_scan_semgrep.txt ]; then
SG=$(grep -E '^[^:]+:[0-9]+:[0-9]+:' ./.pacsea_scan_semgrep.txt | wc -l);
fi;
if [ -f ./.pacsea_scan_semgrep.json ] || [ -f ./.pacsea_scan_semgrep.txt ]; then
SG=$((SG*3)); if [ "$SG" -gt 100 ]; then SG=100; fi;
overall_score=$((overall_score+SG)); overall_max=$((overall_max+100));
fi;
VT=0;
if [ -f ./.pacsea_scan_vt_summary.txt ]; then
VT_MAL=$(grep -E '^VT_MAL=' ./.pacsea_scan_vt_summary.txt | cut -d= -f2);
VT_SUS=$(grep -E '^VT_SUS=' ./.pacsea_scan_vt_summary.txt | cut -d= -f2);
VT_MAL=${VT_MAL:-0}; VT_SUS=${VT_SUS:-0};
VT=$((VT_MAL*10 + VT_SUS*3));
if [ "$VT" -gt 100 ]; then VT=100; fi;
overall_score=$((overall_score+VT)); overall_max=$((overall_max+100));
fi;
CS=0;
if [ -f ./.pacsea_custom_score.txt ]; then
CS=$(grep -E '^CUSTOM_PERCENT=' ./.pacsea_custom_score.txt | cut -d= -f2); CS=${CS:-0};
if [ "$CS" -gt 100 ]; then CS=100; fi;
overall_score=$((overall_score+CS)); overall_max=$((overall_max+100));
fi;
PCT=0; if [ "$overall_max" -gt 0 ]; then PCT=$((overall_score*100/overall_max)); fi;
TIER='LOW'; COLOR='\033[1;32m'; ICON='[✔]';
if [ "$PCT" -ge 75 ]; then TIER='CRITICAL'; COLOR='\033[1;31m'; ICON='[❌]';
elif [ "$PCT" -ge 50 ]; then TIER='HIGH'; COLOR='\033[1;33m'; ICON='[❗]';
elif [ "$PCT" -ge 25 ]; then TIER='MEDIUM'; COLOR='\033[1;34m'; ICON='[⚠️ ]';
fi;
echo -e "$COLOR$ICON Overall risk: ${PCT}% ($TIER)\033[0m";
{
echo "OVERALL_PERCENT=$PCT";
echo "OVERALL_TIER=$TIER";
echo "COMPONENT_MAX=$overall_max";
echo "COMPONENT_SCORE=$overall_score";
} > ./.pacsea_overall_risk.txt;
)"#
.to_string(),
);
}
/// What: Add `ClamAV` summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends `ClamAV` summary commands to the vector.
#[cfg(not(target_os = "windows"))]
pub fn add_clamav_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"if [ -f ./.pacsea_scan_clamav.txt ]; then
inf=$(grep -E 'Infected files:[[:space:]]*[0-9]+' ./.pacsea_scan_clamav.txt | tail -n1 | awk -F: '{print $2}' | xargs);
if [ -n "$inf" ]; then
if [ "$inf" -gt 0 ]; then echo "ClamAV: infected files: $inf";
else echo "ClamAV: no infections detected"; fi;
else
echo 'ClamAV: no infections detected';
fi;
else
echo 'ClamAV: not run';
fi"#
.to_string(),
);
}
/// What: Add Trivy summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends Trivy summary commands to the vector.
#[cfg(not(target_os = "windows"))]
pub fn add_trivy_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"if [ -f ./.pacsea_scan_trivy.json ]; then
c=$(grep -o '"Severity":"CRITICAL"' ./.pacsea_scan_trivy.json | wc -l);
h=$(grep -o '"Severity":"HIGH"' ./.pacsea_scan_trivy.json | wc -l);
m=$(grep -o '"Severity":"MEDIUM"' ./.pacsea_scan_trivy.json | wc -l);
l=$(grep -o '"Severity":"LOW"' ./.pacsea_scan_trivy.json | wc -l);
t=$((c+h+m+l));
if [ "$t" -gt 0 ]; then
echo "Trivy findings: critical=$c high=$h medium=$m low=$l total=$t";
else
echo 'Trivy: no vulnerabilities found';
fi;
elif [ -f ./.pacsea_scan_trivy.txt ]; then
if grep -qiE 'CRITICAL|HIGH|MEDIUM|LOW' ./.pacsea_scan_trivy.txt; then
c=$(grep -oi 'CRITICAL' ./.pacsea_scan_trivy.txt | wc -l);
h=$(grep -oi 'HIGH' ./.pacsea_scan_trivy.txt | wc -l);
m=$(grep -oi 'MEDIUM' ./.pacsea_scan_trivy.txt | wc -l);
l=$(grep -oi 'LOW' ./.pacsea_scan_trivy.txt | wc -l);
t=$((c+h+m+l));
echo "Trivy findings: critical=$c high=$h medium=$m low=$l total=$t";
else
echo 'Trivy: no vulnerabilities found';
fi;
else
echo 'Trivy: not run';
fi"#
.to_string(),
);
}
/// What: Add Semgrep summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends Semgrep summary commands to the vector.
#[cfg(not(target_os = "windows"))]
pub fn add_semgrep_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"if [ -f ./.pacsea_scan_semgrep.json ]; then
n=$(grep -o '"check_id"' ./.pacsea_scan_semgrep.json | wc -l);
if [ "$n" -gt 0 ]; then echo "Semgrep findings: $n";
else echo 'Semgrep: no findings'; fi;
elif [ -f ./.pacsea_scan_semgrep.txt ]; then
n=$(grep -E '^[^:]+:[0-9]+:[0-9]+:' ./.pacsea_scan_semgrep.txt | wc -l);
if [ "$n" -gt 0 ]; then echo "Semgrep findings: $n";
else echo 'Semgrep: no findings'; fi;
else
echo 'Semgrep: not run';
fi"#
.to_string(),
);
}
/// What: Add `ShellCheck` summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends `ShellCheck` summary commands to the vector.
#[cfg(not(target_os = "windows"))]
pub fn add_shellcheck_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"if [ -f ./.pacsea_shellcheck_pkgbuild.json ] || [ -f ./.pacsea_shellcheck_pkgbuild.txt ] || [ -f ./.pacsea_shellcheck_install.json ] || [ -f ./.pacsea_shellcheck_install.txt ]; then
sc_err=0; sc_warn=0;
sc_err=$((sc_err + $(cat ./.pacsea_shellcheck_pkgbuild.json ./.pacsea_shellcheck_install.json 2>/dev/null | grep -o '"level":"error"' | wc -l)));
sc_warn=$((sc_warn + $(cat ./.pacsea_shellcheck_pkgbuild.json ./.pacsea_shellcheck_install.json 2>/dev/null | grep -o '"level":"warning"' | wc -l)));
sc_err=$((sc_err + $(cat ./.pacsea_shellcheck_pkgbuild.txt ./.pacsea_shellcheck_install.txt 2>/dev/null | grep -oi 'error:' | wc -l)));
sc_warn=$((sc_warn + $(cat ./.pacsea_shellcheck_pkgbuild.txt ./.pacsea_shellcheck_install.txt 2>/dev/null | grep -oi 'warning:' | wc -l)));
echo "ShellCheck: errors=$sc_err warnings=$sc_warn";
else
echo 'ShellCheck: not run';
fi"#
.to_string(),
);
}
/// What: Add `ShellCheck` risk evaluation summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends `ShellCheck` risk evaluation summary commands to the vector.
#[cfg(not(target_os = "windows"))]
pub fn add_shellcheck_risk_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"rf=./.pacsea_shellcheck_risk.txt; if [ -f "$rf" ] && { [ -f ./.pacsea_shellcheck_pkgbuild.json ] || [ -f ./.pacsea_shellcheck_pkgbuild.txt ] || [ -f ./.pacsea_shellcheck_install.json ] || [ -f ./.pacsea_shellcheck_install.txt ]; }; then
RS=$(grep -E '^RISK_SCORE=' "$rf" | cut -d= -f2);
RT=$(grep -E '^RISK_TIER=' "$rf" | cut -d= -f2);
echo "ShellCheck Risk Evaluation: score=$RS tier=$RT";
fi"#
.to_string(),
);
}
/// What: Add aur-sleuth summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends aur-sleuth summary commands to the vector.
/// - Note: Kept for backward compatibility; aur-sleuth now runs in separate terminal.
#[cfg(not(target_os = "windows"))]
#[allow(dead_code)] // Kept for backward compatibility
pub fn add_sleuth_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"if [ -f ./.pacsea_sleuth.txt ]; then
status_line=$(grep -E '^Status:' ./.pacsea_sleuth.txt | head -n1);
if [ -n "$status_line" ]; then
echo "aur-sleuth: $status_line";
else
echo 'aur-sleuth: scan completed';
fi;
else
echo 'aur-sleuth: not run';
fi"#
.to_string(),
);
}
/// What: Add custom scan and `VirusTotal` summary commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends custom scan and `VirusTotal` summary commands to the vector.
#[cfg(not(target_os = "windows"))]
pub fn add_custom_and_vt_summary(cmds: &mut Vec<String>) {
cmds.push(
r#"csf=./.pacsea_custom_score.txt; if [ -f "$csf" ]; then
CP=$(grep -E '^CUSTOM_PERCENT=' "$csf" | cut -d= -f2);
CT=$(grep -E '^CUSTOM_TIER=' "$csf" | cut -d= -f2);
CC=$(grep -E '^CUSTOM_CRIT=' "$csf" | cut -d= -f2);
CH=$(grep -E '^CUSTOM_HIGH=' "$csf" | cut -d= -f2);
CM=$(grep -E '^CUSTOM_MED=' "$csf" | cut -d= -f2);
CL=$(grep -E '^CUSTOM_LOW=' "$csf" | cut -d= -f2);
echo "Custom scan: score=${CP}% tier=$CT crit=$CC high=$CH med=$CM low=$CL";
else
echo 'Custom scan: not run';
fi
vtf=./.pacsea_scan_vt_summary.txt; if [ -f "$vtf" ]; then
VT_TOTAL=$(grep -E '^VT_TOTAL=' "$vtf" | cut -d= -f2);
VT_KNOWN=$(grep -E '^VT_KNOWN=' "$vtf" | cut -d= -f2);
VT_UNKNOWN=$(grep -E '^VT_UNKNOWN=' "$vtf" | cut -d= -f2);
VT_MAL=$(grep -E '^VT_MAL=' "$vtf" | cut -d= -f2);
VT_SUS=$(grep -E '^VT_SUS=' "$vtf" | cut -d= -f2);
VT_HAR=$(grep -E '^VT_HAR=' "$vtf" | cut -d= -f2);
VT_UND=$(grep -E '^VT_UND=' "$vtf" | cut -d= -f2);
echo "VirusTotal: files=$VT_TOTAL known=$VT_KNOWN malicious=$VT_MAL suspicious=$VT_SUS harmless=$VT_HAR undetected=$VT_UND unknown=$VT_UNKNOWN";
else
echo 'VirusTotal: not configured or no files';
fi"#
.to_string(),
);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/scan/mod.rs | src/install/scan/mod.rs | /*!
What: AUR package scan launcher
Input:
- Package name to scan (clone-and-scan), or a target directory to scan in-place
Output:
- Spawns a terminal that runs a sequence of shell commands to clone, download sources, and run optional scanners; results are printed/saved into files in a temp directory (or target dir)
Details:
- Steps: clone AUR repo; run `makepkg -o`; run optional scanners (`ClamAV`, `Trivy`, `Semgrep`); optional `VirusTotal` hash lookups when `VT_API_KEY` is present
- Semgrep is not installed automatically; if missing, a warning is printed and the scan is skipped
- `VirusTotal` lookups are hash-based; unknown files may report "no report found"
- Working directory is a temporary directory printed to the terminal and preserved for inspection
*/
mod common;
mod dir;
pub mod pkg;
pub mod spawn;
mod summary;
#[cfg(not(target_os = "windows"))]
pub use spawn::spawn_aur_scan_for_with_config;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/scan/dir.rs | src/install/scan/dir.rs | /*!
What: Directory scan command builder
Input:
- Target directory to scan
Output:
- Vector of shell commands for scanning a directory in-place
Details:
- Mirrors package scan but omits clone steps, operating on existing directory
*/
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/scan/common.rs | src/install/scan/common.rs | /*!
What: Common scan command builders shared between package and directory scans
Input:
- Command vector to append to
Output:
- Appends scan command strings to the provided vector
Details:
- Provides reusable functions for `ClamAV`, `Trivy`, `Semgrep`, `ShellCheck`, Custom patterns, `VirusTotal`, and `aur-sleuth` scans
*/
/// What: Add pattern environment variable exports to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends pattern export commands to the vector.
///
/// Details:
/// - Sets default pattern regexes for CRITICAL, HIGH, MEDIUM, and LOW severity levels if not already set.
#[cfg(not(target_os = "windows"))]
pub fn add_pattern_exports(cmds: &mut Vec<String>) {
cmds.push("if [ -z \"${PACSEA_PATTERNS_CRIT:-}\" ]; then export PACSEA_PATTERNS_CRIT='/dev/(tcp|udp)/|bash -i *>& *[^ ]*/dev/(tcp|udp)/[0-9]+|exec [0-9]{2,}<>/dev/(tcp|udp)/|rm -rf[[:space:]]+/|dd if=/dev/zero of=/dev/sd[a-z]|[>]{1,2}[[:space:]]*/dev/sd[a-z]|: *\\(\\) *\\{ *: *\\| *: *& *\\};:|/etc/sudoers([[:space:]>]|$)|echo .*[>]{2}.*(/etc/sudoers|/root/.ssh/authorized_keys)|/etc/ld\\.so\\.preload|LD_PRELOAD=|authorized_keys.*[>]{2}|ssh-rsa [A-Za-z0-9+/=]+.*[>]{2}.*authorized_keys|curl .*(169\\.254\\.169\\.254)'; fi".to_string());
cmds.push("if [ -z \"${PACSEA_PATTERNS_HIGH:-}\" ]; then export PACSEA_PATTERNS_HIGH='eval|base64 -d|wget .*(sh|bash|dash|ksh|zsh)([^A-Za-z]|$)|curl .*(sh|bash|dash|ksh|zsh)([^A-Za-z]|$)|sudo[[:space:]]|chattr[[:space:]]|useradd|adduser|groupadd|systemctl|service[[:space:]]|crontab|/etc/cron\\.|[>]{2}.*(\\.bashrc|\\.bash_profile|/etc/profile|\\.zshrc)|cat[[:space:]]+/etc/shadow|cat[[:space:]]+~/.ssh/id_rsa|cat[[:space:]]+~/.bash_history|systemctl stop (auditd|rsyslog)|service (auditd|rsyslog) stop|scp .*@|curl -F|nc[[:space:]].*<|tar -czv?f|zip -r'; fi".to_string());
cmds.push("if [ -z \"${PACSEA_PATTERNS_MEDIUM:-}\" ]; then export PACSEA_PATTERNS_MEDIUM='whoami|uname -a|hostname|id|groups|nmap|netstat -anp|ss -anp|ifconfig|ip addr|arp -a|grep -ri .*secret|find .*-name.*(password|\\.key)|env[[:space:]]*\\|[[:space:]]*grep -i pass|wget https?://|curl https?://'; fi".to_string());
cmds.push("if [ -z \"${PACSEA_PATTERNS_LOW:-}\" ]; then export PACSEA_PATTERNS_LOW='http_proxy=|https_proxy=|ALL_PROXY=|yes[[:space:]]+> */dev/null *&|ulimit -n [0-9]{5,}'; fi".to_string());
}
/// What: Add `ClamAV` scan commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends `ClamAV` scan commands to the vector.
///
/// Details:
/// - Checks for `ClamAV` availability and signature database before running scan.
/// - Respects `PACSEA_SCAN_DO_CLAMAV` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_clamav_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- ClamAV scan (optional) ---'".to_string());
cmds.push("echo -e '\\033[1;34m[🔍] ClamAV scan (optional)\\033[0m'".to_string());
cmds.push("(if [ \"${PACSEA_SCAN_DO_CLAMAV:-1}\" = \"1\" ]; then ((command -v clamscan >/dev/null 2>&1 || sudo pacman -Qi clamav >/dev/null 2>&1) && { if find /var/lib/clamav -maxdepth 1 -type f \\( -name '*.cvd' -o -name '*.cld' \\) 2>/dev/null | grep -q .; then clamscan -r . | tee ./.pacsea_scan_clamav.txt; else echo 'ClamAV found but no signature database in /var/lib/clamav'; echo 'Tip: run: sudo freshclam (or start the updater: sudo systemctl start clamav-freshclam)'; fi; } || echo 'ClamAV (clamscan) encountered an error; skipping') || echo 'ClamAV not found; skipping'; else echo 'ClamAV: skipped by config'; fi)".to_string());
}
/// What: Add Trivy filesystem scan commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends Trivy scan commands to the vector.
///
/// Details:
/// - Attempts JSON output first, falls back to text output.
/// - Respects `PACSEA_SCAN_DO_TRIVY` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_trivy_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- Trivy filesystem scan (optional) ---'".to_string());
cmds.push("echo -e '\\033[1;34m[🧰] Trivy filesystem scan (optional)\\033[0m'".to_string());
cmds.push("(if [ \"${PACSEA_SCAN_DO_TRIVY:-1}\" = \"1\" ]; then ((command -v trivy >/dev/null 2>&1 || sudo pacman -Qi trivy >/dev/null 2>&1) && (trivy fs --quiet --format json . > ./.pacsea_scan_trivy.json || trivy fs --quiet . | tee ./.pacsea_scan_trivy.txt) || echo 'Trivy not found or failed; skipping'); else echo 'Trivy: skipped by config'; fi)".to_string());
}
/// What: Add Semgrep static analysis commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends Semgrep scan commands to the vector.
///
/// Details:
/// - Uses auto-config mode for `Semgrep`.
/// - Respects `PACSEA_SCAN_DO_SEMGREP` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_semgrep_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- Semgrep static analysis (optional) ---'".to_string());
cmds.push("echo -e '\\033[1;34m[🧪] Semgrep static analysis (optional)\\033[0m'".to_string());
cmds.push("(if [ \"${PACSEA_SCAN_DO_SEMGREP:-1}\" = \"1\" ]; then ((command -v semgrep >/dev/null 2>&1 || sudo pacman -Qi semgrep >/dev/null 2>&1) && (semgrep --config=auto --json . > ./.pacsea_scan_semgrep.json || semgrep --config=auto . | tee ./.pacsea_scan_semgrep.txt) || echo 'Semgrep not found; skipping'); else echo 'Semgrep: skipped by config'; fi)".to_string());
}
/// What: Add aur-sleuth audit commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends aur-sleuth audit commands to the vector.
///
/// Details:
/// - Searches for aur-sleuth in multiple locations.
/// - Loads proxy settings from Pacsea config if available.
/// - Respects `PACSEA_SCAN_DO_SLEUTH` environment variable.
/// - Note: Kept for backward compatibility; aur-sleuth now runs in separate terminal.
#[cfg(not(target_os = "windows"))]
#[allow(dead_code)] // Kept for backward compatibility
pub fn add_sleuth_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- aur-sleuth audit (optional) ---'".to_string());
cmds.push("echo -e '\\033[1;34m[🔎] aur-sleuth audit (optional)\\033[0m'".to_string());
cmds.push("echo -e '\\033[2mPlease wait... This may take a while...\\033[0m'".to_string());
cmds.push(
r#"(if [ "${PACSEA_SCAN_DO_SLEUTH:-1}" = "1" ]; then
# Find aur-sleuth binary in common locations
A_SLEUTH="$(command -v aur-sleuth 2>/dev/null || true)";
if [ -z "$A_SLEUTH" ] && [ -x "${HOME}/.local/bin/aur-sleuth" ]; then
A_SLEUTH="${HOME}/.local/bin/aur-sleuth";
fi;
if [ -z "$A_SLEUTH" ] && [ -x "/usr/local/bin/aur-sleuth" ]; then
A_SLEUTH="/usr/local/bin/aur-sleuth";
fi;
if [ -z "$A_SLEUTH" ] && [ -x "/usr/bin/aur-sleuth" ]; then
A_SLEUTH="/usr/bin/aur-sleuth";
fi;
if [ -n "$A_SLEUTH" ]; then
# Load proxy and certificate settings from config
cfg="${XDG_CONFIG_HOME:-$HOME/.config}/pacsea/settings.conf";
if [ -f "$cfg" ]; then
get_key() {
awk -F= -v k="$1" 'tolower($0) ~ "^[[:space:]]*"k"[[:space:]]*=" {
sub(/#.*/,"",$2);
gsub(/^[[:space:]]+|[[:space:]]+$/,"",$2);
print $2;
exit
}' "$cfg";
};
HP=$(get_key http_proxy); [ -n "$HP" ] && export http_proxy="$HP";
XP=$(get_key https_proxy); [ -n "$XP" ] && export https_proxy="$XP";
AP=$(get_key all_proxy); [ -n "$AP" ] && export ALL_PROXY="$AP";
NP=$(get_key no_proxy); [ -n "$NP" ] && export NO_PROXY="$NP";
CAB=$(get_key requests_ca_bundle); [ -n "$CAB" ] && export REQUESTS_CA_BUNDLE="$CAB";
SCF=$(get_key ssl_cert_file); [ -n "$SCF" ] && export SSL_CERT_FILE="$SCF";
CCB=$(get_key curl_ca_bundle); [ -n "$CCB" ] && export CURL_CA_BUNDLE="$CCB";
PIPIDX=$(get_key pip_index_url); [ -n "$PIPIDX" ] && export PIP_INDEX_URL="$PIPIDX";
PIPEX=$(get_key pip_extra_index_url); [ -n "$PIPEX" ] && export PIP_EXTRA_INDEX_URL="$PIPEX";
PIPTH=$(get_key pip_trusted_host); [ -n "$PIPTH" ] && export PIP_TRUSTED_HOST="$PIPTH";
UVCA=$(get_key uv_http_ca_certs); [ -n "$UVCA" ] && export UV_HTTP_CA_CERTS="$UVCA";
fi;
# Run aur-sleuth directly in a separate terminal
# Use script command to capture output while preserving TUI functionality
WORK_DIR=$(pwd);
SLEUTH_OUTPUT_FILE="./.pacsea_sleuth.txt";
# Use script -f (flush) -q (quiet) to capture output without breaking TUI
if command -v script >/dev/null 2>&1; then
SLEUTH_CMD="cd $(printf '%q' "$WORK_DIR") && script -f -q $(printf '%q' "$SLEUTH_OUTPUT_FILE") -c \"$(printf '%q' "$A_SLEUTH") --pkgdir .\"; echo ''; echo 'Press Enter to close this window...'; read -r _;";
else
# Fallback: run directly without output capture
SLEUTH_CMD="cd $(printf '%q' "$WORK_DIR") && $(printf '%q' "$A_SLEUTH") --pkgdir .; echo ''; echo 'Press Enter to close this window...'; read -r _;";
fi;
# Find an available terminal and spawn aur-sleuth in it
TERM_FOUND=false;
if command -v gnome-terminal >/dev/null 2>&1; then
gnome-terminal -- bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v alacritty >/dev/null 2>&1; then
alacritty -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v kitty >/dev/null 2>&1; then
kitty bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v xterm >/dev/null 2>&1; then
xterm -hold -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v konsole >/dev/null 2>&1; then
konsole -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v tilix >/dev/null 2>&1; then
tilix -e bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v mate-terminal >/dev/null 2>&1; then
mate-terminal -- bash -lc "$SLEUTH_CMD" 2>&1 && TERM_FOUND=true;
elif command -v xfce4-terminal >/dev/null 2>&1; then
SLEUTH_CMD_QUOTED=$(printf '%q' "$SLEUTH_CMD");
xfce4-terminal --command "bash -lc $SLEUTH_CMD_QUOTED" 2>&1 && TERM_FOUND=true;
fi;
if [ "$TERM_FOUND" = "true" ]; then
echo "aur-sleuth launched in separate terminal window.";
echo "The scan will continue in the background. You can close the terminal when done.";
# Don't wait - let the user control when to close the terminal
# The output file will be available when the scan completes
else
echo "No suitable terminal found. Running aur-sleuth in current terminal...";
# When running in current terminal, we can capture output
("$A_SLEUTH" --pkgdir . 2>&1 | tee ./.pacsea_sleuth.txt) || echo 'aur-sleuth failed; see output above';
fi;
else
echo 'aur-sleuth not found (checked PATH, ~/.local/bin, /usr/local/bin, /usr/bin)';
fi;
else
echo 'aur-sleuth: skipped by config';
fi)"#
.to_string(),
);
}
/// What: Add `ShellCheck` lint commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends `ShellCheck` lint commands to the vector.
///
/// Details:
/// - Analyzes `PKGBUILD` and `*.install` files.
/// - Respects `PACSEA_SCAN_DO_SHELLCHECK` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_shellcheck_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- ShellCheck lint (optional) ---'".to_string());
cmds.push("echo -e '\\033[1;34m[🧹] ShellCheck lint (optional)\\033[0m'".to_string());
cmds.push("(if [ \"${PACSEA_SCAN_DO_SHELLCHECK:-1}\" = \"1\" ]; then if command -v shellcheck >/dev/null 2>&1 || sudo pacman -Qi shellcheck >/dev/null 2>&1; then if [ -f PKGBUILD ]; then echo \"[shellcheck] Analyzing: PKGBUILD (bash, -e SC2034)\"; (shellcheck -s bash -x -e SC2034 -f json PKGBUILD > ./.pacsea_shellcheck_pkgbuild.json || shellcheck -s bash -x -e SC2034 PKGBUILD | tee ./.pacsea_shellcheck_pkgbuild.txt || true); fi; inst_files=(); while IFS= read -r -d '' f; do inst_files+=(\"$f\"); done < <(find . -maxdepth 1 -type f -name \"*.install\" -print0); if [ \"${#inst_files[@]}\" -gt 0 ]; then echo \"[shellcheck] Analyzing: ${inst_files[*]} (bash)\"; (shellcheck -s bash -x -f json \"${inst_files[@]}\" > ./.pacsea_shellcheck_install.json || shellcheck -s bash -x \"${inst_files[@]}\" | tee ./.pacsea_shellcheck_install.txt || true); fi; else echo 'ShellCheck not found; skipping'; fi; else echo 'ShellCheck: skipped by config'; fi)".to_string());
}
/// What: Add `ShellCheck` risk evaluation commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends risk evaluation commands to the vector.
///
/// Details:
/// - Calculates risk score based on `ShellCheck` errors/warnings and `PKGBUILD` heuristics.
/// - Respects `PACSEA_SCAN_DO_SHELLCHECK` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_shellcheck_risk_eval(cmds: &mut Vec<String>) {
cmds.push("(if [ \"${PACSEA_SCAN_DO_SHELLCHECK:-1}\" = \"1\" ]; then echo -e '\\033[1;33m[⚠️ ] Risk evaluation (PKGBUILD/.install)\\033[0m'; ({ sc_err=0; sc_warn=0; sc_info=0; sc_err=$((sc_err + $(cat ./.pacsea_shellcheck_pkgbuild.json ./.pacsea_shellcheck_install.json 2>/dev/null | grep -o '\"level\":\"error\"' | wc -l))); sc_warn=$((sc_warn + $(cat ./.pacsea_shellcheck_pkgbuild.json ./.pacsea_shellcheck_install.json 2>/dev/null | grep -o '\"level\":\"warning\"' | wc -l))); sc_info=$((sc_info + $(cat ./.pacsea_shellcheck_pkgbuild.json ./.pacsea_shellcheck_install.json 2>/dev/null | grep -o '\"level\":\"info\"' | wc -l))); sc_err=$((sc_err + $(cat ./.pacsea_shellcheck_pkgbuild.txt ./.pacsea_shellcheck_install.txt 2>/dev/null | grep -oi 'error:' | wc -l))); sc_warn=$((sc_warn + $(cat ./.pacsea_shellcheck_pkgbuild.txt ./.pacsea_shellcheck_install.txt 2>/dev/null | grep -oi 'warning:' | wc -l))); if [ -f PKGBUILD ]; then pkgrisk=$(grep -Eoi 'curl|wget|bash -c|sudo|chown|chmod|mktemp|systemctl|useradd|groupadd|nc\\s|socat|/tmp/' PKGBUILD | wc -l); else pkgrisk=0; fi; if ls ./*.install >/dev/null 2>&1; then inst_risk=$(grep -Eoi 'post_install|pre_install|post_upgrade|pre_upgrade|systemctl|useradd|groupadd|chown|chmod|sudo|service|adduser' ./*.install | wc -l); else inst_risk=0; fi; risk=$((sc_err*5 + sc_warn*2 + sc_info + pkgrisk*3 + inst_risk*4)); tier='LOW'; if [ \"$risk\" -ge 60 ]; then tier='CRITICAL'; elif [ \"$risk\" -ge 40 ]; then tier='HIGH'; elif [ \"$risk\" -ge 20 ]; then tier='MEDIUM'; fi; { echo \"SC_ERRORS=$sc_err\"; echo \"SC_WARNINGS=$sc_warn\"; echo \"SC_INFO=$sc_info\"; echo \"PKGBUILD_HEURISTICS=$pkgrisk\"; echo \"INSTALL_HEURISTICS=$inst_risk\"; echo \"RISK_SCORE=$risk\"; echo \"RISK_TIER=$tier\"; } > ./.pacsea_shellcheck_risk.txt; echo \"Risk score: $risk ($tier)\"; } || echo 'Risk evaluation encountered an error; skipping'); else echo 'Risk Evaluation: skipped (ShellCheck disabled)'; fi)".to_string());
}
/// What: Add custom suspicious patterns scan commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends custom pattern scan commands to the vector.
///
/// Details:
/// - Scans `PKGBUILD`, `*.install`, and shell files in `src/` for suspicious patterns.
/// - Calculates risk score based on pattern matches.
/// - Respects `PACSEA_SCAN_DO_CUSTOM` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_custom_pattern_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- Custom suspicious patterns scan (optional) ---'".to_string());
cmds.push(
"echo -e '\\033[1;34m[🕵️] Custom suspicious patterns scan (optional)\\033[0m'".to_string(),
);
cmds.push(r#"(if [ "${PACSEA_SCAN_DO_CUSTOM:-1}" = "1" ]; then
files='';
if [ -f PKGBUILD ]; then files='PKGBUILD'; fi;
for f in ./*.install; do [ -f "$f" ] && files="$files $f"; done;
# Include shell-like files under src/
if [ -d ./src ]; then
src_ext=$(find ./src -type f \( -name "*.sh" -o -name "*.bash" -o -name "*.zsh" -o -name "*.ksh" \) 2>/dev/null)
src_shebang=$(grep -Ilr '^#!.*\b(sh|bash|zsh|ksh)\b' ./src 2>/dev/null)
if [ -n "$src_ext$src_shebang" ]; then files="$files $src_ext $src_shebang"; fi;
fi;
if [ -z "$files" ]; then
echo 'No PKGBUILD, .install, or src shell files to scan';
else
: > ./.pacsea_custom_scan.txt;
# Critical indicators: reverse shells, destructive ops, sudoers/ld.so.preload tampering, FD sockets, authorized_keys backdoor, IMDS
crit="$PACSEA_PATTERNS_CRIT";
# High indicators: eval/obfuscation, download+execute, persistence, priv escalation, service control, data theft, log tamper
high="$PACSEA_PATTERNS_HIGH";
# Medium indicators: recon, network scan, sensitive search, proxies, generic downloads
med="$PACSEA_PATTERNS_MEDIUM";
# Low indicators: proxy vars, resource hints
low="$PACSEA_PATTERNS_LOW";
echo "[debug] Files to scan: $files"
echo "[debug] Pattern (CRIT): $crit"
echo "[debug] Pattern (HIGH): $high"
echo "[debug] Pattern (MED): $med"
echo "[debug] Pattern (LOW): $low"
echo "[debug] Running grep for CRIT..."
tmp=$(grep -Eo "$crit" $files 2>/dev/null); rc=$?; printf "%s\n" "$tmp" > ./.pacsea_custom_crit_hits.txt
Ccrit=$(printf "%s" "$tmp" | wc -l); echo "[debug] grep CRIT rc=$rc count=$Ccrit"
echo "[debug] Running grep for HIGH..."
tmp=$(grep -Eo "$high" $files 2>/dev/null); rc=$?; printf "%s\n" "$tmp" > ./.pacsea_custom_high_hits.txt
Chigh=$(printf "%s" "$tmp" | wc -l); echo "[debug] grep HIGH rc=$rc count=$Chigh"
echo "[debug] Running grep for MED..."
tmp=$(grep -Eo "$med" $files 2>/dev/null); rc=$?; printf "%s\n" "$tmp" > ./.pacsea_custom_med_hits.txt
Cmed=$(printf "%s" "$tmp" | wc -l); echo "[debug] grep MED rc=$rc count=$Cmed"
echo "[debug] Running grep for LOW..."
tmp=$(grep -Eo "$low" $files 2>/dev/null); rc=$?; printf "%s\n" "$tmp" > ./.pacsea_custom_low_hits.txt
Clow=$(printf "%s" "$tmp" | wc -l); echo "[debug] grep LOW rc=$rc count=$Clow"
score=$((Ccrit*10 + Chigh*5 + Cmed*2 + Clow));
if [ "$score" -gt 100 ]; then score=100; fi;
tier='LOW';
if [ "$score" -ge 75 ]; then tier='CRITICAL';
elif [ "$score" -ge 50 ]; then tier='HIGH';
elif [ "$score" -ge 25 ]; then tier='MEDIUM';
fi;
{
echo "CUSTOM_CRIT=$Ccrit";
echo "CUSTOM_HIGH=$Chigh";
echo "CUSTOM_MED=$Cmed";
echo "CUSTOM_LOW=$Clow";
echo "CUSTOM_PERCENT=$score";
echo "CUSTOM_TIER=$tier";
} > ./.pacsea_custom_score.txt;
echo "Custom suspicious patterns: crit=$Ccrit high=$Chigh med=$Cmed low=$Clow score=${score}% tier=$tier" | tee -a ./.pacsea_custom_scan.txt;
fi;
else
echo 'Custom scan: skipped by config';
fi)"#.to_string());
}
/// What: Add `VirusTotal` hash lookup commands to command vector.
///
/// Input:
/// - `cmds`: Mutable reference to command vector to append to.
///
/// Output:
/// - Appends `VirusTotal` lookup commands to the vector.
///
/// Details:
/// - Looks up `SHA256` hashes of `PKGBUILD` and `src` files in `VirusTotal`.
/// - Requires `VT_API_KEY` environment variable or config setting.
/// - Respects `PACSEA_SCAN_DO_VIRUSTOTAL` environment variable.
#[cfg(not(target_os = "windows"))]
pub fn add_virustotal_scan(cmds: &mut Vec<String>) {
cmds.push("echo '--- VirusTotal hash lookups (requires VT_API_KEY env var) ---'".to_string());
cmds.push(
"echo -e '\\033[1;34m[🔬] VirusTotal hash lookups (requires VT_API_KEY env var)\\033[0m'"
.to_string(),
);
cmds.push(
concat!(
"if [ \"${PACSEA_SCAN_DO_VIRUSTOTAL:-1}\" = \"1\" ]; then ",
"if [ -z \"${VT_API_KEY:-}\" ]; then ",
" cfg=\"${XDG_CONFIG_HOME:-$HOME/.config}/pacsea/settings.conf\"; ",
" if [ -f \"$cfg\" ]; then ",
" VT_API_KEY=\"$(awk -F= '/^[[:space:]]*virustotal_api_key[[:space:]]*=/{print $2}' \"$cfg\" | sed 's/#.*//' | xargs)\"; ",
" fi; ",
"fi; ",
"if [ -n \"${VT_API_KEY:-}\" ]; then ",
" files=$(find . -type f \\( -name 'PKGBUILD' -o -path './src/*' -o -name '*.patch' -o -name '*.diff' \\) 2>/dev/null); ",
" vt_total=0; vt_known=0; vt_unknown=0; vt_mal_sum=0; vt_sus_sum=0; vt_har_sum=0; vt_und_sum=0; ",
" : > ./.pacsea_scan_vt.txt; ",
" if [ -z \"$files\" ]; then ",
" echo 'No files to hash (PKGBUILD/src)'; ",
" else ",
" for f in $files; do ",
" if [ -f \"$f\" ]; then ",
" h=$(sha256sum \"$f\" | awk '{print $1}'); ",
" echo \"File: $f\" | tee -a ./.pacsea_scan_vt.txt; ",
" echo \"SHA256: $h\" | tee -a ./.pacsea_scan_vt.txt; ",
" vt_total=$((vt_total+1)); ",
" resp=$(curl -s -H \"x-apikey: $VT_API_KEY\" \"https://www.virustotal.com/api/v3/files/$h\"); ",
" if echo \"$resp\" | grep -q '\"error\"'; then ",
" echo 'VT: No report found' | tee -a ./.pacsea_scan_vt.txt; ",
" vt_unknown=$((vt_unknown+1)); ",
" else ",
" mal=$(echo \"$resp\" | grep -o '\"malicious\":[0-9]\\+' | head -n1 | cut -d: -f2); ",
" sus=$(echo \"$resp\" | grep -o '\"suspicious\":[0-9]\\+' | head -n1 | cut -d: -f2); ",
" har=$(echo \"$resp\" | grep -o '\"harmless\":[0-9]\\+' | head -n1 | cut -d: -f2); ",
" und=$(echo \"$resp\" | grep -o '\"undetected\":[0-9]\\+' | head -n1 | cut -d: -f2); ",
" echo \"VT: malicious=${mal:-0} suspicious=${sus:-0} harmless=${har:-0} undetected=${und:-0}\" | tee -a ./.pacsea_scan_vt.txt; ",
" echo \"VT report: https://www.virustotal.com/gui/file/$h\" | tee -a ./.pacsea_scan_vt.txt; ",
" vt_known=$((vt_known+1)); ",
" vt_mal_sum=$((vt_mal_sum+${mal:-0})); ",
" vt_sus_sum=$((vt_sus_sum+${sus:-0})); ",
" vt_har_sum=$((vt_har_sum+${har:-0})); ",
" vt_und_sum=$((vt_und_sum+${und:-0})); ",
" fi; ",
" echo | tee -a ./.pacsea_scan_vt.txt >/dev/null; ",
" fi; ",
" done; ",
" { ",
" echo \"VT_TOTAL=$vt_total\"; ",
" echo \"VT_KNOWN=$vt_known\"; ",
" echo \"VT_UNKNOWN=$vt_unknown\"; ",
" echo \"VT_MAL=$vt_mal_sum\"; ",
" echo \"VT_SUS=$vt_sus_sum\"; ",
" echo \"VT_HAR=$vt_har_sum\"; ",
" echo \"VT_UND=$vt_und_sum\"; ",
" } > ./.pacsea_scan_vt_summary.txt; ",
" fi; ",
"else ",
" echo 'VT_API_KEY not set; skipping VirusTotal lookups.'; ",
"fi; ",
"else ",
" echo 'VirusTotal: skipped by config.'; ",
"fi"
)
.to_string(),
);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/install/remove/tests.rs | src/install/remove/tests.rs | //! Unit tests for remove command building and execution.
#![cfg(test)]
use crate::state::modal::CascadeMode;
use crate::install::remove::spawn_remove_all;
#[test]
/// What: Verify remove command building logic.
///
/// Inputs:
/// - Package names, cascade mode, dry_run flag.
///
/// Output:
/// - Command structure is correct.
///
/// Details:
/// - Tests that remove commands are built correctly.
/// - Note: spawn_remove_all spawns terminal, so this test verifies the function can be called.
fn remove_command_building() {
let names = vec!["test-pkg1".to_string(), "test-pkg2".to_string()];
// Test that function can be called (it will spawn terminal in non-test mode)
// In test mode without PACSEA_TEST_OUT, it should be a no-op
spawn_remove_all(&names, true, CascadeMode::Basic);
spawn_remove_all(&names, true, CascadeMode::Cascade);
spawn_remove_all(&names, true, CascadeMode::CascadeWithConfigs);
}
#[test]
/// What: Verify remove command building with different cascade modes.
///
/// Inputs:
/// - Package names with different cascade modes.
///
/// Output:
/// - Commands are built correctly for each mode.
///
/// Details:
/// - Tests that cascade mode affects command building.
fn remove_cascade_modes() {
let names = vec!["test-pkg".to_string()];
// Test all cascade modes
spawn_remove_all(&names, true, CascadeMode::Basic);
spawn_remove_all(&names, true, CascadeMode::Cascade);
spawn_remove_all(&names, true, CascadeMode::CascadeWithConfigs);
}
#[test]
/// What: Verify remove command building with empty list.
///
/// Inputs:
/// - Empty package list.
///
/// Output:
/// - Function handles empty list gracefully.
///
/// Details:
/// - Tests edge case of empty package list.
fn remove_empty_list() {
let names = Vec::<String>::new();
spawn_remove_all(&names, true, CascadeMode::Basic);
}
#[test]
/// What: Verify remove command building with dry-run.
///
/// Inputs:
/// - Package names with dry_run=true.
///
/// Output:
/// - Dry-run commands are built correctly.
///
/// Details:
/// - Tests that dry-run mode produces appropriate commands.
fn remove_dry_run() {
let names = vec!["test-pkg".to_string()];
spawn_remove_all(&names, true, CascadeMode::Basic);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/i18n.rs | src/args/i18n.rs | //! Internationalization helpers for CLI commands.
use pacsea::i18n::translations::{TranslationMap, translate_with_fallback};
use pacsea::i18n::{self, find_locales_dir, load_locale_file, resolve_locale};
use std::collections::HashMap;
/// What: Load translations for CLI usage.
///
/// Inputs:
/// - None (uses system locale detection and fallback chain).
///
/// Output:
/// - Tuple of (primary translations, fallback translations).
///
/// Details:
/// - Resolves locale from system or settings.
/// - Loads primary locale and English fallback.
/// - Returns empty maps if loading fails (graceful degradation).
pub fn load_cli_translations() -> (TranslationMap, TranslationMap) {
    // Without a locales directory there is nothing to load; degrade to
    // untranslated keys by returning two empty maps.
    let Some(dir) = find_locales_dir() else {
        tracing::debug!("Locales directory not found, using English fallback");
        return (HashMap::new(), HashMap::new());
    };
    // No i18n.yml means no locale configuration: serve English in both slots.
    let Some(cfg_path) = i18n::find_config_file("i18n.yml") else {
        tracing::debug!("i18n.yml not found, using default locale");
        let english = load_locale_file("en-US", &dir).unwrap_or_default();
        return (english.clone(), english);
    };
    // Resolve the effective locale from the user's settings (with fallback).
    let locale = resolve_locale(&pacsea::theme::settings().locale, &cfg_path);
    // Load the primary locale; a load failure yields an empty map.
    let primary = load_locale_file(&locale, &dir).unwrap_or_default();
    // English always backs the fallback slot; reuse the primary map when it
    // already is English so the file is read only once.
    let fallback = if locale == "en-US" {
        primary.clone()
    } else {
        load_locale_file("en-US", &dir).unwrap_or_default()
    };
    (primary, fallback)
}
/// What: Get a translation for CLI usage.
///
/// Inputs:
/// - `key`: Dot-notation key (e.g., "app.cli.refresh.starting").
///
/// Output:
/// - Translated string, or key itself if translation not found.
///
/// Details:
/// - Uses lazy static to cache translations (loaded once).
/// - Falls back to English if primary locale missing.
/// - Returns key itself if both missing (for debugging).
pub fn t(key: &str) -> String {
    use std::sync::OnceLock;
    // Translations are loaded exactly once per process and cached; the
    // fallback map covers keys missing from the primary locale.
    static CACHE: OnceLock<(TranslationMap, TranslationMap)> = OnceLock::new();
    let (primary, fallback) = CACHE.get_or_init(load_cli_translations);
    translate_with_fallback(key, primary, fallback)
}
/// What: Get a translation with format arguments.
///
/// Inputs:
/// - `key`: Dot-notation key.
/// - `args`: Format arguments (as Display trait objects).
///
/// Output:
/// - Formatted translated string.
///
/// Details:
/// - Replaces placeholders in order: first {} gets first arg, etc.
/// - Supports multiple placeholders: "{} and {}" -> "arg1 and arg2".
pub fn t_fmt(key: &str, args: &[&dyn std::fmt::Display]) -> String {
    // Substitute each "{}" placeholder left-to-right with the next argument;
    // extra placeholders are left intact, extra arguments are ignored.
    args.iter().fold(t(key), |acc, arg| {
        acc.replacen("{}", &arg.to_string(), 1)
    })
}
/// What: Get a translation with a single format argument (convenience function).
///
/// Inputs:
/// - `key`: Dot-notation key.
/// - `arg`: Single format argument.
///
/// Output:
/// - Formatted translated string.
pub fn t_fmt1<T: std::fmt::Display>(key: &str, arg: T) -> String {
    // Delegates to t_fmt with a single-element argument slice.
    t_fmt(key, &[&arg])
}
/// What: Get a translation with two format arguments (convenience function).
///
/// Inputs:
/// - `key`: Dot-notation key.
/// - `arg1`: First format argument.
/// - `arg2`: Second format argument.
///
/// Output:
/// - Formatted translated string.
pub fn t_fmt2<T1: std::fmt::Display, T2: std::fmt::Display>(
    key: &str,
    arg1: T1,
    arg2: T2,
) -> String {
    // Delegates to t_fmt; placeholders are filled in argument order.
    t_fmt(key, &[&arg1, &arg2])
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/update.rs | src/args/update.rs | //! Command-line update functionality.
#[cfg(not(target_os = "windows"))]
use crate::args::i18n;
#[cfg(not(target_os = "windows"))]
use crate::args::utils;
#[cfg(not(target_os = "windows"))]
use pacsea::install::shell_single_quote;
#[cfg(not(target_os = "windows"))]
use pacsea::theme;
#[cfg(not(target_os = "windows"))]
use std::path::Path;
#[cfg(not(target_os = "windows"))]
use tracing::{debug, warn};
/// What: Format text with ANSI color codes if colors are enabled.
///
/// Inputs:
/// - `text`: The text to format.
/// - `color_code`: ANSI color code (e.g., "32" for green, "31" for red).
/// - `no_color`: If true, returns text without color codes.
///
/// Output:
/// - Colored text string if colors enabled, plain text otherwise.
///
/// Details:
/// - Uses ANSI escape sequences for terminal colors.
/// - Respects the `no_color` flag to disable coloring.
#[cfg(not(target_os = "windows"))]
fn colorize(text: &str, color_code: &str, no_color: bool) -> String {
if no_color {
text.to_string()
} else {
format!("\x1b[{color_code}m{text}\x1b[0m")
}
}
/// What: Format success messages in green.
///
/// Inputs:
/// - `text`: The text to format.
/// - `no_color`: If true, returns text without color codes.
///
/// Output:
/// - Green colored text string if colors enabled, plain text otherwise.
#[cfg(not(target_os = "windows"))]
fn success_color(text: &str, no_color: bool) -> String {
colorize(text, "32", no_color) // Green
}
/// What: Format error messages in red.
///
/// Inputs:
/// - `text`: The text to format.
/// - `no_color`: If true, returns text without color codes.
///
/// Output:
/// - Red colored text string if colors enabled, plain text otherwise.
#[cfg(not(target_os = "windows"))]
fn error_color(text: &str, no_color: bool) -> String {
colorize(text, "31", no_color) // Red
}
/// What: Format info messages in cyan.
///
/// Inputs:
/// - `text`: The text to format.
/// - `no_color`: If true, returns text without color codes.
///
/// Output:
/// - Cyan colored text string if colors enabled, plain text otherwise.
#[cfg(not(target_os = "windows"))]
fn info_color(text: &str, no_color: bool) -> String {
colorize(text, "36", no_color) // Cyan
}
/// What: Format warning messages in yellow.
///
/// Inputs:
/// - `text`: The text to format.
/// - `no_color`: If true, returns text without color codes.
///
/// Output:
/// - Yellow colored text string if colors enabled, plain text otherwise.
#[cfg(not(target_os = "windows"))]
fn warning_color(text: &str, no_color: bool) -> String {
colorize(text, "33", no_color) // Yellow
}
/// What: Format a file path as a clickable hyperlink in the terminal using OSC 8 escape sequences.
///
/// Inputs:
/// - `path`: The file path to make clickable.
///
/// Output:
/// - A string containing the path formatted as a clickable hyperlink.
///
/// Details:
/// - Uses OSC 8 escape sequences to create clickable links in modern terminals.
/// - Converts the path to an absolute file:// URL.
/// - Handles paths that may not exist yet by using absolute path resolution.
#[cfg(not(target_os = "windows"))]
fn format_clickable_path(path: &Path) -> String {
// Try to get absolute path - canonicalize if file exists, otherwise resolve relative to current dir
let absolute_path = if path.exists() {
path.canonicalize().unwrap_or_else(|_| {
std::env::current_dir()
.ok()
.and_then(|cwd| cwd.join(path).canonicalize().ok())
.unwrap_or_else(|| path.to_path_buf())
})
} else {
// File doesn't exist yet, try to resolve relative to current directory
if path.is_absolute() {
path.to_path_buf()
} else {
std::env::current_dir()
.ok()
.map_or_else(|| path.to_path_buf(), |cwd| cwd.join(path))
}
};
let path_str = absolute_path.to_string_lossy();
let file_url = format!("file://{path_str}");
format!("\x1b]8;;{file_url}\x1b\\{path_str}\x1b]8;;\x1b\\")
}
/// What: Extract failed package names from pacman error output.
///
/// Inputs:
/// - `output`: The pacman command output text to parse.
///
/// Output:
/// - Vector of failed package names.
///
/// Details:
/// - Parses various pacman error patterns including "target not found", transaction failures, etc.
/// - Handles both English and German error messages.
#[cfg(not(target_os = "windows"))]
#[allow(clippy::similar_names)]
fn extract_failed_packages_from_pacman(output: &str) -> Vec<String> {
let mut failed = Vec::new();
let lines: Vec<&str> = output.lines().collect();
let mut in_error_section = false;
let mut in_conflict_section = false;
// Get locale-specific error patterns from i18n
let target_not_found = i18n::t("app.cli.update.pacman_errors.target_not_found").to_lowercase();
let failed_to_commit = i18n::t("app.cli.update.pacman_errors.failed_to_commit").to_lowercase();
let failed_to_prepare =
i18n::t("app.cli.update.pacman_errors.failed_to_prepare").to_lowercase();
let error_prefix = i18n::t("app.cli.update.pacman_errors.error_prefix").to_lowercase();
let resolving = i18n::t("app.cli.update.pacman_errors.resolving").to_lowercase();
let looking_for = i18n::t("app.cli.update.pacman_errors.looking_for").to_lowercase();
let package_word = i18n::t("app.cli.update.pacman_errors.package").to_lowercase();
let packages_word = i18n::t("app.cli.update.pacman_errors.packages").to_lowercase();
let error_word = i18n::t("app.cli.update.pacman_errors.error").to_lowercase();
let failed_word = i18n::t("app.cli.update.pacman_errors.failed").to_lowercase();
let transaction_word = i18n::t("app.cli.update.pacman_errors.transaction").to_lowercase();
let conflicting_word = i18n::t("app.cli.update.pacman_errors.conflicting").to_lowercase();
let files_word = i18n::t("app.cli.update.pacman_errors.files").to_lowercase();
for line in &lines {
let trimmed = line.trim();
let lower = trimmed.to_lowercase();
// Pattern 1: "error: target not found: package-name"
if lower.contains(&target_not_found) {
// Extract package name after "not found:" or similar
if let Some(colon_pos) = trimmed.rfind(':') {
let after_colon = &trimmed[colon_pos + 1..].trim();
// Package name should be alphanumeric with dashes/underscores
if after_colon
.chars()
.all(|c| c.is_alphanumeric() || c == '-' || c == '_' || c == '/')
{
// Remove any trailing punctuation
let pkg = after_colon.trim_end_matches(|c: char| {
!c.is_alphanumeric() && c != '-' && c != '_' && c != '/'
});
if !pkg.is_empty() && pkg.len() > 1 {
failed.push(pkg.to_string());
}
}
}
in_error_section = true;
}
// Pattern 2: "error: failed to commit transaction" or similar
else if lower.contains(&failed_to_commit) || lower.contains(&failed_to_prepare) {
in_error_section = true;
in_conflict_section = true;
}
// Pattern 3: Look for package names in error context
else if in_error_section || in_conflict_section {
// Look for lines that might contain package names
// Skip common error message text
if !trimmed.is_empty()
&& !lower.starts_with(&format!("{error_prefix}:"))
&& !lower.contains(&resolving)
&& !lower.contains(&looking_for)
&& !lower.contains("::")
{
// Check if line looks like it contains package names
// Package names are typically: alphanumeric, dashes, underscores, slashes
let words: Vec<&str> = trimmed.split_whitespace().collect();
for word in words {
let clean_word = word.trim_matches(|c: char| {
!c.is_alphanumeric() && c != '-' && c != '_' && c != '/' && c != ':'
});
// Valid package name: 2+ chars, alphanumeric with dashes/underscores/slashes
if clean_word.len() >= 2
&& clean_word.chars().all(|c| {
c.is_alphanumeric() || c == '-' || c == '_' || c == '/' || c == ':'
})
&& clean_word.contains(|c: char| c.is_alphanumeric())
{
// Avoid common false positives using locale-specific words
if !clean_word.eq_ignore_ascii_case(&package_word)
&& !clean_word.eq_ignore_ascii_case(&packages_word)
&& !clean_word.eq_ignore_ascii_case(&error_word)
&& !clean_word.eq_ignore_ascii_case(&failed_word)
&& !clean_word.eq_ignore_ascii_case(&transaction_word)
&& !clean_word.eq_ignore_ascii_case(&conflicting_word)
&& !clean_word.eq_ignore_ascii_case(&files_word)
{
failed.push(clean_word.to_string());
}
}
}
}
// Reset error section on empty lines or new error messages
if trimmed.is_empty() || lower.starts_with(&format!("{error_prefix}:")) {
in_error_section = false;
in_conflict_section = false;
}
}
// Pattern 4: Look for package names after "::" separator (pacman format: repo::package)
else if trimmed.contains("::") {
let parts: Vec<&str> = trimmed.split("::").collect();
if parts.len() == 2 {
let pkg_part = parts[1].split_whitespace().next().unwrap_or("");
if pkg_part
.chars()
.all(|c| c.is_alphanumeric() || c == '-' || c == '_')
&& pkg_part.len() >= 2
{
failed.push(pkg_part.to_string());
}
}
}
}
failed
}
/// What: Extract failed package names from command output.
///
/// Inputs:
/// - `output`: The command output text to parse.
/// - `helper`: The AUR helper name (yay/paru) or "pacman" for official packages.
///
/// Output:
/// - Vector of failed package names.
///
/// Details:
/// - Parses yay/paru output for lines like "package - exit status X".
/// - Uses locale-independent pattern matching (exit status pattern is universal).
/// - Does not rely on locale-specific error messages.
#[cfg(not(target_os = "windows"))]
fn extract_failed_packages(output: &str, helper: &str) -> Vec<String> {
let mut failed = if helper == "pacman" {
extract_failed_packages_from_pacman(output)
} else {
// For yay/paru, primarily rely on the universal " - exit status" pattern
// This pattern appears to be locale-independent
let mut failed_aur = Vec::new();
let lines: Vec<&str> = output.lines().collect();
// Look for lines with "exit status" pattern - this is the most reliable indicator
// Format: "package - exit status X" (works across locales)
for line in &lines {
if line.contains(" - exit status")
&& let Some(pkg) = line.split(" - exit status").next()
{
let pkg = pkg.trim();
// Remove common prefixes like "->" that yay/paru use
let pkg = pkg.strip_prefix("->").unwrap_or(pkg).trim();
if !pkg.is_empty() {
failed_aur.push(pkg.to_string());
}
}
}
// If we didn't find any via exit status pattern, try to find packages
// in a section that follows common structural markers
if failed_aur.is_empty() {
// Look for sections that typically contain failed packages
// These sections usually have markers like "->" followed by package lists
let mut in_package_list = false;
for line in &lines {
let trimmed = line.trim();
// Detect start of package list section (common markers)
// Look for lines with "->" that might indicate a list section
if trimmed.starts_with("->") && trimmed.len() > 2 {
// Check if the rest looks like it might be a header/description
// If it contains common words, it's probably a header, not a package
let after_arrow = &trimmed[2..].trim();
if after_arrow.chars().all(|c| !c.is_whitespace() && c != ':') {
// Might be a package name
if after_arrow
.chars()
.all(|c| c.is_alphanumeric() || c == '-' || c == '_')
{
failed_aur.push((*after_arrow).to_string());
in_package_list = true;
}
} else {
in_package_list = true;
}
} else if in_package_list {
// In package list, look for package-like strings
if !trimmed.is_empty()
&& !trimmed.starts_with("==>")
&& !trimmed.contains("exit status")
&& trimmed
.chars()
.all(|c| c.is_alphanumeric() || c == '-' || c == '_')
{
failed_aur.push(trimmed.to_string());
} else if trimmed.is_empty() || trimmed.starts_with("==>") {
// Empty line or new section marker ends the list
in_package_list = false;
}
}
}
}
failed_aur
};
// Deduplicate and return
failed.sort();
failed.dedup();
// Additional cleanup: remove very short strings and common false positives
failed.retain(|pkg| {
pkg.len() >= 2
&& !pkg.eq_ignore_ascii_case("package")
&& !pkg.eq_ignore_ascii_case("packages")
&& !pkg.eq_ignore_ascii_case("error")
&& !pkg.eq_ignore_ascii_case("failed")
});
failed
}
/// What: Execute a command with output both displayed in real-time and logged to file using tee.
///
/// Inputs:
/// - `program`: The program to execute.
/// - `args`: Command arguments.
/// - `log_file_path`: Path to the log file where output should be written.
/// - `password`: Optional sudo password; when provided, uses `sudo -S` with password piping.
///
/// Output:
/// - `Ok((status, output))` if command executed, `Err(e)` if execution failed.
///
/// Details:
/// - Uses a shell wrapper with `tee` to duplicate output to both terminal and log file.
/// - Preserves real-time output display while logging everything.
/// - Returns the command output for parsing failed packages.
/// - Output is parsed using locale-aware i18n patterns.
/// - Sets `LC_ALL=C` and `LANG=C` for consistent English output.
/// - Handles TTY detection and falls back to stdout if no TTY available.
/// - Uses `set -o pipefail` for reliable exit status capture.
/// - Configures stdin/stdout/stderr explicitly to prevent interactive prompts.
#[cfg(not(target_os = "windows"))]
fn run_command_with_logging(
program: &str,
args: &[&str],
log_file_path: &Path,
password: Option<&str>,
) -> Result<(std::process::ExitStatus, String), std::io::Error> {
use std::io::IsTerminal;
use std::process::{Command, Stdio};
let log_file_str = log_file_path.to_string_lossy();
let args_str = args
.iter()
.map(|a| shell_single_quote(a))
.collect::<Vec<_>>()
.join(" ");
// Check if stdout is a TTY for /dev/tty redirection
let has_tty = std::io::stdout().is_terminal();
let tty_redirect = if has_tty {
"> /dev/tty"
} else {
"> /dev/stdout"
};
// Use bash -c with tee to both display and log output
// Redirect both stdout and stderr through tee
// Use set -o pipefail for reliable exit status capture
// Also capture output to a temp file so we can read it back
let temp_output =
std::env::temp_dir().join(format!("pacsea_update_output_{}.txt", std::process::id()));
let temp_output_str = temp_output.to_string_lossy();
// Build the command with optional password piping for sudo
// When program is "sudo", we need to handle it specially:
// - With password: echo 'password' | sudo -S pacman args...
// - Without password: sudo pacman args...
// When program is not "sudo" (e.g., paru/yay), use it directly
let full_command = if program == "sudo" {
password.map_or_else(
|| format!("sudo {args_str}"),
|pass| {
// Use shell_single_quote for consistent password escaping
let escaped = shell_single_quote(pass);
// args[0] is the actual command (e.g., "pacman"), args[1..] are its arguments
args.first().map_or_else(
|| format!("echo {escaped} | sudo -S {args_str}"),
|cmd| {
// Escape cmd for shell safety, even though currently only trusted values are passed
let cmd_escaped = shell_single_quote(cmd);
let cmd_args = &args[1..];
let cmd_args_str = cmd_args
.iter()
.map(|a| shell_single_quote(a))
.collect::<Vec<_>>()
.join(" ");
if cmd_args_str.is_empty() {
format!("echo {escaped} | sudo -S {cmd_escaped}")
} else {
format!("echo {escaped} | sudo -S {cmd_escaped} {cmd_args_str}")
}
},
)
},
)
} else {
// Non-sudo command (e.g., paru, yay), use directly
// Escape program for shell safety, even though currently only trusted values are passed
let program_escaped = shell_single_quote(program);
format!("{program_escaped} {args_str}")
};
// Use tee twice: first logs to file, second captures to tempfile and displays
// set -o pipefail ensures exit status reflects command failure, not tee
// Use stdbuf -oL -eL to force line buffering so progress output appears immediately
// command 2>&1 | tee -a logfile | tee tempfile > /dev/tty
// This way: output is displayed once, logged to file, and captured to tempfile
let log_file_escaped = shell_single_quote(&log_file_str);
let temp_output_escaped = shell_single_quote(&temp_output_str);
let shell_cmd = format!(
"set -o pipefail; stdbuf -oL -eL {full_command} 2>&1 | tee -a {log_file_escaped} | tee {temp_output_escaped} {tty_redirect}"
);
let shell_cmd_log = if program == "sudo" && password.is_some() {
// Avoid logging the inlined password; show the sudo command shape instead.
format!(
"set -o pipefail; stdbuf -oL -eL sudo {args_str} 2>&1 | tee -a {log_file_escaped} | tee {temp_output_escaped} {tty_redirect}"
)
} else {
shell_cmd.clone()
};
debug!(
program,
args = ?args,
uses_password = password.is_some(),
has_tty,
log_file = %log_file_path.display(),
temp_output = %temp_output.display(),
shell_cmd = %shell_cmd_log,
"executing update command with logging"
);
let status = Command::new("bash")
.arg("-c")
.arg(&shell_cmd)
.env("LC_ALL", "C")
.env("LANG", "C")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.status()
.map_err(|err| {
warn!(
program,
args = ?args,
log_file = %log_file_path.display(),
error = %err,
"failed to spawn update command"
);
err
})?;
// Read the captured output
let output = match std::fs::read_to_string(&temp_output) {
Ok(content) => content,
Err(err) => {
warn!(
path = %temp_output.display(),
error = %err,
"failed to read update temp output"
);
String::new()
}
};
// Clean up temp file
if let Err(err) = std::fs::remove_file(&temp_output) {
debug!(
path = %temp_output.display(),
error = %err,
"failed to remove temp output file"
);
}
debug!(
program,
args = ?args,
status = ?status,
status_code = status.code(),
output_len = output.len(),
log_file = %log_file_path.display(),
"update command finished"
);
Ok((status, output))
}
/// What: State tracking structure for system update operations.
///
/// Details:
/// - Tracks success/failure status of pacman and AUR updates.
/// - Maintains lists of failed commands and packages.
/// - Used to aggregate results across multiple update operations.
#[cfg(not(target_os = "windows"))]
struct UpdateState {
/// Overall success status (true if all operations succeeded).
all_succeeded: bool,
/// List of failed command names.
failed_commands: Vec<String>,
/// List of failed package names.
failed_packages: Vec<String>,
/// Pacman update success status (None = not run, Some(true) = success, Some(false) = failed).
pacman_succeeded: Option<bool>,
/// AUR update success status (None = not run, Some(true) = success, Some(false) = failed).
aur_succeeded: Option<bool>,
/// Name of the AUR helper used (paru/yay).
aur_helper_name: Option<String>,
}
#[cfg(not(target_os = "windows"))]
impl UpdateState {
/// What: Create a new `UpdateState` with default values.
///
/// Inputs:
/// - None (no parameters required).
///
/// Output:
/// - A new `UpdateState` instance with all fields initialized.
///
/// Details:
/// - Initializes `all_succeeded` to `true`.
/// - Initializes all collections (`failed_commands`, `failed_packages`) as empty vectors.
/// - Sets all optional status fields (`pacman_succeeded`, `aur_succeeded`, `aur_helper_name`) to `None`.
const fn new() -> Self {
Self {
all_succeeded: true,
failed_commands: Vec::new(),
failed_packages: Vec::new(),
pacman_succeeded: None,
aur_succeeded: None,
aur_helper_name: None,
}
}
}
/// What: Prompt user for sudo password and validate it is not empty.
///
/// Inputs:
/// - `write_log`: Function to write log messages.
///
/// Output:
/// - `Some(password)` if password is valid and non-empty, `None` if passwordless sudo works.
/// - Exits the process with code 1 if password is empty or cannot be read.
///
/// Details:
/// - Prompts user for password using `rpassword::prompt_password`.
/// - Validates that password is not empty (after trimming whitespace).
/// - Empty passwords are rejected early to prevent sudo failures.
#[cfg(not(target_os = "windows"))]
fn prompt_and_validate_password(write_log: &(dyn Fn(&str) + Send + Sync)) -> Option<String> {
use std::io::IsTerminal;
use std::process::Command;
// Check if passwordless sudo is available
if Command::new("sudo")
.args(["-n", "true"])
.stdin(std::process::Stdio::null())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.is_ok_and(|s| s.success())
{
// Passwordless sudo works, no password needed
write_log("Passwordless sudo detected, skipping password prompt");
return None;
}
// Password required, but check if stdin is available for interactive input
if !std::io::stdin().is_terminal() {
// Not in an interactive terminal (e.g., in tests or non-interactive environment)
let error_msg =
"Password required but stdin is not a terminal. Cannot prompt for password.";
eprintln!("{}", i18n::t_fmt1("app.cli.update.error_prefix", error_msg));
write_log("FAILED: Password required but stdin is not a terminal");
tracing::error!("Password required but stdin is not a terminal");
std::process::exit(1);
}
// Password required, prompt user
// Get username to mimic sudo's password prompt format
let username = std::env::var("USER").unwrap_or_else(|_| "user".to_string());
let password_prompt = i18n::t_fmt1("app.cli.update.password_prompt", &username);
match rpassword::prompt_password(&password_prompt) {
Ok(pass) => {
// Validate that password is not empty
// Empty passwords will cause sudo to fail, so reject them early
let trimmed_pass = pass.trim();
if trimmed_pass.is_empty() {
let error_msg = "Empty password provided. Password cannot be empty.";
eprintln!("{}", i18n::t_fmt1("app.cli.update.error_prefix", error_msg));
write_log("FAILED: Empty password provided");
tracing::error!("Empty password provided");
std::process::exit(1);
}
write_log("Password obtained from user (not logged)");
// Return trimmed password to ensure consistency with validation
Some(trimmed_pass.to_string())
}
Err(e) => {
eprintln!("{}", i18n::t_fmt1("app.cli.update.error_prefix", &e));
write_log(&format!("FAILED: Could not read password: {e}"));
tracing::error!("Failed to read sudo password: {e}");
std::process::exit(1);
}
}
}
/// What: Create and setup the update log file, returning a `write_log` closure.
///
/// Inputs:
/// - `log_file_path`: Path to the log file.
///
/// Output:
/// - A closure that writes timestamped messages to the log file.
///
/// Details:
/// - Ensures the log file's parent directory exists.
/// - Creates a closure that appends timestamped messages to the log file.
#[cfg(not(target_os = "windows"))]
fn setup_log_file(log_file_path: &std::path::Path) -> Box<dyn Fn(&str) + Send + Sync> {
use std::fs::OpenOptions;
use std::io::Write;
use std::time::{SystemTime, UNIX_EPOCH};
// Ensure log file exists and is writable
if let Some(parent) = log_file_path.parent() {
let _ = std::fs::create_dir_all(parent);
}
// Clone the path for the closure
let log_path = log_file_path.to_path_buf();
// Return closure that writes to log file
Box::new(move |message: &str| {
if let Ok(mut file) = OpenOptions::new().create(true).append(true).open(&log_path) {
let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).map_or_else(
|_| "unknown".to_string(),
|d| pacsea::util::ts_to_date(Some(i64::try_from(d.as_secs()).unwrap_or(0))),
);
let _ = writeln!(file, "[{timestamp}] {message}");
}
})
}
/// What: Execute pacman system update and update the state accordingly.
///
/// Inputs:
/// - `state`: Mutable reference to `UpdateState` to update.
/// - `log_file_path`: Path to the log file.
/// - `password`: Optional sudo password.
/// - `no_color`: If true, disables colored output.
/// - `write_log`: Function to write log messages.
///
/// Output:
/// - None (modifies state in place).
///
/// Details:
/// - Runs `sudo pacman -Syu --noconfirm` to update official packages.
/// - Updates state with success/failure status and failed packages.
/// - Extracts failed package names from command output on failure.
/// - Logs all operations and status messages to the log file.
#[cfg(not(target_os = "windows"))]
fn run_pacman_update(
state: &mut UpdateState,
log_file_path: &Path,
password: Option<&str>,
no_color: bool,
write_log: &(dyn Fn(&str) + Send + Sync),
) {
println!(
"{}",
info_color(&i18n::t("app.cli.update.starting"), no_color)
);
write_log("Starting system update: pacman -Syu --noconfirm");
let pacman_result = run_command_with_logging(
"sudo",
&["pacman", "-Syu", "--noconfirm"],
log_file_path,
password,
);
match pacman_result {
Ok((status, output)) => {
if status.success() {
println!(
"{}",
success_color(&i18n::t("app.cli.update.pacman_success"), no_color)
);
write_log("SUCCESS: pacman -Syu --noconfirm completed successfully");
state.pacman_succeeded = Some(true);
} else {
println!(
"{}",
error_color(&i18n::t("app.cli.update.pacman_failed"), no_color)
);
write_log(&format!(
"FAILED: pacman -Syu --noconfirm failed with exit code {:?}",
status.code()
));
let packages = extract_failed_packages(&output, "pacman");
state.failed_packages.extend(packages);
state.all_succeeded = false;
state.failed_commands.push("pacman -Syu".to_string());
state.pacman_succeeded = Some(false);
}
}
Err(e) => {
println!(
"{}",
error_color(&i18n::t("app.cli.update.pacman_exec_failed"), no_color)
);
eprintln!(
"{}",
error_color(&i18n::t_fmt1("app.cli.update.error_prefix", &e), no_color)
);
write_log(&format!(
"FAILED: Could not execute pacman -Syu --noconfirm: {e}"
));
state.all_succeeded = false;
state
.failed_commands
.push("pacman -Syu --noconfirm".to_string());
state.pacman_succeeded = Some(false);
}
}
}
/// What: Refresh sudo timestamp to allow AUR helper to use sudo without password prompt.
///
/// Inputs:
/// - `password`: Optional sudo password to use for refresh.
/// - `write_log`: Function to write log messages.
///
/// Output:
/// - None (no return value).
///
/// Details:
/// - Runs `sudo -S -v` with the password to refresh the sudo timestamp.
/// - This prevents a second password prompt when the AUR helper calls sudo internally.
/// - Only executes if a password is provided; silently does nothing if password is `None`.
/// - Logs the refresh operation to the log file.
#[cfg(not(target_os = "windows"))]
fn refresh_sudo_timestamp(password: Option<&str>, write_log: &(dyn Fn(&str) + Send + Sync)) {
use std::process::Command;
if let Some(pass) = password {
let escaped = shell_single_quote(pass);
let refresh_cmd = format!("echo {escaped} | sudo -S -v");
let _ = Command::new("bash")
.arg("-c")
.arg(&refresh_cmd)
.stdin(std::process::Stdio::null())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status();
write_log("Refreshed sudo timestamp for AUR helper");
}
}
/// What: Execute AUR helper system update and update the state accordingly.
///
/// Inputs:
/// - `state`: Mutable reference to `UpdateState` to update.
/// - `log_file_path`: Path to the log file.
/// - `no_color`: If true, disables colored output.
/// - `write_log`: Function to write log messages.
///
/// Output:
/// - None (modifies state in place).
///
/// Details:
/// - Detects available AUR helper (paru/yay, prefers paru).
/// - Runs `{helper} -Sua --noconfirm` to update only AUR packages (official packages already updated by pacman).
/// - Updates state with success/failure status and failed packages.
/// - If no AUR helper is available, logs a warning and skips the update.
/// - Extracts failed package names from command output on failure.
#[cfg(not(target_os = "windows"))]
fn run_aur_update(
state: &mut UpdateState,
log_file_path: &Path,
no_color: bool,
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | true |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/list.rs | src/args/list.rs | //! Command-line list installed packages functionality.
use crate::args::i18n;
/// What: Handle list installed packages flag by querying pacman and displaying results.
///
/// Inputs:
/// - `exp`: If true, list explicitly installed packages.
/// - `imp`: If true, list implicitly installed packages.
/// - `all`: If true, list all installed packages.
///
/// Output:
/// - Exits the process after displaying the package list.
///
/// Details:
/// - Uses `pacman -Qq` to get all installed packages.
/// - Uses `pacman -Qetq` to get explicitly installed packages.
/// - Calculates implicitly installed as all minus explicit.
/// - Defaults to `--exp` (explicitly installed) if no option is specified.
/// - Prints packages one per line to stdout.
/// - Exits immediately after listing (doesn't launch TUI).
pub fn handle_list(exp: bool, imp: bool, all: bool) -> ! {
use std::process::{Command, Stdio};
// Default to --exp if no option is specified
let exp = if !exp && !imp && !all {
tracing::info!("No list option specified, defaulting to --exp");
true
} else {
exp
};
tracing::info!(
exp = exp,
imp = imp,
all = all,
"List installed packages requested from CLI"
);
// Get all installed packages
let all_packages = match Command::new("pacman")
.args(["-Qq"])
.stdin(Stdio::null())
.output()
{
Ok(output) => {
if !output.status.success() {
eprintln!("{}", i18n::t("app.cli.list.query_failed"));
tracing::error!("pacman -Qq failed");
std::process::exit(1);
}
let packages: std::collections::HashSet<String> =
String::from_utf8_lossy(&output.stdout)
.lines()
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect();
packages
}
Err(e) => {
eprintln!("{}", i18n::t_fmt1("app.cli.list.pacman_exec_failed", &e));
tracing::error!(error = %e, "Failed to execute pacman");
std::process::exit(1);
}
};
// Get explicitly installed packages
let explicit_packages = match Command::new("pacman")
.args(["-Qetq"])
.stdin(Stdio::null())
.output()
{
Ok(output) => {
if !output.status.success() {
eprintln!("{}", i18n::t("app.cli.list.query_explicit_failed"));
tracing::error!("pacman -Qetq failed");
std::process::exit(1);
}
let packages: std::collections::HashSet<String> =
String::from_utf8_lossy(&output.stdout)
.lines()
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect();
packages
}
Err(e) => {
eprintln!("{}", i18n::t_fmt1("app.cli.list.pacman_exec_failed", &e));
tracing::error!(error = %e, "Failed to execute pacman");
std::process::exit(1);
}
};
// Calculate implicitly installed packages (all - explicit)
let implicit_packages: std::collections::HashSet<String> = all_packages
.difference(&explicit_packages)
.cloned()
.collect();
// Collect and sort packages based on requested type
let mut packages_to_list = Vec::new();
if all {
packages_to_list.extend(all_packages.iter().cloned());
}
if exp {
packages_to_list.extend(explicit_packages.iter().cloned());
}
if imp {
packages_to_list.extend(implicit_packages.iter().cloned());
}
// Remove duplicates and sort
let mut unique_packages: std::collections::HashSet<String> =
packages_to_list.into_iter().collect();
let mut sorted_packages: Vec<String> = unique_packages.drain().collect();
sorted_packages.sort();
let count = sorted_packages.len();
// Print packages one per line
for pkg in sorted_packages {
println!("{pkg}");
}
tracing::info!(count = count, "Listed installed packages");
std::process::exit(0);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/args.rs | src/args/args.rs | //! Command-line argument parsing and handling.
use clap::Parser;
/// Pacsea - A fast, friendly TUI for browsing and installing Arch and AUR packages
#[derive(Parser, Debug)]
#[command(name = "pacsea")]
#[command(version)]
#[command(about = "A fast, friendly TUI for browsing and installing Arch and AUR packages", long_about = None)]
pub struct Args {
    // NOTE: the `///` doc comments below double as clap's --help text; any
    // wording change here changes the CLI help output.
    // NOTE(review): this struct appears to duplicate `definition::Args`
    // (same fields plus `refresh`), and `mod.rs` does not declare an `args`
    // submodule — confirm whether this file is dead code.
    /// Perform a dry run without making actual changes
    #[arg(long)]
    pub dry_run: bool,
    /// Set the logging level (trace, debug, info, warn, error)
    #[arg(long, default_value = "info")]
    pub log_level: String,
    /// Enable verbose output (equivalent to --log-level debug)
    #[arg(short, long)]
    pub verbose: bool,
    /// Disable colored output
    #[arg(long)]
    pub no_color: bool,
    /// [Not yet implemented] Specify the configuration directory (default: ~/.config/pacsea)
    #[arg(long)]
    pub config_dir: Option<String>,
    /// Search for packages from command line
    #[arg(short, long)]
    pub search: Option<String>,
    /// Install packages from command line (comma-separated or space-separated)
    #[arg(short, long, num_args = 1..)]
    pub install: Vec<String>,
    /// Install packages from file (e.g., pacsea -I FILENAME.txt)
    #[arg(short = 'I')]
    pub install_from_file: Option<String>,
    /// Remove packages from command line (e.g., pacsea -r PACKAGE1 PACKAGE2 or pacsea --remove PACKAGE)
    #[arg(short = 'r', long, num_args = 1..)]
    pub remove: Vec<String>,
    /// [Not yet implemented] Remove packages from file (e.g., pacsea -R FILENAME.txt)
    #[arg(short = 'R')]
    pub remove_from_file: Option<String>,
    /// System update (sync + update, e.g., pacsea --update)
    #[arg(short = 'u', long)]
    pub update: bool,
    /// Output news dialog to commandline with link to website at the end
    #[arg(short = 'n', long)]
    pub news: bool,
    /// List unread news (use with --news)
    #[arg(long)]
    pub unread: bool,
    /// List read news (use with --news)
    #[arg(long)]
    pub read: bool,
    /// List all news (read and unread) (use with --news)
    #[arg(long, short = 'a')]
    pub all_news: bool,
    /// [Not yet implemented] Update package database before starting
    #[arg(short = 'y', long)]
    pub refresh: bool,
    /// Clear all cache files (dependencies, files, services, sandbox) and exit
    #[arg(long)]
    pub clear_cache: bool,
    /// List installed packages (use with --exp, --imp, or --all)
    #[arg(short = 'l', long)]
    pub list: bool,
    /// List explicitly installed packages (use with --list)
    #[arg(long)]
    pub exp: bool,
    /// List implicitly installed packages (use with --list)
    #[arg(long)]
    pub imp: bool,
    /// List all installed packages (use with --list)
    #[arg(long)]
    pub all: bool,
}
/// What: Process all command-line arguments and handle early-exit flags.
///
/// Inputs:
/// - `args`: Parsed command-line arguments.
///
/// Output:
/// - Returns `true` if the program should continue to TUI, `false` if it should exit early.
///
/// Details:
/// - Handles search, clear-cache, list, install (inline and from file), remove,
///   update, and news modes; each of those handlers exits the process itself
///   and never returns here.
/// - Remove-from-file (-R) and --refresh are not yet implemented; they only
///   log a warning and fall through to the TUI.
/// - Returns `true` if no early-exit flags were triggered.
pub fn process_args(args: &Args) -> bool {
    use crate::args::{cache, install, list, news, remove, search, update};
    // Handle command-line search mode
    if let Some(search_query) = &args.search {
        search::handle_search(search_query);
    }
    // Handle clear cache flag
    if args.clear_cache {
        cache::handle_clear_cache();
    }
    // Handle list installed packages flag
    if args.list {
        list::handle_list(args.exp, args.imp, args.all);
    }
    // Handle command-line install mode
    if !args.install.is_empty() {
        install::handle_install(&args.install);
    }
    // Handle install from file (-I)
    if let Some(file_path) = &args.install_from_file {
        install::handle_install_from_file(file_path);
    }
    // Handle remove packages from command line (-r / --remove)
    if !args.remove.is_empty() {
        remove::handle_remove(&args.remove);
    }
    // Handle remove packages from file (-R)
    if let Some(file_path) = &args.remove_from_file {
        tracing::info!(file = %file_path, "Remove from file requested from CLI");
        // TODO: Implement remove from file (mentioned in roadmap)
        tracing::warn!("Remove from file not yet implemented, falling back to TUI");
    }
    // Handle system update (--update / -u)
    #[cfg(not(target_os = "windows"))]
    if args.update {
        update::handle_update(args.no_color);
    }
    // Update relies on pacman/AUR helpers, which do not exist on Windows.
    #[cfg(target_os = "windows")]
    if args.update {
        eprintln!("System update is not supported on Windows");
        std::process::exit(1);
    }
    // Handle refresh flag
    if args.refresh {
        tracing::info!("Refresh mode requested from CLI");
        // TODO: Implement package database refresh
        tracing::warn!("Refresh mode not yet implemented");
    }
    // Handle news flag
    if args.news {
        news::handle_news(args.unread, args.read, args.all_news);
    }
    true
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/search.rs | src/args/search.rs | //! Command-line search functionality.
use crate::args::i18n;
/// What: Handle command-line search mode by executing yay/paru -Ss with the search pattern.
///
/// Inputs:
/// - `search_query`: The search pattern to use.
///
/// Output:
/// - Exits the process with the command's exit code or 1 on error.
///
/// Details:
/// - Checks for paru first, then falls back to yay.
/// - Executes the search command and outputs results to terminal.
/// - Exits immediately after showing results (doesn't launch TUI).
pub fn handle_search(search_query: &str) -> ! {
use std::process::{Command, Stdio};
tracing::info!(query = %search_query, "Search mode requested from CLI");
// Check for paru first, then yay
let has_paru = Command::new("paru")
.args(["--version"])
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.output()
.is_ok();
let has_yay = if has_paru {
false
} else {
Command::new("yay")
.args(["--version"])
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.output()
.is_ok()
};
if has_paru {
tracing::info!("Using paru for search");
let status = Command::new("paru").args(["-Ss", search_query]).status();
match status {
Ok(exit_status) => {
std::process::exit(exit_status.code().unwrap_or(1));
}
Err(e) => {
eprintln!("{}", i18n::t_fmt1("app.cli.search.paru_exec_failed", &e));
tracing::error!(error = %e, "Failed to execute paru");
std::process::exit(1);
}
}
} else if has_yay {
tracing::info!("Using yay for search");
let status = Command::new("yay").args(["-Ss", search_query]).status();
match status {
Ok(exit_status) => {
std::process::exit(exit_status.code().unwrap_or(1));
}
Err(e) => {
eprintln!("{}", i18n::t_fmt1("app.cli.search.yay_exec_failed", &e));
tracing::error!(error = %e, "Failed to execute yay");
std::process::exit(1);
}
}
} else {
eprintln!("{}", i18n::t("app.cli.search.neither_helper_available"));
tracing::error!("Neither paru nor yay is available for search");
std::process::exit(1);
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/install.rs | src/args/install.rs | //! Command-line install functionality.
use crate::args::{i18n, package, utils};
/// What: Read and parse package names from a file.
///
/// Inputs:
/// - `file_path`: Path to the file containing package names.
///
/// Output:
/// - Vector of package names, or exits on error.
///
/// Details:
/// - Reads file line by line.
/// - Ignores empty lines.
/// - Ignores lines starting with "#" and ignores text after "#" in any line.
/// - Trims whitespace from package names.
/// Reads package names from `file_path`, one per line; strips `#` comments,
/// skips blank lines, warns about (and skips) lines containing spaces, and
/// exits the process if the file cannot be opened.
fn read_packages_from_file(file_path: &str) -> Vec<String> {
    use std::fs::File;
    use std::io::{BufRead, BufReader};
    let file = match File::open(file_path) {
        Ok(f) => f,
        Err(e) => {
            eprintln!(
                "{}",
                i18n::t_fmt(
                    "app.cli.install.file_open_error",
                    &[&file_path as &dyn std::fmt::Display, &e]
                )
            );
            tracing::error!(file = %file_path, error = %e, "Failed to open file");
            std::process::exit(1);
        }
    };
    let reader = BufReader::new(file);
    let mut packages = Vec::new();
    let mut warnings = Vec::new();
    for (line_num, line) in reader.lines().enumerate() {
        let original_line = match line {
            Ok(l) => l,
            Err(e) => {
                eprintln!(
                    "{}",
                    i18n::t_fmt(
                        "app.cli.install.file_read_error",
                        &[
                            &(line_num + 1) as &dyn std::fmt::Display,
                            &file_path as &dyn std::fmt::Display,
                            &e,
                        ]
                    )
                );
                tracing::error!(
                    file = %file_path,
                    line = line_num + 1,
                    error = %e,
                    "Failed to read line from file"
                );
                continue;
            }
        };
        // Remove comments: everything from the first '#' onward is dropped.
        // A line that starts with '#' therefore reduces to the empty string
        // and is skipped by the emptiness check below.
        let line = original_line.split('#').next().unwrap_or("").trim();
        if line.is_empty() {
            continue;
        }
        // Package names never contain spaces; warn about and skip such lines.
        if line.contains(' ') {
            warnings.push((line_num + 1, original_line.trim().to_string()));
            tracing::warn!(
                file = %file_path,
                line = line_num + 1,
                content = %original_line.trim(),
                "Line contains spaces between words"
            );
            continue;
        }
        packages.push(line.to_string());
    }
    // Display warnings if any
    if !warnings.is_empty() {
        eprintln!("\n{}", i18n::t("app.cli.install.lines_with_spaces"));
        for (line_num, content) in &warnings {
            eprintln!(
                "{}",
                i18n::t_fmt2("app.cli.install.line_item", line_num, content)
            );
        }
        eprintln!();
    }
    packages
}
/// What: Install official packages via pacman.
///
/// Inputs:
/// - `packages`: Vector of official package names.
///
/// Output:
/// - `Ok(())` on success, exits the process on failure.
///
/// Details:
/// - Executes `sudo pacman -S` with the package names.
/// - Exits on failure.
/// Installs the given official packages with `sudo pacman -S`.
/// No-op for an empty list; exits the process on spawn failure or when
/// pacman reports a non-zero exit status.
fn install_official_packages(packages: &[String]) {
    use std::process::Command;
    if packages.is_empty() {
        return;
    }
    tracing::info!(packages = ?packages, "Installing official packages");
    let result = Command::new("sudo")
        .arg("pacman")
        .arg("-S")
        .args(packages)
        .status();
    match result {
        Err(e) => {
            eprintln!("{}", i18n::t_fmt1("app.cli.install.pacman_exec_failed", &e));
            tracing::error!(error = %e, "Failed to execute pacman");
            std::process::exit(1);
        }
        Ok(exit_status) => {
            if exit_status.success() {
                tracing::info!("Official packages installed successfully");
            } else {
                // Propagate pacman's exit code to the caller's shell.
                eprintln!("{}", i18n::t("app.cli.install.official_failed"));
                tracing::error!(
                    exit_code = exit_status.code(),
                    "Failed to install official packages"
                );
                std::process::exit(exit_status.code().unwrap_or(1));
            }
        }
    }
}
/// What: Install AUR packages via paru or yay.
///
/// Inputs:
/// - `packages`: Vector of AUR package names.
/// - `helper`: AUR helper name ("paru" or "yay").
///
/// Output:
/// - `Ok(())` on success, exits the process on failure.
///
/// Details:
/// - Executes helper `-S` with the package names.
/// - Exits on failure.
/// Installs the given AUR packages with `<helper> -S` (helper is "paru" or
/// "yay"). No-op for an empty list; exits the process on spawn failure or a
/// non-zero helper exit status.
fn install_aur_packages(packages: &[String], helper: &str) {
    use std::process::Command;
    if packages.is_empty() {
        return;
    }
    tracing::info!(helper = %helper, packages = ?packages, "Installing AUR packages");
    let result = Command::new(helper).arg("-S").args(packages).status();
    match result {
        Err(e) => {
            eprintln!(
                "{}",
                i18n::t_fmt2("app.cli.install.aur_helper_exec_failed", helper, &e)
            );
            tracing::error!(error = %e, helper = %helper, "Failed to execute AUR helper");
            std::process::exit(1);
        }
        Ok(exit_status) => {
            if exit_status.success() {
                tracing::info!("AUR packages installed successfully");
            } else {
                // Propagate the helper's exit code to the caller's shell.
                eprintln!("{}", i18n::t("app.cli.install.aur_failed"));
                tracing::error!(
                    exit_code = exit_status.code(),
                    "Failed to install AUR packages"
                );
                std::process::exit(exit_status.code().unwrap_or(1));
            }
        }
    }
}
/// What: Handle command-line install from file mode by installing packages via pacman or AUR helper.
///
/// Inputs:
/// - `file_path`: Path to file containing package names (one per line).
///
/// Output:
/// - Exits the process with the command's exit code or 1 on error.
///
/// Details:
/// - Reads package names from file (one per line).
/// - Ignores empty lines and lines starting with "#".
/// - Ignores text after "#" in any line.
/// - Checks each package using `sudo pacman -Ss`, then `yay/paru -Ss` if not found.
/// - Warns user if any packages don't exist and asks for confirmation (Yes default).
/// - Determines if packages are official or AUR.
/// - Installs official packages via `sudo pacman -S`.
/// - Installs AUR packages via `paru -S` or `yay -S` (prefers paru).
/// - Exits immediately after installation (doesn't launch TUI).
pub fn handle_install_from_file(file_path: &str) -> ! {
    tracing::info!(file = %file_path, "Install from file requested from CLI");
    // Read packages from file (exits if the file cannot be opened).
    let package_names = read_packages_from_file(file_path);
    if package_names.is_empty() {
        eprintln!(
            "{}",
            i18n::t_fmt1("app.cli.install.no_packages_in_file", file_path)
        );
        tracing::error!(file = %file_path, "No packages found in file");
        std::process::exit(1);
    }
    tracing::info!(
        file = %file_path,
        package_count = package_names.len(),
        "Read packages from file"
    );
    // Get AUR helper early to check AUR packages
    let aur_helper = utils::get_aur_helper();
    // Validate and categorize packages using search commands
    let (official_packages, aur_packages, invalid_packages) =
        package::validate_and_categorize_packages_search(&package_names, aur_helper);
    // Handle invalid packages
    if !invalid_packages.is_empty() {
        eprintln!("\n{}", i18n::t("app.cli.install.packages_not_found"));
        for pkg in &invalid_packages {
            eprintln!("  - {pkg}");
        }
        // Without an AUR helper, "not found" may just mean "AUR-only".
        if aur_helper.is_none() && !invalid_packages.is_empty() {
            eprintln!("\n{}", i18n::t("app.cli.install.no_aur_helper_note"));
            eprintln!("{}", i18n::t("app.cli.install.install_aur_helper"));
        }
        eprintln!();
        // If all packages are invalid, exit with error
        if official_packages.is_empty() && aur_packages.is_empty() {
            eprintln!("{}", i18n::t("app.cli.install.no_valid_packages"));
            tracing::error!("All packages are invalid");
            std::process::exit(1);
        }
        // Ask user if they want to continue (Yes default)
        if !utils::prompt_user(&i18n::t("app.cli.install.continue_prompt")) {
            tracing::info!("User cancelled installation due to invalid packages");
            println!("{}", i18n::t("app.cli.install.cancelled"));
            std::process::exit(0);
        }
    }
    // If no valid packages remain after filtering, exit.
    // Use the localized message for consistency with handle_install.
    if official_packages.is_empty() && aur_packages.is_empty() {
        eprintln!("{}", i18n::t("app.cli.install.no_valid_packages"));
        tracing::error!("No valid packages after validation");
        std::process::exit(1);
    }
    // Install official packages
    install_official_packages(&official_packages);
    // Install AUR packages
    if !aur_packages.is_empty() {
        // Localized message, matching handle_install's missing-helper path.
        let Some(helper) = aur_helper else {
            eprintln!("{}", i18n::t("app.cli.install.neither_helper_available"));
            tracing::error!("Neither paru nor yay is available for AUR packages");
            std::process::exit(1);
        };
        install_aur_packages(&aur_packages, helper);
    }
    tracing::info!("All packages installed successfully");
    std::process::exit(0);
}
/// What: Handle command-line install mode by installing packages via pacman or AUR helper.
///
/// Inputs:
/// - `packages`: Vector of package names (comma-separated or space-separated).
///
/// Output:
/// - Exits the process with the command's exit code or 1 on error.
///
/// Details:
/// - Parses package names (handles comma-separated and space-separated).
/// - Checks each package to verify it exists before installation.
/// - Warns user if any packages don't exist and asks for confirmation.
/// - Determines if packages are official or AUR.
/// - Installs official packages via `sudo pacman -S`.
/// - Installs AUR packages via `paru -S` or `yay -S` (prefers paru).
/// - Exits immediately after installation (doesn't launch TUI).
pub fn handle_install(packages: &[String]) -> ! {
    tracing::info!(packages = ?packages, "Install mode requested from CLI");
    // Normalize comma/space separated input into individual package names.
    let package_names = utils::parse_package_names(packages);
    if package_names.is_empty() {
        eprintln!("{}", i18n::t("app.cli.install.no_packages_specified"));
        tracing::error!("No packages specified for installation");
        std::process::exit(1);
    }
    // Get AUR helper early to check AUR packages
    let aur_helper = utils::get_aur_helper();
    // Validate and categorize packages
    let (official_packages, aur_packages, invalid_packages) =
        package::validate_and_categorize_packages(&package_names, aur_helper);
    // Handle invalid packages
    // `handle_invalid_packages` returns false when the user declines to
    // continue after being shown the invalid names.
    if !package::handle_invalid_packages(
        &invalid_packages,
        aur_helper,
        &official_packages,
        &aur_packages,
    ) {
        tracing::info!("User cancelled installation due to invalid packages");
        println!("{}", i18n::t("app.cli.install.cancelled"));
        std::process::exit(0);
    }
    // If no valid packages remain after filtering, exit
    if official_packages.is_empty() && aur_packages.is_empty() {
        eprintln!("{}", i18n::t("app.cli.install.no_valid_packages"));
        tracing::error!("No valid packages after validation");
        std::process::exit(1);
    }
    // Install official packages
    install_official_packages(&official_packages);
    // Install AUR packages
    if !aur_packages.is_empty() {
        // AUR installs require a helper; bail out if none is installed.
        let Some(helper) = aur_helper else {
            eprintln!("{}", i18n::t("app.cli.install.neither_helper_available"));
            tracing::error!("Neither paru nor yay is available for AUR packages");
            std::process::exit(1);
        };
        install_aur_packages(&aur_packages, helper);
    }
    tracing::info!("All packages installed successfully");
    std::process::exit(0);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/utils.rs | src/args/utils.rs | //! Shared utilities for argument processing.
/// What: Determine the log level based on command-line arguments and environment variables.
///
/// Inputs:
/// - `args`: Parsed command-line arguments.
///
/// Output:
/// - Log level string (trace, debug, info, warn, error).
///
/// Details:
/// - Verbose flag overrides `log_level` argument.
/// - `PACSEA_PREFLIGHT_TRACE=1` enables TRACE level for detailed preflight timing.
pub fn determine_log_level(args: &crate::args::Args) -> String {
    // --verbose takes precedence over everything else.
    if args.verbose {
        return "debug".to_string();
    }
    // Opt-in TRACE level for detailed preflight timing diagnostics.
    if std::env::var("PACSEA_PREFLIGHT_TRACE").ok().as_deref() == Some("1") {
        return "trace".to_string();
    }
    // Fall back to the --log-level argument (defaults to "info").
    args.log_level.clone()
}
/// What: Check if paru or yay is available.
///
/// Inputs:
/// - None.
///
/// Output:
/// - `Some("paru")` if paru is available, `Some("yay")` if only yay is available, `None` if neither.
///
/// Details:
/// - Checks for paru first, then falls back to yay.
pub fn get_aur_helper() -> Option<&'static str> {
    use std::process::{Command, Stdio};
    // Probe candidates in preference order (paru first, then yay).
    // `output()` only returns Err when the binary cannot be spawned,
    // i.e. when the helper is not installed.
    ["paru", "yay"].into_iter().find(|helper| {
        Command::new(helper)
            .args(["--version"])
            .stdin(Stdio::null())
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .output()
            .is_ok()
    })
}
/// What: Parse package names from input, handling both comma-separated and space-separated formats.
///
/// Inputs:
/// - `packages`: Vector of package strings (may contain comma-separated values).
///
/// Output:
/// - Vector of individual package names.
///
/// Details:
/// - Splits each input string by commas and trims whitespace.
/// - Filters out empty strings.
pub fn parse_package_names(packages: &[String]) -> Vec<String> {
    // Split every argument on commas, trim the pieces, and keep only the
    // non-empty names, preserving input order.
    packages
        .iter()
        .flat_map(|arg| arg.split(','))
        .map(str::trim)
        .filter(|name| !name.is_empty())
        .map(str::to_string)
        .collect()
}
/// What: Prompt the user for yes/no confirmation.
///
/// Inputs:
/// - `message`: The prompt message to display.
///
/// Output:
/// - `true` if user confirms (default), `false` if user explicitly declines (n/N/no).
///
/// Details:
/// - Reads a single line from stdin.
/// - Defaults to "yes" (empty input or Enter key).
/// - Returns `false` only if user explicitly enters 'n', 'N', or 'no'.
/// - Trims whitespace before checking.
pub fn prompt_user(message: &str) -> bool {
    use std::io::{self, Write};
    print!("{message} [Y/n]: ");
    io::stdout().flush().ok();
    let mut answer = String::new();
    match io::stdin().read_line(&mut answer) {
        Ok(_) => {
            // Plain Enter (empty input) counts as "yes"; only an explicit
            // 'n'/'no' (any case) declines.
            let reply = answer.trim().to_ascii_lowercase();
            !matches!(reply.as_str(), "n" | "no")
        }
        // Treat an unreadable stdin as confirmation (yes default).
        Err(_) => true,
    }
}
/// What: Prompt the user for yes/no confirmation with "No" as default.
///
/// Inputs:
/// - `message`: The prompt message to display.
///
/// Output:
/// - `true` if user explicitly confirms (y/Y/yes), `false` otherwise (default).
///
/// Details:
/// - Reads a single line from stdin.
/// - Defaults to "no" (empty input or Enter key).
/// - Returns `true` only if user explicitly enters 'y', 'Y', or 'yes'.
/// - Trims whitespace before checking.
pub fn prompt_user_no_default(message: &str) -> bool {
    use std::io::{self, Write};
    print!("{message} [y/N]: ");
    io::stdout().flush().ok();
    let mut answer = String::new();
    match io::stdin().read_line(&mut answer) {
        Ok(_) => {
            // Plain Enter (empty input) counts as "no"; only an explicit
            // 'y'/'yes' (any case) confirms.
            let reply = answer.trim().to_ascii_lowercase();
            matches!(reply.as_str(), "y" | "yes")
        }
        // Treat an unreadable stdin as a decline (no default).
        Err(_) => false,
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/mod.rs | src/args/mod.rs | //! Command-line argument parsing and handling.
// Submodules: each owns one early-exit CLI mode or a shared utility.
pub mod cache;
pub mod definition;
pub mod i18n;
pub mod install;
pub mod list;
pub mod news;
pub mod package;
pub mod remove;
pub mod search;
pub mod update;
pub mod utils;
// Re-export commonly used items
pub use definition::{Args, process_args};
pub use utils::determine_log_level;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/definition.rs | src/args/definition.rs | //! Command-line argument definition and processing.
use clap::Parser;
/// Pacsea - A fast, friendly TUI for browsing and installing Arch and AUR packages
#[derive(Parser, Debug)]
#[command(name = "pacsea")]
#[command(version)]
#[command(about = "A fast, friendly TUI for browsing and installing Arch and AUR packages", long_about = None)]
#[allow(clippy::struct_excessive_bools)]
pub struct Args {
    // NOTE: the `///` doc comments below double as clap's --help text; any
    // wording change here changes the CLI help output.
    /// Perform a dry run without making actual changes
    #[arg(long)]
    pub dry_run: bool,
    /// Set the logging level (trace, debug, info, warn, error)
    #[arg(long, default_value = "info")]
    pub log_level: String,
    /// Enable verbose output (equivalent to --log-level debug)
    #[arg(short, long)]
    pub verbose: bool,
    /// Disable colored output
    #[arg(long)]
    pub no_color: bool,
    /// [Not yet implemented] Specify the configuration directory (default: ~/.config/pacsea)
    #[arg(long)]
    pub config_dir: Option<String>,
    /// Search for packages from command line
    #[arg(short, long)]
    pub search: Option<String>,
    /// Install packages from command line (comma-separated or space-separated)
    #[arg(short, long, num_args = 1..)]
    pub install: Vec<String>,
    /// Install packages from file (e.g., pacsea -I FILENAME.txt)
    #[arg(short = 'I')]
    pub install_from_file: Option<String>,
    /// Remove packages from command line (e.g., pacsea -r PACKAGE1 PACKAGE2 or pacsea --remove PACKAGE)
    #[arg(short = 'r', long, num_args = 1..)]
    pub remove: Vec<String>,
    /// [Not yet implemented] Remove packages from file (e.g., pacsea -R FILENAME.txt)
    #[arg(short = 'R')]
    pub remove_from_file: Option<String>,
    /// System update (sync + update, e.g., pacsea --update)
    #[arg(short = 'u', long)]
    pub update: bool,
    /// Output news dialog to commandline with link to website at the end
    #[arg(short = 'n', long)]
    pub news: bool,
    /// List unread news (use with --news)
    #[arg(long)]
    pub unread: bool,
    /// List read news (use with --news)
    #[arg(long)]
    pub read: bool,
    /// List all news (read and unread) (use with --news)
    #[arg(long, short = 'a')]
    pub all_news: bool,
    /// Clear all cache files (dependencies, files, services, sandbox) and exit
    #[arg(long)]
    pub clear_cache: bool,
    /// List installed packages (use with --exp, --imp, or --all)
    #[arg(short = 'l', long)]
    pub list: bool,
    /// List explicitly installed packages (use with --list)
    #[arg(long)]
    pub exp: bool,
    /// List implicitly installed packages (use with --list)
    #[arg(long)]
    pub imp: bool,
    /// List all installed packages (use with --list)
    #[arg(long)]
    pub all: bool,
}
/// What: Process all command-line arguments and handle early-exit flags.
///
/// Inputs:
/// - `args`: Parsed command-line arguments.
///
/// Output:
/// - Returns `None` (no longer returns refresh result).
///
/// Details:
/// - Handles search, clear-cache, list, install (inline and from file),
///   remove, update, and news modes; each of those handlers exits the
///   process itself and never returns here.
/// - Remove-from-file (-R) is not yet implemented; it only logs a warning
///   and falls through to the TUI.
#[allow(unused_imports)]
pub fn process_args(args: &Args) -> Option<bool> {
    use crate::args::{cache, install, list, news, remove, search, update};
    // Handle command-line search mode
    if let Some(search_query) = &args.search {
        search::handle_search(search_query);
    }
    // Handle clear cache flag
    if args.clear_cache {
        cache::handle_clear_cache();
    }
    // Handle list installed packages flag
    if args.list {
        list::handle_list(args.exp, args.imp, args.all);
    }
    // Handle command-line install mode
    if !args.install.is_empty() {
        install::handle_install(&args.install);
    }
    // Handle install from file (-I)
    if let Some(file_path) = &args.install_from_file {
        install::handle_install_from_file(file_path);
    }
    // Handle remove packages from command line (-r / --remove)
    if !args.remove.is_empty() {
        remove::handle_remove(&args.remove);
    }
    // Handle remove packages from file (-R)
    if let Some(file_path) = &args.remove_from_file {
        tracing::info!(file = %file_path, "Remove from file requested from CLI");
        // TODO: Implement remove from file (mentioned in roadmap)
        tracing::warn!("Remove from file not yet implemented, falling back to TUI");
    }
    // Handle system update (--update / -u)
    #[cfg(not(target_os = "windows"))]
    if args.update {
        update::handle_update(args.no_color);
    }
    // Update relies on pacman/AUR helpers, which do not exist on Windows.
    #[cfg(target_os = "windows")]
    if args.update {
        eprintln!("System update is not supported on Windows");
        std::process::exit(1);
    }
    // Handle news flag
    if args.news {
        news::handle_news(args.unread, args.read, args.all_news);
    }
    None
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/news.rs | src/args/news.rs | //! Command-line news functionality.
use crate::args::i18n;
use pacsea::theme;
/// What: Handle news flag by fetching Arch news and displaying to command line.
///
/// Inputs:
/// - `unread`: If true, list only unread news.
/// - `read`: If true, list only read news.
/// - `all_news`: If true, list all news (read and unread).
///
/// Output:
/// - Exits the process after displaying the news.
///
/// Details:
/// - Fetches Arch Linux news from RSS feed.
/// - Loads read news URLs from persisted file.
/// - Filters news based on the specified option (defaults to all if none specified).
/// - Prints news items with date, title, and URL.
/// - Outputs link to website at the end.
/// - Exits immediately after displaying (doesn't launch TUI).
pub fn handle_news(unread: bool, read: bool, all_news: bool) -> ! {
    use std::collections::HashSet;
    tracing::info!(
        unread = unread,
        read = read,
        all_news = all_news,
        "News mode requested from CLI"
    );
    // Default to all if no option is specified
    let show_all = if !unread && !read && !all_news {
        tracing::info!("No news option specified, defaulting to --all");
        true
    } else {
        all_news
    };
    // Load read news URLs from persisted file.
    // A missing or unparsable file silently yields an empty set (all unread).
    let news_read_path = theme::lists_dir().join("news_read_urls.json");
    let read_urls: HashSet<String> = if let Ok(s) = std::fs::read_to_string(&news_read_path)
        && let Ok(set) = serde_json::from_str::<HashSet<String>>(&s)
    {
        set
    } else {
        HashSet::new()
    };
    // Fetch news (using tokio runtime for async)
    // Spawn a separate thread with its own runtime to avoid nested runtime issues
    let (tx, rx) = std::sync::mpsc::channel();
    std::thread::spawn(move || {
        // Single-threaded runtime is enough: we only drive one future.
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build();
        let res = match rt {
            Ok(rt) => rt.block_on(pacsea::sources::fetch_arch_news(100, None)),
            Err(e) => Err::<Vec<pacsea::state::NewsItem>, _>(format!("rt: {e}").into()),
        };
        let _ = tx.send(res);
    });
    // Block until the fetch thread reports back (success or error).
    let news_items = match rx.recv() {
        Ok(Ok(items)) => items,
        Ok(Err(e)) => {
            eprintln!("{}", i18n::t_fmt1("app.cli.news.fetch_error", &e));
            tracing::error!(error = %e, "Failed to fetch news");
            std::process::exit(1);
        }
        Err(e) => {
            eprintln!("{}", i18n::t_fmt1("app.cli.news.runtime_error", e));
            tracing::error!(error = %e, "Failed to receive news from thread");
            std::process::exit(1);
        }
    };
    // Filter news based on option.
    // Precedence: all > unread > read; the final arm is an unreachable-in-
    // practice fallback that also shows everything.
    let filtered_items: Vec<&pacsea::state::NewsItem> = if show_all {
        news_items.iter().collect()
    } else if unread {
        news_items
            .iter()
            .filter(|item| !read_urls.contains(&item.url))
            .collect()
    } else if read {
        news_items
            .iter()
            .filter(|item| read_urls.contains(&item.url))
            .collect()
    } else {
        news_items.iter().collect()
    };
    // Print news items, each with read/unread status, date, title and URL.
    if filtered_items.is_empty() {
        println!("{}", i18n::t("app.cli.news.no_items"));
    } else {
        for item in &filtered_items {
            let status = if read_urls.contains(&item.url) {
                i18n::t("app.cli.news.status_read")
            } else {
                i18n::t("app.cli.news.status_unread")
            };
            println!("{} {} - {}", status, item.date, item.title);
            println!("{}", i18n::t_fmt1("app.cli.news.url_label", &item.url));
            println!();
        }
    }
    // Print link to website at the end
    println!("{}", i18n::t("app.cli.news.website_link"));
    tracing::info!(count = filtered_items.len(), "Displayed news items");
    std::process::exit(0);
}
#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    /// Two-item news fixture shared by the read/unread filtering tests.
    fn sample_items() -> [pacsea::state::NewsItem; 2] {
        [
            pacsea::state::NewsItem {
                date: "2025-01-01".to_string(),
                title: "Item 1".to_string(),
                url: "https://archlinux.org/news/item-1/".to_string(),
            },
            pacsea::state::NewsItem {
                date: "2025-01-02".to_string(),
                title: "Item 2".to_string(),
                url: "https://archlinux.org/news/item-2/".to_string(),
            },
        ]
    }

    /// Read-set marking only the first fixture item's URL as read.
    fn read_set() -> HashSet<String> {
        HashSet::from(["https://archlinux.org/news/item-1/".to_string()])
    }

    /// Verifies that with no flags set, the CLI defaults to showing all news.
    #[test]
    fn test_news_filtering_defaults_to_all() {
        let (unread, read, all_news) = (false, false, false);
        let show_all = if !unread && !read && !all_news {
            true
        } else {
            all_news
        };
        assert!(show_all, "Should default to all when no flags specified");
    }

    /// Verifies that unread filtering excludes items whose URL is read.
    #[test]
    fn test_news_filtering_unread() {
        let read_urls = read_set();
        let items = sample_items();
        let unread_items: Vec<&pacsea::state::NewsItem> = items
            .iter()
            .filter(|item| !read_urls.contains(&item.url))
            .collect();
        assert_eq!(unread_items.len(), 1);
        assert_eq!(unread_items[0].title, "Item 2");
    }

    /// Verifies that read filtering keeps only items whose URL is read.
    #[test]
    fn test_news_filtering_read() {
        let read_urls = read_set();
        let items = sample_items();
        let read_items: Vec<&pacsea::state::NewsItem> = items
            .iter()
            .filter(|item| read_urls.contains(&item.url))
            .collect();
        assert_eq!(read_items.len(), 1);
        assert_eq!(read_items[0].title, "Item 1");
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/package.rs | src/args/package.rs | //! Package validation and categorization utilities.
use crate::args::i18n;
/// What: Check if a package exists in the official repositories using search.
///
/// Inputs:
/// - `package_name`: Name of the package to check.
///
/// Output:
/// - `true` if the package exists in official repos, `false` otherwise.
///
/// Details:
/// - Uses `sudo pacman -Ss` to search for the package and checks if exact match exists.
/// - Only unindented header lines (`repo/name version ...`) are inspected; the
///   indented description line printed under each match is skipped so that a
///   description mentioning the package name cannot produce a false positive.
/// - Returns `false` if pacman is not available or the package is not found.
fn is_official_package_search(package_name: &str) -> bool {
    use std::process::{Command, Stdio};
    match Command::new("sudo")
        .args(["pacman", "-Ss", package_name])
        .stdin(Stdio::null())
        .output()
    {
        Ok(output) => {
            if !output.status.success() {
                // pacman exits non-zero when the search yields nothing.
                return false;
            }
            let output_str = String::from_utf8_lossy(&output.stdout);
            // `pacman -Ss` prints one header line per match followed by an
            // indented description line; only headers start at column 0.
            output_str
                .lines()
                .filter(|line| !line.starts_with(|c: char| c.is_whitespace()))
                .any(|line| {
                    line.split_whitespace().next().is_some_and(|pkg_line| {
                        // Handle format like "repo/package_name" or just "package_name"
                        let pkg_part = pkg_line.split('/').next_back().unwrap_or(pkg_line);
                        pkg_part == package_name
                    })
                })
        }
        Err(_) => false,
    }
}
/// What: Check if a package exists in the official repositories.
///
/// Inputs:
/// - `package_name`: Name of the package to check.
///
/// Output:
/// - `true` if the package exists in official repos, `false` otherwise.
///
/// Details:
/// - Runs `pacman -Si <name>`; a zero exit status means the package is known.
/// - Returns `false` when pacman cannot be spawned or exits non-zero.
fn is_official_package(package_name: &str) -> bool {
    use std::process::{Command, Stdio};
    // All streams are silenced: only the exit status matters here.
    Command::new("pacman")
        .args(["-Si", package_name])
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .output()
        .is_ok_and(|output| output.status.success())
}
/// What: Check if an AUR package exists using search.
///
/// Inputs:
/// - `package_name`: Name of the package to check.
/// - `helper`: AUR helper to use ("paru" or "yay").
///
/// Output:
/// - `true` if the package exists in AUR, `false` otherwise.
///
/// Details:
/// - Uses `paru -Ss` or `yay -Ss` to search for the package and checks if exact match exists.
/// - Only unindented header lines (`aur/name version ...`) are inspected; the
///   indented description line printed under each match is skipped so that a
///   description mentioning the package name cannot produce a false positive.
/// - Returns `false` if the helper is not available or the package is not found.
fn is_aur_package_search(package_name: &str, helper: &str) -> bool {
    use std::process::{Command, Stdio};
    match Command::new(helper)
        .args(["-Ss", package_name])
        .stdin(Stdio::null())
        .output()
    {
        Ok(output) => {
            if !output.status.success() {
                // Helpers exit non-zero when the search yields nothing.
                return false;
            }
            let output_str = String::from_utf8_lossy(&output.stdout);
            // Helpers mimic pacman's search layout: header lines at column 0,
            // indented description lines beneath them.
            output_str
                .lines()
                .filter(|line| !line.starts_with(|c: char| c.is_whitespace()))
                .any(|line| {
                    line.split_whitespace().next().is_some_and(|pkg_line| {
                        // Handle format like "aur/package_name" or just "package_name"
                        let pkg_part = pkg_line.split('/').next_back().unwrap_or(pkg_line);
                        pkg_part == package_name
                    })
                })
        }
        Err(_) => false,
    }
}
/// What: Check if an AUR package exists.
///
/// Inputs:
/// - `package_name`: Name of the package to check.
/// - `helper`: AUR helper to use ("paru" or "yay").
///
/// Output:
/// - `true` if the package exists in AUR, `false` otherwise.
///
/// Details:
/// - Runs `<helper> -Si <name>`; a zero exit status means the package exists.
/// - Returns `false` when the helper cannot be spawned or exits non-zero.
fn is_aur_package(package_name: &str, helper: &str) -> bool {
    use std::process::{Command, Stdio};
    // All streams are silenced: only the exit status matters here.
    Command::new(helper)
        .args(["-Si", package_name])
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .output()
        .is_ok_and(|output| output.status.success())
}
/// What: Validate and categorize packages into official, AUR, and invalid.
///
/// Inputs:
/// - `package_names`: Vector of package names to validate.
/// - `aur_helper`: Optional AUR helper name ("paru" or "yay").
///
/// Output:
/// - Tuple of (`official_packages`, `aur_packages`, `invalid_packages`).
///
/// Details:
/// - Checks each package against official repos and AUR (if helper available).
/// - Packages found in neither — or that cannot be verified because no AUR
///   helper is installed — are marked as invalid.
pub fn validate_and_categorize_packages(
    package_names: &[String],
    aur_helper: Option<&str>,
) -> (Vec<String>, Vec<String>, Vec<String>) {
    let mut official = Vec::new();
    let mut aur = Vec::new();
    let mut invalid = Vec::new();
    for name in package_names {
        if is_official_package(name) {
            official.push(name.clone());
        } else if aur_helper.is_some_and(|helper| is_aur_package(name, helper)) {
            aur.push(name.clone());
        } else {
            // Either the helper reported the package as unknown, or there is
            // no helper to ask; both cases are treated as invalid.
            invalid.push(name.clone());
        }
    }
    (official, aur, invalid)
}
/// What: Validate packages using search commands and categorize them.
///
/// Inputs:
/// - `package_names`: Vector of package names to validate.
/// - `aur_helper`: Optional AUR helper name ("paru" or "yay").
///
/// Output:
/// - Tuple of (`official_packages`, `aur_packages`, `invalid_packages`).
///
/// Details:
/// - Checks each package using `sudo pacman -Ss` first.
/// - If not found, checks using `yay/paru -Ss` (if helper available).
/// - Packages found in neither source are marked as invalid.
pub fn validate_and_categorize_packages_search(
    package_names: &[String],
    aur_helper: Option<&str>,
) -> (Vec<String>, Vec<String>, Vec<String>) {
    let mut official = Vec::new();
    let mut aur = Vec::new();
    let mut invalid = Vec::new();
    for name in package_names {
        // Route each name into exactly one bucket.
        let bucket = if is_official_package_search(name) {
            &mut official
        } else if aur_helper.is_some_and(|helper| is_aur_package_search(name, helper)) {
            &mut aur
        } else {
            // Not official, and either unknown to the helper or unverifiable
            // because no helper is installed.
            &mut invalid
        };
        bucket.push(name.clone());
    }
    (official, aur, invalid)
}
/// What: Handle invalid packages by warning user and asking for confirmation.
///
/// Inputs:
/// - `invalid_packages`: Vector of invalid package names.
/// - `aur_helper`: Optional AUR helper name.
/// - `official_packages`: Vector of valid official packages.
/// - `aur_packages`: Vector of valid AUR packages.
///
/// Output:
/// - `true` if user wants to continue, `false` if cancelled.
///
/// Details:
/// - Displays warning message listing invalid packages.
/// - Prompts user for confirmation to continue with valid packages.
/// - Exits with error if all packages are invalid.
pub fn handle_invalid_packages(
    invalid_packages: &[String],
    aur_helper: Option<&str>,
    official_packages: &[String],
    aur_packages: &[String],
) -> bool {
    use crate::args::utils;
    // Nothing to report: proceed without prompting.
    if invalid_packages.is_empty() {
        return true;
    }
    eprintln!("\n{}", i18n::t("app.cli.package.packages_not_found"));
    invalid_packages.iter().for_each(|pkg| eprintln!(" - {pkg}"));
    // Without an AUR helper AUR membership cannot be verified, so explain how
    // to obtain one.
    if aur_helper.is_none() {
        eprintln!("\n{}", i18n::t("app.cli.package.no_aur_helper_note"));
        eprintln!("{}", i18n::t("app.cli.package.install_aur_helper"));
    }
    eprintln!();
    // With no valid packages left there is nothing to install: hard error.
    if official_packages.is_empty() && aur_packages.is_empty() {
        eprintln!("{}", i18n::t("app.cli.package.no_valid_packages"));
        tracing::error!("All packages are invalid");
        std::process::exit(1);
    }
    utils::prompt_user("Do you want to continue installing the remaining packages?")
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/cache.rs | src/args/cache.rs | //! Command-line cache management functionality.
use crate::args::i18n;
use pacsea::theme;
/// What: Handle clear cache flag by removing all cache files and exiting.
///
/// Inputs:
/// - None (uses `theme::lists_dir()` to locate cache files).
///
/// Output:
/// - Exits the process after clearing cache files.
///
/// Details:
/// - Removes all cache files including dependency, file, service, sandbox, details, PKGBUILD parse,
/// news feed, news content, news seen updates/comments, and news/advisories article caches.
/// - Prints the number of cleared files to stdout.
/// - Exits immediately after clearing (doesn't launch TUI).
pub fn handle_clear_cache() -> ! {
tracing::info!("Clear cache requested from CLI");
let lists_dir = theme::lists_dir();
let cache_files = [
"install_deps_cache.json",
"file_cache.json",
"services_cache.json",
"sandbox_cache.json",
"details_cache.json",
"pkgbuild_parse_cache.json",
"news_content_cache.json",
"news_feed.json",
"news_seen_pkg_updates.json",
"news_seen_aur_comments.json",
"arch_news_cache.json",
"advisories_cache.json",
"news_article_cache.json",
];
let mut cleared_count = 0;
for cache_file in &cache_files {
let cache_path = lists_dir.join(cache_file);
match std::fs::remove_file(&cache_path) {
Ok(()) => {
tracing::info!(path = %cache_path.display(), "cleared cache file");
cleared_count += 1;
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
tracing::debug!(path = %cache_path.display(), "cache file does not exist, skipping");
}
Err(e) => {
tracing::warn!(path = %cache_path.display(), error = %e, "failed to clear cache file");
}
}
}
if cleared_count > 0 {
tracing::info!(cleared_count = cleared_count, "cleared cache files");
println!("{}", i18n::t_fmt1("app.cli.cache.cleared", cleared_count));
} else {
tracing::info!("No cache files found to clear");
println!("{}", i18n::t("app.cli.cache.none_found"));
}
std::process::exit(0);
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/src/args/remove.rs | src/args/remove.rs | //! Command-line remove functionality.
use crate::args::{i18n, utils};
/// What: Check for configuration directories in `$HOME/PACKAGE_NAME` and `$HOME/.config/PACKAGE_NAME`.
///
/// Inputs:
/// - `package_name`: Name of the package to check for config directories.
/// - `home`: Home directory path.
///
/// Output:
/// - Vector of found config directory paths.
///
/// Details:
/// - Probes `$HOME/PACKAGE_NAME` first, then `$HOME/.config/PACKAGE_NAME`.
/// - Only paths that exist and are directories are returned.
fn check_config_directories(package_name: &str, home: &str) -> Vec<std::path::PathBuf> {
    use std::path::Path;
    // Candidate locations, in the order they should be reported.
    let candidates = [
        Path::new(home).join(package_name),
        Path::new(home).join(".config").join(package_name),
    ];
    candidates
        .into_iter()
        // `is_dir` is false for missing paths, so this covers both checks.
        .filter(|dir| dir.is_dir())
        .collect()
}
/// What: Handle command-line remove mode by removing packages via pacman.
///
/// Inputs:
/// - `packages`: Vector of package names (comma-separated or space-separated).
///
/// Output:
/// - Exits the process with the command's exit code or 1 on error.
///
/// Details:
/// - Parses package names (handles comma-separated and space-separated).
/// - Shows warning about removal and no backup.
/// - Prompts user with [y/N] (No is default).
/// - Executes `sudo pacman -Rns` to remove packages.
/// - After removal, checks for config directories in `$HOME/PACKAGE_NAME` and `$HOME/.config/PACKAGE_NAME`.
/// - Shows found config directories in a list.
/// - Exits immediately after removal (doesn't launch TUI).
pub fn handle_remove(packages: &[String]) -> ! {
use std::process::Command;
tracing::info!(packages = ?packages, "Remove mode requested from CLI");
let package_names = utils::parse_package_names(packages);
if package_names.is_empty() {
eprintln!("{}", i18n::t("app.cli.remove.no_packages"));
tracing::error!("No packages specified for removal");
std::process::exit(1);
}
// Show warning message
eprintln!("\n{}", i18n::t("app.cli.remove.warning"));
eprintln!("\n{}", i18n::t("app.cli.remove.packages_to_remove"));
for pkg in &package_names {
eprintln!(" - {pkg}");
}
eprintln!();
// Prompt user for confirmation (defaults to No)
if !utils::prompt_user_no_default(&i18n::t("app.cli.remove.prompt")) {
tracing::info!("User cancelled removal");
println!("{}", i18n::t("app.cli.remove.cancelled"));
std::process::exit(0);
}
// Execute sudo pacman -Rns
tracing::info!(packages = ?package_names, "Removing packages");
let status = Command::new("sudo")
.arg("pacman")
.arg("-Rns")
.args(&package_names)
.status();
match status {
Ok(exit_status) if exit_status.success() => {
tracing::info!("Packages removed successfully");
println!("\n{}", i18n::t("app.cli.remove.success"));
// Check for config directories after removal
if let Ok(home) = std::env::var("HOME") {
let mut found_configs = Vec::new();
for pkg in &package_names {
let config_dirs = check_config_directories(pkg, &home);
for dir in config_dirs {
found_configs.push((pkg.clone(), dir));
}
}
if !found_configs.is_empty() {
println!("\n{}", i18n::t("app.cli.remove.config_dirs_found"));
for (pkg, dir) in &found_configs {
println!(" - {}: {}", pkg, dir.display());
}
println!("\n{}", i18n::t("app.cli.remove.config_dirs_note"));
}
}
std::process::exit(0);
}
Ok(exit_status) => {
eprintln!("\n{}", i18n::t("app.cli.remove.failed"));
tracing::error!(exit_code = exit_status.code(), "Failed to remove packages");
std::process::exit(exit_status.code().unwrap_or(1));
}
Err(e) => {
eprintln!("\n{}", i18n::t_fmt1("app.cli.remove.exec_failed", &e));
tracing::error!(error = %e, "Failed to execute pacman");
std::process::exit(1);
}
}
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/update.rs | tests/update.rs | //! Integration and UI tests for update operations.
#[path = "update/system_update_integration.rs"]
mod system_update_integration;
#[path = "update/system_update_ui.rs"]
mod system_update_ui;
#[path = "update/update_flags.rs"]
mod update_flags;
#[path = "update/update_single_integration.rs"]
mod update_single_integration;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/downgrade.rs | tests/downgrade.rs | //! Integration and UI tests for downgrade operations.
#[path = "downgrade/downgrade_integration.rs"]
mod downgrade_integration;
#[path = "downgrade/downgrade_ui.rs"]
mod downgrade_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/install.rs | tests/install.rs | //! Integration and UI tests for install operations.
#[path = "install/direct_install_integration.rs"]
mod direct_install_integration;
#[path = "install/direct_install_ui.rs"]
mod direct_install_ui;
#[path = "install/install_integration.rs"]
mod install_integration;
#[path = "install/install_ui.rs"]
mod install_ui;
#[path = "install/optional_deps_integration.rs"]
mod optional_deps_integration;
#[path = "install/optional_deps_ui.rs"]
mod optional_deps_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/preflight_integration.rs | tests/preflight_integration.rs | //! Integration tests for preflight modal optimization features.
//!
//! This file serves as the entry point for cargo test discovery.
//! All test modules are defined in the `preflight_integration`/ subdirectory.
#[path = "preflight_integration/mod.rs"]
mod preflight_integration;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/file_sync.rs | tests/file_sync.rs | //! Integration and UI tests for file database sync operations.
#[path = "file_sync/file_sync_integration.rs"]
mod file_sync_integration;
#[path = "file_sync/file_sync_ui.rs"]
mod file_sync_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/other.rs | tests/other.rs | //! Miscellaneous tests (complexity, smoke tests, etc.).
#[path = "other/conflict_preservation.rs"]
mod conflict_preservation;
#[path = "other/cyclomatic_complexity.rs"]
mod cyclomatic_complexity;
#[path = "other/data_flow_complexity.rs"]
mod data_flow_complexity;
#[path = "other/runtime_smoke.rs"]
mod runtime_smoke;
#[path = "other/terminals_args_shape.rs"]
mod terminals_args_shape;
#[path = "other/ui_integration.rs"]
mod ui_integration;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/remove.rs | tests/remove.rs | //! Integration and UI tests for remove operations.
#[path = "remove/remove_integration.rs"]
mod remove_integration;
#[path = "remove/remove_ui.rs"]
mod remove_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/scan.rs | tests/scan.rs | //! Integration and UI tests for security scan operations.
#[path = "scan/scan_integration.rs"]
mod scan_integration;
#[path = "scan/scan_ui.rs"]
mod scan_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/scan/scan_ui.rs | tests/scan/scan_ui.rs | //! UI tests for security scan modals.
//!
//! Tests cover:
//! - `ScanConfig` modal structure
//! - `VirusTotalSetup` modal structure
//! - `PreflightExec` modal for integrated scan execution
//! - Modal state transitions
//!
//! Note: These tests verify modal state structure rather than actual rendering.
#![cfg(test)]
use pacsea::state::{AppState, Modal};
#[test]
/// What: Test `ScanConfig` modal structure.
///
/// Inputs:
/// - `ScanConfig` modal with ClamAV, Trivy and ShellCheck enabled, cursor at 3.
///
/// Output:
/// - Every toggle and the cursor position round-trip through the modal state.
///
/// Details:
/// - Destructures the modal with `let else` and checks each flag individually.
fn ui_scan_config_modal_structure() {
    let app = AppState {
        modal: Modal::ScanConfig {
            do_clamav: true,
            do_trivy: true,
            do_semgrep: false,
            do_shellcheck: true,
            do_virustotal: false,
            do_custom: false,
            do_sleuth: false,
            cursor: 3,
        },
        ..Default::default()
    };
    let Modal::ScanConfig {
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        do_sleuth,
        cursor,
    } = app.modal
    else {
        panic!("Expected ScanConfig modal");
    };
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(!do_semgrep);
    assert!(do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
    assert!(!do_sleuth);
    assert_eq!(cursor, 3);
}
#[test]
/// What: Test `VirusTotalSetup` modal structure.
///
/// Inputs:
/// - `VirusTotalSetup` modal with an API key and cursor position.
///
/// Output:
/// - Input text and cursor survive the round-trip through the modal state.
///
/// Details:
/// - Destructures the modal with `let else` instead of a `match`.
fn ui_virustotal_setup_modal_structure() {
    let app = AppState {
        modal: Modal::VirusTotalSetup {
            input: "test-api-key-12345".to_string(),
            cursor: 18,
        },
        ..Default::default()
    };
    let Modal::VirusTotalSetup { input, cursor } = app.modal else {
        panic!("Expected VirusTotalSetup modal");
    };
    assert_eq!(input, "test-api-key-12345");
    assert_eq!(cursor, 18);
}
#[test]
/// What: Test ``PreflightExec`` modal structure for integrated scan execution.
///
/// Inputs:
/// - ``PreflightExec`` modal plus a pending ``ExecutorRequest::Scan``.
///
/// Output:
/// - Both the modal fields and the executor request round-trip intact.
///
/// Details:
/// - Destructures modal and request with `let else` and checks each field.
fn ui_scan_preflight_exec_structure() {
    use pacsea::install::ExecutorRequest;
    use pacsea::state::{PackageItem, PreflightAction, PreflightTab, Source};
    let pkg = PackageItem {
        name: "test-pkg".to_string(),
        version: String::new(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![pkg],
            action: PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
            success: None,
        },
        pending_executor_request: Some(ExecutorRequest::Scan {
            package: "test-pkg".to_string(),
            do_clamav: true,
            do_trivy: true,
            do_semgrep: false,
            do_shellcheck: false,
            do_virustotal: false,
            do_custom: false,
            dry_run: false,
        }),
        ..Default::default()
    };
    let Modal::PreflightExec {
        items,
        action,
        tab,
        verbose,
        log_lines,
        abortable,
        ..
    } = app.modal
    else {
        panic!("Expected PreflightExec modal");
    };
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].name, "test-pkg");
    assert_eq!(action, PreflightAction::Install);
    assert_eq!(tab, PreflightTab::Summary);
    assert!(!verbose);
    assert!(log_lines.is_empty());
    assert!(!abortable);
    let Some(ExecutorRequest::Scan {
        package,
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        ..
    }) = app.pending_executor_request
    else {
        panic!("Expected Scan executor request");
    };
    assert_eq!(package, "test-pkg");
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(!do_semgrep);
    assert!(!do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
}
#[test]
/// What: Test modal transition from ``ScanConfig`` to ``PreflightExec`` for integrated scan.
///
/// Inputs:
/// - ``ScanConfig`` modal with only non-sleuth scanners enabled.
///
/// Output:
/// - Modal transitions to ``PreflightExec``.
/// - Executor request is created with ``ExecutorRequest::Scan``.
///
/// Details:
/// - Simulates the confirmation step of the integrated scan flow.
fn ui_scan_modal_transition() {
    use pacsea::install::ExecutorRequest;
    use pacsea::state::{PackageItem, PreflightAction, PreflightTab, Source};
    let pkg = PackageItem {
        name: "test-pkg".to_string(),
        version: String::new(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    let mut app = AppState {
        modal: Modal::ScanConfig {
            do_clamav: true,
            do_trivy: true,
            do_semgrep: false,
            do_shellcheck: false,
            do_virustotal: false,
            do_custom: false,
            do_sleuth: false,
            cursor: 0,
        },
        pending_install_names: Some(vec!["test-pkg".to_string()]),
        dry_run: false,
        ..Default::default()
    };
    // Simulate the user confirming the scan configuration: the modal switches
    // to PreflightExec and a Scan request is queued for the executor.
    app.modal = Modal::PreflightExec {
        items: vec![pkg],
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        success: None,
        header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
    };
    app.pending_executor_request = Some(ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: app.dry_run,
    });
    // Verify the transition landed in PreflightExec with a queued request.
    assert!(matches!(app.modal, Modal::PreflightExec { .. }));
    assert!(app.pending_executor_request.is_some());
    let Some(ExecutorRequest::Scan { .. }) = app.pending_executor_request else {
        panic!("Expected Scan executor request");
    };
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/scan/mod.rs | tests/scan/mod.rs | //! Integration and UI tests for security scan operations.
mod scan_integration;
mod scan_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/scan/scan_integration.rs | tests/scan/scan_integration.rs | //! Integration tests for security scan process.
//!
//! Tests cover:
//! - Scan configuration modal
//! - Scan command building
//! - Different scanner options
//! - Integrated scan process (``ExecutorRequest::Scan``)
//! - aur-sleuth terminal spawning
#![cfg(test)]
use pacsea::state::{AppState, Modal};
#[test]
/// What: Test `ScanConfig` modal state creation.
///
/// Inputs:
/// - `ScanConfig` modal with ClamAV and Trivy enabled, everything else off.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Destructures the modal with `let else` and verifies every field.
fn integration_scan_config_modal_state() {
    let app = AppState {
        modal: Modal::ScanConfig {
            do_clamav: true,
            do_trivy: true,
            do_semgrep: false,
            do_shellcheck: false,
            do_virustotal: false,
            do_custom: false,
            do_sleuth: false,
            cursor: 0,
        },
        ..Default::default()
    };
    let Modal::ScanConfig {
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        do_sleuth,
        cursor,
    } = app.modal
    else {
        panic!("Expected ScanConfig modal");
    };
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(!do_semgrep);
    assert!(!do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
    assert!(!do_sleuth);
    assert_eq!(cursor, 0);
}
#[test]
/// What: Test scan command structure.
///
/// Inputs:
/// - Package name and scan environment variable samples.
///
/// Output:
/// - Command structure is correct.
///
/// Details:
/// - Verifies scan command format.
/// - Note: Actual execution spawns terminal, so this tests command structure only.
fn integration_scan_command_structure() {
    // A scan command always includes the package name.
    let pkg = "test-pkg";
    assert!(!pkg.is_empty());
    // Every scan toggle is exported with a common prefix.
    let env_vars = [
        "PACSEA_SCAN_DO_CLAMAV=1",
        "PACSEA_SCAN_DO_TRIVY=1",
        "PACSEA_SCAN_DO_SEMGREP=0",
    ];
    assert!(
        env_vars
            .iter()
            .all(|var| var.starts_with("PACSEA_SCAN_DO_"))
    );
}
#[test]
/// What: Test scan configuration with all scanners enabled.
///
/// Inputs:
/// - `ScanConfig` modal with every scanner toggle set.
///
/// Output:
/// - All flags are correctly set.
///
/// Details:
/// - Verifies that all scan options can be enabled simultaneously.
fn integration_scan_all_scanners() {
    let app = AppState {
        modal: Modal::ScanConfig {
            do_clamav: true,
            do_trivy: true,
            do_semgrep: true,
            do_shellcheck: true,
            do_virustotal: true,
            do_custom: true,
            do_sleuth: true,
            cursor: 0,
        },
        ..Default::default()
    };
    let Modal::ScanConfig {
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        do_sleuth,
        ..
    } = app.modal
    else {
        panic!("Expected ScanConfig modal");
    };
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(do_semgrep);
    assert!(do_shellcheck);
    assert!(do_virustotal);
    assert!(do_custom);
    assert!(do_sleuth);
}
#[test]
/// What: Test scan configuration with no scanners enabled.
///
/// Inputs:
/// - `ScanConfig` modal with every scanner toggle cleared.
///
/// Output:
/// - All flags are correctly set to false.
///
/// Details:
/// - Verifies that scan options can all be disabled.
fn integration_scan_no_scanners() {
    let app = AppState {
        modal: Modal::ScanConfig {
            do_clamav: false,
            do_trivy: false,
            do_semgrep: false,
            do_shellcheck: false,
            do_virustotal: false,
            do_custom: false,
            do_sleuth: false,
            cursor: 0,
        },
        ..Default::default()
    };
    let Modal::ScanConfig {
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        do_sleuth,
        ..
    } = app.modal
    else {
        panic!("Expected ScanConfig modal");
    };
    assert!(!do_clamav);
    assert!(!do_trivy);
    assert!(!do_semgrep);
    assert!(!do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
    assert!(!do_sleuth);
}
#[test]
/// What: Test `VirusTotal` setup modal state.
///
/// Inputs:
/// - `VirusTotalSetup` modal with API key input.
///
/// Output:
/// - Modal state is correctly structured.
///
/// Details:
/// - Destructures the modal with `let else` instead of a `match`.
fn integration_virustotal_setup_modal_state() {
    let app = AppState {
        modal: Modal::VirusTotalSetup {
            input: "test-api-key".to_string(),
            cursor: 12,
        },
        ..Default::default()
    };
    let Modal::VirusTotalSetup { input, cursor } = app.modal else {
        panic!("Expected VirusTotalSetup modal");
    };
    assert_eq!(input, "test-api-key");
    assert_eq!(cursor, 12);
}
#[test]
/// What: Test integrated scan process with ``ExecutorRequest::Scan``.
///
/// Inputs:
/// - Security scan configuration with non-sleuth scanners enabled.
///
/// Output:
/// - ``ExecutorRequest::Scan`` is created for non-sleuth scans.
/// - ``PreflightExec`` modal is shown.
///
/// Details:
/// - Non-sleuth scans (``ClamAV``, Trivy, Semgrep, ``ShellCheck``, ``VirusTotal``, custom) use ``ExecutorRequest::Scan``.
/// - aur-sleuth runs in separate terminal simultaneously when enabled.
fn integration_scan_uses_executor_request() {
    use pacsea::install::ExecutorRequest;
    use pacsea::state::{PackageItem, PreflightAction, PreflightTab, Source};
    let mut app = AppState {
        pending_install_names: Some(vec!["test-pkg".to_string()]),
        dry_run: false,
        ..Default::default()
    };
    // Package item shown by the PreflightExec modal.
    let pkg = PackageItem {
        name: "test-pkg".to_string(),
        version: String::new(),
        description: String::new(),
        source: Source::Aur,
        popularity: None,
        out_of_date: None,
        orphaned: false,
    };
    // Simulate confirming a non-sleuth scan: show PreflightExec and queue the
    // integrated Scan request (ClamAV + Trivy only).
    app.modal = Modal::PreflightExec {
        items: vec![pkg],
        action: PreflightAction::Install,
        tab: PreflightTab::Summary,
        verbose: false,
        log_lines: vec![],
        abortable: false,
        success: None,
        header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
    };
    app.pending_executor_request = Some(ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: app.dry_run,
    });
    // Verify ExecutorRequest::Scan carries exactly the configured toggles.
    let Some(ExecutorRequest::Scan {
        package,
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        ..
    }) = app.pending_executor_request
    else {
        panic!("Expected Scan executor request");
    };
    assert_eq!(package, "test-pkg");
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(!do_semgrep);
    assert!(!do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
}
#[test]
/// What: Verify aur-sleuth is terminal-spawned while other scans use ``ExecutorRequest``.
///
/// Inputs:
/// - Scan configuration with both sleuth and non-sleuth scanners enabled.
///
/// Output:
/// - ``ExecutorRequest::Scan`` is created for the non-sleuth scanners.
/// - The aur-sleuth command is built for terminal spawning.
///
/// Details:
/// - Non-sleuth scans run via the integrated ``ExecutorRequest::Scan`` process.
/// - aur-sleuth is spawned via ``build_sleuth_command_for_terminal``.
#[cfg(not(target_os = "windows"))]
fn integration_scan_mixed_sleuth_and_integrated() {
    use pacsea::install::ExecutorRequest;
    let pkg = "test-pkg";
    let clamav_on = true;
    #[allow(clippy::no_effect_underscore_binding)]
    let _do_sleuth = true; // Sleuth enabled - should use terminal
    // Only the non-sleuth scanners go through the integrated request.
    let request = ExecutorRequest::Scan {
        package: pkg.to_string(),
        do_clamav: clamav_on,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: false,
    };
    let ExecutorRequest::Scan {
        package,
        do_clamav,
        ..
    } = request
    else {
        panic!("Expected Scan executor request");
    };
    assert_eq!(package, "test-pkg");
    assert!(do_clamav);
    // Note: do_sleuth is not part of ExecutorRequest::Scan;
    // aur-sleuth is handled separately via terminal spawning.
    // Structural check only; full command building is covered in spawn.rs.
    let sleuth_command = pacsea::install::build_sleuth_command_for_terminal(pkg);
    assert!(sleuth_command.contains(pkg));
}
#[test]
/// What: Scan request with `ClamAV` as the sole enabled scanner.
///
/// Inputs:
/// - `ScanConfig` enabling only `ClamAV`.
///
/// Output:
/// - `ExecutorRequest::Scan` where only `do_clamav` is true.
///
/// Details:
/// - Confirms a single scanner can be selected on its own.
fn integration_scan_clamav_only() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        dry_run: false,
        do_clamav: true,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
    };
    let ExecutorRequest::Scan {
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        ..
    } = request
    else {
        panic!("Expected ExecutorRequest::Scan");
    };
    assert!(do_clamav);
    assert!(!do_trivy);
    assert!(!do_semgrep);
    assert!(!do_shellcheck);
    assert!(!do_virustotal);
    assert!(!do_custom);
}
#[test]
/// What: Scan request with Trivy as the sole enabled scanner.
///
/// Inputs:
/// - `ScanConfig` enabling only Trivy.
///
/// Output:
/// - `ExecutorRequest::Scan` where only `do_trivy` is true.
///
/// Details:
/// - Confirms Trivy can be selected on its own.
fn integration_scan_trivy_only() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: false,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: false,
    };
    if let ExecutorRequest::Scan { do_trivy, .. } = request {
        assert!(do_trivy);
    } else {
        panic!("Expected ExecutorRequest::Scan");
    }
}
#[test]
/// What: Scan request with Semgrep as the sole enabled scanner.
///
/// Inputs:
/// - `ScanConfig` enabling only Semgrep.
///
/// Output:
/// - `ExecutorRequest::Scan` where only `do_semgrep` is true.
///
/// Details:
/// - Confirms Semgrep can be selected on its own.
fn integration_scan_semgrep_only() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: false,
        do_trivy: false,
        do_semgrep: true,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: false,
    };
    if let ExecutorRequest::Scan { do_semgrep, .. } = request {
        assert!(do_semgrep);
    } else {
        panic!("Expected ExecutorRequest::Scan");
    }
}
#[test]
/// What: Scan request with `ShellCheck` as the sole enabled scanner.
///
/// Inputs:
/// - `ScanConfig` enabling only `ShellCheck`.
///
/// Output:
/// - `ExecutorRequest::Scan` where only `do_shellcheck` is true.
///
/// Details:
/// - Confirms `ShellCheck` can be selected on its own.
fn integration_scan_shellcheck_only() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: false,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: true,
        do_virustotal: false,
        do_custom: false,
        dry_run: false,
    };
    if let ExecutorRequest::Scan { do_shellcheck, .. } = request {
        assert!(do_shellcheck);
    } else {
        panic!("Expected ExecutorRequest::Scan");
    }
}
#[test]
/// What: Scan request with `VirusTotal` as the sole enabled scanner.
///
/// Inputs:
/// - `ScanConfig` enabling only `VirusTotal`.
///
/// Output:
/// - `ExecutorRequest::Scan` where only `do_virustotal` is true.
///
/// Details:
/// - Confirms `VirusTotal` can be selected on its own.
fn integration_scan_virustotal_only() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: false,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: true,
        do_custom: false,
        dry_run: false,
    };
    if let ExecutorRequest::Scan { do_virustotal, .. } = request {
        assert!(do_virustotal);
    } else {
        panic!("Expected ExecutorRequest::Scan");
    }
}
#[test]
/// What: Scan request with the custom pattern scanner alone.
///
/// Inputs:
/// - `ScanConfig` enabling only the custom pattern scanner.
///
/// Output:
/// - `ExecutorRequest::Scan` where only `do_custom` is true.
///
/// Details:
/// - Confirms the custom pattern scanner can be selected on its own.
fn integration_scan_custom_only() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: false,
        do_trivy: false,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: true,
        dry_run: false,
    };
    if let ExecutorRequest::Scan { do_custom, .. } = request {
        assert!(do_custom);
    } else {
        panic!("Expected ExecutorRequest::Scan");
    }
}
#[test]
/// What: Scan request in dry-run mode.
///
/// Inputs:
/// - Scan with `dry_run` enabled.
///
/// Output:
/// - `ExecutorRequest::Scan` carries `dry_run=true`.
///
/// Details:
/// - Confirms the dry-run flag is propagated into scan requests.
fn integration_scan_dry_run() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        dry_run: true,
    };
    let ExecutorRequest::Scan { dry_run, .. } = request else {
        panic!("Expected ExecutorRequest::Scan");
    };
    assert!(dry_run);
}
#[test]
/// What: Track the cursor inside the `ScanConfig` modal.
///
/// Inputs:
/// - `ScanConfig` modal with the cursor on a specific option.
///
/// Output:
/// - Cursor position survives construction and destructuring.
///
/// Details:
/// - Confirms cursor navigation state within the scan config modal.
fn integration_scan_config_cursor_navigation() {
    let scan_modal = Modal::ScanConfig {
        cursor: 4, // On VirusTotal option
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        do_sleuth: false,
    };
    let app = AppState {
        modal: scan_modal,
        ..Default::default()
    };
    let Modal::ScanConfig { cursor, .. } = app.modal else {
        panic!("Expected ScanConfig modal");
    };
    assert_eq!(cursor, 4);
}
#[test]
/// What: Validate `VirusTotal` setup API key input handling.
///
/// Inputs:
/// - `VirusTotalSetup` modal populated with a full-length API key.
///
/// Output:
/// - Input text and cursor position are tracked correctly.
///
/// Details:
/// - Confirms the API key input round-trips through the modal.
fn integration_virustotal_setup_input_handling() {
    let key = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890";
    let app = AppState {
        modal: Modal::VirusTotalSetup {
            input: key.to_string(),
            cursor: key.len(),
        },
        ..Default::default()
    };
    let Modal::VirusTotalSetup { input, cursor } = app.modal else {
        panic!("Expected VirusTotalSetup modal");
    };
    // VT API keys are 64 chars; cursor sits at the end of the input.
    assert_eq!(input.len(), 64);
    assert_eq!(cursor, 64);
}
#[test]
/// What: Build the aur-sleuth terminal command.
///
/// Inputs:
/// - A package name destined for an aur-sleuth scan.
///
/// Output:
/// - The command string mentions the package.
///
/// Details:
/// - Structural check of the aur-sleuth terminal command.
#[cfg(not(target_os = "windows"))]
fn integration_sleuth_command_building() {
    let pkg = "test-aur-package";
    let cmd = pacsea::install::build_sleuth_command_for_terminal(pkg);
    assert!(cmd.contains(pkg));
    assert!(cmd.contains("aur-sleuth") || cmd.contains("sleuth"));
}
#[test]
/// What: Scan request combining several non-sleuth scanners.
///
/// Inputs:
/// - `ClamAV`, Trivy, `ShellCheck` and custom pattern enabled together.
///
/// Output:
/// - `ExecutorRequest::Scan` reflects every enabled scanner.
///
/// Details:
/// - Confirms scanners can be combined in a single request.
fn integration_scan_multiple_scanners() {
    use pacsea::install::ExecutorRequest;
    let request = ExecutorRequest::Scan {
        package: "test-pkg".to_string(),
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: true,
        do_virustotal: false,
        do_custom: true,
        dry_run: false,
    };
    let ExecutorRequest::Scan {
        do_clamav,
        do_trivy,
        do_semgrep,
        do_shellcheck,
        do_virustotal,
        do_custom,
        ..
    } = request
    else {
        panic!("Expected ExecutorRequest::Scan");
    };
    assert!(do_clamav);
    assert!(do_trivy);
    assert!(!do_semgrep);
    assert!(do_shellcheck);
    assert!(!do_virustotal);
    assert!(do_custom);
}
#[test]
/// What: Cancel the scan configuration modal.
///
/// Inputs:
/// - A `ScanConfig` modal that the user dismisses.
///
/// Output:
/// - The modal transitions to `None`.
///
/// Details:
/// - Simulates Escape being pressed on the scan config modal.
fn integration_scan_config_cancellation() {
    let scan_modal = Modal::ScanConfig {
        cursor: 0,
        do_clamav: true,
        do_trivy: true,
        do_semgrep: false,
        do_shellcheck: false,
        do_virustotal: false,
        do_custom: false,
        do_sleuth: false,
    };
    let mut app = AppState {
        modal: scan_modal,
        ..Default::default()
    };
    // Escape dismisses the modal outright.
    app.modal = Modal::None;
    assert!(matches!(app.modal, Modal::None));
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/file_sync/file_sync_ui.rs | tests/file_sync/file_sync_ui.rs | //! UI tests for file database sync modals.
//!
//! Tests cover:
//! - ``PasswordPrompt`` modal structure for ``FileSync`` purpose
//! - Modal state transitions
//! - ``PreflightExec`` modal for file sync execution
#![cfg(test)]
use pacsea::state::{AppState, Modal, PreflightTab, modal::PasswordPurpose};
#[test]
/// What: Structure of the ``PasswordPrompt`` modal for the ``FileSync`` purpose.
///
/// Inputs:
/// - ``PasswordPrompt`` modal created with the ``FileSync`` purpose.
///
/// Output:
/// - Every modal field has its expected initial value.
///
/// Details:
/// - Confirms the ``PasswordPrompt`` modal can carry the ``FileSync`` purpose.
fn ui_file_sync_password_prompt_structure() {
    let app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::FileSync,
            items: vec![],
            input: String::new(),
            cursor: 0,
            error: None,
        },
        pending_custom_command: Some("sudo pacman -Fy".to_string()),
        ..Default::default()
    };
    let Modal::PasswordPrompt {
        purpose,
        items,
        input,
        cursor,
        error,
    } = app.modal
    else {
        panic!("Expected PasswordPrompt modal");
    };
    assert_eq!(purpose, PasswordPurpose::FileSync);
    assert!(items.is_empty());
    assert!(input.is_empty());
    assert_eq!(cursor, 0);
    assert!(error.is_none());
    // The fallback command is staged alongside the prompt.
    assert_eq!(
        app.pending_custom_command,
        Some("sudo pacman -Fy".to_string())
    );
}
#[test]
/// What: Structure of the ``PreflightExec`` modal for file sync execution.
///
/// Inputs:
/// - ``PreflightExec`` modal as set up after password submission for file sync.
///
/// Output:
/// - Modal and executor request are structured as expected.
///
/// Details:
/// - Confirms the ``PreflightExec`` modal can drive a file sync run.
fn ui_file_sync_preflight_exec_structure() {
    let app = AppState {
        modal: Modal::PreflightExec {
            items: vec![],
            action: pacsea::state::PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips: pacsea::state::modal::PreflightHeaderChips::default(),
            success: None,
        },
        pending_executor_request: Some(pacsea::install::ExecutorRequest::CustomCommand {
            command: "sudo pacman -Fy".to_string(),
            password: Some("testpassword".to_string()),
            dry_run: false,
        }),
        ..Default::default()
    };
    let Modal::PreflightExec {
        items,
        action,
        tab,
        verbose,
        log_lines,
        abortable,
        ..
    } = app.modal
    else {
        panic!("Expected PreflightExec modal");
    };
    assert!(items.is_empty());
    assert_eq!(action, pacsea::state::PreflightAction::Install);
    assert_eq!(tab, PreflightTab::Summary);
    assert!(!verbose);
    assert!(log_lines.is_empty());
    assert!(!abortable);
    let Some(pacsea::install::ExecutorRequest::CustomCommand {
        command,
        password,
        dry_run,
    }) = app.pending_executor_request
    else {
        panic!("Expected CustomCommand executor request");
    };
    assert_eq!(command, "sudo pacman -Fy");
    assert_eq!(password, Some("testpassword".to_string()));
    assert!(!dry_run);
}
#[test]
/// What: Transition from ``PasswordPrompt`` to ``PreflightExec`` for file sync.
///
/// Inputs:
/// - ``PasswordPrompt`` modal (``FileSync`` purpose) with a password typed in.
///
/// Output:
/// - The modal becomes ``PreflightExec``.
/// - An executor request is queued.
///
/// Details:
/// - Re-enacts the modal state transition taken on password submission.
fn ui_file_sync_modal_transition() {
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::FileSync,
            items: vec![],
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_custom_command: Some("sudo pacman -Fy".to_string()),
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Password submission: a blank or missing prompt yields no password.
    let password = match app.modal {
        Modal::PasswordPrompt { ref input, .. } if !input.trim().is_empty() => {
            Some(input.clone())
        }
        _ => None,
    };
    let custom_cmd = app.pending_custom_command.take();
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    if let Some(command) = custom_cmd {
        app.modal = Modal::PreflightExec {
            items: vec![],
            action: pacsea::state::PreflightAction::Install,
            tab: PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips,
            success: None,
        };
        app.pending_executor_request = Some(pacsea::install::ExecutorRequest::CustomCommand {
            command,
            password,
            dry_run: app.dry_run,
        });
    }
    // The transition must land on PreflightExec with a queued request.
    assert!(matches!(app.modal, Modal::PreflightExec { .. }));
    assert!(app.pending_executor_request.is_some());
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/file_sync/file_sync_integration.rs | tests/file_sync/file_sync_integration.rs | //! Integration tests for file database sync fallback process.
//!
//! Tests cover:
//! - File database sync fallback flow
//! - Password prompt for file sync
//! - Executor request handling for `sudo pacman -Fy`
//! - Modal transitions
#![cfg(test)]
use pacsea::install::ExecutorRequest;
use pacsea::state::{AppState, Modal, modal::PasswordPurpose};
use std::sync::{Arc, Mutex};
/// What: Failed file database sync triggers the password prompt fallback.
///
/// Inputs:
/// - Preflight modal on the Files tab
/// - A file sync result reporting failure
///
/// Output:
/// - ``PasswordPrompt`` modal with the ``FileSync`` purpose is shown
/// - The fallback command is staged for execution
///
/// Details:
/// - Confirms a sync error routes into the password prompt flow
#[test]
fn integration_file_sync_fallback_password_prompt() {
    let mut app = AppState::default();
    // Seed a failed sync result into the shared slot.
    let failure: Arc<Mutex<Option<Result<bool, String>>>> =
        Arc::new(Mutex::new(Some(Err("Permission denied".to_string()))));
    app.pending_file_sync_result = Some(failure);
    // Mirror the tick handler: drain the slot and react to the error.
    if let Some(slot) = app.pending_file_sync_result.take() {
        if let Ok(mut guard) = slot.lock() {
            if let Some(Err(_why)) = guard.take() {
                app.modal = Modal::PasswordPrompt {
                    purpose: PasswordPurpose::FileSync,
                    items: vec![],
                    input: String::new(),
                    cursor: 0,
                    error: None,
                };
                app.pending_custom_command = Some("sudo pacman -Fy".to_string());
                app.pending_exec_header_chips =
                    Some(pacsea::state::modal::PreflightHeaderChips::default());
            }
        }
    }
    // The fallback must have opened the FileSync password prompt.
    let Modal::PasswordPrompt { purpose, items, .. } = app.modal else {
        panic!("Expected PasswordPrompt modal with FileSync purpose");
    };
    assert_eq!(purpose, PasswordPurpose::FileSync);
    assert!(items.is_empty());
    // And staged the privileged sync command.
    assert_eq!(
        app.pending_custom_command,
        Some("sudo pacman -Fy".to_string())
    );
}
#[test]
/// What: Successful file database sync flow.
///
/// Inputs:
/// - Preflight modal on the Files tab
/// - A file sync result reporting success
///
/// Output:
/// - A toast message is shown
/// - No password prompt appears
///
/// Details:
/// - Confirms a successful sync surfaces only a toast
fn integration_file_sync_success() {
    let mut app = AppState::default();
    // Seed a successful sync result into the shared slot.
    let success: Arc<Mutex<Option<Result<bool, String>>>> =
        Arc::new(Mutex::new(Some(Ok(true))));
    app.pending_file_sync_result = Some(success);
    // Mirror the tick handler: drain the slot and surface a toast.
    if let Some(slot) = app.pending_file_sync_result.take()
        && let Ok(mut guard) = slot.lock()
        && let Some(Ok(synced)) = guard.take()
    {
        let message = if synced {
            "File database sync completed successfully".to_string()
        } else {
            "File database is already fresh".to_string()
        };
        app.toast_message = Some(message);
        app.toast_expires_at = Some(std::time::Instant::now() + std::time::Duration::from_secs(3));
    }
    // A toast mentioning the sync must be present...
    assert!(app.toast_message.is_some());
    if let Some(ref msg) = app.toast_message {
        assert!(msg.contains("sync"));
    }
    // ...and no password prompt was raised.
    assert!(!matches!(app.modal, Modal::PasswordPrompt { .. }));
}
#[test]
/// What: Executor request produced for a file database sync.
///
/// Inputs:
/// - ``PasswordPrompt`` modal with the ``FileSync`` purpose
/// - A staged custom command
///
/// Output:
/// - ``ExecutorRequest::CustomCommand`` carrying the expected command
///
/// Details:
/// - Confirms file sync submission yields the correct executor request
fn integration_file_sync_executor_request() {
    let mut app = AppState {
        modal: Modal::PasswordPrompt {
            purpose: PasswordPurpose::FileSync,
            items: vec![],
            input: "testpassword".to_string(),
            cursor: 12,
            error: None,
        },
        pending_custom_command: Some("sudo pacman -Fy".to_string()),
        pending_exec_header_chips: Some(pacsea::state::modal::PreflightHeaderChips::default()),
        ..Default::default()
    };
    // Extract the password exactly as the submit handler would.
    let password = if app.modal.is_password_prompt()
        && let Modal::PasswordPrompt { ref input, .. } = app.modal
    {
        (!input.trim().is_empty()).then(|| input.clone())
    } else {
        None
    };
    let custom_cmd = app.pending_custom_command.take();
    let header_chips = app.pending_exec_header_chips.take().unwrap_or_default();
    if let Some(command) = custom_cmd {
        app.modal = Modal::PreflightExec {
            items: vec![],
            action: pacsea::state::PreflightAction::Install,
            tab: pacsea::state::PreflightTab::Summary,
            verbose: false,
            log_lines: vec![],
            abortable: false,
            header_chips,
            success: None,
        };
        let executor_request = ExecutorRequest::CustomCommand {
            command,
            password,
            dry_run: app.dry_run,
        };
        // The request must carry the staged command and typed password.
        let ExecutorRequest::CustomCommand {
            command: cmd,
            password: pwd,
            ..
        } = executor_request
        else {
            panic!("Expected CustomCommand executor request");
        };
        assert_eq!(cmd, "sudo pacman -Fy");
        assert_eq!(pwd, Some("testpassword".to_string()));
    }
}
// Helper trait for testing
/// Test-only convenience for inspecting the active modal variant.
trait ModalTestHelper {
    /// Returns `true` when `self` is a `PasswordPrompt` modal, regardless of its fields.
    fn is_password_prompt(&self) -> bool;
}
impl ModalTestHelper for Modal {
    fn is_password_prompt(&self) -> bool {
        matches!(self, Self::PasswordPrompt { .. })
    }
}
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Firstp1ck/Pacsea | https://github.com/Firstp1ck/Pacsea/blob/c433ad6a837b7985d8b99ba9afd8f07a93d046f4/tests/file_sync/mod.rs | tests/file_sync/mod.rs | //! Integration and UI tests for file database sync operations.
mod file_sync_integration;
mod file_sync_ui;
| rust | MIT | c433ad6a837b7985d8b99ba9afd8f07a93d046f4 | 2026-01-04T20:14:32.225407Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.