repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/graph.rs | crates/resolver/src/graph.rs | //! Dependency graph types and operations
use sps2_types::{Version, VersionSpec};
use std::fmt;
use std::path::PathBuf;
/// Package identifier (name + version)
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PackageId {
    pub name: String,
    pub version: Version,
}
impl PackageId {
    /// Build an identifier from a package name plus its resolved version.
    #[must_use]
    pub fn new(name: String, version: Version) -> Self {
        PackageId { name, version }
    }
}
impl fmt::Display for PackageId {
    /// Render as `name-version`, e.g. `foo-1.2.3`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{name}-{version}", name = self.name, version = self.version)
    }
}
/// Dependency kind
///
/// Distinguishes how an edge participates in resolution; filtered on by
/// `ResolvedNode::runtime_deps` / `build_deps`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DepKind {
    /// Build-time dependency
    Build,
    /// Runtime dependency
    Runtime,
}
/// Action to take for a resolved node
///
/// Determines which of `ResolvedNode::url` / `ResolvedNode::path` is
/// populated for the node.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum NodeAction {
    /// Download binary package from repository
    Download,
    /// Use local package file
    Local,
}
/// Dependency edge in the resolution graph
///
/// An unresolved requirement: a target package name plus the version
/// constraint that any chosen version must satisfy.
#[derive(Clone, Debug)]
pub struct DepEdge {
    /// Package name
    pub name: String,
    /// Version specification
    pub spec: VersionSpec,
    /// Dependency kind
    pub kind: DepKind,
}
impl DepEdge {
    /// Create new dependency edge
    #[must_use]
    pub fn new(name: String, spec: VersionSpec, kind: DepKind) -> Self {
        Self { name, spec, kind }
    }
    /// Check if a version satisfies this edge
    ///
    /// Thin delegation to `VersionSpec::matches`.
    #[must_use]
    pub fn satisfies(&self, version: &Version) -> bool {
        self.spec.matches(version)
    }
}
/// Resolved dependency node
///
/// A package pinned to a concrete version, together with how to obtain it
/// (download vs. local file) and its outgoing dependency edges.
#[derive(Clone, Debug)]
pub struct ResolvedNode {
    /// Package name
    pub name: String,
    /// Resolved version
    pub version: Version,
    /// Action to perform
    pub action: NodeAction,
    /// Dependencies of this package
    pub deps: Vec<DepEdge>,
    /// Download URL (for Download action)
    pub url: Option<String>,
    /// Local file path (for Local action)
    pub path: Option<PathBuf>,
    /// Signature URL for detached signature (if remote)
    pub signature_url: Option<String>,
    /// Expected BLAKE3 hash for integrity verification (if remote)
    pub expected_hash: Option<sps2_hash::Hash>,
}
impl ResolvedNode {
    /// Create new resolved node for download
    ///
    /// `signature_url` and `expected_hash` start out `None`; presumably they
    /// are filled in later by the resolver — the setters are not visible in
    /// this file (TODO confirm).
    #[must_use]
    pub fn download(name: String, version: Version, url: String, deps: Vec<DepEdge>) -> Self {
        Self {
            name,
            version,
            action: NodeAction::Download,
            deps,
            url: Some(url),
            path: None,
            signature_url: None,
            expected_hash: None,
        }
    }
    /// Create new resolved node for local file
    ///
    /// Local packages carry no URL, signature, or expected hash.
    #[must_use]
    pub fn local(name: String, version: Version, path: PathBuf, deps: Vec<DepEdge>) -> Self {
        Self {
            name,
            version,
            action: NodeAction::Local,
            deps,
            url: None,
            path: Some(path),
            signature_url: None,
            expected_hash: None,
        }
    }
    /// Get package ID
    ///
    /// Clones `name` and `version` into a fresh `PackageId`.
    #[must_use]
    pub fn package_id(&self) -> PackageId {
        PackageId::new(self.name.clone(), self.version.clone())
    }
    /// Get runtime dependencies
    pub fn runtime_deps(&self) -> impl Iterator<Item = &DepEdge> {
        self.deps
            .iter()
            .filter(|edge| edge.kind == DepKind::Runtime)
    }
    /// Get build dependencies
    pub fn build_deps(&self) -> impl Iterator<Item = &DepEdge> {
        self.deps.iter().filter(|edge| edge.kind == DepKind::Build)
    }
}
/// Dependency graph
///
/// Directed graph over resolved packages. An edge `from -> to` in `edges`
/// records `to` as a dependency listed under `from` (see `add_edge`).
#[derive(Clone, Debug)]
pub struct DependencyGraph {
    /// Resolved nodes indexed by package ID
    pub nodes: std::collections::HashMap<PackageId, ResolvedNode>,
    /// Adjacency list (package -> dependencies)
    pub edges: std::collections::HashMap<PackageId, Vec<PackageId>>,
}
impl DependencyGraph {
    /// Create new empty graph
    #[must_use]
    pub fn new() -> Self {
        Self {
            nodes: std::collections::HashMap::new(),
            edges: std::collections::HashMap::new(),
        }
    }
    /// Add node to graph
    ///
    /// Also creates an empty adjacency entry so the node participates in
    /// traversals even before any edges are added.
    pub fn add_node(&mut self, node: ResolvedNode) {
        let id = node.package_id();
        self.nodes.insert(id.clone(), node);
        self.edges.entry(id).or_default();
    }
    /// Add edge between two packages (`from` -> `to`)
    ///
    /// Edges are not de-duplicated; inserting the same pair twice yields a
    /// duplicate adjacency entry.
    pub fn add_edge(&mut self, from: &PackageId, to: &PackageId) {
        self.edges.entry(from.clone()).or_default().push(to.clone());
    }
    /// Check for cycles using DFS
    #[must_use]
    pub fn has_cycles(&self) -> bool {
        use std::collections::HashSet;
        let mut visited = HashSet::new();
        let mut rec_stack = HashSet::new();
        for node_id in self.nodes.keys() {
            if !visited.contains(node_id)
                && self.has_cycle_util(node_id, &mut visited, &mut rec_stack)
            {
                return true;
            }
        }
        false
    }
    /// DFS helper: returns `true` if a back edge (a neighbour already on the
    /// recursion stack) is reachable from `node_id`.
    fn has_cycle_util(
        &self,
        node_id: &PackageId,
        visited: &mut std::collections::HashSet<PackageId>,
        rec_stack: &mut std::collections::HashSet<PackageId>,
    ) -> bool {
        visited.insert(node_id.clone());
        rec_stack.insert(node_id.clone());
        if let Some(dependencies) = self.edges.get(node_id) {
            for dep in dependencies {
                if !visited.contains(dep) && self.has_cycle_util(dep, visited, rec_stack) {
                    return true;
                }
                if rec_stack.contains(dep) {
                    return true;
                }
            }
        }
        rec_stack.remove(node_id);
        false
    }
    /// Perform topological sort using Kahn's algorithm
    ///
    /// Nodes with no incoming edges (nothing depends on them) are emitted
    /// first.
    ///
    /// # Errors
    ///
    /// Returns an error if a dependency cycle is detected.
    pub fn topological_sort(&self) -> Result<Vec<PackageId>, sps2_errors::Error> {
        use std::collections::{HashMap, VecDeque};
        if self.has_cycles() {
            return Err(sps2_errors::PackageError::DependencyCycle {
                package: "unknown".to_string(),
            }
            .into());
        }
        // Calculate in-degrees (number of incoming edges per node).
        let mut in_degree: HashMap<PackageId, usize> = HashMap::new();
        for node_id in self.nodes.keys() {
            in_degree.insert(node_id.clone(), 0);
        }
        for dependencies in self.edges.values() {
            for dep in dependencies {
                *in_degree.entry(dep.clone()).or_insert(0) += 1;
            }
        }
        // Seed the queue with nodes that have no incoming edges.
        // FIX: this closure previously read `|(id, °ree)|` — mojibake of
        // `&degree` (the `&deg` prefix was decoded as the HTML entity for
        // `°`), which does not compile. Restored the `&degree` pattern.
        let mut queue: VecDeque<PackageId> = in_degree
            .iter()
            .filter_map(|(id, &degree)| if degree == 0 { Some(id.clone()) } else { None })
            .collect();
        let mut result = Vec::new();
        while let Some(node_id) = queue.pop_front() {
            result.push(node_id.clone());
            if let Some(dependencies) = self.edges.get(&node_id) {
                for dep in dependencies {
                    if let Some(degree) = in_degree.get_mut(dep) {
                        *degree -= 1;
                        if *degree == 0 {
                            queue.push_back(dep.clone());
                        }
                    }
                }
            }
        }
        // Defensive: if not every node was emitted, something (a cycle or an
        // edge to an unknown node) kept an in-degree above zero.
        if result.len() != self.nodes.len() {
            return Err(sps2_errors::PackageError::DependencyCycle {
                package: "unknown".to_string(),
            }
            .into());
        }
        Ok(result)
    }
}
impl Default for DependencyGraph {
    /// Delegates to [`DependencyGraph::new`] (empty graph).
    fn default() -> Self {
        Self::new()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/execution.rs | crates/resolver/src/execution.rs | // src/execution.rs
//! Planning and metadata for *parallel* package installation/build.
//!
//! Public API is **unchanged**, but the internals are optimised
use crate::{graph::DependencyGraph, NodeAction, PackageId};
use std::collections::{HashMap, VecDeque};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
/// Per-node execution metadata.
///
/// The struct is `Arc`-wrapped inside the [`ExecutionPlan`]; cloning the `Arc`
/// is cheap and thread-safe.
#[derive(Debug)]
pub struct NodeMeta {
    /// Action that the installer/runner must perform.
    pub action: NodeAction,
    /// *Remaining* unsatisfied dependencies.
    /// Once this reaches 0 the package is runnable.
    /// Atomic so multiple workers may decrement through a shared `Arc`.
    in_degree: AtomicUsize,
    /// Packages that depend on this one (reverse edges).
    ///
    /// This field is kept `pub` for backwards-compatibility with the
    /// `sps2-install` crate. New code should prefer the
    /// [`ExecutionPlan::complete_package`] API.
    pub parents: Vec<PackageId>,
}
impl NodeMeta {
    /// New metadata with a fixed initial in-degree.
    #[inline]
    #[must_use]
    pub fn new(action: NodeAction, in_degree: usize) -> Self {
        Self {
            action,
            in_degree: AtomicUsize::new(in_degree),
            parents: Vec::new(),
        }
    }
    /// Thread-safe decrement; returns the **updated** in-degree (never under-flows).
    ///
    /// If the counter is already 0 the call is a no-op and 0 is returned.
    #[inline]
    #[must_use]
    pub fn decrement_in_degree(&self) -> usize {
        // `fetch_update` loops internally until CAS succeeds.
        // On success it yields the *previous* value, hence the
        // `saturating_sub(1)` to report the new count; when the counter is
        // already 0 the closure returns `None`, `fetch_update` yields `Err`,
        // and we report 0 without touching the counter.
        self.in_degree
            .fetch_update(Ordering::AcqRel, Ordering::Acquire, |current| {
                (current != 0).then(|| current - 1)
            })
            .map(|prev| prev.saturating_sub(1))
            .unwrap_or(0)
    }
    /// Current unsatisfied dependency count.
    #[inline]
    #[must_use]
    pub fn in_degree(&self) -> usize {
        self.in_degree.load(Ordering::Acquire)
    }
    /// Register `parent` as a reverse-edge.
    /// Requires exclusive access; only possible before the `Arc` is shared.
    #[inline]
    pub fn add_parent(&mut self, parent: PackageId) {
        self.parents.push(parent);
    }
    /// Immutable view of reverse-edges.
    #[inline]
    #[must_use]
    pub fn parents(&self) -> &[PackageId] {
        &self.parents
    }
}
/// Immutable execution plan – produced **once** after resolution
/// and consumed concurrently by the installer/runner.
#[derive(Clone, Debug)]
pub struct ExecutionPlan {
    /// Kahn layers: packages within one inner `Vec` are mutually independent.
    batches: Vec<Vec<PackageId>>,
    /// Shared per-package metadata; `Arc` lets workers decrement counters
    /// without cloning the map.
    metadata: HashMap<PackageId, Arc<NodeMeta>>,
}
impl ExecutionPlan {
    // ---------------------------------------------------------------------
    // Construction
    // ---------------------------------------------------------------------
    /// Build a plan from an already topologically-sorted list (`sorted`) and
    /// its originating dependency graph.
    ///
    /// # Panics
    ///
    /// Panics if `graph` does not contain every [`PackageId`] present in
    /// `sorted` (the resolver guarantees this invariant).
    #[must_use]
    pub fn from_sorted_packages(sorted: &[PackageId], graph: &DependencyGraph) -> Self {
        let mut metadata: HashMap<PackageId, Arc<NodeMeta>> = HashMap::with_capacity(sorted.len());
        let mut in_degree: HashMap<&PackageId, usize> = HashMap::with_capacity(sorted.len());
        // 1) Pre-compute in-degrees in O(e)
        for id in sorted {
            in_degree.insert(id, 0);
        }
        for tos in graph.edges.values() {
            for to in tos {
                if let Some(slot) = in_degree.get_mut(to) {
                    *slot += 1;
                }
            }
        }
        // 2) Create NodeMeta and reverse edges
        for id in sorted {
            let action = graph
                .nodes
                .get(id)
                .map(|n| n.action.clone())
                .expect("package missing from graph");
            let meta = Arc::new(NodeMeta::new(
                action,
                *in_degree.get(id).expect("key present"),
            ));
            metadata.insert(id.clone(), meta);
        }
        // NOTE(review): for an edge `from -> to` this registers `to` as a
        // "parent" of `from`, i.e. `parents` holds the *targets* of forward
        // edges — apparently inverted relative to the field's "packages that
        // depend on this one" doc. Confirm the intended orientation against
        // how the installer drives `complete_package`.
        // Also: `Arc::get_mut` would silently skip the insert if the Arc had
        // been cloned; at this point each Arc is still unique, so it succeeds.
        for (from, tos) in &graph.edges {
            for to in tos {
                if let Some(meta) = metadata.get_mut(from).and_then(Arc::get_mut) {
                    meta.add_parent(to.clone());
                }
            }
        }
        // 3) Kahn layering to build parallel batches in O(n + e)
        let mut queue: VecDeque<&PackageId> = in_degree
            .iter()
            .filter(|(_, &d)| d == 0)
            .map(|(id, _)| *id)
            .collect();
        let mut batches: Vec<Vec<PackageId>> = Vec::new();
        let mut remaining = in_degree.len();
        while remaining > 0 {
            let mut batch: Vec<PackageId> = Vec::with_capacity(queue.len());
            for _ in 0..queue.len() {
                let id = queue.pop_front().expect("queue not empty");
                batch.push(id.clone());
                remaining -= 1;
                // Decrement children
                // NOTE(review): layering drains every NodeMeta counter to 0
                // by the time construction finishes, so `initial_ready` /
                // `is_complete` / `completed_count` afterwards observe the
                // drained counters, not the original in-degrees — verify
                // against how the installer seeds execution.
                if let Some(children) = graph.edges.get(id) {
                    for child in children {
                        let child_meta = metadata
                            .get(child)
                            .expect("child in metadata; resolver invariant");
                        if child_meta.decrement_in_degree() == 0 {
                            queue.push_back(child);
                        }
                    }
                }
            }
            batches.push(batch);
        }
        Self { batches, metadata }
    }
    // ---------------------------------------------------------------------
    // Inspection helpers
    // ---------------------------------------------------------------------
    /// Layered batches; inside each slice packages are independent.
    #[inline]
    #[must_use]
    pub fn batches(&self) -> &[Vec<PackageId>] {
        &self.batches
    }
    /// Per-package metadata (constant during execution).
    #[inline]
    #[must_use]
    pub fn metadata(&self, id: &PackageId) -> Option<&Arc<NodeMeta>> {
        self.metadata.get(id)
    }
    /// All packages whose `in_degree == 0` **at plan creation time**.
    /// NOTE(review): see the drain note in `from_sorted_packages` — by the
    /// time the plan is returned, every counter reads 0.
    #[inline]
    #[must_use]
    pub fn initial_ready(&self) -> Vec<PackageId> {
        self.metadata
            .iter()
            .filter(|(_, m)| m.in_degree() == 0)
            .map(|(id, _)| id.clone())
            .collect()
    }
    /// Legacy alias used by the installer: forwards to [`Self::initial_ready`].
    #[inline]
    #[must_use]
    pub fn ready_packages(&self) -> Vec<PackageId> {
        self.initial_ready()
    }
    /// Mark `finished` as completed and return **newly** unblocked packages.
    ///
    /// # Panics
    ///
    /// Panics if
    /// 1. `finished` is unknown to this [`ExecutionPlan`] (violates resolver
    ///    invariant), **or**
    /// 2. any parent package listed in `NodeMeta::parents` cannot be found in
    ///    the plan’s metadata map (also a resolver invariant).
    #[inline]
    #[must_use]
    pub fn complete_package(&self, finished: &PackageId) -> Vec<PackageId> {
        let meta = self
            .metadata
            .get(finished)
            .expect("completed package known to plan");
        meta.parents
            .iter()
            .filter_map(|parent| {
                let parent_meta = self
                    .metadata
                    .get(parent)
                    .expect("parent package known to plan");
                // decrement_in_degree saturates at 0, so a parent whose
                // counter was already 0 is also reported here.
                (parent_meta.decrement_in_degree() == 0).then(|| parent.clone())
            })
            .collect()
    }
    // ---------------------------------------------------------------------
    // Convenience metrics
    // ---------------------------------------------------------------------
    /// Total number of packages in the plan.
    #[inline]
    #[must_use]
    pub fn package_count(&self) -> usize {
        self.metadata.len()
    }
    /// `true` when every package's remaining in-degree is 0.
    #[inline]
    #[must_use]
    pub fn is_complete(&self) -> bool {
        self.metadata.values().all(|m| m.in_degree() == 0)
    }
    /// Number of packages whose remaining in-degree is 0.
    #[inline]
    #[must_use]
    pub fn completed_count(&self) -> usize {
        self.metadata
            .values()
            .filter(|m| m.in_degree() == 0)
            .count()
    }
}
// -------------------------------------------------------------------------
// Stats helper (unchanged public fields, lint-clean implementation)
// -------------------------------------------------------------------------
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/sat/solver.rs | crates/resolver/src/sat/solver.rs | //! DPLL-based SAT solver with CDCL optimizations
use super::conflict_analysis::VariableActivity;
use super::types::TruthValue;
use super::{
Assignment, Clause, ClauseRef, ConflictAnalysis, ConflictExplanation, DependencyProblem,
Literal, Variable,
};
use crate::sat::clause::clause_ref;
use sps2_errors::{Error, PackageError};
use std::collections::{HashMap, HashSet, VecDeque};
/// SAT solver using DPLL with conflict-driven clause learning
#[derive(Debug)]
pub struct SatSolver {
    /// Original clauses
    clauses: Vec<ClauseRef>,
    /// Learned clauses from conflicts
    learned_clauses: Vec<ClauseRef>,
    /// Current assignment
    assignment: Assignment,
    /// Propagation queue
    /// Literals whose implications have not yet been propagated.
    propagation_queue: VecDeque<Literal>,
    /// Watch lists for two-watched literal scheme
    /// Maps each literal to clauses watching it
    /// (keyed by the *negation* of the watched literal; see `add_clause`).
    watch_lists: HashMap<Literal, Vec<ClauseRef>>,
    /// Conflict analysis
    conflict_analysis: ConflictAnalysis,
    /// Variable activity for VSIDS heuristic
    variable_activity: VariableActivity,
    /// All variables in the problem
    variables: HashSet<Variable>,
    /// Decision stack for backtracking
    decisions: Vec<(Variable, bool)>,
    /// Statistics
    stats: SolverStats,
}
/// Solver statistics
///
/// Monotonic counters updated during `solve`; purely diagnostic.
#[derive(Debug, Default)]
struct SolverStats {
    decisions: u64,
    propagations: u64,
    conflicts: u64,
    learned_clauses: u64,
    restarts: u64,
}
impl SatSolver {
    /// Create new SAT solver
    #[must_use]
    pub fn new() -> Self {
        Self {
            clauses: Vec::new(),
            learned_clauses: Vec::new(),
            assignment: Assignment::new(),
            propagation_queue: VecDeque::new(),
            watch_lists: HashMap::new(),
            conflict_analysis: ConflictAnalysis::new(),
            // 0.95 is the VSIDS decay factor.
            variable_activity: VariableActivity::new(0.95),
            variables: HashSet::new(),
            decisions: Vec::new(),
            stats: SolverStats::default(),
        }
    }
    /// Create new SAT solver with variable map for version preference
    #[must_use]
    pub fn with_variable_map(variable_map: &super::VariableMap) -> Self {
        let mut solver = Self::new();
        solver.set_version_preference(variable_map);
        solver
    }
    /// Set version preference based on variable map
    ///
    /// Pre-loads VSIDS activity so the decision heuristic prefers newer
    /// versions before any conflict data exists.
    pub fn set_version_preference(&mut self, variable_map: &super::VariableMap) {
        // Boost activity for higher versions to prefer them in decisions
        for package_name in variable_map.all_packages() {
            let mut versions: Vec<_> = variable_map
                .get_package_versions(package_name)
                .into_iter()
                .filter_map(|pv| variable_map.get_variable(pv).map(|var| (pv, var)))
                .collect();
            // Sort by version descending
            versions.sort_by(|(pv1, _), (pv2, _)| pv2.version.cmp(&pv1.version));
            // Give higher activity to higher versions
            for (i, (_, var)) in versions.iter().enumerate() {
                // Boost activity multiple times to give preference to higher versions
                let boost_count = (versions.len() - i) * 10;
                for _ in 0..boost_count {
                    self.variable_activity.bump_variable(*var);
                }
            }
        }
    }
    /// Add a clause to the solver
    pub fn add_clause(&mut self, clause: Clause) {
        // Simplify clause
        let Some(simplified) = clause.simplify() else {
            return; // Tautology, skip
        };
        if simplified.is_empty() {
            // Empty clause means unsatisfiable
            // We'll handle this in solve()
        }
        // Extract variables
        for &lit in simplified.literals() {
            self.variables.insert(lit.variable());
        }
        let clause_ref = clause_ref(simplified);
        // Initialize watched literals
        if clause_ref.len() >= 2 {
            // Watch first two literals - we watch for when they become false
            let lit1 = clause_ref.literals()[0];
            let lit2 = clause_ref.literals()[1];
            // Add clause to watch lists for the NEGATION of these literals
            // When lit1 becomes false (i.e., ¬lit1 becomes true), we need to update watches
            self.watch_lists
                .entry(lit1.negate())
                .or_default()
                .push(clause_ref.clone());
            self.watch_lists
                .entry(lit2.negate())
                .or_default()
                .push(clause_ref.clone());
        } else if clause_ref.len() == 1 {
            // Unit clause - add to propagation queue
            self.propagation_queue.push_back(clause_ref.literals()[0]);
        }
        self.clauses.push(clause_ref);
    }
    /// Solve the SAT problem
    ///
    /// # Errors
    ///
    /// Returns a `DependencyConflict` error when the formula is
    /// unsatisfiable (empty clause present, or a conflict at decision
    /// level 0).
    pub fn solve(&mut self) -> Result<Assignment, Error> {
        // Check for empty clauses (immediate UNSAT)
        if self.clauses.iter().any(|c| c.is_empty()) {
            return Err(PackageError::DependencyConflict {
                message: "Unsatisfiable constraints detected".to_string(),
            }
            .into());
        }
        // Main DPLL loop
        loop {
            // Unit propagation
            match self.propagate() {
                PropagationResult::Conflict(conflict_clause) => {
                    self.stats.conflicts += 1;
                    // Analyze conflict
                    self.conflict_analysis.set_conflict(conflict_clause);
                    if let Some((learned_clause, backtrack_level)) =
                        self.conflict_analysis.analyze_conflict(&self.assignment)
                    {
                        // Learn clause
                        self.learn_clause(learned_clause);
                        // Backtrack
                        self.backtrack_to(backtrack_level);
                    } else {
                        // Conflict at level 0 - UNSAT
                        return Err(PackageError::DependencyConflict {
                            message: "No valid package selection exists".to_string(),
                        }
                        .into());
                    }
                }
                PropagationResult::Ok => {
                    // Check if all variables are assigned
                    if self.all_variables_assigned() {
                        return Ok(self.assignment.clone());
                    }
                    // Make a decision
                    if let Some((var, value)) = self.decide() {
                        self.stats.decisions += 1;
                        self.decisions.push((var, value));
                        self.assignment
                            .assign(var, value, self.assignment.current_level() + 1);
                        let lit = if value {
                            Literal::positive(var)
                        } else {
                            Literal::negative(var)
                        };
                        self.propagation_queue.push_back(lit);
                    } else {
                        // No unassigned variables but not all assigned? Shouldn't happen
                        return Ok(self.assignment.clone());
                    }
                }
            }
            // Restart heuristic (every 100 conflicts)
            // NOTE(review): this check runs on *every* loop iteration, so
            // while `conflicts` sits at a multiple of 100 the solver restarts
            // each iteration until a new conflict bumps the counter — confirm
            // this cannot livelock (a "conflicts since last restart" counter
            // would be the usual formulation).
            if self.stats.conflicts > 0 && self.stats.conflicts.is_multiple_of(100) {
                self.restart();
                self.stats.restarts += 1;
            }
        }
    }
    /// Unit propagation with two-watched literals
    fn propagate(&mut self) -> PropagationResult {
        while let Some(lit) = self.propagation_queue.pop_front() {
            self.stats.propagations += 1;
            // First, check if this literal is already assigned
            let current_value = self.assignment.eval_literal(lit);
            if current_value.is_false() {
                // Conflict: trying to assign a literal that's already false
                // Find the unit clause that contains this literal for conflict analysis
                for clause in &self.clauses {
                    if clause.is_unit() && clause.literals()[0] == lit {
                        return PropagationResult::Conflict(clause.clone());
                    }
                }
                // If we can't find the clause, create a dummy one
                let dummy_clause = clause_ref(Clause::unit(lit));
                return PropagationResult::Conflict(dummy_clause);
            } else if current_value == TruthValue::Unassigned {
                // Assign the literal
                let var = lit.variable();
                let assign_value = lit.is_positive();
                self.assignment
                    .assign(var, assign_value, self.assignment.current_level());
            }
            // When literal L becomes true, we check clauses watching L
            // because we watch for when literals become false
            // (the list is cloned because `update_watches` mutates the
            // watch lists while we iterate).
            if let Some(watching) = self.watch_lists.get(&lit).cloned() {
                for clause_ref in watching {
                    match self.update_watches(&clause_ref, lit) {
                        WatchResult::Conflict => {
                            self.propagation_queue.clear();
                            return PropagationResult::Conflict(clause_ref);
                        }
                        WatchResult::Unit(unit_lit) => {
                            // Check if already assigned
                            let value = self.assignment.eval_literal(unit_lit);
                            if value.is_false() {
                                // Conflict
                                self.propagation_queue.clear();
                                return PropagationResult::Conflict(clause_ref);
                            } else if value == TruthValue::Unassigned {
                                // Propagate
                                let var = unit_lit.variable();
                                let assign_value = unit_lit.is_positive();
                                self.assignment.assign(
                                    var,
                                    assign_value,
                                    self.assignment.current_level(),
                                );
                                self.propagation_queue.push_back(unit_lit);
                                // Record implication for conflict analysis
                                self.conflict_analysis.record_implication(
                                    var,
                                    clause_ref.clone(),
                                    self.assignment.current_level(),
                                );
                            }
                        }
                        WatchResult::Ok => {}
                    }
                }
            }
        }
        PropagationResult::Ok
    }
    /// Update watches for a clause when a literal becomes false
    ///
    /// Clause identity is compared with `std::ptr::eq` on the shared
    /// allocation, not by value.
    fn update_watches(&mut self, clause: &ClauseRef, assigned_lit: Literal) -> WatchResult {
        // The assigned_lit just became true, so its negation became false
        let lit_that_became_false = assigned_lit.negate();
        // First, check if clause is already satisfied
        for &lit in clause.literals() {
            if self.assignment.eval_literal(lit).is_true() {
                return WatchResult::Ok;
            }
        }
        // Find the two currently watched literals (should be the first two that aren't false)
        let mut watched_indices = Vec::new();
        let mut other_unassigned_indices = Vec::new();
        let mut false_count = 0;
        let mut unassigned_count = 0;
        // Check all literals in the clause
        for (i, &lit) in clause.literals().iter().enumerate() {
            let value = self.assignment.eval_literal(lit);
            if value == TruthValue::Unassigned {
                unassigned_count += 1;
                // Check if this literal is currently being watched
                if self.watch_lists.get(&lit.negate()).is_some_and(|list| {
                    list.iter()
                        .any(|c| std::ptr::eq(c.as_ref(), clause.as_ref()))
                }) {
                    watched_indices.push(i);
                } else {
                    other_unassigned_indices.push(i);
                }
            } else if value.is_false() {
                false_count += 1;
                // Check if this false literal is currently being watched
                if lit == lit_that_became_false
                    && self.watch_lists.get(&assigned_lit).is_some_and(|list| {
                        list.iter()
                            .any(|c| std::ptr::eq(c.as_ref(), clause.as_ref()))
                    })
                {
                    watched_indices.push(i);
                }
            }
        }
        // If all literals are false, it's a conflict
        if false_count == clause.len() {
            return WatchResult::Conflict;
        }
        // If only one literal is unassigned and all others are false, it's a unit clause
        if unassigned_count == 1 && false_count == clause.len() - 1 {
            for &lit in clause.literals() {
                if self.assignment.eval_literal(lit) == TruthValue::Unassigned {
                    return WatchResult::Unit(lit);
                }
            }
        }
        // If we have at least 2 unassigned literals, maintain two watches
        if unassigned_count >= 2 {
            // We need to ensure exactly 2 literals are watched
            // Remove the watch for the literal that just became false
            if let Some(list) = self.watch_lists.get_mut(&assigned_lit) {
                list.retain(|c| !std::ptr::eq(c.as_ref(), clause.as_ref()));
            }
            // If we now have less than 2 watches, add a new one
            if watched_indices.len() < 2 && !other_unassigned_indices.is_empty() {
                let new_watched_idx = other_unassigned_indices[0];
                let new_watched_lit = clause.literals()[new_watched_idx];
                self.watch_lists
                    .entry(new_watched_lit.negate())
                    .or_default()
                    .push(clause.clone());
            }
        }
        WatchResult::Ok
    }
    /// Make a decision using VSIDS heuristic
    ///
    /// Returns `None` when no unassigned variable remains.
    fn decide(&self) -> Option<(Variable, bool)> {
        let unassigned: Vec<Variable> = self
            .variables
            .iter()
            .filter(|&&var| !self.assignment.is_assigned(var))
            .copied()
            .collect();
        if unassigned.is_empty() {
            return None;
        }
        // Use VSIDS to pick variable
        let var = self
            .variable_activity
            .highest_activity(&unassigned)
            .unwrap_or(unassigned[0]);
        // Choose polarity (prefer positive for now)
        Some((var, true))
    }
    /// Check if all variables are assigned
    fn all_variables_assigned(&self) -> bool {
        self.variables
            .iter()
            .all(|&var| self.assignment.is_assigned(var))
    }
    /// Learn a new clause
    ///
    /// Bumps VSIDS activity for the clause's variables and registers the
    /// clause in the watch lists (unit learned clauses are kept but not
    /// watched).
    fn learn_clause(&mut self, clause: Clause) {
        self.stats.learned_clauses += 1;
        // Bump activity of variables in learned clause
        self.variable_activity.bump_clause(&clause);
        self.variable_activity.decay_all();
        // Add to learned clauses
        let clause_ref = clause_ref(clause);
        self.learned_clauses.push(clause_ref.clone());
        // Add to watch lists if not unit
        if clause_ref.len() >= 2 {
            let lit1 = clause_ref.literals()[0];
            let lit2 = clause_ref.literals()[1];
            // Watch for when these literals become false
            self.watch_lists
                .entry(lit1.negate())
                .or_default()
                .push(clause_ref.clone());
            self.watch_lists
                .entry(lit2.negate())
                .or_default()
                .push(clause_ref.clone());
        }
    }
    /// Backtrack to a given level
    ///
    /// Pops decisions above `level`, undoes the corresponding assignments,
    /// and clears pending propagations and conflict-analysis state.
    fn backtrack_to(&mut self, level: u32) {
        // Clear propagation queue
        self.propagation_queue.clear();
        // Remove decisions above the level
        while let Some((var, _value)) = self.decisions.last() {
            if self.assignment.level(*var).unwrap_or(0) > level {
                self.decisions.pop();
            } else {
                break;
            }
        }
        // Backtrack assignment
        self.assignment.backtrack_to(level);
        // Clear conflict analysis
        self.conflict_analysis.clear();
    }
    /// Restart the search
    ///
    /// Drops all decisions but keeps learned clauses and VSIDS activity.
    fn restart(&mut self) {
        self.backtrack_to(0);
        self.decisions.clear();
    }
    /// Analyze conflict for external explanation
    pub fn analyze_conflict(&self, problem: &DependencyProblem) -> ConflictExplanation {
        self.conflict_analysis.explain_unsat(problem)
    }
}
/// Result of unit propagation
#[derive(Debug)]
enum PropagationResult {
    /// Propagation ran to fixpoint without contradiction.
    Ok,
    /// A clause became fully falsified; carries the conflicting clause.
    Conflict(ClauseRef),
}
/// Result of watch update
#[derive(Debug)]
enum WatchResult {
    /// Clause is satisfied or still has two watchable literals.
    Ok,
    /// Clause became unit; carries the sole unassigned literal to propagate.
    Unit(Literal),
    /// All literals in the clause are false.
    Conflict,
}
impl Default for SatSolver {
    /// Delegates to [`SatSolver::new`].
    fn default() -> Self {
        Self::new()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/sat/types.rs | crates/resolver/src/sat/types.rs | //! Basic types for SAT solving
use std::collections::HashMap;
use std::fmt;
/// A boolean variable in the SAT problem
///
/// Wraps a dense `u32` index; used as a key throughout the solver.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Variable(pub u32);
impl Variable {
    /// Create new variable with given index
    #[must_use]
    pub const fn new(index: u32) -> Self {
        Self(index)
    }
    /// Get the variable index
    #[must_use]
    pub const fn index(self) -> u32 {
        self.0
    }
}
impl fmt::Display for Variable {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "v{}", self.0)
    }
}
/// A literal is a variable or its negation
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Literal {
    variable: Variable,
    positive: bool,
}
impl Literal {
    /// Create a positive literal
    #[must_use]
    pub const fn positive(variable: Variable) -> Self {
        Self {
            variable,
            positive: true,
        }
    }
    /// Create a negative literal
    #[must_use]
    pub const fn negative(variable: Variable) -> Self {
        Self {
            variable,
            positive: false,
        }
    }
    /// Get the variable
    #[must_use]
    pub const fn variable(self) -> Variable {
        self.variable
    }
    /// Check if literal is positive
    #[must_use]
    pub const fn is_positive(self) -> bool {
        self.positive
    }
    /// Check if literal is negative
    #[must_use]
    pub const fn is_negative(self) -> bool {
        !self.positive
    }
    /// Negate the literal
    #[must_use]
    pub const fn negate(self) -> Self {
        Self {
            variable: self.variable,
            positive: !self.positive,
        }
    }
    /// Convert to DIMACS format integer
    /// Positive literals are variable index + 1
    /// Negative literals are -(variable index + 1)
    ///
    /// # Panics
    ///
    /// Panics if `index + 1` does not fit in an `i32`. The previous
    /// implementation clamped oversized indices with
    /// `.min(i32::MAX as u32 - 1)`, which silently mapped distinct variables
    /// to the same DIMACS id (and made the `expect` unreachable); overflow is
    /// a caller bug, so fail loudly instead.
    #[must_use]
    pub fn to_dimacs(self) -> i32 {
        let index_i32 = i32::try_from(self.variable.index())
            .ok()
            .and_then(|i| i.checked_add(1))
            .expect("Variable index too large for DIMACS format");
        if self.positive {
            index_i32
        } else {
            -index_i32
        }
    }
    /// Create from DIMACS format integer
    ///
    /// # Panics
    ///
    /// Panics if `dimacs == 0`: in DIMACS, 0 is the clause terminator and not
    /// a literal. (Previously a zero input underflowed `0 - 1` instead of
    /// reporting the misuse.)
    #[must_use]
    pub fn from_dimacs(dimacs: i32) -> Self {
        assert!(dimacs != 0, "DIMACS literal must be non-zero");
        let index = dimacs.unsigned_abs() - 1;
        if dimacs > 0 {
            Self::positive(Variable::new(index))
        } else {
            Self::negative(Variable::new(index))
        }
    }
}
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.positive {
            write!(f, "{}", self.variable)
        } else {
            write!(f, "¬{}", self.variable)
        }
    }
}
/// Truth value for a variable
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TruthValue {
    True,
    False,
    Unassigned,
}

impl TruthValue {
    /// Check if value is assigned (either `True` or `False`).
    #[must_use]
    pub const fn is_assigned(self) -> bool {
        !matches!(self, Self::Unassigned)
    }
    /// Check if value is `True`.
    #[must_use]
    pub const fn is_true(self) -> bool {
        match self {
            Self::True => true,
            Self::False | Self::Unassigned => false,
        }
    }
    /// Check if value is `False`.
    #[must_use]
    pub const fn is_false(self) -> bool {
        match self {
            Self::False => true,
            Self::True | Self::Unassigned => false,
        }
    }
    /// Convert to boolean (panics if unassigned)
    #[must_use]
    pub fn to_bool(self) -> bool {
        match self {
            Self::True => true,
            Self::False => false,
            Self::Unassigned => panic!("Cannot convert unassigned value to bool"),
        }
    }
}
/// Variable assignment
///
/// Tracks the partial truth assignment plus the bookkeeping needed for
/// chronological backtracking (per-variable decision level and the
/// assignment trail).
#[derive(Debug, Clone)]
pub struct Assignment {
    /// Current assignment of variables
    values: HashMap<Variable, TruthValue>,
    /// Decision level for each assigned variable
    levels: HashMap<Variable, u32>,
    /// Order in which variables were assigned
    trail: Vec<Variable>,
    /// Current decision level
    current_level: u32,
}
impl Assignment {
/// Create new empty assignment
#[must_use]
pub fn new() -> Self {
Self {
values: HashMap::new(),
levels: HashMap::new(),
trail: Vec::new(),
current_level: 0,
}
}
/// Get value of a variable
#[must_use]
pub fn get(&self, var: Variable) -> TruthValue {
self.values
.get(&var)
.copied()
.unwrap_or(TruthValue::Unassigned)
}
/// Check if variable is assigned
#[must_use]
pub fn is_assigned(&self, var: Variable) -> bool {
self.get(var).is_assigned()
}
/// Check if variable is true
#[must_use]
pub fn is_true(&self, var: Variable) -> bool {
self.get(var).is_true()
}
/// Check if variable is false
#[must_use]
pub fn is_false(&self, var: Variable) -> bool {
self.get(var).is_false()
}
/// Evaluate a literal under this assignment
#[must_use]
pub fn eval_literal(&self, lit: Literal) -> TruthValue {
match self.get(lit.variable()) {
TruthValue::Unassigned => TruthValue::Unassigned,
TruthValue::True => {
if lit.is_positive() {
TruthValue::True
} else {
TruthValue::False
}
}
TruthValue::False => {
if lit.is_positive() {
TruthValue::False
} else {
TruthValue::True
}
}
}
}
/// Assign a variable
pub fn assign(&mut self, var: Variable, value: bool, level: u32) {
let truth_value = if value {
TruthValue::True
} else {
TruthValue::False
};
self.values.insert(var, truth_value);
self.levels.insert(var, level);
self.trail.push(var);
self.current_level = level;
}
/// Unassign a variable
pub fn unassign(&mut self, var: Variable) {
self.values.remove(&var);
self.levels.remove(&var);
self.trail.retain(|&v| v != var);
}
/// Get decision level of a variable
#[must_use]
pub fn level(&self, var: Variable) -> Option<u32> {
self.levels.get(&var).copied()
}
/// Get current decision level
#[must_use]
pub const fn current_level(&self) -> u32 {
self.current_level
}
/// Backtrack to a given level
pub fn backtrack_to(&mut self, level: u32) {
// Remove all assignments at higher levels
self.trail.retain(|&var| {
if let Some(&var_level) = self.levels.get(&var) {
if var_level > level {
self.values.remove(&var);
self.levels.remove(&var);
false
} else {
true
}
} else {
false
}
});
self.current_level = level;
}
/// Get number of assigned variables
#[must_use]
pub fn num_assigned(&self) -> usize {
self.values.len()
}
/// Check if all variables in a set are assigned
#[must_use]
pub fn all_assigned(&self, vars: &[Variable]) -> bool {
vars.iter().all(|&v| self.is_assigned(v))
}
/// Get trail of assignments
#[must_use]
pub fn trail(&self) -> &[Variable] {
&self.trail
}
/// Reset the assignment to its initial, empty state.
pub fn clear(&mut self) {
    // Drop the bookkeeping first, then reset the decision level.
    self.trail.clear();
    self.levels.clear();
    self.values.clear();
    self.current_level = 0;
}
}
// `Default` simply delegates to `new()`, yielding an empty assignment.
impl Default for Assignment {
    fn default() -> Self {
        Self::new()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/sat/clause.rs | crates/resolver/src/sat/clause.rs | //! SAT clause representation with two-watched literal optimization
use super::types::TruthValue;
use super::{Assignment, Literal, Variable};
use std::fmt;
use std::sync::Arc;
/// A clause in CNF (Conjunctive Normal Form)
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Clause {
    /// Literals in the clause
    literals: Vec<Literal>,
    /// Watched literal indices for two-watched literal scheme.
    /// These are indices into the literals vector; `None` for clauses
    /// with fewer than two literals (nothing to watch).
    watched: Option<(usize, usize)>,
}
impl Clause {
    /// Create new clause from literals
    #[must_use]
    pub fn new(literals: Vec<Literal>) -> Self {
        let mut clause = Self {
            literals,
            watched: None,
        };
        // Initialize watched literals if clause has at least 2 literals;
        // unit and empty clauses have nothing to watch.
        if clause.literals.len() >= 2 {
            clause.watched = Some((0, 1));
        }
        clause
    }
    /// Create unit clause (single literal)
    #[must_use]
    pub fn unit(literal: Literal) -> Self {
        Self {
            literals: vec![literal],
            watched: None,
        }
    }
    /// Create binary clause (two literals)
    #[must_use]
    pub fn binary(lit1: Literal, lit2: Literal) -> Self {
        Self {
            literals: vec![lit1, lit2],
            watched: Some((0, 1)),
        }
    }
    /// Get literals in the clause
    #[must_use]
    pub fn literals(&self) -> &[Literal] {
        &self.literals
    }
    /// Get number of literals
    #[must_use]
    pub fn len(&self) -> usize {
        self.literals.len()
    }
    /// Check if clause is empty (an empty clause is unsatisfiable)
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.literals.is_empty()
    }
    /// Check if clause is unit (single literal)
    #[must_use]
    pub fn is_unit(&self) -> bool {
        self.literals.len() == 1
    }
    /// Check if clause is binary (two literals)
    #[must_use]
    pub fn is_binary(&self) -> bool {
        self.literals.len() == 2
    }
    /// Check if clause contains a literal (exact polarity match)
    #[must_use]
    pub fn contains(&self, literal: Literal) -> bool {
        self.literals.contains(&literal)
    }
    /// Check if clause contains a variable (either polarity)
    #[must_use]
    pub fn contains_variable(&self, var: Variable) -> bool {
        self.literals.iter().any(|lit| lit.variable() == var)
    }
    /// Evaluate clause under assignment
    ///
    /// `True` if any literal is true, `False` if all literals are false,
    /// otherwise `Unassigned`.
    #[must_use]
    pub fn evaluate(&self, assignment: &Assignment) -> TruthValue {
        let mut has_unassigned = false;
        for &lit in &self.literals {
            match assignment.eval_literal(lit) {
                TruthValue::True => return TruthValue::True, // Clause is satisfied
                TruthValue::Unassigned => has_unassigned = true,
                TruthValue::False => {} // Continue checking other literals
            }
        }
        if has_unassigned {
            TruthValue::Unassigned
        } else {
            TruthValue::False // All literals are false
        }
    }
    /// Check if clause is satisfied by assignment
    #[must_use]
    pub fn is_satisfied(&self, assignment: &Assignment) -> bool {
        self.evaluate(assignment).is_true()
    }
    /// Check if clause is conflicting (all literals false)
    #[must_use]
    pub fn is_conflict(&self, assignment: &Assignment) -> bool {
        self.evaluate(assignment).is_false()
    }
    /// Find unit literal if clause is unit under assignment
    ///
    /// Returns the single unassigned literal when every other literal is
    /// false and the clause is not already satisfied; `None` otherwise.
    #[must_use]
    pub fn find_unit_literal(&self, assignment: &Assignment) -> Option<Literal> {
        let mut unassigned_literal = None;
        let mut unassigned_count = 0;
        for &lit in &self.literals {
            match assignment.eval_literal(lit) {
                TruthValue::True => return None, // Clause is already satisfied
                TruthValue::Unassigned => {
                    unassigned_literal = Some(lit);
                    unassigned_count += 1;
                    if unassigned_count > 1 {
                        return None; // More than one unassigned literal
                    }
                }
                TruthValue::False => {} // Continue
            }
        }
        unassigned_literal // Return the single unassigned literal if any
    }
    /// Get watched literals (for two-watched literal scheme)
    ///
    /// `None` for clauses with fewer than two literals. The stored indices
    /// are only ever set to positions within `literals`, so indexing here
    /// cannot go out of bounds.
    #[must_use]
    pub fn watched_literals(&self) -> Option<(Literal, Literal)> {
        self.watched
            .map(|(i, j)| (self.literals[i], self.literals[j]))
    }
    /// Update watched literals after assignment
    ///
    /// Returns true if the watch was successfully updated (or no update was
    /// needed), false if no replacement watch exists — i.e. the clause has
    /// become unit or conflicting and the caller must handle it.
    pub fn update_watch(&mut self, assigned_lit: Literal, assignment: &Assignment) -> bool {
        let Some((w1, w2)) = self.watched else {
            return true; // No watched literals for unit clauses
        };
        // Check if assigned literal is one of the watched
        let assigned_idx = if self.literals[w1] == assigned_lit {
            Some(w1)
        } else if self.literals[w2] == assigned_lit {
            Some(w2)
        } else {
            return true; // Assigned literal is not watched
        };
        // `assigned_idx` is always `Some` at this point (the `else` arm above
        // returns early); this second match is defensive.
        let Some(assigned_watch_idx) = assigned_idx else {
            return true;
        };
        // If the assigned literal evaluates true, the clause is satisfied
        // and the current watches can stay as they are.
        if assignment.eval_literal(assigned_lit).is_true() {
            return true; // Clause is satisfied
        }
        // Find a new non-false literal to watch in place of the assigned one.
        for (i, &lit) in self.literals.iter().enumerate() {
            if i == w1 || i == w2 {
                continue; // Skip current watched literals
            }
            let lit_value = assignment.eval_literal(lit);
            if !lit_value.is_false() {
                // Found non-false literal, update watch
                if assigned_watch_idx == w1 {
                    self.watched = Some((i, w2));
                } else {
                    self.watched = Some((w1, i));
                }
                return true;
            }
        }
        // No replacement found - clause is unit or conflict
        false
    }
    /// Get variables in the clause (one entry per literal; duplicates possible)
    pub fn variables(&self) -> impl Iterator<Item = Variable> + '_ {
        self.literals.iter().map(|lit| lit.variable())
    }
    /// Remove duplicate literals and check for tautology
    ///
    /// Returns None if clause is tautology (contains both x and ¬x).
    /// The pairwise tautology scan is O(n²); clauses are expected to be
    /// small, so this has not been optimized to an adjacent-pair check.
    #[must_use]
    pub fn simplify(mut self) -> Option<Self> {
        // Remove duplicates (sorting groups identical literals together)
        self.literals
            .sort_by_key(|lit| (lit.variable().index(), lit.is_positive()));
        self.literals.dedup();
        // Check for tautology
        for i in 0..self.literals.len() {
            for j in (i + 1)..self.literals.len() {
                if self.literals[i].variable() == self.literals[j].variable()
                    && self.literals[i].is_positive() != self.literals[j].is_positive()
                {
                    return None; // Tautology
                }
            }
        }
        // Re-initialize watched literals, since indices may have moved
        if self.literals.len() >= 2 {
            self.watched = Some((0, 1));
        } else {
            self.watched = None;
        }
        Some(self)
    }
}
impl fmt::Display for Clause {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The empty clause is rendered as falsum; otherwise a disjunction.
        if self.is_empty() {
            return write!(f, "⊥");
        }
        let rendered = self
            .literals
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
            .join(" ∨ ");
        write!(f, "({rendered})")
    }
}
/// Reference to a clause (for efficient storage)
pub type ClauseRef = Arc<Clause>;
/// Wrap a clause in a shared, reference-counted handle.
#[must_use]
pub fn clause_ref(clause: Clause) -> ClauseRef {
    Arc::from(clause)
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/sat/mod.rs | crates/resolver/src/sat/mod.rs | //! SAT solver for dependency resolution
//!
//! This module implements a DPLL-based SAT solver optimized for package
//! dependency resolution. It supports:
//! - Version constraint clauses
//! - Conflict-driven clause learning
//! - Two-watched literal optimization
//! - VSIDS variable ordering heuristic
use semver::Version;
use sps2_errors::{Error, PackageError};
use sps2_events::{AppEvent, EventEmitter, EventSender, FailureContext, LifecycleEvent};
use std::collections::{HashMap, HashSet};
use std::fmt;
mod clause;
mod conflict_analysis;
mod solver;
mod types;
mod variable_map;
pub use clause::{Clause, ClauseRef};
pub use conflict_analysis::ConflictAnalysis;
pub use solver::SatSolver;
pub use types::{Assignment, Literal, Variable};
pub use variable_map::VariableMap;
/// Package identifier with version for SAT solving
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PackageVersion {
    /// Package name
    pub name: String,
    /// Concrete semantic version of the package
    pub version: Version,
}
impl PackageVersion {
    /// Create new package version from a name and a concrete version
    #[must_use]
    pub fn new(name: String, version: Version) -> Self {
        Self { name, version }
    }
}
impl fmt::Display for PackageVersion {
    /// Renders as `name@version`, matching the format used in event payloads.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { name, version } = self;
        write!(f, "{name}@{version}")
    }
}
/// SAT problem representing package dependency constraints
#[derive(Debug, Clone)]
pub struct DependencyProblem {
    /// Map from package versions to SAT variables
    pub variables: VariableMap,
    /// Clauses representing constraints
    pub clauses: Vec<Clause>,
    /// Required packages (at least one version must be selected)
    pub required_packages: HashSet<String>,
}
impl DependencyProblem {
    /// Create new, empty dependency problem
    #[must_use]
    pub fn new() -> Self {
        Self {
            variables: VariableMap::new(),
            clauses: Vec::new(),
            required_packages: HashSet::new(),
        }
    }
    /// Add a package version to the problem, returning its SAT variable.
    pub fn add_package_version(&mut self, package: PackageVersion) -> Variable {
        self.variables.add_package_version(package)
    }
    /// Add a clause to the problem
    pub fn add_clause(&mut self, clause: Clause) {
        self.clauses.push(clause);
    }
    /// Mark a package as required (at least one of its versions must be selected)
    pub fn require_package(&mut self, name: String) {
        self.required_packages.insert(name);
    }
    /// Get all versions of a package
    #[must_use]
    pub fn get_package_versions(&self, name: &str) -> Vec<&PackageVersion> {
        self.variables.get_package_versions(name)
    }
    /// Add constraint: at most one version of a package can be selected
    ///
    /// For every pair of versions `(v1, v2)` this emits the clause `¬v1 ∨ ¬v2`.
    ///
    /// # Panics
    ///
    /// Panics if variables for package versions are not found. This should not
    /// happen in normal usage as versions are added before constraints.
    pub fn add_at_most_one_constraint(&mut self, package_name: &str) {
        // Resolve the variables up front: `get_package_versions` borrows
        // `self.variables` immutably while `add_clause` needs `&mut self`.
        // Collecting plain `Variable`s (which are cheap copies) also avoids
        // cloning every `PackageVersion`.
        let vars: Vec<Variable> = self
            .variables
            .get_package_versions(package_name)
            .into_iter()
            .map(|pv| {
                self.variables
                    .get_variable(pv)
                    .expect("package version registered before constraint generation")
            })
            .collect();
        for i in 0..vars.len() {
            for j in (i + 1)..vars.len() {
                let clause = Clause::new(vec![
                    Literal::negative(vars[i]),
                    Literal::negative(vars[j]),
                ]);
                self.add_clause(clause);
            }
        }
    }
    /// Add constraint: at least one version of a required package must be selected
    ///
    /// A no-op when the package has no known versions.
    pub fn add_at_least_one_constraint(&mut self, package_name: &str) {
        let literals: Vec<Literal> = self
            .variables
            .get_package_versions(package_name)
            .into_iter()
            .filter_map(|pv| self.variables.get_variable(pv))
            .map(Literal::positive)
            .collect();
        if !literals.is_empty() {
            self.add_clause(Clause::new(literals));
        }
    }
}
// `Default` yields an empty problem, same as `new()`.
impl Default for DependencyProblem {
    fn default() -> Self {
        Self::new()
    }
}
/// Solution to a dependency problem
#[derive(Debug, Clone)]
pub struct DependencySolution {
    /// Selected package versions (one version per package name)
    pub selected: HashMap<String, Version>,
    /// Assignment that satisfies all constraints
    pub assignment: Assignment,
}
impl DependencySolution {
    /// Create new solution from selected versions and the satisfying assignment.
    #[must_use]
    pub fn new(selected: HashMap<String, Version>, assignment: Assignment) -> Self {
        Self {
            selected,
            assignment,
        }
    }
    /// Check if exactly this version of the package was selected.
    #[must_use]
    pub fn is_selected(&self, name: &str, version: &Version) -> bool {
        self.selected
            .get(name)
            .map_or(false, |chosen| chosen == version)
    }
}
/// Human-readable explanation of an unsatisfiable dependency problem
pub struct ConflictExplanation {
    /// Pairs of package names believed to conflict (package, version)
    pub conflicting_packages: Vec<(String, String)>, // (package, version)
    /// Summary message describing the conflict
    pub message: String,
}
impl ConflictExplanation {
    /// Create a new explanation from conflicting pairs and a summary message.
    fn new(conflicting_packages: Vec<(String, String)>, message: String) -> Self {
        Self {
            conflicting_packages,
            message,
        }
    }
}
/// Convert a dependency problem to CNF and solve using SAT
///
/// Feeds the problem's clauses into a fresh solver, adds at-most-one
/// constraints for every package and at-least-one constraints for required
/// packages, then extracts one selected version per package from the model.
///
/// NOTE(review): this function is `async` but contains no `.await`;
/// presumably kept async for API symmetry — confirm before changing.
///
/// # Errors
///
/// Returns an error if:
/// - The SAT problem is unsatisfiable (conflicting constraints)
/// - The solver encounters an internal error
/// - Package version mapping fails
pub async fn solve_dependencies(
    problem: DependencyProblem,
    event_sender: Option<&EventSender>,
) -> Result<DependencySolution, Error> {
    // Create SAT solver with version preference
    let mut solver = SatSolver::with_variable_map(&problem.variables);
    // Add all clauses to the solver
    for clause in &problem.clauses {
        solver.add_clause(clause.clone());
    }
    // Add at-most-one constraints for each package: for every pair of
    // versions of the same package, emit ¬v1 ∨ ¬v2.
    let all_packages: HashSet<String> =
        problem.variables.all_packages().map(String::from).collect();
    for package in &all_packages {
        let versions = problem.variables.get_package_versions(package);
        // At most one version can be selected
        for i in 0..versions.len() {
            for j in (i + 1)..versions.len() {
                if let (Some(v1), Some(v2)) = (
                    problem.variables.get_variable(versions[i]),
                    problem.variables.get_variable(versions[j]),
                ) {
                    solver.add_clause(Clause::new(vec![
                        Literal::negative(v1),
                        Literal::negative(v2),
                    ]));
                }
            }
        }
    }
    // Add at-least-one constraints for required packages
    for package in &problem.required_packages {
        let versions = problem.variables.get_package_versions(package);
        let literals: Vec<Literal> = versions
            .iter()
            .filter_map(|pv| problem.variables.get_variable(pv))
            .map(Literal::positive)
            .collect();
        if !literals.is_empty() {
            solver.add_clause(Clause::new(literals));
        }
    }
    // Solve the SAT problem.
    // NOTE(review): the `Err` payload from `solver.solve()` is discarded here;
    // only the post-hoc conflict analysis is surfaced to the caller.
    if let Ok(assignment) = solver.solve() {
        // Extract selected packages from assignment: the at-most-one
        // constraints guarantee at most one true variable per package,
        // hence the `break` after the first hit.
        let mut selected = HashMap::new();
        for package_name in all_packages {
            let versions = problem.variables.get_package_versions(&package_name);
            for package_version in versions {
                if let Some(var) = problem.variables.get_variable(package_version) {
                    if assignment.is_true(var) {
                        selected.insert(
                            package_version.name.clone(),
                            package_version.version.clone(),
                        );
                        break;
                    }
                }
            }
        }
        Ok(DependencySolution::new(selected, assignment))
    } else {
        // Extract conflict information
        let conflict = solver.analyze_conflict(&problem);
        // Build the error first so the failure event can reference it
        let error = PackageError::DependencyConflict {
            message: conflict.message.clone(),
        };
        // Emit conflict event if sender is available
        if let Some(sender) = event_sender {
            sender.emit(AppEvent::Lifecycle(LifecycleEvent::resolver_failed(
                FailureContext::from_error(&error),
                conflict
                    .conflicting_packages
                    .iter()
                    .map(|(name, version)| format!("{name}@{version}"))
                    .collect(),
            )));
        }
        Err(error.into())
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/sat/variable_map.rs | crates/resolver/src/sat/variable_map.rs | //! Mapping between package versions and SAT variables
use super::{PackageVersion, Variable};
use std::collections::HashMap;
/// Maps package versions to SAT variables
#[derive(Debug, Clone)]
pub struct VariableMap {
    /// Next variable index to allocate (also the total count so far)
    next_var: u32,
    /// Map from package version to variable
    package_to_var: HashMap<PackageVersion, Variable>,
    /// Reverse map from variable to package version
    var_to_package: HashMap<Variable, PackageVersion>,
    /// Map from package name to all its versions
    package_versions: HashMap<String, Vec<PackageVersion>>,
}
impl VariableMap {
    /// Create new empty variable map
    #[must_use]
    pub fn new() -> Self {
        Self {
            next_var: 0,
            package_to_var: HashMap::new(),
            var_to_package: HashMap::new(),
            package_versions: HashMap::new(),
        }
    }
    /// Add a package version and return its variable.
    ///
    /// Re-adding a known version returns the previously allocated variable.
    pub fn add_package_version(&mut self, package: PackageVersion) -> Variable {
        // Fast path: the version is already registered.
        if let Some(&existing) = self.package_to_var.get(&package) {
            return existing;
        }
        // Allocate the next variable index.
        let var = Variable::new(self.next_var);
        self.next_var += 1;
        // Register forward and reverse mappings.
        self.package_to_var.insert(package.clone(), var);
        self.var_to_package.insert(var, package.clone());
        // Record the version under its package name.
        self.package_versions
            .entry(package.name.clone())
            .or_default()
            .push(package);
        var
    }
    /// Get variable for a package version
    #[must_use]
    pub fn get_variable(&self, package: &PackageVersion) -> Option<Variable> {
        self.package_to_var.get(package).cloned()
    }
    /// Get package version for a variable
    #[must_use]
    pub fn get_package(&self, var: Variable) -> Option<&PackageVersion> {
        self.var_to_package.get(&var)
    }
    /// Get all versions of a package (empty when the package is unknown)
    #[must_use]
    pub fn get_package_versions(&self, name: &str) -> Vec<&PackageVersion> {
        self.package_versions
            .get(name)
            .map_or_else(Vec::new, |versions| versions.iter().collect())
    }
    /// Get all package names
    pub fn all_packages(&self) -> impl Iterator<Item = &str> {
        self.package_versions.keys().map(|name| name.as_str())
    }
    /// Get number of variables allocated so far
    #[must_use]
    pub fn num_variables(&self) -> u32 {
        self.next_var
    }
    /// Get all allocated variables, in allocation order
    pub fn all_variables(&self) -> impl Iterator<Item = Variable> + '_ {
        (0..self.next_var).map(Variable::new)
    }
    /// Check if a package has any versions
    #[must_use]
    pub fn has_package(&self, name: &str) -> bool {
        self.package_versions.contains_key(name)
    }
    /// Get variables for all versions of a package
    #[must_use]
    pub fn get_package_variables(&self, name: &str) -> Vec<Variable> {
        self.get_package_versions(name)
            .into_iter()
            .filter_map(|pv| self.get_variable(pv))
            .collect()
    }
    /// Clear all mappings and reset variable allocation
    pub fn clear(&mut self) {
        self.package_to_var.clear();
        self.var_to_package.clear();
        self.package_versions.clear();
        self.next_var = 0;
    }
}
// `Default` yields an empty map, same as `new()`.
impl Default for VariableMap {
    fn default() -> Self {
        Self::new()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/resolver/src/sat/conflict_analysis.rs | crates/resolver/src/sat/conflict_analysis.rs | //! Conflict analysis and learning for SAT solver
use super::{Assignment, Clause, ClauseRef, DependencyProblem, Literal, Variable};
use crate::sat::ConflictExplanation;
use std::collections::{HashMap, HashSet, VecDeque};
/// Conflict analysis data
#[derive(Debug, Clone)]
pub struct ConflictAnalysis {
    /// Implication graph: variable -> (implying clause, decision level)
    implication_graph: HashMap<Variable, (ClauseRef, u32)>,
    /// Conflict clause that caused the conflict (set by the solver)
    conflict_clause: Option<ClauseRef>,
    /// Learned clauses accumulated from conflict analysis
    learned_clauses: Vec<Clause>,
}
impl ConflictAnalysis {
    /// Create new conflict analysis with empty state
    #[must_use]
    pub fn new() -> Self {
        Self {
            implication_graph: HashMap::new(),
            conflict_clause: None,
            learned_clauses: Vec::new(),
        }
    }
    /// Record an implication: `var` was forced by `clause` at `level`.
    pub fn record_implication(&mut self, var: Variable, clause: ClauseRef, level: u32) {
        self.implication_graph.insert(var, (clause, level));
    }
    /// Set the conflict clause for the next `analyze_conflict` call.
    pub fn set_conflict(&mut self, clause: ClauseRef) {
        self.conflict_clause = Some(clause);
    }
    /// Analyze conflict and learn a new clause (first-UIP scheme).
    ///
    /// Returns the learned clause and the backtrack level (second-highest
    /// decision level appearing in the learned clause), or `None` when the
    /// conflict is at level 0 (UNSAT) or no clause could be derived.
    ///
    /// NOTE(review): literals of the conflict clause assigned at levels below
    /// the current one are enqueued but never added to `learned_literals`
    /// directly — only literals reached through the implication graph are.
    /// Verify this matches the intended first-UIP cut.
    pub fn analyze_conflict(&mut self, assignment: &Assignment) -> Option<(Clause, u32)> {
        let conflict_clause = self.conflict_clause.as_ref()?;
        let current_level = assignment.current_level();
        if current_level == 0 {
            // Conflict at level 0 means UNSAT
            return None;
        }
        // First UIP (Unique Implication Point) cut
        let mut learned_literals = Vec::new();
        let mut seen = HashSet::new();
        let mut queue = VecDeque::new();
        let mut current_level_count = 0;
        // Seed with the conflict clause's literals
        for &lit in conflict_clause.literals() {
            let var = lit.variable();
            if assignment.level(var).unwrap_or(0) == current_level {
                current_level_count += 1;
            }
            seen.insert(var);
            queue.push_back(var);
        }
        // Expand implications until only one current-level variable remains:
        // that variable is the first UIP.
        while current_level_count > 1 {
            let Some(var) = queue.pop_front() else {
                break;
            };
            if let Some((clause, _level)) = self.implication_graph.get(&var) {
                // This variable was implied by a clause; walk its antecedents.
                for &lit in clause.literals() {
                    let lit_var = lit.variable();
                    if lit_var != var && !seen.contains(&lit_var) {
                        seen.insert(lit_var);
                        let lit_level = assignment.level(lit_var).unwrap_or(0);
                        if lit_level == current_level {
                            current_level_count += 1;
                            queue.push_back(lit_var);
                        } else if lit_level > 0 {
                            // Lower-level antecedent: add to the learned
                            // clause, negating its current assignment.
                            let assigned_value = assignment.is_true(lit_var);
                            let learned_lit = if assigned_value {
                                Literal::negative(lit_var)
                            } else {
                                Literal::positive(lit_var)
                            };
                            learned_literals.push(learned_lit);
                        }
                    }
                }
                if assignment.level(var).unwrap_or(0) == current_level {
                    current_level_count -= 1;
                }
            }
        }
        // Add the UIP literal itself (negated, so the learned clause blocks
        // this assignment in the future).
        if let Some(&uip_var) = queue.front() {
            let assigned_value = assignment.is_true(uip_var);
            let uip_lit = if assigned_value {
                Literal::negative(uip_var)
            } else {
                Literal::positive(uip_var)
            };
            learned_literals.push(uip_lit);
        }
        if learned_literals.is_empty() {
            return None;
        }
        // Find backtrack level (second highest level in learned clause)
        let mut levels: Vec<u32> = learned_literals
            .iter()
            .filter_map(|lit| assignment.level(lit.variable()))
            .collect();
        levels.sort_unstable();
        levels.dedup();
        let backtrack_level = if levels.len() > 1 {
            levels[levels.len() - 2]
        } else {
            0
        };
        let learned_clause = Clause::new(learned_literals);
        self.learned_clauses.push(learned_clause.clone());
        Some((learned_clause, backtrack_level))
    }
    /// Clear per-conflict analysis data.
    ///
    /// Learned clauses are deliberately retained across conflicts —
    /// presumably so `explain_unsat` can report on them; confirm before
    /// clearing them here.
    pub fn clear(&mut self) {
        self.implication_graph.clear();
        self.conflict_clause = None;
    }
    /// Analyze unsatisfiable problem and generate explanation.
    ///
    /// Heuristic: a learned clause touching exactly two packages is treated
    /// as evidence that those packages conflict.
    pub fn explain_unsat(&self, problem: &DependencyProblem) -> ConflictExplanation {
        let mut conflicting_packages = Vec::new();
        // Analyze learned clauses to find conflicting packages
        for clause in &self.learned_clauses {
            let mut clause_packages = HashSet::new();
            for lit in clause.literals() {
                if let Some(package_version) = problem.variables.get_package(lit.variable()) {
                    clause_packages.insert(package_version.name.clone());
                }
            }
            // If clause involves exactly 2 packages, they're likely conflicting
            if clause_packages.len() == 2 {
                let mut names = clause_packages.into_iter();
                if let (Some(first), Some(second)) = (names.next(), names.next()) {
                    conflicting_packages.push((first, second));
                }
            }
        }
        // Generate explanation message
        let message = if conflicting_packages.is_empty() {
            "Unable to find a valid set of package versions that satisfies all constraints"
                .to_string()
        } else {
            let conflicts: Vec<String> = conflicting_packages
                .iter()
                .map(|(p1, p2)| format!("{p1} and {p2}"))
                .collect();
            format!(
                "Dependency conflicts detected between: {}",
                conflicts.join(", ")
            )
        };
        ConflictExplanation::new(conflicting_packages, message)
    }
}
// `Default` yields an empty analysis, same as `new()`.
impl Default for ConflictAnalysis {
    fn default() -> Self {
        Self::new()
    }
}
/// Variable activity scores for VSIDS heuristic
#[derive(Debug, Clone)]
pub struct VariableActivity {
    /// Activity score for each variable (missing entry means 0.0)
    scores: HashMap<Variable, f64>,
    /// Decay factor (typically 0.95)
    decay: f64,
    /// Increment added on each bump; grows over time in lieu of decaying
    /// every stored score
    increment: f64,
}
impl VariableActivity {
    /// Create new activity tracker with the given decay factor (typically 0.95).
    #[must_use]
    pub fn new(decay: f64) -> Self {
        Self {
            scores: HashMap::new(),
            decay,
            increment: 1.0,
        }
    }
    /// Bump activity of every variable appearing in `clause`.
    pub fn bump_clause(&mut self, clause: &Clause) {
        for &lit in clause.literals() {
            self.bump_variable(lit.variable());
        }
    }
    /// Bump activity of a single variable by the current increment.
    pub fn bump_variable(&mut self, var: Variable) {
        let score = self.scores.entry(var).or_insert(0.0);
        *score += self.increment;
    }
    /// Decay all activities.
    ///
    /// Implemented the standard VSIDS way: instead of multiplying every
    /// stored score by `decay`, the shared increment grows by `1 / decay`,
    /// which is O(1) per decay step.
    pub fn decay_all(&mut self) {
        self.increment /= self.decay;
        // Rescale if increment gets too large
        if self.increment > 1e100 {
            self.rescale();
        }
    }
    /// Rescale all scores to prevent `f64` overflow.
    fn rescale(&mut self) {
        let scale = 1e-100;
        for score in self.scores.values_mut() {
            *score *= scale;
        }
        self.increment *= scale;
    }
    /// Get the unassigned variable with the highest activity score.
    ///
    /// Unknown variables count as score 0.0. Returns `None` for an empty slice.
    #[must_use]
    pub fn highest_activity(&self, unassigned: &[Variable]) -> Option<Variable> {
        unassigned
            .iter()
            .max_by(|&a, &b| {
                let score_a = self.scores.get(a).copied().unwrap_or(0.0);
                let score_b = self.scores.get(b).copied().unwrap_or(0.0);
                // total_cmp is panic-free, unlike partial_cmp().unwrap(),
                // which would panic if a NaN ever reached the score map.
                score_a.total_cmp(&score_b)
            })
            .copied()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/recipe.rs | crates/types/src/recipe.rs | //! YAML recipe format for sps2
//!
//! This module provides a declarative YAML-based recipe format that replaces
//! the Starlark-based system with proper staged execution.
use serde::de::{self, IgnoredAny, MapAccess, Unexpected, Visitor};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, fmt};
/// Build isolation level
///
/// Discriminants are stable (0-3) and round-trip through `from_u8`/`as_u8`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum IsolationLevel {
    /// No isolation - uses host environment as-is (shows warning)
    None = 0,
    /// Default isolation - clean environment, controlled paths (default)
    Default = 1,
    /// Enhanced isolation - default + private HOME/TMPDIR
    Enhanced = 2,
    /// Hermetic isolation - full whitelist approach, network blocking
    Hermetic = 3,
}
// The default isolation level is `Default` (clean environment).
impl Default for IsolationLevel {
    fn default() -> Self {
        Self::Default
    }
}
// Custom deserializer: accepts a string name ("none".."hermetic"),
// an integer 0-3, or a single-entry map whose key is the level name
// (so YAML like `isolation: {hermetic: ...}` still parses).
impl<'de> Deserialize<'de> for IsolationLevel {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct IsolationLevelVisitor;
        impl<'de> Visitor<'de> for IsolationLevelVisitor {
            type Value = IsolationLevel;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("an isolation level (none, default, enhanced, hermetic, or 0-3)")
            }
            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                parse_from_str(value)
            }
            fn visit_string<E>(self, value: String) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                parse_from_str(&value)
            }
            fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                // Reject anything that cannot fit in a u8 before converting.
                if value > u64::from(u8::MAX) {
                    return Err(de::Error::invalid_value(
                        Unexpected::Unsigned(value),
                        &"number between 0 and 3",
                    ));
                }
                let byte = u8::try_from(value).map_err(|_| {
                    de::Error::invalid_value(Unexpected::Unsigned(value), &"number between 0 and 3")
                })?;
                parse_from_u8(byte)
            }
            fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                // Negative levels are invalid; non-negative values are
                // delegated to the unsigned path.
                if value < 0 {
                    return Err(de::Error::invalid_value(
                        Unexpected::Signed(value),
                        &"number between 0 and 3",
                    ));
                }
                let unsigned = u64::try_from(value).map_err(|_| {
                    de::Error::invalid_value(Unexpected::Signed(value), &"number between 0 and 3")
                })?;
                self.visit_u64(unsigned)
            }
            fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
            where
                M: MapAccess<'de>,
            {
                // Accept exactly one entry; the key carries the level name
                // and the value is ignored.
                if let Some((key, _value)) = map.next_entry::<String, IgnoredAny>()? {
                    if map.next_entry::<IgnoredAny, IgnoredAny>()?.is_some() {
                        return Err(de::Error::custom(
                            "isolation level map must contain a single entry",
                        ));
                    }
                    parse_from_str(&key)
                } else {
                    Err(de::Error::custom(
                        "expected isolation level map with a single entry",
                    ))
                }
            }
        }
        deserializer.deserialize_any(IsolationLevelVisitor)
    }
}
/// Parse an isolation level from its textual name (case-insensitive,
/// surrounding whitespace ignored).
fn parse_from_str<E>(value: &str) -> Result<IsolationLevel, E>
where
    E: de::Error,
{
    match value.trim().to_ascii_lowercase().as_str() {
        "none" => Ok(IsolationLevel::None),
        "default" => Ok(IsolationLevel::Default),
        "enhanced" => Ok(IsolationLevel::Enhanced),
        "hermetic" => Ok(IsolationLevel::Hermetic),
        other => Err(de::Error::unknown_variant(
            other,
            &["none", "default", "enhanced", "hermetic"],
        )),
    }
}
fn parse_from_u8<E>(value: u8) -> Result<IsolationLevel, E>
where
E: de::Error,
{
IsolationLevel::from_u8(value).ok_or_else(|| {
de::Error::invalid_value(
Unexpected::Unsigned(u64::from(value)),
&"number between 0 and 3",
)
})
}
impl IsolationLevel {
/// Convert from u8
#[must_use]
pub fn from_u8(value: u8) -> Option<Self> {
match value {
0 => Some(Self::None),
1 => Some(Self::Default),
2 => Some(Self::Enhanced),
3 => Some(Self::Hermetic),
_ => None,
}
}
/// Convert to u8
#[must_use]
pub fn as_u8(self) -> u8 {
self as u8
}
/// Check if this is the default isolation level
#[must_use]
pub fn is_default_value(self) -> bool {
self == Self::Default
}
}
impl std::fmt::Display for IsolationLevel {
    /// Renders the lowercase name, matching the serde `lowercase` convention.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::None => "none",
            Self::Default => "default",
            Self::Enhanced => "enhanced",
            Self::Hermetic => "hermetic",
        };
        f.write_str(label)
    }
}
/// Complete YAML recipe structure
///
/// Optional sections are omitted from serialized output when they hold
/// their default/empty values (see the `skip_serializing_if` attributes).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct YamlRecipe {
    /// Package metadata (required)
    pub metadata: Metadata,
    /// Dynamic facts/variables (optional)
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub facts: HashMap<String, String>,
    /// Environment setup stage (optional)
    #[serde(default, skip_serializing_if = "Environment::is_default")]
    pub environment: Environment,
    /// Source acquisition stage (required)
    pub source: Source,
    /// Build stage (required)
    pub build: Build,
    /// Post-processing stage (optional)
    #[serde(default, skip_serializing_if = "Post::is_empty")]
    pub post: Post,
    /// Installation behavior (optional)
    #[serde(default, skip_serializing_if = "Install::is_default")]
    pub install: Install,
}
/// Package metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Metadata {
    /// Package name
    pub name: String,
    /// Package version string
    pub version: String,
    /// Short description
    pub description: String,
    /// License identifier
    pub license: String,
    /// Optional homepage URL
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub homepage: Option<String>,
    /// Runtime and build dependencies (omitted when empty)
    #[serde(default, skip_serializing_if = "Dependencies::is_empty")]
    pub dependencies: Dependencies,
}
/// Dependencies specification
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Dependencies {
    /// Packages required at runtime
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub runtime: Vec<String>,
    /// Packages required only to build
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub build: Vec<String>,
}
impl Dependencies {
    /// Check if dependencies are empty (used by `skip_serializing_if`)
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.runtime.is_empty() && self.build.is_empty()
    }
}
/// Environment setup stage
///
/// Serialization is hand-written (see the `Serialize` impl below) so that
/// default-valued fields are omitted.
#[derive(Debug, Clone, Deserialize)]
pub struct Environment {
    /// Isolation level: none (0), standard (1), enhanced (2), hermetic (3)
    #[serde(default = "default_isolation")]
    pub isolation: IsolationLevel,
    /// Apply optimized compiler flags
    #[serde(default)]
    pub defaults: bool,
    /// Allow network during build
    #[serde(default)]
    pub network: bool,
    /// Environment variables
    #[serde(default)]
    pub variables: HashMap<String, String>,
}
// Manual `Serialize`: emit only non-default fields, mirroring the
// `skip_serializing_if` behavior the derive cannot express per-field here.
impl Serialize for Environment {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeMap;
        let mut map = serializer.serialize_map(None)?;
        if self.isolation != IsolationLevel::default() {
            map.serialize_entry("isolation", &self.isolation)?;
        }
        if self.defaults {
            map.serialize_entry("defaults", &self.defaults)?;
        }
        if self.network {
            map.serialize_entry("network", &self.network)?;
        }
        if !self.variables.is_empty() {
            map.serialize_entry("variables", &self.variables)?;
        }
        map.end()
    }
}
// serde `default = "..."` helper for the `isolation` field.
fn default_isolation() -> IsolationLevel {
    IsolationLevel::Default
}
// Default environment: default isolation, no compiler-flag defaults,
// no network, no extra variables.
impl Default for Environment {
    fn default() -> Self {
        Self {
            isolation: default_isolation(),
            defaults: false,
            network: false,
            variables: HashMap::new(),
        }
    }
}
impl Environment {
    /// Check if the environment equals its default configuration
    /// (used by `skip_serializing_if` on `YamlRecipe`).
    #[must_use]
    pub fn is_default(&self) -> bool {
        // All four knobs must be at their defaults.
        self.variables.is_empty()
            && !self.defaults
            && !self.network
            && self.isolation == IsolationLevel::default()
    }
}
/// Source acquisition stage
///
/// Either a single flattened `method` (backward compatible) or a list of
/// `sources` may be provided.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Source {
    /// Source method (single source for backward compatibility)
    #[serde(flatten)]
    pub method: Option<SourceMethod>,
    /// Multiple sources (new multi-source support)
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub sources: Vec<NamedSource>,
    /// Patches to apply after extraction
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub patches: Vec<String>,
}
/// Named source with optional extract location
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NamedSource {
    /// Source method (flattened into this entry)
    #[serde(flatten)]
    pub method: SourceMethod,
    /// Where to extract relative to build directory (optional)
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub extract_to: Option<String>,
}
/// Source acquisition methods
///
/// Untagged: the variant is selected by which key (`git`, `fetch`, `local`)
/// is present in the input.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum SourceMethod {
    Git { git: GitSource },
    Fetch { fetch: FetchSource },
    Local { local: LocalSource },
}
/// Git source specification
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitSource {
    /// Repository URL to clone
    pub url: String,
    /// Branch, tag, or commit to check out; serialized as `ref`
    /// (renamed because `ref` is a Rust keyword)
    #[serde(rename = "ref")]
    pub git_ref: String,
}
/// Fetch (download) source specification
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FetchSource {
    /// URL of the archive to download
    pub url: String,
    /// Optional checksum to verify the download against
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub checksum: Option<Checksum>,
    /// Where to extract relative to build directory (optional)
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub extract_to: Option<String>,
}
/// Checksum specification; the algorithm enum is flattened so the input is
/// just one of `blake3:`, `sha256:`, or `md5:`
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Checksum {
    #[serde(flatten)]
    pub algorithm: ChecksumAlgorithm,
}
/// Supported checksum algorithms; untagged, so the variant is selected by
/// the key present (`blake3`, `sha256`, or `md5`)
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ChecksumAlgorithm {
    Blake3 { blake3: String },
    Sha256 { sha256: String },
    Md5 { md5: String },
}
/// Local source specification (path on the local filesystem)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LocalSource {
    /// Path to the local source tree or archive
    pub path: String,
}
/// Build stage
///
/// Untagged: a recipe either names a build `system` (with optional `args`)
/// or lists explicit `steps`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Build {
    /// Simple build system invocation
    System {
        system: BuildSystem,
        #[serde(default)]
        args: Vec<String>,
    },
    /// Complex build with custom steps
    Steps { steps: Vec<ParsedStep> },
}
/// Supported build systems; serialized in lowercase (e.g. `cmake`)
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum BuildSystem {
    Autotools,
    Cmake,
    Meson,
    Cargo,
    Make,
    Go,
    Python,
    Nodejs,
}
/// Parsed build step from YAML recipe
///
/// Untagged: the variant is selected by which key is present
/// (`command`, `shell`, `make`, ...).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ParsedStep {
    /// Simple command (splits by whitespace, no shell features)
    Command { command: String },
    /// Shell command (passed to sh -c, supports pipes/redirects/etc)
    Shell { shell: String },
    /// `make` invocation with arguments
    Make { make: Vec<String> },
    /// `./configure` invocation with arguments
    Configure { configure: Vec<String> },
    /// `cmake` invocation with arguments
    Cmake { cmake: Vec<String> },
    /// `meson` invocation with arguments
    Meson { meson: Vec<String> },
    /// `cargo` invocation with arguments
    Cargo { cargo: Vec<String> },
    /// `go` invocation with arguments
    Go { go: Vec<String> },
    /// `python` invocation with arguments
    Python { python: Vec<String> },
    /// Node.js tooling invocation with arguments
    Nodejs { nodejs: Vec<String> },
}
/// Post-processing stage run after the build completes
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Post {
    /// `RPath` patching behavior
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub patch_rpaths: Option<String>,
    /// Fix executable permissions
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fix_permissions: Option<PostOption>,
    /// QA pipeline override (auto, rust, c, go, python, skip)
    #[serde(default, skip_serializing_if = "crate::QaPipelineOverride::is_default")]
    pub qa_pipeline: crate::QaPipelineOverride,
    /// Custom post-processing commands
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub commands: Vec<String>,
}
impl Post {
    /// `true` when no post-processing of any kind was requested, so the
    /// whole section can be skipped.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        let no_patch_options = self.patch_rpaths.is_none() && self.fix_permissions.is_none();
        no_patch_options
            && self.qa_pipeline == crate::QaPipelineOverride::Auto
            && self.commands.is_empty()
    }
}
/// Post-processing option (true/false or list of paths)
///
/// Untagged: a bare boolean toggles the option globally; a list restricts
/// it to specific paths.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum PostOption {
    Boolean(bool),
    Paths(Vec<String>),
}
/// Installation behavior (serialization is hand-written below to omit the
/// section when everything is default)
#[derive(Debug, Clone, Default, Deserialize)]
pub struct Install {
    /// Automatically install after building
    #[serde(default)]
    pub auto: bool,
}
// Hand-written serializer: `auto` is only emitted when set, so a default
// `Install` serializes as an empty map.
impl Serialize for Install {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeMap;
        // Entry count is not known up front, hence `None`.
        let mut map = serializer.serialize_map(None)?;
        if self.auto {
            map.serialize_entry("auto", &self.auto)?;
        }
        map.end()
    }
}
impl Install {
    /// Check if install is default (`auto` disabled), i.e. whether the
    /// section could be omitted from serialized output
    #[must_use]
    pub fn is_default(&self) -> bool {
        !self.auto
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/lib.rs | crates/types/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(clippy::module_name_repetitions)]
//! Core type definitions for the sps2 package manager
//!
//! This crate provides fundamental types used throughout the system,
//! including version specifications, package information, and common data structures.
pub mod format;
pub mod manifest;
pub mod package;
pub mod recipe;
pub mod reports;
pub mod state;
pub mod version;
// Re-export commonly used types
pub use format::{
PackageFormatChecker, PackageFormatCompatibility, PackageFormatMigration,
PackageFormatValidationResult, PackageFormatVersion, PackageFormatVersionError,
};
pub use manifest::{
Dependencies as ManifestDependencies, Manifest, ManifestBuilder,
PackageInfo as ManifestPackageInfo,
};
pub use package::{
DepEdge, DepKind, PackageId, PackageInfo, PackageSpec, PackageStatus, PythonPackageMetadata,
SearchResult,
};
pub use recipe::{
Build, BuildSystem, Checksum, ChecksumAlgorithm, Dependencies, Environment, FetchSource,
GitSource, Install, IsolationLevel, LocalSource, Metadata, NamedSource, ParsedStep, Post,
PostOption, Source, SourceMethod, YamlRecipe,
};
pub use reports::{BuildReport, InstallReport, PackageChange};
pub use semver::Version;
pub use state::{ChangeType, OpChange, SlotId, StateId, StateInfo, StateTransition};
pub use uuid::Uuid;
pub use version::{VersionConstraint, VersionSpec};
// QA pipeline override is defined below in this module
use serde::{Deserialize, Serialize};
/// Architecture type for packages (only Apple Silicon is supported)
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Arch {
    /// 64-bit ARM (Apple Silicon); serialized as "arm64"
    #[serde(rename = "arm64")]
    Arm64,
}
/// `RPath` handling style for dynamic libraries and executables
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum RpathStyle {
    /// Modern approach: Keep @rpath references with proper `LC_RPATH` entries
    /// This is the default and recommended approach for relocatable binaries
    Modern,
    /// Absolute approach: Rewrite all @rpath references to absolute paths
    /// Use this for compatibility with tools that don't handle @rpath correctly
    Absolute,
}
// Human-readable names ("Modern"/"Absolute"); note these are capitalized,
// unlike the lowercase serde representation.
impl std::fmt::Display for RpathStyle {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::Modern => "Modern",
            Self::Absolute => "Absolute",
        };
        f.write_str(label)
    }
}
// Modern (@rpath-preserving) handling is the default.
impl Default for RpathStyle {
    fn default() -> Self {
        Self::Modern
    }
}
/// Build system profile for post-validation pipeline selection
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum BuildSystemProfile {
    /// C/C++ build systems (autotools, cmake, meson) - full validation pipeline
    /// Includes all validators and patchers, binary patching, and code re-signing
    NativeFull,
    /// Rust build system - minimal validation to avoid breaking panic unwinding
    /// Skips binary patching and code re-signing that interfere with Rust runtime
    RustMinimal,
    /// Go build system - medium validation for mostly static binaries
    /// Limited patching, no rpath needed unless CGO is used
    GoMedium,
    /// Script-based systems (Python, Node.js) - light validation
    /// Focus on permissions and text file patching only
    ScriptLight,
}
/// QA pipeline override for manual recipe control
///
/// `Deserialize` is implemented manually below so values are matched
/// case-insensitively with surrounding whitespace ignored.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum QaPipelineOverride {
    /// Use automatic detection based on build systems used
    Auto,
    /// Force Rust minimal pipeline (skip binary patching)
    Rust,
    /// Force C/C++ full pipeline (comprehensive validation and patching)
    C,
    /// Force Go medium pipeline (limited patching)
    Go,
    /// Force Python/script light pipeline (text file patching only)
    Python,
    /// Skip artifact QA entirely (dangerous, use only for special cases)
    Skip,
}
// Manual deserializer: accepts the override case-insensitively and with
// surrounding whitespace (e.g. " Rust " parses as `Rust`), which the derived
// snake_case impl would reject.
impl<'de> serde::Deserialize<'de> for QaPipelineOverride {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        /// Visitor mapping the recipe string onto an override variant.
        struct QaPipelineOverrideVisitor;
        impl serde::de::Visitor<'_> for QaPipelineOverrideVisitor {
            type Value = QaPipelineOverride;
            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_str("a qa pipeline override (auto, rust, c, go, python, or skip)")
            }
            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // Normalize before matching so "AUTO" / " auto " are accepted.
                match value.trim().to_ascii_lowercase().as_str() {
                    "auto" => Ok(QaPipelineOverride::Auto),
                    "rust" => Ok(QaPipelineOverride::Rust),
                    "c" => Ok(QaPipelineOverride::C),
                    "go" => Ok(QaPipelineOverride::Go),
                    "python" => Ok(QaPipelineOverride::Python),
                    "skip" => Ok(QaPipelineOverride::Skip),
                    other => Err(serde::de::Error::unknown_variant(
                        other,
                        &["auto", "rust", "c", "go", "python", "skip"],
                    )),
                }
            }
            fn visit_string<E>(self, value: String) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                self.visit_str(&value)
            }
        }
        // The value is always encoded as a string, so request one explicitly.
        // `deserialize_any` only works with self-describing formats; using
        // `deserialize_str` keeps this usable with formats like bincode too.
        deserializer.deserialize_str(QaPipelineOverrideVisitor)
    }
}
// Automatic detection is the default; recipes only set this to override it.
impl Default for QaPipelineOverride {
    fn default() -> Self {
        Self::Auto
    }
}
impl QaPipelineOverride {
    /// Map the override onto a concrete build-system profile.
    ///
    /// Returns `None` for `Auto` (defer to automatic detection) and for
    /// `Skip` (QA is bypassed entirely).
    #[must_use]
    pub fn to_profile(self) -> Option<BuildSystemProfile> {
        let profile = match self {
            Self::Auto | Self::Skip => return None,
            Self::Rust => BuildSystemProfile::RustMinimal,
            Self::C => BuildSystemProfile::NativeFull,
            Self::Go => BuildSystemProfile::GoMedium,
            Self::Python => BuildSystemProfile::ScriptLight,
        };
        Some(profile)
    }
    /// `true` when artifact QA should be skipped entirely.
    #[must_use]
    pub fn skips_qa(self) -> bool {
        self == Self::Skip
    }
    /// `true` for the default (`auto`) value; referenced by serde's
    /// `skip_serializing_if` in recipe types.
    #[must_use]
    pub fn is_default(&self) -> bool {
        *self == Self::Auto
    }
}
impl BuildSystemProfile {
    /// Select the QA profile appropriate for a build-system name.
    ///
    /// Unrecognized names fall back to the strictest (full native) pipeline.
    #[must_use]
    pub fn from_build_system(build_system: &str) -> Self {
        if build_system == "cargo" {
            Self::RustMinimal
        } else if build_system == "go" {
            Self::GoMedium
        } else if matches!(build_system, "python" | "nodejs") {
            Self::ScriptLight
        } else {
            // Default to full validation for unknown systems.
            Self::NativeFull
        }
    }
}
// The strictest (full native) pipeline is the safe default.
impl Default for BuildSystemProfile {
    fn default() -> Self {
        Self::NativeFull
    }
}
// Display form matches the serde rename on `Arch::Arm64` ("arm64").
impl std::fmt::Display for Arch {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Arm64 => f.write_str("arm64"),
        }
    }
}
/// Output format for CLI commands; serialized in lowercase
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum OutputFormat {
    /// Plain text without terminal styling
    Plain,
    /// Terminal-aware output (the default)
    Tty,
    /// Machine-readable JSON
    Json,
}
// Terminal-aware output is the default.
impl Default for OutputFormat {
    fn default() -> Self {
        Self::Tty
    }
}
/// Color output choice; serialized in lowercase
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ColorChoice {
    /// Always emit color codes
    Always,
    /// Decide based on whether output is a terminal
    Auto,
    /// Never emit color codes
    Never,
}
// Implement clap::ValueEnum manually so the CLI strings ("always", "auto",
// "never") are defined alongside the variants.
impl clap::ValueEnum for ColorChoice {
    fn value_variants<'a>() -> &'a [Self] {
        &[Self::Always, Self::Auto, Self::Never]
    }
    fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
        // CLI strings must match the serde lowercase representation above.
        Some(match self {
            Self::Always => clap::builder::PossibleValue::new("always"),
            Self::Auto => clap::builder::PossibleValue::new("auto"),
            Self::Never => clap::builder::PossibleValue::new("never"),
        })
    }
}
// Auto-detection is the default color behavior.
impl Default for ColorChoice {
    fn default() -> Self {
        Self::Auto
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/version.rs | crates/types/src/version.rs | //! Version specification and constraint parsing
//!
//! Implements Python-style version constraints:
//! - `==1.2.3` - Exact version
//! - `>=1.2.0` - Minimum version
//! - `<=2.0.0` - Maximum version
//! - `~=1.2.0` - Compatible release (>=1.2.0,<1.3.0)
//! - `!=1.5.0` - Exclude version
//! - Multiple constraints: `>=1.2,<2.0,!=1.5.0`
use semver::Version;
use serde::{Deserialize, Serialize};
use sps2_errors::VersionError;
use std::fmt;
use std::str::FromStr;
/// A single version constraint (one operator applied to one version)
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum VersionConstraint {
    /// `==v` — exactly this version
    Exact(Version),
    /// `>=v` — this version or newer
    GreaterEqual(Version),
    /// `<=v` — this version or older
    LessEqual(Version),
    /// `>v` — strictly newer
    Greater(Version),
    /// `<v` — strictly older
    Less(Version),
    /// `~=v` — compatible release (same major.minor, patch may increase)
    Compatible(Version),
    /// `!=v` — anything but this version
    NotEqual(Version),
}
impl VersionConstraint {
    /// Check if a version satisfies this constraint
    #[must_use]
    pub fn matches(&self, version: &Version) -> bool {
        match self {
            Self::Exact(v) => version == v,
            Self::GreaterEqual(v) => version >= v,
            Self::LessEqual(v) => version <= v,
            Self::Greater(v) => version > v,
            Self::Less(v) => version < v,
            Self::NotEqual(v) => version != v,
            Self::Compatible(v) => {
                // ~=1.2.3 means >=1.2.3,<1.3.0 (patch version updates only)
                // ~=1.2.0 means >=1.2.0,<1.3.0 (patch version updates only)
                // For simplicity, always allow only patch updates for compatible constraints
                version >= v && version.major == v.major && version.minor == v.minor
            }
        }
    }
    /// Parse a single constraint from a string such as `>=1.2.0`
    ///
    /// The operator table is tried longest-prefix-first so `>=`/`<=` are
    /// never misread as `>`/`<` followed by a garbled version string.
    fn parse(s: &str) -> Result<Self, VersionError> {
        let s = s.trim();
        // (operator prefix, variant constructor); two-character operators first.
        let operators: [(&str, fn(Version) -> Self); 7] = [
            ("==", Self::Exact),
            (">=", Self::GreaterEqual),
            ("<=", Self::LessEqual),
            ("!=", Self::NotEqual),
            ("~=", Self::Compatible),
            (">", Self::Greater),
            ("<", Self::Less),
        ];
        for (prefix, constructor) in operators {
            if let Some(version_str) = s.strip_prefix(prefix) {
                let version =
                    Version::parse(version_str.trim()).map_err(|e| VersionError::ParseError {
                        message: e.to_string(),
                    })?;
                return Ok(constructor(version));
            }
        }
        // No recognized operator prefix: the constraint is malformed.
        Err(VersionError::InvalidConstraint {
            input: s.to_string(),
        })
    }
}
// Renders as "<operator><version>", the same syntax accepted by `parse`.
impl fmt::Display for VersionConstraint {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (op, v) = match self {
            Self::Exact(v) => ("==", v),
            Self::GreaterEqual(v) => (">=", v),
            Self::LessEqual(v) => ("<=", v),
            Self::Greater(v) => (">", v),
            Self::Less(v) => ("<", v),
            Self::Compatible(v) => ("~=", v),
            Self::NotEqual(v) => ("!=", v),
        };
        write!(f, "{op}{v}")
    }
}
/// A version specification that can contain multiple constraints
///
/// An empty constraint list means "any version" (see `is_any`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct VersionSpec {
    // All constraints must hold for a version to match (logical AND).
    constraints: Vec<VersionConstraint>,
}
impl VersionSpec {
    /// Wrap a single constraint in a spec.
    #[must_use]
    pub fn single(constraint: VersionConstraint) -> Self {
        let constraints = vec![constraint];
        Self { constraints }
    }
    /// Build a spec that matches exactly one version.
    #[must_use]
    pub fn exact(version: Version) -> Self {
        Self::single(VersionConstraint::Exact(version))
    }
    /// A version satisfies the spec when every constraint accepts it
    /// (an empty spec therefore matches everything).
    #[must_use]
    pub fn matches(&self, version: &Version) -> bool {
        self.constraints
            .iter()
            .all(|constraint| constraint.matches(version))
    }
    /// Borrow the underlying constraint list.
    #[must_use]
    pub fn constraints(&self) -> &[VersionConstraint] {
        self.constraints.as_slice()
    }
    /// `true` when the spec is unconstrained (matches any version).
    #[must_use]
    pub fn is_any(&self) -> bool {
        self.constraints.is_empty()
    }
}
impl FromStr for VersionSpec {
    type Err = VersionError;
    /// Parse a comma-separated constraint list, e.g. `>=1.2,<2.0,!=1.5.0`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let spec = s.trim();
        // An empty spec or a bare "*" places no constraints at all.
        if spec.is_empty() || spec == "*" {
            return Ok(Self {
                constraints: Vec::new(),
            });
        }
        // Every comma-separated part must parse; fail on the first bad one.
        let mut constraints = Vec::new();
        for part in spec.split(',') {
            constraints.push(VersionConstraint::parse(part.trim())?);
        }
        if constraints.is_empty() {
            return Err(VersionError::InvalidConstraint {
                input: spec.to_string(),
            });
        }
        Ok(Self { constraints })
    }
}
impl fmt::Display for VersionSpec {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // An unconstrained spec round-trips as "*".
        if self.constraints.is_empty() {
            return write!(f, "*");
        }
        // Otherwise emit the constraints comma-separated, e.g. ">=1.2,<2.0".
        for (i, constraint) in self.constraints.iter().enumerate() {
            if i > 0 {
                write!(f, ",")?;
            }
            write!(f, "{constraint}")?;
        }
        Ok(())
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/manifest.rs | crates/types/src/manifest.rs | #![allow(clippy::module_name_repetitions)]
//! Package manifest handling types for sps2
//!
//! This module defines the `manifest.toml` format and provides
//! serialization/deserialization and validation for package metadata.
use crate::{package::PackageSpec, Arch, PackageFormatVersion, PythonPackageMetadata, Version};
use serde::{de::IgnoredAny, Deserialize, Serialize};
use sps2_errors::{Error, PackageError};
/// Package manifest (manifest.toml contents)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Manifest {
    /// Package format version for compatibility checking; defaults to the
    /// type's `Default` when absent from older manifests
    #[serde(default = "PackageFormatVersion::default")]
    pub format_version: PackageFormatVersion,
    /// Core package metadata (`[package]` table)
    pub package: PackageInfo,
    /// Runtime and build dependencies (`[dependencies]` table)
    pub dependencies: Dependencies,
    /// Optional Python-specific metadata for Python packages
    #[serde(skip_serializing_if = "Option::is_none")]
    pub python: Option<PythonPackageMetadata>,
}
/// Package information section (`[package]` table of manifest.toml)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageInfo {
    /// Package name
    pub name: String,
    /// Version string; parsed/validated via `Manifest::version`
    pub version: String,
    /// Package revision number
    pub revision: u32,
    /// Architecture string; parsed/validated via `Manifest::arch`
    pub arch: String,
    /// Optional human-readable description
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Optional project homepage URL
    #[serde(skip_serializing_if = "Option::is_none")]
    pub homepage: Option<String>,
    /// Optional license identifier
    #[serde(skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
    /// Legacy compression configuration retained for backward compatibility;
    /// accepted on input (as `compression`) but ignored and never re-serialized
    #[serde(default, alias = "compression", skip_serializing)]
    pub legacy_compression: Option<IgnoredAny>,
}
/// Dependencies section (`[dependencies]` table of manifest.toml)
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Dependencies {
    /// Runtime dependency spec strings (parsed via `Manifest::runtime_deps`)
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub runtime: Vec<String>,
    /// Build dependency spec strings (parsed via `Manifest::build_deps`)
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub build: Vec<String>,
}
impl Manifest {
/// Create a new manifest
#[must_use]
pub fn new(name: String, version: &Version, revision: u32, arch: &Arch) -> Self {
Self {
format_version: PackageFormatVersion::CURRENT,
package: PackageInfo {
name,
version: version.to_string(),
revision,
arch: arch.to_string(),
description: None,
homepage: None,
license: None,
legacy_compression: None,
},
dependencies: Dependencies::default(),
python: None,
}
}
/// Parse the package version
///
/// # Errors
///
/// Returns an error if the version string is not a valid semantic version.
pub fn version(&self) -> Result<Version, Error> {
Version::parse(&self.package.version).map_err(|_e| {
sps2_errors::VersionError::InvalidVersion {
input: self.package.version.clone(),
}
.into()
})
}
/// Parse the architecture
///
/// # Errors
///
/// Returns an error if the architecture string is not supported (currently only "arm64" is supported).
pub fn arch(&self) -> Result<Arch, Error> {
match self.package.arch.as_str() {
"arm64" => Ok(Arch::Arm64),
_ => Err(PackageError::InvalidFormat {
message: format!("unsupported architecture: {}", self.package.arch),
}
.into()),
}
}
/// Get runtime dependencies as `PackageSpec`
///
/// # Errors
///
/// Returns an error if any dependency specification string is invalid or cannot be parsed.
pub fn runtime_deps(&self) -> Result<Vec<PackageSpec>, Error> {
self.dependencies
.runtime
.iter()
.map(|s| PackageSpec::parse(s))
.collect::<Result<Vec<_>, _>>()
.map_err(Into::into)
}
/// Get build dependencies as `PackageSpec`
///
/// # Errors
///
/// Returns an error if any dependency specification string is invalid or cannot be parsed.
pub fn build_deps(&self) -> Result<Vec<PackageSpec>, Error> {
self.dependencies
.build
.iter()
.map(|s| PackageSpec::parse(s))
.collect::<Result<Vec<_>, _>>()
.map_err(Into::into)
}
/// Add a runtime dependency
pub fn add_runtime_dep(&mut self, spec: &str) {
self.dependencies.runtime.push(spec.to_string());
}
/// Add a build dependency
pub fn add_build_dep(&mut self, spec: &str) {
self.dependencies.build.push(spec.to_string());
}
/// Serialize to TOML string
///
/// # Errors
///
/// Returns an error if the manifest cannot be serialized to TOML format.
pub fn to_toml(&self) -> Result<String, Error> {
toml::to_string_pretty(self).map_err(|e| {
PackageError::InvalidManifest {
message: e.to_string(),
}
.into()
})
}
/// Load manifest from TOML string
///
/// # Errors
///
/// Returns an error if the TOML content is malformed or contains invalid manifest data.
pub fn from_toml(content: &str) -> Result<Self, Error> {
toml::from_str(content).map_err(|e| {
PackageError::InvalidManifest {
message: e.to_string(),
}
.into()
})
}
/// Get the package format version
#[must_use]
pub const fn format_version(&self) -> &PackageFormatVersion {
&self.format_version
}
/// Set the package format version
pub fn set_format_version(&mut self, version: PackageFormatVersion) {
self.format_version = version;
}
/// Check if this manifest is compatible with a specific format version
#[must_use]
pub fn is_compatible_with(&self, other_version: &PackageFormatVersion) -> bool {
self.format_version.is_compatible_with(other_version)
}
/// Check if this manifest requires migration to be compatible with a version
#[must_use]
pub fn requires_migration_to(&self, target_version: &PackageFormatVersion) -> bool {
self.format_version.requires_migration_from(target_version)
}
/// Validate manifest fields
///
/// # Errors
///
/// Returns an error if any required field is empty, invalid, or if dependency specifications are malformed.
pub fn validate(&self) -> Result<(), Error> {
// Validate name
if self.package.name.is_empty() {
return Err(PackageError::InvalidManifest {
message: "package name cannot be empty".to_string(),
}
.into());
}
// Validate version
self.version()?;
// Validate arch
self.arch()?;
// Validate dependencies
self.runtime_deps()?;
self.build_deps()?;
// Validate format version compatibility
let current_version = PackageFormatVersion::CURRENT;
if !self.format_version.is_compatible_with(¤t_version) {
return Err(PackageError::InvalidManifest {
message: format!(
"Package format version {} is incompatible with current version {}",
self.format_version, current_version
),
}
.into());
}
Ok(())
}
/// Get package filename
#[must_use]
pub fn filename(&self) -> String {
format!(
"{}-{}-{}.{}.sp",
self.package.name, self.package.version, self.package.revision, self.package.arch
)
}
}
/// Builder for creating manifests; `build()` validates before returning
#[derive(Debug, Clone)]
pub struct ManifestBuilder {
    // Manifest under construction, mutated by the chainable setters below.
    manifest: Manifest,
}
impl ManifestBuilder {
    /// Create a new builder (revision defaults to 1)
    #[must_use]
    pub fn new(name: String, version: &Version, arch: &Arch) -> Self {
        Self {
            manifest: Manifest::new(name, version, 1, arch),
        }
    }
    /// Set package format version
    #[must_use]
    pub fn format_version(mut self, version: PackageFormatVersion) -> Self {
        self.manifest.format_version = version;
        self
    }
    /// Set revision
    #[must_use]
    pub fn revision(mut self, revision: u32) -> Self {
        self.manifest.package.revision = revision;
        self
    }
    /// Set description
    #[must_use]
    pub fn description(mut self, desc: String) -> Self {
        self.manifest.package.description = Some(desc);
        self
    }
    /// Set homepage
    #[must_use]
    pub fn homepage(mut self, url: String) -> Self {
        self.manifest.package.homepage = Some(url);
        self
    }
    /// Set license
    #[must_use]
    pub fn license(mut self, license: String) -> Self {
        self.manifest.package.license = Some(license);
        self
    }
    /// Add runtime dependency (spec string, validated in `build`)
    #[must_use]
    pub fn depends_on(mut self, spec: &str) -> Self {
        self.manifest.add_runtime_dep(spec);
        self
    }
    /// Add build dependency (spec string, validated in `build`)
    #[must_use]
    pub fn build_depends_on(mut self, spec: &str) -> Self {
        self.manifest.add_build_dep(spec);
        self
    }
    /// Set Python package metadata
    #[must_use]
    pub fn python_metadata(mut self, metadata: PythonPackageMetadata) -> Self {
        self.manifest.python = Some(metadata);
        self
    }
    /// Build the manifest
    ///
    /// # Errors
    ///
    /// Returns an error if the manifest validation fails.
    pub fn build(self) -> Result<Manifest, Error> {
        self.manifest.validate()?;
        Ok(self.manifest)
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/state.rs | crates/types/src/state.rs | //! State management type definitions
use crate::Version;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use uuid::Uuid;
/// State identifier (a UUID uniquely naming one system state)
pub type StateId = Uuid;
/// Information about a system state
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StateInfo {
    /// State ID
    pub id: StateId,
    /// Parent state ID (`None` for the root state); older serialized data
    /// may use the `parent_id` key
    #[serde(alias = "parent_id")]
    pub parent: Option<StateId>,
    /// Creation timestamp
    pub timestamp: DateTime<Utc>,
    /// Operation that created this state
    pub operation: String,
    /// Whether this is the current state
    pub current: bool,
    /// Number of packages in this state
    pub package_count: usize,
    /// Total size of packages
    pub total_size: u64,
    /// Summary of changes from parent (using `ops::OpChange` for change type info)
    pub changes: Vec<OpChange>,
}
/// State transition record
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StateTransition {
    /// State the transition started from
    pub from: StateId,
    /// State the transition produced
    pub to: StateId,
    /// Operation that triggered the transition
    pub operation: String,
    /// When the transition happened
    pub timestamp: DateTime<Utc>,
    /// Whether the transition completed successfully
    pub success: bool,
    /// If this transition was a rollback, the state it rolled back
    pub rollback_of: Option<StateId>,
}
/// Operation change for state tracking
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpChange {
    /// Change type
    pub change_type: ChangeType,
    /// Package name
    pub package: String,
    /// Old version (for updates/removals; `None` on fresh installs)
    pub old_version: Option<Version>,
    /// New version (for installs/updates; `None` on removals)
    pub new_version: Option<Version>,
}
/// Type of operation change; serialized in lowercase
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ChangeType {
    /// Package was installed
    Install,
    /// Package was updated
    Update,
    /// Package was removed
    Remove,
    /// Package was downgraded
    Downgrade,
}
/// Identifier for the live slot containing a state snapshot.
///
/// Serde impls are hand-written below so the wire form is the directory
/// name (`live-A` / `live-B`) with bare-letter aliases accepted on input.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
pub enum SlotId {
    /// Primary slot (`live-A`); the default.
    #[default]
    A,
    /// Secondary slot (`live-B`).
    B,
}
impl SlotId {
    /// All available slots.
    pub const ALL: [SlotId; 2] = [SlotId::A, SlotId::B];
    /// Directory name associated with the slot (`live-A` / `live-B`).
    #[must_use]
    pub fn dir_name(self) -> &'static str {
        if self == SlotId::A {
            "live-A"
        } else {
            "live-B"
        }
    }
    /// Return the opposite slot (A <-> B).
    #[must_use]
    pub fn other(self) -> SlotId {
        if self == SlotId::A {
            SlotId::B
        } else {
            SlotId::A
        }
    }
}
// Display form is the on-disk directory name ("live-A" / "live-B").
impl fmt::Display for SlotId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.dir_name())
    }
}
// Serialize as the directory name string ("live-A" / "live-B"), matching
// what the deserializer below accepts.
impl Serialize for SlotId {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.dir_name())
    }
}
impl<'de> Deserialize<'de> for SlotId {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let raw = String::deserialize(deserializer)?;
match raw.as_str() {
"live-A" | "A" | "a" => Ok(SlotId::A),
"live-B" | "B" | "b" => Ok(SlotId::B),
other => Err(serde::de::Error::custom(format!(
"unknown slot identifier: {other}"
))),
}
}
}
/// Phase of a two-phase commit transaction
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum TransactionPhase {
    /// The database changes are committed, and the system is ready for the filesystem swap
    Prepared,
    /// Filesystem swap has been executed
    Swapped,
}
/// Transaction journal for crash recovery
///
/// Persisted between the two commit phases so an interrupted transaction
/// can be completed or rolled back on restart.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransactionJournal {
    /// New state ID being transitioned to
    pub new_state_id: Uuid,
    /// Parent state ID we're transitioning from
    pub parent_state_id: Uuid,
    /// Path to the staging directory
    pub staging_path: std::path::PathBuf,
    /// Slot containing the prepared state; defaults to slot A for journals
    /// written before this field existed
    #[serde(default)]
    pub staging_slot: SlotId,
    /// Current phase of the transaction
    pub phase: TransactionPhase,
    /// Operation type (install, uninstall, rollback, etc.)
    pub operation: String,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/reports.rs | crates/types/src/reports.rs | //! Report type definitions for operations
use crate::Version;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use uuid::Uuid;
/// Installation report summarizing the outcome of an install operation
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct InstallReport {
    /// Packages that were installed
    pub installed: Vec<PackageChange>,
    /// Packages that were updated
    pub updated: Vec<PackageChange>,
    /// Packages that were removed
    pub removed: Vec<PackageChange>,
    /// New state ID
    pub state_id: Uuid,
    /// Total execution time in milliseconds
    pub duration_ms: u64,
}
/// Build report summarizing the outcome of a package build
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BuildReport {
    /// Package that was built
    pub package: String,
    /// Version that was built
    pub version: Version,
    /// Output file path
    pub output_path: PathBuf,
    /// Build duration in milliseconds
    pub duration_ms: u64,
}
/// Package change for reports
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PackageChange {
    /// Package name
    pub name: String,
    /// Previous version (`None` for fresh installs)
    pub from_version: Option<Version>,
    /// New version (`None` for removals)
    pub to_version: Option<Version>,
    /// Size in bytes, if known
    pub size: Option<u64>,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/format.rs | crates/types/src/format.rs | //! Package format versioning for sps2 package evolution
//!
//! This module provides comprehensive versioning support for the .sp package format,
//! enabling safe evolution and migration of the package format over time.
use serde::{Deserialize, Serialize};
use std::fmt;
/// Package format version using semantic versioning
///
/// The package format version follows semantic versioning principles:
/// - Major: Breaking changes requiring migration (incompatible format changes)
/// - Minor: Backwards-compatible feature additions (new optional fields, compression types)
/// - Patch: Bug fixes and optimizations (no format changes)
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct PackageFormatVersion {
    /// Major component; differences here are incompatible
    pub major: u32,
    /// Minor component; backwards-compatible additions
    pub minor: u32,
    /// Patch component; no format changes
    pub patch: u32,
}
impl PackageFormatVersion {
    /// Current stable format version (v1.0.0)
    pub const CURRENT: Self = Self {
        major: 1,
        minor: 0,
        patch: 0,
    };

    /// Magic bytes identifying a versioned package header: ASCII "SPV1".
    /// Shared by `to_header_bytes` and `from_header_bytes` so the two cannot drift.
    const HEADER_MAGIC: [u8; 4] = [0x53, 0x50, 0x56, 0x31];

    /// Create a new package format version
    #[must_use]
    pub const fn new(major: u32, minor: u32, patch: u32) -> Self {
        Self {
            major,
            minor,
            patch,
        }
    }

    /// Parse a single dotted component, naming it in the error on failure.
    fn parse_component(component: &str, value: &str) -> Result<u32, PackageFormatVersionError> {
        value
            .parse::<u32>()
            .map_err(|_| PackageFormatVersionError::InvalidNumber {
                component: component.to_string(),
                value: value.to_string(),
            })
    }

    /// Narrow a component to `u16` for header encoding, naming it in the error on failure.
    fn component_to_u16(component: &str, value: u32) -> Result<u16, PackageFormatVersionError> {
        u16::try_from(value).map_err(|_| PackageFormatVersionError::InvalidNumber {
            component: component.to_string(),
            value: value.to_string(),
        })
    }

    /// Parse version from string in format "major.minor.patch"
    ///
    /// # Errors
    ///
    /// Returns an error if the version string is malformed or contains invalid numbers
    pub fn parse(version_str: &str) -> Result<Self, PackageFormatVersionError> {
        let parts: Vec<&str> = version_str.split('.').collect();
        if parts.len() != 3 {
            return Err(PackageFormatVersionError::InvalidFormat {
                input: version_str.to_string(),
                reason: "Expected format: major.minor.patch".to_string(),
            });
        }
        Ok(Self::new(
            Self::parse_component("major", parts[0])?,
            Self::parse_component("minor", parts[1])?,
            Self::parse_component("patch", parts[2])?,
        ))
    }

    /// Check if this version is compatible with another version
    ///
    /// Compatibility rules:
    /// - Same major version: compatible
    /// - Different major version: incompatible (breaking changes)
    /// - Minor/patch differences within same major: compatible
    #[must_use]
    pub fn is_compatible_with(&self, other: &Self) -> bool {
        self.major == other.major
    }

    /// Check if this version is newer than another
    #[must_use]
    pub fn is_newer_than(&self, other: &Self) -> bool {
        self > other
    }

    /// Check if this version requires migration from another version
    /// (exactly the negation of [`Self::is_compatible_with`]).
    #[must_use]
    pub fn requires_migration_from(&self, other: &Self) -> bool {
        self.major != other.major
    }

    /// Get the compatibility matrix entry for this version
    #[must_use]
    pub fn compatibility_info(&self) -> PackageFormatCompatibility {
        match (self.major, self.minor, self.patch) {
            // v1.0.0 is readable by any 1.x reader.
            (1, 0, 0) => PackageFormatCompatibility {
                version: self.clone(),
                minimum_reader_version: Self::new(1, 0, 0),
                maximum_reader_version: Self::new(1, u32::MAX, u32::MAX),
                supports_signatures: true,
                deprecation_warning: None,
            },
            // Future versions would be added here
            _ => PackageFormatCompatibility {
                version: self.clone(),
                minimum_reader_version: self.clone(),
                maximum_reader_version: self.clone(),
                supports_signatures: true,
                deprecation_warning: Some(format!(
                    "Format version {self} is not officially supported"
                )),
            },
        }
    }

    /// Get version information for storage in package headers
    ///
    /// Layout: 4 magic bytes, major as `u32` LE, minor as `u16` LE, patch as `u16` LE.
    ///
    /// # Errors
    ///
    /// Returns an error if the `minor` or `patch` versions cannot fit into
    /// `u16` for header encoding.
    pub fn to_header_bytes(&self) -> Result<[u8; 12], PackageFormatVersionError> {
        let mut bytes = [0u8; 12];
        bytes[0..4].copy_from_slice(&Self::HEADER_MAGIC);
        // Major version (4 bytes, little endian)
        bytes[4..8].copy_from_slice(&self.major.to_le_bytes());
        // Minor version (2 bytes, little endian)
        bytes[8..10].copy_from_slice(&Self::component_to_u16("minor", self.minor)?.to_le_bytes());
        // Patch version (2 bytes, little endian)
        bytes[10..12].copy_from_slice(&Self::component_to_u16("patch", self.patch)?.to_le_bytes());
        Ok(bytes)
    }

    /// Parse version from package header bytes
    ///
    /// # Errors
    ///
    /// Returns an error if the header format is invalid or contains unsupported version
    pub fn from_header_bytes(bytes: &[u8]) -> Result<Self, PackageFormatVersionError> {
        if bytes.len() < 12 {
            return Err(PackageFormatVersionError::InvalidHeader {
                reason: "Header too short".to_string(),
            });
        }
        // Check magic bytes
        if bytes[0..4] != Self::HEADER_MAGIC {
            return Err(PackageFormatVersionError::InvalidHeader {
                reason: "Invalid magic bytes in version header".to_string(),
            });
        }
        // Parse version components (same layout `to_header_bytes` writes)
        let major = u32::from_le_bytes([bytes[4], bytes[5], bytes[6], bytes[7]]);
        let minor = u32::from(u16::from_le_bytes([bytes[8], bytes[9]]));
        let patch = u32::from(u16::from_le_bytes([bytes[10], bytes[11]]));
        Ok(Self::new(major, minor, patch))
    }
}
impl fmt::Display for PackageFormatVersion {
    /// Render as dotted `major.minor.patch` — the same shape `parse` accepts.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self {
            major,
            minor,
            patch,
        } = self;
        write!(f, "{major}.{minor}.{patch}")
    }
}
impl Default for PackageFormatVersion {
    /// The default format version is the current stable one.
    fn default() -> Self {
        Self::CURRENT
    }
}
/// Package format compatibility information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageFormatCompatibility {
    /// The format version this compatibility info describes
    pub version: PackageFormatVersion,
    /// Minimum version of sps2 that can read this format
    pub minimum_reader_version: PackageFormatVersion,
    /// Maximum version of sps2 that can read this format
    pub maximum_reader_version: PackageFormatVersion,
    /// Whether this version supports package signatures
    pub supports_signatures: bool,
    /// Optional deprecation warning message
    pub deprecation_warning: Option<String>,
}
/// Migration information for upgrading packages between format versions
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageFormatMigration {
    /// Source format version
    pub from_version: PackageFormatVersion,
    /// Target format version
    pub to_version: PackageFormatVersion,
    /// Whether automatic migration is possible
    pub automatic: bool,
    /// Migration steps required
    pub steps: Vec<MigrationStep>,
    /// Estimated time for migration
    pub estimated_duration: MigrationDuration,
}
/// Individual migration step
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MigrationStep {
    /// Description of this migration step
    pub description: String,
    /// Whether this step is reversible
    pub reversible: bool,
    /// Data that might be lost in this step
    pub data_loss_warning: Option<String>,
}
/// Estimated duration for migration operations
///
/// Coarse buckets intended for user messaging, not scheduling.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MigrationDuration {
    /// Migration completes instantly
    Instant,
    /// Migration takes seconds
    Seconds(u32),
    /// Migration takes minutes
    Minutes(u32),
    /// Migration takes hours
    Hours(u32),
}
/// Package format version validation result
///
/// Produced by [`PackageFormatChecker::validate_version`].
#[derive(Debug, Clone)]
pub enum PackageFormatValidationResult {
    /// Format is compatible and can be processed
    Compatible,
    /// Format is newer but backwards compatible
    BackwardsCompatible {
        /// Warning message about newer format
        warning: String,
    },
    /// Format requires migration to be processed
    RequiresMigration {
        /// Available migration path
        migration: PackageFormatMigration,
    },
    /// Format is incompatible and cannot be processed
    Incompatible {
        /// Reason for incompatibility
        reason: String,
        /// Suggested action for user
        suggestion: String,
    },
}
/// Errors related to package format versioning
///
/// NOTE(review): consider `#[non_exhaustive]` so new variants are not a
/// breaking change for downstream matchers.
#[derive(Debug, Clone, thiserror::Error)]
pub enum PackageFormatVersionError {
    #[error("Invalid version format: {input} - {reason}")]
    InvalidFormat { input: String, reason: String },
    #[error("Invalid version number in {component}: {value}")]
    InvalidNumber { component: String, value: String },
    #[error("Invalid package header: {reason}")]
    InvalidHeader { reason: String },
    #[error("Unsupported format version: {version}")]
    UnsupportedVersion { version: String },
    #[error("Format version {version} requires migration from {current_version}")]
    MigrationRequired {
        version: String,
        current_version: String,
    },
    #[error("Format version {version} is incompatible with current reader")]
    IncompatibleVersion { version: String },
}
/// Package format version compatibility checker
#[derive(Clone, Debug)]
pub struct PackageFormatChecker {
    /// Current version this checker supports
    current_version: PackageFormatVersion,
}
impl PackageFormatChecker {
    /// Create a new format checker for the current version
    #[must_use]
    pub fn new() -> Self {
        Self {
            current_version: PackageFormatVersion::CURRENT,
        }
    }

    /// Create a format checker for a specific version
    #[must_use]
    pub fn for_version(version: PackageFormatVersion) -> Self {
        Self {
            current_version: version,
        }
    }

    /// Validate a package format version for compatibility
    ///
    /// Exact match and older-or-equal minor/patch within the same major are
    /// `Compatible`; a newer version within the same major is
    /// `BackwardsCompatible`; a different major requires migration.
    #[must_use]
    pub fn validate_version(
        &self,
        package_version: &PackageFormatVersion,
    ) -> PackageFormatValidationResult {
        if package_version == &self.current_version {
            return PackageFormatValidationResult::Compatible;
        }
        if package_version.is_compatible_with(&self.current_version) {
            if package_version.is_newer_than(&self.current_version) {
                PackageFormatValidationResult::BackwardsCompatible {
                    warning: format!(
                        "Package uses newer format version {} (current: {})",
                        package_version, self.current_version
                    ),
                }
            } else {
                PackageFormatValidationResult::Compatible
            }
        } else {
            // Majors differ. `requires_migration_from` is exactly the negation
            // of `is_compatible_with`, so the previous trailing `Incompatible`
            // arm could never execute; it has been removed as dead code.
            PackageFormatValidationResult::RequiresMigration {
                migration: self.get_migration_path(package_version),
            }
        }
    }

    /// Get migration path from one version to another
    fn get_migration_path(&self, from_version: &PackageFormatVersion) -> PackageFormatMigration {
        // For now, provide a simple migration path
        // In the future, this would include more sophisticated migration logic
        PackageFormatMigration {
            from_version: from_version.clone(),
            to_version: self.current_version.clone(),
            automatic: from_version.major == self.current_version.major,
            steps: vec![MigrationStep {
                description: format!(
                    "Convert package from format {} to {}",
                    from_version, self.current_version
                ),
                reversible: false,
                data_loss_warning: None,
            }],
            estimated_duration: MigrationDuration::Seconds(30),
        }
    }

    /// Get all migration paths available from a version
    #[must_use]
    pub fn available_migrations(
        &self,
        from_version: &PackageFormatVersion,
    ) -> Vec<PackageFormatMigration> {
        // For now, only support migration to current version
        // Future implementations could support migration to multiple target versions
        vec![self.get_migration_path(from_version)]
    }
}
impl Default for PackageFormatChecker {
    /// Defaults to a checker for [`PackageFormatVersion::CURRENT`].
    fn default() -> Self {
        Self::new()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/types/src/package.rs | crates/types/src/package.rs | //! Package-related type definitions
use crate::{Arch, Version, VersionSpec};
use serde::{Deserialize, Serialize};
use std::fmt;
/// Unique identifier for a package
///
/// A (name, concrete version) pair; displayed as `name-version`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PackageId {
    /// Package name
    pub name: String,
    /// Exact resolved version
    pub version: Version,
}
impl PackageId {
    /// Build a package identifier from a name and a concrete version.
    pub fn new(name: impl Into<String>, version: Version) -> Self {
        let name = name.into();
        Self { name, version }
    }
}
impl fmt::Display for PackageId {
    /// Formats as `name-version`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{name}-{version}", name = self.name, version = self.version)
    }
}
/// Package specification with optional version constraints
///
/// Parsed from strings like `jq>=1.6,<2.0`; a bare name means any version.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PackageSpec {
    /// Package name (everything before the first constraint operator)
    pub name: String,
    /// Version constraint set; `*` (any) when no operator was given
    pub version_spec: VersionSpec,
}
impl PackageSpec {
    /// Parse a package spec from a string (e.g., "jq>=1.6,<2.0")
    ///
    /// The name is everything before the earliest constraint operator; a
    /// string without any operator is treated as "any version" (`*`).
    ///
    /// # Errors
    ///
    /// Returns `VersionError` if the package specification string is malformed
    /// or contains invalid version constraints.
    ///
    pub fn parse(s: &str) -> Result<Self, sps2_errors::VersionError> {
        // Two-character operators are listed before their one-character
        // prefixes, but the earliest *position* wins regardless.
        const OPERATORS: [&str; 7] = ["==", ">=", "<=", "!=", "~=", ">", "<"];
        let split_pos = OPERATORS.iter().filter_map(|op| s.find(op)).min();
        let (name, version_str) = match split_pos {
            Some(pos) => (s[..pos].trim(), s[pos..].trim()),
            // No version constraint means any version
            None => (s.trim(), "*"),
        };
        if name.is_empty() {
            return Err(sps2_errors::VersionError::InvalidConstraint {
                input: s.to_string(),
            });
        }
        Ok(Self {
            name: name.to_string(),
            version_spec: version_str.parse()?,
        })
    }
}
impl fmt::Display for PackageSpec {
    /// Formats as `name` alone when any version matches, otherwise
    /// `name` immediately followed by the constraint set.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.name)?;
        if !self.version_spec.is_any() {
            write!(f, "{}", self.version_spec)?;
        }
        Ok(())
    }
}
/// Package information for installed packages
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageInfo {
    /// Package name
    pub name: String,
    /// Installed version
    pub version: Option<Version>,
    /// Available version
    pub available_version: Option<Version>,
    /// Description
    pub description: Option<String>,
    /// Homepage URL
    pub homepage: Option<String>,
    /// License
    pub license: Option<String>,
    /// Installation status
    pub status: PackageStatus,
    /// Dependencies (as strings for simplicity)
    pub dependencies: Vec<String>,
    /// Size on disk (bytes)
    pub size: Option<u64>,
    /// Architecture
    pub arch: Option<Arch>,
    /// Whether package is installed
    /// NOTE(review): partially redundant with `status` — confirm which is authoritative.
    pub installed: bool,
}
/// Package installation status
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum PackageStatus {
    /// Not installed
    Available,
    /// Installed and up to date
    Installed,
    /// Installed but update available
    Outdated,
    /// Installed from local file
    Local,
}
/// Search result from package index
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
    /// Package name
    pub name: String,
    /// Latest version
    pub version: Version,
    /// Description
    pub description: Option<String>,
    /// Homepage URL
    pub homepage: Option<String>,
    /// Whether package is installed
    pub installed: bool,
}
/// Dependency kind
///
/// Serialized in lowercase (`build` / `runtime`), matching its `Display` impl.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum DepKind {
    /// Needed only while building the package
    Build,
    /// Needed when the package is used
    Runtime,
}
impl fmt::Display for DepKind {
    /// Lowercase names, identical to the serde representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Build => "build",
            Self::Runtime => "runtime",
        })
    }
}
// Delegates to `PackageSpec::parse`, enabling `"jq>=1.6".parse::<PackageSpec>()`.
impl std::str::FromStr for PackageSpec {
    type Err = sps2_errors::VersionError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::parse(s)
    }
}
/// Dependency edge in resolver graph
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DepEdge {
    /// Name of the depended-upon package
    pub name: String,
    /// Acceptable version range for the dependency
    pub spec: VersionSpec,
    /// Build-time vs runtime dependency
    pub kind: DepKind,
}
/// Python-specific metadata for packages that use Python
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PythonPackageMetadata {
    /// The Python version requirement (e.g., ">=3.9,<3.12")
    pub requires_python: String,
    /// Path within the package to the built wheel file
    pub wheel_file: String,
    /// Path within the package to the locked requirements file
    pub requirements_file: String,
    /// Mapping of executable names to their Python entry points
    /// e.g., {"black": "black:main", "blackd": "blackd:main"}
    pub executables: std::collections::HashMap<String, String>,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/lib.rs | crates/events/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(clippy::module_name_repetitions)]
//! Event system for async communication in sps2
//!
//! This crate provides a domain-driven event system with sophisticated
//! progress tracking, tracing integration, and clean separation of concerns.
//! All output goes through events - no direct logging or printing is allowed
//! outside the CLI.
//!
//! ## Architecture
//!
//! - **Domain-driven events**: Events grouped by functional domain (Build, Download, etc.)
//! - **Unified `EventEmitter` trait**: Single, consistent API for all event emissions
//! - **Tracing integration**: Built-in structured logging with intelligent log levels
//! - **Progress tracking**: Sophisticated algorithms with ETA, speed calculation, and phases
use serde::{Deserialize, Serialize};
pub mod meta;
pub use meta::{EventLevel, EventMeta, EventSource};
// Re-export the progress tracking system
pub mod progress;
pub use progress::*;
// Import the new domain-driven event system
pub mod events;
pub use events::{
AcquisitionContext,
// Domain event types
AppEvent,
BuildDiagnostic,
BuildEvent,
BuildSession,
BuildSystem,
BuildTarget,
CleanupSummary,
CommandDescriptor,
DownloadContext,
FailureContext,
GeneralEvent,
GuardDiscrepancy,
GuardEvent,
GuardHealingPlan,
GuardLevel,
GuardScope,
GuardSeverity,
GuardTargetSummary,
GuardVerificationMetrics,
HealthStatus,
InstallContext,
LifecycleAcquisitionSource,
LifecycleDomain,
// Lifecycle event types (consolidated from 7 old events)
LifecycleEvent,
LifecyclePackageUpdateType,
LifecycleStage,
LifecycleUpdateOperation,
LifecycleUpdateResult,
LogStream,
PackageEvent,
PhaseStatus,
ProcessCommandDescriptor,
ProgressEvent,
QaEvent,
RepoContext,
ResolverContext,
RollbackContext,
RollbackSummary,
StateEvent,
UninstallContext,
UpdateContext,
};
use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender};
/// Envelope carrying metadata alongside an application event.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EventMessage {
    /// Severity/source/correlation metadata attached at emission time.
    pub meta: EventMeta,
    /// The domain event payload.
    pub event: AppEvent,
}
impl EventMessage {
    /// Pair an event with explicitly supplied metadata.
    #[must_use]
    pub fn new(meta: EventMeta, event: AppEvent) -> Self {
        Self { meta, event }
    }

    /// Wrap an event, deriving default metadata from the event itself.
    #[must_use]
    pub fn from_event(event: AppEvent) -> Self {
        Self {
            meta: derive_meta(&event),
            event,
        }
    }
}
/// Type alias for event sender using the new `AppEvent` system
pub type EventSender = UnboundedSender<EventMessage>;
/// Type alias for event receiver using the new `AppEvent` system
pub type EventReceiver = UnboundedReceiver<EventMessage>;
/// Create a new event channel with the `AppEvent` system
///
/// The channel is unbounded: sends never block, so emitters cannot stall,
/// at the cost of unbounded buffering if the receiver falls behind.
#[must_use]
pub fn channel() -> (EventSender, EventReceiver) {
    tokio::sync::mpsc::unbounded_channel()
}
/// The unified trait for emitting events throughout the sps2 system
///
/// This trait provides a single, consistent API for emitting events regardless of
/// whether you have a raw `EventSender` or a struct that contains one.
///
/// Implementers only need to supply `event_sender`; every `emit_*` helper has a
/// default implementation on top of `emit`.
pub trait EventEmitter {
    /// Get the event sender for this emitter
    ///
    /// Returning `None` makes every emission a silent no-op.
    fn event_sender(&self) -> Option<&EventSender>;
    /// Allow implementers to enrich event metadata before emission.
    fn enrich_event_meta(&self, _event: &AppEvent, _meta: &mut EventMeta) {}
    /// Emit an event with explicitly provided metadata.
    fn emit_with_meta(&self, meta: EventMeta, event: AppEvent) {
        if let Some(sender) = self.event_sender() {
            let message = EventMessage::new(meta, event);
            // Send errors (receiver already dropped) are deliberately discarded.
            let _ = sender.send(message);
        }
    }
    /// Emit an event through this emitter, automatically deriving metadata.
    fn emit(&self, event: AppEvent) {
        let mut meta = derive_meta(&event);
        self.enrich_event_meta(&event, &mut meta);
        self.emit_with_meta(meta, event);
    }
    /// Emit a debug log event
    fn emit_debug(&self, message: impl Into<String>) {
        self.emit(AppEvent::General(GeneralEvent::debug(message)));
    }
    /// Emit a debug log event with context
    fn emit_debug_with_context(
        &self,
        message: impl Into<String>,
        context: std::collections::HashMap<String, String>,
    ) {
        self.emit(AppEvent::General(GeneralEvent::debug_with_context(
            message, context,
        )));
    }
    /// Emit a warning event
    fn emit_warning(&self, message: impl Into<String>) {
        self.emit(AppEvent::General(GeneralEvent::warning(message)));
    }
    /// Emit a warning event with context
    fn emit_warning_with_context(&self, message: impl Into<String>, context: impl Into<String>) {
        self.emit(AppEvent::General(GeneralEvent::warning_with_context(
            message, context,
        )));
    }
    /// Emit an error event
    fn emit_error(&self, message: impl Into<String>) {
        self.emit(AppEvent::General(GeneralEvent::error(message)));
    }
    /// Emit an error event with details
    fn emit_error_with_details(&self, message: impl Into<String>, details: impl Into<String>) {
        self.emit(AppEvent::General(GeneralEvent::error_with_details(
            message, details,
        )));
    }
    /// Emit an operation started event
    fn emit_operation_started(&self, operation: impl Into<String>) {
        self.emit(AppEvent::General(GeneralEvent::OperationStarted {
            operation: operation.into(),
        }));
    }
    /// Emit an operation completed event
    fn emit_operation_completed(&self, operation: impl Into<String>, success: bool) {
        self.emit(AppEvent::General(GeneralEvent::OperationCompleted {
            operation: operation.into(),
            success,
        }));
    }
    /// Emit an operation failed event
    fn emit_operation_failed(&self, operation: impl Into<String>, failure: events::FailureContext) {
        self.emit(AppEvent::General(GeneralEvent::operation_failed(
            operation, failure,
        )));
    }
    /// Emit a download started event
    fn emit_download_started(
        &self,
        url: impl Into<String>,
        package: Option<String>,
        total_bytes: Option<u64>,
    ) {
        self.emit(AppEvent::Lifecycle(LifecycleEvent::download_started(
            url.into(),
            package,
            total_bytes,
        )));
    }
    /// Emit a download completed event
    fn emit_download_completed(
        &self,
        url: impl Into<String>,
        package: Option<String>,
        bytes_downloaded: u64,
    ) {
        self.emit(AppEvent::Lifecycle(LifecycleEvent::download_completed(
            url.into(),
            package,
            bytes_downloaded,
        )));
    }
    /// Emit a progress started event
    fn emit_progress_started(
        &self,
        id: impl Into<String>,
        operation: impl Into<String>,
        total: Option<u64>,
    ) {
        self.emit(AppEvent::Progress(ProgressEvent::started(
            id, operation, total,
        )));
    }
    /// Emit a progress update event
    fn emit_progress_updated(&self, id: impl Into<String>, current: u64, total: Option<u64>) {
        self.emit(AppEvent::Progress(ProgressEvent::updated(
            id, current, total,
        )));
    }
    /// Emit a progress completed event
    fn emit_progress_completed(&self, id: impl Into<String>, duration: std::time::Duration) {
        self.emit(AppEvent::Progress(ProgressEvent::completed(id, duration)));
    }
    /// Emit a progress failed event
    fn emit_progress_failed(&self, id: impl Into<String>, failure: events::FailureContext) {
        self.emit(AppEvent::Progress(ProgressEvent::failed(id, failure)));
    }
}
/// Implementation of `EventEmitter` for the raw `EventSender`
/// This allows `EventSender` to be used directly where `EventEmitter` is expected
impl EventEmitter for EventSender {
    fn event_sender(&self) -> Option<&EventSender> {
        Some(self)
    }
}
/// Build default metadata for an event from its own log level and source.
fn derive_meta(event: &AppEvent) -> EventMeta {
    // The explicit `EventLevel` annotation drives the `.into()` conversion
    // from whatever `log_level()` returns.
    let level: EventLevel = event.log_level().into();
    let source = event.event_source();
    EventMeta::new(level, source)
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/meta.rs | crates/events/src/meta.rs | use std::borrow::Cow;
use std::collections::BTreeMap;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tracing::Level;
use uuid::Uuid;
/// Structured metadata that accompanies every event emission.
///
/// This wrapper gives consumers enough context to correlate events across
/// domains, attach them to tracing spans, and provide stable identifiers for
/// telemetry pipelines.
///
/// Serialized with camelCase field names for external consumers.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EventMeta {
    /// Unique identifier for this specific event.
    pub event_id: Uuid,
    /// Parent event (when modelling hierarchical operations / progress).
    pub parent_id: Option<Uuid>,
    /// High-level correlation identifier (operation id, package key, etc.).
    pub correlation_id: Option<String>,
    /// Timestamp captured at emission time.
    pub timestamp: DateTime<Utc>,
    /// Severity used for routing to logging systems and alerting.
    pub level: EventLevel,
    /// Subsystem/component that originated the event.
    pub source: EventSource,
    /// Optional free-form labels for downstream enrichment (kept small on purpose).
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub labels: BTreeMap<String, String>,
}
impl EventMeta {
/// Create a new metadata instance for a given source and level.
#[must_use]
pub fn new(level: impl Into<EventLevel>, source: impl Into<EventSource>) -> Self {
Self {
event_id: Uuid::new_v4(),
parent_id: None,
correlation_id: None,
timestamp: Utc::now(),
level: level.into(),
source: source.into(),
labels: BTreeMap::new(),
}
}
/// Attach a correlation identifier used to stitch related events.
#[must_use]
pub fn with_correlation_id(mut self, correlation_id: impl Into<String>) -> Self {
self.correlation_id = Some(correlation_id.into());
self
}
/// Attach the parent event identifier for hierarchical operations.
#[must_use]
pub fn with_parent(mut self, parent_id: Uuid) -> Self {
self.parent_id = Some(parent_id);
self
}
/// Add an arbitrary label entry (kept intentionally small).
#[must_use]
pub fn with_label(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
self.labels.insert(key.into(), value.into());
self
}
/// Convert the metadata level into a tracing level for downstream logging.
#[must_use]
pub fn tracing_level(&self) -> Level {
self.level.into()
}
}
/// Lightweight severity levels used by the event system.
///
/// `Ord` follows declaration order: `Trace < Debug < Info < Warn < Error`.
#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "snake_case")]
pub enum EventLevel {
    Trace,
    Debug,
    Info,
    Warn,
    Error,
}
// Lossless mapping into `tracing::Level`.
impl From<EventLevel> for Level {
    fn from(level: EventLevel) -> Self {
        match level {
            EventLevel::Trace => Level::TRACE,
            EventLevel::Debug => Level::DEBUG,
            EventLevel::Info => Level::INFO,
            EventLevel::Warn => Level::WARN,
            EventLevel::Error => Level::ERROR,
        }
    }
}
// Inverse mapping; together the two impls round-trip every level.
impl From<Level> for EventLevel {
    fn from(level: Level) -> Self {
        match level {
            Level::TRACE => EventLevel::Trace,
            Level::DEBUG => EventLevel::Debug,
            Level::INFO => EventLevel::Info,
            Level::WARN => EventLevel::Warn,
            Level::ERROR => EventLevel::Error,
        }
    }
}
/// Component/feature that originated the event.
///
/// Newtype over a possibly-static string so the predefined constants
/// allocate nothing.
// NOTE(review): `serde(rename_all)` has no effect on a tuple struct's unnamed
// field — confirm whether it was meant for an earlier enum form.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "snake_case")]
pub struct EventSource(Cow<'static, str>);
impl EventSource {
    // Well-known subsystem identifiers; all borrow static strings.
    pub const GENERAL: Self = Self::const_str("general");
    pub const DOWNLOAD: Self = Self::const_str("download");
    pub const BUILD: Self = Self::const_str("build");
    pub const STATE: Self = Self::const_str("state");
    pub const INSTALL: Self = Self::const_str("install");
    pub const UNINSTALL: Self = Self::const_str("uninstall");
    pub const UPDATE: Self = Self::const_str("update");
    pub const ACQUISITION: Self = Self::const_str("acquisition");
    pub const PROGRESS: Self = Self::const_str("progress");
    pub const REPO: Self = Self::const_str("repo");
    pub const RESOLVER: Self = Self::const_str("resolver");
    pub const GUARD: Self = Self::const_str("guard");
    pub const QA: Self = Self::const_str("qa");
    pub const AUDIT: Self = Self::const_str("audit");
    pub const PACKAGE: Self = Self::const_str("package");
    pub const PLATFORM: Self = Self::const_str("platform");
    // Zero-allocation constructor backing the constants above.
    const fn const_str(value: &'static str) -> Self {
        Self(Cow::Borrowed(value))
    }
    /// Create a source value from any stringy input (e.g. crate path).
    #[must_use]
    pub fn from_dynamic(value: impl Into<String>) -> Self {
        let value = value.into();
        Self(Cow::Owned(value))
    }
    /// Borrow the underlying identifier used for logging/telemetry.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}
impl From<&'static str> for EventSource {
    /// A static string is borrowed — no allocation.
    fn from(value: &'static str) -> Self {
        Self(Cow::from(value))
    }
}
impl From<String> for EventSource {
    /// An owned string is stored as-is.
    fn from(value: String) -> Self {
        Self(Cow::from(value))
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/progress/config.rs | crates/events/src/progress/config.rs | //! Configuration and core types for progress tracking
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// Configuration for progress tracking algorithms
///
/// The defaults quoted per field are supplied by the `Default` impl.
#[derive(Debug, Clone)]
pub struct ProgressConfig {
    /// Number of samples for moving average (default: 10)
    pub speed_window_size: usize,
    /// Maximum samples to retain in history (default: 1000)
    pub max_history_samples: usize,
    /// Update frequency for smooth UI (default: 100ms)
    pub update_interval: Duration,
    /// Outlier rejection multiplier (default: 2.0)
    pub outlier_threshold: f64,
    /// Exponential moving average alpha (default: 0.3)
    pub ema_alpha: f64,
    /// Minimum samples needed for reliable ETA (default: 3)
    pub min_samples_for_eta: usize,
}
impl Default for ProgressConfig {
    /// Values here are the canonical defaults documented on the struct fields;
    /// keep the two in sync when tuning.
    fn default() -> Self {
        Self {
            speed_window_size: 10,
            max_history_samples: 1000,
            update_interval: Duration::from_millis(100),
            outlier_threshold: 2.0,
            ema_alpha: 0.3,
            min_samples_for_eta: 3,
        }
    }
}
/// A phase in a multi-stage operation
// NOTE(review): field doc says weight is 0.0-1.0, but `new` defaults every
// phase to 1.0 — weights appear to be relative, not normalized; confirm.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProgressPhase {
    /// Human-readable name of the phase
    pub name: String,
    /// Weight of this phase relative to others (0.0-1.0)
    pub weight: f64,
    /// Optional estimated duration for this phase
    pub estimated_duration: Option<Duration>,
    /// Optional human-readable description of the phase
    pub description: Option<String>,
}
impl ProgressPhase {
    /// Create a named, described phase with equal default weight and no
    /// duration estimate.
    #[must_use]
    pub fn new(name: &str, description: &str) -> Self {
        Self {
            name: String::from(name),
            weight: 1.0, // equal weighting unless overridden via `with_weight`
            estimated_duration: None,
            description: Some(String::from(description)),
        }
    }

    /// Set the weight for this phase
    #[must_use]
    pub fn with_weight(self, weight: f64) -> Self {
        Self { weight, ..self }
    }

    /// Set the estimated duration for this phase
    #[must_use]
    pub fn with_duration(self, duration: Duration) -> Self {
        Self {
            estimated_duration: Some(duration),
            ..self
        }
    }
}
/// Direction of speed trend
///
/// Coarse classification of recent speed samples for UI display.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TrendDirection {
    /// Speed is increasing
    Accelerating,
    /// Speed is decreasing
    Decelerating,
    /// Speed is relatively stable
    Stable,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/progress/update.rs | crates/events/src/progress/update.rs | //! Progress update and formatting utilities
use super::config::TrendDirection;
use std::time::Duration;
/// Result of a progress update with calculated metrics
///
/// Snapshot handed to consumers; `speed`/`eta` are `None` until enough
/// samples exist to compute them.
#[derive(Debug, Clone)]
pub struct ProgressUpdate {
    /// Tracker ID
    pub id: String,
    /// Current progress
    pub progress: u64,
    /// Total amount of work
    pub total: Option<u64>,
    /// Current phase index
    pub phase: Option<usize>,
    /// Smoothed speed (units per second)
    pub speed: Option<f64>,
    /// Estimated time to completion
    pub eta: Option<Duration>,
    /// Speed trend direction
    pub trend: TrendDirection,
}
impl ProgressUpdate {
    /// Get progress as a percentage (0.0-100.0)
    ///
    /// `None` when no total is known; a zero total reports 100%.
    #[must_use]
    pub fn percentage(&self) -> Option<f64> {
        self.total.map(|total| {
            if total == 0 {
                100.0
            } else {
                (self.progress as f64 / total as f64) * 100.0
            }
        })
    }

    /// Format speed in human-readable units
    ///
    /// Scales into K/M per second above 1 000 and 1 000 000 respectively.
    #[must_use]
    pub fn format_speed(&self, unit: &str) -> Option<String> {
        let speed = self.speed?;
        let text = if speed > 1_000_000.0 {
            format!("{:.1}M {unit}/s", speed / 1_000_000.0)
        } else if speed > 1_000.0 {
            format!("{:.1}K {unit}/s", speed / 1_000.0)
        } else {
            format!("{speed:.1} {unit}/s")
        };
        Some(text)
    }

    /// Format ETA in human-readable format
    ///
    /// Renders as `Xh Ym`, `Xm Ys`, or `Xs` depending on magnitude.
    #[must_use]
    pub fn format_eta(&self) -> Option<String> {
        let total_seconds = self.eta?.as_secs();
        Some(if total_seconds > 3600 {
            format!("{}h {}m", total_seconds / 3600, (total_seconds % 3600) / 60)
        } else if total_seconds > 60 {
            format!("{}m {}s", total_seconds / 60, total_seconds % 60)
        } else {
            format!("{total_seconds}s")
        })
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/progress/tracker.rs | crates/events/src/progress/tracker.rs | //! Core progress tracking with sophisticated ETA calculations
use super::config::{ProgressConfig, ProgressPhase, TrendDirection};
use super::speed::{SpeedBuffer, SpeedSample};
use super::update::ProgressUpdate;
use std::time::{Duration, Instant};
use uuid::Uuid;
/// Core progress tracker with sophisticated algorithms
#[derive(Debug, Clone)]
pub struct ProgressTracker {
    /// Unique identifier for this tracker
    id: String,
    /// Human-readable operation name
    operation: String,
    /// Total amount of work (bytes, items, etc.); `None` when unknown
    total: Option<u64>,
    /// Current progress (same unit as `total`)
    current: u64,
    /// Phases for multi-stage operations (weights normalized to sum to 1.0
    /// by `with_phases`)
    pub phases: Vec<ProgressPhase>,
    /// Current active phase (index into `phases`)
    pub current_phase: usize,
    /// Ring buffer of recent speed samples used for smoothing
    speed_buffer: SpeedBuffer,
    /// Configuration for the smoothing/ETA algorithms
    config: ProgressConfig,
    /// When tracking started
    start_time: Instant,
    /// Last time an update was actually processed (drives throttling)
    last_update: Instant,
    /// Exponential moving average of instantaneous speed; `None` until the
    /// first sample is recorded
    ema_speed: Option<f64>,
    /// Whether tracker has been completed
    pub(crate) completed: bool,
    /// Optional parent identifier (e.g. correlation id)
    parent_id: Option<String>,
    /// Root event identifier for the initial Started event; later events
    /// reference it as their parent
    root_event_id: Uuid,
}
impl ProgressTracker {
/// Create a new progress tracker
#[must_use]
pub fn new(
id: String,
operation: String,
total: Option<u64>,
parent_id: Option<String>,
) -> Self {
let config = ProgressConfig::default();
let now = Instant::now();
Self {
id,
operation,
total,
current: 0,
phases: Vec::new(),
current_phase: 0,
speed_buffer: SpeedBuffer::new(config.speed_window_size),
config,
start_time: now,
last_update: now,
ema_speed: None,
completed: false,
parent_id,
root_event_id: Uuid::new_v4(),
}
}
/// Create a new progress tracker with custom configuration.
#[must_use]
pub fn with_config(
    id: String,
    operation: String,
    total: Option<u64>,
    config: ProgressConfig,
    parent_id: Option<String>,
) -> Self {
    let now = Instant::now();
    Self {
        id,
        operation,
        total,
        current: 0,
        phases: Vec::new(),
        current_phase: 0,
        // Ring-buffer capacity comes from the configured smoothing window.
        speed_buffer: SpeedBuffer::new(config.speed_window_size),
        config,
        // Start time and last-update time begin at the same instant.
        start_time: now,
        last_update: now,
        ema_speed: None,
        completed: false,
        parent_id,
        // Fresh root id; used as the event id of the initial Started event.
        root_event_id: Uuid::new_v4(),
    }
}
/// Add phases for multi-stage operations.
///
/// Phase weights are rescaled so they sum to 1.0; if the weights sum to
/// zero (or less) they are left untouched.
#[must_use]
pub fn with_phases(mut self, phases: Vec<ProgressPhase>) -> Self {
    let mut scaled = phases;
    let weight_total: f64 = scaled.iter().map(|phase| phase.weight).sum();
    if weight_total > 0.0 {
        for phase in &mut scaled {
            phase.weight /= weight_total;
        }
    }
    self.phases = scaled;
    self
}
/// Update progress and calculate metrics.
///
/// Returns a snapshot of the current state. Updates are throttled: when
/// called again before `config.update_interval` has elapsed (and at least
/// one speed sample already exists), internal state is left untouched and
/// a lightweight snapshot is returned instead.
pub fn update(&mut self, progress: u64) -> ProgressUpdate {
    let now = Instant::now();
    // For tests and first update, always process
    let should_update = now.duration_since(self.last_update) >= self.config.update_interval
        || self.speed_buffer.samples.is_empty();
    if !should_update {
        // Throttled path: echo the caller's progress without recording a
        // sample. Intentionally reports `eta: None` and the last EMA speed
        // rather than recomputing anything.
        return ProgressUpdate {
            id: self.id.clone(),
            progress,
            total: self.total,
            phase: if self.phases.is_empty() {
                None
            } else {
                Some(self.current_phase)
            },
            speed: self.ema_speed,
            eta: None,
            trend: TrendDirection::Stable,
        };
    }
    self.current = progress;
    self.last_update = now;
    // Add speed sample (None when the time delta was too small for a
    // meaningful instantaneous speed).
    if let Some(instantaneous_speed) = self.speed_buffer.add_sample(progress, now) {
        // Update exponential moving average
        if let Some(current_ema) = self.ema_speed {
            self.ema_speed = Some(
                self.config.ema_alpha * instantaneous_speed
                    + (1.0 - self.config.ema_alpha) * current_ema,
            );
        } else {
            // First sample seeds the EMA directly.
            self.ema_speed = Some(instantaneous_speed);
        }
    }
    // Calculate smoothed speed (outlier-filtered, recency-weighted)
    let smoothed_speed = self
        .speed_buffer
        .calculate_smoothed_speed(self.config.outlier_threshold);
    // Calculate ETA using multiple methods and pick the best
    let eta = self.calculate_eta(smoothed_speed);
    // Get trend direction
    let trend = self.speed_buffer.get_trend();
    ProgressUpdate {
        id: self.id.clone(),
        progress,
        total: self.total,
        phase: if self.phases.is_empty() {
            None
        } else {
            Some(self.current_phase)
        },
        speed: smoothed_speed,
        eta,
        trend,
    }
}
/// Advance to the next phase, returning the new phase index.
///
/// Returns `None` (and stays put) when already on the last phase or when
/// no phases are configured.
pub fn next_phase(&mut self) -> Option<usize> {
    let next = self.current_phase + 1;
    if next >= self.phases.len() {
        return None;
    }
    self.current_phase = next;
    // Speed statistics from the previous phase would skew the new phase's
    // ETA, so restart the speed state from scratch.
    self.speed_buffer = SpeedBuffer::new(self.config.speed_window_size);
    self.ema_speed = None;
    Some(next)
}
/// Mark tracker as completed and return the total elapsed time.
pub fn complete(&mut self) -> Duration {
    self.completed = true;
    self.start_time.elapsed()
}
/// Get the operation name associated with this tracker.
#[must_use]
pub fn operation(&self) -> &str {
    &self.operation
}
/// Get the total work configured for this tracker, if known.
#[must_use]
pub fn total(&self) -> Option<u64> {
    self.total
}
/// Get the parent identifier string associated with this tracker, if any.
#[must_use]
pub fn parent_id(&self) -> Option<&String> {
    self.parent_id.as_ref()
}
/// Update the parent identifier for this tracker.
pub fn set_parent_id(&mut self, parent: Option<String>) {
    self.parent_id = parent;
}
/// Get the root event identifier for this tracker.
///
/// This is the event id of the initial `Started` event; follow-up events
/// reference it as their parent.
#[must_use]
pub fn root_event_id(&self) -> Uuid {
    self.root_event_id
}
/// Calculate ETA using multiple sophisticated methods.
///
/// Returns `None` when the tracker is completed, the total is unknown,
/// there are too few samples for a reliable estimate, or the speed is not
/// positive.
fn calculate_eta(&self, current_speed: Option<f64>) -> Option<Duration> {
    if self.completed || self.total.is_none() {
        return None;
    }
    let total = self.total?;
    let remaining = total.saturating_sub(self.current);
    if remaining == 0 {
        return Some(Duration::ZERO);
    }
    // Need minimum samples for reliable ETA
    if self.speed_buffer.samples.len() < self.config.min_samples_for_eta {
        return None;
    }
    let speed = current_speed?;
    if speed <= 0.0 {
        return None;
    }
    // Method 1: Simple ETA based on current speed
    let simple_eta = Duration::from_secs_f64(remaining as f64 / speed);
    // Method 2: Phase-aware ETA if we have phases
    let phase_eta = if self.phases.is_empty() {
        simple_eta
    } else {
        self.calculate_phase_aware_eta(remaining, speed)
    };
    // Method 3: Trend-aware ETA
    let trend_eta = self.calculate_trend_aware_eta(remaining, speed);
    // Combine estimates using weighted average
    let estimates = [
        (simple_eta, 0.4), // 40% weight on simple calculation
        (phase_eta, 0.3),  // 30% weight on phase-aware
        (trend_eta, 0.3),  // 30% weight on trend-aware
    ];
    // NOTE(review): the weights already sum to ~1.0, so the normalization
    // below is effectively a no-op kept for robustness if weights change.
    let total_weight: f64 = estimates.iter().map(|(_, w)| w).sum();
    let weighted_sum: f64 = estimates
        .iter()
        .map(|(eta, weight)| eta.as_secs_f64() * weight)
        .sum();
    Some(Duration::from_secs_f64(weighted_sum / total_weight))
}
/// Calculate phase-aware ETA considering current phase progress.
///
/// Splits the remaining work into "rest of the current phase" plus the
/// whole of every later phase, assuming the current speed holds for all
/// of it.
fn calculate_phase_aware_eta(&self, remaining: u64, speed: f64) -> Duration {
    if self.phases.is_empty() {
        return Duration::from_secs_f64(remaining as f64 / speed);
    }
    let current_phase = &self.phases[self.current_phase];
    let total = self.total.unwrap_or(0);
    // Work completed before the current phase began (phase weights are
    // normalized fractions of `total`).
    let phase_start = self
        .phases
        .iter()
        .take(self.current_phase)
        .map(|p| (total as f64 * p.weight) as u64)
        .sum::<u64>();
    let phase_total = (total as f64 * current_phase.weight) as u64;
    // `saturating_sub` in both places: `current` can lag behind
    // `phase_start` when the phase index is moved ahead of actual progress
    // (e.g. via `change_phase`), and the previous unchecked
    // `self.current - phase_start` would underflow and panic in debug
    // builds.
    let done_in_phase = self.current.saturating_sub(phase_start);
    let phase_remaining = phase_total.saturating_sub(done_in_phase);
    // Remaining work in phases that have not started yet.
    let future_phases_work: u64 = self
        .phases
        .iter()
        .skip(self.current_phase + 1)
        .map(|p| (total as f64 * p.weight) as u64)
        .sum();
    // Estimate time for current phase
    let current_phase_eta = Duration::from_secs_f64(phase_remaining as f64 / speed);
    // Estimate time for future phases (assume same speed)
    let future_phases_eta = Duration::from_secs_f64(future_phases_work as f64 / speed);
    current_phase_eta + future_phases_eta
}
/// Calculate trend-aware ETA considering acceleration/deceleration.
///
/// Projects the current speed forward with a conservative ±10% factor
/// depending on the recent trend.
fn calculate_trend_aware_eta(&self, remaining: u64, speed: f64) -> Duration {
    let factor = match self.speed_buffer.get_trend() {
        // Assume speed keeps increasing a little.
        TrendDirection::Accelerating => 1.1,
        // Assume speed keeps decreasing a little.
        TrendDirection::Decelerating => 0.9,
        // Use current speed as-is.
        TrendDirection::Stable => 1.0,
    };
    Duration::from_secs_f64(remaining as f64 / (speed * factor))
}
/// Get the currently active phase, if any phases are configured.
pub fn current_phase(&self) -> Option<&ProgressPhase> {
    self.phases.get(self.current_phase)
}
/// Get all phases.
pub fn phases(&self) -> &[ProgressPhase] {
    &self.phases
}
/// Get a rough estimate of the inline + heap memory used by this tracker.
#[must_use]
pub fn memory_usage(&self) -> usize {
    // Inline size of the struct itself.
    let base_size = std::mem::size_of::<Self>();
    // Heap held by the two owned strings.
    let string_size = self.id.capacity() + self.operation.capacity();
    // Heap held by each phase's owned strings.
    let phase_string_size: usize = self
        .phases
        .iter()
        .map(|phase| {
            let desc_size = phase
                .description
                .as_ref()
                .map_or(0, std::string::String::capacity);
            phase.name.capacity() + desc_size
        })
        .sum();
    // Heap held by the phases vector's backing storage.
    let phases_size =
        self.phases.capacity() * std::mem::size_of::<ProgressPhase>() + phase_string_size;
    // Heap held by the speed-sample ring buffer.
    let samples_size =
        self.speed_buffer.samples.capacity() * std::mem::size_of::<SpeedSample>();
    base_size + string_size + phases_size + samples_size
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/progress/manager.rs | crates/events/src/progress/manager.rs | //! Thread-safe progress management with event integration
use super::config::ProgressPhase;
use super::tracker::ProgressTracker;
use super::update::ProgressUpdate;
use crate::{AppEvent, EventEmitter, EventLevel, EventMeta, ProgressEvent};
use std::sync::{Arc, Mutex};
use std::time::Duration;
/// Thread-safe progress tracker manager.
///
/// Cloning is cheap: clones share the same tracker map behind an
/// `Arc<Mutex<…>>`.
#[derive(Debug, Clone)]
pub struct ProgressManager {
    // All live trackers, keyed by tracker ID.
    trackers: Arc<Mutex<std::collections::HashMap<String, ProgressTracker>>>,
}
impl ProgressManager {
/// Create a new progress manager
pub fn new() -> Self {
Self {
trackers: Arc::new(Mutex::new(std::collections::HashMap::new())),
}
}
/// Create and register a new progress tracker, returning its ID.
pub fn create_tracker(
    &self,
    id: String,
    operation: String,
    total: Option<u64>,
    parent_id: Option<String>,
) -> String {
    let tracker = ProgressTracker::new(id.clone(), operation, total, parent_id);
    // Best-effort: a poisoned lock silently drops the tracker.
    if let Ok(mut trackers) = self.trackers.lock() {
        trackers.insert(id.clone(), tracker);
    }
    id
}
/// Create and register a tracker with phases, returning its ID.
pub fn create_tracker_with_phases(
    &self,
    id: String,
    operation: String,
    total: Option<u64>,
    phases: Vec<ProgressPhase>,
    parent_id: Option<String>,
) -> String {
    let tracker =
        ProgressTracker::new(id.clone(), operation, total, parent_id).with_phases(phases);
    // Best-effort: a poisoned lock silently drops the tracker.
    if let Ok(mut trackers) = self.trackers.lock() {
        trackers.insert(id.clone(), tracker);
    }
    id
}
/// Emit a started event for an existing tracker using its stored metadata.
///
/// If `parent_id` is given and the tracker has none yet, it is adopted.
/// The emitted event reuses the tracker's root event id so later events
/// can reference it as their parent. No-op on a missing tracker or a
/// poisoned lock.
pub fn emit_started<E: EventEmitter>(&self, id: &str, emitter: &E, parent_id: Option<&str>) {
    let Ok(mut trackers) = self.trackers.lock() else {
        return;
    };
    let Some(tracker) = trackers.get_mut(id) else {
        return;
    };
    // Only adopt the caller-supplied parent when none is set already.
    if let Some(parent) = parent_id {
        if tracker.parent_id().is_none() {
            tracker.set_parent_id(Some(parent.to_string()));
        }
    }
    let operation = tracker.operation().to_string();
    let total = tracker.total();
    let phases = tracker.phases().to_vec();
    let parent_label = tracker.parent_id().cloned();
    let root_event_id = tracker.root_event_id();
    let event = ProgressEvent::Started {
        id: id.to_string(),
        operation,
        total,
        phases,
        parent_id: parent_label.clone(),
    };
    let app_event = AppEvent::Progress(event);
    let level = EventLevel::from(app_event.log_level());
    let mut meta = EventMeta::new(level, app_event.event_source());
    // The Started event *is* the root of the sequence: overwrite the
    // freshly generated event id with the tracker's root id.
    meta.event_id = root_event_id;
    if let Some(parent_label) = parent_label {
        meta.labels
            .insert("progress_parent".to_string(), parent_label);
    }
    emitter.enrich_event_meta(&app_event, &mut meta);
    emitter.emit_with_meta(meta, app_event);
}
/// Get a clone of a tracker by its ID.
pub fn get_tracker(&self, id: &str) -> Option<ProgressTracker> {
    // A poisoned lock is treated the same as a missing tracker.
    self.trackers
        .lock()
        .ok()
        .and_then(|trackers| trackers.get(id).cloned())
}
/// Update a tracker's progress, returning the resulting snapshot.
pub fn update(&self, id: &str, progress: u64) -> Option<ProgressUpdate> {
    self.trackers
        .lock()
        .ok()
        .and_then(|mut trackers| trackers.get_mut(id).map(|tracker| tracker.update(progress)))
}
/// Advance a tracker to the next phase, returning the new phase index.
pub fn next_phase(&self, id: &str) -> Option<usize> {
    self.trackers
        .lock()
        .ok()
        .and_then(|mut trackers| trackers.get_mut(id).and_then(ProgressTracker::next_phase))
}
/// Complete a tracker, returning its elapsed duration.
pub fn complete(&self, id: &str) -> Option<Duration> {
    self.trackers
        .lock()
        .ok()
        .and_then(|mut trackers| trackers.get_mut(id).map(ProgressTracker::complete))
}
/// Remove a completed tracker; `true` when something was removed.
#[must_use]
pub fn remove(&self, id: &str) -> bool {
    self.trackers
        .lock()
        .map_or(false, |mut trackers| trackers.remove(id).is_some())
}
/// Get current memory usage of all trackers (0 on a poisoned lock).
#[must_use]
pub fn total_memory_usage(&self) -> usize {
    self.trackers.lock().map_or(0, |trackers| {
        trackers.values().map(ProgressTracker::memory_usage).sum()
    })
}
/// Get number of active trackers (0 on a poisoned lock).
#[must_use]
pub fn active_count(&self) -> usize {
    self.trackers.lock().map_or(0, |trackers| trackers.len())
}
/// Clean up completed trackers to free memory; returns how many were
/// removed.
#[must_use]
pub fn cleanup_completed(&self) -> usize {
    self.trackers.lock().map_or(0, |mut trackers| {
        let before = trackers.len();
        trackers.retain(|_, tracker| !tracker.completed);
        before - trackers.len()
    })
}
/// Start a new operation with progress tracking.
///
/// Generates a unique tracker ID (`<id>_<uuid>`), registers the tracker
/// and immediately emits a `Started` event. Returns the generated ID.
pub fn start_operation<E: EventEmitter>(
    &self,
    id: &str,
    operation: &str,
    total: Option<u64>,
    phases: Vec<ProgressPhase>,
    emitter: &E,
    parent_id: Option<&str>,
) -> String {
    // Suffix with a UUID so repeated operations with the same base id
    // never collide in the tracker map.
    let tracker_id = format!("{}_{}", id, uuid::Uuid::new_v4());
    let parent_string = parent_id.map(str::to_string);
    self.create_tracker_with_phases(
        tracker_id.clone(),
        operation.to_string(),
        total,
        phases,
        parent_string.clone(),
    );
    // Emit a Started event for this operation with metadata linking
    self.emit_started(&tracker_id, emitter, parent_string.as_deref());
    tracker_id
}
/// Update progress for an operation and emit an `Updated` event.
///
/// The event meta's `parent_id` points at the tracker's root (Started)
/// event so consumers can correlate the stream. No-op on a missing
/// tracker or a poisoned lock.
pub fn update_progress<E: EventEmitter>(
    &self,
    id: &str,
    current: u64,
    total: Option<u64>,
    emitter: &E,
) {
    let Ok(mut trackers) = self.trackers.lock() else {
        return;
    };
    let Some(tracker) = trackers.get_mut(id) else {
        return;
    };
    let update = tracker.update(current);
    let parent_label = tracker.parent_id().cloned();
    let root_event_id = tracker.root_event_id();
    // NOTE(review): the event reports the caller-supplied `total`, not the
    // tracker's stored total — confirm callers keep the two in sync.
    let event = ProgressEvent::Updated {
        id: id.to_string(),
        current,
        total,
        phase: update.phase,
        speed: update.speed,
        eta: update.eta,
        efficiency: None,
    };
    let app_event = AppEvent::Progress(event);
    let level = EventLevel::from(app_event.log_level());
    let mut meta = EventMeta::new(level, app_event.event_source());
    // Link back to the tracker's root (Started) event.
    meta.parent_id = Some(root_event_id);
    if let Some(parent_label) = parent_label {
        meta.labels
            .insert("progress_parent".to_string(), parent_label);
    }
    emitter.enrich_event_meta(&app_event, &mut meta);
    emitter.emit_with_meta(meta, app_event);
}
/// Change to a specific phase (clamped to the last valid index) and emit
/// a `PhaseChanged` event.
pub fn change_phase<E: EventEmitter>(&self, id: &str, phase_index: usize, emitter: &E) {
    // Set to specific phase index if available
    let Ok(mut trackers) = self.trackers.lock() else {
        return;
    };
    let Some(tracker) = trackers.get_mut(id) else {
        return;
    };
    // Clamp out-of-range indices to the last phase (0 when no phases).
    let clamped = phase_index.min(tracker.phases.len().saturating_sub(1));
    tracker.current_phase = clamped;
    // Fall back to a synthetic "Phase N" name when no phase exists.
    let phase_name = tracker
        .phases()
        .get(clamped)
        .map_or_else(|| format!("Phase {}", clamped), |p| p.name.clone());
    let parent_label = tracker.parent_id().cloned();
    let root_event_id = tracker.root_event_id();
    let event = ProgressEvent::PhaseChanged {
        id: id.to_string(),
        phase: clamped,
        phase_name,
    };
    let app_event = AppEvent::Progress(event);
    let level = EventLevel::from(app_event.log_level());
    let mut meta = EventMeta::new(level, app_event.event_source());
    // Link back to the tracker's root (Started) event.
    meta.parent_id = Some(root_event_id);
    if let Some(parent_label) = parent_label {
        meta.labels
            .insert("progress_parent".to_string(), parent_label);
    }
    emitter.enrich_event_meta(&app_event, &mut meta);
    emitter.emit_with_meta(meta, app_event);
}
/// Change to a specific phase by name and mark it as done.
///
/// Silently does nothing when the phase name is not found.
pub fn update_phase_to_done<E: EventEmitter>(&self, id: &str, phase_name: &str, emitter: &E) {
    let Ok(mut trackers) = self.trackers.lock() else {
        return;
    };
    let Some(tracker) = trackers.get_mut(id) else {
        return;
    };
    // Look the phase up by name; unknown names are ignored.
    let Some(phase_index) = tracker.phases().iter().position(|p| p.name == phase_name) else {
        return;
    };
    tracker.current_phase = phase_index;
    let parent_label = tracker.parent_id().cloned();
    let root_event_id = tracker.root_event_id();
    let event = ProgressEvent::PhaseChanged {
        id: id.to_string(),
        phase: phase_index,
        phase_name: phase_name.to_string(),
    };
    let app_event = AppEvent::Progress(event);
    let level = EventLevel::from(app_event.log_level());
    let mut meta = EventMeta::new(level, app_event.event_source());
    // Link back to the tracker's root (Started) event.
    meta.parent_id = Some(root_event_id);
    if let Some(parent_label) = parent_label {
        meta.labels
            .insert("progress_parent".to_string(), parent_label);
    }
    emitter.enrich_event_meta(&app_event, &mut meta);
    emitter.emit_with_meta(meta, app_event);
}
/// Complete an operation and emit a `Completed` event.
pub fn complete_operation<E: EventEmitter>(&self, id: &str, emitter: &E) {
    let Ok(mut trackers) = self.trackers.lock() else {
        return;
    };
    let Some(tracker) = trackers.get_mut(id) else {
        return;
    };
    // Marks the tracker completed; it stays in the map until `remove` or
    // `cleanup_completed` runs.
    let duration = tracker.complete();
    let parent_label = tracker.parent_id().cloned();
    let root_event_id = tracker.root_event_id();
    // NOTE(review): `final_speed`/`total_processed` are placeholders here
    // — confirm downstream consumers don't rely on them.
    let event = ProgressEvent::Completed {
        id: id.to_string(),
        duration,
        final_speed: None,
        total_processed: 0,
    };
    let app_event = AppEvent::Progress(event);
    let level = EventLevel::from(app_event.log_level());
    let mut meta = EventMeta::new(level, app_event.event_source());
    // Link back to the tracker's root (Started) event.
    meta.parent_id = Some(root_event_id);
    if let Some(parent_label) = parent_label {
        meta.labels
            .insert("progress_parent".to_string(), parent_label);
    }
    emitter.enrich_event_meta(&app_event, &mut meta);
    emitter.emit_with_meta(meta, app_event);
}
/// Create a parent progress tracker for batch operations.
///
/// Returns the generated tracker ID (prefixed with `batch_`).
pub fn create_batch_tracker(
    &self,
    operation_name: String,
    total_items: u64,
    phases: Vec<ProgressPhase>,
) -> String {
    let batch_id = format!("batch_{}", uuid::Uuid::new_v4());
    self.create_tracker_with_phases(
        batch_id.clone(),
        operation_name,
        Some(total_items),
        phases,
        None,
    );
    batch_id
}
/// Register a child tracker with its parent by emitting a `ChildStarted`
/// event.
///
/// This is fire-and-forget: no tracker state is stored here; the UI
/// aggregates parent/child relationships from the event stream. (The old
/// "# Errors" doc was stale — this function returns `()` and cannot
/// fail.)
pub fn register_child_tracker<E: EventEmitter>(
    &self,
    parent_id: &str,
    child_id: &str,
    operation_name: String,
    weight: f64,
    emitter: &E,
) {
    // Emit child started event
    emitter.emit(AppEvent::Progress(ProgressEvent::ChildStarted {
        parent_id: parent_id.to_string(),
        child_id: child_id.to_string(),
        operation: operation_name,
        weight,
    }));
    // Fire-and-forget; UI aggregates parent/child via events.
}
/// Complete a child tracker by emitting a `ChildCompleted` event.
///
/// Fire-and-forget, mirroring `register_child_tracker`; returns `()` and
/// cannot fail.
pub fn complete_child_tracker<E: EventEmitter>(
    &self,
    parent_id: &str,
    child_id: &str,
    success: bool,
    emitter: &E,
) {
    emitter.emit(AppEvent::Progress(ProgressEvent::ChildCompleted {
        parent_id: parent_id.to_string(),
        child_id: child_id.to_string(),
        success,
    }));
    // Fire-and-forget.
}
}
impl Default for ProgressManager {
    /// Same as [`ProgressManager::new`]: an empty tracker map.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::progress::config::ProgressPhase;
use crate::{EventMessage, EventSender};
use std::sync::Mutex;
use uuid::Uuid;
// Event emitter test double that records every emitted message in memory
// instead of sending it anywhere.
#[derive(Default)]
struct TestEmitter {
    messages: Mutex<Vec<EventMessage>>,
}
// Expected shape of one event in a recorded sequence (used by the
// multi-phase lifecycle test below).
#[derive(Debug, Clone, Copy)]
enum ProgressAssertion<'a> {
    Update { current: u64, phase: usize },
    PhaseChange { phase: usize, name: &'a str },
    Completed,
}
impl TestEmitter {
    fn new() -> Self {
        Self::default()
    }
    // Take all recorded messages, leaving the buffer empty.
    fn drain(&self) -> Vec<EventMessage> {
        let mut guard = self.messages.lock().expect("messages lock poisoned");
        guard.drain(..).collect()
    }
}
impl EventEmitter for TestEmitter {
    fn event_sender(&self) -> Option<&EventSender> {
        // No channel: events are captured via `emit_with_meta` below.
        None
    }
    fn emit_with_meta(&self, meta: EventMeta, event: AppEvent) {
        let mut guard = self.messages.lock().expect("messages lock poisoned");
        guard.push(EventMessage::new(meta, event));
    }
}
// Assert the meta fields shared by every event in a tracked sequence: the
// `progress_parent` label, plus either the root event id itself (for the
// Started event) or a parent link back to it (for all later events).
fn assert_parent_meta(
    meta: &EventMeta,
    root_event_id: Uuid,
    parent_label: &str,
    is_root: bool,
) {
    assert_eq!(
        meta.labels.get("progress_parent").map(String::as_str),
        Some(parent_label)
    );
    if is_root {
        assert_eq!(meta.event_id, root_event_id);
        assert!(meta.parent_id.is_none());
    } else {
        assert_eq!(meta.parent_id, Some(root_event_id));
    }
}
// Assert an event is `Started` with the given total, phase names and
// parent label.
fn expect_started_event(
    event: &AppEvent,
    total: Option<u64>,
    expected_phases: &[&str],
    parent_label: &str,
) {
    match event {
        AppEvent::Progress(ProgressEvent::Started {
            total: event_total,
            phases,
            parent_id,
            ..
        }) => {
            assert_eq!(*event_total, total);
            let names: Vec<&str> = phases.iter().map(|phase| phase.name.as_str()).collect();
            assert_eq!(names, expected_phases);
            assert_eq!(parent_id.as_deref(), Some(parent_label));
        }
        other => panic!("expected ProgressEvent::Started, got {other:?}"),
    }
}
// Assert an event is `Updated` with the given progress counters.
fn expect_update_event(
    event: &AppEvent,
    current: u64,
    total: Option<u64>,
    phase: Option<usize>,
) {
    match event {
        AppEvent::Progress(ProgressEvent::Updated {
            current: event_current,
            total: event_total,
            phase: event_phase,
            ..
        }) => {
            assert_eq!(*event_current, current);
            assert_eq!(*event_total, total);
            assert_eq!(*event_phase, phase);
        }
        other => panic!("expected ProgressEvent::Updated, got {other:?}"),
    }
}
// Assert an event is `PhaseChanged` with the given index and name.
fn expect_phase_changed_event(event: &AppEvent, phase: usize, name: &str) {
    match event {
        AppEvent::Progress(ProgressEvent::PhaseChanged {
            phase: event_phase,
            phase_name,
            ..
        }) => {
            assert_eq!(*event_phase, phase);
            assert_eq!(phase_name, name);
        }
        other => panic!("expected ProgressEvent::PhaseChanged, got {other:?}"),
    }
}
#[test]
fn started_event_sets_parent_label_and_meta() {
    let manager = ProgressManager::new();
    let tracker_id = manager.create_tracker_with_phases(
        "install".to_string(),
        "install packages".to_string(),
        Some(10),
        vec![],
        None,
    );
    let emitter = TestEmitter::new();
    manager.emit_started(&tracker_id, &emitter, Some("install:pkg"));
    let events = emitter.drain();
    assert_eq!(events.len(), 1);
    let EventMessage { meta, event } = &events[0];
    // The tracker had no parent, so the one passed to emit_started is
    // adopted and reflected in both the event payload and the meta label.
    match event {
        AppEvent::Progress(ProgressEvent::Started { parent_id, .. }) => {
            assert_eq!(parent_id.as_deref(), Some("install:pkg"));
        }
        other => panic!("expected ProgressEvent::Started, got {other:?}"),
    }
    assert_eq!(
        meta.labels.get("progress_parent").map(String::as_str),
        Some("install:pkg")
    );
    // Started is the root of the sequence, so it must not have a parent.
    assert!(meta.parent_id.is_none());
}
/// An `Updated` event must reference the root Started event as its
/// parent and carry the parent label.
#[test]
fn progress_updates_reference_root_event() {
    let manager = ProgressManager::new();
    let tracker_id = manager.create_tracker_with_phases(
        "install".to_string(),
        "install packages".to_string(),
        Some(5),
        vec![],
        None,
    );
    let emitter = TestEmitter::new();
    manager.emit_started(&tracker_id, &emitter, Some("install:pkg"));
    let root_event_id = manager
        .get_tracker(&tracker_id)
        .expect("tracker")
        .root_event_id();
    // Discard the Started event; only the update is asserted below.
    emitter.drain();
    manager.update_progress(&tracker_id, 1, Some(5), &emitter);
    let events = emitter.drain();
    assert_eq!(events.len(), 1);
    let EventMessage { meta, event } = &events[0];
    // Bug fix: the bare `matches!(...)` expression returned a bool that
    // was silently discarded, so the variant was never actually checked.
    assert!(matches!(
        event,
        AppEvent::Progress(ProgressEvent::Updated { .. })
    ));
    assert_eq!(meta.parent_id, Some(root_event_id));
    assert_eq!(
        meta.labels.get("progress_parent").map(String::as_str),
        Some("install:pkg")
    );
}
/// A `Completed` event must reference the root Started event as its
/// parent and carry the parent label.
#[test]
fn completion_event_attaches_root_parent() {
    let manager = ProgressManager::new();
    let tracker_id = manager.create_tracker_with_phases(
        "install".to_string(),
        "install packages".to_string(),
        Some(2),
        vec![],
        None,
    );
    let emitter = TestEmitter::new();
    manager.emit_started(&tracker_id, &emitter, Some("install:pkg"));
    let root_event_id = manager
        .get_tracker(&tracker_id)
        .expect("tracker")
        .root_event_id();
    // Discard the Started event; only the completion event is asserted.
    emitter.drain();
    manager.complete_operation(&tracker_id, &emitter);
    let events = emitter.drain();
    assert_eq!(events.len(), 1);
    let EventMessage { meta, event } = &events[0];
    // Bug fix: the bare `matches!(...)` expression returned a bool that
    // was silently discarded, so the variant was never actually checked.
    assert!(matches!(
        event,
        AppEvent::Progress(ProgressEvent::Completed { .. })
    ));
    assert_eq!(meta.parent_id, Some(root_event_id));
    assert_eq!(
        meta.labels.get("progress_parent").map(String::as_str),
        Some("install:pkg")
    );
}
// End-to-end lifecycle check: Started -> updates -> phase changes ->
// Completed, with every event carrying consistent parent metadata.
#[test]
fn multi_phase_operation_produces_consistent_event_sequence() {
    let manager = ProgressManager::new();
    let tracker_id = "install-flow".to_string();
    let phases = vec![
        ProgressPhase::new("Resolve", "resolve dependencies"),
        ProgressPhase::new("Fetch", "fetch artifacts"),
        ProgressPhase::new("Install", "link outputs"),
    ];
    manager.create_tracker_with_phases(
        tracker_id.clone(),
        "install packages".to_string(),
        Some(3),
        phases,
        None,
    );
    let emitter = TestEmitter::new();
    let parent_label = "install:root";
    manager.emit_started(&tracker_id, &emitter, Some(parent_label));
    let root_event_id = manager
        .get_tracker(&tracker_id)
        .expect("tracker")
        .root_event_id();
    // Drive a full lifecycle: update in each phase, switch phase both by
    // index and by name, then complete.
    manager.update_progress(&tracker_id, 1, Some(3), &emitter);
    manager.change_phase(&tracker_id, 1, &emitter);
    manager.update_progress(&tracker_id, 2, Some(3), &emitter);
    manager.update_phase_to_done(&tracker_id, "Install", &emitter);
    manager.update_progress(&tracker_id, 3, Some(3), &emitter);
    manager.complete_operation(&tracker_id, &emitter);
    let mut events = emitter.drain().into_iter();
    // First event is the root Started event carrying all three phases.
    let EventMessage { meta, event } = events.next().expect("started event");
    expect_started_event(
        &event,
        Some(3),
        &["Resolve", "Fetch", "Install"],
        parent_label,
    );
    assert_parent_meta(&meta, root_event_id, parent_label, true);
    // The remaining events must arrive in exactly this order.
    let expectations = [
        ProgressAssertion::Update {
            current: 1,
            phase: 0,
        },
        ProgressAssertion::PhaseChange {
            phase: 1,
            name: "Fetch",
        },
        ProgressAssertion::Update {
            current: 2,
            phase: 1,
        },
        ProgressAssertion::PhaseChange {
            phase: 2,
            name: "Install",
        },
        ProgressAssertion::Update {
            current: 3,
            phase: 2,
        },
        ProgressAssertion::Completed,
    ];
    for expectation in expectations {
        let EventMessage { meta, event } = events
            .next()
            .unwrap_or_else(|| panic!("missing event for {expectation:?}"));
        match expectation {
            ProgressAssertion::Update { current, phase } => {
                expect_update_event(&event, current, Some(3), Some(phase));
            }
            ProgressAssertion::PhaseChange { phase, name } => {
                expect_phase_changed_event(&event, phase, name);
            }
            ProgressAssertion::Completed => {
                assert!(
                    matches!(event, AppEvent::Progress(ProgressEvent::Completed { .. })),
                    "expected completion event, got {event:?}"
                );
            }
        }
        // Every non-root event links back to the Started event.
        assert_parent_meta(&meta, root_event_id, parent_label, false);
    }
    assert!(events.next().is_none(), "unexpected extra events");
}
// Out-of-range phase indices must clamp to the last phase rather than
// panic, and the emitted event keeps the parent metadata intact.
#[test]
fn change_phase_clamps_index_and_preserves_parent_metadata() {
    let manager = ProgressManager::new();
    let tracker_id = manager.create_tracker_with_phases(
        "batch-job".to_string(),
        "batch operation".to_string(),
        Some(4),
        vec![
            ProgressPhase::new("Stage", "initial staging"),
            ProgressPhase::new("Process", "process work"),
        ],
        None,
    );
    let emitter = TestEmitter::new();
    let parent_label = "batch:parent";
    manager.emit_started(&tracker_id, &emitter, Some(parent_label));
    let root_event_id = manager
        .get_tracker(&tracker_id)
        .expect("tracker")
        .root_event_id();
    emitter.drain();
    // Index 10 is far past the two configured phases; expect clamp to 1.
    manager.change_phase(&tracker_id, 10, &emitter);
    let events = emitter.drain();
    assert_eq!(events.len(), 1);
    let EventMessage { meta, event } = &events[0];
    match event {
        AppEvent::Progress(ProgressEvent::PhaseChanged {
            phase, phase_name, ..
        }) => {
            assert_eq!(*phase, 1);
            assert_eq!(phase_name, "Process");
        }
        other => panic!("expected clamped phase change, got {other:?}"),
    }
    assert_eq!(meta.parent_id, Some(root_event_id));
    assert_eq!(
        meta.labels.get("progress_parent").map(String::as_str),
        Some(parent_label)
    );
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/progress/speed.rs | crates/events/src/progress/speed.rs | //! Speed calculation algorithms with smoothing and outlier detection
use super::config::TrendDirection;
use std::collections::VecDeque;
use std::time::{Duration, Instant};
/// Sample point for speed calculation
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)] // Fields used for potential future analytics
pub(crate) struct SpeedSample {
    /// Timestamp when sample was taken
    pub timestamp: Instant,
    /// Total bytes/items processed at this time (cumulative, not a delta)
    pub progress: u64,
    /// Time delta since last sample
    pub delta_time: Duration,
    /// Progress delta since last sample
    pub delta_progress: u64,
    /// Instantaneous speed for this sample
    /// (`delta_progress / delta_time`, in units per second)
    pub speed: f64,
}
/// Efficient ring buffer for speed samples with automatic pruning
#[derive(Debug, Clone)]
pub(crate) struct SpeedBuffer {
    /// Fixed-size ring buffer for recent samples (oldest at the front,
    /// newest at the back)
    pub samples: VecDeque<SpeedSample>,
    /// Maximum number of samples to keep
    max_size: usize,
    /// Running sum of sample speeds, kept in sync with `samples` so the
    /// mean can be computed in O(1)
    speed_sum: f64,
    /// Last recorded progress value (cumulative)
    last_progress: u64,
    /// Last sample timestamp
    last_timestamp: Instant,
}
impl SpeedBuffer {
/// Create an empty buffer that retains at most `max_size` samples.
pub fn new(max_size: usize) -> Self {
    Self {
        samples: VecDeque::with_capacity(max_size),
        max_size,
        speed_sum: 0.0,
        last_progress: 0,
        // Baseline: the first sample's delta is measured from creation.
        last_timestamp: Instant::now(),
    }
}
/// Add a new sample, calculating speed and managing buffer size.
///
/// Returns the instantaneous speed, or `None` when the sample was
/// dropped because it arrived too soon after the previous one.
pub fn add_sample(&mut self, progress: u64, timestamp: Instant) -> Option<f64> {
    let delta_time = timestamp.duration_since(self.last_timestamp);
    // `saturating_sub` guards against progress appearing to move
    // backwards; the delta is then simply 0.
    let delta_progress = progress.saturating_sub(self.last_progress);
    // Avoid division by zero and very small time deltas
    if delta_time.as_nanos() < 100_000 {
        // Less than 0.1ms, ignore this sample
        return None;
    }
    // Calculate instantaneous speed (units per second)
    #[allow(clippy::cast_precision_loss)] // Precision loss acceptable for speed calculation
    let speed = delta_progress as f64 / delta_time.as_secs_f64();
    let sample = SpeedSample {
        timestamp,
        progress,
        delta_time,
        delta_progress,
        speed,
    };
    // Remove oldest sample if at capacity, keeping `speed_sum` in sync.
    if self.samples.len() >= self.max_size {
        if let Some(old_sample) = self.samples.pop_front() {
            self.speed_sum -= old_sample.speed;
        }
    }
    // Add new sample
    self.samples.push_back(sample);
    self.speed_sum += speed;
    // Update state
    self.last_progress = progress;
    self.last_timestamp = timestamp;
    Some(speed)
}
/// Calculate smoothed speed with outlier detection.
///
/// Samples further than `outlier_threshold` standard deviations from the
/// mean are discarded; the survivors are averaged with linearly
/// increasing weight toward newer samples.
pub fn calculate_smoothed_speed(&self, outlier_threshold: f64) -> Option<f64> {
    if self.samples.is_empty() {
        return None;
    }
    if self.samples.len() == 1 {
        return Some(self.samples[0].speed);
    }
    // Calculate mean and standard deviation for outlier detection
    // (`speed_sum` is maintained incrementally, so the mean is O(1)).
    let mean = self.speed_sum / self.samples.len() as f64;
    let variance = self
        .samples
        .iter()
        .map(|s| (s.speed - mean).powi(2))
        .sum::<f64>()
        / self.samples.len() as f64;
    let std_dev = variance.sqrt();
    // Filter outliers and calculate smoothed average
    let mut valid_speeds = Vec::new();
    for sample in &self.samples {
        // Reject samples more than threshold * std_dev from mean
        if (sample.speed - mean).abs() <= outlier_threshold * std_dev {
            valid_speeds.push(sample.speed);
        }
    }
    if valid_speeds.is_empty() {
        // All samples were outliers, fall back to simple average
        Some(mean)
    } else {
        // Calculate weighted average with recent samples having more weight
        let mut weighted_sum = 0.0;
        let mut weight_sum = 0.0;
        for (i, &speed) in valid_speeds.iter().enumerate() {
            // Linear weighting: newer samples get higher weight
            // (weights ramp from 1.0 up toward 2.0).
            let weight = 1.0 + i as f64 / valid_speeds.len() as f64;
            weighted_sum += speed * weight;
            weight_sum += weight;
        }
        Some(weighted_sum / weight_sum)
    }
}
/// Calculate exponential moving average of sample speeds for trend
/// analysis, seeded from the oldest sample.
#[allow(dead_code)] // Reserved for future enhanced ETA calculations
pub fn calculate_ema(&self, alpha: f64) -> Option<f64> {
    let mut iter = self.samples.iter();
    let seed = iter.next()?.speed;
    Some(iter.fold(seed, |ema, sample| {
        alpha * sample.speed + (1.0 - alpha) * ema
    }))
}
/// Get recent trend direction (acceleration/deceleration).
///
/// Fits a least-squares line through the speeds of roughly the newest
/// third of the samples (at least 2) in chronological order and
/// classifies the slope.
pub fn get_trend(&self) -> TrendDirection {
    if self.samples.len() < 3 {
        return TrendDirection::Stable;
    }
    let recent_count = (self.samples.len() / 3).max(2);
    // Take the newest samples, then restore chronological order so the
    // regression's x axis runs oldest -> newest. Bug fix: without the
    // reverse, index 0 was the *newest* sample, which inverted the
    // slope's sign — acceleration was reported as deceleration and vice
    // versa.
    let mut recent_samples: Vec<_> = self.samples.iter().rev().take(recent_count).collect();
    recent_samples.reverse();
    // Standard least-squares slope over x = 0..n.
    let n = recent_samples.len() as f64;
    let sum_x: f64 = (0..recent_samples.len()).map(|i| i as f64).sum();
    let sum_y: f64 = recent_samples.iter().map(|s| s.speed).sum();
    let sum_x_y: f64 = recent_samples
        .iter()
        .enumerate()
        .map(|(i, s)| i as f64 * s.speed)
        .sum();
    let sum_x_squared: f64 = (0..recent_samples.len()).map(|i| (i as f64).powi(2)).sum();
    let slope = (n * sum_x_y - sum_x * sum_y) / (n * sum_x_squared - sum_x.powi(2));
    // Classify trend based on slope
    if slope > 0.1 {
        TrendDirection::Accelerating
    } else if slope < -0.1 {
        TrendDirection::Decelerating
    } else {
        TrendDirection::Stable
    }
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/progress/mod.rs | crates/events/src/progress/mod.rs | #![deny(clippy::pedantic, unsafe_code)]
#![allow(
clippy::module_name_repetitions,
clippy::cast_precision_loss, // Mathematical calculations require f64
clippy::cast_possible_truncation, // Intentional for progress calculations
clippy::cast_sign_loss, // Weights are always positive
clippy::similar_names, // Mathematical variable naming is clear
clippy::missing_panics_doc, // Mutex::lock panics are documented as safe
clippy::must_use_candidate, // Many builder methods are self-evident
clippy::uninlined_format_args // Format args are clear in context
)]
// Module declarations
pub mod config;
pub mod manager;
pub mod speed;
pub mod tracker;
pub mod update;
// Public re-exports for main API
pub use config::{ProgressConfig, ProgressPhase, TrendDirection};
pub use manager::ProgressManager;
pub use tracker::ProgressTracker;
pub use update::ProgressUpdate;
/// Standardized progress patterns for user-facing operations
pub mod patterns {
use super::config::ProgressPhase;
use super::ProgressManager;
use std::time::Duration;
/// Configuration for download progress tracking
///
/// Consumed by `ProgressManager::create_download_tracker` (taken by reference).
#[derive(Debug, Clone)]
pub struct DownloadProgressConfig {
/// Human-readable operation description (e.g., "Downloading jq package")
// NOTE(review): currently unused by `create_download_tracker`, which derives
// its display string from `package_name` instead — confirm intent.
pub operation_name: String,
/// Total bytes to download (enables percentage calculation and ETA)
/// Set to None for unknown size downloads
pub total_bytes: Option<u64>,
/// Package name for display purposes (optional)
/// Used in progress messages: "Downloading {`package_name`}"
pub package_name: Option<String>,
/// Source URL for debugging and logging
// NOTE(review): also unused by `create_download_tracker` — confirm intent.
pub url: String,
}
/// Configuration for install progress tracking
///
/// Consumed by `ProgressManager::create_install_tracker`.
#[derive(Debug, Clone)]
pub struct InstallProgressConfig {
/// Human-readable operation description (e.g., "Installing packages")
pub operation_name: String,
/// Number of packages to install (used for progress calculation)
pub package_count: u64,
/// Whether to include dependency resolution phase (adds 10% weight)
/// Set to true for fresh installs, false for pre-resolved packages
pub include_dependency_resolution: bool,
}
/// Configuration for update/upgrade progress tracking
///
/// Consumed by `ProgressManager::create_update_tracker`.
#[derive(Debug, Clone)]
pub struct UpdateProgressConfig {
/// Human-readable operation description (e.g., "Updating packages")
pub operation_name: String,
/// Number of packages to update/upgrade (used for progress calculation)
pub package_count: u64,
/// Whether this is an upgrade (true) or update (false)
/// Affects progress messaging and phase weights
// NOTE(review): `create_update_tracker` does not currently read this flag —
// the doc claim above is aspirational; confirm before relying on it.
pub is_upgrade: bool,
}
/// Configuration for uninstall progress tracking
///
/// Consumed by `ProgressManager::create_uninstall_tracker`.
#[derive(Debug, Clone)]
pub struct UninstallProgressConfig {
/// Human-readable operation description (e.g., "Uninstalling packages")
pub operation_name: String,
/// Number of packages to uninstall (used for progress calculation)
pub package_count: u64,
}
/// Configuration for vulnerability database update progress tracking
///
/// Consumed by `ProgressManager::create_vulndb_tracker`.
#[derive(Debug, Clone)]
pub struct VulnDbUpdateProgressConfig {
/// Human-readable operation description (e.g., "Updating vulnerability database")
pub operation_name: String,
/// Number of vulnerability sources to update (e.g., NVD, OSV, GitHub = 3)
/// Used as the tracker's total unit count.
pub sources_count: u64,
}
impl ProgressManager {
/// Create standardized download progress tracker
pub fn create_download_tracker(&self, config: &DownloadProgressConfig) -> String {
let id = format!("download_{}", uuid::Uuid::new_v4());
let operation = format!(
"Downloading {}",
config.package_name.as_deref().unwrap_or("package")
);
let phases = vec![
ProgressPhase {
name: "Connect".to_string(),
weight: 0.05,
estimated_duration: Some(Duration::from_secs(2)),
description: Some("Establishing network connection".to_string()),
},
ProgressPhase {
name: "Download".to_string(),
weight: 0.9,
estimated_duration: None, // Calculated based on speed
description: Some("Transferring data".to_string()),
},
ProgressPhase {
name: "Verify".to_string(),
weight: 0.05,
estimated_duration: Some(Duration::from_secs(1)),
description: Some("Verifying checksum/signature".to_string()),
},
];
self.create_tracker_with_phases(
id.clone(),
operation,
config.total_bytes,
phases,
None,
);
id
}
/// Create standardized install progress tracker
pub fn create_install_tracker(&self, config: InstallProgressConfig) -> String {
let id = format!("install_{}", uuid::Uuid::new_v4());
let mut phases = Vec::new();
if config.include_dependency_resolution {
phases.push(ProgressPhase {
name: "Resolve".to_string(),
weight: 0.1,
estimated_duration: Some(Duration::from_secs(5)),
description: Some("Resolving dependencies".to_string()),
});
}
phases.extend_from_slice(&[
ProgressPhase {
name: "Download".to_string(),
weight: 0.5,
estimated_duration: None,
description: Some("Downloading packages".to_string()),
},
ProgressPhase {
name: "Validate".to_string(),
weight: 0.15,
estimated_duration: None,
description: Some("Validating artifacts".to_string()),
},
ProgressPhase {
name: "Stage".to_string(),
weight: 0.15,
estimated_duration: None,
description: Some("Staging files".to_string()),
},
ProgressPhase {
name: "Commit".to_string(),
weight: 0.1,
estimated_duration: Some(Duration::from_secs(2)),
description: Some("Committing to live".to_string()),
},
]);
self.create_tracker_with_phases(
id.clone(),
config.operation_name,
Some(config.package_count),
phases,
None,
);
id
}
/// Create standardized update/upgrade progress tracker
pub fn create_update_tracker(&self, config: UpdateProgressConfig) -> String {
let id = format!("update_{}", uuid::Uuid::new_v4());
let phases = vec![
ProgressPhase {
name: "Check".to_string(),
weight: 0.1,
estimated_duration: Some(Duration::from_secs(3)),
description: Some("Checking for updates".to_string()),
},
ProgressPhase {
name: "Download".to_string(),
weight: 0.6,
estimated_duration: None,
description: Some("Downloading updates".to_string()),
},
ProgressPhase {
name: "Install".to_string(),
weight: 0.3,
estimated_duration: None,
description: Some("Installing updates".to_string()),
},
];
self.create_tracker_with_phases(
id.clone(),
config.operation_name,
Some(config.package_count),
phases,
None,
);
id
}
/// Create standardized uninstall progress tracker
pub fn create_uninstall_tracker(&self, config: UninstallProgressConfig) -> String {
let id = format!("uninstall_{}", uuid::Uuid::new_v4());
let phases = vec![
ProgressPhase {
name: "Analyze".to_string(),
weight: 0.2,
estimated_duration: Some(Duration::from_secs(2)),
description: Some("Analyzing dependencies".to_string()),
},
ProgressPhase {
name: "Remove".to_string(),
weight: 0.7,
estimated_duration: None,
description: Some("Removing files".to_string()),
},
ProgressPhase {
name: "Cleanup".to_string(),
weight: 0.1,
estimated_duration: Some(Duration::from_secs(1)),
description: Some("Cleaning up".to_string()),
},
];
self.create_tracker_with_phases(
id.clone(),
config.operation_name,
Some(config.package_count),
phases,
None,
);
id
}
/// Create standardized vulnerability database update progress tracker
pub fn create_vulndb_tracker(&self, config: VulnDbUpdateProgressConfig) -> String {
let id = format!("vulndb_{}", uuid::Uuid::new_v4());
let phases = vec![
ProgressPhase {
name: "Initialize".to_string(),
weight: 0.1,
estimated_duration: Some(Duration::from_secs(2)),
description: Some("Initializing update".to_string()),
},
ProgressPhase {
name: "Download".to_string(),
weight: 0.8,
estimated_duration: None, // Depends on network speed
description: Some("Downloading vulnerability data".to_string()),
},
ProgressPhase {
name: "Process".to_string(),
weight: 0.1,
estimated_duration: Some(Duration::from_secs(5)),
description: Some("Processing data".to_string()),
},
];
self.create_tracker_with_phases(
id.clone(),
config.operation_name,
Some(config.sources_count),
phases,
None,
);
id
}
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/general.rs | crates/events/src/events/general.rs | use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// General utility events for warnings, errors, and operations
///
/// Serialized with an internal `type` tag.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum GeneralEvent {
    /// Generic warning message with optional context
    Warning {
        message: String,
        context: Option<String>,
    },
    /// Generic error message with optional details
    Error {
        message: String,
        details: Option<String>,
    },
    /// Debug logging with structured context
    DebugLog {
        message: String,
        context: HashMap<String, String>,
    },
    /// Generic operation started notification
    OperationStarted { operation: String },
    /// Generic operation completion with success status
    OperationCompleted { operation: String, success: bool },
    /// Generic operation failure with error details
    OperationFailed {
        operation: String,
        failure: super::FailureContext,
    },
    /// Check mode preview of planned action
    CheckModePreview {
        operation: String,
        action: String,
        // Consistency fix: use the `HashMap` imported at the top of the file
        // (as `DebugLog` already does) instead of the fully qualified
        // `std::collections::HashMap` path. Same type, no behavior change.
        details: HashMap<String, String>,
    },
    /// Check mode summary of all planned changes
    CheckModeSummary {
        operation: String,
        total_changes: usize,
        categories: HashMap<String, usize>,
    },
}
impl GeneralEvent {
    /// Build a warning event with no extra context.
    pub fn warning(message: impl Into<String>) -> Self {
        Self::Warning { message: message.into(), context: None }
    }

    /// Build a warning event carrying additional context.
    pub fn warning_with_context(message: impl Into<String>, context: impl Into<String>) -> Self {
        let (message, context) = (message.into(), context.into());
        Self::Warning { message, context: Some(context) }
    }

    /// Build an error event with no extra details.
    pub fn error(message: impl Into<String>) -> Self {
        Self::Error { message: message.into(), details: None }
    }

    /// Build an error event carrying extra details.
    pub fn error_with_details(message: impl Into<String>, details: impl Into<String>) -> Self {
        let (message, details) = (message.into(), details.into());
        Self::Error { message, details: Some(details) }
    }

    /// Build a debug log event with an empty context map.
    pub fn debug(message: impl Into<String>) -> Self {
        Self::debug_with_context(message, HashMap::new())
    }

    /// Build a debug log event with structured context.
    pub fn debug_with_context(
        message: impl Into<String>,
        context: HashMap<String, String>,
    ) -> Self {
        Self::DebugLog { message: message.into(), context }
    }

    /// Build an operation-failed event from a failure context.
    pub fn operation_failed(operation: impl Into<String>, failure: super::FailureContext) -> Self {
        Self::OperationFailed { operation: operation.into(), failure }
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/qa.rs | crates/events/src/events/qa.rs | use serde::{Deserialize, Serialize};
use sps2_types::Version;
use std::path::PathBuf;
use super::FailureContext;
/// Target package being evaluated by QA.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QaTarget {
/// Package name.
pub package: String,
/// Package version under evaluation.
pub version: Version,
}
/// QA level applied to the pipeline.
///
/// Serialized in `snake_case` (e.g. `fast`, `custom`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum QaLevel {
Fast,
Standard,
Strict,
/// Caller-defined level, identified by name.
Custom(String),
}
/// Status for an individual QA check.
///
/// Serialized in `snake_case` (`passed`, `failed`, `skipped`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum QaCheckStatus {
Passed,
Failed,
Skipped,
}
/// Severity for QA findings.
///
/// Serialized in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum QaSeverity {
Info,
Warning,
Error,
Critical,
}
/// Individual finding emitted by a QA check.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QaFinding {
/// Human-readable description of the finding.
pub message: String,
/// Severity classification.
pub severity: QaSeverity,
/// File the finding refers to, if any; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub file: Option<PathBuf>,
/// Line number within `file`, if known; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub line: Option<u32>,
}
/// Summary emitted after a QA check completes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QaCheckSummary {
    /// Check name.
    pub name: String,
    /// Check category.
    pub category: String,
    /// Outcome of the check.
    pub status: QaCheckStatus,
    /// Wall-clock duration in milliseconds, when measured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_ms: Option<u64>,
    /// Findings produced by the check; omitted from serialized output when
    /// empty. `default` is required so that payloads where the field was
    /// skipped can still be deserialized — a plain `Vec` field is mandatory
    /// in serde (unlike `Option` fields), so `skip_serializing_if` alone
    /// breaks the round-trip.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub findings: Vec<QaFinding>,
}
/// QA events consumed by CLI/logging pipelines.
///
/// Serialized with an internal `type` tag in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum QaEvent {
/// Pipeline run began for `target` at the given level.
PipelineStarted {
target: QaTarget,
level: QaLevel,
},
/// Pipeline finished; the counts cover individual checks.
PipelineCompleted {
target: QaTarget,
total_checks: usize,
passed: usize,
failed: usize,
duration_ms: u64,
},
/// Pipeline aborted with a failure before completion.
PipelineFailed {
target: QaTarget,
failure: FailureContext,
},
/// A single check finished and produced a summary.
CheckEvaluated {
target: QaTarget,
summary: QaCheckSummary,
},
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/guard.rs | crates/events/src/events/guard.rs | use serde::{Deserialize, Serialize};
/// Scope covered by a guard operation.
///
/// Serialized with a `kind` tag in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum GuardScope {
/// Entire system.
System,
/// A single package, optionally pinned to a version.
Package {
name: String,
#[serde(skip_serializing_if = "Option::is_none")]
version: Option<String>,
},
/// A filesystem path.
Path {
path: String,
},
/// A specific state, referenced by id.
State {
id: String,
},
/// Free-form scope description.
Custom {
description: String,
},
}
/// Verification depth applied during guard operations.
///
/// Serialized with adjacent `level`/`details` tagging in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "level", content = "details", rename_all = "snake_case")]
pub enum GuardLevel {
Quick,
Standard,
Full,
/// Caller-defined level, identified by name.
Custom(String),
}
/// Summary of verification targets.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardTargetSummary {
/// Number of packages covered.
pub packages: usize,
/// Number of files covered, when tracked; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub files: Option<usize>,
}
/// Metrics captured at the end of a verification run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardVerificationMetrics {
/// Wall-clock duration in milliseconds.
pub duration_ms: u64,
// NOTE(review): presumably a ratio in [0.0, 1.0] — confirm with producer.
pub cache_hit_rate: f32,
// NOTE(review): presumably a percentage in [0.0, 100.0] — confirm with producer.
pub coverage_percent: f32,
}
/// Planned healing workload distribution.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardHealingPlan {
/// Total number of discrepancies in the plan.
pub total: usize,
/// Discrepancies that can be healed automatically.
pub auto_heal: usize,
/// Discrepancies that require user confirmation first.
pub confirmation_required: usize,
/// Discrepancies that must be resolved manually.
pub manual_only: usize,
}
/// Severity of a guard discrepancy.
///
/// Serialized in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum GuardSeverity {
Low,
Medium,
High,
Critical,
}
/// Structured description of a guard discrepancy surfaced to consumers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardDiscrepancy {
/// Discrepancy category identifier.
pub kind: String,
/// Severity classification.
pub severity: GuardSeverity,
/// Filesystem location involved, if known; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub location: Option<String>,
/// Affected package, if attributable; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub package: Option<String>,
/// Affected package version, if known; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
/// Human-readable description of the discrepancy.
pub message: String,
/// Whether automatic healing can address this discrepancy.
pub auto_heal_available: bool,
/// Whether healing requires user confirmation first.
pub requires_confirmation: bool,
}
/// Guard events for filesystem integrity verification.
///
/// Serialized with an internal `type` tag in `snake_case`. Events belonging
/// to the same run share an `operation_id`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum GuardEvent {
/// Guard verification started.
VerificationStarted {
operation_id: String,
scope: GuardScope,
level: GuardLevel,
},
/// Guard verification completed successfully.
VerificationCompleted {
operation_id: String,
scope: GuardScope,
// Count of discrepancies found; details arrive via DiscrepancyReported.
discrepancies: usize,
metrics: GuardVerificationMetrics,
},
/// Guard verification failed before completion.
VerificationFailed {
operation_id: String,
scope: GuardScope,
failure: super::FailureContext,
},
/// Healing workflow started.
HealingStarted {
operation_id: String,
plan: GuardHealingPlan,
},
/// Healing workflow completed.
HealingCompleted {
operation_id: String,
healed: usize,
failed: usize,
duration_ms: u64,
},
/// Healing workflow failed prematurely.
HealingFailed {
operation_id: String,
failure: super::FailureContext,
// Number of discrepancies healed before the failure.
healed: usize,
},
/// Discrepancy discovered during verification or healing.
DiscrepancyReported {
operation_id: String,
discrepancy: GuardDiscrepancy,
},
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/platform.rs | crates/events/src/events/platform.rs | use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use super::FailureContext;
/// High-level category for a platform operation.
///
/// Serialized in `snake_case` (e.g. `tool_discovery`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PlatformOperationKind {
Binary,
Filesystem,
Process,
ToolDiscovery,
}
/// Descriptor for a process command execution.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProcessCommandDescriptor {
/// Executable name or path.
pub program: String,
/// Command-line arguments, in order.
pub args: Vec<String>,
/// Working directory, when set; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub cwd: Option<PathBuf>,
}
/// Context describing the operation being performed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformOperationContext {
/// Operation category.
pub kind: PlatformOperationKind,
/// Free-form operation name.
pub operation: String,
/// Primary target path, if any; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub target: Option<PathBuf>,
/// Source path, if any; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<PathBuf>,
/// Command descriptor for process executions; omitted when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub command: Option<ProcessCommandDescriptor>,
}
/// Optional metrics gathered for completed operations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformOperationMetrics {
/// Wall-clock duration in milliseconds, when measured.
#[serde(skip_serializing_if = "Option::is_none")]
pub duration_ms: Option<u64>,
/// Process exit code, for process operations.
#[serde(skip_serializing_if = "Option::is_none")]
pub exit_code: Option<i32>,
/// Bytes captured on stdout, when tracked.
#[serde(skip_serializing_if = "Option::is_none")]
pub stdout_bytes: Option<u64>,
/// Bytes captured on stderr, when tracked.
#[serde(skip_serializing_if = "Option::is_none")]
pub stderr_bytes: Option<u64>,
/// Human-readable list of changes made, when tracked.
#[serde(skip_serializing_if = "Option::is_none")]
pub changes: Option<Vec<String>>,
}
/// Platform events surfaced to consumers.
///
/// Serialized with an internal `type` tag in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum PlatformEvent {
/// Operation began.
OperationStarted {
context: PlatformOperationContext,
},
/// Operation finished successfully.
OperationCompleted {
context: PlatformOperationContext,
#[serde(skip_serializing_if = "Option::is_none")]
metrics: Option<PlatformOperationMetrics>,
},
/// Operation failed; metrics may still be present (e.g. partial output).
OperationFailed {
context: PlatformOperationContext,
failure: FailureContext,
#[serde(skip_serializing_if = "Option::is_none")]
metrics: Option<PlatformOperationMetrics>,
},
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/state.rs | crates/events/src/events/state.rs | use serde::{Deserialize, Serialize};
use sps2_types::StateId;
use super::FailureContext;
/// Context describing a state transition.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StateTransitionContext {
/// Name of the operation driving the transition.
pub operation: String,
/// State transitioned from, if any; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<StateId>,
/// State transitioned to.
pub target: StateId,
}
/// Optional summary for completed transitions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransitionSummary {
/// Wall-clock duration in milliseconds, when measured.
#[serde(skip_serializing_if = "Option::is_none")]
pub duration_ms: Option<u64>,
}
/// Context for rollback operations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RollbackContext {
/// State being rolled back from.
pub from: StateId,
/// State being rolled back to.
pub to: StateId,
}
/// Optional summary for completed rollbacks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RollbackSummary {
/// Wall-clock duration in milliseconds, when measured.
#[serde(skip_serializing_if = "Option::is_none")]
pub duration_ms: Option<u64>,
}
/// Summary for cleanup operations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CleanupSummary {
/// Number of states scheduled for removal.
pub planned_states: usize,
/// Number of states actually removed; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub removed_states: Option<usize>,
/// Disk space reclaimed in bytes, when measured.
#[serde(skip_serializing_if = "Option::is_none")]
pub space_freed_bytes: Option<u64>,
/// Wall-clock duration in milliseconds, when measured.
#[serde(skip_serializing_if = "Option::is_none")]
pub duration_ms: Option<u64>,
}
/// State events emitted by state manager and install flows.
///
/// Serialized with an internal `type` tag in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum StateEvent {
/// A state transition began.
TransitionStarted {
context: StateTransitionContext,
},
/// A state transition finished successfully.
TransitionCompleted {
context: StateTransitionContext,
#[serde(skip_serializing_if = "Option::is_none")]
summary: Option<TransitionSummary>,
},
/// A state transition failed.
TransitionFailed {
context: StateTransitionContext,
failure: FailureContext,
},
/// A rollback began.
RollbackStarted {
context: RollbackContext,
},
/// A rollback finished successfully.
RollbackCompleted {
context: RollbackContext,
#[serde(skip_serializing_if = "Option::is_none")]
summary: Option<RollbackSummary>,
},
/// A rollback failed.
RollbackFailed {
context: RollbackContext,
failure: FailureContext,
},
/// Cleanup began with the given plan.
CleanupStarted {
summary: CleanupSummary,
},
/// Cleanup finished successfully.
CleanupCompleted {
summary: CleanupSummary,
},
/// Cleanup failed.
CleanupFailed {
summary: CleanupSummary,
failure: FailureContext,
},
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/lifecycle.rs | crates/events/src/events/lifecycle.rs | use serde::{Deserialize, Serialize};
use sps2_types::Version;
use std::time::Duration;
use super::FailureContext;
/// Generic lifecycle stages for simple operations
///
/// Serialized in `snake_case` (`started`, `completed`, `failed`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum LifecycleStage {
Started,
Completed,
Failed,
}
/// Domain identifier for lifecycle events
///
/// Serialized in `snake_case`.
// NOTE(review): `LifecycleEvent` variants in this module do not embed this
// type; confirm it is consumed elsewhere before relying on it.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum LifecycleDomain {
Acquisition,
Download,
Install,
Resolver,
Repo,
Uninstall,
Update,
}
// ============================================================================
// Domain-specific context structures
// ============================================================================
/// Context for acquisition events
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AcquisitionContext {
/// Package name being acquired.
pub package: String,
/// Package version being acquired.
pub version: Version,
/// Where the package is obtained from (remote or store cache).
pub source: LifecycleAcquisitionSource,
/// Acquired size in bytes; the constructors set this only on completion.
#[serde(skip_serializing_if = "Option::is_none")]
pub size: Option<u64>,
}
/// Source of package acquisition
///
/// Serialized in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LifecycleAcquisitionSource {
/// Fetched over the network from `url`.
Remote { url: String, mirror_priority: u8 },
/// Reused from the local store cache, keyed by content hash.
StoreCache { hash: String },
}
/// Context for download events
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DownloadContext {
/// Source URL of the download.
pub url: String,
/// Associated package name, if known; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub package: Option<String>,
/// Expected total size; the constructors set this only on the started event.
#[serde(skip_serializing_if = "Option::is_none")]
pub total_bytes: Option<u64>,
/// Bytes transferred; the constructors set this only on completion.
#[serde(skip_serializing_if = "Option::is_none")]
pub bytes_downloaded: Option<u64>,
}
/// Context for install events
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstallContext {
/// Package name being installed.
pub package: String,
/// Package version being installed.
pub version: Version,
/// Number of files installed; the constructors set this only on completion.
#[serde(skip_serializing_if = "Option::is_none")]
pub files_installed: Option<usize>,
}
/// Context for resolver events
///
/// A single context shared by the Started/Completed/Failed stages; each stage
/// populates only its own field group (see the constructor helpers).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResolverContext {
    // Started fields
    #[serde(skip_serializing_if = "Option::is_none")]
    pub runtime_targets: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub build_targets: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub local_targets: Option<usize>,
    // Completed fields
    #[serde(skip_serializing_if = "Option::is_none")]
    pub total_packages: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub downloaded_packages: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reused_packages: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_ms: Option<u64>,
    // Failed fields
    /// Packages involved in a resolution conflict; empty except on failure.
    /// `default` is required so that payloads where the field was skipped can
    /// still be deserialized — a plain `Vec` field is mandatory in serde
    /// (unlike `Option` fields), so `skip_serializing_if` alone breaks the
    /// round-trip.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub conflicting_packages: Vec<String>,
}
/// Context for repo sync events
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RepoContext {
/// Repository URL; the constructors carry it only on started/failed events.
#[serde(skip_serializing_if = "Option::is_none")]
pub url: Option<String>,
/// Packages updated; the constructors set this only on completion.
#[serde(skip_serializing_if = "Option::is_none")]
pub packages_updated: Option<usize>,
/// Wall-clock duration in milliseconds; set only on completion.
#[serde(skip_serializing_if = "Option::is_none")]
pub duration_ms: Option<u64>,
/// Bytes transferred; set only on completion.
#[serde(skip_serializing_if = "Option::is_none")]
pub bytes_transferred: Option<u64>,
}
/// Context for uninstall events
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UninstallContext {
/// Package name, if known; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub package: Option<String>,
/// Package version, if known; omitted from output when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<Version>,
/// Number of files removed, when tracked; omitted when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub files_removed: Option<usize>,
}
/// Context for update events
///
/// A single context shared across stages; fields are populated per stage.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateContext {
/// Which kind of update operation this is.
pub operation: LifecycleUpdateOperation,
/// Explicitly requested package names, if any.
#[serde(skip_serializing_if = "Option::is_none")]
pub requested: Option<Vec<String>>,
/// Total number of packages targeted.
#[serde(skip_serializing_if = "Option::is_none")]
pub total_targets: Option<usize>,
/// Per-package results for completed updates.
#[serde(skip_serializing_if = "Option::is_none")]
pub updated: Option<Vec<LifecycleUpdateResult>>,
/// Number of packages skipped.
#[serde(skip_serializing_if = "Option::is_none")]
pub skipped: Option<usize>,
/// Names of packages that failed to update.
#[serde(skip_serializing_if = "Option::is_none")]
pub failed: Option<Vec<String>>,
/// Total wall-clock duration of the operation.
#[serde(skip_serializing_if = "Option::is_none")]
pub duration: Option<Duration>,
/// Net installed-size change in bytes (may be negative).
#[serde(skip_serializing_if = "Option::is_none")]
pub size_difference: Option<i64>,
}
/// Types of update operations
///
/// Serialized in `snake_case`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LifecycleUpdateOperation {
Update,
Upgrade,
Downgrade,
Reinstall,
}
/// Package update types based on semantic versioning
///
/// Serialized in `snake_case` (e.g. `pre_release`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LifecyclePackageUpdateType {
Patch,
Minor,
Major,
PreRelease,
}
/// Update result for completed package updates
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LifecycleUpdateResult {
/// Package name.
pub package: String,
/// Version before the update.
pub from_version: Version,
/// Version after the update.
pub to_version: Version,
/// Semver classification of the version jump.
pub update_type: LifecyclePackageUpdateType,
/// Time taken for this package.
pub duration: Duration,
/// Installed-size delta in bytes (may be negative).
pub size_change: i64,
}
// ============================================================================
// Generic lifecycle event structure
// ============================================================================
/// Generic lifecycle event that consolidates simple Started/Completed/Failed patterns
///
/// Serialized with an internal `type` tag in `snake_case`. In every variant
/// the constructor helpers populate `failure` only when `stage` is
/// `LifecycleStage::Failed`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum LifecycleEvent {
/// Package acquisition lifecycle.
Acquisition {
stage: LifecycleStage,
context: AcquisitionContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
/// Download lifecycle.
Download {
stage: LifecycleStage,
context: DownloadContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
/// Install lifecycle.
Install {
stage: LifecycleStage,
context: InstallContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
/// Dependency resolution lifecycle.
Resolver {
stage: LifecycleStage,
context: ResolverContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
/// Repository sync lifecycle.
Repo {
stage: LifecycleStage,
context: RepoContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
/// Uninstall lifecycle.
Uninstall {
stage: LifecycleStage,
context: UninstallContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
/// Update/upgrade lifecycle.
Update {
stage: LifecycleStage,
context: UpdateContext,
#[serde(skip_serializing_if = "Option::is_none")]
failure: Option<FailureContext>,
},
}
// ============================================================================
// Helper methods for ergonomic event creation
// ============================================================================
impl LifecycleEvent {
// Acquisition helpers
/// Create an acquisition started event
#[must_use]
pub fn acquisition_started(
    package: String,
    version: Version,
    source: LifecycleAcquisitionSource,
) -> Self {
    let context = AcquisitionContext { package, version, source, size: None };
    Self::Acquisition { stage: LifecycleStage::Started, context, failure: None }
}
/// Create an acquisition completed event
#[must_use]
pub fn acquisition_completed(
    package: String,
    version: Version,
    source: LifecycleAcquisitionSource,
    size: u64,
) -> Self {
    let context = AcquisitionContext { package, version, source, size: Some(size) };
    Self::Acquisition { stage: LifecycleStage::Completed, context, failure: None }
}
/// Create an acquisition failed event
#[must_use]
pub fn acquisition_failed(
    package: String,
    version: Version,
    source: LifecycleAcquisitionSource,
    failure: FailureContext,
) -> Self {
    let context = AcquisitionContext { package, version, source, size: None };
    Self::Acquisition { stage: LifecycleStage::Failed, context, failure: Some(failure) }
}
// Download helpers
/// Create a download started event
#[must_use]
pub fn download_started(
    url: String,
    package: Option<String>,
    total_bytes: Option<u64>,
) -> Self {
    let context = DownloadContext { url, package, total_bytes, bytes_downloaded: None };
    Self::Download { stage: LifecycleStage::Started, context, failure: None }
}
/// Create a download completed event
///
/// Only the realized `bytes_downloaded` count is carried; the expected
/// `total_bytes` is not re-reported on completion.
#[must_use]
pub fn download_completed(url: String, package: Option<String>, bytes_downloaded: u64) -> Self {
    let context = DownloadContext {
        url,
        package,
        total_bytes: None,
        bytes_downloaded: Some(bytes_downloaded),
    };
    Self::Download { stage: LifecycleStage::Completed, context, failure: None }
}
/// Create a download failed event
#[must_use]
pub fn download_failed(url: String, package: Option<String>, failure: FailureContext) -> Self {
    let context = DownloadContext { url, package, total_bytes: None, bytes_downloaded: None };
    Self::Download { stage: LifecycleStage::Failed, context, failure: Some(failure) }
}
// Install helpers
/// Create an install started event
#[must_use]
pub fn install_started(package: String, version: Version) -> Self {
    let context = InstallContext { package, version, files_installed: None };
    Self::Install { stage: LifecycleStage::Started, context, failure: None }
}
/// Create an install completed event
#[must_use]
pub fn install_completed(package: String, version: Version, files_installed: usize) -> Self {
    let context = InstallContext {
        package,
        version,
        files_installed: Some(files_installed),
    };
    Self::Install { stage: LifecycleStage::Completed, context, failure: None }
}
/// Create an install failed event
#[must_use]
pub fn install_failed(package: String, version: Version, failure: FailureContext) -> Self {
    let context = InstallContext { package, version, files_installed: None };
    Self::Install { stage: LifecycleStage::Failed, context, failure: Some(failure) }
}
// Resolver helpers
/// Create a resolver started event
#[must_use]
pub fn resolver_started(
    runtime_targets: usize,
    build_targets: usize,
    local_targets: usize,
) -> Self {
    let context = ResolverContext {
        runtime_targets: Some(runtime_targets),
        build_targets: Some(build_targets),
        local_targets: Some(local_targets),
        total_packages: None,
        downloaded_packages: None,
        reused_packages: None,
        duration_ms: None,
        conflicting_packages: Vec::new(),
    };
    Self::Resolver { stage: LifecycleStage::Started, context, failure: None }
}
/// Create a resolver completed event
#[must_use]
pub fn resolver_completed(
total_packages: usize,
downloaded_packages: usize,
reused_packages: usize,
duration_ms: u64,
) -> Self {
Self::Resolver {
stage: LifecycleStage::Completed,
context: ResolverContext {
runtime_targets: None,
build_targets: None,
local_targets: None,
total_packages: Some(total_packages),
downloaded_packages: Some(downloaded_packages),
reused_packages: Some(reused_packages),
duration_ms: Some(duration_ms),
conflicting_packages: vec![],
},
failure: None,
}
}
/// Create a resolver failed event
#[must_use]
pub fn resolver_failed(failure: FailureContext, conflicting_packages: Vec<String>) -> Self {
Self::Resolver {
stage: LifecycleStage::Failed,
context: ResolverContext {
runtime_targets: None,
build_targets: None,
local_targets: None,
total_packages: None,
downloaded_packages: None,
reused_packages: None,
duration_ms: None,
conflicting_packages,
},
failure: Some(failure),
}
}
// Repo helpers
/// Create a repo sync started event
///
/// `url` is optional because a sync may target the default repository.
#[must_use]
pub fn repo_sync_started(url: Option<String>) -> Self {
    Self::Repo {
        stage: LifecycleStage::Started,
        context: RepoContext {
            url,
            packages_updated: None,
            duration_ms: None,
            bytes_transferred: None,
        },
        failure: None,
    }
}
/// Create a repo sync completed event
#[must_use]
pub fn repo_sync_completed(
    packages_updated: usize,
    duration_ms: u64,
    bytes_transferred: u64,
) -> Self {
    Self::Repo {
        stage: LifecycleStage::Completed,
        context: RepoContext {
            url: None,
            packages_updated: Some(packages_updated),
            duration_ms: Some(duration_ms),
            bytes_transferred: Some(bytes_transferred),
        },
        failure: None,
    }
}
/// Create a repo sync failed event
#[must_use]
pub fn repo_sync_failed(url: Option<String>, failure: FailureContext) -> Self {
    Self::Repo {
        stage: LifecycleStage::Failed,
        context: RepoContext {
            url,
            packages_updated: None,
            duration_ms: None,
            bytes_transferred: None,
        },
        failure: Some(failure),
    }
}
// Uninstall helpers
/// Create an uninstall started event
#[must_use]
pub fn uninstall_started(package: String, version: Version) -> Self {
    Self::Uninstall {
        stage: LifecycleStage::Started,
        context: UninstallContext {
            package: Some(package),
            version: Some(version),
            files_removed: None,
        },
        failure: None,
    }
}
/// Create an uninstall completed event
///
/// `files_removed` is the number of files deleted for the package.
#[must_use]
pub fn uninstall_completed(package: String, version: Version, files_removed: usize) -> Self {
    Self::Uninstall {
        stage: LifecycleStage::Completed,
        context: UninstallContext {
            package: Some(package),
            version: Some(version),
            files_removed: Some(files_removed),
        },
        failure: None,
    }
}
/// Create an uninstall failed event
///
/// `package`/`version` are optional here (unlike start/complete) because a
/// failure can occur before the target package is identified.
#[must_use]
pub fn uninstall_failed(
    package: Option<String>,
    version: Option<Version>,
    failure: FailureContext,
) -> Self {
    Self::Uninstall {
        stage: LifecycleStage::Failed,
        context: UninstallContext {
            package,
            version,
            files_removed: None,
        },
        failure: Some(failure),
    }
}
// Update helpers
/// Create an update started event
///
/// `requested` lists the package names asked for; `total_targets` is the
/// resolved number of packages that will actually be touched.
#[must_use]
pub fn update_started(
    operation: LifecycleUpdateOperation,
    requested: Vec<String>,
    total_targets: usize,
) -> Self {
    Self::Update {
        stage: LifecycleStage::Started,
        context: UpdateContext {
            operation,
            requested: Some(requested),
            total_targets: Some(total_targets),
            updated: None,
            skipped: None,
            failed: None,
            duration: None,
            size_difference: None,
        },
        failure: None,
    }
}
/// Create an update completed event
///
/// `size_difference` is signed: negative when the update shrank the install.
#[must_use]
pub fn update_completed(
    operation: LifecycleUpdateOperation,
    updated: Vec<LifecycleUpdateResult>,
    skipped: usize,
    duration: Duration,
    size_difference: i64,
) -> Self {
    Self::Update {
        stage: LifecycleStage::Completed,
        context: UpdateContext {
            operation,
            requested: None,
            total_targets: None,
            updated: Some(updated),
            skipped: Some(skipped),
            failed: None,
            duration: Some(duration),
            size_difference: Some(size_difference),
        },
        failure: None,
    }
}
/// Create an update failed event
///
/// `updated` still carries the packages that succeeded before the failure;
/// `failed` names the ones that did not.
#[must_use]
pub fn update_failed(
    operation: LifecycleUpdateOperation,
    updated: Vec<LifecycleUpdateResult>,
    failed: Vec<String>,
    failure: FailureContext,
) -> Self {
    Self::Update {
        stage: LifecycleStage::Failed,
        context: UpdateContext {
            operation,
            requested: None,
            total_targets: None,
            updated: Some(updated),
            skipped: None,
            failed: Some(failed),
            duration: None,
            size_difference: None,
        },
        failure: Some(failure),
    }
}
/// Get the domain for this lifecycle event
///
/// Exhaustive on purpose: adding a variant forces an update here.
#[must_use]
pub fn domain(&self) -> LifecycleDomain {
    match self {
        Self::Acquisition { .. } => LifecycleDomain::Acquisition,
        Self::Download { .. } => LifecycleDomain::Download,
        Self::Install { .. } => LifecycleDomain::Install,
        Self::Resolver { .. } => LifecycleDomain::Resolver,
        Self::Repo { .. } => LifecycleDomain::Repo,
        Self::Uninstall { .. } => LifecycleDomain::Uninstall,
        Self::Update { .. } => LifecycleDomain::Update,
    }
}
/// Get the stage for this lifecycle event
///
/// Every variant carries a `stage` field, so an or-pattern suffices.
#[must_use]
pub fn stage(&self) -> &LifecycleStage {
    match self {
        Self::Acquisition { stage, .. }
        | Self::Download { stage, .. }
        | Self::Install { stage, .. }
        | Self::Resolver { stage, .. }
        | Self::Repo { stage, .. }
        | Self::Uninstall { stage, .. }
        | Self::Update { stage, .. } => stage,
    }
}
/// Get the failure context if this is a failed event
///
/// Returns `None` for started/completed events, which carry `failure: None`.
#[must_use]
pub fn failure(&self) -> Option<&FailureContext> {
    match self {
        Self::Acquisition { failure, .. }
        | Self::Download { failure, .. }
        | Self::Install { failure, .. }
        | Self::Resolver { failure, .. }
        | Self::Repo { failure, .. }
        | Self::Uninstall { failure, .. }
        | Self::Update { failure, .. } => failure.as_ref(),
    }
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/build.rs | crates/events/src/events/build.rs | use serde::{Deserialize, Serialize};
use sps2_types::Version;
use std::path::PathBuf;
/// Build system types supported by sps2
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum BuildSystem {
    Autotools,
    CMake,
    Cargo,
    Make,
    Ninja,
    /// Recipe-defined build steps that fit none of the known systems.
    Custom,
}
/// Build phases for multi-stage operations
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum BuildPhase {
    Source,
    Build,
    PostProcess,
    Package,
}
/// Identifier and configuration for a build session.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildSession {
    /// Unique session identifier used to correlate subsequent events.
    pub id: String,
    pub system: BuildSystem,
    pub cache_enabled: bool,
}
/// Target package for a build.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildTarget {
    pub package: String,
    pub version: Version,
}
/// Descriptor for a command executed during a build.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommandDescriptor {
    /// Optional id used to tie log chunks back to this command.
    pub id: Option<String>,
    pub command: String,
    pub working_dir: PathBuf,
}
/// Stream for build log output.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LogStream {
    Stdout,
    Stderr,
}
/// Status updates for build phases.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "status", rename_all = "snake_case")]
pub enum PhaseStatus {
    Started,
    Completed {
        #[serde(skip_serializing_if = "Option::is_none")]
        duration_ms: Option<u64>,
    },
}
/// Structured diagnostics emitted during a build session.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum BuildDiagnostic {
    /// Non-fatal warning attributed to a session (optionally to a source).
    Warning {
        session_id: String,
        message: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        source: Option<String>,
    },
    /// Raw stdout/stderr text streamed from a build command.
    LogChunk {
        session_id: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        command_id: Option<String>,
        stream: LogStream,
        text: String,
    },
    /// Result of pruning the build cache.
    CachePruned {
        removed_items: usize,
        freed_bytes: u64,
    },
}
/// Build-specific events consumed by the CLI and logging pipeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BuildEvent {
    /// Build session started with high-level context.
    Started {
        session: BuildSession,
        target: BuildTarget,
    },
    /// Build phase status update.
    PhaseStatus {
        session_id: String,
        phase: BuildPhase,
        status: PhaseStatus,
    },
    /// Build completed successfully.
    Completed {
        session_id: String,
        target: BuildTarget,
        /// Paths of the produced package artifacts.
        artifacts: Vec<PathBuf>,
        duration_ms: u64,
    },
    /// Build failed during execution.
    Failed {
        session_id: String,
        target: BuildTarget,
        failure: super::FailureContext,
        /// Phase in which the failure occurred, when known.
        #[serde(skip_serializing_if = "Option::is_none")]
        phase: Option<BuildPhase>,
        /// Command that failed, when the failure maps to one.
        #[serde(skip_serializing_if = "Option::is_none")]
        command: Option<CommandDescriptor>,
    },
    /// Structured diagnostics for warning/log streaming.
    Diagnostic(BuildDiagnostic),
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/progress.rs | crates/events/src/events/progress.rs | use serde::{Deserialize, Serialize};
use std::time::Duration;
// Use the unified progress phase type from the progress module
use crate::progress::config::ProgressPhase;
/// Progress tracking events with sophisticated algorithms
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ProgressEvent {
    /// Progress tracking started
    Started {
        id: String,
        operation: String,
        /// Total work units; `None` when the total is unknown up front.
        total: Option<u64>,
        phases: Vec<ProgressPhase>,
        /// Set when this tracker is a child of another tracker.
        parent_id: Option<String>,
    },
    /// Progress updated with calculated metrics
    Updated {
        id: String,
        current: u64,
        total: Option<u64>,
        /// Index into the tracker's phase list, when phased.
        phase: Option<usize>,
        speed: Option<f64>,
        eta: Option<Duration>,
        efficiency: Option<f64>,
    },
    /// Progress phase changed
    PhaseChanged {
        id: String,
        phase: usize,
        phase_name: String,
    },
    /// Progress completed successfully
    Completed {
        id: String,
        duration: Duration,
        final_speed: Option<f64>,
        total_processed: u64,
    },
    /// Progress failed
    Failed {
        id: String,
        failure: super::FailureContext,
        completed_items: u64,
        partial_duration: Duration,
    },
    /// Progress paused
    Paused {
        id: String,
        reason: String,
        items_completed: u64,
    },
    /// Progress resumed
    Resumed {
        id: String,
        pause_duration: Duration,
    },
    /// Nested progress - child operation started
    ChildStarted {
        parent_id: String,
        child_id: String,
        operation: String,
        weight: f64, // Contribution to parent progress (0.0-1.0)
    },
    /// Nested progress - child operation completed
    ChildCompleted {
        parent_id: String,
        child_id: String,
        success: bool,
    },
}
impl ProgressEvent {
    /// Create a simple progress started event
    ///
    /// No phases and no parent: a flat, standalone tracker.
    pub fn started(
        id: impl Into<String>,
        operation: impl Into<String>,
        total: Option<u64>,
    ) -> Self {
        Self::Started {
            id: id.into(),
            operation: operation.into(),
            total,
            phases: vec![],
            parent_id: None,
        }
    }
    /// Create a progress started event with phases
    pub fn started_with_phases(
        id: impl Into<String>,
        operation: impl Into<String>,
        total: Option<u64>,
        phases: Vec<ProgressPhase>,
    ) -> Self {
        Self::Started {
            id: id.into(),
            operation: operation.into(),
            total,
            phases,
            parent_id: None,
        }
    }
    /// Create a child progress started event
    ///
    /// `weight` is the child's contribution to the parent's progress (0.0-1.0).
    pub fn child_started(
        parent_id: impl Into<String>,
        child_id: impl Into<String>,
        operation: impl Into<String>,
        weight: f64,
    ) -> Self {
        Self::ChildStarted {
            parent_id: parent_id.into(),
            child_id: child_id.into(),
            operation: operation.into(),
            weight,
        }
    }
    /// Create a progress update event
    ///
    /// Derived metrics (speed, eta, efficiency) are left unset.
    pub fn updated(id: impl Into<String>, current: u64, total: Option<u64>) -> Self {
        Self::Updated {
            id: id.into(),
            current,
            total,
            phase: None,
            speed: None,
            eta: None,
            efficiency: None,
        }
    }
    /// Create a progress completed event
    pub fn completed(id: impl Into<String>, duration: Duration) -> Self {
        Self::Completed {
            id: id.into(),
            duration,
            final_speed: None,
            total_processed: 0,
        }
    }
    /// Create a progress failed event
    pub fn failed(id: impl Into<String>, failure: super::FailureContext) -> Self {
        Self::Failed {
            id: id.into(),
            failure,
            completed_items: 0,
            partial_duration: Duration::from_secs(0),
        }
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/mod.rs | crates/events/src/events/mod.rs | use serde::{Deserialize, Serialize};
use crate::EventSource;
use sps2_errors::UserFacingError;
/// Structured failure information shared across domains.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FailureContext {
    /// Optional stable error code once taxonomy lands.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    /// Short user-facing message.
    pub message: String,
    /// Optional remediation hint.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hint: Option<String>,
    /// Whether retrying the operation might succeed.
    pub retryable: bool,
}
impl FailureContext {
    /// Construct a new failure context.
    #[must_use]
    pub fn new(
        code: Option<impl Into<String>>,
        message: impl Into<String>,
        hint: Option<impl Into<String>>,
        retryable: bool,
    ) -> Self {
        // Convert the optional pieces up front; the struct literal then
        // reads as a plain field-by-field assembly.
        let code = code.map(Into::into);
        let hint = hint.map(Into::into);
        let message = message.into();
        Self {
            code,
            message,
            hint,
            retryable,
        }
    }
    /// Build failure context from a `UserFacingError` implementation.
    #[must_use]
    pub fn from_error<E: UserFacingError + ?Sized>(error: &E) -> Self {
        let message = error.user_message().into_owned();
        Self::new(
            error.user_code(),
            message,
            error.user_hint(),
            error.is_retryable(),
        )
    }
}
// Declare all domain modules
pub mod build;
pub mod general;
pub mod guard;
pub mod lifecycle; // Generic lifecycle events (replaces acquisition, download, install, resolver, repo, uninstall, update)
pub mod package;
pub mod platform;
pub mod progress;
pub mod qa;
pub mod state;
// Re-export all domain events
pub use build::*;
pub use general::*;
pub use guard::*;
pub use lifecycle::*; // Generic lifecycle events (replaces old event types)
pub use package::*;
pub use platform::*;
pub use progress::*;
pub use qa::*;
pub use state::*;
/// Top-level application event enum that aggregates all domain-specific events
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "domain", content = "event", rename_all = "snake_case")]
pub enum AppEvent {
    /// General utility events (warnings, errors, operations)
    General(GeneralEvent),
    /// Build system events (compilation, caching, sessions)
    Build(BuildEvent),
    /// State management events (transactions, rollbacks)
    State(StateEvent),
    /// Progress tracking events (sophisticated progress algorithms)
    Progress(ProgressEvent),
    /// Guard events (filesystem integrity, healing)
    Guard(GuardEvent),
    /// Quality assurance events (artifact validation)
    Qa(QaEvent),
    /// Package operation events (high-level package operations)
    Package(PackageEvent),
    /// Platform-specific operation events (binary, filesystem, process operations)
    Platform(PlatformEvent),
    /// Generic lifecycle events (acquisition, download, install, resolver, repo, uninstall, update)
    Lifecycle(LifecycleEvent),
}
impl AppEvent {
    /// Identify the source domain for this event (used for metadata/logging).
    ///
    /// Lifecycle events fan out to one source per `LifecycleDomain` so the
    /// old per-domain sources are preserved.
    #[must_use]
    pub fn event_source(&self) -> EventSource {
        match self {
            AppEvent::General(_) => EventSource::GENERAL,
            AppEvent::Build(_) => EventSource::BUILD,
            AppEvent::State(_) => EventSource::STATE,
            AppEvent::Progress(_) => EventSource::PROGRESS,
            AppEvent::Guard(_) => EventSource::GUARD,
            AppEvent::Qa(_) => EventSource::QA,
            AppEvent::Package(_) => EventSource::PACKAGE,
            AppEvent::Platform(_) => EventSource::PLATFORM,
            AppEvent::Lifecycle(event) => match event.domain() {
                LifecycleDomain::Acquisition => EventSource::ACQUISITION,
                LifecycleDomain::Download => EventSource::DOWNLOAD,
                LifecycleDomain::Install => EventSource::INSTALL,
                LifecycleDomain::Resolver => EventSource::RESOLVER,
                LifecycleDomain::Repo => EventSource::REPO,
                LifecycleDomain::Uninstall => EventSource::UNINSTALL,
                LifecycleDomain::Update => EventSource::UPDATE,
            },
        }
    }
    /// Determine the appropriate tracing log level for this event
    #[must_use]
    pub fn log_level(&self) -> tracing::Level {
        use tracing::Level;
        match self {
            // Error-level events
            AppEvent::General(GeneralEvent::Error { .. })
            | AppEvent::Build(BuildEvent::Failed { .. })
            | AppEvent::Progress(ProgressEvent::Failed { .. })
            | AppEvent::Qa(QaEvent::PipelineFailed { .. })
            | AppEvent::Package(PackageEvent::OperationFailed { .. })
            | AppEvent::Platform(PlatformEvent::OperationFailed { .. })
            | AppEvent::Guard(
                GuardEvent::VerificationFailed { .. } | GuardEvent::HealingFailed { .. },
            )
            | AppEvent::State(
                StateEvent::TransitionFailed { .. }
                | StateEvent::RollbackFailed { .. }
                | StateEvent::CleanupFailed { .. },
            ) => Level::ERROR,
            // Lifecycle events - check stage
            AppEvent::Lifecycle(event) if event.stage() == &LifecycleStage::Failed => Level::ERROR,
            // Warning-level events
            AppEvent::General(GeneralEvent::Warning { .. })
            | AppEvent::Build(BuildEvent::Diagnostic(build::BuildDiagnostic::Warning { .. })) => {
                Level::WARN
            }
            // Debug-level events (progress updates, internal state)
            AppEvent::General(GeneralEvent::DebugLog { .. })
            | AppEvent::Build(BuildEvent::Diagnostic(build::BuildDiagnostic::LogChunk {
                ..
            }))
            | AppEvent::Progress(ProgressEvent::Updated { .. })
            | AppEvent::Qa(QaEvent::CheckEvaluated { .. }) => Level::DEBUG,
            // Trace-level events (very detailed internal operations)
            AppEvent::Build(BuildEvent::Diagnostic(build::BuildDiagnostic::CachePruned {
                ..
            })) => Level::TRACE,
            // Default to INFO for most events
            _ => Level::INFO,
        }
    }
    /// Get the log target for this event (for structured logging)
    ///
    /// Mirrors `event_source`: lifecycle events keep their per-domain targets.
    #[must_use]
    pub fn log_target(&self) -> &'static str {
        match self {
            AppEvent::General(_) => "sps2::events::general",
            AppEvent::Build(_) => "sps2::events::build",
            AppEvent::State(_) => "sps2::events::state",
            AppEvent::Progress(_) => "sps2::events::progress",
            AppEvent::Guard(_) => "sps2::events::guard",
            AppEvent::Qa(_) => "sps2::events::qa",
            AppEvent::Package(_) => "sps2::events::package",
            AppEvent::Platform(_) => "sps2::events::platform",
            AppEvent::Lifecycle(event) => match event.domain() {
                LifecycleDomain::Acquisition => "sps2::events::acquisition",
                LifecycleDomain::Download => "sps2::events::download",
                LifecycleDomain::Install => "sps2::events::install",
                LifecycleDomain::Resolver => "sps2::events::resolver",
                LifecycleDomain::Repo => "sps2::events::repo",
                LifecycleDomain::Uninstall => "sps2::events::uninstall",
                LifecycleDomain::Update => "sps2::events::update",
            },
        }
    }
    /// Get structured fields for logging (simplified for now)
    #[must_use]
    pub fn log_fields(&self) -> String {
        // For now, use debug formatting. In the future, this could be more sophisticated
        // with structured key-value pairs extracted from each event type.
        format!("{self:?}")
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/events/src/events/package.rs | crates/events/src/events/package.rs | use serde::{Deserialize, Serialize};
use super::FailureContext;
/// Named package operations surfaced to consumers.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PackageOperation {
    List,
    Search,
    HealthCheck,
    SelfUpdate,
    Cleanup,
}
/// Outcome payloads for completed operations.
///
/// Each variant corresponds to one `PackageOperation`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum PackageOutcome {
    List {
        total: usize,
    },
    Search {
        query: String,
        total: usize,
    },
    Health {
        healthy: bool,
        issues: Vec<String>,
    },
    SelfUpdate {
        from: String,
        to: String,
        duration_ms: u64,
    },
    Cleanup {
        states_removed: usize,
        packages_removed: usize,
        duration_ms: u64,
    },
}
/// Package-level events consumed by CLI/log handlers.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum PackageEvent {
    /// Operation began.
    OperationStarted {
        operation: PackageOperation,
    },
    /// Operation finished successfully, with its result payload.
    OperationCompleted {
        operation: PackageOperation,
        outcome: PackageOutcome,
    },
    /// Operation failed, with structured failure details.
    OperationFailed {
        operation: PackageOperation,
        failure: FailureContext,
    },
}
/// Health status indicator for health checks.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum HealthStatus {
    Healthy,
    Warning,
    Error,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/guard/src/lib.rs | crates/guard/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
//! Lightweight state guard utilities for verifying and healing package installations.
mod refcount;
mod store;
mod verifier;
pub use refcount::sync_refcounts_to_active_state;
pub use store::{StoreVerificationConfig, StoreVerificationStats, StoreVerifier};
pub use verifier::{Discrepancy, VerificationLevel, VerificationResult, Verifier};
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/guard/src/store.rs | crates/guard/src/store.rs | use sps2_errors::Error;
use sps2_events::{
AppEvent, EventEmitter, GuardEvent, GuardLevel, GuardScope, GuardTargetSummary,
GuardVerificationMetrics,
};
use sps2_hash::Hash;
use sps2_state::{queries, StateManager};
use sps2_store::FileStore;
use std::sync::Arc;
use std::time::{Duration, Instant};
use uuid::Uuid;
/// Configuration for store verification.
#[derive(Debug, Clone)]
pub struct StoreVerificationConfig {
    /// Maximum age in seconds before re-verification is needed.
    pub max_age_seconds: i64,
    /// Maximum verification attempts before we stop retrying.
    pub max_attempts: i32,
    /// Maximum number of objects processed per batch.
    pub batch_size: i64,
}

impl Default for StoreVerificationConfig {
    /// Defaults: re-verify objects older than thirty days, give up after
    /// three attempts, process 64 objects per batch.
    fn default() -> Self {
        const THIRTY_DAYS_SECS: i64 = 30 * 24 * 60 * 60;
        Self {
            max_age_seconds: THIRTY_DAYS_SECS,
            max_attempts: 3,
            batch_size: 64,
        }
    }
}
/// Statistics from a store verification run.
///
/// Counters ending in `_count` reflect database totals; the `*_this_run`
/// and `processed`/`passed` fields describe only the most recent run.
#[derive(Debug, Clone, Default)]
pub struct StoreVerificationStats {
    pub total_objects: i64,
    pub verified_count: i64,
    pub pending_count: i64,
    pub failed_count: i64,
    pub quarantined_count: i64,
    pub processed_count: u64,
    pub passed_count: u64,
    pub failed_this_run: u64,
    pub quarantined_this_run: u64,
    pub duration: Duration,
    pub objects_per_second: f64,
}

impl StoreVerificationStats {
    /// An all-zero statistics value (no objects known, no work performed).
    ///
    /// Delegates to the derived `Default`: every integer/float field
    /// defaults to zero and `Duration::default()` is `Duration::ZERO`,
    /// matching the previous hand-written initializer exactly.
    #[must_use]
    pub fn empty() -> Self {
        Self::default()
    }
}
/// Minimal verifier for the content-addressed store objects.
pub struct StoreVerifier {
    state_manager: Arc<StateManager>,
    file_store: Arc<FileStore>,
    config: StoreVerificationConfig,
}
impl StoreVerifier {
    /// Create a verifier over the given state database and file store.
    pub fn new(
        state_manager: Arc<StateManager>,
        file_store: Arc<FileStore>,
        config: StoreVerificationConfig,
    ) -> Self {
        Self {
            state_manager,
            file_store,
            config,
        }
    }
    /// Return summary statistics without performing verification.
    ///
    /// Only the database counters are populated; per-run fields are zero.
    pub async fn get_stats(&self) -> Result<StoreVerificationStats, Error> {
        let mut tx = self.state_manager.begin_transaction().await?;
        let (total, verified, pending, failed, quarantined) =
            queries::get_verification_stats(&mut tx).await?;
        tx.commit().await?;
        Ok(StoreVerificationStats {
            total_objects: total,
            verified_count: verified,
            pending_count: pending,
            failed_count: failed,
            quarantined_count: quarantined,
            processed_count: 0,
            passed_count: 0,
            failed_this_run: 0,
            quarantined_this_run: 0,
            duration: Duration::ZERO,
            objects_per_second: 0.0,
        })
    }
    /// Run verification over the store and emit guard events describing progress.
    ///
    /// Processes objects in batches of `config.batch_size` until the query
    /// returns none, emitting a `DiscrepancyReported` event for each object
    /// that fails, and a `VerificationCompleted` summary at the end.
    pub async fn verify_with_progress<E>(&self, events: &E) -> Result<StoreVerificationStats, Error>
    where
        E: EventEmitter,
    {
        // Snapshot database counters so per-run deltas can be computed later.
        let initial = self.get_stats().await?;
        let operation_id = Uuid::new_v4().to_string();
        let scope = GuardScope::Custom {
            description: "store objects".to_string(),
        };
        events.emit(AppEvent::Guard(GuardEvent::VerificationStarted {
            operation_id: operation_id.clone(),
            scope: scope.clone(),
            level: GuardLevel::Full,
            targets: GuardTargetSummary {
                packages: 0,
                files: Some(initial.pending_count as usize),
            },
        }));
        let mut processed = 0u64;
        let mut passed = 0u64;
        let mut failed = 0u64;
        let start = Instant::now();
        loop {
            // Each batch is fetched in its own short-lived transaction.
            let mut tx = self.state_manager.begin_transaction().await?;
            let objects = queries::get_objects_needing_verification(
                &mut tx,
                self.config.max_age_seconds,
                self.config.max_attempts,
                self.config.batch_size,
            )
            .await?;
            tx.commit().await?;
            if objects.is_empty() {
                break;
            }
            for object in objects {
                let hash = match Hash::from_hex(&object.hash) {
                    Ok(h) => h,
                    Err(e) => {
                        // A malformed hash in the database is itself a
                        // discrepancy; count it and move on.
                        failed += 1;
                        processed += 1;
                        events.emit(AppEvent::Guard(GuardEvent::DiscrepancyReported {
                            operation_id: operation_id.clone(),
                            discrepancy: sps2_events::GuardDiscrepancy {
                                kind: "invalid_hash".to_string(),
                                severity: sps2_events::GuardSeverity::High,
                                location: Some(object.hash.clone()),
                                package: None,
                                version: None,
                                message: format!("invalid hash stored in database: {e}"),
                                auto_heal_available: false,
                                requires_confirmation: false,
                            },
                        }));
                        continue;
                    }
                };
                // Verify one object per transaction so tracking updates
                // commit independently of the rest of the batch.
                let mut tx = self.state_manager.begin_transaction().await?;
                let verified =
                    queries::verify_file_with_tracking(&mut tx, &self.file_store, &hash).await?;
                tx.commit().await?;
                processed += 1;
                if verified {
                    passed += 1;
                } else {
                    failed += 1;
                    events.emit(AppEvent::Guard(GuardEvent::DiscrepancyReported {
                        operation_id: operation_id.clone(),
                        discrepancy: sps2_events::GuardDiscrepancy {
                            kind: "store_object_failed".to_string(),
                            severity: sps2_events::GuardSeverity::High,
                            location: Some(hash.to_hex()),
                            package: None,
                            version: None,
                            message: "store object failed verification".to_string(),
                            auto_heal_available: false,
                            requires_confirmation: false,
                        },
                    }));
                }
            }
        }
        let duration = start.elapsed();
        let final_stats = self.get_stats().await?;
        let ops_per_sec = if duration.as_secs_f64() > 0.0 {
            processed as f64 / duration.as_secs_f64()
        } else {
            0.0
        };
        // Quarantine delta is derived from before/after database counters,
        // clamped to zero in case counts moved the other way.
        let quarantined_run = if final_stats.quarantined_count > initial.quarantined_count {
            (final_stats.quarantined_count - initial.quarantined_count) as u64
        } else {
            0
        };
        events.emit(AppEvent::Guard(GuardEvent::VerificationCompleted {
            operation_id: operation_id.clone(),
            scope,
            discrepancies: failed as usize,
            metrics: GuardVerificationMetrics {
                duration_ms: duration.as_millis() as u64,
                cache_hit_rate: 0.0,
                coverage_percent: 100.0,
            },
        }));
        Ok(StoreVerificationStats {
            total_objects: final_stats.total_objects,
            verified_count: final_stats.verified_count,
            pending_count: final_stats.pending_count,
            failed_count: final_stats.failed_count,
            quarantined_count: final_stats.quarantined_count,
            processed_count: processed,
            passed_count: passed,
            failed_this_run: failed,
            quarantined_this_run: quarantined_run,
            duration,
            objects_per_second: ops_per_sec,
        })
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/guard/src/verifier.rs | crates/guard/src/verifier.rs | use crate::refcount::sync_refcounts_to_active_state;
use sps2_errors::{Error, OpsError};
use sps2_events::{
AppEvent, EventEmitter, EventSender, GuardDiscrepancy, GuardEvent, GuardLevel, GuardScope,
GuardSeverity, GuardTargetSummary, GuardVerificationMetrics,
};
use sps2_hash::Hash;
use sps2_platform::PlatformManager;
use sps2_state::{queries, Package, PackageFileEntry, StateManager};
use sps2_store::{PackageStore, StoredPackage};
use std::collections::HashSet;
use std::path::Path;
use std::time::Instant;
use tokio::fs;
use uuid::Uuid;
use walkdir::WalkDir;
/// Per-file verification outcome used internally by the verifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EntryStatus {
    /// File present and (at the checked level) matching expectations.
    Ok,
    /// File absent from the live directory.
    Missing,
    /// File present but its content does not match.
    Corrupted,
}
/// Verification level controls the depth of checks performed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum VerificationLevel {
    Quick,
    Standard,
    Full,
}

impl From<&str> for VerificationLevel {
    /// Parse a level name. Only "quick" and "full" are recognized
    /// explicitly; any other string (including "standard") falls back
    /// to `Standard`.
    fn from(value: &str) -> Self {
        if value == "quick" {
            Self::Quick
        } else if value == "full" {
            Self::Full
        } else {
            Self::Standard
        }
    }
}
impl VerificationLevel {
    /// Map this level onto the event-layer `GuardLevel` (1:1 by variant).
    fn as_guard_level(self) -> GuardLevel {
        match self {
            VerificationLevel::Quick => GuardLevel::Quick,
            VerificationLevel::Standard => GuardLevel::Standard,
            VerificationLevel::Full => GuardLevel::Full,
        }
    }
}
/// Issues discovered during verification.
#[derive(Debug, Clone, serde::Serialize)]
pub enum Discrepancy {
    /// A tracked file is absent from the live directory.
    MissingFile {
        package: String,
        version: String,
        path: String,
    },
    /// A tracked file exists but its content does not match the store.
    CorruptedFile {
        package: String,
        version: String,
        path: String,
    },
    /// The package's content is missing from the store entirely.
    MissingPackageContent {
        package: String,
        version: String,
    },
    /// A file in the live directory is not tracked by any package.
    UnexpectedFile {
        path: String,
    },
}
impl Discrepancy {
    /// Convert this discrepancy into the event-layer `GuardDiscrepancy`,
    /// assigning severity and healing metadata per kind:
    /// missing/corrupted files are high severity and auto-healable,
    /// missing package content is critical and needs confirmation,
    /// unexpected files are medium severity.
    fn to_event(&self) -> GuardDiscrepancy {
        match self {
            Discrepancy::MissingFile {
                package,
                version,
                path,
            } => GuardDiscrepancy {
                kind: "missing_file".to_string(),
                severity: GuardSeverity::High,
                location: Some(path.clone()),
                package: Some(package.clone()),
                version: Some(version.clone()),
                message: format!("{package}-{version} is missing {path}"),
                auto_heal_available: true,
                requires_confirmation: false,
            },
            Discrepancy::CorruptedFile {
                package,
                version,
                path,
            } => GuardDiscrepancy {
                kind: "corrupted_file".to_string(),
                severity: GuardSeverity::High,
                location: Some(path.clone()),
                package: Some(package.clone()),
                version: Some(version.clone()),
                message: format!("{package}-{version} has corrupted {path}"),
                auto_heal_available: true,
                requires_confirmation: false,
            },
            Discrepancy::MissingPackageContent { package, version } => GuardDiscrepancy {
                kind: "missing_package_content".to_string(),
                severity: GuardSeverity::Critical,
                location: None,
                package: Some(package.clone()),
                version: Some(version.clone()),
                message: format!("Package {package}-{version} content missing from store"),
                auto_heal_available: false,
                requires_confirmation: true,
            },
            Discrepancy::UnexpectedFile { path } => GuardDiscrepancy {
                kind: "unexpected_file".to_string(),
                severity: GuardSeverity::Medium,
                location: Some(path.clone()),
                package: None,
                version: None,
                message: format!("Untracked file present: {path}"),
                auto_heal_available: false,
                requires_confirmation: false,
            },
        }
    }
}
/// Result of a verification run.
#[derive(Debug, Clone, serde::Serialize)]
pub struct VerificationResult {
    pub state_id: Uuid,
    pub discrepancies: Vec<Discrepancy>,
    pub is_valid: bool,
    pub duration_ms: u64,
}

impl VerificationResult {
    /// Bundle the findings of a verification run.
    ///
    /// `is_valid` is derived: a run is valid exactly when it produced no
    /// discrepancies.
    pub fn new(state_id: Uuid, discrepancies: Vec<Discrepancy>, duration_ms: u64) -> Self {
        Self {
            state_id,
            // Field initializers run in textual order, so `is_empty()` is
            // evaluated before the vector is moved into the struct.
            is_valid: discrepancies.is_empty(),
            discrepancies,
            duration_ms,
        }
    }
}
/// Lightweight verifier that checks live state against the content store.
pub struct Verifier {
    state: StateManager,
    store: PackageStore,
    // Channel used to emit guard events during verification.
    tx: EventSender,
}
impl EventEmitter for Verifier {
    /// Expose the verifier's event channel so `emit` helpers work on it.
    fn event_sender(&self) -> Option<&EventSender> {
        Some(&self.tx)
    }
}
impl Verifier {
    /// Create a verifier over the given state manager, content store, and
    /// event channel.
    pub fn new(state: StateManager, store: PackageStore, tx: EventSender) -> Self {
        Self { state, store, tx }
    }
    /// Verify the active state at `level` without modifying anything on disk.
    pub async fn verify(&self, level: VerificationLevel) -> Result<VerificationResult, Error> {
        self.run(level, false).await
    }
    /// Verify the active state at `level` and attempt repairs: restore
    /// missing/corrupted tracked files from the store and remove orphans.
    pub async fn verify_and_heal(
        &self,
        level: VerificationLevel,
    ) -> Result<VerificationResult, Error> {
        self.run(level, true).await
    }
    /// Synchronize DB refcounts to the active state; returns the number of
    /// updated (store, file) rows.
    pub async fn sync_refcounts(&self) -> Result<(usize, usize), Error> {
        sync_refcounts_to_active_state(&self.state).await
    }
    /// Core verification pass.
    ///
    /// Walks every tracked file of every package in the active state, then
    /// scans the live directory for orphans. Emits `VerificationStarted`,
    /// one `DiscrepancyReported` per finding, and `VerificationCompleted`.
    /// When `heal` is true, a repair is attempted before a discrepancy is
    /// recorded; only findings that survive healing are reported.
    async fn run(&self, level: VerificationLevel, heal: bool) -> Result<VerificationResult, Error> {
        let start = Instant::now();
        let state_id = self.state.get_active_state().await?;
        let live_root = self.state.live_path().to_path_buf();
        let packages = self.load_packages(&state_id).await?;
        let total_files: usize = packages.iter().map(|(_, entries)| entries.len()).sum();
        let operation_id = Uuid::new_v4().to_string();
        let scope = GuardScope::State {
            id: state_id.to_string(),
        };
        self.emit(AppEvent::Guard(GuardEvent::VerificationStarted {
            operation_id: operation_id.clone(),
            scope: scope.clone(),
            level: level.as_guard_level(),
            targets: GuardTargetSummary {
                packages: packages.len(),
                files: Some(total_files),
            },
        }));
        let mut discrepancies = Vec::new();
        // Relative paths of every DB-tracked file; anything on disk that is
        // not in this set is later treated as an orphan.
        let mut tracked_files: HashSet<String> = HashSet::new();
        for (package, entries) in packages.iter() {
            let package_hash = Hash::from_hex(&package.hash).map_err(|e| {
                Error::from(OpsError::OperationFailed {
                    message: format!("invalid package hash for {}: {e}", package.name),
                })
            })?;
            let store_path = self.store.package_path(&package_hash);
            // Whole package missing from the store: record a single
            // package-level discrepancy and skip its per-file checks.
            if !store_path.exists() {
                let discrepancy = Discrepancy::MissingPackageContent {
                    package: package.name.clone(),
                    version: package.version.clone(),
                };
                self.emit_discrepancy(&operation_id, &discrepancy);
                discrepancies.push(discrepancy);
                continue;
            }
            let stored_package = StoredPackage::load(&store_path).await?;
            for entry in entries {
                tracked_files.insert(entry.relative_path.clone());
                match self
                    .verify_entry(&stored_package, package, entry, &live_root, level, heal)
                    .await?
                {
                    EntryStatus::Ok => {}
                    EntryStatus::Missing => {
                        let discrepancy =
                            self.make_discrepancy(package, entry, EntryStatus::Missing);
                        self.emit_discrepancy(&operation_id, &discrepancy);
                        discrepancies.push(discrepancy);
                    }
                    EntryStatus::Corrupted => {
                        let discrepancy =
                            self.make_discrepancy(package, entry, EntryStatus::Corrupted);
                        self.emit_discrepancy(&operation_id, &discrepancy);
                        discrepancies.push(discrepancy);
                    }
                }
            }
        }
        // Detect unexpected files in live directory
        let unexpected = self
            .detect_orphans(&live_root, &tracked_files, heal)
            .await?;
        for discrepancy in unexpected {
            self.emit_discrepancy(&operation_id, &discrepancy);
            discrepancies.push(discrepancy);
        }
        let duration = start.elapsed();
        self.emit(AppEvent::Guard(GuardEvent::VerificationCompleted {
            operation_id,
            scope,
            discrepancies: discrepancies.len(),
            metrics: GuardVerificationMetrics {
                // u128 -> u64 narrowing; a run would have to exceed ~584
                // million years for this to truncate.
                duration_ms: duration.as_millis() as u64,
                // NOTE(review): cache hit rate and coverage are reported as
                // fixed placeholders here; no cache/coverage is measured.
                cache_hit_rate: 0.0,
                coverage_percent: 100.0,
            },
        }));
        Ok(VerificationResult::new(
            state_id,
            discrepancies,
            duration.as_millis() as u64,
        ))
    }
    /// Check one tracked file in the live tree against the store.
    ///
    /// Every level requires the path to exist. Symlinks and directories are
    /// accepted as-is. `Quick` stops at existence; `Full` additionally
    /// compares the file's content hash against the recorded `file_hash`.
    /// With `heal`, missing/corrupted files are restored from the store and
    /// re-checked before being reported.
    async fn verify_entry(
        &self,
        stored_package: &StoredPackage,
        package: &sps2_state::models::Package,
        entry: &PackageFileEntry,
        live_root: &Path,
        level: VerificationLevel,
        heal: bool,
    ) -> Result<EntryStatus, Error> {
        let full_path = live_root.join(&entry.relative_path);
        if !full_path.exists() {
            // Missing file: with healing enabled, try to restore it and
            // re-check existence before reporting.
            if heal
                && self
                    .restore_file(stored_package, package, entry, &full_path)
                    .await
                    .is_ok()
                && full_path.exists()
            {
                return Ok(EntryStatus::Ok);
            }
            return Ok(EntryStatus::Missing);
        }
        let metadata = fs::symlink_metadata(&full_path).await?;
        if metadata.file_type().is_symlink() || metadata.is_dir() {
            // Skip hash verification for symlinks/directories
            return Ok(EntryStatus::Ok);
        }
        if level == VerificationLevel::Quick {
            return Ok(EntryStatus::Ok);
        }
        // Standard level: verify basic file permissions
        // NOTE(review): no permission check is implemented yet — Standard
        // currently behaves exactly like Quick past the existence check.
        if level == VerificationLevel::Standard {
            return Ok(EntryStatus::Ok);
        }
        // Full level: hash comparison
        // NOTE(review): the hash is parsed before the .pyc skip below, so an
        // invalid recorded hash on a bytecode file still errors — confirm
        // whether that ordering is intended.
        let expected_hash = Hash::from_hex(&entry.file_hash).map_err(|e| {
            Error::from(OpsError::OperationFailed {
                message: format!(
                    "invalid file hash for {}:{} - {e}",
                    package.name, entry.relative_path
                ),
            })
        })?;
        // Skip Python bytecode caches for stability
        if entry.relative_path.ends_with(".pyc") || entry.relative_path.contains("__pycache__") {
            return Ok(EntryStatus::Ok);
        }
        let actual_hash = Hash::hash_file(&full_path).await?;
        if actual_hash == expected_hash {
            return Ok(EntryStatus::Ok);
        }
        // Hash mismatch: with healing, restore from the store and verify the
        // restored content actually matches before declaring success.
        if heal
            && self
                .restore_file(stored_package, package, entry, &full_path)
                .await
                .is_ok()
        {
            let rehash = Hash::hash_file(&full_path).await?;
            if rehash == expected_hash {
                return Ok(EntryStatus::Ok);
            }
        }
        Ok(EntryStatus::Corrupted)
    }
    /// Re-materialize one tracked path at `target_path` from the store.
    ///
    /// Picks the per-file object (when the stored package records file
    /// hashes) or the package's extracted file tree as the source, removes
    /// whatever currently occupies `target_path`, and clones the source in
    /// (directory clone, symlink re-creation, or file clone).
    async fn restore_file(
        &self,
        stored_package: &StoredPackage,
        package: &sps2_state::models::Package,
        entry: &PackageFileEntry,
        target_path: &Path,
    ) -> Result<(), Error> {
        let source_path = if stored_package.has_file_hashes() {
            let file_hash = Hash::from_hex(&entry.file_hash).map_err(|e| {
                Error::from(OpsError::OperationFailed {
                    message: format!(
                        "invalid file hash for {}:{} - {e}",
                        package.name, entry.relative_path
                    ),
                })
            })?;
            self.store.file_path(&file_hash)
        } else {
            stored_package.files_path().join(&entry.relative_path)
        };
        if !source_path.exists() {
            return Err(OpsError::OperationFailed {
                message: format!(
                    "missing source file {} for {}-{}",
                    source_path.display(),
                    package.name,
                    package.version
                ),
            }
            .into());
        }
        if let Some(parent) = target_path.parent() {
            fs::create_dir_all(parent).await?;
        }
        let platform = PlatformManager::instance().platform();
        let ctx = platform.create_context(None);
        // Best-effort removal of the existing entry; errors are ignored so a
        // stale/partial target does not abort the restore attempt.
        if let Ok(existing_meta) = fs::symlink_metadata(target_path).await {
            if existing_meta.is_dir() {
                let _ = platform
                    .filesystem()
                    .remove_dir_all(&ctx, target_path)
                    .await;
            } else {
                let _ = platform.filesystem().remove_file(&ctx, target_path).await;
            }
        }
        let metadata = fs::symlink_metadata(&source_path).await?;
        if metadata.is_dir() {
            platform
                .filesystem()
                .clone_directory(&ctx, &source_path, target_path)
                .await?
        } else if metadata.file_type().is_symlink() {
            // Recreate the link itself rather than copying what it points at.
            let target = fs::read_link(&source_path).await?;
            fs::symlink(&target, target_path).await?;
        } else {
            platform
                .filesystem()
                .clone_file(&ctx, &source_path, target_path)
                .await?;
        }
        Ok(())
    }
    /// Map a non-OK `EntryStatus` to the matching `Discrepancy` variant.
    ///
    /// Callers only invoke this for `Missing`/`Corrupted`; `Ok` is a bug.
    fn make_discrepancy(
        &self,
        package: &sps2_state::models::Package,
        entry: &PackageFileEntry,
        status: EntryStatus,
    ) -> Discrepancy {
        match status {
            EntryStatus::Missing => Discrepancy::MissingFile {
                package: package.name.clone(),
                version: package.version.clone(),
                path: entry.relative_path.clone(),
            },
            EntryStatus::Corrupted => Discrepancy::CorruptedFile {
                package: package.name.clone(),
                version: package.version.clone(),
                path: entry.relative_path.clone(),
            },
            EntryStatus::Ok => unreachable!(),
        }
    }
    /// Walk `live_root` and report files not present in `tracked`.
    ///
    /// Directories, unreadable entries, Python bytecode caches, and the
    /// `STATE` marker are skipped. With `heal`, orphans are deleted
    /// (best-effort) instead of reported.
    async fn detect_orphans(
        &self,
        live_root: &Path,
        tracked: &HashSet<String>,
        heal: bool,
    ) -> Result<Vec<Discrepancy>, Error> {
        if !live_root.exists() {
            return Ok(Vec::new());
        }
        let mut unexpected = Vec::new();
        for entry in WalkDir::new(live_root).follow_links(false) {
            let entry = match entry {
                Ok(e) => e,
                // Unreadable directory entries are skipped, not fatal.
                Err(_) => continue,
            };
            if entry.file_type().is_dir() {
                continue;
            }
            // Normalize to forward slashes so paths compare against the
            // DB-tracked relative paths regardless of platform separator.
            let rel_path = match entry.path().strip_prefix(live_root) {
                Ok(p) => p.to_string_lossy().replace('\\', "/"),
                Err(_) => continue,
            };
            if rel_path.ends_with(".pyc") || rel_path.contains("__pycache__") || rel_path == "STATE"
            {
                continue;
            }
            if !tracked.contains(&rel_path) {
                // If healing succeeds the orphan is silently removed; a
                // failed removal falls through and is reported instead.
                if heal && fs::remove_file(entry.path()).await.is_ok() {
                    continue;
                }
                unexpected.push(Discrepancy::UnexpectedFile { path: rel_path });
            }
        }
        Ok(unexpected)
    }
    /// Emit a `DiscrepancyReported` guard event for one finding.
    fn emit_discrepancy(&self, operation_id: &str, discrepancy: &Discrepancy) {
        self.emit(AppEvent::Guard(GuardEvent::DiscrepancyReported {
            operation_id: operation_id.to_string(),
            discrepancy: discrepancy.to_event(),
        }));
    }
    /// Load all packages of `state_id` with their tracked file entries.
    ///
    /// NOTE(review): opens one DB transaction for the package list plus one
    /// per package for its file entries; the package set is therefore not
    /// read atomically with the entries — confirm this is acceptable.
    async fn load_packages(
        &self,
        state_id: &Uuid,
    ) -> Result<Vec<(Package, Vec<PackageFileEntry>)>, Error> {
        let mut tx = self.state.begin_transaction().await?;
        let packages = queries::get_state_packages(&mut tx, state_id).await?;
        tx.commit().await?;
        let mut result = Vec::new();
        for package in packages {
            let mut tx = self.state.begin_transaction().await?;
            let entries = queries::get_package_file_entries(&mut tx, package.id).await?;
            tx.commit().await?;
            result.push((package, entries));
        }
        Ok(result)
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/guard/src/refcount.rs | crates/guard/src/refcount.rs | use sps2_errors::Error;
use sps2_state::StateManager;
/// Synchronize store and file-object refcounts to match the active state.
///
/// Looks up the currently active state and delegates the actual refcount
/// reconciliation to the state manager.
///
/// Returns the number of updated store rows and file rows respectively.
pub async fn sync_refcounts_to_active_state(state: &StateManager) -> Result<(usize, usize), Error> {
    state
        .sync_refcounts_to_state(&state.get_active_state().await?)
        .await
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/resources_limits.rs | crates/config/src/resources_limits.rs | //! Resource limit configuration and availability tracking
//!
//! This module defines resource limits and provides utilities for tracking
//! resource availability across concurrent operations.
use serde::{Deserialize, Serialize};
/// Resource limit configuration
///
/// This structure holds configuration for various resource limits
/// used throughout the installation process. See [`ResourceAvailability`]
/// for the matching runtime view of remaining permits.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResourceLimits {
    /// Maximum number of concurrent downloads
    pub concurrent_downloads: usize,
    /// Maximum number of concurrent decompressions
    pub concurrent_decompressions: usize,
    /// Maximum number of concurrent package installations
    pub concurrent_installations: usize,
    /// Maximum memory usage in bytes (None = unlimited)
    pub memory_usage: Option<u64>,
}
impl ResourceLimits {
    /// Create resource limits for testing (deliberately low so tests can
    /// exercise contention without heavy workloads).
    #[must_use]
    pub fn for_testing() -> Self {
        Self {
            concurrent_downloads: 2,
            concurrent_decompressions: 1,
            concurrent_installations: 1,
            memory_usage: Some(100 << 20), // 100 MiB
        }
    }
    /// Create resource limits based on system capabilities.
    ///
    /// Downloads are capped at 8, decompressions at half the CPU count (at
    /// least 1), and installations stay sequential for safety.
    #[must_use]
    pub fn from_system() -> Self {
        let cpus = match std::thread::available_parallelism() {
            Ok(n) => n.get(),
            Err(_) => 4, // conservative fallback when detection fails
        };
        Self {
            concurrent_downloads: cpus.min(8),
            concurrent_decompressions: std::cmp::max(cpus / 2, 1),
            concurrent_installations: 1, // Keep installations sequential for safety
            memory_usage: None,
        }
    }
}
impl Default for ResourceLimits {
    /// Moderate fixed limits: 4 downloads, 2 decompressions, sequential
    /// installations, no memory cap.
    fn default() -> Self {
        Self {
            concurrent_downloads: 4,
            concurrent_decompressions: 2,
            concurrent_installations: 1,
            memory_usage: None,
        }
    }
}
/// Trait for converting pipeline configurations to resource limits
///
/// This trait allows different pipeline configuration types to be converted
/// into `ResourceLimits` for use with the `ResourceManager`.
/// Implementors consume `self`, so the conversion may reuse owned data.
pub trait IntoResourceLimits {
    /// Convert this configuration into `ResourceLimits`
    fn into_resource_limits(self) -> ResourceLimits;
}
/// Resource availability information
///
/// A point-in-time snapshot of remaining permits; compare against
/// [`ResourceLimits`] to see how much capacity is in use.
#[derive(Debug, Clone)]
pub struct ResourceAvailability {
    /// Number of download permits currently available
    pub download: usize,
    /// Number of decompression permits currently available
    pub decompression: usize,
    /// Number of installation permits currently available
    pub installation: usize,
}
impl ResourceAvailability {
    /// Check if at least one permit of any kind is available.
    #[must_use]
    pub fn has_any_available(&self) -> bool {
        [self.download, self.decompression, self.installation]
            .iter()
            .any(|&permits| permits > 0)
    }
    /// Check if every permit pool is at (or above) its configured capacity,
    /// i.e. nothing is currently in use.
    #[must_use]
    pub fn all_available(&self, limits: &ResourceLimits) -> bool {
        let downloads_idle = self.download >= limits.concurrent_downloads;
        let decompressions_idle = self.decompression >= limits.concurrent_decompressions;
        let installations_idle = self.installation >= limits.concurrent_installations;
        downloads_idle && decompressions_idle && installations_idle
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/guard.rs | crates/config/src/guard.rs | //! Guard configuration for verification and integrity checking
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
/// Symlink handling policy for guard operations
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SymlinkPolicyConfig {
    /// Verify symlinks strictly - fail on any symlink issues
    Strict,
    /// Be lenient with bootstrap directories like /opt/pm/live/bin
    LenientBootstrap,
    /// Be lenient with all symlinks - log issues but don't fail
    LenientAll,
    /// Ignore symlinks entirely - skip symlink verification
    Ignore,
}
impl Default for SymlinkPolicyConfig {
    /// Lenient only for bootstrap directories by default.
    fn default() -> Self {
        Self::LenientBootstrap
    }
}
/// How to handle discrepancies found during verification
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum DiscrepancyHandling {
    /// Fail the operation when discrepancies are found
    FailFast,
    /// Report discrepancies but continue operation
    ReportOnly,
    /// Automatically heal discrepancies when possible
    AutoHeal,
    /// Auto-heal but fail if healing is not possible
    AutoHealOrFail,
}
impl Default for DiscrepancyHandling {
    /// Surface problems immediately by default; healing is opt-in.
    fn default() -> Self {
        Self::FailFast
    }
}
/// Policy for handling user files
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum UserFilePolicy {
    /// Preserve user-created files
    Preserve,
    /// Remove user-created files
    Remove,
    /// Backup user-created files before removal
    Backup,
}
impl Default for UserFilePolicy {
    /// Never destroy user data unless explicitly configured to.
    fn default() -> Self {
        Self::Preserve
    }
}
/// Performance configuration for guard operations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceConfigToml {
    /// Toggle for progressive verification (serde default: `true`).
    #[serde(default = "default_progressive_verification")]
    pub progressive_verification: bool,
    /// Upper bound on concurrent verification tasks (serde default: 8).
    #[serde(default = "default_max_concurrent_tasks")]
    pub max_concurrent_tasks: usize,
    /// Timeout for a verification run, in seconds (serde default: 300).
    #[serde(default = "default_verification_timeout_seconds")]
    pub verification_timeout_seconds: u64,
}
impl Default for PerformanceConfigToml {
    /// Mirrors the serde field defaults so `Default::default()` and a
    /// missing TOML section produce the same configuration.
    fn default() -> Self {
        Self {
            progressive_verification: default_progressive_verification(),
            max_concurrent_tasks: default_max_concurrent_tasks(),
            verification_timeout_seconds: default_verification_timeout_seconds(),
        }
    }
}
/// Guard-specific configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardConfigToml {
    /// Symlink verification policy (default: lenient for bootstrap dirs).
    #[serde(default)]
    pub symlink_policy: SymlinkPolicyConfig,
    /// Directories whose symlinks get lenient treatment
    /// (serde default: /opt/pm/live/bin and /opt/pm/live/sbin).
    #[serde(default = "default_lenient_symlink_directories")]
    pub lenient_symlink_directories: Vec<PathBuf>,
}
impl Default for GuardConfigToml {
    /// Mirrors the serde field defaults for a missing section.
    fn default() -> Self {
        Self {
            symlink_policy: SymlinkPolicyConfig::default(),
            lenient_symlink_directories: default_lenient_symlink_directories(),
        }
    }
}
/// Verification configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VerificationConfig {
    /// Master switch for verification.
    ///
    /// NOTE(review): the serde default (`default_enabled`) is `true`, but
    /// `Default::default()` below sets `false` — a config file missing this
    /// key and a programmatic default disagree; confirm which is intended.
    #[serde(default = "default_enabled")]
    pub enabled: bool,
    #[serde(default = "default_verification_level")]
    pub level: String, // "quick", "standard", or "full"
    /// What to do when verification finds problems (default: fail fast).
    #[serde(default)]
    pub discrepancy_handling: DiscrepancyHandling,
    #[serde(default = "default_orphaned_file_action")]
    pub orphaned_file_action: String, // "remove", "preserve", or "backup"
    /// Where orphaned files are moved when the action is "backup".
    #[serde(default = "default_orphaned_backup_dir")]
    pub orphaned_backup_dir: PathBuf,
    /// Policy for user-created files (default: preserve).
    #[serde(default)]
    pub user_file_policy: UserFilePolicy,
    // Enhanced guard configuration
    #[serde(default)]
    pub guard: GuardConfigToml,
    #[serde(default)]
    pub performance: PerformanceConfigToml,
}
impl Default for VerificationConfig {
    fn default() -> Self {
        Self {
            enabled: false, // Disabled by default during development
            level: "standard".to_string(),
            discrepancy_handling: DiscrepancyHandling::default(),
            orphaned_file_action: "preserve".to_string(),
            orphaned_backup_dir: PathBuf::from("/opt/pm/orphaned-backup"),
            user_file_policy: UserFilePolicy::default(),
            guard: GuardConfigToml::default(),
            performance: PerformanceConfigToml::default(),
        }
    }
}
/// Simplified symlink policy for top-level guard configuration
///
/// Unlike [`SymlinkPolicyConfig`], this collapses the two lenient variants
/// into one; lenient directories are listed separately.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum GuardSymlinkPolicy {
    /// Verify symlinks strictly - fail on any symlink issues
    Strict,
    /// Be lenient with symlinks - log issues but don't fail
    Lenient,
    /// Ignore symlinks entirely - skip symlink verification
    Ignore,
}
impl Default for GuardSymlinkPolicy {
    /// Lenient by default: log symlink issues without failing.
    fn default() -> Self {
        Self::Lenient
    }
}
/// Performance configuration for top-level guard
///
/// Field-for-field identical to [`PerformanceConfigToml`]; kept as a
/// separate type for the top-level `[guard]` layout.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardPerformanceConfig {
    /// Toggle for progressive verification (serde default: `true`).
    #[serde(default = "default_progressive_verification")]
    pub progressive_verification: bool,
    /// Upper bound on concurrent verification tasks (serde default: 8).
    #[serde(default = "default_max_concurrent_tasks")]
    pub max_concurrent_tasks: usize,
    /// Timeout for a verification run, in seconds (serde default: 300).
    #[serde(default = "default_verification_timeout_seconds")]
    pub verification_timeout_seconds: u64,
}
impl Default for GuardPerformanceConfig {
    /// Mirrors the serde field defaults.
    fn default() -> Self {
        Self {
            progressive_verification: default_progressive_verification(),
            max_concurrent_tasks: default_max_concurrent_tasks(),
            verification_timeout_seconds: default_verification_timeout_seconds(),
        }
    }
}
/// Directory configuration for lenient symlink handling (array of tables approach)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardDirectoryConfig {
    /// Path of a directory whose symlinks are treated leniently.
    pub path: PathBuf,
}
/// Store verification configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StoreVerificationConfig {
    /// Master switch for store verification (serde default: `true`).
    #[serde(default = "default_store_verification_enabled")]
    pub enabled: bool,
    /// Re-verify objects older than this many days (serde default: 30).
    #[serde(default = "default_store_max_age_days")]
    pub max_age_days: u32,
    /// Maximum verification attempts per object (serde default: 3).
    #[serde(default = "default_store_max_attempts")]
    pub max_attempts: u32,
    /// Number of objects processed per batch (serde default: 100).
    #[serde(default = "default_store_batch_size")]
    pub batch_size: u32,
    /// Maximum concurrent verification workers (serde default: 4).
    #[serde(default = "default_store_max_concurrency")]
    pub max_concurrency: usize,
    /// Quarantine failing objects instead of leaving them in place
    /// (serde default: `true`).
    #[serde(default = "default_store_enable_quarantine")]
    pub enable_quarantine: bool,
    /// When true, after successful healing the guard will synchronize DB refcounts
    /// from the active state only (packages and file entries).
    #[serde(default = "default_store_sync_refcounts")]
    pub sync_refcounts: bool,
}
impl Default for StoreVerificationConfig {
    /// Mirrors the serde field defaults.
    fn default() -> Self {
        Self {
            enabled: default_store_verification_enabled(),
            max_age_days: default_store_max_age_days(),
            max_attempts: default_store_max_attempts(),
            batch_size: default_store_batch_size(),
            max_concurrency: default_store_max_concurrency(),
            enable_quarantine: default_store_enable_quarantine(),
            sync_refcounts: default_store_sync_refcounts(),
        }
    }
}
/// Refcount synchronization after healing is opt-in.
fn default_store_sync_refcounts() -> bool {
    false
}
/// Top-level guard configuration (alternative to verification.guard approach)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GuardConfiguration {
    /// Master switch for the guard (serde default: `true`).
    #[serde(default = "default_guard_enabled")]
    pub enabled: bool,
    #[serde(default = "default_verification_level")]
    pub verification_level: String, // "quick", "standard", or "full"
    /// What to do when verification finds problems (default: fail fast).
    #[serde(default)]
    pub discrepancy_handling: DiscrepancyHandling,
    /// Symlink verification policy (default: lenient).
    #[serde(default)]
    pub symlink_policy: GuardSymlinkPolicy,
    #[serde(default = "default_orphaned_file_action")]
    pub orphaned_file_action: String, // "remove", "preserve", or "backup"
    /// Where orphaned files are moved when the action is "backup".
    #[serde(default = "default_orphaned_backup_dir")]
    pub orphaned_backup_dir: PathBuf,
    /// Policy for user-created files (default: preserve).
    #[serde(default)]
    pub user_file_policy: UserFilePolicy,
    // Nested configuration sections
    #[serde(default)]
    pub performance: GuardPerformanceConfig,
    #[serde(default)]
    pub store_verification: StoreVerificationConfig,
    #[serde(default = "default_guard_lenient_symlink_directories")]
    pub lenient_symlink_directories: Vec<GuardDirectoryConfig>,
    // Legacy compatibility fields - deprecated
    // These are only read (never written back) so old config files keep
    // parsing; `None` means "not present in the file".
    #[serde(skip_serializing_if = "Option::is_none")]
    pub auto_heal: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fail_on_discrepancy: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub preserve_user_files: Option<bool>,
}
impl Default for GuardConfiguration {
    /// Mirrors the serde defaults; legacy fields default to absent.
    fn default() -> Self {
        Self {
            enabled: default_guard_enabled(),
            verification_level: default_verification_level(),
            discrepancy_handling: DiscrepancyHandling::default(),
            symlink_policy: GuardSymlinkPolicy::default(),
            orphaned_file_action: default_orphaned_file_action(),
            orphaned_backup_dir: default_orphaned_backup_dir(),
            user_file_policy: UserFilePolicy::default(),
            performance: GuardPerformanceConfig::default(),
            store_verification: StoreVerificationConfig::default(),
            lenient_symlink_directories: default_guard_lenient_symlink_directories(),
            auto_heal: None,
            fail_on_discrepancy: None,
            preserve_user_files: None,
        }
    }
}
impl VerificationConfig {
    /// Check if should fail on discrepancy (for backward compatibility)
    #[must_use]
    pub fn should_fail_on_discrepancy(&self) -> bool {
        match self.discrepancy_handling {
            DiscrepancyHandling::FailFast | DiscrepancyHandling::AutoHealOrFail => true,
            DiscrepancyHandling::ReportOnly | DiscrepancyHandling::AutoHeal => false,
        }
    }
    /// Check if should auto-heal (for backward compatibility)
    #[must_use]
    pub fn should_auto_heal(&self) -> bool {
        match self.discrepancy_handling {
            DiscrepancyHandling::AutoHeal | DiscrepancyHandling::AutoHealOrFail => true,
            DiscrepancyHandling::FailFast | DiscrepancyHandling::ReportOnly => false,
        }
    }
}
impl GuardConfiguration {
    /// Check if should fail on discrepancy (for backward compatibility)
    #[must_use]
    pub fn should_fail_on_discrepancy(&self) -> bool {
        match self.discrepancy_handling {
            DiscrepancyHandling::FailFast | DiscrepancyHandling::AutoHealOrFail => true,
            DiscrepancyHandling::ReportOnly | DiscrepancyHandling::AutoHeal => false,
        }
    }
    /// Check if should auto-heal (for backward compatibility)
    #[must_use]
    pub fn should_auto_heal(&self) -> bool {
        match self.discrepancy_handling {
            DiscrepancyHandling::AutoHeal | DiscrepancyHandling::AutoHealOrFail => true,
            DiscrepancyHandling::FailFast | DiscrepancyHandling::ReportOnly => false,
        }
    }
}
// Default value functions for serde
/// Progressive verification is on unless explicitly disabled.
fn default_progressive_verification() -> bool {
    true
}
/// Default concurrency for verification tasks.
fn default_max_concurrent_tasks() -> usize {
    8
}
/// Default per-run verification timeout.
fn default_verification_timeout_seconds() -> u64 {
    300 // 5 minutes
}
fn default_guard_enabled() -> bool {
    true // Enable guard by default for state verification
}
/// Default lenient directories in the array-of-tables form used by the
/// top-level guard configuration.
///
/// Derived from [`default_lenient_symlink_directories`] so the two default
/// lists can never drift apart.
fn default_guard_lenient_symlink_directories() -> Vec<GuardDirectoryConfig> {
    default_lenient_symlink_directories()
        .into_iter()
        .map(|path| GuardDirectoryConfig { path })
        .collect()
}
/// Bootstrap directories whose symlinks get lenient treatment by default.
fn default_lenient_symlink_directories() -> Vec<PathBuf> {
    vec![
        PathBuf::from("/opt/pm/live/bin"),
        PathBuf::from("/opt/pm/live/sbin"),
    ]
}
fn default_enabled() -> bool {
    true // Enable verification by default for state integrity
}
/// Default verification depth ("quick" < "standard" < "full").
fn default_verification_level() -> String {
    "standard".to_string()
}
/// Orphaned files are kept rather than deleted by default.
fn default_orphaned_file_action() -> String {
    "preserve".to_string()
}
/// Default destination for the "backup" orphaned-file action.
fn default_orphaned_backup_dir() -> PathBuf {
    PathBuf::from("/opt/pm/orphaned-backup")
}
fn default_store_verification_enabled() -> bool {
    true
}
/// Objects older than this many days are re-verified.
fn default_store_max_age_days() -> u32 {
    30
}
/// Verification attempts per store object before giving up.
fn default_store_max_attempts() -> u32 {
    3
}
/// Store objects processed per verification batch.
fn default_store_batch_size() -> u32 {
    100
}
/// Concurrent store-verification workers.
fn default_store_max_concurrency() -> usize {
    4
}
/// Failing store objects are quarantined by default.
fn default_store_enable_quarantine() -> bool {
    true
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/builder.rs | crates/config/src/builder.rs | //! Builder configuration for package building and compilation
use serde::{de::IgnoredAny, Deserialize, Serialize};
use sps2_errors::{ConfigError, Error};
use std::path::{Path, PathBuf};
use tokio::fs;
/// Complete builder configuration
///
/// Each section is optional in the TOML file; missing sections fall back to
/// their `Default` implementations via `#[serde(default)]`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct BuilderConfig {
    /// Core build execution settings (jobs, timeout, isolation).
    #[serde(default)]
    pub build: BuildSettings,
    /// Package generation settings (SBOM, signing).
    #[serde(default)]
    pub packaging: PackagingSettings,
    /// Hermetic-build environment constraints.
    #[serde(default)]
    pub environment: EnvironmentSettings,
    /// Caching and build-system performance settings.
    #[serde(default)]
    pub performance: PerformanceSettings,
    /// Command allow-lists and validation policies.
    #[serde(default)]
    pub security: SecuritySettings,
}
/// Core build execution settings (global defaults and policies)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildSettings {
    #[serde(default = "default_build_jobs")]
    pub build_jobs: usize, // 0 = auto-detect, can be overridden per recipe
    #[serde(default = "default_timeout_seconds")]
    pub timeout_seconds: u64, // Default timeout, can be overridden per recipe
    #[serde(default = "default_build_root")]
    pub build_root: PathBuf, // Global build directory
    /// Remove build artifacts after a successful build.
    #[serde(default = "default_cleanup_on_success")]
    pub cleanup_on_success: bool,
    /// Treat recipe/validation warnings as errors when enabled.
    #[serde(default = "default_strict_mode")]
    pub strict_mode: bool,
    // Recipe environment defaults (can be overridden per recipe)
    #[serde(default = "default_isolation_level")]
    pub default_isolation_level: String, // "none", "default", "enhanced", "hermetic"
    #[serde(default = "default_allow_network")]
    pub default_allow_network: bool, // Default network access policy
}
impl Default for BuildSettings {
    /// Hard-coded defaults for programmatic construction.
    ///
    /// NOTE(review): the serde defaults come from `default_*` functions not
    /// visible here; presumably they match these values — confirm.
    fn default() -> Self {
        Self {
            build_jobs: 0, // 0 = auto-detect
            timeout_seconds: 3600, // 1 hour
            build_root: PathBuf::from("/opt/pm/build"),
            cleanup_on_success: true,
            strict_mode: true,
            default_isolation_level: "default".to_string(),
            default_allow_network: false,
        }
    }
}
/// Package generation settings
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PackagingSettings {
    #[serde(default)]
    pub sbom: SbomSettings,
    #[serde(default)]
    pub signing: SigningSettings,
    /// Legacy compression configuration retained for backward compatibility
    // `IgnoredAny` accepts (and discards) an old `compression` table so
    // existing config files keep parsing; `skip_serializing` ensures the
    // field is never written back out.
    #[serde(default, alias = "compression", skip_serializing)]
    pub legacy_compression: Option<IgnoredAny>,
}
/// SBOM (Software Bill of Materials) generation settings
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SbomSettings {
    /// Generate an SBOM for each built package.
    #[serde(default = "default_sbom_enabled")]
    pub enabled: bool,
    #[serde(default = "default_sbom_format")]
    pub format: String, // "spdx-json", "cyclone-dx", etc.
    /// Embed build metadata into the SBOM.
    #[serde(default = "default_include_build_info")]
    pub include_build_info: bool,
    /// List package dependencies in the SBOM.
    #[serde(default = "default_include_dependencies")]
    pub include_dependencies: bool,
    /// Patterns/components to exclude from the SBOM (empty by default).
    #[serde(default)]
    pub exclusions: Vec<String>,
}
impl Default for SbomSettings {
    /// SBOM generation is on by default, in SPDX JSON format.
    fn default() -> Self {
        Self {
            enabled: true,
            format: "spdx-json".to_string(),
            include_build_info: true,
            include_dependencies: true,
            exclusions: Vec::new(),
        }
    }
}
/// Code signing settings
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SigningSettings {
    /// Master switch for code signing (off by default).
    #[serde(default = "default_signing_enabled")]
    pub enabled: bool,
    /// Signing identity to use; `None` means unset.
    #[serde(default)]
    pub identity: Option<String>,
    /// Keychain to search for the identity; `None` uses the default.
    #[serde(default)]
    pub keychain_path: Option<PathBuf>,
    /// Enable the hardened runtime when signing.
    #[serde(default = "default_enable_hardened_runtime")]
    pub enable_hardened_runtime: bool,
    /// Optional entitlements plist applied during signing.
    #[serde(default)]
    pub entitlements_file: Option<PathBuf>,
}
impl Default for SigningSettings {
    /// Signing is disabled by default; hardened runtime is pre-enabled for
    /// when it is turned on.
    fn default() -> Self {
        Self {
            enabled: false, // Disabled by default
            identity: None,
            keychain_path: None,
            enable_hardened_runtime: true,
            entitlements_file: None,
        }
    }
}
/// Environment constraints and policies for hermetic builds
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnvironmentSettings {
    /// Environment variables allowed to pass into the build.
    #[serde(default)]
    pub allowed_env_vars: Vec<String>,
    /// Paths the build may read from.
    #[serde(default)]
    pub allowed_read_paths: Vec<PathBuf>,
    /// Paths the build may write to.
    #[serde(default)]
    pub allowed_write_paths: Vec<PathBuf>,
    /// Extra variables injected into the build environment.
    #[serde(default)]
    pub custom_env_vars: std::collections::HashMap<String, String>,
}
impl Default for EnvironmentSettings {
    /// Populates the allow-lists from `default_allowed_*` helpers (defined
    /// elsewhere in this file); no custom variables by default.
    ///
    /// NOTE(review): the serde field defaults are empty collections, unlike
    /// this impl — a section present but missing these keys deserializes to
    /// empty allow-lists; confirm that asymmetry is intended.
    fn default() -> Self {
        Self {
            allowed_env_vars: default_allowed_env_vars(),
            allowed_read_paths: default_allowed_read_paths(),
            allowed_write_paths: default_allowed_write_paths(),
            custom_env_vars: std::collections::HashMap::new(),
        }
    }
}
/// Performance and caching settings
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PerformanceSettings {
    /// Compiler-cache configuration (ccache/sccache).
    #[serde(default)]
    pub cache: CacheSettings,
    /// Build-system tuning (parallelism, extra tool arguments).
    #[serde(default)]
    pub build_system: BuildSystemSettings,
}
/// Cache configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheSettings {
    /// Master switch for compiler caching.
    #[serde(default = "default_cache_enabled")]
    pub enabled: bool,
    #[serde(default = "default_cache_type")]
    pub cache_type: String, // "ccache", "sccache", "none"
    /// Cache directory; `None`/auto-detect via `default_cache_dir`.
    #[serde(default = "default_cache_dir")]
    pub cache_dir: Option<PathBuf>,
    /// Maximum cache size in megabytes.
    #[serde(default = "default_cache_size_mb")]
    pub max_size_mb: u64,
    /// Enable a distributed (shared) cache backend.
    #[serde(default = "default_distributed_cache")]
    pub distributed: bool,
}
impl Default for CacheSettings {
    /// ccache enabled with a 5 GB cap and auto-detected directory.
    fn default() -> Self {
        Self {
            enabled: true,
            cache_type: "ccache".to_string(),
            cache_dir: default_cache_dir(), // Auto-detect
            max_size_mb: 5000, // 5GB
            distributed: false,
        }
    }
}
/// Build system specific settings
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildSystemSettings {
    /// Allow parallel compilation jobs.
    #[serde(default = "default_parallel_builds")]
    pub parallel_builds: bool,
    /// Prefer building in a separate directory from the source tree.
    #[serde(default = "default_out_of_source")]
    pub prefer_out_of_source: bool,
    /// Extra arguments appended to cmake invocations.
    #[serde(default)]
    pub cmake_args: Vec<String>,
    /// Extra arguments appended to ./configure invocations.
    #[serde(default)]
    pub configure_args: Vec<String>,
    /// Extra arguments appended to make invocations.
    #[serde(default)]
    pub make_args: Vec<String>,
}
impl Default for BuildSystemSettings {
    /// Parallel, out-of-source builds with no extra tool arguments.
    fn default() -> Self {
        Self {
            parallel_builds: true,
            prefer_out_of_source: true,
            cmake_args: Vec::new(),
            configure_args: Vec::new(),
            make_args: Vec::new(),
        }
    }
}
/// Security and validation settings
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct SecuritySettings {
    /// Allow/deny lists for commands a recipe may run.
    #[serde(default)]
    pub commands: CommandsConfig,
    /// Validation strictness policies.
    #[serde(default)]
    pub validation: ValidationConfig,
}
/// Build commands configuration
///
/// See `BuilderConfig::is_command_allowed`: `disallowed` wins over both
/// allow lists; `additional_allowed` extends `allowed`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommandsConfig {
    /// Base allow-list of executable commands.
    #[serde(default = "default_allowed_commands")]
    pub allowed: Vec<String>,
    /// Allowed shell invocation patterns (prefix-matched).
    #[serde(default = "default_allowed_shell")]
    pub allowed_shell: Vec<String>,
    /// User-supplied additions to the allow-list.
    #[serde(default)]
    pub additional_allowed: Vec<String>,
    /// Commands explicitly forbidden even if otherwise allowed.
    #[serde(default)]
    pub disallowed: Vec<String>,
}
impl Default for CommandsConfig {
    /// Default allow-lists come from helpers defined elsewhere in this file;
    /// no user additions or denials.
    fn default() -> Self {
        Self {
            allowed: default_allowed_commands(),
            allowed_shell: default_allowed_shell(),
            additional_allowed: Vec::new(),
            disallowed: Vec::new(),
        }
    }
}
/// Validation configuration for build commands
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidationConfig {
    /// Overall command validation mode.
    #[serde(default)]
    pub mode: ValidationMode,
    /// Whether shell expansion is permitted in commands.
    #[serde(default)]
    pub shell_expansion: ShellExpansionPolicy,
    /// Validation mode applied to paths.
    #[serde(default)]
    pub path_validation: ValidationMode,
    /// Validation mode applied to signatures.
    #[serde(default)]
    pub signature_validation: ValidationMode,
}
impl Default for ValidationConfig {
    /// Everything strict, shell expansion disabled — the safe baseline.
    fn default() -> Self {
        Self {
            mode: ValidationMode::Strict,
            shell_expansion: ShellExpansionPolicy::Disabled,
            path_validation: ValidationMode::Strict,
            signature_validation: ValidationMode::Strict,
        }
    }
}
/// Validation mode for build operations
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ValidationMode {
    /// Strict validation - fail on any validation errors
    Strict,
    /// Lenient validation - log warnings but continue
    Lenient,
    /// Disabled validation - skip all validation checks
    Disabled,
}
impl Default for ValidationMode {
    /// Strict by default: validation errors fail the build.
    fn default() -> Self {
        Self::Strict
    }
}
/// Shell expansion policy
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ShellExpansionPolicy {
    /// Allow shell expansion
    Enabled,
    /// Disable shell expansion
    Disabled,
}
impl Default for ShellExpansionPolicy {
    /// Disabled by default to avoid injection via expansion.
    fn default() -> Self {
        Self::Disabled
    }
}
impl BuilderConfig {
/// Get the default builder config file path
///
/// # Errors
///
/// Returns an error if the home directory cannot be determined.
pub fn default_path() -> Result<PathBuf, Error> {
let home_dir = dirs::home_dir().ok_or_else(|| ConfigError::NotFound {
path: "home directory".to_string(),
})?;
Ok(home_dir
.join(".config")
.join("sps2")
.join("builder.config.toml"))
}
/// Load builder configuration from file
///
/// # Errors
///
/// Returns an error if the file cannot be read or if the file contents
/// contain invalid TOML syntax that cannot be parsed.
pub async fn load_from_file(path: &Path) -> Result<Self, Error> {
let contents = fs::read_to_string(path)
.await
.map_err(|_| ConfigError::NotFound {
path: path.display().to_string(),
})?;
toml::from_str(&contents).map_err(|e| {
ConfigError::ParseError {
message: e.to_string(),
}
.into()
})
}
/// Load builder configuration with fallback to defaults
///
/// If the config file doesn't exist, creates it with defaults.
///
/// # Errors
///
/// Returns an error if the configuration file exists but cannot be read
/// or contains invalid TOML syntax.
pub async fn load() -> Result<Self, Error> {
let config_path = Self::default_path()?;
if config_path.exists() {
Self::load_from_file(&config_path).await
} else {
// Create default config and save it
let config = Self::default();
if let Err(e) = config.save().await {
tracing::warn!("Failed to save default builder config: {}", e);
}
Ok(config)
}
}
/// Load builder configuration from an optional path or use default
///
/// If path is provided, loads from that file.
/// If path is None, uses the default loading behavior.
///
/// # Errors
///
/// Returns an error if the config file cannot be read or parsed
pub async fn load_or_default(path: &Option<PathBuf>) -> Result<Self, Error> {
match path {
Some(config_path) => Self::load_from_file(config_path).await,
None => Self::load().await,
}
}
/// Save builder configuration to the default location
///
/// # Errors
///
/// Returns an error if the configuration cannot be serialized
/// or if the file cannot be written.
pub async fn save(&self) -> Result<(), Error> {
let config_path = Self::default_path()?;
self.save_to(&config_path).await
}
/// Save builder configuration to a specific path
///
/// # Errors
///
/// Returns an error if the configuration cannot be serialized
/// or if the file cannot be written.
pub async fn save_to(&self, path: &Path) -> Result<(), Error> {
    // Make sure the target directory exists before writing.
    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir)
            .await
            .map_err(|e| ConfigError::WriteError {
                path: dir.display().to_string(),
                error: e.to_string(),
            })?;
    }
    // Pretty-printed TOML body.
    let body = toml::to_string_pretty(self).map_err(|e| ConfigError::SerializeError {
        error: e.to_string(),
    })?;
    // Prepend the explanatory header comment.
    let rendered = format!(
        "# sps2 Builder Configuration File\n\
         # This file was automatically generated.\n\
         # You can modify it to customize build behavior and security settings.\n\
         # This file contains build commands, validation settings, and security policies.\n\n\
         {body}"
    );
    fs::write(path, rendered)
        .await
        .map_err(|e| ConfigError::WriteError {
            path: path.display().to_string(),
            error: e.to_string(),
        })?;
    Ok(())
}
/// Get all allowed commands (combining allowed and `additional_allowed`)
///
/// The `allowed` entries come first, followed by `additional_allowed`,
/// matching the concatenation order users would expect from the config.
#[must_use]
pub fn get_allowed_commands(&self) -> Vec<String> {
    self.security
        .commands
        .allowed
        .iter()
        .chain(self.security.commands.additional_allowed.iter())
        .cloned()
        .collect()
}
/// Check if a command is allowed
///
/// A command is allowed when it is not in the explicit deny list and
/// appears in either the `allowed` or `additional_allowed` list.
#[must_use]
pub fn is_command_allowed(&self, command: &str) -> bool {
    let commands = &self.security.commands;
    // The deny list always wins over any allow entry.
    if commands.disallowed.iter().any(|c| c == command) {
        return false;
    }
    // Borrow-based membership test: unlike calling `get_allowed_commands`
    // (which clones both allow lists into a fresh Vec) this allocates
    // nothing and short-circuits on the first match.
    commands
        .allowed
        .iter()
        .chain(commands.additional_allowed.iter())
        .any(|c| c == command)
}
/// Check if a shell pattern is allowed
///
/// A pattern is allowed when any configured `allowed_shell` entry is a
/// prefix of it.
#[must_use]
pub fn is_shell_pattern_allowed(&self, pattern: &str) -> bool {
    for prefix in &self.security.commands.allowed_shell {
        if pattern.starts_with(prefix.as_str()) {
            return true;
        }
    }
    false
}
/// Validate all configuration settings
///
/// # Errors
///
/// Returns an error if any configuration values are invalid
pub fn validate(&self) -> Result<(), Error> {
    // A zero timeout would make every build fail immediately.
    if self.build.timeout_seconds == 0 {
        return Err(ConfigError::InvalidValue {
            field: "build.timeout_seconds".to_string(),
            value: "0".to_string(),
        }
        .into());
    }
    // Environment paths must be absolute. Read paths are checked before
    // write paths, preserving the original error ordering.
    let path_groups = [
        (
            "environment.allowed_read_paths",
            &self.environment.allowed_read_paths,
        ),
        (
            "environment.allowed_write_paths",
            &self.environment.allowed_write_paths,
        ),
    ];
    for (field, paths) in path_groups {
        if let Some(relative) = paths.iter().find(|p| !p.is_absolute()) {
            return Err(ConfigError::InvalidValue {
                field: field.to_string(),
                value: relative.display().to_string(),
            }
            .into());
        }
    }
    Ok(())
}
}
// Default value functions for serde
//
// Each function backs a `#[serde(default = "...")]` attribute so a key
// missing from builder.config.toml falls back to a sensible value.
fn default_build_jobs() -> usize {
    0 // 0 = auto-detect
}
fn default_timeout_seconds() -> u64 {
    3600 // 1 hour
}
fn default_build_root() -> PathBuf {
    PathBuf::from("/opt/pm/build")
}
fn default_cleanup_on_success() -> bool {
    true
}
fn default_strict_mode() -> bool {
    true
}
fn default_isolation_level() -> String {
    "default".to_string()
}
fn default_allow_network() -> bool {
    false
}
fn default_sbom_enabled() -> bool {
    true
}
fn default_sbom_format() -> String {
    "spdx-json".to_string()
}
fn default_include_build_info() -> bool {
    true
}
fn default_include_dependencies() -> bool {
    true
}
fn default_signing_enabled() -> bool {
    false
}
fn default_enable_hardened_runtime() -> bool {
    true
}
// Environment variables passed through to builds by default: basic shell
// context plus the common compiler/linker configuration variables.
fn default_allowed_env_vars() -> Vec<String> {
    vec![
        "PATH".to_string(),
        "HOME".to_string(),
        "USER".to_string(),
        "SHELL".to_string(),
        "TERM".to_string(),
        "LANG".to_string(),
        "LC_ALL".to_string(),
        "CC".to_string(),
        "CXX".to_string(),
        "CFLAGS".to_string(),
        "CXXFLAGS".to_string(),
        "LDFLAGS".to_string(),
        "PKG_CONFIG_PATH".to_string(),
    ]
}
// Filesystem prefixes builds may read from by default (system roots plus
// the sps2 prefix).
fn default_allowed_read_paths() -> Vec<PathBuf> {
    vec![
        PathBuf::from("/usr"),
        PathBuf::from("/opt/pm"),
        PathBuf::from("/System"),
        PathBuf::from("/Library"),
    ]
}
// Filesystem prefixes builds may write to by default.
fn default_allowed_write_paths() -> Vec<PathBuf> {
    vec![
        PathBuf::from("/opt/pm/build"),
        PathBuf::from("/opt/pm/store"),
        PathBuf::from("/tmp"),
    ]
}
fn default_cache_enabled() -> bool {
    true
}
fn default_cache_type() -> String {
    "ccache".to_string()
}
// None means "let the cache tool pick its own directory".
fn default_cache_dir() -> Option<PathBuf> {
    None
}
fn default_cache_size_mb() -> u64 {
    5000 // 5GB
}
fn default_distributed_cache() -> bool {
    false
}
fn default_parallel_builds() -> bool {
    true
}
fn default_out_of_source() -> bool {
    true
}
// Full default command allow-list, assembled from the per-category
// helper functions below. Category order is fixed and significant only
// for readability of the generated config.
fn default_allowed_commands() -> Vec<String> {
    [
        build_tools_commands(),
        configure_scripts_commands(),
        file_operations_commands(),
        text_processing_commands(),
        archive_tools_commands(),
        shell_builtins_commands(),
        development_tools_commands(),
        platform_specific_commands(),
    ]
    .into_iter()
    .flatten()
    .collect()
}
/// Build tools and compilers
fn build_tools_commands() -> Vec<String> {
    [
        "make", "cmake", "meson", "ninja", "cargo", "go", "python", "python3", "pip", "pip3",
        "npm", "yarn", "pnpm", "node", "gcc", "g++", "clang", "clang++", "cc", "c++", "ld", "ar",
        "ranlib", "strip", "objcopy",
    ]
    .iter()
    .map(|name| (*name).to_string())
    .collect()
}
/// Configure scripts and build bootstrappers
fn configure_scripts_commands() -> Vec<String> {
    // Relative script invocations first, then the autotools family.
    [
        "./configure",
        "configure",
        "./Configure",
        "./config",
        "./bootstrap",
        "./autogen.sh",
        "./buildconf",
        "./waf",
        "./setup.py",
        "./gradlew",
        "./mvnw",
        "./build.sh",
        "./build",
        "./install.sh",
        "./compile",
        "autoreconf",
        "autoconf",
        "automake",
        "libtool",
        "glibtoolize",
        "libtoolize",
    ]
    .map(String::from)
    .to_vec()
}
/// File operations
fn file_operations_commands() -> Vec<String> {
    let names = [
        "cp", "mv", "mkdir", "rmdir", "touch", "ln", "install", "chmod", "chown", "rm", "rsync",
    ];
    names.iter().copied().map(String::from).collect()
}
/// Text processing utilities
fn text_processing_commands() -> Vec<String> {
    [
        "sed", "awk", "grep", "egrep", "fgrep", "cut", "tr", "sort", "uniq", "head", "tail", "cat",
        "echo", "printf", "test", "[",
    ]
    .iter()
    .map(|name| name.to_string())
    .collect()
}
/// Archive and compression tools
fn archive_tools_commands() -> Vec<String> {
    let mut commands = Vec::with_capacity(9);
    for name in [
        "tar", "gzip", "gunzip", "bzip2", "bunzip2", "xz", "unxz", "zip", "unzip",
    ] {
        commands.push(name.to_string());
    }
    commands
}
/// Shell built-ins and control flow
fn shell_builtins_commands() -> Vec<String> {
    // Shells and builtins, control-flow keywords, then version control tools.
    [
        "sh",
        "bash",
        "/bin/sh",
        "/bin/bash",
        "cd",
        "pwd",
        "export",
        "source",
        ".",
        "env",
        "set",
        "unset",
        "true",
        "false",
        "if",
        "then",
        "else",
        "elif",
        "fi",
        "for",
        "while",
        "do",
        "done",
        "case",
        "esac",
        "return",
        "exit",
        "shift",
        "break",
        "continue",
        // Version control
        "git",
        "hg",
        "svn",
    ]
    .iter()
    .copied()
    .map(String::from)
    .collect()
}
/// Development and debugging tools
fn development_tools_commands() -> Vec<String> {
    [
        "pkg-config",
        "pkgconf",
        "ldconfig",
        "patch",
        "diff",
        "which",
        "whereis",
        "dirname",
        "basename",
        "readlink",
        "realpath",
        "expr",
        "xargs",
        "tee",
        "time",
        "nproc",
        "getconf",
        "file",
        // Test runners
        "./test.sh",
        "./run-tests.sh",
        "./check.sh",
        "ctest",
    ]
    .into_iter()
    .map(str::to_owned)
    .collect()
}
/// Platform-specific tools
fn platform_specific_commands() -> Vec<String> {
    [
        // Library inspection
        "ldd",
        "otool",
        "nm",
        "strings",
        "size",
        // macOS specific
        "install_name_tool",
        "codesign",
        "xcrun",
        "lipo",
    ]
    .iter()
    .map(|&name| name.to_owned())
    .collect()
}
// Shell-pattern prefixes permitted in build scripts by default. Matching
// is by prefix (see `is_shell_pattern_allowed`).
fn default_allowed_shell() -> Vec<String> {
    [
        // Common build patterns
        "mkdir -p",
        "test -f",
        "test -d",
        "test -e",
        "test -x",
        "test -z",
        "test -n",
        "[ -f",
        "[ -d",
        "[ -e",
        "[ -x",
        "[ -z",
        "[ -n",
        "if [",
        "if test",
        "for file in",
        "for dir in",
        "for i in",
        "find . -name",
        "find . -type",
        "echo",
        "printf",
        "cd ${DESTDIR}",
        "cd ${PREFIX}",
        "cd ${BUILD_DIR}",
        "ln -s",
        "ln -sf",
        "cp -r",
        "cp -a",
        "cp -p",
        "install -D",
        "install -m",
        "sed -i",
        "sed -e",
        // Variable assignments
        "export",
        "unset",
        // Conditionals
        "||",
        "&&",
    ]
    .iter()
    .map(|pattern| pattern.to_string())
    .collect()
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/lib.rs | crates/config/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(clippy::module_name_repetitions)]
//! Configuration management for sps2
//!
//! This crate handles loading and merging configuration from:
//! - Default values (hard-coded)
//! - Configuration file (~/.config/sps2/config.toml)
//! - Builder configuration file (~/.config/sps2/builder.config.toml)
//! - Environment variables
//! - CLI flags
pub mod builder;
pub mod constants;
pub mod core;
pub mod guard;
pub mod repository;
pub mod resources_limits;
pub mod resources_manager;
pub mod resources_semaphore;
// Re-export main types for convenience
pub use builder::BuilderConfig;
pub use constants as fixed_paths;
pub use core::{GeneralConfig, NetworkConfig, PathConfig, SecurityConfig, StateConfig};
pub use guard::{
DiscrepancyHandling, GuardConfiguration, GuardDirectoryConfig, GuardPerformanceConfig,
GuardSymlinkPolicy, PerformanceConfigToml, SymlinkPolicyConfig, UserFilePolicy,
VerificationConfig,
};
pub use repository::{Repositories, RepositoryConfig};
pub use resources_limits::{IntoResourceLimits, ResourceAvailability, ResourceLimits};
pub use resources_manager::ResourceManager;
pub use resources_semaphore::{
acquire_semaphore_permit, create_semaphore, try_acquire_semaphore_permit,
};
use serde::{Deserialize, Serialize};
use sps2_errors::{ConfigError, Error};
use sps2_types::{ColorChoice, OutputFormat};
use std::path::{Path, PathBuf};
use tokio::fs;
/// Main configuration structure
///
/// Aggregates every configuration section. Each section has its own
/// `#[serde(default)]`, so a partial (or empty) config.toml still
/// deserializes with sensible values.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Config {
    // General CLI behavior (output format, color, download parallelism).
    #[serde(default)]
    pub general: GeneralConfig,
    // Signature verification policy.
    #[serde(default)]
    pub security: SecurityConfig,
    // State retention policy.
    #[serde(default)]
    pub state: StateConfig,
    // Optional overrides for store/state/build paths.
    #[serde(default)]
    pub paths: PathConfig,
    // Network timeout/retry settings.
    #[serde(default)]
    pub network: NetworkConfig,
    // Verification (guard) settings embedded in the main file.
    #[serde(default)]
    pub verification: VerificationConfig,
    // Optional top-level guard configuration; None means "not configured".
    #[serde(default)]
    pub guard: Option<GuardConfiguration>,
    /// Builder configuration (loaded from separate file)
    // `skip`: never (de)serialized with this struct — populated by the
    // load functions from builder.config.toml.
    #[serde(skip)]
    pub builder: BuilderConfig,
    /// Repository definitions (fast/slow/stable/extras)
    #[serde(default)]
    pub repos: repository::Repositories,
    /// Content-addressable store cleanup policy
    #[serde(default)]
    pub cas: core::CasConfig,
}
impl Config {
/// Get the default config file path
///
/// Resolves to `~/.config/sps2/config.toml`.
///
/// # Errors
///
/// Returns an error if the home directory cannot be determined.
pub fn default_path() -> Result<PathBuf, Error> {
    let home = match dirs::home_dir() {
        Some(dir) => dir,
        None => {
            return Err(ConfigError::NotFound {
                path: "home directory".to_string(),
            }
            .into())
        }
    };
    Ok(home.join(".config").join("sps2").join("config.toml"))
}
/// Load configuration from file
///
/// # Errors
///
/// Returns an error if the file cannot be read or if the file contents
/// contain invalid TOML syntax that cannot be parsed.
pub async fn load_from_file(path: &Path) -> Result<Self, Error> {
    // Read failures of any kind are reported as NotFound for this path.
    let contents = fs::read_to_string(path)
        .await
        .map_err(|_| ConfigError::NotFound {
            path: path.display().to_string(),
        })?;
    let mut config: Self = toml::from_str(&contents).map_err(|e| ConfigError::ParseError {
        message: e.to_string(),
    })?;
    // Load builder config
    // The builder section lives in its own file and is marked
    // `#[serde(skip)]` on Config, so it must be filled in here.
    config.builder = BuilderConfig::load().await?;
    Ok(config)
}
/// Load configuration from file with custom builder config path
///
/// # Errors
///
/// Returns an error if the file cannot be read or if the file contents
/// contain invalid TOML syntax that cannot be parsed.
pub async fn load_from_file_with_builder(
    path: &Path,
    builder_path: &Option<PathBuf>,
) -> Result<Self, Error> {
    let raw = fs::read_to_string(path)
        .await
        .map_err(|_| ConfigError::NotFound {
            path: path.display().to_string(),
        })?;
    let mut config: Self = toml::from_str(&raw).map_err(|e| ConfigError::ParseError {
        message: e.to_string(),
    })?;
    // Builder settings live in a separate file; honor the override path
    // when one is given, otherwise fall back to the default location.
    config.builder = BuilderConfig::load_or_default(builder_path).await?;
    Ok(config)
}
/// Load configuration with fallback to defaults
///
/// If the config file doesn't exist, creates it with defaults.
///
/// # Errors
///
/// Returns an error if the configuration file exists but cannot be read
/// or contains invalid TOML syntax.
pub async fn load() -> Result<Self, Error> {
    let config_path = Self::default_path()?;
    if config_path.exists() {
        Self::load_from_file(&config_path).await
    } else {
        // Create default config and save it
        // The builder config is loaded first (creating its own default
        // file if needed) so the returned Config is fully populated.
        let builder = BuilderConfig::load().await?;
        let config = Self {
            builder,
            ..Self::default()
        };
        // Persisting the defaults is best-effort: a failed save is only
        // a warning, never an error.
        if let Err(e) = config.save().await {
            tracing::warn!("Failed to save default config: {}", e);
        }
        Ok(config)
    }
}
/// Load configuration from an optional path or use default
///
/// If path is provided, loads from that file.
/// If path is None, uses the default loading behavior.
///
/// # Errors
///
/// Returns an error if the config file cannot be read or parsed
pub async fn load_or_default(path: &Option<std::path::PathBuf>) -> Result<Self, Error> {
    if let Some(explicit) = path {
        Self::load_from_file(explicit).await
    } else {
        Self::load().await
    }
}
/// Load configuration from optional paths or use defaults
///
/// If `config_path` is provided, loads from that file.
/// If `builder_path` is provided, loads builder config from that file.
/// If paths are None, uses the default loading behavior.
///
/// # Errors
///
/// Returns an error if the config files cannot be read or parsed
pub async fn load_or_default_with_builder(
    config_path: &Option<std::path::PathBuf>,
    builder_path: &Option<std::path::PathBuf>,
) -> Result<Self, Error> {
    match config_path {
        Some(path) => Self::load_from_file_with_builder(path, builder_path).await,
        None => {
            let mut config = Self::load().await?;
            // A custom builder path still overrides the builder section
            // even when the main config comes from the default location.
            if builder_path.is_some() {
                config.builder = BuilderConfig::load_or_default(builder_path).await?;
            }
            Ok(config)
        }
    }
}
/// Merge with environment variables
///
/// Recognized variables: `SPS2_OUTPUT`, `SPS2_COLOR`, `SPS2_BUILD_JOBS`,
/// `SPS2_PARALLEL_DOWNLOADS`, plus the best-effort `SPS2_CAS_*` overrides
/// (invalid CAS values are silently ignored).
///
/// # Errors
///
/// Returns an error if environment variables contain invalid values
/// that cannot be parsed into the expected types.
pub fn merge_env(&mut self) -> Result<(), Error> {
    // SPS2_OUTPUT
    if let Ok(output) = std::env::var("SPS2_OUTPUT") {
        self.general.default_output = match output.as_str() {
            "plain" => OutputFormat::Plain,
            "tty" => OutputFormat::Tty,
            "json" => OutputFormat::Json,
            _ => {
                return Err(ConfigError::InvalidValue {
                    field: "SPS2_OUTPUT".to_string(),
                    value: output,
                }
                .into())
            }
        };
    }
    // SPS2_COLOR
    if let Ok(color) = std::env::var("SPS2_COLOR") {
        self.general.color = match color.as_str() {
            "always" => ColorChoice::Always,
            "auto" => ColorChoice::Auto,
            "never" => ColorChoice::Never,
            _ => {
                return Err(ConfigError::InvalidValue {
                    field: "SPS2_COLOR".to_string(),
                    value: color,
                }
                .into())
            }
        };
    }
    // SPS2_BUILD_JOBS
    if let Ok(jobs) = std::env::var("SPS2_BUILD_JOBS") {
        self.builder.build.build_jobs =
            jobs.parse().map_err(|_| ConfigError::InvalidValue {
                field: "SPS2_BUILD_JOBS".to_string(),
                value: jobs,
            })?;
    }
    // SPS2_NETWORK_ACCESS removed - network access comes from recipe, not config
    // SPS2_PARALLEL_DOWNLOADS
    if let Ok(downloads) = std::env::var("SPS2_PARALLEL_DOWNLOADS") {
        self.general.parallel_downloads =
            downloads.parse().map_err(|_| ConfigError::InvalidValue {
                field: "SPS2_PARALLEL_DOWNLOADS".to_string(),
                value: downloads,
            })?;
    }
    // Optional CAS env overrides (best-effort; ignore if invalid)
    if let Ok(v) = std::env::var("SPS2_CAS_KEEP_STATES") {
        if let Ok(n) = v.parse() {
            self.cas.keep_states_count = n;
        }
    }
    if let Ok(v) = std::env::var("SPS2_CAS_KEEP_DAYS") {
        if let Ok(n) = v.parse() {
            self.cas.keep_days = n;
        }
    }
    if let Ok(v) = std::env::var("SPS2_CAS_PKG_GRACE_DAYS") {
        if let Ok(n) = v.parse() {
            self.cas.package_grace_days = n;
        }
    }
    if let Ok(v) = std::env::var("SPS2_CAS_OBJ_GRACE_DAYS") {
        if let Ok(n) = v.parse() {
            self.cas.object_grace_days = n;
        }
    }
    // Truthy spellings accepted for the dry-run flag; anything else is false.
    if let Ok(v) = std::env::var("SPS2_CAS_DRY_RUN") {
        self.cas.dry_run = matches!(v.as_str(), "1" | "true" | "yes");
    }
    Ok(())
}
/// Get the store path (with default)
#[must_use]
pub fn store_path(&self) -> PathBuf {
    match &self.paths.store_path {
        Some(custom) => custom.clone(),
        None => PathBuf::from(crate::constants::STORE_DIR),
    }
}
/// Get the state path (with default)
#[must_use]
pub fn state_path(&self) -> PathBuf {
    match &self.paths.state_path {
        Some(custom) => custom.clone(),
        None => PathBuf::from(crate::constants::STATES_DIR),
    }
}
/// Get the build path (with default)
///
/// Falls back to the current working directory, or `"."` if even that
/// cannot be determined.
#[must_use]
pub fn build_path(&self) -> PathBuf {
    match &self.paths.build_path {
        Some(custom) => custom.clone(),
        None => std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
    }
}
/// Get the live root path
// Fixed location: not overridable via PathConfig (see crate::constants).
#[must_use]
pub fn live_path(&self) -> PathBuf {
    PathBuf::from(crate::constants::LIVE_DIR)
}
/// Get the database path
// Fixed location: not overridable via PathConfig.
#[must_use]
pub fn db_path(&self) -> PathBuf {
    PathBuf::from(crate::constants::DB_PATH)
}
/// Save configuration to the default location
///
/// # Errors
///
/// Returns an error if the configuration cannot be serialized
/// or if the file cannot be written.
pub async fn save(&self) -> Result<(), Error> {
    let target = Self::default_path()?;
    self.save_to(target.as_path()).await
}
/// Save configuration to a specific path
///
/// # Errors
///
/// Returns an error if the configuration cannot be serialized
/// or if the file cannot be written.
pub async fn save_to(&self, path: &Path) -> Result<(), Error> {
    // Ensure parent directory exists
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)
            .await
            .map_err(|e| ConfigError::WriteError {
                path: parent.display().to_string(),
                error: e.to_string(),
            })?;
    }
    // Serialize to TOML with pretty formatting
    let toml_string =
        toml::to_string_pretty(self).map_err(|e| ConfigError::SerializeError {
            error: e.to_string(),
        })?;
    // Add header comment
    let content = format!(
        "# sps2 Configuration File\n\
        # This file was automatically generated.\n\
        # You can modify it to customize sps2 behavior.\n\
        #\n\
        # For more information, see: https://github.com/your-org/sps2\n\n\
        {toml_string}"
    );
    // Write to file
    fs::write(path, content)
        .await
        .map_err(|e| ConfigError::WriteError {
            path: path.display().to_string(),
            error: e.to_string(),
        })?;
    Ok(())
}
/// Get all allowed commands (delegated to builder config)
// Thin forwarding wrappers: the command policy lives in BuilderConfig.
#[must_use]
pub fn get_allowed_commands(&self) -> Vec<String> {
    self.builder.get_allowed_commands()
}
/// Check if a command is allowed (delegated to builder config)
#[must_use]
pub fn is_command_allowed(&self, command: &str) -> bool {
    self.builder.is_command_allowed(command)
}
/// Check if a shell pattern is allowed (delegated to builder config)
#[must_use]
pub fn is_shell_pattern_allowed(&self, pattern: &str) -> bool {
    self.builder.is_shell_pattern_allowed(pattern)
}
/// Validate guard configuration settings
///
/// # Errors
///
/// Returns an error if any configuration values are invalid
pub fn validate_guard_config(&self) -> Result<(), Error> {
    // Verification settings are always present; the top-level guard
    // section is optional and only validated when configured.
    self.validate_verification_config()?;
    match &self.guard {
        Some(guard_config) => Self::validate_top_level_guard_config(guard_config),
        None => Ok(()),
    }
}
// Validates the embedded [verification] section: level, orphaned-file
// action, performance knobs, and lenient symlink directories.
fn validate_verification_config(&self) -> Result<(), Error> {
    Self::validate_verification_level(&self.verification.level, "verification.level")?;
    Self::validate_orphaned_file_action(
        &self.verification.orphaned_file_action,
        "verification.orphaned_file_action",
    )?;
    Self::validate_toml_performance_config(
        &self.verification.performance,
        "verification.performance",
    )?;
    Self::validate_symlink_directories(
        &self.verification.guard.lenient_symlink_directories,
        "verification.guard.lenient_symlink_directories",
    )?;
    Ok(())
}
// Same checks for the optional top-level [guard] section, which uses its
// own config types (GuardPerformanceConfig / GuardDirectoryConfig).
fn validate_top_level_guard_config(guard_config: &GuardConfiguration) -> Result<(), Error> {
    Self::validate_verification_level(
        &guard_config.verification_level,
        "guard.verification_level",
    )?;
    Self::validate_orphaned_file_action(
        &guard_config.orphaned_file_action,
        "guard.orphaned_file_action",
    )?;
    Self::validate_guard_performance_config(&guard_config.performance, "guard.performance")?;
    Self::validate_guard_symlink_directories(
        &guard_config.lenient_symlink_directories,
        "guard.lenient_symlink_directories",
    )?;
    Ok(())
}
// Only "quick", "standard" and "full" are recognized verification levels.
fn validate_verification_level(level: &str, field_name: &str) -> Result<(), Error> {
    if matches!(level, "quick" | "standard" | "full") {
        Ok(())
    } else {
        Err(ConfigError::InvalidValue {
            field: field_name.to_string(),
            value: level.to_string(),
        }
        .into())
    }
}
// Only "remove", "preserve" and "backup" are recognized actions.
fn validate_orphaned_file_action(action: &str, field_name: &str) -> Result<(), Error> {
    if !matches!(action, "remove" | "preserve" | "backup") {
        return Err(ConfigError::InvalidValue {
            field: field_name.to_string(),
            value: action.to_string(),
        }
        .into());
    }
    Ok(())
}
// Both performance knobs must be non-zero; zero would stall or time out
// every verification immediately.
fn validate_toml_performance_config(
    perf: &guard::PerformanceConfigToml,
    field_prefix: &str,
) -> Result<(), Error> {
    let zero_field = if perf.max_concurrent_tasks == 0 {
        Some("max_concurrent_tasks")
    } else if perf.verification_timeout_seconds == 0 {
        Some("verification_timeout_seconds")
    } else {
        None
    };
    match zero_field {
        Some(name) => Err(ConfigError::InvalidValue {
            field: format!("{field_prefix}.{name}"),
            value: "0".to_string(),
        }
        .into()),
        None => Ok(()),
    }
}
fn validate_guard_performance_config(
perf: &guard::GuardPerformanceConfig,
field_prefix: &str,
) -> Result<(), Error> {
if perf.max_concurrent_tasks == 0 {
return Err(ConfigError::InvalidValue {
field: format!("{field_prefix}.max_concurrent_tasks"),
value: "0".to_string(),
}
.into());
}
if perf.verification_timeout_seconds == 0 {
return Err(ConfigError::InvalidValue {
field: format!("{field_prefix}.verification_timeout_seconds"),
value: "0".to_string(),
}
.into());
}
Ok(())
}
// Every lenient-symlink directory must be an absolute path.
fn validate_symlink_directories(dirs: &[PathBuf], field_name: &str) -> Result<(), Error> {
    match dirs.iter().find(|dir| !dir.is_absolute()) {
        Some(relative) => Err(ConfigError::InvalidValue {
            field: field_name.to_string(),
            value: relative.display().to_string(),
        }
        .into()),
        None => Ok(()),
    }
}
// Same absolute-path requirement, for the guard directory config type.
fn validate_guard_symlink_directories(
    dirs: &[guard::GuardDirectoryConfig],
    field_name: &str,
) -> Result<(), Error> {
    if let Some(bad) = dirs.iter().find(|cfg| !cfg.path.is_absolute()) {
        return Err(ConfigError::InvalidValue {
            field: field_name.to_string(),
            value: bad.path.display().to_string(),
        }
        .into());
    }
    Ok(())
}
}
/// Calculate build jobs based on CPU count
///
/// A `config_value` of 0 means "auto-detect from the CPU count".
#[must_use]
pub fn calculate_build_jobs(config_value: usize) -> usize {
    if config_value == 0 {
        // Auto-detect: use roughly 75% of the available CPUs (minimum 1)
        // so the system stays responsive during builds.
        (num_cpus::get() * 3 / 4).max(1)
    } else {
        // Explicit user override wins.
        config_value
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/core.rs | crates/config/src/core.rs | //! Core configuration types and utilities shared across all crates
use super::repository::Repositories;
use serde::{Deserialize, Serialize};
use sps2_types::{ColorChoice, OutputFormat};
use std::path::PathBuf;
/// General application configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneralConfig {
#[serde(default = "default_output_format")]
pub default_output: OutputFormat,
#[serde(default = "default_color_choice")]
pub color: ColorChoice,
#[serde(default = "default_parallel_downloads")]
pub parallel_downloads: usize,
}
impl Default for GeneralConfig {
fn default() -> Self {
Self {
default_output: OutputFormat::Tty,
color: ColorChoice::Auto,
parallel_downloads: 4,
}
}
}
/// Security configuration shared across crates
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityConfig {
#[serde(default = "default_verify_signatures")]
pub verify_signatures: bool,
#[serde(default = "default_allow_unsigned")]
pub allow_unsigned: bool,
#[serde(default = "default_index_max_age_days")]
pub index_max_age_days: u32,
}
impl Default for SecurityConfig {
fn default() -> Self {
Self {
verify_signatures: true,
allow_unsigned: false,
index_max_age_days: 7,
}
}
}
/// State management configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StateConfig {
#[serde(default = "default_retention_count")]
pub retention_count: usize,
#[serde(default = "default_retention_days")]
pub retention_days: u32,
#[serde(default = "default_history_verify_limit")]
pub history_verify_limit: usize,
}
impl Default for StateConfig {
fn default() -> Self {
Self {
retention_count: 10, // Keep last 10 states
retention_days: 30, // Or 30 days, whichever is less
history_verify_limit: default_history_verify_limit(),
}
}
}
/// Path configuration
///
/// All fields are optional; `None` means "use the fixed default"
/// (store/state fall back to `crate::constants`, build falls back to the
/// current working directory — see the accessors on `Config`).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PathConfig {
    pub store_path: Option<PathBuf>,
    pub state_path: Option<PathBuf>,
    pub build_path: Option<PathBuf>,
}
/// CAS cleanup/retention configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CasConfig {
    // Number of states to keep (shares its default with state retention).
    #[serde(default = "default_retention_count")]
    pub keep_states_count: usize,
    // Age-based retention, in days.
    #[serde(default = "default_retention_days")]
    pub keep_days: u32,
    // Grace periods (days) before packages/objects become collectable.
    #[serde(default = "default_package_grace_days")]
    pub package_grace_days: u32,
    #[serde(default = "default_object_grace_days")]
    pub object_grace_days: u32,
    // When true, cleanup reports what it would do without deleting.
    #[serde(default)]
    pub dry_run: bool,
}
impl Default for CasConfig {
    fn default() -> Self {
        // All values delegate to the serde default functions.
        Self {
            keep_states_count: default_retention_count(),
            keep_days: default_retention_days(),
            package_grace_days: default_package_grace_days(),
            object_grace_days: default_object_grace_days(),
            dry_run: false,
        }
    }
}
/// Repository configuration group
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct RepositoryGroupConfig {
    #[serde(default)]
    pub repositories: Repositories,
}
/// Network configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NetworkConfig {
#[serde(default = "default_timeout")]
pub timeout: u64, // seconds
#[serde(default = "default_retries")]
pub retries: u32,
#[serde(default = "default_retry_delay")]
pub retry_delay: u64, // seconds
}
impl Default for NetworkConfig {
fn default() -> Self {
Self {
timeout: 300, // 5 minutes
retries: 3,
retry_delay: 1, // 1 second
}
}
}
// Default value functions for serde
//
// Each function backs a `#[serde(default = "...")]` attribute above and
// is also reusable from the hand-written `Default` impls.
fn default_output_format() -> OutputFormat {
    OutputFormat::Tty
}
fn default_color_choice() -> ColorChoice {
    ColorChoice::Auto
}
fn default_parallel_downloads() -> usize {
    4
}
fn default_verify_signatures() -> bool {
    true
}
fn default_allow_unsigned() -> bool {
    false
}
fn default_index_max_age_days() -> u32 {
    7
}
// Shared by StateConfig retention and CasConfig keep_* fields.
fn default_retention_count() -> usize {
    10
}
fn default_retention_days() -> u32 {
    30
}
fn default_timeout() -> u64 {
    300 // 5 minutes
}
fn default_retries() -> u32 {
    3
}
fn default_retry_delay() -> u64 {
    1 // 1 second
}
fn default_package_grace_days() -> u32 {
    7
}
fn default_object_grace_days() -> u32 {
    7
}
fn default_history_verify_limit() -> usize {
    20
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/resources_semaphore.rs | crates/config/src/resources_semaphore.rs | //! Semaphore utilities for resource management
//!
//! This module provides helper functions for managing semaphores with
//! consistent error handling across the sps2 package manager.
use sps2_errors::{Error, InstallError};
use std::sync::Arc;
use tokio::sync::{OwnedSemaphorePermit, Semaphore};
/// Acquire a semaphore permit with proper error handling
///
/// This helper function provides consistent error handling for semaphore
/// acquisition across all modules in sps2.
///
/// # Arguments
///
/// * `semaphore` - The semaphore to acquire a permit from
/// * `operation` - Description of the operation for error reporting
///
/// # Errors
///
/// Returns an error if the semaphore is closed or acquisition fails
pub async fn acquire_semaphore_permit(
    semaphore: Arc<Semaphore>,
    operation: &str,
) -> Result<OwnedSemaphorePermit, Error> {
    // `semaphore` is already owned, so it can be consumed by
    // `acquire_owned` directly — the previous `semaphore.clone()` was a
    // redundant Arc refcount bump.
    semaphore.acquire_owned().await.map_err(|_| {
        InstallError::ConcurrencyError {
            message: format!("failed to acquire semaphore for {operation}"),
        }
        .into()
    })
}
/// Try to acquire a semaphore permit without waiting
///
/// This helper function attempts to acquire a semaphore permit immediately
/// without blocking. Useful for checking resource availability.
///
/// # Arguments
///
/// * `semaphore` - The semaphore to try to acquire a permit from
///
/// # Returns
///
/// Returns `Ok(Some(permit))` if successful, `Ok(None)` if would block,
/// or an error if the semaphore is closed.
///
/// # Errors
///
/// Returns an error if the semaphore is closed.
pub fn try_acquire_semaphore_permit(
semaphore: &Arc<Semaphore>,
) -> Result<Option<OwnedSemaphorePermit>, Error> {
match semaphore.clone().try_acquire_owned() {
Ok(permit) => Ok(Some(permit)),
Err(tokio::sync::TryAcquireError::NoPermits) => Ok(None),
Err(tokio::sync::TryAcquireError::Closed) => Err(InstallError::ConcurrencyError {
message: "semaphore is closed".to_string(),
}
.into()),
}
}
/// Create a semaphore with a specified number of permits
///
/// This is a convenience function for creating semaphores with consistent
/// error handling and documentation.
///
/// # Arguments
///
/// * `permits` - Number of permits the semaphore should have
///
/// # Returns
///
/// Returns an Arc-wrapped semaphore for shared ownership
#[must_use]
pub fn create_semaphore(permits: usize) -> Arc<Semaphore> {
    let semaphore = Semaphore::new(permits);
    Arc::new(semaphore)
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/constants.rs | crates/config/src/constants.rs | //! Centralized, non-configurable filesystem paths for sps2
//!
//! These paths are deliberately not exposed via TOML configuration to keep the
//! installation prefix stable. Packages are built against this fixed prefix.
// Installation prefix every package is built against.
pub const PREFIX: &str = "/opt/pm";
// Content-addressable package store.
pub const STORE_DIR: &str = "/opt/pm/store";
// Historical state snapshots.
pub const STATES_DIR: &str = "/opt/pm/states";
// Currently-active (live) root.
pub const LIVE_DIR: &str = "/opt/pm/live";
// Executables inside the live root.
pub const BIN_DIR: &str = "/opt/pm/live/bin";
pub const LOGS_DIR: &str = "/opt/pm/logs";
pub const KEYS_DIR: &str = "/opt/pm/keys";
// SQLite state database.
pub const DB_PATH: &str = "/opt/pm/state.sqlite";
// Marker file recording when garbage collection last ran (presumably a
// timestamp — confirm with the GC implementation).
pub const LAST_GC_TIMESTAMP: &str = "/opt/pm/.last_gc_timestamp";
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/resources_manager.rs | crates/config/src/resources_manager.rs | //! Resource manager for coordinating concurrent operations
//!
//! This module provides the main `ResourceManager` struct that coordinates
//! semaphores and resource limits for concurrent operations.
use crate::resources_limits::{ResourceAvailability, ResourceLimits};
use crate::resources_semaphore::{
acquire_semaphore_permit, create_semaphore, try_acquire_semaphore_permit,
};
use sps2_errors::Error;
use std::sync::atomic::AtomicU64;
use std::sync::Arc;
use tokio::sync::{OwnedSemaphorePermit, Semaphore};
/// Resource manager for coordinating resource usage
///
/// This structure manages semaphores and resource limits for concurrent
/// operations, ensuring we don't exceed system capabilities.
#[derive(Debug)]
pub struct ResourceManager {
    /// Semaphore for download operations
    pub download_semaphore: Arc<Semaphore>,
    /// Semaphore for decompression operations
    pub decompression_semaphore: Arc<Semaphore>,
    /// Semaphore for installation operations
    pub installation_semaphore: Arc<Semaphore>,
    /// Resource limits configuration
    pub limits: ResourceLimits,
    /// Current memory usage
    // Shared atomic counter; units are presumably bytes — confirm with
    // the code that updates it.
    pub memory_usage: Arc<AtomicU64>,
}
impl ResourceManager {
/// Create a new resource manager with the given limits
///
/// Each operation class gets its own semaphore sized from the limits;
/// memory accounting starts at zero.
#[must_use]
pub fn new(limits: ResourceLimits) -> Self {
    let download_semaphore = create_semaphore(limits.concurrent_downloads);
    let decompression_semaphore = create_semaphore(limits.concurrent_decompressions);
    let installation_semaphore = create_semaphore(limits.concurrent_installations);
    Self {
        download_semaphore,
        decompression_semaphore,
        installation_semaphore,
        memory_usage: Arc::new(AtomicU64::new(0)),
        limits,
    }
}
/// Create a resource manager with system-based limits
#[must_use]
pub fn from_system() -> Self {
Self::new(ResourceLimits::from_system())
}
/// Acquire a download permit
///
/// # Errors
///
/// Returns an error if the semaphore is closed or acquisition fails.
pub async fn acquire_download_permit(&self) -> Result<OwnedSemaphorePermit, Error> {
acquire_semaphore_permit(self.download_semaphore.clone(), "download").await
}
/// Acquire a decompression permit
///
/// # Errors
///
/// Returns an error if the semaphore is closed or acquisition fails.
pub async fn acquire_decompression_permit(&self) -> Result<OwnedSemaphorePermit, Error> {
acquire_semaphore_permit(self.decompression_semaphore.clone(), "decompression").await
}
/// Acquire an installation permit
///
/// # Errors
///
/// Returns an error if the semaphore is closed or acquisition fails.
pub async fn acquire_installation_permit(&self) -> Result<OwnedSemaphorePermit, Error> {
acquire_semaphore_permit(self.installation_semaphore.clone(), "installation").await
}
/// Try to acquire a download permit without blocking
///
/// # Errors
///
/// Returns an error if the semaphore is closed.
pub fn try_acquire_download_permit(&self) -> Result<Option<OwnedSemaphorePermit>, Error> {
try_acquire_semaphore_permit(&self.download_semaphore)
}
/// Try to acquire a decompression permit without blocking
///
/// # Errors
///
/// Returns an error if the semaphore is closed.
pub fn try_acquire_decompression_permit(&self) -> Result<Option<OwnedSemaphorePermit>, Error> {
try_acquire_semaphore_permit(&self.decompression_semaphore)
}
/// Try to acquire an installation permit without blocking
///
/// # Errors
///
/// Returns an error if the semaphore is closed.
pub fn try_acquire_installation_permit(&self) -> Result<Option<OwnedSemaphorePermit>, Error> {
try_acquire_semaphore_permit(&self.installation_semaphore)
}
/// Check if memory usage is within limits
#[must_use]
pub fn is_memory_within_limits(&self, current_usage: u64) -> bool {
match self.limits.memory_usage {
Some(limit) => current_usage <= limit,
None => true, // No limit set
}
}
/// Get current resource availability
#[must_use]
pub fn get_resource_availability(&self) -> ResourceAvailability {
ResourceAvailability {
download: self.download_semaphore.available_permits(),
decompression: self.decompression_semaphore.available_permits(),
installation: self.installation_semaphore.available_permits(),
}
}
/// Clean up resources
///
/// # Errors
///
/// Returns an error if cleanup operations fail.
pub fn cleanup(&self) -> Result<(), Error> {
// Nothing to do here for now, but this can be used to clean up
// any temporary files or other resources created by the resource manager.
Ok(())
}
}
impl Default for ResourceManager {
    /// Equivalent to `ResourceManager::new(ResourceLimits::default())`.
    fn default() -> Self {
        let limits = ResourceLimits::default();
        Self::new(limits)
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/config/src/repository.rs | crates/config/src/repository.rs | use serde::{Deserialize, Serialize};
/// Configuration for a single package repository.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RepositoryConfig {
    /// Base URL of the repository.
    pub url: String,
    /// Selection priority; defaults to 1. NOTE(review): whether higher or
    /// lower values win is not visible here — confirm at the resolution site.
    #[serde(default = "default_priority")]
    pub priority: u32,
    /// Signature algorithm; defaults to "minisign".
    #[serde(default = "default_algorithm")]
    pub algorithm: String, // "minisign" | "openpgp" (future)
    /// Trusted signing key identifiers for this repository.
    #[serde(default)]
    pub key_ids: Vec<String>,
}
/// The set of configured repositories: three well-known slots plus arbitrary extras.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Repositories {
    /// Optional "fast" repository slot.
    #[serde(default)]
    pub fast: Option<RepositoryConfig>,
    /// Optional "slow" repository slot.
    #[serde(default)]
    pub slow: Option<RepositoryConfig>,
    /// Optional "stable" repository slot.
    #[serde(default)]
    pub stable: Option<RepositoryConfig>,
    /// Additional named repositories beyond the three well-known slots.
    #[serde(default)]
    pub extras: std::collections::HashMap<String, RepositoryConfig>,
}
impl Repositories {
    /// Collect references to every configured repository.
    ///
    /// Order: the named slots first (fast, slow, stable — skipping any that
    /// are unset), then all extras in map-iteration order.
    #[must_use]
    pub fn get_all(&self) -> Vec<&RepositoryConfig> {
        self.fast
            .iter()
            .chain(self.slow.iter())
            .chain(self.stable.iter())
            .chain(self.extras.values())
            .collect()
    }
}
/// Serde default for `RepositoryConfig::priority`.
fn default_priority() -> u32 {
    1
}
/// Serde default for `RepositoryConfig::algorithm`.
fn default_algorithm() -> String {
    String::from("minisign")
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/lib.rs | crates/platform/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
//! Platform abstraction layer for macOS ARM64 package manager operations.
//!
//! This crate provides a unified interface for platform-specific operations including:
//! - Binary operations (install_name_tool, otool, codesign)
//! - Filesystem operations (APFS clonefile, atomic operations)
//! - Process execution with proper event emission and error handling
//!
//! The platform abstraction integrates seamlessly with the existing event system
//! and error handling patterns in the sps2 codebase.
pub mod binary;
pub mod core;
pub mod filesystem;
pub mod fs;
pub mod implementations;
pub mod process;
pub use core::{
Platform, PlatformCapabilities, PlatformContext, PlatformManager, ToolInfo, ToolRegistry,
};
pub use implementations::macos::MacOSPlatform;
/// Re-export commonly used types
pub use binary::BinaryOperations;
pub use filesystem::FilesystemOperations;
pub use fs as filesystem_helpers;
pub use process::ProcessOperations;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/fs.rs | crates/platform/src/fs.rs | #![allow(clippy::module_name_repetitions, unused_variables)]
//! Filesystem convenience helpers backed by the platform abstraction.
//!
//! These functions provide a stable API returning `sps2_errors::Error` while
//! internally leveraging the platform's `FilesystemOperations` implementation.
use crate::PlatformManager;
use sps2_errors::{Error, StorageError};
use std::path::Path;
use tokio::fs;
use tokio::task;
/// Result type for filesystem operations
pub type Result<T> = std::result::Result<T, sps2_errors::Error>;
/// APFS clonefile support
#[cfg(target_os = "macos")]
mod apfs {
    use super::{Path, Result, StorageError};
    use crate::PlatformManager;
    /// Clone a file or directory using APFS clonefile with security flags
    ///
    /// # Errors
    ///
    /// Returns an error if `src` cannot be inspected or the clone fails.
    pub async fn clone_path(src: &Path, dst: &Path) -> Result<()> {
        let platform = PlatformManager::instance().platform();
        let context = platform.create_context(None);
        // Use the async metadata call so the executor thread is never blocked
        // (the previous std::fs::metadata call here was synchronous).
        let metadata = tokio::fs::metadata(src)
            .await
            .map_err(|e| StorageError::IoError {
                message: e.to_string(),
            })?;
        // Use clone_directory for directories, clone_file for files
        let result = if metadata.is_dir() {
            platform
                .filesystem()
                .clone_directory(&context, src, dst)
                .await
        } else {
            platform.filesystem().clone_file(&context, src, dst).await
        };
        result.map_err(|platform_err| {
            // Convert PlatformError back to StorageError for backward compatibility
            match platform_err {
                sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
                    StorageError::ApfsCloneFailed { message }.into()
                }
                _ => StorageError::ApfsCloneFailed {
                    message: platform_err.to_string(),
                }
                .into(),
            }
        })
    }
}
/// Atomic rename with swap support
///
/// This function uses macOS `renamex_np` with `RENAME_SWAP` when both paths exist,
/// or regular rename when only source exists. This provides true atomic swap
/// behavior critical for system stability during updates.
///
/// # Errors
///
/// Returns an error if:
/// - Path conversion to C string fails
/// - The atomic rename operation fails (permissions, file not found, etc.)
/// - The blocking task panics
pub async fn atomic_rename(src: &Path, dst: &Path) -> Result<()> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.atomic_rename(&context, src, dst)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::AtomicRenameFailed { message }.into()
}
_ => StorageError::AtomicRenameFailed {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// True atomic swap that requires both paths to exist
///
/// This function guarantees atomic exchange of two directories/files using
/// platform abstraction. This is critical for rollback operations
/// where we need to swap live and backup directories atomically.
///
/// # Errors
///
/// Returns an error if:
/// - Either path doesn't exist
/// - The atomic swap operation fails
pub async fn atomic_swap(path1: &Path, path2: &Path) -> Result<()> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.atomic_swap(&context, path1, path2)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::AtomicRenameFailed { message }.into()
}
_ => StorageError::AtomicRenameFailed {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// Clone a directory tree using APFS clonefile
///
/// Thin wrapper over `apfs::clone_path`, which dispatches to the platform's
/// file or directory clone entry point based on the source's metadata.
///
/// # Errors
///
/// Returns an error if:
/// - Path conversion to C string fails
/// - The APFS clonefile operation fails (permissions, insufficient space, etc.)
/// - The blocking task panics
#[cfg(target_os = "macos")]
pub async fn clone_directory(src: &Path, dst: &Path) -> Result<()> {
    apfs::clone_path(src, dst).await
}
/// Clone a directory tree (fallback for non-APFS)
///
/// Non-macOS builds have no clonefile support, so this degrades to a plain
/// recursive copy. NOTE(review): `fs::copy` follows symlinks and does not
/// preserve all metadata — confirm that is acceptable for this fallback.
///
/// # Errors
///
/// Returns an error if the recursive copy operation fails
#[cfg(not(target_os = "macos"))]
pub async fn clone_directory(src: &Path, dst: &Path) -> Result<()> {
    // Fallback to recursive copy
    copy_directory(src, dst).await
}
/// Recursively copy a directory
///
/// # Errors
///
/// Returns an error if:
/// - Creating the destination directory fails
/// - Reading the source directory fails
/// - Copying any file or subdirectory fails
pub async fn copy_directory(src: &Path, dst: &Path) -> Result<()> {
    fs::create_dir_all(dst).await?;
    let mut dir_entries = fs::read_dir(src).await?;
    while let Some(item) = dir_entries.next_entry().await? {
        let from = item.path();
        let to = dst.join(item.file_name());
        if item.metadata().await?.is_dir() {
            // Recursing through an async fn requires boxing the future.
            Box::pin(copy_directory(&from, &to)).await?;
        } else {
            fs::copy(&from, &to).await?;
        }
    }
    Ok(())
}
/// Create a directory with all parent directories
///
/// # Errors
///
/// Returns an error if:
/// - Permission is denied
/// - Any I/O operation fails during directory creation
pub async fn create_dir_all(path: &Path) -> Result<()> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.create_dir_all(&context, path)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::IoError { message }.into()
}
_ => StorageError::IoError {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// Remove a directory and all its contents
///
/// # Errors
///
/// Returns an error if:
/// - The directory removal operation fails (permissions, non-empty directory, etc.)
pub async fn remove_dir_all(path: &Path) -> Result<()> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.remove_dir_all(&context, path)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::IoError { message }.into()
}
_ => StorageError::IoError {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// Create a hard link with platform optimization
///
/// # Errors
///
/// Returns an error if:
/// - The source file does not exist
/// - The destination already exists
/// - The hard link operation fails (cross-device link, permissions, etc.)
pub async fn hard_link(src: &Path, dst: &Path) -> Result<()> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.hard_link(&context, src, dst)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::IoError { message }.into()
}
_ => StorageError::IoError {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// Create staging directory using platform-optimized methods
///
/// This function will clone an existing live directory if it exists,
/// or create a new empty directory for fresh installations.
///
/// # Errors
///
/// Returns an error if:
/// - Directory creation fails
/// - Clone operation fails
/// - Parent directory creation fails
pub async fn create_staging_directory(live_path: &Path, staging_path: &Path) -> Result<()> {
    // Fresh installation: nothing to clone, just make an empty directory.
    if !exists(live_path).await {
        return create_dir_all(staging_path).await;
    }
    // Ensure parent directory exists for staging path
    if let Some(parent) = staging_path.parent() {
        create_dir_all(parent).await?;
    }
    // clonefile requires the destination to not exist, so clear any leftover.
    if exists(staging_path).await {
        remove_dir_all(staging_path).await?;
    }
    // Clone the live directory to staging
    clone_directory(live_path, staging_path).await
}
/// Rename a file or directory
///
/// Uses `tokio::fs::rename` directly (no platform abstraction involved).
///
/// # Errors
///
/// Returns an error if:
/// - The rename operation fails (permissions, cross-device, etc.)
pub async fn rename(src: &Path, dst: &Path) -> Result<()> {
    match fs::rename(src, dst).await {
        Ok(()) => Ok(()),
        Err(e) => Err(StorageError::IoError {
            message: format!("rename failed: {e}"),
        }
        .into()),
    }
}
/// Check if a path exists
///
/// Delegates to the platform filesystem implementation. NOTE(review): the
/// trait returns a bare `bool`, so probe errors presumably read as `false` —
/// confirm in the macOS implementation.
pub async fn exists(path: &Path) -> bool {
    let platform = PlatformManager::instance().platform();
    let context = platform.create_context(None);
    platform.filesystem().exists(&context, path).await
}
/// Get the size of a file or directory
///
/// # Errors
///
/// Returns an error if:
/// - Reading file metadata fails
/// - Reading directory contents fails
/// - Any I/O operation fails during recursive directory traversal
pub async fn size(path: &Path) -> Result<u64> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.size(&context, path)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::IoError { message }.into()
}
_ => StorageError::IoError {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// Ensure a directory exists and is empty
///
/// Any existing directory at `path` is removed (with all its contents) and a
/// fresh empty directory is created in its place.
///
/// # Errors
///
/// Returns an error if:
/// - Directory removal fails
/// - Directory creation fails
pub async fn ensure_empty_dir(path: &Path) -> Result<()> {
    if exists(path).await {
        remove_dir_all(path).await?;
    }
    create_dir_all(path).await
}
/// Set APFS compression attribute on a path (macOS only)
///
/// Currently a stub: no extended attribute is written, and the call always
/// succeeds.
#[cfg(target_os = "macos")]
pub fn set_compression(_path: &Path) -> Result<()> {
    // Placeholder for compression extended attributes
    Ok(())
}
/// Set APFS compression attribute on a path (no-op on non-macOS)
///
/// Always succeeds; compression is an APFS-only feature.
#[cfg(not(target_os = "macos"))]
pub fn set_compression(_path: &Path) -> Result<()> {
    Ok(())
}
/// Remove a single file
///
/// # Errors
///
/// Returns an error if:
/// - The file removal operation fails (permissions, file not found, etc.)
pub async fn remove_file(path: &Path) -> Result<()> {
let platform = PlatformManager::instance().platform();
let context = platform.create_context(None);
platform
.filesystem()
.remove_file(&context, path)
.await
.map_err(|platform_err| {
// Convert PlatformError back to StorageError for backward compatibility
match platform_err {
sps2_errors::PlatformError::FilesystemOperationFailed { message, .. } => {
StorageError::IoError { message }.into()
}
_ => StorageError::IoError {
message: platform_err.to_string(),
}
.into(),
}
})
}
/// Blocking helper that creates `link` pointing at `target`, clearing
/// whatever currently occupies the link path first.
#[cfg(unix)]
fn symlink_inner(target: &Path, link: &Path) -> std::io::Result<()> {
    use std::os::unix::fs::symlink;
    // Ensure the link's parent directory exists.
    if let Some(parent) = link.parent() {
        std::fs::create_dir_all(parent)?;
    }
    // symlink() fails if the destination exists, so remove it first.
    // symlink_metadata does not follow links, so an existing symlink is
    // removed rather than its target.
    match std::fs::symlink_metadata(link) {
        Ok(meta) if meta.file_type().is_dir() => std::fs::remove_dir_all(link)?,
        Ok(_) => std::fs::remove_file(link)?,
        Err(_) => {}
    }
    symlink(target, link)
}
/// Create a symbolic link using platform-safe operations (Unix only).
///
/// The blocking filesystem work runs on `spawn_blocking` so the async
/// executor is never stalled.
///
/// # Errors
///
/// Returns an error if:
/// - Creating parent directories fails
/// - Removing a pre-existing path at the link location fails
/// - The symlink creation itself fails
#[cfg(unix)]
pub async fn symlink(target: &Path, link: &Path) -> Result<()> {
    let target = target.to_path_buf();
    let link = link.to_path_buf();
    task::spawn_blocking(move || symlink_inner(&target, &link))
        .await
        .map_err(|e| {
            Error::from(StorageError::IoError {
                message: format!("symlink task failed: {e}"),
            })
        })?
        .map_err(|e| {
            Error::from(StorageError::IoError {
                message: format!("symlink operation failed: {e}"),
            })
        })
}
/// Create a symbolic link (not supported on non-Unix platforms).
///
/// Always returns an `IoError`; callers on non-Unix targets must not rely on
/// symlink support.
#[cfg(not(unix))]
pub async fn symlink(_target: &Path, _link: &Path) -> Result<()> {
    Err(StorageError::IoError {
        message: "symlink is not supported on this platform".to_string(),
    }
    .into())
}
/// Remove whatever occupies `path` so a fresh temporary symlink can be created there.
#[cfg(unix)]
async fn remove_tmp_link(path: &Path) -> Result<()> {
    // Fast path: nothing at the path, nothing to clean up.
    if !exists(path).await {
        return Ok(());
    }
    match tokio::fs::symlink_metadata(path).await {
        // symlink_metadata does not follow links, so this branch is a real
        // directory (not a symlink to one): remove recursively.
        Ok(metadata) if metadata.file_type().is_dir() => remove_dir_all(path).await?,
        // Regular file or symlink: a single unlink suffices.
        Ok(_) => remove_file(path).await?,
        // Raced with another remover — already gone counts as success.
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
        Err(e) => {
            return Err(StorageError::IoError {
                message: format!("failed to inspect temporary link: {e}"),
            }
            .into())
        }
    }
    Ok(())
}
/// Atomically replace a symbolic link with a new target (Unix only).
///
/// This helper creates a temporary symlink pointing at `target` and swaps it into
/// place using the platform's atomic rename primitive.
///
/// # Errors
///
/// Returns an error if creating the parent directory, preparing or creating
/// the temporary symlink, or the final atomic rename fails.
#[cfg(unix)]
pub async fn replace_symlink(target: &Path, link: &Path) -> Result<()> {
    if let Some(parent) = link.parent() {
        create_dir_all(parent).await?;
    }
    // Build the temp name by *appending* ".tmp-link" to the full file name.
    // `Path::with_extension` would replace an existing extension, so sibling
    // links such as "state.current" and "state.backup" would both collide on
    // the same "state.tmp-link" temporary path.
    let mut temp_name = link
        .file_name()
        .map(std::ffi::OsStr::to_os_string)
        .unwrap_or_default();
    temp_name.push(".tmp-link");
    let temp_link = link.with_file_name(temp_name);
    remove_tmp_link(&temp_link).await?;
    symlink(target, &temp_link).await?;
    atomic_rename(&temp_link, link).await
}
/// Atomically replace a symbolic link with a new target (not supported on non-Unix platforms).
///
/// Always returns an `IoError`, mirroring the non-Unix `symlink` stub.
#[cfg(not(unix))]
pub async fn replace_symlink(_target: &Path, _link: &Path) -> Result<()> {
    Err(StorageError::IoError {
        message: "replace_symlink is not supported on this platform".to_string(),
    }
    .into())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/process/mod.rs | crates/platform/src/process/mod.rs | //! Process execution operations for macOS platform
use async_trait::async_trait;
use sps2_errors::Error;
use std::collections::HashMap;
use std::path::PathBuf;
use std::process::ExitStatus;
use crate::core::PlatformContext;
/// Platform-specific command builder and execution
///
/// Plain-data description of a process invocation: program name, ordered
/// argument list, optional working directory, and environment overrides.
pub struct PlatformCommand {
    /// Executable name or path.
    program: String,
    /// Positional arguments, in insertion order.
    args: Vec<String>,
    /// Working directory override, if any.
    current_dir: Option<PathBuf>,
    /// Environment variable overrides (later inserts win).
    env_vars: HashMap<String, String>,
}

impl PlatformCommand {
    /// Create a new platform command
    pub fn new(program: &str) -> Self {
        Self {
            program: program.to_string(),
            args: Vec::new(),
            current_dir: None,
            env_vars: HashMap::new(),
        }
    }

    /// Add an argument to the command
    pub fn arg<S: AsRef<str>>(&mut self, arg: S) -> &mut Self {
        self.args.push(arg.as_ref().to_string());
        self
    }

    /// Add multiple arguments to the command
    pub fn args<I, S>(&mut self, args: I) -> &mut Self
    where
        I: IntoIterator<Item = S>,
        S: AsRef<str>,
    {
        self.args
            .extend(args.into_iter().map(|a| a.as_ref().to_string()));
        self
    }

    /// Set the working directory for the command
    pub fn current_dir<P: Into<PathBuf>>(&mut self, dir: P) -> &mut Self {
        self.current_dir = Some(dir.into());
        self
    }

    /// Set an environment variable for the command
    pub fn env<K, V>(&mut self, key: K, value: V) -> &mut Self
    where
        K: AsRef<str>,
        V: AsRef<str>,
    {
        self.env_vars
            .insert(key.as_ref().to_string(), value.as_ref().to_string());
        self
    }

    /// Set multiple environment variables for the command
    pub fn envs<I, K, V>(&mut self, envs: I) -> &mut Self
    where
        I: IntoIterator<Item = (K, V)>,
        K: AsRef<str>,
        V: AsRef<str>,
    {
        self.env_vars.extend(
            envs.into_iter()
                .map(|(k, v)| (k.as_ref().to_string(), v.as_ref().to_string())),
        );
        self
    }

    /// Get the program name
    pub fn program(&self) -> &str {
        &self.program
    }

    /// Get the arguments
    pub fn get_args(&self) -> &[String] {
        &self.args
    }

    /// Get the current directory
    pub fn get_current_dir(&self) -> Option<&PathBuf> {
        self.current_dir.as_ref()
    }

    /// Get the environment variables
    pub fn get_env_vars(&self) -> &HashMap<String, String> {
        &self.env_vars
    }
}
/// Output from command execution
///
/// Mirrors `std::process::Output`: exit status plus captured output streams.
pub struct CommandOutput {
    /// Process exit status.
    pub status: ExitStatus,
    /// Raw bytes captured from standard output.
    pub stdout: Vec<u8>,
    /// Raw bytes captured from standard error.
    pub stderr: Vec<u8>,
}
/// Trait for process execution operations
#[async_trait]
pub trait ProcessOperations: Send + Sync {
    /// Execute a command and return the output
    ///
    /// # Errors
    ///
    /// Implementations return an error when the process cannot be spawned or
    /// its output cannot be collected.
    async fn execute_command(
        &self,
        ctx: &PlatformContext,
        cmd: PlatformCommand,
    ) -> Result<CommandOutput, Error>;
    /// Create a new command builder
    fn create_command(&self, program: &str) -> PlatformCommand;
    /// Find the path to an executable
    ///
    /// # Errors
    ///
    /// Implementations return an error when `program` cannot be located.
    async fn which(&self, program: &str) -> Result<PathBuf, Error>;
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/filesystem/mod.rs | crates/platform/src/filesystem/mod.rs | //! Filesystem operations for macOS platform (APFS clonefile, atomic operations)
use async_trait::async_trait;
use sps2_errors::PlatformError;
use std::path::Path;
use crate::core::PlatformContext;
/// Trait for filesystem operations specific to macOS
///
/// All methods take a [`PlatformContext`] so implementations can emit events;
/// fallible operations report [`PlatformError`].
#[async_trait]
pub trait FilesystemOperations: Send + Sync {
    /// Clone a file using APFS clonefile for efficient copy-on-write
    async fn clone_file(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError>;
    /// Clone a directory using APFS clonefile for efficient copy-on-write
    async fn clone_directory(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError>;
    /// Atomically rename a file
    async fn atomic_rename(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError>;
    /// Atomically swap two files
    async fn atomic_swap(
        &self,
        ctx: &PlatformContext,
        path_a: &Path,
        path_b: &Path,
    ) -> Result<(), PlatformError>;
    /// Create a hard link between files
    async fn hard_link(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError>;
    /// Create directory and all parent directories
    async fn create_dir_all(&self, ctx: &PlatformContext, path: &Path)
        -> Result<(), PlatformError>;
    /// Remove directory and all contents
    async fn remove_dir_all(&self, ctx: &PlatformContext, path: &Path)
        -> Result<(), PlatformError>;
    /// Check if a path exists
    ///
    /// NOTE(review): returns a bare bool, so probe errors presumably read as
    /// `false` — confirm in implementations.
    async fn exists(&self, ctx: &PlatformContext, path: &Path) -> bool;
    /// Remove a single file
    async fn remove_file(&self, ctx: &PlatformContext, path: &Path) -> Result<(), PlatformError>;
    /// Get the size of a file or directory
    async fn size(&self, ctx: &PlatformContext, path: &Path) -> Result<u64, PlatformError>;
    /// Check if a path points to a directory.
    async fn is_dir(&self, ctx: &PlatformContext, path: &Path) -> bool;
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/binary/mod.rs | crates/platform/src/binary/mod.rs | //! Binary operations for macOS platform (install_name_tool, otool, codesign)
use async_trait::async_trait;
use sps2_errors::PlatformError;
use std::path::Path;
use crate::core::PlatformContext;
/// Trait for binary manipulation operations specific to macOS
///
/// Per the method docs below, implementations drive the Mach-O toolchain:
/// `otool` for inspection, `install_name_tool` for rewriting load commands,
/// and `codesign` for signing and verification.
#[async_trait]
pub trait BinaryOperations: Send + Sync {
    /// Get the install name of a binary using otool -D
    async fn get_install_name(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<Option<String>, PlatformError>;
    /// Set the install name of a binary using install_name_tool -id
    async fn set_install_name(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        name: &str,
    ) -> Result<(), PlatformError>;
    /// Get dependencies of a binary using otool -L
    async fn get_dependencies(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<Vec<String>, PlatformError>;
    /// Change a dependency reference using install_name_tool -change
    async fn change_dependency(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        old: &str,
        new: &str,
    ) -> Result<(), PlatformError>;
    /// Add an rpath entry using install_name_tool -add_rpath
    async fn add_rpath(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        rpath: &str,
    ) -> Result<(), PlatformError>;
    /// Delete an rpath entry using install_name_tool -delete_rpath
    async fn delete_rpath(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        rpath: &str,
    ) -> Result<(), PlatformError>;
    /// Get rpath entries using otool -l
    async fn get_rpath_entries(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<Vec<String>, PlatformError>;
    /// Verify binary signature using codesign -vvv
    ///
    /// NOTE(review): how an unsigned binary is reported (`Ok(false)` vs `Err`)
    /// is implementation-defined — confirm in the macOS implementation.
    async fn verify_signature(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<bool, PlatformError>;
    /// Sign binary using codesign
    async fn sign_binary(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        identity: Option<&str>,
    ) -> Result<(), PlatformError>;
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/core/mod.rs | crates/platform/src/core/mod.rs | //! Core platform abstractions and context management
use crate::binary::BinaryOperations;
use crate::filesystem::FilesystemOperations;
use crate::process::ProcessOperations;
use serde::{Deserialize, Serialize};
use sps2_errors::PlatformError;
use sps2_events::{
events::{
FailureContext, PlatformEvent, PlatformOperationContext, PlatformOperationKind,
PlatformOperationMetrics,
},
AppEvent, EventEmitter, EventSender,
};
use std::collections::HashMap;
use std::convert::TryFrom;
use std::future::Future;
use std::path::{Path, PathBuf};
use std::sync::{Arc, OnceLock, RwLock};
use std::time::{Duration, Instant, SystemTime};
/// Context for platform operations, providing event emission and metadata tracking
pub struct PlatformContext {
    /// Optional channel for emitting platform events; `None` means events are dropped.
    event_sender: Option<EventSender>,
    /// Per-operation metadata. NOTE(review): populated nowhere in this impl —
    /// confirm whether it is still needed.
    operation_metadata: HashMap<String, String>,
}
impl PlatformContext {
    /// Create a new platform context with event emission capabilities
    ///
    /// Pass `None` to create a context that silently drops events.
    pub fn new(event_sender: Option<EventSender>) -> Self {
        Self {
            event_sender,
            operation_metadata: HashMap::new(),
        }
    }

    /// Emit a platform event if event sender is available
    pub async fn emit_event(&self, event: AppEvent) {
        if let Some(sender) = self.event_sender.as_ref() {
            sender.emit(event);
        }
    }

    /// Execute an operation with automatic event emission
    ///
    /// Currently only measures elapsed time; start/completion event emission
    /// is still a TODO.
    pub async fn execute_with_events<T, F>(
        &self,
        _operation: &str,
        f: F,
    ) -> Result<T, PlatformError>
    where
        F: Future<Output = Result<T, PlatformError>>,
    {
        let started_at = Instant::now();
        // TODO: Emit operation started event
        let outcome = f.await;
        let _duration = started_at.elapsed();
        // TODO: Emit operation completed/failed event based on result
        outcome
    }

    /// Get access to the platform manager for tool registry and other services
    pub fn platform_manager(&self) -> &'static PlatformManager {
        PlatformManager::instance()
    }
}
/// Platform capabilities detected at runtime
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformCapabilities {
    /// Detected `install_name_tool` version, when known.
    pub install_name_tool_version: Option<String>,
    /// Whether `otool` is available.
    pub otool_available: bool,
    /// Whether `codesign` is available.
    pub codesign_available: bool,
    /// Whether APFS clonefile is supported on the target filesystem.
    pub apfs_clonefile_supported: bool,
    /// Whether atomic rename/swap operations are supported.
    pub atomic_operations_supported: bool,
}
impl Default for PlatformCapabilities {
    /// Optimistic defaults: assume the standard macOS toolchain and APFS are present.
    fn default() -> Self {
        let assume_macos_toolchain = true;
        Self {
            install_name_tool_version: None,
            otool_available: assume_macos_toolchain,
            codesign_available: assume_macos_toolchain,
            apfs_clonefile_supported: assume_macos_toolchain,
            atomic_operations_supported: assume_macos_toolchain,
        }
    }
}
/// Tool information with caching
///
/// Snapshot of a discovered executable: its location, reported version, and
/// when its existence was last verified.
#[derive(Debug, Clone)]
pub struct ToolInfo {
    /// Path to the executable. (Uses the `PathBuf` already imported at the
    /// top of this file, consistent with the rest of the module.)
    pub path: PathBuf,
    /// Version string as reported by the tool (format is tool-specific).
    pub version: String,
    /// Monotonic timestamp of the last successful verification.
    pub last_verified: Instant,
}
/// Registry for caching tool information with comprehensive platform tool support
#[derive(Debug)]
pub struct ToolRegistry {
    /// Cached tool information with thread-safe access
    tools: Arc<RwLock<HashMap<String, CachedTool>>>,
    /// Search paths for tool discovery (in priority order)
    search_paths: Vec<PathBuf>,
    /// Tool-specific fallback paths for common tools (keyed by tool name,
    /// populated in `new`)
    fallback_paths: HashMap<String, Vec<PathBuf>>,
    /// Event sender for tool discovery notifications (with interior mutability)
    event_tx: Arc<RwLock<Option<EventSender>>>,
}
/// Convert a `Duration` to whole milliseconds, saturating at `u64::MAX`
/// instead of panicking when the `u128` value does not fit.
fn duration_to_millis(duration: Duration) -> u64 {
    match u64::try_from(duration.as_millis()) {
        Ok(ms) => ms,
        Err(_) => u64::MAX,
    }
}
/// Persistent platform cache for storing discovered tools across process restarts
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformCache {
    /// Platform type (e.g., "macos")
    pub platform_type: String,
    /// When the cache was last updated (RFC 3339 timestamp, see `new`)
    pub discovered_at: String,
    /// Discovered tool paths (user-configurable), keyed by tool name
    pub tools: HashMap<String, String>,
    /// Platform capabilities detected
    pub capabilities: PlatformCapabilities,
}
impl PlatformCache {
    /// Get the default cache file path: ~/.config/sps2/.platform.json
    ///
    /// # Errors
    ///
    /// Returns a `ConfigError` when the user's home directory cannot be determined.
    pub fn default_path() -> Result<PathBuf, PlatformError> {
        let home_dir = dirs::home_dir().ok_or_else(|| PlatformError::ConfigError {
            message: "home directory not found".to_string(),
        })?;
        Ok(home_dir.join(".config").join("sps2").join(".platform.json"))
    }
    /// Load platform cache from file
    ///
    /// Returns `Ok(None)` when no cache file exists yet (e.g. first run).
    ///
    /// # Errors
    ///
    /// Returns a `ConfigError` when the file cannot be read or parsed as JSON.
    pub async fn load() -> Result<Option<Self>, PlatformError> {
        let cache_path = Self::default_path()?;
        if !cache_path.exists() {
            return Ok(None);
        }
        let contents = tokio::fs::read_to_string(&cache_path).await.map_err(|e| {
            PlatformError::ConfigError {
                message: format!("failed to read platform cache: {e}"),
            }
        })?;
        let cache: Self =
            serde_json::from_str(&contents).map_err(|e| PlatformError::ConfigError {
                message: format!("failed to parse platform cache: {e}"),
            })?;
        Ok(Some(cache))
    }
    /// Save platform cache to file
    ///
    /// Creates the parent config directory if needed, then writes pretty JSON.
    ///
    /// # Errors
    ///
    /// Returns a `ConfigError` when the directory cannot be created or the
    /// file cannot be serialized or written.
    pub async fn save(&self) -> Result<(), PlatformError> {
        let cache_path = Self::default_path()?;
        // Ensure parent directory exists
        if let Some(parent) = cache_path.parent() {
            tokio::fs::create_dir_all(parent)
                .await
                .map_err(|e| PlatformError::ConfigError {
                    message: format!("failed to create config directory: {e}"),
                })?;
        }
        let contents =
            serde_json::to_string_pretty(self).map_err(|e| PlatformError::ConfigError {
                message: format!("failed to serialize platform cache: {e}"),
            })?;
        tokio::fs::write(&cache_path, contents)
            .await
            .map_err(|e| PlatformError::ConfigError {
                message: format!("failed to write platform cache: {e}"),
            })?;
        Ok(())
    }
    /// Create new cache from current platform state
    ///
    /// `discovered_at` is stamped with the current UTC time in RFC 3339 format.
    pub fn new(platform_type: String, capabilities: PlatformCapabilities) -> Self {
        Self {
            platform_type,
            discovered_at: chrono::Utc::now().to_rfc3339(),
            tools: HashMap::new(),
            capabilities,
        }
    }
}
/// Cached information about a discovered tool
#[derive(Debug, Clone)]
pub struct CachedTool {
    /// Full path to the tool executable
    pub path: PathBuf,
    /// When this tool was last verified to exist (wall-clock time)
    pub verified_at: SystemTime,
    /// Tool version if detectable; `None` when detection failed or was skipped
    pub version: Option<String>,
    /// Tool capabilities and metadata
    pub metadata: ToolMetadata,
}
/// Metadata about tool capabilities and requirements
#[derive(Debug, Clone)]
pub struct ToolMetadata {
    /// Tool category for organization
    pub category: ToolCategory,
    /// Whether this tool is critical for platform operations
    pub is_critical: bool,
    /// Installation suggestion for this tool (surfaced in "not found" errors)
    pub install_suggestion: String,
}
/// Categories of tools for organization and priority
#[derive(Debug, Clone, PartialEq)]
pub enum ToolCategory {
    /// Platform-critical tools (otool, install_name_tool, codesign)
    PlatformCritical,
    /// Build system tools (make, cmake, gcc, clang)
    BuildSystem,
    /// Development utilities (pkg-config, autotools)
    Development,
    /// System utilities (which)
    System,
}
impl Default for ToolRegistry {
    fn default() -> Self {
        // Delegate to `new` so default and explicit construction stay in sync.
        Self::new()
    }
}
impl ToolRegistry {
    /// Create a new tool registry with comprehensive tool support and persistent caching
    ///
    /// Seeds per-tool fallback search paths (Xcode CLT, Homebrew, MacPorts)
    /// plus a generic search-path list; the in-memory tool cache starts empty
    /// and no event sender is attached yet.
    pub fn new() -> Self {
        let mut fallback_paths = HashMap::new();
        // Platform-critical tools (Xcode Command Line Tools)
        let xcode_paths = vec![
            PathBuf::from("/usr/bin"),
            PathBuf::from("/Applications/Xcode.app/Contents/Developer/usr/bin"),
            PathBuf::from("/Library/Developer/CommandLineTools/usr/bin"),
        ];
        fallback_paths.insert("otool".to_string(), xcode_paths.clone());
        fallback_paths.insert("install_name_tool".to_string(), xcode_paths.clone());
        fallback_paths.insert("codesign".to_string(), xcode_paths.clone());
        // Build system tools (common locations)
        let build_paths = vec![
            PathBuf::from("/usr/bin"),
            PathBuf::from("/usr/local/bin"),
            PathBuf::from("/opt/homebrew/bin"),
            PathBuf::from("/opt/local/bin"), // MacPorts
        ];
        for tool in &[
            "make",
            "cmake",
            "gcc",
            "clang",
            "configure",
            "autoconf",
            "automake",
            "libtool",
            "pkg-config",
            "ninja",
            "meson",
        ] {
            fallback_paths.insert(tool.to_string(), build_paths.clone());
        }
        // System tools
        fallback_paths.insert("which".to_string(), vec![PathBuf::from("/usr/bin")]);
        Self {
            tools: Arc::new(RwLock::new(HashMap::new())),
            search_paths: vec![
                PathBuf::from("/usr/bin"),
                PathBuf::from("/usr/local/bin"),
                PathBuf::from("/opt/homebrew/bin"),
                PathBuf::from("/Applications/Xcode.app/Contents/Developer/usr/bin"),
                PathBuf::from("/Library/Developer/CommandLineTools/usr/bin"),
            ],
            fallback_paths,
            event_tx: Arc::new(RwLock::new(None)),
        }
    }
    /// Build the event context describing a tool-discovery operation
    /// (`operation` and `tool` are combined into "operation:tool").
    fn tool_operation_context(
        operation: &str,
        tool: &str,
        path: Option<&Path>,
    ) -> PlatformOperationContext {
        PlatformOperationContext {
            kind: PlatformOperationKind::ToolDiscovery,
            operation: format!("{operation}:{tool}"),
            target: path.map(Path::to_path_buf),
            source: None,
            command: None,
        }
    }
    /// Build optional metrics for a tool-discovery event: duration plus the
    /// given notes and one "search_path=…" change entry per search path.
    /// Returns `None` when there is nothing at all to report.
    fn tool_operation_metrics(
        duration: Option<Duration>,
        search_paths: &[PathBuf],
        notes: Vec<String>,
    ) -> Option<PlatformOperationMetrics> {
        let duration_ms = duration.map(duration_to_millis);
        let mut changes = notes;
        changes.extend(
            search_paths
                .iter()
                .map(|path| format!("search_path={}", path.display())),
        );
        if duration_ms.is_none() && changes.is_empty() {
            return None;
        }
        Some(PlatformOperationMetrics {
            duration_ms,
            exit_code: None,
            stdout_bytes: None,
            stderr_bytes: None,
            changes: if changes.is_empty() {
                None
            } else {
                Some(changes)
            },
        })
    }
    /// Emit an `OperationStarted` event for a tool-discovery operation.
    fn emit_tool_operation_started(&self, operation: &str, tool: &str, path: Option<&Path>) {
        let context = Self::tool_operation_context(operation, tool, path);
        self.emit_event(AppEvent::Platform(PlatformEvent::OperationStarted {
            context,
        }));
    }
    /// Emit an `OperationCompleted` event with optional timing and notes.
    fn emit_tool_operation_completed(
        &self,
        operation: &str,
        tool: &str,
        path: Option<&Path>,
        duration: Option<Duration>,
        search_paths: &[PathBuf],
        notes: Vec<String>,
    ) {
        let context = Self::tool_operation_context(operation, tool, path);
        let metrics = Self::tool_operation_metrics(duration, search_paths, notes);
        self.emit_event(AppEvent::Platform(PlatformEvent::OperationCompleted {
            context,
            metrics,
        }));
    }
    /// Emit an `OperationFailed` event carrying the failure details.
    fn emit_tool_operation_failed(
        &self,
        operation: &str,
        tool: &str,
        path: Option<&Path>,
        failure: &PlatformError,
        metrics: Option<PlatformOperationMetrics>,
    ) {
        let context = Self::tool_operation_context(operation, tool, path);
        self.emit_event(AppEvent::Platform(PlatformEvent::OperationFailed {
            context,
            failure: FailureContext::from_error(failure),
            metrics,
        }));
    }
    /// Set event sender for tool discovery notifications
    ///
    /// Panics if the internal lock is poisoned (treated as a bug).
    pub fn set_event_sender(&self, tx: EventSender) {
        let mut event_tx = self.event_tx.write().unwrap();
        *event_tx = Some(tx);
    }
    /// Load tools from persistent cache
    ///
    /// Each cached entry is re-verified (path still exists and is executable)
    /// before being admitted to the in-memory map; stale entries are dropped.
    /// Emits cache_load started/completed events around the work.
    pub async fn load_from_cache(&self) -> Result<(), PlatformError> {
        if let Some(cache) = PlatformCache::load().await? {
            let cache_path = PlatformCache::default_path()?;
            let search_paths = vec![cache_path.clone()];
            self.emit_tool_operation_started("cache_load", "cache", Some(&cache_path));
            // Process cached tools and collect valid ones
            let mut valid_tools = Vec::new();
            for (name, path_str) in cache.tools {
                let path = PathBuf::from(path_str);
                // Verify tool still exists at cached location
                if path.exists() && self.is_executable(&path) {
                    let version = self.get_tool_version(&path).await.ok();
                    let metadata = self.get_tool_metadata(&name);
                    let cached_tool = CachedTool {
                        path,
                        verified_at: SystemTime::now(),
                        version,
                        metadata,
                    };
                    valid_tools.push((name, cached_tool));
                }
            }
            // Now update the cache with all valid tools at once
            // (collected first so the write lock is never held across awaits).
            {
                let mut tools = self.tools.write().unwrap();
                for (name, cached_tool) in valid_tools {
                    tools.insert(name, cached_tool);
                }
            }
            let tools_count = {
                let tools = self.tools.read().unwrap();
                tools.len()
            };
            let notes = vec![format!("loaded_tools={tools_count}")];
            self.emit_tool_operation_completed(
                "cache_load",
                "cache",
                Some(&cache_path),
                None,
                &search_paths,
                notes,
            );
        }
        Ok(())
    }
    /// Save current tools to persistent cache
    ///
    /// Merges the in-memory tool map into the existing on-disk cache (so
    /// entries already present there are kept) and refreshes the timestamp.
    pub async fn save_to_cache(&self) -> Result<(), PlatformError> {
        // Clone the tools data to avoid holding the lock across await points
        let cache_tools = {
            let tools = self.tools.read().unwrap();
            tools
                .iter()
                .map(|(name, cached_tool)| (name.clone(), cached_tool.path.display().to_string()))
                .collect::<HashMap<String, String>>()
        };
        // Create or load existing cache to preserve user overrides
        let mut cache = PlatformCache::load().await?.unwrap_or_else(|| {
            PlatformCache::new("macos".to_string(), PlatformCapabilities::default())
        });
        // Update with discovered tools (preserving any user overrides)
        for (name, path) in cache_tools {
            cache.tools.insert(name, path);
        }
        cache.discovered_at = chrono::Utc::now().to_rfc3339();
        cache.save().await?;
        Ok(())
    }
    /// Get a tool path, using persistent cache or discovering if necessary
    ///
    /// On a cache miss (or a stale cached path) the tool is re-discovered,
    /// cached in memory, and best-effort persisted to disk; a failed persist
    /// only emits a failure event and does not fail the lookup.
    pub async fn get_tool(&self, name: &str) -> Result<PathBuf, PlatformError> {
        // Check in-memory cache first
        if let Some(cached) = self.get_cached_tool(name) {
            // No TTL check - persistent cache is valid until tool moves
            if cached.path.exists() && self.is_executable(&cached.path) {
                return Ok(cached.path);
            } else {
                // Tool moved or deleted - remove from cache
                self.remove_cached_tool(name);
            }
        }
        // Tool not in memory cache or invalid - discover and cache
        let path = self.discover_tool(name).await?;
        self.cache_tool(name, path.clone()).await;
        // Save to persistent cache for future process starts
        if let Err(e) = self.save_to_cache().await {
            // Don't fail the operation if cache save fails
            let metrics = Self::tool_operation_metrics(None, &[], vec![format!("error={e}")]);
            self.emit_tool_operation_failed("cache_save", "cache", None, &e, metrics);
        }
        Ok(path)
    }
    /// Verify that a set of tools are available
    ///
    /// # Errors
    /// Returns `PlatformError::MultipleToolsNotFound` listing every missing
    /// tool together with its install suggestion.
    pub async fn verify_tools(&self, tools: &[&str]) -> Result<(), PlatformError> {
        let mut missing_tools = Vec::new();
        for &tool in tools {
            if self.get_tool(tool).await.is_err() {
                missing_tools.push(tool);
            }
        }
        if !missing_tools.is_empty() {
            let suggestions = missing_tools
                .iter()
                .map(|&tool| self.get_tool_metadata(tool).install_suggestion)
                .collect();
            return Err(PlatformError::MultipleToolsNotFound {
                tools: missing_tools.into_iter().map(String::from).collect(),
                suggestions,
            });
        }
        Ok(())
    }
    /// Get cached tool if available and not expired
    fn get_cached_tool(&self, name: &str) -> Option<CachedTool> {
        let tools = self.tools.read().unwrap();
        tools.get(name).cloned()
    }
    /// Remove a tool from the cache
    fn remove_cached_tool(&self, name: &str) {
        let mut tools = self.tools.write().unwrap();
        tools.remove(name);
    }
    /// Discover a tool by searching paths
    ///
    /// Resolution order: system PATH (via `which`) first, then the tool's
    /// fallback directories. Emits discover started/completed/failed events.
    async fn discover_tool(&self, name: &str) -> Result<PathBuf, PlatformError> {
        let search_paths = self.get_search_paths_for_tool(name);
        let start = Instant::now();
        self.emit_tool_operation_started("discover", name, None);
        // Try PATH first (fastest)
        if let Ok(path) = self.find_in_path(name).await {
            let duration = Some(start.elapsed());
            let notes = vec!["source=path".to_string()];
            self.emit_tool_operation_completed(
                "discover",
                name,
                Some(&path),
                duration,
                &search_paths,
                notes,
            );
            return Ok(path);
        }
        // Try fallback paths
        for search_path in search_paths.iter() {
            let candidate = search_path.join(name);
            if candidate.exists() && self.is_executable(&candidate) {
                let duration = Some(start.elapsed());
                let notes = vec![format!("source=fallback:{}", search_path.display())];
                self.emit_tool_operation_completed(
                    "discover",
                    name,
                    Some(&candidate),
                    duration,
                    &search_paths,
                    notes,
                );
                return Ok(candidate);
            }
        }
        // Tool not found
        let metadata = self.get_tool_metadata(name);
        let suggestion = metadata.install_suggestion;
        let error = PlatformError::ToolNotFound {
            tool: name.to_string(),
            suggestion: suggestion.clone(),
            searched_paths: search_paths.clone(),
        };
        let duration = Some(start.elapsed());
        let notes = vec![format!("suggestion={suggestion}")];
        let metrics = Self::tool_operation_metrics(duration, &search_paths, notes);
        self.emit_tool_operation_failed("discover", name, None, &error, metrics);
        Err(error)
    }
    /// Find tool using the system PATH
    ///
    /// Shells out to `which` and trims its single-line stdout into a path.
    async fn find_in_path(&self, name: &str) -> Result<PathBuf, PlatformError> {
        let output = tokio::process::Command::new("which")
            .arg(name)
            .output()
            .await
            .map_err(|e| PlatformError::CommandFailed {
                command: "which".to_string(),
                error: e.to_string(),
            })?;
        if output.status.success() {
            let path_str = String::from_utf8_lossy(&output.stdout);
            let path_str = path_str.trim();
            Ok(PathBuf::from(path_str))
        } else {
            Err(PlatformError::ToolNotFound {
                tool: name.to_string(),
                suggestion: "Tool not found in PATH".to_string(),
                searched_paths: vec![],
            })
        }
    }
    /// Get search paths for a specific tool (includes fallbacks)
    ///
    /// The generic search paths come first, then any per-tool fallback
    /// directories that are not already present (deduplicated).
    fn get_search_paths_for_tool(&self, name: &str) -> Vec<PathBuf> {
        let mut paths = self.search_paths.clone();
        if let Some(fallback_paths) = self.fallback_paths.get(name) {
            for path in fallback_paths {
                if !paths.contains(path) {
                    paths.push(path.clone());
                }
            }
        }
        paths
    }
    /// Check if a file is executable
    ///
    /// Returns false when metadata cannot be read (e.g. missing file).
    fn is_executable(&self, path: &Path) -> bool {
        use std::os::unix::fs::PermissionsExt;
        if let Ok(metadata) = std::fs::metadata(path) {
            let permissions = metadata.permissions();
            permissions.mode() & 0o111 != 0 // Check execute bits
        } else {
            false
        }
    }
    /// Cache tool information
    ///
    /// Probes the tool's version before taking the write lock so the lock is
    /// never held across an await.
    async fn cache_tool(&self, name: &str, path: PathBuf) {
        let version = self.get_tool_version(&path).await.ok();
        let metadata = self.get_tool_metadata(name);
        let cached_tool = CachedTool {
            path,
            verified_at: SystemTime::now(),
            version,
            metadata,
        };
        let mut tools = self.tools.write().unwrap();
        tools.insert(name.to_string(), cached_tool);
    }
    /// Get tool version if possible
    ///
    /// Tries `--version`, `-V`, then `-version`; returns the first line of
    /// the first successful invocation's stdout.
    async fn get_tool_version(&self, path: &Path) -> Result<String, PlatformError> {
        // Try common version flags
        for flag in &["--version", "-V", "-version"] {
            if let Ok(output) = tokio::process::Command::new(path).arg(flag).output().await {
                if output.status.success() {
                    let version_output = String::from_utf8_lossy(&output.stdout);
                    if let Some(first_line) = version_output.lines().next() {
                        return Ok(first_line.to_string());
                    }
                }
            }
        }
        Err(PlatformError::CommandFailed {
            command: path.display().to_string(),
            error: "Could not determine version".to_string(),
        })
    }
    /// Get metadata for a tool
    ///
    /// Static lookup table mapping known tool names to their category,
    /// criticality and install suggestion; unknown names get a generic
    /// Development entry pointing at `sps2 search`.
    fn get_tool_metadata(&self, name: &str) -> ToolMetadata {
        match name {
            // Platform-critical tools
            "otool" | "install_name_tool" | "codesign" => ToolMetadata {
                category: ToolCategory::PlatformCritical,
                is_critical: true,
                install_suggestion: "Install Xcode Command Line Tools: xcode-select --install"
                    .to_string(),
            },
            // Build system tools
            "make" => ToolMetadata {
                category: ToolCategory::BuildSystem,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install make".to_string(),
            },
            "cmake" => ToolMetadata {
                category: ToolCategory::BuildSystem,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install cmake".to_string(),
            },
            "gcc" => ToolMetadata {
                category: ToolCategory::BuildSystem,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install gcc".to_string(),
            },
            "clang" => ToolMetadata {
                category: ToolCategory::BuildSystem,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install llvm".to_string(),
            },
            // Development tools
            "configure" | "autoconf" | "automake" => ToolMetadata {
                category: ToolCategory::Development,
                is_critical: false,
                install_suggestion: format!("Install via sps2: sps2 install {name}"),
            },
            "libtool" => ToolMetadata {
                category: ToolCategory::Development,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install libtool".to_string(),
            },
            "pkg-config" => ToolMetadata {
                category: ToolCategory::Development,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install pkgconf".to_string(),
            },
            "ninja" => ToolMetadata {
                category: ToolCategory::BuildSystem,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install ninja".to_string(),
            },
            "meson" => ToolMetadata {
                category: ToolCategory::BuildSystem,
                is_critical: false,
                install_suggestion: "Install via sps2: sps2 install meson".to_string(),
            },
            // System tools
            "which" => ToolMetadata {
                category: ToolCategory::System,
                is_critical: true,
                install_suggestion: "System tool 'which' should be available by default"
                    .to_string(),
            },
            // Unknown tools
            _ => ToolMetadata {
                category: ToolCategory::Development,
                is_critical: false,
                install_suggestion: format!("Tool '{name}' not found. Try: sps2 search {name}"),
            },
        }
    }
    /// Emit an event if sender is available (silently dropped otherwise)
    fn emit_event(&self, event: AppEvent) {
        let event_tx = self.event_tx.read().unwrap();
        if let Some(tx) = event_tx.as_ref() {
            tx.emit(event);
        }
    }
    /// Get all cached tools for debugging
    pub fn get_cached_tools(&self) -> HashMap<String, CachedTool> {
        self.tools.read().unwrap().clone()
    }
    /// Clear the tool cache (useful for testing or forced refresh)
    pub fn clear_cache(&self) {
        self.tools.write().unwrap().clear();
    }
    /// Get tools by category
    ///
    /// Filters the fixed list of known tools through `get_tool_metadata`;
    /// does not consult the cache or the filesystem.
    pub async fn get_tools_by_category(&self, category: ToolCategory) -> Vec<String> {
        let all_tools = [
            "otool",
            "install_name_tool",
            "codesign",
            "which",
            "make",
            "cmake",
            "gcc",
            "clang",
            "configure",
            "autoconf",
            "automake",
            "libtool",
            "pkg-config",
            "ninja",
            "meson",
        ];
        all_tools
            .into_iter()
            .filter(|&tool| self.get_tool_metadata(tool).category == category)
            .map(String::from)
            .collect()
    }
}
/// Singleton platform manager for optimized platform operations
///
/// Shares one `Platform`, one capability snapshot, and one `ToolRegistry`
/// process-wide; obtain it via `PlatformManager::instance()`.
pub struct PlatformManager {
    /// Shared platform operations facade
    platform: Arc<Platform>,
    /// Platform capability snapshot
    capabilities: Arc<PlatformCapabilities>,
    /// Process-wide tool discovery cache
    tool_registry: Arc<ToolRegistry>,
}
impl PlatformManager {
    /// Get the singleton instance of the platform manager
    ///
    /// Lazily constructed on first call via `OnceLock`; all later calls
    /// return the same instance.
    pub fn instance() -> &'static PlatformManager {
        static INSTANCE: OnceLock<PlatformManager> = OnceLock::new();
        INSTANCE.get_or_init(|| {
            let platform = Arc::new(Platform::new_internal());
            let capabilities = Arc::new(PlatformCapabilities::default());
            let tool_registry = Arc::new(ToolRegistry::new());
            PlatformManager {
                platform,
                capabilities,
                tool_registry,
            }
        })
    }
    /// Get the shared platform instance
    pub fn platform(&self) -> &Arc<Platform> {
        &self.platform
    }
    /// Get platform capabilities
    pub fn capabilities(&self) -> &Arc<PlatformCapabilities> {
        &self.capabilities
    }
    /// Get the tool registry
    pub fn tool_registry(&self) -> &Arc<ToolRegistry> {
        &self.tool_registry
    }
    /// Get a tool path, discovering and caching if necessary
    /// (delegates to `ToolRegistry::get_tool`)
    pub async fn get_tool(&self, name: &str) -> Result<PathBuf, PlatformError> {
        self.tool_registry.get_tool(name).await
    }
    /// Verify that a set of tools are available
    /// (delegates to `ToolRegistry::verify_tools`)
    pub async fn verify_tools(&self, tools: &[&str]) -> Result<(), PlatformError> {
        self.tool_registry.verify_tools(tools).await
    }
    /// Set event sender for tool discovery notifications
    pub fn set_tool_event_sender(&self, tx: EventSender) {
        self.tool_registry.set_event_sender(tx);
    }
    /// Initialize the tool cache from persistent storage
    /// This should be called once during application startup
    pub async fn initialize_cache(&self) -> Result<(), PlatformError> {
        self.tool_registry.load_from_cache().await
    }
    /// Save current tool cache to persistent storage
    pub async fn save_cache(&self) -> Result<(), PlatformError> {
        self.tool_registry.save_to_cache().await
    }
}
/// Main platform abstraction providing access to all platform operations
///
/// Holds trait objects so alternative implementations can be injected via
/// `Platform::new`; the singleton path wires up the macOS implementations.
pub struct Platform {
    binary_ops: Box<dyn BinaryOperations>,
    filesystem_ops: Box<dyn FilesystemOperations>,
    process_ops: Box<dyn ProcessOperations>,
}
impl Platform {
    /// Create a new platform instance with the specified implementations
    pub fn new(
        binary_ops: Box<dyn BinaryOperations>,
        filesystem_ops: Box<dyn FilesystemOperations>,
        process_ops: Box<dyn ProcessOperations>,
    ) -> Self {
        Self {
            binary_ops,
            filesystem_ops,
            process_ops,
        }
    }
    /// Internal method to create platform instance for singleton
    /// (wires up the concrete macOS implementations)
    fn new_internal() -> Self {
        use crate::implementations::macos::{
            binary::MacOSBinaryOperations, filesystem::MacOSFilesystemOperations,
            process::MacOSProcessOperations,
        };
        Self::new(
            Box::new(MacOSBinaryOperations::new()),
            Box::new(MacOSFilesystemOperations::new()),
            Box::new(MacOSProcessOperations::new()),
        )
    }
    // REMOVED: Use PlatformManager::instance().platform() instead
    /// Access binary operations
    pub fn binary(&self) -> &dyn BinaryOperations {
        &*self.binary_ops
    }
    /// Access filesystem operations
    pub fn filesystem(&self) -> &dyn FilesystemOperations {
        &*self.filesystem_ops
    }
    /// Access process operations
    pub fn process(&self) -> &dyn ProcessOperations {
        &*self.process_ops
    }
    /// Create a platform context with event emission
    pub fn create_context(&self, event_sender: Option<EventSender>) -> PlatformContext {
        PlatformContext::new(event_sender)
    }
    /// Convenience method: Clone a file using APFS clonefile
    /// (delegates to the filesystem implementation)
    pub async fn clone_file(
        &self,
        ctx: &PlatformContext,
        src: &std::path::Path,
        dst: &std::path::Path,
    ) -> Result<(), sps2_errors::PlatformError> {
        self.filesystem().clone_file(ctx, src, dst).await
    }
    /// Convenience method: Get binary dependencies
    /// (delegates to the binary implementation)
    pub async fn get_dependencies(
        &self,
        ctx: &PlatformContext,
        binary: &std::path::Path,
    ) -> Result<Vec<String>, sps2_errors::PlatformError> {
        self.binary().get_dependencies(ctx, binary).await
    }
    /// Convenience method: Execute a command and get output
    /// (delegates to the process implementation)
    pub async fn execute_command(
        &self,
        ctx: &PlatformContext,
        cmd: crate::process::PlatformCommand,
    ) -> Result<crate::process::CommandOutput, sps2_errors::Error> {
        self.process().execute_command(ctx, cmd).await
    }
    /// Convenience method: Create a new command builder
    pub fn command(&self, program: &str) -> crate::process::PlatformCommand {
        self.process().create_command(program)
    }
    /// Convenience method: Sign a binary
    /// (delegates to the binary implementation)
    pub async fn sign_binary(
        &self,
        ctx: &PlatformContext,
        binary: &std::path::Path,
        identity: Option<&str>,
    ) -> Result<(), sps2_errors::PlatformError> {
        self.binary().sign_binary(ctx, binary, identity).await
    }
    /// Convenience method: Atomically rename a file
    /// (delegates to the filesystem implementation)
    pub async fn atomic_rename(
        &self,
        ctx: &PlatformContext,
        src: &std::path::Path,
        dst: &std::path::Path,
    ) -> Result<(), sps2_errors::PlatformError> {
        self.filesystem().atomic_rename(ctx, src, dst).await
    }
}
/// Integration helpers for converting from other context types
impl PlatformContext {
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | true |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/implementations/mod.rs | crates/platform/src/implementations/mod.rs | //! Platform-specific implementations
pub mod macos;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/implementations/macos/process.rs | crates/platform/src/implementations/macos/process.rs | //! macOS process operations implementation
//!
//! This module wraps the existing proven process execution patterns from the codebase
//! with the platform abstraction layer, adding event emission and proper error handling.
use async_trait::async_trait;
use sps2_errors::{Error, PlatformError};
use sps2_events::{
events::{
FailureContext, PlatformEvent, PlatformOperationContext, PlatformOperationKind,
PlatformOperationMetrics, ProcessCommandDescriptor,
},
AppEvent,
};
use std::convert::TryFrom;
use std::path::PathBuf;
use std::time::{Duration, Instant};
use tokio::process::Command;
use crate::core::PlatformContext;
use crate::process::{CommandOutput, PlatformCommand, ProcessOperations};
/// macOS implementation of process operations
///
/// Stateless marker type: all behavior lives in its `ProcessOperations`
/// trait implementation.
pub struct MacOSProcessOperations;
impl MacOSProcessOperations {
    /// Create a new (zero-sized) process-operations handle.
    pub fn new() -> Self {
        Self
    }
}
impl Default for MacOSProcessOperations {
    fn default() -> Self {
        // Delegate to `new` so the two constructors cannot drift apart.
        Self::new()
    }
}
/// Build the event context describing a single process execution.
///
/// Process events carry no target/source path — the command descriptor is
/// the interesting payload.
fn process_context(descriptor: ProcessCommandDescriptor) -> PlatformOperationContext {
    PlatformOperationContext {
        command: Some(descriptor),
        operation: String::from("execute_command"),
        kind: PlatformOperationKind::Process,
        source: None,
        target: None,
    }
}
/// Build metrics for a process event: wall-clock duration plus, when the
/// command produced output, its exit code and captured stream sizes.
fn process_metrics(duration: Duration, output: Option<&CommandOutput>) -> PlatformOperationMetrics {
    let (exit_code, stdout_bytes, stderr_bytes) = match output {
        Some(out) => (
            out.status.code(),
            u64::try_from(out.stdout.len()).ok(),
            u64::try_from(out.stderr.len()).ok(),
        ),
        None => (None, None, None),
    };
    PlatformOperationMetrics {
        duration_ms: Some(duration_to_millis(duration)),
        exit_code,
        stdout_bytes,
        stderr_bytes,
        changes: None,
    }
}
/// Convert a `Duration` to whole milliseconds, saturating at `u64::MAX`
/// instead of truncating when the value does not fit.
fn duration_to_millis(duration: Duration) -> u64 {
    match u64::try_from(duration.as_millis()) {
        Ok(millis) => millis,
        Err(_) => u64::MAX,
    }
}
/// Notify listeners that a subprocess is about to be spawned.
async fn emit_process_started(ctx: &PlatformContext, descriptor: &ProcessCommandDescriptor) {
    let context = process_context(descriptor.clone());
    let event = AppEvent::Platform(PlatformEvent::OperationStarted { context });
    ctx.emit_event(event).await;
}
/// Notify listeners that a subprocess finished, including timing, exit code
/// and captured output sizes.
async fn emit_process_completed(
    ctx: &PlatformContext,
    descriptor: &ProcessCommandDescriptor,
    output: &CommandOutput,
    duration: Duration,
) {
    let metrics = process_metrics(duration, Some(output));
    let event = AppEvent::Platform(PlatformEvent::OperationCompleted {
        context: process_context(descriptor.clone()),
        metrics: Some(metrics),
    });
    ctx.emit_event(event).await;
}
async fn emit_process_failed(
ctx: &PlatformContext,
descriptor: &ProcessCommandDescriptor,
error: &PlatformError,
duration: Duration,
) {
ctx.emit_event(AppEvent::Platform(PlatformEvent::OperationFailed {
context: process_context(descriptor.clone()),
failure: FailureContext::from_error(error),
metrics: Some(process_metrics(duration, None)),
}))
.await;
}
#[async_trait]
impl ProcessOperations for MacOSProcessOperations {
    /// Run `cmd` to completion and emit started/completed/failed platform
    /// events around the execution.
    ///
    /// The child's stdout/stderr are inherited (streamed straight through to
    /// the terminal), so the returned `CommandOutput` carries empty buffers —
    /// only the exit status is meaningful.
    async fn execute_command(
        &self,
        ctx: &PlatformContext,
        cmd: PlatformCommand,
    ) -> Result<CommandOutput, Error> {
        let start = Instant::now();
        let command_str = cmd.program().to_string();
        let args_clone = cmd.get_args().to_vec();
        let descriptor = ProcessCommandDescriptor {
            program: command_str.clone(),
            args: args_clone.clone(),
            cwd: cmd.get_current_dir().cloned(),
        };
        // Emit operation started event
        emit_process_started(ctx, &descriptor).await;
        // Use inherit to pass through stdout/stderr directly to terminal
        let result: Result<CommandOutput, PlatformError> = async {
            let mut command = Command::new(cmd.program());
            command.args(cmd.get_args());
            command.stdout(std::process::Stdio::inherit());
            command.stderr(std::process::Stdio::inherit());
            if let Some(dir) = cmd.get_current_dir() {
                command.current_dir(dir);
            }
            // Set environment variables
            for (key, value) in cmd.get_env_vars() {
                command.env(key, value);
            }
            // Spawn the process and wait for it to complete
            let mut child = command
                .spawn()
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: cmd.program().to_string(),
                    message: e.to_string(),
                })?;
            let status = child
                .wait()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: cmd.program().to_string(),
                    message: e.to_string(),
                })?;
            Ok(CommandOutput {
                status,
                stdout: Vec::new(), // stdout was inherited
                stderr: Vec::new(), // stderr was inherited
            })
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(output) => {
                emit_process_completed(ctx, &descriptor, output, duration).await;
            }
            Err(e) => {
                emit_process_failed(ctx, &descriptor, e, duration).await;
            }
        }
        result.map_err(Error::from)
    }
    /// Create a new command builder for `program`.
    fn create_command(&self, program: &str) -> PlatformCommand {
        PlatformCommand::new(program)
    }
    /// Locate `program` on the PATH by shelling out to `which`.
    ///
    /// The `which` binary itself is resolved through the shared tool
    /// registry so its location is cached across calls.
    async fn which(&self, program: &str) -> Result<PathBuf, Error> {
        // Use the platform manager's tool registry for which command
        let platform_manager = crate::core::PlatformManager::instance();
        let which_path = platform_manager
            .get_tool("which")
            .await
            .map_err(Error::from)?;
        let output = Command::new(&which_path)
            .arg(program)
            .output()
            .await
            .map_err(|e| {
                Error::from(PlatformError::ProcessExecutionFailed {
                    command: "which".to_string(),
                    message: e.to_string(),
                })
            })?;
        if output.status.success() {
            let path_str = String::from_utf8_lossy(&output.stdout).trim().to_string();
            Ok(PathBuf::from(path_str))
        } else {
            Err(Error::from(PlatformError::CommandNotFound {
                command: program.to_string(),
            }))
        }
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/implementations/macos/filesystem.rs | crates/platform/src/implementations/macos/filesystem.rs | //! macOS filesystem operations implementation
//!
//! This module wraps the existing proven filesystem operations from the root crate
//! with the platform abstraction layer, adding event emission and proper error handling.
use async_trait::async_trait;
use sps2_errors::PlatformError;
use sps2_events::{
events::{
FailureContext, PlatformEvent, PlatformOperationContext, PlatformOperationKind,
PlatformOperationMetrics,
},
AppEvent,
};
use std::ffi::CString;
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use std::time::{Duration, Instant};
use tokio::fs;
use crate::core::PlatformContext;
use crate::filesystem::FilesystemOperations;
/// macOS implementation of filesystem operations
///
/// Stateless marker type: behavior lives in its inherent helpers and its
/// `FilesystemOperations` trait implementation.
pub struct MacOSFilesystemOperations;
impl MacOSFilesystemOperations {
    /// Create a new (zero-sized) filesystem-operations handle.
    pub fn new() -> Self {
        Self
    }
    /// Recursively compute the total size in bytes of a file or directory
    /// tree. Entries that are neither files nor directories (symlinks,
    /// devices, etc.) contribute 0.
    async fn calculate_size(&self, path: &Path) -> Result<u64, std::io::Error> {
        let meta = tokio::fs::metadata(path).await?;
        if meta.is_file() {
            return Ok(meta.len());
        }
        if !meta.is_dir() {
            // Symlinks, sockets, devices, etc.
            return Ok(0);
        }
        let mut bytes = 0u64;
        let mut dir = tokio::fs::read_dir(path).await?;
        while let Some(child) = dir.next_entry().await? {
            let child_path = child.path();
            // Box::pin: async recursion needs a heap-allocated future.
            bytes += Box::pin(self.calculate_size(&child_path)).await?;
        }
        Ok(bytes)
    }
}
impl Default for MacOSFilesystemOperations {
    fn default() -> Self {
        // Delegate to `new` so the two constructors cannot drift apart.
        Self::new()
    }
}
/// Build the event context describing a filesystem operation on `target`,
/// optionally originating from `source` (e.g. for copies/renames).
fn filesystem_context(
    operation: &str,
    source: Option<&Path>,
    target: &Path,
) -> PlatformOperationContext {
    PlatformOperationContext {
        command: None,
        kind: PlatformOperationKind::Filesystem,
        operation: String::from(operation),
        source: source.map(Path::to_path_buf),
        target: Some(target.to_path_buf()),
    }
}
/// Build metrics for a filesystem event: duration (saturating at `u64::MAX`
/// milliseconds) plus an optional list of human-readable change notes.
fn filesystem_metrics(
    duration: Duration,
    changes: Option<Vec<String>>,
) -> PlatformOperationMetrics {
    let millis = u64::try_from(duration.as_millis()).unwrap_or(u64::MAX);
    PlatformOperationMetrics {
        duration_ms: Some(millis),
        exit_code: None,
        stdout_bytes: None,
        stderr_bytes: None,
        changes,
    }
}
/// Notify listeners that a filesystem operation is starting.
async fn emit_fs_started(
    ctx: &PlatformContext,
    operation: &str,
    source: Option<&Path>,
    target: &Path,
) {
    let context = filesystem_context(operation, source, target);
    let event = AppEvent::Platform(PlatformEvent::OperationStarted { context });
    ctx.emit_event(event).await;
}
/// Notify listeners that a filesystem operation completed, with timing and
/// optional change notes.
async fn emit_fs_completed(
    ctx: &PlatformContext,
    operation: &str,
    source: Option<&Path>,
    target: &Path,
    changes: Option<Vec<String>>,
    duration: Duration,
) {
    let metrics = filesystem_metrics(duration, changes);
    let event = AppEvent::Platform(PlatformEvent::OperationCompleted {
        context: filesystem_context(operation, source, target),
        metrics: Some(metrics),
    });
    ctx.emit_event(event).await;
}
async fn emit_fs_failed(
ctx: &PlatformContext,
operation: &str,
source: Option<&Path>,
target: &Path,
error: &PlatformError,
duration: Duration,
) {
ctx.emit_event(AppEvent::Platform(PlatformEvent::OperationFailed {
context: filesystem_context(operation, source, target),
failure: FailureContext::from_error(error),
metrics: Some(filesystem_metrics(duration, None)),
}))
.await;
}
#[async_trait]
impl FilesystemOperations for MacOSFilesystemOperations {
    /// Clone `src` to `dst` using the APFS `clonefile` syscall (copy-on-write).
    ///
    /// Emits `OperationStarted` before the attempt and `OperationCompleted`
    /// or `OperationFailed` afterwards.
    ///
    /// # Errors
    /// Returns `PlatformError::FilesystemOperationFailed` when either path
    /// contains an interior NUL byte, when `clonefile` itself fails (errno is
    /// included in the message), or when the blocking task cannot be joined.
    async fn clone_file(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_fs_started(ctx, "clone_file", Some(src), dst).await;
        // Use the proven APFS clonefile implementation from root crate
        let result = async {
            // APFS clonefile constants:
            // CLONE_NOFOLLOW — don't follow `src` if it is a symlink;
            // CLONE_NOOWNERCOPY — don't copy ownership information.
            const CLONE_NOFOLLOW: u32 = 0x0001;
            const CLONE_NOOWNERCOPY: u32 = 0x0002;
            // The syscall needs NUL-terminated C strings; CString::new fails
            // only on interior NUL bytes in the path.
            let src_cstring = CString::new(src.as_os_str().as_bytes()).map_err(|_| {
                PlatformError::FilesystemOperationFailed {
                    operation: "clone_file".to_string(),
                    message: format!("Invalid source path: {}", src.display()),
                }
            })?;
            let dst_cstring = CString::new(dst.as_os_str().as_bytes()).map_err(|_| {
                PlatformError::FilesystemOperationFailed {
                    operation: "clone_file".to_string(),
                    message: format!("Invalid destination path: {}", dst.display()),
                }
            })?;
            // clonefile is a blocking syscall — run it off the async executor.
            tokio::task::spawn_blocking(move || {
                // SAFETY: clonefile is available on macOS and we're passing valid C strings
                unsafe {
                    let result = libc::clonefile(
                        src_cstring.as_ptr(),
                        dst_cstring.as_ptr(),
                        CLONE_NOFOLLOW | CLONE_NOOWNERCOPY,
                    );
                    if result != 0 {
                        // Read errno immediately after the failing call, before
                        // anything else can clobber it.
                        let errno = *libc::__error();
                        return Err(PlatformError::FilesystemOperationFailed {
                            operation: "clone_file".to_string(),
                            message: format!(
                                "clonefile failed with code {result}, errno: {errno} ({})",
                                std::io::Error::from_raw_os_error(errno)
                            ),
                        });
                    }
                }
                Ok(())
            })
            .await
            .map_err(|e| PlatformError::FilesystemOperationFailed {
                operation: "clone_file".to_string(),
                message: format!("clone task failed: {e}"),
            })?
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_fs_completed(
                    ctx,
                    "clone_file",
                    Some(src),
                    dst,
                    Some(vec![format!("{} -> {}", src.display(), dst.display())]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_fs_failed(ctx, "clone_file", Some(src), dst, e, duration).await;
            }
        }
        result
    }
    /// Clone the directory tree at `src` to `dst` via the APFS `clonefile`
    /// syscall, which handles directories recursively (copy-on-write).
    ///
    /// Emits `OperationStarted` before the attempt and `OperationCompleted`
    /// or `OperationFailed` afterwards.
    ///
    /// # Errors
    /// Returns `PlatformError::FilesystemOperationFailed` when either path
    /// contains an interior NUL byte, when `clonefile` itself fails (errno is
    /// included in the message), or when the blocking task cannot be joined.
    async fn clone_directory(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_fs_started(ctx, "clone_directory", Some(src), dst).await;
        // Use the same clonefile implementation as clone_file since APFS clonefile handles directories
        let result = async {
            // APFS clonefile constants (no symlink following, no owner copy)
            const CLONE_NOFOLLOW: u32 = 0x0001;
            const CLONE_NOOWNERCOPY: u32 = 0x0002;
            let src_cstring = CString::new(src.as_os_str().as_bytes()).map_err(|_| {
                PlatformError::FilesystemOperationFailed {
                    operation: "clone_directory".to_string(),
                    message: format!("Invalid source path: {}", src.display()),
                }
            })?;
            let dst_cstring = CString::new(dst.as_os_str().as_bytes()).map_err(|_| {
                PlatformError::FilesystemOperationFailed {
                    operation: "clone_directory".to_string(),
                    message: format!("Invalid destination path: {}", dst.display()),
                }
            })?;
            // clonefile is a blocking syscall — run it off the async executor.
            tokio::task::spawn_blocking(move || {
                // SAFETY: clonefile is available on macOS and we're passing valid C strings
                unsafe {
                    let result = libc::clonefile(
                        src_cstring.as_ptr(),
                        dst_cstring.as_ptr(),
                        CLONE_NOFOLLOW | CLONE_NOOWNERCOPY,
                    );
                    if result != 0 {
                        // Capture errno immediately after the failing call.
                        let errno = *libc::__error();
                        return Err(PlatformError::FilesystemOperationFailed {
                            operation: "clone_directory".to_string(),
                            message: format!(
                                "clonefile failed with code {result}, errno: {errno} ({})",
                                std::io::Error::from_raw_os_error(errno)
                            ),
                        });
                    }
                }
                Ok(())
            })
            .await
            .map_err(|e| PlatformError::FilesystemOperationFailed {
                operation: "clone_directory".to_string(),
                message: format!("clone task failed: {e}"),
            })?
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_fs_completed(ctx, "clone_directory", Some(src), dst, None, duration).await;
            }
            Err(e) => {
                emit_fs_failed(ctx, "clone_directory", Some(src), dst, e, duration).await;
            }
        }
        result
    }
    /// Rename `src` onto `dst`, replacing `dst` if it already exists.
    ///
    /// On macOS a pre-existing destination directory is first moved aside to
    /// `dst` with an ".old" extension, then deleted on success or restored on
    /// failure; plain files are replaced with a single rename. On other
    /// targets this falls back to `fs::rename` directly.
    ///
    /// NOTE(review): the directory-replacement path is not one atomic step —
    /// there is a brief window where `dst` is absent, and the ".old" backup
    /// path could collide with an existing entry. Confirm callers tolerate
    /// this.
    async fn atomic_rename(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_fs_started(ctx, "atomic_rename", Some(src), dst).await;
        // Use the proven atomic rename implementation from root crate
        let result = async {
            #[cfg(target_os = "macos")]
            {
                // Use async filesystem operations for proper directory handling
                if dst.exists() {
                    if dst.is_dir() {
                        // For directories, we need to remove the destination first
                        // Create a temporary backup location
                        let temp_dst = dst.with_extension("old");
                        // Move destination to temp location
                        fs::rename(dst, &temp_dst).await.map_err(|e| {
                            PlatformError::FilesystemOperationFailed {
                                operation: "atomic_rename".to_string(),
                                message: format!("failed to backup destination: {e}"),
                            }
                        })?;
                        // Move source to destination
                        match fs::rename(src, dst).await {
                            Ok(()) => {
                                // Success! Remove the old destination
                                // (best-effort: failure to clean up is ignored)
                                let _ = fs::remove_dir_all(&temp_dst).await;
                                Ok(())
                            }
                            Err(e) => {
                                // Failed! Restore the original destination
                                // (best-effort rollback)
                                let _ = fs::rename(&temp_dst, dst).await;
                                Err(PlatformError::FilesystemOperationFailed {
                                    operation: "atomic_rename".to_string(),
                                    message: format!("rename failed: {e}"),
                                })
                            }
                        }
                    } else {
                        // For files, regular rename should work
                        fs::rename(src, dst).await.map_err(|e| {
                            PlatformError::FilesystemOperationFailed {
                                operation: "atomic_rename".to_string(),
                                message: format!("rename failed: {e}"),
                            }
                        })
                    }
                } else {
                    // Destination doesn't exist, regular rename
                    fs::rename(src, dst).await.map_err(|e| {
                        PlatformError::FilesystemOperationFailed {
                            operation: "atomic_rename".to_string(),
                            message: format!("rename failed: {e}"),
                        }
                    })
                }
            }
            #[cfg(not(target_os = "macos"))]
            {
                // Fallback to regular rename (not truly atomic swap)
                fs::rename(src, dst)
                    .await
                    .map_err(|e| PlatformError::FilesystemOperationFailed {
                        operation: "atomic_rename".to_string(),
                        message: e.to_string(),
                    })
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_fs_completed(ctx, "atomic_rename", Some(src), dst, None, duration).await;
            }
            Err(e) => {
                emit_fs_failed(ctx, "atomic_rename", Some(src), dst, e, duration).await;
            }
        }
        result
    }
    /// Atomically exchange the filesystem entries at `path_a` and `path_b`.
    ///
    /// On macOS this uses `renamex_np` with `RENAME_SWAP`, which exchanges
    /// both paths in a single filesystem transaction. On other targets it
    /// falls back to a three-step rename through a temporary path, which is
    /// NOT atomic and can leave an intermediate state on failure.
    ///
    /// # Errors
    ///
    /// Fails if either path does not exist, contains an interior NUL byte,
    /// the swap/rename syscall fails, or the blocking task is aborted.
    async fn atomic_swap(
        &self,
        ctx: &PlatformContext,
        path_a: &Path,
        path_b: &Path,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_fs_started(ctx, "atomic_swap", Some(path_a), path_b).await;
        // Use the proven atomic swap implementation from root crate
        let result = async {
            #[cfg(target_os = "macos")]
            {
                use libc::{c_uint, renamex_np, RENAME_SWAP};
                // Verify both paths exist before attempting swap
                if !path_a.exists() {
                    return Err(PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("Path does not exist: {}", path_a.display()),
                    });
                }
                if !path_b.exists() {
                    return Err(PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("Path does not exist: {}", path_b.display()),
                    });
                }
                // Paths must be NUL-free to cross the C FFI boundary.
                let path1_cstring = CString::new(path_a.as_os_str().as_bytes()).map_err(|_| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("Invalid path: {}", path_a.display()),
                    }
                })?;
                let path2_cstring = CString::new(path_b.as_os_str().as_bytes()).map_err(|_| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("Invalid path: {}", path_b.display()),
                    }
                })?;
                // The blocking syscall runs off the async executor threads.
                tokio::task::spawn_blocking(move || {
                    #[allow(unsafe_code)]
                    // SAFETY: renamex_np is available on macOS and we're passing valid C strings
                    unsafe {
                        if renamex_np(
                            path1_cstring.as_ptr(),
                            path2_cstring.as_ptr(),
                            RENAME_SWAP as c_uint,
                        ) != 0
                        {
                            let err = std::io::Error::last_os_error();
                            return Err(PlatformError::FilesystemOperationFailed {
                                operation: "atomic_swap".to_string(),
                                message: format!("atomic swap failed: {err}"),
                            });
                        }
                    }
                    Ok(())
                })
                .await
                .map_err(|e| PlatformError::FilesystemOperationFailed {
                    operation: "atomic_swap".to_string(),
                    message: format!("swap task failed: {e}"),
                })?
            }
            #[cfg(not(target_os = "macos"))]
            {
                // No true atomic swap available on non-macOS platforms
                // This is a potentially unsafe fallback using temporary file
                let temp_path = path_a.with_extension("tmp_swap");
                fs::rename(path_a, &temp_path).await.map_err(|e| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("temp rename failed: {e}"),
                    }
                })?;
                fs::rename(path_b, path_a).await.map_err(|e| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("second rename failed: {e}"),
                    }
                })?;
                fs::rename(&temp_path, path_b).await.map_err(|e| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "atomic_swap".to_string(),
                        message: format!("final rename failed: {e}"),
                    }
                })?;
                Ok(())
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_fs_completed(ctx, "atomic_swap", Some(path_a), path_b, None, duration).await;
            }
            Err(e) => {
                emit_fs_failed(ctx, "atomic_swap", Some(path_a), path_b, e, duration).await;
            }
        }
        result
    }
    /// Create a hard link at `dst` referring to the same inode as `src`.
    ///
    /// On macOS this calls `libc::link` on a blocking task; other targets
    /// delegate to `fs::hard_link`. Syscall failures include the errno and
    /// its human-readable form in the error message.
    async fn hard_link(
        &self,
        ctx: &PlatformContext,
        src: &Path,
        dst: &Path,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_fs_started(ctx, "hard_link", Some(src), dst).await;
        // Use the proven hard link implementation from root crate
        let result = async {
            #[cfg(target_os = "macos")]
            {
                // Paths must be NUL-free to cross the C FFI boundary.
                let src_cstring = CString::new(src.as_os_str().as_bytes()).map_err(|_| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "hard_link".to_string(),
                        message: format!("Invalid source path: {}", src.display()),
                    }
                })?;
                let dst_cstring = CString::new(dst.as_os_str().as_bytes()).map_err(|_| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "hard_link".to_string(),
                        message: format!("Invalid destination path: {}", dst.display()),
                    }
                })?;
                tokio::task::spawn_blocking(move || {
                    // SAFETY: both pointers come from NUL-terminated CStrings
                    // that remain alive for the duration of the call.
                    let result = unsafe { libc::link(src_cstring.as_ptr(), dst_cstring.as_ptr()) };
                    if result != 0 {
                        let errno = unsafe { *libc::__error() };
                        return Err(PlatformError::FilesystemOperationFailed {
                            operation: "hard_link".to_string(),
                            message: format!(
                                "hard link failed with code {result}, errno: {errno} ({})",
                                std::io::Error::from_raw_os_error(errno)
                            ),
                        });
                    }
                    Ok(())
                })
                .await
                .map_err(|e| PlatformError::FilesystemOperationFailed {
                    operation: "hard_link".to_string(),
                    message: format!("hard link task failed: {e}"),
                })?
            }
            #[cfg(not(target_os = "macos"))]
            {
                fs::hard_link(src, dst).await.map_err(|e| {
                    PlatformError::FilesystemOperationFailed {
                        operation: "hard_link".to_string(),
                        message: format!("hard link failed: {e}"),
                    }
                })
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_fs_completed(ctx, "hard_link", Some(src), dst, None, duration).await;
            }
            Err(e) => {
                emit_fs_failed(ctx, "hard_link", Some(src), dst, e, duration).await;
            }
        }
        result
    }
async fn create_dir_all(
&self,
ctx: &PlatformContext,
path: &Path,
) -> Result<(), PlatformError> {
let start = Instant::now();
emit_fs_started(ctx, "create_dir_all", None, path).await;
// Use standard tokio::fs implementation
let result =
fs::create_dir_all(path)
.await
.map_err(|e| PlatformError::FilesystemOperationFailed {
operation: "create_dir_all".to_string(),
message: format!("create directory failed: {e}"),
});
let duration = start.elapsed();
// Emit completion event
match &result {
Ok(_) => {
emit_fs_completed(ctx, "create_dir_all", None, path, None, duration).await;
}
Err(e) => {
emit_fs_failed(ctx, "create_dir_all", None, path, e, duration).await;
}
}
result
}
async fn remove_dir_all(
&self,
ctx: &PlatformContext,
path: &Path,
) -> Result<(), PlatformError> {
let start = Instant::now();
emit_fs_started(ctx, "remove_dir_all", None, path).await;
// Use standard tokio::fs implementation
let result =
fs::remove_dir_all(path)
.await
.map_err(|e| PlatformError::FilesystemOperationFailed {
operation: "remove_dir_all".to_string(),
message: format!("remove directory failed: {e}"),
});
let duration = start.elapsed();
// Emit completion event
match &result {
Ok(_) => {
emit_fs_completed(ctx, "remove_dir_all", None, path, None, duration).await;
}
Err(e) => {
emit_fs_failed(ctx, "remove_dir_all", None, path, e, duration).await;
}
}
result
}
/// Check if a path exists
async fn exists(&self, _ctx: &PlatformContext, path: &Path) -> bool {
tokio::fs::metadata(path).await.is_ok()
}
/// Remove a single file
async fn remove_file(&self, ctx: &PlatformContext, path: &Path) -> Result<(), PlatformError> {
let start = Instant::now();
emit_fs_started(ctx, "remove_file", None, path).await;
let result = tokio::fs::remove_file(path).await.map_err(|e| {
PlatformError::FilesystemOperationFailed {
operation: "remove_file".to_string(),
message: e.to_string(),
}
});
let duration = start.elapsed();
match &result {
Ok(_) => {
emit_fs_completed(ctx, "remove_file", None, path, None, duration).await;
}
Err(e) => {
emit_fs_failed(ctx, "remove_file", None, path, e, duration).await;
}
}
result
}
/// Get the size of a file or directory
async fn size(&self, ctx: &PlatformContext, path: &Path) -> Result<u64, PlatformError> {
let start = Instant::now();
emit_fs_started(ctx, "size", None, path).await;
let result =
self.calculate_size(path)
.await
.map_err(|e| PlatformError::FilesystemOperationFailed {
operation: "size".to_string(),
message: e.to_string(),
});
let duration = start.elapsed();
match &result {
Ok(_) => {
emit_fs_completed(ctx, "size", None, path, None, duration).await;
}
Err(e) => {
emit_fs_failed(ctx, "size", None, path, e, duration).await;
}
}
result
}
async fn is_dir(&self, _ctx: &PlatformContext, path: &Path) -> bool {
tokio::fs::metadata(path)
.await
.map(|m| m.is_dir())
.unwrap_or(false)
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/implementations/macos/mod.rs | crates/platform/src/implementations/macos/mod.rs | //! macOS-specific platform implementation
pub mod binary;
pub mod filesystem;
pub mod process;
/// macOS platform implementation
pub struct MacOSPlatform;
impl MacOSPlatform {
    /// Build a fully wired `Platform` backed by the macOS implementations.
    ///
    /// Returns the generic `crate::core::Platform` rather than `Self`,
    /// hence the lint allowance below.
    #[allow(clippy::new_ret_no_self)]
    pub fn new() -> crate::core::Platform {
        use binary::MacOSBinaryOperations;
        use filesystem::MacOSFilesystemOperations;
        use process::MacOSProcessOperations;
        // Each operation family is boxed behind its trait object.
        let binary_ops = Box::new(MacOSBinaryOperations::new());
        let fs_ops = Box::new(MacOSFilesystemOperations::new());
        let process_ops = Box::new(MacOSProcessOperations::new());
        crate::core::Platform::new(binary_ops, fs_ops, process_ops)
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/platform/src/implementations/macos/binary.rs | crates/platform/src/implementations/macos/binary.rs | //! macOS binary operations implementation
//!
//! This module wraps the existing proven binary operations from RPathPatcher and CodeSigner
//! with the platform abstraction layer, adding event emission and proper error handling.
use async_trait::async_trait;
use sps2_errors::PlatformError;
use sps2_events::{
events::{
FailureContext, PlatformEvent, PlatformOperationContext, PlatformOperationKind,
PlatformOperationMetrics,
},
AppEvent,
};
use std::path::Path;
use std::time::{Duration, Instant};
use tokio::process::Command;
use crate::binary::BinaryOperations;
use crate::core::PlatformContext;
/// macOS implementation of binary operations
pub struct MacOSBinaryOperations;
impl MacOSBinaryOperations {
    /// Create a new handle; the type is a stateless unit struct.
    pub fn new() -> Self {
        Self
    }
}
impl Default for MacOSBinaryOperations {
fn default() -> Self {
Self::new()
}
}
/// Describe a binary operation on `target` for event reporting.
fn binary_context(operation: &str, target: &Path) -> PlatformOperationContext {
    let target = Some(target.to_path_buf());
    PlatformOperationContext {
        kind: PlatformOperationKind::Binary,
        operation: operation.to_owned(),
        target,
        source: None,
        command: None,
    }
}
/// Bundle timing plus optional change descriptions into operation metrics.
///
/// Exit-code and stream-size fields stay `None`; they only apply to
/// process operations.
fn binary_metrics(duration: Duration, changes: Option<Vec<String>>) -> PlatformOperationMetrics {
    let duration_ms = Some(duration_to_millis(duration));
    PlatformOperationMetrics {
        duration_ms,
        exit_code: None,
        stdout_bytes: None,
        stderr_bytes: None,
        changes,
    }
}
/// Convert a `Duration` to whole milliseconds, saturating at `u64::MAX`.
fn duration_to_millis(duration: Duration) -> u64 {
    // `as_millis` yields a u128; clamp anything beyond u64's range.
    match u64::try_from(duration.as_millis()) {
        Ok(millis) => millis,
        Err(_) => u64::MAX,
    }
}
/// Emit an `OperationStarted` platform event for a binary operation.
async fn emit_binary_started(ctx: &PlatformContext, operation: &str, target: &Path) {
    ctx.emit_event(AppEvent::Platform(PlatformEvent::OperationStarted {
        context: binary_context(operation, target),
    }))
    .await;
}
/// Emit an `OperationCompleted` platform event, attaching timing metrics and
/// an optional list of human-readable changes made to `target`.
async fn emit_binary_completed(
    ctx: &PlatformContext,
    operation: &str,
    target: &Path,
    changes: Option<Vec<String>>,
    duration: Duration,
) {
    ctx.emit_event(AppEvent::Platform(PlatformEvent::OperationCompleted {
        context: binary_context(operation, target),
        metrics: Some(binary_metrics(duration, changes)),
    }))
    .await;
}
/// Emit an `OperationFailed` platform event carrying the failure context and
/// timing metrics (with no change list) for a binary operation on `target`.
async fn emit_binary_failed(
    ctx: &PlatformContext,
    operation: &str,
    target: &Path,
    error: &PlatformError,
    duration: Duration,
) {
    ctx.emit_event(AppEvent::Platform(PlatformEvent::OperationFailed {
        context: binary_context(operation, target),
        failure: FailureContext::from_error(error),
        metrics: Some(binary_metrics(duration, None)),
    }))
    .await;
}
#[async_trait]
impl BinaryOperations for MacOSBinaryOperations {
    /// Read the install name of a Mach-O binary via `otool -D`.
    ///
    /// Returns `Ok(None)` when `otool` exits unsuccessfully or prints fewer
    /// than two lines (e.g. the file has no install name).
    ///
    /// # Errors
    ///
    /// Fails if the `otool` tool cannot be located or spawned.
    async fn get_install_name(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<Option<String>, PlatformError> {
        let start = Instant::now();
        // Emit operation started event
        emit_binary_started(ctx, "get_install_name", binary).await;
        // Use tool registry to get otool path
        let result: Result<Option<String>, PlatformError> = async {
            let otool_path = ctx.platform_manager().get_tool("otool").await?;
            let out = Command::new(&otool_path)
                .args(["-D", &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "otool -D".to_string(),
                    message: e.to_string(),
                })?;
            if !out.status.success() {
                return Ok(None);
            }
            let text = String::from_utf8_lossy(&out.stdout);
            // otool -D outputs:
            // /path/to/file:
            // install_name
            let lines: Vec<&str> = text.lines().collect();
            if lines.len() >= 2 {
                Ok(Some(lines[1].trim().to_string()))
            } else {
                Ok(None)
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(ctx, "get_install_name", binary, None, duration).await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "get_install_name", binary, e, duration).await;
            }
        }
        result
    }
    /// Set the install name of a dylib via `install_name_tool -id`.
    ///
    /// # Errors
    ///
    /// Returns `BinaryOperationFailed` when the tool reports an error; a
    /// "larger updated load commands do not fit" failure is rewritten with a
    /// `HEADERPAD_ERROR:` message so callers can detect it specifically.
    async fn set_install_name(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        name: &str,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        // Emit operation started event
        emit_binary_started(ctx, "set_install_name", binary).await;
        // Use tool registry to get install_name_tool path
        let result = async {
            let install_name_tool_path =
                ctx.platform_manager().get_tool("install_name_tool").await?;
            let output = Command::new(&install_name_tool_path)
                .args(["-id", name, &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "install_name_tool -id".to_string(),
                    message: e.to_string(),
                })?;
            if output.status.success() {
                Ok(())
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                // Check for headerpad error (from existing RPathPatcher logic)
                if stderr.contains("larger updated load commands do not fit") {
                    Err(PlatformError::BinaryOperationFailed {
                        operation: "set_install_name".to_string(),
                        binary_path: binary.display().to_string(),
                        message: format!("HEADERPAD_ERROR: {}", binary.display()),
                    })
                } else {
                    Err(PlatformError::BinaryOperationFailed {
                        operation: "set_install_name".to_string(),
                        binary_path: binary.display().to_string(),
                        message: stderr.trim().to_string(),
                    })
                }
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(
                    ctx,
                    "set_install_name",
                    binary,
                    Some(vec![format!("set_install_name -> {name}")]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "set_install_name", binary, e, duration).await;
            }
        }
        result
    }
async fn get_dependencies(
&self,
ctx: &PlatformContext,
binary: &Path,
) -> Result<Vec<String>, PlatformError> {
let start = Instant::now();
emit_binary_started(ctx, "get_dependencies", binary).await;
// Use tool registry to get otool path
let result = async {
let otool_path = ctx.platform_manager().get_tool("otool").await?;
let output = Command::new(&otool_path)
.args(["-L", &binary.to_string_lossy()])
.output()
.await
.map_err(|e| PlatformError::ProcessExecutionFailed {
command: "otool -L".to_string(),
message: e.to_string(),
})?;
if !output.status.success() {
return Err(PlatformError::BinaryOperationFailed {
operation: "get_dependencies".to_string(),
binary_path: binary.display().to_string(),
message: "otool -L failed".to_string(),
});
}
let deps = String::from_utf8_lossy(&output.stdout);
let mut dependencies = Vec::new();
// Process each dependency line (skip the first line which is the file name)
for line in deps.lines().skip(1) {
let dep = line.trim();
if let Some(dep_name) = dep.split_whitespace().next() {
dependencies.push(dep_name.to_string());
}
}
Ok(dependencies)
}
.await;
let duration = start.elapsed();
// Emit completion event
match &result {
Ok(_) => {
emit_binary_completed(ctx, "get_dependencies", binary, None, duration).await;
}
Err(e) => {
emit_binary_failed(ctx, "get_dependencies", binary, e, duration).await;
}
}
result
}
    /// Rewrite one dependency entry via `install_name_tool -change old new`.
    ///
    /// # Errors
    ///
    /// Fails if the tool cannot be located/spawned, or returns
    /// `BinaryOperationFailed` with the tool's stderr when it exits non-zero.
    async fn change_dependency(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        old: &str,
        new: &str,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_binary_started(ctx, "change_dependency", binary).await;
        // Use the proven install_name_tool -change implementation from RPathPatcher
        let result = async {
            let install_name_tool_path =
                ctx.platform_manager().get_tool("install_name_tool").await?;
            let change_output = Command::new(&install_name_tool_path)
                .args(["-change", old, new, &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "install_name_tool -change".to_string(),
                    message: e.to_string(),
                })?;
            if change_output.status.success() {
                Ok(())
            } else {
                let stderr = String::from_utf8_lossy(&change_output.stderr);
                Err(PlatformError::BinaryOperationFailed {
                    operation: "change_dependency".to_string(),
                    binary_path: binary.display().to_string(),
                    message: stderr.trim().to_string(),
                })
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(
                    ctx,
                    "change_dependency",
                    binary,
                    Some(vec![format!("change_dependency {old} -> {new}")]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "change_dependency", binary, e, duration).await;
            }
        }
        result
    }
    /// Append an `LC_RPATH` entry via `install_name_tool -add_rpath`.
    ///
    /// # Errors
    ///
    /// Fails if the tool cannot be located/spawned, or returns
    /// `BinaryOperationFailed` with the tool's stderr when it exits non-zero
    /// (e.g. when the rpath already exists).
    async fn add_rpath(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        rpath: &str,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_binary_started(ctx, "add_rpath", binary).await;
        // Use the proven install_name_tool -add_rpath implementation from RPathPatcher
        let result = async {
            let install_name_tool_path =
                ctx.platform_manager().get_tool("install_name_tool").await?;
            let output = Command::new(&install_name_tool_path)
                .args(["-add_rpath", rpath, &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "install_name_tool -add_rpath".to_string(),
                    message: e.to_string(),
                })?;
            if output.status.success() {
                Ok(())
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                Err(PlatformError::BinaryOperationFailed {
                    operation: "add_rpath".to_string(),
                    binary_path: binary.display().to_string(),
                    message: stderr.trim().to_string(),
                })
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(
                    ctx,
                    "add_rpath",
                    binary,
                    Some(vec![format!("add_rpath {rpath}")]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "add_rpath", binary, e, duration).await;
            }
        }
        result
    }
    /// Remove an `LC_RPATH` entry via `install_name_tool -delete_rpath`.
    ///
    /// # Errors
    ///
    /// Fails if the tool cannot be located/spawned, or returns
    /// `BinaryOperationFailed` with the tool's stderr when it exits non-zero
    /// (e.g. when the rpath is not present).
    async fn delete_rpath(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        rpath: &str,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        emit_binary_started(ctx, "delete_rpath", binary).await;
        // Use the proven install_name_tool -delete_rpath implementation from RPathPatcher
        let result = async {
            let install_name_tool_path =
                ctx.platform_manager().get_tool("install_name_tool").await?;
            let output = Command::new(&install_name_tool_path)
                .args(["-delete_rpath", rpath, &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "install_name_tool -delete_rpath".to_string(),
                    message: e.to_string(),
                })?;
            if output.status.success() {
                Ok(())
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                Err(PlatformError::BinaryOperationFailed {
                    operation: "delete_rpath".to_string(),
                    binary_path: binary.display().to_string(),
                    message: stderr.trim().to_string(),
                })
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(
                    ctx,
                    "delete_rpath",
                    binary,
                    Some(vec![format!("delete_rpath {rpath}")]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "delete_rpath", binary, e, duration).await;
            }
        }
        result
    }
    /// List `LC_RPATH` entries of a Mach-O binary by parsing `otool -l`.
    ///
    /// # Errors
    ///
    /// Fails if `otool` cannot be located/spawned or exits unsuccessfully.
    async fn get_rpath_entries(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<Vec<String>, PlatformError> {
        let start = Instant::now();
        emit_binary_started(ctx, "get_rpath_entries", binary).await;
        // Use the proven otool -l implementation from RPathPatcher
        let result = async {
            let otool_path = ctx.platform_manager().get_tool("otool").await?;
            let out = Command::new(&otool_path)
                .args(["-l", &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "otool -l".to_string(),
                    message: e.to_string(),
                })?;
            if !out.status.success() {
                return Err(PlatformError::BinaryOperationFailed {
                    operation: "get_rpath_entries".to_string(),
                    binary_path: binary.display().to_string(),
                    message: "otool -l failed".to_string(),
                });
            }
            let text = String::from_utf8_lossy(&out.stdout);
            let mut rpath_entries = Vec::new();
            // Parse the LC_RPATH entries (logic from RPathPatcher)
            // Each LC_RPATH load command spans three lines: "cmd LC_RPATH",
            // "cmdsize N", then "path <rpath> (offset M)".
            let mut lines = text.lines();
            while let Some(l) = lines.next() {
                if l.contains("LC_RPATH") {
                    let _ = lines.next(); // skip cmdsize
                    if let Some(p) = lines.next() {
                        if let Some(idx) = p.find("path ") {
                            // NOTE(review): assumes any " (" occurs after the
                            // "path " marker; a malformed line where it comes
                            // earlier would panic this slice — confirm otool's
                            // output always matches "path <p> (offset N)".
                            let rpath = &p[idx + 5..p.find(" (").unwrap_or(p.len())];
                            rpath_entries.push(rpath.to_string());
                        }
                    }
                }
            }
            Ok(rpath_entries)
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(ctx, "get_rpath_entries", binary, None, duration).await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "get_rpath_entries", binary, e, duration).await;
            }
        }
        result
    }
    /// Check whether `binary` has a valid code signature via `codesign -vvv`.
    ///
    /// Returns `Ok(false)` for an invalid or missing signature; only tool
    /// lookup or spawn failures produce an `Err`.
    async fn verify_signature(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
    ) -> Result<bool, PlatformError> {
        let start = Instant::now();
        emit_binary_started(ctx, "verify_signature", binary).await;
        // Use the proven codesign verification implementation from CodeSigner
        let result: Result<bool, PlatformError> = async {
            let codesign_path = ctx.platform_manager().get_tool("codesign").await?;
            let check = Command::new(&codesign_path)
                .args(["-vvv", &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "codesign -vvv".to_string(),
                    message: e.to_string(),
                })?;
            // Exit status alone decides validity; stderr details are dropped.
            Ok(check.status.success())
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(valid) => {
                emit_binary_completed(
                    ctx,
                    "verify_signature",
                    binary,
                    Some(vec![format!("signature_valid={valid}")]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "verify_signature", binary, e, duration).await;
            }
        }
        result
    }
    /// (Re-)sign `binary` with `codesign -f -s`, using ad-hoc signing ("-")
    /// when no identity is given.
    ///
    /// # Errors
    ///
    /// Fails if `codesign` cannot be located/spawned, or returns
    /// `SigningFailed` with the tool's stderr when it exits non-zero.
    async fn sign_binary(
        &self,
        ctx: &PlatformContext,
        binary: &Path,
        identity: Option<&str>,
    ) -> Result<(), PlatformError> {
        let start = Instant::now();
        // "-" selects ad-hoc signing when no identity is supplied.
        let identity_str = identity.unwrap_or("-");
        emit_binary_started(ctx, "sign_binary", binary).await;
        // Use the proven codesign implementation from CodeSigner
        let result = async {
            let codesign_path = ctx.platform_manager().get_tool("codesign").await?;
            let output = Command::new(&codesign_path)
                .args(["-f", "-s", identity_str, &binary.to_string_lossy()])
                .output()
                .await
                .map_err(|e| PlatformError::ProcessExecutionFailed {
                    command: "codesign".to_string(),
                    message: e.to_string(),
                })?;
            if output.status.success() {
                Ok(())
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                Err(PlatformError::SigningFailed {
                    binary_path: binary.display().to_string(),
                    message: stderr.trim().to_string(),
                })
            }
        }
        .await;
        let duration = start.elapsed();
        // Emit completion event
        match &result {
            Ok(_) => {
                emit_binary_completed(
                    ctx,
                    "sign_binary",
                    binary,
                    Some(vec![format!("sign_binary {identity_str}")]),
                    duration,
                )
                .await;
            }
            Err(e) => {
                emit_binary_failed(ctx, "sign_binary", binary, e, duration).await;
            }
        }
        result
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/uninstall.rs | crates/ops/src/uninstall.rs | //! Uninstall command implementation
//!
//! Handles package removal with dependency checking.
//! Delegates to `sps2_install` crate for the actual uninstall logic.
use crate::{InstallReport, OpsCtx};
use sps2_errors::{Error, OpsError};
use sps2_events::{
patterns::UninstallProgressConfig, AppEvent, EventEmitter, GeneralEvent, ProgressManager,
};
use sps2_install::{InstallConfig, Installer, UninstallContext};
use std::time::Instant;
use uuid::Uuid;
/// Uninstall packages (delegates to install crate)
///
/// # Errors
///
/// Returns an error if:
/// - No packages are specified
/// - Package removal would break dependencies
/// - Uninstallation fails
pub async fn uninstall(ctx: &OpsCtx, package_names: &[String]) -> Result<InstallReport, Error> {
    let started_at = Instant::now();
    if package_names.is_empty() {
        return Err(OpsError::NoPackagesSpecified.into());
    }
    let _correlation = ctx.push_correlation_for_packages("uninstall", package_names);
    // In check mode we only report what would happen; nothing is mutated.
    if ctx.check_mode {
        return preview_uninstall(ctx, package_names).await;
    }
    // Set up progress reporting for the whole batch.
    let progress = ProgressManager::new();
    let tracker_config = UninstallProgressConfig {
        operation_name: "Uninstalling packages".to_string(),
        package_count: package_names.len() as u64,
    };
    let progress_id = progress.create_uninstall_tracker(tracker_config);
    let correlation = ctx.current_correlation();
    progress.emit_started(&progress_id, ctx, correlation.as_deref());
    // Build the installer from shared resolver/state/store handles.
    let mut installer = Installer::new(
        InstallConfig::default(),
        ctx.resolver.clone(),
        ctx.state.clone(),
        ctx.store.clone(),
    );
    // Fold every requested package into the uninstall context builder.
    let uninstall_context = package_names.iter().cloned().fold(
        UninstallContext::new().with_event_sender(ctx.tx.clone()),
        |acc, package_name| acc.add_package(package_name),
    );
    // Execute the uninstallation via the install crate.
    let result = installer.uninstall(uninstall_context).await?;
    // Translate the installer's result into the ops-level report shape.
    let removed = result
        .removed_packages
        .iter()
        .map(|pkg| crate::PackageChange {
            name: pkg.name.clone(),
            from_version: Some(pkg.version.clone()),
            to_version: None,
            size: None,
        })
        .collect();
    let report = InstallReport {
        installed: Vec::new(), // uninstall never installs packages
        updated: Vec::new(),   // ...nor updates them
        removed,
        state_id: result.state_id,
        duration_ms: u64::try_from(started_at.elapsed().as_millis()).unwrap_or(u64::MAX),
    };
    progress.complete_operation(&progress_id, ctx);
    Ok(report)
}
/// Preview what would be uninstalled without executing.
///
/// Emits a `CheckModePreview` event per package (not installed / safe to
/// remove / would break dependents), a warning listing all affected
/// dependents, and a final `CheckModeSummary`. No state is mutated; the
/// returned report carries `Uuid::nil()` as its state id.
///
/// # Errors
///
/// Propagates failures from querying installed packages or their dependents.
#[allow(clippy::too_many_lines)]
async fn preview_uninstall(ctx: &OpsCtx, package_names: &[String]) -> Result<InstallReport, Error> {
    use std::collections::HashMap;
    // Get currently installed packages
    let current_packages = ctx.state.get_installed_packages().await?;
    // Partition the request into installed packages and unknown names.
    let mut packages_to_remove = Vec::new();
    let mut not_found_packages = Vec::new();
    for package_name in package_names {
        if let Some(package_id) = current_packages
            .iter()
            .find(|pkg| &pkg.name == package_name)
        {
            packages_to_remove.push(package_id.clone());
        } else {
            not_found_packages.push(package_name.clone());
        }
    }
    // Report packages that are not installed.
    // (fixed: this loop previously iterated over the mojibake identifier
    // `¬_found_packages` — a corrupted `&not_found_packages` — which does
    // not compile)
    for package_name in &not_found_packages {
        ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
            operation: "uninstall".to_string(),
            action: format!("Package {package_name} is not installed"),
            details: HashMap::from([
                ("status".to_string(), "not_installed".to_string()),
                ("action".to_string(), "skip".to_string()),
            ]),
        }));
    }
    let mut preview_removed = Vec::new();
    let mut broken_dependencies = Vec::new();
    // Check each package for dependents
    for package in &packages_to_remove {
        let package_id = sps2_resolver::PackageId::new(package.name.clone(), package.version());
        // Check for dependents
        let dependents = ctx.state.get_package_dependents(&package_id).await?;
        if dependents.is_empty() {
            // Safe to remove
            ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                operation: "uninstall".to_string(),
                action: format!("Would remove {package_id}"),
                details: HashMap::from([
                    ("version".to_string(), package.version().to_string()),
                    ("dependents".to_string(), "0".to_string()),
                    ("status".to_string(), "safe_to_remove".to_string()),
                ]),
            }));
        } else {
            // Has dependents - would break dependencies
            let dependent_names: Vec<String> = dependents
                .iter()
                .map(std::string::ToString::to_string)
                .collect();
            ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                operation: "uninstall".to_string(),
                action: format!("Would remove {package_id} (breaks dependencies)"),
                details: HashMap::from([
                    ("version".to_string(), package.version().to_string()),
                    ("dependents".to_string(), dependents.len().to_string()),
                    ("dependent_packages".to_string(), dependent_names.join(", ")),
                    ("status".to_string(), "breaks_dependencies".to_string()),
                ]),
            }));
            broken_dependencies.extend(dependent_names);
        }
        preview_removed.push(crate::PackageChange {
            name: package.name.clone(),
            from_version: Some(package.version()),
            to_version: None,
            size: None,
        });
    }
    // Show warning for broken dependencies
    if !broken_dependencies.is_empty() {
        ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
            operation: "uninstall".to_string(),
            action: "WARNING: This would break dependencies for:".to_string(),
            details: HashMap::from([
                (
                    "affected_packages".to_string(),
                    broken_dependencies.join(", "),
                ),
                ("severity".to_string(), "error".to_string()),
                (
                    "suggestion".to_string(),
                    "Use --force to override dependency checks".to_string(),
                ),
            ]),
        }));
    }
    // Emit summary of all would-be changes, omitting empty categories.
    let total_changes = packages_to_remove.len();
    let mut categories = HashMap::new();
    categories.insert("packages_removed".to_string(), packages_to_remove.len());
    if !broken_dependencies.is_empty() {
        categories.insert("broken_dependencies".to_string(), broken_dependencies.len());
    }
    if !not_found_packages.is_empty() {
        categories.insert("packages_not_found".to_string(), not_found_packages.len());
    }
    ctx.emit(AppEvent::General(GeneralEvent::CheckModeSummary {
        operation: "uninstall".to_string(),
        total_changes,
        categories,
    }));
    // Return preview report (no actual state changes)
    Ok(InstallReport {
        installed: Vec::new(),
        updated: Vec::new(),
        removed: preview_removed,
        state_id: Uuid::nil(), // No state change in preview
        duration_ms: 0,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::OpsContextBuilder;
    use sps2_builder::Builder;
    use sps2_config::Config;
    use sps2_index::IndexManager;
    use sps2_net::NetClient;
    use sps2_resolver::Resolver;
    use sps2_state::StateManager;
    use sps2_store::PackageStore;
    use tempfile::TempDir;
    // Builds a minimal OpsCtx against temp state/store directories and checks
    // that previewing an empty uninstall request yields an empty report.
    // NOTE(review): the test name mentions "not_found" but the request list
    // is empty, so the not-found path is not actually exercised.
    #[tokio::test]
    async fn preview_without_packages_reports_not_found() {
        let temp_dir = TempDir::new().unwrap();
        let state_dir = temp_dir.path().join("state");
        let store_dir = temp_dir.path().join("store");
        tokio::fs::create_dir_all(&state_dir).await.unwrap();
        tokio::fs::create_dir_all(&store_dir).await.unwrap();
        // Real (temp-backed) state manager and package store.
        let state = StateManager::new(&state_dir).await.unwrap();
        let store = PackageStore::new(store_dir.clone());
        let (tx, _rx) = sps2_events::channel();
        let config = Config::default();
        let index = IndexManager::new(&store_dir);
        let net = NetClient::new(sps2_net::NetConfig::default()).unwrap();
        let resolver = Resolver::with_events(index.clone(), tx.clone());
        let builder = Builder::new();
        let ctx = OpsContextBuilder::new()
            .with_state(state)
            .with_store(store)
            .with_event_sender(tx)
            .with_config(config)
            .with_index(index)
            .with_net(net)
            .with_resolver(resolver)
            .with_builder(builder)
            .build()
            .unwrap();
        let preview = preview_uninstall(&ctx, &[]).await.unwrap();
        assert!(preview.removed.is_empty());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/lib.rs | crates/ops/src/lib.rs | #![warn(mismatched_lifetime_syntaxes)]
#![deny(clippy::pedantic, unsafe_code)]
#![allow(clippy::module_name_repetitions)]
//! High-level operations orchestration for sps2
//!
//! This crate serves as the orchestration layer between the CLI and
//! specialized crates. Small operations are implemented here, while
//! large operations delegate to specialized crates.
mod context;
pub mod keys;
pub mod small_ops;
// Import modularized operations
mod health;
mod maintenance;
mod query;
mod repository;
mod self_update;
mod types;
// Import command modules
mod build;
mod install;
mod pack;
mod uninstall;
mod update;
pub use context::{OpsContextBuilder, OpsCtx};
pub use sps2_guard::{
Discrepancy, StoreVerificationConfig, StoreVerificationStats, StoreVerifier, VerificationLevel,
VerificationResult, Verifier,
};
// Re-export consolidated types from sps2_types
pub use sps2_types::{
BuildReport, ChangeType, InstallReport, OpChange, PackageChange, PackageInfo, PackageStatus,
SearchResult, StateInfo,
};
// Re-export health status from events
pub use sps2_events::HealthStatus;
// Re-export ops-specific types from local types module
pub use types::{
ComponentHealth, HealthCheck, HealthIssue, InstallRequest, IssueSeverity, OpReport,
};
// Re-export operation functions
pub use build::build;
pub use install::install;
pub use pack::{pack_from_directory, pack_from_recipe, pack_from_recipe_no_post};
pub use small_ops::{
check_health, cleanup, history, list_packages, package_info, reposync, rollback,
search_packages, self_update,
};
pub use uninstall::uninstall;
pub use update::{update, upgrade};
use sps2_errors::Error;
use std::sync::Arc;
/// Verify the integrity of the current state
///
/// `level` selects the verification depth (`"quick"`, `"full"`, anything
/// else falls back to standard), and `heal` forces a full verification so
/// discrepancies can be repaired. `scope` chooses what gets verified:
/// `"store"` (content store only), `"all"` (state plus store), or anything
/// else (state only).
///
/// # Errors
///
/// Returns an error if verification fails.
pub async fn verify(
    ctx: &OpsCtx,
    heal: bool,
    level: &str,
    scope: &str,
    sync_refcounts: bool,
) -> Result<VerificationResult, Error> {
    let mut verification_level = match level {
        "quick" => VerificationLevel::Quick,
        "full" => VerificationLevel::Full,
        _ => VerificationLevel::Standard,
    };
    // Healing requires a full scan so every discrepancy is discovered.
    if heal {
        verification_level = VerificationLevel::Full;
    }
    match scope {
        "store" => {
            let config = StoreVerificationConfig::default();
            let verifier = StoreVerifier::new(
                Arc::new(ctx.state.clone()),
                Arc::new(ctx.store.file_store().clone()),
                config,
            );
            let stats = verifier.verify_with_progress(&ctx.tx).await?;
            let state_id = ctx.state.get_active_state().await?;
            Ok(VerificationResult::new(
                state_id,
                Vec::new(),
                // Saturate instead of silently truncating the duration;
                // matches the millisecond-conversion convention used
                // elsewhere in this crate.
                u64::try_from(stats.duration.as_millis()).unwrap_or(u64::MAX),
            ))
        }
        "all" => {
            let verifier = Verifier::new(ctx.state.clone(), ctx.store.clone(), ctx.tx.clone());
            let result = if heal {
                verifier.verify_and_heal(VerificationLevel::Full).await?
            } else {
                verifier.verify(verification_level).await?
            };
            // Also run a store scan; its stats are only reported via
            // progress events, not folded into the returned result.
            let config = StoreVerificationConfig::default();
            let store_verifier = StoreVerifier::new(
                Arc::new(ctx.state.clone()),
                Arc::new(ctx.store.file_store().clone()),
                config,
            );
            let _ = store_verifier.verify_with_progress(&ctx.tx).await?;
            if sync_refcounts {
                verifier.sync_refcounts().await?;
            }
            Ok(result)
        }
        _ => {
            let verifier = Verifier::new(ctx.state.clone(), ctx.store.clone(), ctx.tx.clone());
            let result = if heal {
                verifier.verify_and_heal(VerificationLevel::Full).await?
            } else {
                verifier.verify(verification_level).await?
            };
            if sync_refcounts {
                verifier.sync_refcounts().await?;
            }
            Ok(result)
        }
    }
}
/// Operation result that can be serialized for CLI output
///
/// Serialized as `{"type": <variant>, "data": <payload>}` via the
/// adjacently-tagged serde representation declared below.
#[derive(Clone, Debug, serde::Serialize)]
#[serde(tag = "type", content = "data")]
pub enum OperationResult {
    /// Package list
    PackageList(Vec<PackageInfo>),
    /// Package information
    PackageInfo(PackageInfo),
    /// Search results
    SearchResults(Vec<SearchResult>),
    /// Installation report
    InstallReport(InstallReport),
    /// Build report
    BuildReport(BuildReport),
    /// State information
    StateInfo(StateInfo),
    /// State history
    StateHistory(Vec<StateInfo>),
    /// Health check results
    HealthCheck(HealthCheck),
    /// Generic success message
    Success(String),
    /// Generic report
    Report(OpReport),
    /// Verification result
    VerificationResult(VerificationResult),
}
impl OperationResult {
    /// Serialize this result as pretty-printed JSON.
    ///
    /// # Errors
    ///
    /// Returns an error if JSON serialization fails.
    pub fn to_json(&self) -> Result<String, Error> {
        match serde_json::to_string_pretty(self) {
            Ok(json) => Ok(json),
            Err(e) => Err(sps2_errors::OpsError::SerializationError {
                message: e.to_string(),
            }
            .into()),
        }
    }

    /// Whether this result represents a successful operation.
    ///
    /// Most variants are unconditionally successful; health checks and
    /// verification results derive success from their own payload.
    #[must_use]
    pub fn is_success(&self) -> bool {
        match self {
            Self::HealthCheck(health) => health.is_healthy(),
            Self::VerificationResult(result) => result.is_valid,
            Self::Success(_)
            | Self::PackageList(_)
            | Self::PackageInfo(_)
            | Self::SearchResults(_)
            | Self::InstallReport(_)
            | Self::BuildReport(_)
            | Self::StateInfo(_)
            | Self::StateHistory(_)
            | Self::Report(_) => true,
        }
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/update.rs | crates/ops/src/update.rs | //! Update and upgrade command implementations
//!
//! - `update`: Respects version constraints (e.g., `~=1.2.0`)
//! - `upgrade`: Ignores upper bounds to get latest versions
//!
//! Both delegate to `sps2_install` crate for the actual update logic.
use crate::{InstallReport, OpsCtx};
use sps2_errors::Error;
use sps2_events::{
events::{LifecyclePackageUpdateType, LifecycleUpdateOperation, LifecycleUpdateResult},
patterns::UpdateProgressConfig,
AppEvent, EventEmitter, FailureContext, GeneralEvent, LifecycleEvent, ProgressEvent,
ProgressManager,
};
use sps2_install::{InstallConfig, Installer, UpdateContext};
use sps2_types::{PackageSpec, Version};
use std::time::Instant;
use uuid::Uuid;
/// Update mode determines how version constraints are handled
///
/// Selected by the public `update`/`upgrade` entry points and threaded
/// through the shared `update_or_upgrade` implementation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UpdateMode {
    /// Update mode: respects version constraints (compatible releases)
    Update,
    /// Upgrade mode: ignores upper bounds to get latest versions
    Upgrade,
}
impl UpdateMode {
    /// True when this is upgrade mode.
    const fn is_upgrade(self) -> bool {
        match self {
            Self::Upgrade => true,
            Self::Update => false,
        }
    }

    /// Human-readable operation name ("update" or "upgrade").
    const fn operation_name(self) -> &'static str {
        if self.is_upgrade() {
            "upgrade"
        } else {
            "update"
        }
    }

    /// Lifecycle operation variant corresponding to this mode.
    const fn lifecycle_operation(self) -> LifecycleUpdateOperation {
        if self.is_upgrade() {
            LifecycleUpdateOperation::Upgrade
        } else {
            LifecycleUpdateOperation::Update
        }
    }

    /// Message shown on the progress tracker for this mode.
    fn progress_message(self) -> String {
        let message = if self.is_upgrade() {
            "Upgrading packages"
        } else {
            "Updating packages"
        };
        message.to_string()
    }

    /// Default update type used when building the lifecycle report.
    const fn default_update_type(self) -> LifecyclePackageUpdateType {
        if self.is_upgrade() {
            LifecyclePackageUpdateType::Major
        } else {
            LifecyclePackageUpdateType::Minor
        }
    }
}
/// Update packages (delegates to install crate)
///
/// Respects version constraints (compatible releases); an empty
/// `package_names` slice targets every installed package.
///
/// # Errors
///
/// Returns an error if:
/// - No packages are installed or specified
/// - Update resolution fails
/// - Installation of updates fails
pub async fn update(ctx: &OpsCtx, package_names: &[String]) -> Result<InstallReport, Error> {
    update_or_upgrade(ctx, package_names, UpdateMode::Update).await
}
/// Upgrade packages (delegates to install crate)
///
/// Ignores upper version bounds to reach the latest available versions;
/// an empty `package_names` slice targets every installed package.
///
/// # Errors
///
/// Returns an error if:
/// - No packages are installed or specified
/// - Upgrade resolution fails
/// - Installation of upgrades fails
pub async fn upgrade(ctx: &OpsCtx, package_names: &[String]) -> Result<InstallReport, Error> {
    update_or_upgrade(ctx, package_names, UpdateMode::Upgrade).await
}
/// Internal implementation for both update and upgrade
///
/// Honors check mode by delegating to the preview path; otherwise builds
/// an installer, runs the update, and converts the result into an
/// `InstallReport` while emitting progress and lifecycle events.
async fn update_or_upgrade(
    ctx: &OpsCtx,
    package_names: &[String],
    mode: UpdateMode,
) -> Result<InstallReport, Error> {
    let start = Instant::now();
    let _correlation = ctx.push_correlation_for_packages(mode.operation_name(), package_names);
    // Check mode: preview what would be updated/upgraded
    if ctx.check_mode {
        return preview_update_or_upgrade(ctx, package_names, mode).await;
    }
    // Create installer
    let config = InstallConfig::default();
    let mut installer = Installer::new(
        config,
        ctx.resolver.clone(),
        ctx.state.clone(),
        ctx.store.clone(),
    );
    // Build update context with appropriate mode
    let mut update_context = UpdateContext::new()
        .with_upgrade(mode.is_upgrade())
        .with_event_sender(ctx.tx.clone());
    for package_name in package_names {
        update_context = update_context.add_package(package_name.clone());
    }
    // Get currently installed packages before update to track from_version
    let installed_before = ctx.state.get_installed_packages().await?;
    let installed_map: std::collections::HashMap<String, Version> = installed_before
        .iter()
        .map(|pkg| (pkg.name.clone(), pkg.version()))
        .collect();
    // No explicit names means every installed package is a target.
    let total_targets = if package_names.is_empty() {
        installed_before.len()
    } else {
        package_names.len()
    };
    let requested_packages: Vec<String> = if package_names.is_empty() {
        Vec::new()
    } else {
        package_names.to_vec()
    };
    let progress_manager = ProgressManager::new();
    let update_config = UpdateProgressConfig {
        operation_name: mode.progress_message(),
        package_count: total_targets as u64,
        is_upgrade: mode.is_upgrade(),
    };
    let progress_id = progress_manager.create_update_tracker(update_config);
    let correlation = ctx.current_correlation();
    progress_manager.emit_started(&progress_id, ctx, correlation.as_deref());
    ctx.emit(AppEvent::Lifecycle(LifecycleEvent::update_started(
        mode.lifecycle_operation(),
        requested_packages.clone(),
        total_targets,
    )));
    // Execute update/upgrade; on failure, emit matching progress and
    // lifecycle failure events before propagating the error.
    let result = installer.update(update_context).await.inspect_err(|e| {
        let failure = FailureContext::from_error(e);
        ctx.emit_operation_failed(mode.operation_name(), failure.clone());
        ctx.emit(AppEvent::Progress(ProgressEvent::Failed {
            id: progress_id.clone(),
            failure: failure.clone(),
            completed_items: 0,
            partial_duration: start.elapsed(),
        }));
        ctx.emit(AppEvent::Lifecycle(LifecycleEvent::update_failed(
            mode.lifecycle_operation(),
            Vec::new(),
            if requested_packages.is_empty() {
                Vec::new()
            } else {
                requested_packages.clone()
            },
            failure,
        )));
    })?;
    let report = create_update_report(
        &result,
        &installed_map,
        start,
        ctx,
        UpdateReportContext {
            progress_id: &progress_id,
            progress_manager: &progress_manager,
            total_targets,
            operation: mode.lifecycle_operation(),
            mode,
        },
    );
    Ok(report)
}
/// Bundled parameters for `create_update_report`, grouping progress
/// tracking state with operation metadata.
struct UpdateReportContext<'a> {
    // Progress tracker id created for this operation
    progress_id: &'a str,
    // Manager used to mark the progress tracker complete
    progress_manager: &'a ProgressManager,
    // Number of targeted packages (all installed when none were named)
    total_targets: usize,
    // Lifecycle operation variant (update vs upgrade)
    operation: LifecycleUpdateOperation,
    // Mode used to pick the default update type in the report
    mode: UpdateMode,
}
/// Build the `InstallReport` from an install result, complete the progress
/// tracker, and emit the lifecycle completion event.
///
/// `installed_map` holds pre-update versions so updated packages carry a
/// `from_version`; freshly installed packages have none.
fn create_update_report(
    result: &sps2_install::InstallResult,
    installed_map: &std::collections::HashMap<String, sps2_types::Version>,
    start: std::time::Instant,
    ctx: &OpsCtx,
    context: UpdateReportContext<'_>,
) -> InstallReport {
    // Convert to report format
    let report = InstallReport {
        installed: result
            .installed_packages
            .iter()
            .map(|pkg| crate::PackageChange {
                name: pkg.name.clone(),
                from_version: None,
                to_version: Some(pkg.version.clone()),
                size: None,
            })
            .collect(),
        updated: result
            .updated_packages
            .iter()
            .map(|pkg| crate::PackageChange {
                name: pkg.name.clone(),
                from_version: installed_map.get(&pkg.name).cloned(),
                to_version: Some(pkg.version.clone()),
                size: None,
            })
            .collect(),
        removed: result
            .removed_packages
            .iter()
            .map(|pkg| crate::PackageChange {
                name: pkg.name.clone(),
                from_version: Some(pkg.version.clone()),
                to_version: None,
                size: None,
            })
            .collect(),
        state_id: result.state_id,
        duration_ms: u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX),
    };
    context
        .progress_manager
        .complete_operation(context.progress_id, ctx);
    let updated_results: Vec<LifecycleUpdateResult> = result
        .updated_packages
        .iter()
        .map(|pkg| LifecycleUpdateResult {
            package: pkg.name.clone(),
            // Fall back to the new version when the package was not
            // previously installed (no prior version recorded).
            from_version: installed_map
                .get(&pkg.name)
                .cloned()
                .unwrap_or_else(|| pkg.version.clone()),
            to_version: pkg.version.clone(),
            update_type: context.mode.default_update_type(), // TODO: Determine actual update type
            duration: std::time::Duration::from_secs(30), // TODO: Track actual duration per package
            size_change: 0, // TODO: Calculate actual size change
        })
        .collect();
    // Targets that were neither updated nor newly installed were skipped.
    let skipped = context
        .total_targets
        .saturating_sub(updated_results.len())
        .saturating_sub(result.installed_packages.len());
    ctx.emit(AppEvent::Lifecycle(LifecycleEvent::update_completed(
        context.operation,
        updated_results,
        skipped,
        start.elapsed(),
        0, // TODO: Calculate actual space difference
    )));
    report
}
/// Preview what would be updated/upgraded without executing
///
/// Resolves each candidate package with a mode-appropriate version spec
/// (compatible-release for update, unbounded for upgrade), emits check-mode
/// preview events, and returns a report with `state_id = Uuid::nil()` to
/// signal that no state change occurred.
#[allow(clippy::too_many_lines)]
async fn preview_update_or_upgrade(
    ctx: &OpsCtx,
    package_names: &[String],
    mode: UpdateMode,
) -> Result<InstallReport, Error> {
    use std::collections::HashMap;
    let operation = mode.operation_name();
    // Get currently installed packages
    let current_packages = ctx.state.get_installed_packages().await?;
    // Determine packages to check for updates
    let packages_to_check = if package_names.is_empty() {
        // Check all packages
        current_packages.clone()
    } else {
        // Check specified packages
        current_packages
            .iter()
            .filter(|pkg| package_names.contains(&pkg.name))
            .cloned()
            .collect()
    };
    let mut preview_updated = Vec::new();
    let mut packages_up_to_date = Vec::new();
    let mut packages_not_found = Vec::new();
    // Check for packages that were specified but not found
    if !package_names.is_empty() {
        for package_name in package_names {
            if !current_packages.iter().any(|pkg| &pkg.name == package_name) {
                packages_not_found.push(package_name.clone());
            }
        }
    }
    // Report packages that are not installed
    for package_name in &packages_not_found {
        ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
            operation: operation.to_string(),
            action: format!("Package {package_name} is not installed"),
            details: HashMap::from([
                ("status".to_string(), "not_installed".to_string()),
                ("action".to_string(), "skip".to_string()),
            ]),
        }));
    }
    // Check each installed package for available updates
    for package_id in &packages_to_check {
        // Create appropriate spec based on mode
        let spec = match mode {
            UpdateMode::Upgrade => {
                // Upgrade mode: allow any version >= 0.0.0
                PackageSpec::parse(&format!("{}>=0.0.0", package_id.name))?
            }
            UpdateMode::Update => {
                // Update mode: try compatible release spec, fallback to any version
                match PackageSpec::parse(&format!("{}~={}", package_id.name, package_id.version)) {
                    Ok(spec) => spec,
                    Err(_) => {
                        // Fallback to any version if parsing fails
                        PackageSpec::parse(&format!("{}>=0.0.0", package_id.name))?
                    }
                }
            }
        };
        // Create resolution context for this package
        let mut resolution_context = sps2_resolver::ResolutionContext::new();
        resolution_context = resolution_context.add_runtime_dep(spec);
        // Resolve to see what version would be installed
        match ctx.resolver.resolve_with_sat(resolution_context).await {
            Ok(resolution_result) => {
                // Check if any resolved package is newer than current
                let mut found_update = false;
                for (resolved_id, node) in &resolution_result.nodes {
                    if resolved_id.name == package_id.name {
                        match resolved_id.version.cmp(&package_id.version()) {
                            std::cmp::Ordering::Greater => {
                                // Update/upgrade available
                                let change_type = determine_version_change_type(
                                    &package_id.version(),
                                    &resolved_id.version,
                                );
                                ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                                    operation: operation.to_string(),
                                    action: format!(
                                        "Would {} {} {} → {}",
                                        operation,
                                        package_id.name,
                                        package_id.version,
                                        resolved_id.version
                                    ),
                                    details: HashMap::from([
                                        (
                                            "current_version".to_string(),
                                            package_id.version.to_string(),
                                        ),
                                        (
                                            "new_version".to_string(),
                                            resolved_id.version.to_string(),
                                        ),
                                        ("change_type".to_string(), change_type),
                                        (
                                            "source".to_string(),
                                            match node.action {
                                                sps2_resolver::NodeAction::Download => {
                                                    "repository".to_string()
                                                }
                                                sps2_resolver::NodeAction::Local => {
                                                    "local file".to_string()
                                                }
                                            },
                                        ),
                                    ]),
                                }));
                                preview_updated.push(crate::PackageChange {
                                    name: package_id.name.clone(),
                                    from_version: Some(package_id.version()),
                                    to_version: Some(resolved_id.version.clone()),
                                    size: None,
                                });
                                found_update = true;
                            }
                            std::cmp::Ordering::Equal => {
                                // Already up to date
                                packages_up_to_date.push(package_id.name.clone());
                            }
                            std::cmp::Ordering::Less => {}
                        }
                        // Only the first matching node is considered.
                        break;
                    }
                }
                if !found_update
                    && !packages_up_to_date
                        .iter()
                        .any(|name| name == &package_id.name)
                {
                    // No update found, package is up to date
                    packages_up_to_date.push(package_id.name.clone());
                }
            }
            Err(_) => {
                // Resolution failed - package might not be available in repository
                ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                    operation: operation.to_string(),
                    action: format!("Cannot check {}s for {}", operation, package_id.name),
                    details: HashMap::from([
                        (
                            "current_version".to_string(),
                            package_id.version.to_string(),
                        ),
                        ("status".to_string(), "resolution_failed".to_string()),
                        (
                            "reason".to_string(),
                            "package not found in repository".to_string(),
                        ),
                    ]),
                }));
            }
        }
    }
    // Show packages that are already up to date (only if there are updates available)
    if !preview_updated.is_empty() {
        for package_name in &packages_up_to_date {
            if let Some(package_id) = current_packages
                .iter()
                .find(|pkg| &pkg.name == package_name)
            {
                let status_msg = match mode {
                    UpdateMode::Update => "is already up to date",
                    UpdateMode::Upgrade => "is already at latest version",
                };
                ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                    operation: operation.to_string(),
                    action: format!("{}:{} {}", package_id.name, package_id.version, status_msg),
                    details: HashMap::from([
                        ("version".to_string(), package_id.version.to_string()),
                        ("status".to_string(), "up_to_date".to_string()),
                    ]),
                }));
            }
        }
    }
    // Emit summary
    let total_changes = preview_updated.len();
    let mut categories = HashMap::new();
    let category_key = format!("packages_{operation}d"); // "packages_updated" or "packages_upgraded"
    categories.insert(category_key, preview_updated.len());
    categories.insert("packages_up_to_date".to_string(), packages_up_to_date.len());
    if !packages_not_found.is_empty() {
        categories.insert("packages_not_found".to_string(), packages_not_found.len());
    }
    ctx.emit(AppEvent::General(GeneralEvent::CheckModeSummary {
        operation: operation.to_string(),
        total_changes,
        categories,
    }));
    // Return preview report (no actual state changes)
    Ok(InstallReport {
        installed: Vec::new(),
        updated: preview_updated,
        removed: Vec::new(),
        state_id: Uuid::nil(), // No state change in preview
        duration_ms: 0,
    })
}
/// Classify the difference between two versions.
///
/// Returns `"major"`, `"minor"`, `"patch"`, or — when all three numeric
/// components are equal — `"prerelease"`.
fn determine_version_change_type(from: &Version, to: &Version) -> String {
    let label = match (
        from.major == to.major,
        from.minor == to.minor,
        from.patch == to.patch,
    ) {
        (false, _, _) => "major",
        (true, false, _) => "minor",
        (true, true, false) => "patch",
        (true, true, true) => "prerelease",
    };
    label.to_string()
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn classify_version_changes() {
        // (from, to, expected classification)
        let cases = [
            ((1, 0, 0), (2, 0, 0), "major"),
            ((1, 1, 0), (1, 2, 0), "minor"),
            ((1, 1, 1), (1, 1, 2), "patch"),
            ((1, 1, 1), (1, 1, 1), "prerelease"),
        ];
        for ((a, b, c), (x, y, z), expected) in cases {
            let from = Version::new(a, b, c);
            let to = Version::new(x, y, z);
            assert_eq!(determine_version_change_type(&from, &to), expected);
        }
    }

    #[test]
    fn update_mode_properties() {
        assert!(UpdateMode::Upgrade.is_upgrade());
        assert!(!UpdateMode::Update.is_upgrade());
        assert_eq!(UpdateMode::Update.operation_name(), "update");
        assert_eq!(UpdateMode::Upgrade.operation_name(), "upgrade");
        // LifecycleUpdateOperation doesn't implement PartialEq, so the
        // variants are checked with pattern matching instead of assert_eq.
        assert!(matches!(
            UpdateMode::Update.lifecycle_operation(),
            LifecycleUpdateOperation::Update
        ));
        assert!(matches!(
            UpdateMode::Upgrade.lifecycle_operation(),
            LifecycleUpdateOperation::Upgrade
        ));
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/pack.rs | crates/ops/src/pack.rs | //! Pack command implementation
//!
//! Provides standalone packaging functionality without requiring a full rebuild.
//! Always runs post-processing steps and QA pipeline by default, matching build command behavior.
use crate::OpsCtx;
use sps2_builder::{
artifact_qa::run_quality_pipeline, create_and_sign_package, execute_post_step_with_security,
parse_yaml_recipe, BuildCommand, BuildConfig, BuildContext, BuildEnvironment, BuildPlan,
BuilderApi, RecipeMetadata, SecurityContext, YamlRecipe,
};
use sps2_errors::{Error, OpsError};
use sps2_events::{events::BuildPhase, AppEvent, BuildEvent, EventEmitter, PhaseStatus};
use sps2_types::{BuildReport, Version};
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use std::time::Instant;
use uuid::Uuid;
/// Pack a recipe from its staging directory with post-processing and QA pipeline
///
/// This is the default pack behavior, matching the build command.
/// Runs post-processing steps, auto-detects QA pipeline from build systems,
/// and allows QA pipeline override from recipe. When `output_dir` is `None`,
/// the package is written to the current directory.
///
/// # Errors
///
/// Returns an error if:
/// - Recipe file doesn't exist or has invalid extension
/// - Staging directory doesn't exist or is empty
/// - Post-step execution fails
/// - QA pipeline fails
/// - Packaging process fails
pub async fn pack_from_recipe(
    ctx: &OpsCtx,
    recipe_path: &Path,
    output_dir: Option<&Path>,
) -> Result<BuildReport, Error> {
    pack_from_recipe_impl(ctx, recipe_path, output_dir, true).await
}
/// Pack a recipe from its staging directory without post-processing or QA pipeline
///
/// This skips all post-processing steps and QA validation.
/// Use --no-post flag to enable this mode. When `output_dir` is `None`,
/// the package is written to the current directory.
///
/// # Errors
///
/// Returns an error if:
/// - Recipe file doesn't exist or has invalid extension
/// - Staging directory doesn't exist or is empty
/// - Packaging process fails
pub async fn pack_from_recipe_no_post(
    ctx: &OpsCtx,
    recipe_path: &Path,
    output_dir: Option<&Path>,
) -> Result<BuildReport, Error> {
    pack_from_recipe_impl(ctx, recipe_path, output_dir, false).await
}
/// Pack a directory directly, skipping all post-processing
///
/// This is a power-user feature that packages a directory as-is. It requires
/// a manifest file, from which the package name, version, and revision are
/// taken. (SBOM support is soft-disabled: no SBOM parameter is accepted.)
///
/// # Errors
///
/// Returns an error if:
/// - Directory to package does not exist or is empty
/// - Manifest file does not exist or is invalid
///
/// - Packaging process fails
pub async fn pack_from_directory(
    ctx: &OpsCtx,
    directory: &Path,
    manifest_path: &Path,
    // SBOM soft-disabled: removed sbom_path parameter
    output_dir: Option<&Path>,
) -> Result<BuildReport, Error> {
    let start = Instant::now();
    ctx.emit_operation_started("Packing from directory");
    // Validate the directory to be packaged.
    // Name/version are placeholders here — only used for error messages.
    validate_staging_directory(directory, "directory", &Version::new(0, 0, 0))?;
    // Load the manifest to get package metadata
    let manifest = sps2_store::manifest_io::read_manifest(manifest_path).await?;
    let package_name = manifest.package.name.clone();
    let package_version = manifest.version()?;
    let _correlation = ctx.push_correlation(format!("pack:{package_name}"));
    // Create a minimal build context
    let output_path = determine_output_path(output_dir, &package_name, &package_version);
    let session_id = Uuid::new_v4().to_string();
    let build_context = BuildContext::new(
        package_name.clone(),
        package_version.clone(),
        manifest_path.to_path_buf(), // Use manifest path as a stand-in for recipe
        output_path.parent().unwrap_or(directory).to_path_buf(),
    )
    .with_revision(manifest.package.revision)
    .with_event_sender(ctx.tx.clone())
    .with_session_id(session_id);
    // Create a minimal build environment pointing to the specified directory
    let mut environment = BuildEnvironment::new(build_context.clone(), directory)?;
    environment.set_staging_dir(directory.to_path_buf());
    // Create a minimal build config
    let build_config = BuildConfig::default();
    // SBOM soft-disabled: ignore sbom_path and do not prepare SBOM files
    // Create and sign the package
    let package_path =
        create_and_sign_package(&build_config, &build_context, &environment, manifest).await?;
    let duration = start.elapsed();
    ctx.emit_operation_completed(
        format!("Packaged {package_name} {package_version} successfully"),
        true,
    );
    Ok(BuildReport {
        package: package_name,
        version: package_version,
        output_path: package_path,
        duration_ms: u64::try_from(duration.as_millis()).unwrap_or(u64::MAX),
    })
}
/// Internal implementation for recipe-based packaging
///
/// Parses the recipe, validates the existing staging tree under the
/// configured build root, optionally runs post steps plus the QA pipeline
/// (`execute_post`), then creates and signs the package exactly like the
/// build command.
async fn pack_from_recipe_impl(
    ctx: &OpsCtx,
    recipe_path: &Path,
    output_dir: Option<&Path>,
    execute_post: bool,
) -> Result<BuildReport, Error> {
    let start = Instant::now();
    // Validate recipe file
    validate_recipe_file(recipe_path)?;
    ctx.emit_operation_started(format!(
        "Packing from recipe{}",
        if execute_post {
            " (with post steps)"
        } else {
            ""
        }
    ));
    // Parse recipe to get package metadata
    let yaml_recipe = parse_yaml_recipe(recipe_path).await?;
    let package_name = yaml_recipe.metadata.name.clone();
    let package_version = Version::parse(&yaml_recipe.metadata.version)?;
    let _correlation = ctx.push_correlation(format!("pack:{package_name}"));
    // Construct expected staging directory path
    // (layout: <build_root>/<name>/<version>/stage, created by `build`)
    let build_root = sps2_config::BuilderConfig::default().build.build_root;
    let staging_dir = build_root
        .join(&package_name)
        .join(package_version.to_string())
        .join("stage");
    // Validate staging directory exists and has content
    validate_staging_directory(&staging_dir, &package_name, &package_version)?;
    // Create build context for packaging (same as build command)
    let output_path = determine_output_path(output_dir, &package_name, &package_version);
    let session_id = Uuid::new_v4().to_string();
    let build_context = BuildContext::new(
        package_name.clone(),
        package_version.clone(),
        recipe_path.to_path_buf(),
        output_path.parent().unwrap_or(&build_root).to_path_buf(),
    )
    .with_revision(1)
    .with_event_sender(ctx.tx.clone())
    .with_session_id(session_id);
    // Create build environment pointing to existing staging directory
    let mut environment = BuildEnvironment::new(build_context.clone(), &build_root)?;
    // If post steps are requested, execute them (same as build command)
    if execute_post {
        // Detect build systems from recipe build steps for QA pipeline
        let build_plan = BuildPlan::from_yaml(&yaml_recipe, recipe_path, None)?;
        let detected_build_systems = detect_build_systems_from_steps(&build_plan.build_steps);
        // Track detected build systems in environment for QA pipeline
        for build_system in &detected_build_systems {
            environment.record_build_system(build_system);
        }
        execute_post_steps(&build_context, &mut environment, &yaml_recipe).await?;
        // Run QA pipeline (same as build command)
        let qa_pipeline_override = Some(yaml_recipe.post.qa_pipeline);
        run_quality_pipeline(&build_context, &environment, qa_pipeline_override).await?;
    }
    // Create build config (same as build command)
    let build_config = BuildConfig::default();
    // Generate recipe metadata (same as build command)
    let recipe_metadata = RecipeMetadata {
        name: yaml_recipe.metadata.name.clone(),
        version: yaml_recipe.metadata.version.clone(),
        description: yaml_recipe.metadata.description.clone().into(),
        homepage: yaml_recipe.metadata.homepage.clone(),
        license: Some(yaml_recipe.metadata.license.clone()),
        runtime_deps: yaml_recipe.metadata.dependencies.runtime.clone(),
        build_deps: yaml_recipe.metadata.dependencies.build.clone(),
    };
    // Create manifest (SBOM removed)
    let manifest = sps2_builder::create_manifest(
        &build_context,
        recipe_metadata.runtime_deps.clone(),
        &recipe_metadata,
        &environment,
    );
    // Create and sign package (EXACT same as build command)
    let package_path =
        create_and_sign_package(&build_config, &build_context, &environment, manifest).await?;
    let duration = start.elapsed();
    ctx.emit_operation_completed(
        format!("Packed {package_name} v{package_version} from staging directory"),
        true,
    );
    // Create BuildReport
    Ok(BuildReport {
        package: package_name,
        version: package_version,
        output_path: package_path,
        duration_ms: u64::try_from(duration.as_millis()).unwrap_or(u64::MAX),
    })
}
/// Execute post-processing steps from recipe (same as build command)
///
/// Runs each `post` step from the recipe's build plan inside a
/// `SecurityContext` rooted at the build prefix, emitting build phase
/// events around every step. A no-op when the recipe defines no post steps.
async fn execute_post_steps(
    context: &BuildContext,
    environment: &mut BuildEnvironment,
    yaml_recipe: &YamlRecipe,
) -> Result<(), Error> {
    // Parse recipe into build plan to extract post steps
    let build_plan = BuildPlan::from_yaml(yaml_recipe, &context.recipe_path, None)?;
    if build_plan.post_steps.is_empty() {
        return Ok(());
    }
    // Pack contexts are always constructed with an event sender (see
    // `pack_from_recipe_impl`); check the invariant once instead of
    // unwrapping before every emission.
    let events = context
        .event_sender
        .as_ref()
        .expect("pack build context must carry an event sender");
    events.emit_operation_started("Executing post-processing steps");
    // Create working directory and security context
    let working_dir = environment.build_prefix().join("src");
    let mut initial_vars = HashMap::new();
    initial_vars.insert("NAME".to_string(), context.name.clone());
    initial_vars.insert("VERSION".to_string(), context.version.to_string());
    let mut security_context =
        SecurityContext::new(environment.build_prefix().to_path_buf(), initial_vars);
    security_context.set_current_dir(working_dir.clone());
    // Create builder API
    let resources = std::sync::Arc::new(sps2_config::ResourceManager::default());
    let mut api = BuilderApi::new(working_dir, resources)?;
    // Execute each post step, bracketed by phase start/complete events
    for step in &build_plan.post_steps {
        events.emit(AppEvent::Build(BuildEvent::PhaseStatus {
            session_id: context.session_id(),
            phase: BuildPhase::Build,
            status: PhaseStatus::Started,
        }));
        execute_post_step_with_security(
            step,
            &mut api,
            environment,
            &mut security_context,
            None, // No sps2_config restriction for pack command
        )
        .await?;
        events.emit(AppEvent::Build(BuildEvent::PhaseStatus {
            session_id: context.session_id(),
            phase: BuildPhase::Build,
            status: PhaseStatus::Completed { duration_ms: None },
        }));
    }
    events.emit_operation_completed("Post-processing steps completed", true);
    Ok(())
}
/// Validate recipe file exists and has correct extension
///
/// Only YAML recipes (`.yaml` / `.yml`) are accepted.
fn validate_recipe_file(recipe_path: &Path) -> Result<(), Error> {
    if !recipe_path.exists() {
        return Err(OpsError::RecipeNotFound {
            path: recipe_path.display().to_string(),
        }
        .into());
    }
    match recipe_path.extension().and_then(|ext| ext.to_str()) {
        Some("yaml" | "yml") => Ok(()),
        _ => Err(OpsError::InvalidRecipe {
            path: recipe_path.display().to_string(),
            reason: "recipe must have .yaml or .yml extension".to_string(),
        }
        .into()),
    }
}
/// Validate staging directory exists and contains packageable content
///
/// `package_name` and `package_version` are only used to label errors.
fn validate_staging_directory(
    staging_dir: &Path,
    package_name: &str,
    package_version: &Version,
) -> Result<(), Error> {
    if !staging_dir.exists() {
        return Err(OpsError::StagingDirectoryNotFound {
            path: staging_dir.display().to_string(),
            package: format!("{package_name}:{package_version}"),
        }
        .into());
    }
    if !staging_dir.is_dir() {
        return Err(OpsError::InvalidStagingDirectory {
            path: staging_dir.display().to_string(),
            reason: "staging path is not a directory".to_string(),
        }
        .into());
    }
    // An empty staging tree means there is nothing to package; a single
    // entry is enough to proceed.
    let mut entries =
        std::fs::read_dir(staging_dir).map_err(|e| OpsError::InvalidStagingDirectory {
            path: staging_dir.display().to_string(),
            reason: format!("cannot read staging directory: {e}"),
        })?;
    if entries.next().is_none() {
        return Err(OpsError::InvalidStagingDirectory {
            path: staging_dir.display().to_string(),
            reason: "staging directory is empty".to_string(),
        }
        .into());
    }
    Ok(())
}
/// Compute the output path for the built `.sp` archive.
///
/// Defaults to the current directory when no output directory is given; the
/// filename encodes package name, version, revision `1`, and the `arm64`
/// target.
fn determine_output_path(
    output_dir: Option<&Path>,
    package_name: &str,
    package_version: &Version,
) -> PathBuf {
    let base = match output_dir {
        Some(dir) => dir,
        None => Path::new("."),
    };
    base.join(format!("{package_name}-{package_version}-1.arm64.sp"))
}
/// Detect build systems used in build steps for QA pipeline routing
///
/// Maps each build command to the build-system name the QA pipeline router
/// understands; the resulting set determines which QA profile is applied
/// (Rust, Python, C/C++, etc.).
fn detect_build_systems_from_steps(build_steps: &[BuildCommand]) -> HashSet<String> {
    build_steps
        .iter()
        .map(|step| match step {
            BuildCommand::Configure { .. } => "configure",
            BuildCommand::Make { .. } => "make",
            BuildCommand::Autotools { .. } => "autotools",
            BuildCommand::Cmake { .. } => "cmake",
            BuildCommand::Meson { .. } => "meson",
            BuildCommand::Cargo { .. } => "cargo",
            BuildCommand::Go { .. } => "go",
            BuildCommand::Python { .. } => "python",
            BuildCommand::NodeJs { .. } => "nodejs",
            BuildCommand::Command { .. } => "shell",
        })
        .map(str::to_string)
        .collect()
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/small_ops.rs | crates/ops/src/small_ops.rs | //! Small operations implemented in the ops crate
//!
//! This module serves as a public API facade that re-exports operations
//! from specialized modules. All function signatures are preserved for
//! backward compatibility.
// Import all the modularized operations
use crate::health;
use crate::maintenance;
use crate::query;
use crate::repository;
use crate::self_update as self_update_module;
// Re-export all public functions to maintain API compatibility
pub use health::check_health;
pub use maintenance::{cleanup, history, rollback};
pub use query::{list_packages, package_info, search_packages};
pub use repository::{add_repo, list_repos, remove_repo, reposync};
pub use self_update_module::self_update;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/self_update.rs | crates/ops/src/self_update.rs | //! Self-Update Functionality
use crate::OpsCtx;
use sps2_errors::{Error, OpsError};
use sps2_events::{
events::{PackageOperation, PackageOutcome},
AppEvent, EventEmitter, FailureContext, PackageEvent,
};
use std::path::Path;
use std::time::Instant;
/// Update sps2 to the latest version
///
/// Queries the GitHub releases API for the newest tag, downloads the ARM64
/// macOS binary (optionally verifying its minisign signature), and atomically
/// replaces the running executable. Emits package operation events for
/// started/completed/failed.
///
/// # Errors
///
/// Returns an error if:
/// - Failed to check for latest version
/// - Failed to download or verify the new binary
/// - Failed to replace the current executable
pub async fn self_update(ctx: &OpsCtx, skip_verify: bool, force: bool) -> Result<String, Error> {
    let operation_start = Instant::now();
    let current_version = env!("CARGO_PKG_VERSION").to_string();
    ctx.emit(AppEvent::Package(PackageEvent::OperationStarted {
        operation: PackageOperation::SelfUpdate,
    }));
    // Run the full workflow inside one async block so success and failure can
    // be reported uniformly from the single `match` below.
    let result: Result<(String, String, String), Error> = async {
        // Check latest version from GitHub API
        let latest_version = get_latest_version(&ctx.net, &ctx.tx).await?;
        // Compare versions (fix: restore the `&current_version` borrow that
        // had been corrupted into an HTML entity and did not compile)
        let current = sps2_types::Version::parse(&current_version)?;
        let latest = sps2_types::Version::parse(&latest_version)?;
        if !force && latest <= current {
            return Ok((
                format!("Already on latest version: {current_version}"),
                current_version.clone(),
                current_version.clone(),
            ));
        }
        // Determine download URLs for ARM64 macOS
        let binary_url = format!(
            "https://github.com/sps-io/sps2/releases/download/v{latest_version}/sps2-{latest_version}-aarch64-apple-darwin"
        );
        let signature_url = format!("{binary_url}.minisig");
        // Create temporary directory for download
        let temp_dir = tempfile::tempdir().map_err(|e| OpsError::SelfUpdateFailed {
            message: format!("Failed to create temp directory: {e}"),
        })?;
        let temp_binary = temp_dir.path().join("sps2-new");
        let temp_signature = temp_dir.path().join("sps2-new.minisig");
        // Download new binary
        sps2_net::download_file(&ctx.net, &binary_url, &temp_binary, None, &ctx.tx)
            .await
            .map_err(|e| OpsError::SelfUpdateFailed {
                message: format!("Failed to download binary: {e}"),
            })?;
        if !skip_verify {
            // Download signature
            sps2_net::download_file(&ctx.net, &signature_url, &temp_signature, None, &ctx.tx)
                .await
                .map_err(|e| OpsError::SelfUpdateFailed {
                    message: format!("Failed to download signature: {e}"),
                })?;
            // Verify signature
            verify_binary_signature(&temp_binary, &temp_signature).await?;
        }
        // Replace current executable atomically
        replace_current_executable(&temp_binary).await?;
        Ok((
            format!("Updated from {current_version} to {latest_version}"),
            current_version.clone(),
            latest_version,
        ))
    }
    .await;
    match result {
        Ok((message, from_version, to_version)) => {
            let duration = u64::try_from(operation_start.elapsed().as_millis()).unwrap_or(u64::MAX);
            ctx.emit(AppEvent::Package(PackageEvent::OperationCompleted {
                operation: PackageOperation::SelfUpdate,
                outcome: PackageOutcome::SelfUpdate {
                    from: from_version.clone(),
                    to: to_version.clone(),
                    duration_ms: duration,
                },
            }));
            Ok(message)
        }
        Err(err) => {
            ctx.emit(AppEvent::Package(PackageEvent::OperationFailed {
                operation: PackageOperation::SelfUpdate,
                failure: FailureContext::from_error(&err),
            }));
            Err(err)
        }
    }
}
/// Query the GitHub releases API for the newest published sps2 version.
///
/// # Errors
///
/// Returns `OpsError::SelfUpdateFailed` when the HTTP request, the JSON
/// parsing, or the `tag_name` lookup fails.
async fn get_latest_version(
    net_client: &sps2_net::NetClient,
    tx: &sps2_events::EventSender,
) -> Result<String, Error> {
    let api_url = "https://api.github.com/repos/sps-io/sps2/releases/latest";
    let body = sps2_net::fetch_text(net_client, api_url, tx)
        .await
        .map_err(|e| OpsError::SelfUpdateFailed {
            message: format!("Failed to fetch release info: {e}"),
        })?;
    let release: serde_json::Value =
        serde_json::from_str(&body).map_err(|e| OpsError::SelfUpdateFailed {
            message: format!("Failed to parse release JSON: {e}"),
        })?;
    let tag_name = release["tag_name"]
        .as_str()
        .ok_or_else(|| OpsError::SelfUpdateFailed {
            message: "Release JSON missing tag_name field".to_string(),
        })?;
    // Tags are usually of the form `vX.Y.Z`; drop the leading `v` if present.
    Ok(tag_name.strip_prefix('v').unwrap_or(tag_name).to_owned())
}
/// Verify binary signature using minisign
///
/// Reads the downloaded binary and its detached `.minisig` signature, then
/// checks the signature against the project's trusted release key.
async fn verify_binary_signature(binary_path: &Path, signature_path: &Path) -> Result<(), Error> {
    // Shared constructor for the failure error shape.
    let failed = |message: String| OpsError::SelfUpdateFailed { message };
    let binary_content = tokio::fs::read(binary_path)
        .await
        .map_err(|e| failed(format!("Failed to read binary for verification: {e}")))?;
    let signature_content = tokio::fs::read_to_string(signature_path)
        .await
        .map_err(|e| failed(format!("Failed to read signature: {e}")))?;
    // Parse signature
    let signature = minisign_verify::Signature::decode(&signature_content)
        .map_err(|e| failed(format!("Failed to parse signature: {e}")))?;
    // Use the same release signing key as for packages
    // In production, this would be the same trusted key used for package verification
    let trusted_key = "RWSGOq2NVecA2UPNdBUZykp1MLhfMmkAK/SZSjK3bpq2q7I8LbSVVBDm";
    let public_key = minisign_verify::PublicKey::from_base64(trusted_key)
        .map_err(|e| failed(format!("Failed to parse public key: {e}")))?;
    public_key
        .verify(&binary_content, &signature, false)
        .map_err(|e| failed(format!("Binary signature verification failed: {e}")))?;
    Ok(())
}
/// Replace current executable atomically
///
/// Makes the downloaded binary executable (on Unix), backs up the running
/// executable, renames the new binary over it, and attempts to restore the
/// backup if the rename fails. The backup is removed on success (best
/// effort).
///
/// # Errors
///
/// Returns `OpsError::SelfUpdateFailed` when any filesystem step fails.
async fn replace_current_executable(new_binary_path: &Path) -> Result<(), Error> {
    // Get current executable path
    let current_exe = std::env::current_exe().map_err(|e| OpsError::SelfUpdateFailed {
        message: format!("Failed to get current executable path: {e}"),
    })?;
    // Make new binary executable
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let mut perms = tokio::fs::metadata(new_binary_path)
            .await
            .map_err(|e| OpsError::SelfUpdateFailed {
                message: format!("Failed to get binary metadata: {e}"),
            })?
            .permissions();
        perms.set_mode(0o755);
        tokio::fs::set_permissions(new_binary_path, perms)
            .await
            .map_err(|e| OpsError::SelfUpdateFailed {
                message: format!("Failed to set binary permissions: {e}"),
            })?;
    }
    // Create backup of current executable
    // (fix: restore the `&current_exe` borrows that had been corrupted into
    // HTML entities (`¤t_exe`) and did not compile)
    let backup_path = current_exe.with_extension("backup");
    tokio::fs::copy(&current_exe, &backup_path)
        .await
        .map_err(|e| OpsError::SelfUpdateFailed {
            message: format!("Failed to create backup: {e}"),
        })?;
    // Atomic replacement using rename
    tokio::fs::rename(new_binary_path, &current_exe)
        .await
        .map_err(|e| {
            // Attempt to restore backup on failure
            if let Err(restore_err) = std::fs::rename(&backup_path, &current_exe) {
                OpsError::SelfUpdateFailed {
                    message: format!(
                        "Failed to replace executable: {e}. Also failed to restore backup: {restore_err}"
                    ),
                }
            } else {
                OpsError::SelfUpdateFailed {
                    message: format!("Failed to replace executable: {e}. Restored from backup."),
                }
            }
        })?;
    // Clean up backup on success
    let _ = tokio::fs::remove_file(backup_path).await;
    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/install.rs | crates/ops/src/install.rs | //! Install command implementation
//!
//! Handles package installation with support for both local .sp files and remote packages.
//! Delegates to `sps2_install` crate for the actual installation logic.
use crate::{InstallReport, InstallRequest, OpsCtx};
use sps2_errors::{Error, OpsError};
use sps2_events::{
AppEvent, EventEmitter, FailureContext, GeneralEvent, LifecycleEvent, ProgressEvent,
};
use sps2_install::{InstallConfig, InstallContext, Installer};
use sps2_types::{PackageSpec, Version};
use std::convert::TryFrom;
use std::path::{Path, PathBuf};
use std::time::Instant;
use uuid::Uuid;
/// Install packages using the high-performance parallel pipeline
///
/// This function provides a unified installation workflow that seamlessly handles
/// both local .sp files and remote packages with optimal performance.
///
/// Strategy selection: all-remote specs use the parallel pipeline, all-local
/// files use the regular installer, and a mix of both uses a hybrid path.
/// In check mode the function returns a preview report without changing state.
///
/// # Errors
///
/// Returns an error if:
/// - No packages are specified
/// - Package specifications cannot be parsed
/// - Installation fails
#[allow(clippy::too_many_lines)] // Complex orchestration function coordinating multiple subsystems
pub async fn install(
    ctx: &OpsCtx,
    package_specs: &[String],
    force_download: bool,
) -> Result<InstallReport, Error> {
    let start = Instant::now();
    if package_specs.is_empty() {
        return Err(OpsError::NoPackagesSpecified.into());
    }
    // Correlation guard ties every event emitted below to this install run.
    let _correlation = ctx.push_correlation_for_packages("install", package_specs);
    // Check mode: preview what would be installed
    if ctx.check_mode {
        return preview_install(ctx, package_specs).await;
    }
    // Event emission moved to operations layer where actual work happens
    // Parse install requests and separate local files from remote packages
    let install_requests = parse_install_requests(package_specs)?;
    let mut remote_specs = Vec::new();
    let mut local_files = Vec::new();
    for request in install_requests {
        match request {
            InstallRequest::Remote(spec) => {
                remote_specs.push(spec);
            }
            InstallRequest::LocalFile(path) => {
                local_files.push(path);
            }
        }
    }
    // Get currently installed packages before install to track changes
    // (name → version map used to fill `from_version` on updated packages)
    let installed_before = ctx.state.get_installed_packages().await?;
    let installed_map: std::collections::HashMap<String, Version> = installed_before
        .iter()
        .map(|pkg| (pkg.name.clone(), pkg.version()))
        .collect();
    // Use different strategies based on the mix of packages with enhanced error handling
    let result = if !remote_specs.is_empty() && local_files.is_empty() {
        // All remote packages - use high-performance parallel pipeline
        match install_remote_packages_parallel(ctx, &remote_specs, force_download).await {
            Ok(result) => result,
            Err(e) => {
                // Provide specific guidance for remote package failures
                ctx.emit_error("Failed to install remote packages");
                return Err(e);
            }
        }
    } else if remote_specs.is_empty() && !local_files.is_empty() {
        // All local files - use local installer
        match install_local_packages(ctx, &local_files, force_download).await {
            Ok(result) => result,
            Err(e) => {
                // Provide specific guidance for local file failures
                ctx.emit(AppEvent::General(GeneralEvent::error_with_details(
                    format!("Failed to install {} local packages", local_files.len()),
                    format!(
                        "Error: {e}. \n\nSuggested solutions:\n\
                        • Verify file paths are correct and files exist\n\
                        • Check file permissions (must be readable)\n\
                        • Ensure .sp files are not corrupted\n\
                        • Use absolute paths or './' prefix for current directory"
                    ),
                )));
                return Err(e);
            }
        }
    } else {
        // Mixed local and remote - use hybrid approach
        match install_mixed_packages(ctx, &remote_specs, &local_files, force_download).await {
            Ok(result) => result,
            Err(e) => {
                // Provide guidance for mixed installation failures
                ctx.emit(AppEvent::General(GeneralEvent::error_with_details(
                    format!(
                        "Failed to install mixed packages ({} remote, {} local)",
                        remote_specs.len(),
                        local_files.len()
                    ),
                    format!(
                        "Error: {e}. \n\nSuggested solutions:\n\
                        • Verify file paths are correct and files exist\n\
                        • Check file permissions (must be readable)\n\
                        • Ensure .sp files are not corrupted\n\
                        • Use absolute paths or './' prefix for current directory"
                    ),
                )));
                return Err(e);
            }
        }
    };
    // Convert to report format with proper change tracking
    let report = InstallReport {
        installed: result
            .installed_packages
            .iter()
            .map(|pkg| {
                crate::PackageChange {
                    name: pkg.name.clone(),
                    from_version: None,
                    to_version: Some(pkg.version.clone()),
                    size: None, // TODO: Get size from store when available
                }
            })
            .collect(),
        updated: result
            .updated_packages
            .iter()
            .map(|pkg| crate::PackageChange {
                name: pkg.name.clone(),
                // Previous version comes from the pre-install snapshot above.
                from_version: installed_map.get(&pkg.name).cloned(),
                to_version: Some(pkg.version.clone()),
                size: None,
            })
            .collect(),
        removed: result
            .removed_packages
            .iter()
            .map(|pkg| crate::PackageChange {
                name: pkg.name.clone(),
                from_version: Some(pkg.version.clone()),
                to_version: None,
                size: None,
            })
            .collect(),
        state_id: result.state_id,
        duration_ms: u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX),
    };
    // Event emission moved to operations layer where actual work happens
    Ok(report)
}
/// Preview what would be installed without executing
///
/// Resolves remote specs (to show exact versions and whether each package is
/// newly installed or updated) and lists local files without opening them.
/// Emits `CheckModePreview` events per package plus a `CheckModeSummary`,
/// and returns a report with a nil state id — no state is changed.
#[allow(clippy::too_many_lines)]
async fn preview_install(ctx: &OpsCtx, package_specs: &[String]) -> Result<InstallReport, Error> {
    use std::collections::HashMap;
    // Parse install requests
    let install_requests = parse_install_requests(package_specs)?;
    let mut remote_specs = Vec::new();
    let mut local_files = Vec::new();
    for request in install_requests {
        match request {
            InstallRequest::Remote(spec) => {
                remote_specs.push(spec);
            }
            InstallRequest::LocalFile(path) => {
                local_files.push(path);
            }
        }
    }
    // Get currently installed packages to check for updates
    let installed_before = ctx.state.get_installed_packages().await?;
    let installed_map: HashMap<String, Version> = installed_before
        .iter()
        .map(|pkg| (pkg.name.clone(), pkg.version()))
        .collect();
    let mut preview_installed = Vec::new();
    let mut preview_updated = Vec::new();
    let mut new_packages_count = 0;
    let mut dependencies_added_count = 0;
    // Handle remote packages
    if !remote_specs.is_empty() {
        // Create resolution context
        let mut resolution_context = sps2_resolver::ResolutionContext::new();
        for spec in &remote_specs {
            resolution_context = resolution_context.add_runtime_dep(spec.clone());
        }
        // Resolve dependencies
        let resolution_result = ctx.resolver.resolve_with_sat(resolution_context).await?;
        // Process resolved packages
        for (package_id, node) in &resolution_result.nodes {
            // Anything not named in the user's specs was pulled in as a dependency.
            let is_requested = remote_specs.iter().any(|spec| spec.name == package_id.name);
            let is_dependency = !is_requested;
            if let Some(existing_version) = installed_map.get(&package_id.name) {
                if existing_version != &package_id.version {
                    // Package would be updated
                    ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                        operation: "install".to_string(),
                        action: format!(
                            "Would update {} {} → {}",
                            package_id.name, existing_version, package_id.version
                        ),
                        details: HashMap::from([
                            ("current_version".to_string(), existing_version.to_string()),
                            ("new_version".to_string(), package_id.version.to_string()),
                            (
                                "source".to_string(),
                                match node.action {
                                    sps2_resolver::NodeAction::Download => "repository".to_string(),
                                    sps2_resolver::NodeAction::Local => "local file".to_string(),
                                },
                            ),
                        ]),
                    }));
                    preview_updated.push(crate::PackageChange {
                        name: package_id.name.clone(),
                        from_version: Some(existing_version.clone()),
                        to_version: Some(package_id.version.clone()),
                        size: None,
                    });
                }
            } else {
                // Package would be newly installed
                let action_text = if is_dependency {
                    format!("Would install {package_id} (dependency)")
                } else {
                    format!("Would install {package_id}")
                };
                ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                    operation: "install".to_string(),
                    action: action_text,
                    details: HashMap::from([
                        ("version".to_string(), package_id.version.to_string()),
                        (
                            "source".to_string(),
                            match node.action {
                                sps2_resolver::NodeAction::Download => "repository".to_string(),
                                sps2_resolver::NodeAction::Local => "local file".to_string(),
                            },
                        ),
                        (
                            "type".to_string(),
                            if is_dependency {
                                "dependency".to_string()
                            } else {
                                "requested".to_string()
                            },
                        ),
                    ]),
                }));
                preview_installed.push(crate::PackageChange {
                    name: package_id.name.clone(),
                    from_version: None,
                    to_version: Some(package_id.version.clone()),
                    size: None,
                });
                if is_dependency {
                    dependencies_added_count += 1;
                } else {
                    new_packages_count += 1;
                }
            }
        }
    }
    // Handle local files
    for local_file in &local_files {
        // For local files, we can't easily resolve without reading the file
        // So we'll show a basic preview
        ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
            operation: "install".to_string(),
            action: format!("Would install from local file: {}", local_file.display()),
            details: HashMap::from([
                ("source".to_string(), "local file".to_string()),
                ("path".to_string(), local_file.display().to_string()),
            ]),
        }));
        // Add to preview (we don't know the exact package name/version without reading)
        preview_installed.push(crate::PackageChange {
            name: format!(
                "local-{}",
                local_file.file_stem().unwrap_or_default().to_string_lossy()
            ),
            from_version: None,
            to_version: Some(Version::new(0, 0, 0)), // Placeholder
            size: None,
        });
        new_packages_count += 1;
    }
    // Emit summary
    let total_changes = preview_installed.len() + preview_updated.len();
    let mut categories = HashMap::new();
    categories.insert("new_packages".to_string(), new_packages_count);
    categories.insert("updated_packages".to_string(), preview_updated.len());
    if dependencies_added_count > 0 {
        categories.insert("dependencies_added".to_string(), dependencies_added_count);
    }
    ctx.emit(AppEvent::General(GeneralEvent::CheckModeSummary {
        operation: "install".to_string(),
        total_changes,
        categories,
    }));
    // Return preview report (no actual state changes)
    Ok(InstallReport {
        installed: preview_installed,
        updated: preview_updated,
        removed: Vec::new(),
        state_id: Uuid::nil(), // No state change in preview
        duration_ms: 0,
    })
}
/// Install remote packages using the high-performance parallel pipeline
///
/// Phases: (1) SAT dependency resolution, (2-4) parallel download/store/
/// prepare via `ParallelExecutor`, (5) atomic installation via
/// `AtomicInstaller`. Progress and lifecycle events are emitted for each
/// phase; failures emit `Progress::Failed` before returning the error.
#[allow(clippy::too_many_lines)] // Complex parallel pipeline orchestration with error handling
async fn install_remote_packages_parallel(
    ctx: &OpsCtx,
    specs: &[PackageSpec],
    force_download: bool,
) -> Result<sps2_install::InstallResult, Error> {
    use sps2_events::{patterns::InstallProgressConfig, ProgressManager};
    // use sps2_state::PackageRef;
    use std::time::Instant;
    let start = Instant::now();
    // Create unified progress tracker using standardized patterns
    let progress_manager = ProgressManager::new();
    let install_config = InstallProgressConfig {
        operation_name: format!("Installing {} packages", specs.len()),
        package_count: specs.len() as u64,
        include_dependency_resolution: true,
    };
    let progress_id = progress_manager.create_install_tracker(install_config);
    let correlation = ctx.current_correlation();
    progress_manager.emit_started(&progress_id, ctx, correlation.as_deref());
    // The new standardized progress tracker handles the initial event emission.
    // Phase 1: Dependency resolution
    let resolve_start = Instant::now();
    ctx.emit(AppEvent::Lifecycle(LifecycleEvent::resolver_started(
        specs.len(),
        0,
        0,
    )));
    let mut resolution_context = sps2_resolver::ResolutionContext::new();
    for spec in specs {
        resolution_context = resolution_context.add_runtime_dep(spec.clone());
    }
    let resolution_result = match ctx.resolver.resolve_with_sat(resolution_context).await {
        Ok(result) => result,
        Err(e) => {
            let failure = FailureContext::from_error(&e);
            ctx.emit_operation_failed("install", failure.clone());
            ctx.emit(AppEvent::Lifecycle(LifecycleEvent::resolver_failed(
                failure.clone(),
                Vec::new(),
            )));
            ctx.emit(AppEvent::Progress(ProgressEvent::Failed {
                id: progress_id.clone(),
                failure,
                completed_items: 0,
                partial_duration: std::time::Duration::default(),
            }));
            return Err(e);
        }
    };
    // Shadowing converts the u128 millis to the u64 the event API expects.
    let duration_ms = resolve_start.elapsed().as_millis();
    let duration_ms = u64::try_from(duration_ms).unwrap_or(u64::MAX);
    let execution_plan = resolution_result.execution_plan;
    let resolved_packages = resolution_result.nodes;
    // Tally how many resolved nodes will be downloaded vs. reused locally.
    let mut downloaded_packages = 0usize;
    let mut reused_packages = 0usize;
    for node in resolved_packages.values() {
        match node.action {
            sps2_resolver::NodeAction::Download => downloaded_packages += 1,
            sps2_resolver::NodeAction::Local => reused_packages += 1,
        }
    }
    ctx.emit(AppEvent::Lifecycle(LifecycleEvent::resolver_completed(
        resolved_packages.len(),
        downloaded_packages,
        reused_packages,
        duration_ms,
    )));
    progress_manager.update_phase_to_done(&progress_id, "Resolve", ctx);
    // Phase 2-4: Parallel execution (download, store, prepare)
    // Use the same approach as the regular installer with ParallelExecutor
    let exec_context = sps2_install::ExecutionContext::new()
        .with_event_sender(ctx.tx.clone())
        .with_security_policy(sps2_install::SecurityPolicy {
            verify_signatures: ctx.config.security.verify_signatures,
            allow_unsigned: ctx.config.security.allow_unsigned,
        })
        .with_force_redownload(force_download);
    // Create parallel executor
    let resources = std::sync::Arc::new(sps2_config::ResourceManager::default());
    let executor =
        sps2_install::ParallelExecutor::new(ctx.store.clone(), ctx.state.clone(), resources)?;
    // Execute parallel downloads and store packages
    let prepared_packages = match executor
        .execute_parallel(&execution_plan, &resolved_packages, &exec_context)
        .await
    {
        Ok(prepared_packages) => prepared_packages,
        Err(e) => {
            let failure = FailureContext::from_error(&e);
            ctx.emit_operation_failed("install", failure.clone());
            ctx.emit(AppEvent::Progress(ProgressEvent::Failed {
                id: progress_id.clone(),
                failure,
                completed_items: 0,
                partial_duration: std::time::Duration::default(),
            }));
            return Err(e);
        }
    };
    progress_manager.update_phase_to_done(&progress_id, "Download", ctx);
    // Phase 5: Atomic installation
    ctx.emit_debug("DEBUG: Starting atomic installation");
    // Perform atomic installation using the prepared packages
    let mut atomic_installer =
        sps2_install::AtomicInstaller::new(ctx.state.clone(), ctx.store.clone());
    let install_context = sps2_install::InstallContext::new()
        .with_event_sender(ctx.tx.clone())
        .with_force_download(force_download);
    let install_result = atomic_installer
        .install(
            &install_context,
            &resolved_packages,
            Some(&prepared_packages),
        )
        .await?;
    ctx.emit_debug("DEBUG: Atomic installation completed");
    // Complete progress tracking
    progress_manager.complete_operation(&progress_id, ctx);
    // Send comprehensive completion metrics
    ctx.emit_debug(format!(
        "DEBUG: Installation metrics - Total: {}, Successful: {}, Duration: {:.2}s",
        specs.len(),
        install_result.installed_packages.len(),
        start.elapsed().as_secs_f64()
    ));
    // Return the install result from AtomicInstaller (already committed via 2PC)
    Ok(install_result)
}
/// Install local packages using the regular installer
///
/// Builds a default-configured `Installer` and hands it the given `.sp`
/// files; the force-download flag is forwarded unchanged.
async fn install_local_packages(
    ctx: &OpsCtx,
    files: &[PathBuf],
    force_download: bool,
) -> Result<sps2_install::InstallResult, Error> {
    let mut installer = Installer::new(
        InstallConfig::default(),
        ctx.resolver.clone(),
        ctx.state.clone(),
        ctx.store.clone(),
    );
    // Context carries only local files — no remote specs on this path.
    let context = InstallContext::new()
        .with_event_sender(ctx.tx.clone())
        .with_local_files(files.to_vec())
        .with_force_download(force_download);
    installer.install(context).await
}
/// Install mixed local and remote packages using hybrid approach
///
/// For mixed installs, the regular installer is used for now.
/// TODO: Optimize this by using the parallel pipeline for the remote
/// portion and merging results.
async fn install_mixed_packages(
    ctx: &OpsCtx,
    remote_specs: &[PackageSpec],
    local_files: &[PathBuf],
    force_download: bool,
) -> Result<sps2_install::InstallResult, Error> {
    let mut installer = Installer::new(
        InstallConfig::default(),
        ctx.resolver.clone(),
        ctx.state.clone(),
        ctx.store.clone(),
    );
    // Seed the context with the local files, then fold in each remote spec.
    let context = remote_specs.iter().cloned().fold(
        InstallContext::new()
            .with_event_sender(ctx.tx.clone())
            .with_local_files(local_files.to_vec())
            .with_force_download(force_download),
        |acc, spec| acc.add_package(spec),
    );
    installer.install(context).await
}
/// Parse install requests from string specifications
///
/// A spec naming an existing file with a `.sp` extension (case-insensitive)
/// becomes a local-file request; everything else is parsed as a remote
/// package spec (which may carry version constraints).
fn parse_install_requests(specs: &[String]) -> Result<Vec<InstallRequest>, Error> {
    specs
        .iter()
        .map(|spec| {
            let path = Path::new(spec);
            let is_local_sp = path
                .extension()
                .is_some_and(|ext| ext.eq_ignore_ascii_case("sp"))
                && path.exists();
            if is_local_sp {
                Ok(InstallRequest::LocalFile(PathBuf::from(spec)))
            } else {
                Ok(InstallRequest::Remote(PackageSpec::parse(spec)?))
            }
        })
        .collect()
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/build.rs | crates/ops/src/build.rs | //! Build command implementation
//!
//! Handles package building from recipes.
//! Delegates to `sps2_builder` crate for the actual build logic.
use crate::{BuildReport, OpsCtx};
use sps2_builder::{parse_yaml_recipe, BuildContext};
use sps2_errors::{Error, OpsError};
use sps2_events::{AppEvent, BuildEvent, BuildSession, BuildTarget, EventEmitter, FailureContext};
use sps2_types::Version;
use std::path::{Path, PathBuf};
use std::time::Instant;
/// Build package from recipe (delegates to builder crate)
///
/// Validates the recipe path, reads name/version from the YAML metadata,
/// emits build lifecycle events, runs the builder, and — when the recipe
/// requested it — installs the resulting package.
///
/// # Errors
///
/// Returns an error if:
/// - Recipe file doesn't exist or has invalid extension
/// - Recipe cannot be loaded or executed
/// - Build process fails
pub async fn build(
    ctx: &OpsCtx,
    recipe_path: &Path,
    output_dir: Option<&Path>,
    network: bool,
    jobs: Option<usize>,
) -> Result<BuildReport, Error> {
    let start = Instant::now();
    // Correlation guard ties all events emitted below to this build run.
    let correlation_label = format!("build:{}", recipe_path.display());
    let _correlation = ctx.push_correlation(correlation_label);
    ensure_recipe_path(recipe_path)?;
    let (package_name, package_version) = load_recipe_metadata(recipe_path).await?;
    let (session, target, session_id) =
        build_session(package_name.clone(), package_version.clone());
    ctx.emit(AppEvent::Build(BuildEvent::Started {
        session: session.clone(),
        target: target.clone(),
    }));
    let output_directory = resolve_output_directory(output_dir);
    let canonical_recipe_path = canonicalize_recipe_path(recipe_path)?;
    let build_context = BuildContext::new(
        package_name.clone(),
        package_version.clone(),
        canonical_recipe_path,
        output_directory,
    )
    .with_event_sender(ctx.tx.clone())
    .with_session_id(session_id.clone());
    let builder = configure_builder(ctx, network, jobs);
    // Use the builder with custom configuration
    let result = match builder.build(build_context).await {
        Ok(result) => result,
        Err(error) => {
            // Emit a Failed event before propagating so listeners see the end
            // of the session.
            ctx.emit(AppEvent::Build(BuildEvent::Failed {
                session_id: session_id.clone(),
                target: target.clone(),
                failure: FailureContext::from_error(&error),
                phase: None,
                command: None,
            }));
            return Err(error);
        }
    };
    // Check if install was requested during recipe execution
    if result.install_requested {
        ctx.emit_operation_started("Building package");
        // Install the built package
        let package_path_str = result.package_path.to_string_lossy().to_string();
        let _install_report = crate::install(ctx, &[package_path_str], false).await?;
        ctx.emit_operation_completed(
            format!("Installed {package_name} {package_version} successfully"),
            true,
        );
    }
    let report = BuildReport {
        package: package_name,
        version: package_version,
        output_path: result.package_path,
        duration_ms: elapsed_millis(start),
    };
    ctx.emit(AppEvent::Build(BuildEvent::Completed {
        session_id,
        target,
        artifacts: vec![report.output_path.clone()],
        duration_ms: report.duration_ms,
    }));
    Ok(report)
}
/// Milliseconds elapsed since `start`, saturating at `u64::MAX` on overflow.
fn elapsed_millis(start: Instant) -> u64 {
    start
        .elapsed()
        .as_millis()
        .try_into()
        .unwrap_or(u64::MAX)
}
/// Check that the recipe path exists and ends in `.yaml` or `.yml`.
///
/// # Errors
///
/// Returns `OpsError::RecipeNotFound` for a missing path and
/// `OpsError::InvalidRecipe` for a non-YAML extension.
fn ensure_recipe_path(recipe_path: &Path) -> Result<(), Error> {
    if !recipe_path.exists() {
        return Err(OpsError::RecipeNotFound {
            path: recipe_path.display().to_string(),
        }
        .into());
    }
    let has_yaml_extension = recipe_path
        .extension()
        .and_then(|ext| ext.to_str())
        .is_some_and(|ext| ext == "yaml" || ext == "yml");
    if has_yaml_extension {
        Ok(())
    } else {
        Err(OpsError::InvalidRecipe {
            path: recipe_path.display().to_string(),
            reason: "recipe must have .yaml or .yml extension".to_string(),
        }
        .into())
    }
}
/// Read the package name and parsed version from a YAML recipe's metadata.
async fn load_recipe_metadata(recipe_path: &Path) -> Result<(String, Version), Error> {
    let recipe = parse_yaml_recipe(recipe_path).await?;
    let parsed_version = Version::parse(&recipe.metadata.version)?;
    Ok((recipe.metadata.name.clone(), parsed_version))
}
/// Create the event session/target descriptors for one build run.
///
/// Returns the session, the build target, and the freshly generated session
/// id (also stored inside the session).
fn build_session(
    package_name: String,
    package_version: Version,
) -> (BuildSession, BuildTarget, String) {
    let session_id = uuid::Uuid::new_v4().to_string();
    (
        BuildSession {
            id: session_id.clone(),
            system: sps2_events::BuildSystem::Custom,
            cache_enabled: false,
        },
        BuildTarget {
            package: package_name,
            version: package_version,
        },
        session_id,
    )
}
/// Pick the build output directory: caller-supplied, else the current
/// working directory, else `"."` when the cwd cannot be determined.
fn resolve_output_directory(output_dir: Option<&Path>) -> PathBuf {
    match output_dir {
        Some(dir) => dir.to_path_buf(),
        None => std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
    }
}
/// Resolve the recipe path to its canonical absolute form.
///
/// # Errors
///
/// Returns `OpsError::InvalidRecipe` when canonicalization fails (e.g. the
/// path no longer exists or a component is not a directory).
fn canonicalize_recipe_path(recipe_path: &Path) -> Result<PathBuf, Error> {
    match recipe_path.canonicalize() {
        Ok(canonical) => Ok(canonical),
        Err(e) => Err(OpsError::InvalidRecipe {
            path: recipe_path.display().to_string(),
            reason: format!("failed to canonicalize recipe path: {e}"),
        }
        .into()),
    }
}
/// Construct the `Builder` for this build run from the ops context.
///
/// `network` switches on `default_allow_network` in the builder config.
/// `jobs` is accepted for interface compatibility but currently discarded
/// (see the explicit `let _ = jobs;` below).
fn configure_builder(ctx: &OpsCtx, network: bool, jobs: Option<usize>) -> sps2_builder::Builder {
    let mut builder_config = sps2_builder::BuildConfig::default();
    if network {
        builder_config.config.build.default_allow_network = true;
    }
    // NOTE(review): `jobs` is deliberately ignored — parallelism is not yet
    // plumbed into the builder config. Confirm before relying on a jobs flag.
    let _ = jobs;
    builder_config.sps2_config = Some(ctx.config.clone());
    sps2_builder::Builder::with_config(builder_config)
        .with_resolver(ctx.resolver.clone())
        .with_store(ctx.store.clone())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/types.rs | crates/ops/src/types.rs | //! Types for operations and results
use serde::{Deserialize, Serialize};
use sps2_events::HealthStatus;
use sps2_types::{OpChange, PackageSpec};
use std::collections::HashMap;
use std::path::PathBuf;
// No longer needed - uuid::Uuid imported from sps2_types
/// Operation report for complex operations
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OpReport {
/// Operation type
pub operation: String,
/// Whether the operation succeeded
pub success: bool,
/// Summary message
pub summary: String,
/// Detailed changes
pub changes: Vec<OpChange>,
/// Execution time in milliseconds
pub duration_ms: u64,
}
impl OpReport {
    /// Shared constructor used by the public `success`/`failure` helpers.
    fn build(
        operation: String,
        success: bool,
        summary: String,
        changes: Vec<OpChange>,
        duration_ms: u64,
    ) -> Self {
        Self {
            operation,
            success,
            summary,
            changes,
            duration_ms,
        }
    }

    /// Create success report
    #[must_use]
    pub fn success(
        operation: String,
        summary: String,
        changes: Vec<OpChange>,
        duration_ms: u64,
    ) -> Self {
        Self::build(operation, true, summary, changes, duration_ms)
    }

    /// Create failure report (always with an empty change list)
    #[must_use]
    pub fn failure(operation: String, summary: String, duration_ms: u64) -> Self {
        Self::build(operation, false, summary, Vec::new(), duration_ms)
    }
}
// OpChange and ChangeType are now imported from sps2_types
// PackageInfo is now imported from sps2_types
// PackageStatus is now imported from sps2_types
// SearchResult is now imported from sps2_types
// StateInfo is now imported from sps2_types
/// Health check results
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct HealthCheck {
/// Overall health status
pub healthy: bool,
/// Component checks
pub components: HashMap<String, ComponentHealth>,
/// Issues found
pub issues: Vec<HealthIssue>,
}
impl HealthCheck {
    /// Check if system is healthy
    ///
    /// Convenience accessor mirroring the `healthy` field.
    #[must_use]
    pub fn is_healthy(&self) -> bool {
        self.healthy
    }
}
/// Component health status
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ComponentHealth {
/// Component name
pub name: String,
/// Health status
pub status: HealthStatus,
/// Status message
pub message: String,
/// Check duration in milliseconds
pub check_duration_ms: u64,
}
// HealthStatus is now imported from sps2_events
/// Health issue
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct HealthIssue {
/// Component where issue was found
pub component: String,
/// Severity level
pub severity: IssueSeverity,
/// Issue description
pub description: String,
/// Suggested fix
pub suggestion: Option<String>,
}
/// Issue severity
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum IssueSeverity {
/// Low severity
Low,
/// Medium severity
Medium,
/// High severity
High,
/// Critical severity
Critical,
}
/// Install request type
#[derive(Clone, Debug)]
pub enum InstallRequest {
/// Install from repository
Remote(PackageSpec),
/// Install from local file
LocalFile(PathBuf),
}
// InstallReport is now imported from sps2_types
// PackageChange is now imported from sps2_types
// BuildReport is now imported from sps2_types
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/health.rs | crates/ops/src/health.rs | //! System Health and Diagnostics Operations
use crate::{ComponentHealth, HealthCheck, HealthIssue, IssueSeverity, OpsCtx};
use sps2_errors::Error;
use sps2_events::{
events::{HealthStatus, PackageOperation, PackageOutcome},
AppEvent, EventEmitter, PackageEvent,
};
use sps2_guard::{StoreVerificationConfig, StoreVerifier};
use std::collections::HashMap;
use std::convert::TryFrom;
use std::sync::Arc;
use std::time::Instant;
/// Check system health
///
/// # Errors
///
/// Returns an error if health check fails.
/// Check system health
///
/// Runs store, state-database, and index checks in sequence, recording
/// per-component timing in a map keyed by component id and collecting any
/// issues found. The overall result is healthy only if every component
/// reports `HealthStatus::Healthy`. Start/completion events are emitted
/// around the whole check.
///
/// # Errors
///
/// Returns an error if health check fails.
pub async fn check_health(ctx: &OpsCtx) -> Result<HealthCheck, Error> {
    // Overall timing is currently unused; only per-component timings are recorded below.
    let _start = Instant::now();
    ctx.emit(AppEvent::Package(PackageEvent::OperationStarted {
        operation: PackageOperation::HealthCheck,
    }));
    let mut components = HashMap::new();
    let mut issues = Vec::new();
    let mut overall_healthy = true;
    // Check store health
    let store_start = Instant::now();
    let store_health = check_store_health(ctx, &mut issues).await;
    components.insert(
        "store".to_string(),
        ComponentHealth {
            name: "Store".to_string(),
            status: store_health,
            message: "Package store integrity check".to_string(),
            // Saturate to u64::MAX instead of failing on (practically impossible) overflow.
            check_duration_ms: u64::try_from(store_start.elapsed().as_millis()).unwrap_or(u64::MAX),
        },
    );
    // Any non-healthy status (warning or error) makes the overall result unhealthy.
    if !matches!(store_health, HealthStatus::Healthy) {
        overall_healthy = false;
    }
    // Check state database health
    let state_start = Instant::now();
    let state_health = check_state_health(ctx, &mut issues).await;
    components.insert(
        "state".to_string(),
        ComponentHealth {
            name: "State Database".to_string(),
            status: state_health,
            message: "State database consistency check".to_string(),
            check_duration_ms: u64::try_from(state_start.elapsed().as_millis()).unwrap_or(u64::MAX),
        },
    );
    if !matches!(state_health, HealthStatus::Healthy) {
        overall_healthy = false;
    }
    // Check index health
    let index_start = Instant::now();
    let index_health = check_index_health(ctx, &mut issues);
    components.insert(
        "index".to_string(),
        ComponentHealth {
            name: "Package Index".to_string(),
            status: index_health,
            message: "Package index freshness check".to_string(),
            check_duration_ms: u64::try_from(index_start.elapsed().as_millis()).unwrap_or(u64::MAX),
        },
    );
    if !matches!(index_health, HealthStatus::Healthy) {
        overall_healthy = false;
    }
    let health_check = HealthCheck {
        healthy: overall_healthy,
        components,
        issues,
    };
    // Completion event carries the aggregate verdict and issue descriptions.
    ctx.emit(AppEvent::Package(PackageEvent::OperationCompleted {
        operation: PackageOperation::HealthCheck,
        outcome: PackageOutcome::Health {
            healthy: overall_healthy,
            issues: health_check
                .issues
                .iter()
                .map(|i| i.description.clone())
                .collect(),
        },
    }));
    Ok(health_check)
}
/// Check store health including verification status
#[allow(clippy::cast_precision_loss)]
async fn check_store_health(ctx: &OpsCtx, issues: &mut Vec<HealthIssue>) -> HealthStatus {
// Check if store directory exists and is accessible
if ctx.store.verify_integrity().is_err() {
issues.push(HealthIssue {
component: "store".to_string(),
severity: IssueSeverity::High,
description: "Package store integrity check failed".to_string(),
suggestion: Some("Run 'sps2 cleanup' to fix corrupted store entries".to_string()),
});
return HealthStatus::Error;
}
// Check store verification status
let config = StoreVerificationConfig::default();
let verifier = StoreVerifier::new(
Arc::new(ctx.state.clone()),
Arc::new(ctx.store.file_store().clone()),
config,
);
if let Ok(verification_stats) = verifier.get_stats().await {
let mut health_status = HealthStatus::Healthy;
// Check for failed verifications
if verification_stats.failed_count > 0 {
issues.push(HealthIssue {
component: "store".to_string(),
severity: IssueSeverity::Medium,
description: format!(
"{} store objects failed verification",
verification_stats.failed_count
),
suggestion: Some(
"Run 'sps2 verify --scope store' to re-verify failed objects".to_string(),
),
});
health_status = HealthStatus::Warning;
}
// Check for quarantined objects
if verification_stats.quarantined_count > 0 {
issues.push(HealthIssue {
component: "store".to_string(),
severity: IssueSeverity::High,
description: format!(
"{} store objects are quarantined due to corruption",
verification_stats.quarantined_count
),
suggestion: Some(
"Quarantined objects may need manual intervention or package reinstallation"
.to_string(),
),
});
health_status = HealthStatus::Error;
}
// Check for large number of pending verifications
let pending_percentage = if verification_stats.total_objects > 0 {
(verification_stats.pending_count as f64 / verification_stats.total_objects as f64)
* 100.0
} else {
0.0
};
if pending_percentage > 50.0 {
issues.push(HealthIssue {
component: "store".to_string(),
severity: IssueSeverity::Low,
description: format!("{pending_percentage:.1}% of store objects need verification"),
suggestion: Some(
"Run 'sps2 verify --scope store' to verify store integrity".to_string(),
),
});
if health_status == HealthStatus::Healthy {
health_status = HealthStatus::Warning;
}
}
health_status
} else {
issues.push(HealthIssue {
component: "store".to_string(),
severity: IssueSeverity::Medium,
description: "Unable to retrieve store verification statistics".to_string(),
suggestion: Some(
"Check database connectivity and run 'sps2 verify --scope store'".to_string(),
),
});
HealthStatus::Warning
}
}
/// Check state database health
async fn check_state_health(ctx: &OpsCtx, issues: &mut Vec<HealthIssue>) -> HealthStatus {
// Check database consistency
if ctx.state.verify_consistency().await.is_ok() {
HealthStatus::Healthy
} else {
issues.push(HealthIssue {
component: "state".to_string(),
severity: IssueSeverity::Critical,
description: "State database consistency check failed".to_string(),
suggestion: Some(
"Database may be corrupted, consider restoring from backup".to_string(),
),
});
HealthStatus::Error
}
}
/// Check index health
fn check_index_health(ctx: &OpsCtx, issues: &mut Vec<HealthIssue>) -> HealthStatus {
// Check if index is stale
if ctx.index.is_stale(7) {
issues.push(HealthIssue {
component: "index".to_string(),
severity: IssueSeverity::Medium,
description: "Package index is outdated (>7 days old)".to_string(),
suggestion: Some("Run 'sps2 reposync' to update package index".to_string()),
});
HealthStatus::Warning
} else {
HealthStatus::Healthy
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/maintenance.rs | crates/ops/src/maintenance.rs | //! System Cleanup and State Management Operations
use crate::{ChangeType, OpChange, OpsCtx, StateInfo};
use sps2_errors::{Error, OpsError};
use sps2_events::{
events::{PackageOperation, PackageOutcome},
AppEvent, EventEmitter, GeneralEvent, PackageEvent, RollbackContext, RollbackSummary,
StateEvent,
};
use std::convert::TryFrom;
use std::time::Instant;
/// Determine which states must be retained by the cleanup policy.
///
/// Keeps the `keep_count` most recent states, any state newer than
/// `keep_days` days, and always the currently active state.
///
/// NOTE(review): both the `take(keep_count)` prefix and the `break` in the
/// age loop assume `list_states_detailed` returns states newest-first —
/// confirm that ordering guarantee at the query layer.
async fn compute_kept_states(
    ctx: &OpsCtx,
    keep_count: usize,
    keep_days: i64,
) -> Result<std::collections::HashSet<Uuid>, Error> {
    let now = chrono::Utc::now().timestamp();
    let states = ctx.state.list_states_detailed().await?;
    let mut kept: std::collections::HashSet<Uuid> = std::collections::HashSet::new();
    // The most recent `keep_count` states are always kept.
    for id in states.iter().take(keep_count) {
        kept.insert(id.state_id());
    }
    if keep_days > 0 {
        // 86_400 seconds per day.
        let cutoff = now - (keep_days * 86_400);
        for st in &states {
            if st.created_at >= cutoff {
                kept.insert(st.state_id());
            } else {
                break;
            }
        }
    }
    // The active state is never eligible for eviction.
    kept.insert(ctx.state.get_current_state_id().await?);
    Ok(kept)
}
/// Collect the package hashes and file-object hashes referenced by kept states.
///
/// Returns `(package_hashes, file_hashes)`; hashes outside these sets become
/// eviction candidates. File-level hashes are gathered inside a single DB
/// transaction that is committed (read-only work) before returning.
async fn collect_required_hashes(
    ctx: &OpsCtx,
    kept: &std::collections::HashSet<Uuid>,
) -> Result<
    (
        std::collections::HashSet<String>,
        std::collections::HashSet<String>,
    ),
    Error,
> {
    // Package-level hashes: every installed package in every kept state.
    let mut required_pkg_hashes: std::collections::HashSet<String> =
        std::collections::HashSet::new();
    for st in kept {
        let pkgs = ctx.state.get_installed_packages_in_state(st).await?;
        for p in pkgs {
            required_pkg_hashes.insert(p.hash);
        }
    }
    // File-level hashes: every file entry of every package in every kept state.
    let mut required_file_hashes: std::collections::HashSet<String> =
        std::collections::HashSet::new();
    {
        let mut tx = ctx.state.begin_transaction().await?;
        for st in kept {
            let packages = sps2_state::queries::get_state_packages(&mut tx, st).await?;
            for pkg in &packages {
                let entries =
                    sps2_state::file_queries_runtime::get_package_file_entries(&mut tx, pkg.id)
                        .await?;
                for e in entries {
                    required_file_hashes.insert(e.file_hash);
                }
            }
        }
        tx.commit().await?;
    }
    Ok((required_pkg_hashes, required_file_hashes))
}
/// Snapshot of reference-time data and store inventory used by the eviction pass.
struct LastRefData {
    // Package hash -> last reference timestamp (compared against `Utc::now().timestamp()`).
    pkg_last_ref: std::collections::HashMap<String, i64>,
    // File-object hash -> last reference timestamp (same time base as above).
    obj_last_ref: std::collections::HashMap<String, i64>,
    // Full inventory of store package references.
    store_refs: Vec<sps2_state::StoreRef>,
    // Full inventory of content-addressed file objects.
    file_objs: Vec<sps2_state::FileObject>,
}
/// Load last-reference maps and full package/file inventories in one
/// read-only transaction.
async fn collect_last_ref_and_inventory(ctx: &OpsCtx) -> Result<LastRefData, Error> {
    let mut txn = ctx.state.begin_transaction().await?;
    let store_refs = sps2_state::queries::get_all_store_refs(&mut txn).await?;
    let pkg_last_ref = sps2_state::queries::get_package_last_ref_map(&mut txn).await?;
    let obj_last_ref = sps2_state::file_queries_runtime::get_file_last_ref_map(&mut txn).await?;
    let file_objs = sps2_state::file_queries_runtime::get_all_file_objects(&mut txn).await?;
    txn.commit().await?;
    Ok(LastRefData {
        store_refs,
        file_objs,
        pkg_last_ref,
        obj_last_ref,
    })
}
/// Evict store packages that no kept state references once their grace
/// period has elapsed.
///
/// Returns `(evicted_count, evicted_bytes)`. When `dry_run` is set nothing is
/// deleted and the counts reflect what *would* be evicted.
async fn evict_packages(
    ctx: &OpsCtx,
    last: &LastRefData,
    required_pkg_hashes: &std::collections::HashSet<String>,
    pkg_grace_secs: i64,
    now: i64,
    dry_run: bool,
) -> Result<(usize, i64), Error> {
    let mut count = 0usize;
    let mut bytes = 0i64;
    for sr in &last.store_refs {
        // Never evict anything a kept state still needs.
        if required_pkg_hashes.contains(&sr.hash) {
            continue;
        }
        // A missing last-ref entry defaults to 0, which always passes the grace check.
        let last_ref = *last.pkg_last_ref.get(&sr.hash).unwrap_or(&0);
        if last_ref == 0 || (now - last_ref) >= pkg_grace_secs {
            let hash = sps2_hash::Hash::from_hex(&sr.hash).map_err(|e| {
                sps2_errors::Error::internal(format!("invalid hash {}: {e}", sr.hash))
            })?;
            if !dry_run {
                // Store removal is best-effort — its error is ignored (`let _`);
                // the eviction record insert + commit below are mandatory.
                let _ = ctx.store.remove_package(&hash).await;
                let mut tx = ctx.state.begin_transaction().await?;
                sps2_state::queries::insert_package_eviction(
                    &mut tx,
                    &sr.hash,
                    sr.size,
                    Some("policy"),
                )
                .await?;
                tx.commit().await?;
            }
            count += 1;
            bytes += sr.size;
        }
    }
    Ok((count, bytes))
}
/// Evict unreferenced content-addressed file objects past their grace period.
///
/// Mirrors `evict_packages`, but operates on individual file objects in the
/// file store. Returns `(evicted_count, evicted_bytes)`; `dry_run` counts
/// without deleting.
async fn evict_objects(
    ctx: &OpsCtx,
    last: &LastRefData,
    required_file_hashes: &std::collections::HashSet<String>,
    obj_grace_secs: i64,
    now: i64,
    dry_run: bool,
) -> Result<(usize, i64), Error> {
    let mut count = 0usize;
    let mut bytes = 0i64;
    for fo in &last.file_objs {
        // Never evict a file object a kept state still references.
        if required_file_hashes.contains(&fo.hash) {
            continue;
        }
        // A missing last-ref entry defaults to 0, which always passes the grace check.
        let last_ref = *last.obj_last_ref.get(&fo.hash).unwrap_or(&0);
        if last_ref == 0 || (now - last_ref) >= obj_grace_secs {
            let fh = sps2_hash::Hash::from_hex(&fo.hash).map_err(|e| {
                sps2_errors::Error::internal(format!("invalid file hash {}: {e}", fo.hash))
            })?;
            if !dry_run {
                // Best-effort removal; the DB eviction record is always written.
                let _ = ctx.store.file_store().remove_file(&fh).await;
                let mut tx = ctx.state.begin_transaction().await?;
                sps2_state::queries::insert_file_object_eviction(
                    &mut tx,
                    &fo.hash,
                    fo.size,
                    Some("policy"),
                )
                .await?;
                tx.commit().await?;
            }
            count += 1;
            bytes += fo.size;
        }
    }
    Ok((count, bytes))
}
use uuid::Uuid;
/// Clean up orphaned packages and old states
///
/// # Errors
///
/// Returns an error if cleanup operation fails.
/// Clean up orphaned packages and old states
///
/// Pipeline: prune legacy states and orphaned staging dirs via the state
/// layer, then apply the CAS retention policy — compute kept states, gather
/// the hashes they require, and evict unreferenced packages and file objects
/// whose grace period has elapsed. Honors `cas.dry_run` (report only).
///
/// # Errors
///
/// Returns an error if cleanup operation fails.
pub async fn cleanup(ctx: &OpsCtx) -> Result<String, Error> {
    let start = Instant::now();
    ctx.emit(AppEvent::Package(PackageEvent::OperationStarted {
        operation: PackageOperation::Cleanup,
    }));
    // Legacy snapshots and orphaned stagings
    let cleanup_result = ctx
        .state
        .cleanup(
            ctx.config.state.retention_count,
            ctx.config.state.retention_days,
        )
        .await?;
    // CAS policy cleanup
    let cas_cfg = &ctx.config.cas;
    let keep_count = cas_cfg.keep_states_count;
    let keep_days = i64::from(cas_cfg.keep_days);
    // Grace periods are configured in days; eviction compares in seconds.
    let pkg_grace_secs = i64::from(cas_cfg.package_grace_days) * 86_400;
    let obj_grace_secs = i64::from(cas_cfg.object_grace_days) * 86_400;
    let now = chrono::Utc::now().timestamp();
    let kept = compute_kept_states(ctx, keep_count, keep_days).await?;
    let (required_pkg_hashes, required_file_hashes) = collect_required_hashes(ctx, &kept).await?;
    let last = collect_last_ref_and_inventory(ctx).await?;
    let (packages_evicted, pkg_space_freed) = evict_packages(
        ctx,
        &last,
        &required_pkg_hashes,
        pkg_grace_secs,
        now,
        cas_cfg.dry_run,
    )
    .await?;
    let (objects_evicted, obj_space_freed) = evict_objects(
        ctx,
        &last,
        &required_file_hashes,
        obj_grace_secs,
        now,
        cas_cfg.dry_run,
    )
    .await?;
    let duration = u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX);
    let message = if cas_cfg.dry_run {
        format!(
            "Dry-run: would prune {} states, remove {} dirs, {} packages ({} bytes), {} objects ({} bytes)",
            cleanup_result.states_pruned,
            cleanup_result.states_removed,
            packages_evicted,
            pkg_space_freed,
            objects_evicted,
            obj_space_freed
        )
    } else {
        format!(
            "Pruned {} states, cleaned {} dirs, removed {} packages ({} bytes), {} objects ({} bytes)",
            cleanup_result.states_pruned,
            cleanup_result.states_removed,
            packages_evicted,
            pkg_space_freed,
            objects_evicted,
            obj_space_freed
        )
    };
    ctx.emit(AppEvent::Package(PackageEvent::OperationCompleted {
        operation: PackageOperation::Cleanup,
        outcome: PackageOutcome::Cleanup {
            states_removed: cleanup_result.states_removed,
            packages_removed: packages_evicted,
            duration_ms: duration,
        },
    }));
    // Failure to write the GC timestamp is non-fatal: emit a warning and continue.
    if let Err(e) = update_gc_timestamp().await {
        use sps2_events::events::GeneralEvent;
        ctx.emit(AppEvent::General(GeneralEvent::warning_with_context(
            "Failed to update GC timestamp",
            e.to_string(),
        )));
    }
    Ok(message)
}
/// Rollback to a previous state
///
/// # Errors
///
/// Returns an error if:
/// - No previous state exists
/// - Rollback operation fails
/// Rollback to a previous state
///
/// Defaults to the parent of the current state when `target_state` is `None`.
/// In check mode this only previews the changes. The rollback itself uses
/// move semantics: the target state becomes active without creating a new one.
///
/// # Errors
///
/// Returns an error if:
/// - No previous state exists
/// - Rollback operation fails
pub async fn rollback(ctx: &OpsCtx, target_state: Option<Uuid>) -> Result<StateInfo, Error> {
    let start = Instant::now();
    // Check mode: preview what would be rolled back
    if ctx.check_mode {
        return preview_rollback(ctx, target_state).await;
    }
    let current_before = ctx.state.get_current_state_id().await?;
    // If no target specified, rollback to previous state
    let target_id = if let Some(id) = target_state {
        id
    } else {
        ctx.state
            .get_parent_state_id(&current_before)
            .await?
            .ok_or(OpsError::NoPreviousState)?
    };
    ctx.emit(AppEvent::State(StateEvent::RollbackStarted {
        context: RollbackContext {
            from: current_before,
            to: target_id,
        },
    }));
    // Verify target state exists in database
    if !ctx.state.state_exists(&target_id).await? {
        return Err(OpsError::StateNotFound {
            state_id: target_id,
        }
        .into());
    }
    // Filesystem snapshot presence is no longer required; rollback reconstructs incrementally.
    // Calculate changes BEFORE rollback (current -> target)
    let rollback_changes = calculate_state_changes(ctx, &current_before, &target_id).await?;
    // Perform rollback using atomic installer
    let mut atomic_installer =
        sps2_install::AtomicInstaller::new(ctx.state.clone(), ctx.store.clone());
    // Move semantics: make target the active state without creating a new one
    atomic_installer.rollback_move_to_state(target_id).await?;
    // Get state information with pre-calculated changes
    let state_info = get_rollback_state_info_with_changes(ctx, target_id, rollback_changes).await?;
    let summary = RollbackSummary {
        duration_ms: Some(u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX)),
    };
    let context = RollbackContext {
        from: current_before,
        to: target_id,
    };
    // Emit through `ctx.emit` (was `ctx.tx.emit`) to match RollbackStarted
    // and the rest of this module's event emission.
    ctx.emit(AppEvent::State(StateEvent::RollbackCompleted {
        context,
        summary: Some(summary),
    }));
    Ok(state_info)
}
/// Preview what would be rolled back without executing
#[allow(clippy::too_many_lines)]
async fn preview_rollback(ctx: &OpsCtx, target_state: Option<Uuid>) -> Result<StateInfo, Error> {
use std::collections::HashMap;
// Resolve target state (same logic as main rollback)
let target_id = if let Some(id) = target_state {
id
} else {
let current_id = ctx.state.get_current_state_id().await?;
ctx.state
.get_parent_state_id(¤t_id)
.await?
.ok_or(OpsError::NoPreviousState)?
};
// Validate target state exists (same validation as main rollback)
if !ctx.state.state_exists(&target_id).await? {
return Err(OpsError::StateNotFound {
state_id: target_id,
}
.into());
}
// Filesystem snapshot presence is no longer required for rollback preview.
// Calculate changes (current -> target)
let current_id = ctx.state.get_current_state_id().await?;
let changes = calculate_state_changes(ctx, ¤t_id, &target_id).await?;
// Emit preview events for each change
let mut added_count = 0;
let mut removed_count = 0;
let mut updated_count = 0;
for change in &changes {
let (action, change_type, details) = match change.change_type {
ChangeType::Install => {
added_count += 1;
let version = change
.new_version
.as_ref()
.map_or_else(|| "unknown".to_string(), ToString::to_string);
(
format!("Would add {} {}", change.package, version),
"add",
HashMap::from([
("package".to_string(), change.package.clone()),
("new_version".to_string(), version),
]),
)
}
ChangeType::Remove => {
removed_count += 1;
let version = change
.old_version
.as_ref()
.map_or_else(|| "unknown".to_string(), ToString::to_string);
(
format!("Would remove {} {}", change.package, version),
"remove",
HashMap::from([
("package".to_string(), change.package.clone()),
("current_version".to_string(), version),
]),
)
}
ChangeType::Update => {
updated_count += 1;
let old_version = change
.old_version
.as_ref()
.map_or_else(|| "unknown".to_string(), ToString::to_string);
let new_version = change
.new_version
.as_ref()
.map_or_else(|| "unknown".to_string(), ToString::to_string);
(
format!(
"Would update {} {} → {}",
change.package, old_version, new_version
),
"update",
HashMap::from([
("package".to_string(), change.package.clone()),
("current_version".to_string(), old_version),
("target_version".to_string(), new_version),
]),
)
}
ChangeType::Downgrade => {
updated_count += 1;
let old_version = change
.old_version
.as_ref()
.map_or_else(|| "unknown".to_string(), ToString::to_string);
let new_version = change
.new_version
.as_ref()
.map_or_else(|| "unknown".to_string(), ToString::to_string);
(
format!(
"Would downgrade {} {} → {}",
change.package, old_version, new_version
),
"downgrade",
HashMap::from([
("package".to_string(), change.package.clone()),
("current_version".to_string(), old_version),
("target_version".to_string(), new_version),
]),
)
}
};
let mut event_details = details;
event_details.insert("target_state".to_string(), target_id.to_string());
event_details.insert("change_type".to_string(), change_type.to_string());
ctx.emit(AppEvent::General(GeneralEvent::CheckModePreview {
operation: "rollback".to_string(),
action,
details: event_details,
}));
}
// Emit summary
let total_changes = changes.len();
let mut categories = HashMap::new();
if added_count > 0 {
categories.insert("packages_added".to_string(), added_count);
}
if removed_count > 0 {
categories.insert("packages_removed".to_string(), removed_count);
}
if updated_count > 0 {
categories.insert("packages_updated".to_string(), updated_count);
}
ctx.emit(AppEvent::General(GeneralEvent::CheckModeSummary {
operation: "rollback".to_string(),
total_changes,
categories,
}));
// Get target state info for preview (reuse existing function)
let state_info = get_rollback_state_info_with_changes(ctx, target_id, changes).await?;
Ok(state_info)
}
/// Get history of states
///
/// # Errors
///
/// Returns an error if state history retrieval fails.
/// Get history of states
///
/// Base mode lists unpruned states plus the current state; `show_all`
/// includes pruned states too. With `verify`, states are additionally
/// checked for availability in the store (unavailable ones are skipped) and
/// the output is capped by `limit_override` or the configured
/// `history_verify_limit`, newest entries first.
///
/// # Errors
///
/// Returns an error if state history retrieval fails.
pub async fn history(
    ctx: &OpsCtx,
    show_all: bool,
    verify: bool,
    limit_override: Option<usize>,
) -> Result<Vec<StateInfo>, Error> {
    let all_states = ctx.state.list_states_detailed().await?;
    let current_id = ctx.state.get_current_state_id().await?;
    if verify {
        // Deep verify across full DB history; cap by override or config (newest first)
        let limit = limit_override.unwrap_or(ctx.config.state.history_verify_limit);
        let mut out = Vec::new();
        for state in &all_states {
            let id = state.state_id();
            if is_state_available(ctx, &id).await? {
                let parent_id = state
                    .parent_id
                    .as_ref()
                    .and_then(|p| uuid::Uuid::parse_str(p).ok());
                let package_count = get_state_package_count(ctx, &id).await?;
                // Root states (no parent) report all packages as installs.
                let changes = if let Some(parent_id) = parent_id {
                    calculate_state_changes(ctx, &parent_id, &id).await?
                } else {
                    get_initial_state_changes(ctx, &id).await?
                };
                out.push(StateInfo {
                    id,
                    parent: parent_id,
                    timestamp: state.timestamp(),
                    operation: state.operation.clone(),
                    // Direct comparison (was the redundant `Some(current_id) == Some(id)`).
                    current: current_id == id,
                    package_count,
                    total_size: 0,
                    changes,
                });
                if out.len() >= limit {
                    break;
                }
            }
        }
        return Ok(out);
    }
    let mut state_infos = Vec::new();
    for state in &all_states {
        let id = state.state_id();
        // Default base history: show unpruned states; always include current
        if !(show_all || state.pruned_at.is_none() || id == current_id) {
            continue;
        }
        let parent_id = state
            .parent_id
            .as_ref()
            .and_then(|p| uuid::Uuid::parse_str(p).ok());
        let package_count = get_state_package_count(ctx, &id).await?;
        let changes = if let Some(parent_id) = parent_id {
            calculate_state_changes(ctx, &parent_id, &id).await?
        } else {
            get_initial_state_changes(ctx, &id).await?
        };
        state_infos.push(StateInfo {
            id,
            parent: parent_id,
            timestamp: state.timestamp(),
            operation: state.operation.clone(),
            current: current_id == id,
            package_count,
            total_size: 0,
            changes,
        });
    }
    Ok(state_infos)
}
async fn is_state_available(ctx: &OpsCtx, state_id: &Uuid) -> Result<bool, Error> {
// Check every package in the state
let packages = ctx.state.get_installed_packages_in_state(state_id).await?;
for pkg in packages {
// Legacy path: package dir in store must exist
let hash = sps2_hash::Hash::from_hex(&pkg.hash)
.map_err(|e| sps2_errors::Error::internal(format!("invalid hash {}: {e}", pkg.hash)))?;
if !ctx.store.has_package(&hash).await {
return Ok(false);
}
// File-level path: ensure all referenced file objects exist
let mut tx = ctx.state.begin_transaction().await?;
let file_entries = sps2_state::file_queries_runtime::get_package_file_entries_by_name(
&mut tx,
state_id,
&pkg.name,
&pkg.version,
)
.await?;
tx.commit().await?;
if !file_entries.is_empty() {
for entry in file_entries {
let fh = sps2_hash::Hash::from_hex(&entry.file_hash).map_err(|e| {
sps2_errors::Error::internal(format!(
"invalid file hash {}: {e}",
entry.file_hash
))
})?;
if !ctx.store.file_store().has_file(&fh).await {
return Ok(false);
}
}
}
}
Ok(true)
}
/// Get rollback state information with pre-calculated changes
async fn get_rollback_state_info_with_changes(
ctx: &OpsCtx,
target_id: Uuid,
changes: Vec<OpChange>,
) -> Result<StateInfo, Error> {
let states = ctx.state.list_states_detailed().await?;
let current_id = ctx.state.get_current_state_id().await?;
let state =
states
.iter()
.find(|s| s.state_id() == target_id)
.ok_or(OpsError::StateNotFound {
state_id: target_id,
})?;
let parent_id = state
.parent_id
.as_ref()
.and_then(|p| uuid::Uuid::parse_str(p).ok());
// Get actual package count for target state
let package_count = get_state_package_count(ctx, &target_id).await?;
Ok(StateInfo {
id: target_id,
parent: parent_id,
timestamp: state.timestamp(),
operation: state.operation.clone(),
current: Some(current_id) == Some(target_id),
package_count,
total_size: 0, // TODO: Calculate actual size
changes, // Use pre-calculated changes
})
}
/// Get package count for a specific state
async fn get_state_package_count(ctx: &OpsCtx, state_id: &Uuid) -> Result<usize, Error> {
let packages = ctx.state.get_state_packages(state_id).await?;
Ok(packages.len())
}
/// Calculate changes between parent and child states
async fn calculate_state_changes(
ctx: &OpsCtx,
parent_id: &Uuid,
child_id: &Uuid,
) -> Result<Vec<OpChange>, Error> {
let parent_packages = ctx.state.get_installed_packages_in_state(parent_id).await?;
let child_packages = ctx.state.get_installed_packages_in_state(child_id).await?;
let mut changes = Vec::new();
// Convert to maps for easier comparison (name -> version)
let parent_map: std::collections::HashMap<String, String> = parent_packages
.iter()
.map(|p| (p.name.clone(), p.version.clone()))
.collect();
let child_map: std::collections::HashMap<String, String> = child_packages
.iter()
.map(|p| (p.name.clone(), p.version.clone()))
.collect();
// Find packages that were added (in child but not parent)
for (package_name, version) in &child_map {
if !parent_map.contains_key(package_name) {
changes.push(OpChange {
change_type: ChangeType::Install,
package: package_name.clone(),
old_version: None,
new_version: version.parse().ok(),
});
} else if let Some(parent_version) = parent_map.get(package_name) {
// Check for version changes
if version != parent_version {
// For now, we'll just detect differences, not direction of change
changes.push(OpChange {
change_type: ChangeType::Update,
package: package_name.clone(),
old_version: parent_version.parse().ok(),
new_version: version.parse().ok(),
});
}
}
}
// Find packages that were removed (in parent but not child)
for (package_name, version) in &parent_map {
if !child_map.contains_key(package_name) {
changes.push(OpChange {
change_type: ChangeType::Remove,
package: package_name.clone(),
old_version: version.parse().ok(),
new_version: None,
});
}
}
Ok(changes)
}
/// Get changes for initial state (all packages are installs)
async fn get_initial_state_changes(ctx: &OpsCtx, state_id: &Uuid) -> Result<Vec<OpChange>, Error> {
let packages = ctx.state.get_state_packages(state_id).await?;
let mut changes = Vec::new();
for package in packages {
changes.push(OpChange {
change_type: ChangeType::Install,
package,
old_version: None,
new_version: None, // Would need actual Package data
});
}
Ok(changes)
}
/// Update the GC timestamp after successful cleanup
async fn update_gc_timestamp() -> Result<(), Error> {
let timestamp_path = std::path::Path::new(sps2_config::fixed_paths::LAST_GC_TIMESTAMP);
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs();
tokio::fs::write(timestamp_path, now.to_string())
.await
.map_err(|e| sps2_errors::Error::internal(format!("Failed to write GC timestamp: {e}")))?;
Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/query.rs | crates/ops/src/query.rs | //! Package Information and Search Operations
use crate::{OpsCtx, PackageInfo, PackageStatus, SearchResult};
use sps2_errors::{Error, OpsError};
use sps2_events::{
events::{PackageOperation, PackageOutcome},
AppEvent, EventEmitter, PackageEvent,
};
use sps2_hash::Hash;
use sps2_store::StoredPackage;
/// List installed packages
///
/// # Errors
///
/// Returns an error if package listing fails.
/// List installed packages
///
/// For each installed package, metadata (description, homepage, license,
/// runtime deps) is taken from the index entry when available and
/// back-filled from the stored package's manifest for any field the index
/// is missing. Status is `Outdated` when the index has a newer version.
/// Results are sorted by name.
///
/// # Errors
///
/// Returns an error if package listing fails.
pub async fn list_packages(ctx: &OpsCtx) -> Result<Vec<PackageInfo>, Error> {
    let _correlation = ctx.push_correlation("query:list");
    ctx.emit(AppEvent::Package(PackageEvent::OperationStarted {
        operation: PackageOperation::List,
    }));
    // Get installed packages from state
    let installed_packages = ctx.state.get_installed_packages().await?;
    let mut package_infos = Vec::new();
    for package in installed_packages {
        // Get package details from index
        let package_version = package.version();
        let index_entry = ctx
            .index
            .get_version(&package.name, &package_version.to_string());
        let (mut description, mut homepage, mut license, mut dependencies) =
            if let Some(entry) = index_entry {
                (
                    entry.description.clone(),
                    entry.homepage.clone(),
                    entry.license.clone(),
                    entry.dependencies.runtime.clone(),
                )
            } else {
                (None, None, None, Vec::new())
            };
        // Back-fill any missing field from the stored package's manifest
        // (best-effort: load failures silently leave fields as-is).
        if description.is_none()
            || homepage.is_none()
            || license.is_none()
            || dependencies.is_empty()
        {
            if let Ok(hash) = Hash::from_hex(&package.hash) {
                let package_path = ctx.store.package_path(&hash);
                if let Ok(stored) = StoredPackage::load(&package_path).await {
                    let manifest = stored.manifest();
                    if description.is_none() {
                        description.clone_from(&manifest.package.description);
                    }
                    if homepage.is_none() {
                        homepage.clone_from(&manifest.package.homepage);
                    }
                    if license.is_none() {
                        license.clone_from(&manifest.package.license);
                    }
                    if dependencies.is_empty() {
                        dependencies.clone_from(&manifest.dependencies.runtime);
                    }
                }
            }
        }
        // Check if there's an available update
        // (versions list is consulted for its first entry only)
        let available_version = ctx
            .index
            .get_package_versions_with_strings(&package.name)
            .and_then(|versions| {
                versions
                    .first()
                    .and_then(|(ver_str, _)| sps2_types::Version::parse(ver_str).ok())
            });
        let status = match &available_version {
            Some(avail) if avail > &package_version => PackageStatus::Outdated,
            _ => PackageStatus::Installed,
        };
        // Get package size from state database
        let size = Some(u64::try_from(package.size).unwrap_or(0));
        let package_info = PackageInfo {
            name: package.name.clone(),
            version: Some(package_version),
            available_version,
            description,
            homepage,
            license,
            status,
            dependencies,
            size,
            arch: None, // TODO: Get actual architecture
            installed: true,
        };
        package_infos.push(package_info);
    }
    // Sort by name
    package_infos.sort_by(|a, b| a.name.cmp(&b.name));
    ctx.emit(AppEvent::Package(PackageEvent::OperationCompleted {
        operation: PackageOperation::List,
        outcome: PackageOutcome::List {
            total: package_infos.len(),
        },
    }));
    Ok(package_infos)
}
/// Get information about a specific package
///
/// # Errors
///
/// Returns an error if package information retrieval fails.
/// Get information about a specific package
///
/// Combines installed-state information (version, size) with the latest
/// index entry (description, homepage, license, runtime deps) and derives a
/// status by comparing installed vs. latest available version.
///
/// # Errors
///
/// Returns an error if package information retrieval fails.
pub async fn package_info(ctx: &OpsCtx, package_name: &str) -> Result<PackageInfo, Error> {
    // Locate the installed entry once; both the version and the size
    // derive from it (previously this list was scanned twice).
    let installed_packages = ctx.state.get_installed_packages().await?;
    let installed_pkg = installed_packages
        .iter()
        .find(|pkg| pkg.name == package_name);
    let installed_version = installed_pkg.map(sps2_state::Package::version);
    // Get available versions from index (with version strings)
    let versions = ctx
        .index
        .get_package_versions_with_strings(package_name)
        .ok_or_else(|| OpsError::PackageNotFound {
            package: package_name.to_string(),
        })?;
    let (latest_version_str, latest_entry) =
        versions.first().ok_or_else(|| OpsError::PackageNotFound {
            package: package_name.to_string(),
        })?;
    let available_version = sps2_types::Version::parse(latest_version_str)?;
    let status = match &installed_version {
        Some(installed) => {
            match installed.cmp(&available_version) {
                std::cmp::Ordering::Equal => PackageStatus::Installed,
                std::cmp::Ordering::Less => PackageStatus::Outdated,
                std::cmp::Ordering::Greater => PackageStatus::Local, // Newer than available
            }
        }
        None => PackageStatus::Available,
    };
    // Package size (bytes) from the state database, only when installed.
    let size = installed_pkg.map(|pkg| u64::try_from(pkg.size).unwrap_or(0));
    let package_info = PackageInfo {
        name: package_name.to_string(),
        version: installed_version.clone(),
        available_version: Some(available_version),
        description: latest_entry.description.clone(),
        homepage: latest_entry.homepage.clone(),
        license: latest_entry.license.clone(),
        status,
        dependencies: latest_entry.dependencies.runtime.clone(),
        size,
        arch: None, // TODO: Get actual architecture
        installed: installed_version.is_some(),
    };
    Ok(package_info)
}
/// Search for packages
///
/// # Errors
///
/// Returns an error if package search fails.
pub async fn search_packages(ctx: &OpsCtx, query: &str) -> Result<Vec<SearchResult>, Error> {
    let _correlation = ctx.push_correlation(format!("query:search:{query}"));
    ctx.emit(AppEvent::Package(PackageEvent::OperationStarted {
        operation: PackageOperation::Search,
    }));
    // Match the query against package names in the repository index.
    let matching_names = ctx.index.search(query);
    let mut results = Vec::new();
    let installed_packages = ctx.state.get_installed_packages().await?;
    for name in matching_names {
        // Only the newest indexed version of each matching package is
        // reported; names without a parseable version are skipped.
        let Some(versions) = ctx.index.get_package_versions_with_strings(name) else {
            continue;
        };
        let Some((version_str, latest)) = versions.first() else {
            continue;
        };
        let Ok(version) = sps2_types::Version::parse(version_str) else {
            continue;
        };
        let installed = installed_packages.iter().any(|pkg| pkg.name == name);
        results.push(SearchResult {
            name: name.to_string(),
            version,
            description: latest.description.clone(),
            homepage: latest.homepage.clone(),
            installed,
        });
    }
    ctx.emit(AppEvent::Package(PackageEvent::OperationCompleted {
        operation: PackageOperation::Search,
        outcome: PackageOutcome::Search {
            query: query.to_string(),
            total: results.len(),
        },
    }));
    Ok(results)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::OpsContextBuilder;
    use sps2_builder::Builder;
    use sps2_config::Config;
    use sps2_install::{AtomicInstaller, InstallContext, PreparedPackage};
    use sps2_net::{NetClient, NetConfig};
    use sps2_resolver::{PackageId, ResolvedNode, Resolver};
    use sps2_state::StateManager;
    use sps2_store::{create_package, PackageStore};
    use sps2_types::{Arch, Manifest, Version};
    use std::collections::HashMap;
    use tempfile::TempDir;
    use tokio::fs as afs;
    /// Build a minimal `.sp` package on disk (manifest + one content file),
    /// add it to `store`, and return its (hash, store path, file size).
    async fn make_package(
        store: &PackageStore,
        name: &str,
        version: &str,
        description: &str,
    ) -> (sps2_hash::Hash, std::path::PathBuf, u64) {
        let td = TempDir::new().expect("package tempdir");
        let src = td.path().join("src");
        afs::create_dir_all(&src).await.expect("src dir");
        let version_parsed = Version::parse(version).expect("version");
        let mut manifest = Manifest::new(name.to_string(), &version_parsed, 1, &Arch::Arm64);
        manifest.package.description = Some(description.to_string());
        sps2_store::manifest_io::write_manifest(&src.join("manifest.toml"), &manifest)
            .await
            .expect("write manifest");
        // Content must live under the live prefix so the packager picks it up.
        let content_dir = src.join("opt/pm/live/share");
        afs::create_dir_all(&content_dir)
            .await
            .expect("content dir");
        afs::write(content_dir.join("file.txt"), description.as_bytes())
            .await
            .expect("write file");
        let sp_path = td.path().join("pkg.sp");
        create_package(&src, &sp_path)
            .await
            .expect("create package");
        let stored = store.add_package(&sp_path).await.expect("add package");
        let hash = stored.hash().expect("hash");
        let path = store.package_path(&hash);
        let size = afs::metadata(&sp_path).await.expect("metadata").len();
        (hash, path, size)
    }
    /// `list_packages` should fall back to the stored package manifest for
    /// metadata (here: the description) when the repository index has no
    /// entry for the installed package.
    #[tokio::test]
    async fn list_packages_uses_manifest_description_when_index_missing() {
        let temp_dir = TempDir::new().expect("ops tempdir");
        let state_dir = temp_dir.path().join("state");
        let store_dir = temp_dir.path().join("store");
        afs::create_dir_all(&state_dir).await.expect("state dir");
        afs::create_dir_all(&store_dir).await.expect("store dir");
        let state = StateManager::new(&state_dir).await.expect("state manager");
        let store = PackageStore::new(store_dir.clone());
        let description = "Demo package description";
        let (hash, store_path, size) = make_package(&store, "demo", "1.2.3", description).await;
        // Install the package via the atomic installer so it appears in the
        // state database as an installed package.
        let mut atomic = AtomicInstaller::new(state.clone(), store.clone());
        let pkg_id = PackageId::new("demo".to_string(), Version::parse("1.2.3").unwrap());
        let mut resolved_nodes = HashMap::new();
        resolved_nodes.insert(
            pkg_id.clone(),
            ResolvedNode::local(
                "demo".to_string(),
                pkg_id.version.clone(),
                store_path.clone(),
                vec![],
            ),
        );
        let mut prepared = HashMap::new();
        prepared.insert(
            pkg_id.clone(),
            PreparedPackage {
                hash,
                size,
                store_path,
                is_local: true,
                package_hash: None,
            },
        );
        let install_ctx = InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        atomic
            .install(&install_ctx, &resolved_nodes, Some(&prepared))
            .await
            .expect("install package");
        // Build an ops context whose index is empty (points at a fresh dir),
        // forcing the manifest fallback path.
        let (tx, _rx) = sps2_events::channel();
        let config = Config::default();
        let index = sps2_index::IndexManager::new(temp_dir.path().join("index"));
        let net = NetClient::new_without_proxies(NetConfig::default()).expect("net client");
        let resolver_instance = Resolver::with_events(index.clone(), tx.clone());
        let builder = Builder::new();
        let ctx = OpsContextBuilder::new()
            .with_state(state)
            .with_store(store)
            .with_index(index)
            .with_net(net)
            .with_resolver(resolver_instance)
            .with_builder(builder)
            .with_event_sender(tx)
            .with_config(config)
            .build()
            .expect("ops ctx");
        let packages = list_packages(&ctx).await.expect("list packages");
        assert_eq!(packages.len(), 1);
        assert_eq!(packages[0].name, "demo");
        assert_eq!(packages[0].description.as_deref(), Some(description));
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/context.rs | crates/ops/src/context.rs | //! Operations context for dependency injection
use sps2_builder::Builder;
use sps2_config::Config;
use sps2_events::{EventEmitter, EventSender};
use sps2_index::IndexManager;
use sps2_net::NetClient;
use sps2_resolver::Resolver;
use sps2_state::StateManager;
use sps2_store::PackageStore;
use std::cell::RefCell;
use std::fmt::Write;
/// Operations context providing access to all system components.
pub struct OpsCtx {
    /// Content-addressed package store.
    pub store: PackageStore,
    /// State manager (installed packages, state database).
    pub state: StateManager,
    /// Repository index manager.
    pub index: IndexManager,
    /// Network client for repository access.
    pub net: NetClient,
    /// Dependency resolver.
    pub resolver: Resolver,
    /// Package builder.
    pub builder: Builder,
    /// Sender used to emit application events.
    pub tx: EventSender,
    /// Loaded configuration.
    pub config: Config,
    /// When true, events are labeled `check_mode=true` (see `enrich_event_meta`).
    pub check_mode: bool,
    // Interior-mutable slot for the active correlation id; managed through
    // `push_correlation` and restored by `CorrelationGuard` on drop.
    correlation_id: RefCell<Option<String>>,
}
impl EventEmitter for OpsCtx {
    fn event_sender(&self) -> Option<&EventSender> {
        Some(&self.tx)
    }

    /// Stamp outgoing events with the active correlation id and, in check
    /// mode, a `check_mode=true` label (without overwriting an existing one).
    fn enrich_event_meta(&self, _event: &sps2_events::AppEvent, meta: &mut sps2_events::EventMeta) {
        if let Some(id) = self.correlation_id.borrow().as_ref() {
            meta.correlation_id = Some(id.clone());
        }
        if self.check_mode {
            meta.labels
                .entry("check_mode".to_string())
                .or_insert_with(|| "true".to_string());
        }
    }
}
impl OpsCtx {
    /// Set the active correlation id, returning a guard that restores the
    /// previously active id (possibly `None`) when dropped.
    #[must_use]
    pub fn push_correlation(&self, correlation: impl Into<String>) -> CorrelationGuard<'_> {
        let previous = self
            .correlation_id
            .borrow_mut()
            .replace(correlation.into());
        CorrelationGuard {
            ctx: self,
            previous,
        }
    }

    /// Push a correlation id of the form `op:pkg1,pkg2,pkg3,+N`, sampling at
    /// most three package names and appending the overflow count.
    #[must_use]
    pub fn push_correlation_for_packages(
        &self,
        operation: &str,
        packages: &[String],
    ) -> CorrelationGuard<'_> {
        let mut identifier = operation.to_string();
        if !packages.is_empty() {
            let sample_len = packages.len().min(3);
            let sample: Vec<&str> = packages[..sample_len]
                .iter()
                .map(String::as_str)
                .collect();
            identifier.push(':');
            identifier.push_str(&sample.join(","));
            let overflow = packages.len() - sample_len;
            if overflow > 0 {
                let _ = write!(&mut identifier, ",+{overflow}");
            }
        }
        self.push_correlation(identifier)
    }

    /// Snapshot of the currently active correlation id, if any.
    #[must_use]
    pub fn current_correlation(&self) -> Option<String> {
        self.correlation_id.borrow().as_ref().cloned()
    }
}
/// RAII guard returned by `OpsCtx::push_correlation`; restores the
/// previously active correlation id when dropped.
pub struct CorrelationGuard<'a> {
    // Context whose correlation slot is restored on drop.
    ctx: &'a OpsCtx,
    // Correlation id that was active before this guard was created.
    previous: Option<String>,
}
impl Drop for CorrelationGuard<'_> {
    fn drop(&mut self) {
        // Move the prior id back into the slot; `take` avoids a clone.
        *self.ctx.correlation_id.borrow_mut() = self.previous.take();
    }
}
/// Builder for `OpsCtx`.
///
/// Every component except `check_mode` is required; `build` reports the
/// first missing one. `check_mode` defaults to `false`.
pub struct OpsContextBuilder {
    store: Option<PackageStore>,
    state: Option<StateManager>,
    index: Option<IndexManager>,
    net: Option<NetClient>,
    resolver: Option<Resolver>,
    builder: Option<Builder>,
    tx: Option<EventSender>,
    config: Option<Config>,
    check_mode: Option<bool>,
}
impl OpsContextBuilder {
#[must_use]
pub fn new() -> Self {
Self {
store: None,
state: None,
index: None,
net: None,
resolver: None,
builder: None,
tx: None,
config: None,
check_mode: None,
}
}
#[must_use]
pub fn with_store(mut self, store: PackageStore) -> Self {
self.store = Some(store);
self
}
#[must_use]
pub fn with_state(mut self, state: StateManager) -> Self {
self.state = Some(state);
self
}
#[must_use]
pub fn with_index(mut self, index: IndexManager) -> Self {
self.index = Some(index);
self
}
#[must_use]
pub fn with_net(mut self, net: NetClient) -> Self {
self.net = Some(net);
self
}
#[must_use]
pub fn with_resolver(mut self, resolver: Resolver) -> Self {
self.resolver = Some(resolver);
self
}
#[must_use]
pub fn with_builder(mut self, builder: Builder) -> Self {
self.builder = Some(builder);
self
}
#[must_use]
pub fn with_event_sender(mut self, tx: EventSender) -> Self {
self.tx = Some(tx);
self
}
#[must_use]
pub fn with_config(mut self, config: Config) -> Self {
self.config = Some(config);
self
}
#[must_use]
pub fn with_check_mode(mut self, check_mode: bool) -> Self {
self.check_mode = Some(check_mode);
self
}
/// # Errors
///
/// Returns an error if any required dependency is missing from the builder.
pub fn build(self) -> Result<OpsCtx, sps2_errors::Error> {
let store = self
.store
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "store".to_string(),
})?;
let state = self
.state
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "state".to_string(),
})?;
let index = self
.index
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "index".to_string(),
})?;
let net = self
.net
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "net".to_string(),
})?;
let resolver = self
.resolver
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "resolver".to_string(),
})?;
let builder = self
.builder
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "builder".to_string(),
})?;
let tx = self
.tx
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "event_sender".to_string(),
})?;
let config = self
.config
.ok_or_else(|| sps2_errors::OpsError::MissingComponent {
component: "config".to_string(),
})?;
Ok(OpsCtx {
store,
state,
index,
net,
resolver,
builder,
tx,
config,
check_mode: self.check_mode.unwrap_or(false),
correlation_id: RefCell::new(None),
})
}
}
impl Default for OpsContextBuilder {
    /// Equivalent to `OpsContextBuilder::new`.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    /// `push_correlation` sets the id for the guard's lifetime and the guard
    /// restores the previous value (here `None`) on drop.
    #[tokio::test]
    async fn correlation_helpers_round_trip() {
        let temp_dir = TempDir::new().unwrap();
        let state_dir = temp_dir.path().join("state");
        let store_dir = temp_dir.path().join("store");
        tokio::fs::create_dir_all(&state_dir).await.unwrap();
        tokio::fs::create_dir_all(&store_dir).await.unwrap();
        // Assemble a minimal but complete context; build() requires every
        // component to be present.
        let state = StateManager::new(&state_dir).await.unwrap();
        let store = PackageStore::new(store_dir.clone());
        let (tx, _rx) = sps2_events::channel();
        let config = Config::default();
        let index = IndexManager::new(&store_dir);
        let net = NetClient::new(sps2_net::NetConfig::default()).unwrap();
        let resolver = Resolver::with_events(index.clone(), tx.clone());
        let builder = Builder::new();
        let ctx = OpsContextBuilder::new()
            .with_state(state)
            .with_store(store)
            .with_event_sender(tx)
            .with_config(config)
            .with_index(index)
            .with_net(net)
            .with_resolver(resolver)
            .with_builder(builder)
            .build()
            .unwrap();
        assert!(ctx.current_correlation().is_none());
        {
            let _guard = ctx.push_correlation("install");
            assert_eq!(ctx.current_correlation(), Some("install".to_string()));
        }
        // Guard dropped: the previous (empty) correlation is restored.
        assert!(ctx.current_correlation().is_none());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/keys.rs | crates/ops/src/keys.rs | //! Key management utilities for signature verification
use crate::OpsCtx;
use base64::{engine::general_purpose, Engine as _};
use hex;
use minisign_verify::{PublicKey, Signature};
use serde::{Deserialize, Serialize};
use sps2_errors::Error;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::fs;
/// Key rotation information for verifying key changes
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KeyRotation {
    /// Previous key ID that signed this rotation
    pub previous_key_id: String,
    /// New key to trust
    pub new_key: TrustedKey,
    /// Signature of the new key by the previous key
    /// (covers `key_id + public_key + timestamp`; see `verify_key_rotations`)
    pub rotation_signature: String,
    /// Timestamp of the rotation
    pub timestamp: i64,
}
/// A trusted public key with metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrustedKey {
    /// Unique identifier for the key (hex-encoded keynum: bytes 2..10 of the
    /// base64-decoded public key)
    pub key_id: String,
    /// The minisign public key data (base64)
    pub public_key: String,
    /// Optional comment/description
    pub comment: Option<String>,
    /// Timestamp when key was first trusted
    pub trusted_since: i64,
    /// Optional expiration timestamp
    pub expires_at: Option<i64>,
}
/// Repository keys.json format
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RepositoryKeys {
    /// Current active signing keys
    pub keys: Vec<TrustedKey>,
    /// Minimum signature age in seconds (to prevent replay attacks)
    // NOTE(review): the doc says "minimum" but the field name says "max", and
    // the field is never read in this file — confirm intended semantics.
    pub rotations: Vec<KeyRotation>,
    /// Key rotation history — see note above; fields swapped docs? The
    /// serialized names are `keys`, `rotations`, `max_signature_age`.
    pub max_signature_age: Option<u64>,
}
/// Key manager for handling trusted keys and verification
pub struct KeyManager {
    /// Path to keys directory (/opt/pm/keys/)
    keys_dir: PathBuf,
    /// Currently loaded trusted keys, keyed by `key_id`
    trusted_keys: HashMap<String, TrustedKey>,
    /// Bootstrap key for initial trust
    bootstrap_key: Option<TrustedKey>,
}
impl KeyManager {
    /// Create a new key manager rooted at `keys_dir`; no keys are loaded yet.
    pub fn new<P: AsRef<Path>>(keys_dir: P) -> Self {
        Self {
            keys_dir: keys_dir.as_ref().to_path_buf(),
            trusted_keys: HashMap::new(),
            bootstrap_key: None,
        }
    }
    /// Initialize the key manager with a bootstrap key
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The keys directory cannot be created
    /// - The bootstrap key string cannot be decoded
    /// - The public key cannot be parsed
    pub async fn initialize_with_bootstrap(
        &mut self,
        bootstrap_key_str: &str,
    ) -> Result<(), Error> {
        fs::create_dir_all(&self.keys_dir).await?;
        let decoded_pk = general_purpose::STANDARD
            .decode(bootstrap_key_str)
            .map_err(|e| {
                Error::Config(sps2_errors::ConfigError::Invalid {
                    message: e.to_string(),
                })
            })?;
        // A decoded minisign public key must at least carry the 2-byte
        // algorithm tag plus the 8-byte key id that follows it.
        if decoded_pk.len() < 10 {
            return Err(Error::Config(sps2_errors::ConfigError::Invalid {
                message: "Invalid bootstrap key length".to_string(),
            }));
        }
        let key_id_bytes = &decoded_pk[2..10];
        let key_id = hex::encode(key_id_bytes);
        let bootstrap = TrustedKey {
            key_id: key_id.clone(),
            public_key: bootstrap_key_str.to_string(),
            comment: Some("Bootstrap key".to_string()),
            trusted_since: chrono::Utc::now().timestamp(),
            expires_at: None,
        };
        self.bootstrap_key = Some(bootstrap.clone());
        self.trusted_keys.insert(key_id, bootstrap);
        self.save_trusted_keys().await?;
        Ok(())
    }
    /// Load trusted keys from disk
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The trusted keys file cannot be read
    /// - The JSON content cannot be parsed
    pub async fn load_trusted_keys(&mut self) -> Result<(), Error> {
        let keys_file = self.keys_dir.join("trusted_keys.json");
        // A missing file is not an error: it just means nothing has been
        // trusted yet.
        if !keys_file.exists() {
            return Ok(());
        }
        let content = fs::read_to_string(&keys_file).await?;
        let keys: HashMap<String, TrustedKey> = serde_json::from_str(&content)
            .map_err(|e| Error::internal(format!("Failed to parse trusted keys: {e}")))?;
        self.trusted_keys = keys;
        Ok(())
    }
    /// Save trusted keys to disk
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The trusted keys cannot be serialized to JSON
    /// - The file cannot be written to disk
    pub async fn save_trusted_keys(&self) -> Result<(), Error> {
        let keys_file = self.keys_dir.join("trusted_keys.json");
        let content = serde_json::to_string_pretty(&self.trusted_keys)
            .map_err(|e| Error::internal(format!("Failed to serialize trusted keys: {e}")))?;
        fs::write(&keys_file, content).await?;
        Ok(())
    }
    /// Fetch and verify keys from repository
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The keys cannot be fetched from the repository
    /// - The keys content cannot be parsed as JSON
    /// - Signature verification fails
    pub async fn fetch_and_verify_keys(
        &mut self,
        net_client: &sps2_net::NetClient,
        keys_url: &str,
        tx: &sps2_events::EventSender,
    ) -> Result<Vec<sps2_net::PublicKeyRef>, Error> {
        let keys_content = sps2_net::fetch_text(net_client, keys_url, tx).await?;
        let repo_keys: RepositoryKeys = serde_json::from_str(&keys_content)?;
        // Every advertised rotation must be signed by a key we already trust.
        self.verify_key_rotations(&repo_keys)?;
        // Adopt new keys only when a valid rotation chain (or the bootstrap
        // key) vouches for them.
        for key in &repo_keys.keys {
            if !self.trusted_keys.contains_key(&key.key_id)
                && self.is_key_rotation_valid(&repo_keys, &key.key_id)
            {
                self.trusted_keys.insert(key.key_id.clone(), key.clone());
            }
        }
        self.save_trusted_keys().await?;
        // Reuse the single TrustedKey -> PublicKeyRef conversion point
        // instead of duplicating the mapping here.
        Ok(self.get_trusted_keys())
    }
    /// Verify signature against content using trusted keys
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The signature cannot be decoded
    /// - None of the trusted keys can verify the signature
    /// - The signature has expired
    #[allow(dead_code)]
    pub fn verify_signature(&self, content: &str, signature: &str) -> Result<(), Error> {
        let sig = Signature::decode(signature)?;
        let mut verification_errors = Vec::new();
        let now = chrono::Utc::now().timestamp();
        // Try every trusted key; the first successful verification wins.
        // Failures are collected so the final error explains all attempts.
        for trusted_key in self.trusted_keys.values() {
            if let Some(expires_at) = trusted_key.expires_at {
                if now > expires_at {
                    verification_errors.push(format!("Key {} has expired", trusted_key.key_id));
                    continue;
                }
            }
            match PublicKey::from_base64(&trusted_key.public_key) {
                Ok(public_key) => match public_key.verify(content.as_bytes(), &sig, false) {
                    Ok(()) => return Ok(()),
                    Err(e) => {
                        verification_errors.push(format!("Key {}: {}", trusted_key.key_id, e));
                    }
                },
                Err(e) => {
                    verification_errors.push(format!(
                        "Invalid key format for {}: {}",
                        trusted_key.key_id, e
                    ));
                }
            }
        }
        Err(Error::Signing(
            sps2_errors::SigningError::VerificationFailed {
                reason: format!(
                    "Signature verification failed. Tried {} trusted keys. Errors: {}",
                    self.trusted_keys.len(),
                    verification_errors.join("; ")
                ),
            },
        ))
    }
    /// Verify key rotations are valid: each rotation must be signed by an
    /// already-trusted previous key over `key_id + public_key + timestamp`.
    fn verify_key_rotations(&self, repo_keys: &RepositoryKeys) -> Result<(), Error> {
        for rotation in &repo_keys.rotations {
            let previous_key = self
                .trusted_keys
                .get(&rotation.previous_key_id)
                .ok_or_else(|| {
                    Error::Signing(sps2_errors::SigningError::NoTrustedKeyFound {
                        key_id: rotation.previous_key_id.clone(),
                    })
                })?;
            // The signed payload is the plain concatenation used by the
            // repository tooling; keep in sync with the signer.
            let rotation_content = format!(
                "{}{}{}",
                rotation.new_key.key_id, rotation.new_key.public_key, rotation.timestamp
            );
            let previous_public_key = PublicKey::from_base64(&previous_key.public_key)?;
            let rotation_sig = Signature::decode(&rotation.rotation_signature)?;
            previous_public_key.verify(rotation_content.as_bytes(), &rotation_sig, false)?;
        }
        Ok(())
    }
    /// Check if a key rotation is valid for a given key ID: either it is the
    /// bootstrap key, or some rotation from an already-trusted key names it.
    fn is_key_rotation_valid(&self, repo_keys: &RepositoryKeys, key_id: &str) -> bool {
        if let Some(bootstrap) = &self.bootstrap_key {
            if bootstrap.key_id == key_id {
                return true;
            }
        }
        repo_keys.rotations.iter().any(|rotation| {
            rotation.new_key.key_id == key_id
                && self.trusted_keys.contains_key(&rotation.previous_key_id)
        })
    }
    /// Get all trusted keys as network-layer key references.
    #[must_use]
    pub fn get_trusted_keys(&self) -> Vec<sps2_net::PublicKeyRef> {
        self.trusted_keys
            .values()
            .map(|k| sps2_net::PublicKeyRef {
                id: k.key_id.clone(),
                algo: sps2_net::Algorithm::Minisign,
                data: k.public_key.clone(),
            })
            .collect()
    }
    /// Import a new key into the trusted set (no-op if already trusted).
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The trusted keys cannot be saved to disk
    pub async fn import_key(&mut self, key: &TrustedKey) -> Result<(), Error> {
        if self.trusted_keys.contains_key(&key.key_id) {
            return Ok(()); // Key already trusted
        }
        self.trusted_keys.insert(key.key_id.clone(), key.clone());
        self.save_trusted_keys().await
    }
    /// Remove a trusted key by its key ID (idempotent).
    ///
    /// # Errors
    ///
    /// Returns an error if saving the updated trusted keys fails.
    pub async fn remove_key(&mut self, key_id: &str) -> Result<(), Error> {
        self.trusted_keys.remove(key_id);
        self.save_trusted_keys().await
    }
}
/// Extract base64 payload from a minisign public key box or return input if already base64.
#[must_use]
fn extract_base64(pk_input: &str) -> String {
    let trimmed = pk_input.trim();
    // Raw base64 payload: a single line containing no spaces.
    if trimmed.lines().count() <= 1 && !trimmed.contains(' ') {
        return trimmed.to_string();
    }
    // Minisign .pub box: skip the leading comment line and take the first
    // non-empty line after it; fall back to the trimmed input.
    trimmed
        .lines()
        .skip(1)
        .map(str::trim)
        .find(|line| !line.is_empty())
        .map_or_else(|| trimmed.to_string(), str::to_string)
}
/// List trusted keys (key IDs) as a displayable string.
///
/// # Errors
///
/// Returns an error if the trusted keys file cannot be read/parsed.
pub async fn keys_list(_ctx: &OpsCtx) -> Result<String, Error> {
    let mut manager = KeyManager::new(PathBuf::from(sps2_config::fixed_paths::KEYS_DIR));
    manager.load_trusted_keys().await?;
    let keys = manager.get_trusted_keys();
    if keys.is_empty() {
        return Ok("No trusted keys found.".to_string());
    }
    // One "<id> (<algorithm>)" line per trusted key.
    let lines: Vec<String> = keys
        .into_iter()
        .map(|key| {
            let algo_name = match key.algo {
                sps2_net::Algorithm::Minisign => "minisign",
            };
            format!("{} ({})", key.id, algo_name)
        })
        .collect();
    Ok(lines.join("\n"))
}
/// Import a minisign public key from file. Accepts either a minisign .pub box or raw base64.
///
/// # Errors
///
/// Returns an error if the key cannot be read, decoded, or saved.
pub async fn keys_import_from_file(
    _ctx: &OpsCtx,
    pubkey_path: &Path,
    comment: Option<String>,
) -> Result<String, Error> {
    // Accept either a full minisign .pub box or a bare base64 payload.
    let raw = fs::read_to_string(pubkey_path).await?;
    let b64 = extract_base64(&raw);
    let decoded = general_purpose::STANDARD
        .decode(&b64)
        .map_err(|e| Error::internal(format!("Invalid minisign public key: {e}")))?;
    if decoded.len() < 10 {
        return Err(Error::internal(
            "Minisign public key is too short".to_string(),
        ));
    }
    // Bytes 2..10 of the decoded key carry the 8-byte key id.
    let key_id = hex::encode(&decoded[2..10]);
    let mut manager = KeyManager::new(PathBuf::from(sps2_config::fixed_paths::KEYS_DIR));
    manager.load_trusted_keys().await?;
    manager
        .import_key(&TrustedKey {
            key_id: key_id.clone(),
            public_key: b64,
            comment,
            trusted_since: chrono::Utc::now().timestamp(),
            expires_at: None,
        })
        .await?;
    Ok(format!("Imported minisign key {key_id}"))
}
/// Remove a trusted key by key ID.
///
/// # Errors
///
/// Returns an error if saving the updated trusted keys fails or if the key is not present.
pub async fn keys_remove(_ctx: &OpsCtx, key_id: &str) -> Result<String, Error> {
    let mut manager = KeyManager::new(PathBuf::from(sps2_config::fixed_paths::KEYS_DIR));
    manager.load_trusted_keys().await?;
    // Removal is idempotent; remember whether the key was actually present
    // so the message can say so.
    let was_present = manager.trusted_keys.contains_key(key_id);
    manager.remove_key(key_id).await?;
    Ok(if was_present {
        format!("Removed key {key_id}")
    } else {
        format!("Key {key_id} not found (no changes)")
    })
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/ops/src/repository.rs | crates/ops/src/repository.rs | //! Repository and Index Management Operations
use crate::keys;
use crate::{keys::KeyManager, OpsCtx};
use dialoguer::{theme::ColorfulTheme, Confirm};
use sps2_config::{Config, RepositoryConfig};
use sps2_errors::{ConfigError, Error, OpsError, SigningError};
use sps2_events::{AppEvent, EventEmitter, FailureContext, GeneralEvent, LifecycleEvent};
use std::path::PathBuf;
use std::time::Instant;
/// Sync repository index
///
/// # Errors
///
/// Returns an error if index synchronization fails.
///
/// # Panics
///
/// Panics if `base_url` is None after validation (should never happen).
pub async fn reposync(ctx: &OpsCtx, yes: bool) -> Result<String, Error> {
    let start = Instant::now();
    let _correlation = ctx.push_correlation("reposync");
    // No configured repositories is a configuration error, reported both as
    // an event and as the returned error.
    let Some(base_url) = get_base_url(ctx) else {
        let err = Error::Config(ConfigError::MissingField {
            field: "repositories".to_string(),
        });
        ctx.emit(AppEvent::Lifecycle(LifecycleEvent::repo_sync_failed(
            None,
            FailureContext::from_error(&err),
        )));
        return Err(err);
    };
    ctx.emit(AppEvent::Lifecycle(LifecycleEvent::repo_sync_started(
        Some(base_url.to_string()),
    )));
    // Download and signature-verify the index (with TOFU key handling).
    let index_result = sync_and_verify_index(ctx, &base_url, start, yes).await;
    let index_json = match index_result {
        Ok(json) => json,
        Err(e) => {
            let failure = FailureContext::from_error(&e);
            ctx.emit(AppEvent::Lifecycle(LifecycleEvent::repo_sync_failed(
                Some(base_url.to_string()),
                failure,
            )));
            return Err(e);
        }
    };
    // Enforce index freshness based on security policy
    // NOTE(review): a parse failure here silently skips the freshness check;
    // the JSON is parsed again in finalize_index_update where errors surface.
    if let Ok(parsed_index) = sps2_index::Index::from_json(&index_json) {
        let now = chrono::Utc::now();
        let age = now.signed_duration_since(parsed_index.metadata.timestamp);
        let max_days = i64::from(ctx.config.security.index_max_age_days);
        if age.num_days() > max_days {
            let err = OpsError::RepoSyncFailed {
                message: format!(
                    "Repository index is stale: {} days old (max {} days)",
                    age.num_days(),
                    max_days
                ),
            }
            .into();
            ctx.emit(AppEvent::Lifecycle(LifecycleEvent::repo_sync_failed(
                Some(base_url.to_string()),
                FailureContext::from_error(&err),
            )));
            return Err(err);
        }
    }
    finalize_index_update(ctx, &index_json, start).await
}
fn get_base_url(ctx: &OpsCtx) -> Option<String> {
let mut candidates: Vec<&sps2_config::RepositoryConfig> = ctx.config.repos.get_all();
candidates.sort_by_key(|r| r.priority);
candidates.first().map(|repo| repo.url.clone())
}
/// Download the repository index (using a cached ETag when available), fetch
/// its detached minisign signature and the repository key set, and verify the
/// index bytes against the trusted keys. On a key-not-trusted failure the
/// trust-on-first-use flow in `handle_signature_verification_error` may
/// import the new key and re-verify.
async fn sync_and_verify_index(
    ctx: &OpsCtx,
    base_url: &str,
    start: Instant,
    yes: bool,
) -> Result<String, Error> {
    let index_url = format!("{base_url}/index.json");
    let index_sig_url = format!("{base_url}/index.json.minisig");
    let keys_url = format!("{base_url}/keys.json");
    // ETag cache errors degrade to "no cached ETag" (unconditional fetch).
    let cached_etag = ctx.index.cache.load_etag().await.unwrap_or(None);
    // NOTE(review): on 304 Not Modified this returns a human-readable status
    // string, not index JSON; that string is then signature-verified below,
    // which would fail — confirm the 304 path is handled upstream.
    let index_json =
        download_index_conditional(ctx, &index_url, cached_etag.as_deref(), start).await?;
    let index_signature = sps2_net::fetch_text(&ctx.net, &index_sig_url, &ctx.tx).await?;
    let mut trusted_keys = fetch_and_verify_keys(ctx, &ctx.net, &keys_url, &ctx.tx).await?;
    if let Err(e) = sps2_net::verify_minisign_bytes_with_keys(
        index_json.as_bytes(),
        &index_signature,
        &trusted_keys,
    ) {
        handle_signature_verification_error(
            ctx,
            e,
            &keys_url,
            &index_json,
            &index_signature,
            yes,
            &mut trusted_keys,
        )
        .await?;
    }
    Ok(index_json)
}
/// Recover from an index signature-verification failure.
///
/// For `NoTrustedKeyFound`, offer a trust-on-first-use flow: look up the
/// unknown key in the repository's `keys.json`, prompt the user (or accept
/// automatically when `yes` is set), import the key, refresh `trusted_keys`
/// in place, and re-verify the index. Any other verification error is fatal.
async fn handle_signature_verification_error(
    ctx: &OpsCtx,
    e: SigningError,
    keys_url: &str,
    index_json: &str,
    index_signature: &str,
    yes: bool,
    trusted_keys: &mut Vec<sps2_net::PublicKeyRef>,
) -> Result<(), Error> {
    match e {
        SigningError::NoTrustedKeyFound { key_id } => {
            let repo_keys: keys::RepositoryKeys =
                sps2_net::fetch_json(&ctx.net, keys_url, &ctx.tx).await?;
            // The repository must itself advertise the key that signed the
            // index; otherwise there is nothing to offer the user.
            let key_to_trust = repo_keys.keys.iter().find(|k| k.key_id == key_id);
            if let Some(key) = key_to_trust {
                let prompt = format!(
                    "The repository index is signed with a new key: {key_id}. Do you want to trust it?"
                );
                if yes
                    || Confirm::with_theme(&ColorfulTheme::default())
                        .with_prompt(prompt)
                        .interact()
                        .map_err(|e| {
                            Error::internal(format!("Failed to get user confirmation: {e}"))
                        })?
                {
                    let mut key_manager =
                        KeyManager::new(PathBuf::from(sps2_config::fixed_paths::KEYS_DIR));
                    key_manager.load_trusted_keys().await?;
                    key_manager.import_key(key).await?;
                    // Propagate the enlarged key set back to the caller.
                    *trusted_keys = key_manager.get_trusted_keys();
                    // Re-verify
                    sps2_net::verify_minisign_bytes_with_keys(
                        index_json.as_bytes(),
                        index_signature,
                        trusted_keys,
                    )?;
                } else {
                    return Err(Error::Signing(SigningError::NoTrustedKeyFound { key_id }));
                }
            } else {
                return Err(Error::Signing(SigningError::NoTrustedKeyFound { key_id }));
            }
        }
        other_error => {
            return Err(OpsError::RepoSyncFailed {
                message: format!("Index signature verification failed: {other_error}"),
            }
            .into());
        }
    }
    Ok(())
}
/// Add a new repository to the user's configuration.
///
/// # Errors
///
/// Returns an error if:
/// - The configuration file cannot be loaded or created
/// - The repository URL is invalid
/// - The configuration cannot be saved
pub async fn add_repo(_ctx: &OpsCtx, name: &str, url: &str) -> Result<String, Error> {
    let config_path = Config::default_path()?;
    let mut config = Config::load_or_default(&Some(config_path)).await?;
    // Refuse to overwrite an existing extra repository entry.
    if config.repos.extras.contains_key(name) {
        return Err(Error::Config(ConfigError::Invalid {
            message: format!("Repository '{name}' already exists."),
        }));
    }
    config.repos.extras.insert(
        name.to_string(),
        RepositoryConfig {
            url: url.to_string(),
            priority: 10,
            algorithm: "minisign".to_string(),
            key_ids: vec![],
        },
    );
    config.save().await?;
    Ok(format!("Repository '{name}' added successfully."))
}
/// List configured repositories from the user's configuration.
///
/// # Errors
///
/// Returns an error if the configuration file cannot be read.
pub async fn list_repos(_ctx: &OpsCtx) -> Result<String, Error> {
    let config_path = Config::default_path()?;
    let config = Config::load_or_default(&Some(config_path)).await?;
    let mut lines = Vec::new();
    // Well-known repository slots first, in their canonical order.
    let named_slots = [
        ("fast", config.repos.fast.as_ref()),
        ("slow", config.repos.slow.as_ref()),
        ("stable", config.repos.stable.as_ref()),
    ];
    for (label, slot) in named_slots {
        if let Some(repo) = slot {
            lines.push(format!(
                "{label}: {} (priority {})",
                repo.url, repo.priority
            ));
        }
    }
    // Then any user-defined extra repositories.
    for (name, repo) in &config.repos.extras {
        lines.push(format!("{name}: {} (priority {})", repo.url, repo.priority));
    }
    if lines.is_empty() {
        Ok("No repositories configured.".to_string())
    } else {
        Ok(lines.join("\n"))
    }
}
/// Remove a repository by name. Supports standard names (fast/slow/stable) and extras.
///
/// # Errors
///
/// Returns an error if the configuration cannot be loaded or saved, or if the
/// named repository does not exist.
pub async fn remove_repo(_ctx: &OpsCtx, name: &str) -> Result<String, Error> {
    let config_path = Config::default_path()?;
    let mut config = Config::load_or_default(&Some(config_path)).await?;
    // Standard repositories live in dedicated Option slots; everything else
    // lives in the extras map.
    let removed = match name {
        "fast" => config.repos.fast.take().is_some(),
        "slow" => config.repos.slow.take().is_some(),
        "stable" => config.repos.stable.take().is_some(),
        _ => config.repos.extras.remove(name).is_some(),
    };
    if !removed {
        return Err(Error::Config(ConfigError::Invalid {
            message: format!("Repository '{name}' not found."),
        }));
    }
    config.save().await?;
    Ok(format!("Repository '{name}' removed successfully."))
}
/// Download index conditionally with `ETag` support
///
/// On a fresh body, persists the new ETag (failure to persist is only a
/// warning). On 304 Not Modified, emits a zero-package `repo_sync_completed`
/// event and returns a human-readable status string.
// NOTE(review): the 304 return value is a status message, not index JSON —
// callers that treat the result as index content must special-case it.
async fn download_index_conditional(
    ctx: &OpsCtx,
    index_url: &str,
    cached_etag: Option<&str>,
    start: Instant,
) -> Result<String, Error> {
    let response =
        sps2_net::fetch_text_conditional(&ctx.net, index_url, cached_etag, &ctx.tx).await?;
    if let Some((content, new_etag)) = response {
        if let Some(etag) = new_etag {
            // Best-effort: a failed ETag save only costs a re-download later.
            if let Err(e) = ctx.index.cache.save_etag(&etag).await {
                ctx.emit_warning(format!("Failed to save ETag: {e}"));
            }
        }
        Ok(content)
    } else {
        ctx.tx
            .emit(AppEvent::Lifecycle(LifecycleEvent::repo_sync_completed(
                0,
                u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX),
                0,
            )));
        Ok("Repository index is unchanged (304 Not Modified)".to_string())
    }
}
/// Process and save the new index
///
/// Loads the freshly downloaded JSON into a clone of the index manager,
/// derives how many packages the sync introduced by comparing package counts
/// before and after, persists the cache, and emits a sync-completed event.
async fn finalize_index_update(
    ctx: &OpsCtx,
    index_json: &str,
    start: Instant,
) -> Result<String, Error> {
    // Package count before loading the fresh JSON.
    let previous_count = ctx.index.index().map_or(0, |idx| idx.packages.len());
    let mut updated_manager = ctx.index.clone();
    updated_manager.load(Some(index_json)).await?;
    let current_count = updated_manager.index().map_or(0, |idx| idx.packages.len());
    // saturating_sub: a shrinking index reports zero rather than wrapping.
    let packages_updated = current_count.saturating_sub(previous_count);
    updated_manager.save_to_cache().await?;
    let message = match packages_updated {
        0 => "Repository index updated (no new packages)".to_string(),
        n => format!("Updated {n} packages from repository"),
    };
    ctx.emit(AppEvent::Lifecycle(LifecycleEvent::repo_sync_completed(
        packages_updated,
        u64::try_from(start.elapsed().as_millis()).unwrap_or(u64::MAX),
        0, // TODO: Track actual bytes transferred
    )));
    Ok(message)
}
/// Fetch and verify signing keys with rotation support
///
/// Loads the locally trusted key set; on first run (no trusted keys on disk)
/// it seeds the trust store with the embedded bootstrap key, then delegates
/// to `KeyManager::fetch_and_verify_keys` to retrieve and verify the remote
/// key set. Emits a warning event when bootstrapping and a completion event
/// on success.
async fn fetch_and_verify_keys(
    _ctx: &OpsCtx,
    net_client: &sps2_net::NetClient,
    keys_url: &str,
    tx: &sps2_events::EventSender,
) -> Result<Vec<sps2_net::PublicKeyRef>, Error> {
    let mut key_manager = KeyManager::new(PathBuf::from(sps2_config::fixed_paths::KEYS_DIR));
    key_manager.load_trusted_keys().await?;
    if key_manager.get_trusted_keys().is_empty() {
        // First run: nothing trusted yet, so initialize from the compiled-in
        // bootstrap key and warn the user that this happened.
        let bootstrap_key = "RWSGOq2NVecA2UPNdBUZykp1MLhfMmkAK/SZSjK3bpq2q7I8LbSVVBDm";
        tx.emit(AppEvent::General(GeneralEvent::Warning {
            message: "Initializing with bootstrap key".to_string(),
            context: Some("First run - no trusted keys found".to_string()),
        }));
        key_manager.initialize_with_bootstrap(bootstrap_key).await?;
    }
    // Remote keys are fetched and checked against the trusted set; the exact
    // rotation semantics live inside KeyManager::fetch_and_verify_keys.
    let trusted_keys = key_manager
        .fetch_and_verify_keys(net_client, keys_url, tx)
        .await?;
    tx.emit(AppEvent::General(GeneralEvent::OperationCompleted {
        operation: "Key verification".to_string(),
        success: true,
    }));
    Ok(trusted_keys)
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/installer.rs | crates/install/src/installer.rs | //! Main installer implementation
use crate::{
InstallConfig, InstallContext, InstallOperation, InstallResult, StateInfo, UninstallContext,
UninstallOperation, UpdateContext, UpdateOperation,
};
use sps2_errors::{Error, InstallError};
// EventSender not used directly in this module but imported for potential future use
use sps2_resolver::Resolver;
use sps2_state::StateManager;
use sps2_store::PackageStore;
/// Main installer for sps2 packages
///
/// Owns the components shared by install/uninstall/update operations and
/// exposes the high-level package-management entry points. `Clone` is cheap
/// enough that each operation receives its own clones of the components.
#[derive(Clone)]
pub struct Installer {
    /// Configuration
    config: InstallConfig,
    /// Dependency resolver
    resolver: Resolver,
    /// State manager
    state_manager: StateManager,
    /// Package store
    store: PackageStore,
}
impl std::fmt::Debug for Installer {
    // Manual Debug impl: only the configuration is printed; the remaining
    // fields are elided via finish_non_exhaustive().
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut dbg = fmt.debug_struct("Installer");
        dbg.field("config", &self.config);
        dbg.finish_non_exhaustive()
    }
}
impl Installer {
    /// Create new installer
    ///
    /// Wires together the configuration, dependency resolver, state manager,
    /// and package store used by every operation below.
    #[must_use]
    pub fn new(
        config: InstallConfig,
        resolver: Resolver,
        state_manager: StateManager,
        store: PackageStore,
    ) -> Self {
        Self {
            config,
            resolver,
            state_manager,
            store,
        }
    }
    /// Install packages
    ///
    /// Validates the context, delegates to [`InstallOperation`], then prunes
    /// old states according to the retention policy.
    ///
    /// # Errors
    ///
    /// Returns an error if package resolution fails, download fails, or installation fails.
    pub async fn install(&mut self, context: InstallContext) -> Result<InstallResult, Error> {
        // Validate context
        Self::validate_install_context(&context)?;
        // Create install operation (clones give the operation its own handles)
        let mut operation = InstallOperation::new(
            self.resolver.clone(),
            self.state_manager.clone(),
            self.store.clone(),
        )?;
        // Execute installation
        let result = operation.execute(context).await?;
        // Trigger garbage collection
        self.cleanup_old_states().await?;
        Ok(result)
    }
    /// Uninstall packages
    ///
    /// # Errors
    ///
    /// Returns an error if package validation fails or uninstall operation fails.
    pub async fn uninstall(&mut self, context: UninstallContext) -> Result<InstallResult, Error> {
        // Validate context
        Self::validate_uninstall_context(&context)?;
        // Create uninstall operation
        let mut operation = UninstallOperation::new(self.state_manager.clone(), self.store.clone());
        // Execute uninstallation
        let result = operation.execute(context).await?;
        // Trigger garbage collection
        self.cleanup_old_states().await?;
        Ok(result)
    }
    /// Update packages
    ///
    /// # Errors
    ///
    /// Returns an error if package resolution fails, download fails, or update fails.
    pub async fn update(&mut self, context: UpdateContext) -> Result<InstallResult, Error> {
        // Validate context (update contexts are always valid; an empty
        // package list means "update everything")
        Self::validate_update_context(&context);
        // Create update operation
        let mut operation = UpdateOperation::new(
            self.resolver.clone(),
            self.state_manager.clone(),
            self.store.clone(),
        )?;
        // Execute update
        let result = operation.execute(context).await?;
        // Trigger garbage collection
        self.cleanup_old_states().await?;
        Ok(result)
    }
    /// List available states for rollback
    ///
    /// Each entry carries a sample of at most five packages for display.
    ///
    /// # Errors
    ///
    /// Returns an error if querying the state database fails.
    pub async fn list_states(&self) -> Result<Vec<StateInfo>, Error> {
        let states = self.state_manager.list_states_detailed().await?;
        let mut state_infos = Vec::new();
        for state in states {
            let packages = self
                .state_manager
                .get_installed_packages_in_state(&state.state_id())
                .await?;
            // Parse parent_id if present (stored as a string; unparsable
            // values are treated as "no parent")
            let parent_id = state
                .parent_id
                .as_ref()
                .and_then(|id| uuid::Uuid::parse_str(id).ok());
            state_infos.push(StateInfo {
                id: state.state_id(),
                timestamp: state.timestamp(),
                parent_id,
                package_count: packages.len(),
                packages: packages
                    .into_iter()
                    .take(5)
                    .map(|pkg| sps2_types::PackageId::new(pkg.name.clone(), pkg.version()))
                    .collect(), // First 5 packages as sample
            });
        }
        Ok(state_infos)
    }
    /// Get current state information
    ///
    /// # Errors
    ///
    /// Returns an error if the current state cannot be found or accessed.
    pub async fn current_state(&self) -> Result<StateInfo, Error> {
        let current_id = self.state_manager.get_current_state_id().await?;
        let states = self.list_states().await?;
        states
            .into_iter()
            .find(|state| state.id == current_id)
            .ok_or_else(|| {
                InstallError::StateNotFound {
                    state_id: current_id.to_string(),
                }
                .into()
            })
    }
    /// Cleanup old states according to retention policy, then garbage-collect
    /// unreferenced packages from the store.
    async fn cleanup_old_states(&self) -> Result<(), Error> {
        self.state_manager
            .cleanup_old_states(self.config.state_retention)
            .await?;
        self.store.garbage_collect()?;
        Ok(())
    }
    /// Validate install context
    ///
    /// Requires at least one package or local file, and that every local file
    /// exists and carries the `.sp` extension.
    fn validate_install_context(context: &InstallContext) -> Result<(), Error> {
        if context.packages.is_empty() && context.local_files.is_empty() {
            return Err(InstallError::NoPackagesSpecified.into());
        }
        // Validate local file paths exist
        for path in &context.local_files {
            if !path.exists() {
                return Err(InstallError::LocalPackageNotFound {
                    path: path.display().to_string(),
                }
                .into());
            }
            if path.extension().is_none_or(|ext| ext != "sp") {
                return Err(InstallError::InvalidPackageFile {
                    path: path.display().to_string(),
                    message: "file must have .sp extension".to_string(),
                }
                .into());
            }
        }
        Ok(())
    }
    /// Validate uninstall context: at least one package must be named.
    fn validate_uninstall_context(context: &UninstallContext) -> Result<(), Error> {
        if context.packages.is_empty() {
            return Err(InstallError::NoPackagesSpecified.into());
        }
        Ok(())
    }
    /// Validate update context
    fn validate_update_context(_context: &UpdateContext) {
        // Update context is always valid (empty packages means update all)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{AtomicInstaller, PreparedPackage};
    use sps2_index::IndexManager;
    use sps2_resolver::{PackageId, ResolvedNode, Resolver};
    use sps2_store::create_package;
    use sps2_types::{Arch, Manifest, Version};
    use std::collections::HashMap;
    use tempfile::TempDir;
    use tokio::fs as afs;
    /// Build a throwaway environment: temp dir, state manager, and store.
    async fn mk_env() -> (TempDir, StateManager, sps2_store::PackageStore) {
        let td = TempDir::new().expect("tempdir");
        let state = StateManager::new(td.path()).await.expect("state manager");
        let store_base = td.path().join("store");
        afs::create_dir_all(&store_base).await.expect("store dir");
        let store = sps2_store::PackageStore::new(store_base);
        (td, state, store)
    }
    /// Create a minimal `.sp` package on disk (manifest plus one content
    /// file), add it to the store, and return its hash, store path, and
    /// archive size.
    async fn make_sp_and_add_to_store(
        store: &sps2_store::PackageStore,
        name: &str,
        version: &str,
    ) -> (sps2_hash::Hash, std::path::PathBuf, u64) {
        let td = TempDir::new().expect("pkg dir");
        let src = td.path().join("src");
        afs::create_dir_all(&src).await.expect("src dir");
        let version_parsed = Version::parse(version).expect("version");
        let manifest = Manifest::new(name.to_string(), &version_parsed, 1, &Arch::Arm64);
        let manifest_path = src.join("manifest.toml");
        sps2_store::manifest_io::write_manifest(&manifest_path, &manifest)
            .await
            .expect("write manifest");
        let content_dir = src.join("opt/pm/live/share");
        afs::create_dir_all(&content_dir)
            .await
            .expect("content dir");
        afs::write(content_dir.join("content.txt"), name.as_bytes())
            .await
            .expect("write content");
        let sp = td.path().join("pkg.sp");
        create_package(&src, &sp).await.expect("create package");
        let stored = store.add_package(&sp).await.expect("add package");
        let hash = stored.hash().expect("hash");
        let path = store.package_path(&hash);
        let size = afs::metadata(&sp).await.expect("metadata").len();
        (hash, path, size)
    }
    /// Installing a concrete package and then listing states must surface the
    /// real package name/version, not placeholders.
    #[tokio::test]
    async fn list_states_reports_actual_package_versions() {
        let (_, state, store) = mk_env().await;
        let (hash, store_path, size) = make_sp_and_add_to_store(&store, "demo", "1.2.3").await;
        let mut resolved: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pkg_id = PackageId::new("demo".to_string(), Version::parse("1.2.3").unwrap());
        resolved.insert(
            pkg_id.clone(),
            ResolvedNode::local(
                "demo".to_string(),
                pkg_id.version.clone(),
                store_path.clone(),
                vec![],
            ),
        );
        let mut prepared = HashMap::new();
        prepared.insert(
            pkg_id.clone(),
            PreparedPackage {
                hash: hash.clone(),
                size,
                store_path,
                is_local: true,
                package_hash: None,
            },
        );
        let ctx = InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        // Install through AtomicInstaller directly with pre-resolved and
        // pre-prepared inputs so no network resolution is needed.
        let mut atomic = AtomicInstaller::new(state.clone(), store.clone());
        let _ = atomic
            .install(&ctx, &resolved, Some(&prepared))
            .await
            .expect("install");
        let temp_dir = TempDir::new().expect("installer tempdir");
        let package_resolver = Resolver::new(IndexManager::new(temp_dir.path().join("index")));
        let installer = Installer::new(
            InstallConfig::default(),
            package_resolver,
            state.clone(),
            store,
        );
        let states = installer.list_states().await.expect("list states");
        assert!(!states.is_empty());
        let first = &states[0];
        assert_eq!(first.package_count, 1);
        assert_eq!(first.packages.len(), 1);
        assert_eq!(first.packages[0].name, "demo");
        assert_eq!(first.packages[0].version, Version::parse("1.2.3").unwrap());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/lib.rs | crates/install/src/lib.rs | #![warn(clippy::pedantic)]
#![deny(clippy::all)]
//! Package installation with atomic updates for sps2
//!
//! This crate handles the installation of packages with atomic
//! state transitions, rollback capabilities, and parallel execution.
#[macro_use]
mod macros;
mod api;
mod atomic;
mod installer;
mod operations;
mod prepare;
//mod pipeline;
//pub mod validation;
pub use atomic::{AtomicInstaller, StateTransition};
pub use installer::Installer;
pub use operations::{InstallOperation, UninstallOperation, UpdateOperation};
pub use prepare::{ExecutionContext, ParallelExecutor};
// Re-export the public API surface from api module
pub use api::config::{InstallConfig, SecurityPolicy};
pub use api::context::{InstallContext, UninstallContext, UpdateContext};
pub use api::result::{InstallResult, StateInfo};
pub use api::types::PreparedPackage;
// Re-export EventSender for use by macros and contexts
pub use sps2_events::EventSender;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/operations.rs | crates/install/src/operations.rs | //! High-level installation operations
use crate::SecurityPolicy;
use crate::{
AtomicInstaller, ExecutionContext, InstallContext, InstallResult, ParallelExecutor,
UninstallContext, UpdateContext,
};
use sps2_errors::{Error, InstallError};
use sps2_events::events::GeneralEvent;
use sps2_events::{AppEvent, EventEmitter};
use sps2_resolver::{NodeAction, ResolutionContext, ResolutionResult, Resolver};
use sps2_state::StateManager;
use sps2_store::PackageStore;
use sps2_types::PackageSpec;
use std::sync::Arc;
/// Install operation
///
/// One-shot driver that resolves dependencies, downloads/prepares packages
/// in parallel, and hands the result to the atomic installer.
pub struct InstallOperation {
    /// Resolver for dependencies
    resolver: Resolver,
    /// State manager
    state_manager: StateManager,
    /// Package store
    store: PackageStore,
    /// Parallel executor
    executor: ParallelExecutor,
}
impl InstallOperation {
    /// Create new install operation
    ///
    /// # Errors
    ///
    /// Returns an error if parallel executor initialization fails.
    pub fn new(
        resolver: Resolver,
        state_manager: StateManager,
        store: PackageStore,
    ) -> Result<Self, Error> {
        // Create a default ResourceManager for the ParallelExecutor
        let resources = Arc::new(sps2_config::ResourceManager::default());
        let executor = ParallelExecutor::new(store.clone(), state_manager.clone(), resources)?;
        Ok(Self {
            resolver,
            state_manager,
            store,
            executor,
        })
    }
    /// Execute installation
    ///
    /// Pipeline: validate local files → resolve dependencies → download and
    /// prepare packages in parallel → atomic install.
    ///
    /// # Errors
    ///
    /// Returns an error if dependency resolution fails, package download fails, or installation fails.
    pub async fn execute(&mut self, context: InstallContext) -> Result<InstallResult, Error> {
        // Check local .sp files exist (validation moved to AtomicInstaller)
        Self::check_local_packages_exist(&context)?;
        // Check for already installed packages (handled during atomic install)
        // Resolve dependencies
        let resolution = self.resolve_dependencies(&context).await?;
        // Check for already installed packages after resolution
        Self::check_already_installed_resolved(&resolution);
        // Execute parallel downloads; if the caller did not supply an event
        // sender, a fresh channel is created and its receiver dropped.
        let exec_context = ExecutionContext::new()
            .with_event_sender(
                context
                    .event_sender
                    .clone()
                    .unwrap_or_else(|| sps2_events::channel().0),
            )
            .with_security_policy(SecurityPolicy {
                verify_signatures: true, // default to verify in this path
                allow_unsigned: false,
            })
            .with_force_redownload(context.force_download);
        // Debug: Check what packages we're trying to process
        context.emit_debug(format!(
            "DEBUG: About to process {} resolved packages via ParallelExecutor",
            resolution.nodes.len()
        ));
        let prepared_packages = self
            .executor
            .execute_parallel(&resolution.execution_plan, &resolution.nodes, &exec_context)
            .await?;
        // Debug: Check what packages were prepared
        context.emit_debug(format!(
            "DEBUG: ParallelExecutor prepared {} packages",
            prepared_packages.len()
        ));
        // ParallelExecutor now returns prepared package data instead of doing database operations
        // Perform atomic installation
        let mut atomic_installer =
            AtomicInstaller::new(self.state_manager.clone(), self.store.clone());
        let result = atomic_installer
            .install(&context, &resolution.nodes, Some(&prepared_packages))
            .await?;
        Ok(result)
    }
    /// Resolve dependencies for installation
    ///
    /// Builds a resolution context from the requested specs and local files,
    /// runs the SAT resolver, and emits a detailed troubleshooting event on
    /// failure before propagating the error.
    async fn resolve_dependencies(
        &self,
        context: &InstallContext,
    ) -> Result<sps2_resolver::ResolutionResult, Error> {
        let mut resolution_context = ResolutionContext::new();
        // Add requested packages as runtime dependencies
        for spec in &context.packages {
            resolution_context = resolution_context.add_runtime_dep(spec.clone());
        }
        // Add local files
        for path in &context.local_files {
            resolution_context = resolution_context.add_local_file(path.clone());
        }
        context.emit_operation_started("Resolving dependencies");
        let resolution = match self.resolver.resolve_with_sat(resolution_context).await {
            Ok(result) => result,
            Err(e) => {
                // Emit helpful error event for resolution failures
                context.emit_error_with_details(
                    "Package resolution failed",
                    format!(
                        "Error: {e}. \n\nPossible reasons:\n\
                        • Package name or version typo.\n\
                        • Package not available in the current repositories.\n\
                        • Version constraints are unsatisfiable.\n\
                        \nSuggested solutions:\n\
                        • Double-check package name and version specs.\n\
                        • Run 'sps2 search <package_name>' to find available packages.\n\
                        • Run 'sps2 reposync' to update your package index."
                    ),
                );
                return Err(e);
            }
        };
        context.emit_operation_completed("Dependency resolution", true);
        Ok(resolution)
    }
    /// Check local .sp package files exist (validation moved to `AtomicInstaller`)
    fn check_local_packages_exist(context: &InstallContext) -> Result<(), Error> {
        for local_file in &context.local_files {
            // Check if file exists
            if !local_file.exists() {
                return Err(InstallError::LocalPackageNotFound {
                    path: local_file.display().to_string(),
                }
                .into());
            }
            // Check file extension
            if local_file.extension().is_none_or(|ext| ext != "sp") {
                return Err(InstallError::InvalidPackageFile {
                    path: local_file.display().to_string(),
                    message: "file must have .sp extension".to_string(),
                }
                .into());
            }
            // Validation moved to AtomicInstaller where it actually happens
        }
        Ok(())
    }
    /// Check for already installed packages after resolution
    ///
    /// NOTE(review): the loop body is currently a no-op — the resolver marks
    /// already-installed packages as `NodeAction::Local` and handles them
    /// itself, so nothing is emitted here. This hook exists for future
    /// diagnostics.
    fn check_already_installed_resolved(resolution: &ResolutionResult) {
        // Check if any resolved nodes are local (already installed)
        for node in resolution.packages_in_order() {
            if let NodeAction::Local = node.action {
                // This package is already installed, emit a warning but don't error
                // The resolver has already handled this correctly
                // Package is already installed, the resolver has handled this correctly
            }
        }
    }
}
/// Uninstall operation
///
/// Removes installed packages, refusing (unless forced) to remove anything
/// that other installed packages still depend on.
pub struct UninstallOperation {
    /// State manager
    state_manager: StateManager,
    /// Package store
    store: PackageStore,
}
impl UninstallOperation {
    /// Create new uninstall operation
    #[must_use]
    pub fn new(state_manager: StateManager, store: PackageStore) -> Self {
        Self {
            state_manager,
            store,
        }
    }
    /// Execute uninstallation
    ///
    /// # Errors
    ///
    /// Returns an error if package removal fails or dependency checks fail.
    pub async fn execute(&mut self, context: UninstallContext) -> Result<InstallResult, Error> {
        // Get currently installed packages
        let current_packages = self.state_manager.get_installed_packages().await?;
        // Find packages to remove; an unknown name is an error unless `force`
        // is set, in which case it is silently skipped
        let mut packages_to_remove = Vec::new();
        for package_name in &context.packages {
            if let Some(package_id) = current_packages
                .iter()
                .find(|pkg| &pkg.name == package_name)
            {
                packages_to_remove.push(package_id.clone());
            } else if !context.force {
                return Err(InstallError::PackageNotInstalled {
                    package: package_name.clone(),
                }
                .into());
            }
        }
        // Check for dependents if not forcing: refuse to remove a package
        // that other installed packages still depend on
        if !context.force {
            for package in &packages_to_remove {
                let package_id =
                    sps2_resolver::PackageId::new(package.name.clone(), package.version());
                let dependents = self
                    .state_manager
                    .get_package_dependents(&package_id)
                    .await?;
                if !dependents.is_empty() {
                    return Err(InstallError::PackageHasDependents {
                        package: package_id.name.clone(),
                    }
                    .into());
                }
            }
        }
        // Perform atomic uninstallation using AtomicInstaller
        let package_ids: Vec<sps2_resolver::PackageId> = packages_to_remove
            .iter()
            .map(|pkg| sps2_resolver::PackageId::new(pkg.name.clone(), pkg.version()))
            .collect();
        let mut atomic_installer =
            AtomicInstaller::new(self.state_manager.clone(), self.store.clone());
        let result = atomic_installer.uninstall(&package_ids, &context).await?;
        Ok(result)
    }
}
/// Update operation
///
/// Determines which installed packages have newer versions available and
/// reinstalls them via the wrapped [`InstallOperation`].
pub struct UpdateOperation {
    /// Install operation for handling updates
    install_operation: InstallOperation,
    /// State manager
    state_manager: StateManager,
}
impl UpdateOperation {
    /// Create new update operation
    ///
    /// # Errors
    ///
    /// Returns an error if install operation initialization fails.
    pub fn new(
        resolver: Resolver,
        state_manager: StateManager,
        store: PackageStore,
    ) -> Result<Self, Error> {
        let install_operation = InstallOperation::new(resolver, state_manager.clone(), store)?;
        Ok(Self {
            install_operation,
            state_manager,
        })
    }
    /// Execute update
    ///
    /// An empty `context.packages` list means "update every installed
    /// package". Packages that are already current are filtered out before
    /// any installation work starts; if nothing needs updating the returned
    /// result carries a nil state id and no changes.
    ///
    /// # Errors
    ///
    /// Returns an error if package resolution fails, update conflicts occur, or installation fails.
    pub async fn execute(&mut self, context: UpdateContext) -> Result<InstallResult, Error> {
        // Get currently installed packages
        let current_packages = self.state_manager.get_installed_packages().await?;
        // Determine packages to update
        let packages_to_update = if context.packages.is_empty() {
            // Update all packages
            current_packages
        } else {
            // Update specified packages
            current_packages
                .into_iter()
                .filter(|pkg| context.packages.contains(&pkg.name))
                .collect()
        };
        // Check if any updates are actually needed
        if packages_to_update.is_empty() {
            // No packages to update - return early with empty result
            let result = InstallResult::new(uuid::Uuid::nil());
            return Ok(result);
        }
        // For each package, check if an update is available before proceeding
        let (packages_needing_update, _packages_up_to_date) = self
            .check_packages_for_updates(&packages_to_update, &context)
            .await?;
        // If no packages need updating, return early
        if packages_needing_update.is_empty() {
            let result = InstallResult::new(uuid::Uuid::nil());
            return Ok(result);
        }
        // Convert to package specs for installation
        self.build_and_execute_install(&packages_needing_update, &context)
            .await
    }
    /// Check which packages have available updates
    ///
    /// Complex workflow function that checks each package for updates and emits detailed events.
    /// Function length is necessary for comprehensive error handling and event emission.
    ///
    /// Returns (packages with a newer version available, names of packages
    /// that are already current). Resolution failures for individual packages
    /// are reported via events but do not abort the whole check.
    #[allow(clippy::too_many_lines)]
    async fn check_packages_for_updates(
        &self,
        packages_to_update: &[sps2_state::models::Package],
        context: &UpdateContext,
    ) -> Result<(Vec<sps2_state::models::Package>, Vec<String>), Error> {
        use std::collections::HashMap;
        let mut packages_needing_update = Vec::new();
        let mut packages_up_to_date = Vec::new();
        for package_id in packages_to_update {
            let spec = if context.upgrade {
                // Upgrade mode: ignore upper bounds
                PackageSpec::parse(&format!("{}>=0.0.0", package_id.name))?
            } else {
                // Update mode: respect constraints (compatible release)
                PackageSpec::parse(&format!("{}~={}", package_id.name, package_id.version))?
            };
            // Create resolution context to check for available updates
            let mut resolution_context = ResolutionContext::new();
            resolution_context = resolution_context.add_runtime_dep(spec);
            // Resolve to see what version would be installed
            match self
                .install_operation
                .resolver
                .resolve_with_sat(resolution_context)
                .await
            {
                Ok(resolution_result) => {
                    // Check if any resolved package is newer than current.
                    // Only the first node matching this package name is
                    // inspected (note the unconditional `break` below).
                    let mut found_update = false;
                    for (resolved_id, node) in &resolution_result.nodes {
                        if resolved_id.name == package_id.name {
                            match resolved_id.version.cmp(&package_id.version()) {
                                std::cmp::Ordering::Greater => {
                                    // Update available - add to list
                                    packages_needing_update.push(package_id.clone());
                                    found_update = true;
                                    // Emit event for available update
                                    context.emit(AppEvent::General(
                                        GeneralEvent::CheckModePreview {
                                            operation: if context.upgrade {
                                                "upgrade".to_string()
                                            } else {
                                                "update".to_string()
                                            },
                                            action: format!(
                                                "Would {} {} {} → {}",
                                                if context.upgrade { "upgrade" } else { "update" },
                                                package_id.name,
                                                package_id.version,
                                                resolved_id.version
                                            ),
                                            details: HashMap::from([
                                                (
                                                    "current_version".to_string(),
                                                    package_id.version.to_string(),
                                                ),
                                                (
                                                    "new_version".to_string(),
                                                    resolved_id.version.to_string(),
                                                ),
                                                ("change_type".to_string(), "unknown".to_string()),
                                                (
                                                    "source".to_string(),
                                                    match node.action {
                                                        sps2_resolver::NodeAction::Download => {
                                                            "repository".to_string()
                                                        }
                                                        sps2_resolver::NodeAction::Local => {
                                                            "local file".to_string()
                                                        }
                                                    },
                                                ),
                                            ]),
                                        },
                                    ));
                                    break;
                                }
                                std::cmp::Ordering::Equal => {
                                    // Already up to date - add to list
                                    packages_up_to_date.push(package_id.name.clone());
                                    found_update = true;
                                    // Emit event for up to date package
                                    context.emit(AppEvent::General(
                                        GeneralEvent::CheckModePreview {
                                            operation: if context.upgrade {
                                                "upgrade".to_string()
                                            } else {
                                                "update".to_string()
                                            },
                                            action: format!(
                                                "{}:{} is already at {} version",
                                                package_id.name,
                                                package_id.version,
                                                if context.upgrade {
                                                    "latest"
                                                } else {
                                                    "compatible"
                                                }
                                            ),
                                            details: HashMap::from([
                                                (
                                                    "version".to_string(),
                                                    package_id.version.to_string(),
                                                ),
                                                ("status".to_string(), "up_to_date".to_string()),
                                            ]),
                                        },
                                    ));
                                    break;
                                }
                                std::cmp::Ordering::Less => {
                                    // This shouldn't happen normally
                                }
                            }
                            break;
                        }
                    }
                    if !found_update {
                        // No update found, package is up to date
                        packages_up_to_date.push(package_id.name.clone());
                        context.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                            operation: if context.upgrade {
                                "upgrade".to_string()
                            } else {
                                "update".to_string()
                            },
                            action: format!(
                                "{}:{} is already at {} version",
                                package_id.name,
                                package_id.version,
                                if context.upgrade {
                                    "latest"
                                } else {
                                    "compatible"
                                }
                            ),
                            details: HashMap::from([
                                ("version".to_string(), package_id.version.to_string()),
                                ("status".to_string(), "up_to_date".to_string()),
                            ]),
                        }));
                    }
                }
                Err(_) => {
                    // Resolution failed - package might not be available in repository
                    context.emit(AppEvent::General(GeneralEvent::CheckModePreview {
                        operation: if context.upgrade {
                            "upgrade".to_string()
                        } else {
                            "update".to_string()
                        },
                        action: format!(
                            "Cannot check {}s for {}",
                            if context.upgrade { "upgrade" } else { "update" },
                            package_id.name
                        ),
                        details: HashMap::from([
                            (
                                "current_version".to_string(),
                                package_id.version.to_string(),
                            ),
                            ("status".to_string(), "resolution_failed".to_string()),
                            (
                                "reason".to_string(),
                                "package not found in repository".to_string(),
                            ),
                        ]),
                    }));
                }
            }
        }
        Ok((packages_needing_update, packages_up_to_date))
    }
    /// Build install context and execute installation for packages needing update
    async fn build_and_execute_install(
        &mut self,
        packages_needing_update: &[sps2_state::models::Package],
        context: &UpdateContext,
    ) -> Result<InstallResult, Error> {
        let mut install_context = InstallContext::new();
        for package_id in packages_needing_update {
            let spec = if context.upgrade {
                // Upgrade mode: ignore upper bounds
                PackageSpec::parse(&format!("{}>=0.0.0", package_id.name))?
            } else {
                // Update mode: respect constraints (compatible release)
                PackageSpec::parse(&format!("{}~={}", package_id.name, package_id.version))?
            };
            install_context = install_context.add_package(spec);
        }
        install_context = install_context.with_force(true); // Force reinstallation for updates
        if let Some(sender) = &context.event_sender {
            install_context = install_context.with_event_sender(sender.clone());
        }
        // Execute installation (which handles updates)
        self.install_operation.execute(install_context).await
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/macros.rs | crates/install/src/macros.rs | //! Macros for context builder helpers
#[macro_export]
macro_rules! context_add_package_method {
    // Generates an `add_package` builder method on `$name` that appends a
    // `$pkg_type` to the context's `packages` vector.
    ($name:ident, $pkg_type:ty) => {
        impl $name {
            /// Add package to the context
            #[must_use]
            pub fn add_package(mut self, package: $pkg_type) -> Self {
                self.packages.push(package);
                self
            }
        }
    };
}
#[macro_export]
macro_rules! context_builder {
    // For a context struct with the listed fields (plus an implicit
    // `event_sender: Option<EventSender>`), generates: `new`, a `Default`
    // impl, one `with_<field>` builder setter per field (method names built
    // via `paste`), and `with_event_sender`.
    ($name:ident { $($field:ident: $ty:ty),* $(,)? }) => {
        paste::paste! {
            impl $name {
                /// Create a new context with default values
                pub fn new() -> Self {
                    Self {
                        $($field: Default::default(),)*
                        event_sender: None,
                    }
                }
                $( /// Builder-style setter generated for this field.
                #[must_use]
                pub fn [<with_ $field>](mut self, value: $ty) -> Self {
                    self.$field = value;
                    self
                } )*
                /// Set the event sender for progress reporting
                #[must_use]
                pub fn with_event_sender(mut self, sender: sps2_events::EventSender) -> Self {
                    self.event_sender = Some(sender);
                    self
                }
            }
            impl Default for $name {
                fn default() -> Self {
                    Self::new()
                }
            }
        }
    };
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/api/config.rs | crates/install/src/api/config.rs | /// Installer configuration
#[derive(Clone, Debug)]
pub struct InstallConfig {
    /// Maximum concurrent downloads
    pub max_concurrency: usize,
    /// Download timeout in seconds
    pub download_timeout: u64,
    /// Enable APFS optimizations (defaults to on for macOS builds)
    pub enable_apfs: bool,
    /// State retention policy (number of states to keep)
    pub state_retention: usize,
}
impl Default for InstallConfig {
    /// Conservative defaults: four parallel downloads, a five-minute
    /// download timeout, APFS optimizations only on macOS, and ten retained
    /// states for rollback.
    fn default() -> Self {
        let on_macos = cfg!(target_os = "macos");
        Self {
            max_concurrency: 4,
            download_timeout: 300,
            enable_apfs: on_macos,
            state_retention: 10,
        }
    }
}
impl InstallConfig {
    /// Create config with custom concurrency (maximum parallel downloads)
    #[must_use]
    pub fn with_concurrency(mut self, max_concurrency: usize) -> Self {
        self.max_concurrency = max_concurrency;
        self
    }
    /// Set download timeout in seconds
    #[must_use]
    pub fn with_timeout(mut self, timeout_seconds: u64) -> Self {
        self.download_timeout = timeout_seconds;
        self
    }
    /// Enable/disable APFS optimizations
    #[must_use]
    pub fn with_apfs(mut self, enable: bool) -> Self {
        self.enable_apfs = enable;
        self
    }
    /// Set state retention policy (how many historical states to keep)
    #[must_use]
    pub fn with_retention(mut self, count: usize) -> Self {
        self.state_retention = count;
        self
    }
}
/// Security policy for signature enforcement
#[derive(Clone, Copy, Debug)]
pub struct SecurityPolicy {
    /// Whether package signatures should be verified.
    pub verify_signatures: bool,
    /// Whether packages without a signature are allowed.
    pub allow_unsigned: bool,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/api/types.rs | crates/install/src/api/types.rs | use sps2_hash::Hash;
use std::path::PathBuf;
/// Prepared package data passed from `ParallelExecutor` to `AtomicInstaller`
///
/// This structure contains all the information needed by `AtomicInstaller`
/// to install a package without having to look up `package_map` or perform
/// additional database queries.
#[derive(Clone, Debug)]
pub struct PreparedPackage {
    /// Package hash (content identity within the store)
    pub hash: Hash,
    /// Package size in bytes
    pub size: u64,
    /// Path to the package in the store
    pub store_path: PathBuf,
    /// Whether this package was downloaded or local
    /// (`true` = came from a local `.sp` file)
    pub is_local: bool,
    /// Optional package archive hash (BLAKE3) provided by the repository
    pub package_hash: Option<Hash>,
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/api/result.rs | crates/install/src/api/result.rs | use chrono::{DateTime, Utc};
use sps2_resolver::PackageId;
use uuid::Uuid;
/// Installation result
///
/// Summary of one atomic operation: the resulting state ID plus the packages
/// that were installed, updated, and removed during it.
#[derive(Debug)]
pub struct InstallResult {
    /// State ID after installation
    pub state_id: Uuid,
    /// Packages that were installed
    pub installed_packages: Vec<PackageId>,
    /// Packages that were updated
    pub updated_packages: Vec<PackageId>,
    /// Packages that were removed
    pub removed_packages: Vec<PackageId>,
}
impl InstallResult {
    /// Create an empty result tied to the given state ID.
    #[must_use]
    pub fn new(state_id: Uuid) -> Self {
        Self {
            state_id,
            installed_packages: Vec::new(),
            updated_packages: Vec::new(),
            removed_packages: Vec::new(),
        }
    }
    /// Record a package that was newly installed.
    pub fn add_installed(&mut self, package_id: PackageId) {
        self.installed_packages.push(package_id);
    }
    /// Record a package that was updated.
    pub fn add_updated(&mut self, package_id: PackageId) {
        self.updated_packages.push(package_id);
    }
    /// Record a package that was removed.
    pub fn add_removed(&mut self, package_id: PackageId) {
        self.removed_packages.push(package_id);
    }
    /// Total number of installed + updated + removed packages.
    #[must_use]
    pub fn total_changes(&self) -> usize {
        [
            self.installed_packages.len(),
            self.updated_packages.len(),
            self.removed_packages.len(),
        ]
        .iter()
        .sum()
    }
}
/// State information for listing
#[derive(Debug, Clone)]
pub struct StateInfo {
    /// State ID
    pub id: Uuid,
    /// Creation timestamp
    pub timestamp: DateTime<Utc>,
    /// Parent state ID (`None` for the root state)
    pub parent_id: Option<Uuid>,
    /// Number of packages in this state
    pub package_count: usize,
    /// Sample of packages (for display); may contain fewer entries than
    /// `package_count`
    pub packages: Vec<sps2_types::PackageId>,
}
impl StateInfo {
/// Check if this is the root state
#[must_use]
pub fn is_root(&self) -> bool {
self.parent_id.is_none()
}
/// Get age of this state
#[must_use]
pub fn age(&self) -> chrono::Duration {
Utc::now() - self.timestamp
}
/// Format package list for display
#[must_use]
pub fn package_summary(&self) -> String {
if self.packages.is_empty() {
"No packages".to_string()
} else if self.packages.len() <= 3 {
self.packages
.iter()
.map(|pkg| format!("{}-{}", pkg.name, pkg.version))
.collect::<Vec<_>>()
.join(", ")
} else {
let first_three: Vec<String> = self
.packages
.iter()
.take(3)
.map(|pkg| format!("{}-{}", pkg.name, pkg.version))
.collect();
format!(
"{} and {} more",
first_three.join(", "),
self.package_count - 3
)
}
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/api/mod.rs | crates/install/src/api/mod.rs | pub mod config;
pub mod context;
pub mod result;
pub mod types;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/api/context.rs | crates/install/src/api/context.rs | use sps2_events::EventSender;
use sps2_types::PackageSpec;
use std::path::PathBuf;
/// Installation context
///
/// Carries everything an install operation needs: what to install
/// (repository specs and/or local package files), behavior flags, and an
/// optional channel for progress events.
#[derive(Clone, Debug)]
pub struct InstallContext {
    /// Package specifications to install
    pub packages: Vec<PackageSpec>,
    /// Local package files to install
    pub local_files: Vec<PathBuf>,
    /// Force reinstallation
    pub force: bool,
    /// Force re-download even if cached in the store
    pub force_download: bool,
    /// Event sender for progress reporting
    pub event_sender: Option<EventSender>,
}
// Generates builder-style construction/setters for the listed fields
// (macro defined elsewhere in this crate).
context_builder! {
    InstallContext {
        packages: Vec<PackageSpec>,
        local_files: Vec<PathBuf>,
        force: bool,
        force_download: bool,
    }
}
// Generates an add-package convenience method taking a `PackageSpec`
// (macro defined elsewhere in this crate).
context_add_package_method!(InstallContext, PackageSpec);
/// Uninstall context
///
/// Names the packages to remove plus flags controlling dependency cleanup
/// and forced removal; progress is reported through `event_sender`.
#[derive(Clone, Debug)]
pub struct UninstallContext {
    /// Package names to uninstall
    pub packages: Vec<String>,
    /// Remove dependencies if no longer needed
    pub autoremove: bool,
    /// Force removal even with dependents
    pub force: bool,
    /// Event sender for progress reporting
    pub event_sender: Option<EventSender>,
}
// Generates builder-style construction/setters for the listed fields
// (macro defined elsewhere in this crate).
context_builder! {
    UninstallContext {
        packages: Vec<String>,
        autoremove: bool,
        force: bool,
    }
}
// Generates an add-package convenience method taking a package name.
context_add_package_method!(UninstallContext, String);
/// Update context
///
/// Names the packages to update (empty means all installed packages) and
/// whether to run in upgrade mode.
#[derive(Clone, Debug)]
pub struct UpdateContext {
    /// Packages to update (empty = all)
    pub packages: Vec<String>,
    /// Upgrade mode (ignore upper bounds)
    pub upgrade: bool,
    /// Event sender for progress reporting
    pub event_sender: Option<EventSender>,
}
// Generates builder-style construction/setters for the listed fields
// (macro defined elsewhere in this crate).
context_builder! {
    UpdateContext {
        packages: Vec<String>,
        upgrade: bool,
    }
}
// Generates an add-package convenience method taking a package name.
context_add_package_method!(UpdateContext, String);
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/atomic/installer.rs | crates/install/src/atomic/installer.rs | //! Atomic installer implementation using slot-based staging.
use crate::atomic::{package, transition::StateTransition};
// Removed Python venv handling - Python packages are now handled like regular packages
use crate::{InstallContext, InstallResult, PreparedPackage};
use sps2_errors::{Error, InstallError};
use sps2_events::events::{LifecycleEvent, StateTransitionContext, TransitionSummary};
use sps2_events::{AppEvent, EventEmitter, EventSender, FailureContext, StateEvent};
use sps2_resolver::{PackageId, ResolvedNode};
use sps2_state::StateManager;
use sps2_store::PackageStore;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::time::Instant;
use uuid::Uuid;
// Each operation context exposes its optional event sender through the
// `EventEmitter` trait so the installer can emit progress/state events
// generically over any context type.

/// Implement `EventEmitter` for `InstallContext`
impl EventEmitter for InstallContext {
    fn event_sender(&self) -> Option<&EventSender> {
        self.event_sender.as_ref()
    }
}
/// Implement `EventEmitter` for `UninstallContext`
impl EventEmitter for crate::UninstallContext {
    fn event_sender(&self) -> Option<&EventSender> {
        self.event_sender.as_ref()
    }
}
/// Implement `EventEmitter` for `UpdateContext`
impl EventEmitter for crate::UpdateContext {
    fn event_sender(&self) -> Option<&EventSender> {
        self.event_sender.as_ref()
    }
}
/// Atomic installer using APFS optimizations
///
/// Owns the handles needed to stage packages into an inactive slot and
/// commit the result atomically via the state manager.
pub struct AtomicInstaller {
    /// State manager for atomic transitions
    state_manager: StateManager,
    /// Content-addressable package store
    store: PackageStore,
}
impl AtomicInstaller {
    /// Execute two-phase commit flow for a transition
    ///
    /// Phase 1 journals the transaction via `prepare_transaction`; phase 2
    /// performs the filesystem slot swap and finalizes the new state.
    /// `TransitionStarted` / `TransitionFailed` / `TransitionCompleted`
    /// state events are emitted around the commit.
    async fn execute_two_phase_commit<T: EventEmitter>(
        &self,
        transition: &StateTransition,
        context: &T,
    ) -> Result<(), Error> {
        let source = transition.parent_id;
        let target = transition.staging_id;
        // A transition without a parent state cannot be committed.
        let parent_id = transition.parent_id.ok_or_else(|| {
            Error::from(InstallError::AtomicOperationFailed {
                message: "state transition missing parent state".to_string(),
            })
        })?;
        let transition_context = StateTransitionContext {
            operation: transition.operation.clone(),
            source,
            target,
        };
        let transition_start = Instant::now();
        context.emit(AppEvent::State(StateEvent::TransitionStarted {
            context: transition_context.clone(),
        }));
        let transition_data = sps2_state::TransactionData {
            package_refs: &transition.package_refs,
            file_references: &transition.file_references,
            pending_file_hashes: &transition.pending_file_hashes,
        };
        // Phase 1: persist a journal describing the pending transition.
        let journal = match self
            .state_manager
            .prepare_transaction(
                &transition.staging_id,
                &parent_id,
                transition.staging_slot,
                &transition.operation,
                &transition_data,
            )
            .await
        {
            Ok(journal) => journal,
            Err(e) => {
                let failure = FailureContext::from_error(&e);
                context.emit(AppEvent::State(StateEvent::TransitionFailed {
                    context: transition_context.clone(),
                    failure,
                }));
                return Err(e);
            }
        };
        // Phase 2: swap the filesystem slots and finalize the state.
        if let Err(e) = self
            .state_manager
            .execute_filesystem_swap_and_finalize(journal)
            .await
        {
            let failure = FailureContext::from_error(&e);
            context.emit(AppEvent::State(StateEvent::TransitionFailed {
                context: transition_context.clone(),
                failure,
            }));
            return Err(e);
        }
        let summary = TransitionSummary {
            // Saturate rather than fail if the duration exceeds u64 millis.
            duration_ms: Some(
                u64::try_from(transition_start.elapsed().as_millis()).unwrap_or(u64::MAX),
            ),
        };
        context.emit(AppEvent::State(StateEvent::TransitionCompleted {
            context: transition_context,
            summary: Some(summary),
        }));
        Ok(())
    }
    /// Setup state transition and staging directory
    ///
    /// Creates a fresh `StateTransition` for `operation` and attaches the
    /// caller's event sender so staging progress can be reported.
    async fn setup_state_transition<T: EventEmitter>(
        &self,
        operation: &str,
        context: &T,
    ) -> Result<StateTransition, Error> {
        // Create new state transition
        let mut transition =
            StateTransition::new(&self.state_manager, operation.to_string()).await?;
        // Set event sender on transition
        transition.event_sender = context.event_sender().cloned();
        context.emit_debug(format!(
            "Prepared staging slot {} at {}",
            transition.staging_slot,
            transition.slot_path.display()
        ));
        Ok(transition)
    }
    /// Create new atomic installer
    ///
    /// Infallible: only stores the provided state manager and store handles.
    #[must_use]
    pub fn new(state_manager: StateManager, store: PackageStore) -> Self {
        Self {
            state_manager,
            store,
        }
    }
    /// Perform atomic installation
    ///
    /// Stages all `resolved_packages` into the inactive slot (carrying
    /// forward untouched packages from the parent state), then commits via
    /// the two-phase commit flow.
    ///
    /// # Errors
    ///
    /// Returns an error if state transition fails, package installation fails,
    /// or filesystem operations fail.
    pub async fn install(
        &mut self,
        context: &InstallContext,
        resolved_packages: &HashMap<PackageId, ResolvedNode>,
        prepared_packages: Option<&HashMap<PackageId, PreparedPackage>>,
    ) -> Result<InstallResult, Error> {
        // Setup state transition and staging directory
        let mut transition = self.setup_state_transition("install", context).await?;
        // Collect the current state's packages so we can carry forward untouched entries and
        // detect in-place upgrades cleanly.
        let parent_packages = if let Some(parent_id) = transition.parent_id {
            self.state_manager
                .get_installed_packages_in_state(&parent_id)
                .await?
        } else {
            Vec::new()
        };
        // Index parent packages by name for per-package upgrade lookups below.
        let parent_lookup: HashMap<String, sps2_state::models::Package> = parent_packages
            .iter()
            .cloned()
            .map(|pkg| (pkg.name.clone(), pkg))
            .collect();
        package::sync_slot_with_parent(
            &self.state_manager,
            &self.store,
            &mut transition,
            &parent_packages,
        )
        .await?;
        // The staging slot now mirrors the parent state, so carry-forward only needs to
        // register package references for unchanged packages.
        let exclude_names: HashSet<String> = resolved_packages
            .keys()
            .map(|pkg| pkg.name.clone())
            .collect();
        package::carry_forward_packages(&mut transition, &parent_packages, &exclude_names);
        // Apply package changes to staging
        let mut result = InstallResult::new(transition.staging_id);
        for (package_id, node) in resolved_packages {
            let prepared_package = prepared_packages.and_then(|packages| packages.get(package_id));
            package::install_package_to_staging(
                &self.state_manager,
                &mut transition,
                package_id,
                node,
                prepared_package,
                parent_lookup.get(&package_id.name),
                &mut result,
            )
            .await?;
        }
        // Execute two-phase commit
        self.execute_two_phase_commit(&transition, context).await?;
        Ok(result)
    }
    // Removed install_python_package - Python packages are now handled like regular packages
    /// Perform atomic uninstallation
    ///
    /// Mirrors the parent state into the staging slot, removes the named
    /// packages from it, carries the rest forward, and commits atomically.
    ///
    /// # Errors
    ///
    /// Returns an error if state transition fails, package removal fails,
    /// or filesystem operations fail.
    pub async fn uninstall(
        &mut self,
        packages_to_remove: &[PackageId],
        context: &crate::UninstallContext,
    ) -> Result<InstallResult, Error> {
        // Setup state transition and staging directory
        let mut transition = self.setup_state_transition("uninstall", context).await?;
        // Ensure the staging slot mirrors the current parent state before applying removals.
        let mut result = InstallResult::new(transition.staging_id);
        for pkg in packages_to_remove {
            context.emit(AppEvent::Lifecycle(LifecycleEvent::uninstall_started(
                pkg.name.clone(),
                pkg.version.clone(),
            )));
        }
        // Remove packages from staging and track them in result
        let parent_packages = if let Some(parent_id) = transition.parent_id {
            self.state_manager
                .get_installed_packages_in_state(&parent_id)
                .await?
        } else {
            Vec::new()
        };
        package::sync_slot_with_parent(
            &self.state_manager,
            &self.store,
            &mut transition,
            &parent_packages,
        )
        .await?;
        for pkg in &parent_packages {
            // Check if this package should be removed (match by name only).
            let should_remove = packages_to_remove
                .iter()
                .any(|remove_pkg| remove_pkg.name == pkg.name);
            if should_remove {
                result.add_removed(PackageId::new(pkg.name.clone(), pkg.version()));
                package::remove_package_from_staging(&self.state_manager, &mut transition, pkg)
                    .await?;
                context.emit_debug(format!("Removed package {} from staging", pkg.name));
            }
        }
        // Carry forward packages that are not being removed
        let exclude_names: HashSet<String> = packages_to_remove
            .iter()
            .map(|pkg| pkg.name.clone())
            .collect();
        package::carry_forward_packages(&mut transition, &parent_packages, &exclude_names);
        // Execute two-phase commit
        self.execute_two_phase_commit(&transition, context).await?;
        // Emit completion events only after the commit has succeeded.
        for pkg in &result.removed_packages {
            context.emit(AppEvent::Lifecycle(LifecycleEvent::uninstall_completed(
                pkg.name.clone(),
                pkg.version.clone(),
                0,
            )));
        }
        Ok(result)
    }
    // Removed remove_package_venv - Python packages are now handled like regular packages
    /// Rollback by moving active to an existing target state without creating a new state row
    ///
    /// # Errors
    ///
    /// Returns an error if staging or filesystem operations fail.
    pub async fn rollback_move_to_state(&mut self, target_state_id: Uuid) -> Result<(), Error> {
        let current_state_id = self.state_manager.get_current_state_id().await?;
        let mut transition =
            StateTransition::new(&self.state_manager, "rollback".to_string()).await?;
        let target_packages = self
            .state_manager
            .get_installed_packages_in_state(&target_state_id)
            .await?;
        // Use slot sync mechanism for fast rollback
        // Fast path: if inactive slot already has target state (common for recent rollback),
        // this returns instantly without any filesystem operations
        package::sync_slot_to_state(
            &self.state_manager,
            &self.store,
            &mut transition,
            target_state_id,
            &target_packages,
        )
        .await?;
        // Journal points at the existing target state rather than a new one.
        let journal = sps2_types::state::TransactionJournal {
            new_state_id: target_state_id,
            parent_state_id: current_state_id,
            staging_path: transition.slot_path.clone(),
            staging_slot: transition.staging_slot,
            phase: sps2_types::state::TransactionPhase::Prepared,
            operation: "rollback".to_string(),
        };
        self.state_manager.write_journal(&journal).await?;
        self.state_manager
            .execute_filesystem_swap_and_finalize(journal)
            .await?;
        // After switching active state, synchronize DB refcounts to match the target state exactly
        let _ = self
            .state_manager
            .sync_refcounts_to_state(&target_state_id)
            .await?;
        // Ensure the target state is visible in base history
        self.state_manager.unprune_state(&target_state_id).await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
use super::*;
use sps2_store::create_package;
use sps2_types::{Arch, Manifest, Version};
use std::collections::HashMap;
use tempfile::TempDir;
use tokio::fs as afs;
    /// Build an isolated test environment: a temp dir, a `StateManager`
    /// rooted in it, and a `PackageStore` under `<tmp>/store`.
    async fn mk_env() -> (TempDir, StateManager, sps2_store::PackageStore) {
        let td = TempDir::new().expect("td");
        let state = StateManager::new(td.path()).await.expect("state");
        let store_base = td.path().join("store");
        afs::create_dir_all(&store_base).await.unwrap();
        let store = sps2_store::PackageStore::new(store_base);
        (td, state, store)
    }
    /// Create a `.sp` package `name`-`version` whose payload `files`
    /// (relative path, contents) live under `opt/pm/live`, add it to
    /// `store`, and return `(package hash, store path, archive size,
    /// per-file hashes)`.
    async fn make_sp_and_add_to_store(
        store: &sps2_store::PackageStore,
        name: &str,
        version: &str,
        files: &[(&str, &str)],
    ) -> (
        sps2_hash::Hash,
        std::path::PathBuf,
        u64,
        Vec<sps2_hash::Hash>,
    ) {
        let td = TempDir::new().unwrap();
        let src = td.path().join("src");
        afs::create_dir_all(&src).await.unwrap();
        // manifest
        let v = Version::parse(version).unwrap();
        let m = Manifest::new(name.to_string(), &v, 1, &Arch::Arm64);
        let manifest_path = src.join("manifest.toml");
        sps2_store::manifest_io::write_manifest(&manifest_path, &m)
            .await
            .unwrap();
        // files under opt/pm/live
        for (rel, content) in files {
            let p = src.join("opt/pm/live").join(rel);
            if let Some(parent) = p.parent() {
                afs::create_dir_all(parent).await.unwrap();
            }
            afs::write(&p, content.as_bytes()).await.unwrap();
        }
        // create .sp
        let sp = td.path().join("pkg.sp");
        create_package(&src, &sp).await.unwrap();
        // add to store
        let stored = store.add_package(&sp).await.unwrap();
        let hash = stored.hash().unwrap();
        let path = store.package_path(&hash);
        let file_hashes: Vec<sps2_hash::Hash> = stored
            .file_hashes()
            .unwrap_or(&[])
            .iter()
            .map(|r| r.hash.clone())
            .collect();
        // Archive size is taken from the on-disk `.sp` file.
        let size = afs::metadata(&sp).await.unwrap().len();
        (hash, path, size, file_hashes)
    }
fn collect_relative_files(base: &std::path::Path) -> Vec<std::path::PathBuf> {
fn walk(
base: &std::path::Path,
current: &std::path::Path,
acc: &mut Vec<std::path::PathBuf>,
) -> std::io::Result<()> {
for entry in std::fs::read_dir(current)? {
let entry = entry?;
let path = entry.path();
if entry.file_type()?.is_dir() {
walk(base, &path, acc)?;
} else {
acc.push(path.strip_prefix(base).unwrap().to_path_buf());
}
}
Ok(())
}
let mut acc = Vec::new();
walk(base, base, &mut acc).unwrap();
acc
}
    /// Return the store-level refcount recorded for hash `hex`, or 0 when
    /// no matching store ref row exists.
    async fn refcount_store(state: &StateManager, hex: &str) -> i64 {
        let mut tx = state.begin_transaction().await.unwrap();
        let all = sps2_state::queries::get_all_store_refs(&mut tx)
            .await
            .unwrap();
        all.into_iter()
            .find(|r| r.hash == hex)
            .map_or(0, |r| r.ref_count)
    }
    /// Return the file-object refcount recorded for hash `hex`, or 0 when
    /// no file object row exists.
    async fn refcount_file(state: &StateManager, hex: &str) -> i64 {
        let mut tx = state.begin_transaction().await.unwrap();
        let h = sps2_hash::Hash::from_hex(hex).unwrap();
        let row = sps2_state::file_queries_runtime::get_file_object(&mut tx, &h)
            .await
            .unwrap();
        row.map_or(0, |o| o.ref_count)
    }
    /// Installing a second package must preserve the first package's
    /// `package_files` rows when staging is cloned from the previous state.
    #[tokio::test]
    #[allow(clippy::too_many_lines)] // Integration test with comprehensive setup
    async fn cloned_staging_carries_forward_package_files() {
        let (_td, state, store) = mk_env().await;
        let (hash_a, path_a, size_a, _file_hashes_a) = make_sp_and_add_to_store(
            &store,
            "A",
            "1.0.0",
            &[("bin/a", "alpha"), ("share/doc.txt", "alpha docs")],
        )
        .await;
        let (hash_b, path_b, size_b, _file_hashes_b) = make_sp_and_add_to_store(
            &store,
            "B",
            "1.0.0",
            &[("bin/b", "beta"), ("share/readme.txt", "beta docs")],
        )
        .await;
        let mut installer = AtomicInstaller::new(state.clone(), store.clone());
        // Install package A
        let mut resolved_a: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pid_a = PackageId::new(
            "A".to_string(),
            sps2_types::Version::parse("1.0.0").unwrap(),
        );
        resolved_a.insert(
            pid_a.clone(),
            ResolvedNode::local(
                pid_a.name.clone(),
                pid_a.version.clone(),
                path_a.clone(),
                vec![],
            ),
        );
        let mut prepared_a = HashMap::new();
        prepared_a.insert(
            pid_a.clone(),
            crate::PreparedPackage {
                hash: hash_a.clone(),
                size: size_a,
                store_path: path_a.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        let ctx = crate::InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        let _ = installer
            .install(&ctx, &resolved_a, Some(&prepared_a))
            .await
            .unwrap();
        // Install package B (forces cloned staging when clone succeeds)
        let mut resolved_b: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pid_b = PackageId::new(
            "B".to_string(),
            sps2_types::Version::parse("1.0.0").unwrap(),
        );
        resolved_b.insert(
            pid_b.clone(),
            ResolvedNode::local(
                pid_b.name.clone(),
                pid_b.version.clone(),
                path_b.clone(),
                vec![],
            ),
        );
        let mut prepared_b = HashMap::new();
        prepared_b.insert(
            pid_b.clone(),
            crate::PreparedPackage {
                hash: hash_b.clone(),
                size: size_b,
                store_path: path_b.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        let ctx_b = crate::InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        let _ = installer
            .install(&ctx_b, &resolved_b, Some(&prepared_b))
            .await
            .unwrap();
        // Package A was untouched by the second install, so its file rows
        // must still exist in the now-active state.
        let active_state = state.get_current_state_id().await.unwrap();
        let mut tx = state.begin_transaction().await.unwrap();
        let pkg_a_files = sps2_state::file_queries_runtime::get_package_file_entries_by_name(
            &mut tx,
            &active_state,
            "A",
            "1.0.0",
        )
        .await
        .unwrap();
        tx.commit().await.unwrap();
        assert!(
            !pkg_a_files.is_empty(),
            "package_files entries for package A should be preserved after cloned staging"
        );
    }
    /// Upgrading A 1.0.0 -> 1.1.0 must report an update (not an install),
    /// replace the old files in the live tree, and drop the v1 store ref.
    #[tokio::test]
    #[allow(clippy::too_many_lines)] // Integration test with comprehensive setup
    async fn install_then_update_replaces_old_version() {
        let (_td, state, store) = mk_env().await;
        let (hash_v1, path_v1, size_v1, _file_hashes_v1) = make_sp_and_add_to_store(
            &store,
            "A",
            "1.0.0",
            &[("share/v1.txt", "v1"), ("bin/a", "binary")],
        )
        .await;
        let (hash_v2, path_v2, size_v2, _file_hashes_v2) = make_sp_and_add_to_store(
            &store,
            "A",
            "1.1.0",
            &[("share/v2.txt", "v2"), ("bin/a", "binary2")],
        )
        .await;
        let mut ai = AtomicInstaller::new(state.clone(), store.clone());
        // Initial install of v1
        let mut resolved: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pid_v1 = PackageId::new(
            "A".to_string(),
            sps2_types::Version::parse("1.0.0").unwrap(),
        );
        resolved.insert(
            pid_v1.clone(),
            ResolvedNode::local(
                "A".to_string(),
                pid_v1.version.clone(),
                path_v1.clone(),
                vec![],
            ),
        );
        let mut prepared = HashMap::new();
        prepared.insert(
            pid_v1.clone(),
            crate::PreparedPackage {
                hash: hash_v1.clone(),
                size: size_v1,
                store_path: path_v1.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        let ctx = crate::InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        let _ = ai.install(&ctx, &resolved, Some(&prepared)).await.unwrap();
        let live_path = state.live_path().to_path_buf();
        let files_after_install = collect_relative_files(&live_path);
        assert!(files_after_install
            .iter()
            .any(|p| p.ends_with("share/v1.txt")));
        let binary_rel_initial = files_after_install
            .iter()
            .find(|p| p.ends_with("bin/a"))
            .expect("binary present after initial install");
        assert_eq!(
            std::fs::read_to_string(live_path.join(binary_rel_initial)).unwrap(),
            "binary"
        );
        // Update to v2
        let mut resolved_update: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pid_v2 = PackageId::new(
            "A".to_string(),
            sps2_types::Version::parse("1.1.0").unwrap(),
        );
        resolved_update.insert(
            pid_v2.clone(),
            ResolvedNode::local(
                "A".to_string(),
                pid_v2.version.clone(),
                path_v2.clone(),
                vec![],
            ),
        );
        let mut prepared_update = HashMap::new();
        prepared_update.insert(
            pid_v2.clone(),
            crate::PreparedPackage {
                hash: hash_v2.clone(),
                size: size_v2,
                store_path: path_v2.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        let update_ctx = crate::InstallContext {
            packages: vec![],
            local_files: vec![],
            force: true,
            force_download: false,
            event_sender: None,
        };
        let update_result = ai
            .install(&update_ctx, &resolved_update, Some(&prepared_update))
            .await
            .unwrap();
        // Same package name with a new version must be classified as an update.
        assert!(update_result.installed_packages.is_empty());
        assert_eq!(update_result.updated_packages, vec![pid_v2.clone()]);
        assert!(update_result.removed_packages.is_empty());
        let installed = state.get_installed_packages().await.unwrap();
        assert_eq!(installed.len(), 1);
        assert_eq!(installed[0].version, "1.1.0");
        // Live directory should reflect the new version
        let files_after_update = collect_relative_files(&live_path);
        assert!(!files_after_update
            .iter()
            .any(|p| p.ends_with("share/v1.txt")));
        assert!(files_after_update
            .iter()
            .any(|p| p.ends_with("share/v2.txt")));
        let binary_rel_updated = files_after_update
            .iter()
            .find(|p| p.ends_with("bin/a"))
            .expect("binary present after update");
        assert_eq!(
            std::fs::read_to_string(live_path.join(binary_rel_updated)).unwrap(),
            "binary2"
        );
        // v1's store entry is no longer referenced; v2's is live.
        assert_eq!(refcount_store(&state, &hash_v1.to_hex()).await, 0);
        assert!(refcount_store(&state, &hash_v2.to_hex()).await > 0);
    }
    /// Install then uninstall: store and file refcounts rise above zero,
    /// then drop back to exactly zero once the package is gone.
    #[tokio::test]
    async fn install_then_uninstall_updates_refcounts() {
        let (_td, state, store) = mk_env().await;
        let (hash, store_path, size, file_hashes) =
            make_sp_and_add_to_store(&store, "A", "1.0.0", &[("bin/x", "same"), ("share/a", "A")])
                .await;
        let mut ai = AtomicInstaller::new(state.clone(), store.clone());
        let mut resolved: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pid = PackageId::new(
            "A".to_string(),
            sps2_types::Version::parse("1.0.0").unwrap(),
        );
        resolved.insert(
            pid.clone(),
            ResolvedNode::local(
                "A".to_string(),
                pid.version.clone(),
                store_path.clone(),
                vec![],
            ),
        );
        let mut prepared = HashMap::new();
        prepared.insert(
            pid.clone(),
            crate::PreparedPackage {
                hash: hash.clone(),
                size,
                store_path: store_path.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        let ctx = crate::InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        let _res = ai.install(&ctx, &resolved, Some(&prepared)).await.unwrap();
        // After install
        assert!(refcount_store(&state, &hash.to_hex()).await > 0);
        for fh in &file_hashes {
            assert!(refcount_file(&state, &fh.to_hex()).await > 0);
        }
        // Uninstall package A
        let uctx = crate::UninstallContext {
            packages: vec!["A".to_string()],
            autoremove: false,
            force: true,
            event_sender: None,
        };
        let _u = ai
            .uninstall(
                &[PackageId::new(
                    "A".to_string(),
                    sps2_types::Version::parse("1.0.0").unwrap(),
                )],
                &uctx,
            )
            .await
            .unwrap();
        // Nothing references the package or its files any more.
        assert_eq!(refcount_store(&state, &hash.to_hex()).await, 0);
        for fh in &file_hashes {
            assert_eq!(refcount_file(&state, &fh.to_hex()).await, 0);
        }
    }
    /// A file shared by two packages must stay referenced after
    /// uninstalling only one of them.
    #[tokio::test]
    async fn shared_file_uninstall_decrements_but_not_zero() {
        let (_td, state, store) = mk_env().await;
        // A and B share bin/x
        let (hash_a, path_a, size_a, file_hashes_a) = make_sp_and_add_to_store(
            &store,
            "A",
            "1.0.0",
            &[("bin/x", "same"), ("share/a", "AA")],
        )
        .await;
        let (hash_b, path_b, size_b, file_hashes_b) = make_sp_and_add_to_store(
            &store,
            "B",
            "1.0.0",
            &[("bin/x", "same"), ("share/b", "BB")],
        )
        .await;
        // The hash present in both packages is the shared bin/x content.
        let h_same = file_hashes_a
            .iter()
            .find(|h| file_hashes_b.iter().any(|hb| hb == *h))
            .unwrap()
            .clone();
        let mut ai = AtomicInstaller::new(state.clone(), store.clone());
        // Install A then B
        let mut resolved: HashMap<PackageId, ResolvedNode> = HashMap::new();
        let pid_a = PackageId::new(
            "A".to_string(),
            sps2_types::Version::parse("1.0.0").unwrap(),
        );
        let pid_b = PackageId::new(
            "B".to_string(),
            sps2_types::Version::parse("1.0.0").unwrap(),
        );
        resolved.insert(
            pid_a.clone(),
            ResolvedNode::local(
                "A".to_string(),
                pid_a.version.clone(),
                path_a.clone(),
                vec![],
            ),
        );
        resolved.insert(
            pid_b.clone(),
            ResolvedNode::local(
                "B".to_string(),
                pid_b.version.clone(),
                path_b.clone(),
                vec![],
            ),
        );
        let mut prepared = HashMap::new();
        prepared.insert(
            pid_a.clone(),
            crate::PreparedPackage {
                hash: hash_a.clone(),
                size: size_a,
                store_path: path_a.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        prepared.insert(
            pid_b.clone(),
            crate::PreparedPackage {
                hash: hash_b.clone(),
                size: size_b,
                store_path: path_b.clone(),
                is_local: true,
                package_hash: None,
            },
        );
        let ctx = crate::InstallContext {
            packages: vec![],
            local_files: vec![],
            force: false,
            force_download: false,
            event_sender: None,
        };
        let _res = ai.install(&ctx, &resolved, Some(&prepared)).await.unwrap();
        // Uninstall A
        let uctx = crate::UninstallContext {
            packages: vec!["A".to_string()],
            autoremove: false,
            force: true,
            event_sender: None,
        };
        let _u = ai
            .uninstall(std::slice::from_ref(&pid_a), &uctx)
            .await
            .unwrap();
        // Shared file remains referenced by B
        assert!(refcount_file(&state, &h_same.to_hex()).await > 0);
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/atomic/fs.rs | crates/install/src/atomic/fs.rs | //! Filesystem operations for atomic installation
use crate::atomic::transition::StateTransition;
use sps2_errors::{Error, InstallError};
use sps2_events::{AppEvent, EventEmitter, GeneralEvent};
use sps2_hash::FileHashResult;
use sps2_resolver::PackageId;
use sps2_store::StoredPackage;
use std::path::Path;
/// Link package from store to staging directory
///
/// Links all of the stored package's files into the transition's staging
/// slot and reports what was linked via debug events.
///
/// Returns (`had_file_hashes`, `file_hashes`) where `file_hashes` is Some only if `record_hashes` is true
pub(super) async fn link_package_to_staging(
    transition: &mut StateTransition,
    store_path: &Path,
    package_id: &PackageId,
    record_hashes: bool,
) -> Result<(bool, Option<Vec<FileHashResult>>), Error> {
    let staging_prefix = &transition.slot_path;
    // Load the stored package
    let stored_package = StoredPackage::load(store_path).await?;
    // Link files from store to staging
    if let Some(sender) = &transition.event_sender {
        sender.emit(AppEvent::General(GeneralEvent::DebugLog {
            message: format!("Linking package {} to staging", package_id.name),
            context: std::collections::HashMap::new(),
        }));
    }
    stored_package.link_to(staging_prefix).await?;
    let mut had_file_hashes = false;
    let mut linked_entry_count = 0usize;
    let mut file_hashes_result = None;
    // Collect file paths for database tracking AND store file hash info
    if let Some(file_hashes) = stored_package.file_hashes() {
        had_file_hashes = true;
        linked_entry_count = file_hashes.len();
        // Store the file hash information for later use when we have package IDs;
        // skip the copy entirely when the caller does not need it.
        if record_hashes {
            file_hashes_result = Some(file_hashes.to_vec());
        }
    }
    // Debug what was linked
    if let Some(sender) = &transition.event_sender {
        sender.emit(AppEvent::General(GeneralEvent::DebugLog {
            message: format!(
                "Linked {} files/directories for package {}",
                linked_entry_count, package_id.name
            ),
            context: std::collections::HashMap::new(),
        }));
    }
    Ok((had_file_hashes, file_hashes_result))
}
/// Remove tracked package entries from staging directory
///
/// Takes a transition and a list of file paths, removes them in safe order:
/// symlinks first, then regular files, then directories (deepest first)
pub(super) async fn remove_tracked_entries(
transition: &StateTransition,
file_paths: &[String],
) -> Result<(), Error> {
// Group files by type for proper removal order
let mut symlinks = Vec::new();
let mut regular_files = Vec::new();
let mut directories = Vec::new();
for file_path in file_paths {
let staging_file = transition.slot_path.join(file_path);
if staging_file.exists() {
// Check if it's a symlink
let metadata = tokio::fs::symlink_metadata(&staging_file).await?;
if metadata.is_symlink() {
symlinks.push(file_path.clone());
} else if staging_file.is_dir() {
directories.push(file_path.clone());
} else {
regular_files.push(file_path.clone());
}
}
}
// Remove in order: symlinks first, then files, then directories
// This ensures we don't try to remove non-empty directories
// 1. Remove symlinks
for file_path in symlinks {
let staging_file = transition.slot_path.join(&file_path);
if staging_file.exists() {
tokio::fs::remove_file(&staging_file).await.map_err(|e| {
InstallError::FilesystemError {
operation: "remove_symlink".to_string(),
path: staging_file.display().to_string(),
message: e.to_string(),
}
})?;
}
}
// 2. Remove regular files
for file_path in regular_files {
let staging_file = transition.slot_path.join(&file_path);
if staging_file.exists() {
tokio::fs::remove_file(&staging_file).await.map_err(|e| {
InstallError::FilesystemError {
operation: "remove_file".to_string(),
path: staging_file.display().to_string(),
message: e.to_string(),
}
})?;
}
}
// 3. Remove directories in reverse order (deepest first)
directories.sort_by(|a, b| b.cmp(a)); // Reverse lexicographic order
for file_path in directories {
let staging_file = transition.slot_path.join(&file_path);
if staging_file.exists() {
// Try to remove directory if it's empty
if let Ok(mut entries) = tokio::fs::read_dir(&staging_file).await {
if entries.next_entry().await?.is_none() {
tokio::fs::remove_dir(&staging_file).await.map_err(|e| {
InstallError::FilesystemError {
operation: "remove_dir".to_string(),
path: staging_file.display().to_string(),
message: e.to_string(),
}
})?;
}
}
}
}
Ok(())
}
/// Detect if this is a Python package and return the directory to remove
///
/// Python packages are isolated in `/opt/pm/live/python/<package_name>/` directories.
/// This method examines file paths to find the Python package directory.
///
/// Returns the first `python/<package>` prefix found (e.g. `"python/ansible"`
/// for `"python/ansible/lib/..."`), or `None` when no tracked path lives
/// under `python/`. Note: a degenerate path like `"python//x"` yields
/// `Some("python/")` because the first segment is empty.
pub(super) fn detect_python_package_directory(file_paths: &[String]) -> Option<String> {
    for file_path in file_paths {
        // Look for files under python/ directory structure
        if let Some(stripped) = file_path.strip_prefix("python/") {
            // Extract the package directory (e.g., "ansible/" from "python/ansible/lib/...")
            if let Some(slash_pos) = stripped.find('/') {
                let package_dir = format!("python/{}", &stripped[..slash_pos]);
                return Some(package_dir);
            } else if !stripped.is_empty() {
                // Handle case where the path is just "python/package_name"
                let package_dir = format!("python/{stripped}");
                return Some(package_dir);
            }
            // A bare "python/" entry has no package component; keep scanning.
        }
    }
    None
}
/// Clean up remaining Python runtime artifacts
///
/// After removing tracked files, this removes any remaining runtime artifacts
/// (e.g., __pycache__, .pyc files) that weren't explicitly tracked
pub(super) async fn cleanup_python_runtime_artifacts(
    transition: &StateTransition,
    python_dir: &str,
) -> Result<(), Error> {
    let python_staging_dir = transition.slot_path.join(python_dir);
    if python_staging_dir.exists() {
        // Check if directory still has content (runtime artifacts)
        if let Ok(mut entries) = tokio::fs::read_dir(&python_staging_dir).await {
            if entries.next_entry().await?.is_some() {
                // Directory is not empty, remove remaining runtime artifacts
                // wholesale; all tracked entries were already removed earlier.
                tokio::fs::remove_dir_all(&python_staging_dir)
                    .await
                    .map_err(|e| InstallError::FilesystemError {
                        operation: "cleanup_python_runtime_artifacts".to_string(),
                        path: python_staging_dir.display().to_string(),
                        message: e.to_string(),
                    })?;
            }
        }
    }
    Ok(())
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/atomic/transition.rs | crates/install/src/atomic/transition.rs | //! State transition management for atomic installations
use sps2_events::EventSender;
use sps2_hash::FileHashResult;
use sps2_state::{FileReference, PackageRef, StateManager};
use sps2_types::state::SlotId;
use std::path::PathBuf;
use uuid::Uuid;
/// State transition for atomic operations
///
/// This is now a simple data container that holds information about
/// a pending state transition. The actual commit logic is handled
/// by the `StateManager` using two-phase commit.
pub struct StateTransition {
/// Staging state ID
pub staging_id: Uuid,
/// Parent state ID
pub parent_id: Option<Uuid>,
/// Slot that will hold the prepared state
pub staging_slot: SlotId,
/// Filesystem path to the staging slot
pub slot_path: PathBuf,
/// Package references to be added during commit
pub package_refs: Vec<PackageRef>,
/// File references for file-level storage
pub file_references: Vec<(i64, FileReference)>, // (package_id, file_reference)
/// Pending file hashes to be converted to file references after we have package IDs
pub pending_file_hashes: Vec<(sps2_resolver::PackageId, Vec<FileHashResult>)>,
/// Event sender for progress reporting
pub event_sender: Option<EventSender>,
/// Operation type (install, uninstall, etc.)
pub operation: String,
}
impl StateTransition {
/// Create new state transition
///
/// # Errors
///
/// Returns an error if getting current state ID fails.
pub async fn new(
state_manager: &StateManager,
operation: String,
) -> Result<Self, sps2_errors::Error> {
let staging_id = Uuid::new_v4();
let parent_id = Some(state_manager.get_current_state_id().await?);
let staging_slot = state_manager.inactive_slot().await;
let slot_path = state_manager.ensure_slot_dir(staging_slot).await?;
Ok(Self {
staging_id,
parent_id,
staging_slot,
slot_path,
package_refs: Vec::new(),
file_references: Vec::new(),
pending_file_hashes: Vec::new(),
event_sender: None,
operation,
})
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/atomic/mod.rs | crates/install/src/atomic/mod.rs | //! Atomic installation operations using APFS clonefile and state transitions
//!
//! This module provides atomic installation capabilities with:
//! - APFS-optimized file operations for instant, space-efficient copies
//! - Hard link creation for efficient package linking
//! - State transitions with rollback support
//! - Platform-specific filesystem optimizations
pub mod fs;
pub mod installer;
pub mod package;
pub mod transition;
// Re-export main public API
pub use installer::AtomicInstaller;
pub use transition::StateTransition;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/atomic/package.rs | crates/install/src/atomic/package.rs | //! Package management operations for atomic installation
//!
//! This module handles:
//! - Carrying forward packages from parent states
//! - Syncing staging slots with parent state
//! - Installing packages to staging
//! - Removing packages from staging
use crate::atomic::fs;
use crate::atomic::transition::StateTransition;
use crate::{InstallResult, PreparedPackage};
use sps2_errors::{Error, InstallError};
use sps2_hash::Hash;
use sps2_resolver::{PackageId, ResolvedNode};
use sps2_state::{file_queries_runtime, PackageRef, StateManager};
use sps2_store::PackageStore;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use uuid::Uuid;
/// Carry forward packages from parent state, excluding specified packages
///
/// This function registers package references for packages that are unchanged
/// between the parent state and the new staging state.
pub(super) fn carry_forward_packages(
transition: &mut StateTransition,
parent_packages: &[sps2_state::models::Package],
exclude_names: &HashSet<String>,
) {
if transition.parent_id.is_none() {
return;
}
for pkg in parent_packages {
if exclude_names.contains(&pkg.name) {
continue;
}
let package_ref = PackageRef {
state_id: transition.staging_id,
package_id: PackageId::new(pkg.name.clone(), pkg.version()),
hash: pkg.hash.clone(),
size: pkg.size,
};
transition.package_refs.push(package_ref);
}
}
/// Sync staging slot to a specific target state
///
/// This ensures the staging slot mirrors an arbitrary target state by:
/// - Fast-path: if slot already has the target state, return immediately (O(1))
/// - Otherwise: diff-based sync removing/linking only deltas (O(changes))
///
/// This is the core optimization that makes rollback as fast as install/uninstall.
///
/// # Errors
///
/// Returns an error if filesystem operations or state manager operations fail.
pub(super) async fn sync_slot_to_state(
state_manager: &StateManager,
store: &PackageStore,
transition: &mut StateTransition,
target_state_id: Uuid,
target_packages: &[sps2_state::models::Package],
) -> Result<(), Error> {
let slot_state = state_manager.slot_state(transition.staging_slot).await;
// Fast path: slot already contains the exact target state
if slot_state == Some(target_state_id) {
return Ok(());
}
let slot_packages = if let Some(slot_state_id) = slot_state {
state_manager
.get_installed_packages_in_state(&slot_state_id)
.await?
} else {
Vec::new()
};
let target_keys: HashSet<String> = target_packages
.iter()
.map(|pkg| format!("{}::{}", pkg.name, pkg.version))
.collect();
let slot_map: HashMap<String, sps2_state::models::Package> = slot_packages
.into_iter()
.map(|pkg| (format!("{}::{}", pkg.name, pkg.version), pkg))
.collect();
// Remove packages that are no longer present in target state
for (key, pkg) in &slot_map {
if !target_keys.contains(key) {
remove_package_from_staging(state_manager, transition, pkg).await?;
}
}
// Link packages that are present in target state but missing from slot
for pkg in target_packages {
let key = format!("{}::{}", pkg.name, pkg.version);
if slot_map.contains_key(&key) {
continue;
}
let hash = Hash::from_hex(&pkg.hash).map_err(|e| {
Error::from(InstallError::AtomicOperationFailed {
message: format!(
"invalid package hash for {}-{} during slot sync: {e}",
pkg.name, pkg.version
),
})
})?;
let store_path = store.package_path(&hash);
let package_id = PackageId::new(pkg.name.clone(), pkg.version());
link_package_to_staging(transition, &store_path, &package_id, false).await?;
}
state_manager
.set_slot_state(transition.staging_slot, Some(target_state_id))
.await?;
Ok(())
}
/// Sync staging slot with parent state
///
/// Convenience wrapper around `sync_slot_to_state` for the common case
/// where we want to sync to the transition's parent state.
///
/// # Errors
///
/// Returns an error if filesystem operations or state manager operations fail.
pub(super) async fn sync_slot_with_parent(
state_manager: &StateManager,
store: &PackageStore,
transition: &mut StateTransition,
parent_packages: &[sps2_state::models::Package],
) -> Result<(), Error> {
let Some(parent_state) = transition.parent_id else {
// No prior state to mirror; ensure slot metadata is cleared.
state_manager
.set_slot_state(transition.staging_slot, None)
.await?;
return Ok(());
};
sync_slot_to_state(
state_manager,
store,
transition,
parent_state,
parent_packages,
)
.await
}
/// Install a single package to staging directory
///
/// This function:
/// - Validates prepared package data
/// - Handles package upgrades by removing old versions
/// - Ensures store references exist
/// - Links package files to staging
/// - Registers package references
///
/// # Errors
///
/// Returns an error if package data is missing, filesystem operations fail,
/// or state manager operations fail.
pub(super) async fn install_package_to_staging(
state_manager: &StateManager,
transition: &mut StateTransition,
package_id: &PackageId,
node: &ResolvedNode,
prepared_package: Option<&PreparedPackage>,
prior_package: Option<&sps2_state::models::Package>,
result: &mut InstallResult,
) -> Result<(), Error> {
// Install the package files (both Download and Local actions are handled identically)
let action_name = match &node.action {
sps2_resolver::NodeAction::Download => "downloaded",
sps2_resolver::NodeAction::Local => "local",
};
let prepared = prepared_package.ok_or_else(|| {
InstallError::AtomicOperationFailed {
message: format!(
"Missing prepared package data for {} package {}-{}. This indicates a bug in ParallelExecutor.",
action_name, package_id.name, package_id.version
),
}
})?;
let hash = &prepared.hash;
let store_path = &prepared.store_path;
let size = prepared.size;
let store_hash_hex = hash.to_hex();
let package_hash_hex = prepared.package_hash.as_ref().map(sps2_hash::Hash::to_hex);
let mut was_present = false;
let mut version_changed = false;
if let Some(existing) = prior_package {
was_present = true;
let existing_version = existing.version();
if existing_version != package_id.version {
version_changed = true;
remove_package_from_staging(state_manager, transition, existing).await?;
}
}
// Load package from the prepared store path
let _stored_package = sps2_store::StoredPackage::load(store_path).await?;
// Ensure store_refs entry exists before adding to package_map
let size_i64 = i64::try_from(size).map_err(|_| {
Error::from(InstallError::AtomicOperationFailed {
message: format!(
"Package size {} exceeds maximum supported size for {}-{}",
size, package_id.name, package_id.version
),
})
})?;
state_manager
.ensure_store_ref(&store_hash_hex, size_i64)
.await?;
// Ensure package is in package_map for future lookups
state_manager
.add_package_map(
&package_id.name,
&package_id.version.to_string(),
&store_hash_hex,
package_hash_hex.as_deref(),
)
.await?;
// Link package files to staging
let (_, file_hashes) =
link_package_to_staging(transition, store_path, package_id, true).await?;
// Store file hashes if we got them
if let Some(hashes) = file_hashes {
transition
.pending_file_hashes
.push((package_id.clone(), hashes));
}
// Add the package reference
let package_ref = PackageRef {
state_id: transition.staging_id,
package_id: package_id.clone(),
hash: store_hash_hex.clone(),
size: size_i64,
};
transition.package_refs.push(package_ref);
if was_present && version_changed {
result.add_updated(package_id.clone());
} else {
result.add_installed(package_id.clone());
}
Ok(())
}
/// Link package from store to staging directory
///
/// This is a wrapper around the `fs::link_package_to_staging` function that
/// maintains backward compatibility with the existing installer code.
///
/// # Errors
///
/// Returns an error if the package cannot be loaded or linked.
async fn link_package_to_staging(
transition: &mut StateTransition,
store_path: &Path,
package_id: &PackageId,
record_hashes: bool,
) -> Result<(bool, Option<Vec<sps2_hash::FileHashResult>>), Error> {
fs::link_package_to_staging(transition, store_path, package_id, record_hashes).await
}
/// Remove package files from staging directory
///
/// This function:
/// - Queries the database for all files belonging to the package
/// - Removes files in safe order (symlinks, regular files, directories)
/// - Cleans up Python runtime artifacts if applicable
///
/// # Errors
///
/// Returns an error if database queries fail or filesystem operations fail.
pub(super) async fn remove_package_from_staging(
state_manager: &StateManager,
transition: &mut StateTransition,
package: &sps2_state::models::Package,
) -> Result<(), Error> {
// Get all files belonging to this package from the database
let state_id =
Uuid::parse_str(&package.state_id).map_err(|e| InstallError::AtomicOperationFailed {
message: format!(
"failed to parse associated state ID for package {}: {e}",
package.name
),
})?;
let mut tx = state_manager.begin_transaction().await?;
let entries = file_queries_runtime::get_package_file_entries_by_name(
&mut tx,
&state_id,
&package.name,
&package.version,
)
.await?;
tx.commit().await?;
let file_paths: Vec<String> = entries
.into_iter()
.map(|entry| entry.relative_path)
.collect();
// Detect if this is a Python package for later cleanup
let python_package_dir = fs::detect_python_package_directory(&file_paths);
// Remove all tracked files using the fs module
fs::remove_tracked_entries(transition, &file_paths).await?;
// After removing all tracked files, clean up any remaining Python runtime artifacts
if let Some(python_dir) = python_package_dir {
fs::cleanup_python_runtime_artifacts(transition, &python_dir).await?;
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
use tokio::fs as afs;
async fn mk_env() -> (TempDir, StateManager, PackageStore) {
let td = TempDir::new().expect("td");
let state = StateManager::new(td.path()).await.expect("state");
let store_base = td.path().join("store");
afs::create_dir_all(&store_base).await.unwrap();
let store = PackageStore::new(store_base);
(td, state, store)
}
#[tokio::test]
async fn test_carry_forward_excludes_specified_packages() {
let (_td, state, _store) = mk_env().await;
let mut transition = StateTransition::new(&state, "test".to_string())
.await
.unwrap();
let parent_packages = vec![
sps2_state::models::Package {
id: 0,
state_id: transition.staging_id.to_string(),
name: "pkg-a".to_string(),
version: "1.0.0".to_string(),
hash: "abc123".to_string(),
size: 1000,
installed_at: chrono::Utc::now().timestamp(),
venv_path: None,
},
sps2_state::models::Package {
id: 0,
state_id: transition.staging_id.to_string(),
name: "pkg-b".to_string(),
version: "2.0.0".to_string(),
hash: "def456".to_string(),
size: 2000,
installed_at: chrono::Utc::now().timestamp(),
venv_path: None,
},
];
let mut exclude_names = HashSet::new();
exclude_names.insert("pkg-a".to_string());
carry_forward_packages(&mut transition, &parent_packages, &exclude_names);
// Should only carry forward pkg-b
assert_eq!(transition.package_refs.len(), 1);
assert_eq!(transition.package_refs[0].package_id.name, "pkg-b");
}
#[tokio::test]
async fn test_carry_forward_all_when_no_exclusions() {
let (_td, state, _store) = mk_env().await;
let mut transition = StateTransition::new(&state, "test".to_string())
.await
.unwrap();
let parent_packages = vec![
sps2_state::models::Package {
id: 0,
state_id: transition.staging_id.to_string(),
name: "pkg-a".to_string(),
version: "1.0.0".to_string(),
hash: "abc123".to_string(),
size: 1000,
installed_at: chrono::Utc::now().timestamp(),
venv_path: None,
},
sps2_state::models::Package {
id: 0,
state_id: transition.staging_id.to_string(),
name: "pkg-b".to_string(),
version: "2.0.0".to_string(),
hash: "def456".to_string(),
size: 2000,
installed_at: chrono::Utc::now().timestamp(),
venv_path: None,
},
];
let exclude_names = HashSet::new();
carry_forward_packages(&mut transition, &parent_packages, &exclude_names);
// Should carry forward both packages
assert_eq!(transition.package_refs.len(), 2);
}
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/prepare/worker.rs | crates/install/src/prepare/worker.rs | //! Worker functions for package processing
use crate::PreparedPackage;
use dashmap::DashMap;
use sps2_errors::{Error, InstallError};
use sps2_events::events::{LifecycleAcquisitionSource, LifecycleEvent};
use sps2_events::{AppEvent, EventEmitter, FailureContext, GeneralEvent};
use sps2_net::{PackageDownloadConfig, PackageDownloader};
use sps2_resolver::{NodeAction, PackageId, ResolvedNode};
use sps2_state::StateManager;
use sps2_store::PackageStore;
use std::sync::Arc;
use tokio::sync::OwnedSemaphorePermit;
use tokio::time::Duration;
use super::context::ExecutionContext;
pub(crate) struct ProcessPackageArgs {
pub package_id: PackageId,
pub node: ResolvedNode,
pub context: ExecutionContext,
pub store: PackageStore,
pub state_manager: StateManager,
pub timeout_duration: Duration,
pub prepared_packages: Arc<DashMap<PackageId, PreparedPackage>>,
pub permit: OwnedSemaphorePermit,
}
/// Process a single package (download/local)
///
/// Handles both download and local package processing with comprehensive error handling.
/// Function length is due to two distinct workflows (download vs local) with event emission.
#[allow(clippy::too_many_lines)]
pub(crate) async fn process_package(args: ProcessPackageArgs) -> Result<PackageId, Error> {
let ProcessPackageArgs {
package_id,
node,
context,
store,
state_manager,
timeout_duration,
prepared_packages,
permit: _permit,
} = args;
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Processing package {}-{} with action {:?}",
package_id.name, package_id.version, node.action
),
context: std::collections::HashMap::new(),
}));
context.emit(AppEvent::Lifecycle(LifecycleEvent::install_started(
package_id.name.clone(),
package_id.version.clone(),
)));
match node.action {
NodeAction::Download => {
if let Some(url) = &node.url {
// Download package with timeout and add to store (no validation)
let download_result = tokio::time::timeout(
timeout_duration,
download_package_only(
url,
&package_id,
&node,
&store,
&state_manager,
&context,
&prepared_packages,
),
)
.await;
match download_result {
Ok(Ok(size)) => {
context.emit(AppEvent::Lifecycle(LifecycleEvent::acquisition_completed(
package_id.name.clone(),
package_id.version.clone(),
LifecycleAcquisitionSource::Remote {
url: url.clone(),
mirror_priority: 0,
},
size,
)));
}
Ok(Err(e)) => {
let failure = FailureContext::from_error(&e);
context.emit(AppEvent::Lifecycle(LifecycleEvent::acquisition_failed(
package_id.name.clone(),
package_id.version.clone(),
LifecycleAcquisitionSource::Remote {
url: url.clone(),
mirror_priority: 0,
},
failure,
)));
return Err(e);
}
Err(_) => {
let err: Error = InstallError::DownloadTimeout {
package: package_id.name.clone(),
url: url.to_string(),
timeout_seconds: timeout_duration.as_secs(),
}
.into();
let failure = FailureContext::from_error(&err);
context.emit(AppEvent::Lifecycle(LifecycleEvent::acquisition_failed(
package_id.name.clone(),
package_id.version.clone(),
LifecycleAcquisitionSource::Remote {
url: url.clone(),
mirror_priority: 0,
},
failure,
)));
return Err(err);
}
}
} else {
return Err(InstallError::MissingDownloadUrl {
package: package_id.name.clone(),
}
.into());
}
}
NodeAction::Local => {
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Processing local package {}-{}, path: {:?}",
package_id.name, package_id.version, node.path
),
context: std::collections::HashMap::new(),
}));
if let Some(path) = &node.path {
// Check if this is an already installed package (empty path)
if path.as_os_str().is_empty() {
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Package {}-{} is already installed, skipping",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
// For already installed packages, just mark as completed
context.emit(AppEvent::Lifecycle(LifecycleEvent::install_completed(
package_id.name.clone(),
package_id.version.clone(),
0,
)));
return Ok(package_id);
}
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!("DEBUG: Adding local package to store: {}", path.display()),
context: std::collections::HashMap::new(),
}));
// For local packages, add to store and prepare data
let stored_package = store.add_package(path).await?;
if let Some(hash) = stored_package.hash() {
let size = stored_package.size().await?;
let store_path = stored_package.path().to_path_buf();
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Local package stored with hash {} at {}",
hash.to_hex(),
store_path.display()
),
context: std::collections::HashMap::new(),
}));
let prepared_package = PreparedPackage {
hash: hash.clone(),
size,
store_path,
is_local: true,
package_hash: None,
};
prepared_packages.insert(package_id.clone(), prepared_package);
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Added prepared package for {}-{}",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
context.emit(AppEvent::Lifecycle(LifecycleEvent::install_completed(
package_id.name.clone(),
package_id.version.clone(),
0, // TODO: Count actual files
)));
} else {
return Err(InstallError::AtomicOperationFailed {
message: "failed to get hash from local package".to_string(),
}
.into());
}
} else {
return Err(InstallError::MissingLocalPath {
package: package_id.name.clone(),
}
.into());
}
}
}
Ok(package_id)
}
/// Download a package and add to store (no validation - `AtomicInstaller` handles that)
///
/// Complex download workflow including store caching, signature verification, and deduplication.
/// Function length reflects the comprehensive error handling and security checks required.
#[allow(clippy::too_many_lines)]
pub(crate) async fn download_package_only(
url: &str,
package_id: &PackageId,
node: &ResolvedNode,
store: &PackageStore,
state_manager: &StateManager,
context: &ExecutionContext,
prepared_packages: &Arc<DashMap<PackageId, PreparedPackage>>,
) -> Result<u64, Error> {
if let Some(size) = try_prepare_from_store(
package_id,
node,
store,
state_manager,
context,
prepared_packages,
)
.await?
{
return Ok(size);
}
context.emit(AppEvent::Lifecycle(LifecycleEvent::acquisition_started(
package_id.name.clone(),
package_id.version.clone(),
LifecycleAcquisitionSource::Remote {
url: url.to_string(),
mirror_priority: 0,
},
)));
// Create a temporary directory for the download
let temp_dir = tempfile::tempdir().map_err(|e| InstallError::TempFileError {
message: e.to_string(),
})?;
// Use high-level PackageDownloader to benefit from hash/signature handling
let downloader = PackageDownloader::new(
PackageDownloadConfig::default(),
sps2_events::ProgressManager::new(),
)?;
let tx = context
.event_sender()
.cloned()
.unwrap_or_else(|| sps2_events::channel().0);
let download_result = downloader
.download_package(
&package_id.name,
&package_id.version,
url,
node.signature_url.as_deref(),
temp_dir.path(),
node.expected_hash.as_ref(),
String::new(), // internal tracker
None,
&tx,
)
.await?;
// Enforce signature policy if configured
if let Some(policy) = context.security_policy() {
if policy.verify_signatures && !policy.allow_unsigned {
// If a signature was expected (URL provided), require verification
if node.signature_url.is_some() {
if !download_result.signature_verified {
return Err(sps2_errors::Error::Signing(
sps2_errors::SigningError::VerificationFailed {
reason: "package signature could not be verified".to_string(),
},
));
}
} else {
return Err(sps2_errors::Error::Signing(
sps2_errors::SigningError::InvalidSignatureFormat(
"missing signature for package".to_string(),
),
));
}
}
}
let previous_store_hash = if context.force_redownload() {
if let Some(expected_hash) = node.expected_hash.as_ref() {
state_manager
.get_store_hash_for_package_hash(&expected_hash.to_hex())
.await?
.map(|store_hash_hex| sps2_hash::Hash::from_hex(&store_hash_hex))
.transpose()?
} else {
None
}
} else {
None
};
// Add to store and prepare package data
let mut stored_package = store
.add_package_from_file(
&download_result.package_path,
&package_id.name,
&package_id.version,
)
.await?;
if let Some(prev_hash) = previous_store_hash {
if let Some(current_hash) = stored_package.hash() {
if current_hash == prev_hash {
store.remove_package(&prev_hash).await?;
stored_package = store
.add_package_from_file(
&download_result.package_path,
&package_id.name,
&package_id.version,
)
.await?;
} else {
store.remove_package(&prev_hash).await?;
}
}
}
if let Some(hash) = stored_package.hash() {
let size = stored_package.size().await?;
let store_path = stored_package.path().to_path_buf();
let prepared_package = PreparedPackage {
hash: hash.clone(),
size,
store_path,
is_local: false,
package_hash: node.expected_hash.clone(),
};
prepared_packages.insert(package_id.clone(), prepared_package);
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"Package {}-{} downloaded and stored with hash {} (prepared for installation)",
package_id.name,
package_id.version,
hash.to_hex()
),
context: std::collections::HashMap::new(),
}));
Ok(size)
} else {
Err(InstallError::AtomicOperationFailed {
message: "failed to get hash from downloaded package".to_string(),
}
.into())
}
}
pub(crate) async fn try_prepare_from_store(
package_id: &PackageId,
node: &ResolvedNode,
store: &PackageStore,
state_manager: &StateManager,
context: &ExecutionContext,
prepared_packages: &Arc<DashMap<PackageId, PreparedPackage>>,
) -> Result<Option<u64>, Error> {
if context.force_redownload() {
return Ok(None);
}
let Some(expected_hash) = node.expected_hash.as_ref() else {
return Ok(None);
};
let Some(store_hash_hex) = state_manager
.get_store_hash_for_package_hash(&expected_hash.to_hex())
.await?
else {
return Ok(None);
};
let store_hash = sps2_hash::Hash::from_hex(&store_hash_hex)?;
let Some(stored_package) = store.load_package_if_exists(&store_hash).await? else {
return Ok(None);
};
context.emit(AppEvent::Lifecycle(LifecycleEvent::acquisition_started(
package_id.name.clone(),
package_id.version.clone(),
LifecycleAcquisitionSource::StoreCache {
hash: expected_hash.to_hex(),
},
)));
let size = stored_package.size().await?;
let store_path = stored_package.path().to_path_buf();
let prepared_package = PreparedPackage {
hash: store_hash,
size,
store_path,
is_local: false,
package_hash: Some(expected_hash.clone()),
};
prepared_packages.insert(package_id.clone(), prepared_package);
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"Reusing stored package {}-{} with hash {}",
package_id.name,
package_id.version,
expected_hash.to_hex()
),
context: std::collections::HashMap::new(),
}));
context.emit(AppEvent::Lifecycle(LifecycleEvent::acquisition_completed(
package_id.name.clone(),
package_id.version.clone(),
LifecycleAcquisitionSource::StoreCache {
hash: expected_hash.to_hex(),
},
size,
)));
Ok(Some(size))
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/prepare/executor.rs | crates/install/src/prepare/executor.rs | //! Parallel executor for package operations
use crate::PreparedPackage;
use crossbeam::queue::SegQueue;
use dashmap::DashMap;
use sps2_config::ResourceManager;
use sps2_errors::{Error, InstallError};
use sps2_events::{AppEvent, EventEmitter, GeneralEvent};
use sps2_resolver::{ExecutionPlan, NodeAction, PackageId, ResolvedNode};
use sps2_state::StateManager;
use sps2_store::PackageStore;
use std::collections::HashMap;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;
use tokio::task::JoinHandle;
use tokio::time::{Duration, Instant};
use super::context::ExecutionContext;
use super::worker::{process_package, ProcessPackageArgs};
/// Parallel executor for package operations
pub struct ParallelExecutor {
/// Package store
store: PackageStore,
/// State manager for `package_map` updates
state_manager: StateManager,
/// Resource manager for concurrency control
resources: Arc<ResourceManager>,
/// Download timeout
download_timeout: Duration,
}
impl ParallelExecutor {
/// Create new parallel executor
///
/// # Errors
///
/// Returns an error if network client initialization fails.
pub fn new(
store: PackageStore,
state_manager: StateManager,
resources: Arc<ResourceManager>,
) -> Result<Self, Error> {
Ok(Self {
store,
state_manager,
resources,
download_timeout: Duration::from_secs(300), // 5 minutes
})
}
/// Set download timeout
#[must_use]
pub fn with_timeout(mut self, timeout: Duration) -> Self {
self.download_timeout = timeout;
self
}
/// Execute packages in parallel according to execution plan
///
/// # Errors
///
/// Returns an error if package processing fails, download fails, or concurrency limits are exceeded.
///
/// Core orchestration loop managing parallel package processing with dependency tracking.
/// Function length is necessary to maintain state consistency across the execution lifecycle.
#[allow(clippy::too_many_lines)]
pub async fn execute_parallel(
&self,
execution_plan: &ExecutionPlan,
resolved_packages: &HashMap<PackageId, ResolvedNode>,
context: &ExecutionContext,
) -> Result<HashMap<PackageId, PreparedPackage>, Error> {
let ready_queue = Arc::new(SegQueue::new());
let inflight = Arc::new(DashMap::new());
let prepared_packages = Arc::new(DashMap::new());
let graph = Self::build_execution_graph(self, execution_plan, resolved_packages);
// Initialize ready queue with packages that have no dependencies
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Execution plan has {} ready packages",
execution_plan.ready_packages().len()
),
context: std::collections::HashMap::from([(
"ready_packages".to_string(),
execution_plan
.ready_packages()
.iter()
.map(|id| format!("{}-{}", id.name, id.version))
.collect::<Vec<_>>()
.join(", "),
)]),
}));
for package_id in execution_plan.ready_packages() {
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Processing ready package {}-{}",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
// Only add packages with in_degree 0 from our graph
if let Some(node) = graph.get(&package_id) {
let in_degree = node.in_degree.load(std::sync::atomic::Ordering::Relaxed);
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Package {}-{} has in_degree {}",
package_id.name, package_id.version, in_degree
),
context: std::collections::HashMap::new(),
}));
if in_degree == 0 {
ready_queue.push(package_id.clone());
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Added package {}-{} to ready queue",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
}
} else {
ready_queue.push(package_id.clone());
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Added package {}-{} to ready queue (not in graph)",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
}
}
// Process packages until completion with overall timeout
let overall_timeout = Duration::from_secs(1800); // 30 minutes total
let start_time = Instant::now();
let mut no_progress_iterations = 0;
let mut last_completed_count = 0;
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Starting main processing loop. execution_plan.is_complete()={}, inflight.is_empty()={}",
execution_plan.is_complete(), inflight.is_empty()
),
context: std::collections::HashMap::new(),
}));
// Process packages until completion - ensure we process ready packages even if execution_plan reports complete
while (!execution_plan.is_complete() || !inflight.is_empty()) || !ready_queue.is_empty() {
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Loop iteration. execution_plan.is_complete()={}, inflight.is_empty()={}",
execution_plan.is_complete(), inflight.is_empty()
),
context: std::collections::HashMap::new(),
}));
// Check overall timeout
if start_time.elapsed() > overall_timeout {
return Err(InstallError::OperationTimeout {
message: "Overall installation timeout exceeded (30 minutes)".to_string(),
}
.into());
}
// Track progress to detect infinite loops
let current_completed = execution_plan.completed_count();
if current_completed == last_completed_count {
no_progress_iterations += 1;
if no_progress_iterations > 600 {
// 60 seconds of no progress (100 * 10ms sleep)
return Err(InstallError::NoProgress {
message: "No progress made in package installation for 60 seconds"
.to_string(),
}
.into());
}
} else {
no_progress_iterations = 0;
last_completed_count = current_completed;
}
// Try to start new tasks from ready queue
while let Some(package_id) = ready_queue.pop() {
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Popped package {}-{} from ready queue",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
if inflight.contains_key(&package_id) {
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Package {}-{} already in flight, skipping",
package_id.name, package_id.version
),
context: std::collections::HashMap::new(),
}));
continue; // Already in flight
}
let permit = self.resources.acquire_download_permit().await?;
let node = resolved_packages.get(&package_id).ok_or_else(|| {
InstallError::PackageNotFound {
package: package_id.name.clone(),
}
})?;
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Starting task for package {}-{} with action {:?}",
package_id.name, package_id.version, node.action
),
context: std::collections::HashMap::new(),
}));
let handle = self.spawn_package_task(
package_id.clone(),
node.clone(),
context.clone(),
permit,
prepared_packages.clone(),
);
inflight.insert(package_id, handle);
}
// Wait for at least one task to complete
if !inflight.is_empty() {
let completed_package = self.wait_for_completion(&inflight).await?;
// Notify execution plan and get newly ready packages
let newly_ready = execution_plan.complete_package(&completed_package);
for package_id in newly_ready {
ready_queue.push(package_id);
}
}
// Small delay to prevent busy waiting
tokio::time::sleep(Duration::from_millis(10)).await;
}
context.emit(AppEvent::General(GeneralEvent::DebugLog {
message: format!(
"DEBUG: Exited main processing loop. execution_plan.is_complete()={}, inflight.is_empty()={}, prepared_packages.len()={}",
execution_plan.is_complete(), inflight.is_empty(), prepared_packages.len()
),
context: std::collections::HashMap::new(),
}));
// Convert DashMap to HashMap and return prepared packages
let prepared_packages =
Arc::try_unwrap(prepared_packages).map_err(|_| InstallError::ConcurrencyError {
message: "failed to unwrap prepared packages Arc".to_string(),
})?;
let mut result = HashMap::new();
for entry in prepared_packages {
result.insert(entry.0, entry.1);
}
Ok(result)
}
/// Build execution graph for tracking
fn build_execution_graph(
_self: &Self,
execution_plan: &ExecutionPlan,
resolved_packages: &HashMap<PackageId, ResolvedNode>,
) -> HashMap<PackageId, ExecutionNode> {
let mut graph = HashMap::new();
for package_id in resolved_packages.keys() {
if let Some(metadata) = execution_plan.metadata(package_id) {
let node = ExecutionNode {
action: metadata.action.clone(),
in_degree: AtomicUsize::new(metadata.in_degree()),
parents: metadata.parents.clone(),
};
graph.insert(package_id.clone(), node);
}
}
graph
}
/// Spawn task for processing a single package
fn spawn_package_task(
&self,
package_id: PackageId,
node: ResolvedNode,
context: ExecutionContext,
permit: tokio::sync::OwnedSemaphorePermit,
prepared_packages: Arc<DashMap<PackageId, PreparedPackage>>,
) -> JoinHandle<Result<PackageId, Error>> {
let store = self.store.clone();
let state_manager = self.state_manager.clone();
let timeout_duration = self.download_timeout;
tokio::spawn(async move {
process_package(ProcessPackageArgs {
package_id,
node,
context,
store,
state_manager,
timeout_duration,
prepared_packages,
permit,
})
.await
})
}
    /// Wait for at least one task to complete
    ///
    /// Polls the in-flight task map every 50ms until some task's
    /// `JoinHandle` reports finished, then removes it and awaits the
    /// (already-completed) handle for its result. Returns the completed
    /// package id, the task's own error, or a timeout/join error.
    ///
    /// NOTE(review): this is a polling design; it scans the whole map each
    /// pass rather than using a completion channel. The finished key is
    /// cloned and the scan loop exited *before* `remove` is called —
    /// presumably to avoid removing while iterating the DashMap (which
    /// holds shard locks) — confirm before restructuring.
    async fn wait_for_completion(
        &self,
        inflight: &DashMap<PackageId, JoinHandle<Result<PackageId, Error>>>,
    ) -> Result<PackageId, Error> {
        let timeout_duration = Duration::from_secs(300); // 5 minutes per task
        let start_time = Instant::now();
        loop {
            // Check if overall timeout exceeded
            if start_time.elapsed() > timeout_duration {
                return Err(InstallError::TaskError {
                    message: "Task completion timeout exceeded (5 minutes)".to_string(),
                }
                .into());
            }
            // Check for completed tasks
            let mut completed = None;
            for entry in inflight {
                let package_id = entry.key();
                let handle = entry.value();
                if handle.is_finished() {
                    completed = Some(package_id.clone());
                    break;
                }
            }
            if let Some(package_id) = completed {
                // `remove` may return None if another caller raced us; in
                // that case we simply poll again.
                if let Some((_, handle)) = inflight.remove(&package_id) {
                    match handle.await {
                        // Task ran and succeeded.
                        Ok(Ok(completed_package)) => return Ok(completed_package),
                        // Task ran and returned its own error.
                        Ok(Err(e)) => return Err(e),
                        // Task panicked or was cancelled (join error).
                        Err(e) => {
                            return Err(InstallError::TaskError {
                                message: format!("Task failed for {}: {e}", package_id.name),
                            }
                            .into());
                        }
                    }
                }
            }
            // Small delay before checking again
            tokio::time::sleep(Duration::from_millis(50)).await;
        }
    }
}
/// Execution node for tracking dependencies
///
/// One entry per resolved package in the execution graph. `in_degree` is
/// atomic so the graph can be shared across tasks — presumably it is
/// decremented elsewhere as dependencies finish (not visible here; confirm
/// against the execution-plan code).
struct ExecutionNode {
    /// Action to perform (stored for future use in execution graph)
    #[allow(dead_code)]
    action: NodeAction,
    /// Remaining dependencies
    in_degree: AtomicUsize,
    /// Parent packages (for future dependency tracking, rollback, and error reporting)
    #[allow(dead_code)]
    parents: Vec<PackageId>,
}
#[cfg(test)]
mod tests {
    use super::*;
    use sps2_events::events::{LifecycleAcquisitionSource, LifecycleEvent, LifecycleStage};
    use sps2_events::AppEvent;
    use sps2_hash::{Hash as PackageHash, HashAlgorithm};
    use sps2_resolver::{DependencyGraph, ResolvedNode};
    use sps2_store::{create_package, PackageStore};
    use sps2_types::{Arch, Manifest, Version};
    use std::sync::Arc;
    use tempfile::TempDir;
    use tokio::fs as afs;
    use crate::prepare::context::ExecutionContext;
    use crate::prepare::worker::try_prepare_from_store;
    /// Create a temp directory holding a fresh `StateManager` and an empty
    /// `PackageStore` rooted inside it. The `TempDir` is returned so the
    /// directory lives for the duration of the test.
    async fn mk_env() -> (TempDir, StateManager, PackageStore) {
        let td = TempDir::new().expect("tempdir");
        let state = StateManager::new(td.path()).await.expect("state manager");
        let store_base = td.path().join("store");
        afs::create_dir_all(&store_base).await.expect("store dir");
        let store = PackageStore::new(store_base);
        (td, state, store)
    }
    /// Build a minimal `.sp` package (manifest + one payload file whose
    /// contents are the package name) and return the owning temp dir plus
    /// the package path.
    async fn create_sp(name: &str, version: &str) -> (TempDir, std::path::PathBuf) {
        let td = TempDir::new().expect("package dir");
        let src = td.path().join("src");
        afs::create_dir_all(&src).await.expect("src dir");
        let version = Version::parse(version).expect("valid version");
        let manifest = Manifest::new(name.to_string(), &version, 1, &Arch::Arm64);
        let manifest_path = src.join("manifest.toml");
        sps2_store::manifest_io::write_manifest(&manifest_path, &manifest)
            .await
            .expect("write manifest");
        let content_path = src.join("opt/pm/live/share");
        afs::create_dir_all(&content_path)
            .await
            .expect("content dir");
        afs::write(content_path.join("file.txt"), name.as_bytes())
            .await
            .expect("write file");
        let sp_path = td.path().join("pkg.sp");
        create_package(&src, &sp_path)
            .await
            .expect("create package");
        (td, sp_path)
    }
    // With `concurrent_downloads = 1`, the executor must fully complete
    // the first package before starting the second. Verified by checking
    // the ordering of lifecycle start/complete events.
    #[tokio::test]
    async fn download_permit_limits_parallelism() {
        let (_td, state, store) = mk_env().await;
        let (_pkg1_dir, pkg1_sp) = create_sp("pkg-a", "1.0.0").await;
        let (_pkg2_dir, pkg2_sp) = create_sp("pkg-b", "1.0.0").await;
        let node1 = ResolvedNode::local(
            "pkg-a".to_string(),
            Version::parse("1.0.0").unwrap(),
            pkg1_sp.clone(),
            vec![],
        );
        let node2 = ResolvedNode::local(
            "pkg-b".to_string(),
            Version::parse("1.0.0").unwrap(),
            pkg2_sp.clone(),
            vec![],
        );
        let pkg1_id = node1.package_id();
        let pkg2_id = node2.package_id();
        let mut resolved_packages = HashMap::new();
        resolved_packages.insert(pkg1_id.clone(), node1.clone());
        resolved_packages.insert(pkg2_id.clone(), node2.clone());
        let mut graph = DependencyGraph::new();
        graph.add_node(node1);
        graph.add_node(node2);
        let sorted = vec![pkg1_id.clone(), pkg2_id.clone()];
        let execution_plan = ExecutionPlan::from_sorted_packages(&sorted, &graph);
        // Force strict serialization: one permit for every resource kind.
        let limits = sps2_config::ResourceLimits {
            concurrent_downloads: 1,
            concurrent_decompressions: 1,
            concurrent_installations: 1,
            memory_usage: None,
        };
        let resources = Arc::new(sps2_config::ResourceManager::new(limits));
        let executor = ParallelExecutor::new(store, state, resources).expect("parallel executor");
        let (tx, mut rx) = sps2_events::channel();
        let context = ExecutionContext::new().with_event_sender(tx);
        executor
            .execute_parallel(&execution_plan, &resolved_packages, &context)
            .await
            .expect("execute parallel");
        // Collect ("start"/"complete", package) pairs in emission order.
        let mut sequence = Vec::new();
        while let Ok(message) = rx.try_recv() {
            if let AppEvent::Lifecycle(lifecycle_event) = message.event {
                match lifecycle_event {
                    LifecycleEvent::Install {
                        stage: LifecycleStage::Started,
                        context,
                        ..
                    } => {
                        sequence.push(("start", context.package));
                    }
                    LifecycleEvent::Install {
                        stage: LifecycleStage::Completed,
                        context,
                        ..
                    } => {
                        sequence.push(("complete", context.package));
                    }
                    _ => {}
                }
            }
        }
        let starts: Vec<_> = sequence
            .iter()
            .enumerate()
            .filter(|(_, (kind, _))| *kind == "start")
            .collect();
        let completes: Vec<_> = sequence
            .iter()
            .enumerate()
            .filter(|(_, (kind, _))| *kind == "complete")
            .collect();
        assert_eq!(starts.len(), 2, "expected two start events");
        assert_eq!(completes.len(), 2, "expected two completion events");
        assert!(
            starts[0].0 < completes[0].0,
            "first completion must follow first start"
        );
        assert!(
            completes[0].0 < starts[1].0,
            "second package should only start after first completes"
        );
    }
    // A package already in the store (and mapped in state) should be
    // reused instead of downloaded: correct size, hashes recorded, and a
    // StoreCache acquisition event emitted.
    #[tokio::test]
    async fn try_prepare_from_store_returns_package_when_available() {
        let (_td, state, store) = mk_env().await;
        let (_pkg_dir, pkg_sp) = create_sp("pkg-cache", "1.0.0").await;
        let stored_package = store.add_package(&pkg_sp).await.expect("store package");
        let store_hash = stored_package.hash().expect("hash");
        let expected_size = stored_package.size().await.expect("size");
        let package_hash = PackageHash::hash_file_with_algorithm(&pkg_sp, HashAlgorithm::Blake3)
            .await
            .expect("package hash");
        state
            .ensure_store_ref(
                &store_hash.to_hex(),
                i64::try_from(expected_size).unwrap_or(i64::MAX),
            )
            .await
            .expect("store ref");
        state
            .add_package_map(
                "pkg-cache",
                "1.0.0",
                &store_hash.to_hex(),
                Some(&package_hash.to_hex()),
            )
            .await
            .expect("package map insert");
        // Download node whose URL would fail if ever fetched — reuse must
        // short-circuit before any network access.
        let mut node = ResolvedNode::download(
            "pkg-cache".to_string(),
            Version::parse("1.0.0").unwrap(),
            "https://example.invalid/pkg-cache.sp".to_string(),
            vec![],
        );
        node.expected_hash = Some(package_hash.clone());
        let pkg_id = node.package_id();
        let prepared_packages = Arc::new(DashMap::new());
        let (tx, mut rx) = sps2_events::channel();
        let context = ExecutionContext::new().with_event_sender(tx);
        let size =
            try_prepare_from_store(&pkg_id, &node, &store, &state, &context, &prepared_packages)
                .await
                .expect("reuse succeeds")
                .expect("should reuse store package");
        assert_eq!(size, expected_size);
        let entry = prepared_packages
            .get(&pkg_id)
            .expect("prepared package present");
        assert_eq!(entry.hash, store_hash);
        assert_eq!(entry.size, expected_size);
        assert!(!entry.is_local);
        assert_eq!(entry.package_hash.as_ref(), Some(&package_hash));
        drop(entry);
        let mut saw_store_acquisition = false;
        while let Ok(message) = rx.try_recv() {
            if let AppEvent::Lifecycle(acq) = message.event {
                if matches!(
                    acq,
                    LifecycleEvent::Acquisition {
                        context: sps2_events::events::AcquisitionContext {
                            source: LifecycleAcquisitionSource::StoreCache { .. },
                            ..
                        },
                        ..
                    }
                ) {
                    saw_store_acquisition = true;
                }
            }
        }
        assert!(saw_store_acquisition, "expected store acquisition event");
    }
    // `force_redownload` must bypass store reuse entirely: no prepared
    // package is produced from the cache.
    #[tokio::test]
    async fn try_prepare_from_store_respects_force_download() {
        let (_td, state, store) = mk_env().await;
        let (_pkg_dir, pkg_sp) = create_sp("pkg-force", "1.0.0").await;
        let stored_package = store.add_package(&pkg_sp).await.expect("store package");
        let store_hash = stored_package.hash().expect("hash");
        let package_hash = PackageHash::hash_file_with_algorithm(&pkg_sp, HashAlgorithm::Blake3)
            .await
            .expect("package hash");
        state
            .ensure_store_ref(
                &store_hash.to_hex(),
                i64::try_from(stored_package.size().await.expect("size")).unwrap_or(i64::MAX),
            )
            .await
            .expect("store ref");
        state
            .add_package_map(
                "pkg-force",
                "1.0.0",
                &store_hash.to_hex(),
                Some(&package_hash.to_hex()),
            )
            .await
            .expect("package map insert");
        let mut node = ResolvedNode::download(
            "pkg-force".to_string(),
            Version::parse("1.0.0").unwrap(),
            "https://example.invalid/pkg-force.sp".to_string(),
            vec![],
        );
        node.expected_hash = Some(package_hash);
        let pkg_id = node.package_id();
        let prepared_packages = Arc::new(DashMap::new());
        let (tx, _rx) = sps2_events::channel();
        let context = ExecutionContext::new()
            .with_event_sender(tx)
            .with_force_redownload(true);
        let result =
            try_prepare_from_store(&pkg_id, &node, &store, &state, &context, &prepared_packages)
                .await
                .expect("call succeeds");
        assert!(result.is_none(), "expected force download to skip reuse");
        assert!(prepared_packages.is_empty());
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/prepare/mod.rs | crates/install/src/prepare/mod.rs | pub mod context;
pub mod executor;
pub mod worker;
pub use context::ExecutionContext;
pub use executor::ParallelExecutor;
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
alexykn/sps2 | https://github.com/alexykn/sps2/blob/a357a9ae7317314ef1605ce29b66f064bd6eb510/crates/install/src/prepare/context.rs | crates/install/src/prepare/context.rs | //! Execution context for parallel operations
use crate::SecurityPolicy;
use sps2_events::{EventEmitter, EventSender};
/// Execution context for parallel operations
///
/// Cloneable bundle of per-run settings handed to every worker task:
/// optional progress reporting, optional signature policy, and the
/// cache-bypass flag.
#[derive(Clone)]
pub struct ExecutionContext {
    /// Event sender for progress reporting; `None` silences emission
    event_sender: Option<EventSender>,
    /// Optional security policy for signature enforcement
    security_policy: Option<SecurityPolicy>,
    /// Whether downloads should bypass cache reuse
    force_redownload: bool,
}
impl ExecutionContext {
    /// Create a context with default settings: no event sender, no
    /// security policy, and store reuse enabled.
    #[must_use]
    pub fn new() -> Self {
        Self {
            event_sender: None,
            security_policy: None,
            force_redownload: false,
        }
    }
    /// Attach an event sender for progress reporting (builder style).
    #[must_use]
    pub fn with_event_sender(self, event_sender: EventSender) -> Self {
        Self {
            event_sender: Some(event_sender),
            ..self
        }
    }
    /// Attach a security policy used for signature enforcement.
    #[must_use]
    pub fn with_security_policy(self, policy: SecurityPolicy) -> Self {
        Self {
            security_policy: Some(policy),
            ..self
        }
    }
    /// Choose whether downloads must ignore cached packages.
    #[must_use]
    pub fn with_force_redownload(self, force: bool) -> Self {
        Self {
            force_redownload: force,
            ..self
        }
    }
    /// True when downstream logic must bypass store reuse.
    #[must_use]
    pub fn force_redownload(&self) -> bool {
        self.force_redownload
    }
    /// The configured security policy, if any.
    pub(crate) fn security_policy(&self) -> Option<SecurityPolicy> {
        self.security_policy
    }
}
/// Lets the context be used anywhere an event emitter is expected.
/// When no sender was configured this returns `None` — presumably the
/// trait's emit helpers treat that as a no-op (confirm in `sps2_events`).
impl EventEmitter for ExecutionContext {
    fn event_sender(&self) -> Option<&EventSender> {
        self.event_sender.as_ref()
    }
}
impl Default for ExecutionContext {
    /// Equivalent to [`ExecutionContext::new`].
    fn default() -> Self {
        Self::new()
    }
}
| rust | BSD-3-Clause | a357a9ae7317314ef1605ce29b66f064bd6eb510 | 2026-01-04T20:17:02.345249Z | false |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/ffir.rs | font-forge-tool/src/ffir.rs | use std::borrow::Cow;
use itertools::Itertools;
use crate::NasinNanpaVariation;
/// An encoding position (either a number, or `None` which prints `-1`)
#[derive(Default, Clone)]
pub enum EncPos {
    /// A concrete encoding slot
    Pos(usize),
    /// No slot; rendered as `-1` by `gen`
    #[default]
    None,
}
impl EncPos {
    /// Advance a concrete position by one; `None` stays `None`.
    fn inc(&mut self) {
        if let EncPos::Pos(p) = self {
            *p += 1;
        }
    }
    /// Render the position for the font file (`-1` when absent).
    fn gen(&self) -> String {
        match self {
            EncPos::Pos(p) => p.to_string(),
            EncPos::None => String::from("-1"),
        }
    }
}
/// An encoding, consisting of a fontforge position and an encoding position
#[derive(Default, Clone)]
pub struct Encoding {
    /// Slot index inside the fontforge file
    pub ff_pos: usize,
    /// Encoding slot (rendered as `-1` when `EncPos::None`)
    pub enc_pos: EncPos,
}
impl Encoding {
pub fn new(ff_pos: usize, enc_pos: EncPos) -> Self {
Self { ff_pos, enc_pos }
}
pub fn gen(&self) -> String {
format!(
"Encoding: {ff_pos} {enc_pos} {ff_pos}",
ff_pos = self.ff_pos,
enc_pos = self.enc_pos.gen(),
)
}
pub fn gen_ref<'a>(&self, position: Cow<'a, str>) -> String {
let Encoding { ff_pos, enc_pos } = self;
format!(
"Refer: {ff_pos} {enc_pos} {position}",
enc_pos = enc_pos.gen(),
position = position,
)
}
}
/// A glyph reference (with positional data)
#[derive(Default, Clone)]
pub struct Ref<'a> {
    /// Encoding of the referenced glyph
    ref_glyph: Encoding,
    /// Textual position payload appended to the `Refer:` line
    position: Cow<'a, str>,
}
impl<'a> Ref<'a> {
    /// Create a reference to `ref_glyph` with the given position payload.
    pub fn new(ref_glyph: Encoding, position: impl Into<Cow<'a, str>>) -> Self {
        let position = position.into();
        Self { ref_glyph, position }
    }
    /// Render the `Refer:` line for this reference.
    pub fn gen(&self) -> String {
        let Self { ref_glyph, position } = self;
        ref_glyph.gen_ref(position.clone())
    }
}
/// A glyph representation, consisting of a spline set and references
#[derive(Default, Clone)]
pub struct Rep<'a> {
    /// Raw `SplineSet` body; empty when the glyph is built from references
    spline_set: Cow<'a, str>,
    /// References to other glyphs
    references: Cow<'a, [Ref<'a>]>,
}
impl<'a> Rep<'a> {
pub fn new(
spline_set: impl Into<Cow<'a, str>>,
references: impl Into<Cow<'a, [Ref<'a>]>>
) -> Self { Self {
spline_set: spline_set.into(),
references: references.into(),
} }
pub const fn const_new(
spline_set: Cow<'a, str>,
references: &'a [Ref<'a>],
) -> Self { Self {
spline_set,
references: Cow::Borrowed(references),
} }
pub const fn const_dflt() -> Self {
Self {
spline_set: Cow::Borrowed(""),
references: Cow::Borrowed(&[]),
}
}
pub fn gen(&self) -> String {
let f = if !self.spline_set.is_empty() || !self.references.is_empty() {
"Fore\n"
} else {
""
};
let r = self
.references
.clone()
.into_iter()
.map(|r| r.gen())
.join("\n");
let nl = if !self.references.is_empty() {
"\n"
} else {
""
};
let s = if !self.spline_set.is_empty() {
format!("SplineSet{s}\nEndSplineSet\n", s = self.spline_set)
} else {
String::new()
};
format!("{f}{r}{nl}{s}")
}
}
/// An anchor class, either stack or scale
#[derive(Clone)]
pub enum AnchorClass {
    /// The `"stack"` anchor class
    Stack,
    /// The `"scale"` anchor class
    Scale,
    /// A custom-named anchor class
    Special(&'static str),
}
/// An anchor type, either base (for lower/outer) or mark (for upper/inner)
#[derive(Clone, Copy)]
pub enum AnchorType {
    /// Rendered as `basechar`
    Base,
    /// Rendered as `mark`
    Mark,
}
/// An anchor, consisting of a class, type, and position
#[derive(Clone)]
pub struct Anchor {
    /// Anchor class (stack / scale / custom)
    class: AnchorClass,
    /// Base or mark
    ty: AnchorType,
    /// (x, y) coordinates
    pos: (isize, isize),
}
impl Anchor {
    /// Stack anchor at the conventional position: x is `500` for a base,
    /// `-500` for a mark; y is always `400`.
    pub const fn new_stack(ty: AnchorType) -> Self {
        let x = match ty {
            AnchorType::Base => 500,
            AnchorType::Mark => -500,
        };
        Self {
            class: AnchorClass::Stack,
            ty,
            pos: (x, 400),
        }
    }
    /// Scale anchor at an explicit position.
    pub const fn new_scale(ty: AnchorType, pos: (isize, isize)) -> Self {
        Self {
            class: AnchorClass::Scale,
            ty,
            pos,
        }
    }
    /// Anchor in a custom named class.
    pub const fn new_special(ty: AnchorType, pos: (isize, isize), name: &'static str) -> Self {
        Self {
            class: AnchorClass::Special(name),
            ty,
            pos,
        }
    }
    /// Render the `AnchorPoint:` line for this anchor.
    fn gen(&self) -> String {
        let (x, y) = self.pos;
        let class = match self.class {
            AnchorClass::Stack => "stack",
            AnchorClass::Scale => "scale",
            AnchorClass::Special(s) => s,
        };
        let ty = match self.ty {
            AnchorType::Base => "basechar",
            AnchorType::Mark => "mark",
        };
        format!("AnchorPoint: \"{class}\" {x} {y} {ty} 0\n")
    }
}
/// This is the smallest building block of a glyph, containing the name, width, representation, and optional anchor
#[derive(Clone)]
pub struct GlyphBasic<'a> {
    /// Glyph name (without any block prefix/suffix)
    pub name: Cow<'a, str>,
    /// Advance width
    pub width: usize,
    /// Outline/reference representation
    pub rep: Rep<'a>,
    /// Optional first anchor point
    pub anchor: Option<Anchor>,
    /// Optional second anchor point
    pub anchor2: Option<Anchor>,
}
impl<'a> GlyphBasic<'a> {
    /// Construct from anything convertible into the name `Cow`.
    pub fn new(
        name: impl Into<Cow<'a, str>>,
        width: usize,
        rep: Rep<'a>,
        anchor: Option<Anchor>,
        anchor2: Option<Anchor>,
    ) -> Self {
        let name = name.into();
        Self {
            name,
            width,
            rep,
            anchor,
            anchor2,
        }
    }
    /// `const` constructor over a `'static` name.
    pub const fn new_const(
        name: &'static str,
        width: usize,
        rep: Rep<'a>,
        anchor: Option<Anchor>,
        anchor2: Option<Anchor>,
    ) -> Self {
        Self {
            name: Cow::Borrowed(name),
            width,
            rep,
            anchor,
            anchor2,
        }
    }
}
/// This is a `GlyphBasic` that has been assigned an `EncPos`
pub struct GlyphEnc<'a> {
    /// The underlying glyph
    glyph: GlyphBasic<'a>,
    /// The assigned encoding position
    enc: EncPos,
}
#[allow(unused)]
impl<'a> GlyphEnc<'a> {
    /// Pair an existing glyph with an encoding position.
    pub fn from_basic(glyph: GlyphBasic<'a>, enc: EncPos) -> Self {
        Self { glyph, enc }
    }
    /// Build the glyph inline from its parts (usable in `const` context).
    pub const fn from_parts(enc: EncPos, name: &'static str, width: usize, rep: Rep<'a>) -> Self {
        let glyph = GlyphBasic::new_const(name, width, rep, None, None);
        Self { glyph, enc }
    }
}
/// Per-block lookup policy: selects which [`Lookups`] variant
/// `Lookups::from_mode` assigns to each glyph in the block.
pub enum LookupsMode {
    /// Word ligature synthesized from each glyph name's letters
    WordLigFromLetters,
    /// Word ligatures supplied explicitly, one entry per glyph
    /// (empty string means "no ligature for that glyph")
    WordLigManual(Vec<String>),
    /// Start-of-container ligatures
    StartCont,
    /// Variant-selector ligatures
    Alt,
    /// Glyph is the first of a combined pair
    ComboFirst,
    /// Glyph is the last of a combined pair
    ComboLast,
    /// No lookups
    None,
}
/// Lookup rules attached to a single glyph (resolved from the block's
/// [`LookupsMode`]).
#[derive(Clone)]
pub enum Lookups {
    /// Word ligature derived from the glyph name's letters
    WordLigFromLetters,
    /// Word ligature with an explicitly supplied sequence
    WordLigManual(String),
    /// Start-of-container ligature
    StartCont,
    /// End-of-container ligature (never produced by `from_mode`)
    EndCont,
    /// Variant-selector ligatures
    Alt,
    /// First glyph of a combined pair
    ComboFirst,
    /// Last glyph of a combined pair
    ComboLast,
    /// No lookups
    None,
}
impl Lookups {
    /// Resolve the lookup kind for glyph `idx` of a block built with the
    /// given mode. For `WordLigManual` an empty entry maps to
    /// `Lookups::None`; indexing assumes the manual list covers every
    /// glyph in the block (panics otherwise).
    fn from_mode(mode: &LookupsMode, idx: usize) -> Self {
        match mode {
            LookupsMode::WordLigFromLetters => Lookups::WordLigFromLetters,
            LookupsMode::WordLigManual(vec) => {
                let s = &vec[idx];
                // Idiom fix: `!s.is_empty()` instead of `s.len() > 0`.
                if s.is_empty() {
                    Lookups::None
                } else {
                    Lookups::WordLigManual(s.clone())
                }
            }
            LookupsMode::StartCont => Lookups::StartCont,
            LookupsMode::Alt => Lookups::Alt,
            LookupsMode::ComboFirst => Lookups::ComboFirst,
            LookupsMode::ComboLast => Lookups::ComboLast,
            LookupsMode::None => Lookups::None,
        }
    }
    /// Render this glyph's lookup lines (`Ligature2:` / `Substitution2:` /
    /// `AlternateSubs2:` …). `name` is the bare glyph name, `full_name` the
    /// prefixed/suffixed name; `variation` gates the Latin-input ligatures
    /// that only exist in the `Main` font variation.
    fn gen(&self, name: String, full_name: String, variation: NasinNanpaVariation) -> String {
        let latin_ligs = match &self {
            // Used in tok_block and tok_ext_block when NasinNanpaVariation == Main
            Lookups::WordLigFromLetters => {
                let lig = name.chars().join(" ");
                let special = if full_name.eq("aleTok") {
                    "Ligature2: \"'liga' WORD\" a l i\n"
                } else {
                    ""
                };
                format!("Ligature2: \"'liga' WORD\" {lig}\n{special}")
            }
            // Used in ctrl_block, tok_ctrl_block, and tok_no_combo_block
            Lookups::WordLigManual(word) => {
                let mut do_it = true;
                let always = if word.contains("middleDotTok") {
                    do_it = false;
                    format!("Ligature2: \"'liga' VAR\" {word}\n")
                } else if word.contains("CartAlt") {
                    format!(
                        "Ligature2: \"'liga' VAR\" {which}Tok VAR01\n",
                        which = if word.contains("start") { "startCart" } else { "endCart" }
                    )
                } else if name.eq("ZWJ") {
                    "Substitution2: \"'ss02' ZWJ TO STACK\" joinStackTok\nSubstitution2: \"'ss01' ZWJ TO SCALE\" joinScaleTok\n".to_string()
                } else if name.eq("startCartComb") {
                    "Ligature2: \"'liga' VAR\" ZWJ startCartTok\n".to_string()
                } else if word.eq("i t a n") {
                    "Ligature2: \"'liga' VAR\" ijoTok ZWJ tanTok ZWJ anpaTok ZWJ nanpaTok\n".to_string()
                } else if word.eq("l e p e k a") {
                    "Ligature2: \"'liga' VAR\" meliTok ZWJ kuleTok ZWJ kuleTok\n".to_string()
                } else {
                    String::new()
                };
                let latin = if variation == NasinNanpaVariation::Main && do_it {
                    if word.eq("space space") {
                        format!("Ligature2: \"'liga' SPACE\" {word}\nLigature2: \"'liga' SPACE\" z z space\nLigature2: \"'liga' SPACE\" z z\n")
                    } else if word.eq("arrow") {
                        // Arrow glyphs encode their directions in the name
                        // (chars 5..): W/N/E/S map onto typeable keys.
                        let convert = |c: char| match c {
                            'W' => "less",
                            'N' => "asciicircum",
                            'E' => "greater",
                            'S' => "v",
                            _ => panic!(),
                        };
                        let dir1 = convert(name.chars().nth(5).unwrap());
                        if let Some(dir2) = name.chars().nth(6) {
                            let dir2 = convert(dir2);
                            format!("Ligature2: \"'liga' WORD\" {dir1} {dir2}\nLigature2: \"'liga' WORD\" {dir2} {dir1}\n")
                        } else {
                            format!("Ligature2: \"'liga' WORD\" {dir1}\n")
                        }
                    } else if word.eq("bar") {
                        format!("Ligature2: \"'liga' WORD\" bar\n")
                    } else if word.contains("CartAlt") {
                        format!(
                            "Ligature2: \"'liga' VAR\" {which}Tok VAR01\nLigature2: \"'liga' VAR\" {which}Tok one\n",
                            which = if word.contains("start") { "startCart" } else { "endCart" }
                        )
                    } else {
                        format!("Ligature2: \"'liga' WORD\" {word}\n")
                    }
                } else {
                    String::new()
                };
                format!("{always}{latin}")
            } // Lookups::WordLigManual
            // Used in start_cont_block
            Lookups::StartCont => {
                let (glyph, joiner) = full_name.rsplit_once("_").unwrap();
                format!("Ligature2: \"'liga' START CONTAINER\" {glyph} {joiner}\n")
            }
            // Used in start_cont_block for laTok
            Lookups::EndCont => {
                let (glyph, _) = full_name.split_once("_").unwrap();
                format!("Ligature2: \"'liga' START CONTAINER\" endRevContTok {glyph}\n")
            }
            // Used in tok_alt_block
            Lookups::Alt => {
                let parts: Vec<&str> = full_name.split("_").collect();
                let glyph = parts[0];
                let sel = parts[1];
                let a = if full_name.eq("aTok_VAR02") {
                    "Ligature2: \"'liga' VAR\" aTok aTok\n"
                } else if full_name.eq("aTok_VAR03") {
                    "Ligature2: \"'liga' VAR\" aTok aTok aTok\n"
                } else if full_name.eq("aTok_VAR04") {
                    "Ligature2: \"'liga' VAR\" semeTok ZWJ aTok\nLigature2: \"'liga' VAR\" aTok ZWJ semeTok\n"
                } else if full_name.eq("aTok_VAR05") && variation == NasinNanpaVariation::Main {
                    r#"Ligature2: "'liga' VAR" aTok exclam question
Ligature2: "'liga' VAR" aTok question exclam
"#
                } else if full_name.eq("muteTok_VAR02") {
                    "Ligature2: \"'liga' VAR\" lukaTok ZWJ lukaTok ZWJ lukaTok ZWJ lukaTok\n"
                } else {
                    ""
                };
                let arrow_lig = if full_name.contains("niTok_arrow") {
                    format!("Ligature2: \"'liga' VAR\" {glyph} ZWJ {sel}\n")
                } else {
                    String::new()
                };
                let num_lig = if variation == NasinNanpaVariation::Main && full_name.contains("VAR0") {
                    format!(
                        "Ligature2: \"'liga' VAR\" {glyph} {sel}\n",
                        sel = match sel {
                            "VAR01" | "arrowW" => "one",
                            "VAR02" | "arrowN" => "two",
                            "VAR03" | "arrowE" => "three",
                            "VAR04" | "arrowS" => "four",
                            "VAR05" | "arrowNW" => "five",
                            "VAR06" | "arrowNE" => "six",
                            "VAR07" | "arrowSE" => "seven",
                            "VAR08" | "arrowSW" => "eight",
                            _ => panic!(),
                        }
                    )
                } else {
                    String::new()
                };
                // jakiTok/koTok randomized variants can be re-selected from
                // any already-substituted variant.
                let rerand = if full_name.contains("VAR0") {
                    let sel_word = match sel {
                        "VAR01" | "arrowW" => "one",
                        "VAR02" | "arrowN" => "two",
                        "VAR03" | "arrowE" => "three",
                        "VAR04" | "arrowS" => "four",
                        "VAR05" | "arrowNW" => "five",
                        "VAR06" | "arrowNE" => "six",
                        "VAR07" | "arrowSE" => "seven",
                        "VAR08" | "arrowSW" => "eight",
                        _ => panic!(),
                    };
                    let sel = sel.chars().last().unwrap().to_string();
                    if full_name.starts_with("jakiTok") {
                        if variation == NasinNanpaVariation::Main {
                            (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" jakiTok_VAR0{n} VAR0{sel}\nLigature2: \"'liga' VAR\" jakiTok_VAR0{n} {sel_word}\n")).collect::<String>()
                        } else {
                            (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" jakiTok_VAR0{n} VAR0{sel}\n")).collect::<String>()
                        }
                    } else if full_name.starts_with("koTok") {
                        if variation == NasinNanpaVariation::Main {
                            (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" koTok_VAR0{n} VAR0{sel}\nLigature2: \"'liga' VAR\" koTok_VAR0{n} {sel_word}\n")).collect::<String>()
                        } else {
                            (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" koTok_VAR0{n} VAR0{sel}\n")).collect::<String>()
                        }
                    } else {
                        String::new()
                    }
                } else {
                    String::new()
                };
                format!("{a}Ligature2: \"'liga' VAR\" {glyph} {sel}\n{arrow_lig}{num_lig}{rerand}")
            }
            // Used in tok_outer_block, tok_ext_outer_block, tok_alt_outer_block,
            // tok_lower_block, tok_ext_lower_block, and tok_alt_lower_block.
            Lookups::ComboFirst => {
                let (glyph, joiner) = full_name.rsplit_once('_').unwrap();
                format!("Ligature2: \"'liga' GLYPH THEN JOINER\" {glyph} {joiner}\nMultipleSubs2: \"'ccmp' RESPAWN JOINER\" {full_name} {joiner}\n")
            }
            // Used in tok_inner_block, tok_ext_inner_block, tok_alt_inner_block,
            // tok_upper_block, tok_ext_upper_block, and tok_alt_upper_block.
            Lookups::ComboLast => {
                let (joiner, glyph) = full_name.split_once("_").unwrap();
                format!("Ligature2: \"'liga' JOINER THEN GLYPH\" {joiner} {glyph}\nLigature2: \"'liga' CC CLEANUP\" combCartExtHalfTok {full_name}\nLigature2: \"'liga' CC CLEANUP\" combContExtHalfTok {full_name}\nLigature2: \"'liga' CC CLEANUP\" combCartExtTok {full_name}\nLigature2: \"'liga' CC CLEANUP\" combContExtTok {full_name}\n")
            }
            Lookups::None => String::new(),
        };
        // The base jakiTok/koTok glyphs additionally get the 'rand'
        // alternate set plus VAR09/nine re-randomization ligatures.
        let rand = if full_name.eq("jakiTok") {
            format!(
                "{rerand}AlternateSubs2: \"'rand' RAND VARIATIONS\" jakiTok_VAR01 jakiTok_VAR02 jakiTok_VAR03 jakiTok_VAR04 jakiTok_VAR05 jakiTok_VAR06 jakiTok_VAR07 jakiTok_VAR08\n",
                rerand = if variation == NasinNanpaVariation::Main {
                    (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" jakiTok_VAR0{n} VAR09\nLigature2: \"'liga' VAR\" jakiTok_VAR0{n} nine\n")).collect::<String>()
                } else {
                    (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" jakiTok_VAR0{n} VAR09\n")).collect::<String>()
                }
            )
        } else if full_name.eq("koTok") {
            format!(
                "{rerand}AlternateSubs2: \"'rand' RAND VARIATIONS\" koTok_VAR01 koTok_VAR02 koTok_VAR03 koTok_VAR04 koTok_VAR05 koTok_VAR06 koTok_VAR07 koTok_VAR08\n",
                rerand = if variation == NasinNanpaVariation::Main {
                    (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" koTok_VAR0{n} VAR09\nLigature2: \"'liga' VAR\" koTok_VAR0{n} nine\n")).collect::<String>()
                } else {
                    (1..9).map(|n| format!("Ligature2: \"'liga' VAR\" koTok_VAR0{n} VAR09\n")).collect::<String>()
                }
            )
        } else {
            String::new()
        };
        format!("{latin_ligs}{rand}")
    }
}
/// How a glyph participates in the cartouche/container composition
/// lookups (`'cc01'` / `'cc02'`).
#[derive(Clone)]
pub enum Cc {
    /// Emits the full `combCartExtTok` / `combContExtTok` extensions
    Full,
    /// Emits the `...ExtHalfTok` extensions (punctuation special-cases)
    Half,
    /// Emits the `...ExtNoneTok` extensions (participates without width)
    Participant,
    /// No composition lookups
    None,
}
impl Cc {
    /// Render the `'cc01'`/`'cc02'` substitution lines that splice this
    /// glyph into cartouche / long-glyph-container extensions.
    pub fn gen(&self, full_name: String) -> String {
        match self {
            Cc::Full => format!("MultipleSubs2: \"'cc01' CART\" {full_name} combCartExtTok\nMultipleSubs2: \"'cc02' CONT\" {full_name} combContExtTok\n"),
            Cc::Half => match full_name.as_str() {
                // comma and quotesingle substitute to tick variants.
                "comma" => "MultipleSubs2: \"'cc01' CART\" combCartExt1TickTok\nMultipleSubs2: \"'cc02' CONT\" combContExtHalfTok\n".to_string(),
                "quotesingle" => "MultipleSubs2: \"'cc01' CART\" combCartExt5TickTok\nMultipleSubs2: \"'cc02' CONT\" combContExtHalfTok\n".to_string(),
                _ => {
                    // Plain space additionally gets a squash positioning rule.
                    let sqsh = if full_name.eq("space") {
                        "Position2: \"'sqsh' SPACE SHIFT\" dx=0 dy=0 dh=-500 dv=0\n"
                    } else {
                        ""
                    };
                    format!("{sqsh}MultipleSubs2: \"'cc01' CART\" {full_name} combCartExtHalfTok\nMultipleSubs2: \"'cc02' CONT\" {full_name} combContExtHalfTok\n")
                }
            },
            Cc::Participant => {
                if full_name.contains("Tick") {
                    format!("MultipleSubs2: \"'cc01' CART\" {full_name} combCartExtNoneTok\n")
                } else if full_name.contains("dakuten") {
                    format!("MultipleSubs2: \"'cc01' CART\" {full_name} combCartExtHalfTok\n")
                } else {
                    format!("MultipleSubs2: \"'cc01' CART\" {full_name} combCartExtNoneTok\nMultipleSubs2: \"'cc02' CONT\" {full_name} combContExtNoneTok\n")
                }
            }
            Cc::None => String::new(),
        }
    }
}
/// A fully specified glyph: base data plus encoding, lookup rules, and
/// cartouche/container substitutions.
#[derive(Clone)]
pub struct GlyphFull<'a> {
    /// Name, width, representation, and anchors
    pub glyph: GlyphBasic<'a>,
    /// Fontforge slot + encoding slot
    pub encoding: Encoding,
    /// Lookup rules attached to this glyph
    pub lookups: Lookups,
    /// Cartouche/container substitution mode
    pub cc_subs: Cc,
}
impl<'a> GlyphFull<'a> {
pub fn from_basic(
glyph: GlyphBasic<'a>,
encoding: Encoding,
lookups: Lookups,
cc_subs: Cc,
) -> Self {
Self {
glyph,
encoding,
lookups,
cc_subs,
}
}
pub fn from_enc(glyph: GlyphEnc<'a>, ff_pos: usize, lookups: Lookups, cc_subs: Cc) -> Self {
Self {
glyph: glyph.glyph,
encoding: Encoding::new(ff_pos, glyph.enc),
lookups,
cc_subs,
}
}
pub fn from_parts(
name: impl Into<Cow<'a, str>>,
width: usize,
rep: Rep<'a>,
anchor: Option<Anchor>,
anchor2: Option<Anchor>,
encoding: Encoding,
lookups: Lookups,
cc_subs: Cc,
) -> Self {
Self {
glyph: GlyphBasic::new(name, width, rep, anchor, anchor2),
encoding,
lookups,
cc_subs,
}
}
pub fn gen(
&self,
prefix: String,
suffix: String,
color: String,
variation: NasinNanpaVariation,
) -> String {
let name = &self.glyph.name;
let encoding = self.encoding.gen();
let color = format!("Colour: {color}");
if name.contains("empty") {
return format!(
"\nStartChar: {name}\n{encoding}\nWidth: 0\nLayerCount: 2\n{color}\nEndChar\n"
);
}
let full_name = format!("{}{}{}", prefix, name, suffix);
let width = self.glyph.width;
let representation = self.glyph.rep.gen();
let lookups = self
.lookups
.gen(name.to_string(), full_name.clone(), variation);
let cc_subs = self.cc_subs.gen(full_name.clone());
let flags = if full_name.eq("ZWSP")
|| full_name.eq("ZWNJ")
|| full_name.eq("ZWJ")
|| full_name.starts_with("VAR")
|| full_name.starts_with("arrow")
|| full_name.eq("joinStackTok")
|| full_name.eq("joinScaleTok")
|| full_name.contains("space")
|| full_name.eq("combCartExtNoneTok")
|| full_name.eq("combContExtNoneTok")
|| full_name.ends_with("Rad")
{
"Flags: W\n"
} else {
""
};
let anchor = if let Some(anchor) = &self.glyph.anchor { anchor.gen() } else { String::new() };
let anchor2 = if let Some(anchor2) = &self.glyph.anchor2 { anchor2.gen() } else { String::new() };
format!("\nStartChar: {full_name}\n{encoding}\nWidth: {width}\n{flags}{anchor2}{anchor}LayerCount: 2\n{representation}{lookups}{cc_subs}{color}\nEndChar\n")
}
}
/// Static description of a glyph (name + spline data, with optional width
/// and anchors) used to build blocks from `const` tables.
pub struct GlyphDescriptor {
    /// Glyph name
    pub name: &'static str,
    /// Raw spline-set body
    pub spline_set: &'static str,
    /// Advance width; `None` means "use the block's fallback width"
    pub width: Option<usize>,
    /// Optional first anchor
    pub anchor: Option<Anchor>,
    /// Optional second anchor
    pub anchor2: Option<Anchor>,
}
impl GlyphDescriptor {
    /// Bare descriptor: fallback width, no anchors.
    pub const fn new(name: &'static str, spline_set: &'static str) -> Self {
        Self { name, spline_set, width: None, anchor: None, anchor2: None }
    }
    /// Descriptor with an explicit advance width.
    pub const fn new_with_width(
        name: &'static str,
        width: usize,
        spline_set: &'static str,
    ) -> Self {
        Self { name, spline_set, width: Some(width), anchor: None, anchor2: None }
    }
    /// Descriptor with a single anchor point.
    pub const fn new_with_anchor(
        name: &'static str,
        anchor: Anchor,
        spline_set: &'static str,
    ) -> Self {
        Self { name, spline_set, width: None, anchor: Some(anchor), anchor2: None }
    }
    /// Descriptor with two anchor points.
    pub const fn new_with_anchors(
        name: &'static str,
        anchor: Anchor,
        anchor2: Anchor,
        spline_set: &'static str,
    ) -> Self {
        Self { name, spline_set, width: None, anchor: Some(anchor), anchor2: Some(anchor2) }
    }
}
/// A block of glyphs sharing a name prefix/suffix and a display color.
/// Blocks are padded with empty glyphs to a multiple of 16 slots.
pub struct GlyphBlock<'a> {
    /// The glyphs in fontforge order (including padding)
    pub glyphs: Vec<GlyphFull<'a>>,
    /// Prefix prepended to each glyph name
    pub prefix: Cow<'a, str>,
    /// Suffix appended to each glyph name
    pub suffix: Cow<'a, str>,
    /// Display color applied to the whole block
    pub color: Cow<'a, str>,
}
impl<'a> GlyphBlock<'a> {
    /// Builds a block from glyphs that already carry their own encodings.
    ///
    /// Each glyph is assigned the next sequential FontForge slot starting at
    /// `*ff_pos` (which is advanced in place), and the block is padded with
    /// empty glyphs up to the next multiple of 16 slots.
    pub fn from_enc_glyphs(
        ff_pos: &mut usize,
        glyphs: Vec<GlyphEnc<'a>>,
        lookups: LookupsMode,
        cc_subs: Cc,
        prefix: impl Into<Cow<'a, str>>,
        suffix: impl Into<Cow<'a, str>>,
        color: impl Into<Cow<'a, str>>,
    ) -> Self {
        let mut glyphs: Vec<GlyphFull> = glyphs
            .into_iter()
            .enumerate()
            .map(|(idx, glyph)| {
                // `idx` is the glyph's position within this block; lookups
                // built from `LookupsMode` are positional.
                let g = GlyphFull::from_enc(
                    glyph,
                    *ff_pos,
                    Lookups::from_mode(&lookups, idx),
                    cc_subs.clone(),
                );
                *ff_pos += 1;
                g
            })
            .collect();
        // `15 - ((len + 15) % 16)` is the filler count needed to reach the
        // next multiple of 16 (0 when already aligned).
        let mut padding = Self::new_empty(ff_pos, 15 - ((glyphs.len() + 15) % 16), 0).glyphs;
        glyphs.append(&mut padding);
        Self {
            glyphs,
            prefix: prefix.into(),
            suffix: suffix.into(),
            color: color.into(),
        }
    }
    /// Builds a block from bare glyphs, synthesizing an `Encoding` for each
    /// from the running `ff_pos` slot and the (incrementing) `enc_pos`
    /// codepoint cursor. Pads to a multiple of 16 like `from_enc_glyphs`.
    pub fn from_basic_glyphs(
        ff_pos: &mut usize,
        glyphs: Vec<GlyphBasic<'a>>,
        lookups: LookupsMode,
        cc_subs: Cc,
        prefix: impl Into<Cow<'a, str>>,
        suffix: impl Into<Cow<'a, str>>,
        color: impl Into<Cow<'a, str>>,
        mut enc_pos: EncPos,
    ) -> Self {
        let mut glyphs: Vec<GlyphFull> = glyphs
            .into_iter()
            .enumerate()
            .map(|(idx, glyph)| {
                let g = GlyphFull::from_basic(
                    glyph,
                    Encoding::new(*ff_pos, enc_pos.clone()),
                    Lookups::from_mode(&lookups, idx),
                    cc_subs.clone(),
                );
                *ff_pos += 1;
                // Advance the codepoint cursor alongside the slot counter.
                enc_pos.inc();
                g
            })
            .collect();
        // Pad the block out to a multiple of 16 slots (0 fillers if aligned).
        let mut padding = Self::new_empty(ff_pos, 15 - ((glyphs.len() + 15) % 16), 0).glyphs;
        glyphs.append(&mut padding);
        Self {
            glyphs,
            prefix: prefix.into(),
            suffix: suffix.into(),
            color: color.into(),
        }
    }
    /// Builds a block from static `GlyphDescriptor` data, materializing each
    /// descriptor into an owned `GlyphBasic`. Descriptors without an explicit
    /// width get `fallback_width`.
    pub fn from_const_descriptors(
        ff_pos: &mut usize,
        glyphs: &'static [GlyphDescriptor],
        lookups: LookupsMode,
        cc_subs: Cc,
        prefix: impl Into<Cow<'a, str>>,
        suffix: impl Into<Cow<'a, str>>,
        color: impl Into<Cow<'a, str>>,
        enc_pos: EncPos,
        fallback_width: usize,
    ) -> Self {
        let glyphs: Vec<GlyphBasic> = glyphs
            .into_iter()
            .map(
                |GlyphDescriptor {
                     name,
                     spline_set,
                     width,
                     anchor,
                     anchor2,
                 }| {
                    GlyphBasic::new(
                        name.to_string(),
                        width.unwrap_or(fallback_width),
                        Rep::new(spline_set.to_string(), &[]),
                        anchor.clone(),
                        anchor2.clone(),
                    )
                },
            )
            .collect();
        Self::from_basic_glyphs(
            ff_pos, glyphs, lookups, cc_subs, prefix, suffix, color, enc_pos,
        )
    }
    /// Builds a block from pre-encoded static glyph data, deep-copying each
    /// entry into an owned `GlyphEnc` before delegating to `from_enc_glyphs`.
    pub fn from_const_encs(
        ff_pos: &mut usize,
        glyphs: &'a [GlyphEnc],
        lookups: LookupsMode,
        cc_subs: Cc,
        prefix: impl Into<Cow<'a, str>>,
        suffix: impl Into<Cow<'a, str>>,
        color: impl Into<Cow<'a, str>>,
    ) -> Self {
        let glyphs: Vec<GlyphEnc> = glyphs
            .into_iter()
            .map(
                |GlyphEnc { glyph, enc }| {
                    GlyphEnc {
                        glyph: GlyphBasic::new(
                            glyph.name.to_string(),
                            glyph.width,
                            Rep::new(glyph.rep.spline_set.to_string(), &[]),
                            glyph.anchor.clone(),
                            glyph.anchor2.clone(),
                        ),
                        enc: enc.clone(),
                    }
                },
            )
            .collect();
        Self::from_enc_glyphs(
            ff_pos, glyphs, lookups, cc_subs, prefix, suffix, color,
        )
    }
    /// Generates a `GlyphBlock` whose glyphs are all references to this block's glyphs, all with the same `rel_pos`
    /// (`rel_pos` is the reference's transform string, e.g. a translation matrix).
    pub fn from_refs(
        &self,
        ff_pos: &mut usize,
        rel_pos: String,
        lookups: LookupsMode,
        cc_subs: Cc,
        use_full_names: bool,
        prefix: impl Into<Cow<'a, str>>,
        suffix: impl Into<Cow<'a, str>>,
        color: impl Into<Cow<'a, str>>,
        width: Option<usize>,
        anchor: Option<Anchor>,
    ) -> Self {
        let glyphs: Vec<GlyphBasic> = self
            .glyphs
            .clone()
            .into_iter()
            .map(
                |GlyphFull {
                     glyph, encoding, ..
                 }| -> GlyphBasic {
                    // Single reference to the source glyph's slot; built via
                    // Option/flatten so the shape matches multi-ref call sites.
                    let refs: Vec<Ref> = vec![
                        Some(Ref::new(encoding, rel_pos.clone())),
                        None,
                    ]
                    .into_iter()
                    .flatten()
                    .collect();
                    // Either keep the bare name or bake in this block's
                    // prefix/suffix (the new block may use different ones).
                    let name: Cow<'a, str> = if use_full_names {
                        Cow::Owned(format!(
                            "{pre}{name}{post}",
                            pre = self.prefix,
                            name = glyph.name,
                            post = self.suffix
                        ))
                    } else {
                        glyph.name
                    };
                    let g = GlyphBasic::new(
                        name,
                        // Caller-supplied overrides win; otherwise inherit
                        // from the referenced glyph.
                        match width {
                            Some(width) => width,
                            None => glyph.width,
                        },
                        Rep::new(Cow::default(), refs),
                        match &anchor {
                            Some(anchor) => Some(anchor.clone()),
                            None => glyph.anchor,
                        },
                        None,
                    );
                    g
                },
            )
            .collect();
        Self::from_basic_glyphs(
            ff_pos,
            glyphs,
            lookups,
            cc_subs,
            prefix,
            suffix,
            color,
            EncPos::None,
        )
    }
    /// Generates a `GlyphBlock` with a given `count` of empty glyphs
    /// named `emptyNNNN` after their slot index; used for 16-slot padding.
    pub fn new_empty(ff_pos: &mut usize, count: usize, width: usize) -> Self {
        let end = *ff_pos + count;
        let mut glyphs = vec![];
        while *ff_pos < end {
            glyphs.push(GlyphFull::from_parts(
                format!("empty{i:04}", i = *ff_pos),
                width,
                Rep::default(),
                None,
                None,
                Encoding::new(*ff_pos, EncPos::None),
                Lookups::None,
                Cc::None,
            ));
            *ff_pos += 1;
        }
        Self {
            glyphs,
            prefix: Cow::Borrowed(""),
            suffix: Cow::Borrowed(""),
            color: Cow::Borrowed("dddddd"),
        }
    }
    /// Generates this block's `.sfd` text by concatenating the output of every
    /// glyph, passing down the shared prefix/suffix/color.
    pub fn gen(&self, variation: NasinNanpaVariation) -> String {
        let mut s = String::new();
        for g in &self.glyphs {
            s += &g.gen(
                self.prefix.to_string(),
                self.suffix.to_string(),
                self.color.to_string(),
                variation,
            )
        }
        s
    }
}
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | false |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/main.rs | font-forge-tool/src/main.rs | use ffir::*;
use glyph_blocks::{base::*, ctrl::*, inner::*, lower::*, outer::*, *};
use itertools::Itertools;
use std::{collections::HashSet, fs::File, io::Write};
mod ffir;
mod glyph_blocks;
/// Which flavour of the font to generate: the main build (includes the Latin
/// block and letter-based word ligatures) or the UCSUR-only build (emitted
/// with a `-UCSUR` filename suffix, no Latin block).
#[derive(PartialEq, Eq, Clone, Copy)]
enum NasinNanpaVariation {
    Main,
    Ucsur,
}
/// Generates one nasin-nanpa font as a FontForge `.sfd` text file and writes
/// it to `nasin-nanpa-{VERSION}[-UCSUR].sfd` in the working directory.
///
/// `ff_pos` threads a running FontForge slot index through every block in
/// order, so the final `BeginChars: {ff_pos} {ff_pos}` header matches the
/// total number of emitted glyphs. NOTE: block construction order therefore
/// matters — reordering blocks changes every later glyph's slot.
fn gen_nasin_nanpa(variation: NasinNanpaVariation) -> std::io::Result<()> {
    let mut ff_pos: usize = 0;
    // Bind the const so `from_const_encs` can borrow it with a local lifetime.
    let ctrl_temp = CTRL;
    // Control glyphs from `CTRL`. The ligature strings are positional: one
    // entry per glyph, in CTRL's order.
    let mut ctrl_block = GlyphBlock::from_const_encs(
        &mut ff_pos,
        &ctrl_temp,
        LookupsMode::WordLigManual(vec![
            String::new(),
            String::new(),
            "bar".to_string(),
            "ampersand".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "arrow".to_string(),
            "combCartExtTok comma".to_string(),
            "comma comma".to_string(),
            "comma comma comma".to_string(),
            "comma comma comma comma".to_string(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            String::new(),
            "combCartExtTok quotesingle".to_string(),
            "quotesingle quotesingle".to_string(),
            "quotesingle quotesingle quotesingle".to_string(),
            "quotesingle quotesingle quotesingle quotesingle".to_string(),
            String::new(),
            String::new(),
            "quotedbl".to_string(),
            "asterisk".to_string(),
        ]),
        Cc::Participant,
        "",
        "",
        "fa6791",
    );
    // First control glyph is excluded from the cc substitutions.
    ctrl_block.glyphs[0].cc_subs = Cc::None;
    // Toki-pona control glyphs (suffix "Tok"), encoded from U+F1990 upward.
    let mut tok_ctrl_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        TOK_CTRL.as_slice(),
        LookupsMode::WordLigManual(vec![
            "bracketleft".to_string(),
            "bracketright".to_string(),
            "equal".to_string(),
            String::new(),
            String::new(),
            "hyphen".to_string(),
            "plus".to_string(),
            "parenleft".to_string(),
            "parenright".to_string(),
            "underscore".to_string(),
            "braceleft".to_string(),
            "braceright".to_string(),
            "startCartAlt".to_string(),
            "endCartAlt".to_string(),
            "t e".to_string(),
            "t o".to_string(),
            "ZWJ startCartTok".to_string(),
        ]),
        Cc::None,
        "",
        "Tok",
        "aaafff",
        EncPos::Pos(0xF1990),
        0,
    );
    // Per-glyph patch-ups by positional index into TOK_CTRL (indices mirror
    // the ligature list above).
    tok_ctrl_block.glyphs[5].cc_subs = Cc::Participant;
    tok_ctrl_block.glyphs[6].cc_subs = Cc::Participant;
    tok_ctrl_block.glyphs[12].encoding.enc_pos = EncPos::None;
    tok_ctrl_block.glyphs[13].encoding.enc_pos = EncPos::None;
    tok_ctrl_block.glyphs[16].cc_subs = Cc::Participant;
    tok_ctrl_block.glyphs[16].encoding.enc_pos = EncPos::None;
    // Long-glyph "container start" variants; unencoded, width 1000.
    let mut start_cont_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        START_CONT.as_slice(),
        LookupsMode::StartCont,
        Cc::None,
        "",
        "_startContTok",
        "aaafff",
        EncPos::None,
        1000,
    );
    start_cont_block.glyphs[7].lookups = Lookups::EndCont;
    // ASCII/Latin glyphs only exist in the main build; the UCSUR build gets
    // an empty placeholder block so later block indices stay meaningful.
    let latn_block = if variation == NasinNanpaVariation::Main {
        GlyphBlock::from_const_descriptors(
            &mut ff_pos,
            LATN.as_slice(),
            LookupsMode::None,
            Cc::Half,
            "",
            "",
            "fffaaa",
            EncPos::Pos(0x0020),
            500,
        )
    } else {
        GlyphBlock::new_empty(&mut ff_pos, 0, 0)
    };
    // Glyphs that never combine (punctuation, name glyphs, ...).
    let mut no_comb_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        TOK_NO_COMB.as_slice(),
        LookupsMode::WordLigManual(vec![
            "period".to_string(),
            "colon".to_string(),
            "middleDotTok middleDotTok".to_string(),
            "middleDotTok middleDotTok middleDotTok".to_string(),
            "space space".to_string(),
            "i t a n".to_string(),
            "l i p a m a n k a".to_string(),
            "l e p e k a".to_string(),
            "S e k a".to_string(),
            "L i n k u".to_string(),
        ]),
        Cc::Full,
        "",
        "Tok",
        "cccfff",
        EncPos::None,
        1000,
    );
    // Selected no-comb glyphs do get fixed codepoints.
    no_comb_block.glyphs[0].encoding.enc_pos = EncPos::Pos(0xF199C);
    no_comb_block.glyphs[1].encoding.enc_pos = EncPos::Pos(0xF199D);
    no_comb_block.glyphs[4].encoding.enc_pos = EncPos::Pos(0x3000);
    // Radical glyphs (suffix "Rad"), encoded from U+F1C80.
    let radicals_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        RADICALS.as_slice(),
        LookupsMode::None,
        Cc::Full,
        "",
        "Rad",
        "7777cc",
        EncPos::Pos(0xF1C80),
        1000,
    );
    // Core word glyphs (UCSUR block starting at U+F1900). Letter-based word
    // ligatures only make sense in the main build, which has Latin letters.
    let base_cor_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        BASE_COR.as_slice(),
        if variation == NasinNanpaVariation::Main {
            LookupsMode::WordLigFromLetters
        } else {
            LookupsMode::None
        },
        Cc::Full,
        "",
        "Tok",
        "bf80ff",
        EncPos::Pos(0xF1900),
        1000,
    );
    // Extended word glyphs, encoded from U+F19A0.
    let mut base_ext_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        BASE_EXT.as_slice(),
        if variation == NasinNanpaVariation::Main {
            LookupsMode::WordLigFromLetters
        } else {
            LookupsMode::None
        },
        Cc::Full,
        "",
        "Tok",
        "df80ff",
        EncPos::Pos(0xF19A0),
        1000,
    );
    // Two extended glyphs are left unencoded.
    base_ext_block.glyphs[41].encoding.enc_pos = EncPos::None;
    base_ext_block.glyphs[42].encoding.enc_pos = EncPos::None;
    // Alternate forms, reachable only via lookups (no codepoints, no suffix).
    let base_alt_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        BASE_ALT.as_slice(),
        LookupsMode::Alt,
        Cc::Full,
        "",
        "",
        "ff80e6",
        EncPos::None,
        1000,
    );
    // "Scale" combination glyphs: outer (first element) + inner (second
    // element) pairs, in core/extended/alternate flavours.
    let outer_cor_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        OUTER_COR.as_slice(),
        LookupsMode::ComboFirst,
        Cc::Full,
        "",
        "Tok_joinScaleTok",
        "ffff",
        EncPos::None,
        1000,
    );
    let outer_ext_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        OUTER_EXT.as_slice(),
        LookupsMode::ComboFirst,
        Cc::Full,
        "",
        "Tok_joinScaleTok",
        "ffff",
        EncPos::None,
        1000,
    );
    let outer_alt_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        OUTER_ALT.as_slice(),
        LookupsMode::ComboFirst,
        Cc::Full,
        "",
        "_joinScaleTok",
        "ffff",
        EncPos::None,
        1000,
    );
    // Inner halves of scale combos are zero-width marks.
    let inner_cor_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        INNER_COR.as_slice(),
        LookupsMode::ComboLast,
        Cc::Full,
        "joinScaleTok_",
        "Tok",
        "80ffff",
        EncPos::None,
        0,
    );
    let inner_ext_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        INNER_EXT.as_slice(),
        LookupsMode::ComboLast,
        Cc::Full,
        "joinScaleTok_",
        "Tok",
        "80ffff",
        EncPos::None,
        0,
    );
    let inner_alt_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        INNER_ALT.as_slice(),
        LookupsMode::ComboLast,
        Cc::Full,
        "joinScaleTok_",
        "",
        "80ffff",
        EncPos::None,
        0,
    );
    // "Stack" combination glyphs: lower halves drawn normally...
    let lower_cor_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        LOWER_COR.as_slice(),
        LookupsMode::ComboFirst,
        Cc::Full,
        "",
        "Tok_joinStackTok",
        "ff00",
        EncPos::None,
        1000,
    );
    let lower_ext_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        LOWER_EXT.as_slice(),
        LookupsMode::ComboFirst,
        Cc::Full,
        "",
        "Tok_joinStackTok",
        "ff00",
        EncPos::None,
        1000,
    );
    let lower_alt_block = GlyphBlock::from_const_descriptors(
        &mut ff_pos,
        LOWER_ALT.as_slice(),
        LookupsMode::ComboFirst,
        Cc::Full,
        "",
        "_joinStackTok",
        "ff00",
        EncPos::None,
        1000,
    );
    // ...and upper halves generated as references to the lower glyphs shifted
    // up by the transform "S 1 0 0 1 -1000 500 2", zero-width, mark-anchored.
    let upper_cor_block = lower_cor_block.from_refs(
        &mut ff_pos,
        "S 1 0 0 1 -1000 500 2".to_string(),
        LookupsMode::ComboLast,
        Cc::Full,
        false,
        "joinStackTok_",
        "Tok",
        "80ff80",
        Some(0),
        Some(Anchor::new_stack(AnchorType::Mark)),
    );
    let upper_ext_block = lower_ext_block.from_refs(
        &mut ff_pos,
        "S 1 0 0 1 -1000 500 2".to_string(),
        LookupsMode::ComboLast,
        Cc::Full,
        false,
        "joinStackTok_",
        "Tok",
        "80ff80",
        Some(0),
        Some(Anchor::new_stack(AnchorType::Mark)),
    );
    let upper_alt_block = lower_alt_block.from_refs(
        &mut ff_pos,
        "S 1 0 0 1 -1000 500 2".to_string(),
        LookupsMode::ComboLast,
        Cc::Full,
        false,
        "joinStackTok_",
        "",
        "80ff80",
        Some(0),
        Some(Anchor::new_stack(AnchorType::Mark)),
    );
    // Formats a glyph-name list as an `.sfd` class line: "Class: <len> <names>".
    let put_in_class = |orig: String| format!("Class: {} {}", orig.len(), orig);
    // 'kern' contextual rule that fixes spacing after word glyphs.
    let space_calt = {
        let names = vec![&base_cor_block, &base_ext_block, &base_alt_block]
            .iter()
            .enumerate()
            .map(|(i, block)| {
                block
                    .glyphs
                    .iter()
                    .filter_map(|glyph| {
                        if glyph.glyph.name.contains("empty") {
                            None
                        } else {
                            Some(format!(
                                "{}{}",
                                glyph.glyph.name,
                                // Alt block (index 2) has no "Tok" suffix.
                                if i != 2 { "Tok" } else { "" }
                            ))
                        }
                    })
                    .join(" ")
            })
            .join(" ");
        let aa = (1..5).map(|x| format!("combCartExt{x}TickTok")).join(" ");
        let bb = (5..9).map(|x| format!("combCartExt{x}TickTok")).join(" ");
        let prenames = format!("{aa} combCartExtHalfTok combContExtHalfTok {bb} endCartTok combCartExtTok endContTok combContExtTok endRevContTok endCartAltTok teTok toTok middleDotTok colonTok middleDot2Tok middleDot3Tok");
        let other = put_in_class(format!("{prenames} {names}"));
        let sp = put_in_class("space".to_string());
        format!("ContextPos2: class \"'kern' FIX SPACE\" 3 1 1 1\n {other}\n {sp}\n")
    };
    // 'calt' contextual rule that rewrites ZWJ depending on what follows
    // (scale combo vs stack combo).
    let zwj_calt = {
        let scale_names = vec![&outer_cor_block, &outer_ext_block, &outer_alt_block]
            .iter()
            .enumerate()
            .map(|(i, &block)| {
                block
                    .glyphs
                    .iter()
                    .filter_map(|glyph| {
                        if glyph.glyph.name.contains("empty") {
                            None
                        } else {
                            Some(format!(
                                "{}{}",
                                glyph.glyph.name,
                                if i != 2 { "Tok" } else { "" }
                            ))
                        }
                    })
                    .join(" ")
            })
            .join(" ");
        // Bare names of all scale-capable glyphs, used below to exclude them
        // from the stack class.
        let scale_glyphs = vec![&outer_cor_block, &outer_ext_block, &outer_alt_block]
            .iter()
            .map(|block| {
                block
                    .glyphs
                    .iter()
                    .filter_map(|glyph| {
                        if glyph.glyph.name.contains("empty") {
                            None
                        } else {
                            Some(glyph.glyph.name.clone())
                        }
                    })
                    .collect_vec()
            })
            .flatten()
            .collect::<HashSet<_>>();
        let stack_names = vec![&lower_cor_block, &lower_ext_block, &lower_alt_block]
            .iter()
            .enumerate()
            .map(|(i, block)| {
                block
                    .glyphs
                    .iter()
                    .filter_map(|glyph| {
                        if glyph.glyph.name.contains("empty")
                            || glyph.glyph.name.contains("arrow")
                            || scale_glyphs.contains(&glyph.glyph.name)
                        {
                            None
                        } else {
                            Some(format!(
                                "{}{}",
                                glyph.glyph.name,
                                if i != 2 { "Tok" } else { "" }
                            ))
                        }
                    })
                    .join(" ")
            })
            .join(" ");
        let zwj = put_in_class("ZWJ".to_string());
        let scale = put_in_class(scale_names);
        let stack = put_in_class(stack_names);
        // Emit the three classes once per prefix ("" / "B" / "F" lines).
        let put_in_sub = |c: &str| format!(" {c}{zwj}\n {c}{scale}\n {c}{stack}\n");
        let subs = format!("{}{}{}", put_in_sub(""), put_in_sub("B"), put_in_sub("F"));
        format!("ContextSub2: class \"'calt' CHANGE ZWJ\" 4 4 4 2\n{subs}")
    };
    // Main block order also fixes every glyph's FontForge slot — do not reorder.
    let mut main_blocks = vec![
        latn_block,
        no_comb_block,
        radicals_block,
        base_cor_block,
        base_ext_block,
        base_alt_block,
        outer_cor_block,
        outer_ext_block,
        outer_alt_block,
        inner_cor_block,
        inner_ext_block,
        inner_alt_block,
        lower_cor_block,
        lower_ext_block,
        lower_alt_block,
        upper_cor_block,
        upper_ext_block,
        upper_alt_block,
    ];
    // 'calt' chaining rule driving cartouche / long-glyph extension.
    let chain_calt = {
        // Shadows the outer closure (same definition, local for clarity).
        let put_in_class = |orig: String| format!("Class: {} {}", orig.len(), orig);
        let base = {
            let ctrl_names = ctrl_block
                .glyphs
                .iter()
                .filter_map(|glyph| {
                    if glyph.glyph.name.contains("Half") || glyph.glyph.name.contains("Tick") {
                        None
                    } else {
                        Some(format!(
                            "{}{}{}",
                            ctrl_block.prefix, glyph.glyph.name, ctrl_block.suffix
                        ))
                    }
                })
                .join(" ");
            let main_names = main_blocks
                .iter()
                .map(|block| {
                    block
                        .glyphs
                        .iter()
                        .map(|glyph| {
                            format!("{}{}{}", block.prefix, glyph.glyph.name, block.suffix)
                        })
                        .join(" ")
                })
                .join(" ");
            put_in_class(format!(
                "{} joinStackTok joinScaleTok {}",
                ctrl_names, main_names
            ))
        };
        let cart = put_in_class(format!(
            "{} {} {}",
            "combCartExtHalfTok combCartExtNoneTok",
            (1..=8)
                .map(|x| format!("combCartExt{}TickTok", x))
                .join(" "),
            "startCartTok combCartExtTok startCartAltTok startCartCombTok"
        ));
        let cont = {
            let longs = start_cont_block
                .glyphs
                .iter()
                .filter_map(|glyph| {
                    if glyph.glyph.name.eq("laTok") {
                        None
                    } else {
                        Some(format!(
                            "{}{}{}",
                            start_cont_block.prefix, glyph.glyph.name, start_cont_block.suffix
                        ))
                    }
                })
                .join(" ");
            put_in_class(format!("combContExtNoneTok combContExtHalfTok startLongPiTok combLongPiExtTok startContTok combContExtTok startRevContTok {}", longs))
        };
        let put_in_sub = |c: &str| format!(" {c}{base}\n {c}{cart}\n {c}{cont}\n");
        let subs = format!("{}{}{}", put_in_sub(""), put_in_sub("B"), put_in_sub("F"));
        format!("ChainSub2: class \"'calt' CART AND CONT\" 4 4 4 2\n{subs}")
    };
    // Concatenate all blocks (ctrl blocks first) and render every glyph.
    let mut meta_block = vec![ctrl_block, tok_ctrl_block, start_cont_block];
    meta_block.append(&mut main_blocks);
    // NOTE(review): the format!("{}", ...) wrapper is redundant — join("")
    // already yields a String.
    let glyphs_string = format!(
        "{}",
        meta_block.iter().map(|block| block.gen(variation)).join("")
    );
    // Seconds since the Unix epoch, used as the sfd ModificationTime.
    let time = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs();
    let filename = format!(
        "nasin-nanpa-{VERSION}{}.sfd",
        if variation == NasinNanpaVariation::Ucsur {
            "-UCSUR"
        } else {
            ""
        }
    );
    let mut file = File::create(filename)?;
    // FINAL `.sfd` COMPOSITION
    writeln!(
        &mut file,
        r#"{HEADER}Version: {VERSION}
{DETAILS1}ModificationTime: {time}{DETAILS2}{LOOKUPS}DEI: 91125
{space_calt}{AFTER_SPACE_CALT}{zwj_calt}{AFTER_ZWJ_CALT}{chain_calt}{AFTER_CHAIN_CALT}{VERSION}{OTHER}BeginChars: {ff_pos} {ff_pos}
{glyphs_string}EndChars
EndSplineFont"#
    )
}
/// Entry point: generate both font variations, stopping at the first I/O error.
fn main() -> std::io::Result<()> {
    for variation in [NasinNanpaVariation::Main, NasinNanpaVariation::Ucsur] {
        gen_nasin_nanpa(variation)?;
    }
    Ok(())
}
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | false |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/glyph_blocks/base.rs | font-forge-tool/src/glyph_blocks/base.rs | use crate::GlyphDescriptor;
//MARK: BASE
pub const BASE_COR: [GlyphDescriptor; 137] = [
GlyphDescriptor::new(
"a",
r#"
500 50 m 0
555 50 600 95 600 150 c 0
600 205 555 250 500 250 c 0
445 250 400 205 400 150 c 0
400 95 445 50 500 50 c 0
800 0 m 0
800 -26 780 -50 750 -50 c 0
729 -50 710 -37 703 -17 c 0
694 8 685 31 675 53 c 1
641 -8 575 -50 500 -50 c 0
390 -50 300 40 300 150 c 0
300 260 390 350 500 350 c 0
561 350 609 323 648 284 c 0
695 234 753 138 797 17 c 0
799 11 800 6 800 0 c 0
550 800 m 2
550 450 l 2
550 422 528 400 500 400 c 0
472 400 450 422 450 450 c 2
450 800 l 2
450 828 472 850 500 850 c 0
528 850 550 828 550 800 c 2"#,
),
GlyphDescriptor::new(
"akesi",
r#"
675 690 m 0
634 690 600 724 600 765 c 0
600 806 634 840 675 840 c 0
716 840 750 806 750 765 c 0
750 724 716 690 675 690 c 0
588 160 m 1
412 160 l 1
417 143 423 126 431 112 c 0
451 75 475 60 500 60 c 0
525 60 549 75 569 112 c 0
577 126 583 143 588 160 c 1
400 360 m 1
400 260 l 1
600 260 l 1
600 360 l 1
400 360 l 1
588 460 m 1
583 477 577 494 569 508 c 0
549 545 525 560 500 560 c 0
475 560 451 545 431 508 c 0
423 494 417 477 412 460 c 1
588 460 l 1
500 660 m 0
575 660 626 610 656 557 c 0
672 527 684 494 691 460 c 1
781 460 l 2
809 460 831 438 831 410 c 0
831 382 809 360 781 360 c 2
700 360 l 1
700 260 l 1
781 260 l 2
809 260 831 238 831 210 c 0
831 182 809 160 781 160 c 2
691 160 l 1
684 126 672 93 656 63 c 0
626 10 575 -40 500 -40 c 0
425 -40 374 10 344 63 c 0
328 93 316 126 309 160 c 1
219 160 l 2
191 160 169 182 169 210 c 0
169 238 191 260 219 260 c 2
300 260 l 1
300 360 l 1
219 360 l 2
191 360 169 382 169 410 c 0
169 438 191 460 219 460 c 2
309 460 l 1
316 494 328 527 344 557 c 0
374 610 425 660 500 660 c 0
400 765 m 0
400 724 366 690 325 690 c 0
284 690 250 724 250 765 c 0
250 806 284 840 325 840 c 0
366 840 400 806 400 765 c 0"#,
),
GlyphDescriptor::new(
"ala",
r#"
100 750 m 0
100 776 124 800 150 800 c 0
162 800 175 795 185 785 c 2
500 471 l 1
815 785 l 2
825 795 838 800 850 800 c 0
876 800 900 776 900 750 c 0
900 738 895 725 885 715 c 2
571 400 l 1
885 85 l 2
895 75 900 62 900 50 c 0
900 24 876 0 850 0 c 0
838 0 825 5 815 15 c 2
500 329 l 1
185 15 l 2
175 5 162 0 150 0 c 0
124 0 100 24 100 50 c 0
100 62 105 75 115 85 c 2
429 400 l 1
115 715 l 2
105 725 100 738 100 750 c 0"#,
),
GlyphDescriptor::new(
"alasa",
r#"
300 350 m 1
300 103 l 1
441 122 521 221 541 350 c 1
300 350 l 1
541 450 m 1
521 578 441 678 300 697 c 1
300 450 l 1
541 450 l 1
642 350 m 1
620 154 478 0 250 0 c 0
222 0 200 22 200 50 c 2
200 350 l 1
100 350 l 2
72 350 50 372 50 400 c 0
50 428 72 450 100 450 c 2
200 450 l 1
200 750 l 2
200 778 222 800 250 800 c 0
478 800 620 646 642 450 c 1
779 450 l 1
715 515 l 2
705 525 700 538 700 550 c 0
700 576 724 600 750 600 c 0
762 600 775 595 785 585 c 2
935 435 l 2
944 426 950 416 950 400 c 0
950 384 944 374 935 365 c 2
785 215 l 2
775 205 762 200 750 200 c 0
724 200 700 224 700 250 c 0
700 262 705 275 715 285 c 2
779 350 l 1
642 350 l 1"#,
),
GlyphDescriptor::new(
"ale",
r#"
563 400 m 1
599 354 629 318 657 292 c 0
690 262 717 250 746 250 c 0
776 250 800 262 818 285 c 0
837 309 850 348 850 400 c 0
850 452 837 491 818 515 c 0
800 538 776 550 746 550 c 0
717 550 690 538 657 508 c 0
629 482 599 446 563 400 c 1
437 400 m 1
401 446 371 482 343 508 c 0
310 538 283 550 254 550 c 0
224 550 200 538 182 515 c 0
163 491 150 452 150 400 c 0
150 348 163 309 182 285 c 0
200 262 224 250 254 250 c 0
283 250 310 262 343 292 c 0
371 318 401 354 437 400 c 1
50 400 m 0
50 533 125 650 254 650 c 0
317 650 367 622 411 582 c 0
441 555 470 521 500 482 c 1
530 521 559 555 589 582 c 0
633 622 683 650 746 650 c 0
875 650 950 533 950 400 c 0
950 267 875 150 746 150 c 0
683 150 633 178 589 218 c 0
559 245 530 279 500 318 c 1
470 279 441 245 411 218 c 0
367 178 317 150 254 150 c 0
125 150 50 267 50 400 c 0"#,
),
GlyphDescriptor::new(
"anpa",
r#"
575 175 m 0
575 134 541 100 500 100 c 0
459 100 425 134 425 175 c 0
425 216 459 250 500 250 c 0
541 250 575 216 575 175 c 0
150 700 m 0
178 700 200 678 200 650 c 2
200 400 l 1
800 400 l 1
800 650 l 2
800 678 822 700 850 700 c 0
878 700 900 678 900 650 c 2
900 350 l 2
900 322 878 300 850 300 c 2
150 300 l 2
122 300 100 322 100 350 c 2
100 650 l 2
100 678 122 700 150 700 c 0"#,
),
GlyphDescriptor::new(
"ante",
r#"
150 0 m 0
124 0 100 22 100 50 c 0
100 65 107 80 120 90 c 2
470 352 l 2
479 358 490 362 500 362 c 0
510 362 521 358 530 352 c 2
880 90 l 2
893 80 900 65 900 50 c 0
900 22 876 0 850 0 c 0
840 0 829 3 820 10 c 2
500 250 l 1
180 10 l 2
171 3 160 0 150 0 c 0
850 800 m 0
876 800 900 778 900 750 c 0
900 735 893 720 880 710 c 2
530 448 l 2
521 442 510 438 500 438 c 0
490 438 479 442 470 448 c 2
120 710 l 2
107 720 100 735 100 750 c 0
100 778 124 800 150 800 c 0
160 800 171 797 180 790 c 2
500 550 l 1
820 790 l 2
829 797 840 800 850 800 c 0"#,
),
GlyphDescriptor::new(
"anu",
r#"
150 750 m 0
150 777 173 800 200 800 c 0
213 800 226 795 236 785 c 2
500 513 l 1
764 785 l 2
774 795 787 800 800 800 c 0
827 800 850 777 850 750 c 0
850 737 845 725 836 715 c 2
550 421 l 1
550 50 l 2
550 22 528 0 500 0 c 0
472 0 450 22 450 50 c 2
450 421 l 1
164 715 l 2
155 725 150 737 150 750 c 0"#,
),
GlyphDescriptor::new(
"awen",
r#"
547 767 m 2
785 100 l 1
900 100 l 2
928 100 950 78 950 50 c 0
950 22 928 0 900 0 c 2
750 0 l 2
729 0 710 13 703 33 c 2
500 601 l 1
297 33 l 2
290 13 271 0 250 0 c 2
100 0 l 2
72 0 50 22 50 50 c 0
50 78 72 100 100 100 c 2
215 100 l 1
453 767 l 2
460 787 479 800 500 800 c 0
521 800 540 787 547 767 c 2"#,
),
GlyphDescriptor::new(
"e",
r#"
450 800 m 0
450 826 474 850 500 850 c 0
512 850 525 845 535 835 c 2
935 435 l 2
945 425 950 412 950 400 c 0
950 388 945 375 935 365 c 2
535 -35 l 2
525 -45 512 -50 500 -50 c 0
474 -50 450 -26 450 0 c 0
450 12 455 25 465 35 c 2
829 400 l 1
465 765 l 2
455 775 450 788 450 800 c 0
50 800 m 0
50 826 74 850 100 850 c 0
112 850 125 845 135 835 c 2
535 435 l 2
545 425 550 412 550 400 c 0
550 388 545 375 535 365 c 2
135 -35 l 2
125 -45 112 -50 100 -50 c 0
74 -50 50 -26 50 0 c 0
50 12 55 25 65 35 c 2
429 400 l 1
65 765 l 2
55 775 50 788 50 800 c 0"#,
),
GlyphDescriptor::new(
"en",
r#"
550 750 m 2
550 450 l 1
850 450 l 2
878 450 900 428 900 400 c 0
900 372 878 350 850 350 c 2
550 350 l 1
550 50 l 2
550 22 528 0 500 0 c 0
472 0 450 22 450 50 c 2
450 350 l 1
150 350 l 2
122 350 100 372 100 400 c 0
100 428 122 450 150 450 c 2
450 450 l 1
450 750 l 2
450 778 472 800 500 800 c 0
528 800 550 778 550 750 c 2"#,
),
GlyphDescriptor::new(
"esun",
r#"
600 542 m 1
625 540 651 539 678 539 c 0
759 539 799 559 820 580 c 0
841 601 850 630 850 667 c 0
850 704 841 721 831 731 c 0
821 741 804 750 767 750 c 0
731 750 705 741 686 726 c 0
666 711 648 688 636 651 c 0
628 628 615 588 600 542 c 1
399 256 m 1
364 259 327 261 287 261 c 0
227 261 195 243 178 223 c 0
160 202 150 170 150 133 c 0
150 96 159 79 169 69 c 0
179 59 196 50 233 50 c 0
270 50 295 58 314 72 c 0
333 86 351 109 364 149 c 0
372 173 384 211 399 256 c 1
50 800 m 0
50 825 69 850 100 850 c 0
119 850 138 839 146 820 c 2
146 820 147 819 147 818 c 0
154 805 157 796 169 779 c 0
186 753 214 718 255 682 c 0
310 634 389 585 500 559 c 1
518 612 533 657 542 683 c 0
559 734 585 776 625 806 c 0
665 836 714 850 767 850 c 0
819 850 867 837 902 802 c 0
937 767 950 719 950 667 c 0
950 615 937 555 891 509 c 0
845 463 775 439 678 439 c 0
639 439 603 441 568 445 c 1
556 409 543 372 531 336 c 1
668 305 763 251 827 193 c 0
875 150 905 105 923 72 c 0
935 49 941 37 947 18 c 0
947 16 948 16 948 15 c 2
948 12 l 2
949 8 950 3 950 -1 c 0
950 -26 929 -50 899 -50 c 0
877 -50 858 -36 852 -13 c 1
852 -13 851 -12 851 -11 c 0
851 -10 850 -7 849 -4 c 0
846 3 842 13 835 25 c 0
822 50 799 83 760 118 c 0
708 164 627 214 500 241 c 1
483 189 468 145 459 118 c 0
441 65 414 20 374 -9 c 0
334 -39 285 -50 233 -50 c 0
181 -50 133 -37 98 -2 c 0
63 33 50 81 50 133 c 0
50 185 63 243 102 288 c 0
142 335 205 361 287 361 c 0
339 361 387 358 432 353 c 1
444 389 456 426 468 463 c 1
345 494 254 550 189 607 c 0
141 649 107 692 86 724 c 0
75 740 67 753 62 763 c 0
59 769 55 773 55 779 c 1
54 780 l 2
51 786 50 793 50 800 c 0"#,
),
GlyphDescriptor::new(
"ijo",
r#"
500 -50 m 0
251 -50 50 151 50 400 c 0
50 649 251 850 500 850 c 0
749 850 950 649 950 400 c 0
950 151 749 -50 500 -50 c 0
500 50 m 0
693 50 850 207 850 400 c 0
850 593 693 750 500 750 c 0
307 750 150 593 150 400 c 0
150 207 307 50 500 50 c 0"#,
),
GlyphDescriptor::new(
"ike",
r#"
900 275 m 0
900 250 881 225 850 225 c 0
828 225 808 239 802 262 c 0
768 382 648 475 500 475 c 0
352 475 232 382 198 262 c 0
192 239 172 225 150 225 c 0
119 225 100 250 100 275 c 0
100 279 101 284 102 288 c 0
149 455 311 575 500 575 c 0
689 575 851 455 898 288 c 0
899 284 900 279 900 275 c 0"#,
),
GlyphDescriptor::new(
"ilo",
r#"
450 700 m 1
200 700 l 1
200 450 l 1
450 450 l 1
450 700 l 1
550 450 m 1
800 450 l 1
800 700 l 1
550 700 l 1
550 450 l 1
100 750 m 2
100 778 122 800 150 800 c 2
850 800 l 2
878 800 900 778 900 750 c 2
900 400 l 2
900 372 878 350 850 350 c 2
550 350 l 1
550 0 l 2
550 -28 528 -50 500 -50 c 0
472 -50 450 -28 450 0 c 2
450 350 l 1
150 350 l 2
122 350 100 372 100 400 c 2
100 750 l 2"#,
),
GlyphDescriptor::new(
"insa",
r#"
200 600 m 0
228 600 250 578 250 550 c 2
250 300 l 1
750 300 l 1
750 550 l 2
750 578 772 600 800 600 c 0
828 600 850 578 850 550 c 2
850 250 l 2
850 222 828 200 800 200 c 2
200 200 l 2
172 200 150 222 150 250 c 2
150 550 l 2
150 578 172 600 200 600 c 0
575 475 m 0
575 434 541 400 500 400 c 0
459 400 425 434 425 475 c 0
425 516 459 550 500 550 c 0
541 550 575 516 575 475 c 0"#,
),
GlyphDescriptor::new(
"jaki",
r#"
495 126 m 1
446 144 386 186 347 218 c 1
307 203 246 180 223 160 c 0
211 150 209 145 209 141 c 0
210 140 212 137 217 131 c 0
226 122 240 111 259 101 c 0
297 81 342 69 369 69 c 0
438 69 474 87 495 126 c 1
750 393 m 1
694 366 643 341 588 317 c 0
589 314 591 312 592 309 c 0
607 279 612 252 612 207 c 1
632 207 l 1
683 263 718 320 750 393 c 1
497 275 m 1
484 269 472 264 459 259 c 1
476 248 494 234 511 227 c 1
511 233 510 239 509 244 c 0
509 246 502 267 497 275 c 1
418 393 m 1
382 375 367 362 362 354 c 0
361 353 361 352 361 352 c 2
361 351 361 347 368 338 c 0
369 336 371 335 372 333 c 0
396 342 419 350 442 360 c 1
434 371 426 382 418 393 c 1
450 515 m 1
484 528 521 542 562 556 c 0
648 586 707 613 743 637 c 0
760 648 770 657 776 664 c 1
772 666 767 668 760 670 c 0
732 679 686 686 622 686 c 0
559 686 463 667 370 612 c 1
392 585 421 551 450 515 c 1
369 830 m 0
394 830 419 809 419 780 c 0
419 760 407 743 389 734 c 1
475 772 559 786 622 786 c 0
690 786 749 779 792 765 c 0
833 751 881 722 881 668 c 0
881 642 869 619 855 602 c 0
841 585 821 568 798 553 c 0
752 523 685 492 595 461 c 0
565 451 537 441 513 432 c 1
520 422 528 413 535 403 c 1
563 416 596 430 626 445 c 0
683 471 748 502 819 536 c 0
826 539 833 541 841 541 c 0
870 541 890 519 890 491 c 0
890 486 890 480 888 475 c 0
838 325 788 224 690 122 c 2
675 107 l 1
654 107 l 2
634 107 614 107 596 108 c 1
594 101 591 94 588 88 c 0
556 17 477 -31 369 -31 c 0
291 -31 193 14 146 61 c 0
128 79 109 105 109 139 c 0
109 182 133 214 160 237 c 0
192 264 234 280 276 298 c 1
269 312 264 326 262 342 c 0
262 346 261 349 261 353 c 0
261 373 268 392 278 408 c 0
295 435 323 455 355 473 c 1
321 516 287 554 257 591 c 0
238 613 215 643 215 683 c 0
215 724 240 750 264 770 c 0
284 787 311 804 344 823 c 0
352 828 360 830 369 830 c 0"#,
),
GlyphDescriptor::new(
"jan",
r#"
200 450 m 0
200 284 334 150 500 150 c 0
666 150 800 284 800 450 c 0
800 616 666 750 500 750 c 0
334 750 200 616 200 450 c 0
101 -50 m 0
74 -50 51 -28 51 0 c 0
51 14 57 28 68 38 c 2
218 166 l 1
145 238 100 339 100 450 c 0
100 671 279 850 500 850 c 0
721 850 900 671 900 450 c 0
900 339 855 238 782 166 c 1
932 38 l 2
944 28 950 14 950 0 c 0
950 -27 926 -50 900 -50 c 0
889 -50 877 -46 868 -38 c 2
701 104 l 1
642 70 573 50 500 50 c 0
427 50 358 70 299 104 c 1
134 -38 l 2
124 -46 113 -50 101 -50 c 0"#,
),
GlyphDescriptor::new(
"jelo",
r#"
644 30 m 1
500 265 l 1
356 30 l 1
644 30 l 1
583 597 m 0
583 644 546 681 500 681 c 0
454 681 417 644 417 597 c 0
417 550 454 513 500 513 c 0
546 513 583 550 583 597 c 0
266 -70 m 2
241 -70 217 -49 217 -20 c 0
217 -11 219 -2 224 6 c 2
450 375 l 1
450 420 l 1
389 437 341 486 324 547 c 1
270 547 l 2
242 547 220 569 220 597 c 0
220 625 242 647 270 647 c 2
324 647 l 1
341 708 389 757 450 774 c 1
450 827 l 2
450 855 472 877 500 877 c 0
528 877 550 855 550 827 c 2
550 774 l 1
611 757 659 708 676 647 c 1
730 647 l 2
758 647 780 625 780 597 c 0
780 569 758 547 730 547 c 2
676 547 l 1
659 486 611 437 550 420 c 1
550 375 l 1
776 6 l 2
781 -2 783 -11 783 -20 c 0
783 -49 759 -70 734 -70 c 2
266 -70 l 2"#,
),
GlyphDescriptor::new(
"jo",
r#"
600 650 m 0
600 705 555 750 500 750 c 0
445 750 400 705 400 650 c 0
400 595 445 550 500 550 c 0
555 550 600 595 600 650 c 0
700 650 m 0
700 540 610 450 500 450 c 0
449 450 403 469 368 500 c 1
315 455 280 383 280 300 c 0
280 157 383 50 500 50 c 0
603 50 695 133 716 250 c 1
545 250 l 2
517 250 495 272 495 300 c 0
495 328 517 350 545 350 c 2
770 350 l 2
798 350 820 328 820 300 c 0
820 112 682 -50 500 -50 c 0
318 -50 180 112 180 300 c 0
180 415 230 519 311 583 c 1
304 604 300 627 300 650 c 0
300 760 390 850 500 850 c 0
610 850 700 760 700 650 c 0"#,
),
GlyphDescriptor::new(
"kala",
r#"
616 575 m 1
509 571 406 494 320 400 c 1
406 306 510 229 616 225 c 1
633 225 l 2
703 225 756 242 791 269 c 0
827 297 850 339 850 400 c 0
850 461 827 503 791 531 c 0
756 558 703 575 633 575 c 2
616 575 l 1
50 624 m 0
50 649 70 675 100 675 c 0
118 675 135 665 144 648 c 1
145 647 l 2
145 646 146 645 147 644 c 0
149 641 151 636 154 630 c 0
160 619 171 603 183 583 c 0
202 553 225 516 255 477 c 1
346 574 470 669 612 675 c 1
634 675 l 2
718 675 795 654 852 611 c 0
916 562 950 489 950 400 c 0
950 311 916 238 852 189 c 0
795 146 718 125 634 125 c 2
612 125 l 1
470 131 346 226 255 323 c 1
213 268 182 222 154 170 c 0
151 164 149 159 147 156 c 0
146 155 145 154 145 153 c 2
144 152 l 1
135 135 118 125 100 125 c 0
71 125 50 150 50 175 c 0
50 183 52 191 56 198 c 1
56 199 l 1
61 204 61 211 67 219 c 0
74 232 85 250 98 271 c 0
120 307 151 352 189 400 c 1
151 448 120 493 98 529 c 0
85 550 74 568 67 581 c 0
61 590 61 596 56 601 c 0
52 608 50 616 50 624 c 0"#,
),
GlyphDescriptor::new(
"kalama",
r#"
500 797 m 0
528 797 550 775 550 747 c 2
550 563 l 2
550 535 528 513 500 513 c 0
472 513 450 535 450 563 c 2
450 747 l 2
450 775 472 797 500 797 c 0
200 696 m 0
200 721 221 745 251 745 c 0
270 745 288 733 296 714 c 2
366 544 l 2
369 537 370 530 370 524 c 0
370 499 349 475 319 475 c 0
300 475 282 487 274 506 c 2
204 676 l 2
201 683 200 690 200 696 c 0
681 475 m 0
651 475 630 499 630 524 c 0
630 530 631 537 634 544 c 2
704 714 l 2
712 733 730 745 749 745 c 0
779 745 800 721 800 696 c 0
800 690 799 683 796 676 c 2
726 506 l 2
718 487 700 475 681 475 c 0
255 301 m 1
278 187 379 101 500 101 c 0
621 101 722 187 745 301 c 1
255 301 l 1
150 351 m 0
150 379 172 401 200 401 c 2
800 401 l 2
828 401 850 379 850 351 c 0
850 158 693 1 500 1 c 0
307 1 150 158 150 351 c 0"#,
),
GlyphDescriptor::new(
"kama",
r#"
50 138 m 0
50 165 74 188 100 188 c 0
111 188 123 184 132 176 c 0
186 131 243 107 315 101 c 1
553 767 l 2
560 787 578 800 599 800 c 0
620 800 638 788 646 769 c 2
896 156 l 2
899 150 900 143 900 137 c 0
900 126 896 116 889 107 c 1
889 106 l 1
888 106 l 2
887 104 885 102 883 100 c 0
864 81 849 68 817 50 c 0
770 24 699 0 600 0 c 0
572 0 550 22 550 50 c 0
550 78 572 100 600 100 c 0
697 100 748 123 790 151 c 1
603 610 l 1
397 33 l 2
390 13 371 0 350 0 c 0
239 0 150 31 68 99 c 0
56 109 50 124 50 138 c 0"#,
),
GlyphDescriptor::new(
"kasi",
r#"
545 525 m 0
553 520 582 516 601 516 c 0
639 516 686 523 730 535 c 0
774 548 802 565 820 589 c 0
838 612 850 647 850 700 c 0
850 717 850 727 849 734 c 0
845 735 840 735 831 736 c 0
811 737 784 737 744 737 c 0
700 737 671 728 644 703 c 0
614 676 583 625 547 532 c 0
546 529 545 527 545 525 c 0
445 511 m 0
429 601 378 632 310 669 c 0
280 685 229 704 155 709 c 1
150 684 150 655 150 614 c 0
150 580 160 554 177 533 c 0
194 512 222 494 262 481 c 0
307 466 343 459 374 459 c 0
376 459 378 459 380 459 c 0
403 460 425 466 449 478 c 1
448 486 448 494 447 501 c 0
446 504 445 508 445 511 c 0
50 614 m 2
50 619 l 2
50 672 50 728 73 780 c 0
81 798 99 810 119 810 c 0
230 810 309 784 359 757 c 0
397 736 446 708 485 660 c 0
487 658 488 656 490 654 c 1
516 707 544 748 576 777 c 0
626 823 683 837 744 837 c 2
745 837 l 2
783 837 813 837 837 836 c 0
859 835 885 831 905 820 c 0
948 797 950 760 950 703 c 0
950 702 950 701 950 700 c 0
950 632 935 574 900 528 c 0
865 482 815 454 757 438 c 0
709 425 653 416 603 416 c 0
586 416 569 417 554 419 c 2
550 419 l 1
550 1 l 2
550 -27 528 -49 500 -49 c 0
472 -49 450 -27 450 1 c 2
450 371 l 1
429 364 407 360 384 359 c 0
380 359 376 359 372 359 c 0
326 359 280 369 231 385 c 0
133 416 50 491 50 614 c 2"#,
),
GlyphDescriptor::new(
"ken",
r#"
800 7 m 2
800 0 l 2
800 -28 778 -50 750 -50 c 0
722 -50 700 -28 700 0 c 2
700 2 l 2
700 4 699 8 699 13 c 0
698 22 696 35 693 52 c 0
686 85 672 130 645 174 c 0
596 252 500 337 300 349 c 1
300 0 l 2
300 -28 278 -50 250 -50 c 0
222 -50 200 -28 200 0 c 2
200 800 l 2
200 828 222 850 250 850 c 0
278 850 300 828 300 800 c 2
300 451 l 1
500 463 596 548 645 626 c 0
672 670 686 715 693 748 c 0
696 765 698 778 699 787 c 0
699 792 700 796 700 798 c 2
700 800 l 2
700 828 722 850 750 850 c 0
778 850 800 828 800 800 c 2
800 793 l 2
800 770 796 755 791 727 c 0
759 580 671 462 524 400 c 1
623 358 688 293 730 226 c 0
765 170 782 115 791 73 c 0
796 45 800 30 800 7 c 2"#,
),
GlyphDescriptor::new(
"kepeken",
r#"
450 750 m 1
270 750 l 1
270 560 l 1
450 560 l 1
450 750 l 1
550 560 m 1
730 560 l 1
730 750 l 1
550 750 l 1
550 560 l 1
107 170 m 0
107 195 129 220 157 220 c 0
173 220 188 212 198 198 c 2
229 154 l 1
251 272 334 372 450 395 c 1
450 460 l 1
220 460 l 2
192 460 170 482 170 510 c 2
170 800 l 2
170 828 192 850 220 850 c 2
780 850 l 2
808 850 830 828 830 800 c 2
830 510 l 2
830 482 808 460 780 460 c 2
550 460 l 1
550 395 l 1
686 368 777 233 777 90 c 2
777 0 l 2
777 -28 755 -50 727 -50 c 0
699 -50 677 -28 677 0 c 2
677 90 l 2
677 210 594 300 500 300 c 0
406 300 323 210 323 90 c 2
323 0 l 2
323 -26 303 -50 273 -50 c 0
256 -50 240 -42 231 -28 c 2
116 142 l 2
110 151 107 160 107 170 c 0"#,
),
GlyphDescriptor::new(
"kili",
r#"
420 540 m 0
390 579 334 609 259 610 c 0
238 610 215 602 195 578 c 0
174 552 150 499 150 398 c 0
150 203 305 48 500 48 c 0
695 48 850 203 850 398 c 0
850 496 826 548 805 574 c 0
785 598 763 606 740 606 c 0
662 606 613 583 580 540 c 0
561 515 531 500 500 500 c 0
469 500 439 515 420 540 c 0
395 800 m 0
395 826 417 850 444 850 c 0
459 850 472 844 483 833 c 2
483 833 484 833 484 832 c 0
489 827 493 822 498 814 c 0
524 775 550 720 550 651 c 1
598 687 666 706 740 706 c 0
841 706 950 625 950 398 c 0
950 148 750 -52 500 -52 c 0
250 -52 50 148 50 398 c 0
50 629 158 710 258 710 c 2
260 710 l 2
334 709 401 687 450 650 c 1
450 695 434 732 416 757 c 0
413 761 411 763 410 765 c 0
400 775 395 788 395 800 c 0"#,
),
GlyphDescriptor::new(
"kiwen",
r#"
839 532 m 1
720 710 l 1
280 710 l 1
161 532 l 1
500 83 l 1
839 532 l 1
500 -50 m 0
484 -50 469 -43 460 -30 c 2
60 500 l 2
53 509 50 519 50 530 c 0
50 540 52 550 58 558 c 2
211 788 l 2
220 802 236 810 253 810 c 2
747 810 l 2
764 810 780 802 789 788 c 2
942 558 l 2
948 550 950 540 950 530 c 0
950 519 947 509 940 500 c 2
540 -30 l 2
531 -43 516 -50 500 -50 c 0"#,
),
GlyphDescriptor::new(
"ko",
r#"
673 566 m 0
648 566 623 587 623 617 c 0
623 685 568 740 500 740 c 0
434 740 380 688 377 622 c 0
378 615 376 608 374 601 c 0
367 580 348 566 327 566 c 0
322 566 316 567 311 569 c 0
298 573 285 575 273 575 c 0
211 575 150 526 150 451 c 0
150 399 183 351 235 334 c 0
256 327 270 308 270 287 c 0
270 273 263 261 257 253 c 0
243 232 236 208 236 184 c 0
236 121 290 60 359 60 c 0
397 60 435 78 459 111 c 0
469 125 485 132 500 132 c 0
515 132 530 124 540 111 c 0
564 78 602 60 640 60 c 0
665 60 690 68 712 84 c 0
745 108 763 146 763 184 c 0
763 209 756 234 740 256 c 0
733 265 730 275 730 286 c 0
730 307 744 327 765 334 c 0
817 351 850 399 850 451 c 0
850 526 789 575 727 575 c 0
715 575 702 573 689 569 c 0
684 567 678 566 673 566 c 0
50 451 m 0
50 586 161 674 273 674 c 2
284 674 l 1
309 769 397 840 500 840 c 0
603 840 691 769 716 674 c 1
727 674 l 2
839 674 950 586 950 450 c 0
950 375 912 305 849 264 c 1
859 238 864 212 864 185 c 0
864 116 832 47 771 3 c 0
731 -26 685 -40 640 -40 c 0
590 -40 541 -23 500 10 c 1
459 -23 409 -40 359 -40 c 0
314 -40 268 -26 228 3 c 0
168 47 136 114 136 183 c 0
136 210 141 238 151 264 c 1
88 305 50 376 50 451 c 0"#,
),
GlyphDescriptor::new(
"kon",
r#"
376 851 m 0
400 851 427 830 427 800 c 0
427 778 411 757 388 751 c 0
350 741 324 734 303 719 c 0
285 706 267 685 256 642 c 0
252 627 250 614 250 601 c 0
250 546 284 501 339 432 c 0
340 429 343 428 344 425 c 0
394 362 450 292 450 199 c 0
450 107 405 36 355 1 c 0
319 -24 278 -37 242 -47 c 0
241 -47 239 -48 238 -48 c 0
233 -49 228 -50 223 -50 c 0
198 -50 174 -30 174 1 c 0
174 23 188 41 210 48 c 0
250 59 277 68 298 83 c 0
316 96 333 116 344 158 c 0
348 172 350 185 350 198 c 0
350 254 316 300 261 368 c 0
259 371 259 372 256 375 c 0
205 439 150 508 150 601 c 0
150 692 195 765 245 800 c 0
283 827 326 840 364 849 c 0
368 850 372 851 376 851 c 0
776 851 m 0
800 851 827 830 827 800 c 0
827 778 811 757 788 751 c 0
750 741 724 734 703 719 c 0
685 706 667 685 656 642 c 0
652 627 650 614 650 601 c 0
650 546 684 501 739 432 c 0
740 429 743 428 744 425 c 0
794 362 850 292 850 199 c 0
850 107 805 36 755 1 c 0
719 -24 678 -37 642 -47 c 0
641 -47 639 -48 638 -48 c 0
633 -49 628 -50 623 -50 c 0
598 -50 574 -30 574 1 c 0
574 23 588 41 610 48 c 0
650 59 677 68 698 83 c 0
716 96 733 116 744 158 c 0
748 172 750 185 750 198 c 0
750 254 716 300 661 368 c 0
659 371 659 372 656 375 c 0
605 439 550 508 550 601 c 0
550 692 595 765 645 800 c 0
683 827 726 840 764 849 c 0
768 850 772 851 776 851 c 0"#,
),
GlyphDescriptor::new(
"kule",
r#"
312 300 m 1
197 100 l 1
803 100 l 1
688 300 l 1
312 300 l 1
630 400 m 1
500 625 l 1
370 400 l 1
630 400 l 1
110 0 m 2
84 0 60 21 60 50 c 0
60 59 63 67 67 75 c 2
197 300 l 1
100 300 l 2
72 300 50 322 50 350 c 0
50 378 72 400 100 400 c 2
255 400 l 1
457 750 l 2
466 765 482 775 500 775 c 0
518 775 534 765 543 750 c 2
745 400 l 1
900 400 l 2
928 400 950 378 950 350 c 0
950 322 928 300 900 300 c 2
803 300 l 1
933 75 l 2
937 67 940 59 940 50 c 0
940 21 916 0 890 0 c 2
110 0 l 2"#,
),
GlyphDescriptor::new(
"kulupu",
r#"
262 -6 m 0
152 -6 62 84 62 194 c 0
62 304 152 394 262 394 c 0
372 394 462 304 462 194 c 0
462 84 372 -6 262 -6 c 0
262 94 m 0
317 94 362 139 362 194 c 0
362 249 317 294 262 294 c 0
207 294 162 249 162 194 c 0
162 139 207 94 262 94 c 0
738 -6 m 0
628 -6 538 84 538 194 c 0
538 304 628 394 738 394 c 0
848 394 938 304 938 194 c 0
938 84 848 -6 738 -6 c 0
738 94 m 0
793 94 838 139 838 194 c 0
838 249 793 294 738 294 c 0
683 294 638 249 638 194 c 0
638 139 683 94 738 94 c 0
500 406 m 0
390 406 300 496 300 606 c 0
300 716 390 806 500 806 c 0
610 806 700 716 700 606 c 0
700 496 610 406 500 406 c 0
500 506 m 0
555 506 600 551 600 606 c 0
600 661 555 706 500 706 c 0
445 706 400 661 400 606 c 0
400 551 445 506 500 506 c 0"#,
),
GlyphDescriptor::new(
"kute",
r#"
183 520 m 0
183 524 183 532 186 535 c 0
186 536 186 537 186 538 c 0
193 551 195 562 203 580 c 0
214 607 231 643 255 680 c 0
301 751 381 836 500 836 c 0
711 836 816 666 816 519 c 0
816 319 679 179 554 94 c 0
491 51 427 19 380 -2 c 0
356 -12 337 -20 323 -25 c 0
314 -29 309 -32 302 -32 c 1
301 -33 l 1
300 -33 l 2
295 -35 290 -35 285 -35 c 0
260 -35 235 -17 235 13 c 0
235 34 249 55 270 62 c 1
271 62 l 2
272 62 272 63 274 64 c 0
296 71 311 77 340 90 c 0
504 162 716 306 716 519 c 0
716 624 643 736 500 736 c 0
434 736 380 688 339 625 c 0
319 595 305 565 295 542 c 0
288 526 286 518 281 507 c 1
281 505 l 1
274 484 253 470 232 470 c 0
202 470 183 496 183 520 c 0
516 576 m 0
546 576 564 551 564 526 c 0
564 521 564 516 562 511 c 0
528 405 475 321 380 260 c 0
372 255 363 252 354 252 c 0
329 252 303 274 303 303 c 0
303 319 311 335 326 345 c 0
397 390 439 452 467 541 c 0
474 562 495 576 516 576 c 0"#,
),
GlyphDescriptor::new(
"la",
r#"
400 -50 m 0
375 -50 350 -30 350 0 c 0
350 20 361 38 381 46 c 0
485 87 550 244 550 400 c 0
550 556 485 713 381 754 c 0
361 762 350 780 350 800 c 0
350 830 375 850 400 850 c 0
406 850 413 848 419 846 c 0
564 788 650 603 650 400 c 0
650 197 564 12 419 -46 c 0
413 -48 406 -50 400 -50 c 0"#,
),
GlyphDescriptor::new(
"lape",
r#"
750 500 m 0
695 500 650 455 650 400 c 0
650 345 695 300 750 300 c 0
805 300 850 345 850 400 c 0
850 455 805 500 750 500 c 0
556 350 m 1
100 350 l 2
72 350 50 372 50 400 c 0
50 428 72 450 100 450 c 2
556 450 l 1
578 536 657 600 750 600 c 0
860 600 950 510 950 400 c 0
950 290 860 200 750 200 c 0
657 200 578 264 556 350 c 1"#,
),
GlyphDescriptor::new(
"laso",
r#"
356 59 m 1
644 59 l 1
500 294 l 1
356 59 l 1
574 591 m 0
619 601 660 616 680 636 c 0
697 653 711 685 722 721 c 1
719 720 717 720 714 719 c 0
669 709 628 693 608 673 c 0
592 656 577 624 566 589 c 1
569 590 571 590 574 591 c 0
288 719 m 0
285 720 283 720 280 721 c 1
291 686 306 654 322 637 c 0
342 617 383 601 428 591 c 0
431 590 433 590 436 589 c 1
425 625 411 657 394 674 c 0
374 694 333 709 288 719 c 0
215 830 m 2
220 830 l 2
221 830 224 829 226 829 c 0
231 829 237 828 245 827 c 0
261 825 284 822 309 817 c 0
356 807 423 787 465 744 c 0
479 729 491 712 501 694 c 1
511 712 523 729 537 743 c 0
579 786 645 807 692 817 c 0
717 822 740 825 756 827 c 0
764 828 771 829 776 829 c 0
778 829 781 830 782 830 c 2
787 830 l 2
815 830 836 808 836 780 c 2
836 771 l 2
836 770 835 768 835 766 c 0
829 742 827 725 819 696 c 0
807 657 787 602 751 566 c 0
709 523 642 503 595 493 c 0
579 490 564 487 551 485 c 1
551 402 l 1
776 35 l 2
781 27 783 18 783 9 c 0
783 -20 759 -41 734 -41 c 2
266 -41 l 2
241 -41 217 -20 217 9 c 0
217 18 219 27 224 35 c 2
451 406 l 1
451 485 l 1
438 487 422 489 406 493 c 0
359 503 293 524 251 567 c 0
216 603 195 658 183 697 c 0
175 725 173 743 167 766 c 0
167 768 166 770 166 771 c 2
166 780 l 2
166 808 187 830 215 830 c 2"#,
),
GlyphDescriptor::new(
"lawa",
r#"
164 500 m 1
155 469 150 436 150 400 c 0
150 227 269 100 400 100 c 0
531 100 650 227 650 400 c 0
650 436 645 469 636 500 c 1
164 500 l 1
214 600 m 1
586 600 l 1
539 662 471 700 400 700 c 0
329 700 261 662 214 600 c 1
739 500 m 1
746 468 750 434 750 400 c 0
750 187 600 0 400 0 c 0
200 0 50 187 50 400 c 0
50 613 200 800 400 800 c 0
533 800 644 718 703 600 c 1
900 600 l 2
928 600 950 578 950 550 c 0
950 522 928 500 900 500 c 2
739 500 l 1"#,
),
GlyphDescriptor::new(
"len",
r#"
750 250 m 1
750 700 l 1
250 700 l 1
250 250 l 1
750 250 l 1
200 800 m 2
800 800 l 2
828 800 850 778 850 750 c 2
850 0 l 2
850 -28 828 -50 800 -50 c 0
772 -50 750 -28 750 0 c 2
750 150 l 1
550 150 l 1
550 0 l 2
550 -28 528 -50 500 -50 c 0
472 -50 450 -28 450 0 c 2
450 150 l 1
250 150 l 1
250 0 l 2
250 -28 228 -50 200 -50 c 0
172 -50 150 -28 150 0 c 2
150 750 l 2
150 778 172 800 200 800 c 2"#,
),
GlyphDescriptor::new(
"lete",
r#"
701 797 m 0
730 797 750 771 750 746 c 0
750 737 748 729 743 721 c 2
587 450 l 1
900 450 l 2
928 450 950 428 950 400 c 0
950 372 928 350 900 350 c 2
587 350 l 1
743 79 l 2
748 71 750 63 750 54 c 0
750 29 730 3 701 3 c 0
684 3 666 13 657 29 c 2
500 300 l 1
343 29 l 2
334 13 316 3 299 3 c 0
270 3 250 29 250 54 c 0
250 63 252 71 257 79 c 2
413 350 l 1
100 350 l 2
72 350 50 372 50 400 c 0
50 428 72 450 100 450 c 2
413 450 l 1
257 721 l 2
252 729 250 737 250 746 c 0
250 771 270 797 299 797 c 0
316 797 334 787 343 771 c 2
500 500 l 1
657 771 l 2
666 787 684 797 701 797 c 0"#,
),
GlyphDescriptor::new(
"li",
r#"
200 800 m 0
200 827 224 850 250 850 c 0
261 850 273 846 282 838 c 2
757 438 l 2
768 428 775 415 775 400 c 0
775 385 768 372 757 362 c 2
282 -38 l 2
273 -46 261 -50 250 -50 c 0
224 -50 200 -27 200 0 c 0
200 14 206 28 218 38 c 2
647 400 l 1
218 762 l 2
206 772 200 786 200 800 c 0"#,
),
GlyphDescriptor::new(
"lili",
r#"
650 550 m 0
678 550 700 526 700 500 c 0
700 490 697 479 690 470 c 2
540 270 l 2
531 257 516 250 500 250 c 0
484 250 469 257 460 270 c 2
310 470 l 2
303 479 300 490 300 500 c 0
300 526 322 550 350 550 c 0
365 550 380 543 390 530 c 2
500 383 l 1
610 530 l 2
620 543 635 550 650 550 c 0"#,
),
GlyphDescriptor::new(
"linja",
r#"
950 287 m 0
950 262 931 237 900 237 c 0
878 237 858 252 852 275 c 0
829 366 818 459 737 491 c 0
721 497 705 500 689 500 c 0
630 500 572 459 548 385 c 0
513 275 418 200 312 200 c 0
284 200 255 206 226 217 c 0
105 265 84 371 52 500 c 0
51 504 50 509 50 513 c 0
50 538 69 563 100 563 c 0
122 563 142 548 148 525 c 0
171 434 182 341 263 309 c 0
279 303 295 300 311 300 c 0
370 300 428 341 452 415 c 0
487 525 582 600 688 600 c 0
716 600 745 594 774 583 c 0
895 535 916 429 948 300 c 0
949 296 950 291 950 287 c 0"#,
),
GlyphDescriptor::new(
"lipu",
r#"
250 700 m 1
250 100 l 1
750 100 l 1
750 700 l 1
250 700 l 1
150 750 m 2
150 778 172 800 200 800 c 2
800 800 l 2
828 800 850 778 850 750 c 2
850 50 l 2
850 22 828 0 800 0 c 2
200 0 l 2
172 0 150 22 150 50 c 2
150 750 l 2"#,
),
GlyphDescriptor::new(
"loje",
r#"
356 60 m 1
644 60 l 1
500 295 l 1
356 60 l 1
266 -40 m 2
241 -40 217 -19 217 10 c 0
217 19 219 28 224 36 c 2
457 417 l 2
466 432 483 441 500 441 c 0
517 441 534 432 543 417 c 2
776 36 l 2
781 28 783 19 783 10 c 0
783 -19 759 -40 734 -40 c 2
266 -40 l 2
286 735 m 1
309 638 396 565 500 565 c 0
604 565 691 638 714 735 c 1
286 735 l 1
180 785 m 0
180 813 202 835 230 835 c 2
770 835 l 2
798 835 820 813 820 785 c 0
820 608 677 465 500 465 c 0
323 465 180 608 180 785 c 0"#,
),
GlyphDescriptor::new(
"lon",
r#"
575 475 m 0
575 434 541 400 500 400 c 0
459 400 425 434 425 475 c 0
425 516 459 550 500 550 c 0
541 550 575 516 575 475 c 0
50 200 m 0
50 228 72 250 100 250 c 2
900 250 l 2
928 250 950 228 950 200 c 0
950 172 928 150 900 150 c 2
100 150 l 2
72 150 50 172 50 200 c 0"#,
),
GlyphDescriptor::new(
"luka",
r#"
50 350 m 0
50 375 71 400 100 400 c 0
117 400 134 391 143 375 c 2
250 188 l 1
250 290 l 2
250 459 284 586 347 672 c 0
411 760 502 800 600 800 c 0
698 800 789 760 853 672 c 0
916 586 950 459 950 290 c 2
950 0 l 2
950 -28 928 -50 900 -50 c 0
872 -50 850 -28 850 0 c 2
850 290 l 2
850 447 818 550 772 613 c 0
728 674 668 700 600 700 c 0
532 700 472 674 428 613 c 0
382 550 350 447 350 290 c 2
350 0 l 2
350 -27 329 -50 299 -50 c 0
282 -50 266 -41 257 -25 c 2
57 325 l 2
52 333 50 341 50 350 c 0"#,
),
GlyphDescriptor::new(
"lukin",
r#"
500 225 m 0
631 225 770 296 831 400 c 1
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | true |
use std::borrow::Cow;
use crate::*;
pub const CTRL: [GlyphEnc; 36] = [
GlyphEnc::from_parts(EncPos::Pos(0x0000), "NUL", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x200B), "ZWSP", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x200C), "ZWNJ", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x200D), "ZWJ", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2190), "arrowW", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2191), "arrowN", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2192), "arrowE", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2193), "arrowS", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2196), "arrowNW", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2197), "arrowNE", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2198), "arrowSE", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0x2199), "arrowSW", 0, Rep::const_dflt()),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt1TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-500 -100 m 0
-472 -100 -450 -122 -450 -150 c 2
-450 -250 l 2
-450 -278 -472 -300 -500 -300 c 0
-528 -300 -550 -278 -550 -250 c 2
-550 -150 l 2
-550 -122 -528 -100 -500 -100 c 0"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt2TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-400 -100 m 0
-372 -100 -350 -122 -350 -150 c 2
-350 -250 l 2
-350 -278 -372 -300 -400 -300 c 0
-428 -300 -450 -278 -450 -250 c 2
-450 -150 l 2
-450 -122 -428 -100 -400 -100 c 0
-600 -100 m 0
-572 -100 -550 -122 -550 -150 c 2
-550 -250 l 2
-550 -278 -572 -300 -600 -300 c 0
-628 -300 -650 -278 -650 -250 c 2
-650 -150 l 2
-650 -122 -628 -100 -600 -100 c 0"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt3TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-300 -100 m 0
-272 -100 -250 -122 -250 -150 c 2
-250 -250 l 2
-250 -278 -272 -300 -300 -300 c 0
-328 -300 -350 -278 -350 -250 c 2
-350 -150 l 2
-350 -122 -328 -100 -300 -100 c 0
-500 -100 m 0
-472 -100 -450 -122 -450 -150 c 2
-450 -250 l 2
-450 -278 -472 -300 -500 -300 c 0
-528 -300 -550 -278 -550 -250 c 2
-550 -150 l 2
-550 -122 -528 -100 -500 -100 c 0
-700 -100 m 0
-672 -100 -650 -122 -650 -150 c 2
-650 -250 l 2
-650 -278 -672 -300 -700 -300 c 0
-728 -300 -750 -278 -750 -250 c 2
-750 -150 l 2
-750 -122 -728 -100 -700 -100 c 0"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt4TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-400 -100 m 0
-372 -100 -350 -122 -350 -150 c 2
-350 -250 l 2
-350 -278 -372 -300 -400 -300 c 0
-428 -300 -450 -278 -450 -250 c 2
-450 -150 l 2
-450 -122 -428 -100 -400 -100 c 0
-200 -100 m 0
-172 -100 -150 -122 -150 -150 c 2
-150 -250 l 2
-150 -278 -172 -300 -200 -300 c 0
-228 -300 -250 -278 -250 -250 c 2
-250 -150 l 2
-250 -122 -228 -100 -200 -100 c 0
-600 -100 m 0
-572 -100 -550 -122 -550 -150 c 2
-550 -250 l 2
-550 -278 -572 -300 -600 -300 c 0
-628 -300 -650 -278 -650 -250 c 2
-650 -150 l 2
-650 -122 -628 -100 -600 -100 c 0
-800 -100 m 0
-772 -100 -750 -122 -750 -150 c 2
-750 -250 l 2
-750 -278 -772 -300 -800 -300 c 0
-828 -300 -850 -278 -850 -250 c 2
-850 -150 l 2
-850 -122 -828 -100 -800 -100 c 0"#,
),
&[],
),
),
GlyphEnc::from_parts(EncPos::Pos(0xFE00), "VAR01", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE01), "VAR02", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE02), "VAR03", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE03), "VAR04", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE04), "VAR05", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE05), "VAR06", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE06), "VAR07", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE07), "VAR08", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xFE08), "VAR09", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::Pos(0xE01EF), "VAR256", 0, Rep::const_dflt()),
GlyphEnc::from_parts(
EncPos::None,
"combCartExtHalfTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-550 -150 m 4
-550 -122 -528 -100 -500 -100 c 6
0 -100 l 2
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 2
-500 -200 l 6
-528 -200 -550 -178 -550 -150 c 4
-550 950 m 4
-550 978 -528 1000 -500 1000 c 6
0 1000 l 2
28 1000 50 978 50 950 c 0
50 922 28 900 0 900 c 2
-500 900 l 6
-528 900 -550 922 -550 950 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combContExtHalfTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-550 -150 m 4
-550 -122 -528 -100 -500 -100 c 6
0 -100 l 2
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 2
-500 -200 l 6
-528 -200 -550 -178 -550 -150 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt5TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-500 1100 m 4
-472 1100 -450 1078 -450 1050 c 6
-450 950 l 6
-450 922 -472 900 -500 900 c 4
-528 900 -550 922 -550 950 c 6
-550 1050 l 6
-550 1078 -528 1100 -500 1100 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt6TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-400 1100 m 4
-372 1100 -350 1078 -350 1050 c 6
-350 950 l 6
-350 922 -372 900 -400 900 c 4
-428 900 -450 922 -450 950 c 6
-450 1050 l 6
-450 1078 -428 1100 -400 1100 c 4
-600 1100 m 4
-572 1100 -550 1078 -550 1050 c 6
-550 950 l 6
-550 922 -572 900 -600 900 c 4
-628 900 -650 922 -650 950 c 6
-650 1050 l 6
-650 1078 -628 1100 -600 1100 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt7TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-300 1100 m 4
-272 1100 -250 1078 -250 1050 c 6
-250 950 l 6
-250 922 -272 900 -300 900 c 4
-328 900 -350 922 -350 950 c 6
-350 1050 l 6
-350 1078 -328 1100 -300 1100 c 4
-500 1100 m 4
-472 1100 -450 1078 -450 1050 c 6
-450 950 l 6
-450 922 -472 900 -500 900 c 4
-528 900 -550 922 -550 950 c 6
-550 1050 l 6
-550 1078 -528 1100 -500 1100 c 4
-700 1100 m 4
-672 1100 -650 1078 -650 1050 c 6
-650 950 l 6
-650 922 -672 900 -700 900 c 4
-728 900 -750 922 -750 950 c 6
-750 1050 l 6
-750 1078 -728 1100 -700 1100 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::None,
"combCartExt8TickTok",
0,
Rep::const_new(
Cow::Borrowed(
r#"
-400 1100 m 4
-372 1100 -350 1078 -350 1050 c 6
-350 950 l 6
-350 922 -372 900 -400 900 c 4
-428 900 -450 922 -450 950 c 6
-450 1050 l 6
-450 1078 -428 1100 -400 1100 c 4
-200 1100 m 4
-172 1100 -150 1078 -150 1050 c 6
-150 950 l 6
-150 922 -172 900 -200 900 c 4
-228 900 -250 922 -250 950 c 6
-250 1050 l 6
-250 1078 -228 1100 -200 1100 c 4
-600 1100 m 4
-572 1100 -550 1078 -550 1050 c 6
-550 950 l 6
-550 922 -572 900 -600 900 c 4
-628 900 -650 922 -650 950 c 6
-650 1050 l 6
-650 1078 -628 1100 -600 1100 c 4
-800 1100 m 4
-772 1100 -750 1078 -750 1050 c 6
-750 950 l 6
-750 922 -772 900 -800 900 c 4
-828 900 -850 922 -850 950 c 6
-850 1050 l 6
-850 1078 -828 1100 -800 1100 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(EncPos::None, "combCartExtNoneTok", 0, Rep::const_dflt()),
GlyphEnc::from_parts(EncPos::None, "combContExtNoneTok", 0, Rep::const_dflt()),
GlyphEnc::from_parts(
EncPos::Pos(0x3099),
"dakuten",
500,
Rep::const_new(
Cow::Borrowed(
r#"
195 835 m 4
205 845 217 850 230 850 c 4
243 850 255 845 265 835 c 6
405 695 l 6
415 685 420 673 420 660 c 4
420 647 415 635 405 625 c 4
395 615 383 610 370 610 c 4
357 610 345 615 335 625 c 6
195 765 l 6
185 775 180 787 180 800 c 4
180 813 185 825 195 835 c 4
95 755 m 4
105 765 117 770 130 770 c 4
143 770 155 765 165 755 c 6
305 615 l 6
315 605 320 593 320 580 c 4
320 567 315 555 305 545 c 4
295 535 283 530 270 530 c 4
257 530 245 535 235 545 c 6
95 685 l 6
85 695 80 707 80 720 c 4
80 733 85 745 95 755 c 4"#,
),
&[],
),
),
GlyphEnc::from_parts(
EncPos::Pos(0x309A),
"handakuten",
500,
Rep::const_new(
Cow::Borrowed(
r#"
100 700 m 4
100 783 167 850 250 850 c 4
333 850 400 783 400 700 c 4
400 617 333 550 250 550 c 4
167 550 100 617 100 700 c 4
250 750 m 4
222 750 200 728 200 700 c 4
200 672 222 650 250 650 c 4
278 650 300 672 300 700 c 4
300 728 278 750 250 750 c 4"#,
),
&[],
),
),
];
// MARK: NON COMBO
pub const TOK_CTRL: [GlyphDescriptor; 17] = [
GlyphDescriptor::new_with_width(
"startCart",
500,
r#"
110 250 m 2
110 58 307 -100 500 -100 c 0
528 -100 550 -122 550 -150 c 0
550 -178 528 -200 500 -200 c 0
251 -200 10 0 10 250 c 2
10 550 l 2
10 800 251 1000 500 1000 c 0
528 1000 550 978 550 950 c 0
550 922 528 900 500 900 c 0
307 900 110 742 110 550 c 2
110 250 l 2"#,
),
GlyphDescriptor::new_with_width(
"endCart",
500,
r#"
0 -100 m 0
193 -100 390 58 390 250 c 2
390 550 l 2
390 742 193 900 0 900 c 0
-28 900 -50 922 -50 950 c 0
-50 978 -28 1000 0 1000 c 0
249 1000 490 800 490 550 c 2
490 250 l 2
490 0 249 -200 0 -200 c 0
-28 -200 -50 -178 -50 -150 c 0
-50 -122 -28 -100 0 -100 c 0"#,
),
GlyphDescriptor::new(
"combCartExt",
r#"
-1050 -150 m 0
-1050 -122 -1028 -100 -1000 -100 c 2
0 -100 l 2
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 2
-1000 -200 l 2
-1028 -200 -1050 -178 -1050 -150 c 0
-1050 950 m 0
-1050 978 -1028 1000 -1000 1000 c 2
0 1000 l 2
28 1000 50 978 50 950 c 0
50 922 28 900 0 900 c 2
-1000 900 l 2
-1028 900 -1050 922 -1050 950 c 0"#,
),
GlyphDescriptor::new_with_width(
"startLongPi",
1000,
r#"
500 900 m 0
528 900 550 878 550 850 c 2
550 -100 l 1
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
500 -200 l 2
472 -200 450 -178 450 -150 c 2
450 850 l 2
450 878 472 900 500 900 c 0"#,
),
GlyphDescriptor::new(
"combLongPiExt",
r#"
-1050 -150 m 0
-1050 -122 -1028 -100 -1000 -100 c 2
0 -100 l 2
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 2
-1000 -200 l 2
-1028 -200 -1050 -178 -1050 -150 c 0"#,
),
GlyphDescriptor::new("joinStack", ""),
GlyphDescriptor::new("joinScale", ""),
GlyphDescriptor::new(
"startCont",
r#"
0 -200 m 0
-28 -200 -50 -178 -50 -150 c 0
-50 -122 -28 -100 0 -100 c 0
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 0"#,
),
GlyphDescriptor::new(
"endCont",
r#"
0 -200 m 0
-28 -200 -50 -178 -50 -150 c 0
-50 -122 -28 -100 0 -100 c 0
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 0"#,
),
GlyphDescriptor::new(
"combContExt",
r#"
-1050 -150 m 0
-1050 -122 -1028 -100 -1000 -100 c 2
0 -100 l 2
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 2
-1000 -200 l 2
-1028 -200 -1050 -178 -1050 -150 c 0"#,
),
GlyphDescriptor::new(
"startRevCont",
r#"
0 -200 m 0
-28 -200 -50 -178 -50 -150 c 0
-50 -122 -28 -100 0 -100 c 0
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 0"#,
),
GlyphDescriptor::new(
"endRevCont",
r#"
0 -200 m 0
-28 -200 -50 -178 -50 -150 c 0
-50 -122 -28 -100 0 -100 c 0
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 0"#,
),
GlyphDescriptor::new_with_width(
"startCartAlt",
500,
r#"
500 900 m 0
384 900 265 804 265 691 c 2
265 525 l 2
265 469 230 420 181 400 c 1
230 380 265 331 265 275 c 2
265 109 l 2
265 -4 384 -100 500 -100 c 0
528 -100 550 -122 550 -150 c 0
550 -178 528 -200 500 -200 c 0
331 -200 165 -62 165 109 c 2
165 275 l 2
165 294 149 310 130 310 c 0
72 310 25 349 25 400 c 0
25 451 72 490 130 490 c 0
149 490 165 506 165 525 c 2
165 691 l 2
165 862 331 1000 500 1000 c 0
528 1000 550 978 550 950 c 0
550 922 528 900 500 900 c 0"#,
),
GlyphDescriptor::new_with_width(
"endCartAlt",
500,
r#"
235 691 m 2
235 804 116 900 0 900 c 0
-28 900 -50 922 -50 950 c 0
-50 978 -28 1000 0 1000 c 0
169 1000 335 862 335 691 c 2
335 525 l 2
335 506 351 490 370 490 c 0
428 490 475 451 475 400 c 0
475 349 428 310 370 310 c 0
351 310 335 294 335 275 c 2
335 109 l 2
335 -62 169 -200 0 -200 c 0
-28 -200 -50 -178 -50 -150 c 0
-50 -122 -28 -100 0 -100 c 0
116 -100 235 -4 235 109 c 2
235 275 l 2
235 331 270 380 319 400 c 1
270 420 235 469 235 525 c 2
235 691 l 2"#,
),
GlyphDescriptor::new_with_width(
"te",
1000,
r#"
500 100 m 0
472 100 450 122 450 150 c 2
450 850 l 2
450 878 472 900 500 900 c 2
1000 900 l 2
1028 900 1050 878 1050 850 c 0
1050 822 1028 800 1000 800 c 2
550 800 l 1
550 150 l 2
550 122 528 100 500 100 c 0"#,
),
GlyphDescriptor::new_with_width(
"to",
1000,
r#"
500 700 m 0
528 700 550 678 550 650 c 2
550 -50 l 2
550 -78 528 -100 500 -100 c 2
0 -100 l 2
-28 -100 -50 -78 -50 -50 c 0
-50 -22 -28 0 0 0 c 2
450 0 l 1
450 650 l 2
450 678 472 700 500 700 c 0"#,
),
GlyphDescriptor::new_with_width(
"startCartComb",
0,
r#"
-550 -110 m 0
-550 -82 -528 -60 -500 -60 c 0
-460 -60 -457 -100 -425 -100 c 2
0 -100 l 2
28 -100 50 -122 50 -150 c 0
50 -178 28 -200 0 -200 c 2
-425 -200 l 2
-491 -200 -550 -147 -550 -110 c 0
-550 910 m 0
-550 947 -491 1000 -425 1000 c 2
0 1000 l 2
28 1000 50 978 50 950 c 0
50 922 28 900 0 900 c 2
-425 900 l 2
-457 900 -460 860 -500 860 c 0
-528 860 -550 882 -550 910 c 0"#,
),
];
pub const START_CONT: [GlyphDescriptor; 20] = [
GlyphDescriptor::new(
"aTok",
r#"
550 750 m 2
550 300 l 2
550 272 528 250 500 250 c 0
472 250 450 272 450 300 c 2
450 750 l 2
450 778 472 800 500 800 c 0
528 800 550 778 550 750 c 2
672 -102 m 1
637 -160 573 -200 500 -200 c 0
390 -200 300 -110 300 0 c 0
300 110 390 200 500 200 c 0
558 200 610 176 646 137 c 1
678 109 692 72 709 36 c 0
718 17 727 -1 737 -19 c 0
765 -68 798 -100 856 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
856 -200 l 2
765 -200 709 -153 672 -102 c 1
500 -100 m 0
555 -100 600 -55 600 0 c 0
600 55 555 100 500 100 c 0
445 100 400 55 400 0 c 0
400 -55 445 -100 500 -100 c 0"#,
),
GlyphDescriptor::new(
"alasaTok",
r#"
642 350 m 1
621 168 496 22 300 2 c 1
300 -50 l 2
300 -74 306 -84 311 -89 c 0
316 -94 326 -100 350 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
350 -200 l 2
309 -200 269 -189 240 -160 c 0
211 -131 200 -91 200 -50 c 2
200 350 l 1
100 350 l 2
72 350 50 372 50 400 c 0
50 428 72 450 100 450 c 2
200 450 l 1
200 750 l 2
200 778 222 800 250 800 c 0
479 800 619 646 642 450 c 1
779 450 l 1
715 515 l 2
705 525 700 537 700 550 c 0
700 577 723 600 750 600 c 0
763 600 775 595 785 585 c 2
935 435 l 2
943 427 950 415 950 400 c 0
950 385 943 373 935 365 c 2
785 215 l 2
775 205 763 200 750 200 c 0
723 200 700 223 700 250 c 0
700 263 705 275 715 285 c 2
779 350 l 1
642 350 l 1
300 350 m 1
300 103 l 1
441 121 522 221 541 350 c 1
300 350 l 1
541 450 m 1
522 578 441 679 300 697 c 1
300 450 l 1
541 450 l 1"#,
),
GlyphDescriptor::new(
"anuTok",
r#"
150 750 m 0
150 777 173 800 200 800 c 0
213 800 226 795 236 785 c 2
500 513 l 1
764 785 l 2
774 795 787 800 800 800 c 0
827 800 850 777 850 750 c 0
850 737 845 725 836 715 c 2
550 421 l 1
550 -50 l 2
550 -74 556 -84 561 -89 c 0
566 -94 576 -100 600 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
600 -200 l 2
559 -200 519 -189 490 -160 c 0
461 -131 450 -91 450 -50 c 2
450 421 l 1
164 715 l 2
155 725 150 737 150 750 c 0"#,
),
GlyphDescriptor::new(
"awenTok",
r#"
500 800 m 0
522 800 542 785 548 763 c 2
788 -100 l 1
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
750 -200 l 2
728 -200 708 -185 702 -163 c 2
500 563 l 1
298 -163 l 2
292 -185 272 -200 250 -200 c 2
100 -200 l 2
72 -200 50 -178 50 -150 c 0
50 -122 72 -100 100 -100 c 2
212 -100 l 1
452 763 l 2
458 785 478 800 500 800 c 0"#,
),
GlyphDescriptor::new(
"kamaTok",
r#"
50 138 m 0
50 166 74 188 100 188 c 0
111 188 123 184 132 176 c 0
186 131 243 107 315 101 c 1
553 767 l 2
560 787 579 800 600 800 c 0
620 800 638 788 646 769 c 2
876 207 l 1
907 136 918 58 941 -18 c 0
951 -50 960 -72 971 -85 c 0
980 -96 987 -100 1000 -100 c 0
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 0
907 -200 867 -120 845 -47 c 0
833 -7 824 38 813 79 c 1
769 38 701 0 600 0 c 0
572 0 550 22 550 50 c 0
550 78 572 100 600 100 c 0
676 100 722 128 748 154 c 0
761 167 769 179 775 189 c 1
603 610 l 1
397 33 l 2
390 13 371 0 350 0 c 0
239 0 150 31 68 99 c 0
56 109 50 124 50 138 c 0"#,
),
GlyphDescriptor::new(
"kenTok",
r#"
800 -48 m 2
800 -50 l 2
800 -87 813 -100 850 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
850 -200 l 2
817 -200 700 -190 700 -50 c 0
700 -45 697 275 300 299 c 1
300 -50 l 2
300 -78 278 -100 250 -100 c 0
222 -100 200 -78 200 -50 c 2
200 750 l 2
200 778 222 800 250 800 c 0
278 800 300 778 300 750 c 2
300 401 l 1
697 425 700 745 700 750 c 0
700 778 722 800 750 800 c 0
778 800 800 778 800 750 c 0
800 740 797 467 524 350 c 1
799 232 800 -43 800 -48 c 2"#,
),
GlyphDescriptor::new(
"kepekenTok",
r#"
107 120 m 0
107 145 128 170 157 170 c 0
173 170 188 162 198 148 c 2
229 104 l 1
251 222 334 322 450 345 c 1
450 410 l 1
220 410 l 2
192 410 170 432 170 460 c 2
170 750 l 2
170 778 192 800 220 800 c 2
780 800 l 2
808 800 830 778 830 750 c 2
830 460 l 2
830 432 808 410 780 410 c 2
550 410 l 1
550 345 l 1
686 318 777 183 777 40 c 2
777 -50 l 2
777 -74 783 -84 788 -89 c 0
793 -94 803 -100 827 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
827 -200 l 2
786 -200 746 -189 717 -160 c 0
688 -131 677 -91 677 -50 c 2
677 40 l 2
677 160 594 250 500 250 c 0
406 250 323 160 323 40 c 2
323 -50 l 2
323 -76 303 -100 273 -100 c 0
257 -100 240 -92 231 -78 c 2
116 92 l 2
110 101 107 110 107 120 c 0
550 510 m 1
730 510 l 1
730 700 l 1
550 700 l 1
550 510 l 1
450 700 m 1
270 700 l 1
270 510 l 1
450 510 l 1
450 700 l 1"#,
),
GlyphDescriptor::new(
"laTok",
r#"
337 750 m 4
337 780 362 800 387 800 c 4
393 800 399 798 405 796 c 4
565 732 663 525 663 300 c 4
663 127 608 -57 511 -147 c 4
477 -178 435 -200 387 -200 c 6
0 -200 l 6
-28 -200 -50 -178 -50 -150 c 4
-50 -122 -28 -100 0 -100 c 6
387 -100 l 6
403 -100 422 -92 443 -73 c 4
520 -2 563 158 563 300 c 4
563 478 488 657 369 704 c 4
349 712 337 730 337 750 c 4"#,
),
GlyphDescriptor::new(
"lonTok",
r#"
50 -150 m 0
50 -122 72 -100 100 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
100 -200 l 2
72 -200 50 -178 50 -150 c 0
575 125 m 0
575 84 541 50 500 50 c 0
459 50 425 84 425 125 c 0
425 166 459 200 500 200 c 0
541 200 575 166 575 125 c 0"#,
),
GlyphDescriptor::new(
"nanpaTok",
r#"
300 -50 m 2
300 150 l 1
102 150 l 2
74 150 52 172 52 200 c 0
52 228 74 250 102 250 c 2
300 250 l 1
300 450 l 1
102 450 l 2
74 450 52 472 52 500 c 0
52 528 74 550 102 550 c 2
300 550 l 1
300 750 l 2
300 778 322 800 350 800 c 0
378 800 400 778 400 750 c 2
400 550 l 1
600 550 l 1
600 750 l 2
600 778 622 800 650 800 c 0
678 800 700 778 700 750 c 2
700 550 l 1
902 550 l 2
930 550 952 528 952 500 c 0
952 472 930 450 902 450 c 2
700 450 l 1
700 250 l 1
902 250 l 2
930 250 952 228 952 200 c 0
952 172 930 150 902 150 c 2
700 150 l 1
700 -50 l 2
700 -74 706 -84 711 -89 c 0
716 -94 726 -100 750 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
750 -200 l 2
709 -200 669 -189 640 -160 c 0
611 -131 600 -91 600 -50 c 2
600 150 l 1
400 150 l 1
400 -50 l 2
400 -78 378 -100 350 -100 c 0
322 -100 300 -78 300 -50 c 2
400 450 m 1
400 250 l 1
600 250 l 1
600 450 l 1
400 450 l 1"#,
),
GlyphDescriptor::new(
"openTok",
r#"
200 800 m 0
228 800 250 778 250 750 c 2
250 350 l 1
750 350 l 1
750 750 l 2
750 778 772 800 800 800 c 0
828 800 850 778 850 750 c 2
850 -50 l 2
850 -74 856 -84 861 -89 c 0
866 -94 876 -100 900 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
900 -200 l 2
859 -200 819 -189 790 -160 c 0
761 -131 750 -91 750 -50 c 2
750 0 l 1
200 0 l 2
172 0 150 22 150 50 c 2
150 750 l 2
150 778 172 800 200 800 c 0
250 250 m 1
250 100 l 1
750 100 l 1
750 250 l 1
250 250 l 1"#,
),
GlyphDescriptor::new(
"piTok",
r#"
550 750 m 2
550 -50 l 2
550 -74 556 -84 561 -89 c 0
566 -94 576 -100 600 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
600 -200 l 2
559 -200 519 -189 490 -160 c 0
461 -131 450 -91 450 -50 c 2
450 750 l 2
450 778 472 800 500 800 c 0
528 800 550 778 550 750 c 2"#,
),
GlyphDescriptor::new(
"piniTok",
r#"
550 700 m 1
550 -100 l 1
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
300 -200 l 2
272 -200 250 -178 250 -150 c 0
250 -122 272 -100 300 -100 c 2
450 -100 l 1
450 700 l 1
300 700 l 2
272 700 250 722 250 750 c 0
250 778 272 800 300 800 c 2
700 800 l 2
728 800 750 778 750 750 c 0
750 722 728 700 700 700 c 2
550 700 l 1"#,
),
GlyphDescriptor::new(
"sonaTok",
r#"
500 850 m 0
528 850 550 828 550 800 c 2
550 700 l 2
550 672 528 650 500 650 c 0
472 650 450 672 450 700 c 2
450 800 l 2
450 828 472 850 500 850 c 0
200 733 m 0
200 758 221 783 250 783 c 0
267 783 284 774 293 758 c 2
343 671 l 2
348 663 350 654 350 646 c 0
350 621 329 596 300 596 c 0
283 596 266 605 257 621 c 2
207 708 l 2
202 716 200 725 200 733 c 0
750 783 m 0
779 783 800 758 800 733 c 0
800 725 798 716 793 708 c 2
743 621 l 2
734 605 717 596 700 596 c 0
671 596 650 621 650 646 c 0
650 654 652 663 657 671 c 2
707 758 l 2
716 774 733 783 750 783 c 0
240 500 m 2
240 528 262 550 290 550 c 2
710 550 l 2
738 550 760 528 760 500 c 2
760 -50 l 2
760 -74 766 -84 771 -89 c 0
776 -94 786 -100 810 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
810 -200 l 2
769 -200 729 -189 700 -160 c 0
671 -131 660 -91 660 -50 c 2
660 0 l 1
290 0 l 2
262 0 240 22 240 50 c 2
240 500 l 2
660 100 m 1
660 450 l 1
340 450 l 1
340 100 l 1
660 100 l 1"#,
),
GlyphDescriptor::new(
"tanTok",
r#"
1000 -200 m 0
971 -200 850 -192 850 -50 c 2
850 275 l 2
850 429 745 550 575 550 c 0
437 550 323 448 303 315 c 1
374 375 l 2
383 383 395 387 406 387 c 0
420 387 434 381 444 369 c 0
452 360 456 348 456 337 c 0
456 323 450 308 438 298 c 2
276 162 l 2
267 154 254 150 243 150 c 0
229 150 215 156 205 168 c 2
69 331 l 2
61 340 57 352 57 363 c 0
57 377 63 391 75 401 c 0
84 409 96 413 107 413 c 0
121 413 135 407 145 395 c 2
203 326 l 1
228 509 385 650 575 650 c 0
804 650 950 480 950 275 c 2
950 -50 l 2
950 -87 963 -100 1000 -100 c 0
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 0"#,
),
GlyphDescriptor::new(
"tawaTok",
r#"
400 800 m 0
422 800 440 787 447 767 c 2
756 -100 l 1
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
721 -200 l 2
700 -200 681 -187 674 -167 c 2
397 610 l 1
210 151 l 1
216 147 222 143 231 138 c 0
264 120 318 100 400 100 c 0
428 100 450 78 450 50 c 0
450 22 428 0 400 0 c 0
301 0 230 24 183 50 c 0
149 69 130 83 111 107 c 0
104 116 100 127 100 138 c 0
100 144 102 150 104 156 c 2
354 769 l 2
362 788 380 800 400 800 c 0"#,
),
GlyphDescriptor::new(
"wileTok",
r#"
210 775 m 0
238 775 260 751 260 725 c 0
260 715 257 704 250 695 c 0
202 631 150 534 150 399 c 0
150 321 164 253 193 206 c 0
220 162 261 135 322 135 c 0
346 135 377 147 403 169 c 0
429 191 446 220 450 254 c 0
452 280 472 300 500 300 c 0
528 300 548 280 550 254 c 0
554 220 571 191 597 169 c 0
623 147 654 135 678 135 c 0
739 135 780 162 807 206 c 0
836 253 850 321 850 399 c 0
850 534 798 631 750 695 c 0
743 704 740 715 740 725 c 0
740 751 762 775 790 775 c 0
805 775 820 768 830 755 c 0
887 679 950 561 950 399 c 0
950 304 936 193 936 95 c 0
936 32 943 -33 958 -67 c 0
964 -81 971 -89 977 -93 c 0
982 -97 989 -100 1000 -100 c 0
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 0
933 -200 889 -157 867 -109 c 0
842 -53 836 15 836 88 c 1
795 54 742 35 678 35 c 0
609 35 539 78 500 126 c 1
461 78 391 35 322 35 c 0
131 35 50 210 50 399 c 0
50 561 113 679 170 755 c 0
180 768 195 775 210 775 c 0"#,
),
GlyphDescriptor::new(
"wileTok_VAR02",
r#"
500 99 m 1
457 48 388 0 300 0 c 0
162 0 50 112 50 250 c 0
50 315 82 381 121 440 c 0
192 547 314 662 405 738 c 0
433 761 447 773 470 790 c 0
479 797 489 800 500 800 c 0
521 800 534 787 548 776 c 0
658 690 795 566 879 440 c 0
918 381 950 315 950 250 c 0
950 183 936 99 936 29 c 0
936 -15 943 -59 955 -78 c 0
963 -91 974 -100 1000 -100 c 0
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 0
941 -200 897 -174 870 -131 c 0
841 -84 836 -31 836 29 c 0
836 33 836 36 836 40 c 1
797 15 750 0 700 0 c 0
612 0 543 48 500 99 c 1
500 687 m 1
405 611 276 493 204 385 c 0
168 331 150 285 150 250 c 0
150 167 217 100 300 100 c 0
340 100 375 118 404 144 c 0
434 171 449 199 451 210 c 0
456 233 476 250 500 250 c 0
524 250 544 233 549 210 c 0
551 199 566 171 596 144 c 0
625 118 660 100 700 100 c 0
783 100 850 167 850 250 c 0
850 285 832 331 796 385 c 0
724 494 596 610 500 687 c 1"#,
),
GlyphDescriptor::new(
"nTok",
r#"
550 750 m 2
550 300 l 2
550 272 528 250 500 250 c 0
472 250 450 272 450 300 c 2
450 750 l 2
450 778 472 800 500 800 c 0
528 800 550 778 550 750 c 2
500 100 m 0
445 100 400 55 400 0 c 2
400 -150 l 2
400 -178 378 -200 350 -200 c 0
322 -200 300 -178 300 -150 c 2
300 0 l 2
300 110 390 200 500 200 c 0
610 200 700 110 700 0 c 2
700 -80 l 2
700 -92 708 -100 720 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 0
1050 -178 1028 -200 1000 -200 c 2
720 -200 l 2
652 -200 600 -148 600 -80 c 2
600 0 l 2
600 55 555 100 500 100 c 0"#,
),
GlyphDescriptor::new(
"waTok",
r#"
500 800 m 0
528 800 550 778 550 750 c 2
550 300 l 2
550 272 528 250 500 250 c 0
472 250 450 272 450 300 c 2
450 750 l 2
450 778 472 800 500 800 c 0
341 250 m 0
369 250 391 226 391 200 c 0
391 190 388 179 381 170 c 0
357 138 331 89 331 22 c 0
331 -18 339 -51 352 -72 c 0
363 -90 377 -100 402 -100 c 0
423 -100 448 -76 450 -55 c 0
452 -29 472 -10 500 -10 c 0
528 -10 548 -29 550 -55 c 0
552 -76 577 -100 598 -100 c 0
623 -100 637 -90 648 -72 c 0
661 -51 669 -18 669 22 c 0
669 89 643 138 619 170 c 0
612 179 609 190 609 200 c 0
609 226 631 250 659 250 c 0
674 250 689 243 699 230 c 0
732 186 769 116 769 22 c 0
769 -24 777 -51 791 -68 c 0
804 -83 832 -100 900 -100 c 2
1000 -100 l 2
1028 -100 1050 -122 1050 -150 c 1
1050 -178 1028 -200 1000 -200 c 2
900 -200 l 2
822 -200 762 -182 722 -141 c 1
694 -177 652 -200 598 -200 c 0
562 -200 526 -184 500 -161 c 1
474 -184 438 -200 402 -200 c 0
284 -200 231 -94 231 22 c 0
231 117 268 186 301 230 c 0
311 243 326 250 341 250 c 0"#,
),
];
pub const LATN: [GlyphDescriptor; 95] = [
GlyphDescriptor::new("space", r#""#),
GlyphDescriptor::new(
"exclam",
r#"
200 300 m 2
200 800 l 2
200 828 222 850 250 850 c 0
278 850 300 828 300 800 c 2
300 300 l 2
300 272 278 250 250 250 c 0
222 250 200 272 200 300 c 2
325 125 m 0
325 84 291 50 250 50 c 0
209 50 175 84 175 125 c 0
175 166 209 200 250 200 c 0
291 200 325 166 325 125 c 0"#,
),
GlyphDescriptor::new(
"quotedbl",
r#"
170 850 m 4
197 850 220 830 220 802 c 4
220 801 220 801 220 800 c 6
210 600 l 6
209 580 191 562 170 562 c 4
149 562 131 580 130 600 c 6
120 800 l 6
120 801 120 801 120 802 c 4
120 830 143 850 170 850 c 4
330 850 m 4
358 850 380 830 380 803 c 4
380 802 380 801 380 800 c 6
370 600 l 6
369 580 351 562 330 562 c 4
309 562 291 580 290 600 c 6
280 800 l 6
280 801 280 802 280 803 c 4
280 831 303 850 330 850 c 4"#,
),
GlyphDescriptor::new(
"numbersign",
r#"
228 429 m 1
270 429 l 1
270 471 l 1
228 471 l 1
228 429 l 1
270 260 m 2
270 329 l 1
228 329 l 1
228 260 l 2
228 232 206 210 178 210 c 0
150 210 128 232 128 260 c 2
128 329 l 1
60 329 l 2
32 329 10 351 10 379 c 0
10 407 32 429 60 429 c 2
128 429 l 1
128 471 l 1
60 471 l 2
32 471 10 493 10 521 c 0
10 549 32 571 60 571 c 2
128 571 l 1
128 640 l 2
128 668 150 690 178 690 c 0
206 690 228 668 228 640 c 2
228 571 l 1
270 571 l 1
270 640 l 2
270 668 292 690 320 690 c 0
348 690 370 668 370 640 c 2
370 571 l 1
440 571 l 2
468 571 490 549 490 521 c 0
490 493 468 471 440 471 c 2
370 471 l 1
370 429 l 1
440 429 l 2
468 429 490 407 490 379 c 0
490 351 468 329 440 329 c 2
370 329 l 1
370 260 l 2
370 232 348 210 320 210 c 0
292 210 270 232 270 260 c 2"#,
),
GlyphDescriptor::new(
"dollar",
r#"
200 731 m 1
148 711 117 658 117 587 c 0
117 537 136 511 200 503 c 1
200 731 l 1
300 169 m 1
352 189 383 242 383 313 c 0
383 363 364 389 300 397 c 1
300 169 l 1
17 587 m 0
17 718 92 813 200 835 c 1
200 840 l 2
200 868 222 890 250 890 c 0
278 890 300 868 300 840 c 2
300 835 l 1
360 823 444 781 488 646 c 0
490 641 490 636 490 631 c 0
490 603 468 581 440 581 c 0
419 581 399 594 392 615 c 0
375 667 347 713 300 731 c 1
300 498 l 1
376 491 483 455 483 313 c 0
483 182 408 87 300 65 c 1
300 60 l 2
300 32 278 10 250 10 c 0
222 10 200 32 200 60 c 2
200 65 l 1
140 77 56 119 12 254 c 0
10 259 10 264 10 269 c 0
10 297 32 319 60 319 c 0
81 319 101 306 108 285 c 0
125 233 153 187 200 169 c 1
200 402 l 1
124 409 17 445 17 587 c 0"#,
),
GlyphDescriptor::new(
"percent",
r#"
135 740 m 0
121 740 110 729 110 715 c 0
110 701 121 690 135 690 c 0
149 690 160 701 160 715 c 0
160 729 149 740 135 740 c 0
135 590 m 0
66 590 10 646 10 715 c 0
10 784 66 840 135 840 c 0
204 840 260 784 260 715 c 0
260 646 204 590 135 590 c 0
365 210 m 0
351 210 340 199 340 185 c 0
340 171 351 160 365 160 c 0
379 160 390 171 390 185 c 0
390 199 379 210 365 210 c 0
365 60 m 0
296 60 240 116 240 185 c 0
240 254 296 310 365 310 c 0
434 310 490 254 490 185 c 0
490 116 434 60 365 60 c 0
60 50 m 0
33 50 10 72 10 100 c 0
10 108 12 116 16 124 c 2
396 824 l 2
405 841 422 850 440 850 c 0
467 850 490 828 490 800 c 0
490 792 488 784 484 776 c 2
104 76 l 2
95 59 78 50 60 50 c 0"#,
),
GlyphDescriptor::new(
"ampersand",
r#"
110 288 m 2
111 200 161 150 230 150 c 0
261 150 290 163 316 191 c 1
273 253 221 330 175 400 c 1
135 372 110 339 110 290 c 2
110 288 l 2
250 760 m 0
195 760 150 715 150 660 c 0
150 636 152 623 209 532 c 1
270 559 270 559 277 563 c 0
325 589 350 626 350 663 c 0
350 713 307 760 250 760 c 0
50 660 m 0
50 770 140 860 250 860 c 0
364 860 450 765 450 662 c 0
450 581 395 513 325 475 c 0
314 469 309 467 264 447 c 1
298 395 335 340 369 290 c 1
382 332 390 383 390 440 c 0
390 468 412 490 440 490 c 0
468 490 490 468 490 440 c 0
490 345 472 259 437 192 c 1
445 180 464 154 481 129 c 0
485 123 490 113 490 100 c 0
490 72 467 50 440 50 c 0
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | true |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/glyph_blocks/inner.rs | font-forge-tool/src/glyph_blocks/inner.rs | use crate::{GlyphDescriptor, Anchor, AnchorType};
//MARK: INNER
pub const INNER_COR: [GlyphDescriptor; 136] = [
GlyphDescriptor::new_with_anchor("a", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-507 305 m 0
-507 324 -522 340 -542 340 c 0
-561 340 -577 324 -577 305 c 0
-577 286 -561 270 -542 270 c 0
-523 270 -507 286 -507 305 c 0
-333 244 m 0
-333 219 -353 194 -383 194 c 0
-401 194 -418 203 -427 220 c 0
-428 223 -430 225 -431 228 c 1
-455 193 -496 170 -542 170 c 0
-617 170 -677 230 -677 305 c 0
-677 380 -617 440 -542 440 c 0
-507 440 -474 426 -450 404 c 0
-413 374 -374 331 -339 268 c 0
-335 260 -333 252 -333 244 c 0
-492 590 m 2
-492 500 l 2
-492 472 -514 450 -542 450 c 0
-570 450 -592 472 -592 500 c 2
-592 590 l 2
-592 618 -570 640 -542 640 c 0
-514 640 -492 618 -492 590 c 2"#,
),
GlyphDescriptor::new_with_anchor("akesi", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-641 484 m 0
-669 484 -691 506 -691 534 c 0
-691 562 -669 584 -641 584 c 0
-613 584 -591 562 -591 534 c 0
-591 506 -613 484 -641 484 c 0
-309 534 m 0
-309 506 -331 484 -359 484 c 0
-387 484 -409 506 -409 534 c 0
-409 562 -387 584 -359 584 c 0
-331 584 -309 562 -309 534 c 0
-511 381 m 1
-511 367 l 1
-491 367 l 1
-491 381 l 1
-511 381 l 1
-501 594 m 0
-451 594 -423 553 -409 518 c 0
-404 506 -401 494 -398 481 c 1
-342 481 l 2
-314 481 -292 459 -292 431 c 0
-292 403 -314 381 -342 381 c 2
-391 381 l 1
-391 367 l 1
-342 367 l 2
-314 367 -292 345 -292 317 c 0
-292 289 -314 267 -342 267 c 2
-398 267 l 1
-406 233 -417 205 -439 182 c 0
-453 167 -474 154 -501 154 c 0
-551 154 -579 195 -593 230 c 0
-598 242 -601 254 -604 267 c 1
-660 267 l 2
-688 267 -710 289 -710 317 c 0
-710 345 -688 367 -660 367 c 2
-611 367 l 1
-611 381 l 1
-660 381 l 2
-688 381 -710 403 -710 431 c 0
-710 459 -688 481 -660 481 c 2
-604 481 l 1
-596 515 -585 543 -563 566 c 0
-549 581 -528 594 -501 594 c 0"#,
),
GlyphDescriptor::new_with_anchor("ala", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-685 535 m 0
-685 562 -662 585 -635 585 c 0
-622 585 -610 580 -600 570 c 2
-500 471 l 1
-400 570 l 2
-390 580 -378 585 -365 585 c 0
-338 585 -315 562 -315 535 c 0
-315 522 -320 510 -330 500 c 2
-429 400 l 1
-330 300 l 2
-320 290 -315 278 -315 265 c 0
-315 238 -338 215 -365 215 c 0
-378 215 -390 220 -400 230 c 2
-500 329 l 1
-600 230 l 2
-610 220 -622 215 -635 215 c 0
-662 215 -685 238 -685 265 c 0
-685 278 -680 290 -670 300 c 2
-571 400 l 1
-670 500 l 2
-680 510 -685 522 -685 535 c 0"#,
),
GlyphDescriptor::new_with_anchor("alasa", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-426 350 m 1
-446 265 -515 200 -620 200 c 0
-648 200 -670 222 -670 250 c 2
-670 350 l 1
-690 350 l 2
-718 350 -740 372 -740 400 c 0
-740 428 -718 450 -690 450 c 2
-670 450 l 1
-670 550 l 2
-670 578 -648 600 -620 600 c 0
-515 600 -446 535 -426 450 c 1
-397 450 l 1
-423 495 l 2
-428 503 -430 511 -430 520 c 0
-430 545 -409 570 -380 570 c 0
-363 570 -346 561 -337 545 c 2
-267 426 l 2
-263 418 -260 409 -260 400 c 0
-260 391 -263 382 -267 374 c 2
-337 255 l 2
-346 239 -363 230 -380 230 c 0
-409 230 -430 255 -430 280 c 0
-430 289 -428 297 -423 305 c 2
-397 350 l 1
-426 350 l 1
-532 450 m 1
-541 467 -551 481 -570 490 c 1
-570 450 l 1
-532 450 l 1
-570 310 m 1
-551 319 -541 333 -532 350 c 1
-570 350 l 1
-570 310 l 1"#,
),
GlyphDescriptor::new_with_anchor("ale", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-440 400 m 1
-427 380 -415 366 -405 355 c 0
-391 341 -386 340 -383 340 c 0
-380 340 -377 341 -372 348 c 0
-366 357 -360 374 -360 400 c 0
-360 426 -366 443 -372 452 c 0
-377 459 -380 460 -383 460 c 0
-386 460 -391 459 -405 445 c 0
-415 434 -427 420 -440 400 c 1
-560 400 m 1
-573 420 -585 434 -595 445 c 0
-609 459 -614 460 -617 460 c 0
-620 460 -623 459 -628 452 c 0
-634 443 -640 426 -640 400 c 0
-640 374 -634 357 -628 348 c 0
-623 341 -620 340 -617 340 c 0
-614 340 -609 341 -595 355 c 0
-585 366 -573 380 -560 400 c 1
-740 400 m 0
-740 482 -698 560 -617 560 c 0
-561 560 -526 522 -500 488 c 1
-474 522 -439 560 -383 560 c 0
-302 560 -260 482 -260 400 c 0
-260 318 -302 240 -383 240 c 0
-439 240 -474 278 -500 312 c 1
-526 278 -561 240 -617 240 c 0
-698 240 -740 318 -740 400 c 0"#,
),
GlyphDescriptor::new_with_anchor("anpa", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-450 210 m 0
-450 182 -472 160 -500 160 c 0
-528 160 -550 182 -550 210 c 0
-550 238 -528 260 -500 260 c 0
-472 260 -450 238 -450 210 c 0
-670 540 m 0
-642 540 -620 518 -620 490 c 2
-620 390 l 1
-380 390 l 1
-380 490 l 2
-380 518 -358 540 -330 540 c 0
-302 540 -280 518 -280 490 c 2
-280 340 l 2
-280 312 -302 290 -330 290 c 2
-670 290 l 2
-698 290 -720 312 -720 340 c 2
-720 490 l 2
-720 518 -698 540 -670 540 c 0"#,
),
GlyphDescriptor::new_with_anchor("ante", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-650 226 m 0
-675 226 -700 246 -700 276 c 0
-700 295 -689 313 -671 321 c 2
-521 391 l 2
-514 394 -507 396 -500 396 c 0
-493 396 -486 394 -479 391 c 2
-329 321 l 2
-311 313 -300 295 -300 276 c 0
-300 246 -325 226 -350 226 c 0
-357 226 -364 228 -371 231 c 2
-500 291 l 1
-629 231 l 2
-636 228 -643 226 -650 226 c 0
-350 574 m 0
-325 574 -300 554 -300 524 c 0
-300 505 -311 487 -329 479 c 2
-479 409 l 2
-486 406 -493 404 -500 404 c 0
-507 404 -514 406 -521 409 c 2
-671 479 l 2
-689 487 -700 505 -700 524 c 0
-700 554 -675 574 -650 574 c 0
-643 574 -636 572 -629 569 c 2
-500 509 l 1
-371 569 l 2
-364 572 -357 574 -350 574 c 0"#,
),
GlyphDescriptor::new_with_anchor("anu", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-688 532 m 0
-688 559 -665 582 -638 582 c 0
-625 582 -612 577 -602 567 c 2
-500 462 l 1
-398 567 l 2
-388 577 -375 582 -362 582 c 0
-335 582 -312 559 -312 532 c 0
-312 519 -317 507 -326 497 c 2
-450 370 l 1
-450 210 l 2
-450 182 -472 160 -500 160 c 0
-528 160 -550 182 -550 210 c 2
-550 370 l 1
-674 497 l 2
-683 507 -688 519 -688 532 c 0"#,
),
GlyphDescriptor::new_with_anchor("awen", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-453 607 m 2
-361 350 l 1
-334 350 l 2
-306 350 -284 328 -284 300 c 0
-284 272 -306 250 -334 250 c 2
-396 250 l 2
-417 250 -436 263 -443 283 c 2
-500 442 l 1
-557 283 l 2
-564 263 -583 250 -604 250 c 2
-666 250 l 2
-694 250 -716 272 -716 300 c 0
-716 328 -694 350 -666 350 c 2
-639 350 l 1
-547 607 l 2
-540 627 -521 640 -500 640 c 0
-479 640 -460 627 -453 607 c 2"#,
),
GlyphDescriptor::new_with_anchor("e", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-500 560 m 0
-500 586 -478 610 -450 610 c 0
-436 610 -422 604 -412 593 c 2
-272 433 l 2
-264 424 -260 412 -260 400 c 0
-260 388 -264 376 -272 367 c 2
-412 207 l 2
-422 196 -436 190 -450 190 c 0
-478 190 -500 214 -500 240 c 0
-500 252 -496 264 -488 273 c 2
-376 400 l 1
-488 527 l 2
-496 536 -500 548 -500 560 c 0
-670 560 m 0
-670 586 -648 610 -620 610 c 0
-606 610 -592 604 -582 593 c 2
-442 433 l 2
-434 424 -430 412 -430 400 c 0
-430 388 -434 376 -442 367 c 2
-582 207 l 2
-592 196 -606 190 -620 190 c 0
-648 190 -670 214 -670 240 c 0
-670 252 -666 264 -658 273 c 2
-546 400 l 1
-658 527 l 2
-666 536 -670 548 -670 560 c 0"#,
),
GlyphDescriptor::new_with_anchor("en", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-450 590 m 2
-450 450 l 1
-310 450 l 2
-282 450 -260 428 -260 400 c 0
-260 372 -282 350 -310 350 c 2
-450 350 l 1
-450 210 l 2
-450 182 -472 160 -500 160 c 0
-528 160 -550 182 -550 210 c 2
-550 350 l 1
-690 350 l 2
-718 350 -740 372 -740 400 c 0
-740 428 -718 450 -690 450 c 2
-550 450 l 1
-550 590 l 2
-550 618 -528 640 -500 640 c 0
-472 640 -450 618 -450 590 c 2"#,
),
GlyphDescriptor::new_with_anchor("esun", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-577 316 m 1
-578 316 -579 316 -580 316 c 0
-596 316 -600 317 -600 300 c 1
-592 300 -584 309 -577 316 c 1
-423 484 m 1
-408 485 -400 486 -400 500 c 1
-408 500 -417 491 -423 484 c 1
-695 545 m 0
-695 570 -674 595 -645 595 c 0
-628 595 -611 586 -602 570 c 0
-595 558 -575 527 -534 506 c 1
-527 518 -517 530 -507 541 c 0
-482 568 -452 600 -400 600 c 0
-376 600 -348 594 -327 573 c 0
-306 552 -300 524 -300 500 c 0
-300 476 -306 444 -331 419 c 0
-354 397 -385 386 -421 384 c 1
-408 378 -395 371 -384 363 c 0
-340 333 -320 296 -310 276 c 0
-307 269 -305 262 -305 255 c 0
-305 230 -325 205 -355 205 c 0
-374 205 -391 215 -400 233 c 0
-405 243 -416 263 -442 281 c 0
-449 286 -457 291 -466 295 c 1
-473 282 -483 270 -493 259 c 0
-518 232 -548 200 -600 200 c 0
-624 200 -652 206 -673 227 c 0
-694 248 -700 276 -700 300 c 0
-700 324 -694 355 -673 379 c 0
-651 405 -617 416 -580 416 c 0
-579 416 -579 416 -578 416 c 1
-642 449 -675 498 -688 520 c 0
-693 528 -695 537 -695 545 c 0"#,
),
GlyphDescriptor::new_with_anchor("ijo", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-500 160 m 0
-633 160 -740 267 -740 400 c 0
-740 533 -633 640 -500 640 c 0
-367 640 -260 533 -260 400 c 0
-260 267 -367 160 -500 160 c 0
-500 260 m 0
-423 260 -360 323 -360 400 c 0
-360 477 -423 540 -500 540 c 0
-577 540 -640 477 -640 400 c 0
-640 323 -577 260 -500 260 c 0"#,
),
GlyphDescriptor::new_with_anchor("ike", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-270 335 m 0
-270 310 -290 285 -320 285 c 0
-342 285 -362 300 -368 322 c 0
-382 374 -434 415 -500 415 c 0
-566 415 -618 374 -632 322 c 0
-638 300 -658 285 -680 285 c 0
-710 285 -730 310 -730 335 c 0
-730 339 -729 344 -728 348 c 0
-701 446 -608 515 -500 515 c 0
-392 515 -299 446 -272 348 c 0
-271 344 -270 339 -270 335 c 0"#,
),
GlyphDescriptor::new_with_anchor("ilo", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-550 472 m 1
-595 472 l 1
-595 426 l 1
-550 426 l 1
-550 472 l 1
-450 426 m 1
-405 426 l 1
-405 472 l 1
-450 472 l 1
-450 426 l 1
-695 522 m 2
-695 550 -673 572 -645 572 c 2
-355 572 l 2
-327 572 -305 550 -305 522 c 2
-305 376 l 2
-305 348 -327 326 -355 326 c 2
-450 326 l 1
-450 210 l 2
-450 182 -472 160 -500 160 c 0
-528 160 -550 182 -550 210 c 2
-550 326 l 1
-645 326 l 2
-673 326 -695 348 -695 376 c 2
-695 522 l 2"#,
),
GlyphDescriptor::new_with_anchor("insa", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-670 535 m 0
-642 535 -620 513 -620 485 c 2
-620 365 l 1
-380 365 l 1
-380 485 l 2
-380 513 -358 535 -330 535 c 0
-302 535 -280 513 -280 485 c 2
-280 315 l 2
-280 287 -302 265 -330 265 c 2
-670 265 l 2
-698 265 -720 287 -720 315 c 2
-720 485 l 2
-720 513 -698 535 -670 535 c 0
-450 475 m 0
-450 447 -472 425 -500 425 c 0
-528 425 -550 447 -550 475 c 0
-550 503 -528 525 -500 525 c 0
-472 525 -450 503 -450 475 c 0"#,
),
GlyphDescriptor::new_with_anchor("jaki", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-725 338 m 0
-725 384 -683 416 -636 416 c 0
-609 416 -586 394 -586 365 c 0
-586 347 -597 331 -612 322 c 1
-602 311 -589 299 -576 289 c 1
-578 328 -581 373 -583 415 c 0
-583 416 -583 418 -583 419 c 0
-628 421 -660 425 -686 451 c 0
-700 465 -706 483 -706 500 c 0
-706 527 -691 547 -680 559 c 0
-661 579 -635 597 -611 609 c 0
-589 620 -559 632 -533 632 c 0
-505 632 -483 610 -483 582 c 0
-483 554 -505 532 -533 532 c 1
-544 530 -555 525 -566 519 c 1
-564 519 l 0
-547 519 -533 514 -522 506 c 1
-493 536 -456 572 -407 582 c 0
-401 583 -394 584 -387 584 c 0
-345 584 -317 558 -302 530 c 0
-288 504 -278 476 -278 450 c 0
-278 408 -305 382 -328 370 c 1
-317 354 -301 330 -301 301 c 0
-301 239 -359 220 -418 220 c 0
-446 220 -468 242 -468 270 c 0
-468 298 -446 320 -418 320 c 0
-417 320 -416 321 -415 321 c 1
-421 329 -428 337 -436 346 c 0
-442 352 -454 368 -454 390 c 0
-454 424 -425 441 -417 445 c 0
-403 452 -392 453 -379 457 c 1
-380 462 -383 471 -390 483 c 1
-422 473 -463 424 -483 399 c 1
-482 359 -476 314 -476 273 c 0
-476 221 -492 169 -547 169 c 0
-566 169 -583 177 -595 183 c 0
-631 202 -663 229 -689 259 c 0
-706 279 -725 302 -725 338 c 0"#,
),
GlyphDescriptor::new_with_anchor("jan", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-583 457 m 0
-583 411 -546 374 -500 374 c 0
-454 374 -417 411 -417 457 c 0
-417 503 -454 540 -500 540 c 0
-546 540 -583 503 -583 457 c 0
-652 236 m 0
-678 236 -702 258 -702 286 c 0
-702 300 -697 314 -685 324 c 2
-651 353 l 1
-672 383 -683 418 -683 457 c 0
-683 558 -601 640 -500 640 c 0
-399 640 -317 558 -317 457 c 0
-317 418 -329 383 -349 353 c 1
-316 324 l 2
-304 314 -298 300 -298 286 c 0
-298 258 -322 236 -348 236 c 0
-359 236 -371 240 -380 248 c 2
-428 289 l 1
-450 280 -474 274 -500 274 c 0
-526 274 -550 280 -572 289 c 1
-619 248 l 2
-628 240 -640 236 -652 236 c 0"#,
),
GlyphDescriptor::new_with_anchor("jelo", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-508 289 m 1
-492 289 l 1
-500 301 l 1
-508 289 l 1
-500 479 m 0
-492 479 -486 484 -485 492 c 0
-485 493 -485 494 -485 495 c 0
-485 496 l 0
-486 503 -492 509 -500 509 c 0
-508 509 -514 503 -515 496 c 0
-515 495 l 0
-515 494 -515 493 -515 492 c 0
-514 484 -508 479 -500 479 c 0
-597 189 m 2
-623 189 -647 210 -647 239 c 0
-647 248 -645 257 -640 265 c 2
-550 411 l 1
-550 419 l 1
-560 426 -568 435 -575 445 c 1
-595 445 l 2
-623 445 -645 467 -645 495 c 0
-645 523 -623 545 -595 545 c 2
-574 545 l 1
-567 555 -560 563 -550 569 c 1
-550 590 l 2
-550 618 -528 640 -500 640 c 0
-472 640 -450 618 -450 590 c 2
-450 569 l 1
-440 563 -433 555 -426 545 c 1
-405 545 l 2
-377 545 -355 523 -355 495 c 0
-355 467 -377 445 -405 445 c 2
-425 445 l 1
-432 435 -440 426 -450 419 c 1
-450 411 l 1
-360 265 l 2
-355 257 -353 248 -353 239 c 0
-353 210 -377 189 -403 189 c 2
-597 189 l 2"#,
),
GlyphDescriptor::new_with_anchor("jo", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-474 539 m 0
-474 553 -486 565 -500 565 c 0
-514 565 -527 553 -527 539 c 0
-527 525 -514 513 -500 513 c 0
-486 513 -474 525 -474 539 c 0
-399 539 m 0
-399 483 -444 438 -500 438 c 0
-516 438 -532 441 -546 448 c 1
-570 431 -587 400 -587 362 c 0
-587 301 -543 260 -500 260 c 0
-470 260 -441 280 -425 312 c 1
-478 312 l 2
-506 312 -528 334 -528 362 c 0
-528 390 -506 412 -478 412 c 2
-364 412 l 2
-336 412 -314 390 -314 362 c 0
-314 256 -392 160 -500 160 c 0
-608 160 -687 256 -687 362 c 0
-687 432 -654 495 -601 532 c 0
-601 534 -602 537 -602 539 c 0
-602 595 -556 640 -500 640 c 0
-444 640 -399 595 -399 539 c 0"#,
),
GlyphDescriptor::new_with_anchor("kala", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-536 400 m 1
-511 368 -475 325 -429 325 c 0
-405 325 -389 332 -380 341 c 0
-370 351 -360 369 -360 400 c 0
-360 431 -370 449 -380 459 c 0
-389 468 -405 475 -429 475 c 0
-475 475 -511 432 -536 400 c 1
-593 488 m 1
-555 530 -504 575 -429 575 c 0
-384 575 -343 561 -311 531 c 0
-277 498 -260 452 -260 400 c 0
-260 348 -277 302 -311 269 c 0
-343 239 -384 225 -429 225 c 0
-504 225 -555 270 -593 312 c 1
-610 280 -619 240 -660 240 c 0
-690 240 -710 265 -710 290 c 0
-710 297 -708 304 -705 311 c 0
-696 330 -681 363 -658 400 c 1
-681 437 -696 470 -705 489 c 0
-708 496 -710 503 -710 510 c 0
-710 535 -690 560 -660 560 c 0
-619 560 -610 520 -593 488 c 1"#,
),
GlyphDescriptor::new_with_anchor("kalama", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-500 641 m 0
-472 641 -450 619 -450 591 c 2
-450 490 l 2
-450 462 -472 440 -500 440 c 0
-528 440 -550 462 -550 490 c 2
-550 591 l 2
-550 619 -528 641 -500 641 c 0
-681 536 m 0
-681 561 -661 586 -631 586 c 0
-611 586 -593 574 -585 555 c 2
-562 499 l 2
-559 493 -558 486 -558 480 c 0
-558 455 -578 430 -608 430 c 0
-628 430 -646 441 -654 460 c 2
-678 516 l 2
-681 522 -681 530 -681 536 c 0
-392 430 m 0
-422 430 -442 455 -442 480 c 0
-442 486 -441 493 -438 499 c 2
-415 554 l 2
-407 573 -389 585 -369 585 c 0
-339 585 -319 560 -319 535 c 0
-319 529 -320 522 -323 516 c 2
-346 460 l 2
-354 441 -372 430 -392 430 c 0
-602 325 m 1
-584 287 -545 261 -500 261 c 0
-455 261 -416 287 -398 325 c 1
-602 325 l 1
-714 375 m 0
-714 403 -692 425 -664 425 c 2
-337 425 l 2
-309 425 -287 403 -287 375 c 0
-287 257 -382 161 -500 161 c 0
-618 161 -714 257 -714 375 c 0"#,
),
GlyphDescriptor::new_with_anchor("kama", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-715 309 m 0
-715 337 -691 359 -665 359 c 0
-654 359 -642 355 -633 347 c 0
-619 335 -605 328 -589 324 c 1
-492 597 l 2
-485 617 -467 630 -445 630 c 0
-425 630 -407 618 -399 599 c 2
-289 328 l 2
-287 322 -285 315 -285 309 c 0
-285 290 -296 275 -308 265 c 0
-338 238 -380 220 -445 220 c 0
-473 220 -495 242 -495 270 c 0
-495 298 -473 320 -445 320 c 0
-423 320 -408 324 -397 328 c 1
-442 439 l 1
-508 253 l 2
-515 233 -534 220 -555 220 c 0
-610 220 -656 235 -697 270 c 0
-709 280 -715 295 -715 309 c 0"#,
),
GlyphDescriptor::new_with_anchor("kasi", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-708 459 m 2
-708 482 -708 560 -651 560 c 0
-583 560 -547 543 -511 516 c 1
-501 530 -471 570 -405 570 c 2
-402 570 l 2
-333 570 -293 563 -293 487 c 2
-293 486 l 2
-293 431 -316 350 -451 343 c 1
-451 210 l 2
-451 182 -473 160 -501 160 c 0
-529 160 -551 182 -551 210 c 2
-551 321 l 1
-614 321 -708 359 -708 452 c 2
-708 459 l 2
-394 470 m 1
-405 470 l 2
-422 470 -422 470 -437 445 c 1
-432 446 -398 451 -394 470 c 1
-608 452 m 2
-608 447 -608 428 -557 421 c 1
-564 434 -586 448 -608 454 c 1
-608 452 l 2"#,
),
GlyphDescriptor::new_with_anchor("ken", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-551 454 m 1
-470 471 -449 531 -449 561 c 0
-449 588 -426 611 -399 611 c 0
-371 611 -349 589 -349 561 c 0
-349 494 -387 434 -431 400 c 1
-387 366 -349 306 -349 239 c 0
-349 211 -371 189 -399 189 c 0
-430 189 -446 212 -449 241 c 0
-452 270 -468 329 -551 346 c 1
-551 239 l 2
-551 211 -573 189 -601 189 c 0
-629 189 -651 211 -651 239 c 2
-651 561 l 2
-651 589 -629 611 -601 611 c 0
-573 611 -551 589 -551 561 c 2
-551 454 l 1"#,
),
GlyphDescriptor::new_with_anchor("kepeken", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-546 486 m 1
-575 486 l 1
-575 466 l 1
-546 466 l 1
-546 486 l 1
-446 466 m 1
-417 466 l 1
-417 486 l 1
-446 486 l 1
-446 466 l 1
-704 293 m 0
-704 320 -680 343 -654 343 c 0
-645 343 -636 340 -628 335 c 1
-619 347 -607 358 -594 366 c 1
-625 366 l 2
-653 366 -675 388 -675 416 c 2
-675 536 l 2
-675 564 -653 586 -625 586 c 2
-367 586 l 2
-339 586 -317 564 -317 536 c 2
-317 416 l 2
-317 388 -339 366 -367 366 c 2
-397 366 l 1
-367 346 -341 313 -341 270 c 2
-341 244 l 2
-341 216 -363 194 -391 194 c 0
-419 194 -441 216 -441 244 c 2
-441 270 l 2
-441 274 -445 277 -450 281 c 0
-459 288 -476 294 -496 294 c 0
-521 294 -541 286 -549 273 c 0
-550 271 -550 270 -550 270 c 2
-550 244 l 2
-550 218 -570 194 -600 194 c 0
-612 194 -625 198 -634 207 c 2
-687 256 l 2
-698 266 -704 279 -704 293 c 0"#,
),
GlyphDescriptor::new_with_anchor("kili", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-580 429 m 0
-584 434 -595 443 -618 444 c 1
-621 438 -628 422 -628 386 c 0
-628 314 -572 258 -500 258 c 0
-428 258 -371 314 -371 386 c 0
-371 422 -379 437 -382 442 c 1
-403 441 -412 440 -420 429 c 0
-439 404 -469 389 -500 389 c 0
-531 389 -561 404 -580 429 c 0
-577 590 m 0
-577 617 -554 640 -527 640 c 0
-494 640 -476 609 -464 582 c 0
-458 568 -452 550 -450 529 c 1
-430 538 -405 542 -379 542 c 0
-327 542 -271 501 -271 386 c 0
-271 259 -373 158 -500 158 c 0
-627 158 -728 259 -728 386 c 0
-728 503 -672 544 -622 544 c 0
-596 544 -572 538 -552 529 c 1
-558 552 -577 565 -577 590 c 0"#,
),
GlyphDescriptor::new_with_anchor("kiwen", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-381 450 m 1
-416 502 l 1
-584 502 l 1
-619 450 l 1
-500 293 l 1
-381 450 l 1
-540 180 m 2
-720 418 l 2
-727 427 -730 438 -730 449 c 0
-730 459 -728 468 -722 476 c 2
-653 580 l 2
-644 594 -628 602 -611 602 c 2
-389 602 l 2
-372 602 -356 594 -347 580 c 2
-278 476 l 2
-272 468 -270 459 -270 449 c 0
-270 438 -273 427 -280 418 c 2
-460 180 l 2
-469 167 -484 160 -500 160 c 0
-516 160 -531 167 -540 180 c 2"#,
),
GlyphDescriptor::new_with_anchor("ko", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-407 310 m 0
-407 329 -422 330 -422 357 c 0
-422 378 -408 398 -387 405 c 0
-375 409 -367 420 -367 433 c 0
-367 451 -383 463 -397 463 c 0
-405 463 -413 459 -421 459 c 0
-446 459 -471 478 -471 509 c 0
-471 526 -485 539 -500 539 c 0
-515 539 -527 527 -528 511 c 0
-528 510 -528 510 -528 509 c 0
-528 478 -553 459 -578 459 c 0
-586 459 -594 463 -602 463 c 0
-616 463 -632 451 -632 433 c 0
-632 420 -624 409 -612 405 c 0
-591 398 -577 378 -577 357 c 0
-577 332 -593 325 -593 310 c 0
-593 295 -581 281 -564 281 c 0
-537 281 -536 314 -500 314 c 0
-461 314 -462 281 -436 281 c 0
-419 281 -407 295 -407 310 c 0
-563 180 m 0
-635 180 -692 239 -692 311 c 0
-692 320 -691 328 -689 337 c 1
-716 362 -731 397 -731 434 c 0
-731 508 -676 555 -617 562 c 1
-597 607 -553 639 -500 639 c 0
-447 639 -402 607 -382 562 c 1
-323 555 -268 508 -268 434 c 0
-268 397 -283 362 -310 337 c 1
-308 328 -307 320 -307 311 c 0
-307 239 -364 180 -436 180 c 0
-458 180 -480 186 -500 197 c 1
-520 186 -541 180 -563 180 c 0"#,
),
GlyphDescriptor::new_with_anchor("kon", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-558 630 m 0
-532 630 -508 610 -508 580 c 0
-508 557 -524 536 -548 531 c 0
-567 527 -575 524 -581 521 c 0
-587 518 -595 504 -589 492 c 0
-584 483 -572 469 -556 452 c 0
-532 426 -497 390 -497 339 c 0
-497 289 -526 252 -557 234 c 0
-579 221 -601 215 -619 211 c 0
-623 210 -627 210 -631 210 c 0
-656 210 -681 230 -681 260 c 0
-681 283 -665 303 -642 309 c 0
-614 316 -604 314 -598 333 c 0
-595 341 -596 341 -600 348 c 0
-605 357 -617 371 -633 388 c 0
-657 414 -692 449 -692 500 c 0
-692 550 -664 588 -633 607 c 0
-611 620 -587 625 -569 629 c 0
-566 630 -561 630 -558 630 c 0
-368 590 m 0
-342 590 -318 570 -318 540 c 0
-318 517 -334 496 -358 491 c 0
-377 487 -385 484 -391 481 c 0
-397 478 -405 464 -399 452 c 0
-394 443 -382 429 -366 412 c 0
-342 386 -307 350 -307 299 c 0
-307 249 -336 212 -367 194 c 0
-389 181 -411 175 -429 171 c 0
-433 170 -437 170 -441 170 c 0
-466 170 -491 190 -491 220 c 0
-491 243 -475 263 -452 269 c 0
-424 276 -414 274 -408 293 c 0
-405 301 -406 301 -410 308 c 0
-415 317 -427 331 -443 348 c 0
-467 374 -502 409 -502 460 c 0
-502 510 -474 548 -443 567 c 0
-421 580 -397 585 -379 589 c 0
-376 590 -371 590 -368 590 c 0"#,
),
GlyphDescriptor::new_with_anchor("kule", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-572 365 m 1
-578 355 l 1
-422 355 l 1
-428 365 l 1
-572 365 l 1
-485 465 m 1
-500 490 l 1
-515 465 l 1
-485 465 l 1
-665 255 m 2
-691 255 -715 276 -715 305 c 0
-715 314 -712 322 -708 330 c 2
-686 368 l 1
-705 375 -719 393 -719 415 c 0
-719 443 -697 465 -669 465 c 2
-630 465 l 1
-543 615 l 2
-534 630 -518 640 -500 640 c 0
-482 640 -466 630 -457 615 c 2
-370 465 l 1
-331 465 l 2
-303 465 -281 443 -281 415 c 0
-281 393 -295 375 -314 368 c 1
-292 330 l 2
-288 322 -285 314 -285 305 c 0
-285 276 -309 255 -335 255 c 2
-665 255 l 2"#,
),
GlyphDescriptor::new_with_anchor("kulupu", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-572 342 m 0
-572 362 -589 379 -609 379 c 0
-629 379 -646 362 -646 342 c 0
-646 322 -629 305 -609 305 c 0
-589 305 -572 322 -572 342 c 0
-391 305 m 0
-371 305 -354 322 -354 342 c 0
-354 362 -371 379 -391 379 c 0
-411 379 -428 362 -428 342 c 0
-428 322 -411 305 -391 305 c 0
-500 416 m 0
-510 416 -519 418 -528 420 c 1
-514 406 -505 388 -500 368 c 1
-495 388 -486 406 -472 420 c 1
-481 418 -490 416 -500 416 c 0
-500 491 m 0
-480 491 -463 508 -463 528 c 0
-463 548 -480 565 -500 565 c 0
-520 565 -537 548 -537 528 c 0
-537 508 -520 491 -500 491 c 0
-581 450 m 1
-600 470 -612 498 -612 528 c 0
-612 590 -562 640 -500 640 c 0
-438 640 -388 590 -388 528 c 0
-388 498 -400 470 -419 450 c 1
-410 452 -401 454 -391 454 c 0
-329 454 -279 404 -279 342 c 0
-279 280 -329 230 -391 230 c 0
-444 230 -488 267 -500 316 c 1
-512 267 -556 230 -609 230 c 0
-671 230 -721 280 -721 342 c 0
-721 404 -671 454 -609 454 c 0
-599 454 -590 452 -581 450 c 1"#,
),
GlyphDescriptor::new_with_anchor("kute", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-570 174 m 0
-595 174 -620 194 -620 224 c 0
-620 243 -609 262 -590 270 c 0
-559 284 -514 307 -478 340 c 0
-442 373 -419 412 -419 458 c 0
-419 499 -446 540 -500 540 c 0
-519 540 -540 526 -559 497 c 0
-571 478 -578 463 -584 444 c 0
-584 444 l 0
-591 423 -611 408 -632 408 c 0
-662 408 -682 433 -682 458 c 0
-682 474 -676 485 -670 499 c 0
-664 513 -655 532 -642 552 c 0
-618 589 -572 640 -500 640 c 0
-379 640 -319 541 -319 458 c 0
-319 310 -455 220 -550 178 c 0
-557 175 -563 174 -570 174 c 0
-493 512 m 0
-463 512 -443 487 -443 462 c 0
-443 457 -444 452 -446 447 c 0
-464 391 -493 343 -546 309 c 0
-554 304 -564 302 -573 302 c 0
-598 302 -623 322 -623 351 c 0
-623 367 -615 384 -600 394 c 0
-571 412 -554 437 -541 477 c 0
-534 498 -514 512 -493 512 c 0"#,
),
GlyphDescriptor::new_with_anchor("la", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-508 160 m 0
-533 160 -558 180 -558 210 c 0
-558 230 -547 248 -527 256 c 0
-489 271 -463 339 -463 400 c 0
-463 461 -489 529 -527 544 c 0
-547 552 -558 570 -558 590 c 0
-558 620 -533 640 -508 640 c 0
-502 640 -495 638 -489 636 c 0
-409 604 -363 508 -363 400 c 0
-363 292 -409 196 -489 164 c 0
-495 162 -502 160 -508 160 c 0"#,
),
GlyphDescriptor::new_with_anchor("lape", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-410 450 m 0
-438 450 -460 428 -460 400 c 0
-460 372 -438 350 -410 350 c 0
-382 350 -360 372 -360 400 c 0
-360 428 -382 450 -410 450 c 0
-410 550 m 0
-327 550 -260 483 -260 400 c 0
-260 317 -327 250 -410 250 c 0
-475 250 -530 292 -551 350 c 1
-690 350 l 2
-718 350 -740 372 -740 400 c 0
-740 428 -718 450 -690 450 c 2
-551 450 l 1
-530 508 -475 550 -410 550 c 0"#,
),
GlyphDescriptor::new_with_anchor("laso", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-361 590 m 1
-358 590 l 2
-331 590 -308 568 -308 540 c 0
-308 518 -324 451 -358 416 c 0
-373 401 -391 392 -409 385 c 1
-342 276 l 2
-337 268 -334 259 -334 250 c 0
-334 221 -358 200 -384 200 c 2
-616 200 l 2
-642 200 -666 221 -666 250 c 0
-666 259 -664 268 -659 276 c 2
-591 385 l 1
-627 399 -661 422 -682 492 c 0
-688 510 -692 521 -692 540 c 0
-692 568 -669 590 -642 590 c 2
-639 590 l 2
-625 589 -541 583 -500 540 c 1
-494 547 -453 585 -361 590 c 1
-500 343 m 1
-526 300 l 1
-474 300 l 1
-500 343 l 1"#,
),
GlyphDescriptor::new_with_anchor("lawa", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-637 414 m 1
-637 409 -638 405 -638 400 c 0
-638 323 -586 275 -538 275 c 0
-490 275 -438 323 -438 400 c 0
-438 405 -438 409 -438 414 c 1
-637 414 l 1
-578 514 m 1
-498 514 l 1
-511 521 -524 525 -538 525 c 0
-552 525 -565 521 -578 514 c 1
-338 414 m 1
-338 409 -338 405 -338 400 c 0
-338 283 -421 175 -538 175 c 0
-655 175 -738 283 -738 400 c 0
-738 517 -655 625 -538 625 c 0
-461 625 -400 579 -366 514 c 1
-321 514 l 2
-293 514 -271 492 -271 464 c 0
-271 436 -293 414 -321 414 c 2
-338 414 l 1"#,
),
GlyphDescriptor::new_with_anchor("len", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-618 598 m 2
-382 598 l 2
-354 598 -332 576 -332 548 c 2
-332 253 l 2
-332 225 -354 203 -382 203 c 0
-410 203 -432 225 -432 253 c 2
-432 281 l 1
-450 281 l 1
-450 253 l 2
-450 225 -472 203 -500 203 c 0
-528 203 -550 225 -550 253 c 2
-550 281 l 1
-568 281 l 1
-568 253 l 2
-568 225 -590 203 -618 203 c 0
-646 203 -668 225 -668 253 c 2
-668 548 l 2
-668 576 -646 598 -618 598 c 2
-568 381 m 1
-432 381 l 1
-432 498 l 1
-568 498 l 1
-568 381 l 1"#,
),
GlyphDescriptor::new_with_anchor("lete", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-405 615 m 0
-376 615 -355 590 -355 565 c 0
-355 557 -357 548 -362 540 c 2
-414 450 l 1
-310 450 l 2
-282 450 -260 428 -260 400 c 0
-260 372 -282 350 -310 350 c 2
-414 350 l 1
-362 260 l 2
-357 252 -355 243 -355 235 c 0
-355 210 -375 185 -405 185 c 0
-422 185 -439 194 -448 210 c 2
-500 300 l 1
-552 210 l 2
-561 194 -578 185 -595 185 c 0
-624 185 -645 210 -645 235 c 0
-645 243 -643 252 -638 260 c 2
-586 350 l 1
-690 350 l 2
-718 350 -740 372 -740 400 c 0
-740 428 -718 450 -690 450 c 2
-586 450 l 1
-638 540 l 2
-643 548 -645 557 -645 565 c 0
-645 590 -625 615 -595 615 c 0
-578 615 -561 606 -552 590 c 2
-500 500 l 1
-448 590 l 2
-439 606 -422 615 -405 615 c 0"#,
),
GlyphDescriptor::new_with_anchor("li", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-620 575 m 0
-620 603 -596 625 -570 625 c 0
-558 625 -546 621 -537 613 c 2
-337 438 l 2
-326 429 -320 414 -320 400 c 0
-320 386 -326 371 -337 362 c 2
-537 187 l 2
-546 179 -558 175 -570 175 c 0
-596 175 -620 197 -620 225 c 0
-620 239 -614 253 -603 263 c 2
-446 400 l 1
-603 537 l 2
-614 547 -620 561 -620 575 c 0"#,
),
GlyphDescriptor::new_with_anchor("lili", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-410 510 m 0
-382 510 -360 486 -360 460 c 0
-360 450 -363 439 -370 430 c 2
-460 310 l 2
-469 297 -484 290 -500 290 c 0
-516 290 -531 297 -540 310 c 2
-630 430 l 2
-637 439 -640 450 -640 460 c 0
-640 486 -618 510 -590 510 c 0
-575 510 -560 503 -550 490 c 2
-500 423 l 1
-450 490 l 2
-440 503 -425 510 -410 510 c 0"#,
),
GlyphDescriptor::new_with_anchor("linja", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-265 347 m 0
-265 322 -285 298 -315 298 c 0
-337 298 -357 313 -363 335 c 0
-372 368 -374 420 -412 420 c 0
-427 420 -444 410 -452 385 c 0
-471 324 -526 280 -587 280 c 0
-650 280 -692 323 -710 362 c 0
-721 387 -726 414 -733 440 c 0
-734 444 -735 449 -735 453 c 0
-735 478 -715 503 -685 503 c 0
-663 503 -643 487 -637 465 c 0
-628 432 -626 380 -588 380 c 0
-573 380 -556 390 -548 415 c 0
-529 476 -474 520 -413 520 c 0
-350 520 -308 478 -290 439 c 0
-279 414 -274 386 -267 360 c 0
-266 356 -265 351 -265 347 c 0"#,
),
GlyphDescriptor::new_with_anchor("lipu", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-574 495 m 1
-574 305 l 1
-426 305 l 1
-426 495 l 1
-574 495 l 1
-674 545 m 2
-674 573 -652 595 -624 595 c 2
-376 595 l 2
-348 595 -326 573 -326 545 c 2
-326 255 l 2
-326 227 -348 205 -376 205 c 2
-624 205 l 2
-652 205 -674 227 -674 255 c 2
-674 545 l 2"#,
),
GlyphDescriptor::new_with_anchor("loje", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-500 329 m 1
-521 292 l 1
-479 292 l 1
-500 329 l 1
-500 482 m 0
-484 482 -468 490 -456 504 c 1
-544 504 l 1
-532 490 -516 482 -500 482 c 0
-606 192 m 2
-632 192 -656 213 -656 242 c 0
-656 250 -654 258 -650 266 c 2
-574 401 l 1
-627 430 -661 490 -661 554 c 0
-661 582 -639 604 -611 604 c 2
-389 604 l 2
-361 604 -339 582 -339 554 c 0
-339 490 -373 430 -426 401 c 1
-350 266 l 2
-346 258 -344 250 -344 242 c 0
-344 213 -368 192 -394 192 c 2
-606 192 l 2"#,
),
GlyphDescriptor::new_with_anchor("lon", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-450 491 m 0
-450 463 -472 441 -500 441 c 0
-528 441 -550 463 -550 491 c 0
-550 519 -528 541 -500 541 c 0
-472 541 -450 519 -450 491 c 0
-733 349 m 0
-733 377 -711 399 -683 399 c 2
-317 400 l 2
-289 400 -267 378 -267 350 c 0
-267 322 -289 300 -317 300 c 2
-683 299 l 2
-711 299 -733 321 -733 349 c 0"#,
),
GlyphDescriptor::new_with_anchor("luka", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-739 422 m 0
-739 447 -719 472 -689 472 c 0
-675 472 -662 466 -652 455 c 1
-646 502 -632 542 -609 573 c 0
-576 618 -530 638 -480 638 c 0
-430 638 -384 618 -351 573 c 0
-320 530 -305 471 -305 397 c 2
-305 276 l 2
-305 248 -327 226 -355 226 c 0
-383 226 -405 248 -405 276 c 2
-405 397 l 2
-405 459 -418 495 -432 514 c 0
-445 531 -460 538 -480 538 c 0
-500 538 -515 531 -528 514 c 0
-542 495 -555 459 -555 397 c 2
-555 276 l 2
-555 250 -576 226 -606 226 c 0
-624 226 -640 235 -649 251 c 2
-732 397 l 2
-736 405 -739 414 -739 422 c 0"#,
),
GlyphDescriptor::new_with_anchor("lukin", Anchor::new_scale(AnchorType::Mark, (-500, 400)),
r#"
-500 326 m 0
-445 326 -386 356 -360 400 c 1
-386 444 -445 474 -500 474 c 0
-555 474 -614 444 -640 400 c 1
-614 356 -555 326 -500 326 c 0
-726 451 m 0
-679 530 -585 574 -500 574 c 0
-415 574 -321 530 -274 451 c 0
-265 435 -260 418 -260 400 c 0
-260 382 -265 365 -274 349 c 0
-321 270 -415 226 -500 226 c 0
-585 226 -679 270 -726 349 c 0
-735 365 -740 382 -740 400 c 0
-740 418 -735 435 -726 451 c 0
-500 350 m 0
-528 350 -550 372 -550 400 c 0
-550 428 -528 450 -500 450 c 0
-472 450 -450 428 -450 400 c 0
-450 372 -472 350 -500 350 c 0"#,
),
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | true |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/glyph_blocks/outer.rs | font-forge-tool/src/glyph_blocks/outer.rs | use crate::{GlyphDescriptor, Anchor, AnchorType};
//MARK: OUTER
pub const OUTER_COR: [GlyphDescriptor; 62] = [
GlyphDescriptor::new_with_anchor("akesi", Anchor::new_scale(AnchorType::Base, (500, 310)),
r#"
675 740 m 0
634 740 600 774 600 815 c 0
600 856 634 890 675 890 c 0
716 890 750 856 750 815 c 0
750 774 716 740 675 740 c 0
250 310 m 0
250 209 274 135 314 86 c 0
353 39 412 10 500 10 c 0
588 10 647 39 686 86 c 0
726 135 750 209 750 310 c 0
750 411 726 485 686 534 c 0
647 581 588 610 500 610 c 0
412 610 353 581 314 534 c 0
274 485 250 411 250 310 c 0
500 710 m 0
612 710 703 671 764 597 c 0
792 563 813 522 827 477 c 1
940 477 l 2
968 477 990 455 990 427 c 0
990 399 968 377 940 377 c 2
847 377 l 1
849 355 850 333 850 310 c 0
850 287 849 265 847 243 c 1
940 243 l 2
968 243 990 221 990 193 c 0
990 165 968 143 940 143 c 2
827 143 l 1
813 98 792 57 764 23 c 0
703 -51 612 -90 500 -90 c 0
388 -90 297 -51 236 23 c 0
208 57 187 98 173 143 c 1
60 143 l 2
32 143 10 165 10 193 c 0
10 221 32 243 60 243 c 2
153 243 l 1
151 265 150 287 150 310 c 0
150 333 151 355 153 377 c 1
60 377 l 2
32 377 10 399 10 427 c 0
10 455 32 477 60 477 c 2
173 477 l 1
187 522 208 563 236 597 c 0
297 671 388 710 500 710 c 0
400 815 m 0
400 774 366 740 325 740 c 0
284 740 250 774 250 815 c 0
250 856 284 890 325 890 c 0
366 890 400 856 400 815 c 0"#,
),
GlyphDescriptor::new_with_anchor("anpa", Anchor::new_scale(AnchorType::Base, (500, 550)),
r#"
575 75 m 0
575 34 541 0 500 0 c 0
459 0 425 34 425 75 c 0
425 116 459 150 500 150 c 0
541 150 575 116 575 75 c 0
150 700 m 0
178 700 200 678 200 650 c 2
200 300 l 1
800 300 l 1
800 650 l 2
800 678 822 700 850 700 c 0
878 700 900 678 900 650 c 2
900 250 l 2
900 222 878 200 850 200 c 2
150 200 l 2
122 200 100 222 100 250 c 2
100 650 l 2
100 678 122 700 150 700 c 0"#,
),
GlyphDescriptor::new_with_anchor("ante", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
150 -90 m 0
125 -90 100 -70 100 -40 c 0
100 -20 111 -2 131 6 c 2
481 146 l 2
487 148 494 150 500 150 c 0
506 150 513 148 519 146 c 2
869 6 l 2
889 -2 900 -20 900 -40 c 0
900 -70 875 -90 850 -90 c 0
844 -90 837 -88 831 -86 c 2
500 46 l 1
169 -86 l 2
163 -88 156 -90 150 -90 c 0
100 840 m 0
100 870 125 890 150 890 c 0
156 890 163 888 169 886 c 2
500 754 l 1
831 886 l 2
837 888 844 890 850 890 c 0
875 890 900 870 900 840 c 0
900 820 889 802 869 794 c 2
519 654 l 2
513 652 506 650 500 650 c 0
494 650 487 652 481 654 c 2
131 794 l 2
111 802 100 820 100 840 c 0"#,
),
GlyphDescriptor::new_with_anchor("awen", Anchor::new_scale(AnchorType::Base, (500, 250)),
r#"
500 800 m 0
515 800 526 793 537 787 c 0
547 781 562 774 579 762 c 0
613 738 657 700 700 644 c 0
782 537 860 366 869 100 c 1
900 100 l 2
928 100 950 78 950 50 c 0
950 22 928 0 900 0 c 2
820 0 l 2
792 0 770 22 770 50 c 0
770 325 693 489 620 583 c 0
583 630 547 661 521 680 c 0
513 686 506 690 500 694 c 1
494 690 487 686 479 680 c 0
453 661 417 630 380 583 c 0
307 489 230 325 230 50 c 0
230 22 208 0 180 0 c 2
100 0 l 2
72 0 50 22 50 50 c 0
50 78 72 100 100 100 c 2
131 100 l 1
140 366 218 537 300 644 c 0
343 700 387 738 421 762 c 0
446 779 458 787 481 796 c 0
487 798 494 800 500 800 c 0"#,
),
GlyphDescriptor::new_with_anchor("ijo", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
500 -50 m 0
251 -50 50 151 50 400 c 0
50 649 251 850 500 850 c 0
749 850 950 649 950 400 c 0
950 151 749 -50 500 -50 c 0
500 50 m 0
693 50 850 207 850 400 c 0
850 593 693 750 500 750 c 0
307 750 150 593 150 400 c 0
150 207 307 50 500 50 c 0"#,
),
GlyphDescriptor::new_with_anchor("jan", Anchor::new_scale(AnchorType::Base, (500, 450)),
r#"
200 450 m 0
200 284 334 150 500 150 c 0
666 150 800 284 800 450 c 0
800 616 666 750 500 750 c 0
334 750 200 616 200 450 c 0
101 -50 m 0
75 -50 51 -28 51 -0 c 0
51 14 56 28 68 38 c 2
218 166 l 1
145 238 100 339 100 450 c 0
100 671 279 850 500 850 c 0
721 850 900 671 900 450 c 0
900 339 855 238 782 166 c 1
932 38 l 2
944 28 950 14 950 -0 c 0
950 -28 926 -50 900 -50 c 0
889 -50 877 -46 868 -38 c 2
701 104 l 1
642 70 573 50 500 50 c 0
427 50 358 70 299 104 c 1
134 -38 l 2
125 -46 113 -50 101 -50 c 0"#,
),
GlyphDescriptor::new_with_anchor("jo", Anchor::new_scale(AnchorType::Base, (500, 260)),
r#"
590 700 m 0
590 750 550 790 500 790 c 0
450 790 410 750 410 700 c 0
410 650 450 610 500 610 c 0
550 610 590 650 590 700 c 0
690 700 m 0
690 595 605 510 500 510 c 0
432 510 373 545 339 599 c 1
256 543 200 444 200 330 c 0
200 151 337 10 500 10 c 0
648 10 773 125 796 280 c 1
792 280 l 2
764 280 742 302 742 330 c 0
742 358 764 380 792 380 c 2
850 380 l 2
878 380 900 358 900 330 c 0
900 101 723 -90 500 -90 c 0
277 -90 100 101 100 330 c 0
100 484 179 620 299 693 c 0
303 695 306 697 310 698 c 0
310 699 310 699 310 700 c 0
310 805 395 890 500 890 c 0
605 890 690 805 690 700 c 0"#,
),
GlyphDescriptor::new_with_anchor("kala", Anchor::new_scale(AnchorType::Base, (610, 400)),
r#"
309 400 m 1
395 229 505 156 617 150 c 0
623 150 628 150 634 150 c 0
700 150 751 171 786 207 c 0
825 246 850 309 850 400 c 0
850 491 825 554 786 593 c 0
751 629 700 650 634 650 c 0
628 650 623 650 617 650 c 0
505 644 395 571 309 400 c 1
50 699 m 0
50 724 71 749 100 749 c 0
129 749 142 727 154 705 c 0
189 643 215 593 256 510 c 1
349 660 471 742 612 749 c 0
619 749 627 750 634 750 c 0
722 750 800 721 857 663 c 0
919 600 950 509 950 400 c 0
950 291 919 200 857 137 c 0
800 79 722 50 634 50 c 0
627 50 619 51 612 51 c 0
471 58 349 141 256 291 c 1
211 199 185 146 143 75 c 0
134 59 117 51 100 51 c 0
71 51 50 76 50 101 c 0
50 119 59 131 67 144 c 0
112 225 141 281 198 400 c 1
137 528 110 584 57 674 c 0
52 682 50 690 50 699 c 0"#,
),
GlyphDescriptor::new_with_anchor("kili", Anchor::new_scale(AnchorType::Base, (500, 260)),
r#"
420 562 m 0
386 608 321 642 237 642 c 0
212 642 184 632 161 604 c 0
136 574 110 513 110 401 c 0
110 183 283 10 500 10 c 0
717 10 890 183 890 401 c 0
890 510 863 569 839 599 c 0
815 628 788 638 760 638 c 0
674 638 618 612 580 562 c 0
561 537 531 522 500 522 c 0
469 522 439 537 420 562 c 0
391 840 m 0
391 867 414 890 441 890 c 0
466 890 482 871 494 853 c 0
523 811 550 751 550 674 c 1
603 716 678 738 760 738 c 0
871 738 990 649 990 401 c 0
990 128 772 -90 500 -90 c 0
228 -90 10 128 10 401 c 0
10 653 129 742 237 742 c 0
320 742 396 715 450 673 c 1
450 730 430 778 404 806 c 0
395 816 391 828 391 840 c 0"#,
),
GlyphDescriptor::new_with_anchor("kiwen", Anchor::new_scale(AnchorType::Base, (500, 480)),
r#"
860 540 m 1
733 730 l 1
267 730 l 1
140 540 l 1
500 63 l 1
860 540 l 1
460 -50 m 2
39 508 l 2
32 517 29 527 29 538 c 0
29 548 31 558 37 566 c 2
198 808 l 2
207 822 223 830 240 830 c 2
760 830 l 2
777 830 793 822 802 808 c 2
963 566 l 2
969 558 971 548 971 538 c 0
971 527 968 517 961 508 c 2
540 -50 l 2
531 -63 516 -70 500 -70 c 0
484 -70 469 -63 460 -50 c 2"#,
),
GlyphDescriptor::new_with_anchor("ko", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
359 -17 m 0
236 -17 136 84 136 207 c 0
136 234 141 261 151 287 c 1
88 328 50 399 50 474 c 0
50 610 161 698 273 698 c 0
277 698 280 697 284 697 c 0
309 792 397 863 500 863 c 0
603 863 691 792 716 697 c 0
720 697 723 698 727 698 c 0
839 698 950 610 950 474 c 0
950 399 912 328 849 287 c 1
859 261 863 234 863 207 c 0
863 84 763 -17 640 -17 c 0
590 -17 540 0 500 33 c 1
460 0 409 -17 359 -17 c 0
727 598 m 0
698 598 688 589 673 589 c 0
648 589 623 609 623 640 c 0
623 708 568 763 500 763 c 0
434 763 380 711 377 645 c 1
377 643 377 641 377 639 c 0
377 609 351 589 327 589 c 0
312 589 302 598 273 598 c 0
211 598 150 549 150 474 c 0
150 422 183 374 235 357 c 0
256 350 270 330 270 309 c 0
270 278 236 266 236 207 c 0
236 138 293 83 359 83 c 0
397 83 435 101 459 134 c 0
469 147 485 155 500 155 c 0
516 155 530 147 540 134 c 0
564 101 602 83 640 83 c 0
706 83 763 138 763 207 c 0
763 266 730 277 730 309 c 0
730 330 744 350 765 357 c 0
817 374 850 422 850 474 c 0
850 549 789 598 727 598 c 0"#,
),
GlyphDescriptor::new_with_anchor("kon", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
236 830 m 0
261 830 286 810 286 780 c 0
286 758 271 737 248 731 c 0
210 721 184 714 163 699 c 0
145 686 127 665 116 622 c 0
112 607 110 594 110 581 c 0
110 525 149 474 204 405 c 0
254 342 310 272 310 178 c 0
310 87 265 16 215 -19 c 0
178 -45 135 -57 98 -68 c 0
93 -69 89 -70 84 -70 c 0
59 -70 34 -50 34 -20 c 0
34 2 48 22 70 28 c 0
110 39 137 48 158 63 c 0
176 76 193 96 204 138 c 0
208 152 210 165 210 178 c 0
210 235 170 287 116 355 c 0
65 419 10 488 10 581 c 0
10 672 55 744 105 780 c 0
143 807 186 820 224 829 c 0
228 830 232 830 236 830 c 0
916 870 m 0
941 870 966 850 966 820 c 0
966 798 951 777 928 771 c 0
890 761 864 754 843 739 c 0
825 726 807 705 796 662 c 0
792 647 790 634 790 621 c 0
790 565 829 514 884 445 c 0
934 382 990 312 990 218 c 0
990 127 945 56 895 21 c 0
858 -5 815 -17 778 -28 c 0
773 -29 769 -30 764 -30 c 0
739 -30 714 -10 714 20 c 0
714 42 728 62 750 68 c 0
790 79 817 88 838 103 c 0
856 116 873 136 884 178 c 0
888 192 890 205 890 218 c 0
890 275 850 327 796 395 c 0
745 459 690 528 690 621 c 0
690 712 735 784 785 820 c 0
823 847 866 860 904 869 c 0
908 870 912 870 916 870 c 0"#,
),
GlyphDescriptor::new_with_anchor("kulupu", Anchor::new_scale(AnchorType::Base, (500, 320)),
r#"
170 -90 m 0
82 -90 10 -18 10 70 c 0
10 158 82 230 170 230 c 0
258 230 330 158 330 70 c 0
330 -18 258 -90 170 -90 c 0
170 10 m 0
203 10 230 37 230 70 c 0
230 103 203 130 170 130 c 0
137 130 110 103 110 70 c 0
110 37 137 10 170 10 c 0
830 -90 m 0
742 -90 670 -18 670 70 c 0
670 158 742 230 830 230 c 0
918 230 990 158 990 70 c 0
990 -18 918 -90 830 -90 c 0
830 10 m 0
863 10 890 37 890 70 c 0
890 103 863 130 830 130 c 0
797 130 770 103 770 70 c 0
770 37 797 10 830 10 c 0
500 570 m 0
412 570 340 642 340 730 c 0
340 818 412 890 500 890 c 0
588 890 660 818 660 730 c 0
660 642 588 570 500 570 c 0
500 670 m 0
533 670 560 697 560 730 c 0
560 763 533 790 500 790 c 0
467 790 440 763 440 730 c 0
440 697 467 670 500 670 c 0"#,
),
GlyphDescriptor::new_with_anchor("lawa", Anchor::new_scale(AnchorType::Base, (410, 260)),
r#"
134 515 m 0
119 472 110 424 110 373 c 0
110 166 253 11 413 11 c 0
573 11 716 166 716 373 c 0
716 424 707 472 692 515 c 1
137 515 l 2
136 515 135 515 134 515 c 0
188 615 m 1
638 615 l 1
581 690 500 735 413 735 c 0
326 735 245 690 188 615 c 1
796 515 m 1
809 470 816 422 816 373 c 0
816 125 643 -89 413 -89 c 0
183 -89 10 125 10 373 c 0
10 621 183 835 413 835 c 0
561 835 686 745 756 615 c 1
940 615 l 2
968 615 990 593 990 565 c 0
990 537 968 515 940 515 c 2
796 515 l 1"#,
),
GlyphDescriptor::new_with_anchor("len", Anchor::new_scale(AnchorType::Base, (500, 450)),
r#"
750 200 m 1
750 700 l 1
250 700 l 1
250 200 l 1
750 200 l 1
200 800 m 2
800 800 l 2
828 800 850 778 850 750 c 2
850 0 l 2
850 -28 828 -50 800 -50 c 0
772 -50 750 -28 750 0 c 2
750 100 l 1
550 100 l 1
550 0 l 2
550 -28 528 -50 500 -50 c 0
472 -50 450 -28 450 0 c 2
450 100 l 1
250 100 l 1
250 0 l 2
250 -28 228 -50 200 -50 c 0
172 -50 150 -28 150 0 c 2
150 750 l 2
150 778 172 800 200 800 c 2"#,
),
GlyphDescriptor::new_with_anchor("lipu", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
250 700 m 1
250 100 l 1
750 100 l 1
750 700 l 1
250 700 l 1
150 750 m 2
150 778 172 800 200 800 c 2
800 800 l 2
828 800 850 778 850 750 c 2
850 50 l 2
850 22 828 0 800 0 c 2
200 0 l 2
172 0 150 22 150 50 c 2
150 750 l 2"#,
),
GlyphDescriptor::new_with_anchor("luka", Anchor::new_scale(AnchorType::Base, (600, 330)),
r#"
50 350 m 0
50 375 70 400 100 400 c 0
117 400 134 391 143 375 c 2
250 188 l 1
250 290 l 2
250 459 284 586 347 672 c 0
411 760 502 800 600 800 c 0
698 800 789 760 853 672 c 0
916 586 950 459 950 290 c 2
950 0 l 2
950 -28 928 -50 900 -50 c 0
872 -50 850 -28 850 0 c 2
850 290 l 2
850 447 818 550 772 613 c 0
728 674 668 700 600 700 c 0
532 700 472 674 428 613 c 0
382 550 350 447 350 290 c 2
350 0 l 2
350 -26 330 -50 300 -50 c 0
282 -50 266 -41 257 -25 c 2
57 325 l 2
53 333 50 342 50 350 c 0"#,
),
GlyphDescriptor::new_with_anchor("lupa", Anchor::new_scale(AnchorType::Base, (500, 470)),
r#"
500 100 m 0
631 100 750 227 750 400 c 2
750 750 l 2
750 778 772 800 800 800 c 0
828 800 850 778 850 750 c 2
850 400 l 2
850 187 700 0 500 0 c 0
300 0 150 187 150 400 c 2
150 750 l 2
150 778 172 800 200 800 c 0
228 800 250 778 250 750 c 2
250 400 l 2
250 227 369 100 500 100 c 0"#,
),
GlyphDescriptor::new_with_anchor("mama", Anchor::new_scale(AnchorType::Base, (500, 525)),
r#"
500 160 m 0
459 160 425 126 425 85 c 0
425 44 459 10 500 10 c 0
541 10 575 44 575 85 c 0
575 126 541 160 500 160 c 0
149 525 m 0
149 389 294 260 500 260 c 0
706 260 851 389 851 525 c 0
851 661 706 790 500 790 c 0
294 790 149 661 149 525 c 0
647 180 m 1
665 153 675 120 675 85 c 0
675 -12 597 -90 500 -90 c 0
403 -90 325 -12 325 85 c 0
325 120 335 153 353 180 c 1
181 228 49 358 49 525 c 0
49 737 263 890 500 890 c 0
737 890 951 737 951 525 c 0
951 358 819 228 647 180 c 1"#,
),
GlyphDescriptor::new_with_anchor("mani", Anchor::new_scale(AnchorType::Base, (500, 350)),
r#"
800 350 m 0
800 516 666 650 500 650 c 0
334 650 200 516 200 350 c 0
200 184 334 50 500 50 c 0
666 50 800 184 800 350 c 0
50 800 m 0
50 825 70 850 100 850 c 0
122 850 142 836 148 813 c 0
158 776 172 751 191 733 c 0
211 714 238 700 282 686 c 1
345 727 420 750 500 750 c 0
580 750 655 727 718 686 c 1
762 700 789 714 809 733 c 0
828 751 842 776 852 813 c 0
858 836 878 850 900 850 c 0
930 850 950 825 950 800 c 0
950 796 949 791 948 787 c 0
935 737 914 695 879 661 c 0
857 640 832 624 803 611 c 1
863 541 900 450 900 350 c 0
900 129 721 -50 500 -50 c 0
279 -50 100 129 100 350 c 0
100 450 137 541 197 611 c 1
168 624 143 640 121 661 c 0
86 695 65 737 52 787 c 0
51 791 50 796 50 800 c 0"#,
),
GlyphDescriptor::new_with_anchor("meli", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
150 400 m 0
150 593 307 750 500 750 c 0
693 750 850 593 850 400 c 0
850 207 693 50 500 50 c 0
307 50 150 207 150 400 c 0
500 650 m 0
362 650 250 538 250 400 c 0
250 262 362 150 500 150 c 0
638 150 750 262 750 400 c 0
750 538 638 650 500 650 c 0
500 790 m 0
285 790 110 615 110 400 c 2
110 -40 l 2
110 -68 88 -90 60 -90 c 0
32 -90 10 -68 10 -40 c 2
10 400 l 2
10 671 229 890 500 890 c 0
771 890 990 671 990 400 c 2
990 -40 l 2
990 -68 968 -90 940 -90 c 0
912 -90 890 -68 890 -40 c 2
890 400 l 2
890 615 715 790 500 790 c 0"#,
),
GlyphDescriptor::new_with_anchor("mi", Anchor::new_scale(AnchorType::Base, (500, 525)),
r#"
250 525 m 0
250 387 362 275 500 275 c 0
638 275 750 387 750 525 c 0
750 663 638 775 500 775 c 0
362 775 250 663 250 525 c 0
252 -39 m 0
250 -34 150 304 150 525 c 0
150 718 307 875 500 875 c 0
693 875 850 718 850 525 c 0
850 332 693 175 500 175 c 0
418 175 343 203 284 250 c 1
288 228 312 114 348 -11 c 0
350 -17 350 -21 350 -26 c 0
350 -55 324 -74 298 -74 c 0
279 -74 259 -64 252 -39 c 0"#,
),
GlyphDescriptor::new_with_anchor("mije", Anchor::new_scale(AnchorType::Base, (500, 475)),
r#"
250 475 m 0
250 337 362 225 500 225 c 0
638 225 750 337 750 475 c 0
750 613 638 725 500 725 c 0
362 725 250 613 250 475 c 0
745 225 m 1
860 225 l 2
888 225 910 203 910 175 c 2
910 25 l 2
910 -3 888 -25 860 -25 c 0
832 -25 810 -3 810 25 c 2
810 125 l 1
190 125 l 1
190 25 l 2
190 -3 168 -25 140 -25 c 0
112 -25 90 -3 90 25 c 2
90 175 l 2
90 203 112 225 140 225 c 2
255 225 l 1
190 289 150 377 150 475 c 0
150 668 307 825 500 825 c 0
693 825 850 668 850 475 c 0
850 377 810 289 745 225 c 1"#,
),
GlyphDescriptor::new_with_anchor("monsi", Anchor::new_scale(AnchorType::Base, (650, 400)),
r#"
175 325 m 0
134 325 100 359 100 400 c 0
100 441 134 475 175 475 c 0
216 475 250 441 250 400 c 0
250 359 216 325 175 325 c 0
800 750 m 0
800 722 778 700 750 700 c 2
400 700 l 1
400 100 l 1
750 100 l 2
778 100 800 78 800 50 c 0
800 22 778 0 750 0 c 2
350 0 l 2
322 0 300 22 300 50 c 2
300 750 l 2
300 778 322 800 350 800 c 2
750 800 l 2
778 800 800 778 800 750 c 0"#,
),
GlyphDescriptor::new_with_anchor("mu", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
890 715 m 0
890 756 856 790 815 790 c 0
774 790 740 756 740 715 c 0
740 674 774 640 815 640 c 0
856 640 890 674 890 715 c 0
821 331 m 0
821 508 677 652 500 652 c 0
323 652 179 508 179 331 c 0
179 154 323 10 500 10 c 0
677 10 821 154 821 331 c 0
185 640 m 0
226 640 260 674 260 715 c 0
260 756 226 790 185 790 c 0
144 790 110 756 110 715 c 0
110 674 144 640 185 640 c 0
185 890 m 0
277 890 353 819 360 728 c 1
404 744 451 752 500 752 c 0
549 752 596 744 640 728 c 1
647 819 723 890 815 890 c 0
912 890 990 812 990 715 c 0
990 635 936 566 862 546 c 1
900 483 921 410 921 331 c 0
921 98 733 -90 500 -90 c 0
267 -90 79 98 79 331 c 0
79 410 100 483 138 546 c 1
64 566 10 635 10 715 c 0
10 812 88 890 185 890 c 0
500 20 m 0
464 20 435 49 435 85 c 0
435 121 464 150 500 150 c 0
536 150 565 121 565 85 c 0
565 49 536 20 500 20 c 0"#,
),
GlyphDescriptor::new_with_anchor("mun", Anchor::new_scale(AnchorType::Base, (640, 400)),
r#"
110 697 m 0
199 814 341 890 500 890 c 0
771 890 990 671 990 400 c 0
990 129 771 -90 500 -90 c 0
341 -90 199 -14 110 103 c 0
103 112 100 122 100 133 c 0
100 157 114 173 132 180 c 0
205 207 243 242 263 277 c 0
284 313 290 354 290 400 c 0
290 446 284 487 263 523 c 0
243 558 205 593 132 620 c 0
114 627 100 643 100 667 c 0
100 678 103 688 110 697 c 0
232 684 m 1
286 654 324 616 349 573 c 0
383 516 390 454 390 400 c 0
390 346 383 284 349 227 c 0
324 184 286 146 232 116 c 1
302 50 396 10 500 10 c 0
715 10 890 185 890 400 c 0
890 615 715 790 500 790 c 0
396 790 302 750 232 684 c 1"#,
),
GlyphDescriptor::new_with_anchor("musi", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
799 630 m 0
840 630 874 664 874 705 c 0
874 746 840 780 799 780 c 0
758 780 724 746 724 705 c 0
724 664 758 630 799 630 c 0
201 630 m 0
242 630 276 664 276 705 c 0
276 746 242 780 201 780 c 0
160 780 126 746 126 705 c 0
126 664 160 630 201 630 c 0
251 537 m 1
251 323 l 2
251 190 318 86 408 42 c 0
438 27 469 20 500 20 c 0
576 20 634 60 673 106 c 0
716 157 749 233 749 323 c 2
749 537 l 1
677 559 624 626 624 705 c 0
624 802 702 880 799 880 c 0
896 880 974 802 974 705 c 0
974 626 921 559 849 537 c 1
849 323 l 2
849 206 807 108 750 41 c 0
695 -24 610 -80 500 -80 c 0
390 -80 305 -24 250 41 c 0
193 109 151 206 151 323 c 2
151 537 l 1
79 559 26 626 26 705 c 0
26 802 104 880 201 880 c 0
298 880 376 802 376 705 c 0
376 626 323 559 251 537 c 1"#,
),
GlyphDescriptor::new_with_anchor("nanpa", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
250 650 m 1
250 150 l 1
750 150 l 1
750 650 l 1
250 650 l 1
150 -40 m 2
150 50 l 1
62 50 l 2
34 50 12 72 12 100 c 0
12 128 34 150 62 150 c 2
150 150 l 1
150 650 l 1
62 650 l 2
34 650 12 672 12 700 c 0
12 728 34 750 62 750 c 2
150 750 l 1
150 840 l 2
150 868 172 890 200 890 c 0
228 890 250 868 250 840 c 2
250 750 l 1
750 750 l 1
750 840 l 2
750 868 772 890 800 890 c 0
828 890 850 868 850 840 c 2
850 750 l 1
942 750 l 2
970 750 992 728 992 700 c 0
992 672 970 650 942 650 c 2
850 650 l 1
850 150 l 1
942 150 l 2
970 150 992 128 992 100 c 0
992 72 970 50 942 50 c 2
850 50 l 1
850 -40 l 2
850 -68 828 -90 800 -90 c 0
772 -90 750 -68 750 -40 c 2
750 50 l 1
250 50 l 1
250 -40 l 2
250 -68 228 -90 200 -90 c 0
172 -90 150 -68 150 -40 c 2"#,
),
GlyphDescriptor::new_with_anchor("nasa", Anchor::new_scale(AnchorType::Base, (530, 400)),
r#"
958 645 m 0
958 619 936 595 908 595 c 0
893 595 878 602 868 615 c 0
829 666 780 708 721 739 c 0
690 755 666 762 633 773 c 0
595 781 562 788 517 788 c 0
420 788 340 752 281 710 c 0
211 660 156 589 129 498 c 0
117 459 112 428 112 382 c 0
112 296 142 226 178 173 c 0
238 85 336 11 480 11 c 0
587 11 668 57 722 112 c 0
774 165 818 239 818 340 c 0
818 435 780 505 733 554 c 0
686 603 618 645 526 645 c 0
419 645 345 594 301 531 c 0
276 495 255 449 255 391 c 0
255 299 297 236 350 197 c 0
381 174 423 154 474 154 c 0
478 154 482 154 486 154 c 0
513 154 536 132 536 104 c 0
536 77 515 55 488 54 c 0
483 54 478 54 474 54 c 0
397 54 337 82 291 116 c 0
215 172 155 262 155 391 c 0
155 473 184 539 219 589 c 0
280 677 381 745 526 745 c 0
613 745 684 716 738 680 c 0
830 619 901 519 915 383 c 0
917 367 918 353 918 340 c 0
918 248 889 175 852 116 c 0
787 13 681 -66 535 -85 c 0
514 -88 497 -89 480 -89 c 0
339 -89 236 -32 162 38 c 0
91 105 36 196 18 312 c 0
14 339 12 360 12 382 c 0
12 530 68 637 139 717 c 0
209 796 306 856 429 879 c 0
463 885 487 888 517 888 c 0
571 888 616 878 656 870 c 0
662 869 667 868 672 866 c 0
701 857 731 847 768 828 c 0
840 790 900 738 948 675 c 0
955 666 958 656 958 645 c 0"#,
),
GlyphDescriptor::new_with_anchor("nena", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
500 700 m 0
369 700 250 573 250 400 c 2
250 50 l 2
250 22 228 0 200 0 c 0
172 0 150 22 150 50 c 2
150 400 l 2
150 613 300 800 500 800 c 0
700 800 850 613 850 400 c 2
850 50 l 2
850 22 828 0 800 0 c 0
772 0 750 22 750 50 c 2
750 400 l 2
750 573 631 700 500 700 c 0"#,
),
GlyphDescriptor::new_with_anchor("nimi", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
110 350 m 2
110 240 200 150 310 150 c 2
690 150 l 2
800 150 890 240 890 350 c 2
890 450 l 2
890 560 800 650 690 650 c 2
310 650 l 2
200 650 110 560 110 450 c 2
110 350 l 2
310 50 m 2
144 50 10 184 10 350 c 2
10 450 l 2
10 616 144 750 310 750 c 2
690 750 l 2
856 750 990 616 990 450 c 2
990 350 l 2
990 184 856 50 690 50 c 2
310 50 l 2"#,
),
GlyphDescriptor::new_with_anchor("noka", Anchor::new_scale(AnchorType::Base, (700, 570)),
r#"
700 135 m 0
700 213 554 219 509 219 c 0
475 219 435 216 375 210 c 1
370 210 l 2
310 210 316 275 315 334 c 0
315 349 314 366 314 384 c 0
314 501 317 634 320 741 c 0
321 768 343 790 370 790 c 0
401 790 420 765 420 735 c 0
420 730 414 544 414 384 c 0
414 358 414 337 415 314 c 1
452 317 482 319 509 319 c 0
568 319 800 308 800 135 c 0
800 25 740 -90 490 -90 c 0
297 -90 86 -88 48 -79 c 0
25 -74 10 -53 10 -30 c 2
10 740 l 2
10 768 32 790 60 790 c 0
88 790 110 768 110 740 c 2
110 16 l 1
192 12 277 10 490 10 c 0
692 10 700 91 700 135 c 0"#,
),
GlyphDescriptor::new_with_anchor("ona", Anchor::new_scale(AnchorType::Base, (625, 400)),
r#"
625 150 m 0
763 150 875 262 875 400 c 0
875 538 763 650 625 650 c 0
487 650 375 538 375 400 c 0
375 262 487 150 625 150 c 0
625 50 m 0
406 50 73 148 61 152 c 0
39 158 25 178 25 200 c 0
25 205 26 209 27 214 c 0
27 215 38 250 74 250 c 0
80 250 86 249 93 247 c 0
112 241 222 210 350 184 c 1
303 243 275 318 275 400 c 0
275 593 432 750 625 750 c 0
818 750 975 593 975 400 c 0
975 207 818 50 625 50 c 0"#,
),
GlyphDescriptor::new_with_anchor("open", Anchor::new_scale(AnchorType::Base, (500, 580)),
r#"
250 225 m 1
250 75 l 1
750 75 l 1
750 225 l 1
250 225 l 1
250 725 m 2
250 325 l 1
750 325 l 1
750 725 l 2
750 753 772 775 800 775 c 0
828 775 850 753 850 725 c 0
850 492 850 258 850 25 c 0
850 -3 828 -25 800 -25 c 2
200 -25 l 2
172 -25 150 -3 150 25 c 0
150 258 150 492 150 725 c 0
150 753 172 775 200 775 c 0
228 775 250 753 250 725 c 2"#,
),
GlyphDescriptor::new_with_anchor("pilin", Anchor::new_scale(AnchorType::Base, (500, 380)),
r#"
280 790 m 0
192 790 110 707 110 589 c 0
110 539 132 477 172 409 c 0
252 272 392 126 500 27 c 1
607 125 748 272 828 409 c 0
868 477 890 539 890 589 c 0
890 707 808 790 720 790 c 0
673 790 631 772 600 746 c 0
568 719 552 689 549 673 c 0
545 649 524 631 500 631 c 0
476 631 455 649 451 673 c 0
448 689 432 719 400 746 c 0
369 772 327 790 280 790 c 0
500 787 m 1
547 843 624 890 720 890 c 0
875 890 990 749 990 589 c 0
990 513 957 432 914 359 c 0
837 227 703 83 603 -13 c 0
572 -42 558 -57 533 -78 c 0
524 -86 512 -90 500 -90 c 0
477 -90 463 -74 448 -61 c 0
327 47 177 204 86 359 c 0
43 432 10 513 10 589 c 0
10 749 125 890 280 890 c 0
376 890 453 843 500 787 c 1"#,
),
GlyphDescriptor::new_with_anchor("poka", Anchor::new_scale(AnchorType::Base, (400, 450)),
r#"
100 500 m 0
128 500 150 478 150 450 c 2
150 200 l 1
650 200 l 1
650 450 l 2
650 478 672 500 700 500 c 0
728 500 750 478 750 450 c 2
750 150 l 2
750 122 728 100 700 100 c 2
100 100 l 2
72 100 50 122 50 150 c 2
50 450 l 2
50 478 72 500 100 500 c 0
950 300 m 0
950 259 916 225 875 225 c 0
834 225 800 259 800 300 c 0
800 341 834 375 875 375 c 0
916 375 950 341 950 300 c 0"#,
),
GlyphDescriptor::new_with_anchor("poki", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
200 750 m 0
228 750 250 728 250 700 c 2
250 150 l 1
750 150 l 1
750 700 l 2
750 728 772 750 800 750 c 0
828 750 850 728 850 700 c 2
850 100 l 2
850 72 828 50 800 50 c 2
200 50 l 2
172 50 150 72 150 100 c 2
150 700 l 2
150 728 172 750 200 750 c 0"#,
),
GlyphDescriptor::new_with_anchor("sama", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
990 100 m 0
990 72 968 50 940 50 c 2
60 50 l 2
32 50 10 72 10 100 c 0
10 128 32 150 60 150 c 2
940 150 l 2
968 150 990 128 990 100 c 0
990 700 m 0
990 672 968 650 940 650 c 2
60 650 l 2
32 650 10 672 10 700 c 0
10 728 32 750 60 750 c 2
940 750 l 2
968 750 990 728 990 700 c 0"#,
),
GlyphDescriptor::new_with_anchor("selo", Anchor::new_scale(AnchorType::Base, (500, 350)),
r#"
149 600 m 1
110 600 l 1
110 150 l 2
110 122 88 100 60 100 c 0
32 100 10 122 10 150 c 2
10 650 l 2
10 678 32 700 60 700 c 2
198 700 l 2
199 700 l 0
199 700 l 2
801 700 l 2
801 700 l 0
802 700 l 2
940 700 l 2
968 700 990 678 990 650 c 2
990 150 l 2
990 122 968 100 940 100 c 0
912 100 890 122 890 150 c 2
890 600 l 1
851 600 l 1
851 213 l 2
851 185 829 163 801 163 c 0
773 163 751 185 751 213 c 2
751 600 l 1
249 600 l 1
249 213 l 2
249 185 227 163 199 163 c 0
171 163 149 185 149 213 c 2
149 600 l 1"#,
),
GlyphDescriptor::new_with_anchor("sike", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
500 50 m 0
307 50 150 207 150 400 c 0
150 593 307 750 500 750 c 0
693 750 850 593 850 400 c 0
850 207 693 50 500 50 c 0
750 400 m 0
750 538 638 650 500 650 c 0
362 650 250 538 250 400 c 0
250 262 362 150 500 150 c 0
638 150 750 262 750 400 c 0
500 -90 m 0
229 -90 10 129 10 400 c 0
10 671 229 890 500 890 c 0
771 890 990 671 990 400 c 0
990 129 771 -90 500 -90 c 0
500 10 m 0
715 10 890 185 890 400 c 0
890 615 715 790 500 790 c 0
285 790 110 615 110 400 c 0
110 185 285 10 500 10 c 0"#,
),
GlyphDescriptor::new_with_anchor("sin", Anchor::new_scale(AnchorType::Base, (500, 310)),
r#"
10 310 m 0
10 338 32 360 60 360 c 2
200 360 l 2
228 360 250 338 250 310 c 0
250 282 228 260 200 260 c 2
60 260 l 2
32 260 10 282 10 310 c 0
500 800 m 0
528 800 550 778 550 750 c 2
550 610 l 2
550 582 528 560 500 560 c 0
472 560 450 582 450 610 c 2
450 750 l 2
450 778 472 800 500 800 c 0
750 310 m 0
750 338 772 360 800 360 c 2
940 360 l 2
968 360 990 338 990 310 c 0
990 282 968 260 940 260 c 2
800 260 l 2
772 260 750 282 750 310 c 0"#,
),
GlyphDescriptor::new_with_anchor("sina", Anchor::new_scale(AnchorType::Base, (500, 275)),
r#"
250 275 m 0
250 137 362 25 500 25 c 0
638 25 750 137 750 275 c 0
750 413 638 525 500 525 c 0
362 525 250 413 250 275 c 0
150 275 m 0
150 494 248 827 252 839 c 0
258 861 278 875 300 875 c 0
305 875 309 874 314 873 c 0
315 873 350 862 350 826 c 0
350 820 349 814 347 807 c 0
341 788 310 678 284 550 c 1
343 597 418 625 500 625 c 0
693 625 850 468 850 275 c 0
850 82 693 -75 500 -75 c 0
307 -75 150 82 150 275 c 0"#,
),
GlyphDescriptor::new_with_anchor("sinpin", Anchor::new_scale(AnchorType::Base, (350, 400)),
r#"
825 475 m 0
866 475 900 441 900 400 c 0
900 359 866 325 825 325 c 0
784 325 750 359 750 400 c 0
750 441 784 475 825 475 c 0
200 50 m 0
200 78 222 100 250 100 c 2
600 100 l 1
600 700 l 1
250 700 l 2
222 700 200 722 200 750 c 0
200 778 222 800 250 800 c 2
650 800 l 2
678 800 700 778 700 750 c 2
700 50 l 2
700 22 678 0 650 0 c 2
250 0 l 2
222 0 200 22 200 50 c 0"#,
),
GlyphDescriptor::new_with_anchor("sitelen", Anchor::new_scale(AnchorType::Base, (500, 480)),
r#"
720 140 m 0
720 107 693 80 660 80 c 0
627 80 600 107 600 140 c 0
600 173 627 200 660 200 c 0
693 200 720 173 720 140 c 0
560 140 m 0
560 107 533 80 500 80 c 0
467 80 440 107 440 140 c 0
440 173 467 200 500 200 c 0
533 200 560 173 560 140 c 0
400 137 m 0
400 104 373 77 340 77 c 0
307 77 280 104 280 137 c 0
280 170 307 197 340 197 c 0
373 197 400 170 400 137 c 0
250 750 m 1
250 50 l 1
750 50 l 1
750 750 l 1
250 750 l 1
150 800 m 2
150 828 172 850 200 850 c 2
800 850 l 2
828 850 850 828 850 800 c 2
850 0 l 2
850 -28 828 -50 800 -50 c 2
200 -50 l 2
172 -50 150 -28 150 0 c 2
150 800 l 2"#,
),
GlyphDescriptor::new_with_anchor("sona", Anchor::new_scale(AnchorType::Base, (500, 260)),
r#"
250 510 m 1
250 10 l 1
750 10 l 1
750 510 l 1
250 510 l 1
150 560 m 2
150 588 172 610 200 610 c 2
800 610 l 2
828 610 850 588 850 560 c 2
850 -40 l 2
850 -68 828 -90 800 -90 c 2
200 -90 l 2
172 -90 150 -68 150 -40 c 2
150 560 l 2
750 823 m 0
779 823 800 798 800 773 c 0
800 765 798 756 793 748 c 2
743 661 l 2
734 645 717 636 700 636 c 0
671 636 650 661 650 686 c 0
650 694 652 703 657 711 c 2
707 798 l 2
716 814 733 823 750 823 c 0
200 773 m 0
200 798 221 823 250 823 c 0
267 823 284 814 293 798 c 2
343 711 l 2
348 703 350 694 350 686 c 0
350 661 329 636 300 636 c 0
283 636 266 645 257 661 c 2
207 748 l 2
202 756 200 765 200 773 c 0
500 890 m 0
528 890 550 868 550 840 c 2
550 740 l 2
550 712 528 690 500 690 c 0
472 690 450 712 450 740 c 2
450 840 l 2
450 868 472 890 500 890 c 0"#,
),
GlyphDescriptor::new_with_anchor("soweli", Anchor::new_scale(AnchorType::Base, (640, 500)),
r#"
190 850 m 2
640 850 l 2
833 850 990 693 990 500 c 0
990 324 860 178 690 154 c 1
690 0 l 2
690 -28 668 -50 640 -50 c 0
612 -50 590 -28 590 0 c 2
590 200 l 2
590 228 612 250 640 250 c 0
778 250 890 362 890 500 c 0
890 638 778 750 640 750 c 2
190 750 l 2
162 750 140 772 140 800 c 0
140 828 162 850 190 850 c 2
320 250 m 0
348 250 370 228 370 200 c 2
370 0 l 2
370 -28 348 -50 320 -50 c 0
292 -50 270 -28 270 0 c 2
270 200 l 2
270 228 292 250 320 250 c 0
160 250 m 0
188 250 210 228 210 200 c 2
210 0 l 2
210 -28 188 -50 160 -50 c 0
132 -50 110 -28 110 0 c 2
110 200 l 2
110 228 132 250 160 250 c 0
480 250 m 0
508 250 530 228 530 200 c 2
530 0 l 2
530 -28 508 -50 480 -50 c 0
452 -50 430 -28 430 0 c 2
430 200 l 2
430 228 452 250 480 250 c 0"#,
),
GlyphDescriptor::new_with_anchor("suli", Anchor::new_scale(AnchorType::Base, (500, 650)),
r#"
940 890 m 0
970 890 990 865 990 840 c 0
990 832 988 824 984 816 c 0
864 596 767 418 694 274 c 0
620 130 572 23 548 -55 c 0
542 -76 522 -90 500 -90 c 0
478 -90 458 -76 452 -55 c 0
428 23 380 130 306 274 c 0
233 418 136 596 16 816 c 0
12 824 10 832 10 840 c 0
10 865 30 890 60 890 c 0
78 890 95 881 104 864 c 0
224 644 321 465 395 320 c 0
437 237 473 164 500 101 c 1
527 164 563 237 605 320 c 0
679 465 776 644 896 864 c 0
905 881 922 890 940 890 c 0"#,
),
GlyphDescriptor::new_with_anchor("suno", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
500 650 m 0
362 650 250 538 250 400 c 0
250 262 362 150 500 150 c 0
638 150 750 262 750 400 c 0
750 538 638 650 500 650 c 0
450 840 m 2
450 868 472 890 500 890 c 0
528 890 550 868 550 840 c 2
550 746 l 1
703 724 824 603 846 450 c 1
940 450 l 2
968 450 990 428 990 400 c 0
990 372 968 350 940 350 c 2
846 350 l 1
824 197 703 76 550 54 c 1
550 -40 l 2
550 -68 528 -90 500 -90 c 0
472 -90 450 -68 450 -40 c 2
450 54 l 1
297 76 176 197 154 350 c 1
60 350 l 2
32 350 10 372 10 400 c 0
10 428 32 450 60 450 c 2
154 450 l 1
176 603 297 724 450 746 c 1
450 840 l 2"#,
),
GlyphDescriptor::new_with_anchor("telo", Anchor::new_scale(AnchorType::Base, (500, 400)),
r#"
920 866 m 0
950 866 970 841 970 816 c 0
970 810 969 803 967 798 c 0
957 762 944 721 919 685 c 0
884 635 813 590 722 590 c 0
625 590 552 650 488 701 c 0
418 756 374 790 319 790 c 0
306 790 293 788 278 784 c 0
235 773 214 755 201 737 c 0
186 716 179 690 169 652 c 0
163 629 142 614 120 614 c 0
90 614 70 639 70 664 c 0
70 668 70 672 71 676 c 0
80 714 93 757 120 795 c 0
156 845 228 890 319 890 c 0
415 890 487 831 552 779 c 0
621 725 667 690 722 690 c 0
735 690 748 692 762 696 c 0
804 707 824 724 837 742 c 0
852 763 861 790 872 830 c 0
878 852 898 866 920 866 c 0
880 186 m 0
910 186 930 161 930 136 c 0
930 130 929 123 927 118 c 0
917 82 904 41 879 5 c 0
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | true |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/glyph_blocks/lower.rs | font-forge-tool/src/glyph_blocks/lower.rs | use crate::{GlyphDescriptor, Anchor, AnchorType};
//MARK: LOWER
pub const LOWER_COR: [GlyphDescriptor; 137] = [
GlyphDescriptor::new_with_anchor("a", Anchor::new_stack(AnchorType::Base),
r#"
550 340 m 2
550 240 l 2
550 212 528 190 500 190 c 0
472 190 450 212 450 240 c 2
450 340 l 2
450 368 472 390 500 390 c 0
528 390 550 368 550 340 c 2
750 -40 m 0
750 -65 729 -90 700 -90 c 0
683 -90 668 -82 658 -67 c 0
647 -49 633 -32 620 -17 c 1
598 -61 552 -90 500 -90 c 0
425 -90 365 -30 365 45 c 0
365 120 425 180 500 180 c 0
529 180 556 170 578 155 c 0
626 121 693 65 742 -13 c 0
747 -21 750 -31 750 -40 c 0
500 10 m 0
519 10 535 26 535 45 c 0
535 64 519 80 500 80 c 0
481 80 465 64 465 45 c 0
465 26 481 10 500 10 c 0"#,
),
GlyphDescriptor::new_with_anchor("akesi", Anchor::new_stack(AnchorType::Base),
r#"
693 340 m 0
693 312 670 290 642 290 c 0
614 290 592 312 592 340 c 0
592 368 614 390 642 390 c 0
670 390 693 368 693 340 c 0
500 350 m 0
554 350 589 314 608 280 c 0
616 266 622 252 626 237 c 1
660 237 l 2
688 237 710 215 710 187 c 0
710 159 688 137 660 137 c 2
635 137 l 1
635 123 l 1
660 123 l 2
688 123 710 101 710 73 c 0
710 45 688 23 660 23 c 2
626 23 l 1
622 8 616 -6 608 -20 c 0
589 -54 554 -90 500 -90 c 0
446 -90 411 -54 392 -20 c 0
384 -6 378 8 374 23 c 1
340 23 l 2
312 23 290 45 290 73 c 0
290 101 312 123 340 123 c 2
365 123 l 1
365 137 l 1
340 137 l 2
312 137 290 159 290 187 c 0
290 215 312 237 340 237 c 2
374 237 l 1
378 252 384 266 392 280 c 0
411 314 446 350 500 350 c 0
517 237 m 1
509 249 503 250 500 250 c 0
497 250 491 249 483 237 c 1
517 237 l 1
465 137 m 1
465 123 l 1
535 123 l 1
535 137 l 1
465 137 l 1
483 23 m 1
491 11 497 10 500 10 c 0
503 10 509 11 517 23 c 1
483 23 l 1
359 290 m 0
331 290 309 312 309 340 c 0
309 368 331 390 359 390 c 0
387 390 410 368 410 340 c 0
410 312 387 290 359 290 c 0"#,
),
GlyphDescriptor::new_with_anchor("ala", Anchor::new_stack(AnchorType::Base),
r#"
260 340 m 0
260 367 283 390 310 390 c 0
323 390 335 385 345 375 c 2
500 221 l 1
655 375 l 2
665 385 677 390 690 390 c 0
717 390 740 367 740 340 c 0
740 327 735 315 725 305 c 2
571 150 l 1
725 -5 l 2
735 -15 740 -27 740 -40 c 0
740 -67 717 -90 690 -90 c 0
677 -90 665 -85 655 -75 c 2
500 79 l 1
345 -75 l 2
335 -85 323 -90 310 -90 c 0
283 -90 260 -67 260 -40 c 0
260 -27 265 -15 275 -5 c 2
429 150 l 1
275 305 l 2
265 315 260 327 260 340 c 0"#,
),
GlyphDescriptor::new_with_anchor("alasa", Anchor::new_stack(AnchorType::Base),
r#"
700 -20 m 0
673 -20 650 3 650 30 c 0
650 43 655 55 665 65 c 2
699 100 l 1
620 100 l 1
594 -14 495 -90 350 -90 c 0
322 -90 300 -68 300 -40 c 2
300 100 l 1
180 100 l 2
152 100 130 122 130 150 c 0
130 178 152 200 180 200 c 2
300 200 l 1
300 340 l 2
300 368 322 390 350 390 c 0
495 390 594 314 620 200 c 1
699 200 l 1
665 235 l 2
655 245 650 257 650 270 c 0
650 297 673 320 700 320 c 0
713 320 725 315 735 305 c 2
855 185 l 2
863 177 870 165 870 150 c 0
870 135 863 123 855 115 c 2
735 -5 l 2
725 -15 713 -20 700 -20 c 0
516 200 m 1
510 214 501 229 489 241 c 0
470 260 442 278 400 286 c 1
400 200 l 1
516 200 l 1
400 100 m 1
400 14 l 1
442 22 470 40 489 59 c 0
501 71 510 86 516 100 c 1
400 100 l 1"#,
),
GlyphDescriptor::new_with_anchor("ale", Anchor::new_stack(AnchorType::Base),
r#"
70 150 m 0
70 278 141 390 266 390 c 0
327 390 375 363 417 325 c 0
445 300 472 268 500 232 c 1
528 268 555 300 583 325 c 0
625 363 673 390 734 390 c 0
859 390 930 278 930 150 c 0
930 22 859 -90 734 -90 c 0
673 -90 625 -63 583 -25 c 0
555 0 528 32 500 68 c 1
472 32 445 0 417 -25 c 0
375 -63 327 -90 266 -90 c 0
141 -90 70 22 70 150 c 0
437 150 m 1
403 193 376 227 349 251 c 0
318 279 293 290 266 290 c 0
238 290 216 278 200 257 c 0
183 235 170 199 170 150 c 0
170 101 183 65 200 43 c 0
216 22 238 10 266 10 c 0
293 10 318 21 349 49 c 0
376 73 403 107 437 150 c 1
563 150 m 1
597 107 624 73 651 49 c 0
682 21 707 10 734 10 c 0
762 10 784 22 800 43 c 0
817 65 830 101 830 150 c 0
830 199 817 235 800 257 c 0
784 278 762 290 734 290 c 0
707 290 682 279 651 251 c 0
624 227 597 193 563 150 c 1"#,
),
GlyphDescriptor::new_with_anchor("anpa", Anchor::new_stack(AnchorType::Base),
r#"
200 390 m 0
228 390 250 368 250 340 c 2
250 190 l 1
750 190 l 1
750 340 l 2
750 368 772 390 800 390 c 0
828 390 850 368 850 340 c 2
850 140 l 2
850 112 828 90 800 90 c 2
200 90 l 2
172 90 150 112 150 140 c 2
150 340 l 2
150 368 172 390 200 390 c 0
575 -15 m 0
575 -56 541 -90 500 -90 c 0
459 -90 425 -56 425 -15 c 0
425 26 459 60 500 60 c 0
541 60 575 26 575 -15 c 0"#,
),
GlyphDescriptor::new_with_anchor("ante", Anchor::new_stack(AnchorType::Base),
r#"
730 390 m 0
755 390 780 370 780 340 c 0
780 323 771 305 755 296 c 2
525 166 l 2
517 162 508 160 500 160 c 0
492 160 483 162 475 166 c 2
245 296 l 2
229 305 220 323 220 340 c 0
220 370 245 390 270 390 c 0
278 390 287 388 295 384 c 2
500 267 l 1
705 384 l 2
713 388 722 390 730 390 c 0
270 -90 m 0
245 -90 220 -70 220 -40 c 0
220 -23 229 -5 245 4 c 2
475 134 l 2
483 138 492 140 500 140 c 0
508 140 517 138 525 134 c 2
755 4 l 2
771 -5 780 -23 780 -40 c 0
780 -70 755 -90 730 -90 c 0
722 -90 713 -88 705 -84 c 2
500 33 l 1
295 -84 l 2
287 -88 278 -90 270 -90 c 0"#,
),
GlyphDescriptor::new_with_anchor("anu", Anchor::new_stack(AnchorType::Base),
r#"
287 340 m 0
287 367 310 390 337 390 c 0
350 390 363 385 373 375 c 2
500 244 l 1
627 375 l 2
637 385 650 390 663 390 c 0
690 390 713 367 713 340 c 0
713 327 708 315 699 305 c 2
550 152 l 1
550 -40 l 2
550 -68 528 -90 500 -90 c 0
472 -90 450 -68 450 -40 c 2
450 152 l 1
301 305 l 2
292 315 287 327 287 340 c 0"#,
),
GlyphDescriptor::new_with_anchor("awen", Anchor::new_stack(AnchorType::Base),
r#"
500 390 m 0
521 390 540 377 547 357 c 2
671 10 l 1
717 10 l 2
745 10 767 -12 767 -40 c 0
767 -68 745 -90 717 -90 c 2
636 -90 l 2
615 -90 596 -77 589 -57 c 2
500 191 l 1
411 -57 l 2
404 -77 385 -90 364 -90 c 2
283 -90 l 2
255 -90 233 -68 233 -40 c 0
233 -12 255 10 283 10 c 2
329 10 l 1
453 357 l 2
460 377 479 390 500 390 c 0"#,
),
GlyphDescriptor::new_with_anchor("e", Anchor::new_stack(AnchorType::Base),
r#"
260 340 m 0
260 367 283 390 310 390 c 0
323 390 335 385 345 375 c 2
535 185 l 2
545 175 550 163 550 150 c 0
550 137 545 125 535 115 c 2
345 -75 l 2
335 -85 323 -90 310 -90 c 0
283 -90 260 -67 260 -40 c 0
260 -27 265 -15 275 -5 c 2
429 150 l 1
275 305 l 2
265 315 260 327 260 340 c 0
450 340 m 0
450 367 473 390 500 390 c 0
513 390 525 385 535 375 c 2
725 185 l 2
735 175 740 163 740 150 c 0
740 137 735 125 725 115 c 2
535 -75 l 2
525 -85 513 -90 500 -90 c 0
473 -90 450 -67 450 -40 c 0
450 -27 455 -15 465 -5 c 2
619 150 l 1
465 305 l 2
455 315 450 327 450 340 c 0"#,
),
GlyphDescriptor::new_with_anchor("en", Anchor::new_stack(AnchorType::Base),
r#"
550 340 m 2
550 200 l 1
690 200 l 2
718 200 740 178 740 150 c 0
740 122 718 100 690 100 c 2
550 100 l 1
550 -40 l 2
550 -68 528 -90 500 -90 c 0
472 -90 450 -68 450 -40 c 2
450 100 l 1
310 100 l 2
282 100 260 122 260 150 c 0
260 178 282 200 310 200 c 2
450 200 l 1
450 340 l 2
450 368 472 390 500 390 c 0
528 390 550 368 550 340 c 2"#,
),
GlyphDescriptor::new_with_anchor("esun", Anchor::new_stack(AnchorType::Base),
r#"
399 58 m 2
372 58 360 50 360 23 c 0
360 10 360 10 373 10 c 0
398 10 404 17 416 58 c 1
399 58 l 2
584 242 m 2
640 242 640 262 640 277 c 0
640 290 640 290 627 290 c 0
608 290 597 287 583 242 c 1
584 242 l 2
451 163 m 1
310 210 260 312 260 340 c 0
260 365 280 390 310 390 c 0
329 390 347 379 356 360 c 0
360 352 393 287 483 257 c 1
502 315 526 390 627 390 c 0
655 390 740 382 740 277 c 0
740 213 703 142 584 142 c 0
572 142 561 142 550 143 c 1
549 138 l 1
695 94 740 -10 740 -40 c 0
740 -65 720 -90 690 -90 c 0
668 -90 648 -75 642 -53 c 0
640 -45 617 15 517 43 c 1
497 -17 474 -90 373 -90 c 0
345 -90 260 -82 260 23 c 0
260 102 312 158 399 158 c 0
417 158 433 157 449 156 c 1
451 163 l 1"#,
),
GlyphDescriptor::new_with_anchor("ijo", Anchor::new_stack(AnchorType::Base),
r#"
500 10 m 0
577 10 640 73 640 150 c 0
640 227 577 290 500 290 c 0
423 290 360 227 360 150 c 0
360 73 423 10 500 10 c 0
500 -90 m 0
367 -90 260 17 260 150 c 0
260 283 367 390 500 390 c 0
633 390 740 283 740 150 c 0
740 17 633 -90 500 -90 c 0"#,
),
GlyphDescriptor::new_with_anchor("ike", Anchor::new_stack(AnchorType::Base),
r#"
900 25 m 0
900 0 880 -25 850 -25 c 0
828 -25 808 -10 802 12 c 0
768 132 648 225 500 225 c 0
352 225 232 132 198 12 c 0
192 -10 172 -25 150 -25 c 0
120 -25 100 0 100 25 c 0
100 29 101 34 102 38 c 0
149 205 311 325 500 325 c 0
689 325 851 205 898 38 c 0
899 34 900 29 900 25 c 0"#,
),
GlyphDescriptor::new_with_anchor("ilo", Anchor::new_stack(AnchorType::Base),
r#"
273 340 m 2
273 368 295 390 323 390 c 2
677 390 l 2
705 390 727 368 727 340 c 2
727 163 l 2
727 135 705 113 677 113 c 2
550 113 l 1
550 -40 l 2
550 -68 528 -90 500 -90 c 0
472 -90 450 -68 450 -40 c 2
450 113 l 1
323 113 l 2
295 113 273 135 273 163 c 2
273 340 l 2
550 213 m 1
627 213 l 1
627 290 l 1
550 290 l 1
550 213 l 1
450 290 m 1
373 290 l 1
373 213 l 1
450 213 l 1
450 290 l 1"#,
),
GlyphDescriptor::new_with_anchor("insa", Anchor::new_stack(AnchorType::Base),
r#"
575 225 m 0
575 184 541 150 500 150 c 0
459 150 425 184 425 225 c 0
425 266 459 300 500 300 c 0
541 300 575 266 575 225 c 0
200 350 m 0
228 350 250 328 250 300 c 2
250 50 l 1
750 50 l 1
750 300 l 2
750 328 772 350 800 350 c 0
828 350 850 328 850 300 c 2
850 0 l 2
850 -28 828 -50 800 -50 c 2
200 -50 l 2
172 -50 150 -28 150 0 c 2
150 300 l 2
150 328 172 350 200 350 c 0"#,
),
GlyphDescriptor::new_with_anchor("jaki", Anchor::new_stack(AnchorType::Base),
r#"
608 -80 m 0
569 -80 537 -72 502 -62 c 1
463 -80 409 -90 339 -90 c 0
327 -90 315 -90 304 -90 c 0
223 -90 157 -85 157 -3 c 0
157 22 171 39 182 49 c 1
156 62 125 80 125 119 c 0
125 156 151 175 174 187 c 0
193 196 219 204 251 212 c 1
225 228 185 246 185 292 c 0
185 329 220 350 247 362 c 0
266 370 292 378 325 388 c 0
330 389 334 390 339 390 c 0
362 390 384 374 388 349 c 1
475 369 564 379 621 379 c 0
688 379 743 376 782 369 c 0
815 363 841 356 860 334 c 0
868 325 875 311 875 295 c 0
875 261 852 245 833 234 c 0
818 225 798 218 776 211 c 0
730 197 664 183 575 170 c 0
555 167 534 164 516 161 c 1
524 155 532 148 539 142 c 1
588 151 657 169 698 173 c 0
711 174 732 178 803 195 c 0
807 196 811 196 815 196 c 0
841 196 865 175 865 146 c 0
865 136 862 126 856 118 c 0
806 44 774 -18 661 -70 c 0
644 -78 625 -80 608 -80 c 0
405 117 m 1
395 123 385 131 375 137 c 1
337 130 306 123 281 117 c 1
295 112 311 106 328 101 c 1
353 107 379 112 405 117 c 1
578 23 m 1
584 22 589 21 594 20 c 0
611 18 618 20 619 20 c 2
619 20 l 1
655 36 678 54 698 73 c 1
670 70 617 56 582 49 c 1
582 41 580 32 578 23 c 1
395 242 m 0
456 253 556 266 628 279 c 1
626 279 623 279 621 279 c 0
567 279 476 267 389 246 c 1
391 245 393 243 395 242 c 0"#,
),
GlyphDescriptor::new_with_anchor("jan", Anchor::new_stack(AnchorType::Base),
r#"
310 -90 m 0
284 -90 260 -68 260 -40 c 0
260 -26 265 -12 277 -2 c 2
328 42 l 1
300 79 283 124 283 174 c 0
283 293 380 390 500 390 c 0
620 390 716 293 716 174 c 0
716 124 699 79 671 42 c 1
722 -2 l 2
734 -12 740 -26 740 -40 c 0
740 -68 716 -90 690 -90 c 0
679 -90 666 -86 657 -78 c 2
592 -22 l 1
564 -35 533 -42 500 -42 c 0
467 -42 436 -35 408 -22 c 1
343 -78 l 2
334 -86 322 -90 310 -90 c 0
383 174 m 0
383 110 436 58 500 58 c 0
564 58 616 110 616 174 c 0
616 238 564 290 500 290 c 0
436 290 383 238 383 174 c 0"#,
),
GlyphDescriptor::new_with_anchor("jelo", Anchor::new_stack(AnchorType::Base),
r#"
550 340 m 2
550 307 l 2
550 306 550 303 550 302 c 0
556 297 561 292 566 286 c 0
567 286 570 286 571 286 c 2
604 286 l 2
632 286 654 264 654 236 c 0
654 208 632 186 604 186 c 2
571 186 l 2
570 186 567 186 566 186 c 0
561 180 556 175 550 170 c 0
550 168 550 167 550 165 c 2
550 144 l 1
648 -16 l 2
653 -25 656 -34 656 -42 c 0
656 -70 631 -93 606 -93 c 2
394 -93 l 2
369 -93 344 -70 344 -42 c 0
344 -34 347 -25 352 -16 c 2
450 144 l 1
450 165 l 2
450 167 450 168 450 170 c 0
444 175 439 180 434 186 c 0
433 186 430 186 429 186 c 2
396 186 l 2
368 186 346 208 346 236 c 0
346 264 368 286 396 286 c 2
429 286 l 2
430 286 433 286 434 286 c 0
439 292 444 297 450 302 c 0
450 303 450 306 450 307 c 2
450 340 l 2
450 368 472 390 500 390 c 0
528 390 550 368 550 340 c 2
483 236 m 0
483 227 490 219 500 219 c 0
510 219 517 227 517 236 c 0
517 245 510 253 500 253 c 0
490 253 483 245 483 236 c 0
500 34 m 1
484 7 l 1
516 7 l 1
500 34 l 1"#,
),
GlyphDescriptor::new_with_anchor("jo", Anchor::new_stack(AnchorType::Base),
r#"
601 289 m 0
601 233 556 188 500 188 c 0
484 188 469 191 455 198 c 1
431 181 413 150 413 112 c 0
413 51 457 10 500 10 c 0
530 10 560 30 576 62 c 1
523 62 l 2
495 62 473 84 473 112 c 0
473 140 495 162 523 162 c 2
637 162 l 2
665 162 687 140 687 112 c 0
687 6 608 -90 500 -90 c 0
392 -90 313 6 313 112 c 0
313 182 346 245 399 282 c 0
399 284 399 287 399 289 c 0
399 345 444 390 500 390 c 0
556 390 601 345 601 289 c 0
500 290 m 0
501 290 501 290 501 289 c 0
501 288 501 288 500 288 c 0
499 288 499 288 499 289 c 0
499 290 499 290 500 290 c 0"#,
),
GlyphDescriptor::new_with_anchor("kala", Anchor::new_stack(AnchorType::Base),
r#"
291 227 m 1
374 312 479 390 615 390 c 0
689 390 755 372 806 334 c 0
862 291 893 227 893 150 c 0
893 73 862 9 806 -34 c 0
755 -72 689 -90 615 -90 c 0
479 -90 374 -11 291 74 c 1
254 25 228 -14 202 -63 c 0
193 -80 175 -90 157 -90 c 0
127 -90 108 -65 108 -41 c 0
108 -31 110 -21 115 -12 c 0
146 45 177 91 224 150 c 1
175 212 145 256 113 317 c 0
109 324 107 332 107 340 c 0
107 365 128 390 157 390 c 0
176 390 193 378 203 360 c 0
228 314 255 274 291 227 c 1
356 150 m 1
433 69 516 9 615 9 c 0
673 9 717 23 745 45 c 0
774 67 793 100 793 150 c 0
793 200 774 233 745 255 c 0
717 277 673 291 615 291 c 0
515 291 433 231 356 150 c 1"#,
),
GlyphDescriptor::new_with_anchor("kalama", Anchor::new_stack(AnchorType::Base),
r#"
286 125 m 0
286 153 308 175 336 175 c 2
664 175 l 2
692 175 714 153 714 125 c 0
714 7 618 -89 500 -89 c 0
382 -89 286 7 286 125 c 0
398 75 m 1
416 37 455 11 500 11 c 0
545 11 584 37 602 75 c 1
398 75 l 1
608 180 m 0
578 180 558 205 558 230 c 0
558 236 559 243 562 249 c 2
600 341 l 2
608 360 626 372 646 372 c 0
676 372 696 347 696 322 c 0
696 316 696 309 693 303 c 2
654 210 l 2
646 191 628 180 608 180 c 0
304 322 m 0
304 347 324 372 354 372 c 0
374 372 392 360 400 341 c 2
438 249 l 2
441 243 442 236 442 230 c 0
442 205 422 180 392 180 c 0
372 180 354 191 346 210 c 2
307 303 l 2
304 309 304 316 304 322 c 0
500 391 m 0
528 391 550 369 550 341 c 2
550 240 l 2
550 212 528 190 500 190 c 0
472 190 450 212 450 240 c 2
450 341 l 2
450 369 472 391 500 391 c 0"#,
),
GlyphDescriptor::new_with_anchor("kama", Anchor::new_stack(AnchorType::Base),
r#"
221 8 m 0
221 36 245 58 271 58 c 0
282 58 294 54 303 46 c 0
324 28 347 18 373 13 c 1
496 357 l 2
504 379 523 389 543 389 c 0
562 389 581 379 589 359 c 2
725 26 l 2
728 19 729 13 729 7 c 0
729 -24 696 -47 672 -60 c 0
643 -76 601 -90 543 -90 c 0
515 -90 493 -68 493 -40 c 0
493 -12 515 10 543 10 c 0
579 10 603 18 618 25 c 1
546 200 l 1
454 -57 l 2
447 -77 428 -90 407 -90 c 0
342 -90 288 -72 239 -31 c 0
227 -21 221 -6 221 8 c 0"#,
),
GlyphDescriptor::new_with_anchor("kasi", Anchor::new_stack(AnchorType::Base),
r#"
708 378 m 0
739 361 743 336 743 298 c 0
743 261 734 227 713 199 c 0
692 171 662 156 631 147 c 0
606 140 577 134 550 134 c 1
550 -40 l 2
550 -68 528 -90 500 -90 c 0
472 -90 450 -68 450 -40 c 2
450 108 l 1
446 108 442 107 438 107 c 0
412 107 387 113 362 121 c 0
307 139 257 185 257 256 c 0
257 259 257 262 257 265 c 0
257 290 259 318 271 347 c 0
279 365 296 377 316 377 c 0
374 377 417 364 445 348 c 0
457 341 474 331 490 318 c 1
499 332 508 344 519 354 c 0
549 382 584 390 618 390 c 2
619 390 l 2
620 390 620 390 621 390 c 0
638 390 652 390 664 389 c 0
675 388 693 387 708 378 c 0
643 290 m 1
636 290 628 290 618 290 c 0
602 290 594 286 587 280 c 0
579 273 570 260 558 235 c 1
571 236 588 238 604 243 c 0
622 248 630 254 634 260 c 0
638 265 642 274 643 290 c 1
447 208 m 1
444 220 439 227 435 232 c 0
427 242 415 249 396 260 c 0
388 264 376 270 358 273 c 1
358 268 357 263 357 256 c 0
357 245 361 239 365 234 c 0
376 221 417 207 440 207 c 0
442 207 445 208 447 208 c 1"#,
),
GlyphDescriptor::new_with_anchor("ken", Anchor::new_stack(AnchorType::Base),
r#"
668 340 m 0
668 329 665 219 565 150 c 1
621 111 668 42 668 -40 c 0
668 -68 646 -90 618 -90 c 0
586 -90 571 -66 568 -36 c 0
566 -11 550 78 430 96 c 1
430 -40 l 2
430 -68 408 -90 380 -90 c 0
352 -90 330 -68 330 -40 c 2
330 340 l 2
330 368 352 390 380 390 c 0
408 390 430 368 430 340 c 2
430 204 l 1
498 214 530 246 546 272 c 0
561 295 567 314 568 340 c 0
568 367 591 390 618 390 c 0
646 390 668 368 668 340 c 0"#,
),
GlyphDescriptor::new_with_anchor("kepeken", Anchor::new_stack(AnchorType::Base),
r#"
232 23 m 0
232 50 255 73 280 73 c 0
294 73 308 67 319 57 c 1
343 97 386 124 437 135 c 1
437 152 l 1
319 152 l 2
291 152 269 174 269 202 c 2
269 340 l 2
269 368 291 390 319 390 c 2
655 390 l 2
683 390 705 368 705 340 c 2
705 202 l 2
705 174 683 152 655 152 c 2
537 152 l 1
537 135 l 1
606 120 674 70 674 -7 c 2
674 -40 l 2
674 -68 652 -90 624 -90 c 0
596 -90 574 -68 574 -40 c 2
574 -7 l 2
574 21 525 40 488 40 c 0
451 40 401 21 401 -7 c 2
401 -40 l 2
401 -68 376 -90 350 -90 c 0
339 -90 328 -86 318 -77 c 2
248 -14 l 2
237 -4 232 9 232 23 c 0
537 252 m 1
605 252 l 1
605 290 l 1
537 290 l 1
537 252 l 1
437 290 m 1
369 290 l 1
369 252 l 1
437 252 l 1
437 290 l 1"#,
),
GlyphDescriptor::new_with_anchor("kili", Anchor::new_stack(AnchorType::Base),
r#"
422 340 m 0
422 366 444 390 471 390 c 0
487 390 503 383 513 370 c 0
531 348 545 318 549 279 c 1
569 288 595 292 621 292 c 0
673 292 728 251 728 136 c 0
728 9 627 -92 500 -92 c 0
373 -92 272 9 272 136 c 0
272 253 327 294 377 294 c 0
403 294 428 288 448 279 c 1
442 302 422 315 422 340 c 0
420 179 m 0
416 184 405 193 382 194 c 1
379 188 372 172 372 136 c 0
372 64 428 8 500 8 c 0
572 8 628 64 628 136 c 0
628 172 621 187 618 192 c 1
597 191 588 190 580 179 c 0
560 153 530 139 500 139 c 0
470 139 440 153 420 179 c 0"#,
),
GlyphDescriptor::new_with_anchor("kiwen", Anchor::new_stack(AnchorType::Base),
r#"
540 -70 m 2
530 -83 515 -90 500 -90 c 0
485 -90 470 -83 460 -70 c 2
260 195 l 2
253 204 250 214 250 225 c 0
250 235 252 245 258 253 c 2
335 368 l 2
344 382 359 390 376 390 c 2
624 390 l 2
641 390 656 382 665 368 c 2
742 253 l 2
748 245 750 235 750 225 c 0
750 214 747 204 740 195 c 2
540 -70 l 2
639 227 m 1
597 290 l 1
403 290 l 1
361 227 l 1
500 43 l 1
639 227 l 1"#,
),
GlyphDescriptor::new_with_anchor("ko", Anchor::new_stack(AnchorType::Base),
r#"
432 -91 m 0
364 -91 298 -34 298 44 c 0
298 54 300 64 302 74 c 1
274 100 257 136 257 175 c 0
257 252 315 303 377 309 c 1
397 357 445 390 500 390 c 0
555 390 603 357 623 309 c 1
685 303 743 252 743 175 c 0
743 136 727 100 698 74 c 1
700 64 701 54 701 44 c 0
701 -37 634 -91 568 -91 c 0
544 -91 521 -84 500 -72 c 1
479 -84 456 -91 432 -91 c 0
602 43 m 0
602 64 586 68 586 94 c 0
586 115 598 135 620 142 c 0
634 147 643 159 643 174 c 0
643 195 626 209 609 209 c 0
600 209 592 205 583 205 c 0
558 205 534 224 534 255 c 0
534 275 518 290 500 290 c 0
482 290 467 277 466 258 c 0
466 257 466 256 466 255 c 0
466 224 441 205 416 205 c 0
407 205 400 209 391 209 c 0
374 209 357 195 357 174 c 0
357 159 366 147 380 142 c 0
402 135 414 114 414 93 c 0
414 66 398 64 398 44 c 0
398 26 413 10 432 10 c 0
463 10 462 45 500 45 c 0
538 45 537 10 568 10 c 0
587 10 602 26 602 43 c 0"#,
),
GlyphDescriptor::new_with_anchor("kon", Anchor::new_stack(AnchorType::Base),
r#"
631 390 m 0
656 390 681 370 681 340 c 0
681 318 666 297 643 291 c 0
625 286 617 284 612 280 c 0
609 278 597 261 597 245 c 0
597 239 599 234 602 228 c 0
608 216 622 197 638 177 c 0
661 148 692 108 692 54 c 0
692 1 667 -39 636 -61 c 0
614 -76 591 -83 573 -88 c 0
568 -89 564 -90 559 -90 c 0
534 -90 509 -70 509 -40 c 0
509 -18 523 2 545 8 c 0
565 14 573 17 579 21 c 0
582 23 592 36 592 52 c 0
592 70 579 89 552 123 c 0
529 152 498 192 498 246 c 0
498 299 523 339 554 361 c 0
576 377 601 385 619 389 c 0
623 390 627 390 631 390 c 0
441 390 m 0
466 390 491 370 491 340 c 0
491 318 476 297 453 291 c 0
435 286 427 284 422 280 c 0
419 278 407 261 407 245 c 0
407 239 409 234 412 228 c 0
418 216 432 197 448 177 c 0
471 148 502 108 502 54 c 0
502 1 477 -39 446 -61 c 0
424 -76 401 -83 383 -88 c 0
378 -89 374 -90 369 -90 c 0
344 -90 319 -70 319 -40 c 0
319 -18 333 2 355 8 c 0
375 14 383 17 389 21 c 0
392 23 402 36 402 52 c 0
402 70 389 89 362 123 c 0
339 152 308 192 308 246 c 0
308 299 333 339 364 361 c 0
386 377 411 385 429 389 c 0
433 390 437 390 441 390 c 0"#,
),
GlyphDescriptor::new_with_anchor("kule", Anchor::new_stack(AnchorType::Base),
r#"
543 365 m 2
651 179 l 1
725 179 l 2
753 179 775 157 775 129 c 0
775 101 753 79 725 79 c 2
708 79 l 1
763 -15 l 2
768 -24 770 -32 770 -40 c 0
770 -68 743 -90 719 -90 c 2
281 -90 l 2
257 -90 230 -68 230 -40 c 0
230 -32 232 -24 237 -15 c 2
292 79 l 1
275 79 l 2
247 79 225 101 225 129 c 0
225 157 247 179 275 179 c 2
349 179 l 1
457 365 l 2
466 380 482 390 500 390 c 0
518 390 534 380 543 365 c 2
535 179 m 1
500 240 l 1
465 179 l 1
535 179 l 1
407 79 m 1
367 10 l 1
633 10 l 1
593 79 l 1
407 79 l 1"#,
),
GlyphDescriptor::new_with_anchor("kulupu", Anchor::new_stack(AnchorType::Base),
r#"
500 254 m 0
510 254 518 262 518 272 c 0
518 282 510 290 500 290 c 0
490 290 482 282 482 272 c 0
482 262 490 254 500 254 c 0
500 154 m 0
435 154 382 207 382 272 c 0
382 337 435 390 500 390 c 0
565 390 618 337 618 272 c 0
618 207 565 154 500 154 c 0
641 10 m 0
651 10 659 18 659 28 c 0
659 38 651 46 641 46 c 0
631 46 622 38 622 28 c 0
622 18 631 10 641 10 c 0
641 -90 m 0
576 -90 522 -37 522 28 c 0
522 93 576 146 641 146 c 0
706 146 759 93 759 28 c 0
759 -37 706 -90 641 -90 c 0
359 10 m 0
369 10 378 18 378 28 c 0
378 38 369 46 359 46 c 0
349 46 341 38 341 28 c 0
341 18 349 10 359 10 c 0
359 -90 m 0
294 -90 241 -37 241 28 c 0
241 93 294 146 359 146 c 0
424 146 478 93 478 28 c 0
478 -37 424 -90 359 -90 c 0"#,
),
GlyphDescriptor::new_with_anchor("kute", Anchor::new_stack(AnchorType::Base),
r#"
416 194 m 1
416 194 l 1
507 262 m 0
537 262 557 237 557 212 c 0
557 207 557 202 555 197 c 0
537 141 507 94 454 60 c 0
446 55 437 52 428 52 c 0
403 52 377 73 377 102 c 0
377 118 386 134 401 144 c 0
430 162 447 187 460 227 c 0
467 248 486 262 507 262 c 0
368 159 m 0
336 159 319 184 319 209 c 0
319 216 320 224 323 231 c 0
332 256 340 275 358 302 c 0
382 339 428 390 500 390 c 0
621 390 682 292 682 209 c 0
682 98 605 22 541 -22 c 0
492 -55 458 -72 409 -88 c 0
404 -90 399 -90 394 -90 c 0
369 -90 344 -70 344 -40 c 0
344 -18 358 0 380 8 c 0
464 37 582 110 582 209 c 0
582 250 554 290 500 290 c 0
481 290 461 276 442 247 c 0
430 228 422 213 416 194 c 0
409 173 389 159 368 159 c 0"#,
),
GlyphDescriptor::new_with_anchor("la", Anchor::new_stack(AnchorType::Base),
r#"
453 -90 m 0
428 -90 403 -70 403 -40 c 0
403 -20 414 -2 434 6 c 0
472 21 498 89 498 150 c 0
498 211 472 279 434 294 c 0
414 302 403 320 403 340 c 0
403 370 428 390 453 390 c 0
459 390 466 388 472 386 c 0
552 354 598 258 598 150 c 0
598 42 552 -54 472 -86 c 0
466 -88 459 -90 453 -90 c 0"#,
),
GlyphDescriptor::new_with_anchor("lape", Anchor::new_stack(AnchorType::Base),
r#"
556 100 m 1
100 100 l 2
72 100 50 122 50 150 c 0
50 178 72 200 100 200 c 2
556 200 l 1
578 286 657 350 750 350 c 0
860 350 950 260 950 150 c 0
950 40 860 -50 750 -50 c 0
657 -50 578 14 556 100 c 1
750 250 m 0
695 250 650 205 650 150 c 0
650 95 695 50 750 50 c 0
805 50 850 95 850 150 c 0
850 205 805 250 750 250 c 0"#,
),
GlyphDescriptor::new_with_anchor("laso", Anchor::new_stack(AnchorType::Base),
r#"
500 53 m 1
474 10 l 1
527 10 l 1
500 53 l 1
501 340 m 1
541 382 619 389 640 390 c 1
643 390 l 2
670 390 693 367 693 340 c 0
693 338 692 335 692 332 c 0
690 320 680 255 643 216 c 0
616 189 576 178 551 173 c 1
551 161 l 1
659 -14 l 2
664 -23 667 -32 667 -40 c 0
667 -68 641 -90 616 -90 c 2
384 -90 l 2
359 -90 334 -68 334 -40 c 0
334 -32 337 -23 342 -14 c 2
451 163 l 1
451 173 l 1
377 188 340 219 318 292 c 0
313 308 309 324 309 339 c 0
309 367 330 390 358 390 c 2
361 390 l 1
383 389 459 382 501 340 c 1"#,
),
GlyphDescriptor::new_with_anchor("lawa", Anchor::new_stack(AnchorType::Base),
r#"
658 170 m 1
658 163 659 157 659 150 c 0
659 25 571 -90 446 -90 c 0
321 -90 233 25 233 150 c 0
233 275 321 390 446 390 c 0
528 390 594 340 630 270 c 1
716 270 l 2
744 270 766 248 766 220 c 0
766 192 744 170 716 170 c 2
658 170 l 1
388 270 m 1
503 270 l 1
485 283 466 290 446 290 c 0
426 290 406 283 388 270 c 1
334 170 m 1
333 164 333 157 333 150 c 0
333 65 391 10 446 10 c 0
501 10 559 65 559 150 c 0
559 157 558 164 557 170 c 1
334 170 l 1"#,
),
GlyphDescriptor::new_with_anchor("len", Anchor::new_stack(AnchorType::Base),
r#"
398 111 m 1
602 111 l 1
602 290 l 1
398 290 l 1
398 111 l 1
348 390 m 2
652 390 l 2
680 390 702 368 702 340 c 2
702 -40 l 2
702 -68 680 -90 652 -90 c 0
624 -90 602 -68 602 -40 c 2
602 11 l 1
550 11 l 1
550 -40 l 2
550 -68 528 -90 500 -90 c 0
472 -90 450 -68 450 -40 c 2
450 11 l 1
398 11 l 1
398 -40 l 2
398 -68 376 -90 348 -90 c 0
320 -90 298 -68 298 -40 c 2
298 340 l 2
298 368 320 390 348 390 c 2"#,
),
GlyphDescriptor::new_with_anchor("lete", Anchor::new_stack(AnchorType::Base),
r#"
660 -40 m 0
660 -65 639 -90 610 -90 c 0
593 -90 575 -81 566 -65 c 2
500 50 l 1
434 -65 l 2
425 -81 407 -90 390 -90 c 0
361 -90 340 -65 340 -40 c 0
340 -32 342 -23 347 -15 c 2
413 100 l 1
281 100 l 2
253 100 231 122 231 150 c 0
231 178 253 200 281 200 c 2
413 200 l 1
347 315 l 2
342 323 340 332 340 340 c 0
340 365 361 390 390 390 c 0
407 390 425 381 434 365 c 2
500 250 l 1
566 365 l 2
575 381 593 390 610 390 c 0
639 390 660 365 660 340 c 0
660 332 658 323 653 315 c 2
587 200 l 1
719 200 l 2
747 200 769 178 769 150 c 0
769 122 747 100 719 100 c 2
587 100 l 1
653 -15 l 2
658 -23 660 -32 660 -40 c 0"#,
),
GlyphDescriptor::new_with_anchor("li", Anchor::new_stack(AnchorType::Base),
r#"
325 340 m 0
325 368 349 390 375 390 c 0
386 390 398 386 407 378 c 2
632 188 l 2
643 178 650 165 650 150 c 0
650 135 643 122 632 112 c 2
407 -78 l 2
398 -86 386 -90 375 -90 c 0
349 -90 325 -68 325 -40 c 0
325 -26 331 -12 343 -2 c 2
523 150 l 1
343 302 l 2
331 312 325 326 325 340 c 0"#,
),
GlyphDescriptor::new_with_anchor("lili", Anchor::new_stack(AnchorType::Base),
r#"
590 260 m 0
618 260 640 236 640 210 c 0
640 200 637 189 630 180 c 2
540 60 l 2
531 47 516 40 500 40 c 0
484 40 469 47 460 60 c 2
370 180 l 2
363 189 360 200 360 210 c 0
360 236 382 260 410 260 c 0
425 260 440 253 450 240 c 2
500 173 l 1
550 240 l 2
560 253 575 260 590 260 c 0"#,
),
GlyphDescriptor::new_with_anchor("linja", Anchor::new_stack(AnchorType::Base),
r#"
950 37 m 0
950 12 930 -12 900 -12 c 0
878 -12 858 3 852 25 c 0
827 117 819 208 737 241 c 0
721 247 705 250 689 250 c 0
630 250 571 209 548 135 c 0
513 24 417 -50 312 -50 c 0
284 -50 255 -44 226 -33 c 0
103 16 86 121 52 250 c 0
51 254 50 259 50 263 c 0
50 288 70 313 100 313 c 0
122 313 142 297 148 275 c 0
173 183 181 92 263 59 c 0
279 53 295 50 311 50 c 0
370 50 429 91 452 165 c 0
487 276 583 350 688 350 c 0
716 350 745 344 774 333 c 0
897 284 914 179 948 50 c 0
949 46 950 41 950 37 c 0"#,
),
GlyphDescriptor::new_with_anchor("lipu", Anchor::new_stack(AnchorType::Base),
r#"
287 340 m 2
287 368 309 390 337 390 c 2
663 390 l 2
691 390 713 368 713 340 c 2
713 -40 l 2
713 -68 691 -90 663 -90 c 2
337 -90 l 2
309 -90 287 -68 287 -40 c 2
287 340 l 2
387 290 m 1
387 10 l 1
613 10 l 1
613 290 l 1
387 290 l 1"#,
),
GlyphDescriptor::new_with_anchor("loje", Anchor::new_stack(AnchorType::Base),
r#"
367 390 m 2
633 390 l 2
661 390 683 368 683 340 c 0
683 256 626 185 548 164 c 1
657 -14 l 2
662 -23 665 -32 665 -40 c 0
665 -68 640 -91 615 -91 c 2
385 -91 l 2
360 -91 335 -69 335 -41 c 0
335 -33 338 -23 343 -14 c 2
452 164 l 1
374 185 317 256 317 340 c 0
317 368 339 390 367 390 c 2
500 257 m 0
527 257 551 270 566 290 c 1
434 290 l 1
449 270 473 257 500 257 c 0
500 51 m 1
475 9 l 1
525 9 l 1
500 51 l 1"#,
),
GlyphDescriptor::new_with_anchor("lon", Anchor::new_stack(AnchorType::Base),
r#"
50 0 m 0
50 28 72 50 100 50 c 2
900 50 l 2
928 50 950 28 950 0 c 0
950 -28 928 -50 900 -50 c 2
100 -50 l 2
72 -50 50 -28 50 0 c 0
575 275 m 0
575 234 541 200 500 200 c 0
459 200 425 234 425 275 c 0
425 316 459 350 500 350 c 0
541 350 575 316 575 275 c 0"#,
),
GlyphDescriptor::new_with_anchor("luka", Anchor::new_stack(AnchorType::Base),
r#"
247 137 m 0
247 162 267 187 297 187 c 0
314 187 331 178 340 162 c 2
350 146 l 1
355 216 372 273 403 315 c 0
441 366 494 390 551 390 c 0
608 390 661 366 699 315 c 0
735 266 753 195 753 107 c 2
753 -40 l 2
753 -68 731 -90 703 -90 c 0
675 -90 653 -68 653 -40 c 2
653 107 l 2
653 184 637 230 618 256 c 0
600 280 578 290 551 290 c 0
524 290 501 280 483 256 c 0
464 230 449 184 449 107 c 2
449 -40 l 2
449 -71 423 -89 397 -89 c 0
381 -89 365 -82 355 -65 c 2
254 112 l 2
250 120 247 129 247 137 c 0"#,
),
GlyphDescriptor::new_with_anchor("lukin", Anchor::new_stack(AnchorType::Base),
r#"
580 150 m 0
580 106 544 70 500 70 c 0
456 70 420 106 420 150 c 0
420 194 456 230 500 230 c 0
544 230 580 194 580 150 c 0
149 99 m 0
140 115 135 132 135 150 c 0
135 168 140 185 149 201 c 0
219 319 366 390 500 390 c 0
634 390 781 319 851 201 c 0
860 185 865 168 865 150 c 0
865 132 860 115 851 99 c 0
781 -19 634 -90 500 -90 c 0
366 -90 219 -19 149 99 c 0
500 10 m 0
605 10 716 67 765 150 c 1
716 233 605 290 500 290 c 0
395 290 284 233 235 150 c 1
284 67 395 10 500 10 c 0"#,
),
GlyphDescriptor::new_with_anchor("lupa", Anchor::new_stack(AnchorType::Base),
r#"
500 10 m 0
555 10 613 65 613 150 c 2
613 340 l 2
613 368 635 390 663 390 c 0
691 390 713 368 713 340 c 2
713 150 l 2
713 25 625 -90 500 -90 c 0
375 -90 287 25 287 150 c 2
287 340 l 2
287 368 309 390 337 390 c 0
365 390 387 368 387 340 c 2
387 150 l 2
387 65 445 10 500 10 c 0"#,
),
GlyphDescriptor::new_with_anchor("ma", Anchor::new_stack(AnchorType::Base),
r#"
500 -90 m 0
367 -90 260 17 260 150 c 0
260 283 367 390 500 390 c 0
633 390 740 283 740 150 c 0
740 17 633 -90 500 -90 c 0
550 19 m 1
587 33 617 63 631 100 c 1
550 100 l 1
550 19 l 1
450 19 m 1
450 100 l 1
369 100 l 1
383 63 413 33 450 19 c 1
550 200 m 1
631 200 l 1
617 237 587 267 550 281 c 1
550 200 l 1
450 281 m 1
413 267 383 237 369 200 c 1
450 200 l 1
450 281 l 1"#,
),
GlyphDescriptor::new_with_anchor("mama", Anchor::new_stack(AnchorType::Base),
r#"
605 51 m 1
608 41 609 30 609 19 c 0
609 -41 560 -90 500 -90 c 0
440 -90 391 -41 391 19 c 0
391 30 392 41 395 51 c 1
331 80 284 136 284 209 c 0
284 319 392 390 500 390 c 0
608 390 716 319 716 209 c 0
716 136 669 80 605 51 c 1
384 209 m 0
384 175 424 129 500 129 c 0
576 129 616 175 616 209 c 0
616 243 576 290 500 290 c 0
424 290 384 243 384 209 c 0
500 29 m 0
495 29 491 24 491 19 c 0
491 14 495 10 500 10 c 0
505 10 509 14 509 19 c 0
509 24 505 29 500 29 c 0"#,
),
GlyphDescriptor::new_with_anchor("mani", Anchor::new_stack(AnchorType::Base),
r#"
260 340 m 0
260 365 280 390 310 390 c 0
332 390 352 376 358 353 c 0
362 339 366 331 371 327 c 0
375 323 381 318 393 314 c 1
425 332 461 342 500 342 c 0
539 342 575 332 607 314 c 1
619 318 625 323 629 327 c 0
634 331 638 339 642 353 c 0
648 376 668 390 690 390 c 0
720 390 740 365 740 340 c 0
740 336 739 331 738 327 c 0
729 293 711 262 683 242 c 1
704 209 716 169 716 126 c 0
716 7 619 -90 500 -90 c 0
381 -90 284 7 284 126 c 0
284 169 296 209 317 242 c 1
289 262 271 293 262 327 c 0
261 331 260 336 260 340 c 0
616 126 m 0
616 190 564 242 500 242 c 0
436 242 384 190 384 126 c 0
384 62 436 10 500 10 c 0
564 10 616 62 616 126 c 0"#,
),
GlyphDescriptor::new_with_anchor("meli", Anchor::new_stack(AnchorType::Base),
r#"
500 290 m 0
423 290 360 227 360 150 c 2
360 -40 l 2
360 -68 338 -90 310 -90 c 0
282 -90 260 -68 260 -40 c 2
260 150 l 2
260 283 367 390 500 390 c 0
633 390 740 283 740 150 c 2
740 -40 l 2
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | true |
etbcor/nasin-nanpa | https://github.com/etbcor/nasin-nanpa/blob/e5ab7a391aa703cd7beef3656c9c86045aeef485/font-forge-tool/src/glyph_blocks/mod.rs | font-forge-tool/src/glyph_blocks/mod.rs | #![cfg_attr(rustfmt, rustfmt_skip)]
pub mod ctrl;
pub mod base;
pub mod outer;
pub mod inner;
pub mod lower;
//MARK: HEADERS
pub const HEADER: &str = r#"SplineFontDB: 3.2
FontName: nasin-nanpa
FullName: nasin-nanpa
FamilyName: nasin-nanpa
Weight: Regular
Copyright: jan Itan li mama. jan mute a li pona e pali ona.
"#;
pub const VERSION: &str = "5.0.0-beta.4";
pub const DETAILS1: &str = r#"ItalicAngle: 0
UnderlinePosition: 0
UnderlineWidth: 0
Ascent: 900
Descent: 100
InvalidEm: 0
sfntRevision: 0x00010000
LayerCount: 2
Layer: 0 0 "Back" 1
Layer: 1 0 "Fore" 0
XUID: [1021 700 1229584016 12833]
StyleMap: 0x0040
FSType: 0
OS2Version: 4
OS2_WeightWidthSlopeOnly: 0
OS2_UseTypoMetrics: 0
CreationTime: 1640950552
"#;
pub const DETAILS2: &str = r#"
PfmFamily: 81
TTFWeight: 400
TTFWidth: 5
LineGap: 0
VLineGap: 0
Panose: 0 0 8 0 0 0 0 6 0 0
OS2TypoAscent: 1000
OS2TypoAOffset: 0
OS2TypoDescent: 0
OS2TypoDOffset: 0
OS2TypoLinegap: 0
OS2WinAscent: 1000
OS2WinAOffset: 0
OS2WinDescent: 386
OS2WinDOffset: 0
HheadAscent: 1000
HheadAOffset: 0
HheadDescent: -386
HheadDOffset: 0
OS2SubXSize: 650
OS2SubYSize: 699
OS2SubXOff: 0
OS2SubYOff: 140
OS2SupXSize: 650
OS2SupYSize: 699
OS2SupXOff: 0
OS2SupYOff: 479
OS2StrikeYSize: 49
OS2StrikeYPos: 258
OS2CapHeight: 1000
OS2XHeight: 500
OS2Vendor: 'XXXX'
OS2CodePages: 00000001.00000000
OS2UnicodeRanges: 0000000f.00000000.00000000.00000000
"#;
pub const LOOKUPS: &str = r#"Lookup: 4 0 0 "'liga' SPACE" { "'liga' SPACE" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 4 0 0 "'liga' WORDS" { "'liga' WORD" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 3 0 0 "'rand' RAND VARIATIONS" { "'rand' RAND VARIATIONS" } ['rand' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 4 0 0 "'liga' VARIATIONS" { "'liga' VAR" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 4 0 0 "'liga' START CONTAINER" { "'liga' START CONTAINER" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 5 0 0 "'calt' CHANGE ZWJ" { "'calt' CHANGE ZWJ" } ['calt' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 1 0 0 "'ss01' ZWJ TO SCALE" { "'ss01' ZWJ TO SCALE" } ['ss01' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 1 0 0 "'ss02' ZWJ TO STACK" { "'ss02' ZWJ TO STACK" } ['ss02' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 4 0 0 "'liga' GLYPH THEN JOINER" { "'liga' GLYPH THEN JOINER" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 2 0 0 "'ccmp' RESPAWN JOINER" { "'ccmp' RESPAWN JOINER" } ['ccmp' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 4 0 0 "'liga' JOINER THEN GLYPH" { "'liga' JOINER THEN GLYPH" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 6 0 0 "'calt' CART AND CONT" { "'calt' CART AND CONT" } ['calt' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 2 2 0 "'cc01' CART" { "'cc01' CART" } ['cc01' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 2 2 0 "'cc02' CONT" { "'cc02' CONT" } ['cc02' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 4 0 0 "'liga' CC CLEANUP" { "'liga' CC CLEANUP" } ['liga' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 257 0 0 "'sqsh' SPACE SHIFT" { "'sqsh' SPACE SHIFT" } ['sqsh' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 263 0 0 "'kern' FIX SPACE" { "'kern' FIX SPACE" } ['kern' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
Lookup: 260 0 0 "'mark' POSITION COMBO" { "'mark' SPECIAL" "'mark' STACK" "'mark' SCALE" } ['mark' ('DFLT' <'dflt' 'latn' > 'latn' <'dflt' > ) ]
MarkAttachClasses: 1
"#;
pub const AFTER_SPACE_CALT: &str = r#" 2 0 0
ClsList: 1 2
BClsList:
FClsList:
1
SeqLookup: 1 "'sqsh' SPACE SHIFT"
ClassNames: "All_Others" "other" "space"
BClassNames: "All_Others"
FClassNames: "All_Others"
EndFPST
"#;
pub const AFTER_ZWJ_CALT: &str = r#" 2 0 0
ClsList: 2 1
BClsList:
FClsList:
1
SeqLookup: 1 "'ss01' ZWJ TO SCALE"
2 0 0
ClsList: 3 1
BClsList:
FClsList:
1
SeqLookup: 1 "'ss02' ZWJ TO STACK"
ClassNames: "other" "zwj" "scale" "stack"
BClassNames: "other" "zwj" "scale" "stack"
FClassNames: "other" "zwj" "scale" "stack"
EndFPST
"#;
pub const AFTER_CHAIN_CALT: &str = r#" 1 1 0
ClsList: 1
BClsList: 2
FClsList:
1
SeqLookup: 0 "'cc01' CART"
1 1 0
ClsList: 1
BClsList: 3
FClsList:
1
SeqLookup: 0 "'cc02' CONT"
ClassNames: "other" "base" "cart" "cont"
BClassNames: "other" "base" "cart" "cont"
FClassNames: "other" "base" "cart" "cont"
EndFPST
LangName: 1033 "" "" "" "" "" ""#;
pub const OTHER: &str = r#"" "" "+ACIA-jan Itan 2023+ACIA" "+ACIAIgAA" "+ACIA-jan Itan+ACIA" "+ACIAIgAA" "+ACIAIgAA" "+ACIA-https://etbcor.com/+ACIA" "+ACIA-MIT License+ACIA" "+ACIA-https://opensource.org/licenses/MIT+ACIA" "" "nasin-nanpa" "Regular"
Encoding: Custom
UnicodeInterp: none
NameList: AGL For New Fonts
DisplaySize: -72
AntiAlias: 1
FitToEm: 1
WinInfo: 304 16 6
BeginPrivate: 12
BlueValues 22 [-2 1 414 417 796 797]
OtherBlues 11 [-385 -384]
BlueFuzz 1 1
BlueScale 8 0.039625
BlueShift 1 7
StdHW 5 [100]
StdVW 5 [100]
StemSnapH 5 [100]
StemSnapV 5 [100]
ForceBold 5 false
LanguageGroup 1 0
ExpansionFactor 4 0.06
EndPrivate
AnchorClass2: "tokipona" "'mark' SPECIAL" "stack" "'mark' STACK" "scale" "'mark' SCALE"
"#;
| rust | MIT | e5ab7a391aa703cd7beef3656c9c86045aeef485 | 2026-01-04T20:18:26.798176Z | false |
xnorpx/blue-onyx | https://github.com/xnorpx/blue-onyx/blob/dc00f8bd73857e721a73e0ea804da436fef3ebed/build.rs | build.rs | use std::{env, fs::File, path::Path, process::Command};
use zip::ZipArchive;
const ONNX_SOURCE: (&str, &str) = (
"onnxruntime-1.22.0",
"https://github.com/microsoft/onnxruntime/archive/refs/tags/v1.22.0.zip",
);
const DIRECTML_SOURCE: (&str, &str) = (
"Microsoft.AI.DirectML.1.15.4",
"https://www.nuget.org/api/v2/package/Microsoft.AI.DirectML/1.15.4",
);
macro_rules! build_error {
($($tokens: tt)*) => {
println!("cargo::error={}", format!($($tokens)*))
}
}
macro_rules! build_warning {
($($tokens: tt)*) => {
println!("cargo::warning={}", format!($($tokens)*))
}
}
fn get_build_config() -> &'static str {
match env::var("PROFILE").as_deref() {
Ok("release") => "Release",
Ok("debug") => "Debug",
_ => "Release",
}
}
fn main() {
build_warning!("Starting build script for ONNX Runtime");
let target_dir = env::var("OUT_DIR").expect("OUT_DIR environment variable not set");
check_and_download_onnx_source(&target_dir);
if cfg!(windows) {
check_and_download_directml(&target_dir);
}
let build_dir = Path::new(&target_dir).join(ONNX_SOURCE.0).join("build");
let shared_lib_name = if cfg!(windows) {
"onnxruntime.dll"
} else if cfg!(target_os = "macos") {
"libonnxruntime.dylib"
} else {
"libonnxruntime.so"
};
let expected_binary = build_dir
.join(if cfg!(windows) { "Windows" } else { "Linux" })
.join(get_build_config())
.join(if cfg!(windows) {
get_build_config()
} else {
""
})
.join(shared_lib_name);
if build_dir.exists() && !expected_binary.exists() {
build_warning!(
"Build directory exists but expected binary missing, cleaning build directory"
);
std::fs::remove_dir_all(&build_dir).expect("Failed to clean build directory");
}
if !expected_binary.exists() {
build_onnx(&target_dir);
}
if !expected_binary.exists() {
build_error!("Expected ONNX Runtime binary not found after build");
panic!("Build failed: ONNX Runtime binary missing");
}
let output_dir = Path::new(&target_dir)
.ancestors()
.nth(3)
.expect("Failed to determine output directory");
if !output_dir.exists() {
std::fs::create_dir_all(output_dir).expect("Failed to create output directory");
}
std::fs::copy(&expected_binary, output_dir.join(shared_lib_name))
.expect("Failed to copy ONNX Runtime binary to output directory");
// On Windows, also copy DirectML.dll to the output directory if it does not exist
if cfg!(windows) {
let directml_dll = Path::new(&target_dir)
.join(DIRECTML_SOURCE.0)
.join("bin/x64-win/DirectML.dll");
let output_dll = output_dir.join("DirectML.dll");
if !output_dll.exists() {
std::fs::copy(&directml_dll, &output_dll)
.expect("Failed to copy DirectML.dll to output directory");
build_warning!("Copied DirectML.dll to output directory");
}
}
println!(
"cargo:rustc-env=ORT_LIB_LOCATION={:?}",
expected_binary.parent().unwrap()
);
}
fn check_and_download_onnx_source(target_dir: &str) {
let onnx_dir = Path::new(target_dir).join(ONNX_SOURCE.0);
let zip_path = Path::new(target_dir).join("onnxruntime.zip");
if !onnx_dir.exists() {
if !zip_path.exists() {
build_warning!("Downloading ONNX Runtime source");
let mut response = reqwest::blocking::get(ONNX_SOURCE.1)
.expect("Failed to download ONNX Runtime source");
let mut file = File::create(&zip_path).expect("Failed to create ONNX Runtime zip file");
response
.copy_to(&mut file)
.expect("Failed to write ONNX Runtime zip file");
}
build_warning!("Extracting ONNX Runtime source");
let zip_file = File::open(&zip_path).expect("Failed to open ONNX Runtime zip file");
let mut archive =
ZipArchive::new(zip_file).expect("Failed to read ONNX Runtime zip archive");
archive
.extract(target_dir)
.expect("Failed to extract ONNX Runtime source");
// Apply patch to fix Eigen dependency GitLab issues
apply_eigen_patch(&onnx_dir);
}
}
// TODO: Remove this patch when upgrading from ONNX Runtime 1.22.0
// This patch is only needed for version 1.22.0 to fix GitLab Eigen dependency issues
fn apply_eigen_patch(onnx_dir: &Path) {
build_warning!("Applying Eigen dependency patch to fix GitLab issues (ONNX 1.22.0 only)");
let deps_file = onnx_dir.join("cmake").join("deps.txt");
let content = std::fs::read_to_string(&deps_file).expect("Failed to read cmake/deps.txt");
// Apply the patch: replace GitLab Eigen URL with GitHub mirror
// This is specific to ONNX Runtime 1.22.0 and should be removed when upgrading
let old_eigen_line = "eigen;https://gitlab.com/libeigen/eigen/-/archive/1d8b82b0740839c0de7f1242a3585e3390ff5f33/eigen-1d8b82b0740839c0de7f1242a3585e3390ff5f33.zip;5ea4d05e62d7f954a46b3213f9b2535bdd866803";
let new_eigen_line = "eigen;https://github.com/eigen-mirror/eigen/archive/1d8b82b0740839c0de7f1242a3585e3390ff5f33/eigen-1d8b82b0740839c0de7f1242a3585e3390ff5f33.zip;05b19b49e6fbb91246be711d801160528c135e34";
let patched_content = content.replace(old_eigen_line, new_eigen_line);
std::fs::write(&deps_file, patched_content).expect("Failed to write patched cmake/deps.txt");
build_warning!("Successfully applied Eigen dependency patch");
}
fn check_and_download_directml(target_dir: &str) {
let directml_dir = Path::new(target_dir).join(DIRECTML_SOURCE.0);
let zip_path = Path::new(target_dir).join("directml.zip");
let directml_for_build_dir = Path::new(target_dir).join("directml");
if !directml_dir.exists() {
if !zip_path.exists() {
build_warning!("Downloading DirectML");
let mut response =
reqwest::blocking::get(DIRECTML_SOURCE.1).expect("Failed to download DirectML");
let mut file = File::create(&zip_path).expect("Failed to create DirectML zip file");
response
.copy_to(&mut file)
.expect("Failed to write DirectML zip file");
}
build_warning!("Extracting DirectML");
let zip_file = File::open(&zip_path).expect("Failed to open DirectML zip file");
let mut archive = ZipArchive::new(zip_file).expect("Failed to read DirectML zip archive");
archive
.extract(&directml_dir)
.expect("Failed to extract DirectML");
}
let required_files = [
directml_dir.join("bin/x64-win/DirectML.lib"),
directml_dir.join("bin/x64-win/DirectML.dll"),
directml_dir.join("include/DirectML.h"),
directml_dir.join("include/DirectMLConfig.h"),
];
for file in &required_files {
if !file.exists() {
build_error!("Required DirectML file missing: {:?}", file);
panic!("DirectML setup incomplete");
}
}
let directml_lib_dir = directml_dir.join("bin/x64-win");
let directml_include_dir = directml_dir.join("include");
let directml_lib_path = directml_lib_dir.join("DirectML.lib");
let directml_dll_path = directml_lib_dir.join("DirectML.dll");
let directml_include_path = directml_include_dir.join("DirectML.h");
let directml_config_path = directml_include_dir.join("DirectMLConfig.h");
let bin_dir = directml_for_build_dir.join("bin");
let lib_dir = directml_for_build_dir.join("lib");
let include_dir = directml_for_build_dir.join("include");
std::fs::create_dir_all(&directml_for_build_dir)
.expect("Failed to create direct ml for bin directory");
std::fs::create_dir_all(&bin_dir).expect("Failed to create bin directory");
std::fs::create_dir_all(&lib_dir).expect("Failed to create lib directory");
std::fs::create_dir_all(&include_dir).expect("Failed to create include directory");
std::fs::copy(&directml_lib_path, lib_dir.join("DirectML.lib"))
.expect("Failed to copy DirectML.lib");
std::fs::copy(&directml_dll_path, bin_dir.join("DirectML.dll"))
.expect("Failed to copy DirectML.dll");
std::fs::copy(&directml_include_path, include_dir.join("DirectML.h"))
.expect("Failed to copy DirectML.h");
std::fs::copy(&directml_config_path, include_dir.join("DirectMLConfig.h"))
.expect("Failed to copy DirectMLConfig.h");
// Verify files
let copied_files = [
lib_dir.join("DirectML.lib"),
bin_dir.join("DirectML.dll"),
include_dir.join("DirectML.h"),
include_dir.join("DirectMLConfig.h"),
];
for file in &copied_files {
if !file.exists() {
build_error!("Failed to verify copied file: {:?}", file);
panic!("DirectML file copy verification failed");
}
}
build_warning!("DirectML files copied and verified successfully");
}
fn build_onnx(target_dir: &str) {
let onnx_dir = Path::new(target_dir).join(ONNX_SOURCE.0);
let build_script = if cfg!(windows) {
onnx_dir.join("build.bat")
} else {
onnx_dir.join("build.sh")
};
if !build_script.exists() {
build_error!("Build script not found: {:?}", build_script);
panic!("ONNX Runtime build script missing");
}
let mut build_commands = vec![
"--config".to_string(),
get_build_config().to_string(),
"--build_shared_lib".to_string(),
"--parallel".to_string(),
num_cpus::get_physical().to_string(),
"--compile_no_warning_as_error".to_string(),
"--skip_tests".to_string(),
"--enable_lto".to_string(),
"--disable_contrib_ops".to_string(),
"--cmake_extra_defines".to_string(),
"onnxruntime_BUILD_UNIT_TESTS=OFF".to_string(),
];
if cfg!(windows) {
// Enable DirectML on Windows
build_commands.extend([
"--enable_msvc_static_runtime".to_string(),
"--use_dml".to_string(),
"--dml_path".to_string(),
target_dir.to_string() + "\\directml",
]);
} else if cfg!(target_os = "macos") {
// Enable Core ML on macOS
build_commands.push("--use_coreml".to_string());
}
build_warning!("Running ONNX Runtime build script");
let status = Command::new(build_script)
.args(&build_commands)
.current_dir(&onnx_dir)
.status()
.expect("Failed to execute ONNX Runtime build script");
if !status.success() {
build_error!("ONNX Runtime build failed with status: {}", status);
panic!("ONNX Runtime build failed");
} else {
build_warning!("ONNX Runtime build completed successfully");
}
}
| rust | MIT | dc00f8bd73857e721a73e0ea804da436fef3ebed | 2026-01-04T20:24:46.035395Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.