repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/deps.rs | src/task/deps.rs | use crate::task::Task;
use crate::{config::Config, task::task_list::resolve_depends};
use itertools::Itertools;
use petgraph::Direction;
use petgraph::graph::DiGraph;
use std::{
collections::{HashMap, HashSet},
sync::Arc,
};
use tokio::sync::mpsc;
#[derive(Debug, Clone)]
pub struct Deps {
    /// dependency graph: an edge a -> b means task `a` is waiting on task `b`
    pub graph: DiGraph<Task, ()>,
    sent: HashSet<(String, Vec<String>)>, // tasks+args that have already started so should not run again
    removed: HashSet<(String, Vec<String>)>, // tasks+args that have already finished to track if we are in an infinite loop
    tx: mpsc::UnboundedSender<Option<Task>>,
    // not clone, notify waiters via tx None
}
/// Identity of a task invocation: its name together with its argument list.
/// Used as the map/set key so the same task run with different args counts
/// as a distinct node.
pub fn task_key(task: &Task) -> (String, Vec<String>) {
    let name = task.name.to_owned();
    let args = task.args.to_owned();
    (name, args)
}
/// manages a dependency graph of tasks so `mise run` knows what to run next
impl Deps {
    /// Builds the graph. Nodes are (task, args) pairs; an edge a -> b means
    /// `a` is waiting on `b`, so only nodes with no outgoing edges are runnable.
    pub async fn new(config: &Arc<Config>, tasks: Vec<Task>) -> eyre::Result<Self> {
        let mut graph = DiGraph::new();
        let mut indexes = HashMap::new();
        let mut stack = vec![];
        let mut seen = HashSet::new();
        // memoize node indices by task_key so each (task, args) gets one node
        let mut add_idx = |task: &Task, graph: &mut DiGraph<Task, ()>| {
            *indexes
                .entry(task_key(task))
                .or_insert_with(|| graph.add_node(task.clone()))
        };
        // first we add all tasks to the graph, create a stack of work for this function, and
        // store the index of each task in the graph
        for t in &tasks {
            stack.push(t.clone());
            add_idx(t, &mut graph);
        }
        let all_tasks_to_run = resolve_depends(config, tasks).await?;
        while let Some(a) = stack.pop() {
            if seen.contains(&a) {
                // prevent infinite loop
                continue;
            }
            let a_idx = add_idx(&a, &mut graph);
            let (pre, post) = a.resolve_depends(config, &all_tasks_to_run).await?;
            for b in pre {
                // `a` depends on `b`: edge a -> b, so `b` unblocks first
                let b_idx = add_idx(&b, &mut graph);
                graph.update_edge(a_idx, b_idx, ());
                stack.push(b.clone());
            }
            for b in post {
                // post-dependency: `b` must wait for `a`, so edge b -> a
                let b_idx = add_idx(&b, &mut graph);
                graph.update_edge(b_idx, a_idx, ());
                stack.push(b.clone());
            }
            seen.insert(a);
        }
        // placeholder channel; `subscribe` installs the real sender
        let (tx, _) = mpsc::unbounded_channel();
        let sent = HashSet::new();
        let removed = HashSet::new();
        Ok(Self {
            graph,
            tx,
            sent,
            removed,
        })
    }
    /// main method to emit tasks that no longer have dependencies being waited on
    fn emit_leaves(&mut self) {
        let leaves = leaves(&self.graph);
        let leaves_is_empty = leaves.is_empty();
        for task in leaves {
            let key = (task.name.clone(), task.args.clone());
            // only send each (task, args) once even if it stays a leaf
            if self.sent.insert(key) {
                trace!("Scheduling task {0}", task.name);
                if let Err(e) = self.tx.send(Some(task)) {
                    trace!("Error sending task: {e:?}");
                }
            }
        }
        if self.is_empty() {
            trace!("All tasks finished");
            // None signals end-of-stream to the subscriber
            if let Err(e) = self.tx.send(None) {
                trace!("Error closing task stream: {e:?}");
            }
        } else if leaves_is_empty && self.sent.len() == self.removed.len() {
            // nothing runnable, nothing in flight, but nodes remain: the graph
            // is stuck (dependency cycle) — fail loudly rather than hang
            panic!(
                "Infinite loop detected, all tasks are finished but the graph isn't empty {0} {1:#?}",
                self.all().map(|t| t.name.clone()).join(", "),
                self.graph
            )
        }
    }
    /// listened to by `mise run` which gets a stream of tasks to run
    pub fn subscribe(&mut self) -> mpsc::UnboundedReceiver<Option<Task>> {
        let (tx, rx) = mpsc::unbounded_channel();
        self.tx = tx;
        // immediately queue whatever is already runnable
        self.emit_leaves();
        rx
    }
    /// true once every task has been removed from the graph
    pub fn is_empty(&self) -> bool {
        self.graph.node_count() == 0
    }
    // use contracts::{ensures, requires};
    // #[requires(self.graph.node_count() > 0)]
    // #[ensures(self.graph.node_count() == old(self.graph.node_count()) - 1)]
    /// marks `task` as finished: drops its node (and its edges) and emits any
    /// tasks that become runnable as a result
    pub fn remove(&mut self, task: &Task) {
        if let Some(idx) = self.node_idx(task) {
            self.graph.remove_node(idx);
            let key = (task.name.clone(), task.args.clone());
            self.removed.insert(key);
            self.emit_leaves();
        }
    }
    fn node_idx(&self, task: &Task) -> Option<petgraph::graph::NodeIndex> {
        self.graph
            .node_indices()
            .find(|&idx| &self.graph[idx] == task)
    }
    /// iterator over every task still in the graph
    pub fn all(&self) -> impl Iterator<Item = &Task> {
        self.graph.node_indices().map(|idx| &self.graph[idx])
    }
    /// true when the tasks form a single chain (at most one runnable at a time)
    pub fn is_linear(&self) -> bool {
        let mut graph = self.graph.clone();
        // pop dependencies off, if we get multiple dependencies at once it's not linear
        loop {
            let leaves = leaves(&graph);
            if leaves.is_empty() {
                return true;
            } else if leaves.len() > 1 {
                return false;
            } else {
                // bugfix: search the working copy, not self.graph — petgraph's
                // remove_node swap-removes (moves the last node into the freed
                // index), so indices from the untouched self.graph can be out
                // of bounds or point at the wrong node in the mutated clone
                let idx = graph
                    .node_indices()
                    .find(|&idx| graph[idx] == leaves[0])
                    .unwrap();
                graph.remove_node(idx);
            }
        }
    }
}
/// All nodes with no outgoing edges, i.e. tasks that are no longer waiting on
/// any dependency and are therefore ready to run.
fn leaves(graph: &DiGraph<Task, ()>) -> Vec<Task> {
    let mut ready = vec![];
    for idx in graph.externals(Direction::Outgoing) {
        ready.push(graph[idx].clone());
    }
    ready
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_results_display.rs | src/task/task_results_display.rs | use crate::exit;
use crate::task::task_output::TaskOutput;
use crate::task::task_output_handler::OutputHandler;
use crate::task::{FailedTasks, Task};
use crate::ui::{style, time};
/// Handles display of task execution results and failure summaries
pub struct TaskResultsDisplay {
    output_handler: OutputHandler, // routes lines to the right stream with the task prefix
    failed_tasks: FailedTasks,     // shared list of (task, exit status) failures
    continue_on_error: bool,       // when true, a summary of all failures is printed
    show_timings: bool,            // when true, total elapsed time is printed
}
impl TaskResultsDisplay {
    /// Bundles the output handler and run flags; no I/O happens until
    /// `display_results` is called.
    pub fn new(
        output_handler: OutputHandler,
        failed_tasks: FailedTasks,
        continue_on_error: bool,
        show_timings: bool,
    ) -> Self {
        Self {
            output_handler,
            failed_tasks,
            continue_on_error,
            show_timings,
        }
    }
    /// Display final results and handle failures
    pub fn display_results(&self, num_tasks: usize, timer: std::time::Instant) {
        self.display_keep_order_output();
        self.display_timing_summary(num_tasks, timer);
        self.maybe_print_failure_summary();
        // NOTE: may not return — exits the process if any task failed
        self.exit_if_failed();
    }
    /// Display keep-order output if using that mode
    fn display_keep_order_output(&self) {
        if self.output_handler.output(None) != TaskOutput::KeepOrder {
            return;
        }
        let output = self.output_handler.keep_order_output.lock().unwrap();
        // replay buffered stdout then stderr for each task, in insertion order
        for (out, err) in output.values() {
            for (prefix, line) in out {
                if console::colors_enabled() {
                    // trailing SGR reset so a task's colors don't bleed into the next line
                    prefix_println!(prefix, "{line}\x1b[0m");
                } else {
                    prefix_println!(prefix, "{line}");
                }
            }
            for (prefix, line) in err {
                if console::colors_enabled_stderr() {
                    prefix_eprintln!(prefix, "{line}\x1b[0m");
                } else {
                    prefix_eprintln!(prefix, "{line}");
                }
            }
        }
    }
    /// Display timing summary if enabled
    fn display_timing_summary(&self, num_tasks: usize, timer: std::time::Instant) {
        // a single task's timing is not interesting; only print for multi-task runs
        if self.show_timings && num_tasks > 1 {
            let msg = format!("Finished in {}", time::format_duration(timer.elapsed()));
            eprintln!("{}", style::edim(msg));
        }
    }
    /// Print failure summary if in continue-on-error mode
    fn maybe_print_failure_summary(&self) {
        if !self.continue_on_error {
            return;
        }
        // clone so the mutex isn't held while printing
        let failed = self.failed_tasks.lock().unwrap().clone();
        if failed.is_empty() {
            return;
        }
        let count = failed.len();
        eprintln!("{} {} task(s) failed:", style::ered("ERROR"), count);
        for (task, status) in &failed {
            let prefix = task.estyled_prefix();
            let status_str = status
                .map(|s| s.to_string())
                .unwrap_or_else(|| "unknown".to_string());
            self.eprint(task, &prefix, &format!("exited with status {}", status_str));
        }
    }
    /// Exit if any tasks failed
    fn exit_if_failed(&self) {
        // only the first failure determines the process exit code
        if let Some((task, status)) = self.failed_tasks.lock().unwrap().first() {
            let prefix = task.estyled_prefix();
            self.eprint(
                task,
                &prefix,
                &format!("{} task failed", style::ered("ERROR")),
            );
            exit(status.unwrap_or(1));
        }
    }
    /// Print error message for a task
    fn eprint(&self, task: &Task, prefix: &str, line: &str) {
        self.output_handler.eprint(task, prefix, line);
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_dep.rs | src/task/task_dep.rs | use crate::config::config_file::toml::deserialize_arr;
use serde::ser::SerializeSeq;
use serde::{Deserialize, Deserializer, Serialize};
use std::fmt;
use std::fmt::{Display, Formatter};
use std::str::FromStr;
/// A dependency on another task: the target task name plus any arguments to
/// pass when running it.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TaskDep {
    pub task: String, // task name; may still contain tera templates until `render` runs
    pub args: Vec<String>,
}
impl TaskDep {
    /// Renders tera templates in the task name and args. If no explicit args
    /// were given, the rendered name is then split on whitespace so that e.g.
    /// "build --release" becomes task "build" with args ["--release"].
    pub fn render(
        &mut self,
        tera: &mut tera::Tera,
        tera_ctx: &tera::Context,
    ) -> crate::Result<&mut Self> {
        self.task = tera.render_str(&self.task, tera_ctx)?;
        for a in &mut self.args {
            *a = tera.render_str(a, tera_ctx)?;
        }
        if self.args.is_empty() {
            // clone because we reassign self.task while iterating the split
            let s = self.task.clone();
            let mut split = s.split_whitespace().map(|s| s.to_string());
            if let Some(task) = split.next() {
                self.task = task;
            }
            self.args = split.collect();
        }
        Ok(self)
    }
}
impl Display for TaskDep {
    /// Formats as `name arg1 arg2 …` (just the name when there are no args).
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self.args.is_empty() {
            true => write!(f, "{}", self.task),
            false => write!(f, "{} {}", self.task, self.args.join(" ")),
        }
    }
}
impl From<String> for TaskDep {
fn from(s: String) -> Self {
s.parse().unwrap()
}
}
impl FromStr for TaskDep {
    type Err = String;
    /// Infallible: the whole string becomes the task name with no args.
    /// (Whitespace splitting into args happens later, in `render`.)
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let task = s.to_owned();
        Ok(Self {
            task,
            args: Vec::new(),
        })
    }
}
impl<'de> Deserialize<'de> for TaskDep {
    // Accepts either a bare string ("task") or an array (["task", "arg1", …]);
    // the first element is the task name, the rest are its args.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let input: Vec<String> = deserialize_arr(deserializer)?;
        if input.is_empty() {
            Err(serde::de::Error::custom("Task name is required"))
        } else if input.len() == 1 {
            Ok(input[0].to_string().into())
        } else {
            Ok(Self {
                task: input[0].clone(),
                args: input[1..].to_vec(),
            })
        }
    }
}
impl Serialize for TaskDep {
    /// Mirrors `Deserialize`: no args -> plain string, otherwise a sequence of
    /// the task name followed by each arg.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        if self.args.is_empty() {
            serializer.serialize_str(&self.task)
        } else {
            // TODO: it would be possible to track if the user specified a string and if so, continue that format
            let len = self.args.len() + 1;
            let mut seq = serializer.serialize_seq(Some(len))?;
            seq.serialize_element(&self.task)?;
            for arg in self.args.iter() {
                seq.serialize_element(arg)?;
            }
            seq.end()
        }
    }
}
// bugfix: without #[cfg(test)] this module (and its serde_json usage) was
// compiled into non-test builds; gating it also explains why the
// allow(unused_imports) was previously needed.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_task_dep_from_str() {
        let td: TaskDep = "task".parse().unwrap();
        assert_eq!(td.task, "task");
        assert!(td.args.is_empty());
        // TODO: td.render()
        // let td: TaskDep = "task arg1 arg2".parse().unwrap();
        // assert_eq!(td.task, "task");
        // assert_eq!(td.args, vec!["arg1", "arg2"]);
    }
    #[test]
    fn test_task_dep_display() {
        let td = TaskDep {
            task: "task".to_string(),
            args: vec!["arg1".to_string(), "arg2".to_string()],
        };
        assert_eq!(td.to_string(), "task arg1 arg2");
    }
    #[test]
    fn test_task_dep_deserialize() {
        let td: TaskDep = serde_json::from_str(r#""task""#).unwrap();
        assert_eq!(td.task, "task");
        assert!(td.args.is_empty());
        assert_eq!(&serde_json::to_string(&td).unwrap(), r#""task""#);
        let td: TaskDep = serde_json::from_str(r#"["task", "arg1", "arg2"]"#).unwrap();
        assert_eq!(td.task, "task");
        assert_eq!(td.args, vec!["arg1", "arg2"]);
        assert_eq!(
            &serde_json::to_string(&td).unwrap(),
            r#"["task","arg1","arg2"]"#
        );
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/mod.rs | src/task/mod.rs | use crate::cli::version::VERSION;
use crate::config::config_file::mise_toml::EnvList;
use crate::config::config_file::toml::{TomlParser, deserialize_arr};
use crate::config::env_directive::{EnvDirective, EnvResolveOptions, EnvResults, ToolsFilter};
use crate::config::{self, Config};
use crate::path_env::PathEnv;
use crate::task::task_script_parser::TaskScriptParser;
use crate::tera::get_tera;
use crate::ui::tree::TreeItem;
use crate::{dirs, env, file};
use console::{Color, measure_text_width, truncate_str};
use eyre::{Result, bail, eyre};
use fuzzy_matcher::FuzzyMatcher;
use fuzzy_matcher::skim::SkimMatcherV2;
use globset::GlobBuilder;
use indexmap::IndexMap;
use itertools::Itertools;
use petgraph::prelude::*;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::fmt::{Debug, Display, Formatter};
use std::hash::{Hash, Hasher};
use std::iter::once;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::LazyLock as Lazy;
use std::{ffi, fmt, path};
use xx::regex;
static FUZZY_MATCHER: Lazy<SkimMatcherV2> =
Lazy::new(|| SkimMatcherV2::default().use_cache(true).smart_case());
/// Type alias for tracking failed tasks with their exit codes
pub type FailedTasks = Arc<std::sync::Mutex<Vec<(Task, Option<i32>)>>>;
mod deps;
pub mod task_context_builder;
mod task_dep;
pub mod task_executor;
pub mod task_fetcher;
pub mod task_file_providers;
pub mod task_helpers;
pub mod task_list;
mod task_load_context;
pub mod task_output;
pub mod task_output_handler;
pub mod task_results_display;
pub mod task_scheduler;
mod task_script_parser;
pub mod task_source_checker;
pub mod task_sources;
pub mod task_tool_installer;
pub use task_load_context::{TaskLoadContext, expand_colon_task_syntax};
pub use task_output::TaskOutput;
pub use task_script_parser::has_any_args_defined;
use crate::config::config_file::ConfigFile;
use crate::env_diff::EnvMap;
use crate::file::display_path;
use crate::toolset::Toolset;
use crate::ui::style;
pub use deps::Deps;
use task_dep::TaskDep;
use task_sources::TaskOutputs;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[serde(untagged)] // in config, an entry may be a bare string or a table form
pub enum RunEntry {
    /// Shell script entry
    Script(String),
    /// Run a single task with optional args
    SingleTask { task: String },
    /// Run multiple tasks in parallel
    TaskGroup { tasks: Vec<String> },
}
/// Which output streams of a task should be suppressed.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
pub enum Silent {
    #[default]
    Off,
    Bool(bool),
    Stdout,
    Stderr,
}
impl Silent {
    /// True when at least one stream is suppressed.
    pub fn is_silent(&self) -> bool {
        self.suppresses_stdout() || self.suppresses_stderr()
    }
    /// True when stdout should be suppressed.
    pub fn suppresses_stdout(&self) -> bool {
        match self {
            Silent::Bool(true) | Silent::Stdout => true,
            _ => false,
        }
    }
    /// True when stderr should be suppressed.
    pub fn suppresses_stderr(&self) -> bool {
        match self {
            Silent::Bool(true) | Silent::Stderr => true,
            _ => false,
        }
    }
    /// True only when both streams are suppressed (`silent = true`).
    pub fn suppresses_both(&self) -> bool {
        *self == Silent::Bool(true)
    }
}
impl Serialize for Silent {
    // Off serializes as `false` so it round-trips as a plain bool; the
    // stream-specific variants serialize as their string names.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Silent::Off | Silent::Bool(false) => serializer.serialize_bool(false),
            Silent::Bool(true) => serializer.serialize_bool(true),
            Silent::Stdout => serializer.serialize_str("stdout"),
            Silent::Stderr => serializer.serialize_str("stderr"),
        }
    }
}
impl From<bool> for Silent {
    /// `true` suppresses everything; `false` maps to the default `Off`.
    fn from(b: bool) -> Self {
        match b {
            true => Silent::Bool(true),
            false => Silent::Off,
        }
    }
}
impl std::str::FromStr for Silent {
    type Err = String;
    /// Accepts "true", "false", "stdout", or "stderr"; anything else errors.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let parsed = match s {
            "true" => Some(Silent::Bool(true)),
            "false" => Some(Silent::Off),
            "stdout" => Some(Silent::Stdout),
            "stderr" => Some(Silent::Stderr),
            _ => None,
        };
        parsed.ok_or_else(|| {
            format!(
                "invalid silent value: {}, expected true, false, 'stdout', or 'stderr'",
                s
            )
        })
    }
}
impl<'de> Deserialize<'de> for Silent {
    // Custom visitor: config may provide `silent = true/false` or
    // `silent = "stdout"/"stderr"`, which a derived impl cannot express.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct SilentVisitor;
        impl<'de> serde::de::Visitor<'de> for SilentVisitor {
            type Value = Silent;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a boolean or a string ('stdout' or 'stderr')")
            }
            fn visit_bool<E>(self, value: bool) -> Result<Silent, E>
            where
                E: serde::de::Error,
            {
                Ok(Silent::from(value))
            }
            fn visit_str<E>(self, value: &str) -> Result<Silent, E>
            where
                E: serde::de::Error,
            {
                // NOTE: unlike FromStr, string "true"/"false" are rejected here
                match value {
                    "stdout" => Ok(Silent::Stdout),
                    "stderr" => Ok(Silent::Stderr),
                    _ => Err(E::custom(format!(
                        "invalid silent value: '{}', expected 'stdout' or 'stderr'",
                        value
                    ))),
                }
            }
        }
        deserializer.deserialize_any(SilentVisitor)
    }
}
impl std::str::FromStr for RunEntry {
    type Err = String;
    /// Infallible: any string parses as a shell-script entry.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let script = s.to_owned();
        Ok(RunEntry::Script(script))
    }
}
impl Display for RunEntry {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            RunEntry::Script(script) => write!(f, "{}", script),
            RunEntry::SingleTask { task } => write!(f, "task: {task}"),
            RunEntry::TaskGroup { tasks } => {
                let joined = tasks.join(", ");
                write!(f, "tasks: {}", joined)
            }
        }
    }
}
/// A runnable task as defined in a config file or a standalone file task.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Task {
    #[serde(skip)]
    pub name: String,
    #[serde(skip)]
    pub display_name: String,
    #[serde(default)]
    pub description: String,
    #[serde(default, rename = "alias", deserialize_with = "deserialize_arr")]
    pub aliases: Vec<String>,
    // path of the config file (or task file) this task was loaded from
    #[serde(skip)]
    pub config_source: PathBuf,
    #[serde(skip)]
    pub cf: Option<Arc<dyn ConfigFile>>,
    #[serde(skip)]
    pub config_root: Option<PathBuf>,
    // prompt shown before running when set
    #[serde(default)]
    pub confirm: Option<String>,
    // tasks that must finish before this one starts
    #[serde(default, deserialize_with = "deserialize_arr")]
    pub depends: Vec<TaskDep>,
    // tasks that run after this one finishes
    #[serde(default, deserialize_with = "deserialize_arr")]
    pub depends_post: Vec<TaskDep>,
    // soft ordering: wait for these only if they are also being run
    #[serde(default, deserialize_with = "deserialize_arr")]
    pub wait_for: Vec<TaskDep>,
    #[serde(default)]
    pub env: EnvList,
    #[serde(default)]
    pub dir: Option<String>,
    #[serde(default)]
    pub hide: bool,
    #[serde(default)]
    pub global: bool,
    #[serde(default)]
    pub raw: bool,
    // source/output globs used for up-to-date checks
    #[serde(default)]
    pub sources: Vec<String>,
    #[serde(default)]
    pub outputs: TaskOutputs,
    #[serde(default)]
    pub shell: Option<String>,
    #[serde(default)]
    pub quiet: bool,
    #[serde(default)]
    pub silent: Silent,
    // extra tools (name -> version) to install/activate for this task
    #[serde(default)]
    pub tools: IndexMap<String, String>,
    #[serde(default)]
    pub usage: String,
    #[serde(default)]
    pub timeout: Option<String>,
    // normal type
    #[serde(default, deserialize_with = "deserialize_arr")]
    pub run: Vec<RunEntry>,
    #[serde(default, deserialize_with = "deserialize_arr")]
    pub run_windows: Vec<RunEntry>,
    // command type
    // pub command: Option<String>,
    #[serde(default)]
    pub args: Vec<String>,
    // script type
    // pub script: Option<String>,
    // file type
    #[serde(default)]
    pub file: Option<PathBuf>,
    // Store the original remote file source (git::/http:/https:) before it's replaced with local path
    // This is used to determine if the task should use monorepo config file context
    #[serde(skip)]
    pub remote_file_source: Option<String>,
}
impl Task {
/// Creates a file task skeleton: the name comes from `path` relative to
/// `prefix`; all other fields start at their defaults and are filled in
/// later (see `from_path`).
pub fn new(path: &Path, prefix: &Path, config_root: &Path) -> Result<Task> {
    Ok(Self {
        name: name_from_path(prefix, path)?,
        config_source: path.to_path_buf(),
        config_root: Some(config_root.to_path_buf()),
        ..Default::default()
    })
}
/// Loads a file task: parses `#MISE key=value` header comments (each value is
/// a TOML fragment), merges them into one table, and applies the recognized
/// keys to the task before rendering templates.
pub async fn from_path(
    config: &Arc<Config>,
    path: &Path,
    prefix: &Path,
    config_root: &Path,
) -> Result<Task> {
    let mut task = Task::new(path, prefix, config_root)?;
    let info = file::read_to_string(path)?
        .lines()
        .filter_map(|line| {
            // reminder to delete the deprecated `# mise` branch below by 2026.3
            debug_assert!(
                !VERSION.starts_with("2026.3"),
                "remove old syntax `# mise`"
            );
            // new syntax: `#MISE`, `//MISE`, `::MISE`, or `# [MISE]` headers
            if let Some(captures) =
                regex!(r"^(?:#|//|::)(?:MISE| ?\[MISE\]) ([a-z_]+=.+)$").captures(line)
            {
                Some(captures)
            } else if let Some(captures) = regex!(r"^(?:#|//) mise ([a-z_]+=.+)$")
                .captures(line)
            {
                // old syntax: still accepted but warns once
                deprecated!(
                    "file_task_headers_old_syntax",
                    "The `# mise ...` syntax for task headers is deprecated and will be removed in mise 2026.3.0. Use the new `#MISE ...` syntax instead."
                );
                Some(captures)
            } else {
                None
            }
        })
        .map(|captures| captures.extract().1)
        .map(|[toml]| {
            toml.parse::<toml::Value>()
                .map_err(|e| eyre::eyre!("failed to parse task header TOML '{}': {}", toml, e))
        })
        .collect::<Result<Vec<_>>>()?
        .into_iter()
        .filter_map(|toml| toml.as_table().cloned())
        .flatten()
        // later headers override earlier ones with the same key
        .fold(toml::Table::new(), |mut map, (key, value)| {
            map.insert(key, value);
            map
        });
    let info = toml::Value::Table(info);
    let p = TomlParser::new(&info);
    // trace!("task info: {:#?}", info);
    task.description = p.parse_str("description").unwrap_or_default();
    // both singular and plural, string and array spellings are accepted
    task.aliases = p
        .parse_array("alias")
        .or(p.parse_array("aliases"))
        .or(p.parse_str("alias").map(|s| vec![s]))
        .or(p.parse_str("aliases").map(|s| vec![s]))
        .unwrap_or_default();
    task.confirm = p.parse_str("confirm");
    task.depends = p.parse_array("depends").unwrap_or_default();
    task.depends_post = p.parse_array("depends_post").unwrap_or_default();
    task.wait_for = p.parse_array("wait_for").unwrap_or_default();
    task.env = p.parse_env("env")?.unwrap_or_default();
    task.dir = p.parse_str("dir");
    // non-executable task files are hidden from listings
    task.hide = !file::is_executable(path) || p.parse_bool("hide").unwrap_or_default();
    task.raw = p.parse_bool("raw").unwrap_or_default();
    task.sources = p.parse_array("sources").unwrap_or_default();
    task.outputs = info.get("outputs").map(|to| to.into()).unwrap_or_default();
    task.file = Some(path.to_path_buf());
    task.shell = p.parse_str("shell");
    task.quiet = p.parse_bool("quiet").unwrap_or_default();
    task.silent = info
        .get("silent")
        .and_then(|v| {
            // Try to deserialize as Silent enum (handles bool, "stdout", "stderr")
            Silent::deserialize(v.clone()).ok()
        })
        .unwrap_or_default();
    // tools table: only string-valued entries are kept
    task.tools = p
        .parse_table("tools")
        .map(|t| {
            t.into_iter()
                .filter_map(|(k, v)| v.as_str().map(|v| (k, v.to_string())))
                .collect()
        })
        .unwrap_or_default();
    task.render(config, config_root).await?;
    Ok(task)
}
/// Returns a copy of this task with the given env directives appended to its
/// env list (later directives take precedence when resolved).
pub fn derive_env(&self, env_directives: &[EnvDirective]) -> Self {
    let mut derived = self.clone();
    for directive in env_directives {
        derived.env.0.push(directive.clone());
    }
    derived
}
/// prints the task name without an extension
pub fn display_name(&self, all_tasks: &BTreeMap<String, Task>) -> String {
    // For task names, only strip extensions after the last colon (:)
    // This handles monorepo task names like "//projects/my.app:build.sh"
    // where we want to strip ".sh" but keep "my.app" intact
    let display_name = if let Some((prefix, task_part)) = self.name.rsplit_once(':') {
        // Has a colon separator (e.g., "//projects/my.app:build.sh")
        // Strip extension from the task part only
        // rsplitn(2, '.').last() drops only the final ".ext" segment
        let task_without_ext = task_part.rsplitn(2, '.').last().unwrap_or_default();
        format!("{}:{}", prefix, task_without_ext)
    } else {
        // No colon separator (e.g., "build.sh")
        // Strip extension from the whole name
        self.name
            .rsplitn(2, '.')
            .last()
            .unwrap_or_default()
            .to_string()
    };
    if all_tasks.contains_key(&display_name) {
        // this means another task has the name without an extension so use the full name
        self.name.clone()
    } else {
        display_name
    }
}
/// Returns true if `pat` refers to this task (exact name, alias, or
/// extension-insensitive / monorepo-prefix-aware match).
pub fn is_match(&self, pat: &str) -> bool {
    if self.name == pat || self.aliases.contains(&pat.to_string()) {
        return true;
    }
    // For pattern matching, we need to handle several cases:
    // 1. Simple pattern (e.g., "build") should match monorepo tasks (e.g., "//projects/my.app:build")
    // 2. Full pattern (e.g., "//projects/my.app:build") should only match exact path
    // 3. Extensions should be stripped for comparison
    let matches = if let Some((prefix, task_part)) = self.name.rsplit_once(':') {
        // Task name has a colon (e.g., "//projects/my.app:build.sh")
        let task_stripped = task_part.rsplitn(2, '.').last().unwrap_or_default();
        if let Some((pat_prefix, pat_task)) = pat.rsplit_once(':') {
            // Pattern also has a colon - compare full paths
            let pat_task_stripped = pat_task.rsplitn(2, '.').last().unwrap_or_default();
            prefix == pat_prefix && task_stripped == pat_task_stripped
        } else {
            // Pattern is simple (no colon) - just compare task names
            let pat_stripped = pat.rsplitn(2, '.').last().unwrap_or_default();
            task_stripped == pat_stripped
        }
    } else {
        // Simple task name without colon (e.g., "build.sh")
        let name_stripped = self.name.rsplitn(2, '.').last().unwrap_or_default();
        let pat_stripped = pat.rsplitn(2, '.').last().unwrap_or_default();
        name_stripped == pat_stripped
    };
    // NOTE: alias check here is redundant with the early return above
    matches || self.aliases.contains(&pat.to_string())
}
/// Returns the first existing task-include directory under the project root,
/// falling back to `<project_root>/mise-tasks`.
pub async fn task_dir() -> PathBuf {
    let config = Config::get().await.unwrap();
    let cwd = dirs::CWD.clone().unwrap_or_default();
    let project_root = config.project_root.clone().unwrap_or(cwd);
    for dir in config::task_includes_for_dir(&project_root, &config.config_files) {
        // NOTE(review): `dir.is_dir()` checks `dir` as given (possibly relative
        // to the process cwd) while existence is checked under project_root —
        // looks asymmetric; confirm intended
        if dir.is_dir() && project_root.join(&dir).exists() {
            return project_root.join(dir);
        }
    }
    project_root.join("mise-tasks")
}
pub fn with_args(mut self, args: Vec<String>) -> Self {
self.args = args;
self
}
/// The bracketed prefix used when labeling this task's output lines.
pub fn prefix(&self) -> String {
    let name = &self.display_name;
    format!("[{name}]")
}
/// The run entries to execute: on Windows the windows-specific entries are
/// preferred when any are defined.
pub fn run(&self) -> &Vec<RunEntry> {
    match (cfg!(windows), self.run_windows.is_empty()) {
        (true, false) => &self.run_windows,
        _ => &self.run,
    }
}
/// Returns only the script strings from the run entries (without rendering)
pub fn run_script_strings(&self) -> Vec<String> {
    let mut scripts = vec![];
    for entry in self.run() {
        if let RunEntry::Script(s) = entry {
            scripts.push(s.clone());
        }
    }
    scripts
}
/// All transitive dependencies of this task (pre and post), with circular
/// dependency detection seeded by this task's own name.
pub fn all_depends(&self, tasks: &BTreeMap<String, Task>) -> Result<Vec<Task>> {
    let refs = build_task_ref_map(tasks.iter());
    let mut visit_path = vec![self.name.clone()];
    self.all_depends_recursive(&refs, &mut visit_path)
}
/// Depth-first walk of `depends` + `depends_post`; `path` is the current
/// DFS chain used to detect cycles (a name reappearing on the path).
fn all_depends_recursive(
    &self,
    tasks: &BTreeMap<String, &Task>,
    path: &mut Vec<String>,
) -> Result<Vec<Task>> {
    let mut depends: Vec<Task> = self
        .depends
        .iter()
        .chain(self.depends_post.iter())
        .map(|td| match_tasks_with_context(tasks, td, Some(self)))
        .flatten_ok()
        .filter_ok(|t| t.name != self.name)
        .collect::<Result<Vec<_>>>()?;
    // Collect transitive dependencies with cycle detection
    for dep in depends.clone() {
        if path.contains(&dep.name) {
            // Circular dependency detected - build path string for error message
            let cycle_path = path
                .iter()
                .skip_while(|&name| name != &dep.name)
                .chain(std::iter::once(&dep.name))
                .join(" -> ");
            return Err(eyre!("circular dependency detected: {}", cycle_path));
        }
        path.push(dep.name.clone());
        let mut extra = dep.all_depends_recursive(tasks, path)?;
        path.pop(); // Remove from path after processing this branch
        extra.retain(|t| t.name != self.name); // prevent depending on ourself
        depends.extend(extra);
    }
    // dedupe while keeping first-seen order
    let depends = depends.into_iter().unique().collect();
    Ok(depends)
}
/// Resolves this task's dependency patterns to concrete tasks.
/// Returns `(pre, post)`: tasks that must run before this one (depends +
/// wait_for limited to tasks already scheduled) and tasks that run after
/// (depends_post).
pub async fn resolve_depends(
    &self,
    config: &Arc<Config>,
    tasks_to_run: &[Task],
) -> Result<(Vec<Task>, Vec<Task>)> {
    use crate::task::TaskLoadContext;
    let tasks_to_run: HashSet<&Task> = tasks_to_run.iter().collect();
    // Build context with path hints from self, tasks_to_run, and dependency patterns
    // Resolve patterns before extracting paths to handle local deps (e.g., ":A")
    let path_hints: Vec<String> = once(&self.name)
        .chain(tasks_to_run.iter().map(|t| &t.name))
        .filter_map(|name| extract_monorepo_path(name))
        .chain(
            self.depends
                .iter()
                .chain(self.wait_for.iter())
                .chain(self.depends_post.iter())
                .map(|td| resolve_task_pattern(&td.task, Some(self)))
                .filter_map(|resolved| extract_monorepo_path(&resolved)),
        )
        .unique()
        .collect();
    // only load the monorepo subtrees we actually need
    let ctx = if !path_hints.is_empty() {
        Some(TaskLoadContext {
            path_hints,
            load_all: false,
        })
    } else {
        None
    };
    let all_tasks = config.tasks_with_context(ctx.as_ref()).await?;
    let tasks = build_task_ref_map(all_tasks.iter());
    let depends = self
        .depends
        .iter()
        .map(|td| match_tasks_with_context(&tasks, td, Some(self)))
        .flatten_ok()
        .collect_vec();
    // wait_for is a soft ordering: only honored for tasks already scheduled
    let wait_for = self
        .wait_for
        .iter()
        .map(|td| match_tasks_with_context(&tasks, td, Some(self)))
        .flatten_ok()
        .filter_ok(|t| tasks_to_run.contains(t))
        .collect_vec();
    let depends_post = self
        .depends_post
        .iter()
        .map(|td| match_tasks_with_context(&tasks, td, Some(self)))
        .flatten_ok()
        .filter_ok(|t| t.name != self.name) // never depend on ourselves
        .collect::<Result<Vec<_>>>()?;
    let depends = depends
        .into_iter()
        .chain(wait_for)
        .filter_ok(|t| t.name != self.name)
        .collect::<Result<_>>()?;
    Ok((depends, depends_post))
}
/// Fills a parsed usage spec with this task's identity (name, aliases,
/// description) without clobbering help text the spec already provides.
fn populate_spec_metadata(&self, spec: &mut usage::Spec) {
    spec.name = self.display_name.clone();
    spec.bin = self.display_name.clone();
    if spec.cmd.help.is_none() {
        spec.cmd.help = Some(self.description.clone());
    }
    spec.cmd.name = self.display_name.clone();
    spec.cmd.aliases = self.aliases.clone();
    // surface dependencies in help output unless the spec set its own preamble
    if spec.cmd.before_help.is_none()
        && spec.cmd.before_help_long.is_none()
        && !self.depends.is_empty()
    {
        spec.cmd.before_help_long =
            Some(format!("- Depends: {}", self.depends.iter().join(", ")));
    }
    spec.cmd.usage = spec.cmd.usage();
}
/// Parses the task's usage spec: from the task file for file tasks (scripts
/// list is empty), or by parsing the run scripts for inline tasks.
pub async fn parse_usage_spec(
    &self,
    config: &Arc<Config>,
    cwd: Option<PathBuf>,
    env: &EnvMap,
) -> Result<(usage::Spec, Vec<String>)> {
    let (mut spec, scripts) = if let Some(file) = self.file_path(config).await? {
        // file task: parse usage comments from the script file itself;
        // a parse failure degrades to an empty spec with a warning
        let spec = usage::Spec::parse_script(&file)
            .inspect_err(|e| {
                warn!(
                    "failed to parse task file {} with usage: {e:?}",
                    file::display_path(&file)
                )
            })
            .unwrap_or_default();
        (spec, vec![])
    } else {
        // inline task: extract the spec from the run script templates
        let scripts_only = self.run_script_strings();
        let (scripts, spec) = TaskScriptParser::new(cwd)
            .parse_run_scripts(config, self, &scripts_only, env)
            .await?;
        (spec, scripts)
    };
    self.populate_spec_metadata(&mut spec);
    Ok((spec, scripts))
}
/// Parse usage spec for display purposes without expensive environment rendering
pub async fn parse_usage_spec_for_display(&self, config: &Arc<Config>) -> Result<usage::Spec> {
    let dir = self.dir(config).await?;
    let mut spec = if let Some(file) = self.file_path(config).await? {
        // file task: parse usage comments from the file; degrade to empty spec
        usage::Spec::parse_script(&file)
            .inspect_err(|e| {
                warn!(
                    "failed to parse task file {} with usage: {e:?}",
                    file::display_path(&file)
                )
            })
            .unwrap_or_default()
    } else {
        // inline task: spec-only parse skips env resolution for speed
        let scripts_only = self.run_script_strings();
        TaskScriptParser::new(dir)
            .parse_run_scripts_for_spec_only(config, self, &scripts_only)
            .await?
    };
    self.populate_spec_metadata(&mut spec);
    Ok(spec)
}
/// Renders the run scripts with CLI args applied. If the spec declares formal
/// args, the parser substitutes them into the scripts; otherwise the raw args
/// are appended to the last script only.
pub async fn render_run_scripts_with_args(
    &self,
    config: &Arc<Config>,
    cwd: Option<PathBuf>,
    args: &[String],
    env: &EnvMap,
) -> Result<Vec<(String, Vec<String>)>> {
    let (spec, scripts) = self.parse_usage_spec(config, cwd.clone(), env).await?;
    if has_any_args_defined(&spec) {
        // formal args declared: the parser interpolates them into the scripts
        let scripts_only = self.run_script_strings();
        let scripts = TaskScriptParser::new(cwd)
            .parse_run_scripts_with_args(config, self, &scripts_only, env, args, &spec)
            .await?;
        Ok(scripts.into_iter().map(|s| (s, vec![])).collect())
    } else {
        // hoisted out of the loop (was recomputed per script); `i + 1 == n`
        // also avoids the debug-mode underflow of `n - 1` when n == 0
        let n = self.run_script_strings().len();
        Ok(scripts
            .iter()
            .enumerate()
            .map(|(i, script)| {
                // only pass args to the last script if no formal args are defined
                if i + 1 == n {
                    (script.clone(), args.to_vec())
                } else {
                    (script.clone(), vec![])
                }
            })
            .collect())
    }
}
/// Renders this task's usage spec as markdown documentation.
pub async fn render_markdown(&self, config: &Arc<Config>) -> Result<String> {
    // the display-only spec avoids expensive env rendering
    let spec = self.parse_usage_spec_for_display(config).await?;
    let renderer = usage::docs::markdown::MarkdownRenderer::new(spec)
        .with_replace_pre_with_code_fences(true)
        .with_header_level(2);
    Ok(renderer.render_spec()?)
}
/// The task's output prefix, colored deterministically by name so the same
/// task always gets the same color within a run.
pub fn estyled_prefix(&self) -> String {
    static COLORS: Lazy<Vec<Color>> = Lazy::new(|| {
        vec![
            Color::Blue,
            Color::Magenta,
            Color::Cyan,
            Color::Green,
            Color::Yellow,
            Color::Red,
        ]
    });
    // cheap stable "hash": sum of char codes modulo the palette size
    let idx = self.display_name.chars().map(|c| c as usize).sum::<usize>() % COLORS.len();
    style::ereset() + &style::estyle(self.prefix()).fg(COLORS[idx]).to_string()
}
/// The directory this task should run in: the task-level `dir` (tera-rendered,
/// resolved against config_root when relative), the config file's task dir,
/// or the config root when neither is set.
pub async fn dir(&self, config: &Arc<Config>) -> Result<Option<PathBuf>> {
    if let Some(dir) = self.dir.clone().or_else(|| {
        // fall back to the config file's task-level dir setting
        self.cf(config)
            .as_ref()
            .and_then(|cf| cf.task_config().dir.clone())
    }) {
        let config_root = self.config_root.clone().unwrap_or_default();
        let mut tera = get_tera(Some(&config_root));
        let tera_ctx = self.tera_ctx(config).await?;
        let dir = tera.render_str(&dir, &tera_ctx)?;
        let dir = file::replace_path(&dir);
        if dir.is_absolute() {
            // already owned — the original's `.to_path_buf()` was a redundant copy
            Ok(Some(dir))
        } else if let Some(root) = &self.config_root {
            Ok(Some(root.join(dir)))
        } else {
            // already owned — the original's `.clone()` was a redundant copy
            Ok(Some(dir))
        }
    } else {
        Ok(self.config_root.clone())
    }
}
/// The task file's path with tera templates rendered; relative paths are
/// resolved against config_root. None for inline (non-file) tasks.
pub async fn file_path(&self, config: &Arc<Config>) -> Result<Option<PathBuf>> {
    if let Some(file) = &self.file {
        let file_str = file.to_string_lossy().to_string();
        let config_root = self.config_root.clone().unwrap_or_default();
        let mut tera = get_tera(Some(&config_root));
        let tera_ctx = self.tera_ctx(config).await?;
        let rendered = tera.render_str(&file_str, &tera_ctx)?;
        // expand ~/ etc. in the rendered path
        let rendered_path = file::replace_path(&rendered);
        if rendered_path.is_absolute() {
            Ok(Some(rendered_path))
        } else if let Some(root) = &self.config_root {
            Ok(Some(root.join(rendered_path)))
        } else {
            Ok(Some(rendered_path))
        }
    } else {
        Ok(None)
    }
}
/// Get file path without templating (for display purposes)
/// This is a non-async version used when we just need the path for display
fn file_path_raw(&self) -> Option<PathBuf> {
    let file = self.file.as_ref()?;
    if file.is_absolute() {
        return Some(file.clone());
    }
    match &self.config_root {
        Some(root) => Some(root.join(file)),
        None => Some(file.clone()),
    }
}
/// The toolset's tera context extended with this task's `config_root`.
pub async fn tera_ctx(&self, config: &Arc<Config>) -> Result<tera::Context> {
    let toolset = config.get_toolset().await?;
    let mut ctx = toolset.tera_ctx(config).await?.clone();
    ctx.insert("config_root", &self.config_root);
    Ok(ctx)
}
    /// Return the config file this task belongs to.
    ///
    /// Prefers the config file reference stored on the task (monorepo tasks);
    /// otherwise looks the task's `config_source` up in `config.config_files`.
    /// Returns `None` if neither is available.
    pub fn cf<'a>(&'a self, config: &'a Config) -> Option<&'a Arc<dyn ConfigFile>> {
        // For monorepo tasks, use the stored config file reference
        if let Some(ref cf) = self.cf {
            return Some(cf);
        }
        // Fallback to looking up in config.config_files
        config.config_files.get(&self.config_source)
    }
/// Check if this task is a remote task (loaded from git:// or http:// URL)
/// Remote tasks should not use monorepo config file context because they need
/// access to tools from the full config hierarchy, not just the local config file
pub fn is_remote(&self) -> bool {
// Check the stored remote file source (set before file is replaced with local path)
if let Some(source) = &self.remote_file_source {
return source.starts_with("git::")
|| source.starts_with("http://")
|| source.starts_with("https://");
}
false
}
pub fn shell(&self) -> Option<Vec<String>> {
self.shell.as_ref().and_then(|shell| {
let shell_cmd = shell
.split_whitespace()
.map(|s| s.to_string())
.collect::<Vec<_>>();
if shell_cmd.is_empty() || shell_cmd[0].trim().is_empty() {
warn!("invalid shell '{shell}', expected '<program> <argument>' (e.g. sh -c)");
None
} else {
Some(shell_cmd)
}
})
}
    /// Render every templated field of this task in place using tera.
    ///
    /// Covers aliases, confirm message, description, sources, outputs,
    /// dependency specs (depends / depends_post / wait_for), dir, shell, and
    /// tool versions. Errors propagate from any failed template render.
    pub async fn render(&mut self, config: &Arc<Config>, config_root: &Path) -> Result<()> {
        let mut tera = get_tera(Some(config_root));
        let tera_ctx = self.tera_ctx(config).await?;
        for a in &mut self.aliases {
            *a = tera.render_str(a, &tera_ctx)?;
        }
        self.confirm = self
            .confirm
            .as_ref()
            .map(|c| tera.render_str(c, &tera_ctx))
            .transpose()?;
        self.description = tera.render_str(&self.description, &tera_ctx)?;
        for s in &mut self.sources {
            *s = tera.render_str(s, &tera_ctx)?;
        }
        // a task with sources but no declared outputs gets auto-managed outputs
        // so freshness checks still work
        if !self.sources.is_empty() && self.outputs.is_empty() {
            self.outputs = TaskOutputs::Auto;
        }
        self.outputs.render(&mut tera, &tera_ctx)?;
        for d in &mut self.depends {
            d.render(&mut tera, &tera_ctx)?;
        }
        for d in &mut self.depends_post {
            d.render(&mut tera, &tera_ctx)?;
        }
        for d in &mut self.wait_for {
            d.render(&mut tera, &tera_ctx)?;
        }
        if let Some(dir) = &mut self.dir {
            *dir = tera.render_str(dir, &tera_ctx)?;
        }
        if let Some(shell) = &mut self.shell {
            *shell = tera.render_str(shell, &tera_ctx)?;
        }
        for (_, v) in &mut self.tools {
            *v = tera.render_str(v, &tera_ctx)?;
        }
        Ok(())
    }
pub fn name_to_path(&self) -> PathBuf {
self.name.replace(':', path::MAIN_SEPARATOR_STR).into()
}
    /// Resolve the full environment for running this task.
    ///
    /// Starts from the toolset's environment, applies the task's own `env`
    /// directives (resolved with the same machinery as global env), honors
    /// explicit unsets, and prepends any `_.path` additions to PATH.
    ///
    /// Returns the merged env map plus the task-specific pairs separately
    /// (the latter are forwarded to injected sub-tasks).
    pub async fn render_env(
        &self,
        config: &Arc<Config>,
        ts: &Toolset,
    ) -> Result<(EnvMap, Vec<(String, String)>)> {
        let mut tera_ctx = ts.tera_ctx(config).await?.clone();
        let mut env = ts.full_env(config).await?;
        if let Some(root) = &config.project_root {
            tera_ctx.insert("config_root", &root);
        }
        // Convert task env directives to (EnvDirective, PathBuf) pairs
        // Use the config file path as source for proper path resolution
        let env_directives = self
            .env
            .0
            .iter()
            .map(|directive| (directive.clone(), self.config_source.clone()))
            .collect();
        // Resolve environment directives using the same system as global env
        let env_results = EnvResults::resolve(
            config,
            tera_ctx.clone(),
            &env,
            env_directives,
            EnvResolveOptions {
                vars: false,
                tools: ToolsFilter::Both,
                warn_on_missing_required: false,
            },
        )
        .await?;
        let task_env = env_results.env.into_iter().map(|(k, (v, _))| (k, v));
        // Apply the resolved environment variables
        env.extend(task_env.clone());
        // Remove environment variables that were explicitly unset
        for key in &env_results.env_remove {
            env.remove(key);
        }
        // Apply path additions from _.path directives
        if !env_results.env_paths.is_empty() {
            let mut path_env = PathEnv::from_iter(env::split_paths(
                &env.get(&*env::PATH_KEY).cloned().unwrap_or_default(),
            ));
            for path in env_results.env_paths {
                path_env.add(path);
            }
            env.insert(env::PATH_KEY.to_string(), path_env.to_string());
        }
        Ok((env, task_env.collect()))
    }
}
fn name_from_path(prefix: impl AsRef<Path>, path: impl AsRef<Path>) -> Result<String> {
let name = path
.as_ref()
.strip_prefix(prefix)
.map(|p| match p {
p if p.starts_with("mise-tasks") => p.strip_prefix("mise-tasks"),
p if p.starts_with(".mise-tasks") => p.strip_prefix(".mise-tasks"),
p if p.starts_with(".mise/tasks") => p.strip_prefix(".mise/tasks"),
p if p.starts_with("mise/tasks") => p.strip_prefix("mise/tasks"),
p if p.starts_with(".config/mise/tasks") => p.strip_prefix(".config/mise/tasks"),
_ => Ok(p),
})??
.components()
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | true |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_fetcher.rs | src/task/task_fetcher.rs | use crate::config::Settings;
use crate::task::Task;
use crate::task::task_file_providers::TaskFileProvidersBuilder;
use eyre::{Result, bail};
/// Handles fetching remote task files and converting them to local paths
pub struct TaskFetcher {
    // when true, bypass the remote-file cache and always re-download
    no_cache: bool,
}
impl TaskFetcher {
pub fn new(no_cache: bool) -> Self {
Self { no_cache }
}
/// Fetch remote task files, converting remote paths to local cached paths
pub async fn fetch_tasks(&self, tasks: &mut Vec<Task>) -> Result<()> {
let no_cache = self.no_cache || Settings::get().task_remote_no_cache.unwrap_or(false);
let task_file_providers = TaskFileProvidersBuilder::new()
.with_cache(!no_cache)
.build();
for t in tasks {
if let Some(file) = &t.file {
let source = file.to_string_lossy().to_string();
let provider = task_file_providers.get_provider(&source);
if provider.is_none() {
bail!("No provider found for file: {}", source);
}
let local_path = provider.unwrap().get_local_path(&source).await?;
// Store the original remote source before replacing with local path
// This is used to determine if the task should use monorepo config file context
t.remote_file_source = Some(source);
t.file = Some(local_path);
}
}
Ok(())
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_executor.rs | src/task/task_executor.rs | use crate::cli::args::ToolArg;
use crate::cmd::CmdLineRunner;
use crate::config::{Config, Settings, env_directive::EnvDirective};
use crate::file::{display_path, is_executable};
use crate::task::task_context_builder::TaskContextBuilder;
use crate::task::task_list::split_task_spec;
use crate::task::task_output::{TaskOutput, trunc};
use crate::task::task_output_handler::OutputHandler;
use crate::task::task_source_checker::{save_checksum, sources_are_fresh, task_cwd};
use crate::task::{Deps, FailedTasks, GetMatchingExt, Task};
use crate::ui::{style, time};
use duct::IntoExecutablePath;
use eyre::{Report, Result, ensure, eyre};
use itertools::Itertools;
#[cfg(unix)]
use nix::errno::Errno;
use std::collections::BTreeMap;
use std::iter::once;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::sync::{Arc, Mutex as StdMutex};
use std::time::{Duration, SystemTime};
use tokio::sync::Mutex;
use tokio::sync::{mpsc, oneshot};
use xx::file;
/// Configuration for TaskExecutor
pub struct TaskExecutorConfig {
    // re-run tasks even when their sources are fresh
    pub force: bool,
    // override working directory for task execution
    pub cd: Option<PathBuf>,
    // override shell used for inline scripts
    pub shell: Option<String>,
    // extra tool requests from the CLI (e.g. `--tool node@20`)
    pub tool: Vec<ToolArg>,
    // print per-task timing information
    pub timings: bool,
    // keep running remaining tasks after a failure
    pub continue_on_error: bool,
    // set up everything but skip actually executing commands
    pub dry_run: bool,
    // ignore task dependencies (depends/depends_post/wait_for)
    pub skip_deps: bool,
}
/// Executes tasks with proper context, environment, and output handling
pub struct TaskExecutor {
    // builds per-task toolsets and environments
    pub context_builder: TaskContextBuilder,
    // routes task stdout/stderr according to the selected output mode
    pub output_handler: OutputHandler,
    // shared record of (task, exit status) for tasks that failed
    pub failed_tasks: FailedTasks,
    // CLI flags
    pub force: bool,
    pub cd: Option<PathBuf>,
    pub shell: Option<String>,
    pub tool: Vec<ToolArg>,
    pub timings: bool,
    pub continue_on_error: bool,
    pub dry_run: bool,
    pub skip_deps: bool,
}
impl TaskExecutor {
pub fn new(
context_builder: TaskContextBuilder,
output_handler: OutputHandler,
config: TaskExecutorConfig,
) -> Self {
Self {
context_builder,
output_handler,
failed_tasks: Arc::new(StdMutex::new(Vec::new())),
force: config.force,
cd: config.cd,
shell: config.shell,
tool: config.tool,
timings: config.timings,
continue_on_error: config.continue_on_error,
dry_run: config.dry_run,
skip_deps: config.skip_deps,
}
}
    /// True once any task has failed; used to abort scheduling early.
    pub fn is_stopping(&self) -> bool {
        !self.failed_tasks.lock().unwrap().is_empty()
    }
    /// Record a failed task; a missing exit status is normalized to 1.
    pub fn add_failed_task(&self, task: Task, status: Option<i32>) {
        let mut failed = self.failed_tasks.lock().unwrap();
        failed.push((task, status.or(Some(1))));
    }
    // thin delegations to the output handler
    fn eprint(&self, task: &Task, prefix: &str, line: &str) {
        self.output_handler.eprint(task, prefix, line);
    }
    fn output(&self, task: Option<&Task>) -> crate::task::task_output::TaskOutput {
        self.output_handler.output(task)
    }
    fn quiet(&self, task: Option<&Task>) -> bool {
        self.output_handler.quiet(task)
    }
    fn raw(&self, task: Option<&Task>) -> bool {
        self.output_handler.raw(task)
    }
    /// Whether to print per-task timing lines: the CLI flag, the
    /// `task_timings` setting, or (by default) any output mode that already
    /// prefixes lines per task.
    pub fn task_timings(&self) -> bool {
        let output_mode = self.output_handler.output(None);
        self.timings
            || Settings::get().task_timings.unwrap_or(
                output_mode == TaskOutput::Prefix
                    || output_mode == TaskOutput::Timed
                    || output_mode == TaskOutput::KeepOrder,
            )
    }
    /// Run a single task under the scheduler.
    ///
    /// Skips the task when it is in `task_skip` or its sources are fresh
    /// (unless --force), honors the task's confirm prompt, builds its toolset
    /// and environment, then executes either the task's file or its inline
    /// run entries. `sched_tx` is forwarded so run entries that reference
    /// other tasks can inject them into the shared scheduler.
    pub async fn run_task_sched(
        &self,
        task: &Task,
        config: &Arc<Config>,
        sched_tx: Arc<mpsc::UnboundedSender<(Task, Arc<Mutex<Deps>>)>>,
    ) -> Result<()> {
        let prefix = task.estyled_prefix();
        let total_start = std::time::Instant::now();
        if Settings::get().task_skip.contains(&task.name) {
            if !self.quiet(Some(task)) {
                self.eprint(task, &prefix, "skipping task");
            }
            return Ok(());
        }
        if !self.force && sources_are_fresh(task, config).await? {
            if !self.quiet(Some(task)) {
                self.eprint(task, &prefix, "sources up-to-date, skipping");
            }
            return Ok(());
        }
        // confirm prompt: --yes bypasses, any non-confirmation aborts
        if let Some(message) = &task.confirm
            && !Settings::get().yes
            && !crate::ui::confirm(message).unwrap_or(false)
        {
            return Err(eyre!("aborted by user"));
        }
        let mut tools = self.tool.clone();
        for (k, v) in &task.tools {
            tools.push(format!("{k}@{v}").parse()?);
        }
        let ts_build_start = std::time::Instant::now();
        // Check if we need special handling for monorepo tasks with config file context
        // Remote tasks (from git::/http:/https: URLs) should NOT use config file context
        // because they need tools from the full config hierarchy, not just the local config
        let task_cf = if task.is_remote() {
            None
        } else {
            task.cf(config)
        };
        // Build toolset - either from task's config file or standard way
        let ts = self
            .context_builder
            .build_toolset_for_task(config, task, task_cf, &tools)
            .await?;
        trace!(
            "task {} ToolsetBuilder::build took {}ms",
            task.name,
            ts_build_start.elapsed().as_millis()
        );
        let env_render_start = std::time::Instant::now();
        // Build environment - either from task's config file context or standard way
        let (mut env, task_env) = if let Some(task_cf) = task_cf {
            self.context_builder
                .resolve_task_env_with_config(config, task, task_cf, &ts)
                .await?
        } else {
            // Fallback to standard behavior
            task.render_env(config, &ts).await?
        };
        trace!(
            "task {} render_env took {}ms",
            task.name,
            env_render_start.elapsed().as_millis()
        );
        if !self.timings {
            env.insert("MISE_TASK_TIMINGS".to_string(), "0".to_string());
        }
        // export MISE_* context variables for the task process
        if let Some(cwd) = &*crate::dirs::CWD {
            env.insert("MISE_ORIGINAL_CWD".into(), cwd.display().to_string());
        }
        if let Some(root) = config.project_root.clone().or(task.config_root.clone()) {
            env.insert("MISE_PROJECT_ROOT".into(), root.display().to_string());
        }
        env.insert("MISE_TASK_NAME".into(), task.name.clone());
        let task_file = task
            .file_path(config)
            .await?
            .unwrap_or(task.config_source.clone());
        env.insert("MISE_TASK_FILE".into(), task_file.display().to_string());
        if let Some(dir) = task_file.parent() {
            env.insert("MISE_TASK_DIR".into(), dir.display().to_string());
        }
        if let Some(config_root) = &task.config_root {
            env.insert("MISE_CONFIG_ROOT".into(), config_root.display().to_string());
        }
        let timer = std::time::Instant::now();
        // NOTE(review): file_path() re-renders the template; its result was
        // already computed above for MISE_TASK_FILE — could likely be reused.
        if let Some(file) = task.file_path(config).await? {
            let exec_start = std::time::Instant::now();
            self.exec_file(config, &file, task, &env, &prefix).await?;
            trace!(
                "task {} exec_file took {}ms (total {}ms)",
                task.name,
                exec_start.elapsed().as_millis(),
                total_start.elapsed().as_millis()
            );
        } else {
            let rendered_run_scripts = task
                .render_run_scripts_with_args(config, self.cd.clone(), &task.args, &env)
                .await?;
            // usage parsing expects argv[0]-style leading empty string
            let get_args = || {
                [String::new()]
                    .iter()
                    .chain(task.args.iter())
                    .cloned()
                    .collect()
            };
            self.parse_usage_spec_and_init_env(config, task, &mut env, get_args)
                .await?;
            let exec_start = std::time::Instant::now();
            self.exec_task_run_entries(
                config,
                task,
                (&env, &task_env),
                &prefix,
                rendered_run_scripts,
                sched_tx,
            )
            .await?;
            trace!(
                "task {} exec_task_run_entries took {}ms (total {}ms)",
                task.name,
                exec_start.elapsed().as_millis(),
                total_start.elapsed().as_millis()
            );
        }
        if self.task_timings()
            && (task.file.as_ref().is_some() || !task.run_script_strings().is_empty())
        {
            self.eprint(
                task,
                &prefix,
                &format!("Finished in {}", time::format_duration(timer.elapsed())),
            );
        }
        // record source checksums so the next run can skip when fresh
        save_checksum(task)?;
        Ok(())
    }
    /// Execute the task's run entries in declaration order.
    ///
    /// `rendered_scripts` is consumed in lockstep with `Script` entries; task
    /// references (single or grouped) are resolved against the current task's
    /// name and injected into the scheduler via `inject_and_wait`.
    async fn exec_task_run_entries(
        &self,
        config: &Arc<Config>,
        task: &Task,
        full_env: (&BTreeMap<String, String>, &[(String, String)]),
        prefix: &str,
        rendered_scripts: Vec<(String, Vec<String>)>,
        sched_tx: Arc<mpsc::UnboundedSender<(Task, Arc<Mutex<Deps>>)>>,
    ) -> Result<()> {
        let (env, task_env) = full_env;
        use crate::task::RunEntry;
        let mut script_iter = rendered_scripts.into_iter();
        for entry in task.run() {
            match entry {
                RunEntry::Script(_) => {
                    // one rendered script per Script entry, same order
                    if let Some((script, args)) = script_iter.next() {
                        self.exec_script(&script, &args, task, env, prefix).await?;
                    }
                }
                RunEntry::SingleTask { task: spec } => {
                    let resolved_spec = crate::task::resolve_task_pattern(spec, Some(task));
                    self.inject_and_wait(config, &[resolved_spec], task_env, sched_tx.clone())
                        .await?;
                }
                RunEntry::TaskGroup { tasks } => {
                    // grouped tasks are injected together so they may run concurrently
                    let resolved_tasks: Vec<String> = tasks
                        .iter()
                        .map(|t| crate::task::resolve_task_pattern(t, Some(task)))
                        .collect();
                    self.inject_and_wait(config, &resolved_tasks, task_env, sched_tx.clone())
                        .await?;
                }
            }
        }
        Ok(())
    }
    /// Inject a set of task specs into the shared scheduler and wait until all
    /// of them (and their dependencies) have finished.
    ///
    /// Builds a sub dependency graph from the specs, forwards its runnable
    /// leaves to `sched_tx` (synchronously first, then via a spawned pump),
    /// and polls a oneshot for completion while honoring early abort when a
    /// failure occurs (unless continue_on_error is set). `task_env` from the
    /// parent task is layered onto each injected task.
    async fn inject_and_wait(
        &self,
        config: &Arc<Config>,
        specs: &[String],
        task_env: &[(String, String)],
        sched_tx: Arc<mpsc::UnboundedSender<(Task, Arc<Mutex<Deps>>)>>,
    ) -> Result<()> {
        use crate::task::TaskLoadContext;
        trace!("inject start: {}", specs.join(", "));
        // Build tasks list from specs
        // Create a TaskLoadContext from the specs to ensure project tasks are loaded
        let ctx = TaskLoadContext::from_patterns(specs.iter().map(|s| {
            let (name, _) = split_task_spec(s);
            name
        }));
        let tasks = config.tasks_with_context(Some(&ctx)).await?;
        // index tasks by name and every alias for pattern matching
        let tasks_map: BTreeMap<String, Task> = tasks
            .iter()
            .flat_map(|(_, t)| {
                t.aliases
                    .iter()
                    .map(|a| (a.to_string(), t.clone()))
                    .chain(once((t.name.clone(), t.clone())))
                    .collect::<Vec<_>>()
            })
            .collect();
        let mut to_run: Vec<Task> = vec![];
        for spec in specs {
            let (name, args) = split_task_spec(spec);
            let matches = tasks_map.get_matching(name)?;
            ensure!(!matches.is_empty(), "task not found: {}", name);
            for t in matches {
                let mut t = (*t).clone();
                t.args = args.clone();
                if self.skip_deps {
                    t.depends.clear();
                    t.depends_post.clear();
                    t.wait_for.clear();
                }
                to_run.push(t);
            }
        }
        let sub_deps = Deps::new(config, to_run).await?;
        let sub_deps = Arc::new(Mutex::new(sub_deps));
        // Pump subgraph into scheduler and signal completion via oneshot when done
        let (done_tx, mut done_rx) = oneshot::channel::<()>();
        let task_env_directives: Vec<EnvDirective> =
            task_env.iter().cloned().map(Into::into).collect();
        {
            let sub_deps_clone = sub_deps.clone();
            let sched_tx = sched_tx.clone();
            // forward initial leaves synchronously
            {
                let mut rx = sub_deps_clone.lock().await.subscribe();
                let mut any = false;
                loop {
                    match rx.try_recv() {
                        Ok(Some(task)) => {
                            any = true;
                            let task = task.derive_env(&task_env_directives);
                            trace!("inject initial leaf: {} {}", task.name, task.args.join(" "));
                            let _ = sched_tx.send((task, sub_deps_clone.clone()));
                        }
                        Ok(None) => {
                            trace!("inject initial done");
                            break;
                        }
                        Err(tokio::sync::mpsc::error::TryRecvError::Empty) => {
                            break;
                        }
                        Err(tokio::sync::mpsc::error::TryRecvError::Disconnected) => {
                            break;
                        }
                    }
                }
                if !any {
                    trace!("inject had no initial leaves");
                }
            }
            // then forward remaining leaves asynchronously
            tokio::spawn(async move {
                let mut rx = sub_deps_clone.lock().await.subscribe();
                while let Some(msg) = rx.recv().await {
                    match msg {
                        Some(task) => {
                            trace!(
                                "inject leaf scheduled: {} {}",
                                task.name,
                                task.args.join(" ")
                            );
                            let task = task.derive_env(&task_env_directives);
                            let _ = sched_tx.send((task, sub_deps_clone.clone()));
                        }
                        None => {
                            // None means the sub-graph is fully drained
                            let _ = done_tx.send(());
                            trace!("inject complete");
                            break;
                        }
                    }
                }
            });
        }
        // Wait for completion with a check for early stopping
        loop {
            // Check if we should stop early due to failure
            if self.is_stopping() && !self.continue_on_error {
                trace!("inject_and_wait: stopping early due to failure");
                // Clean up the dependency graph to ensure completion
                let mut deps = sub_deps.lock().await;
                let tasks_to_remove: Vec<Task> = deps.all().cloned().collect();
                for task in tasks_to_remove {
                    deps.remove(&task);
                }
                drop(deps);
                // Give a short time for the spawned task to finish cleanly
                let _ = tokio::time::timeout(Duration::from_millis(100), done_rx).await;
                return Err(eyre!("task sequence aborted due to failure"));
            }
            // Try to receive the done signal with a short timeout
            match tokio::time::timeout(Duration::from_millis(100), &mut done_rx).await {
                Ok(Ok(())) => {
                    trace!("inject_and_wait: received done signal");
                    break;
                }
                Ok(Err(e)) => {
                    return Err(eyre!(e));
                }
                Err(_) => {
                    // Timeout, check again if we should stop
                    continue;
                }
            }
        }
        // Final check if we failed during the execution
        if self.is_stopping() && !self.continue_on_error {
            return Err(eyre!("task sequence aborted due to failure"));
        }
        Ok(())
    }
    /// Execute one rendered inline script.
    ///
    /// Scripts that begin with a shebang are written to a temp file, made
    /// executable, and run directly (with an ETXTBUSY retry); everything else
    /// is passed to the task's shell. The command line is echoed (redacted)
    /// unless the task is quiet.
    async fn exec_script(
        &self,
        script: &str,
        args: &[String],
        task: &Task,
        env: &BTreeMap<String, String>,
        prefix: &str,
    ) -> Result<()> {
        let config = Config::get().await?;
        let script = script.trim_start();
        let cmd = format!("$ {script} {args}", args = args.join(" ")).to_string();
        if !self.quiet(Some(task)) {
            let msg = style::ebold(trunc(prefix, config.redact(cmd).trim()))
                .bright()
                .to_string();
            self.eprint(task, prefix, &msg)
        }
        if script.starts_with("#!") {
            // shebang: run via a temp executable file; tempdir lives until drop
            let dir = tempfile::tempdir()?;
            let file = dir.path().join("script");
            tokio::fs::write(&file, script.as_bytes()).await?;
            file::make_executable(&file)?;
            self.exec_with_text_file_busy_retry(&file, args, task, env, prefix)
                .await
        } else {
            let (program, args) = self.get_cmd_program_and_args(script, task, args)?;
            self.exec_program(&program, &args, task, env, prefix).await
        }
    }
fn get_file_program_and_args(
&self,
file: &Path,
task: &Task,
args: &[String],
) -> Result<(String, Vec<String>)> {
let display = file.display().to_string();
if is_executable(file) && !Settings::get().use_file_shell_for_executable_tasks {
if cfg!(windows) && file.extension().is_some_and(|e| e == "ps1") {
let args = vec!["-File".to_string(), display]
.into_iter()
.chain(args.iter().cloned())
.collect_vec();
return Ok(("pwsh".to_string(), args));
}
return Ok((display, args.to_vec()));
}
let shell = task
.shell()
.unwrap_or(Settings::get().default_file_shell()?);
trace!("using shell: {}", shell.join(" "));
let mut full_args = shell.clone();
full_args.push(display);
if !args.is_empty() {
full_args.extend(args.iter().cloned());
}
Ok((shell[0].clone(), full_args[1..].to_vec()))
}
fn get_cmd_program_and_args(
&self,
script: &str,
task: &Task,
args: &[String],
) -> Result<(String, Vec<String>)> {
let shell = task.shell().unwrap_or(self.clone_default_inline_shell()?);
trace!("using shell: {}", shell.join(" "));
let mut full_args = shell.clone();
#[cfg(windows)]
{
full_args.push(script.to_string());
full_args.extend(args.iter().cloned());
}
#[cfg(unix)]
{
let mut script = script.to_string();
if !args.is_empty() {
script = format!("{script} {}", shell_words::join(args));
}
full_args.push(script);
}
Ok((full_args[0].clone(), full_args[1..].to_vec()))
}
fn clone_default_inline_shell(&self) -> Result<Vec<String>> {
if let Some(shell) = &self.shell {
Ok(shell_words::split(shell)?)
} else {
Settings::get().default_inline_shell()
}
}
    /// Execute a file-based task: parse its usage spec into env vars, echo the
    /// (redacted) command line unless quiet, then run the file with the task's
    /// args.
    async fn exec_file(
        &self,
        config: &Arc<Config>,
        file: &Path,
        task: &Task,
        env: &BTreeMap<String, String>,
        prefix: &str,
    ) -> Result<()> {
        let mut env = env.clone();
        let command = file.to_string_lossy().to_string();
        let args = task.args.iter().cloned().collect_vec();
        // argv[0] is the file itself, followed by the task args
        let get_args = || once(command.clone()).chain(args.clone()).collect_vec();
        self.parse_usage_spec_and_init_env(config, task, &mut env, get_args)
            .await?;
        if !self.quiet(Some(task)) {
            let cmd = format!("{} {}", display_path(file), args.join(" "))
                .trim()
                .to_string();
            let cmd = style::ebold(format!("$ {cmd}")).bright().to_string();
            let cmd = trunc(prefix, config.redact(cmd).trim());
            self.eprint(task, prefix, &cmd);
        }
        self.exec(file, &args, task, &env, prefix).await
    }
    /// Resolve how a file should be invoked (direct vs via shell) and run it.
    async fn exec(
        &self,
        file: &Path,
        args: &[String],
        task: &Task,
        env: &BTreeMap<String, String>,
        prefix: &str,
    ) -> Result<()> {
        let (program, args) = self.get_file_program_and_args(file, task, args)?;
        self.exec_program(&program, &args, task, env, prefix).await
    }
    /// Run a freshly-written script file, retrying on ETXTBUSY.
    ///
    /// On Unix, executing a file immediately after writing it can race with
    /// the closing writer and fail with "Text file busy"; retry up to 3 times
    /// with exponential backoff (50/100/200ms). Other errors propagate.
    async fn exec_with_text_file_busy_retry(
        &self,
        file: &Path,
        args: &[String],
        task: &Task,
        env: &BTreeMap<String, String>,
        prefix: &str,
    ) -> Result<()> {
        const ETXTBUSY_RETRIES: usize = 3;
        const ETXTBUSY_SLEEP_MS: u64 = 50;
        let mut attempt = 0;
        loop {
            match self.exec(file, args, task, env, prefix).await {
                Ok(()) => break Ok(()),
                Err(err) if Self::is_text_file_busy(&err) && attempt < ETXTBUSY_RETRIES => {
                    attempt += 1;
                    trace!(
                        "retrying execution of {} after ETXTBUSY (attempt {}/{})",
                        display_path(file),
                        attempt,
                        ETXTBUSY_RETRIES
                    );
                    // Exponential backoff: 50ms, 100ms, 200ms
                    let sleep_ms = ETXTBUSY_SLEEP_MS * (1 << (attempt - 1));
                    tokio::time::sleep(Duration::from_millis(sleep_ms)).await;
                }
                Err(err) => break Err(err),
            }
        }
    }
    /// Spawn the resolved program for a task and wire its stdout/stderr
    /// according to the active output mode (prefix, keep-order, replacing,
    /// timed, silent, quiet/interleave), honoring per-task silence flags and
    /// redactions. Skips actual execution in dry-run mode.
    async fn exec_program(
        &self,
        program: &str,
        args: &[String],
        task: &Task,
        env: &BTreeMap<String, String>,
        prefix: &str,
    ) -> Result<()> {
        let config = Config::get().await?;
        let program = program.to_executable();
        let redactions = config.redactions();
        let raw = self.raw(Some(task));
        let mut cmd = CmdLineRunner::new(program.clone())
            .args(args)
            .envs(env)
            .redact(redactions.deref().clone())
            .raw(raw);
        // raw mode passes streams through, so redaction cannot be applied
        if raw && !redactions.is_empty() {
            hint!(
                "raw_redactions",
                "--raw will prevent mise from being able to use redactions",
                ""
            );
        }
        let output = self.output(Some(task));
        cmd.with_pass_signals();
        match output {
            // each line is printed immediately with the task's prefix
            TaskOutput::Prefix => {
                if !task.silent.suppresses_stdout() {
                    cmd = cmd.with_on_stdout(|line| {
                        if console::colors_enabled() {
                            prefix_println!(prefix, "{line}\x1b[0m");
                        } else {
                            prefix_println!(prefix, "{line}");
                        }
                    });
                } else {
                    cmd = cmd.stdout(Stdio::null());
                }
                if !task.silent.suppresses_stderr() {
                    cmd = cmd.with_on_stderr(|line| {
                        if console::colors_enabled() {
                            self.eprint(task, prefix, &format!("{line}\x1b[0m"));
                        } else {
                            self.eprint(task, prefix, &line);
                        }
                    });
                } else {
                    cmd = cmd.stderr(Stdio::null());
                }
            }
            // lines are buffered per task and flushed later in task order
            TaskOutput::KeepOrder => {
                if !task.silent.suppresses_stdout() {
                    cmd = cmd.with_on_stdout(|line| {
                        let mut map = self.output_handler.keep_order_output.lock().unwrap();
                        if !map.contains_key(task) {
                            map.insert(task.clone(), Default::default());
                        }
                        if let Some(entry) = map.get_mut(task) {
                            entry.0.push((prefix.to_string(), line));
                        }
                    });
                } else {
                    cmd = cmd.stdout(Stdio::null());
                }
                if !task.silent.suppresses_stderr() {
                    cmd = cmd.with_on_stderr(|line| {
                        let mut map = self.output_handler.keep_order_output.lock().unwrap();
                        if !map.contains_key(task) {
                            map.insert(task.clone(), Default::default());
                        }
                        if let Some(entry) = map.get_mut(task) {
                            entry.1.push((prefix.to_string(), line));
                        }
                    });
                } else {
                    cmd = cmd.stderr(Stdio::null());
                }
            }
            TaskOutput::Replacing => {
                // Replacing mode shows a progress indicator unless both streams are suppressed
                if task.silent.suppresses_stdout() {
                    cmd = cmd.stdout(Stdio::null());
                }
                if task.silent.suppresses_stderr() {
                    cmd = cmd.stderr(Stdio::null());
                }
                // Show progress indicator except when both streams are fully suppressed
                if !task.silent.suppresses_both() {
                    let pr = self.output_handler.task_prs.get(task).unwrap().clone();
                    cmd = cmd.with_pr_arc(pr);
                }
            }
            // latest stdout line per prefix is tracked with a timestamp
            TaskOutput::Timed => {
                if !task.silent.suppresses_stdout() {
                    let timed_outputs = self.output_handler.timed_outputs.clone();
                    cmd = cmd.with_on_stdout(move |line| {
                        timed_outputs
                            .lock()
                            .unwrap()
                            .insert(prefix.to_string(), (SystemTime::now(), line));
                    });
                } else {
                    cmd = cmd.stdout(Stdio::null());
                }
                if !task.silent.suppresses_stderr() {
                    cmd = cmd.with_on_stderr(|line| {
                        if console::colors_enabled() {
                            self.eprint(task, prefix, &format!("{line}\x1b[0m"));
                        } else {
                            self.eprint(task, prefix, &line);
                        }
                    });
                } else {
                    cmd = cmd.stderr(Stdio::null());
                }
            }
            TaskOutput::Silent => {
                cmd = cmd.stdout(Stdio::null()).stderr(Stdio::null());
            }
            TaskOutput::Quiet | TaskOutput::Interleave => {
                // streams are inherited only when redaction is impossible or
                // unnecessary; otherwise CmdLineRunner's defaults apply
                if raw || redactions.is_empty() {
                    cmd = cmd.stdin(Stdio::inherit());
                    if !task.silent.suppresses_stdout() {
                        cmd = cmd.stdout(Stdio::inherit());
                    } else {
                        cmd = cmd.stdout(Stdio::null());
                    }
                    if !task.silent.suppresses_stderr() {
                        cmd = cmd.stderr(Stdio::inherit());
                    } else {
                        cmd = cmd.stderr(Stdio::null());
                    }
                }
            }
        }
        let dir = task_cwd(task, &config).await?;
        // warn but do not fail; the spawn itself will surface a hard error
        if !dir.exists() {
            self.eprint(
                task,
                prefix,
                &format!(
                    "{} task directory does not exist: {}",
                    style::eyellow("WARN"),
                    display_path(&dir)
                ),
            );
        }
        cmd = cmd.current_dir(dir);
        if self.dry_run {
            return Ok(());
        }
        cmd.execute()?;
        trace!("{prefix} exited successfully");
        Ok(())
    }
#[cfg(unix)]
fn is_text_file_busy(err: &Report) -> bool {
err.chain().any(|cause| {
if let Some(io_err) = cause.downcast_ref::<std::io::Error>()
&& let Some(code) = io_err.raw_os_error()
{
// ETXTBUSY (Text file busy) on Unix
return code == Errno::ETXTBSY as i32;
}
false
})
}
#[cfg(not(unix))]
#[allow(unused_variables)]
fn is_text_file_busy(err: &Report) -> bool {
false
}
    /// Parse the task's usage spec (declared args/flags) against the actual
    /// argv and export the parsed values as `usage_*`-style environment
    /// variables into `env`. No-op when the spec declares nothing.
    async fn parse_usage_spec_and_init_env(
        &self,
        config: &Arc<Config>,
        task: &Task,
        env: &mut BTreeMap<String, String>,
        get_args: impl Fn() -> Vec<String>,
    ) -> Result<()> {
        let (spec, _) = task.parse_usage_spec(config, self.cd.clone(), env).await?;
        if !spec.cmd.args.is_empty() || !spec.cmd.flags.is_empty() {
            // args are built lazily — only needed when a spec exists
            let args: Vec<String> = get_args();
            trace!("Parsing usage spec for {:?}", args);
            let po = usage::parse(&spec, &args).map_err(|err| eyre!(err))?;
            for (k, v) in po.as_env() {
                trace!("Adding key {} value {} in env", k, v);
                env.insert(k, v);
            }
        } else {
            trace!("Usage spec has no args or flags");
        }
        Ok(())
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_sources.rs | src/task/task_sources.rs | use crate::dirs;
use crate::task::Task;
use serde::ser::{SerializeMap, SerializeSeq};
use serde::{Deserialize, Deserializer, Serialize};
use std::hash::{DefaultHasher, Hash, Hasher};
#[derive(Debug, Clone, Eq, PartialEq, strum::EnumIs)]
pub enum TaskOutputs {
    /// Explicit list of output file paths declared by the task
    Files(Vec<String>),
    /// Output path is derived automatically from a hash of the task
    Auto,
}
impl Default for TaskOutputs {
fn default() -> Self {
TaskOutputs::Files(vec![])
}
}
impl TaskOutputs {
    /// True only for an empty explicit file list; `Auto` always has a path.
    pub fn is_empty(&self) -> bool {
        match self {
            TaskOutputs::Files(files) => files.is_empty(),
            TaskOutputs::Auto => false,
        }
    }
    /// The concrete output paths for `task`: the declared files, or a single
    /// auto-generated state path for `Auto`.
    pub fn paths(&self, task: &Task) -> Vec<String> {
        match self {
            TaskOutputs::Files(files) => files.clone(),
            TaskOutputs::Auto => vec![self.auto_path(task)],
        }
    }
    /// Stable per-task path under the state dir, keyed by a hash of the task
    /// and its config source so distinct tasks get distinct files.
    fn auto_path(&self, task: &Task) -> String {
        let mut hasher = DefaultHasher::new();
        task.hash(&mut hasher);
        task.config_source.hash(&mut hasher);
        let hash = format!("{:x}", hasher.finish());
        dirs::STATE
            .join("task-auto-outputs")
            .join(&hash)
            .to_string_lossy()
            .to_string()
    }
    /// Render tera templates in each declared file path; `Auto` has nothing
    /// to render.
    pub fn render(&mut self, tera: &mut tera::Tera, ctx: &tera::Context) -> eyre::Result<()> {
        match self {
            TaskOutputs::Files(files) => {
                for file in files.iter_mut() {
                    *file = tera.render_str(file, ctx)?;
                }
            }
            TaskOutputs::Auto => {}
        }
        Ok(())
    }
}
impl From<&toml::Value> for TaskOutputs {
    /// Convert a raw `outputs` TOML value:
    /// - string -> single file
    /// - array -> list of files (all elements must be strings)
    /// - table -> `{ auto = true }` selects `Auto`, otherwise default
    /// - anything else -> default (empty file list)
    fn from(value: &toml::Value) -> Self {
        match value {
            toml::Value::String(file) => TaskOutputs::Files(vec![file.to_string()]),
            toml::Value::Array(files) => TaskOutputs::Files(
                files
                    .iter()
                    // state the invariant instead of a bare unwrap so a bad
                    // config produces an actionable panic message
                    .map(|v| {
                        v.as_str()
                            .expect("task outputs array must contain only strings")
                            .to_string()
                    })
                    .collect(),
            ),
            toml::Value::Table(table) => {
                let auto = table
                    .get("auto")
                    .and_then(|v| v.as_bool())
                    .unwrap_or_default();
                if auto {
                    TaskOutputs::Auto
                } else {
                    TaskOutputs::default()
                }
            }
            _ => TaskOutputs::default(),
        }
    }
}
impl<'de> Deserialize<'de> for TaskOutputs {
    /// Accepts the same shapes as the TOML conversion: a single string, a
    /// sequence of strings, or a `{ auto: bool }` map.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct TaskOutputsVisitor;
        impl<'de> serde::de::Visitor<'de> for TaskOutputsVisitor {
            type Value = TaskOutputs;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a string, a sequence of strings, or a map")
            }
            // "file" -> Files(["file"])
            fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
                Ok(TaskOutputs::Files(vec![value.to_string()]))
            }
            // ["a", "b"] -> Files(["a", "b"])
            fn visit_seq<A: serde::de::SeqAccess<'de>>(
                self,
                mut seq: A,
            ) -> Result<Self::Value, A::Error> {
                let mut files = vec![];
                while let Some(file) = seq.next_element()? {
                    files.push(file);
                }
                Ok(TaskOutputs::Files(files))
            }
            // { auto: true } -> Auto; { auto: false } / {} -> default;
            // any other key is rejected
            fn visit_map<A: serde::de::MapAccess<'de>>(
                self,
                mut map: A,
            ) -> Result<Self::Value, A::Error> {
                if let Some(key) = map.next_key::<String>()? {
                    if key == "auto" {
                        if map.next_value::<bool>()? {
                            Ok(TaskOutputs::Auto)
                        } else {
                            Ok(TaskOutputs::default())
                        }
                    } else {
                        Err(serde::de::Error::custom("Invalid TaskOutputs map"))
                    }
                } else {
                    Ok(TaskOutputs::default())
                }
            }
        }
        deserializer.deserialize_any(TaskOutputsVisitor)
    }
}
impl Serialize for TaskOutputs {
    /// Mirrors deserialization: `Files` serializes as a sequence of strings,
    /// `Auto` as the map `{ "auto": true }`.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        match self {
            TaskOutputs::Files(files) => {
                let mut seq = serializer.serialize_seq(Some(files.len()))?;
                for file in files {
                    seq.serialize_element(file)?;
                }
                seq.end()
            }
            TaskOutputs::Auto => {
                let mut m = serializer.serialize_map(Some(1))?;
                m.serialize_entry("auto", &true)?;
                m.end()
            }
        }
    }
}
// #[cfg(test)] keeps the test module (and its serde_json/toml usage) out of
// non-test builds
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    #[test]
    fn test_task_outputs_from_toml() {
        let value: toml::Table = toml::from_str("outputs = \"file1\"").unwrap();
        let value = value.get("outputs").unwrap();
        let outputs = TaskOutputs::from(value);
        assert_eq!(outputs, TaskOutputs::Files(vec!["file1".to_string()]));
        let value: toml::Table = toml::from_str("outputs = [\"file1\"]").unwrap();
        let value = value.get("outputs").unwrap();
        let outputs = TaskOutputs::from(value);
        assert_eq!(outputs, TaskOutputs::Files(vec!["file1".to_string()]));
        let value: toml::Table = toml::from_str("outputs = { auto = true }").unwrap();
        let value = value.get("outputs").unwrap();
        let outputs = TaskOutputs::from(value);
        assert_eq!(outputs, TaskOutputs::Auto);
    }
    #[test]
    fn test_task_outputs_serialize() {
        let outputs = TaskOutputs::Files(vec!["file1".to_string()]);
        let serialized = serde_json::to_string(&outputs).unwrap();
        assert_eq!(serialized, "[\"file1\"]");
        let outputs = TaskOutputs::Auto;
        let serialized = serde_json::to_string(&outputs).unwrap();
        assert_eq!(serialized, "{\"auto\":true}");
    }
    #[test]
    fn test_task_outputs_deserialize() {
        let deserialized: TaskOutputs = serde_json::from_str("\"file1\"").unwrap();
        assert_eq!(deserialized, TaskOutputs::Files(vec!["file1".to_string()]));
        let deserialized: TaskOutputs = serde_json::from_str("[\"file1\"]").unwrap();
        assert_eq!(deserialized, TaskOutputs::Files(vec!["file1".to_string()]));
        let deserialized: TaskOutputs = serde_json::from_str("{ \"auto\": true }").unwrap();
        assert_eq!(deserialized, TaskOutputs::Auto);
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_tool_installer.rs | src/task/task_tool_installer.rs | use crate::cli::args::ToolArg;
use crate::config::{Config, Settings};
use crate::task::Deps;
use crate::task::task_context_builder::TaskContextBuilder;
use crate::task::task_helpers::canonicalize_path;
use crate::toolset::{InstallOptions, ToolSource, Toolset};
use eyre::Result;
use std::sync::Arc;
/// Handles collection and installation of tools required by tasks
pub struct TaskToolInstaller<'a> {
context_builder: &'a TaskContextBuilder,
cli_tools: &'a [ToolArg],
}
impl<'a> TaskToolInstaller<'a> {
pub fn new(context_builder: &'a TaskContextBuilder, cli_tools: &'a [ToolArg]) -> Self {
Self {
context_builder,
cli_tools,
}
}
/// Collect and install all tools needed by tasks
pub async fn install_tools(&self, config: &mut Arc<Config>, tasks: &Deps) -> Result<()> {
let mut all_tools = self.cli_tools.to_vec();
let mut all_tool_requests = vec![];
let all_tasks: Vec<_> = tasks.all().collect();
trace!("Collecting tools from {} tasks", all_tasks.len());
// Collect tools from tasks
for t in &all_tasks {
// Collect tools from task.tools (task-level tool overrides)
for (k, v) in &t.tools {
all_tools.push(format!("{k}@{v}").parse()?);
}
// Collect tools from monorepo task config files
if let Some(task_cf) = t.cf(config) {
let tool_requests = self
.collect_tools_from_config_file(config, task_cf.clone(), &t.name)
.await?;
all_tool_requests.extend(tool_requests);
}
}
// Build and install toolset
let toolset = self
.build_toolset(config, all_tools, all_tool_requests)
.await?;
self.install_toolset(config, toolset).await?;
Ok(())
}
/// Collect tools from a task's config file with caching
async fn collect_tools_from_config_file(
&self,
_config: &Arc<Config>,
task_cf: Arc<dyn crate::config::config_file::ConfigFile>,
task_name: &str,
) -> Result<Vec<crate::toolset::ToolRequest>> {
let config_path = canonicalize_path(task_cf.get_path());
// Check cache first
let cache = self
.context_builder
.tool_request_set_cache()
.read()
.expect("tool_request_set_cache RwLock poisoned");
let tool_request_set = if let Some(cached) = cache.get(&config_path) {
trace!(
"Using cached tool request set from {}",
config_path.display()
);
Arc::clone(cached)
} else {
drop(cache); // Release read lock before write
match task_cf.to_tool_request_set() {
Ok(trs) => {
let trs = Arc::new(trs);
let mut cache = self
.context_builder
.tool_request_set_cache()
.write()
.expect("tool_request_set_cache RwLock poisoned");
cache.entry(config_path.clone()).or_insert_with(|| {
trace!("Cached tool request set to {}", config_path.display());
Arc::clone(&trs)
});
trs
}
Err(e) => {
warn!(
"Failed to parse tools from {} for task {}: {}",
task_cf.get_path().display(),
task_name,
e
);
return Ok(vec![]);
}
}
};
trace!(
"Found {} tools in config file for task {}",
tool_request_set.tools.len(),
task_name
);
// Extract all tool requests from the tool request set
let mut tool_requests = vec![];
for (_, reqs) in tool_request_set.tools.iter() {
tool_requests.extend(reqs.iter().cloned());
}
Ok(tool_requests)
}
/// Build a toolset from CLI tools and collected tool requests
async fn build_toolset(
&self,
config: &Arc<Config>,
all_tools: Vec<ToolArg>,
all_tool_requests: Vec<crate::toolset::ToolRequest>,
) -> Result<Toolset> {
let source = ToolSource::Argument;
let mut ts = Toolset::new(source.clone());
// Add tools from CLI args and task.tools
for tool_arg in all_tools {
if let Some(tvr) = tool_arg.tvr {
ts.add_version(tvr);
}
}
// Add tools from config files
for tr in all_tool_requests {
trace!("Adding tool from config: {}", tr);
ts.add_version(tr);
}
ts.resolve(config).await?;
Ok(ts)
}
/// Install missing versions from the toolset
async fn install_toolset(&self, config: &mut Arc<Config>, mut ts: Toolset) -> Result<()> {
ts.install_missing_versions(
config,
&InstallOptions {
missing_args_only: !Settings::get().task_run_auto_install,
skip_auto_install: !Settings::get().task_run_auto_install
|| !Settings::get().auto_install,
..Default::default()
},
)
.await?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_task_tool_installer_new() {
let context_builder = TaskContextBuilder::new();
let cli_tools: Vec<ToolArg> = vec![];
let installer = TaskToolInstaller::new(&context_builder, &cli_tools);
assert_eq!(installer.cli_tools.len(), 0);
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_file_providers/remote_task_http.rs | src/task/task_file_providers/remote_task_http.rs | use std::path::PathBuf;
use async_trait::async_trait;
use crate::{Result, dirs, env, file, hash, http::HTTP};
use super::TaskFileProvider;
#[derive(Debug)]
pub struct RemoteTaskHttpBuilder {
store_path: PathBuf,
use_cache: bool,
}
impl RemoteTaskHttpBuilder {
pub fn new() -> Self {
Self {
store_path: env::temp_dir(),
use_cache: false,
}
}
pub fn with_cache(mut self, use_cache: bool) -> Self {
if use_cache {
self.store_path = dirs::CACHE.join("remote-http-tasks-cache");
self.use_cache = true;
}
self
}
pub fn build(self) -> RemoteTaskHttp {
RemoteTaskHttp {
storage_path: self.store_path,
is_cached: self.use_cache,
}
}
}
#[derive(Debug)]
pub struct RemoteTaskHttp {
storage_path: PathBuf,
is_cached: bool,
}
impl RemoteTaskHttp {
fn get_cache_key(&self, file: &str) -> String {
hash::hash_sha256_to_str(file)
}
async fn download_file(&self, file: &str, destination: &PathBuf) -> Result<()> {
trace!("Downloading file: {}", file);
HTTP.download_file(file, destination, None).await?;
file::make_executable(destination)?;
Ok(())
}
}
#[async_trait]
impl TaskFileProvider for RemoteTaskHttp {
fn is_match(&self, file: &str) -> bool {
let url = url::Url::parse(file);
// Check if the URL is valid and the scheme is http or https
// and the path is not empty
// and the path is not a directory
url.is_ok_and(|url| {
(url.scheme() == "http" || url.scheme() == "https")
&& url.path().len() > 1
&& !url.path().ends_with('/')
})
}
async fn get_local_path(&self, file: &str) -> Result<PathBuf> {
let cache_key = self.get_cache_key(file);
let destination = self.storage_path.join(&cache_key);
match self.is_cached {
true => {
trace!("Cache mode enabled");
if destination.exists() {
debug!("Using cached file: {:?}", destination);
return Ok(destination);
}
}
false => {
trace!("Cache mode disabled");
if destination.exists() {
file::remove_file(&destination)?;
}
}
}
self.download_file(file, &destination).await?;
Ok(destination)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_is_match() {
let provider = RemoteTaskHttpBuilder::new().build();
// Positive cases
assert!(provider.is_match("http://myhost.com/test.txt"));
assert!(provider.is_match("https://myhost.com/test.txt"));
assert!(provider.is_match("https://mydomain.com/myfile.py"));
assert!(provider.is_match("https://subdomain.mydomain.com/myfile.sh"));
assert!(provider.is_match("https://subdomain.mydomain.com/myfile.sh?query=1"));
// Negative cases
assert!(!provider.is_match("https://myhost.com/js/"));
assert!(!provider.is_match("https://myhost.com"));
assert!(!provider.is_match("https://myhost.com/"));
}
#[tokio::test]
async fn test_http_remote_task_get_local_path_without_cache() {
let paths = vec![
"/myfile.py",
"/subpath/myfile.sh",
"/myfile.sh?query=1&sdfsdf=2",
];
let mut server = mockito::Server::new_async().await;
for request_path in paths {
let mocked_server: mockito::Mock = server
.mock("GET", request_path)
.with_status(200)
.with_body("Random content")
.expect(2)
.create_async()
.await;
let provider = RemoteTaskHttpBuilder::new().build();
let request_url = format!("{}{}", server.url(), request_path);
let cache_key = provider.get_cache_key(&request_url);
for _ in 0..2 {
let local_path = provider.get_local_path(&request_url).await.unwrap();
assert!(local_path.exists());
assert!(local_path.is_file());
assert!(local_path.ends_with(&cache_key));
}
mocked_server.assert();
}
}
#[tokio::test]
async fn test_http_remote_task_get_local_path_with_cache() {
let paths = vec![
"/myfile.py",
"/subpath/myfile.sh",
"/myfile.sh?query=1&sdfsdf=2",
];
let mut server = mockito::Server::new_async().await;
for request_path in paths {
let mocked_server = server
.mock("GET", request_path)
.with_status(200)
.with_body("Random content")
.expect(1)
.create_async()
.await;
let provider = RemoteTaskHttpBuilder::new().with_cache(true).build();
let request_url = format!("{}{}", server.url(), request_path);
let cache_key = provider.get_cache_key(&request_url);
for _ in 0..2 {
let path = provider.get_local_path(&request_url).await.unwrap();
assert!(path.exists());
assert!(path.is_file());
assert!(path.ends_with(&cache_key));
}
mocked_server.assert();
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_file_providers/remote_task_git.rs | src/task/task_file_providers/remote_task_git.rs | use crate::Result;
use std::path::PathBuf;
use async_trait::async_trait;
use eyre::eyre;
use regex::Regex;
use crate::{
dirs, env,
git::{self, CloneOptions},
hash,
};
use super::TaskFileProvider;
#[derive(Debug)]
pub struct RemoteTaskGitBuilder {
store_path: PathBuf,
use_cache: bool,
}
impl RemoteTaskGitBuilder {
pub fn new() -> Self {
Self {
store_path: env::temp_dir(),
use_cache: false,
}
}
pub fn with_cache(mut self, use_cache: bool) -> Self {
if use_cache {
self.store_path = dirs::CACHE.join("remote-git-tasks-cache");
self.use_cache = true;
}
self
}
pub fn build(self) -> RemoteTaskGit {
RemoteTaskGit {
storage_path: self.store_path,
is_cached: self.use_cache,
}
}
}
#[derive(Debug)]
pub struct RemoteTaskGit {
storage_path: PathBuf,
is_cached: bool,
}
#[derive(Debug, Clone)]
struct GitRepoStructure {
url_without_path: String,
path: String,
branch: Option<String>,
}
impl GitRepoStructure {
pub fn new(url_without_path: &str, path: &str, branch: Option<String>) -> Self {
Self {
url_without_path: url_without_path.to_string(),
path: path.to_string(),
branch,
}
}
}
impl RemoteTaskGit {
fn get_cache_key(&self, repo_structure: &GitRepoStructure) -> String {
let key = format!(
"{}{}",
&repo_structure.url_without_path,
&repo_structure.branch.to_owned().unwrap_or("".to_string())
);
hash::hash_sha256_to_str(&key)
}
fn get_repo_structure(&self, file: &str) -> GitRepoStructure {
if self.detect_ssh(file).is_ok() {
return self.detect_ssh(file).unwrap();
}
self.detect_https(file).unwrap()
}
fn detect_ssh(&self, file: &str) -> Result<GitRepoStructure> {
let re = Regex::new(r"^git::(?P<url>ssh://((?P<user>[^@]+)@)(?P<host>[^/]+)/(?P<repo>.+)\.git)//(?P<path>[^?]+)(\?ref=(?P<branch>[^?]+))?$").unwrap();
if !re.is_match(file) {
return Err(eyre!("Invalid SSH URL"));
}
let captures = re.captures(file).unwrap();
let url_without_path = captures.name("url").unwrap().as_str();
let path = captures.name("path").unwrap().as_str();
let branch: Option<String> = captures.name("branch").map(|m| m.as_str().to_string());
Ok(GitRepoStructure::new(url_without_path, path, branch))
}
fn detect_https(&self, file: &str) -> Result<GitRepoStructure> {
let re = Regex::new(r"^git::(?P<url>https?://(?P<host>[^/]+)/(?P<repo>.+)\.git)//(?P<path>[^?]+)(\?ref=(?P<branch>[^?]+))?$").unwrap();
if !re.is_match(file) {
return Err(eyre!("Invalid HTTPS URL"));
}
let captures = re.captures(file).unwrap();
let url_without_path = captures.name("url").unwrap().as_str();
let path = captures.name("path").unwrap().as_str();
let branch: Option<String> = captures.name("branch").map(|m| m.as_str().to_string());
Ok(GitRepoStructure::new(url_without_path, path, branch))
}
}
#[async_trait]
impl TaskFileProvider for RemoteTaskGit {
fn is_match(&self, file: &str) -> bool {
if self.detect_ssh(file).is_ok() {
return true;
}
if self.detect_https(file).is_ok() {
return true;
}
false
}
async fn get_local_path(&self, file: &str) -> Result<PathBuf> {
let repo_structure = self.get_repo_structure(file);
let cache_key = self.get_cache_key(&repo_structure);
let destination = self.storage_path.join(&cache_key);
let repo_file_path = repo_structure.path.clone();
let full_path = destination.join(&repo_file_path);
debug!("Repo structure: {:?}", repo_structure);
match self.is_cached {
true => {
trace!("Cache mode enabled");
if full_path.exists() {
debug!("Using cached file: {:?}", full_path);
return Ok(full_path);
}
}
false => {
trace!("Cache mode disabled");
if full_path.exists() {
crate::file::remove_all(&destination)?;
}
}
}
let git_repo = git::Git::new(destination);
let mut clone_options = CloneOptions::default();
if let Some(branch) = &repo_structure.branch {
trace!("Use specific branch {}", branch);
clone_options = clone_options.branch(branch);
}
git_repo.clone(repo_structure.url_without_path.as_str(), clone_options)?;
Ok(full_path)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_valid_detect_ssh() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases = vec![
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::ssh://git@github.com/myorg/example.git//terraform/myfile?ref=master",
"git::ssh://git@git.acme.com:1222/myorg/example.git//terraform/myfile?ref=master",
"git::ssh://git@myserver.com/example.git//terraform/myfile",
"git::ssh://user@myserver.com/example.git//myfile?ref=master",
];
for url in test_cases {
let result = remote_task_git.detect_ssh(url);
assert!(result.is_ok());
}
}
#[test]
fn test_invalid_detect_ssh() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases = vec![
"git::ssh://myserver.com/example.git//myfile?ref=master",
"git::ssh://user@myserver.com/example.git?ref=master",
"git::ssh://user@myserver.com/example.git",
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
];
for url in test_cases {
let result = remote_task_git.detect_ssh(url);
assert!(result.is_err());
}
}
#[test]
fn test_valid_detect_https() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases = vec![
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::https://github.com/myorg/example.git//terraform/myfile?ref=master",
"git::https://git.acme.com:8080/myorg/example.git//terraform/myfile?ref=master",
"git::https://myserver.com/example.git//terraform/myfile",
"git::https://myserver.com/example.git//myfile?ref=master",
"git::http://localhost:8080/repo.git//xtasks/lint/ripgrep", // HTTP support for local testing
];
for url in test_cases {
let result = remote_task_git.detect_https(url);
assert!(result.is_ok());
}
}
#[test]
fn test_invalid_detect_https() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases = vec![
"git::https://myserver.com/example.git?ref=master",
"git::https://user@myserver.com/example.git",
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
];
for url in test_cases {
let result = remote_task_git.detect_https(url);
assert!(result.is_err());
}
}
#[test]
fn test_extract_ssh_url_information() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases: Vec<(&str, &str, &str, Option<String>)> = vec![
(
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
"ssh://git@github.com/myorg/example.git",
"myfile",
Some("v1.0.0".to_string()),
),
(
"git::ssh://git@github.com/myorg/example.git//terraform/myfile?ref=master",
"ssh://git@github.com/myorg/example.git",
"terraform/myfile",
Some("master".to_string()),
),
(
"git::ssh://git@myserver.com/example.git//terraform/myfile",
"ssh://git@myserver.com/example.git",
"terraform/myfile",
None,
),
];
for (url, expected_repo, expected_path, expected_branch) in test_cases {
let repo = remote_task_git.detect_ssh(url).unwrap();
assert_eq!(expected_repo, repo.url_without_path);
assert_eq!(expected_path, repo.path);
assert_eq!(expected_branch, repo.branch);
}
}
#[test]
fn test_extract_https_url_information() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases: Vec<(&str, &str, &str, Option<String>)> = vec![
(
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
"https://github.com/myorg/example.git",
"myfile",
Some("v1.0.0".to_string()),
),
(
"git::https://github.com/myorg/example.git//terraform/myfile?ref=master",
"https://github.com/myorg/example.git",
"terraform/myfile",
Some("master".to_string()),
),
(
"git::https://myserver.com/example.git//terraform/myfile",
"https://myserver.com/example.git",
"terraform/myfile",
None,
),
];
for (url, expected_repo, expected_path, expected_branch) in test_cases {
let repo = remote_task_git.detect_https(url).unwrap();
assert_eq!(expected_repo, repo.url_without_path);
assert_eq!(expected_path, repo.path);
assert_eq!(expected_branch, repo.branch);
}
}
#[test]
fn test_compare_ssh_get_cache_key() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases = vec![
(
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v2.0.0",
false,
),
(
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::ssh://user@myserver.com/example.git//myfile?ref=master",
false,
),
(
"git::ssh://git@github.com/example.git//myfile?ref=v1.0.0",
"git::ssh://git@github.com/example.git//subfolder/mysecondfile?ref=v1.0.0",
true,
),
(
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::ssh://git@github.com/myorg/example.git//subfolder/mysecondfile?ref=v1.0.0",
true,
),
];
for (first_url, second_url, expected) in test_cases {
let first_repo = remote_task_git.detect_ssh(first_url).unwrap();
let second_repo = remote_task_git.detect_ssh(second_url).unwrap();
let first_cache_key = remote_task_git.get_cache_key(&first_repo);
let second_cache_key = remote_task_git.get_cache_key(&second_repo);
assert_eq!(expected, first_cache_key == second_cache_key);
}
}
#[test]
fn test_compare_https_get_cache_key() {
let remote_task_git = RemoteTaskGitBuilder::new().build();
let test_cases = vec![
(
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::https://github.com/myorg/example.git//myfile?ref=v2.0.0",
false,
),
(
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::https://bitbucket.com/myorg/example.git//myfile?ref=v1.0.0",
false,
),
(
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::https://github.com/myorg/example.git//subfolder/myfile?ref=v1.0.0",
true,
),
(
"git::https://github.com/example.git//myfile?ref=v1.0.0",
"git::https://github.com/example.git//subfolder/myfile?ref=v1.0.0",
true,
),
];
for (first_url, second_url, expected) in test_cases {
let first_repo = remote_task_git.detect_https(first_url).unwrap();
let second_repo = remote_task_git.detect_https(second_url).unwrap();
let first_cache_key = remote_task_git.get_cache_key(&first_repo);
let second_cache_key = remote_task_git.get_cache_key(&second_repo);
assert_eq!(expected, first_cache_key == second_cache_key);
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_file_providers/local_task.rs | src/task/task_file_providers/local_task.rs | use std::path::{Path, PathBuf};
use async_trait::async_trait;
use crate::Result;
use super::TaskFileProvider;
#[derive(Debug)]
pub struct LocalTask;
#[async_trait]
impl TaskFileProvider for LocalTask {
fn is_match(&self, file: &str) -> bool {
let path = Path::new(file);
path.is_relative() || path.is_absolute()
}
async fn get_local_path(&self, file: &str) -> Result<PathBuf> {
Ok(PathBuf::from(file))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_match() {
let provider = LocalTask;
assert!(provider.is_match("filetask.bat"));
assert!(provider.is_match("filetask"));
assert!(provider.is_match("/test.txt"));
assert!(provider.is_match("./test.txt"));
assert!(provider.is_match("../test.txt"));
}
#[tokio::test]
async fn test_get_local_path() {
let provider = LocalTask;
assert_eq!(
provider.get_local_path("/test.txt").await.unwrap(),
PathBuf::from("/test.txt")
);
assert_eq!(
provider.get_local_path("./test.txt").await.unwrap(),
PathBuf::from("./test.txt")
);
assert_eq!(
provider.get_local_path("../test.txt").await.unwrap(),
PathBuf::from("../test.txt")
);
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/task/task_file_providers/mod.rs | src/task/task_file_providers/mod.rs | use std::{fmt::Debug, path::PathBuf};
mod local_task;
mod remote_task_git;
mod remote_task_http;
use crate::Result;
use async_trait::async_trait;
use local_task::LocalTask;
use remote_task_git::RemoteTaskGitBuilder;
use remote_task_http::RemoteTaskHttpBuilder;
#[async_trait]
pub trait TaskFileProvider: Debug {
fn is_match(&self, file: &str) -> bool;
async fn get_local_path(&self, file: &str) -> Result<PathBuf>;
}
pub struct TaskFileProvidersBuilder {
use_cache: bool,
}
impl TaskFileProvidersBuilder {
pub fn new() -> Self {
Self { use_cache: false }
}
pub fn with_cache(mut self, use_cache: bool) -> Self {
self.use_cache = use_cache;
self
}
pub fn build(self) -> TaskFileProviders {
TaskFileProviders::new(self.use_cache)
}
}
pub struct TaskFileProviders {
use_cache: bool,
}
impl TaskFileProviders {
pub fn new(use_cache: bool) -> Self {
Self { use_cache }
}
fn get_providers(&self) -> Vec<Box<dyn TaskFileProvider>> {
vec![
Box::new(
RemoteTaskGitBuilder::new()
.with_cache(self.use_cache)
.build(),
),
Box::new(
RemoteTaskHttpBuilder::new()
.with_cache(self.use_cache)
.build(),
),
Box::new(LocalTask), // Must be the last provider
]
}
pub fn get_provider(&self, file: &str) -> Option<Box<dyn TaskFileProvider>> {
self.get_providers().into_iter().find(|p| p.is_match(file))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_get_providers() {
let task_file_providers = TaskFileProvidersBuilder::new().build();
let providers = task_file_providers.get_providers();
assert_eq!(providers.len(), 3);
}
#[test]
fn test_local_file_match_local_provider() {
let task_file_providers = TaskFileProvidersBuilder::new().build();
let cases = vec!["file.txt", "./file.txt", "../file.txt", "/file.txt"];
for file in cases {
let provider = task_file_providers.get_provider(file);
assert!(provider.is_some());
let provider_name = format!("{:?}", provider.unwrap());
assert!(provider_name.contains("LocalTask"));
}
}
#[test]
fn test_http_file_match_http_remote_task_provider() {
let task_file_providers = TaskFileProvidersBuilder::new().build();
let cases = vec![
"http://example.com/file.txt",
"https://example.com/file.txt",
"https://example.com/subfolder/file.txt",
];
for file in cases {
let provider = task_file_providers.get_provider(file);
assert!(provider.is_some());
let provider_name = format!("{:?}", provider.unwrap());
assert!(provider_name.contains("RemoteTaskHttp"));
}
}
#[test]
fn test_git_file_match_git_remote_task_provider() {
let task_file_providers = TaskFileProvidersBuilder::new().build();
let cases = vec![
"git::ssh://git@github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::https://github.com/myorg/example.git//myfile?ref=v1.0.0",
"git::ssh://user@myserver.com/example.git//subfolder/myfile.py",
"git::https://myserver.com/example.git//subfolder/myfile.sh",
];
for file in cases {
let provider = task_file_providers.get_provider(file);
assert!(provider.is_some());
let provider_name = format!("{:?}", provider.unwrap());
assert!(provider_name.contains("RemoteTaskGit"));
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/theme.rs | src/ui/theme.rs | use demand::Theme;
use crate::config::Settings;
/// Returns the demand theme based on the `color_theme` setting.
///
/// Available themes:
/// - "default" or "charm" - Default charm theme (good for dark terminals)
/// - "base16" - Base16 theme (good for light terminals)
/// - "catppuccin" - Catppuccin theme
/// - "dracula" - Dracula theme
pub fn get_theme() -> Theme {
let settings = Settings::get();
match settings.color_theme.to_lowercase().as_str() {
"base16" => Theme::base16(),
"catppuccin" => Theme::catppuccin(),
"dracula" => Theme::dracula(),
"charm" | "default" | "" => Theme::charm(),
other => {
warn!("Unknown color theme '{}', using default", other);
Theme::charm()
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/time.rs | src/ui/time.rs | use std::time::Duration;
pub fn format_duration(dur: Duration) -> String {
if dur < Duration::from_millis(1) {
format!("{dur:.0?}")
} else if dur < Duration::from_secs(1) {
format!("{dur:.1?}")
} else {
format!("{dur:.2?}")
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/table.rs | src/ui/table.rs | use crate::Result;
use crate::env::TERM_WIDTH;
use comfy_table::{Attribute, Cell, Color, ContentArrangement, Row};
use console::style;
use itertools::Itertools;
use tabled::Table;
use tabled::settings::object::{Columns, Rows};
use tabled::settings::peaker::PriorityMax;
use tabled::settings::width::{MinWidth, Wrap};
use tabled::settings::{Format, Margin, Modify, Padding, Remove, Settings, Style, Width};
use xx::regex;
type SettingPriority = Settings<Settings, Wrap<usize, PriorityMax>>;
type SettingMinWidth = Settings<SettingPriority, MinWidth>;
// type SettingCellHeightLimit = Settings<SettingMinWidth, CellHeightLimit>;
// type SettingCellHeightIncrease = Settings<SettingCellHeightLimit, CellHeightIncrease>;
pub fn term_size_settings() -> SettingMinWidth {
Settings::default()
.with(Width::wrap(*TERM_WIDTH).priority(PriorityMax::default()))
.with(Width::increase(*TERM_WIDTH))
// .with(Height::limit(*TERM_HEIGHT))
// .with(Height::increase(*TERM_HEIGHT))
}
pub fn default_style(table: &mut Table, no_headers: bool) {
let header = |h: &_| style(h).italic().magenta().to_string();
if no_headers || !console::user_attended() || cfg!(test) {
table.with(Remove::row(Rows::first()));
} else {
table.with(Modify::new(Rows::first()).with(Format::content(header)));
}
table.with(Style::empty());
if console::user_attended() && !cfg!(test) {
table.with(term_size_settings());
}
table
.with(Margin::new(0, 0, 0, 0))
.with(Modify::new(Columns::first()).with(Padding::new(0, 1, 0, 0)))
.with(Modify::new(Columns::last()).with(Padding::zero()));
}
pub struct MiseTable {
table: comfy_table::Table,
truncate: bool,
}
impl MiseTable {
pub fn new(no_header: bool, headers: &[&str]) -> Self {
let mut table = comfy_table::Table::new();
table
.load_preset(comfy_table::presets::NOTHING)
.set_content_arrangement(ContentArrangement::Dynamic);
if !console::colors_enabled() {
table.force_no_tty();
}
if !no_header && console::user_attended() {
let headers = headers.iter().map(Self::header).collect_vec();
table.set_header(headers);
}
Self {
table,
truncate: false,
}
}
pub fn truncate(&mut self, truncate: bool) -> &mut Self {
self.truncate = truncate;
self
}
fn header(title: impl ToString) -> Cell {
Cell::new(title)
.add_attribute(Attribute::Italic)
.fg(Color::Magenta)
}
pub fn add_row(&mut self, row: impl Into<Row>) {
let mut row = row.into();
row.max_height(1);
self.table.add_row(row);
}
pub fn print(&self) -> Result<()> {
let table = self.table.to_string();
// trim first character, skipping color characters
let re = regex!(r"^(\x{1b}[^ ]*\d+m) ");
for line in table.lines() {
let line = re.replacen(line.trim(), 1, "$1");
println!("{line}");
}
Ok(())
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/info.rs | src/ui/info.rs | use crate::file;
use console::style;
use indenter::indented;
use std::fmt::{Display, Write};
pub fn section<S: Display>(header: &str, body: S) -> eyre::Result<()> {
let body = file::replace_paths_in_string(body);
let out = format!("\n{}: \n{}", style(header).bold(), indent_by(body, " "));
miseprintln!("{}", trim_line_end_whitespace(&out));
Ok(())
}
pub fn inline_section<S: Display>(header: &str, body: S) -> eyre::Result<()> {
let body = file::replace_paths_in_string(body);
let out = format!("{}: {body}", style(header).bold());
miseprintln!("{}", trim_line_end_whitespace(&out));
Ok(())
}
pub fn indent_by<S: Display>(s: S, ind: &'static str) -> String {
let mut out = String::new();
write!(indented(&mut out).with_str(ind), "{s}").unwrap();
out
}
pub fn trim_line_end_whitespace(s: &str) -> String {
s.lines().map(str::trim_end).collect::<Vec<_>>().join("\n")
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/multi_progress_report.rs | src/ui/multi_progress_report.rs | use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use indicatif::{MultiProgress, ProgressBar};
use crate::cli::version::VERSION_PLAIN;
use crate::config::Settings;
use crate::ui::osc::{self, ProgressState};
use crate::ui::progress_report::{ProgressReport, QuietReport, SingleReport, VerboseReport};
use crate::ui::style;
#[derive(Debug)]
pub struct MultiProgressReport {
mp: Option<MultiProgress>,
quiet: bool,
footer: Mutex<Option<Box<dyn SingleReport>>>,
footer_pb: Mutex<Option<ProgressBar>>, // Reference to footer bar for inserting before it
// Track overall progress: total expected progress units and current progress per report
total_count: Mutex<usize>,
report_progress: Mutex<HashMap<usize, (u64, u64)>>, // report_id -> (position, length)
next_report_id: Mutex<usize>,
last_osc_percentage: Mutex<Option<u8>>, // Last OSC percentage sent, to avoid duplicate updates
}
static INSTANCE: Mutex<Option<Arc<MultiProgressReport>>> = Mutex::new(None);
impl MultiProgressReport {
pub fn try_get() -> Option<Arc<Self>> {
INSTANCE.lock().unwrap().as_ref().cloned()
}
pub fn get() -> Arc<Self> {
let mut guard = INSTANCE.lock().unwrap();
if let Some(existing) = guard.as_ref() {
return existing.clone();
}
let mpr = Arc::new(Self::new());
*guard = Some(mpr.clone());
mpr
}
fn new() -> Self {
let settings = Settings::get();
let has_stderr = console::user_attended_stderr();
let force_progress = *crate::env::MISE_PROGRESS_TRACE; // Force progress bars when tracing
progress_trace!(
"MultiProgressReport::new: raw={}, quiet={}, verbose={}, user_attended_stderr={}, force_progress={}",
settings.raw,
settings.quiet,
settings.verbose,
has_stderr,
force_progress
);
let mp = match (settings.raw || settings.quiet || settings.verbose || !has_stderr)
&& !force_progress
{
true => {
progress_trace!(
"MultiProgressReport::new: mp=None (one of the conditions is true)"
);
None
}
false => {
progress_trace!("MultiProgressReport::new: mp=Some(MultiProgress)");
Some(MultiProgress::new())
}
};
MultiProgressReport {
mp,
quiet: settings.quiet,
footer: Mutex::new(None),
footer_pb: Mutex::new(None),
total_count: Mutex::new(0),
report_progress: Mutex::new(HashMap::new()),
next_report_id: Mutex::new(0),
last_osc_percentage: Mutex::new(None),
}
}
pub fn add(&self, prefix: &str) -> Box<dyn SingleReport> {
self.add_with_options(prefix, false)
}
pub fn add_with_options(&self, prefix: &str, dry_run: bool) -> Box<dyn SingleReport> {
match &self.mp {
_ if self.quiet => {
progress_trace!(
"add_with_options[{}]: creating QuietReport (quiet=true)",
prefix
);
Box::new(QuietReport::new())
}
Some(mp) if !dry_run => {
progress_trace!(
"add_with_options[{}]: creating ProgressReport with MultiProgress",
prefix
);
let mut pr = ProgressReport::new(prefix.into());
// Insert before footer if it exists, otherwise just add
let footer_pb = self.footer_pb.lock().unwrap();
pr.pb = if let Some(footer) = footer_pb.as_ref() {
mp.insert_before(footer, pr.pb)
} else {
mp.add(pr.pb)
};
Box::new(pr)
}
_ => {
progress_trace!(
"add_with_options[{}]: creating VerboseReport (mp={:?}, dry_run={})",
prefix,
self.mp.is_some(),
dry_run
);
Box::new(VerboseReport::new(prefix.to_string()))
}
}
}
pub fn init_footer(&self, dry_run: bool, message: &str, total_count: usize) {
let mut footer = self.footer.lock().unwrap();
if let Some(_footer) = footer.as_ref() {
return;
}
// Set total count for overall progress tracking
*self.total_count.lock().unwrap() = total_count;
progress_trace!(
"init_footer: total_count={}, total_units={}",
total_count,
total_count * 1_000_000
);
// Don't show footer when there's only 1 tool - individual progress bar is sufficient
if total_count == 1 {
return;
}
// Initialize OSC progress if enabled
if Settings::get().terminal_progress {
osc::set_progress(ProgressState::Normal, 0);
progress_trace!("init_footer: initialized OSC progress at 0%");
}
let version = &*VERSION_PLAIN;
let version_text = format!("{version} by @jdx");
*footer = Some(match &self.mp {
_ if self.quiet => return,
Some(mp) if !dry_run => {
// Footer text with "mise" prefix for progress bar overlay
let footer_text = format!(
"{} {}",
style::emagenta("mise").bold(),
style::edim(&version_text),
);
// Footer length is total_count * 1,000,000 to show progress with high granularity
let footer_length = (total_count * 1_000_000) as u64;
let mut footer_bar =
ProgressReport::new_footer(footer_text, footer_length, message.to_string());
// Add footer to the end (it will be the last bar initially)
footer_bar.pb = mp.add(footer_bar.pb);
// Store reference to footer bar for inserting other bars before it
*self.footer_pb.lock().unwrap() = Some(footer_bar.pb.clone());
// Set initial message AFTER adding to MultiProgress to prevent ghost output
footer_bar.set_position(0);
Box::new(footer_bar)
}
_ => {
// Don't include "mise" prefix for VerboseReport since logger already adds it
let verbose = VerboseReport::new(version_text);
verbose.set_message(message.to_string());
Box::new(verbose)
}
});
}
pub fn footer_inc(&self, n: usize) {
if n == 0 {
return;
}
if let Some(f) = &*self.footer.lock().unwrap() {
f.inc(n as u64);
}
}
pub fn footer_finish(&self) {
if let Some(f) = &*self.footer.lock().unwrap() {
f.finish();
}
// Clear terminal progress when finished
if Settings::get().terminal_progress {
osc::clear_progress();
}
}
/// Allocate a new report ID for progress tracking
pub fn allocate_report_id(&self) -> usize {
let mut next_id = self.next_report_id.lock().unwrap();
let id = *next_id;
*next_id += 1;
progress_trace!("allocate_report_id: allocated report_id={}", id);
id
}
/// Update a report's progress and recalculate overall progress
pub fn update_report_progress(&self, report_id: usize, position: u64, length: u64) {
progress_trace!(
"update_report_progress: report_id={}, position={}, length={}",
report_id,
position,
length
);
let mut progress = self.report_progress.lock().unwrap();
progress.insert(report_id, (position, length));
drop(progress); // Release lock before calling update_overall_progress
self.update_overall_progress();
}
/// Calculate and send overall progress update to terminal
/// Each report gets equal weight (1/total_count)
/// Reports use 0-1,000,000 scale internally
fn update_overall_progress(&self) {
let total_count = *self.total_count.lock().unwrap();
if total_count == 0 {
progress_trace!("update_overall_progress: skipping, total_count=0");
return;
}
let progress = self.report_progress.lock().unwrap();
// Calculate weighted progress: each report contributes equally (1/N)
// Reports provide position/length in 0-1,000,000 range
let weight_per_report = 1.0 / total_count as f64;
let mut total_progress = 0.0f64;
progress_trace!(
"update_overall_progress: total_count={}, weight_per_report={:.3}, num_reports={}",
total_count,
weight_per_report,
progress.len()
);
for (report_id, (position, length)) in progress.iter() {
let report_progress = if *length > 0 {
(*position as f64 / *length as f64).clamp(0.0, 1.0)
} else {
0.0
};
let weighted_progress = weight_per_report * report_progress;
total_progress += weighted_progress;
progress_trace!(
" report_id={}: pos={}, len={}, progress={:.3}, weighted={:.3}",
report_id,
position,
length,
report_progress,
weighted_progress
);
}
total_progress = total_progress.clamp(0.0, 1.0);
progress_trace!(
"update_overall_progress: total_progress={:.3}",
total_progress
);
// Update footer bar - convert to units for display
let footer_units = (total_progress * (total_count * 1_000_000) as f64).round() as u64;
if let Some(f) = &*self.footer.lock().unwrap() {
f.set_position(footer_units);
}
// Update terminal OSC progress - only if percentage changed
if Settings::get().terminal_progress {
let overall_percentage = (total_progress * 100.0).clamp(0.0, 100.0) as u8;
let mut last_pct = self.last_osc_percentage.lock().unwrap();
if *last_pct != Some(overall_percentage) {
progress_trace!(
"update_overall_progress: OSC progress={}%",
overall_percentage
);
osc::set_progress(ProgressState::Normal, overall_percentage);
*last_pct = Some(overall_percentage);
}
}
}
pub fn suspend_if_active<F: FnOnce() -> R, R>(f: F) -> R {
match Self::try_get() {
Some(mpr) => mpr.suspend(f),
None => f(),
}
}
pub fn suspend<F: FnOnce() -> R, R>(&self, f: F) -> R {
match &self.mp {
Some(mp) => mp.suspend(f),
None => f(),
}
}
pub fn stop(&self) -> eyre::Result<()> {
if let Some(mp) = &self.mp {
mp.clear()?;
}
if Settings::get().terminal_progress {
osc::clear_progress();
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_multi_progress_report() {
let mpr = MultiProgressReport::get();
let pr = mpr.add("PREFIX");
pr.finish_with_message("test".into());
pr.println("".into());
pr.set_message("test".into());
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/tree.rs | src/ui/tree.rs | use std::borrow::Cow;
pub trait TreeItem: Clone {
type Child: TreeItem;
fn write_self(&self) -> std::io::Result<()>;
fn children(&self) -> Cow<'_, [Self::Child]>;
}
/// Glyphs used to draw the connecting lines of a rendered tree.
struct TreeItemIndentChars {
    /// Character for pointing down and right (`├`).
    pub down_and_right: &'static str,
    /// Character for pointing straight down (`│`).
    pub down: &'static str,
    /// Character for turning from down to right (`└`).
    pub turn_right: &'static str,
    /// Character for pointing right (`─`).
    pub right: &'static str,
    /// Empty character (` `).
    pub empty: &'static str,
}
const TREE_ITEM_CHARS: TreeItemIndentChars = TreeItemIndentChars {
    down_and_right: "├",
    down: "│",
    turn_right: "└",
    right: "─",
    empty: " ",
};
/// The four row-prefix strings for tree rendering, built once per render.
struct TreeItemIndent {
    pub regular_prefix: String,
    pub child_prefix: String,
    pub last_regular_prefix: String,
    pub last_child_prefix: String,
}
impl TreeItemIndent {
    /// Build row prefixes for the given indent width and item padding.
    pub fn new(
        indent_size: usize,
        padding: usize,
        characters: &TreeItemIndentChars,
    ) -> TreeItemIndent {
        // One column is taken by the connector glyph and `padding` by the gap
        // before the item text; the remainder is filled with rules or blanks.
        let reserved = 1 + padding;
        let fill = indent_size.saturating_sub(reserved);
        let rule = characters.right.repeat(fill);
        let blank = characters.empty.repeat(fill);
        let gap = characters.empty.repeat(padding);
        TreeItemIndent {
            regular_prefix: [characters.down_and_right, rule.as_str(), gap.as_str()].concat(),
            child_prefix: [characters.down, blank.as_str(), gap.as_str()].concat(),
            last_regular_prefix: [characters.turn_right, rule.as_str(), gap.as_str()].concat(),
            last_child_prefix: [characters.empty, blank.as_str(), gap.as_str()].concat(),
        }
    }
}
/// Render `item` and all of its descendants as a unicode tree on stdout.
pub fn print_tree<T: TreeItem>(item: &T) -> std::io::Result<()> {
    let indent = TreeItemIndent::new(4, 1, &TREE_ITEM_CHARS);
    print_tree_item(item, String::new(), String::new(), &indent, 0)
}
/// Recursively print one tree row followed by all of its children.
///
/// `prefix` is printed before this row's own text; `child_prefix` is the base
/// indentation inherited by this row's children. `level` tracks recursion
/// depth and stops descending at `u32::MAX`.
fn print_tree_item<T: TreeItem>(
    item: &T,
    prefix: String,
    child_prefix: String,
    indent: &TreeItemIndent,
    level: u32,
) -> std::io::Result<()> {
    miseprint!("{}", prefix)?;
    item.write_self()?;
    miseprintln!("");
    if level < u32::MAX {
        let children = item.children();
        if let Some((last_child, children)) = children.split_last() {
            // Non-last children get the "├──" connector and a "│" rail below.
            let rp = child_prefix.clone() + &indent.regular_prefix;
            let cp = child_prefix.clone() + &indent.child_prefix;
            for c in children {
                print_tree_item(c, rp.clone(), cp.clone(), indent, level + 1)?;
            }
            // The last child gets "└──" and a blank rail below it.
            let rp = child_prefix.clone() + &indent.last_regular_prefix;
            let cp = child_prefix.clone() + &indent.last_child_prefix;
            print_tree_item(last_child, rp, cp, indent, level + 1)?;
        }
    }
    Ok(())
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/progress_report.rs | src/ui/progress_report.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::time::Duration;
use std::{
fmt::{Display, Formatter},
sync::Mutex,
};
use indicatif::{ProgressBar, ProgressStyle};
use std::sync::LazyLock as Lazy;
use crate::ui::style;
use crate::{backend, env, ui};
#[derive(Debug, Clone, Copy)]
pub enum ProgressIcon {
Success,
Skipped,
Warning,
Error,
}
impl Display for ProgressIcon {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
ProgressIcon::Success => write!(f, "{}", style::egreen("✓").bright()),
ProgressIcon::Skipped => write!(f, "{}", style::eyellow("⇢").bright()),
ProgressIcon::Warning => write!(f, "{}", style::eyellow("⚠").bright()),
ProgressIcon::Error => write!(f, "{}", style::ered("✗").bright()),
}
}
}
pub trait SingleReport: Send + Sync + std::fmt::Debug {
fn println(&self, _message: String) {}
fn set_message(&self, _message: String) {}
fn inc(&self, _delta: u64) {}
fn set_position(&self, _delta: u64) {}
fn set_length(&self, _length: u64) {}
fn abandon(&self) {}
fn finish(&self) {
self.finish_with_message(String::new());
}
fn finish_with_message(&self, message: String) {
self.finish_with_icon(message, ProgressIcon::Success);
}
fn finish_with_icon(&self, _message: String, _icon: ProgressIcon) {}
/// Declare how many operations this progress report will have
/// Each operation will get equal space (1/count)
/// For example, if there are 3 operations (download, checksum, extract):
/// - start_operations(3) at the beginning
///
/// Then each set_length() call will allocate 33.33% of the total progress
fn start_operations(&self, _count: usize) {}
}
static SPIN_TEMPLATE: Lazy<ProgressStyle> = Lazy::new(|| {
let tmpl = "{prefix} {wide_msg} {spinner:.blue} {elapsed:>3.dim.italic}";
ProgressStyle::with_template(tmpl).unwrap()
});
const TICK_INTERVAL: Duration = Duration::from_millis(250);
static PROG_TEMPLATE: Lazy<ProgressStyle> = Lazy::new(|| {
let tmpl = match *env::TERM_WIDTH {
0..=89 => "{prefix} {wide_msg} {bar:10.cyan/blue} {percent:>2}%",
90..=99 => "{prefix} {wide_msg} {bar:15.cyan/blue} {percent:>2}%",
100..=114 => "{prefix} {wide_msg} {bytes}/{total_bytes:10} {bar:10.cyan/blue}",
_ => {
"{prefix} {wide_msg} {bytes}/{total_bytes} ({eta}) {bar:20.cyan/blue} {elapsed:>3.dim.italic}"
}
};
ProgressStyle::with_template(tmpl).unwrap()
});
/// Renders a progress bar with text overlaid on top
/// The text background alternates based on progress:
/// - Filled portion: black text on cyan background
/// - Unfilled portion: dim text on default background
fn render_progress_bar_with_overlay(text: &str, progress: f64, width: usize) -> String {
    use console::Style;
    let progress = progress.clamp(0.0, 1.0);
    let filled_width = (width as f64 * progress) as usize;
    // Strip any existing ANSI codes from text
    let clean_text = console::strip_ansi_codes(text);
    // If text is longer than width, truncate it.
    // Use saturating_sub so a pathologically narrow terminal (width < 3)
    // cannot underflow usize and panic; the overlay loop below already
    // handles text longer than the bar.
    let display_text = if clean_text.chars().count() > width {
        clean_text
            .chars()
            .take(width.saturating_sub(3))
            .collect::<String>()
            + "..."
    } else {
        clean_text.to_string()
    };
    let text_len = display_text.chars().count();
    let padding = (width.saturating_sub(text_len)) / 2;
    // Build the bar with text overlay
    let mut result = String::new();
    // Styles for different regions
    let filled_bar_style = Style::new().cyan();
    let filled_text_style = Style::new().black().on_cyan();
    let empty_text_style = Style::new().dim();
    for i in 0..width {
        if i < padding || i >= padding + text_len {
            // No text here, just show the bar
            if i < filled_width {
                result.push_str(&filled_bar_style.apply_to('█').to_string());
            } else {
                result.push('░');
            }
        } else {
            // Text overlay
            let text_idx = i - padding;
            let ch = display_text.chars().nth(text_idx).unwrap();
            if i < filled_width {
                // Filled portion: black text on cyan background
                result.push_str(&filled_text_style.apply_to(ch).to_string());
            } else {
                // Unfilled portion: dim text
                result.push_str(&empty_text_style.apply_to(ch).to_string());
            }
        }
    }
    result
}
static FOOTER_TEMPLATE: Lazy<ProgressStyle> = Lazy::new(|| {
// Simple template - we'll update the message with our custom rendered bar
ProgressStyle::with_template("{wide_msg}").unwrap()
});
#[derive(Debug)]
pub struct ProgressReport {
pub pb: ProgressBar,
report_id: Option<usize>,
total_operations: Mutex<Option<usize>>, // Total operations declared upfront (None if unknown)
operation_count: Mutex<u32>, // How many operations have started (1, 2, 3...)
operation_base: Mutex<u64>, // Base progress for current operation (0, 333333, 666666...)
operation_length: Mutex<u64>, // Allocated length for current operation
footer_text: Option<String>, // If set, this is a footer bar with text overlay
}
static LONGEST_PLUGIN_NAME: Lazy<usize> = Lazy::new(|| {
backend::list()
.into_iter()
.map(|p| p.id().len())
.max()
.unwrap_or_default()
.clamp(15, 35)
});
/// Pad `s` with spaces to at least `w` columns, left-aligned (longer strings
/// are returned unchanged).
fn pad_prefix(w: usize, s: &str) -> String {
    console::pad_str(s, w, console::Alignment::Left, None).to_string()
}
/// Build the standard progress-line prefix: a dimmed "mise" tag followed by
/// `prefix`, padded to `pad` columns so bars line up.
fn normal_prefix(pad: usize, prefix: &str) -> String {
    let prefix = format!("{} {prefix}", style::edim("mise"));
    pad_prefix(pad, &prefix)
}
impl ProgressReport {
pub fn new(prefix: String) -> ProgressReport {
ui::ctrlc::show_cursor_after_ctrl_c();
let pad = *LONGEST_PLUGIN_NAME;
let pb = ProgressBar::new(100)
.with_style(SPIN_TEMPLATE.clone())
.with_prefix(normal_prefix(pad, &prefix));
pb.enable_steady_tick(TICK_INTERVAL);
// Allocate a report ID for multi-progress tracking
let report_id = ui::multi_progress_report::MultiProgressReport::try_get()
.map(|mpr| mpr.allocate_report_id());
ProgressReport {
pb,
report_id,
total_operations: Mutex::new(Some(1)), // Default to 1 operation (100% of progress)
operation_count: Mutex::new(0),
operation_base: Mutex::new(0),
operation_length: Mutex::new(1_000_000), // Full range initially
footer_text: None,
}
}
pub fn new_footer(footer_text: String, length: u64, _message: String) -> ProgressReport {
ui::ctrlc::show_cursor_after_ctrl_c();
// Footer shows text inside the progress bar with custom overlay rendering
let pb = ProgressBar::new(length).with_style(FOOTER_TEMPLATE.clone());
// Don't enable steady tick for footer - it doesn't use a spinner template
// and the tick causes unnecessary redraws
// Don't set initial message here - it will be set after adding to MultiProgress
// to prevent ghost output before the bar is part of the managed display
ProgressReport {
pb,
report_id: None,
total_operations: Mutex::new(None),
operation_count: Mutex::new(0),
operation_base: Mutex::new(0),
operation_length: Mutex::new(length),
footer_text: Some(footer_text),
}
}
fn update_footer_display(&self) {
// Update footer bar with custom text overlay rendering
if let Some(footer_text) = &self.footer_text {
let pos = self.pb.position();
let len = self.pb.length().unwrap_or(1);
let progress = if len > 0 {
pos as f64 / len as f64
} else {
0.0
};
let width = *env::TERM_WIDTH;
let rendered = render_progress_bar_with_overlay(footer_text, progress, width);
self.pb.set_message(rendered);
}
}
fn update_terminal_progress(&self) {
// Map progress bar position to allocated range to prevent backwards progress
if let Some(report_id) = self.report_id
&& let Some(mpr) = ui::multi_progress_report::MultiProgressReport::try_get()
{
// Check if we're spinning (no length set yet)
if self.pb.length().is_none() {
// During spinning, report minimal progress to show activity
progress_trace!(
"update_terminal_progress[{}]: spinning, reporting 1%",
report_id
);
mpr.update_report_progress(report_id, 10_000, 1_000_000); // 1%
return;
}
let base = *self.operation_base.lock().unwrap();
let allocated_length = *self.operation_length.lock().unwrap();
// Get progress bar state (position/length in bytes)
let pb_pos = self.pb.position();
let pb_len = self.pb.length().unwrap(); // Safe because we checked above
// Calculate progress as 0.0-1.0
let pb_progress = if pb_len > 0 {
(pb_pos as f64 / pb_len as f64).clamp(0.0, 1.0)
} else {
0.0
};
// Map to allocated range (base to base+allocated_length)
let mapped_position = base + (pb_progress * allocated_length as f64) as u64;
progress_trace!(
"update_terminal_progress[{}]: pb=({}/{}) {:.1}%, base={}, alloc={}, mapped={}",
report_id,
pb_pos,
pb_len,
pb_progress * 100.0,
base,
allocated_length,
mapped_position
);
// Always report against fixed 1,000,000 scale
mpr.update_report_progress(report_id, mapped_position, 1_000_000);
}
}
}
impl SingleReport for ProgressReport {
fn println(&self, message: String) {
// Suspend the entire MultiProgress to prevent footer duplication
crate::ui::multi_progress_report::MultiProgressReport::suspend_if_active(|| {
eprintln!("{message}");
});
}
fn set_message(&self, message: String) {
self.pb.set_message(message.replace('\r', ""));
}
fn inc(&self, delta: u64) {
self.pb.inc(delta);
progress_trace!(
"inc[{:?}]: delta={}, new_pos={}",
self.report_id,
delta,
self.pb.position()
);
self.update_terminal_progress();
if Some(self.pb.position()) == self.pb.length() {
self.pb.set_style(SPIN_TEMPLATE.clone());
self.pb.enable_steady_tick(TICK_INTERVAL);
}
}
fn set_position(&self, pos: u64) {
self.pb.set_position(pos);
progress_trace!("set_position[{:?}]: pos={}", self.report_id, pos);
self.update_terminal_progress();
self.update_footer_display();
if Some(self.pb.position()) == self.pb.length() {
self.pb.set_style(SPIN_TEMPLATE.clone());
self.pb.enable_steady_tick(Duration::from_millis(250));
}
}
fn set_length(&self, length: u64) {
// Atomically update operation count and base together to prevent race conditions
let mut op_count = self.operation_count.lock().unwrap();
*op_count += 1;
let count = *op_count;
// When starting a new operation (count > 1), complete the previous operation first
let (base, per_operation) = if count > 1 {
let mut base_guard = self.operation_base.lock().unwrap();
let prev_allocated = *self.operation_length.lock().unwrap();
let prev_base = *base_guard;
let completed_position = prev_base + prev_allocated;
progress_trace!(
"set_length[{:?}]: completing op {}, moving base {} -> {}",
self.report_id,
count - 1,
prev_base,
completed_position
);
// Report completion of previous operation
if let Some(report_id) = self.report_id
&& let Some(mpr) = ui::multi_progress_report::MultiProgressReport::try_get()
{
mpr.update_report_progress(report_id, completed_position, 1_000_000);
}
// New operation starts where previous ended
*base_guard = completed_position;
// Calculate allocation with the new base
let total_ops = self.total_operations.lock().unwrap();
let total = (*total_ops).unwrap_or(1).max(1); // Ensure at least 1 to prevent division by zero
let per_operation = 1_000_000 / total as u64;
(completed_position, per_operation)
} else {
// First operation
let total_ops = self.total_operations.lock().unwrap();
let total = (*total_ops).unwrap_or(1).max(1); // Ensure at least 1 to prevent division by zero
let base = *self.operation_base.lock().unwrap();
let per_operation = 1_000_000 / total as u64;
(base, per_operation)
};
drop(op_count); // Release operation_count lock
*self.operation_length.lock().unwrap() = per_operation;
let total = self.total_operations.lock().unwrap().unwrap_or(1).max(1);
progress_trace!(
"set_length[{:?}]: op={}/{}, base={}, allocated={}, pb_length={}",
self.report_id,
count,
total,
base,
per_operation,
length
);
self.pb.set_position(0);
self.pb.set_style(PROG_TEMPLATE.clone());
self.pb.disable_steady_tick();
self.pb.set_length(length);
self.update_terminal_progress();
}
fn abandon(&self) {
self.pb.abandon();
}
fn finish_with_icon(&self, _message: String, _icon: ProgressIcon) {
progress_trace!("finish_with_icon[{:?}]", self.report_id);
// For footer bars with text overlay, use finish_with_message to clear it
if self.footer_text.is_some() {
self.pb.finish_with_message("");
} else {
self.pb.finish_and_clear();
}
// Mark this report as complete (100%) using fixed 0-1,000,000 range
if let Some(report_id) = self.report_id
&& let Some(mpr) = ui::multi_progress_report::MultiProgressReport::try_get()
{
progress_trace!("finish_with_icon[{}]: marking as 100% complete", report_id);
mpr.update_report_progress(report_id, 1_000_000, 1_000_000);
}
}
fn start_operations(&self, count: usize) {
progress_trace!(
"start_operations[{:?}]: declaring {} operations",
self.report_id,
count
);
*self.total_operations.lock().unwrap() = Some(count.max(1));
}
}
#[derive(Debug)]
pub struct QuietReport {}
impl QuietReport {
pub fn new() -> QuietReport {
QuietReport {}
}
}
impl SingleReport for QuietReport {}
#[derive(Debug)]
pub struct VerboseReport {
prefix: String,
prev_message: Mutex<String>,
pad: usize,
}
impl VerboseReport {
pub fn new(prefix: String) -> VerboseReport {
VerboseReport {
prefix,
prev_message: Mutex::new("".to_string()),
pad: *LONGEST_PLUGIN_NAME,
}
}
}
impl SingleReport for VerboseReport {
fn println(&self, message: String) {
eprintln!("{message}");
}
fn set_message(&self, message: String) {
let mut prev_message = self.prev_message.lock().unwrap();
if *prev_message == message {
return;
}
let prefix = pad_prefix(self.pad, &self.prefix);
log::info!("{prefix} {message}");
*prev_message = message.clone();
}
fn finish(&self) {
self.finish_with_message(style::egreen("done").to_string());
}
fn finish_with_icon(&self, message: String, icon: ProgressIcon) {
let prefix = pad_prefix(self.pad - 2, &self.prefix);
log::info!("{prefix} {icon} {message}");
}
}
#[cfg(test)]
mod tests {
use crate::config::Config;
use super::*;
#[tokio::test]
async fn test_progress_report() {
let _config = Config::get().await.unwrap();
let pr = ProgressReport::new("foo".into());
pr.set_message("message".into());
pr.finish_with_message("message".into());
}
#[tokio::test]
async fn test_progress_report_verbose() {
let _config = Config::get().await.unwrap();
let pr = VerboseReport::new("PREFIX".to_string());
pr.set_message("message".into());
pr.finish_with_message("message".into());
}
#[tokio::test]
async fn test_progress_report_quiet() {
let _config = Config::get().await.unwrap();
let pr = QuietReport::new();
pr.set_message("message".into());
pr.finish_with_message("message".into());
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/prompt.rs | src/ui/prompt.rs | use std::sync::Mutex;
use demand::{Confirm, Dialog, DialogButton};
use crate::env;
use crate::ui::ctrlc;
use crate::ui::theme::get_theme;
static MUTEX: Mutex<()> = Mutex::new(());
static SKIP_PROMPT: Mutex<bool> = Mutex::new(false);
/// Ask the user a yes/no question on stderr; returns `false` without
/// prompting when the session is non-interactive or usage-spec introspection
/// is active.
pub fn confirm<S: Into<String>>(message: S) -> eyre::Result<bool> {
    // Serialize prompts so two confirmations never fight over the terminal.
    let _lock = MUTEX.lock().unwrap();
    ctrlc::show_cursor_after_ctrl_c();
    if env::__USAGE.is_some() || !console::user_attended_stderr() {
        return Ok(false);
    }
    let theme = get_theme();
    Ok(Confirm::new(message)
        .clear_screen(true)
        .theme(&theme)
        .run()?)
}
pub fn confirm_with_all<S: Into<String>>(message: S) -> eyre::Result<bool> {
let _lock = MUTEX.lock().unwrap(); // Prevent multiple prompts at once
ctrlc::show_cursor_after_ctrl_c();
if !console::user_attended_stderr() || env::__USAGE.is_some() {
return Ok(false);
}
let mut skip_prompt = SKIP_PROMPT.lock().unwrap();
if *skip_prompt {
return Ok(true);
}
let theme = get_theme();
let answer = Dialog::new(message)
.buttons(vec![
DialogButton::new("Yes"),
DialogButton::new("No"),
DialogButton::new("All"),
])
.selected_button(1)
.clear_screen(true)
.theme(&theme)
.run()?;
let result = match answer.as_str() {
"Yes" => true,
"No" => false,
"All" => {
*skip_prompt = true;
true
}
_ => false,
};
Ok(result)
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/mod.rs | src/ui/mod.rs | pub use prompt::confirm;
#[cfg_attr(any(test, windows), path = "ctrlc_stub.rs")]
pub mod ctrlc;
pub(crate) mod info;
pub mod multi_progress_report;
pub mod osc;
pub mod progress_report;
pub mod prompt;
pub mod style;
pub mod table;
pub mod theme;
pub mod time;
pub mod tree;
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/style.rs | src/ui/style.rs | use std::path::Path;
use crate::file::display_path;
use console::{StyledObject, style};
pub fn ereset() -> String {
if console::colors_enabled_stderr() {
"\x1b[0m".to_string()
} else {
"".to_string()
}
}
pub fn estyle<D>(val: D) -> StyledObject<D> {
style(val).for_stderr()
}
pub fn ecyan<D>(val: D) -> StyledObject<D> {
estyle(val).cyan()
}
pub fn eblue<D>(val: D) -> StyledObject<D> {
estyle(val).blue()
}
pub fn emagenta<D>(val: D) -> StyledObject<D> {
estyle(val).magenta()
}
pub fn egreen<D>(val: D) -> StyledObject<D> {
estyle(val).green()
}
pub fn eyellow<D>(val: D) -> StyledObject<D> {
estyle(val).yellow()
}
pub fn ered<D>(val: D) -> StyledObject<D> {
estyle(val).red()
}
pub fn eblack<D>(val: D) -> StyledObject<D> {
estyle(val).black()
}
pub fn eunderline<D>(val: D) -> StyledObject<D> {
estyle(val).underlined()
}
pub fn edim<D>(val: D) -> StyledObject<D> {
estyle(val).dim()
}
pub fn ebold<D>(val: D) -> StyledObject<D> {
estyle(val).bold()
}
pub fn nbold<D>(val: D) -> StyledObject<D> {
nstyle(val).bold()
}
pub fn epath(path: &Path) -> StyledObject<String> {
estyle(display_path(path))
}
pub fn nstyle<D>(val: D) -> StyledObject<D> {
style(val).for_stdout()
}
pub fn ncyan<D>(val: D) -> StyledObject<D> {
nstyle(val).cyan()
}
pub fn nunderline<D>(val: D) -> StyledObject<D> {
nstyle(val).underlined()
}
pub fn nyellow<D>(val: D) -> StyledObject<D> {
nstyle(val).yellow()
}
pub fn nred<D>(val: D) -> StyledObject<D> {
nstyle(val).red()
}
pub fn ndim<D>(val: D) -> StyledObject<D> {
nstyle(val).dim()
}
pub fn nbright<D>(val: D) -> StyledObject<D> {
nstyle(val).bright()
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/ctrlc_stub.rs | src/ui/ctrlc_stub.rs | pub fn init() {}
// pub fn add_handler(_func: impl Fn() + Send + Sync + 'static) {}
pub fn exit_on_ctrl_c(_do_exit: bool) {}
/// ensures cursor is displayed on ctrl-c
pub fn show_cursor_after_ctrl_c() {}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/ctrlc.rs | src/ui/ctrlc.rs | use crate::exit;
use std::sync::atomic::{AtomicBool, Ordering};
use crate::cmd::CmdLineRunner;
use console::Term;
static EXIT: AtomicBool = AtomicBool::new(true);
static SHOW_CURSOR: AtomicBool = AtomicBool::new(false);
// static HANDLERS: OnceCell<Vec<Box<dyn Fn() + Send + Sync + 'static>>> = OnceCell::new();
/// Install the global Ctrl-C handler on the tokio runtime.
///
/// On each SIGINT: restore the cursor if a progress UI hid it, forward the
/// signal to child processes spawned via `CmdLineRunner`, and exit the
/// process with status 1 unless `exit_on_ctrl_c(false)` was called.
pub fn init() {
    tokio::spawn(async move {
        loop {
            tokio::signal::ctrl_c().await.unwrap();
            if SHOW_CURSOR.load(Ordering::Relaxed) {
                let _ = Term::stderr().show_cursor();
            }
            // Propagate the interrupt to children before (possibly) exiting.
            CmdLineRunner::kill_all(nix::sys::signal::SIGINT);
            if EXIT.load(Ordering::Relaxed) {
                debug!("Ctrl-C pressed, exiting...");
                exit(1);
            }
        }
    });
}
pub fn exit_on_ctrl_c(do_exit: bool) {
EXIT.store(do_exit, Ordering::Relaxed);
}
/// ensures cursor is displayed on ctrl-c
pub fn show_cursor_after_ctrl_c() {
SHOW_CURSOR.store(true, Ordering::Relaxed);
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/ui/osc.rs | src/ui/osc.rs | /// OSC (Operating System Command) escape sequences for terminal integration
///
/// This module provides support for OSC escape sequences that allow terminal
/// integration features like progress bars in Ghostty, VS Code, Windows Terminal,
/// and VTE-based terminals.
use std::io::{self, Write};
use std::sync::OnceLock;
/// OSC 9;4 states for terminal progress indication
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[allow(dead_code)]
pub enum ProgressState {
    /// No progress indicator (clears any existing progress)
    None,
    /// Normal progress bar with percentage (typically shows as default color, often blue/cyan)
    Normal,
    /// Error state (typically shows as red)
    Error,
    /// Indeterminate progress (spinner/activity indicator)
    Indeterminate,
    /// Warning state (typically shows as yellow)
    Warning,
}
impl ProgressState {
    /// Numeric state code embedded in the OSC 9;4 escape sequence.
    fn as_code(&self) -> u8 {
        match self {
            Self::None => 0,
            Self::Normal => 1,
            Self::Error => 2,
            Self::Indeterminate => 3,
            Self::Warning => 4,
        }
    }
}
/// Checks if the current terminal supports OSC 9;4 progress sequences
fn terminal_supports_osc_9_4() -> bool {
static SUPPORTS_OSC_9_4: OnceLock<bool> = OnceLock::new();
*SUPPORTS_OSC_9_4.get_or_init(|| {
// Check TERM_PROGRAM environment variable for terminal detection
if let Some(term_program) = &*crate::env::TERM_PROGRAM {
match term_program.as_str() {
// Supported terminals
"ghostty" => return true,
"iTerm.app" => return true,
"vscode" => return true,
// Unsupported terminals
"WezTerm" => return false,
"Alacritty" => return false,
_ => {}
}
}
// Check for Windows Terminal
if *crate::env::WT_SESSION {
return true;
}
// Check for VTE-based terminals (GNOME Terminal, etc.)
if *crate::env::VTE_VERSION {
return true;
}
// Default to false for unknown terminals to avoid escape sequence pollution
false
})
}
/// Sends an OSC 9;4 sequence to set terminal progress
///
/// # Arguments
/// * `state` - The progress state to display
/// * `progress` - Progress percentage (0-100), ignored if state is None or Indeterminate
///
/// # Example
/// ```no_run
/// use mise::ui::osc::{set_progress, ProgressState};
///
/// // Show 50% progress with normal (blue/cyan) color
/// set_progress(ProgressState::Normal, 50);
///
/// // Show indeterminate progress
/// set_progress(ProgressState::Indeterminate, 0);
///
/// // Clear progress
/// set_progress(ProgressState::None, 0);
/// ```
pub fn set_progress(state: ProgressState, progress: u8) {
let progress = progress.min(100);
let _ = write_progress(state, progress);
}
/// Write the OSC 9;4 progress sequence to stderr, if stderr is a terminal
/// that is known to support it; silently succeeds otherwise.
fn write_progress(state: ProgressState, progress: u8) -> io::Result<()> {
    // Only write OSC sequences if stderr is actually a terminal
    if !console::user_attended_stderr() {
        return Ok(());
    }
    // Only write OSC 9;4 sequences if the terminal supports them
    if !terminal_supports_osc_9_4() {
        return Ok(());
    }
    let mut stderr = io::stderr();
    // OSC 9;4 format: ESC ] 9 ; 4 ; <state> ; <progress> terminated by ST (ESC \)
    // Note: The color is controlled by the terminal theme
    // Ghostty may show cyan automatically for normal progress
    write!(stderr, "\x1b]9;4;{};{}\x1b\\", state.as_code(), progress)?;
    stderr.flush()
}
/// Clears any terminal progress indicator
pub fn clear_progress() {
set_progress(ProgressState::None, 0);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_progress_state_codes() {
assert_eq!(ProgressState::None.as_code(), 0);
assert_eq!(ProgressState::Normal.as_code(), 1);
assert_eq!(ProgressState::Error.as_code(), 2);
assert_eq!(ProgressState::Indeterminate.as_code(), 3);
assert_eq!(ProgressState::Warning.as_code(), 4);
}
#[test]
fn test_set_progress_doesnt_panic() {
// Just ensure it doesn't panic when called
set_progress(ProgressState::Normal, 50);
set_progress(ProgressState::Indeterminate, 0);
clear_progress();
}
#[test]
fn test_progress_clamping() {
// Verify that progress values over 100 are clamped
set_progress(ProgressState::Normal, 150);
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/pwsh.rs | src/shell/pwsh.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use crate::config::Settings;
use std::borrow::Cow;
use std::fmt::Display;
use indoc::formatdoc;
use crate::shell::{self, ActivateOptions, Shell};
/// PowerShell Core (pwsh) implementation of the [`Shell`] trait.
#[derive(Default)]
pub struct Pwsh {}

impl Pwsh {}
impl Shell for Pwsh {
    /// Emits the PowerShell script that activates mise in the current
    /// session: defines a `mise` wrapper function, saves the original PATH,
    /// and (unless disabled) installs the hook-env refresher via prompt and
    /// chpwd handlers plus an optional command-not-found handler.
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        let exe = exe.to_string_lossy();
        let mut out = String::new();
        // If already activated, first emit a deactivation script so
        // re-activation starts from a clean slate.
        out.push_str(&shell::build_deactivation_script(self));
        out.push_str(&self.format_activate_prelude(&opts.prelude));
        out.push_str(&formatdoc! {r#"
$env:MISE_SHELL = 'pwsh'
if (-not (Test-Path -Path Env:/__MISE_ORIG_PATH)) {{
$env:__MISE_ORIG_PATH = $env:PATH
}}
function mise {{
[CmdletBinding()]
param(
[Parameter(ValueFromRemainingArguments=$true)] # Allow any number of arguments, including none
[string[]] $arguments
)
$previous_out_encoding = $OutputEncoding
$previous_console_out_encoding = [Console]::OutputEncoding
$OutputEncoding = [Console]::OutputEncoding = [Text.UTF8Encoding]::UTF8
function _reset_output_encoding {{
$OutputEncoding = $previous_out_encoding
[Console]::OutputEncoding = $previous_console_out_encoding
}}
if ($arguments.count -eq 0) {{
& "{exe}"
_reset_output_encoding
return
}} elseif ($arguments -contains '-h' -or $arguments -contains '--help') {{
& "{exe}" @arguments
_reset_output_encoding
return
}}
$command = $arguments[0]
if ($arguments.Length -gt 1) {{
$remainingArgs = $arguments[1..($arguments.Length - 1)]
}} else {{
$remainingArgs = @()
}}
switch ($command) {{
{{ $_ -in 'deactivate', 'shell', 'sh' }} {{
& "{exe}" $command @remainingArgs | Out-String | Invoke-Expression -ErrorAction SilentlyContinue
_reset_output_encoding
}}
default {{
& "{exe}" $command @remainingArgs
$status = $LASTEXITCODE
if ($(Test-Path -Path Function:\_mise_hook)){{
_mise_hook
}}
_reset_output_encoding
# Pass down exit code from mise after _mise_hook
if ($PSVersionTable.PSVersion.Major -ge 7) {{
pwsh -NoProfile -Command exit $status
}} else {{
powershell -NoProfile -Command exit $status
}}
}}
}}
}}
"#});
        // hook-env glue: refresh the environment before every prompt and on
        // directory changes (chpwd requires PowerShell 7+).
        if !opts.no_hook_env {
            out.push_str(&formatdoc! {r#"
function Global:_mise_hook {{
if ($env:MISE_SHELL -eq "pwsh"){{
& "{exe}" hook-env{flags} $args -s pwsh | Out-String | Invoke-Expression -ErrorAction SilentlyContinue
}}
}}
function __enable_mise_chpwd{{
if ($PSVersionTable.PSVersion.Major -lt 7) {{
if ($env:MISE_PWSH_CHPWD_WARNING -ne '0') {{
Write-Warning "mise: chpwd functionality requires PowerShell version 7 or higher. Your current version is $($PSVersionTable.PSVersion). You can add `$env:MISE_PWSH_CHPWD_WARNING=0` to your environment to disable this warning."
}}
return
}}
if (-not $__mise_pwsh_chpwd){{
$Global:__mise_pwsh_chpwd= $true
$_mise_chpwd_hook = [EventHandler[System.Management.Automation.LocationChangedEventArgs]] {{
param([object] $source, [System.Management.Automation.LocationChangedEventArgs] $eventArgs)
end {{
_mise_hook
}}
}};
$__mise_pwsh_previous_chpwd_function=$ExecutionContext.SessionState.InvokeCommand.LocationChangedAction;
if ($__mise_original_pwsh_chpwd_function) {{
$ExecutionContext.SessionState.InvokeCommand.LocationChangedAction = [Delegate]::Combine($__mise_pwsh_previous_chpwd_function, $_mise_chpwd_hook)
}}
else {{
$ExecutionContext.SessionState.InvokeCommand.LocationChangedAction = $_mise_chpwd_hook
}}
}}
}}
__enable_mise_chpwd
Remove-Item -ErrorAction SilentlyContinue -Path Function:/__enable_mise_chpwd
function __enable_mise_prompt {{
if (-not $__mise_pwsh_previous_prompt_function){{
$Global:__mise_pwsh_previous_prompt_function=$function:prompt
function global:prompt {{
if (Test-Path -Path Function:\_mise_hook){{
_mise_hook
}}
& $__mise_pwsh_previous_prompt_function
}}
}}
}}
__enable_mise_prompt
Remove-Item -ErrorAction SilentlyContinue -Path Function:/__enable_mise_prompt
_mise_hook
"#});
        }
        // Optional: intercept unknown commands and offer to install the tool.
        if Settings::get().not_found_auto_install {
            out.push_str(&formatdoc! {r#"
if (-not $__mise_pwsh_command_not_found){{
$Global:__mise_pwsh_command_not_found= $true
function __enable_mise_command_not_found {{
$_mise_pwsh_cmd_not_found_hook = [EventHandler[System.Management.Automation.CommandLookupEventArgs]] {{
param([object] $Name, [System.Management.Automation.CommandLookupEventArgs] $eventArgs)
end {{
if ([Microsoft.PowerShell.PSConsoleReadLine]::GetHistoryItems()[-1].CommandLine -match ([regex]::Escape($Name))) {{
if (& "{exe}" hook-not-found -s pwsh -- $Name){{
_mise_hook
if (Get-Command $Name -ErrorAction SilentlyContinue){{
$EventArgs.Command = Get-Command $Name
$EventArgs.StopSearch = $true
}}
}}
}}
}}
}}
$current_command_not_found_function = $ExecutionContext.SessionState.InvokeCommand.CommandNotFoundAction
if ($current_command_not_found_function) {{
$ExecutionContext.SessionState.InvokeCommand.CommandNotFoundAction = [Delegate]::Combine($current_command_not_found_function, $_mise_pwsh_cmd_not_found_hook)
}}
else {{
$ExecutionContext.SessionState.InvokeCommand.CommandNotFoundAction = $_mise_pwsh_cmd_not_found_hook
}}
}}
__enable_mise_command_not_found
Remove-Item -ErrorAction SilentlyContinue -Path Function:/__enable_mise_command_not_found
}}
"#});
        }
        out
    }
    /// Emits PowerShell that removes the `mise` wrapper function and the
    /// session marker variables set during activation.
    fn deactivate(&self) -> String {
        formatdoc! {r#"
Remove-Item -ErrorAction SilentlyContinue function:mise
Remove-Item -ErrorAction SilentlyContinue -Path Env:/MISE_SHELL
Remove-Item -ErrorAction SilentlyContinue -Path Env:/__MISE_DIFF
Remove-Item -ErrorAction SilentlyContinue -Path Env:/__MISE_SESSION
"#}
    }

    /// Emits `$Env:K='V'` (the value is interpolated into a
    /// single-quoted string).
    fn set_env(&self, k: &str, v: &str) -> String {
        let k = powershell_escape(k.into());
        let v = powershell_escape(v.into());
        format!("$Env:{k}='{v}'\n")
    }

    /// Prepends `v` to the existing `$Env:K` using the platform path
    /// separator.
    fn prepend_env(&self, k: &str, v: &str) -> String {
        let k = powershell_escape(k.into());
        let v = powershell_escape(v.into());
        format!("$Env:{k}='{v}'+[IO.Path]::PathSeparator+$env:{k}\n")
    }

    /// Removes the variable, ignoring errors if it is already unset.
    fn unset_env(&self, k: &str) -> String {
        let k = powershell_escape(k.into());
        format!("Remove-Item -ErrorAction SilentlyContinue -Path Env:/{k}\n")
    }
}

impl Display for Pwsh {
    // Shell name as used by `-s pwsh` flags and the MISE_SHELL variable.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "pwsh")
    }
}
/// Escapes `s` for safe interpolation inside a PowerShell *single-quoted*
/// string literal, as emitted by `set_env`/`prepend_env` (`$Env:K='V'`).
///
/// Inside single quotes PowerShell treats every character literally except
/// the quote itself, which is escaped by doubling it (`''`). Backtick
/// sequences (`` `n ``, `` `t ``…) only apply in *double*-quoted strings.
///
/// Bug fixed: the previous implementation computed
/// `needs_escape = s.is_empty()`, so every non-empty string bypassed the
/// escape loop entirely (the loop was dead code) and a value containing a
/// `'` would terminate the quoted literal early — breaking, or injecting
/// code into, the generated script.
fn powershell_escape(s: Cow<str>) -> Cow<str> {
    if !s.contains('\'') {
        // Nothing to escape; return the input (borrowed or owned) as-is.
        return s;
    }
    // Double each single quote per PowerShell single-quoting rules.
    Cow::Owned(s.replace('\'', "''"))
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;
    use std::path::Path;
    use test_log::test;

    use crate::test::replace_path;

    use super::*;

    // Snapshot coverage for the generated pwsh snippets.
    #[test]
    fn test_activate() {
        // Unset __MISE_ORIG_PATH to avoid PATH restoration logic in output
        unsafe {
            std::env::remove_var("__MISE_ORIG_PATH");
            std::env::remove_var("__MISE_DIFF");
        }
        let pwsh = Pwsh::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        assert_snapshot!(pwsh.activate(opts));
    }

    #[test]
    fn test_set_env() {
        assert_snapshot!(Pwsh::default().set_env("FOO", "1"));
    }

    #[test]
    fn test_prepend_env() {
        let pwsh = Pwsh::default();
        assert_snapshot!(replace_path(&pwsh.prepend_env("PATH", "/some/dir:/2/dir")));
    }

    #[test]
    fn test_unset_env() {
        assert_snapshot!(Pwsh::default().unset_env("FOO"));
    }

    #[test]
    fn test_deactivate() {
        let deactivate = Pwsh::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/elvish.rs | src/shell/elvish.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::fmt::Display;
use crate::shell::{self, ActivateOptions, Shell};
use indoc::formatdoc;
use shell_escape::unix::escape;
/// Elvish implementation of the [`Shell`] trait.
#[derive(Default)]
pub struct Elvish {}
impl Shell for Elvish {
    /// Emits the elvish activation script: defines the hook-env glue,
    /// registers it on `after-chdir` and `edit:before-readline`, and wraps
    /// the mise binary in a shell function so `activate`/`deactivate`/`shell`
    /// can mutate the current session.
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        let exe = escape(exe.to_string_lossy());
        let mut out = String::new();
        // Re-activation: clear any previous session state first.
        out.push_str(&shell::build_deactivation_script(self));
        out.push_str(&self.format_activate_prelude(&opts.prelude));
        out.push_str(&formatdoc! {r#"
var hook-enabled = $false
fn hook-env {{
if $hook-enabled {{
eval ((external {exe}) hook-env{flags} -s elvish | slurp)
}}
}}
set after-chdir = (conj $after-chdir {{|_| hook-env }})
set edit:before-readline = (conj $edit:before-readline $hook-env~)
fn activate {{
set-env MISE_SHELL elvish
set hook-enabled = ${hook_enabled}
hook-env
}}
fn deactivate {{
set hook-enabled = $false
eval ((external {exe}) deactivate | slurp)
}}
fn mise {{|@a|
if (== (count $a) 0) {{
(external {exe})
return
}}
if (not (or (has-value $a -h) (has-value $a --help))) {{
var command = $a[0]
if (==s $command shell) {{
try {{ eval ((external {exe}) $@a) }} catch {{ }}
return
}} elif (==s $command deactivate) {{
deactivate
return
}} elif (==s $command activate) {{
activate
return
}}
}}
(external {exe}) $@a
}}
"#, hook_enabled = !opts.no_hook_env});
        out
    }
    /// Clears the mise marker variables from the session.
    fn deactivate(&self) -> String {
        formatdoc! {r#"
unset-env MISE_SHELL
unset-env __MISE_DIFF
unset-env __MISE_SESSION
"#}
    }

    fn set_env(&self, k: &str, v: &str) -> String {
        let k = shell_escape::unix::escape(k.into());
        let v = shell_escape::unix::escape(v.into());
        // NOTE(review): turns literal backslash-n sequences into real
        // newlines after escaping — presumably so multi-line values
        // survive; confirm against callers before changing.
        let v = v.replace("\\n", "\n");
        format!("set-env {k} {v}\n")
    }

    /// Prepends `v` to the current value of `k` (no separator inserted;
    /// `v` is expected to carry its own trailing separator if needed).
    fn prepend_env(&self, k: &str, v: &str) -> String {
        let k = shell_escape::unix::escape(k.into());
        let v = shell_escape::unix::escape(v.into());
        format!("set-env {k} {v}(get-env {k})\n")
    }

    fn unset_env(&self, k: &str) -> String {
        format!("unset-env {k}\n", k = shell_escape::unix::escape(k.into()))
    }
}

impl Display for Elvish {
    // Shell name as used by `-s elvish` flags and MISE_SHELL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "elvish")
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;
    use std::path::Path;
    use test_log::test;

    use crate::test::replace_path;

    use super::*;

    // Snapshot coverage for the generated elvish snippets.
    #[test]
    fn test_hook_init() {
        let elvish = Elvish::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        assert_snapshot!(elvish.activate(opts));
    }

    #[test]
    fn test_set_env() {
        assert_snapshot!(Elvish::default().set_env("FOO", "1"));
    }

    #[test]
    fn test_prepend_env() {
        let sh = Elvish::default();
        assert_snapshot!(replace_path(&sh.prepend_env("PATH", "/some/dir:/2/dir")));
    }

    #[test]
    fn test_unset_env() {
        assert_snapshot!(Elvish::default().unset_env("FOO"));
    }

    #[test]
    fn test_deactivate() {
        let deactivate = Elvish::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/nushell.rs | src/shell/nushell.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::fmt::Display;
use indoc::formatdoc;
use crate::shell::{self, ActivateOptions, ActivatePrelude, Shell};
use itertools::Itertools;
/// Nushell implementation of the [`Shell`] trait. Env changes are passed
/// to nu as CSV rows rather than eval-able script text.
#[derive(Default)]
pub struct Nushell {}
/// A single environment mutation, serialized as one CSV row
/// (`op,name,value`) for the nushell `parse vars` helper to consume.
enum EnvOp<'a> {
    Set { key: &'a str, val: &'a str },
    Hide { key: &'a str },
}

impl Display for EnvOp<'_> {
    /// Renders the op as an `op,key,value` CSV line; `hide` rows carry an
    /// empty value column so every row has exactly three fields.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (op, key, val) = match self {
            EnvOp::Set { key, val } => ("set", *key, *val),
            EnvOp::Hide { key } => ("hide", *key, ""),
        };
        writeln!(f, "{op},{key},{val}")
    }
}
impl Nushell {
    /// Quotes a CSV field per RFC 4180: wrap in double quotes (doubling any
    /// embedded quotes) only when the value contains a delimiter, quote, or
    /// line break.
    fn escape_csv_value(s: &str) -> String {
        if s.contains(['\r', '\n', '"', ',']) {
            format!("\"{}\"", s.replace('"', "\"\""))
        } else {
            s.to_owned()
        }
    }

    /// Renders the activation prelude as nushell statements (raw-string
    /// literals, so values need no escaping).
    fn format_activate_prelude_inline(&self, prelude: &[ActivatePrelude]) -> String {
        prelude
            .iter()
            .map(|p| match p {
                ActivatePrelude::SetEnv(k, v) => format!("$env.{k} = r#'{v}'#\n"),
                ActivatePrelude::PrependEnv(k, v) => self.prepend_env(k, v),
            })
            .join("")
    }

    /// CSV ops that deactivate a previous session, with the trailing
    /// newline trimmed so they can be embedded inside a quoted literal.
    fn build_deactivation_script(&self) -> String {
        let deactivation_ops = shell::build_deactivation_script(self);
        deactivation_ops.trim_end_matches('\n').to_owned()
    }
}
impl Shell for Nushell {
    /// Emits the nushell activation script. Nushell cannot `eval` arbitrary
    /// text, so mise communicates env changes as CSV rows (`op,name,value`)
    /// that the generated `parse vars`/`update-env` helpers apply.
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        // Backslashes (Windows paths) must be doubled in nu source.
        let exe = exe.to_string_lossy().replace('\\', r#"\\"#);
        let mut out = String::new();
        out.push_str(&formatdoc! {r#"
def "parse vars" [] {{
$in | from csv --noheaders --no-infer | rename 'op' 'name' 'value'
}}
def --env "update-env" [] {{
for $var in $in {{
if $var.op == "set" {{
if ($var.name | str upcase) == 'PATH' {{
$env.PATH = ($var.value | split row (char esep))
}} else {{
load-env {{($var.name): $var.value}}
}}
}} else if $var.op == "hide" and $var.name in $env {{
hide-env $var.name
}}
}}
}}
"#});
        let deactivation_ops_csv = self.build_deactivation_script();
        let inline_prelude = self.format_activate_prelude_inline(&opts.prelude);
        out.push_str(&formatdoc! {r#"
export-env {{
{inline_prelude}
'{deactivation_ops_csv}' | parse vars | update-env
$env.MISE_SHELL = "nu"
let mise_hook = {{
condition: {{ "MISE_SHELL" in $env }}
code: {{ mise_hook }}
}}
add-hook hooks.pre_prompt $mise_hook
add-hook hooks.env_change.PWD $mise_hook
}}
def --env add-hook [field: cell-path new_hook: any] {{
let field = $field | split cell-path | update optional true | into cell-path
let old_config = $env.config? | default {{}}
let old_hooks = $old_config | get $field | default []
$env.config = ($old_config | upsert $field ($old_hooks ++ [$new_hook]))
}}
export def --env --wrapped main [command?: string, --help, ...rest: string] {{
let commands = ["deactivate", "shell", "sh"]
if ($command == null) {{
^"{exe}"
}} else if ($command == "activate") {{
$env.MISE_SHELL = "nu"
}} else if ($command in $commands) {{
^"{exe}" $command ...$rest
| parse vars
| update-env
}} else {{
^"{exe}" $command ...$rest
}}
}}
def --env mise_hook [] {{
^"{exe}" hook-env{flags} -s nu
| parse vars
| update-env
}}
"#});
        out
    }
fn deactivate(&self) -> String {
[
self.unset_env("MISE_SHELL"),
self.unset_env("__MISE_DIFF"),
self.unset_env("__MISE_DIFF"),
]
.join("")
}
    /// Emits a `set` CSV row; key and value are CSV-escaped.
    fn set_env(&self, k: &str, v: &str) -> String {
        let k = Nushell::escape_csv_value(k);
        let v = Nushell::escape_csv_value(v);
        EnvOp::Set { key: &k, val: &v }.to_string()
    }

    // Emits a nu statement rather than a CSV row: prepends in place to the
    // (list-valued, PATH-like) env variable. The raw-string literal avoids
    // escaping; assumes `v` contains no `'#` sequence.
    fn prepend_env(&self, k: &str, v: &str) -> String {
        format!("$env.{k} = ($env.{k} | prepend r#'{v}'#)\n")
    }

    /// Emits a `hide` CSV row for `k`.
    fn unset_env(&self, k: &str) -> String {
        let k = Nushell::escape_csv_value(k);
        EnvOp::Hide { key: k.as_ref() }.to_string()
    }
}

impl Display for Nushell {
    // Shell name as used by `-s nu` flags and MISE_SHELL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "nu")
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;
    use std::path::Path;
    use test_log::test;

    use crate::test::replace_path;

    use super::*;

    // Snapshot coverage for the generated nushell snippets.
    #[test]
    fn test_hook_init() {
        let nushell = Nushell::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        assert_snapshot!(nushell.activate(opts));
    }

    #[test]
    fn test_set_env() {
        assert_snapshot!(Nushell::default().set_env("FOO", "1"));
    }

    #[test]
    fn test_prepend_env() {
        let sh = Nushell::default();
        assert_snapshot!(replace_path(&sh.prepend_env("PATH", "/some/dir:/2/dir")));
    }

    #[test]
    fn test_unset_env() {
        assert_snapshot!(Nushell::default().unset_env("FOO"));
    }

    #[test]
    fn test_deactivate() {
        let deactivate = Nushell::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/bash.rs | src/shell/bash.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::fmt::Display;
use indoc::formatdoc;
use shell_escape::unix::escape;
use crate::config::Settings;
use crate::shell::{self, ActivateOptions, Shell};
/// Bash implementation of the [`Shell`] trait.
#[derive(Default)]
pub struct Bash {}

impl Bash {}
impl Shell for Bash {
    /// Emits the bash activation script: a `mise` wrapper function, the
    /// `_mise_hook` env refresher wired into PROMPT_COMMAND and chpwd
    /// emulation, and optionally a `command_not_found_handle` that offers
    /// to auto-install missing tools.
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        let settings = Settings::get();
        let exe = escape(exe.to_string_lossy());
        let mut out = String::new();
        // Clean up any previous activation before re-activating.
        out.push_str(&shell::build_deactivation_script(self));
        out.push_str(&self.format_activate_prelude(&opts.prelude));
        out.push_str(&formatdoc! {r#"
export MISE_SHELL=bash
# On first activation, save the original PATH
# On re-activation, we keep the saved original
if [ -z "${{__MISE_ORIG_PATH:-}}" ]; then
export __MISE_ORIG_PATH="$PATH"
fi
mise() {{
local command
command="${{1:-}}"
if [ "$#" = 0 ]; then
command {exe}
return
fi
shift
case "$command" in
deactivate|shell|sh)
# if argv doesn't contains -h,--help
if [[ ! " $@ " =~ " --help " ]] && [[ ! " $@ " =~ " -h " ]]; then
eval "$(command {exe} "$command" "$@")"
return $?
fi
;;
esac
command {exe} "$command" "$@"
}}
_mise_hook() {{
local previous_exit_status=$?;
eval "$(mise hook-env{flags} -s bash)";
return $previous_exit_status;
}};
"#});
        // Wire the hook into PROMPT_COMMAND (idempotently) and into the
        // bundled bash chpwd emulation.
        if !opts.no_hook_env {
            out.push_str(&formatdoc! {r#"
if [[ ";${{PROMPT_COMMAND:-}};" != *";_mise_hook;"* ]]; then
PROMPT_COMMAND="_mise_hook${{PROMPT_COMMAND:+;$PROMPT_COMMAND}}"
fi
{chpwd_functions}
{chpwd_load}
chpwd_functions+=(_mise_hook)
_mise_hook
"#,
                chpwd_functions = include_str!("../assets/bash_zsh_support/chpwd/function.sh"),
                chpwd_load = include_str!("../assets/bash_zsh_support/chpwd/load.sh")
            });
        }
        // Optional command-not-found handler, chaining any pre-existing one.
        if settings.not_found_auto_install {
            out.push_str(&formatdoc! {r#"
if [ -z "${{_mise_cmd_not_found:-}}" ]; then
_mise_cmd_not_found=1
if [ -n "$(declare -f command_not_found_handle)" ]; then
_mise_cmd_not_found_handle=$(declare -f command_not_found_handle)
eval "${{_mise_cmd_not_found_handle/command_not_found_handle/_command_not_found_handle}}"
fi
command_not_found_handle() {{
if [[ "$1" != "mise" && "$1" != "mise-"* ]] && {exe} hook-not-found -s bash -- "$1"; then
_mise_hook
"$@"
elif [ -n "$(declare -f _command_not_found_handle)" ]; then
_command_not_found_handle "$@"
else
echo "bash: command not found: $1" >&2
return 127
fi
}}
fi
"#});
        }
        out
    }
    /// Removes the mise wrapper/hook functions and marker variables, and
    /// strips `_mise_hook` back out of PROMPT_COMMAND.
    fn deactivate(&self) -> String {
        formatdoc! {r#"
if [[ ${{PROMPT_COMMAND-}} == *_mise_hook* ]]; then
PROMPT_COMMAND="${{PROMPT_COMMAND//_mise_hook;/}}"
PROMPT_COMMAND="${{PROMPT_COMMAND//_mise_hook/}}"
fi
unset -f _mise_hook
unset -f mise
unset MISE_SHELL
unset __MISE_DIFF
unset __MISE_SESSION
"#}
    }

    fn set_env(&self, k: &str, v: &str) -> String {
        let k = shell_escape::unix::escape(k.into());
        let v = shell_escape::unix::escape(v.into());
        format!("export {k}={v}\n")
    }

    // NOTE(review): unlike set_env, neither k nor v is shell-escaped here;
    // a value containing quotes would break the generated line — confirm
    // PATH-style inputs are the only callers.
    fn prepend_env(&self, k: &str, v: &str) -> String {
        format!("export {k}=\"{v}:${k}\"\n")
    }

    fn unset_env(&self, k: &str) -> String {
        format!("unset {k}\n", k = shell_escape::unix::escape(k.into()))
    }

    fn set_alias(&self, name: &str, cmd: &str) -> String {
        let name = shell_escape::unix::escape(name.into());
        let cmd = shell_escape::unix::escape(cmd.into());
        format!("alias {name}={cmd}\n")
    }

    // `|| true` keeps deactivation scripts from aborting under `set -e`
    // when the alias no longer exists.
    fn unset_alias(&self, name: &str) -> String {
        let name = shell_escape::unix::escape(name.into());
        format!("unalias {name} 2>/dev/null || true\n")
    }
}

impl Display for Bash {
    // Shell name as used by `-s bash` flags and MISE_SHELL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "bash")
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;
    use std::path::Path;
    use test_log::test;

    use crate::test::replace_path;

    use super::*;

    // Snapshot coverage for the generated bash snippets.
    #[test]
    fn test_activate() {
        // Avoid PATH-restoration output leaking into the snapshot.
        unsafe {
            std::env::remove_var("__MISE_ORIG_PATH");
            std::env::remove_var("__MISE_DIFF");
        }
        let bash = Bash::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        assert_snapshot!(bash.activate(opts));
    }

    #[test]
    fn test_set_env() {
        assert_snapshot!(Bash::default().set_env("FOO", "1"));
    }

    #[test]
    fn test_prepend_env() {
        let bash = Bash::default();
        assert_snapshot!(replace_path(&bash.prepend_env("PATH", "/some/dir:/2/dir")));
    }

    #[test]
    fn test_unset_env() {
        assert_snapshot!(Bash::default().unset_env("FOO"));
    }

    #[test]
    fn test_deactivate() {
        let deactivate = Bash::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/xonsh.rs | src/shell/xonsh.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::borrow::Cow;
use std::fmt::Display;
use indoc::formatdoc;
use crate::shell::{self, ActivateOptions, Shell};
/// Xonsh implementation of the [`Shell`] trait.
#[derive(Default)]
pub struct Xonsh {}
/// Escapes `input` for embedding in a xonsh/Python single-quoted string.
///
/// Returns the original slice untouched (no allocation) when nothing needs
/// escaping; otherwise builds a new string with `'`, `\` and newlines
/// backslash-escaped.
fn xonsh_escape_sq(input: &str) -> Cow<'_, str> {
    // Locate the first character that needs escaping; bail out borrowed
    // when there is none.
    let start = match input
        .char_indices()
        .find(|&(_, ch)| xonsh_escape_char(ch).is_some())
    {
        Some((i, _)) => i,
        None => return Cow::Borrowed(input),
    };
    // Copy the clean prefix verbatim, then escape the rest char by char.
    let mut escaped = String::with_capacity(input.len());
    escaped.push_str(&input[..start]);
    for ch in input[start..].chars() {
        if let Some(esc) = xonsh_escape_char(ch) {
            escaped.push_str(esc);
        } else {
            escaped.push(ch);
        }
    }
    Cow::Owned(escaped)
}

/// Returns the escape sequence for `ch` when it is significant inside a
/// Python single-quoted string, or `None` when it can appear verbatim.
/// (docs.python.org/3/reference/lexical_analysis.html#strings)
fn xonsh_escape_char(ch: char) -> Option<&'static str> {
    match ch {
        '\'' => Some("\\'"),
        '\\' => Some("\\\\"),
        '\n' => Some("\\n"),
        _ => None,
    }
}
impl Shell for Xonsh {
    /// Emits the xonsh activation script via the Python `XSH` API (rather
    /// than `$.xsh`) so it also works from compiled `.py` config files.
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        let exe = exe.display();
        let mut out = String::new();
        out.push_str(&shell::build_deactivation_script(self));
        out.push_str(&self.format_activate_prelude(&opts.prelude));
        // use xonsh API instead of $.xsh to allow use inside of .py configs, which start faster due to being compiled to .pyc
        out.push_str(&formatdoc! {r#"
from xonsh.built_ins import XSH
def _mise(args):
    if args and args[0] in ('deactivate', 'shell', 'sh'):
        execx($(mise @(args)))
    else:
        mise @(args)
XSH.env['MISE_SHELL'] = 'xonsh'
XSH.aliases['mise'] = _mise
"#});
        // hook-env glue: refresh the environment before each prompt and on
        // directory changes via xonsh events.
        if !opts.no_hook_env {
            out.push_str(&formatdoc! {r#"
import shlex
import subprocess
extra_args = shlex.split('{flags}')
def mise_hook(**kwargs): # Hook Events
    script = subprocess.run(
        ['{exe}', 'hook-env', *extra_args, '-s', 'xonsh'],
        env=XSH.env.detype(),
        stdout=subprocess.PIPE,
    ).stdout.decode()
    execx(script)
XSH.builtins.events.on_pre_prompt(mise_hook) # Activate hook: before showing the prompt
XSH.builtins.events.on_chdir(mise_hook) # Activate hook: when the working directory changes
"#});
        }
        out
    }
    /// Emits Python that unregisters the mise event hooks, removes the
    /// alias, and clears the session marker variables.
    fn deactivate(&self) -> String {
        formatdoc! {r#"
import os
from xonsh.built_ins import XSH
hooks = {{
    'on_pre_prompt' : ['mise_hook'],
    'on_chdir': ['mise_hook'],
}}
for hook_type, hook_fns in hooks.items():
    for hook_fn in hook_fns:
        hndl = getattr(XSH.builtins.events, hook_type)
        for fn in hndl:
            if fn.__name__ == hook_fn:
                hndl.remove(fn)
                break
XSH.aliases.pop('mise', None)
XSH.env.pop('MISE_SHELL', None)
XSH.env.pop('__MISE_DIFF', None)
XSH.env.pop('__MISE_SESSION', None)
"#}
    }

    fn set_env(&self, k: &str, v: &str) -> String {
        formatdoc!(
            r#"
from xonsh.built_ins import XSH
XSH.env['{k}'] = '{v}'
"#,
            k = shell_escape::unix::escape(k.into()), // todo: drop illegal chars, not escape?
            v = xonsh_escape_sq(v)
        )
    }

    /// Prepends `v` to the (list-valued) env var via xonsh's `.add`.
    fn prepend_env(&self, k: &str, v: &str) -> String {
        formatdoc!(
            r#"
from xonsh.built_ins import XSH
XSH.env['{k}'].add('{v}', front=True)
"#,
            k = shell_escape::unix::escape(k.into()),
            v = xonsh_escape_sq(v)
        )
    }

    fn unset_env(&self, k: &str) -> String {
        formatdoc!(
            r#"
from xonsh.built_ins import XSH
XSH.env.pop('{k}',None)
"#,
            k = shell_escape::unix::escape(k.into()) // todo: drop illegal chars, not escape?
        )
    }
}

impl Display for Xonsh {
    // Shell name as used by `-s xonsh` flags and MISE_SHELL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "xonsh")
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;
    use pretty_assertions::assert_eq;
    use std::path::Path;

    use crate::test::replace_path;

    use super::*;

    // Snapshot and unit coverage for the generated xonsh snippets.
    #[test]
    fn test_hook_init() {
        let xonsh = Xonsh::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        insta::assert_snapshot!(xonsh.activate(opts));
    }

    #[test]
    fn test_set_env() {
        insta::assert_snapshot!(Xonsh::default().set_env("FOO", "1"));
    }

    #[test]
    fn test_prepend_env() {
        let sh = Xonsh::default();
        assert_snapshot!(replace_path(&sh.prepend_env("PATH", "/some/dir:/2/dir")));
    }

    #[test]
    fn test_unset_env() {
        insta::assert_snapshot!(Xonsh::default().unset_env("FOO"));
    }

    // Exercises each branch of xonsh_escape_char via the escape wrapper.
    #[test]
    fn test_xonsh_escape_sq() {
        assert_eq!(xonsh_escape_sq("foo"), "foo");
        assert_eq!(xonsh_escape_sq("foo'bar"), "foo\\'bar");
        assert_eq!(xonsh_escape_sq("foo\\bar"), "foo\\\\bar");
        assert_eq!(xonsh_escape_sq("foo\nbar"), "foo\\nbar");
    }

    #[test]
    fn test_xonsh_deactivate() {
        let deactivate = Xonsh::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/zsh.rs | src/shell/zsh.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::fmt::Display;
use indoc::formatdoc;
use shell_escape::unix::escape;
use crate::config::Settings;
use crate::shell::bash::Bash;
use crate::shell::{self, ActivateOptions, Shell};
/// Zsh implementation of the [`Shell`] trait. Several methods delegate to
/// [`Bash`] since the generated syntax is identical.
#[derive(Default)]
pub struct Zsh {}

impl Zsh {}
impl Shell for Zsh {
    /// Emits the zsh activation script: a `mise` wrapper function plus
    /// precmd/chpwd hook functions (much of the glue mirrors direnv's).
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        let exe = escape(exe.to_string_lossy());
        let mut out = String::new();
        // Clean up any previous activation before re-activating.
        out.push_str(&shell::build_deactivation_script(self));
        out.push_str(&self.format_activate_prelude(&opts.prelude));
        // much of this is from direnv
        // https://github.com/direnv/direnv/blob/cb5222442cb9804b1574954999f6073cc636eff0/internal/cmd/shell_zsh.go#L10-L22
        out.push_str(&formatdoc! {r#"
export MISE_SHELL=zsh
if [ -z "${{__MISE_ORIG_PATH:-}}" ]; then
export __MISE_ORIG_PATH="$PATH"
fi
export __MISE_ZSH_PRECMD_RUN=0
mise() {{
local command
command="${{1:-}}"
if [ "$#" = 0 ]; then
command {exe}
return
fi
shift
case "$command" in
deactivate|shell|sh)
# if argv doesn't contains -h,--help
if [[ ! " $@ " =~ " --help " ]] && [[ ! " $@ " =~ " -h " ]]; then
eval "$(command {exe} "$command" "$@")"
return $?
fi
;;
esac
command {exe} "$command" "$@"
}}
"#});
        // Install the hook functions idempotently at the front of the
        // precmd/chpwd arrays.
        if !opts.no_hook_env {
            out.push_str(&formatdoc! {r#"
_mise_hook() {{
eval "$({exe} hook-env{flags} -s zsh)";
}}
_mise_hook_precmd() {{
eval "$({exe} hook-env{flags} -s zsh --reason precmd)";
}}
_mise_hook_chpwd() {{
eval "$({exe} hook-env{flags} -s zsh --reason chpwd)";
}}
typeset -ag precmd_functions;
if [[ -z "${{precmd_functions[(r)_mise_hook_precmd]+1}}" ]]; then
precmd_functions=( _mise_hook_precmd ${{precmd_functions[@]}} )
fi
typeset -ag chpwd_functions;
if [[ -z "${{chpwd_functions[(r)_mise_hook_chpwd]+1}}" ]]; then
chpwd_functions=( _mise_hook_chpwd ${{chpwd_functions[@]}} )
fi
_mise_hook
"#});
        }
        // Optional command-not-found handler, chaining any pre-existing one.
        if Settings::get().not_found_auto_install {
            out.push_str(&formatdoc! {r#"
if [ -z "${{_mise_cmd_not_found:-}}" ]; then
_mise_cmd_not_found=1
[ -n "$(declare -f command_not_found_handler)" ] && eval "${{$(declare -f command_not_found_handler)/command_not_found_handler/_command_not_found_handler}}"
function command_not_found_handler() {{
if [[ "$1" != "mise" && "$1" != "mise-"* ]] && {exe} hook-not-found -s zsh -- "$1"; then
_mise_hook
"$@"
elif [ -n "$(declare -f _command_not_found_handler)" ]; then
_command_not_found_handler "$@"
else
echo "zsh: command not found: $1" >&2
return 127
fi
}}
fi
"#});
        }
        out
    }
    /// Removes the mise hook functions from the precmd/chpwd arrays, then
    /// unsets the wrapper function and marker variables.
    fn deactivate(&self) -> String {
        formatdoc! {r#"
precmd_functions=( ${{precmd_functions:#_mise_hook_precmd}} )
chpwd_functions=( ${{chpwd_functions:#_mise_hook_chpwd}} )
(( $+functions[_mise_hook_precmd] )) && unset -f _mise_hook_precmd
(( $+functions[_mise_hook_chpwd] )) && unset -f _mise_hook_chpwd
(( $+functions[_mise_hook] )) && unset -f _mise_hook
(( $+functions[mise] )) && unset -f mise
unset MISE_SHELL
unset __MISE_DIFF
unset __MISE_SESSION
unset __MISE_ZSH_PRECMD_RUN
"#}
    }

    // The export/unset/alias syntax is identical to bash; delegate.
    fn set_env(&self, k: &str, v: &str) -> String {
        Bash::default().set_env(k, v)
    }

    // NOTE(review): same as the bash version — k and v are interpolated
    // unescaped; confirm PATH-style inputs are the only callers.
    fn prepend_env(&self, k: &str, v: &str) -> String {
        format!("export {k}=\"{v}:${k}\"\n")
    }

    fn unset_env(&self, k: &str) -> String {
        Bash::default().unset_env(k)
    }

    fn set_alias(&self, name: &str, cmd: &str) -> String {
        Bash::default().set_alias(name, cmd)
    }

    fn unset_alias(&self, name: &str) -> String {
        Bash::default().unset_alias(name)
    }
}

impl Display for Zsh {
    // Shell name as used by `-s zsh` flags and MISE_SHELL.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "zsh")
    }
}
#[cfg(test)]
mod tests {
    use insta::assert_snapshot;
    use std::path::Path;
    use test_log::test;

    use crate::test::replace_path;

    use super::*;

    // Snapshot coverage for the generated zsh snippets.
    #[test]
    fn test_activate() {
        // Unset __MISE_ORIG_PATH to avoid PATH restoration logic in output
        unsafe {
            std::env::remove_var("__MISE_ORIG_PATH");
            std::env::remove_var("__MISE_DIFF");
        }
        let zsh = Zsh::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        assert_snapshot!(zsh.activate(opts));
    }

    #[test]
    fn test_set_env() {
        assert_snapshot!(Zsh::default().set_env("FOO", "1"));
    }

    // Bug fixed: this zsh test previously instantiated Bash::default(),
    // so the type under test was never exercised. Zsh::prepend_env emits
    // identical output, so the existing snapshot still matches.
    #[test]
    fn test_prepend_env() {
        let sh = Zsh::default();
        assert_snapshot!(replace_path(&sh.prepend_env("PATH", "/some/dir:/2/dir")));
    }

    #[test]
    fn test_unset_env() {
        assert_snapshot!(Zsh::default().unset_env("FOO"));
    }

    #[test]
    fn test_deactivate() {
        let deactivate = Zsh::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/mod.rs | src/shell/mod.rs | use crate::env;
use crate::hook_env;
use itertools::Itertools;
use std::fmt::{Display, Formatter};
use std::path::PathBuf;
use std::str::FromStr;
mod bash;
mod elvish;
mod fish;
mod nushell;
mod pwsh;
mod xonsh;
mod zsh;
/// The shells mise can generate activation/env scripts for.
/// `clap::ValueEnum` lets these be used directly as CLI argument values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum)]
pub enum ShellType {
    Bash,
    Elvish,
    Fish,
    Nu,
    Xonsh,
    Zsh,
    Pwsh,
}
impl ShellType {
    /// Returns the boxed [`Shell`] implementation for this shell type.
    pub fn as_shell(&self) -> Box<dyn Shell> {
        match self {
            Self::Bash => Box::<bash::Bash>::default(),
            Self::Elvish => Box::<elvish::Elvish>::default(),
            Self::Fish => Box::<fish::Fish>::default(),
            Self::Nu => Box::<nushell::Nushell>::default(),
            Self::Xonsh => Box::<xonsh::Xonsh>::default(),
            Self::Zsh => Box::<zsh::Zsh>::default(),
            Self::Pwsh => Box::<pwsh::Pwsh>::default(),
        }
    }
}

impl Display for ShellType {
    // Canonical lowercase name, matching what FromStr accepts.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Bash => write!(f, "bash"),
            Self::Elvish => write!(f, "elvish"),
            Self::Fish => write!(f, "fish"),
            Self::Nu => write!(f, "nu"),
            Self::Xonsh => write!(f, "xonsh"),
            Self::Zsh => write!(f, "zsh"),
            Self::Pwsh => write!(f, "pwsh"),
        }
    }
}
impl FromStr for ShellType {
    type Err = String;

    /// Parses a shell name case-insensitively; a full path such as
    /// `/bin/zsh` is accepted by keeping only the final path component.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        let name = match lowered.rsplit_once('/') {
            Some((_, base)) => base,
            None => lowered.as_str(),
        };
        match name {
            "bash" | "sh" => Ok(Self::Bash),
            "elvish" => Ok(Self::Elvish),
            "fish" => Ok(Self::Fish),
            "nu" => Ok(Self::Nu),
            "xonsh" => Ok(Self::Xonsh),
            "zsh" => Ok(Self::Zsh),
            "pwsh" => Ok(Self::Pwsh),
            _ => Err(format!("unsupported shell type: {name}")),
        }
    }
}
/// Per-shell code generation: every method returns a snippet of shell code.
pub trait Shell: Display {
    /// Full activation script (wrapper function, env exports, prompt hooks).
    fn activate(&self, opts: ActivateOptions) -> String;
    /// Script undoing everything `activate` installed.
    fn deactivate(&self) -> String;
    /// Snippet exporting `k=v`.
    fn set_env(&self, k: &str, v: &str) -> String;
    /// Snippet prepending `v` to the list-valued variable `k`.
    fn prepend_env(&self, k: &str, v: &str) -> String;
    /// Snippet unsetting `k`.
    fn unset_env(&self, k: &str) -> String;
    /// Set a shell alias. Returns empty string if not supported by this shell.
    fn set_alias(&self, name: &str, cmd: &str) -> String {
        // default: aliases unsupported; silence unused-arg warnings
        let _ = (name, cmd);
        String::new()
    }
    /// Unset a shell alias. Returns empty string if not supported by this shell.
    fn unset_alias(&self, name: &str) -> String {
        // default: aliases unsupported
        let _ = name;
        String::new()
    }
    /// Render the prelude statements emitted ahead of the activation body.
    fn format_activate_prelude(&self, prelude: &[ActivatePrelude]) -> String {
        let mut out = String::new();
        for stmt in prelude {
            let snippet = match stmt {
                ActivatePrelude::SetEnv(k, v) => self.set_env(k, v),
                ActivatePrelude::PrependEnv(k, v) => self.prepend_env(k, v),
            };
            out.push_str(&snippet);
        }
        out
    }
}
/// Env statements emitted before the activation body (see `Shell::format_activate_prelude`).
pub enum ActivatePrelude {
    /// Export an environment variable: (key, value)
    SetEnv(String, String),
    /// Prepend a value to a list-valued variable such as PATH: (key, value)
    PrependEnv(String, String),
}
/// Options controlling the generated `mise activate` script.
pub struct ActivateOptions {
    /// Path to the mise executable embedded into the generated script
    pub exe: PathBuf,
    /// Extra flags appended to `hook-env` invocations (e.g. " --status")
    pub flags: String,
    /// When true, do not install the hook-env prompt hooks
    pub no_hook_env: bool,
    /// Env statements emitted before the activation body
    pub prelude: Vec<ActivatePrelude>,
}
/// Build the script that fully deactivates mise in the current shell:
/// clears mise-managed env vars and aliases, then removes the hooks.
/// Returns an empty string when mise is not currently activated.
pub fn build_deactivation_script(shell: &dyn Shell) -> String {
    if !env::is_activated() {
        return String::new();
    }
    format!(
        "{}{}{}",
        hook_env::clear_old_env(shell),
        hook_env::clear_aliases(shell),
        shell.deactivate()
    )
}
/// Resolve a shell implementation: the explicit argument wins, otherwise
/// fall back to the shell recorded in `MISE_SHELL`.
pub fn get_shell(shell: Option<ShellType>) -> Option<Box<dyn Shell>> {
    let shell_type = shell.or(*env::MISE_SHELL)?;
    Some(shell_type.as_shell())
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shell/fish.rs | src/shell/fish.rs | #![allow(unknown_lints)]
#![allow(clippy::literal_string_with_formatting_args)]
use std::fmt::{Display, Formatter};
use crate::config::Settings;
use crate::env::{self};
use crate::shell::{self, ActivateOptions, Shell};
use indoc::formatdoc;
use itertools::Itertools;
use shell_escape::unix::escape;
/// Fish shell integration: generates the `mise activate`, `hook-env`, and
/// env-manipulation snippets in fish syntax.
#[derive(Default)]
pub struct Fish {}
impl Fish {}
impl Shell for Fish {
    // Emits: deactivation of any prior session, prelude env exports, the
    // `mise` wrapper function, prompt/cd hooks, and (optionally) the
    // command-not-found auto-install handler.
    fn activate(&self, opts: ActivateOptions) -> String {
        let exe = opts.exe;
        let flags = opts.flags;
        let exe = escape(exe.to_string_lossy());
        let description = "'Update mise environment when changing directories'";
        let mut out = String::new();
        out.push_str(&shell::build_deactivation_script(self));
        out.push_str(&self.format_activate_prelude(&opts.prelude));
        // much of this is from direnv
        // https://github.com/direnv/direnv/blob/cb5222442cb9804b1574954999f6073cc636eff0/internal/cmd/shell_fish.go#L14-L36
        out.push_str(&formatdoc! {r#"
            set -gx MISE_SHELL fish
            if not set -q __MISE_ORIG_PATH
              set -gx __MISE_ORIG_PATH $PATH
            end
            function mise
              if test (count $argv) -eq 0
                command {exe}
                return
              end
              set command $argv[1]
              set -e argv[1]
              if contains -- --help $argv
                command {exe} "$command" $argv
                return $status
              end
              switch "$command"
                case deactivate shell sh
                  # if help is requested, don't eval
                  if contains -- -h $argv
                    command {exe} "$command" $argv
                  else if contains -- --help $argv
                    command {exe} "$command" $argv
                  else
                    source (command {exe} "$command" $argv |psub)
                  end
                case '*'
                  command {exe} "$command" $argv
              end
            end
        "#});
        // Prompt hooks: re-evaluate hook-env on each prompt and (depending on
        // $mise_fish_mode) on directory changes.
        if !opts.no_hook_env {
            out.push_str(&formatdoc! {r#"
                function __mise_env_eval --on-event fish_prompt --description {description};
                    {exe} hook-env{flags} -s fish | source;
                    if test "$mise_fish_mode" != "disable_arrow";
                        function __mise_cd_hook --on-variable PWD --description {description};
                            if test "$mise_fish_mode" = "eval_after_arrow";
                                set -g __mise_env_again 0;
                            else;
                                {exe} hook-env{flags} -s fish | source;
                            end;
                        end;
                    end;
                end;
                function __mise_env_eval_2 --on-event fish_preexec --description {description};
                    if set -q __mise_env_again;
                        set -e __mise_env_again;
                        {exe} hook-env{flags} -s fish | source;
                        echo;
                    end;
                    functions --erase __mise_cd_hook;
                end;
                __mise_env_eval
            "#});
        }
        // Optional handler that offers to install missing tools, chaining to
        // any pre-existing fish_command_not_found handler.
        if Settings::get().not_found_auto_install {
            out.push_str(&formatdoc! {r#"
                if functions -q fish_command_not_found; and not functions -q __mise_fish_command_not_found
                    functions -e __mise_fish_command_not_found
                    functions -c fish_command_not_found __mise_fish_command_not_found
                end
                function fish_command_not_found
                    if string match -qrv -- '^(?:mise$|mise-)' $argv[1] &&
                        {exe} hook-not-found -s fish -- $argv[1]
                        {exe} hook-env{flags} -s fish | source
                    else if functions -q __mise_fish_command_not_found
                        __mise_fish_command_not_found $argv
                    else
                        __fish_default_command_not_found_handler $argv
                    end
                end
            "#});
        }
        out
    }
    // Removes everything `activate` installed: hooks, the wrapper function,
    // and mise session state variables.
    fn deactivate(&self) -> String {
        formatdoc! {r#"
            functions --erase __mise_env_eval
            functions --erase __mise_env_eval_2
            functions --erase __mise_cd_hook
            functions --erase mise
            set -e MISE_SHELL
            set -e __MISE_DIFF
            set -e __MISE_SESSION
        "#}
    }
    fn set_env(&self, key: &str, v: &str) -> String {
        let k = escape(key.into());
        // Fish uses space-separated list for PATH, not colon-separated string
        if key == "PATH" {
            let paths = v.split(':').map(|p| escape(p.into())).join(" ");
            format!("set -gx PATH {paths}\n")
        } else {
            let v = escape(v.into());
            format!("set -gx {k} {v}\n")
        }
    }
    fn prepend_env(&self, key: &str, value: &str) -> String {
        let k = escape(key.into());
        match key {
            // PATH gets one `fish_add_path` per entry; empty entries dropped
            env_key if env_key == *env::PATH_KEY => env::split_paths(value)
                .filter_map(|path| {
                    let path_str = path.to_str()?;
                    if path_str.is_empty() {
                        None
                    } else {
                        Some(format!(
                            "fish_add_path --global --path {}\n",
                            escape(path_str.into())
                        ))
                    }
                })
                .collect::<String>(),
            _ => {
                let v = escape(value.into());
                format!("set -gx {k} {v} ${k}\n")
            }
        }
    }
    fn unset_env(&self, k: &str) -> String {
        format!("set -e {k}\n", k = escape(k.into()))
    }
    fn set_alias(&self, name: &str, cmd: &str) -> String {
        let name = escape(name.into());
        let cmd = escape(cmd.into());
        format!("alias {name} {cmd}\n")
    }
    // fish aliases are functions under the hood, so erase the function
    fn unset_alias(&self, name: &str) -> String {
        let name = escape(name.into());
        format!("functions -e {name}\n")
    }
}
impl Display for Fish {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "fish")
    }
}
// Snapshot tests (insta): each asserts the generated fish code against a
// stored snapshot rather than an inline expected string.
#[cfg(all(test, not(windows)))]
mod tests {
    use insta::assert_snapshot;
    use std::path::Path;
    use test_log::test;
    use crate::test::replace_path;
    use super::*;
    #[test]
    fn test_activate() {
        // Unset __MISE_ORIG_PATH to avoid PATH restoration logic in output
        unsafe {
            std::env::remove_var("__MISE_ORIG_PATH");
            std::env::remove_var("__MISE_DIFF");
        }
        let fish = Fish::default();
        let exe = Path::new("/some/dir/mise");
        let opts = ActivateOptions {
            exe: exe.to_path_buf(),
            flags: " --status".into(),
            no_hook_env: false,
            prelude: vec![],
        };
        assert_snapshot!(fish.activate(opts));
    }
    #[test]
    fn test_set_env() {
        assert_snapshot!(Fish::default().set_env("FOO", "1"));
    }
    #[test]
    fn test_prepend_env() {
        let sh = Fish::default();
        // replace_path normalizes machine-specific paths in the snapshot
        assert_snapshot!(replace_path(&sh.prepend_env("PATH", "/some/dir:/2/dir")));
    }
    #[test]
    fn test_unset_env() {
        assert_snapshot!(Fish::default().unset_env("FOO"));
    }
    #[test]
    fn test_deactivate() {
        let deactivate = Fish::default().deactivate();
        assert_snapshot!(replace_path(&deactivate));
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/engine.rs | src/prepare/engine.rs | use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::SystemTime;
use eyre::Result;
use crate::cmd::CmdLineRunner;
use crate::config::{Config, Settings};
use crate::parallel;
use crate::ui::multi_progress_report::MultiProgressReport;
use super::PrepareProvider;
use super::providers::{
BunPrepareProvider, BundlerPrepareProvider, ComposerPrepareProvider, CustomPrepareProvider,
GoPrepareProvider, NpmPrepareProvider, PipPrepareProvider, PnpmPrepareProvider,
PoetryPrepareProvider, UvPrepareProvider, YarnPrepareProvider,
};
use super::rule::{BUILTIN_PROVIDERS, PrepareConfig};
/// Options for running prepare steps
#[derive(Debug, Default)]
pub struct PrepareOptions {
    /// Only check if prepare is needed, don't run commands
    pub dry_run: bool,
    /// Force run all prepare steps even if outputs are fresh
    pub force: bool,
    /// Run specific prepare rule(s) only; `None` disables the filter
    pub only: Option<Vec<String>>,
    /// Skip specific prepare rule(s)
    pub skip: Vec<String>,
    /// Environment variables to pass to prepare commands (e.g., toolset PATH)
    pub env: BTreeMap<String, String>,
    /// If true, only run providers with auto=true
    pub auto_only: bool,
}
/// Result of a prepare step
///
/// Each variant carries the id of the provider the step belongs to.
#[derive(Debug)]
pub enum PrepareStepResult {
    /// Step ran successfully
    Ran(String),
    /// Step would have run (dry-run mode)
    WouldRun(String),
    /// Step was skipped because outputs are fresh
    Fresh(String),
    /// Step was skipped by user request
    Skipped(String),
}
/// Result of running all prepare steps
#[derive(Debug)]
pub struct PrepareResult {
    pub steps: Vec<PrepareStepResult>,
}
impl PrepareResult {
    /// True when at least one step ran, or would have run under dry-run.
    pub fn had_work(&self) -> bool {
        // exhaustive match so a new variant forces a decision here
        self.steps.iter().any(|step| match step {
            PrepareStepResult::Ran(_) | PrepareStepResult::WouldRun(_) => true,
            PrepareStepResult::Fresh(_) | PrepareStepResult::Skipped(_) => false,
        })
    }
}
/// Engine that discovers and runs prepare providers
pub struct PrepareEngine {
    // full mise config; used for project_root lookup and config-file scanning
    config: Arc<Config>,
    // providers discovered from [prepare] config, already filtered for applicability
    providers: Vec<Box<dyn PrepareProvider>>,
}
impl PrepareEngine {
    /// Create a new PrepareEngine, discovering all applicable providers
    pub fn new(config: Arc<Config>) -> Result<Self> {
        let providers = Self::discover_providers(&config)?;
        // Only require experimental when prepare is actually configured
        if !providers.is_empty() {
            Settings::get().ensure_experimental("prepare")?;
        }
        Ok(Self { config, providers })
    }
    /// Discover all applicable prepare providers for the current project
    fn discover_providers(config: &Config) -> Result<Vec<Box<dyn PrepareProvider>>> {
        let project_root = config
            .project_root
            .clone()
            .unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
        let mut providers: Vec<Box<dyn PrepareProvider>> = vec![];
        // Load prepare config from mise.toml
        // (later config files win on conflicts, per PrepareConfig::merge)
        let prepare_config = config
            .config_files
            .values()
            .filter_map(|cf| cf.prepare_config())
            .fold(PrepareConfig::default(), |acc, pc| acc.merge(&pc));
        // Iterate over all configured providers
        for (id, provider_config) in &prepare_config.providers {
            let provider: Box<dyn PrepareProvider> = if BUILTIN_PROVIDERS.contains(&id.as_str()) {
                // Built-in provider with specialized implementation
                match id.as_str() {
                    // Node.js package managers
                    "npm" => Box::new(NpmPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    "yarn" => Box::new(YarnPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    "pnpm" => Box::new(PnpmPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    "bun" => Box::new(BunPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    // Go
                    "go" => Box::new(GoPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    // Python
                    "pip" => Box::new(PipPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    "poetry" => Box::new(PoetryPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    "uv" => Box::new(UvPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    // Ruby
                    "bundler" => Box::new(BundlerPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    // PHP
                    "composer" => Box::new(ComposerPrepareProvider::new(
                        &project_root,
                        provider_config.clone(),
                    )),
                    _ => continue, // Skip unimplemented built-ins
                }
            } else {
                // Custom provider
                Box::new(CustomPrepareProvider::new(
                    id.clone(),
                    provider_config.clone(),
                    project_root.clone(),
                ))
            };
            if provider.is_applicable() {
                providers.push(provider);
            }
        }
        // Filter disabled providers
        providers.retain(|p| !prepare_config.disable.contains(&p.id().to_string()));
        Ok(providers)
    }
    /// List all discovered providers
    pub fn list_providers(&self) -> Vec<&dyn PrepareProvider> {
        self.providers.iter().map(|p| p.as_ref()).collect()
    }
    /// Check if any auto-enabled provider has stale outputs (without running)
    /// Returns the IDs of stale providers
    pub fn check_staleness(&self) -> Vec<&str> {
        self.providers
            .iter()
            .filter(|p| p.is_auto())
            // freshness-check errors are treated as "fresh" here (unwrap_or(true))
            .filter(|p| !self.check_freshness(p.as_ref()).unwrap_or(true))
            .map(|p| p.id())
            .collect()
    }
    /// Run all stale prepare steps in parallel
    pub async fn run(&self, opts: PrepareOptions) -> Result<PrepareResult> {
        let mut results = vec![];
        // Collect providers that need to run with their commands
        let mut to_run: Vec<(String, super::PrepareCommand)> = vec![];
        for provider in &self.providers {
            let id = provider.id().to_string();
            // Check auto_only filter
            if opts.auto_only && !provider.is_auto() {
                trace!("prepare step {} is not auto, skipping", id);
                results.push(PrepareStepResult::Skipped(id));
                continue;
            }
            // Check skip list
            if opts.skip.contains(&id) {
                results.push(PrepareStepResult::Skipped(id));
                continue;
            }
            // Check only list
            if let Some(ref only) = opts.only
                && !only.contains(&id)
            {
                results.push(PrepareStepResult::Skipped(id));
                continue;
            }
            // --force bypasses the freshness check entirely
            let is_fresh = if opts.force {
                false
            } else {
                self.check_freshness(provider.as_ref())?
            };
            if !is_fresh {
                let cmd = provider.prepare_command()?;
                if opts.dry_run {
                    // Just record that it would run, let CLI handle output
                    results.push(PrepareStepResult::WouldRun(id));
                } else {
                    to_run.push((id, cmd));
                }
            } else {
                trace!("prepare step {} is fresh, skipping", id);
                results.push(PrepareStepResult::Fresh(id));
            }
        }
        // Run stale providers in parallel
        if !to_run.is_empty() {
            let mpr = MultiProgressReport::get();
            let project_root = self
                .config
                .project_root
                .clone()
                .unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
            let toolset_env = opts.env.clone();
            // Include all data in the tuple so closure doesn't capture anything
            let to_run_with_context: Vec<_> = to_run
                .into_iter()
                .map(|(id, cmd)| {
                    (
                        id,
                        cmd,
                        mpr.clone(),
                        project_root.clone(),
                        toolset_env.clone(),
                    )
                })
                .collect();
            let run_results = parallel::parallel(
                to_run_with_context,
                |(id, cmd, mpr, project_root, toolset_env)| async move {
                    // one progress-report entry per command
                    let pr = mpr.add(&cmd.description);
                    match Self::execute_prepare_static(&cmd, &toolset_env, &project_root) {
                        Ok(()) => {
                            pr.finish_with_message(format!("{} done", cmd.description));
                            Ok(PrepareStepResult::Ran(id))
                        }
                        Err(e) => {
                            pr.finish_with_message(format!("{} failed: {}", cmd.description, e));
                            Err(e)
                        }
                    }
                },
            )
            .await?;
            results.extend(run_results);
        }
        Ok(PrepareResult { steps: results })
    }
    /// Check if outputs are newer than sources (stateless mtime comparison)
    fn check_freshness(&self, provider: &dyn PrepareProvider) -> Result<bool> {
        let sources = provider.sources();
        let outputs = provider.outputs();
        if outputs.is_empty() {
            return Ok(false); // No outputs defined, always run to be safe
        }
        // Note: empty sources is handled below - last_modified([]) returns None,
        // and if outputs don't exist either, (_, None) takes precedence → stale
        let sources_mtime = Self::last_modified(&sources)?;
        let outputs_mtime = Self::last_modified(&outputs)?;
        match (sources_mtime, outputs_mtime) {
            (Some(src), Some(out)) => Ok(src <= out), // Fresh if outputs newer or equal to sources
            (_, None) => Ok(false), // No outputs exist, not fresh (takes precedence)
            (None, _) => Ok(true),  // No sources exist, consider fresh
        }
    }
    /// Get the most recent modification time from a list of paths
    /// For directories, recursively finds the newest file within (up to 3 levels deep)
    fn last_modified(paths: &[PathBuf]) -> Result<Option<SystemTime>> {
        let mut mtimes: Vec<SystemTime> = vec![];
        for path in paths.iter().filter(|p| p.exists()) {
            if path.is_dir() {
                // For directories, find the newest file within (limited depth for performance)
                if let Some(mtime) = Self::newest_file_in_dir(path, 3) {
                    mtimes.push(mtime);
                }
            } else if let Some(mtime) = path.metadata().ok().and_then(|m| m.modified().ok()) {
                mtimes.push(mtime);
            }
        }
        Ok(mtimes.into_iter().max())
    }
    /// Recursively find the newest file modification time in a directory
    fn newest_file_in_dir(dir: &Path, max_depth: usize) -> Option<SystemTime> {
        // at the depth limit, fall back to the directory's own mtime
        if max_depth == 0 {
            return dir.metadata().ok().and_then(|m| m.modified().ok());
        }
        let mut newest: Option<SystemTime> = None;
        if let Ok(entries) = std::fs::read_dir(dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                let mtime = if path.is_dir() {
                    Self::newest_file_in_dir(&path, max_depth - 1)
                } else {
                    path.metadata().ok().and_then(|m| m.modified().ok())
                };
                if let Some(t) = mtime {
                    newest = Some(newest.map_or(t, |n| n.max(t)));
                }
            }
        }
        newest
    }
    /// Execute a prepare command (static version for parallel execution)
    fn execute_prepare_static(
        cmd: &super::PrepareCommand,
        toolset_env: &BTreeMap<String, String>,
        default_project_root: &Path,
    ) -> Result<()> {
        let cwd = cmd
            .cwd
            .clone()
            .unwrap_or_else(|| default_project_root.to_path_buf());
        let mut runner = CmdLineRunner::new(&cmd.program)
            .args(&cmd.args)
            .current_dir(cwd);
        // Apply toolset environment (includes PATH with installed tools)
        for (k, v) in toolset_env {
            runner = runner.env(k, v);
        }
        // Apply command-specific environment (can override toolset env)
        for (k, v) in &cmd.env {
            runner = runner.env(k, v);
        }
        // Use raw output for better UX during dependency installation
        if Settings::get().raw {
            runner = runner.raw(true);
        }
        runner.execute()?;
        Ok(())
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/mod.rs | src/prepare/mod.rs | use std::collections::BTreeMap;
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use eyre::{Result, bail};
use crate::config::{Config, Settings};
use crate::env;
pub use engine::{PrepareEngine, PrepareOptions, PrepareStepResult};
pub use rule::PrepareConfig;
mod engine;
pub mod providers;
mod rule;
/// A command to execute for preparation
#[derive(Debug, Clone)]
pub struct PrepareCommand {
    /// The program to execute
    pub program: String,
    /// Arguments to pass to the program
    pub args: Vec<String>,
    /// Environment variables to set
    pub env: BTreeMap<String, String>,
    /// Working directory (executor falls back to the project root when `None`)
    pub cwd: Option<PathBuf>,
    /// Human-readable description of what this command does
    pub description: String,
}
impl PrepareCommand {
/// Create a PrepareCommand from a run string like "npm install"
///
/// Uses shell-aware parsing to handle quoted arguments correctly.
pub fn from_string(
run: &str,
project_root: &Path,
config: &rule::PrepareProviderConfig,
) -> Result<Self> {
let parts = shell_words::split(run).map_err(|e| eyre::eyre!("invalid command: {e}"))?;
if parts.is_empty() {
bail!("prepare run command cannot be empty");
}
let (program, args) = parts.split_first().unwrap();
Ok(Self {
program: program.to_string(),
args: args.to_vec(),
env: config.env.clone(),
cwd: config
.dir
.as_ref()
.map(|d| project_root.join(d))
.or_else(|| Some(project_root.to_path_buf())),
description: config
.description
.clone()
.unwrap_or_else(|| run.to_string()),
})
}
}
/// Trait for prepare providers that can check and install dependencies
pub trait PrepareProvider: Debug + Send + Sync {
    /// Unique identifier for this provider (e.g., "npm", "cargo", "codegen")
    fn id(&self) -> &str;
    /// Returns the source files to check for freshness (lock files, config files)
    fn sources(&self) -> Vec<PathBuf>;
    /// Returns the output files/directories that should be newer than sources
    fn outputs(&self) -> Vec<PathBuf>;
    /// The command to run when outputs are stale relative to sources
    fn prepare_command(&self) -> Result<PrepareCommand>;
    /// Whether this provider is applicable (e.g., lockfile exists)
    fn is_applicable(&self) -> bool;
    /// Whether this provider should auto-run before mise x/run
    /// (defaults to false; implementations typically forward their config's `auto` flag)
    fn is_auto(&self) -> bool {
        false
    }
}
/// Warn if any auto-enabled prepare providers are stale
pub fn notify_if_stale(config: &Arc<Config>) {
    let settings = Settings::get();
    // Stay silent inside shims, in quiet mode, or when the status flag is off.
    if *env::__MISE_SHIM || settings.quiet || !settings.status.show_prepare_stale {
        return;
    }
    // Engine construction failures are deliberately ignored here; this is a
    // best-effort notification, not a hard check.
    let engine = match PrepareEngine::new(config.clone()) {
        Ok(engine) => engine,
        Err(_) => return,
    };
    let stale = engine.check_staleness();
    if stale.is_empty() {
        return;
    }
    let providers = stale.join(", ");
    warn!("prepare: {providers} may need update, run `mise prep`");
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/rule.rs | src/prepare/rule.rs | use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};
/// List of built-in provider names that have specialized implementations
///
/// Must stay in sync with the `match` in `PrepareEngine::discover_providers`.
pub const BUILTIN_PROVIDERS: &[&str] = &[
    "npm", "yarn", "pnpm", "bun", // Node.js
    "go", // Go
    "pip", // Python (requirements.txt)
    "poetry", // Python (poetry)
    "uv", // Python (uv)
    "bundler", // Ruby
    "composer", // PHP
];
/// Configuration for a prepare provider (both built-in and custom)
///
/// Built-in providers have auto-detected sources/outputs and default run commands.
/// Custom providers require explicit sources, outputs, and run.
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct PrepareProviderConfig {
    /// Whether to auto-run this provider before mise x/run (default: false)
    #[serde(default)]
    pub auto: bool,
    /// Command to run when stale (required for custom, optional override for built-in)
    pub run: Option<String>,
    /// Files/patterns to check for changes (required for custom, auto-detected for built-in)
    #[serde(default)]
    pub sources: Vec<String>,
    /// Files/directories that should be newer than sources (required for custom, auto-detected for built-in)
    #[serde(default)]
    pub outputs: Vec<String>,
    /// Environment variables to set
    #[serde(default)]
    pub env: BTreeMap<String, String>,
    /// Working directory (resolved relative to the project root)
    pub dir: Option<String>,
    /// Optional description (used as the progress/display label)
    pub description: Option<String>,
}
impl PrepareProviderConfig {
    /// A rule is "custom" when it supplies its own run command and does not
    /// name one of the built-in providers.
    pub fn is_custom(&self, name: &str) -> bool {
        self.run.is_some() && !BUILTIN_PROVIDERS.contains(&name)
    }
}
/// Top-level [prepare] configuration section
///
/// All providers are configured at the same level:
/// - `[prepare.npm]` - built-in npm provider
/// - `[prepare.codegen]` - custom provider
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct PrepareConfig {
    /// List of provider IDs to disable at runtime
    #[serde(default)]
    pub disable: Vec<String>,
    /// All provider configurations (both built-in and custom)
    /// (flattened: every `[prepare.<name>]` table except `disable` lands here)
    #[serde(flatten)]
    pub providers: BTreeMap<String, PrepareProviderConfig>,
}
impl PrepareConfig {
/// Merge two PrepareConfigs, with `other` taking precedence
pub fn merge(&self, other: &PrepareConfig) -> PrepareConfig {
let mut providers = self.providers.clone();
for (k, v) in &other.providers {
providers.insert(k.clone(), v.clone());
}
let mut disable = self.disable.clone();
disable.extend(other.disable.clone());
PrepareConfig { disable, providers }
}
/// Get a provider config by name
pub fn get(&self, name: &str) -> Option<&PrepareProviderConfig> {
self.providers.get(name)
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/yarn.rs | src/prepare/providers/yarn.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for yarn (yarn.lock)
#[derive(Debug)]
pub struct YarnPrepareProvider {
    // directory containing yarn.lock / package.json
    project_root: PathBuf,
    config: PrepareProviderConfig,
}
impl YarnPrepareProvider {
    /// Build a provider rooted at `project_root` with the given config.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            project_root: project_root.to_owned(),
            config,
        }
    }
}
impl PrepareProvider for YarnPrepareProvider {
    fn id(&self) -> &str {
        "yarn"
    }
    /// Lockfile and manifest drive the staleness check.
    fn sources(&self) -> Vec<PathBuf> {
        ["yarn.lock", "package.json"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("node_modules")]
    }
    fn prepare_command(&self) -> Result<PrepareCommand> {
        // an explicit `run` in the config overrides the default command
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Ok(PrepareCommand {
                program: "yarn".to_string(),
                args: vec!["install".to_string()],
                env: self.config.env.clone(),
                cwd: Some(self.project_root.clone()),
                description: self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "yarn install".to_string()),
            }),
        }
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("yarn.lock").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/go.rs | src/prepare/providers/go.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for Go (go.sum)
#[derive(Debug)]
pub struct GoPrepareProvider {
    // directory containing go.mod
    project_root: PathBuf,
    config: PrepareProviderConfig,
}
impl GoPrepareProvider {
    /// Build a provider rooted at `project_root` with the given config.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            project_root: project_root.to_owned(),
            config,
        }
    }
}
impl PrepareProvider for GoPrepareProvider {
    fn id(&self) -> &str {
        "go"
    }
    /// go.mod defines dependencies - changes there trigger downloads.
    fn sources(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("go.mod")]
    }
    /// Prefer vendor/ when the project vendors deps; otherwise go.sum,
    /// which gets rewritten once `go mod download` completes.
    fn outputs(&self) -> Vec<PathBuf> {
        let vendor = self.project_root.join("vendor");
        if vendor.exists() {
            return vec![vendor];
        }
        vec![self.project_root.join("go.sum")]
    }
    fn prepare_command(&self) -> Result<PrepareCommand> {
        // an explicit `run` in the config overrides the default command
        if let Some(run) = &self.config.run {
            return PrepareCommand::from_string(run, &self.project_root, &self.config);
        }
        // `go mod vendor` for vendored projects, `go mod download` otherwise
        let (args, desc) = if self.project_root.join("vendor").exists() {
            (
                vec!["mod".to_string(), "vendor".to_string()],
                "go mod vendor",
            )
        } else {
            (
                vec!["mod".to_string(), "download".to_string()],
                "go mod download",
            )
        };
        Ok(PrepareCommand {
            program: "go".to_string(),
            args,
            env: self.config.env.clone(),
            cwd: Some(self.project_root.clone()),
            description: self
                .config
                .description
                .clone()
                .unwrap_or_else(|| desc.to_string()),
        })
    }
    fn is_applicable(&self) -> bool {
        // go.mod is the source manifest; go.sum may itself be an output
        self.project_root.join("go.mod").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/pnpm.rs | src/prepare/providers/pnpm.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for pnpm (pnpm-lock.yaml)
#[derive(Debug)]
pub struct PnpmPrepareProvider {
    // directory containing pnpm-lock.yaml / package.json
    project_root: PathBuf,
    config: PrepareProviderConfig,
}
impl PnpmPrepareProvider {
    /// Build a provider rooted at `project_root` with the given config.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            project_root: project_root.to_owned(),
            config,
        }
    }
}
impl PrepareProvider for PnpmPrepareProvider {
    fn id(&self) -> &str {
        "pnpm"
    }
    /// Lockfile and manifest drive the staleness check.
    fn sources(&self) -> Vec<PathBuf> {
        ["pnpm-lock.yaml", "package.json"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("node_modules")]
    }
    fn prepare_command(&self) -> Result<PrepareCommand> {
        // an explicit `run` in the config overrides the default command
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Ok(PrepareCommand {
                program: "pnpm".to_string(),
                args: vec!["install".to_string()],
                env: self.config.env.clone(),
                cwd: Some(self.project_root.clone()),
                description: self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "pnpm install".to_string()),
            }),
        }
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("pnpm-lock.yaml").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/poetry.rs | src/prepare/providers/poetry.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for Poetry (poetry.lock)
#[derive(Debug)]
pub struct PoetryPrepareProvider {
    // directory containing poetry.lock / pyproject.toml
    project_root: PathBuf,
    config: PrepareProviderConfig,
}
impl PoetryPrepareProvider {
    /// Build a provider rooted at `project_root` with the given config.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            project_root: project_root.to_owned(),
            config,
        }
    }
}
impl PrepareProvider for PoetryPrepareProvider {
    fn id(&self) -> &str {
        "poetry"
    }
    /// Lockfile and manifest drive the staleness check.
    fn sources(&self) -> Vec<PathBuf> {
        ["poetry.lock", "pyproject.toml"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join(".venv")]
    }
    fn prepare_command(&self) -> Result<PrepareCommand> {
        // an explicit `run` in the config overrides the default command
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Ok(PrepareCommand {
                program: "poetry".to_string(),
                args: vec!["install".to_string()],
                env: self.config.env.clone(),
                cwd: Some(self.project_root.clone()),
                description: self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "poetry install".to_string()),
            }),
        }
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("poetry.lock").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/mod.rs | src/prepare/providers/mod.rs | mod bun;
mod bundler;
mod composer;
mod custom;
mod go;
mod npm;
mod pip;
mod pnpm;
mod poetry;
mod uv;
mod yarn;
pub use bun::BunPrepareProvider;
pub use bundler::BundlerPrepareProvider;
pub use composer::ComposerPrepareProvider;
pub use custom::CustomPrepareProvider;
pub use go::GoPrepareProvider;
pub use npm::NpmPrepareProvider;
pub use pip::PipPrepareProvider;
pub use pnpm::PnpmPrepareProvider;
pub use poetry::PoetryPrepareProvider;
pub use uv::UvPrepareProvider;
pub use yarn::YarnPrepareProvider;
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/bun.rs | src/prepare/providers/bun.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for bun (bun.lockb or bun.lock)
#[derive(Debug)]
pub struct BunPrepareProvider {
    // directory containing the bun lockfile / package.json
    project_root: PathBuf,
    config: PrepareProviderConfig,
}
impl BunPrepareProvider {
    /// Build a provider rooted at `project_root` with the given config.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            project_root: project_root.to_owned(),
            config,
        }
    }
    /// Bun supports both bun.lockb (binary) and bun.lock (text);
    /// the binary lockfile is preferred when both exist.
    fn lockfile_path(&self) -> Option<PathBuf> {
        ["bun.lockb", "bun.lock"]
            .iter()
            .map(|f| self.project_root.join(f))
            .find(|p| p.exists())
    }
}
impl PrepareProvider for BunPrepareProvider {
    fn id(&self) -> &str {
        "bun"
    }
    /// Lockfile (when present) and manifest drive the staleness check.
    fn sources(&self) -> Vec<PathBuf> {
        let mut sources: Vec<PathBuf> = self.lockfile_path().into_iter().collect();
        sources.push(self.project_root.join("package.json"));
        sources
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("node_modules")]
    }
    fn prepare_command(&self) -> Result<PrepareCommand> {
        // an explicit `run` in the config overrides the default command
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Ok(PrepareCommand {
                program: "bun".to_string(),
                args: vec!["install".to_string()],
                env: self.config.env.clone(),
                cwd: Some(self.project_root.clone()),
                description: self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "bun install".to_string()),
            }),
        }
    }
    fn is_applicable(&self) -> bool {
        self.lockfile_path().is_some()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/npm.rs | src/prepare/providers/npm.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for npm (package-lock.json)
#[derive(Debug)]
pub struct NpmPrepareProvider {
    // Root directory of the project containing package-lock.json.
    project_root: PathBuf,
    // Per-rule configuration (run override, env, description, auto flag).
    config: PrepareProviderConfig,
}
impl NpmPrepareProvider {
    /// Construct a provider for the project at `project_root`.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        let project_root = project_root.to_path_buf();
        Self {
            project_root,
            config,
        }
    }
}
impl PrepareProvider for NpmPrepareProvider {
    fn id(&self) -> &str {
        "npm"
    }
    /// Files that should trigger a re-install when changed.
    fn sources(&self) -> Vec<PathBuf> {
        ["package-lock.json", "package.json"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("node_modules")]
    }
    /// `npm install` by default; a configured `run` string takes precedence.
    fn prepare_command(&self) -> Result<PrepareCommand> {
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => {
                let description = self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "npm install".to_string());
                Ok(PrepareCommand {
                    program: "npm".to_string(),
                    args: vec!["install".to_string()],
                    env: self.config.env.clone(),
                    cwd: Some(self.project_root.clone()),
                    description,
                })
            }
        }
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("package-lock.json").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/composer.rs | src/prepare/providers/composer.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for PHP Composer (composer.lock)
#[derive(Debug)]
pub struct ComposerPrepareProvider {
    // Root directory of the project containing composer.lock.
    project_root: PathBuf,
    // Per-rule configuration (run override, env, description, auto flag).
    config: PrepareProviderConfig,
}
impl ComposerPrepareProvider {
    /// Construct a provider rooted at `project_root`.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            config,
            project_root: project_root.to_path_buf(),
        }
    }
}
impl PrepareProvider for ComposerPrepareProvider {
    fn id(&self) -> &str {
        "composer"
    }
    /// Both composer.lock and composer.json invalidate the vendor dir.
    fn sources(&self) -> Vec<PathBuf> {
        ["composer.lock", "composer.json"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("vendor")]
    }
    /// `composer install` unless the config supplies an explicit `run` string.
    fn prepare_command(&self) -> Result<PrepareCommand> {
        if let Some(run) = &self.config.run {
            return PrepareCommand::from_string(run, &self.project_root, &self.config);
        }
        let description = self
            .config
            .description
            .clone()
            .unwrap_or_else(|| "composer install".to_string());
        Ok(PrepareCommand {
            program: "composer".to_string(),
            args: vec!["install".to_string()],
            env: self.config.env.clone(),
            cwd: Some(self.project_root.clone()),
            description,
        })
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("composer.lock").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/bundler.rs | src/prepare/providers/bundler.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for Ruby Bundler (Gemfile.lock)
#[derive(Debug)]
pub struct BundlerPrepareProvider {
    // Root directory of the project containing the Gemfile.
    project_root: PathBuf,
    // Per-rule configuration (run override, env, description, auto flag).
    config: PrepareProviderConfig,
}
impl BundlerPrepareProvider {
    /// Construct a provider rooted at `project_root`.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        let project_root = project_root.to_path_buf();
        Self {
            project_root,
            config,
        }
    }
}
impl PrepareProvider for BundlerPrepareProvider {
    fn id(&self) -> &str {
        "bundler"
    }
    fn sources(&self) -> Vec<PathBuf> {
        ["Gemfile.lock", "Gemfile"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    /// Install target: vendor/bundle when present (bundler's `--path` layout);
    /// otherwise the .bundle directory serves as the fallback indicator.
    fn outputs(&self) -> Vec<PathBuf> {
        let vendor = self.project_root.join("vendor/bundle");
        if vendor.exists() {
            vec![vendor]
        } else {
            vec![self.project_root.join(".bundle")]
        }
    }
    /// `bundle install` by default; a configured `run` string overrides it.
    fn prepare_command(&self) -> Result<PrepareCommand> {
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Ok(PrepareCommand {
                program: "bundle".to_string(),
                args: vec!["install".to_string()],
                env: self.config.env.clone(),
                cwd: Some(self.project_root.clone()),
                description: self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "bundle install".to_string()),
            }),
        }
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("Gemfile.lock").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/pip.rs | src/prepare/providers/pip.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for pip (requirements.txt)
#[derive(Debug)]
pub struct PipPrepareProvider {
    // Root directory of the project containing requirements.txt.
    project_root: PathBuf,
    // Per-rule configuration (run override, env, description, auto flag).
    config: PrepareProviderConfig,
}
impl PipPrepareProvider {
    /// Construct a provider rooted at `project_root`.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        Self {
            config,
            project_root: project_root.to_path_buf(),
        }
    }
}
impl PrepareProvider for PipPrepareProvider {
    fn id(&self) -> &str {
        "pip"
    }
    fn sources(&self) -> Vec<PathBuf> {
        vec![self.project_root.join("requirements.txt")]
    }
    /// The .venv directory is used as the install indicator.
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join(".venv")]
    }
    /// `pip install -r requirements.txt` unless overridden by config `run`.
    fn prepare_command(&self) -> Result<PrepareCommand> {
        if let Some(run) = &self.config.run {
            return PrepareCommand::from_string(run, &self.project_root, &self.config);
        }
        let args: Vec<String> = ["install", "-r", "requirements.txt"]
            .iter()
            .map(|s| s.to_string())
            .collect();
        Ok(PrepareCommand {
            program: "pip".to_string(),
            args,
            env: self.config.env.clone(),
            cwd: Some(self.project_root.clone()),
            description: self
                .config
                .description
                .clone()
                .unwrap_or_else(|| "pip install".to_string()),
        })
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("requirements.txt").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/uv.rs | src/prepare/providers/uv.rs | use std::path::{Path, PathBuf};
use eyre::Result;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for uv (uv.lock)
#[derive(Debug)]
pub struct UvPrepareProvider {
    // Root directory of the project containing uv.lock.
    project_root: PathBuf,
    // Per-rule configuration (run override, env, description, auto flag).
    config: PrepareProviderConfig,
}
impl UvPrepareProvider {
    /// Construct a provider rooted at `project_root`.
    pub fn new(project_root: &Path, config: PrepareProviderConfig) -> Self {
        let project_root = project_root.to_path_buf();
        Self {
            project_root,
            config,
        }
    }
}
impl PrepareProvider for UvPrepareProvider {
    fn id(&self) -> &str {
        "uv"
    }
    /// uv.lock and pyproject.toml both invalidate the virtualenv.
    fn sources(&self) -> Vec<PathBuf> {
        ["uv.lock", "pyproject.toml"]
            .iter()
            .map(|f| self.project_root.join(f))
            .collect()
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.project_root.join(".venv")]
    }
    /// `uv sync` by default; a configured `run` string overrides it.
    fn prepare_command(&self) -> Result<PrepareCommand> {
        match &self.config.run {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Ok(PrepareCommand {
                program: "uv".to_string(),
                args: vec!["sync".to_string()],
                env: self.config.env.clone(),
                cwd: Some(self.project_root.clone()),
                description: self
                    .config
                    .description
                    .clone()
                    .unwrap_or_else(|| "uv sync".to_string()),
            }),
        }
    }
    fn is_applicable(&self) -> bool {
        self.project_root.join("uv.lock").exists()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/prepare/providers/custom.rs | src/prepare/providers/custom.rs | use std::path::PathBuf;
use eyre::Result;
use glob::glob;
use crate::prepare::rule::PrepareProviderConfig;
use crate::prepare::{PrepareCommand, PrepareProvider};
/// Prepare provider for user-defined custom rules from mise.toml [prepare.*]
#[derive(Debug)]
pub struct CustomPrepareProvider {
    // Rule id as written in mise.toml (the `*` in [prepare.*]).
    id: String,
    // User-supplied rule configuration (run, sources, outputs, env, auto).
    config: PrepareProviderConfig,
    // Directory relative patterns are resolved against.
    project_root: PathBuf,
}
impl CustomPrepareProvider {
    /// Construct a custom rule provider.
    pub fn new(id: String, config: PrepareProviderConfig, project_root: PathBuf) -> Self {
        Self {
            id,
            config,
            project_root,
        }
    }
    /// Expand glob patterns in sources/outputs, resolving relative patterns
    /// against the project root. Non-glob paths are returned as-is even when
    /// they do not exist (outputs may not have been produced yet).
    fn expand_globs(&self, patterns: &[String]) -> Vec<PathBuf> {
        let mut paths = vec![];
        for pattern in patterns {
            let full_pattern = if PathBuf::from(pattern).is_relative() {
                self.project_root.join(pattern)
            } else {
                PathBuf::from(pattern)
            };
            let is_glob = pattern.contains('*') || pattern.contains('{') || pattern.contains('?');
            if is_glob {
                // Only matches that actually exist are returned by glob().
                if let Ok(entries) = glob(full_pattern.to_string_lossy().as_ref()) {
                    paths.extend(entries.flatten());
                }
            } else {
                // Literal path: include unconditionally.
                paths.push(full_pattern);
            }
        }
        paths
    }
}
impl PrepareProvider for CustomPrepareProvider {
    fn id(&self) -> &str {
        self.id.as_str()
    }
    fn sources(&self) -> Vec<PathBuf> {
        self.expand_globs(&self.config.sources)
    }
    fn outputs(&self) -> Vec<PathBuf> {
        self.expand_globs(&self.config.outputs)
    }
    /// Custom rules must define `run`; there is no default command.
    fn prepare_command(&self) -> Result<PrepareCommand> {
        match self.config.run.as_ref() {
            Some(run) => PrepareCommand::from_string(run, &self.project_root, &self.config),
            None => Err(eyre::eyre!("prepare rule {} has no run command", self.id)),
        }
    }
    fn is_applicable(&self) -> bool {
        // A custom provider without a run command can never execute.
        self.config.run.is_some()
    }
    fn is_auto(&self) -> bool {
        self.config.auto
    }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/aqua-registry/build.rs | crates/aqua-registry/build.rs | use std::env;
use std::fs;
use std::path::Path;
/// Build-script entry point: bakes the aqua registry YAML files into
/// generated Rust source under OUT_DIR.
fn main() {
    let out_dir = env::var("OUT_DIR").expect("OUT_DIR environment variable must be set");
    generate_baked_registry(&out_dir);
}
/// Write `aqua_standard_registry.rs` into `out_dir`, containing a
/// `HashMap::from([...])` literal that maps registry ids to raw YAML text.
/// Panics when the registry checkout is missing or empty (build-script
/// convention: fail the build loudly).
fn generate_baked_registry(out_dir: &str) {
    let dest_path = Path::new(out_dir).join("aqua_standard_registry.rs");
    let registry_dir = find_registry_dir();
    let registries =
        collect_aqua_registries(&registry_dir).expect("Failed to collect aqua registry files");
    if registries.is_empty() {
        panic!(
            "No aqua registry files found in {}/pkgs/",
            registry_dir.display()
        );
    }
    // {:?} on the strings produces valid Rust string literals with escaping.
    let entries: String = registries
        .iter()
        .map(|(id, content)| format!("    ({:?}, {:?}),\n", id, content))
        .collect();
    let code = format!("HashMap::from([\n{}])", entries);
    fs::write(dest_path, code).expect("Failed to write baked registry file");
}
/// Resolve the registry checkout directory.
/// Location is constant: crates/aqua-registry/aqua-registry (relative to the
/// crate manifest). Panics when it cannot be found.
fn find_registry_dir() -> std::path::PathBuf {
    let manifest_dir = env::var("CARGO_MANIFEST_DIR")
        .expect("CARGO_MANIFEST_DIR environment variable must be set");
    let embedded = std::path::Path::new(&manifest_dir).join("aqua-registry");
    if !embedded.exists() {
        panic!("Registry directory not found");
    }
    embedded
}
/// Gather (registry-id, yaml-content) pairs for every registry.yaml under
/// `dir`/pkgs. Returns an empty list when the directory layout is missing.
fn collect_aqua_registries(
    dir: &Path,
) -> Result<Vec<(String, String)>, Box<dyn std::error::Error>> {
    let mut registries = Vec::new();
    let pkgs_dir = dir.join("pkgs");
    // Missing directories are not an error — the baked registry is optional
    // at this layer; the caller decides whether emptiness is fatal.
    if dir.exists() && pkgs_dir.exists() {
        collect_registries_recursive(&pkgs_dir, &mut registries, String::new())?;
    }
    Ok(registries)
}
/// Recursively walk `dir`, collecting (registry-id, yaml-content) pairs for
/// every `registry.yaml` found. `prefix` is the id accumulated from parent
/// directory names (joined with `/`), mirroring the pkgs/ directory layout.
fn collect_registries_recursive(
    dir: &Path,
    registries: &mut Vec<(String, String)>,
    prefix: String,
) -> Result<(), Box<dyn std::error::Error>> {
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        if path.is_dir() {
            let dir_name = path.file_name().unwrap().to_string_lossy();
            let new_prefix = if prefix.is_empty() {
                dir_name.to_string()
            } else {
                format!("{}/{}", prefix, dir_name)
            };
            collect_registries_recursive(&path, registries, new_prefix)?;
        } else if path.file_name() == Some(std::ffi::OsStr::new("registry.yaml")) {
            let content = fs::read_to_string(&path)?;
            registries.push((prefix.clone(), content.clone()));
            // Process aliases if they exist: register the same registry
            // content under each package alias name so aliased lookups
            // resolve to identical YAML. The `contains("aliases")` check is a
            // cheap pre-filter before the full YAML parse.
            #[allow(clippy::collapsible_if)]
            if content.contains("aliases") {
                if let Ok(registry) = serde_yaml::from_str::<serde_yaml::Value>(&content) {
                    if let Some(packages) = registry.get("packages").and_then(|p| p.as_sequence()) {
                        for package in packages {
                            if let Some(aliases) =
                                package.get("aliases").and_then(|a| a.as_sequence())
                            {
                                for alias in aliases {
                                    if let Some(name) = alias.get("name").and_then(|n| n.as_str()) {
                                        registries.push((name.to_string(), content.clone()));
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    Ok(())
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/aqua-registry/src/lib.rs | crates/aqua-registry/src/lib.rs | //! Aqua Registry
//!
//! This crate provides functionality for working with Aqua package registry files.
//! It can load registry data from baked-in files, local repositories, or remote HTTP sources.
mod registry;
mod template;
mod types;
// Re-export only what's needed by the main mise crate
pub use registry::{
AQUA_STANDARD_REGISTRY_FILES, AquaRegistry, DefaultRegistryFetcher, FileCacheStore,
NoOpCacheStore,
};
pub use types::{
AquaChecksum, AquaChecksumType, AquaMinisignType, AquaPackage, AquaPackageType, RegistryYaml,
};
use thiserror::Error;
/// Errors that can occur when working with the Aqua registry
#[derive(Error, Debug)]
pub enum AquaRegistryError {
    /// The requested package id has no registry entry.
    #[error("package not found: {0}")]
    PackageNotFound(String),
    /// The registry source (baked, local, or remote) could not be used.
    #[error("registry not available: {0}")]
    RegistryNotAvailable(String),
    /// Failure while rendering a registry template.
    #[error("template error: {0}")]
    TemplateError(#[from] eyre::Error),
    /// Failure parsing registry YAML.
    #[error("yaml parse error: {0}")]
    YamlError(#[from] serde_yaml::Error),
    /// Underlying filesystem/network I/O failure.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
    /// Failure evaluating a registry expression.
    #[error("expression error: {0}")]
    ExpressionError(String),
}
/// Crate-wide result alias over [`AquaRegistryError`].
pub type Result<T> = std::result::Result<T, AquaRegistryError>;
/// Configuration for the Aqua registry
///
/// See [`Default`] for the stock settings (temp-dir cache, upstream
/// aqua-registry URL, baked registry enabled).
#[derive(Debug, Clone)]
pub struct AquaRegistryConfig {
    /// Path to cache directory for cloned repositories
    pub cache_dir: std::path::PathBuf,
    /// URL of the registry repository (if None, only baked registry will be used)
    pub registry_url: Option<String>,
    /// Whether to use the baked-in registry
    pub use_baked_registry: bool,
    /// Whether to skip network operations (prefer offline mode)
    pub prefer_offline: bool,
}
impl Default for AquaRegistryConfig {
    /// Stock settings: temp-dir cache, upstream aqua-registry URL, baked
    /// registry enabled, network operations allowed.
    fn default() -> Self {
        let cache_dir = std::env::temp_dir().join("aqua-registry");
        let registry_url = Some(String::from("https://github.com/aquaproj/aqua-registry"));
        Self {
            cache_dir,
            registry_url,
            use_baked_registry: true,
            prefer_offline: false,
        }
    }
}
/// Trait for fetching registry files from various sources
// NOTE: async fn in a public trait makes the future's Send-ness
// implementation-defined; the lint is allowed deliberately here.
#[allow(async_fn_in_trait)]
pub trait RegistryFetcher {
    /// Fetch and parse a registry YAML file for the given package ID
    async fn fetch_registry(&self, package_id: &str) -> Result<crate::types::RegistryYaml>;
}
/// Trait for caching registry data
///
/// Implementations decide their own freshness policy; `retrieve` returning
/// `Ok(None)` means "no cached entry".
pub trait CacheStore {
    /// Check if cached data exists and is fresh
    fn is_fresh(&self, key: &str) -> bool;
    /// Store data in cache
    fn store(&self, key: &str, data: &[u8]) -> std::io::Result<()>;
    /// Retrieve data from cache
    fn retrieve(&self, key: &str) -> std::io::Result<Option<Vec<u8>>>;
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/aqua-registry/src/template.rs | crates/aqua-registry/src/template.rs | use eyre::{ContextCompat, Result, bail};
use heck::ToTitleCase;
use itertools::Itertools;
use std::collections::HashMap;
use std::fmt::Debug;
use std::sync::LazyLock;
use versions::Versioning;
// Variable context for template rendering: variable name -> string value.
type Context = HashMap<String, String>;
/// AST node representing an expression in the template
#[derive(Debug, Clone, PartialEq)]
enum Expr {
    /// Variable reference: .Version
    Var(String),
    /// String literal: "foo"
    Literal(String),
    /// Function call: func arg1 arg2
    FuncCall(String, Vec<Expr>),
    /// Property access: expr.Property
    PropertyAccess(Box<Expr>, String),
    /// Pipe: expr | func
    Pipe(Box<Expr>, Box<Expr>),
}
/// Runtime value trait - implemented by different value types
trait Value: Debug {
    // String representation used when splicing the value into output.
    fn as_string(&self) -> String;
    // Property lookup (e.g. `.Major`); errors when the value type has no
    // such property.
    fn get_property(&self, prop: &str) -> Result<String>;
}
/// String value type
#[derive(Debug, Clone)]
struct StringValue(String);
impl Value for StringValue {
    fn as_string(&self) -> String {
        self.0.clone()
    }
    // Plain strings expose no properties.
    fn get_property(&self, _prop: &str) -> Result<String> {
        bail!("cannot access property on string")
    }
}
/// Semantic version value type
#[derive(Debug, Clone)]
struct SemVerValue {
    // Parsed version components; missing components default to 0.
    major: u32,
    minor: u32,
    patch: u32,
    // Version string as parsed (leading 'v' already stripped by `semver`).
    original: String,
}
impl Value for SemVerValue {
    fn as_string(&self) -> String {
        self.original.clone()
    }
    // Exposes `.Major`, `.Minor`, `.Patch` to templates.
    fn get_property(&self, prop: &str) -> Result<String> {
        Ok(match prop {
            "Major" => self.major.to_string(),
            "Minor" => self.minor.to_string(),
            "Patch" => self.patch.to_string(),
            _ => bail!("unknown semver property: {prop}"),
        })
    }
}
/// Render a Go-template-like string: evaluate each `{{ ... }}` tag against
/// `ctx` and splice the result into the output; text outside tags is copied
/// verbatim.
pub fn render(tmpl: &str, ctx: &Context) -> Result<String> {
    let mut result = String::new();
    let mut in_tag = false;
    let mut tag = String::new();
    let chars = tmpl.chars().collect_vec();
    let mut i = 0;
    let evaluator = Evaluator::new(ctx);
    while i < chars.len() {
        let c = chars[i];
        // One-char lookahead for the two-char delimiters; ' ' is a harmless
        // filler at end-of-input since it can never complete a delimiter.
        let next = chars.get(i + 1).cloned().unwrap_or(' ');
        if !in_tag && c == '{' && next == '{' {
            in_tag = true;
            i += 1;
        } else if in_tag && c == '}' && next == '}' {
            in_tag = false;
            // Tag body complete: lex, parse, evaluate, splice.
            let tokens = lex(&tag)?;
            let ast = parse_tokens(&tokens)?;
            result += &evaluator.eval(&ast)?;
            tag.clear();
            i += 1;
        } else if in_tag {
            tag.push(c);
        } else {
            result.push(c);
        }
        i += 1;
    }
    Ok(result)
}
// Lexical tokens for a template tag body.
#[derive(Debug, Clone, PartialEq, strum::EnumIs)]
enum Token<'a> {
    // `.Name` context-variable reference
    Key(&'a str),
    // double-quoted string literal (quotes stripped)
    String(&'a str),
    // bare identifier lexed in call position
    Func(&'a str),
    // run of spaces, kept so the parser can separate function arguments
    Whitespace(&'a str),
    Pipe,
    LParen,
    RParen,
    Dot,
    // identifier following a dot (property name)
    Ident(&'a str),
}
/// Split a template tag body into tokens.
///
/// Returns an error on unterminated string literals or characters that do
/// not start any known token.
fn lex(code: &str) -> Result<Vec<Token<'_>>> {
    let mut tokens = vec![];
    let mut code = code.trim();
    while !code.is_empty() {
        if code.starts_with(" ") {
            // Consume a run of whitespace as one token; the parser uses it
            // to separate function arguments.
            let end = code
                .chars()
                .enumerate()
                .find(|(_, c)| !c.is_whitespace())
                .map(|(i, _)| i);
            if let Some(end) = end {
                tokens.push(Token::Whitespace(&code[..end]));
                code = &code[end..];
            } else {
                break;
            }
        } else if code.starts_with("(") {
            tokens.push(Token::LParen);
            code = &code[1..];
        } else if code.starts_with(")") {
            tokens.push(Token::RParen);
            code = &code[1..];
        } else if code.starts_with("|") {
            tokens.push(Token::Pipe);
            code = &code[1..];
        } else if code.starts_with('"') {
            // String literal: find the first closing quote not preceded by a
            // backslash. `end` indexes into code[1..], so `code.chars().nth(end)`
            // is the character just before the candidate quote.
            let mut terminated = false;
            for (end, _) in code[1..].match_indices('"') {
                if code.chars().nth(end) != Some('\\') {
                    tokens.push(Token::String(&code[1..end + 1]));
                    code = &code[end + 2..];
                    terminated = true;
                    break;
                }
            }
            if !terminated {
                // BUG FIX: previously this case left `code` untouched, so an
                // input like `"foo` made the outer loop spin forever.
                bail!("unterminated string literal in template: {code}");
            }
        } else if code.starts_with(".") {
            // Check if this is a property access (after ) or identifier)
            let next_char = code.chars().nth(1);
            if next_char.is_some_and(|c| c.is_alphabetic()) {
                // This could be .Key or .Property
                let end = code[1..]
                    .chars()
                    .enumerate()
                    .find(|(_, c)| !c.is_alphanumeric() && *c != '_')
                    .map(|(i, _)| i + 1)
                    .unwrap_or(code.len());
                // If preceded by RParen, it's a property access
                if tokens.last().is_some_and(|t| t.is_r_paren()) {
                    tokens.push(Token::Dot);
                    tokens.push(Token::Ident(&code[1..end]));
                } else {
                    // Otherwise it's a key reference
                    tokens.push(Token::Key(&code[1..end]));
                }
                code = &code[end..];
            } else {
                tokens.push(Token::Dot);
                code = &code[1..];
            }
        } else {
            // Bare identifier (alphanumeric/underscore/hyphen run)
            let end = code
                .chars()
                .enumerate()
                .find(|(_, c)| !c.is_alphanumeric() && *c != '_' && *c != '-')
                .map(|(i, _)| i)
                .unwrap_or(code.len());
            if end > 0 {
                let token_str = &code[..end];
                // Determine if this is a function or identifier based on context
                tokens.push(Token::Func(token_str));
                code = &code[end..];
            } else {
                bail!("unexpected character: {}", code.chars().next().unwrap());
            }
        }
    }
    Ok(tokens)
}
/// Parse tokens into an AST
/// Entry point: pipes bind loosest, so parsing starts there.
fn parse_tokens(tokens: &[Token]) -> Result<Expr> {
    let mut tokens = tokens.iter().peekable();
    parse_pipe(&mut tokens)
}
/// Parse pipe expressions (lowest precedence): `a | f | g` folds left into
/// nested `Expr::Pipe` nodes.
fn parse_pipe(tokens: &mut std::iter::Peekable<std::slice::Iter<Token>>) -> Result<Expr> {
    let mut expr = parse_primary(tokens)?;
    loop {
        // Whitespace may separate a stage from the following `|`.
        skip_whitespace(tokens);
        if !matches!(tokens.peek(), Some(Token::Pipe)) {
            break;
        }
        tokens.next(); // consume the `|`
        skip_whitespace(tokens);
        let stage = parse_primary(tokens)?;
        expr = Expr::Pipe(Box::new(expr), Box::new(stage));
    }
    Ok(expr)
}
/// Parse primary expressions
/// Handles variables, literals, parenthesized subexpressions, and bare
/// function calls, plus trailing `.Property` access on any of them.
fn parse_primary(tokens: &mut std::iter::Peekable<std::slice::Iter<Token>>) -> Result<Expr> {
    skip_whitespace(tokens);
    let token = tokens.next().wrap_err("unexpected end of expression")?;
    let mut expr = match token {
        Token::Key(k) => Expr::Var(k.to_string()),
        Token::String(s) => Expr::Literal(s.to_string()),
        Token::LParen => {
            // Parenthesized expression: (func arg)
            skip_whitespace(tokens);
            let inner = parse_pipe(tokens)?;
            skip_whitespace(tokens);
            if !matches!(tokens.next(), Some(Token::RParen)) {
                bail!("expected closing parenthesis");
            }
            inner
        }
        Token::Func(f) => {
            // Function call: func arg1 arg2
            let func_name = f.to_string();
            let mut args = Vec::new();
            // Collect arguments until we hit pipe, rparen, or end
            loop {
                skip_whitespace(tokens);
                match tokens.peek() {
                    None | Some(Token::Pipe) | Some(Token::RParen) => break,
                    Some(Token::Dot) | Some(Token::Ident(_)) => break, // Stop before property access
                    _ => {
                        args.push(parse_arg(tokens)?);
                    }
                }
            }
            Expr::FuncCall(func_name, args)
        }
        _ => bail!("unexpected token: {token:?}"),
    };
    // Handle property access: expr.Property
    while matches!(tokens.peek(), Some(Token::Dot)) {
        tokens.next(); // consume dot
        skip_whitespace(tokens);
        if let Some(Token::Ident(prop)) = tokens.next() {
            expr = Expr::PropertyAccess(Box::new(expr), prop.to_string());
        } else {
            bail!("expected identifier after dot");
        }
    }
    Ok(expr)
}
/// Parse a function argument
/// Accepts a parenthesized subexpression (with optional trailing `.Property`),
/// a `.Key` variable, or a string literal.
fn parse_arg(tokens: &mut std::iter::Peekable<std::slice::Iter<Token>>) -> Result<Expr> {
    skip_whitespace(tokens);
    match tokens.peek() {
        Some(Token::LParen) => {
            tokens.next(); // consume lparen
            skip_whitespace(tokens);
            let expr = parse_pipe(tokens)?;
            skip_whitespace(tokens);
            if !matches!(tokens.next(), Some(Token::RParen)) {
                bail!("expected closing parenthesis");
            }
            // Check for property access after paren
            let mut result = expr;
            while matches!(tokens.peek(), Some(Token::Dot)) {
                tokens.next(); // consume dot
                skip_whitespace(tokens);
                if let Some(Token::Ident(prop)) = tokens.next() {
                    result = Expr::PropertyAccess(Box::new(result), prop.to_string());
                } else {
                    bail!("expected identifier after dot");
                }
            }
            Ok(result)
        }
        Some(Token::Key(k)) => {
            tokens.next();
            Ok(Expr::Var(k.to_string()))
        }
        Some(Token::String(s)) => {
            tokens.next();
            Ok(Expr::Literal(s.to_string()))
        }
        _ => bail!("expected argument"),
    }
}
/// Advance the cursor past any run of whitespace tokens.
fn skip_whitespace(tokens: &mut std::iter::Peekable<std::slice::Iter<Token>>) {
    while let Some(Token::Whitespace(_)) = tokens.peek() {
        tokens.next();
    }
}
/// Function signature for template functions that return Value trait objects
type TemplateFn = fn(&[Box<dyn Value>]) -> Result<Box<dyn Value>>;
/// Static registry of available template functions
/// (the subset of Go-template/sprig helpers used by aqua registry files).
static FUNCTION_REGISTRY: LazyLock<HashMap<&'static str, TemplateFn>> = LazyLock::new(|| {
    let mut registry: HashMap<&'static str, TemplateFn> = HashMap::new();
    // semver: parse a version string into a value exposing Major/Minor/Patch.
    registry.insert("semver", |args| {
        if args.len() != 1 {
            bail!("semver requires exactly 1 argument");
        }
        let input = args[0].as_string();
        // A leading 'v' (e.g. "v1.2.3") is stripped before parsing.
        let clean_version = input.strip_prefix('v').unwrap_or(&input);
        let version = Versioning::new(clean_version)
            .wrap_err_with(|| format!("invalid semver version: {input}"))?;
        Ok(Box::new(SemVerValue {
            major: version.nth(0).unwrap_or(0),
            minor: version.nth(1).unwrap_or(0),
            patch: version.nth(2).unwrap_or(0),
            original: clean_version.to_string(),
        }) as Box<dyn Value>)
    });
    // title: title-case the input ("world" -> "World").
    registry.insert("title", |args| {
        if args.len() != 1 {
            bail!("title requires exactly 1 argument");
        }
        Ok(Box::new(StringValue(args[0].as_string().to_title_case())) as Box<dyn Value>)
    });
    // trimV: strip a leading 'v' ("v1.0.0" -> "1.0.0").
    registry.insert("trimV", |args| {
        if args.len() != 1 {
            bail!("trimV requires exactly 1 argument");
        }
        Ok(Box::new(StringValue(
            args[0].as_string().trim_start_matches('v').to_string(),
        )) as Box<dyn Value>)
    });
    // trimPrefix prefix text: remove `prefix` from the front, if present.
    registry.insert("trimPrefix", |args| {
        if args.len() != 2 {
            bail!("trimPrefix requires exactly 2 arguments");
        }
        let prefix = args[0].as_string();
        let text = args[1].as_string();
        Ok(Box::new(StringValue(
            text.strip_prefix(&prefix).unwrap_or(&text).to_string(),
        )) as Box<dyn Value>)
    });
    // trimSuffix suffix text: remove `suffix` from the end, if present.
    registry.insert("trimSuffix", |args| {
        if args.len() != 2 {
            bail!("trimSuffix requires exactly 2 arguments");
        }
        let suffix = args[0].as_string();
        let text = args[1].as_string();
        Ok(Box::new(StringValue(
            text.strip_suffix(&suffix).unwrap_or(&text).to_string(),
        )) as Box<dyn Value>)
    });
    // replace from to text: substitute every occurrence of `from` with `to`.
    registry.insert("replace", |args| {
        if args.len() != 3 {
            bail!("replace requires exactly 3 arguments");
        }
        let from = args[0].as_string();
        let to = args[1].as_string();
        let text = args[2].as_string();
        Ok(Box::new(StringValue(text.replace(&from, &to))) as Box<dyn Value>)
    });
    registry
});
/// Evaluator walks the AST and produces results
struct Evaluator<'a> {
    // Variable context the template is being rendered against.
    ctx: &'a Context,
}
impl<'a> Evaluator<'a> {
    fn new(ctx: &'a Context) -> Self {
        Self { ctx }
    }
    /// Evaluate an AST node and return a string (public interface)
    fn eval(&self, expr: &Expr) -> Result<String> {
        let value = self.eval_value(expr)?;
        Ok(value.as_string())
    }
    /// Evaluate an AST node and return a Value trait object (internal)
    fn eval_value(&self, expr: &Expr) -> Result<Box<dyn Value>> {
        match expr {
            Expr::Var(name) => {
                // Unknown variable names are a hard error, not empty string.
                let s = self
                    .ctx
                    .get(name)
                    .wrap_err_with(|| format!("variable not found: {name}"))?;
                Ok(Box::new(StringValue(s.clone())) as Box<dyn Value>)
            }
            Expr::Literal(s) => Ok(Box::new(StringValue(s.clone())) as Box<dyn Value>),
            Expr::FuncCall(func, args) => self.eval_func(func, args),
            Expr::PropertyAccess(expr, prop) => self.eval_property(expr, prop),
            Expr::Pipe(left, right) => {
                // Evaluate the left side first, then feed it into the right.
                let left_val = self.eval_value(left)?;
                self.eval_with_input(right, left_val)
            }
        }
    }
    /// Evaluate an expression with a piped input value
    fn eval_with_input(&self, expr: &Expr, input: Box<dyn Value>) -> Result<Box<dyn Value>> {
        match expr {
            Expr::FuncCall(func, args) => {
                // For piped functions, append the input as last argument
                // (matches Go template semantics: `x | f a` == `f a x`).
                let mut full_args = args.clone();
                full_args.push(Expr::Literal(input.as_string()));
                self.eval_func(func, &full_args)
            }
            _ => bail!("can only pipe to function calls"),
        }
    }
    /// Evaluate property access
    fn eval_property(&self, expr: &Expr, prop: &str) -> Result<Box<dyn Value>> {
        let value = self.eval_value(expr)?;
        let prop_value = value.get_property(prop)?;
        Ok(Box::new(StringValue(prop_value)) as Box<dyn Value>)
    }
    /// Evaluate a function call
    fn eval_func(&self, func: &str, args: &[Expr]) -> Result<Box<dyn Value>> {
        // Evaluate all arguments first
        let evaluated_args: Result<Vec<Box<dyn Value>>> =
            args.iter().map(|arg| self.eval_value(arg)).collect();
        let evaluated_args = evaluated_args?;
        // Look up function in registry
        if let Some(func_impl) = FUNCTION_REGISTRY.get(func) {
            func_impl(&evaluated_args)
        } else {
            bail!("unknown function: {func}")
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
fn hashmap(data: Vec<(&str, &str)>) -> HashMap<String, String> {
data.iter()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect()
}
#[test]
fn test_render() {
let tmpl = "Hello, {{.OS}}!";
let ctx = hashmap(vec![("OS", "world")]);
assert_eq!(render(tmpl, &ctx).unwrap(), "Hello, world!");
}
#[test]
fn test_render_semver_maven() {
let tmpl = "https://archive.apache.org/dist/maven/maven-{{(semver .SemVer).Major}}/{{.SemVer}}/binaries/apache-maven-{{.SemVer}}-bin.tar.gz";
let ctx = hashmap(vec![("SemVer", "3.9.11")]);
assert_eq!(
render(tmpl, &ctx).unwrap(),
"https://archive.apache.org/dist/maven/maven-3/3.9.11/binaries/apache-maven-3.9.11-bin.tar.gz"
);
}
#[test]
fn test_render_nested_semver_in_function() {
// The semver function handles 'v' prefix internally, so (semver .Version).Major
// correctly extracts "3" from "v3.9.11". Then trimV is called on "3" (no-op).
let tmpl = "{{trimV (semver .Version).Major}}";
let ctx = hashmap(vec![("Version", "v3.9.11")]);
assert_eq!(render(tmpl, &ctx).unwrap(), "3");
}
#[test]
fn test_render_semver_handles_v_prefix() {
// semver function automatically strips 'v' prefix - no need for trimV
let tmpl = "{{semver .Version}}";
let ctx = hashmap(vec![("Version", "v3.9.11")]);
assert_eq!(render(tmpl, &ctx).unwrap(), "3.9.11");
}
#[test]
fn test_versioning_nth() {
// Test the versions crate directly
let v = Versioning::new("3.6.0").unwrap();
assert_eq!(v.nth(0).unwrap_or(0), 3);
assert_eq!(v.nth(1).unwrap_or(0), 6);
assert_eq!(v.nth(2).unwrap_or(0), 0);
}
#[test]
fn test_two_semver_calls() {
// Test calling semver twice in same template
let tmpl = "{{(semver .Version).Major}}.{{(semver .Version).Minor}}";
let ctx = hashmap(vec![("Version", "4.6.0")]);
let result = render(tmpl, &ctx).unwrap();
assert_eq!(result, "4.6", "Expected '4.6' but got '{}'", result);
}
#[test]
fn test_parse_second_semver() {
// Debug: parse just the second semver call
let tokens = lex("(semver .Version).Minor").unwrap();
let ast = parse_tokens(&tokens).unwrap();
// Should be: PropertyAccess(FuncCall("semver", [Var("Version")]), "Minor")
if let Expr::PropertyAccess(inner, prop) = ast {
assert_eq!(prop, "Minor");
if let Expr::FuncCall(func, args) = *inner {
assert_eq!(func, "semver");
assert_eq!(args.len(), 1);
} else {
panic!("Inner should be FuncCall, got: {:?}", inner);
}
} else {
panic!("Should be PropertyAccess, got: {:?}", ast);
}
}
#[test]
fn test_semver_property_major() {
let tmpl = "{{(semver .Version).Major}}";
let ctx = hashmap(vec![("Version", "3.6.0")]);
let result = render(tmpl, &ctx).unwrap();
assert_eq!(result, "3");
}
#[test]
fn test_semver_property_minor() {
let tmpl = "{{(semver .Version).Minor}}";
let ctx = hashmap(vec![("Version", "3.6.0")]);
let result = render(tmpl, &ctx).unwrap();
assert_eq!(result, "6");
}
#[test]
fn test_render_blender_url() {
// Exact pattern from blender registry with version 3.6.0 (failing case)
let tmpl = "https://download.blender.org/release/Blender{{(semver .Version).Major}}.{{(semver .Version).Minor}}/blender-{{trimV .Version}}-linux-x64.tar.xz";
let ctx = hashmap(vec![("Version", "3.6.0")]);
let result = render(tmpl, &ctx).unwrap();
assert_eq!(
result,
"https://download.blender.org/release/Blender3.6/blender-3.6.0-linux-x64.tar.xz"
);
}
#[test]
fn test_render_blender_url_4_3() {
// Test with 4.3.2
let tmpl = "https://download.blender.org/release/Blender{{(semver .Version).Major}}.{{(semver .Version).Minor}}/blender-{{trimV .Version}}-linux-x64.tar.xz";
let ctx = hashmap(vec![("Version", "4.3.2")]);
let result = render(tmpl, &ctx).unwrap();
assert_eq!(
result,
"https://download.blender.org/release/Blender4.3/blender-4.3.2-linux-x64.tar.xz"
);
}
#[test]
fn test_render_semver_as_function_arg() {
let tmpl = "{{title (semver .Version).Major}}";
let ctx = hashmap(vec![("Version", "3.9.11")]);
assert_eq!(render(tmpl, &ctx).unwrap(), "3");
}
#[test]
fn test_lex_semver_with_property() {
let tokens = lex("(semver .Version).Major").unwrap();
// Should be: LParen, Func(semver), Whitespace, Key(Version), RParen, Dot, Ident(Major)
assert!(
tokens.len() >= 6,
"Expected at least 6 tokens, got {}: {:?}",
tokens.len(),
tokens
);
}
#[test]
fn test_render_just_semver_paren() {
let tmpl = "{{(semver .Version)}}";
let ctx = hashmap(vec![("Version", "1.2.3")]);
assert_eq!(render(tmpl, &ctx).unwrap(), "1.2.3");
}
macro_rules! parse_tests {
($($name:ident: $value:expr,)*) => {
$(
#[test]
fn $name() {
let (input, expected, ctx_data): (&str, &str, Vec<(&str, &str)>) = $value;
let ctx = hashmap(ctx_data);
let tmpl = format!("{{{{{}}}}}", input);
assert_eq!(expected, render(&tmpl, &ctx).unwrap());
}
)*
}}
parse_tests!(
test_parse_key: (".OS", "world", vec![("OS", "world")]),
test_parse_string: ("\"world\"", "world", vec![]),
test_parse_title: (r#"title "world""#, "World", vec![]),
test_parse_trimv: (r#"trimV "v1.0.0""#, "1.0.0", vec![]),
test_parse_trim_prefix: (r#"trimPrefix "v" "v1.0.0""#, "1.0.0", vec![]),
test_parse_trim_prefix2: (r#"trimPrefix "v" "1.0.0""#, "1.0.0", vec![]),
test_parse_trim_suffix: (r#"trimSuffix "-v1.0.0" "foo-v1.0.0""#, "foo", vec![]),
test_parse_pipe: (r#"trimPrefix "foo-" "foo-v1.0.0" | trimV"#, "1.0.0", vec![]),
test_parse_multiple_pipes: (
r#"trimPrefix "foo-" "foo-v1.0.0-beta" | trimSuffix "-beta" | trimV"#,
"1.0.0",
vec![],
),
test_parse_replace: (r#"replace "foo" "bar" "foo-bar""#, "bar-bar", vec![]),
test_parse_semver_major: (r#"(semver .Version).Major"#, "3", vec![("Version", "3.9.11")]),
test_parse_semver_minor: (r#"(semver .Version).Minor"#, "9", vec![("Version", "3.9.11")]),
test_parse_semver_patch: (r#"(semver .Version).Patch"#, "11", vec![("Version", "3.9.11")]),
test_parse_semver_major_v_prefix: (r#"(semver .Version).Major"#, "1", vec![("Version", "v1.2.3")]),
test_parse_semver_no_property: (r#"(semver .Version)"#, "1.2.3", vec![("Version", "1.2.3")]),
test_parse_nested_semver_in_trimv: (r#"trimV (semver .Version).Major"#, "3", vec![("Version", "v3.9.11")]),
test_parse_nested_semver_in_title: (r#"title (semver .Version).Minor"#, "9", vec![("Version", "3.9.11")]),
test_parse_semver_standalone: (r#"semver .Version"#, "1.2.3", vec![("Version", "v1.2.3")]),
test_parse_semver_standalone_no_v: (r#"semver .Version"#, "1.2.3", vec![("Version", "1.2.3")]),
);
#[test]
fn test_parse_err() {
let ctx = HashMap::new();
let result = render("{{foo}}", &ctx);
assert!(result.is_err());
}
#[test]
fn test_lex() {
assert_eq!(
lex(r#"trimPrefix "foo-" "foo-v1.0.0" | trimV"#).unwrap(),
vec![
Token::Func("trimPrefix"),
Token::Whitespace(" "),
Token::String("foo-"),
Token::Whitespace(" "),
Token::String("foo-v1.0.0"),
Token::Whitespace(" "),
Token::Pipe,
Token::Whitespace(" "),
Token::Func("trimV"),
]
);
}
#[test]
fn test_gradle_src_template() {
// Test the gradle src template pattern: {{.AssetWithoutExt | trimSuffix "-bin"}}/bin/gradle
// This tests that pipe expressions work correctly when preceded by whitespace
let tmpl = r#"{{.AssetWithoutExt | trimSuffix "-bin"}}/bin/gradle"#;
let ctx = hashmap(vec![("AssetWithoutExt", "gradle-8.14.3-bin")]);
assert_eq!(render(tmpl, &ctx).unwrap(), "gradle-8.14.3/bin/gradle");
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/aqua-registry/src/types.rs | crates/aqua-registry/src/types.rs | use expr::{Context, Environment, Program, Value};
use eyre::{Result, eyre};
use indexmap::IndexSet;
use itertools::Itertools;
use serde_derive::Deserialize;
use std::cmp::PartialEq;
use std::collections::HashMap;
use versions::Versioning;
/// Type of Aqua package
#[derive(Debug, Deserialize, Default, Clone, PartialEq, strum::Display)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum AquaPackageType {
GithubArchive,
GithubContent,
#[default]
GithubRelease,
Http,
GoInstall,
Cargo,
}
/// Main Aqua package definition
#[derive(Debug, Deserialize, Clone)]
#[serde(default)]
pub struct AquaPackage {
pub r#type: AquaPackageType,
pub repo_owner: String,
pub repo_name: String,
pub name: Option<String>,
pub asset: String,
pub url: String,
pub description: Option<String>,
pub format: String,
pub rosetta2: bool,
pub windows_arm_emulation: bool,
pub complete_windows_ext: bool,
pub supported_envs: Vec<String>,
pub files: Vec<AquaFile>,
pub replacements: HashMap<String, String>,
pub version_prefix: Option<String>,
version_filter: Option<String>,
#[serde(skip)]
version_filter_expr: Option<Program>,
pub version_source: Option<String>,
pub checksum: Option<AquaChecksum>,
pub slsa_provenance: Option<AquaSlsaProvenance>,
pub minisign: Option<AquaMinisign>,
pub github_artifact_attestations: Option<AquaGithubArtifactAttestations>,
overrides: Vec<AquaOverride>,
version_constraint: String,
pub version_overrides: Vec<AquaPackage>,
pub no_asset: bool,
pub error_message: Option<String>,
pub path: Option<String>,
}
/// Override configuration for specific OS/architecture combinations
#[derive(Debug, Deserialize, Clone)]
struct AquaOverride {
#[serde(flatten)]
pkg: AquaPackage,
goos: Option<String>,
goarch: Option<String>,
}
/// File definition within a package
#[derive(Debug, Deserialize, Clone)]
pub struct AquaFile {
pub name: String,
pub src: Option<String>,
}
/// Checksum algorithm options
#[derive(Debug, Deserialize, Clone, strum::AsRefStr, strum::Display)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum AquaChecksumAlgorithm {
Sha1,
Sha256,
Sha512,
Md5,
}
/// Type of checksum source
#[derive(Debug, Deserialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum AquaChecksumType {
GithubRelease,
Http,
}
/// Type of minisign source
#[derive(Debug, Deserialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum AquaMinisignType {
GithubRelease,
Http,
}
/// Cosign signature configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaCosignSignature {
pub r#type: Option<String>,
pub repo_owner: Option<String>,
pub repo_name: Option<String>,
pub url: Option<String>,
pub asset: Option<String>,
}
/// Cosign verification configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaCosign {
pub enabled: Option<bool>,
pub signature: Option<AquaCosignSignature>,
pub key: Option<AquaCosignSignature>,
pub certificate: Option<AquaCosignSignature>,
pub bundle: Option<AquaCosignSignature>,
#[serde(skip_serializing_if = "Vec::is_empty", default)]
opts: Vec<String>,
}
/// SLSA provenance configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaSlsaProvenance {
pub enabled: Option<bool>,
pub r#type: Option<String>,
pub repo_owner: Option<String>,
pub repo_name: Option<String>,
pub url: Option<String>,
pub asset: Option<String>,
pub source_uri: Option<String>,
pub source_tag: Option<String>,
}
/// Minisign verification configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaMinisign {
pub enabled: Option<bool>,
pub r#type: Option<AquaMinisignType>,
pub repo_owner: Option<String>,
pub repo_name: Option<String>,
pub url: Option<String>,
pub asset: Option<String>,
pub public_key: Option<String>,
}
/// GitHub artifact attestations configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaGithubArtifactAttestations {
pub enabled: Option<bool>,
pub signer_workflow: Option<String>,
}
/// Checksum verification configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaChecksum {
pub r#type: Option<AquaChecksumType>,
pub algorithm: Option<AquaChecksumAlgorithm>,
pub pattern: Option<AquaChecksumPattern>,
pub cosign: Option<AquaCosign>,
file_format: Option<String>,
enabled: Option<bool>,
asset: Option<String>,
url: Option<String>,
}
/// Checksum pattern configuration
#[derive(Debug, Deserialize, Clone)]
pub struct AquaChecksumPattern {
pub checksum: String,
pub file: Option<String>,
}
/// Registry YAML file structure
#[derive(Debug, Deserialize)]
pub struct RegistryYaml {
pub packages: Vec<AquaPackage>,
}
impl Default for AquaPackage {
fn default() -> Self {
Self {
r#type: AquaPackageType::GithubRelease,
repo_owner: String::new(),
repo_name: String::new(),
name: None,
asset: String::new(),
url: String::new(),
description: None,
format: String::new(),
rosetta2: false,
windows_arm_emulation: false,
complete_windows_ext: true,
supported_envs: Vec::new(),
files: Vec::new(),
replacements: HashMap::new(),
version_prefix: None,
version_filter: None,
version_filter_expr: None,
version_source: None,
checksum: None,
slsa_provenance: None,
minisign: None,
github_artifact_attestations: None,
overrides: Vec::new(),
version_constraint: String::new(),
version_overrides: Vec::new(),
no_asset: false,
error_message: None,
path: None,
}
}
}
impl AquaPackage {
/// Apply version-specific configurations and overrides
pub fn with_version(mut self, versions: &[&str], os: &str, arch: &str) -> AquaPackage {
self = apply_override(self.clone(), self.version_override(versions));
if let Some(avo) = self.overrides.clone().into_iter().find(|o| {
if let (Some(goos), Some(goarch)) = (&o.goos, &o.goarch) {
goos == os && goarch == arch
} else if let Some(goos) = &o.goos {
goos == os
} else if let Some(goarch) = &o.goarch {
goarch == arch
} else {
false
}
}) {
self = apply_override(self, &avo.pkg)
}
self
}
fn version_override(&self, versions: &[&str]) -> &AquaPackage {
let expressions = versions
.iter()
.map(|v| (self.expr_parser(v), self.expr_ctx(v)))
.collect_vec();
vec![self]
.into_iter()
.chain(self.version_overrides.iter())
.find(|vo| {
if vo.version_constraint.is_empty() {
true
} else {
expressions.iter().any(|(expr, ctx)| {
expr.eval(&vo.version_constraint, ctx)
.map_err(|e| {
log::debug!("error parsing {}: {e}", vo.version_constraint)
})
.unwrap_or(false.into())
.as_bool()
.unwrap()
})
}
})
.unwrap_or(self)
}
/// Detect the format of an archive based on its filename
fn detect_format(&self, asset_name: &str) -> &'static str {
let formats = [
"tar.br", "tar.bz2", "tar.gz", "tar.lz4", "tar.sz", "tar.xz", "tbr", "tbz", "tbz2",
"tgz", "tlz4", "tsz", "txz", "tar.zst", "zip", "gz", "bz2", "lz4", "sz", "xz", "zst",
"dmg", "pkg", "rar", "tar",
];
for format in formats {
if asset_name.ends_with(&format!(".{format}")) {
return match format {
"tgz" => "tar.gz",
"txz" => "tar.xz",
"tbz2" | "tbz" => "tar.bz2",
_ => format,
};
}
}
"raw"
}
/// Get the format for this package and version
pub fn format(&self, v: &str, os: &str, arch: &str) -> Result<&str> {
if self.r#type == AquaPackageType::GithubArchive {
return Ok("tar.gz");
}
let format = if self.format.is_empty() {
let asset = if !self.asset.is_empty() {
self.asset(v, os, arch)?
} else if !self.url.is_empty() {
self.url.to_string()
} else {
log::debug!("no asset or url for {}/{}", self.repo_owner, self.repo_name);
String::new()
};
self.detect_format(&asset)
} else {
match self.format.as_str() {
"tgz" => "tar.gz",
"txz" => "tar.xz",
"tbz2" | "tbz" => "tar.bz2",
format => format,
}
};
Ok(format)
}
/// Get the asset name for this package and version
pub fn asset(&self, v: &str, os: &str, arch: &str) -> Result<String> {
if self.asset.is_empty() && self.url.split("/").count() > "//".len() {
let asset = self.url.rsplit("/").next().unwrap_or("");
self.parse_aqua_str(asset, v, &Default::default(), os, arch)
} else {
self.parse_aqua_str(&self.asset, v, &Default::default(), os, arch)
}
}
/// Get all possible asset strings for this package, version and platform
pub fn asset_strs(&self, v: &str, os: &str, arch: &str) -> Result<IndexSet<String>> {
let mut strs =
IndexSet::from([self.parse_aqua_str(&self.asset, v, &Default::default(), os, arch)?]);
if os == "darwin" {
let mut ctx = HashMap::default();
ctx.insert("Arch".to_string(), "universal".to_string());
strs.insert(self.parse_aqua_str(&self.asset, v, &ctx, os, arch)?);
} else if os == "windows" {
let mut ctx = HashMap::default();
let asset = self.parse_aqua_str(&self.asset, v, &ctx, os, arch)?;
if self.complete_windows_ext && self.format(v, os, arch)? == "raw" {
strs.insert(format!("{asset}.exe"));
} else {
strs.insert(asset);
}
if arch == "arm64" {
ctx.insert("Arch".to_string(), "amd64".to_string());
strs.insert(self.parse_aqua_str(&self.asset, v, &ctx, os, arch)?);
let asset = self.parse_aqua_str(&self.asset, v, &ctx, os, arch)?;
if self.complete_windows_ext && self.format(v, os, arch)? == "raw" {
strs.insert(format!("{asset}.exe"));
} else {
strs.insert(asset);
}
}
}
Ok(strs)
}
/// Get the URL for this package and version
pub fn url(&self, v: &str, os: &str, arch: &str) -> Result<String> {
let mut url = self.url.clone();
if os == "windows" && self.complete_windows_ext && self.format(v, os, arch)? == "raw" {
url.push_str(".exe");
}
self.parse_aqua_str(&url, v, &Default::default(), os, arch)
}
/// Parse an Aqua template string with variable substitution and platform info
pub fn parse_aqua_str(
&self,
s: &str,
v: &str,
overrides: &HashMap<String, String>,
os: &str,
arch: &str,
) -> Result<String> {
let mut actual_arch = arch;
if os == "darwin" && arch == "arm64" && self.rosetta2 {
actual_arch = "amd64";
}
if os == "windows" && arch == "arm64" && self.windows_arm_emulation {
actual_arch = "amd64";
}
let replace = |s: &str| {
self.replacements
.get(s)
.map(|s| s.to_string())
.unwrap_or_else(|| s.to_string())
};
let semver = if let Some(prefix) = &self.version_prefix {
v.strip_prefix(prefix).unwrap_or(v)
} else {
v
};
let mut ctx = HashMap::new();
ctx.insert("Version".to_string(), replace(v));
ctx.insert("SemVer".to_string(), replace(semver));
ctx.insert("OS".to_string(), replace(os));
ctx.insert("GOOS".to_string(), replace(os));
ctx.insert("GOARCH".to_string(), replace(actual_arch));
ctx.insert("Arch".to_string(), replace(actual_arch));
ctx.insert("Format".to_string(), replace(&self.format));
ctx.extend(overrides.clone());
crate::template::render(s, &ctx)
}
/// Set up version filter expression if configured
pub fn setup_version_filter(&mut self) -> Result<()> {
if let Some(version_filter) = &self.version_filter {
self.version_filter_expr = Some(expr::compile(version_filter)?);
}
Ok(())
}
/// Check if a version passes the version filter
pub fn version_filter_ok(&self, v: &str) -> Result<bool> {
if let Some(filter) = self.version_filter_expr.clone() {
if let Value::Bool(expr) = self.expr(v, filter)? {
Ok(expr)
} else {
log::warn!(
"invalid response from version filter: {}",
self.version_filter.as_ref().unwrap()
);
Ok(true)
}
} else {
Ok(true)
}
}
fn expr(&self, v: &str, program: Program) -> Result<Value> {
let expr = self.expr_parser(v);
expr.run(program, &self.expr_ctx(v)).map_err(|e| eyre!(e))
}
fn expr_parser(&self, v: &str) -> Environment<'_> {
let (_, v) = split_version_prefix(v);
let ver = Versioning::new(v);
let mut env = Environment::new();
env.add_function("semver", move |c| {
if c.args.len() != 1 {
return Err("semver() takes exactly one argument".to_string().into());
}
let requirements = c.args[0]
.as_string()
.unwrap()
.replace(' ', "")
.split(',')
.map(versions::Requirement::new)
.collect::<Vec<_>>();
if requirements.iter().any(|r| r.is_none()) {
return Err("invalid semver requirement".to_string().into());
}
if let Some(ver) = &ver {
Ok(requirements
.iter()
.all(|r| r.clone().is_some_and(|r| r.matches(ver)))
.into())
} else {
Err("invalid version".to_string().into())
}
});
env
}
fn expr_ctx(&self, v: &str) -> Context {
let mut ctx = Context::default();
ctx.insert("Version", v);
ctx
}
}
/// splits a version number into an optional prefix and the remaining version string
fn split_version_prefix(version: &str) -> (String, String) {
version
.char_indices()
.find_map(|(i, c)| {
if c.is_ascii_digit() {
if i == 0 {
return Some(i);
}
// If the previous char is a delimiter or 'v', we found a split point.
let prev_char = version.chars().nth(i - 1).unwrap();
if ['-', '_', '/', '.', 'v', 'V'].contains(&prev_char) {
return Some(i);
}
}
None
})
.map_or_else(
|| ("".into(), version.into()),
|i| {
let (prefix, version) = version.split_at(i);
(prefix.into(), version.into())
},
)
}
impl AquaFile {
/// Get the source path for this file within the package
pub fn src(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<Option<String>> {
let asset = pkg.asset(v, os, arch)?;
let asset = asset.strip_suffix(".tar.gz").unwrap_or(&asset);
let asset = asset.strip_suffix(".tar.xz").unwrap_or(asset);
let asset = asset.strip_suffix(".tar.bz2").unwrap_or(asset);
let asset = asset.strip_suffix(".gz").unwrap_or(asset);
let asset = asset.strip_suffix(".xz").unwrap_or(asset);
let asset = asset.strip_suffix(".bz2").unwrap_or(asset);
let asset = asset.strip_suffix(".zip").unwrap_or(asset);
let asset = asset.strip_suffix(".tar").unwrap_or(asset);
let asset = asset.strip_suffix(".tgz").unwrap_or(asset);
let asset = asset.strip_suffix(".txz").unwrap_or(asset);
let asset = asset.strip_suffix(".tbz2").unwrap_or(asset);
let asset = asset.strip_suffix(".tbz").unwrap_or(asset);
let mut ctx = HashMap::new();
ctx.insert("AssetWithoutExt".to_string(), asset.to_string());
ctx.insert("FileName".to_string(), self.name.to_string());
self.src
.as_ref()
.map(|src| pkg.parse_aqua_str(src, v, &ctx, os, arch))
.transpose()
}
}
fn apply_override(mut orig: AquaPackage, avo: &AquaPackage) -> AquaPackage {
if avo.r#type != AquaPackageType::GithubRelease {
orig.r#type = avo.r#type.clone();
}
if !avo.repo_owner.is_empty() {
orig.repo_owner = avo.repo_owner.clone();
}
if !avo.repo_name.is_empty() {
orig.repo_name = avo.repo_name.clone();
}
if !avo.asset.is_empty() {
orig.asset = avo.asset.clone();
}
if !avo.url.is_empty() {
orig.url = avo.url.clone();
}
if !avo.format.is_empty() {
orig.format = avo.format.clone();
}
if avo.rosetta2 {
orig.rosetta2 = true;
}
if avo.windows_arm_emulation {
orig.windows_arm_emulation = true;
}
if !avo.complete_windows_ext {
orig.complete_windows_ext = false;
}
if !avo.supported_envs.is_empty() {
orig.supported_envs = avo.supported_envs.clone();
}
if !avo.files.is_empty() {
orig.files = avo.files.clone();
}
orig.replacements.extend(avo.replacements.clone());
if let Some(avo_version_prefix) = avo.version_prefix.clone() {
orig.version_prefix = Some(avo_version_prefix);
}
if !avo.overrides.is_empty() {
orig.overrides = avo.overrides.clone();
}
if let Some(avo_checksum) = avo.checksum.clone() {
match &mut orig.checksum {
Some(checksum) => {
checksum.merge(avo_checksum.clone());
}
None => {
orig.checksum = Some(avo_checksum.clone());
}
}
}
if let Some(avo_slsa_provenance) = avo.slsa_provenance.clone() {
match &mut orig.slsa_provenance {
Some(slsa_provenance) => {
slsa_provenance.merge(avo_slsa_provenance.clone());
}
None => {
orig.slsa_provenance = Some(avo_slsa_provenance.clone());
}
}
}
if let Some(avo_minisign) = avo.minisign.clone() {
match &mut orig.minisign {
Some(minisign) => {
minisign.merge(avo_minisign.clone());
}
None => {
orig.minisign = Some(avo_minisign.clone());
}
}
}
if let Some(avo_attestations) = avo.github_artifact_attestations.clone() {
match &mut orig.github_artifact_attestations {
Some(orig_attestations) => {
orig_attestations.merge(avo_attestations.clone());
}
None => {
orig.github_artifact_attestations = Some(avo_attestations.clone());
}
}
}
if avo.no_asset {
orig.no_asset = true;
}
if let Some(error_message) = avo.error_message.clone() {
orig.error_message = Some(error_message);
}
if let Some(path) = avo.path.clone() {
orig.path = Some(path);
}
orig
}
// Implementation of merge methods for various types
impl AquaChecksum {
pub fn _type(&self) -> &AquaChecksumType {
self.r#type.as_ref().unwrap()
}
pub fn algorithm(&self) -> &AquaChecksumAlgorithm {
self.algorithm.as_ref().unwrap()
}
pub fn asset_strs(
&self,
pkg: &AquaPackage,
v: &str,
os: &str,
arch: &str,
) -> Result<IndexSet<String>> {
let mut asset_strs = IndexSet::new();
for asset in pkg.asset_strs(v, os, arch)? {
let checksum_asset = self.asset.as_ref().unwrap();
let mut ctx = HashMap::new();
ctx.insert("Asset".to_string(), asset.to_string());
asset_strs.insert(pkg.parse_aqua_str(checksum_asset, v, &ctx, os, arch)?);
}
Ok(asset_strs)
}
pub fn pattern(&self) -> &AquaChecksumPattern {
self.pattern.as_ref().unwrap()
}
pub fn enabled(&self) -> bool {
self.enabled.unwrap_or(true)
}
pub fn file_format(&self) -> &str {
self.file_format.as_deref().unwrap_or("raw")
}
pub fn url(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<String> {
pkg.parse_aqua_str(self.url.as_ref().unwrap(), v, &Default::default(), os, arch)
}
fn merge(&mut self, other: Self) {
if let Some(r#type) = other.r#type {
self.r#type = Some(r#type);
}
if let Some(algorithm) = other.algorithm {
self.algorithm = Some(algorithm);
}
if let Some(pattern) = other.pattern {
self.pattern = Some(pattern);
}
if let Some(enabled) = other.enabled {
self.enabled = Some(enabled);
}
if let Some(asset) = other.asset {
self.asset = Some(asset);
}
if let Some(url) = other.url {
self.url = Some(url);
}
if let Some(file_format) = other.file_format {
self.file_format = Some(file_format);
}
if let Some(cosign) = other.cosign {
if self.cosign.is_none() {
self.cosign = Some(cosign.clone());
}
self.cosign.as_mut().unwrap().merge(cosign);
}
}
}
impl AquaCosign {
// TODO: This does not support `{{.Asset}}`.
pub fn opts(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<Vec<String>> {
self.opts
.iter()
.map(|opt| pkg.parse_aqua_str(opt, v, &Default::default(), os, arch))
.collect()
}
fn merge(&mut self, other: Self) {
if let Some(enabled) = other.enabled {
self.enabled = Some(enabled);
}
if let Some(signature) = other.signature.clone() {
if self.signature.is_none() {
self.signature = Some(signature.clone());
}
self.signature.as_mut().unwrap().merge(signature);
}
if let Some(key) = other.key.clone() {
if self.key.is_none() {
self.key = Some(key.clone());
}
self.key.as_mut().unwrap().merge(key);
}
if let Some(certificate) = other.certificate.clone() {
if self.certificate.is_none() {
self.certificate = Some(certificate.clone());
}
self.certificate.as_mut().unwrap().merge(certificate);
}
if let Some(bundle) = other.bundle.clone() {
if self.bundle.is_none() {
self.bundle = Some(bundle.clone());
}
self.bundle.as_mut().unwrap().merge(bundle);
}
if !other.opts.is_empty() {
self.opts = other.opts.clone();
}
}
}
impl AquaCosignSignature {
pub fn url(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<String> {
pkg.parse_aqua_str(self.url.as_ref().unwrap(), v, &Default::default(), os, arch)
}
pub fn asset_strs(
&self,
pkg: &AquaPackage,
v: &str,
os: &str,
arch: &str,
) -> Result<IndexSet<String>> {
let mut asset_strs = IndexSet::new();
if let Some(cosign_asset_template) = &self.asset {
for asset in pkg.asset_strs(v, os, arch)? {
let mut ctx = HashMap::new();
ctx.insert("Asset".to_string(), asset.to_string());
asset_strs.insert(pkg.parse_aqua_str(cosign_asset_template, v, &ctx, os, arch)?);
}
}
Ok(asset_strs)
}
fn merge(&mut self, other: Self) {
if let Some(r#type) = other.r#type {
self.r#type = Some(r#type);
}
if let Some(repo_owner) = other.repo_owner {
self.repo_owner = Some(repo_owner);
}
if let Some(repo_name) = other.repo_name {
self.repo_name = Some(repo_name);
}
if let Some(url) = other.url {
self.url = Some(url);
}
if let Some(asset) = other.asset {
self.asset = Some(asset);
}
}
}
impl AquaSlsaProvenance {
pub fn asset_strs(
&self,
pkg: &AquaPackage,
v: &str,
os: &str,
arch: &str,
) -> Result<IndexSet<String>> {
let mut asset_strs = IndexSet::new();
if let Some(slsa_asset_template) = &self.asset {
for asset in pkg.asset_strs(v, os, arch)? {
let mut ctx = HashMap::new();
ctx.insert("Asset".to_string(), asset.to_string());
asset_strs.insert(pkg.parse_aqua_str(slsa_asset_template, v, &ctx, os, arch)?);
}
}
Ok(asset_strs)
}
pub fn url(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<String> {
pkg.parse_aqua_str(self.url.as_ref().unwrap(), v, &Default::default(), os, arch)
}
fn merge(&mut self, other: Self) {
if let Some(enabled) = other.enabled {
self.enabled = Some(enabled);
}
if let Some(r#type) = other.r#type {
self.r#type = Some(r#type);
}
if let Some(repo_owner) = other.repo_owner {
self.repo_owner = Some(repo_owner);
}
if let Some(repo_name) = other.repo_name {
self.repo_name = Some(repo_name);
}
if let Some(url) = other.url {
self.url = Some(url);
}
if let Some(asset) = other.asset {
self.asset = Some(asset);
}
if let Some(source_uri) = other.source_uri {
self.source_uri = Some(source_uri);
}
if let Some(source_tag) = other.source_tag {
self.source_tag = Some(source_tag);
}
}
}
impl AquaMinisign {
pub fn _type(&self) -> &AquaMinisignType {
self.r#type.as_ref().unwrap()
}
pub fn url(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<String> {
pkg.parse_aqua_str(self.url.as_ref().unwrap(), v, &Default::default(), os, arch)
}
pub fn asset(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<String> {
pkg.parse_aqua_str(
self.asset.as_ref().unwrap(),
v,
&Default::default(),
os,
arch,
)
}
pub fn public_key(&self, pkg: &AquaPackage, v: &str, os: &str, arch: &str) -> Result<String> {
pkg.parse_aqua_str(
self.public_key.as_ref().unwrap(),
v,
&Default::default(),
os,
arch,
)
}
fn merge(&mut self, other: Self) {
if let Some(enabled) = other.enabled {
self.enabled = Some(enabled);
}
if let Some(r#type) = other.r#type {
self.r#type = Some(r#type);
}
if let Some(repo_owner) = other.repo_owner {
self.repo_owner = Some(repo_owner);
}
if let Some(repo_name) = other.repo_name {
self.repo_name = Some(repo_name);
}
if let Some(url) = other.url {
self.url = Some(url);
}
if let Some(asset) = other.asset {
self.asset = Some(asset);
}
if let Some(public_key) = other.public_key {
self.public_key = Some(public_key);
}
}
}
impl AquaGithubArtifactAttestations {
fn merge(&mut self, other: Self) {
if let Some(enabled) = other.enabled {
self.enabled = Some(enabled);
}
if let Some(signer_workflow) = other.signer_workflow {
self.signer_workflow = Some(signer_workflow);
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_aqua_file_src_gradle() {
// Test the gradle package src template: {{.AssetWithoutExt | trimSuffix "-bin"}}/bin/gradle
let pkg = AquaPackage {
repo_owner: "gradle".to_string(),
repo_name: "gradle-distributions".to_string(),
asset: "gradle-{{trimV .Version}}-bin.zip".to_string(),
..Default::default()
};
let file = AquaFile {
name: "gradle".to_string(),
src: Some("{{.AssetWithoutExt | trimSuffix \"-bin\"}}/bin/gradle".to_string()),
};
let result = file.src(&pkg, "8.14.3", "darwin", "arm64").unwrap();
assert_eq!(result, Some("gradle-8.14.3/bin/gradle".to_string()));
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/aqua-registry/src/registry.rs | crates/aqua-registry/src/registry.rs | use crate::types::{AquaPackage, RegistryYaml};
use crate::{AquaRegistryConfig, AquaRegistryError, CacheStore, RegistryFetcher, Result};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::LazyLock;
use tokio::sync::Mutex;
/// The main Aqua registry implementation
#[derive(Debug)]
pub struct AquaRegistry<F = DefaultRegistryFetcher, C = NoOpCacheStore>
where
F: RegistryFetcher,
C: CacheStore,
{
#[allow(dead_code)]
config: AquaRegistryConfig,
fetcher: F,
#[allow(dead_code)]
cache_store: C,
#[allow(dead_code)]
repo_exists: bool,
}
/// Default implementation of RegistryFetcher
#[derive(Debug, Clone)]
pub struct DefaultRegistryFetcher {
config: AquaRegistryConfig,
}
/// No-op implementation of CacheStore
#[derive(Debug, Clone, Default)]
pub struct NoOpCacheStore;
/// File-based cache store implementation
#[derive(Debug, Clone)]
pub struct FileCacheStore {
cache_dir: PathBuf,
}
/// Baked registry files (compiled into binary)
pub static AQUA_STANDARD_REGISTRY_FILES: LazyLock<HashMap<&'static str, &'static str>> =
LazyLock::new(|| include!(concat!(env!("OUT_DIR"), "/aqua_standard_registry.rs")));
impl AquaRegistry {
/// Create a new AquaRegistry with the given configuration
pub fn new(config: AquaRegistryConfig) -> Self {
let repo_exists = Self::check_repo_exists(&config.cache_dir);
let fetcher = DefaultRegistryFetcher {
config: config.clone(),
};
Self {
config,
fetcher,
cache_store: NoOpCacheStore,
repo_exists,
}
}
/// Create a new AquaRegistry with custom fetcher and cache store
pub fn with_fetcher_and_cache<F, C>(
config: AquaRegistryConfig,
fetcher: F,
cache_store: C,
) -> AquaRegistry<F, C>
where
F: RegistryFetcher,
C: CacheStore,
{
let repo_exists = Self::check_repo_exists(&config.cache_dir);
AquaRegistry {
config,
fetcher,
cache_store,
repo_exists,
}
}
fn check_repo_exists(cache_dir: &std::path::Path) -> bool {
cache_dir.join(".git").exists()
}
}
impl<F, C> AquaRegistry<F, C>
where
F: RegistryFetcher,
C: CacheStore,
{
/// Get a package definition by ID
pub async fn package(&self, id: &str) -> Result<AquaPackage> {
static CACHE: LazyLock<Mutex<HashMap<String, AquaPackage>>> =
LazyLock::new(|| Mutex::new(HashMap::new()));
if let Some(pkg) = CACHE.lock().await.get(id) {
return Ok(pkg.clone());
}
let registry = self.fetcher.fetch_registry(id).await?;
let mut pkg = registry
.packages
.into_iter()
.next()
.ok_or_else(|| AquaRegistryError::PackageNotFound(id.to_string()))?;
pkg.setup_version_filter()?;
CACHE.lock().await.insert(id.to_string(), pkg.clone());
Ok(pkg)
}
/// Get a package definition configured for specific versions
pub async fn package_with_version(
&self,
id: &str,
versions: &[&str],
os: &str,
arch: &str,
) -> Result<AquaPackage> {
Ok(self.package(id).await?.with_version(versions, os, arch))
}
}
impl RegistryFetcher for DefaultRegistryFetcher {
async fn fetch_registry(&self, package_id: &str) -> Result<RegistryYaml> {
let path_id = package_id
.split('/')
.collect::<Vec<_>>()
.join(std::path::MAIN_SEPARATOR_STR);
let path = self
.config
.cache_dir
.join("pkgs")
.join(&path_id)
.join("registry.yaml");
// Try to read from local repository first
if self.config.cache_dir.join(".git").exists() && path.exists() {
log::trace!("reading aqua-registry for {package_id} from repo at {path:?}");
let contents = std::fs::read_to_string(&path)?;
return Ok(serde_yaml::from_str(&contents)?);
}
// Fall back to baked registry if enabled
#[allow(clippy::collapsible_if)]
if self.config.use_baked_registry && AQUA_STANDARD_REGISTRY_FILES.contains_key(package_id) {
if let Some(content) = AQUA_STANDARD_REGISTRY_FILES.get(package_id) {
log::trace!("reading baked-in aqua-registry for {package_id}");
return Ok(serde_yaml::from_str(content)?);
}
}
Err(AquaRegistryError::RegistryNotAvailable(format!(
"no aqua-registry found for {package_id}"
)))
}
}
impl CacheStore for NoOpCacheStore {
fn is_fresh(&self, _key: &str) -> bool {
false
}
fn store(&self, _key: &str, _data: &[u8]) -> std::io::Result<()> {
Ok(())
}
fn retrieve(&self, _key: &str) -> std::io::Result<Option<Vec<u8>>> {
Ok(None)
}
}
impl FileCacheStore {
pub fn new(cache_dir: PathBuf) -> Self {
Self { cache_dir }
}
}
impl CacheStore for FileCacheStore {
fn is_fresh(&self, key: &str) -> bool {
// Check if cache entry exists and is less than a week old
#[allow(clippy::collapsible_if)]
if let Ok(metadata) = std::fs::metadata(self.cache_dir.join(key)) {
if let Ok(modified) = metadata.modified() {
let age = std::time::SystemTime::now()
.duration_since(modified)
.unwrap_or_default();
return age < std::time::Duration::from_secs(7 * 24 * 60 * 60); // 1 week
}
}
false
}
fn store(&self, key: &str, data: &[u8]) -> std::io::Result<()> {
let path = self.cache_dir.join(key);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
}
std::fs::write(path, data)
}
fn retrieve(&self, key: &str) -> std::io::Result<Option<Vec<u8>>> {
let path = self.cache_dir.join(key);
match std::fs::read(path) {
Ok(data) => Ok(Some(data)),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(e) => Err(e),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_registry_creation() {
let config = AquaRegistryConfig::default();
let registry = AquaRegistry::new(config);
// This should not panic - registry should be created successfully
drop(registry);
}
#[test]
fn test_cache_store() {
let cache = NoOpCacheStore;
assert!(!cache.is_fresh("test"));
assert!(cache.store("test", b"data").is_ok());
assert!(cache.retrieve("test").unwrap().is_none());
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/build.rs | crates/vfox/build.rs | use std::collections::BTreeMap;
use std::env;
use std::fs;
use std::path::Path;
fn main() {
codegen_embedded_plugins();
}
/// Convert a path to a string with forward slashes (required for include_str! on Windows)
fn path_to_forward_slashes(path: &Path) -> String {
path.to_string_lossy().replace('\\', "/")
}
fn codegen_embedded_plugins() {
let out_dir = env::var_os("OUT_DIR").unwrap();
let dest_path = Path::new(&out_dir).join("embedded_plugins.rs");
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let embedded_dir = Path::new(&manifest_dir).join("embedded-plugins");
// Tell Cargo to re-run if any embedded plugin files change
println!("cargo:rerun-if-changed=embedded-plugins");
if !embedded_dir.exists() {
// Generate empty implementation if no embedded plugins
let code = r#"
#[derive(Debug)]
pub struct EmbeddedPlugin {
pub metadata: &'static str,
pub hooks: &'static [(&'static str, &'static str)],
pub lib: &'static [(&'static str, &'static str)],
}
pub fn get_embedded_plugin(_name: &str) -> Option<&'static EmbeddedPlugin> {
None
}
pub fn list_embedded_plugins() -> &'static [&'static str] {
&[]
}
"#;
fs::write(&dest_path, code).unwrap();
return;
}
let mut plugins: BTreeMap<String, PluginFiles> = BTreeMap::new();
// Scan for plugin directories
for entry in fs::read_dir(&embedded_dir).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if !path.is_dir() {
continue;
}
let dir_name = path.file_name().unwrap().to_string_lossy().to_string();
if !dir_name.starts_with("vfox-") {
continue;
}
// Tell Cargo to re-run if this plugin directory or any Lua files change
println!("cargo:rerun-if-changed={}", path.display());
// Also track subdirectories and individual Lua files
let hooks_dir = path.join("hooks");
if hooks_dir.exists() {
println!("cargo:rerun-if-changed={}", hooks_dir.display());
for entry in fs::read_dir(&hooks_dir).unwrap().flatten() {
if entry.path().extension().is_some_and(|ext| ext == "lua") {
println!("cargo:rerun-if-changed={}", entry.path().display());
}
}
}
let lib_dir = path.join("lib");
if lib_dir.exists() {
println!("cargo:rerun-if-changed={}", lib_dir.display());
for entry in fs::read_dir(&lib_dir).unwrap().flatten() {
if entry.path().extension().is_some_and(|ext| ext == "lua") {
println!("cargo:rerun-if-changed={}", entry.path().display());
}
}
}
let metadata_file = path.join("metadata.lua");
if metadata_file.exists() {
println!("cargo:rerun-if-changed={}", metadata_file.display());
}
let plugin = collect_plugin_files(&path);
plugins.insert(dir_name, plugin);
}
// Generate Rust code
let mut code = String::new();
// Struct definition
code.push_str(
r#"
#[derive(Debug)]
pub struct EmbeddedPlugin {
pub metadata: &'static str,
pub hooks: &'static [(&'static str, &'static str)],
pub lib: &'static [(&'static str, &'static str)],
}
"#,
);
// Generate static instances for each plugin
for (name, files) in &plugins {
let var_name = name.replace('-', "_").to_uppercase();
code.push_str(&format!(
"static {var_name}: EmbeddedPlugin = EmbeddedPlugin {{\n"
));
// Metadata - use absolute path with forward slashes for cross-platform include_str!
let metadata_path = embedded_dir.join(name).join("metadata.lua");
code.push_str(&format!(
" metadata: include_str!(\"{}\"),\n",
path_to_forward_slashes(&metadata_path)
));
// Hooks
code.push_str(" hooks: &[\n");
for hook in &files.hooks {
let hook_path = embedded_dir
.join(name)
.join("hooks")
.join(format!("{}.lua", hook));
code.push_str(&format!(
" (\"{}\", include_str!(\"{}\")),\n",
hook,
path_to_forward_slashes(&hook_path)
));
}
code.push_str(" ],\n");
// Lib files
code.push_str(" lib: &[\n");
for lib in &files.lib {
let lib_path = embedded_dir
.join(name)
.join("lib")
.join(format!("{}.lua", lib));
code.push_str(&format!(
" (\"{}\", include_str!(\"{}\")),\n",
lib,
path_to_forward_slashes(&lib_path)
));
}
code.push_str(" ],\n");
code.push_str("};\n\n");
}
// Generate lookup function
code.push_str("pub fn get_embedded_plugin(name: &str) -> Option<&'static EmbeddedPlugin> {\n");
code.push_str(" match name {\n");
for name in plugins.keys() {
let var_name = name.replace('-', "_").to_uppercase();
let short_name = name.strip_prefix("vfox-").unwrap_or(name);
code.push_str(&format!(
" \"{}\" | \"{}\" => Some(&{}),\n",
name, short_name, var_name
));
}
code.push_str(" _ => None,\n");
code.push_str(" }\n");
code.push_str("}\n\n");
// Generate list function
code.push_str("pub fn list_embedded_plugins() -> &'static [&'static str] {\n");
code.push_str(" &[\n");
for name in plugins.keys() {
code.push_str(&format!(" \"{}\",\n", name));
}
code.push_str(" ]\n");
code.push_str("}\n");
fs::write(&dest_path, code).unwrap();
}
struct PluginFiles {
hooks: Vec<String>,
lib: Vec<String>,
}
fn collect_plugin_files(plugin_dir: &Path) -> PluginFiles {
let mut hooks = Vec::new();
let mut lib = Vec::new();
// Collect hooks
let hooks_dir = plugin_dir.join("hooks");
if hooks_dir.exists() {
for entry in fs::read_dir(&hooks_dir).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if path.extension().is_some_and(|ext| ext == "lua") {
let name = path.file_stem().unwrap().to_string_lossy().to_string();
hooks.push(name);
}
}
}
hooks.sort();
// Collect lib files
let lib_dir = plugin_dir.join("lib");
if lib_dir.exists() {
for entry in fs::read_dir(&lib_dir).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if path.extension().is_some_and(|ext| ext == "lua") {
let name = path.file_stem().unwrap().to_string_lossy().to_string();
lib.push(name);
}
}
}
lib.sort();
PluginFiles { hooks, lib }
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/vfox.rs | crates/vfox/src/vfox.rs | use itertools::Itertools;
use reqwest::Url;
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::mpsc;
use tempfile::TempDir;
use xx::file;
use crate::error::Result;
use crate::hooks::available::AvailableVersion;
use crate::hooks::backend_exec_env::BackendExecEnvContext;
use crate::hooks::backend_install::BackendInstallContext;
use crate::hooks::backend_list_versions::BackendListVersionsContext;
use crate::hooks::env_keys::{EnvKey, EnvKeysContext};
use crate::hooks::mise_env::MiseEnvContext;
use crate::hooks::mise_path::MisePathContext;
use crate::hooks::parse_legacy_file::ParseLegacyFileResponse;
use crate::hooks::post_install::PostInstallContext;
use crate::hooks::pre_install::PreInstall;
use crate::http::CLIENT;
use crate::metadata::Metadata;
use crate::plugin::Plugin;
use crate::registry;
use crate::sdk_info::SdkInfo;
#[derive(Debug)]
pub struct Vfox {
pub runtime_version: String,
pub install_dir: PathBuf,
pub plugin_dir: PathBuf,
pub cache_dir: PathBuf,
pub download_dir: PathBuf,
log_tx: Option<mpsc::Sender<String>>,
}
impl Vfox {
pub fn new() -> Self {
Self::default()
}
pub fn log_subscribe(&mut self) -> mpsc::Receiver<String> {
let (tx, rx) = mpsc::channel();
self.log_tx = Some(tx);
rx
}
fn log_emit(&self, msg: String) {
if let Some(tx) = &self.log_tx {
let _ = tx.send(msg);
}
}
pub fn list_available_sdks() -> &'static BTreeMap<String, Url> {
registry::list_sdks()
}
pub async fn list_available_versions(&self, sdk: &str) -> Result<Vec<AvailableVersion>> {
let sdk = self.get_sdk(sdk)?;
sdk.available_async().await
}
pub fn list_installed_versions(&self, sdk: &str) -> Result<Vec<SdkInfo>> {
let path = self.install_dir.join(sdk);
if !path.exists() {
return Ok(Default::default());
}
let sdk = self.get_sdk(sdk)?;
let versions = xx::file::ls(&path)?;
versions
.into_iter()
.filter_map(|p| {
p.file_name()
.and_then(|f| f.to_str())
.map(|s| s.to_string())
})
.sorted()
.map(|version| {
let path = path.join(&version);
sdk.sdk_info(version, path)
})
.collect::<Result<_>>()
}
pub fn list_sdks(&self) -> Result<Vec<Plugin>> {
if !self.plugin_dir.exists() {
return Ok(Default::default());
}
let plugins = xx::file::ls(&self.plugin_dir)?;
plugins
.into_iter()
.filter_map(|p| {
p.file_name()
.and_then(|f| f.to_str())
.map(|s| s.to_string())
})
.sorted()
.map(|name| self.get_sdk(&name))
.collect()
}
pub fn get_sdk(&self, name: &str) -> Result<Plugin> {
Plugin::from_name_or_dir(name, &self.plugin_dir.join(name))
}
pub fn install_plugin(&self, sdk: &str) -> Result<Plugin> {
// Check filesystem first - allows user to override embedded plugins
let plugin_dir = self.plugin_dir.join(sdk);
if plugin_dir.exists() {
return Plugin::from_dir(&plugin_dir);
}
// Fall back to embedded plugin if available
if let Some(embedded) = crate::embedded_plugins::get_embedded_plugin(sdk) {
return Plugin::from_embedded(sdk, embedded);
}
// Otherwise install from registry
let url = registry::sdk_url(sdk).ok_or_else(|| format!("Unknown SDK: {sdk}"))?;
self.install_plugin_from_url(url)
}
pub fn install_plugin_from_url(&self, url: &Url) -> Result<Plugin> {
let sdk = url
.path_segments()
.and_then(|mut s| {
let filename = s.next_back().unwrap();
filename
.strip_prefix("vfox-")
.map(|s| s.to_string())
.or_else(|| Some(filename.to_string()))
})
.ok_or("No filename in URL")?;
let plugin_dir = self.plugin_dir.join(&sdk);
if !plugin_dir.exists() {
debug!("Installing plugin {sdk}");
xx::git::clone(url.as_ref(), &plugin_dir, &Default::default())?;
}
Plugin::from_dir(&plugin_dir)
}
pub fn uninstall_plugin(&self, sdk: &str) -> Result<()> {
let plugin_dir = self.plugin_dir.join(sdk);
if plugin_dir.exists() {
file::remove_dir_all(&plugin_dir)?;
}
Ok(())
}
pub async fn install<ID: AsRef<Path>>(
&self,
sdk: &str,
version: &str,
install_dir: ID,
) -> Result<()> {
self.install_plugin(sdk)?;
let sdk = self.get_sdk(sdk)?;
let pre_install = sdk.pre_install(version).await?;
let install_dir = install_dir.as_ref();
trace!("{pre_install:?}");
if let Some(url) = pre_install.url.as_ref().map(|s| Url::from_str(s)) {
let file = self.download(&url?, &sdk, version).await?;
self.verify(&pre_install, &file).await?;
self.extract(&file, install_dir)?;
}
if sdk.get_metadata()?.hooks.contains("post_install") {
let sdk_info = sdk.sdk_info(version.to_string(), install_dir.to_path_buf())?;
sdk.post_install(PostInstallContext {
root_path: install_dir.to_path_buf(),
runtime_version: self.runtime_version.clone(),
sdk_info: BTreeMap::from([(sdk_info.name.clone(), sdk_info)]),
})
.await?;
}
Ok(())
}
pub fn uninstall(&self, sdk: &str, version: &str) -> Result<()> {
let path = self.install_dir.join(sdk).join(version);
file::remove_dir_all(&path)?;
Ok(())
}
pub async fn pre_install_for_platform(
&self,
sdk: &str,
version: &str,
os: &str,
arch: &str,
) -> Result<PreInstall> {
let sdk = self.get_sdk(sdk)?;
sdk.pre_install_for_platform(version, os, arch).await
}
pub async fn metadata(&self, sdk: &str) -> Result<Metadata> {
self.get_sdk(sdk)?.get_metadata()
}
pub async fn env_keys<T: serde::Serialize>(
&self,
sdk: &str,
version: &str,
options: T,
) -> Result<Vec<EnvKey>> {
debug!("Getting env keys for {sdk} version {version}");
let sdk = self.get_sdk(sdk)?;
let sdk_info = sdk.sdk_info(
version.to_string(),
self.install_dir.join(&sdk.name).join(version),
)?;
let ctx = EnvKeysContext {
args: vec![],
version: version.to_string(),
path: sdk_info.path.clone(),
sdk_info: BTreeMap::from([(sdk_info.name.clone(), sdk_info.clone())]),
main: sdk_info,
options,
};
sdk.env_keys(ctx).await
}
pub async fn mise_env<T: serde::Serialize>(&self, sdk: &str, opts: T) -> Result<Vec<EnvKey>> {
let plugin = self.get_sdk(sdk)?;
if !plugin.get_metadata()?.hooks.contains("mise_env") {
return Ok(vec![]);
}
let ctx = MiseEnvContext {
args: vec![],
options: opts,
};
plugin.mise_env(ctx).await
}
pub async fn backend_list_versions(&self, sdk: &str, tool: &str) -> Result<Vec<String>> {
let plugin = self.get_sdk(sdk)?;
let ctx = BackendListVersionsContext {
tool: tool.to_string(),
};
plugin.backend_list_versions(ctx).await.map(|r| r.versions)
}
pub async fn backend_install(
&self,
sdk: &str,
tool: &str,
version: &str,
install_path: PathBuf,
) -> Result<()> {
let plugin = self.get_sdk(sdk)?;
let ctx = BackendInstallContext {
tool: tool.to_string(),
version: version.to_string(),
install_path,
};
plugin.backend_install(ctx).await?;
Ok(())
}
pub async fn backend_exec_env(
&self,
sdk: &str,
tool: &str,
version: &str,
install_path: PathBuf,
) -> Result<Vec<EnvKey>> {
let plugin = self.get_sdk(sdk)?;
let ctx = BackendExecEnvContext {
tool: tool.to_string(),
version: version.to_string(),
install_path,
};
plugin.backend_exec_env(ctx).await.map(|r| r.env_vars)
}
pub async fn mise_path<T: serde::Serialize>(&self, sdk: &str, opts: T) -> Result<Vec<String>> {
let plugin = self.get_sdk(sdk)?;
if !plugin.get_metadata()?.hooks.contains("mise_path") {
return Ok(vec![]);
}
let ctx = MisePathContext {
args: vec![],
options: opts,
};
plugin.mise_path(ctx).await
}
pub async fn parse_legacy_file(
&self,
sdk: &str,
file: &Path,
) -> Result<ParseLegacyFileResponse> {
let sdk = self.get_sdk(sdk)?;
sdk.parse_legacy_file(file).await
}
async fn download(&self, url: &Url, sdk: &Plugin, version: &str) -> Result<PathBuf> {
self.log_emit(format!("Downloading {url}"));
let filename = url
.path_segments()
.and_then(|mut s| s.next_back())
.ok_or("No filename in URL")?;
let path = self
.download_dir
.join(format!("{sdk}-{version}"))
.join(filename);
let resp = CLIENT.get(url.clone()).send().await?;
resp.error_for_status_ref()?;
file::mkdirp(path.parent().unwrap())?;
let mut file = tokio::fs::File::create(&path).await?;
let bytes = resp.bytes().await?;
tokio::io::AsyncWriteExt::write_all(&mut file, &bytes).await?;
file.sync_all().await?;
Ok(path)
}
async fn verify(&self, pre_install: &PreInstall, file: &Path) -> Result<()> {
self.log_emit(format!("Verifying {file:?} checksum"));
if let Some(sha256) = &pre_install.sha256 {
xx::hash::ensure_checksum_sha256(file, sha256)?;
}
if let Some(sha512) = &pre_install.sha512 {
xx::hash::ensure_checksum_sha512(file, sha512)?;
}
if let Some(_sha1) = &pre_install.sha1 {
unimplemented!("sha1")
}
if let Some(_md5) = &pre_install.md5 {
unimplemented!("md5")
}
if let Some(attestation) = &pre_install.attestation {
self.log_emit(format!("Verify {file:?} attestation"));
if let Some(owner) = &attestation.github_owner
&& let Some(repo) = &attestation.github_repo
{
let token = std::env::var("MISE_GITHUB_TOKEN")
.or_else(|_| std::env::var("GITHUB_TOKEN"))
.or(Err("GitHub attestation verification requires either the MISE_GITHUB_TOKEN or GITHUB_TOKEN environment variable set"))?;
sigstore_verification::verify_github_attestation(
file,
owner.as_str(),
repo.as_str(),
Some(token.as_str()),
attestation.github_signer_workflow.as_deref(),
)
.await?;
}
if let Some(sig_or_bundle_path) = &attestation.cosign_sig_or_bundle_path {
if let Some(public_key_path) = &attestation.cosign_public_key_path {
sigstore_verification::verify_cosign_signature_with_key(
file,
sig_or_bundle_path,
public_key_path,
)
.await?;
} else {
sigstore_verification::verify_cosign_signature(file, sig_or_bundle_path)
.await?;
}
}
if let Some(provenance_path) = &attestation.slsa_provenance_path {
let min_level = attestation.slsa_min_level.unwrap_or(1u8);
sigstore_verification::verify_slsa_provenance(file, provenance_path, min_level)
.await?;
}
}
Ok(())
}
fn extract(&self, file: &Path, install_dir: &Path) -> Result<()> {
self.log_emit(format!("Extracting {file:?} to {install_dir:?}"));
let filename = file.file_name().unwrap().to_string_lossy().to_string();
let parent = install_dir.parent().unwrap();
file::mkdirp(parent)?;
let tmp = TempDir::with_prefix_in(&filename, parent)?;
file::remove_dir_all(install_dir)?;
let move_to_install = || {
let subdirs = file::ls(tmp.path())?;
if subdirs.len() == 1 && subdirs.first().unwrap().is_dir() {
let subdir = subdirs.first().unwrap();
file::mv(subdir, install_dir)?;
} else {
file::mv(tmp.path(), install_dir)?;
}
Result::Ok(())
};
if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") {
xx::archive::untar_gz(file, tmp.path())?;
move_to_install()?;
} else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") {
xx::archive::untar_xz(file, tmp.path())?;
move_to_install()?;
} else if filename.ends_with(".tar.bz2")
|| filename.ends_with(".tbz2")
|| filename.ends_with(".tbz")
{
xx::archive::untar_bz2(file, tmp.path())?;
move_to_install()?;
} else if filename.ends_with(".zip") {
xx::archive::unzip(file, tmp.path())?;
move_to_install()?;
} else {
file::mv(file, install_dir.join(&filename))?;
#[cfg(unix)]
file::make_executable(install_dir.join(&filename))?;
}
Ok(())
}
}
impl Default for Vfox {
fn default() -> Self {
Self {
runtime_version: "1.0.0".to_string(),
plugin_dir: home().join(".version-fox/plugin"),
cache_dir: home().join(".version-fox/cache"),
download_dir: home().join(".version-fox/downloads"),
install_dir: home().join(".version-fox/installs"),
log_tx: None,
}
}
}
fn home() -> PathBuf {
homedir::my_home()
.ok()
.flatten()
.unwrap_or_else(|| PathBuf::from("/"))
}
#[cfg(test)]
mod tests {
use super::*;
impl Vfox {
pub fn test() -> Self {
Self {
runtime_version: "1.0.0".to_string(),
plugin_dir: PathBuf::from("plugins"),
cache_dir: PathBuf::from("test/cache"),
download_dir: PathBuf::from("test/downloads"),
install_dir: PathBuf::from("test/installs"),
log_tx: None,
}
}
}
#[tokio::test]
async fn test_env_keys() {
let vfox = Vfox::test();
// dummy plugin already exists in plugins/dummy, no need to install
let keys = vfox
.env_keys(
"dummy",
"1.0.0",
serde_json::Value::Object(Default::default()),
)
.await
.unwrap();
let output = format!("{keys:?}").replace(
&vfox.install_dir.to_string_lossy().to_string(),
"<INSTALL_DIR>",
);
assert_snapshot!(output);
}
#[tokio::test]
async fn test_install_plugin() {
let vfox = Vfox::test();
// dummy plugin already exists in plugins/dummy, just verify it's there
assert!(vfox.plugin_dir.join("dummy").exists());
let plugin = Plugin::from_dir(&vfox.plugin_dir.join("dummy")).unwrap();
assert_eq!(plugin.name, "dummy");
}
#[tokio::test]
async fn test_install() {
let vfox = Vfox::test();
let install_dir = vfox.install_dir.join("dummy").join("1.0.0");
// dummy plugin already exists in plugins/dummy
vfox.install("dummy", "1.0.0", &install_dir).await.unwrap();
// dummy plugin doesn't actually install binaries, so we just check the directory
assert!(vfox.install_dir.join("dummy").join("1.0.0").exists());
vfox.uninstall("dummy", "1.0.0").unwrap();
assert!(!vfox.install_dir.join("dummy").join("1.0.0").exists());
file::remove_dir_all(vfox.install_dir).unwrap();
file::remove_dir_all(vfox.download_dir).unwrap();
}
#[tokio::test]
#[ignore] // disable for now
async fn test_install_cmake() {
let vfox = Vfox::test();
vfox.install_plugin("cmake").unwrap();
let install_dir = vfox.install_dir.join("cmake").join("3.21.0");
vfox.install("cmake", "3.21.0", &install_dir).await.unwrap();
if cfg!(target_os = "linux") {
assert!(
vfox.install_dir
.join("cmake")
.join("3.21.0")
.join("bin")
.join("cmake")
.exists()
);
} else if cfg!(target_os = "macos") {
assert!(
vfox.install_dir
.join("cmake")
.join("3.21.0")
.join("CMake.app")
.join("Contents")
.join("bin")
.join("cmake")
.exists()
);
} else if cfg!(target_os = "windows") {
assert!(
vfox.install_dir
.join("cmake")
.join("3.21.0")
.join("bin")
.join("cmake.exe")
.exists()
);
}
vfox.uninstall_plugin("cmake").unwrap();
assert!(!vfox.plugin_dir.join("cmake").exists());
vfox.uninstall("cmake", "3.21.0").unwrap();
assert!(!vfox.install_dir.join("cmake").join("3.21.0").exists());
file::remove_dir_all(vfox.plugin_dir.join("cmake")).unwrap();
file::remove_dir_all(vfox.install_dir).unwrap();
file::remove_dir_all(vfox.download_dir).unwrap();
}
#[tokio::test]
async fn test_metadata() {
let vfox = Vfox::test();
// dummy plugin already exists in plugins/dummy
let metadata = vfox.metadata("dummy").await.unwrap();
let out = format!("{metadata:?}");
assert_snapshot!(out);
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/config.rs | crates/vfox/src/config.rs | use std::env::consts::{ARCH, OS};
use std::path::PathBuf;
use std::sync::{Mutex, MutexGuard};
#[derive(Debug, Clone)]
pub struct Config {
pub plugin_dir: PathBuf,
}
static CONFIG: Mutex<Option<Config>> = Mutex::new(None);
impl Config {
pub fn get() -> Self {
Self::_get().as_ref().unwrap().clone()
}
fn _get() -> MutexGuard<'static, Option<Config>> {
let mut config = CONFIG.lock().unwrap();
if config.is_none() {
let home = homedir::my_home()
.ok()
.flatten()
.unwrap_or_else(|| PathBuf::from("/"));
*config = Some(Config {
plugin_dir: home.join(".version-fox/plugin"),
});
}
config
}
}
pub fn os() -> String {
match OS {
"macos" => "darwin".to_string(),
os => os.to_string(),
}
}
pub fn arch() -> String {
match ARCH {
"aarch64" => "arm64".to_string(),
"x86_64" => "amd64".to_string(),
arch => arch.to_string(),
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/embedded_plugins.rs | crates/vfox/src/embedded_plugins.rs | // This module provides access to embedded vfox plugin Lua code.
// The actual code is generated at build time by build.rs
include!(concat!(env!("OUT_DIR"), "/embedded_plugins.rs"));
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/sdk_info.rs | crates/vfox/src/sdk_info.rs | use mlua::{IntoLua, Lua, Value};
use std::path::PathBuf;
#[derive(Debug, Clone)]
pub struct SdkInfo {
pub name: String,
pub version: String,
pub path: PathBuf,
}
impl SdkInfo {
pub fn new(name: String, version: String, path: PathBuf) -> Self {
Self {
name,
version,
path,
}
}
}
impl IntoLua for SdkInfo {
fn into_lua(self, lua: &Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("name", self.name)?;
table.set("version", self.version)?;
table.set("path", self.path.to_string_lossy().to_string())?;
Ok(Value::Table(table))
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lib.rs | crates/vfox/src/lib.rs | #[cfg(test)]
#[macro_use]
extern crate insta;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mlua;
pub use error::Result as VfoxResult;
pub use error::VfoxError;
pub use plugin::Plugin;
pub use vfox::Vfox;
mod config;
mod context;
pub mod embedded_plugins;
mod error;
mod hooks;
mod http;
mod lua_mod;
mod metadata;
mod plugin;
mod registry;
mod runtime;
mod sdk_info;
mod vfox;
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/http.rs | crates/vfox/src/http.rs | use reqwest::{Client, ClientBuilder};
use std::sync::LazyLock;
pub static CLIENT: LazyLock<Client> = LazyLock::new(|| {
ClientBuilder::new()
.user_agent(format!("vfox.rs/{}", env!("CARGO_PKG_VERSION")))
.build()
.expect("Failed to create reqwest client")
});
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/error.rs | crates/vfox/src/error.rs | use mlua::Error as MLuaError;
use thiserror::Error;
use xx::XXError;
#[derive(Error, Debug)]
#[non_exhaustive]
pub enum VfoxError {
#[error("{0}")]
Error(String),
#[error(transparent)]
LuaError(#[from] MLuaError),
#[error("serde_json")]
SerdeJsonError(#[from] serde_json::Error),
#[error(transparent)]
XXError(#[from] XXError),
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
#[error(transparent)]
IoError(#[from] std::io::Error),
#[error(transparent)]
UrlParseError(#[from] url::ParseError),
#[error(transparent)]
AttestationError(#[from] sigstore_verification::AttestationError),
}
pub type Result<T> = std::result::Result<T, VfoxError>;
impl From<String> for VfoxError {
fn from(s: String) -> Self {
VfoxError::Error(s)
}
}
impl From<&str> for VfoxError {
fn from(s: &str) -> Self {
VfoxError::Error(s.to_string())
}
}
#[macro_export]
macro_rules! error {
($($arg:tt)*) => {
return Err(VfoxError::Error(format!($($arg)*)));
};
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/runtime.rs | crates/vfox/src/runtime.rs | use crate::config::{arch, os};
use mlua::{UserData, UserDataFields};
use once_cell::sync::Lazy;
use std::path::PathBuf;
use std::sync::Mutex;
#[derive(Debug, Clone)]
pub(crate) struct Runtime {
pub(crate) os: String,
pub(crate) arch: String,
pub(crate) version: String,
pub(crate) plugin_dir_path: PathBuf,
}
static RUNTIME: Lazy<Mutex<Runtime>> = Lazy::new(|| {
Mutex::new(Runtime {
os: os(),
arch: arch(),
version: "0.6.0".to_string(), // https://github.com/version-fox/vfox/releases
plugin_dir_path: PathBuf::new(),
})
});
impl Runtime {
pub(crate) fn get(plugin_dir_path: PathBuf) -> Runtime {
let mut runtime = RUNTIME.lock().unwrap().clone();
runtime.plugin_dir_path = plugin_dir_path;
runtime
}
#[cfg(test)]
pub(crate) fn set_os(os: String) {
let mut runtime = RUNTIME.lock().unwrap();
runtime.os = os;
}
#[cfg(test)]
pub(crate) fn set_arch(arch: String) {
let mut runtime = RUNTIME.lock().unwrap();
runtime.arch = arch;
}
#[cfg(test)]
pub(crate) fn reset() {
let mut runtime = RUNTIME.lock().unwrap();
runtime.os = os();
runtime.arch = arch();
}
}
impl UserData for Runtime {
fn add_fields<F: UserDataFields<Self>>(fields: &mut F) {
fields.add_field_method_get("osType", |_, t| Ok(t.os.clone()));
fields.add_field_method_get("archType", |_, t| Ok(t.arch.clone()));
fields.add_field_method_get("version", |_, t| Ok(t.version.clone()));
fields.add_field_method_get("pluginDirPath", |_, t| Ok(t.plugin_dir_path.clone()));
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/bin.rs | crates/vfox/src/bin.rs | #[cfg(feature = "cli")]
#[macro_use]
extern crate log;
#[cfg(feature = "cli")]
mod cli;
#[allow(clippy::needless_return)]
#[cfg(feature = "cli")]
#[tokio::main]
async fn main() {
env_logger::init_from_env(env_logger::Env::default().filter_or("VFOX_LOG", "info"));
if let Err(err) = cli::run().await {
error!("{err}");
std::process::exit(1);
}
}
#[cfg(not(feature = "cli"))]
fn main() {
panic!("cli feature is not enabled");
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/registry.rs | crates/vfox/src/registry.rs | use std::collections::BTreeMap;
use std::str::FromStr;
use once_cell::sync::Lazy;
use url::Url;
static SDKS: Lazy<BTreeMap<String, Url>> = Lazy::new(|| {
[
("nodejs", "https://github.com/version-fox/vfox-nodejs"),
("cmake", "https://github.com/version-fox/vfox-cmake"),
]
.iter()
.map(|(name, url)| (name.to_string(), Url::from_str(url).unwrap()))
.collect()
});
pub fn sdk_url(name: &str) -> Option<&Url> {
SDKS.get(name)
}
pub fn list_sdks() -> &'static BTreeMap<String, Url> {
&SDKS
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/context.rs | crates/vfox/src/context.rs | use mlua::{UserData, UserDataFields};
#[derive(Debug)]
pub(crate) struct Context {
pub args: Vec<String>,
pub(crate) version: Option<String>,
// pub(crate) runtime_version: String,
}
impl UserData for Context {
fn add_fields<F: UserDataFields<Self>>(fields: &mut F) {
fields.add_field_method_get("args", |_, t| Ok(t.args.clone()));
fields.add_field_method_get("version", |_, t| Ok(t.version.clone()));
// fields.add_field_method_get("runtimeVersion", |_, t| Ok(t.runtime_version.clone()));
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/metadata.rs | crates/vfox/src/metadata.rs | use mlua::Table;
use std::collections::BTreeSet;
use crate::error::Result;
use crate::error::VfoxError;
#[derive(Debug, Clone)]
pub struct Metadata {
pub name: String,
pub legacy_filenames: Vec<String>,
pub version: String,
pub description: Option<String>,
pub author: Option<String>,
pub license: Option<String>,
pub homepage: Option<String>,
pub hooks: BTreeSet<&'static str>,
}
impl TryFrom<Table> for Metadata {
type Error = VfoxError;
fn try_from(t: Table) -> Result<Self> {
let legacy_filenames = t
.get::<Option<Vec<String>>>("legacyFilenames")?
.unwrap_or_default();
Ok(Metadata {
name: t.get("name")?,
legacy_filenames,
version: t.get("version")?,
description: t.get("description")?,
author: t.get("author")?,
license: t.get("license")?,
homepage: t.get("homepage")?,
hooks: Default::default(),
})
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/plugin.rs | crates/vfox/src/plugin.rs | use std::cmp::Ordering;
use std::fmt::Display;
use std::path::{Path, PathBuf};
use mlua::{AsChunk, FromLuaMulti, IntoLua, Lua, Table, Value};
use once_cell::sync::OnceCell;
use crate::config::Config;
use crate::context::Context;
use crate::embedded_plugins::{self, EmbeddedPlugin};
use crate::error::Result;
use crate::metadata::Metadata;
use crate::runtime::Runtime;
use crate::sdk_info::SdkInfo;
use crate::{VfoxError, config, error, lua_mod};
#[derive(Debug)]
pub enum PluginSource {
Filesystem(PathBuf),
Embedded(&'static EmbeddedPlugin),
}
#[derive(Debug)]
pub struct Plugin {
pub name: String,
pub dir: PathBuf,
source: PluginSource,
lua: Lua,
metadata: OnceCell<Metadata>,
}
impl Plugin {
pub fn from_dir(dir: &Path) -> Result<Self> {
if !dir.exists() {
error!("Plugin directory not found: {:?}", dir);
}
let lua = Lua::new();
lua.set_named_registry_value("plugin_dir", dir.to_path_buf())?;
Ok(Self {
name: dir.file_name().unwrap().to_string_lossy().to_string(),
dir: dir.to_path_buf(),
source: PluginSource::Filesystem(dir.to_path_buf()),
lua,
metadata: OnceCell::new(),
})
}
pub fn from_embedded(name: &str, embedded: &'static EmbeddedPlugin) -> Result<Self> {
let lua = Lua::new();
// Use a dummy path for embedded plugins
let dummy_dir = PathBuf::from(format!("embedded:{}", name));
lua.set_named_registry_value("plugin_dir", dummy_dir.clone())?;
lua.set_named_registry_value("embedded_plugin", true)?;
Ok(Self {
name: name.to_string(),
dir: dummy_dir,
source: PluginSource::Embedded(embedded),
lua,
metadata: OnceCell::new(),
})
}
pub fn from_name(name: &str) -> Result<Self> {
// Check filesystem first - allows user to override embedded plugins
let dir = Config::get().plugin_dir.join(name);
if dir.exists() {
return Self::from_dir(&dir);
}
// Fall back to embedded plugin if available
if let Some(embedded) = embedded_plugins::get_embedded_plugin(name) {
return Self::from_embedded(name, embedded);
}
Self::from_dir(&dir)
}
pub fn from_name_or_dir(name: &str, dir: &Path) -> Result<Self> {
// Check filesystem first - allows user to override embedded plugins
if dir.exists() {
return Self::from_dir(dir);
}
// Fall back to embedded plugin if available
if let Some(embedded) = embedded_plugins::get_embedded_plugin(name) {
return Self::from_embedded(name, embedded);
}
Self::from_dir(dir)
}
pub fn is_embedded(&self) -> bool {
matches!(self.source, PluginSource::Embedded(_))
}
pub fn list() -> Result<Vec<String>> {
let config = Config::get();
if !config.plugin_dir.exists() {
return Ok(vec![]);
}
let plugins = xx::file::ls(&config.plugin_dir)?;
let plugins = plugins
.iter()
.filter_map(|p| {
p.file_name()
.and_then(|f| f.to_str())
.map(|s| s.to_string())
})
.collect();
Ok(plugins)
}
pub fn get_metadata(&self) -> Result<Metadata> {
Ok(self.load()?.clone())
}
pub fn sdk_info(&self, version: String, install_dir: PathBuf) -> Result<SdkInfo> {
Ok(SdkInfo::new(
self.get_metadata()?.name.clone(),
version,
install_dir,
))
}
#[cfg(test)]
pub(crate) fn test(name: &str) -> Self {
let dir = PathBuf::from("plugins").join(name);
Self::from_dir(&dir).unwrap()
}
pub(crate) fn context(&self, version: Option<String>) -> Result<Context> {
let ctx = Context {
args: vec![],
version,
// version: "1.0.0".to_string(),
// runtime_version: "xxx".to_string(),
};
Ok(ctx)
}
pub(crate) async fn exec_async(&self, chunk: impl AsChunk) -> Result<()> {
self.load()?;
let chunk = self.lua.load(chunk);
chunk.exec_async().await?;
Ok(())
}
pub(crate) async fn eval_async<R>(&self, chunk: impl AsChunk) -> Result<R>
where
R: FromLuaMulti,
{
self.load()?;
let chunk = self.lua.load(chunk);
let result = chunk.eval_async().await?;
Ok(result)
}
// Backend plugin methods
fn load(&self) -> Result<&Metadata> {
self.metadata.get_or_try_init(|| {
debug!("[vfox] Getting metadata for {self}");
// For filesystem plugins, set Lua package paths
if let PluginSource::Filesystem(dir) = &self.source {
set_paths(
&self.lua,
&[
dir.join("?.lua"),
dir.join("hooks/?.lua"),
dir.join("lib/?.lua"),
],
)?;
}
// Load standard Lua modules (http, json, etc.) FIRST
// These must be available before loading embedded lib files
lua_mod::archiver(&self.lua)?;
lua_mod::cmd(&self.lua)?;
lua_mod::file(&self.lua)?;
lua_mod::html(&self.lua)?;
lua_mod::http(&self.lua)?;
lua_mod::json(&self.lua)?;
lua_mod::strings(&self.lua)?;
lua_mod::env(&self.lua)?;
// For embedded plugins, load lib modules AFTER standard modules
// (lib files may require http, json, etc.)
if let PluginSource::Embedded(embedded) = &self.source {
self.load_embedded_libs(embedded)?;
}
let metadata = self.load_metadata()?;
self.set_global("PLUGIN", metadata.clone())?;
self.set_global("RUNTIME", Runtime::get(self.dir.clone()))?;
self.set_global("OS_TYPE", config::os())?;
self.set_global("ARCH_TYPE", config::arch())?;
let mut metadata: Metadata = metadata.try_into()?;
metadata.hooks = match &self.source {
PluginSource::Filesystem(dir) => lua_mod::hooks(&self.lua, dir)?,
PluginSource::Embedded(embedded) => lua_mod::hooks_embedded(&self.lua, embedded)?,
};
Ok(metadata)
})
}
fn load_embedded_libs(&self, embedded: &EmbeddedPlugin) -> Result<()> {
let package: Table = self.lua.globals().get("package")?;
let preload: Table = package.get("preload")?;
// Register lib modules in package.preload so require() works regardless of load order
// This allows lib files to require each other without alphabetical ordering issues
for (name, code) in embedded.lib {
let lua = self.lua.clone();
let code = *code;
let loader = lua.create_function(move |lua, _: ()| {
let module: Value = lua.load(code).eval()?;
Ok(module)
})?;
preload.set(*name, loader)?;
}
Ok(())
}
fn set_global<V>(&self, name: &str, value: V) -> Result<()>
where
V: IntoLua,
{
self.lua.globals().set(name, value)?;
Ok(())
}
fn load_metadata(&self) -> Result<Table> {
match &self.source {
PluginSource::Filesystem(_) => {
let metadata = self
.lua
.load(
r#"
require "metadata"
return PLUGIN
"#,
)
.eval()?;
Ok(metadata)
}
PluginSource::Embedded(embedded) => {
// Load metadata from embedded string
self.lua.load(embedded.metadata).exec()?;
let metadata = self.lua.globals().get("PLUGIN")?;
Ok(metadata)
}
}
}
}
fn get_package(lua: &Lua) -> Result<Table> {
let package = lua.globals().get::<Table>("package")?;
Ok(package)
}
fn set_paths(lua: &Lua, paths: &[PathBuf]) -> Result<()> {
let paths = paths
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect::<Vec<String>>()
.join(";");
get_package(lua)?.set("path", paths)?;
Ok(())
}
impl Display for Plugin {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.name)
}
}
impl PartialEq<Self> for Plugin {
fn eq(&self, other: &Self) -> bool {
self.dir == other.dir
}
}
impl Eq for Plugin {}
impl PartialOrd for Plugin {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Plugin {
fn cmp(&self, other: &Self) -> Ordering {
self.name.cmp(&other.name)
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/mise_env.rs | crates/vfox/src/hooks/mise_env.rs | use mlua::{IntoLua, Lua, LuaSerdeExt, Value};
use crate::Plugin;
use crate::error::Result;
use crate::hooks::env_keys::EnvKey;
#[derive(Debug)]
pub struct MiseEnvContext<T: serde::Serialize> {
pub args: Vec<String>,
pub options: T,
}
impl Plugin {
pub async fn mise_env<T: serde::Serialize>(
&self,
ctx: MiseEnvContext<T>,
) -> Result<Vec<EnvKey>> {
debug!("[vfox:{}] mise_env", &self.name);
let env_keys = self
.eval_async(chunk! {
require "hooks/mise_env"
return PLUGIN:MiseEnv($ctx)
})
.await?;
Ok(env_keys)
}
}
impl<T: serde::Serialize> IntoLua for MiseEnvContext<T> {
fn into_lua(self, lua: &Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("options", lua.to_value(&self.options)?)?;
Ok(Value::Table(table))
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/parse_legacy_file.rs | crates/vfox/src/hooks/parse_legacy_file.rs | use mlua::prelude::LuaError;
use mlua::{FromLua, IntoLua, Lua, MultiValue, Value};
use std::path::{Path, PathBuf};
use crate::Plugin;
use crate::error::Result;
#[derive(Debug)]
pub struct LegacyFileContext {
pub args: Vec<String>,
pub filepath: PathBuf,
}
#[derive(Debug)]
pub struct ParseLegacyFileResponse {
pub version: Option<String>,
}
impl Plugin {
pub async fn parse_legacy_file(&self, legacy_file: &Path) -> Result<ParseLegacyFileResponse> {
debug!("[vfox:{}] parse_legacy_file", &self.name);
let ctx = LegacyFileContext {
args: vec![],
filepath: legacy_file.to_path_buf(),
};
let legacy_file_response = self
.eval_async(chunk! {
require "hooks/available"
require "hooks/parse_legacy_file"
return PLUGIN:ParseLegacyFile($ctx)
})
.await?;
Ok(legacy_file_response)
}
}
impl IntoLua for LegacyFileContext {
fn into_lua(self, lua: &Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("args", self.args)?;
table.set(
"filename",
self.filepath
.file_name()
.ok_or(LuaError::RuntimeError(String::from(
"No basename for legacy file",
)))?
.to_os_string()
.into_string()
.or(Err(LuaError::RuntimeError(String::from(
"Could not convert basename to string",
))))?,
)?;
table.set("filepath", self.filepath.to_string_lossy().to_string())?;
table.set(
"getInstalledVersions",
lua.create_async_function(|lua, _input: MultiValue| async move {
let plugin_dir = lua.named_registry_value::<PathBuf>("plugin_dir")?;
Ok(Plugin::from_dir(plugin_dir.as_path())
.map_err(|e| LuaError::RuntimeError(e.to_string()))?
.available_async()
.await
.map_err(|e| LuaError::RuntimeError(e.to_string()))?
.into_iter()
.map(|v| v.version)
.collect::<Vec<String>>())
})?,
)?;
Ok(Value::Table(table))
}
}
impl FromLua for ParseLegacyFileResponse {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => Ok(ParseLegacyFileResponse {
version: table.get::<Option<String>>("version")?,
}),
_ => panic!("Expected table"),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::Vfox;
#[tokio::test]
async fn test_parse_legacy_file_test_nodejs() {
let vfox = Vfox::test();
let response = vfox
.parse_legacy_file("test-nodejs", Path::new("test/data/.node-version"))
.await
.unwrap();
let out = format!("{response:?}");
assert_snapshot!(out, @r###"ParseLegacyFileResponse { version: Some("20.0.0") }"###);
}
#[tokio::test]
async fn test_parse_legacy_file_dummy() {
let vfox = Vfox::test();
let response = vfox
.parse_legacy_file("dummy", Path::new("test/data/.dummy-version"))
.await
.unwrap();
let out = format!("{response:?}");
assert_snapshot!(out, @r###"ParseLegacyFileResponse { version: Some("1.0.0") }"###);
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/backend_list_versions.rs | crates/vfox/src/hooks/backend_list_versions.rs | use crate::{Plugin, error::Result};
use mlua::{FromLua, IntoLua, Lua, Value, prelude::LuaError};
#[derive(Debug, Clone)]
pub struct BackendListVersionsContext {
pub tool: String,
}
#[derive(Debug, Clone)]
pub struct BackendListVersionsResponse {
pub versions: Vec<String>,
}
impl Plugin {
pub async fn backend_list_versions(
&self,
ctx: BackendListVersionsContext,
) -> Result<BackendListVersionsResponse> {
debug!("[vfox:{}] backend_list_versions", &self.name);
self.eval_async(chunk! {
require "hooks/backend_list_versions"
return PLUGIN:BackendListVersions($ctx)
})
.await
}
}
impl IntoLua for BackendListVersionsContext {
fn into_lua(self, lua: &mlua::Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("tool", self.tool)?;
Ok(Value::Table(table))
}
}
impl FromLua for BackendListVersionsResponse {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => Ok(BackendListVersionsResponse {
versions: table.get::<Vec<String>>("versions")?,
}),
_ => Err(LuaError::FromLuaConversionError {
from: value.type_name(),
to: "BackendListVersionsResponse".to_string(),
message: Some("Expected table".to_string()),
}),
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/env_keys.rs | crates/vfox/src/hooks/env_keys.rs | use mlua::prelude::LuaError;
use mlua::{FromLua, IntoLua, Lua, LuaSerdeExt, Value};
use std::collections::BTreeMap;
use std::path::PathBuf;
use crate::Plugin;
use crate::error::Result;
use crate::sdk_info::SdkInfo;
#[derive(Debug)]
pub struct EnvKey {
pub key: String,
pub value: String,
}
#[derive(Debug)]
pub struct EnvKeysContext<T: serde::Serialize> {
pub args: Vec<String>,
pub version: String,
pub path: PathBuf,
pub main: SdkInfo,
pub sdk_info: BTreeMap<String, SdkInfo>,
pub options: T,
}
impl Plugin {
pub async fn env_keys<T: serde::Serialize>(
&self,
ctx: EnvKeysContext<T>,
) -> Result<Vec<EnvKey>> {
debug!("[vfox:{}] env_keys", &self.name);
let env_keys = self
.eval_async(chunk! {
require "hooks/env_keys"
return PLUGIN:EnvKeys($ctx)
})
.await?;
Ok(env_keys)
}
}
impl<T: serde::Serialize> IntoLua for EnvKeysContext<T> {
fn into_lua(self, lua: &Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("version", self.version)?;
table.set("path", self.path.to_string_lossy().to_string())?;
table.set("sdkInfo", self.sdk_info)?;
table.set("main", self.main)?;
table.set("options", lua.to_value(&self.options)?)?;
Ok(Value::Table(table))
}
}
impl FromLua for EnvKey {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => Ok(EnvKey {
key: table.get::<String>("key")?,
value: table.get::<String>("value")?,
}),
_ => panic!("Expected table"),
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/available.rs | crates/vfox/src/hooks/available.rs | use mlua::prelude::LuaError;
use mlua::{FromLua, Lua, Value};
use crate::Plugin;
use crate::error::Result;
impl Plugin {
#[allow(clippy::needless_return)] // seems to be a clippy bug
#[tokio::main(flavor = "current_thread")]
pub async fn available(&self) -> Result<Vec<AvailableVersion>> {
self.available_async().await
}
pub async fn available_async(&self) -> Result<Vec<AvailableVersion>> {
debug!("[vfox:{}] available_async", &self.name);
let ctx = self.context(None)?;
let available = self
.eval_async(chunk! {
require "hooks/available"
return PLUGIN:Available($ctx)
})
.await?;
Ok(available)
}
}
#[derive(Debug)]
pub struct AvailableVersion {
pub version: String,
pub note: Option<String>,
// pub addition: Option<Table>,
}
impl FromLua for AvailableVersion {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => {
// TODO: try to default this to an empty table or something
// let addition = table.get::<Option<Table>>("addition")?;
Ok(AvailableVersion {
version: table.get::<String>("version")?,
note: table.get::<Option<String>>("note")?,
// addition,
})
}
_ => panic!("Expected table"),
}
}
}
#[cfg(test)]
mod tests {
use crate::Plugin;
#[test]
fn dummy() {
let versions = run("dummy");
assert_debug_snapshot!(versions, @r###"
[
"1.0.0",
"1.0.1",
]
"###);
}
#[tokio::test]
async fn dummy_async() {
let versions = run_async("dummy").await;
assert_debug_snapshot!(versions, @r###"
[
"1.0.0",
"1.0.1",
]
"###);
}
#[tokio::test]
async fn test_nodejs_async() {
let versions = run_async("test-nodejs").await;
assert!(versions.contains(&"20.0.0".to_string()));
}
fn run(plugin: &str) -> Vec<String> {
let p = Plugin::test(plugin);
let r = p.available().unwrap();
r.iter().map(|v| v.version.clone()).collect()
}
async fn run_async(plugin: &str) -> Vec<String> {
let p = Plugin::test(plugin);
let r = p.available_async().await.unwrap();
r.iter().map(|v| v.version.clone()).collect()
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/post_install.rs | crates/vfox/src/hooks/post_install.rs | use crate::Plugin;
use crate::error::Result;
use crate::sdk_info::SdkInfo;
use mlua::{IntoLua, Lua, Value};
use std::collections::BTreeMap;
use std::path::PathBuf;
impl Plugin {
pub async fn post_install(&self, ctx: PostInstallContext) -> Result<()> {
debug!("[vfox:{}] post_install", &self.name);
self.exec_async(chunk! {
require "hooks/post_install"
PLUGIN:PostInstall($ctx)
})
.await
}
}
pub struct PostInstallContext {
pub root_path: PathBuf,
pub runtime_version: String,
pub sdk_info: BTreeMap<String, SdkInfo>,
}
impl IntoLua for PostInstallContext {
fn into_lua(self, lua: &Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("rootPath", self.root_path.to_string_lossy().to_string())?;
table.set("runtimeVersion", self.runtime_version)?;
table.set("sdkInfo", self.sdk_info)?;
Ok(Value::Table(table))
}
}
#[cfg(test)]
mod tests {
use crate::Plugin;
use tokio::test;
use super::*;
#[test]
async fn dummy() {
let p = Plugin::test("dummy");
let ctx = PostInstallContext {
root_path: PathBuf::from("root_path"),
runtime_version: "runtime_version".to_string(),
sdk_info: BTreeMap::new(),
};
p.post_install(ctx).await.unwrap();
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/pre_use.rs | crates/vfox/src/hooks/pre_use.rs | use indexmap::IndexMap;
use std::path::Path;
use crate::error::Result;
use crate::sdk_info::SdkInfo;
use crate::{Plugin, Vfox};
#[allow(dead_code)]
#[derive(Debug)]
pub struct PreUseContext {
pub installed_sdks: IndexMap<String, SdkInfo>,
}
#[derive(Debug)]
pub struct PreUseResponse {
pub version: Option<String>,
}
impl Plugin {
pub async fn pre_use(&self, _vfox: &Vfox, _legacy_file: &Path) -> Result<PreUseResponse> {
debug!("[vfox:{}] pre_use", &self.name);
// let ctx = PreUseContext {
// installed_sdks: vfox.list_installed_versions(&self.name)?,
// };
unimplemented!("pre_use hook is not implemented");
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/mod.rs | crates/vfox/src/hooks/mod.rs | pub mod available;
pub mod backend_exec_env;
pub mod backend_install;
pub mod backend_list_versions;
pub mod env_keys;
pub mod mise_env;
pub mod mise_path;
pub mod parse_legacy_file;
pub mod post_install;
pub mod pre_install;
pub mod pre_use;
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/backend_install.rs | crates/vfox/src/hooks/backend_install.rs | use mlua::{FromLua, IntoLua, Lua, Value, prelude::LuaError};
use std::path::PathBuf;
use crate::{Plugin, error::Result};
#[derive(Debug)]
pub struct BackendInstallContext {
pub tool: String,
pub version: String,
pub install_path: PathBuf,
}
#[derive(Debug)]
pub struct BackendInstallResponse {}
impl Plugin {
pub async fn backend_install(
&self,
ctx: BackendInstallContext,
) -> Result<BackendInstallResponse> {
debug!("[vfox:{}] backend_install", &self.name);
self.eval_async(chunk! {
require "hooks/backend_install"
return PLUGIN:BackendInstall($ctx)
})
.await
}
}
impl IntoLua for BackendInstallContext {
fn into_lua(self, lua: &mlua::Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("tool", self.tool)?;
table.set("version", self.version)?;
table.set(
"install_path",
self.install_path.to_string_lossy().to_string(),
)?;
Ok(Value::Table(table))
}
}
impl FromLua for BackendInstallResponse {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(_) => Ok(BackendInstallResponse {}),
_ => Err(LuaError::FromLuaConversionError {
from: value.type_name(),
to: "BackendInstallResponse".to_string(),
message: Some("Expected table".to_string()),
}),
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/pre_install.rs | crates/vfox/src/hooks/pre_install.rs | use std::path::PathBuf;
use mlua::prelude::LuaError;
use mlua::{FromLua, Lua, Table, Value};
use crate::Plugin;
use crate::error::Result;
impl Plugin {
pub async fn pre_install(&self, version: &str) -> Result<PreInstall> {
debug!("[vfox:{}] pre_install", &self.name);
let ctx = self.context(Some(version.to_string()))?;
let pre_install = self
.eval_async(chunk! {
require "hooks/pre_install"
return PLUGIN:PreInstall($ctx)
})
.await?;
Ok(pre_install)
}
pub async fn pre_install_for_platform(
&self,
version: &str,
os: &str,
arch: &str,
) -> Result<PreInstall> {
debug!(
"[vfox:{}] pre_install_for_platform os={} arch={}",
&self.name, os, arch
);
let ctx = self.context(Some(version.to_string()))?;
let target_os = os.to_string();
let target_arch = arch.to_string();
let pre_install = self
.eval_async(chunk! {
require "hooks/pre_install"
-- Override globals with target platform for cross-platform URL generation
local saved_os = OS_TYPE
local saved_arch = ARCH_TYPE
OS_TYPE = $target_os
ARCH_TYPE = $target_arch
local result = PLUGIN:PreInstall($ctx)
-- Restore original values
OS_TYPE = saved_os
ARCH_TYPE = saved_arch
return result
})
.await?;
Ok(pre_install)
}
}
/// Optional attestation parameters provided by the return value of the preinstall hook.
#[derive(Debug)]
pub struct PreInstallAttestation {
// GitHub
pub github_owner: Option<String>,
pub github_repo: Option<String>,
pub github_signer_workflow: Option<String>,
// Cosign
pub cosign_sig_or_bundle_path: Option<PathBuf>,
pub cosign_public_key_path: Option<PathBuf>,
// SLSA
pub slsa_provenance_path: Option<PathBuf>,
pub slsa_min_level: Option<u8>,
}
impl FromLua for PreInstallAttestation {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => {
validate_github_attestation_params(&table)?;
validate_cosign_attestation_params(&table)?;
validate_slsa_attestation_params(&table)?;
Ok(PreInstallAttestation {
github_owner: table.get::<Option<String>>("github_owner")?,
github_repo: table.get::<Option<String>>("github_repo")?,
github_signer_workflow: table
.get::<Option<String>>("github_signer_workflow")?,
cosign_sig_or_bundle_path: table
.get::<Option<PathBuf>>("cosign_sig_or_bundle_path")?,
cosign_public_key_path: table
.get::<Option<PathBuf>>("cosign_public_key_path")?,
slsa_provenance_path: table.get::<Option<PathBuf>>("slsa_provenance_path")?,
slsa_min_level: table.get::<Option<u8>>("slsa_min_level")?,
})
}
_ => Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstallAttestation".into(),
message: Some("expected table for attestation field".to_string()),
}),
}
}
}
/// Validates that if one of the GitHub attestation parameters are set, the other requisite
/// parameters are also set.
///
/// `github_repo` requires `github_owner` and vice versa, and `github_signer_workflow` requires
/// both aforementioned parameters.
fn validate_github_attestation_params(table: &Table) -> std::result::Result<(), LuaError> {
if table.contains_key("github_owner")? && !table.contains_key("github_repo")? {
return Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstallAttestation".into(),
message: Some("github_owner requires github_repo for attestation".to_string()),
});
}
if table.contains_key("github_repo")? && !table.contains_key("github_owner")? {
return Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstallAttestation".into(),
message: Some("github_repo requires github_owner for attestation".to_string()),
});
}
if table.contains_key("github_signer_workflow")?
&& (!table.contains_key("github_owner")? || !table.contains_key("github_repo")?)
{
return Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstallAttestation".into(),
message: Some(
"github_signer_workflow requires github_owner and github_repo for attestation"
.to_string(),
),
});
}
Ok(())
}
/// Validates that if the public key path is set, then the sig/bundle path must also be set.
fn validate_cosign_attestation_params(table: &Table) -> std::result::Result<(), LuaError> {
if table.contains_key("cosign_public_key_path")?
&& !table.contains_key("cosign_sig_or_bundle_path")?
{
return Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstallAttestation".into(),
message: Some(
"cosign_public_key_path requires cosign_sig_or_bundle_path for attestation"
.to_string(),
),
});
}
Ok(())
}
/// Validates that if the SLSA min level is set, then the provenance path must also be set.
fn validate_slsa_attestation_params(table: &Table) -> std::result::Result<(), LuaError> {
if table.contains_key("slsa_min_level")? && !table.contains_key("slsa_provenance_path")? {
return Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstallAttestation".into(),
message: Some(
"slsa_min_level requires slsa_provenance_path for attestation".to_string(),
),
});
}
Ok(())
}
#[derive(Debug)]
pub struct PreInstall {
pub version: String,
pub url: Option<String>,
pub note: Option<String>,
pub sha256: Option<String>,
pub md5: Option<String>,
pub sha1: Option<String>,
pub sha512: Option<String>,
pub attestation: Option<PreInstallAttestation>,
// pub addition: Option<Table>,
}
impl FromLua for PreInstall {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => {
if !table.contains_key("version")? {
return Err(LuaError::FromLuaConversionError {
from: "table",
to: "PreInstall".into(),
message: Some("no version returned from vfox plugin".to_string()),
});
}
Ok(PreInstall {
version: table.get::<String>("version")?,
url: table.get::<Option<String>>("url")?,
note: table.get::<Option<String>>("note")?,
sha256: table.get::<Option<String>>("sha256")?,
md5: table.get::<Option<String>>("md5")?,
sha1: table.get::<Option<String>>("sha1")?,
sha512: table.get::<Option<String>>("sha512")?,
attestation: table.get::<Option<PreInstallAttestation>>("attestation")?,
// addition,
})
}
_ => panic!("Expected table"),
}
}
}
#[cfg(test)]
mod tests {
use crate::Plugin;
use crate::hooks::pre_install::PreInstall;
use crate::runtime::Runtime;
use std::string::ToString;
use tokio::test;
#[test]
async fn dummy() {
let pre_install = run("dummy", "1.0.1").await;
assert_debug_snapshot!(pre_install);
}
#[test]
async fn test_nodejs() {
Runtime::set_os("linux".to_string());
Runtime::set_arch("x64".to_string());
let pre_install = run("test-nodejs", "20.0.0").await;
assert_debug_snapshot!(pre_install);
Runtime::set_os("macos".to_string());
Runtime::set_arch("arm64".to_string());
let pre_install = run("test-nodejs", "20.1.0").await;
assert_debug_snapshot!(pre_install);
Runtime::set_os("windows".to_string());
Runtime::set_arch("x64".to_string());
let pre_install = run("test-nodejs", "20.3.0").await;
assert_debug_snapshot!(pre_install);
Runtime::reset();
}
async fn run(plugin: &str, v: &str) -> PreInstall {
let p = Plugin::test(plugin);
p.pre_install(v).await.unwrap()
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/mise_path.rs | crates/vfox/src/hooks/mise_path.rs | use mlua::{IntoLua, Lua, LuaSerdeExt, Value};
use crate::Plugin;
use crate::error::Result;
#[derive(Debug)]
pub struct MisePathContext<T: serde::Serialize> {
pub args: Vec<String>,
pub options: T,
}
impl Plugin {
pub async fn mise_path<T: serde::Serialize>(
&self,
ctx: MisePathContext<T>,
) -> Result<Vec<String>> {
debug!("[vfox:{}] mise_path", &self.name);
let path = self
.eval_async(chunk! {
require "hooks/mise_path"
return PLUGIN:MisePath($ctx)
})
.await?;
Ok(path)
}
}
impl<T: serde::Serialize> IntoLua for MisePathContext<T> {
fn into_lua(self, lua: &Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("options", lua.to_value(&self.options)?)?;
Ok(Value::Table(table))
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/hooks/backend_exec_env.rs | crates/vfox/src/hooks/backend_exec_env.rs | use mlua::{FromLua, IntoLua, Lua, Value, prelude::LuaError};
use std::path::PathBuf;
use crate::{Plugin, error::Result, hooks::env_keys::EnvKey};
#[derive(Debug, Clone)]
pub struct BackendExecEnvContext {
pub tool: String,
pub version: String,
pub install_path: PathBuf,
}
#[derive(Debug)]
pub struct BackendExecEnvResponse {
pub env_vars: Vec<EnvKey>,
}
impl Plugin {
pub async fn backend_exec_env(
&self,
ctx: BackendExecEnvContext,
) -> Result<BackendExecEnvResponse> {
debug!("[vfox:{}] backend_exec_env", &self.name);
self.eval_async(chunk! {
require "hooks/backend_exec_env"
return PLUGIN:BackendExecEnv($ctx)
})
.await
}
}
impl IntoLua for BackendExecEnvContext {
fn into_lua(self, lua: &mlua::Lua) -> mlua::Result<Value> {
let table = lua.create_table()?;
table.set("tool", self.tool)?;
table.set("version", self.version)?;
table.set(
"install_path",
self.install_path.to_string_lossy().to_string(),
)?;
Ok(Value::Table(table))
}
}
impl FromLua for BackendExecEnvResponse {
fn from_lua(value: Value, _: &Lua) -> std::result::Result<Self, LuaError> {
match value {
Value::Table(table) => Ok(BackendExecEnvResponse {
env_vars: table.get::<Vec<crate::hooks::env_keys::EnvKey>>("env_vars")?,
}),
_ => Err(LuaError::FromLuaConversionError {
from: value.type_name(),
to: "BackendExecEnvResponse".to_string(),
message: Some("Expected table".to_string()),
}),
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/html.rs | crates/vfox/src/lua_mod/html.rs | use mlua::{Lua, Table};
pub fn mod_html(lua: &Lua) -> mlua::Result<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
loaded.set(
"htmlparser.voidelements",
lua.load(include_str!("../../lua/htmlparser/voidelements.lua"))
.eval::<Table>()?,
)?;
loaded.set(
"htmlparser.ElementNode",
lua.load(include_str!("../../lua/htmlparser/ElementNode.lua"))
.eval::<Table>()?,
)?;
loaded.set(
"htmlparser",
lua.load(include_str!("../../lua/htmlparser.lua"))
.eval::<Table>()?,
)?;
loaded.set(
"html",
lua.load(mlua::chunk! {
local htmlparser = require("htmlparser")
return {
parse = function(s)
Node = {
find = function(self, tag)
local nodes = self.node:select(tag)
return Node.new(nodes)
end,
first = function(self)
return Node.new({self.nodes[1]})
end,
eq = function(self, idx)
local node = self.nodes[idx + 1]
return Node.new({node})
end,
each = function(self, f)
for i, node in ipairs(self.nodes) do
f(i - 1, Node.new({node}))
end
end,
text = function(self)
if self.node == nil then
return ""
end
return self.node:getcontent()
end,
attr = function(self, key)
if self.node == nil then
return ""
end
return self.node.attributes[key]
end,
}
Node.new = function(nodes)
return setmetatable({nodes = nodes, node = nodes[1]}, {__index = Node})
end
local root = htmlparser.parse(s, 100000)
return Node.new({root})
end
}
})
.eval::<Table>()?,
)?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::lua_mod::http::mod_http;
#[test]
fn test_html() {
let lua = Lua::new();
mod_html(&lua).unwrap();
lua.load(mlua::chunk! {
local html = require("html")
local doc = html.parse("<html><body><div id='t2' name='123'>456</div><DIV foo='bar'>222</DIV></body></html>")
local f = doc:find("div"):eq(0)
local s = doc:find("div"):eq(1)
assert(s:text() == "222")
assert(f:text() == "456")
assert(s:attr("foo") == "bar")
doc:find("div"):each(function(i, e)
if i == 0 then
assert(e:text() == "456")
else
assert(e:text() == "222")
end
end)
})
.exec()
.unwrap();
}
#[tokio::test]
#[ignore] // TODO: make this actually work
async fn test_html_go() {
let lua = Lua::new();
mod_html(&lua).unwrap();
mod_http(&lua).unwrap();
lua.load(mlua::chunk! {
local http = require("http")
local html = require("html")
table = {}
resp, err = http.get({
url = "https://go.dev/dl/"
})
if err ~= nil or resp.status_code ~= 200 then
error("parsing release info failed." .. err)
end
local doc = html.parse(resp.body)
local listDoc = doc:find("div#archive")
listDoc:find(".toggle"):each(function(i, selection)
local versionStr = selection:attr("id")
if versionStr ~= nil then
selection:find("table.downloadtable tr"):each(function(ti, ts)
local td = ts:find("td")
local filename = td:eq(0):text()
local kind = td:eq(1):text()
local os = td:eq(2):text()
local arch = td:eq(3):text()
local checksum = td:eq(5):text()
if kind == "Archive" and os == "Windows" and arch == "x86-64" then
table.insert(result, {
version = string.sub(versionStr, 3),
url = "https://go.dev/dl/" .. filename,
note = "",
sha256 = checksum,
})
end
end)
end
end)
print(table)
// TODO: check results
})
.exec_async()
.await
.unwrap();
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/http.rs | crates/vfox/src/lua_mod/http.rs | use mlua::{BorrowedStr, ExternalResult, Lua, MultiValue, Result, Table};
use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
use crate::http::CLIENT;
pub fn mod_http(lua: &Lua) -> Result<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
loaded.set(
"http",
lua.create_table_from(vec![
(
"get",
lua.create_async_function(|lua: mlua::Lua, input| async move {
get(&lua, input).await
})?,
),
(
"head",
lua.create_async_function(|lua: mlua::Lua, input| async move {
head(&lua, input).await
})?,
),
(
"download_file",
lua.create_async_function(|_lua: mlua::Lua, input| async move {
download_file(&_lua, input).await
})?,
),
])?,
)
}
fn into_headers(table: &Table) -> Result<HeaderMap> {
let mut map = HeaderMap::new();
for entry in table.pairs::<BorrowedStr, BorrowedStr>() {
let (k, v) = entry?;
map.insert(
HeaderName::from_bytes(k.as_bytes()).into_lua_err()?,
HeaderValue::from_str(&v).into_lua_err()?,
);
}
Ok(map)
}
async fn get(lua: &Lua, input: Table) -> Result<Table> {
let url: String = input.get("url").into_lua_err()?;
let headers = match input.get::<Option<Table>>("headers").into_lua_err()? {
Some(tbl) => into_headers(&tbl)?,
None => HeaderMap::default(),
};
let resp = CLIENT
.get(&url)
.headers(headers)
.send()
.await
.into_lua_err()?;
let t = lua.create_table()?;
t.set("status_code", resp.status().as_u16())?;
t.set("headers", get_headers(lua, resp.headers())?)?;
t.set("body", resp.text().await.into_lua_err()?)?;
Ok(t)
}
async fn download_file(_lua: &Lua, input: MultiValue) -> Result<()> {
let t: &Table = input.iter().next().unwrap().as_table().unwrap();
let url: String = t.get("url").into_lua_err()?;
let headers = match t.get::<Option<Table>>("headers").into_lua_err()? {
Some(tbl) => into_headers(&tbl)?,
None => HeaderMap::default(),
};
let path: String = input.iter().nth(1).unwrap().to_string()?;
let resp = CLIENT
.get(&url)
.headers(headers)
.send()
.await
.into_lua_err()?;
resp.error_for_status_ref().into_lua_err()?;
let mut file = tokio::fs::File::create(&path).await.into_lua_err()?;
let bytes = resp.bytes().await.into_lua_err()?;
tokio::io::AsyncWriteExt::write_all(&mut file, &bytes)
.await
.into_lua_err()?;
Ok(())
}
async fn head(lua: &Lua, input: Table) -> Result<Table> {
let url: String = input.get("url").into_lua_err()?;
let headers = match input.get::<Option<Table>>("headers").into_lua_err()? {
Some(tbl) => into_headers(&tbl)?,
None => HeaderMap::default(),
};
let resp = CLIENT
.head(&url)
.headers(headers)
.send()
.await
.into_lua_err()?;
let t = lua.create_table()?;
t.set("status_code", resp.status().as_u16())?;
t.set("headers", get_headers(lua, resp.headers())?)?;
Ok(t)
}
fn get_headers(lua: &Lua, headers: &reqwest::header::HeaderMap) -> Result<Table> {
let t = lua.create_table()?;
for (name, value) in headers.iter() {
t.set(name.as_str(), value.to_str().into_lua_err()?)?;
}
Ok(t)
}
#[cfg(test)]
mod tests {
use super::*;
use wiremock::matchers::{header, method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
#[tokio::test]
async fn test_get() {
// Start a local mock server
let server = MockServer::start().await;
// Create a mock endpoint
Mock::given(method("GET"))
.and(path("/get"))
.respond_with(
ResponseTemplate::new(200)
.set_body_json(serde_json::json!({
"message": "test response"
}))
.insert_header("content-type", "application/json"),
)
.mount(&server)
.await;
let lua = Lua::new();
mod_http(&lua).unwrap();
let url = server.uri() + "/get";
lua.load(mlua::chunk! {
local http = require("http")
local resp = http.get({ url = $url })
assert(resp.status_code == 200)
assert(type(resp.body) == "string")
})
.exec_async()
.await
.unwrap();
}
#[tokio::test]
async fn test_get_headers() {
// Start a local mock server
let server = MockServer::start().await;
// Create a mock endpoint
Mock::given(method("GET"))
.and(path("/get"))
.and(header("Authorization", "Bearer abc"))
.respond_with(
ResponseTemplate::new(200)
.set_body_json(serde_json::json!({
"message": "test response"
}))
.insert_header("content-type", "application/json"),
)
.mount(&server)
.await;
let lua = Lua::new();
mod_http(&lua).unwrap();
let url = server.uri() + "/get";
lua.load(mlua::chunk! {
local http = require("http")
local resp = http.get({
url = $url,
headers = {
["Authorization"] = "Bearer abc"
}
})
assert(resp.status_code == 200)
assert(type(resp.body) == "string")
})
.exec_async()
.await
.unwrap();
}
#[tokio::test]
async fn test_head() {
let server = MockServer::start().await;
Mock::given(method("HEAD"))
.and(path("/get"))
.respond_with(
ResponseTemplate::new(200)
.insert_header("content-type", "application/json")
.insert_header("x-test-header", "test-value"),
)
.mount(&server)
.await;
let lua = Lua::new();
mod_http(&lua).unwrap();
let url = server.uri() + "/get";
lua.load(mlua::chunk! {
local http = require("http")
local resp = http.head({ url = $url })
assert(resp.status_code == 200)
assert(type(resp.headers) == "table")
assert(resp.headers["content-type"] == "application/json")
assert(resp.headers["x-test-header"] == "test-value")
assert(resp.content_length == nil)
})
.exec_async()
.await
.unwrap();
}
#[tokio::test]
async fn test_download_file() {
let server = MockServer::start().await;
// Create test content
let test_content = r#"{"name": "vfox-nodejs", "version": "1.0.0"}"#;
Mock::given(method("GET"))
.and(path("/index.json"))
.respond_with(
ResponseTemplate::new(200)
.set_body_string(test_content)
.insert_header("content-type", "application/json"),
)
.expect(1) // Expect exactly one request
.mount(&server)
.await;
let lua = Lua::new();
mod_http(&lua).unwrap();
// Use isolated temp directory for test isolation
let temp_dir = tempfile::TempDir::new().unwrap();
let path = temp_dir.path().join("download_file.txt");
let path_str = path.to_string_lossy().to_string();
let url = server.uri() + "/index.json";
lua.load(mlua::chunk! {
local http = require("http")
err = http.download_file({
url = $url,
headers = {}
}, $path_str)
assert(err == nil, [[must be nil]])
})
.exec_async()
.await
.unwrap();
// Add a small delay to ensure file write is completed
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
// Verify file was downloaded correctly with better error handling
let content = tokio::fs::read_to_string(&path)
.await
.unwrap_or_else(|e| panic!("Failed to read file at {:?}: {}", path, e));
assert!(
content.contains("vfox-nodejs"),
"Expected content to contain 'vfox-nodejs', but got: {:?}",
content
);
// TempDir automatically cleans up when dropped
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/env.rs | crates/vfox/src/lua_mod/env.rs | use mlua::Table;
use mlua::prelude::*;
pub fn mod_env(lua: &Lua) -> LuaResult<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
let env = lua.create_table_from(vec![("setenv", lua.create_function(setenv)?)])?;
loaded.set("env", env.clone())?;
loaded.set("vfox.env", env)?;
Ok(())
}
fn setenv(_lua: &Lua, (key, val): (String, String)) -> LuaResult<()> {
unsafe {
std::env::set_var(key, val);
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_env() {
let lua = Lua::new();
mod_env(&lua).unwrap();
if cfg!(windows) {
lua.load(mlua::chunk! {
local env = require("env")
env.setenv("TEST_ENV", "myvar")
handle = io.popen("pwsh -Command \"echo $env:TEST_ENV\"")
result = handle:read("*a")
handle:close()
assert(result == "myvar\n")
})
.exec()
.unwrap();
} else {
lua.load(mlua::chunk! {
local env = require("env")
env.setenv("TEST_ENV", "myvar")
handle = io.popen("echo $TEST_ENV")
result = handle:read("*a")
handle:close()
assert(result == "myvar\n")
})
.exec()
.unwrap();
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/json.rs | crates/vfox/src/lua_mod/json.rs | use mlua::{ExternalResult, Lua, LuaSerdeExt, Result, Table, Value};
pub fn mod_json(lua: &Lua) -> Result<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
loaded.set(
"json",
lua.create_table_from(vec![
("encode", lua.create_function(encode)?),
("decode", lua.create_function(decode)?),
])?,
)
}
fn encode(_lua: &Lua, value: Value) -> Result<String> {
serde_json::to_string(&value).into_lua_err()
}
fn decode(lua: &Lua, value: String) -> Result<Value> {
let value: serde_json::Value = serde_json::from_str(&value).into_lua_err()?;
lua.to_value(&value)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_encode() {
let lua = Lua::new();
mod_json(&lua).unwrap();
lua.load(mlua::chunk! {
local json = require("json")
local obj = { "a", 1, "b", 2, "c", 3 }
local jsonStr = json.encode(obj)
assert(jsonStr == "[\"a\",1,\"b\",2,\"c\",3]")
})
.exec()
.unwrap();
}
#[test]
fn test_decode() {
let lua = Lua::new();
mod_json(&lua).unwrap();
lua.load(mlua::chunk! {
local json = require("json")
local obj = json.decode("[\"a\",1,\"b\",2,\"c\",3]")
assert(obj[1] == "a")
assert(obj[2] == 1)
assert(obj[3] == "b")
assert(obj[4] == 2)
assert(obj[5] == "c")
assert(obj[6] == 3)
})
.exec()
.unwrap();
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/file.rs | crates/vfox/src/lua_mod/file.rs | use crate::error::Result;
use mlua::{ExternalResult, Lua, MultiValue, Table};
#[cfg(unix)]
use std::os::unix::fs::symlink as _symlink;
#[cfg(windows)]
use std::os::windows::fs::symlink_dir;
#[cfg(windows)]
use std::os::windows::fs::symlink_file;
use std::path::Path;
fn join_path(_lua: &Lua, args: MultiValue) -> mlua::Result<String> {
let sep = std::path::MAIN_SEPARATOR;
let mut parts = vec![];
for v in args {
let s = v.to_string()?;
if !s.is_empty() {
parts.push(s);
}
}
Ok(parts.join(&sep.to_string()))
}
pub fn mod_file(lua: &Lua) -> Result<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
Ok(loaded.set(
"file",
lua.create_table_from(vec![
(
"read",
lua.create_async_function(|_lua: mlua::Lua, input| async move {
read(&_lua, input).await
})?,
),
(
"symlink",
lua.create_async_function(|_lua: mlua::Lua, input| async move {
symlink(&_lua, input).await
})?,
),
("join_path", lua.create_function(join_path)?),
(
"exists",
lua.create_async_function(|_lua: mlua::Lua, input| async move {
exists(&_lua, input).await
})?,
),
])?,
)?)
}
async fn read(_lua: &Lua, input: MultiValue) -> mlua::Result<String> {
let args: Vec<String> = input
.into_iter()
.map(|v| v.to_string())
.collect::<mlua::Result<_>>()?;
let path = Path::new(&args[0]);
std::fs::read_to_string(path).into_lua_err()
}
async fn symlink(_lua: &Lua, input: MultiValue) -> mlua::Result<()> {
let input: Vec<String> = input
.into_iter()
.map(|v| v.to_string())
.collect::<mlua::Result<_>>()?;
let src = Path::new(&input[0]);
let dst = Path::new(&input[1]);
#[cfg(windows)]
{
if src.is_dir() {
symlink_dir(src, dst).into_lua_err()?;
} else {
symlink_file(src, dst).into_lua_err()?;
}
}
#[cfg(unix)]
_symlink(src, dst).into_lua_err()?;
Ok(())
}
async fn exists(_lua: &Lua, input: MultiValue) -> mlua::Result<bool> {
let args: Vec<String> = input
.into_iter()
.map(|v| v.to_string())
.collect::<mlua::Result<_>>()?;
let path = Path::new(&args[0]);
std::fs::exists(path).into_lua_err()
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
#[test]
fn test_read() {
let temp_dir = tempfile::TempDir::new().unwrap();
let filepath = temp_dir.path().join("file-read.txt");
let filepath_str = filepath.to_string_lossy().to_string();
fs::write(&filepath, "hello world").unwrap();
let lua = Lua::new();
mod_file(&lua).unwrap();
lua.load(mlua::chunk! {
local file = require("file")
local success, contents = pcall(file.read, $filepath_str)
if not success then
error("Failed to read: " .. contents)
end
if contents == nil then
error("contents should not be nil")
elseif contents ~= "hello world" then
error("contents expected to be 'hello world', was actually:" .. contents)
end
})
.exec()
.unwrap();
// TempDir automatically cleans up when dropped
}
#[test]
fn test_symlink() {
let temp_dir = tempfile::TempDir::new().unwrap();
let src_path = temp_dir.path().join("symlink_src");
let dst_path = temp_dir.path().join("symlink_dst");
let src_path_str = src_path.to_string_lossy().to_string();
let dst_path_str = dst_path.to_string_lossy().to_string();
let lua = Lua::new();
mod_file(&lua).unwrap();
lua.load(mlua::chunk! {
local file = require("file")
file.symlink($src_path_str, $dst_path_str)
})
.exec()
.unwrap();
assert_eq!(fs::read_link(&dst_path).unwrap(), src_path);
// TempDir automatically cleans up when dropped
}
#[test]
fn test_exists() {
let temp_dir = tempfile::TempDir::new().unwrap();
let existing_file = temp_dir.path().join("exists.txt");
let existing_file_str = existing_file.to_string_lossy().to_string();
let nonexistent_file_str = temp_dir
.path()
.join("nonexistent.txt")
.to_string_lossy()
.to_string();
fs::write(&existing_file, "test content").unwrap();
let lua = Lua::new();
mod_file(&lua).unwrap();
lua.load(mlua::chunk! {
local file = require("file")
local existing_exists = file.exists($existing_file_str)
local nonexistent_exists = file.exists($nonexistent_file_str)
if not existing_exists then
error("Expected existing file to exist")
end
if nonexistent_exists then
error("Expected nonexistent file to not exist")
end
})
.exec()
.unwrap();
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/hooks.rs | crates/vfox/src/lua_mod/hooks.rs | use crate::embedded_plugins::EmbeddedPlugin;
use crate::error::Result;
use mlua::Lua;
use std::collections::BTreeSet;
use std::path::Path;
pub struct HookFunc {
_name: &'static str,
pub filename: &'static str,
}
#[rustfmt::skip]
pub const HOOK_FUNCS: [HookFunc; 12] = [
HookFunc { _name: "Available", filename: "available" },
HookFunc { _name: "PreInstall", filename: "pre_install" },
HookFunc { _name: "EnvKeys", filename: "env_keys" },
HookFunc { _name: "PostInstall", filename: "post_install" },
HookFunc { _name: "PreUse", filename: "pre_use" },
HookFunc { _name: "ParseLegacyFile", filename: "parse_legacy_file" },
HookFunc { _name: "PreUninstall", filename: "pre_uninstall" },
// backend
HookFunc { _name: "BackendListVersions", filename: "backend_list_versions" },
HookFunc { _name: "BackendInstall", filename: "backend_install" },
HookFunc { _name: "BackendExecEnv", filename: "backend_exec_env" },
// mise
HookFunc { _name: "MiseEnv", filename: "mise_env" },
HookFunc { _name: "MisePath", filename: "mise_path" },
];
pub fn mod_hooks(lua: &Lua, root: &Path) -> Result<BTreeSet<&'static str>> {
let mut hooks = BTreeSet::new();
for hook in &HOOK_FUNCS {
let hook_path = root.join("hooks").join(format!("{}.lua", hook.filename));
if hook_path.exists() {
lua.load(hook_path).exec()?;
hooks.insert(hook.filename);
}
}
Ok(hooks)
}
pub fn hooks_embedded(lua: &Lua, embedded: &EmbeddedPlugin) -> Result<BTreeSet<&'static str>> {
let mut hooks = BTreeSet::new();
// Get package.loaded table to preload hooks
let package: mlua::Table = lua.globals().get("package")?;
let loaded: mlua::Table = package.get("loaded")?;
for (hook_name, hook_code) in embedded.hooks {
// Execute the hook code to define the function
lua.load(*hook_code).exec()?;
// Also preload into package.loaded so require("hooks/<name>") works
// The hook code typically defines a function on the PLUGIN table
// We need to register a module that can be required
let module_name = format!("hooks/{}", hook_name);
// Create a simple module that returns true (the hook code has already been executed)
loaded.set(module_name, true)?;
// Find the matching hook filename from HOOK_FUNCS
for hook in &HOOK_FUNCS {
if hook.filename == *hook_name {
hooks.insert(hook.filename);
break;
}
}
}
Ok(hooks)
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/mod.rs | crates/vfox/src/lua_mod/mod.rs | mod archiver;
mod cmd;
mod env;
mod file;
mod hooks;
mod html;
mod http;
mod json;
mod strings;
pub use archiver::mod_archiver as archiver;
pub use cmd::mod_cmd as cmd;
pub use env::mod_env as env;
pub use file::mod_file as file;
pub use hooks::hooks_embedded;
pub use hooks::mod_hooks as hooks;
pub use html::mod_html as html;
pub use http::mod_http as http;
pub use json::mod_json as json;
pub use strings::mod_strings as strings;
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/strings.rs | crates/vfox/src/lua_mod/strings.rs | use mlua::prelude::*;
use mlua::{Table, Value};
pub fn mod_strings(lua: &Lua) -> LuaResult<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
let strings = lua.create_table_from(vec![
("split", lua.create_function(split)?),
("has_prefix", lua.create_function(has_prefix)?),
("has_suffix", lua.create_function(has_suffix)?),
("trim", lua.create_function(trim)?),
("trim_space", lua.create_function(trim_space)?),
("contains", lua.create_function(contains)?),
("join", lua.create_function(join)?),
])?;
loaded.set("strings", strings.clone())?;
loaded.set("vfox.strings", strings)?;
Ok(())
}
fn split(_lua: &Lua, (s, sep): (String, String)) -> LuaResult<Vec<String>> {
Ok(s.split(&sep).map(|s| s.to_string()).collect())
}
fn has_prefix(_lua: &Lua, (s, prefix): (String, String)) -> LuaResult<bool> {
Ok(s.starts_with(&prefix))
}
fn has_suffix(_lua: &Lua, (s, suffix): (String, String)) -> LuaResult<bool> {
Ok(s.ends_with(&suffix))
}
fn trim(_lua: &Lua, (s, suffix): (String, String)) -> LuaResult<String> {
Ok(s.trim_end_matches(&suffix).to_string())
}
fn trim_space(_lua: &Lua, s: String) -> LuaResult<String> {
Ok(s.trim().to_string())
}
fn contains(_lua: &Lua, (s, substr): (String, String)) -> LuaResult<bool> {
Ok(s.contains(&substr))
}
fn join(_lua: &Lua, (arr, sep): (Vec<Value>, String)) -> LuaResult<String> {
let mut res = String::new();
for (i, v) in arr.iter().enumerate() {
if i > 0 {
res.push_str(&sep);
}
res.push_str(&v.to_string()?);
}
Ok(res)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_strings() {
let lua = Lua::new();
mod_strings(&lua).unwrap();
lua.load(mlua::chunk! {
local strings = require("strings")
local str_parts = strings.split("hello world", " ")
print(str_parts[1]) -- hello
assert(strings.has_prefix("hello world", "hello"), [[not strings.has_prefix("hello")]])
assert(strings.has_suffix("hello world", "world"), [[not strings.has_suffix("world")]])
assert(strings.trim("hello world", "world") == "hello ", "strings.trim()")
assert(strings.contains("hello world", "hello ") == true, "strings.contains()")
// got = strings.trim_space(tt.input)
//
// local str = strings.join({"1",3,"4"},";")
// assert(str == "1;3;4", "strings.join()")
})
.exec()
.unwrap();
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/cmd.rs | crates/vfox/src/lua_mod/cmd.rs | use mlua::Table;
use mlua::prelude::*;
use std::path::Path;
pub fn mod_cmd(lua: &Lua) -> LuaResult<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
let cmd = lua.create_table_from(vec![("exec", lua.create_function(exec)?)])?;
loaded.set("cmd", cmd.clone())?;
loaded.set("vfox.cmd", cmd)?;
Ok(())
}
fn exec(_lua: &Lua, args: mlua::MultiValue) -> LuaResult<String> {
use std::process::Command;
let (command, options) = match args.len() {
1 => {
let command: String = args.into_iter().next().unwrap().to_string()?;
(command, None)
}
2 => {
let mut iter = args.into_iter();
let command: String = iter.next().unwrap().to_string()?;
let options: Table = iter.next().unwrap().as_table().unwrap().clone();
(command, Some(options))
}
_ => {
return Err(mlua::Error::RuntimeError(
"cmd.exec takes 1 or 2 arguments: (command) or (command, options)".to_string(),
));
}
};
let mut cmd = if cfg!(target_os = "windows") {
Command::new("cmd")
} else {
Command::new("sh")
};
if cfg!(target_os = "windows") {
cmd.args(["/C", &command]);
} else {
cmd.args(["-c", &command]);
}
// Apply options if provided
if let Some(options) = options {
// Set working directory if specified
if let Ok(cwd) = options.get::<String>("cwd") {
cmd.current_dir(Path::new(&cwd));
}
// Set environment variables if specified
if let Ok(env) = options.get::<Table>("env") {
for pair in env.pairs::<String, String>() {
let (key, value) = pair?;
cmd.env(key, value);
}
}
// Set timeout if specified (future feature)
if let Ok(_timeout) = options.get::<u64>("timeout") {
// TODO: Implement timeout functionality
// For now, just ignore the timeout option
}
}
let output = cmd
.output()
.map_err(|e| mlua::Error::RuntimeError(format!("Failed to execute command: {e}")))?;
let stdout = String::from_utf8_lossy(&output.stdout);
let stderr = String::from_utf8_lossy(&output.stderr);
if output.status.success() {
Ok(stdout.to_string())
} else {
Err(mlua::Error::RuntimeError(format!(
"Command failed with status {}: {}",
output.status, stderr
)))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_cmd() {
let lua = Lua::new();
mod_cmd(&lua).unwrap();
lua.load(mlua::chunk! {
local cmd = require("cmd")
local result = cmd.exec("echo hello world")
assert(result == "hello world\n")
})
.exec()
.unwrap();
}
#[test]
fn test_cmd_with_cwd() {
let temp_dir = tempfile::TempDir::new().unwrap();
let temp_path = temp_dir.path();
// Canonicalize to resolve symlinks (e.g., /var -> /private/var on macOS)
let temp_path_canonical = temp_path
.canonicalize()
.unwrap_or_else(|_| temp_path.to_path_buf());
let temp_dir_str = temp_path_canonical.to_string_lossy().to_string();
let expected_path = temp_dir_str.trim_end_matches('/').to_string();
let lua = Lua::new();
mod_cmd(&lua).unwrap();
lua.load(mlua::chunk! {
local cmd = require("cmd")
-- Test with working directory
local result = cmd.exec("pwd", {cwd = $temp_dir_str})
-- Check that result contains the expected path (handles trailing slashes/newlines)
assert(result:find($expected_path) ~= nil, "Expected result to contain: " .. $expected_path .. " but got: " .. result)
})
.exec()
.unwrap();
// TempDir automatically cleans up when dropped
}
#[test]
fn test_cmd_with_env() {
let lua = Lua::new();
mod_cmd(&lua).unwrap();
lua.load(mlua::chunk! {
local cmd = require("cmd")
-- Test with environment variables
local result = cmd.exec("echo $TEST_VAR", {env = {TEST_VAR = "hello"}})
assert(result:find("hello") ~= nil)
})
.exec()
.unwrap();
}
#[test]
fn test_cmd_windows_compatibility() {
let lua = Lua::new();
mod_cmd(&lua).unwrap();
let test_command = "echo hello world";
lua.load(format!(
r#"
local cmd = require("cmd")
local result = cmd.exec("{test_command}")
assert(result:find("hello world") ~= nil)
"#
))
.exec()
.unwrap();
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/lua_mod/archiver.rs | crates/vfox/src/lua_mod/archiver.rs | use crate::error::Result;
use mlua::{ExternalResult, Lua, MultiValue, Table};
use std::path::PathBuf;
pub fn mod_archiver(lua: &Lua) -> Result<()> {
let package: Table = lua.globals().get("package")?;
let loaded: Table = package.get("loaded")?;
Ok(loaded.set(
"archiver",
lua.create_table_from(vec![(
"decompress",
lua.create_async_function(
|_lua: mlua::Lua, input| async move { decompress(&_lua, input) },
)?,
)])?,
)?)
}
fn decompress(_lua: &Lua, input: MultiValue) -> mlua::Result<()> {
let paths: Vec<mlua::Value> = input.into_iter().collect();
let archive: PathBuf = PathBuf::from(paths[0].to_string()?);
let destination: PathBuf = PathBuf::from(paths[1].to_string()?);
let filename = archive.file_name().unwrap().to_str().unwrap();
if filename.ends_with(".zip") {
xx::archive::unzip(&archive, &destination).into_lua_err()?;
} else if filename.ends_with(".tar.gz") {
xx::archive::untar_gz(&archive, &destination).into_lua_err()?;
} else if filename.ends_with(".tar.xz") {
xx::archive::untar_xz(&archive, &destination).into_lua_err()?;
} else if filename.ends_with(".tar.bz2") {
xx::archive::untar_bz2(&archive, &destination).into_lua_err()?;
} else {
unimplemented!("Unsupported archive format {:?}", archive);
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_zip() {
let temp_dir = tempfile::TempDir::new().unwrap();
let dst_path = temp_dir.path().join("unzip");
let dst_path_str = dst_path.to_string_lossy().to_string();
let lua = Lua::new();
mod_archiver(&lua).unwrap();
lua.load(mlua::chunk! {
local archiver = require("archiver")
archiver.decompress("test/data/foo.zip", $dst_path_str)
})
.exec()
.unwrap();
assert_eq!(
std::fs::read_to_string(dst_path.join("foo/test.txt")).unwrap(),
"yep\n"
);
// TempDir automatically cleans up when dropped
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/cli/env_keys.rs | crates/vfox/src/cli/env_keys.rs | use vfox::{Vfox, VfoxResult};
#[derive(clap::Args)]
pub struct EnvKeys {
pub sdk: String,
pub version: String,
}
impl EnvKeys {
pub async fn run(&self) -> VfoxResult<()> {
let vfox = Vfox::new();
let env_keys = vfox
.env_keys(
&self.sdk,
&self.version,
serde_json::Value::Object(Default::default()),
)
.await?;
for env_key in env_keys {
println!("{}={}", env_key.key, env_key.value);
}
Ok(())
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/cli/install.rs | crates/vfox/src/cli/install.rs | use std::path::PathBuf;
use vfox::{Vfox, VfoxResult};
#[derive(clap::Args)]
pub struct Install {
pub sdk: String,
pub version: String,
#[clap(short, long)]
pub output_dir: Option<PathBuf>,
}
impl Install {
pub async fn run(&self) -> VfoxResult<()> {
let vfox = Vfox::new();
let out = self
.output_dir
.clone()
.unwrap_or_else(|| vfox.install_dir.join(&self.sdk).join(&self.version));
info!(
"Installing {} version {} to {out:?}",
self.sdk, self.version
);
vfox.install(&self.sdk, &self.version, &out).await?;
Ok(())
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/cli/available.rs | crates/vfox/src/cli/available.rs | use vfox::{Vfox, VfoxResult};
#[derive(clap::Args)]
pub struct Available {}
impl Available {
pub async fn run(&self) -> VfoxResult<()> {
for (name, url) in Vfox::list_available_sdks() {
println!("{name} {url}");
}
Ok(())
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/cli/mod.rs | crates/vfox/src/cli/mod.rs | use clap::Parser;
use vfox::VfoxResult;
mod available;
mod env_keys;
mod install;
mod plugins;
#[derive(Parser)]
#[command(version)]
pub(crate) struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(clap::Subcommand)]
enum Commands {
Available(available::Available),
EnvKeys(env_keys::EnvKeys),
Install(install::Install),
#[command(alias = "plugin")]
Plugins(plugins::Plugins),
}
impl Commands {
pub async fn run(self) -> VfoxResult<()> {
match self {
Commands::Available(available) => available.run().await,
Commands::EnvKeys(env_keys) => env_keys.run().await,
Commands::Install(install) => install.run().await,
Commands::Plugins(plugins) => plugins.run().await,
}
}
}
pub async fn run() -> VfoxResult<()> {
Cli::parse().command.run().await
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/cli/plugins/list.rs | crates/vfox/src/cli/plugins/list.rs | use vfox::Vfox;
use vfox::VfoxResult;
#[derive(clap::Args)]
#[command(alias = "ls")]
pub struct List {}
impl List {
pub async fn run(&self) -> VfoxResult<()> {
let vfox = Vfox::new();
let sdks = vfox.list_sdks()?;
for sdk in sdks {
println!("{sdk}");
}
Ok(())
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
jdx/mise | https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/crates/vfox/src/cli/plugins/mod.rs | crates/vfox/src/cli/plugins/mod.rs | use vfox::VfoxResult;
mod list;
#[derive(clap::Subcommand)]
pub(crate) enum Commands {
// Install(install::Install),
List(list::List),
}
#[derive(clap::Args)]
pub(crate) struct Plugins {
#[clap(subcommand)]
command: Commands,
}
impl Plugins {
pub(crate) async fn run(&self) -> VfoxResult<()> {
match &self.command {
Commands::List(list) => list.run().await,
}
}
}
| rust | MIT | 3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb | 2026-01-04T15:39:11.175160Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/fuzz/fuzz_targets/rome_parse_jsx.rs | fuzz/fuzz_targets/rome_parse_jsx.rs | #![cfg_attr(not(feature = "rome_all"), no_main)]
#[path = "rome_common.rs"]
mod rome_common;
use libfuzzer_sys::Corpus;
use rome_js_syntax::JsFileSource;
pub fn do_fuzz(case: &[u8]) -> Corpus {
let parse_type = JsFileSource::jsx();
rome_common::fuzz_js_parser_with_source_type(case, parse_type)
}
#[cfg(not(feature = "rome_all"))]
libfuzzer_sys::fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/fuzz/fuzz_targets/rome_format_d_ts.rs | fuzz/fuzz_targets/rome_format_d_ts.rs | #![cfg_attr(not(feature = "rome_all"), no_main)]
#[path = "rome_common.rs"]
mod rome_common;
use libfuzzer_sys::Corpus;
use rome_js_syntax::JsFileSource;
pub fn do_fuzz(case: &[u8]) -> Corpus {
let parse_type = JsFileSource::d_ts();
rome_common::fuzz_js_formatter_with_source_type(case, parse_type)
}
#[cfg(not(feature = "rome_all"))]
libfuzzer_sys::fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/fuzz/fuzz_targets/rome_format_json.rs | fuzz/fuzz_targets/rome_format_json.rs | #![cfg_attr(not(feature = "rome_all"), no_main)]
#[path = "rome_common.rs"]
mod rome_common;
use libfuzzer_sys::Corpus;
pub fn do_fuzz(case: &[u8]) -> Corpus {
rome_common::fuzz_json_formatter(case)
}
#[cfg(not(feature = "rome_all"))]
libfuzzer_sys::fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/fuzz/fuzz_targets/rome_format_all.rs | fuzz/fuzz_targets/rome_format_all.rs | #![no_main]
mod rome_format_d_ts;
mod rome_format_jsx;
mod rome_format_module;
mod rome_format_script;
mod rome_format_tsx;
mod rome_format_typescript;
use libfuzzer_sys::{fuzz_target, Corpus};
fn do_fuzz(data: &[u8]) -> Corpus {
let mut keep = Corpus::Reject;
if let Corpus::Keep = rome_format_d_ts::do_fuzz(data) {
keep = Corpus::Keep;
}
if let Corpus::Keep = rome_format_jsx::do_fuzz(data) {
keep = Corpus::Keep;
}
if let Corpus::Keep = rome_format_module::do_fuzz(data) {
keep = Corpus::Keep;
}
if let Corpus::Keep = rome_format_script::do_fuzz(data) {
keep = Corpus::Keep;
}
if let Corpus::Keep = rome_format_tsx::do_fuzz(data) {
keep = Corpus::Keep;
}
if let Corpus::Keep = rome_format_typescript::do_fuzz(data) {
keep = Corpus::Keep;
}
keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/fuzz/fuzz_targets/rome_parse_typescript.rs | fuzz/fuzz_targets/rome_parse_typescript.rs | #![cfg_attr(not(feature = "rome_all"), no_main)]
#[path = "rome_common.rs"]
mod rome_common;
use libfuzzer_sys::Corpus;
use rome_js_syntax::JsFileSource;
pub fn do_fuzz(case: &[u8]) -> Corpus {
let parse_type = JsFileSource::ts();
rome_common::fuzz_js_parser_with_source_type(case, parse_type)
}
#[cfg(not(feature = "rome_all"))]
libfuzzer_sys::fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.